Use of org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory in project asterixdb by apache.
The createUnnestingEvaluatorFactory method of the RangeDescriptor class.
@Override
public IUnnestingEvaluatorFactory createUnnestingEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IUnnestingEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IUnnestingEvaluator createUnnestingEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IUnnestingEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                @SuppressWarnings("rawtypes")
                private ISerializerDeserializer serde =
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
                private IPointable inputVal = new VoidPointable();
                private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
                private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
                private AMutableInt64 aInt64 = new AMutableInt64(0);
                private long current;
                private long max;

                @Override
                public void init(IFrameTupleReference tuple) throws HyracksDataException {
                    eval0.evaluate(tuple, inputVal);
                    current = ATypeHierarchy.getLongValue(getIdentifier().getName(), 0, inputVal.getByteArray(),
                            inputVal.getStartOffset());
                    eval1.evaluate(tuple, inputVal);
                    max = ATypeHierarchy.getLongValue(getIdentifier().getName(), 1, inputVal.getByteArray(),
                            inputVal.getStartOffset());
                }

                @SuppressWarnings("unchecked")
                @Override
                public boolean step(IPointable result) throws HyracksDataException {
                    if (current > max) {
                        return false;
                    }
                    aInt64.setValue(current);
                    resultStorage.reset();
                    serde.serialize(aInt64, resultStorage.getDataOutput());
                    result.set(resultStorage);
                    current++;
                    return true;
                }
            };
        }
    };
}
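The evaluator above follows the IUnnestingEvaluator contract: init is called once per input tuple to bind the range bounds, and step is called repeatedly until it returns false. A minimal driver sketch of that calling convention, assuming an evaluator and tuple supplied by the surrounding runtime (the helper name is illustrative, not AsterixDB code):

static void drainRange(IUnnestingEvaluator evaluator, IFrameTupleReference tuple) throws HyracksDataException {
    IPointable item = new VoidPointable();
    evaluator.init(tuple); // binds "current" and "max" from the two argument evaluators
    while (evaluator.step(item)) {
        // each iteration leaves one serialized AINT64 in 'item'; consume it here
    }
}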
Use of org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory in project asterixdb by apache.
The createUnnestingEvaluatorFactory method of the SubsetCollectionDescriptor class.
@Override
public IUnnestingEvaluatorFactory createUnnestingEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IUnnestingEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IUnnestingEvaluator createUnnestingEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
            return new IUnnestingEvaluator() {

                private IPointable inputVal = new VoidPointable();
                private IScalarEvaluator evalList = args[0].createScalarEvaluator(ctx);
                private IScalarEvaluator evalStart = args[1].createScalarEvaluator(ctx);
                private IScalarEvaluator evalLen = args[2].createScalarEvaluator(ctx);
                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private int numItems;
                private int numItemsMax;
                private int posStart;
                private int posCrt;
                private ATypeTag itemTag;
                private boolean selfDescList = false;
                private boolean metUnknown = false;

                @Override
                public void init(IFrameTupleReference tuple) throws HyracksDataException {
                    try {
                        evalStart.evaluate(tuple, inputVal);
                        posStart = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 0, inputVal.getByteArray(),
                                inputVal.getStartOffset());
                        evalLen.evaluate(tuple, inputVal);
                        numItems = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 1, inputVal.getByteArray(),
                                inputVal.getStartOffset());
                        evalList.evaluate(tuple, inputVal);
                        byte[] serList = inputVal.getByteArray();
                        int offset = inputVal.getStartOffset();
                        metUnknown = false;
                        byte typeTag = serList[offset];
                        if (typeTag == ATypeTag.SERIALIZED_MISSING_TYPE_TAG
                                || typeTag == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
                            metUnknown = true;
                            return;
                        }
                        if (typeTag != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
                                && typeTag != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
                            throw new RuntimeDataException(ErrorCode.COERCION, getIdentifier());
                        }
                        if (typeTag == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
                            numItemsMax = AOrderedListSerializerDeserializer.getNumberOfItems(serList, offset);
                        } else {
                            numItemsMax = AUnorderedListSerializerDeserializer.getNumberOfItems(serList, offset);
                        }
                        itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[offset + 1]);
                        if (itemTag == ATypeTag.ANY) {
                            selfDescList = true;
                        }
                        posCrt = posStart;
                    } catch (IOException e) {
                        throw new HyracksDataException(e);
                    }
                }

                @Override
                public boolean step(IPointable result) throws HyracksDataException {
                    if (!metUnknown && posCrt < posStart + numItems && posCrt < numItemsMax) {
                        resultStorage.reset();
                        byte[] serList = inputVal.getByteArray();
                        int offset = inputVal.getStartOffset();
                        int itemLength = 0;
                        try {
                            int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, offset, posCrt);
                            if (selfDescList) {
                                itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[itemOffset]);
                            }
                            itemLength = NonTaggedFormatUtil.getFieldValueLength(serList, itemOffset, itemTag, selfDescList);
                            if (!selfDescList) {
                                resultStorage.getDataOutput().writeByte(itemTag.serialize());
                            }
                            resultStorage.getDataOutput().write(serList, itemOffset, itemLength + (!selfDescList ? 0 : 1));
                        } catch (IOException e) {
                            throw new HyracksDataException(e);
                        } catch (AsterixException e) {
                            throw new HyracksDataException(e);
                        }
                        result.set(resultStorage);
                        ++posCrt;
                        return true;
                    }
                    return false;
                }
            };
        }
    };
}
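The step method above only produces items while three conditions hold. A plain-Java restatement of that bound check may make the clipping behavior easier to see (hypothetical helper, not part of the class; it mirrors the condition in step exactly):

static boolean hasNext(boolean metUnknown, int posCrt, int posStart, int numItems, int numItemsMax) {
    // items are produced for positions posStart .. posStart + numItems - 1,
    // clipped to the actual list length, and never once a NULL/MISSING list was seen in init
    return !metUnknown && posCrt < posStart + numItems && posCrt < numItemsMax;
}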
Use of org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory in project asterixdb by apache.
The contributeRuntimeOperator method of the AbstractUnnestPOperator class.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    AbstractUnnestNonMapOperator unnest = (AbstractUnnestNonMapOperator) op;
    int outCol = opSchema.findVariable(unnest.getVariable());
    ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
    IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
    boolean exit = false;
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
        exit = true;
    } else {
        AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) unnestExpr;
        if (fce.getKind() != FunctionKind.UNNEST) {
            exit = true;
        }
    }
    if (exit) {
        throw new AlgebricksException("Unnest expression " + unnestExpr + " is not an unnesting function call.");
    }
    UnnestingFunctionCallExpression agg = (UnnestingFunctionCallExpression) unnestExpr;
    IUnnestingEvaluatorFactory unnestingFactory = expressionRuntimeProvider.createUnnestingFunctionFactory(agg,
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas, context);
    int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
    UnnestRuntimeFactory unnestRuntime = new UnnestRuntimeFactory(outCol, unnestingFactory, projectionList,
            unnest.getPositionWriter(), leftOuter, context.getMissingWriterFactory());
    RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    builder.contributeMicroOperator(unnest, unnestRuntime, recDesc);
    ILogicalOperator src = unnest.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, unnest, 0);
}
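The validation at the top of the method can be read as a single predicate: the unnest expression must be a function call whose kind is UNNEST. A hedged restatement as a standalone helper, using the same Algebricks types as above (the helper name is illustrative, not part of the class):

private static void checkUnnestExpression(ILogicalExpression unnestExpr) throws AlgebricksException {
    // only an unnesting function call may feed an unnest operator
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL
            || ((AbstractFunctionCallExpression) unnestExpr).getKind() != FunctionKind.UNNEST) {
        throw new AlgebricksException("Unnest expression " + unnestExpr + " is not an unnesting function call.");
    }
}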
Use of org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory in project asterixdb by apache.
The etsUnnestWrite method of the PushRuntimeTest class.
@Test
public void etsUnnestWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
    UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
    RecordDescriptor unnestDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
            unnestDesc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, unnest, writer }, new RecordDescriptor[] { etsDesc, unnestDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
            new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("100200300", buf.toString());
    outFile.delete();
}
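The test relies on IntArrayUnnester, whose source is not shown here. Below is a hedged sketch of what such a factory could look like, built only from the IUnnestingEvaluatorFactory and IUnnestingEvaluator signatures used in the snippets above; the class name and details are illustrative, not the actual test helper.

public class IntArrayUnnesterSketch implements IUnnestingEvaluatorFactory {

    private static final long serialVersionUID = 1L;

    private final int[] values;

    public IntArrayUnnesterSketch(int[] values) {
        this.values = values;
    }

    @Override
    public IUnnestingEvaluator createUnnestingEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
        return new IUnnestingEvaluator() {

            private final ArrayBackedValueStorage storage = new ArrayBackedValueStorage();
            private int pos;

            @Override
            public void init(IFrameTupleReference tuple) {
                pos = 0; // restart the sequence for every incoming tuple
            }

            @Override
            public boolean step(IPointable result) throws HyracksDataException {
                if (pos >= values.length) {
                    return false;
                }
                storage.reset();
                // serialize the next int so it matches the IntegerSerializerDeserializer-based unnestDesc
                IntegerSerializerDeserializer.INSTANCE.serialize(values[pos++], storage.getDataOutput());
                result.set(storage);
                return true;
            }
        };
    }
}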
Use of org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory in project asterixdb by apache.
The etsUnnestRunningaggregateWrite method of the PushRuntimeTest class.
@Test
public void etsUnnestRunningaggregateWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
    UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
    RecordDescriptor unnestDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    RunningAggregateRuntimeFactory ragg = new RunningAggregateRuntimeFactory(new int[] { 1 },
            new IRunningAggregateEvaluatorFactory[] { new TupleCountRunningAggregateFunctionFactory() },
            new int[] { 0, 1 });
    RecordDescriptor raggDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestRunningaggregateWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 1 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
            raggDesc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, unnest, ragg, writer },
            new RecordDescriptor[] { etsDesc, unnestDesc, raggDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
            new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("123", buf.toString());
    outFile.delete();
}
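The expected output follows from the projection: the unnest produces tuples for 100, 200, 300, the tuple-count running aggregate appends a second column with the values 1, 2, 3, and only that column (index 1) is written, hence the "123" assertion. A plain-Java sanity check of that expectation (illustrative only, not framework code):

@Test
public void expectedRunningAggregateOutput() {
    int[] unnested = { 100, 200, 300 };
    StringBuilder expected = new StringBuilder();
    for (int i = 0; i < unnested.length; i++) {
        expected.append(i + 1); // running tuple count assigned to the i-th unnested value
    }
    Assert.assertEquals("123", expected.toString());
}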