Use of org.apache.hyracks.algebricks.runtime.operators.std.RunningAggregateRuntimeFactory in project asterixdb by Apache.
From the class RunningAggregatePOperator, method contributeRuntimeOperator:
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RunningAggregateOperator ragg = (RunningAggregateOperator) op;
    List<LogicalVariable> variables = ragg.getVariables();
    List<Mutable<ILogicalExpression>> expressions = ragg.getExpressions();
    // Map each variable produced by the running aggregate to its column in the output schema.
    int[] outColumns = new int[variables.size()];
    for (int i = 0; i < outColumns.length; i++) {
        outColumns[i] = opSchema.findVariable(variables.get(i));
    }
    // Compile each stateful function call into a running-aggregate evaluator factory.
    IRunningAggregateEvaluatorFactory[] runningAggFuns = new IRunningAggregateEvaluatorFactory[expressions.size()];
    IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
    for (int i = 0; i < runningAggFuns.length; i++) {
        StatefulFunctionCallExpression expr = (StatefulFunctionCallExpression) expressions.get(i).getValue();
        runningAggFuns[i] = expressionRuntimeProvider.createRunningAggregateFunctionFactory(expr,
                context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas, context);
    }
    // TODO push projections into the operator
    int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
    RunningAggregateRuntimeFactory runtime =
            new RunningAggregateRuntimeFactory(outColumns, runningAggFuns, projectionList);
    // contribute one Asterix framewriter
    RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    builder.contributeMicroOperator(ragg, runtime, recDesc);
    // and contribute one edge from its child
    ILogicalOperator src = ragg.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, ragg, 0);
}
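For context, each entry of runningAggFuns is an IRunningAggregateEvaluatorFactory whose evaluator is initialized once and then stepped once per incoming tuple. Below is a minimal sketch of a running-count evaluator, the same behavior that TupleCountRunningAggregateFunctionFactory exercises in the test further down. The class name RunningCountEvaluatorFactory is hypothetical, and the exact factory-method signature and checked exceptions vary across Algebricks versions, so treat this as an assumption-laden illustration rather than code from the project.

import java.io.IOException;

import org.apache.hyracks.algebricks.runtime.base.IRunningAggregateEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IRunningAggregateEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

// Hypothetical example, not part of asterixdb: a running tuple count.
public class RunningCountEvaluatorFactory implements IRunningAggregateEvaluatorFactory {
    private static final long serialVersionUID = 1L;

    @Override
    public IRunningAggregateEvaluator createRunningAggregateEvaluator(IHyracksTaskContext ctx)
            throws HyracksDataException {
        final ArrayBackedValueStorage storage = new ArrayBackedValueStorage();
        return new IRunningAggregateEvaluator() {
            private int count;

            @Override
            public void init() {
                count = 0; // called once before the first tuple
            }

            @Override
            public void step(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                ++count; // one step per incoming tuple
                storage.reset();
                try {
                    storage.getDataOutput().writeInt(count); // serialize the running value
                } catch (IOException e) {
                    throw HyracksDataException.create(e);
                }
                result.set(storage); // the runtime copies this into the evaluator's output column
            }
        };
    }
}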
Use of org.apache.hyracks.algebricks.runtime.operators.std.RunningAggregateRuntimeFactory in project asterixdb by Apache.
From the class PushRuntimeTest, method etsUnnestRunningaggregateWrite:
@Test
public void etsUnnestRunningaggregateWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    // Source: an empty-tuple source feeding an unnest that emits 100, 200, 300.
    EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
    UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
    RecordDescriptor unnestDesc =
            new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    // Running aggregate: write the tuple count into column 1; the projection { 0, 1 } keeps both columns.
    RunningAggregateRuntimeFactory ragg = new RunningAggregateRuntimeFactory(new int[] { 1 },
            new IRunningAggregateEvaluatorFactory[] { new TupleCountRunningAggregateFunctionFactory() },
            new int[] { 0, 1 });
    RecordDescriptor raggDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    // Sink: print only column 1 (the running count) to a file.
    String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestRunningaggregateWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 1 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile,
            PrinterBasedWriterFactory.INSTANCE, raggDesc);
    // Chain all four push runtimes inside a single meta operator and run the job.
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, unnest, ragg, writer },
            new RecordDescriptor[] { etsDesc, unnestDesc, raggDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
            new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("123", buf.toString());
    outFile.delete();
}
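Why the assertion expects "123": the unnest emits three one-column tuples, the running aggregate appends the tuple count as column 1, and the writer prints only column 1 for each tuple with no separator. A trace of the three tuples under these settings:

// col 0 (unnest value) | col 1 (running count) | writer output
// 100                  | 1                     | "1"
// 200                  | 2                     | "2"
// 300                  | 3                     | "3"
// concatenated file contents: "123"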