Search in sources:

Example 1 with NestedTupleSourceRuntime

Use of org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime in project asterixdb by apache.

The class NestedPlansAccumulatingAggregatorFactory, method createAggregator:

@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDesc, RecordDescriptor outRecordDescriptor, int[] keys, int[] partialKeys) throws HyracksDataException {
    final AggregatorOutput outputWriter = new AggregatorOutput(subplans, keyFieldIdx.length, decorFieldIdx.length);
    final NestedTupleSourceRuntime[] pipelines = new NestedTupleSourceRuntime[subplans.length];
    for (int i = 0; i < subplans.length; i++) {
        pipelines[i] = (NestedTupleSourceRuntime) assemblePipeline(subplans[i], outputWriter, ctx);
    }
    return new IAggregatorDescriptor() {

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            ArrayTupleBuilder tb = outputWriter.getTupleBuilder();
            tb.reset();
            for (int i = 0; i < keyFieldIdx.length; ++i) {
                tb.addField(accessor, tIndex, keyFieldIdx[i]);
            }
            for (int i = 0; i < decorFieldIdx.length; ++i) {
                tb.addField(accessor, tIndex, decorFieldIdx[i]);
            }
            // open the nested-plan pipelines for this group
            for (int i = 0; i < pipelines.length; ++i) {
                pipelines[i].open();
            }
            // aggregate the first tuple
            for (int i = 0; i < pipelines.length; i++) {
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; i++) {
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            // closing each nested plan flushes its result into outputWriter's tuple builder
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].close();
            }
            // outputWriter.writeTuple(appender);
            tupleBuilder.reset();
            ArrayTupleBuilder tb = outputWriter.getTupleBuilder();
            byte[] data = tb.getByteArray();
            int[] fieldEnds = tb.getFieldEndOffsets();
            int start = 0;
            int offset;
            // copy the assembled fields (keys, decors, then aggregate results) into the caller's tuple builder
            for (int i = 0; i < fieldEnds.length; i++) {
                if (i > 0) {
                    start = fieldEnds[i - 1];
                }
                offset = fieldEnds[i] - start;
                tupleBuilder.addField(data, start, offset);
            }
            return true;
        }

        @Override
        public AggregateState createAggregateStates() {
            return new AggregateState();
        }

        @Override
        public void reset() {
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            throw new IllegalStateException("this method should not be called");
        }

        @Override
        public void close() {
        }
    };
}
Also used: AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState), NestedTupleSourceRuntime (org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder)
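
Each nested subplan above is assembled into a pipeline whose head is a NestedTupleSourceRuntime, and the aggregator drives that head through open() in init, writeTuple() for every tuple of the group, and close() in outputFinalResult, at which point the subplan's result lands in outputWriter's tuple builder. The following is a minimal sketch that isolates this lifecycle; it is not AsterixDB source, and the class name, method name, and the assumption that the whole group sits in a single frame accessor are illustrative.

import java.nio.ByteBuffer;

import org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.exceptions.HyracksDataException;

final class NestedPipelineDriverSketch {

    // Drives one nested-plan pipeline over a group's tuples, mirroring the
    // init() / aggregate() / outputFinalResult() calls in the example above.
    // For illustration the whole group is assumed to sit in one frame accessor.
    static void runGroup(NestedTupleSourceRuntime head, IFrameTupleAccessor groupAccessor)
            throws HyracksDataException {
        // init(): open the subplan once per group
        head.open();
        ByteBuffer frame = groupAccessor.getBuffer();
        for (int t = 0; t < groupAccessor.getTupleCount(); t++) {
            // init()/aggregate(): push each grouped tuple into the subplan
            head.writeTuple(frame, t);
        }
        // outputFinalResult(): closing flushes the subplan's result to the output writer
        head.close();
    }
}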

Example 2 with NestedTupleSourceRuntime

Use of org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime in project asterixdb by apache.

The class NestedPlansRunningAggregatorFactory, method createAggregator:

/* (non-Javadoc)
     * @see org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory#createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext, org.apache.hyracks.api.dataflow.value.RecordDescriptor, org.apache.hyracks.api.dataflow.value.RecordDescriptor, int[], int[])
     */
@Override
public IAggregatorDescriptor createAggregator(final IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor, int[] keyFields, int[] keyFieldsInPartialResults, final IFrameWriter writer) throws HyracksDataException {
    final RunningAggregatorOutput outputWriter = new RunningAggregatorOutput(ctx, subplans, keyFieldIdx.length, decorFieldIdx.length, writer);
    final NestedTupleSourceRuntime[] pipelines = new NestedTupleSourceRuntime[subplans.length];
    for (int i = 0; i < subplans.length; i++) {
        pipelines[i] = (NestedTupleSourceRuntime) assemblePipeline(subplans[i], outputWriter, ctx);
    }
    final ArrayTupleBuilder gbyTb = outputWriter.getGroupByTupleBuilder();
    return new IAggregatorDescriptor() {

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; ++i) {
                pipelines[i].open();
            }
            gbyTb.reset();
            for (int i = 0; i < keyFieldIdx.length; ++i) {
                gbyTb.addField(accessor, tIndex, keyFieldIdx[i]);
            }
            for (int i = 0; i < decorFieldIdx.length; ++i) {
                gbyTb.addField(accessor, tIndex, decorFieldIdx[i]);
            }
            // aggregate the first tuple
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; ++i) {
                outputWriter.setInputIdx(i);
                pipelines[i].close();
            }
            // nothing is appended to tupleBuilder; the nested plans have already
            // emitted their results through outputWriter to the downstream writer
            return false;
        }

        @Override
        public AggregateState createAggregateStates() {
            return new AggregateState();
        }

        @Override
        public void reset() {
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            throw new IllegalStateException("this method should not be called");
        }

        @Override
        public void close() {
        }
    };
}
Also used: AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState), NestedTupleSourceRuntime (org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor)
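
The running variant wires RunningAggregatorOutput to the downstream IFrameWriter passed into createAggregator, so results can be forwarded as the nested plans close, and outputFinalResult simply closes the pipelines and returns false instead of filling the caller's tuple builder. The detail worth isolating is the fan-out used in init and aggregate: the shared output is told which subplan is about to produce output before each writeTuple call. A minimal sketch, assuming a hypothetical IndexedOutput stand-in for the factory's RunningAggregatorOutput:

import java.nio.ByteBuffer;

import org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime;
import org.apache.hyracks.api.exceptions.HyracksDataException;

final class FanOutSketch {

    // Stand-in for RunningAggregatorOutput; setInputIdx is the only call the pattern needs.
    interface IndexedOutput {
        void setInputIdx(int i);
    }

    // Pushes one incoming tuple to every nested-plan pipeline, tagging the shared
    // output with the index of the subplan whose results it is about to receive.
    static void fanOut(NestedTupleSourceRuntime[] pipelines, IndexedOutput output,
            ByteBuffer frame, int tIndex) throws HyracksDataException {
        for (int i = 0; i < pipelines.length; i++) {
            output.setInputIdx(i);
            pipelines[i].writeTuple(frame, tIndex);
        }
    }
}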

Example 3 with NestedTupleSourceRuntime

Use of org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime in project asterixdb by apache.

The class SubplanRuntimeFactory, method createOneOutputPushRuntime:

@Override
public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    RecordDescriptor pipelineOutputRecordDescriptor = null;
    final PipelineAssembler pa = new PipelineAssembler(pipeline, 1, 1, inputRecordDesc, pipelineOutputRecordDescriptor);
    final IMissingWriter[] nullWriters = new IMissingWriter[missingWriterFactories.length];
    for (int i = 0; i < missingWriterFactories.length; i++) {
        nullWriters[i] = missingWriterFactories[i].createMissingWriter();
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        /**
             * Computes the outer product between a given tuple and the frames
             * passed.
             */
        class TupleOuterProduct implements IFrameWriter {

            private boolean smthWasWritten = false;

            private FrameTupleAccessor ta = new FrameTupleAccessor(pipeline.getRecordDescriptors()[pipeline.getRecordDescriptors().length - 1]);

            private ArrayTupleBuilder tb = new ArrayTupleBuilder(nullWriters.length);

            @Override
            public void open() throws HyracksDataException {
                smthWasWritten = false;
            }

            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                ta.reset(buffer);
                int nTuple = ta.getTupleCount();
                for (int t = 0; t < nTuple; t++) {
                    appendConcat(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), ta, t);
                }
                smthWasWritten = true;
            }

            @Override
            public void close() throws HyracksDataException {
                if (!smthWasWritten) {
                    // the case when we need to write nulls
                    appendNullsToTuple();
                    appendToFrameFromTupleBuilder(tb);
                }
            }

            @Override
            public void fail() throws HyracksDataException {
                writer.fail();
            }

            private void appendNullsToTuple() throws HyracksDataException {
                tb.reset();
                int n0 = tRef.getFieldCount();
                for (int f = 0; f < n0; f++) {
                    tb.addField(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), f);
                }
                DataOutput dos = tb.getDataOutput();
                for (int i = 0; i < nullWriters.length; i++) {
                    nullWriters[i].writeMissing(dos);
                    tb.addFieldEndOffset();
                }
            }
        }

        IFrameWriter endPipe = new TupleOuterProduct();

        NestedTupleSourceRuntime startOfPipeline = (NestedTupleSourceRuntime) pa.assemblePipeline(endPipe, ctx);

        boolean first = true;

        @Override
        public void open() throws HyracksDataException {
            writer.open();
            if (first) {
                first = false;
                initAccessAppendRef(ctx);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                startOfPipeline.writeTuple(buffer, t);
                try {
                    startOfPipeline.open();
                } catch (Exception e) {
                    startOfPipeline.fail();
                    throw e;
                } finally {
                    startOfPipeline.close();
                }
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            writer.flush();
        }
    };
}
Also used: DataOutput (java.io.DataOutput), IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), AbstractOneInputOneOutputOneFramePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime), NestedTupleSourceRuntime (org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), ByteBuffer (java.nio.ByteBuffer), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException), IMissingWriter (org.apache.hyracks.api.dataflow.value.IMissingWriter), FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor)
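
Two details of this subplan runtime are easy to miss: the outer tuple is handed to the nested pipeline with writeTuple() before open() is called, and TupleOuterProduct.close() appends missing values when the subplan produced nothing for that tuple, which yields left-outer semantics. The sketch below isolates the per-tuple drive from nextFrame; it is illustrative rather than AsterixDB source, and the class and method names are assumptions.

import java.nio.ByteBuffer;

import org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime;
import org.apache.hyracks.api.exceptions.HyracksDataException;

final class SubplanDriveSketch {

    // Runs the nested plan for a single outer tuple, as done per tuple in nextFrame above.
    static void runSubplanForTuple(NestedTupleSourceRuntime startOfPipeline, ByteBuffer frame, int t)
            throws HyracksDataException {
        // buffer the current outer tuple into the nested tuple source
        startOfPipeline.writeTuple(frame, t);
        try {
            // execute the nested plan over the buffered tuple
            startOfPipeline.open();
        } catch (Exception e) {
            startOfPipeline.fail();
            throw e;
        } finally {
            // flushing on close is what ultimately reaches TupleOuterProduct,
            // which appends missing values if the subplan produced no output
            startOfPipeline.close();
        }
    }
}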

Aggregations

NestedTupleSourceRuntime (org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime): 3 usages
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 3 usages
IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor): 2 usages
AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState): 2 usages
IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor): 2 usages
DataOutput (java.io.DataOutput): 1 usage
ByteBuffer (java.nio.ByteBuffer): 1 usage
NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException): 1 usage
AbstractOneInputOneOutputOneFramePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime): 1 usage
IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter): 1 usage
IMissingWriter (org.apache.hyracks.api.dataflow.value.IMissingWriter): 1 usage
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 1 usage
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 1 usage
FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor): 1 usage