
Example 86 with ArrayTupleBuilder

Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in the Apache asterixdb project.

From the class EmptyTupleSourceRuntimeFactory, method createPushRuntime:

@Override
public IPushRuntime createPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    return new AbstractOneInputSourcePushRuntime() {

        private final ArrayTupleBuilder tb = new ArrayTupleBuilder(0);

        private final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));

        @Override
        public void open() throws HyracksDataException {
            writer.open();
            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                throw new IllegalStateException();
            }
            appender.write(writer, true);
        }

        @Override
        public void close() throws HyracksDataException {
            writer.close();
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
Also used: AbstractOneInputSourcePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputSourcePushRuntime), FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame)
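
The pattern above reduces to: build a zero-field tuple once with ArrayTupleBuilder, append it to a FrameTupleAppender backed by a VSizeFrame, and push the frame to the downstream writer. Below is a minimal standalone sketch of that same pattern; the helper name pushEmptyTuple is made up, and the IHyracksTaskContext and IFrameWriter are assumed to be supplied by the caller, but every call is one already used in the example.

private static void pushEmptyTuple(IHyracksTaskContext ctx, IFrameWriter writer) throws HyracksDataException {
    // Builder for tuples with zero fields: nothing to add before appending.
    ArrayTupleBuilder tb = new ArrayTupleBuilder(0);
    FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
    writer.open();
    try {
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
            // An empty tuple always fits into an empty frame, so this should not happen.
            throw new IllegalStateException();
        }
        // Push the current frame to the downstream writer, as in the example above.
        appender.write(writer, true);
    } finally {
        writer.close();
    }
}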

Example 87 with ArrayTupleBuilder

Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in the Apache asterixdb project.

From the class RunningAggregateRuntimeFactory, method createOneOutputPushRuntime:

@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    final int[] projectionToOutColumns = new int[projectionList.length];
    for (int j = 0; j < projectionList.length; j++) {
        projectionToOutColumns[j] = Arrays.binarySearch(outColumns, projectionList[j]);
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private final IPointable p = VoidPointable.FACTORY.createPointable();

        private final IRunningAggregateEvaluator[] raggs = new IRunningAggregateEvaluator[runningAggregates.length];

        private final ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(projectionList.length);

        private boolean first = true;

        private boolean isOpen = false;

        @Override
        public void open() throws HyracksDataException {
            initAccessAppendRef(ctx);
            if (first) {
                first = false;
                int n = runningAggregates.length;
                for (int i = 0; i < n; i++) {
                    raggs[i] = runningAggregates[i].createRunningAggregateEvaluator(ctx);
                }
            }
            for (int i = 0; i < runningAggregates.length; i++) {
                raggs[i].init();
            }
            isOpen = true;
            writer.open();
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                super.close();
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                super.fail();
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                produceTuple(tupleBuilder, tAccess, t, tRef);
                appendToFrameFromTupleBuilder(tupleBuilder);
            }
        }

        private void produceTuple(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex, FrameTupleReference tupleRef) throws HyracksDataException {
            tb.reset();
            for (int f = 0; f < projectionList.length; f++) {
                int k = projectionToOutColumns[f];
                if (k >= 0) {
                    raggs[k].step(tupleRef, p);
                    tb.addField(p.getByteArray(), p.getStartOffset(), p.getLength());
                } else {
                    tb.addField(accessor, tIndex, projectionList[f]);
                }
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
Also used: AbstractOneInputOneOutputOneFramePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), FrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), IPointable (org.apache.hyracks.data.std.api.IPointable), ByteBuffer (java.nio.ByteBuffer)
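
The only subtle step above is the projection-to-aggregate mapping built with java.util.Arrays.binarySearch: a non-negative entry means the projected column is produced by a running aggregate, a negative entry means it is copied from the input tuple (the k >= 0 test in produceTuple). A small worked example with made-up column numbers; note that outColumns must already be sorted for binarySearch to be valid.

// Hypothetical column numbers, only to illustrate the mapping.
int[] outColumns = {3, 5};          // columns filled in by the running aggregates
int[] projectionList = {0, 3, 5};   // columns the operator actually emits
int[] projectionToOutColumns = new int[projectionList.length];
for (int j = 0; j < projectionList.length; j++) {
    projectionToOutColumns[j] = Arrays.binarySearch(outColumns, projectionList[j]);
}
// Result: {-1, 0, 1}
//   column 0 -> -1 : not an aggregate output, copied from the input tuple
//   column 3 ->  0 : value comes from raggs[0]
//   column 5 ->  1 : value comes from raggs[1]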

Example 88 with ArrayTupleBuilder

Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in the Apache asterixdb project.

From the class SubplanRuntimeFactory, method createOneOutputPushRuntime:

@Override
public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    RecordDescriptor pipelineOutputRecordDescriptor = null;
    final PipelineAssembler pa = new PipelineAssembler(pipeline, 1, 1, inputRecordDesc, pipelineOutputRecordDescriptor);
    final IMissingWriter[] nullWriters = new IMissingWriter[missingWriterFactories.length];
    for (int i = 0; i < missingWriterFactories.length; i++) {
        nullWriters[i] = missingWriterFactories[i].createMissingWriter();
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        /**
         * Computes the outer product between a given tuple and the frames
         * passed.
         */
        class TupleOuterProduct implements IFrameWriter {

            private boolean smthWasWritten = false;

            private FrameTupleAccessor ta = new FrameTupleAccessor(pipeline.getRecordDescriptors()[pipeline.getRecordDescriptors().length - 1]);

            private ArrayTupleBuilder tb = new ArrayTupleBuilder(nullWriters.length);

            @Override
            public void open() throws HyracksDataException {
                smthWasWritten = false;
            }

            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                ta.reset(buffer);
                int nTuple = ta.getTupleCount();
                for (int t = 0; t < nTuple; t++) {
                    appendConcat(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), ta, t);
                }
                smthWasWritten = true;
            }

            @Override
            public void close() throws HyracksDataException {
                if (!smthWasWritten) {
                    // the case when we need to write nulls
                    appendNullsToTuple();
                    appendToFrameFromTupleBuilder(tb);
                }
            }

            @Override
            public void fail() throws HyracksDataException {
                writer.fail();
            }

            private void appendNullsToTuple() throws HyracksDataException {
                tb.reset();
                int n0 = tRef.getFieldCount();
                for (int f = 0; f < n0; f++) {
                    tb.addField(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), f);
                }
                DataOutput dos = tb.getDataOutput();
                for (int i = 0; i < nullWriters.length; i++) {
                    nullWriters[i].writeMissing(dos);
                    tb.addFieldEndOffset();
                }
            }
        }

        IFrameWriter endPipe = new TupleOuterProduct();

        NestedTupleSourceRuntime startOfPipeline = (NestedTupleSourceRuntime) pa.assemblePipeline(endPipe, ctx);

        boolean first = true;

        @Override
        public void open() throws HyracksDataException {
            writer.open();
            if (first) {
                first = false;
                initAccessAppendRef(ctx);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                startOfPipeline.writeTuple(buffer, t);
                try {
                    startOfPipeline.open();
                } catch (Exception e) {
                    startOfPipeline.fail();
                    throw e;
                } finally {
                    startOfPipeline.close();
                }
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            writer.flush();
        }
    };
}
Also used: DataOutput (java.io.DataOutput), IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), AbstractOneInputOneOutputOneFramePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime), NestedTupleSourceRuntime (org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory.NestedTupleSourceRuntime), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), ByteBuffer (java.nio.ByteBuffer), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException), IMissingWriter (org.apache.hyracks.api.dataflow.value.IMissingWriter), FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor)
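
appendNullsToTuple shows a third way to put a field into an ArrayTupleBuilder, alongside addField(accessor, tupleIndex, field) and addField(byte[], start, length): serialize directly into the builder's DataOutput and then close the field with addFieldEndOffset(). A hedged sketch of that padding step in isolation; padWithMissing is a made-up name, and the outer tuple reference and missing writers are assumed to come from the surrounding runtime, as in the example above.

private void padWithMissing(ArrayTupleBuilder tb, FrameTupleReference outerTuple,
        IMissingWriter[] missingWriters) throws HyracksDataException {
    tb.reset();
    // Copy every field of the outer tuple unchanged.
    for (int f = 0; f < outerTuple.getFieldCount(); f++) {
        tb.addField(outerTuple.getFrameTupleAccessor(), outerTuple.getTupleIndex(), f);
    }
    // For each missing inner field, serialize a MISSING value straight into the
    // builder's buffer, then mark where the field ends.
    DataOutput dos = tb.getDataOutput();
    for (IMissingWriter missingWriter : missingWriters) {
        missingWriter.writeMissing(dos);
        tb.addFieldEndOffset();
    }
}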

Example 89 with ArrayTupleBuilder

Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in the Apache asterixdb project.

From the class SimpleAlgebricksAccumulatingAggregatorFactory, method createAggregator:

@Override
public IAggregatorDescriptor createAggregator(final IHyracksTaskContext ctx, RecordDescriptor inRecordDesc, RecordDescriptor outRecordDescriptor, int[] aggKeys, int[] partialKeys) throws HyracksDataException {
    return new IAggregatorDescriptor() {

        private FrameTupleReference ftr = new FrameTupleReference();

        private IPointable p = VoidPointable.FACTORY.createPointable();

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            IAggregateEvaluator[] agg = (IAggregateEvaluator[]) state.state;
            // initialize aggregate functions
            for (int i = 0; i < agg.length; i++) {
                agg[i].init();
            }
            ftr.reset(accessor, tIndex);
            for (int i = 0; i < agg.length; i++) {
                agg[i].step(ftr);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            IAggregateEvaluator[] agg = (IAggregateEvaluator[]) state.state;
            ftr.reset(accessor, tIndex);
            for (int i = 0; i < agg.length; i++) {
                agg[i].step(ftr);
            }
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            IAggregateEvaluator[] agg = (IAggregateEvaluator[]) state.state;
            for (int i = 0; i < agg.length; i++) {
                agg[i].finish(p);
                tupleBuilder.addField(p.getByteArray(), p.getStartOffset(), p.getLength());
            }
            return true;
        }

        @Override
        public AggregateState createAggregateStates() throws HyracksDataException {
            IAggregateEvaluator[] agg = new IAggregateEvaluator[aggFactories.length];
            for (int i = 0; i < agg.length; i++) {
                agg[i] = aggFactories[i].createAggregateEvaluator(ctx);
            }
            return new AggregateState(agg);
        }

        @Override
        public void reset() {
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            IAggregateEvaluator[] agg = (IAggregateEvaluator[]) state.state;
            for (int i = 0; i < agg.length; i++) {
                agg[i].finishPartial(p);
                tupleBuilder.addField(p.getByteArray(), p.getStartOffset(), p.getLength());
            }
            return true;
        }

        @Override
        public void close() {
        }
    };
}
Also used: AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), FrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference), IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), IAggregateEvaluator (org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator), IPointable (org.apache.hyracks.data.std.api.IPointable)
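
The aggregator above is essentially a thin adapter around the init / step / finish lifecycle of IAggregateEvaluator, with an IPointable carrying each finished value into the ArrayTupleBuilder. A condensed sketch of that lifecycle for a single group of tuples; aggregateGroup is a made-up helper, and ftr and p are the same reusable FrameTupleReference and IPointable fields used above.

private void aggregateGroup(IAggregateEvaluator[] agg, IFrameTupleAccessor accessor,
        int firstTuple, int lastTuple, ArrayTupleBuilder tupleBuilder) throws HyracksDataException {
    for (IAggregateEvaluator a : agg) {
        a.init();                       // reset per-group state
    }
    for (int t = firstTuple; t <= lastTuple; t++) {
        ftr.reset(accessor, t);
        for (IAggregateEvaluator a : agg) {
            a.step(ftr);                // fold tuple t into every aggregate
        }
    }
    for (IAggregateEvaluator a : agg) {
        a.finish(p);                    // materialize the final value into p
        tupleBuilder.addField(p.getByteArray(), p.getStartOffset(), p.getLength());
    }
}

outputPartialResult in the example follows the same shape, with finishPartial in place of finish.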

Example 90 with ArrayTupleBuilder

Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in the Apache asterixdb project.

From the class UnnestRuntimeFactory, method createOneOutputPushRuntime:

@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(bos);
    if (missingWriterFactory != null) {
        IMissingWriter missingWriter = missingWriterFactory.createMissingWriter();
        missingWriter.writeMissing(output);
    }
    byte[] missingBytes = bos.toByteArray();
    int missingBytesLen = bos.size();
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private IPointable p = VoidPointable.FACTORY.createPointable();

        private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(projectionList.length);

        private IUnnestingEvaluator unnest = unnestingFactory.createUnnestingEvaluator(ctx);

        @Override
        public void open() throws HyracksDataException {
            writer.open();
            if (tRef == null) {
                initAccessAppendRef(ctx);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                try {
                    unnest.init(tRef);
                    unnesting(t);
                } catch (IOException ae) {
                    throw new HyracksDataException(ae);
                }
            }
        }

        private void unnesting(int t) throws IOException {
            // Assumes that when unnesting the tuple, each step() call for each element
            // in the tuple will increase the positionIndex, and the positionIndex will
            // be reset when a new tuple is to be processed.
            int positionIndex = 1;
            boolean emitted = false;
            do {
                if (!unnest.step(p)) {
                    break;
                }
                writeOutput(t, positionIndex++, false);
                emitted = true;
            } while (true);
            if (leftOuter && !emitted) {
                writeOutput(t, -1, true);
            }
        }

        private void writeOutput(int t, int positionIndex, boolean missing) throws HyracksDataException, IOException {
            if (!unnestColIsProjected && positionWriter == null) {
                appendProjectionToFrame(t, projectionList);
                appendToFrameFromTupleBuilder(tupleBuilder);
                return;
            }
            tupleBuilder.reset();
            for (int f = 0; f < outColPos; f++) {
                tupleBuilder.addField(tAccess, t, f);
            }
            if (unnestColIsProjected) {
                if (missing) {
                    tupleBuilder.addField(missingBytes, 0, missingBytesLen);
                } else {
                    tupleBuilder.addField(p.getByteArray(), p.getStartOffset(), p.getLength());
                }
            }
            for (int f = unnestColIsProjected ? outColPos + 1 : outColPos; f < (positionWriter != null ? projectionList.length - 1 : projectionList.length); f++) {
                tupleBuilder.addField(tAccess, t, f);
            }
            if (positionWriter != null) {
                // Write the positional variable
                if (missing) {
                    tupleBuilder.addField(missingBytes, 0, missingBytesLen);
                } else {
                    positionWriter.write(tupleBuilder.getDataOutput(), positionIndex);
                    tupleBuilder.addFieldEndOffset();
                }
            }
            appendToFrameFromTupleBuilder(tupleBuilder);
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
Also used: DataOutput (java.io.DataOutput), DataOutputStream (java.io.DataOutputStream), AbstractOneInputOneOutputOneFramePushRuntime (org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IPointable (org.apache.hyracks.data.std.api.IPointable), IOException (java.io.IOException), ByteBuffer (java.nio.ByteBuffer), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), IMissingWriter (org.apache.hyracks.api.dataflow.value.IMissingWriter), IUnnestingEvaluator (org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluator)
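
One detail worth keeping from this example: the MISSING value is serialized exactly once, before the push runtime is constructed, and the resulting byte array is reused for every left-outer tuple via tupleBuilder.addField(missingBytes, 0, missingBytesLen), so no per-tuple serialization is needed. A stripped-down sketch of that preparation, using only the calls that appear in the example above.

// Serialize the MISSING marker once, up front.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(bos);
if (missingWriterFactory != null) {
    missingWriterFactory.createMissingWriter().writeMissing(output);
}
byte[] missingBytes = bos.toByteArray();
int missingBytesLen = bos.size();

// Later, whenever a left-outer tuple needs a MISSING field:
//     tupleBuilder.addField(missingBytes, 0, missingBytesLen);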

Aggregations

ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 99
ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference): 45
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 42
Test (org.junit.Test): 40
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 35
DataOutput (java.io.DataOutput): 33
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 25
UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer): 24
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits): 21
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 21
ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex): 18
FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender): 17
ConstantTupleSourceOperatorDescriptor (org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor): 17
VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame): 16
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 16
OneToOneConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor): 16
IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor): 16
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider): 15
BTreeSearchOperatorDescriptor (org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor): 14
IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor): 12