Example 21 with IFrameTupleAccessor

Use of org.apache.hyracks.api.comm.IFrameTupleAccessor in project asterixdb by apache.

The class MultiFieldsAggregatorFactory, method createAggregator: builds an IAggregatorDescriptor that delegates each aggregated field to its own IFieldAggregateDescriptor.

/*
     * (non-Javadoc)
     *
     * @see
     * org.apache.hyracks.dataflow.std.aggregations.IAggregatorDescriptorFactory
     * #createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext,
     * org.apache.hyracks.api.dataflow.value.RecordDescriptor,
     * org.apache.hyracks.api.dataflow.value.RecordDescriptor)
     */
@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor, final int[] keyFields, final int[] keyFieldsInPartialResults) throws HyracksDataException {
    final IFieldAggregateDescriptor[] aggregators = new IFieldAggregateDescriptor[aggregatorFactories.length];
    for (int i = 0; i < aggregators.length; i++) {
        aggregators[i] = aggregatorFactories[i].createAggregator(ctx, inRecordDescriptor, outRecordDescriptor);
    }
    if (this.keys == null) {
        this.keys = keyFields;
    }
    return new IAggregatorDescriptor() {

        @Override
        public void reset() {
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].reset();
            }
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
            for (int i = 0; i < aggregators.length; i++) {
                int fieldOffset = stateAccessor.getFieldStartOffset(tIndex, keys.length + i);
                aggregators[i].outputPartialResult(dos, stateAccessor.getBuffer().array(), fieldOffset + stateAccessor.getFieldSlotsLength() + tupleOffset, ((AggregateState[]) state.state)[i]);
                tupleBuilder.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
            for (int i = 0; i < aggregators.length; i++) {
                if (aggregators[i].needsBinaryState()) {
                    int fieldOffset = stateAccessor.getFieldStartOffset(tIndex, keys.length + i);
                    aggregators[i].outputFinalResult(dos, stateAccessor.getBuffer().array(), tupleOffset + stateAccessor.getFieldSlotsLength() + fieldOffset, ((AggregateState[]) state.state)[i]);
                } else {
                    aggregators[i].outputFinalResult(dos, null, 0, ((AggregateState[]) state.state)[i]);
                }
                tupleBuilder.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].init(accessor, tIndex, dos, ((AggregateState[]) state.state)[i]);
                if (aggregators[i].needsBinaryState()) {
                    tupleBuilder.addFieldEndOffset();
                }
            }
        }

        @Override
        public AggregateState createAggregateStates() {
            AggregateState[] states = new AggregateState[aggregators.length];
            for (int i = 0; i < states.length; i++) {
                states[i] = aggregators[i].createState();
            }
            return new AggregateState(states);
        }

        @Override
        public void close() {
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].close();
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            if (stateAccessor != null) {
                int stateTupleOffset = stateAccessor.getTupleStartOffset(stateTupleIndex);
                int fieldIndex = 0;
                for (int i = 0; i < aggregators.length; i++) {
                    if (aggregators[i].needsBinaryState()) {
                        int stateFieldOffset = stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length + fieldIndex);
                        aggregators[i].aggregate(accessor, tIndex, stateAccessor.getBuffer().array(), stateTupleOffset + stateAccessor.getFieldSlotsLength() + stateFieldOffset, ((AggregateState[]) state.state)[i]);
                        fieldIndex++;
                    } else {
                        aggregators[i].aggregate(accessor, tIndex, null, 0, ((AggregateState[]) state.state)[i]);
                    }
                }
            } else {
                for (int i = 0; i < aggregators.length; i++) {
                    aggregators[i].aggregate(accessor, tIndex, null, 0, ((AggregateState[]) state.state)[i]);
                }
            }
        }
    };
}
Also used: DataOutput (java.io.DataOutput), AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), IFieldAggregateDescriptor (org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptor)
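
Every implementation above locates a field with the same three-part offset expression. As a hedged illustration (a minimal sketch, not part of Hyracks; the class and method names are hypothetical), the arithmetic can be factored into a helper that uses only IFrameTupleAccessor methods shown in these examples:

import org.apache.hyracks.api.comm.IFrameTupleAccessor;

// Hypothetical helper, not part of Hyracks: computes the absolute offset of a field
// inside the frame's backing byte array.
final class FrameOffsets {

    // Mirrors the expression used above: tupleStartOffset + fieldSlotsLength + fieldStartOffset,
    // where getFieldSlotsLength() accounts for the per-tuple field-offset slots stored
    // ahead of the field data.
    static int absoluteFieldStart(IFrameTupleAccessor accessor, int tIdx, int fIdx) {
        int tupleOffset = accessor.getTupleStartOffset(tIdx);
        int fieldStart = accessor.getFieldStartOffset(tIdx, fIdx);
        return tupleOffset + accessor.getFieldSlotsLength() + fieldStart;
    }

    private FrameOffsets() {
    }
}

With such a helper, the call in outputPartialResult above would read the state field at absoluteFieldStart(stateAccessor, tIndex, keys.length + i).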

Example 22 with IFrameTupleAccessor

Use of org.apache.hyracks.api.comm.IFrameTupleAccessor in project asterixdb by apache.

The class AvgFieldMergeAggregatorFactory, method createAggregator: merges partial (sum, count) average states and emits the final result as sum / count.

/*
     * (non-Javadoc)
     *
     * @see org.apache.hyracks.dataflow.std.aggregations.
     * IFieldAggregateDescriptorFactory
     * #createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext,
     * org.apache.hyracks.api.dataflow.value.RecordDescriptor,
     * org.apache.hyracks.api.dataflow.value.RecordDescriptor)
     */
@Override
public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor) throws HyracksDataException {
    return new IFieldAggregateDescriptor() {

        @Override
        public void reset() {
        }

        @Override
        public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum, count;
            if (!useObjectState) {
                sum = IntegerPointable.getInteger(data, offset);
                count = IntegerPointable.getInteger(data, offset + 4);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum = fields[0];
                count = fields[1];
            }
            try {
                fieldOutput.writeInt(sum);
                fieldOutput.writeInt(count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum, count;
            if (!useObjectState) {
                sum = IntegerPointable.getInteger(data, offset);
                count = IntegerPointable.getInteger(data, offset + 4);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum = fields[0];
                count = fields[1];
            }
            try {
                fieldOutput.writeFloat((float) sum / count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void close() {
        // TODO Auto-generated method stub
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum = 0, count = 0;
            int tupleOffset = accessor.getTupleStartOffset(tIndex);
            int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
            sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
            count += 1;
            if (!useObjectState) {
                ByteBuffer buf = ByteBuffer.wrap(data);
                sum += buf.getInt(offset);
                count += buf.getInt(offset + 4);
                buf.putInt(offset, sum);
                buf.putInt(offset + 4, count);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum += fields[0];
                count += fields[1];
                state.state = new Integer[] { sum, count };
            }
        }

        @Override
        public boolean needsObjectState() {
            return useObjectState;
        }

        @Override
        public boolean needsBinaryState() {
            return !useObjectState;
        }

        @Override
        public AggregateState createState() {
            return new AggregateState(new Integer[] { 0, 0 });
        }

        @Override
        public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state) throws HyracksDataException {
            int sum = 0;
            int count = 0;
            int tupleOffset = accessor.getTupleStartOffset(tIndex);
            int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
            sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
            count += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
            if (!useObjectState) {
                try {
                    fieldOutput.writeInt(sum);
                    fieldOutput.writeInt(count);
                } catch (IOException e) {
                    throw new HyracksDataException("I/O exception when initializing the aggregator.");
                }
            } else {
                state.state = new Integer[] { sum, count };
            }
        }
    };
}
Also used: DataOutput (java.io.DataOutput), AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), IOException (java.io.IOException), IFieldAggregateDescriptor (org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptor), ByteBuffer (java.nio.ByteBuffer), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
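
The binary state written by this aggregator is eight bytes: a 4-byte int sum followed by a 4-byte int count, and the final output is (float) sum / count. A minimal sketch (hypothetical helper, standard java.nio only) that decodes such a state the same way the code above does:

import java.nio.ByteBuffer;

// Hypothetical helper: decodes the 8-byte (sum, count) state used by the average aggregator.
final class AvgStateDecoder {

    static float finalAverage(byte[] stateBytes, int offset) {
        ByteBuffer buf = ByteBuffer.wrap(stateBytes);
        int sum = buf.getInt(offset);        // written first by outputPartialResult
        int count = buf.getInt(offset + 4);  // written second
        return (float) sum / count;          // same expression as outputFinalResult
    }

    private AvgStateDecoder() {
    }
}

For example, a partial state holding sum = 10 and count = 4 yields 2.5 from outputFinalResult.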

Example 23 with IFrameTupleAccessor

Use of org.apache.hyracks.api.comm.IFrameTupleAccessor in project asterixdb by apache.

The class ResultSerializerFactoryProvider, method getResultSerializerFactoryProvider: returns a serializer that deserializes each field of a tuple and prints it as one comma-separated line.

public IResultSerializerFactory getResultSerializerFactoryProvider() {
    return new IResultSerializerFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IResultSerializer createResultSerializer(final RecordDescriptor recordDesc, final PrintStream printStream) {
            return new IResultSerializer() {

                private static final long serialVersionUID = 1L;

                ByteBufferInputStream bbis = new ByteBufferInputStream();

                DataInputStream di = new DataInputStream(bbis);

                @Override
                public void init() throws HyracksDataException {
                }

                @Override
                public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
                    int start = tAccess.getTupleStartOffset(tIdx) + tAccess.getFieldSlotsLength();
                    bbis.setByteBuffer(tAccess.getBuffer(), start);
                    Object[] record = new Object[recordDesc.getFieldCount()];
                    for (int i = 0; i < record.length; ++i) {
                        Object instance = recordDesc.getFields()[i].deserialize(di);
                        if (i == 0) {
                            printStream.print(String.valueOf(instance));
                        } else {
                            printStream.print(", " + String.valueOf(instance));
                        }
                    }
                    printStream.println();
                    return true;
                }
            };
        }
    };
}
Also used: PrintStream (java.io.PrintStream), IResultSerializer (org.apache.hyracks.api.dataflow.value.IResultSerializer), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), IResultSerializerFactory (org.apache.hyracks.api.dataflow.value.IResultSerializerFactory), ByteBufferInputStream (org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), DataInputStream (java.io.DataInputStream)
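
The serializer returned above is driven one tuple at a time: init once, then appendTuple for each tuple index of a frame whose accessor has already been reset on the frame's buffer (as in Example 25). A minimal sketch of such a driver, assuming a hypothetical helper class and using only methods that appear in these examples:

import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.dataflow.value.IResultSerializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Hypothetical driver: prints every tuple of one frame through the serializer above,
// producing one comma-separated line per tuple.
final class ResultPrinter {

    static void printFrame(IResultSerializer serializer, IFrameTupleAccessor accessor)
            throws HyracksDataException {
        serializer.init();
        for (int tIdx = 0; tIdx < accessor.getTupleCount(); tIdx++) {
            serializer.appendTuple(accessor, tIdx);
        }
    }

    private ResultPrinter() {
    }
}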

Example 24 with IFrameTupleAccessor

Use of org.apache.hyracks.api.comm.IFrameTupleAccessor in project asterixdb by apache.

The class PredicateEvaluatorFactoryProvider, method getPredicateEvaluatorFactory: returns an evaluator that rejects a tuple pair if any key field on either side is MISSING or NULL.

@Override
public IPredicateEvaluatorFactory getPredicateEvaluatorFactory(final int[] keys0, final int[] keys1) {
    return new IPredicateEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IPredicateEvaluator createPredicateEvaluator() {
            return new IPredicateEvaluator() {

                @Override
                public boolean evaluate(IFrameTupleAccessor fta0, int tupId0, IFrameTupleAccessor fta1, int tupId1) {
                    int tStart0 = fta0.getTupleStartOffset(tupId0);
                    int fStartOffset0 = fta0.getFieldSlotsLength() + tStart0;
                    for (int k0 : keys0) {
                        int fieldStartIx = fta0.getFieldStartOffset(tupId0, k0);
                        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta0.getBuffer().array()[fieldStartIx + fStartOffset0]);
                        if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
                            return false;
                        }
                    }
                    int tStart1 = fta1.getTupleStartOffset(tupId1);
                    int fStartOffset1 = fta1.getFieldSlotsLength() + tStart1;
                    for (int k1 : keys1) {
                        int fieldStartIx = fta1.getFieldStartOffset(tupId1, k1);
                        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta1.getBuffer().array()[fieldStartIx + fStartOffset1]);
                        if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
                            return false;
                        }
                    }
                    // none of the key fields on either side is MISSING or NULL
                    return true;
                }
            };
        }
    };
}
Also used: IPredicateEvaluator (org.apache.hyracks.api.dataflow.value.IPredicateEvaluator), IPredicateEvaluatorFactory (org.apache.hyracks.api.dataflow.value.IPredicateEvaluatorFactory), ATypeTag (org.apache.asterix.om.types.ATypeTag), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor)
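
An evaluator like this is typically consulted once per candidate tuple pair (for example during a join), so pairs whose key fields are MISSING or NULL on either side are filtered out before any comparison. A minimal sketch (hypothetical helper; only evaluate and getTupleCount from these examples) that counts the pairs such a predicate admits for two populated accessors:

import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.dataflow.value.IPredicateEvaluator;

// Hypothetical usage sketch: counts the tuple pairs the predicate lets through,
// i.e. pairs with no MISSING or NULL key field on either side.
final class PredicateSketch {

    static int countAdmittedPairs(IPredicateEvaluator pe,
                                  IFrameTupleAccessor fta0, IFrameTupleAccessor fta1) {
        int admitted = 0;
        for (int i = 0; i < fta0.getTupleCount(); i++) {
            for (int j = 0; j < fta1.getTupleCount(); j++) {
                if (pe.evaluate(fta0, i, fta1, j)) {
                    admitted++;
                }
            }
        }
        return admitted;
    }

    private PredicateSketch() {
    }
}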

Example 25 with IFrameTupleAccessor

Use of org.apache.hyracks.api.comm.IFrameTupleAccessor in project asterixdb by apache.

The class AbstractMultiNCIntegrationTest, method runTest: starts the job, reads its result set frame by frame, and collects every tuple as a string record before waiting for job completion.

protected void runTest(JobSpecification spec) throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(spec.toJSON().asText());
    }
    JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(jobId.toString());
    }
    int nReaders = 1;
    FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
    VSizeFrame resultFrame = new VSizeFrame(resultDisplayFrameMgr);
    IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
    if (!spec.getResultSetIds().isEmpty()) {
        IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
        IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
        ObjectMapper om = new ObjectMapper();
        ArrayNode resultRecords = om.createArrayNode();
        ByteBufferInputStream bbis = new ByteBufferInputStream();
        int readSize = reader.read(resultFrame);
        while (readSize > 0) {
            try {
                frameTupleAccessor.reset(resultFrame.getBuffer());
                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                    int start = frameTupleAccessor.getTupleStartOffset(tIndex);
                    int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
                    bbis.setByteBuffer(resultFrame.getBuffer(), start);
                    byte[] recordBytes = new byte[length];
                    bbis.read(recordBytes, 0, length);
                    resultRecords.add(new String(recordBytes, 0, length));
                }
            } finally {
                try {
                    bbis.close();
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
            }
            readSize = reader.read(resultFrame);
        }
    }
    hcc.waitForCompletion(jobId);
    dumpOutputFiles();
}
Also used: ByteBufferInputStream (org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream), IOException (java.io.IOException), VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), HyracksDataset (org.apache.hyracks.client.dataset.HyracksDataset), IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset), IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor), FrameManager (org.apache.hyracks.control.nc.resources.memory.FrameManager), ResultFrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor), IHyracksDatasetReader (org.apache.hyracks.api.dataset.IHyracksDatasetReader), ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode), JobId (org.apache.hyracks.api.job.JobId), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)

Aggregations

IFrameTupleAccessor (org.apache.hyracks.api.comm.IFrameTupleAccessor): 31
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 12
AggregateState (org.apache.hyracks.dataflow.std.group.AggregateState): 11
DataOutput (java.io.DataOutput): 10
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 10
IOException (java.io.IOException): 8
ByteBuffer (java.nio.ByteBuffer): 8
VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame): 7
IFieldAggregateDescriptor (org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptor): 7
FrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference): 6
IAggregatorDescriptor (org.apache.hyracks.dataflow.std.group.IAggregatorDescriptor): 5
FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor): 4
FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender): 4
IFrame (org.apache.hyracks.api.comm.IFrame): 3
ITuplePartitionComputer (org.apache.hyracks.api.dataflow.value.ITuplePartitionComputer): 3
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 3
FrameManager (org.apache.hyracks.control.nc.resources.memory.FrameManager): 3
IPointable (org.apache.hyracks.data.std.api.IPointable): 3
ByteBufferInputStream (org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream): 3
Test (org.junit.Test): 3