
Example 16 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in the project asterixdb by apache.

From the class DateFromUnixTimeInDaysDescriptor, the method createEvaluatorFactory:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                private IPointable argPtr = new VoidPointable();

                private IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);

                private AMutableDate aDate = new AMutableDate(0);

                // serializer for the possible return type (ADate)
                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval.evaluate(tuple, argPtr);
                    aDate.setValue(ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 0, argPtr.getByteArray(), argPtr.getStartOffset()));
                    dateSerde.serialize(aDate, out);
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) AMutableDate(org.apache.asterix.om.base.AMutableDate) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
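
All four descriptors in this listing share the same shape: createEvaluatorFactory returns a serializable anonymous IScalarEvaluatorFactory, whose createScalarEvaluator allocates reusable per-task state (result storage, argument pointables, argument evaluators, a mutable result object), and whose evaluate resets the storage, evaluates the arguments, serializes the computed value, and points result at the storage bytes. A minimal sketch of that skeleton, with the date-specific logic above reduced to a placeholder comment:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                // per-task reusable state: allocated once, reset on every call
                private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private final DataOutput out = resultStorage.getDataOutput();
                private final IPointable argPtr = new VoidPointable();
                private final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();        // reuse the backing buffer across tuples
                    eval.evaluate(tuple, argPtr); // materialize the argument value
                    // ... inspect argPtr's bytes, compute, and serialize into `out` ...
                    result.set(resultStorage);    // expose the serialized result bytes
                }
            };
        }
    };
}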

Example 17 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in the project asterixdb by apache.

From the class PushRuntimeTest, the method scanSelectWrite:

@Test
public void scanSelectWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    // the scanner
    FileSplit[] intFileSplits = new FileSplit[1];
    intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator + "simple" + File.separator + "int-part1.tbl");
    IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(intFileSplits);
    RecordDescriptor intScannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE };
    FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider, new DelimitedDataTupleParserFactory(valueParsers, '|'), intScannerDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, intScanner, DEFAULT_NODES);
    // the algebricks op.
    IScalarEvaluatorFactory cond = new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2), new TupleFieldEvaluatorFactory(0));
    StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 0 }, BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
    RecordDescriptor selectDesc = intScannerDesc;
    String filePath = PATH_ACTUAL + SEPARATOR + "scanSelectWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0, new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
    PartitionConstraintHelper.addPartitionCountConstraint(spec, algebricksOp, 1);
    spec.connect(new OneToOneConnectorDescriptor(spec), intScanner, 0, algebricksOp, 0);
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("0", buf.toString());
    outFile.delete();
}
Also used : TupleFieldEvaluatorFactory(org.apache.hyracks.algebricks.runtime.evaluators.TupleFieldEvaluatorFactory) IValueParserFactory(org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) ConstantFileSplitProvider(org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) DelimitedDataTupleParserFactory(org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory) OneToOneConnectorDescriptor(org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor) FileSplit(org.apache.hyracks.api.io.FileSplit) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) StreamSelectRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) SinkWriterRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory) FileScanOperatorDescriptor(org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor) JobSpecification(org.apache.hyracks.api.job.JobSpecification) File(java.io.File) Test(org.junit.Test)
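
Note the argument order of the condition: it computes greater-than(constant 2, field 0), so the select keeps tuples whose first field is less than 2, and the test asserts that "0" is the only surviving value. IntegerConstantEvalFactory is a test helper whose source is not shown here; the following is a plausible sketch of it, assuming the plain 4-byte integer encoding implied by IntegerSerializerDeserializer in the record descriptor, not the helper's actual implementation:

public class IntegerConstantEvalFactorySketch implements IScalarEvaluatorFactory {

    private static final long serialVersionUID = 1L;

    private final int value;

    public IntegerConstantEvalFactorySketch(int value) {
        this.value = value;
    }

    @Override
    public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
        return new IScalarEvaluator() {

            private final ArrayBackedValueStorage storage = new ArrayBackedValueStorage();

            @Override
            public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                // a constant never reads the tuple; it just re-serializes its value
                storage.reset();
                try {
                    storage.getDataOutput().writeInt(value); // plain 4-byte int encoding (assumed)
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
                result.set(storage);
            }
        };
    }
}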

Example 18 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in the project asterixdb by apache.

From the class PushRuntimeTest, the method scanSortGbySelectWrite:

@Test
public void scanSortGbySelectWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    // the scanner
    FileSplit[] fileSplits = new FileSplit[1];
    fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
    IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
    RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
    IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
    FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    // the sort (by nation id)
    RecordDescriptor sortDesc = scannerDesc;
    InMemorySortOperatorDescriptor sort = new InMemorySortOperatorDescriptor(spec, new int[] { 3 }, new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, sortDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sort, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    // the group-by
    NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
    RecordDescriptor ntsDesc = sortDesc;
    AggregateRuntimeFactory agg = new AggregateRuntimeFactory(new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
    RecordDescriptor aggDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg }, new RecordDescriptor[] { ntsDesc, aggDesc });
    NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
    RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    PreclusteredGroupOperatorDescriptor gby = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 3 }, new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf, gbyDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, gby, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    // the algebricks op.
    // nation id 3 identifies the Canadian customers
    IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
            new TupleFieldEvaluatorFactory(0));
    StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 }, BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
    RecordDescriptor selectDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0, new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
    spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, sort, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), sort, 0, gby, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), gby, 0, algebricksOp, 0);
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("9", buf.toString());
    outFile.delete();
}
Also used : AggregateRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.aggreg.AggregateRuntimeFactory) RunningAggregateRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.RunningAggregateRuntimeFactory) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) OneToOneConnectorDescriptor(org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor) FileSplit(org.apache.hyracks.api.io.FileSplit) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) UTF8StringSerializerDeserializer(org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer) TupleCountAggregateFunctionFactory(org.apache.hyracks.algebricks.runtime.aggregators.TupleCountAggregateFunctionFactory) NestedTupleSourceRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) FileScanOperatorDescriptor(org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor) JobSpecification(org.apache.hyracks.api.job.JobSpecification) AlgebricksPipeline(org.apache.hyracks.algebricks.runtime.base.AlgebricksPipeline) TupleFieldEvaluatorFactory(org.apache.hyracks.algebricks.runtime.evaluators.TupleFieldEvaluatorFactory) IValueParserFactory(org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory) InMemorySortOperatorDescriptor(org.apache.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor) ConstantFileSplitProvider(org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) DelimitedDataTupleParserFactory(org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory) NestedPlansAccumulatingAggregatorFactory(org.apache.hyracks.algebricks.runtime.operators.aggreg.NestedPlansAccumulatingAggregatorFactory) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) StreamSelectRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory) SinkWriterRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory) PreclusteredGroupOperatorDescriptor(org.apache.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor) File(java.io.File) Test(org.junit.Test)
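
The heart of this test is the nested plan: for each run of equal nation ids produced by the sort, the preclustered group-by replays the group's tuples through a miniature pipeline (NestedTupleSource feeding a tuple-count aggregate) and emits the group key followed by the count. A condensed, commented restatement of that wiring, using the same constructors as above; reading the two int[] arguments as key and decor field indexes is an inference from this call site:

// one pipeline instance runs per group of equal keys
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
        new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
AlgebricksPipeline pipeline = new AlgebricksPipeline(
        new IPushRuntimeFactory[] { nts, agg },       // source, then aggregate
        new RecordDescriptor[] { ntsDesc, aggDesc }); // schema after each stage
// key field 3 (nation id) is carried into the output; the empty int[] leaves no decor fields
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
        new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});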

Example 19 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in the project asterixdb by apache.

From the class MillisecondsFromDayTimeDurationDescriptor, the method createEvaluatorFactory:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                private IPointable argPtr0 = new VoidPointable();

                private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);

                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);

                private AMutableInt64 aInt64 = new AMutableInt64(0);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval0.evaluate(tuple, argPtr0);
                    byte[] bytes = argPtr0.getByteArray();
                    int offset = argPtr0.getStartOffset();
                    if (bytes[offset] != ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 0, bytes[offset], ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG);
                    }
                    aInt64.setValue(ADayTimeDurationSerializerDeserializer.getDayTime(bytes, offset + 1));
                    int64Serde.serialize(aInt64, out);
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference) AMutableInt64(org.apache.asterix.om.base.AMutableInt64)
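
The offset + 1 arithmetic reflects the serialized layout: one ATypeTag byte, then the payload, here an 8-byte day-time chronon in milliseconds. A hypothetical helper (not part of the codebase) isolating that check-then-decode step; the descriptor above throws the richer TypeMismatchException instead:

// Hypothetical helper illustrating the tag-then-payload layout: byte 0 is the
// serialized ATypeTag, and the 8-byte millisecond payload starts at offset + 1.
private static long requireDayTimeMillis(byte[] bytes, int offset) throws HyracksDataException {
    if (bytes[offset] != ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
        throw new HyracksDataException("expected a day-time-duration value at offset " + offset);
    }
    return ADayTimeDurationSerializerDeserializer.getDayTime(bytes, offset + 1);
}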

Example 20 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in the project asterixdb by apache.

From the class OverlapBinsDescriptor, the method createEvaluatorFactory:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private final DataOutput out = resultStorage.getDataOutput();

                private final IPointable argPtr0 = new VoidPointable();

                private final IPointable argPtr1 = new VoidPointable();

                private final IPointable argPtr2 = new VoidPointable();

                private final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);

                private final IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);

                private final IScalarEvaluator eval2 = args[2].createScalarEvaluator(ctx);

                // for output
                private OrderedListBuilder listBuilder = new OrderedListBuilder();

                private ArrayBackedValueStorage listStorage = new ArrayBackedValueStorage();

                protected final AOrderedListType intListType = new AOrderedListType(BuiltinType.AINTERVAL, null);

                private final AMutableInterval aInterval = new AMutableInterval(0, 0, (byte) -1);

                @SuppressWarnings("unchecked")
                private final ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);

                private final GregorianCalendarSystem gregCalSys = GregorianCalendarSystem.getInstance();

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval0.evaluate(tuple, argPtr0);
                    eval1.evaluate(tuple, argPtr1);
                    eval2.evaluate(tuple, argPtr2);
                    byte[] bytes0 = argPtr0.getByteArray();
                    int offset0 = argPtr0.getStartOffset();
                    ATypeTag type0 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes0[offset0]);
                    long intervalStart;
                    long intervalEnd;
                    byte intervalTypeTag;
                    if (type0 == ATypeTag.INTERVAL) {
                        intervalStart = AIntervalSerializerDeserializer.getIntervalStart(bytes0, offset0 + 1);
                        intervalEnd = AIntervalSerializerDeserializer.getIntervalEnd(bytes0, offset0 + 1);
                        intervalTypeTag = AIntervalSerializerDeserializer.getIntervalTimeType(bytes0, offset0 + 1);
                        if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
                            intervalStart = intervalStart * GregorianCalendarSystem.CHRONON_OF_DAY;
                        }
                    } else {
                        throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                    }
                    // get the anchor instance time
                    byte[] bytes1 = argPtr1.getByteArray();
                    int offset1 = argPtr1.getStartOffset();
                    ATypeTag type1 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes1[offset1]);
                    if (intervalTypeTag != bytes1[offset1]) {
                        throw new IncompatibleTypeException(getIdentifier(), intervalTypeTag, bytes1[offset1]);
                    }
                    long anchorTime;
                    switch(type1) {
                        case DATE:
                            anchorTime = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1) * GregorianCalendarSystem.CHRONON_OF_DAY;
                            break;
                        case TIME:
                            anchorTime = ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
                            break;
                        case DATETIME:
                            anchorTime = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1], ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
                    }
                    byte[] bytes2 = argPtr2.getByteArray();
                    int offset2 = argPtr2.getStartOffset();
                    ATypeTag type2 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes2[offset2]);
                    int yearMonth = 0;
                    long dayTime = 0;
                    long firstBinIndex;
                    switch(type2) {
                        case YEARMONTHDURATION:
                            yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(bytes2, offset2 + 1);
                            int yearStart = gregCalSys.getYear(anchorTime);
                            int monthStart = gregCalSys.getMonthOfYear(anchorTime, yearStart);
                            int yearToBin = gregCalSys.getYear(intervalStart);
                            int monthToBin = gregCalSys.getMonthOfYear(intervalStart, yearToBin);
                            int totalMonths = (yearToBin - yearStart) * 12 + (monthToBin - monthStart);
                            firstBinIndex = totalMonths / yearMonth + ((totalMonths < 0 && totalMonths % yearMonth != 0) ? -1 : 0);
                            if (firstBinIndex > Integer.MAX_VALUE) {
                                throw new OverflowException(getIdentifier());
                            }
                            if (firstBinIndex < Integer.MIN_VALUE) {
                                throw new UnderflowException(getIdentifier());
                            }
                            break;
                        case DAYTIMEDURATION:
                            dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes2, offset2 + 1);
                            long totalChronon = intervalStart - anchorTime;
                            firstBinIndex = totalChronon / dayTime + ((totalChronon < 0 && totalChronon % dayTime != 0) ? -1 : 0);
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 2, bytes2[offset2], ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG, ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG);
                    }
                    long binStartChronon;
                    long binEndChronon;
                    int binOffset;
                    listBuilder.reset(intListType);
                    try {
                        if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
                            binOffset = 0;
                            do {
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), false);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), false);
                                binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binStartChronon < 0 && binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
                                binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binEndChronon < 0 && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                            } while (binEndChronon < intervalEnd);
                        } else if (intervalTypeTag == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
                            if (yearMonth != 0) {
                                throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                            }
                            binOffset = 0;
                            binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), true);
                            binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), true);
                            if (binStartChronon < 0 || binStartChronon >= GregorianCalendarSystem.CHRONON_OF_DAY) {
                                // reject time bins that start before 00:00:00 or at/after 24:00:00
                                throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                            }
                            while (!((binStartChronon < intervalStart && binEndChronon <= intervalStart) || (binStartChronon >= intervalEnd && binEndChronon > intervalEnd))) {
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), true);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), true);
                                if (binStartChronon == GregorianCalendarSystem.CHRONON_OF_DAY) {
                                    break;
                                }
                                if (binEndChronon < binStartChronon) {
                                    throw new InvalidDataFormatException(getIdentifier(), ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                                }
                            }
                        } else if (intervalTypeTag == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
                            binOffset = 0;
                            do {
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset), false);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth * ((int) (firstBinIndex + binOffset) + 1), dayTime * ((firstBinIndex + binOffset) + 1), false);
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                            } while (binEndChronon < intervalEnd);
                        } else {
                            throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0], ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
                        }
                        listBuilder.write(out, true);
                    } catch (IOException e1) {
                        throw new HyracksDataException(e1);
                    }
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) OrderedListBuilder(org.apache.asterix.builders.OrderedListBuilder) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) InvalidDataFormatException(org.apache.asterix.runtime.exceptions.InvalidDataFormatException) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IncompatibleTypeException(org.apache.asterix.runtime.exceptions.IncompatibleTypeException) GregorianCalendarSystem(org.apache.asterix.om.base.temporal.GregorianCalendarSystem) AOrderedListType(org.apache.asterix.om.types.AOrderedListType) IOException(java.io.IOException) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) ATypeTag(org.apache.asterix.om.types.ATypeTag) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference) UnderflowException(org.apache.asterix.runtime.exceptions.UnderflowException) AMutableInterval(org.apache.asterix.om.base.AMutableInterval) OverflowException(org.apache.asterix.runtime.exceptions.OverflowException)
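
Both firstBinIndex computations, and the chronon-to-day conversions in the DATE branch, rely on the same idiom: Java's / truncates toward zero, so a / b + ((a < 0 && a % b != 0) ? -1 : 0) shifts negative, inexact quotients down by one, which is exactly floor division. A small self-contained check that the expression matches java.lang.Math.floorDiv:

// Verifies that the bin-index expression used above is floor division.
public class FloorDivCheck {

    static long binIndex(long totalChronon, long dayTime) {
        return totalChronon / dayTime + ((totalChronon < 0 && totalChronon % dayTime != 0) ? -1 : 0);
    }

    public static void main(String[] args) {
        long dayTime = 3;
        for (long chronon : new long[] { 7, 6, 0, -6, -7 }) {
            // e.g. 7 -> 2, but -7 -> -3 (truncation alone would give -2)
            if (binIndex(chronon, dayTime) != Math.floorDiv(chronon, dayTime)) {
                throw new AssertionError("mismatch at " + chronon);
            }
            System.out.println(chronon + " -> " + binIndex(chronon, dayTime));
        }
    }
}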

Aggregations

IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory): 167 uses
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext): 146 uses
IPointable (org.apache.hyracks.data.std.api.IPointable): 136 uses
IFrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference): 134 uses
IScalarEvaluator (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator): 132 uses
DataOutput (java.io.DataOutput): 129 uses
ArrayBackedValueStorage (org.apache.hyracks.data.std.util.ArrayBackedValueStorage): 129 uses
VoidPointable (org.apache.hyracks.data.std.primitive.VoidPointable): 124 uses
TypeMismatchException (org.apache.asterix.runtime.exceptions.TypeMismatchException): 110 uses
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 107 uses
IOException (java.io.IOException): 77 uses
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 61 uses
InvalidDataFormatException (org.apache.asterix.runtime.exceptions.InvalidDataFormatException): 48 uses
UTF8StringPointable (org.apache.hyracks.data.std.primitive.UTF8StringPointable): 40 uses
AMutableInt64 (org.apache.asterix.om.base.AMutableInt64): 24 uses
ATypeTag (org.apache.asterix.om.types.ATypeTag): 19 uses
GregorianCalendarSystem (org.apache.asterix.om.base.temporal.GregorianCalendarSystem): 13 uses
AMutableDateTime (org.apache.asterix.om.base.AMutableDateTime): 11 uses
AMutableDouble (org.apache.asterix.om.base.AMutableDouble): 11 uses
AMutablePoint (org.apache.asterix.om.base.AMutablePoint): 11 uses