
Example 56 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.

From the class DurationFromMillisecondsDescriptor, method createEvaluatorFactory:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                private IPointable argPtr0 = new VoidPointable();

                private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);

                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);

                AMutableDuration aDuration = new AMutableDuration(0, 0);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval0.evaluate(tuple, argPtr0);
                    byte[] bytes = argPtr0.getByteArray();
                    int offset = argPtr0.getStartOffset();
                    ATypeTag argPtrTypeTag = ATypeTag.VALUE_TYPE_MAPPING[bytes[offset]];
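                    // Accept any integer-typed argument as a millisecond count; any other type tag falls through to a type mismatch.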
                    switch(argPtrTypeTag) {
                        case TINYINT:
                            aDuration.setValue(0, AInt8SerializerDeserializer.getByte(bytes, offset + 1));
                            break;
                        case SMALLINT:
                            aDuration.setValue(0, AInt16SerializerDeserializer.getShort(bytes, offset + 1));
                            break;
                        case INTEGER:
                            aDuration.setValue(0, AInt32SerializerDeserializer.getInt(bytes, offset + 1));
                            break;
                        case BIGINT:
                            aDuration.setValue(0, AInt64SerializerDeserializer.getLong(bytes, offset + 1));
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 0, bytes[offset], ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG);
                    }
                    durationSerde.serialize(aDuration, out);
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) AMutableDuration(org.apache.asterix.om.base.AMutableDuration) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) ATypeTag(org.apache.asterix.om.types.ATypeTag) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference)
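The factories in these examples follow a two-stage pattern: the outer IScalarEvaluatorFactory is Serializable (hence the serialVersionUID) and travels with the compiled job, while createScalarEvaluator allocates the per-task state (the ArrayBackedValueStorage, pointables, and argument evaluator) on the node that runs the task; evaluate is then called once per tuple. A minimal wiring sketch, assuming a DurationFromMillisecondsDescriptor instance desc, a task-supplied ctx and tuple, and a ColumnAccessEvalFactory reading the argument from column 0 (as in Example 59); desc, ctx, and tuple are illustrative names, only the calls mirror the snippet above:

IScalarEvaluatorFactory[] args = new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(0) };
IScalarEvaluatorFactory factory = desc.createEvaluatorFactory(args); // built at compile time, serialized into the job
IScalarEvaluator eval = factory.createScalarEvaluator(ctx); // once per task: buffers are allocated here
IPointable result = new VoidPointable();
eval.evaluate(tuple, result); // once per tuple: result now points at a serialized ADuration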

Example 57 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.

From the class DurationFromMonthsDescriptor, method createEvaluatorFactory:

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                private IPointable argPtr0 = new VoidPointable();

                private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);

                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);

                AMutableDuration aDuration = new AMutableDuration(0, 0);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval0.evaluate(tuple, argPtr0);
                    byte[] bytes = argPtr0.getByteArray();
                    int offset = argPtr0.getStartOffset();
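                    // ATypeHierarchy.getIntegerValue does the integer type-tag dispatch that Example 56 spells out by hand,
                    // widening int8 through int64 to a long and throwing on any other type.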
                    aDuration.setValue(ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 0, bytes, offset), 0);
                    durationSerde.serialize(aDuration, out);
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) AMutableDuration(org.apache.asterix.om.base.AMutableDuration) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)

Example 58 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.

From the class SecondaryIndexOperationsHelper, method createAssignOp:

protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec, int numSecondaryKeyFields, RecordDescriptor secondaryRecDesc) throws AlgebricksException {
    int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
    int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
    for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
        outColumns[i] = numPrimaryKeys + i;
    }
    int projCount = 0;
    for (int i = 0; i < numSecondaryKeyFields; i++) {
        projectionList[projCount++] = numPrimaryKeys + i;
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        projectionList[projCount++] = i;
    }
    if (numFilterFields > 0) {
        projectionList[projCount] = numPrimaryKeys + numSecondaryKeyFields;
    }
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
    for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
        sefs[i] = secondaryFieldAccessEvalFactories[i];
    }
    AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp, primaryPartitionConstraint);
    return asterixAssignOp;
}
Also used : AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
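For concreteness, a worked example with illustrative values (not taken from the snippet): with numPrimaryKeys = 1, numSecondaryKeyFields = 2, and numFilterFields = 0, the loops above produce

int[] outColumns = { 1, 2 }; // the two secondary-key evaluators write into the columns after the primary key
int[] projectionList = { 1, 2, 0 }; // output order: secondary keys first, then the primary key

so the assign runtime appends the evaluated secondary keys after the incoming primary key and then projects them ahead of it.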

Example 59 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.

From the class SecondaryIndexOperationsHelper, method createCastOp:

protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType) {
    CastTypeDescriptor castFuncDesc = (CastTypeDescriptor) CastTypeDescriptor.FACTORY.createFunctionDescriptor();
    castFuncDesc.setImmutableStates(enforcedItemType, itemType);
    int[] outColumns = new int[1];
    int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx;
    //external datascan operator returns a record as the first field, instead of the last in internal case
    if (dsType == DatasetType.EXTERNAL) {
        recordIdx = 0;
        outColumns[0] = 0;
    } else {
        recordIdx = numPrimaryKeys;
        outColumns[0] = numPrimaryKeys;
    }
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[i] = i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] castEvalFact = new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = castFuncDesc.createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
}
Also used : CastTypeDescriptor(org.apache.asterix.runtime.evaluators.functions.CastTypeDescriptor) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) ColumnAccessEvalFactory(org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
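Again for concreteness (illustrative values): for an internal dataset with no meta part and numPrimaryKeys = 2, the record is the last field, so

int recordIdx = 2; // the record follows the primary keys for internal datasets
int[] outColumns = { 2 }; // the cast result overwrites the record column
int[] projectionList = { 0, 1, 2 }; // keep both primary keys and the (now cast) record

Note also how factories compose here: the ColumnAccessEvalFactory's evaluator supplies the record bytes that the cast descriptor's evaluator consumes, the same chaining that args[0].createScalarEvaluator(ctx) performs in the scalar-function examples.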

Example 60 with IScalarEvaluatorFactory

Use of org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory in project asterixdb by apache.

From the class DatetimeFromUnixTimeInSecsDescriptor, method createEvaluatorFactory:

/* (non-Javadoc)
     * @see org.apache.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
     */
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                private IPointable argPtr = new VoidPointable();

                private IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);

                // possible output types
                @SuppressWarnings("unchecked")
                private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);

                private AMutableDateTime aDatetime = new AMutableDateTime(0);

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval.evaluate(tuple, argPtr);
                    byte[] bytes = argPtr.getByteArray();
                    int offset = argPtr.getStartOffset();
                    ATypeTag argPtrTypeTag = ATypeTag.VALUE_TYPE_MAPPING[bytes[offset]];
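                    // The argument is a Unix time in seconds; accept any integer type and scale by 1000 to the internal millisecond representation.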
                    switch(argPtrTypeTag) {
                        case TINYINT:
                            aDatetime.setValue(AInt8SerializerDeserializer.getByte(bytes, offset + 1) * 1000L);
                            break;
                        case SMALLINT:
                            aDatetime.setValue(AInt16SerializerDeserializer.getShort(bytes, offset + 1) * 1000L);
                            break;
                        case INTEGER:
                            aDatetime.setValue(AInt32SerializerDeserializer.getInt(bytes, offset + 1) * 1000L);
                            break;
                        case BIGINT:
                            aDatetime.setValue(AInt64SerializerDeserializer.getLong(bytes, offset + 1) * 1000L);
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 0, bytes[offset], ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG);
                    }
                    datetimeSerde.serialize(aDatetime, out);
                    result.set(resultStorage);
                }
            };
        }
    };
}
Also used : DataOutput(java.io.DataOutput) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) AMutableDateTime(org.apache.asterix.om.base.AMutableDateTime) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) ATypeTag(org.apache.asterix.om.types.ATypeTag) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference)

Aggregations

IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory): 167 usages
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext): 146 usages
IPointable (org.apache.hyracks.data.std.api.IPointable): 136 usages
IFrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference): 134 usages
IScalarEvaluator (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator): 132 usages
DataOutput (java.io.DataOutput): 129 usages
ArrayBackedValueStorage (org.apache.hyracks.data.std.util.ArrayBackedValueStorage): 129 usages
VoidPointable (org.apache.hyracks.data.std.primitive.VoidPointable): 124 usages
TypeMismatchException (org.apache.asterix.runtime.exceptions.TypeMismatchException): 110 usages
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 107 usages
IOException (java.io.IOException): 77 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 61 usages
InvalidDataFormatException (org.apache.asterix.runtime.exceptions.InvalidDataFormatException): 48 usages
UTF8StringPointable (org.apache.hyracks.data.std.primitive.UTF8StringPointable): 40 usages
AMutableInt64 (org.apache.asterix.om.base.AMutableInt64): 24 usages
ATypeTag (org.apache.asterix.om.types.ATypeTag): 19 usages
GregorianCalendarSystem (org.apache.asterix.om.base.temporal.GregorianCalendarSystem): 13 usages
AMutableDateTime (org.apache.asterix.om.base.AMutableDateTime): 11 usages
AMutableDouble (org.apache.asterix.om.base.AMutableDouble): 11 usages
AMutablePoint (org.apache.asterix.om.base.AMutablePoint): 11 usages