Use of org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory in project asterixdb by apache.
The class SecondaryIndexOperationsHelper, method createCastOp:
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType) {
    CastTypeDescriptor castFuncDesc = (CastTypeDescriptor) CastTypeDescriptor.FACTORY.createFunctionDescriptor();
    castFuncDesc.setImmutableStates(enforcedItemType, itemType);
    int[] outColumns = new int[1];
    int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx;
    // The external datascan operator returns the record as the first field, instead of the last as in the internal case.
    if (dsType == DatasetType.EXTERNAL) {
        recordIdx = 0;
        outColumns[0] = 0;
    } else {
        recordIdx = numPrimaryKeys;
        outColumns[0] = numPrimaryKeys;
    }
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[i] = i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] castEvalFact =
            new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = castFuncDesc.createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
            new RecordDescriptor[] { enforcedRecDesc });
}
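Here new ColumnAccessEvalFactory(recordIdx) supplies the cast function's single argument: its evaluator simply hands back the bytes of tuple field recordIdx, so the cast evaluator always sees the record column regardless of where the datascan placed it. A minimal sketch of a factory with the same behavior (an illustration of the contract, not the class's actual source):

    // Sketch only: an evaluator factory behaving like new ColumnAccessEvalFactory(fieldIndex).
    final int fieldIndex = 0; // the tuple field to expose (recordIdx above)
    IScalarEvaluatorFactory columnAccess = new IScalarEvaluatorFactory() {
        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) {
            return new IScalarEvaluator() {
                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) {
                    // Point the result at the raw bytes of the requested field; no copying, no type change.
                    result.set(tuple.getFieldData(fieldIndex), tuple.getFieldStart(fieldIndex),
                            tuple.getFieldLength(fieldIndex));
                }
            };
        }
    };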
Use of org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory in project asterixdb by apache.
The class NonTaggedDataFormat, method partitioningEvaluatorFactory:
@SuppressWarnings("unchecked")
@Override
public Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
        ARecordType recType, List<String> fldName) throws AlgebricksException {
    String[] names = recType.getFieldNames();
    int n = names.length;
    if (fldName.size() > 1) {
        for (int i = 0; i < n; i++) {
            if (names[i].equals(fldName.get(0))) {
                IScalarEvaluatorFactory recordEvalFactory =
                        new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
                ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
                DataOutput dos = abvs.getDataOutput();
                try {
                    AInt32 ai = new AInt32(i);
                    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos);
                } catch (HyracksDataException e) {
                    throw new AlgebricksException(e);
                }
                IScalarEvaluatorFactory fldIndexEvalFactory =
                        new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
                IScalarEvaluatorFactory evalFactory =
                        new FieldAccessByIndexEvalFactory(recordEvalFactory, fldIndexEvalFactory, recType);
                IFunctionInfo finfoAccess =
                        BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX);
                ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
                        new MutableObject<ILogicalExpression>(
                                new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
                return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory,
                        partitionFun, recType.getFieldTypes()[i]);
            }
        }
    } else {
        IScalarEvaluatorFactory recordEvalFactory =
                new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
        ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
        DataOutput dos = abvs.getDataOutput();
        AOrderedList as = new AOrderedList(fldName);
        try {
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(as.getType()).serialize(as, dos);
        } catch (HyracksDataException e) {
            throw new AlgebricksException(e);
        }
        IScalarEvaluatorFactory evalFactory = new FieldAccessNestedEvalFactory(recordEvalFactory, recType, fldName);
        IFunctionInfo finfoAccess = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED);
        ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
                new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(as))));
        return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory, partitionFun,
                recType.getSubFieldType(fldName));
    }
    throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
}
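Both branches build the same field access twice: once as a runtime evaluator factory rooted at ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN), which reads the payload record out of the input frame, and once as a logical function-call expression over METADATA_DUMMY_VAR. Schematically (illustrative shorthand, not project syntax):

    // evaluator:  field-access-by-index(column(DEFAULT_INPUT_DATA_COLUMN), i)
    // expression: field-access-by-index($METADATA_DUMMY_VAR, i)          type: recType.getFieldTypes()[i]
    // -- or, in the else branch --
    // evaluator:  field-access-nested(column(DEFAULT_INPUT_DATA_COLUMN), fldName)
    // expression: field-access-nested($METADATA_DUMMY_VAR, fldName)      type: recType.getSubFieldType(fldName)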
Use of org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory in project asterixdb by apache.
The class AbstractScalarAggregateDescriptor, method createEvaluatorFactory:
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) throws AlgebricksException {
    // The aggregate function will get a SingleFieldFrameTupleReference that points to the result of the ScanCollection.
    // The list-item will always reside in the first field (column) of the SingleFieldFrameTupleReference.
    IScalarEvaluatorFactory[] aggFuncArgs = new IScalarEvaluatorFactory[1];
    aggFuncArgs[0] = new ColumnAccessEvalFactory(0);
    // Create aggregate function from this scalar version.
    FunctionIdentifier fid = BuiltinFunctions.getAggregateFunction(getIdentifier());
    IFunctionManager mgr = FunctionManagerHolder.getFunctionManager();
    IFunctionDescriptor fd = mgr.lookupFunction(fid);
    AbstractAggregateFunctionDynamicDescriptor aggFuncDesc = (AbstractAggregateFunctionDynamicDescriptor) fd;
    final IAggregateEvaluatorFactory aggFuncFactory = aggFuncDesc.createAggregateEvaluatorFactory(aggFuncArgs);
    return new IScalarEvaluatorFactory() {
        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
            // Use ScanCollection to iterate over list items.
            ScanCollectionUnnestingFunctionFactory scanCollectionFactory =
                    new ScanCollectionUnnestingFunctionFactory(args[0]);
            return new GenericScalarAggregateFunction(aggFuncFactory.createAggregateEvaluator(ctx),
                    scanCollectionFactory, ctx);
        }
    };
}
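The ColumnAccessEvalFactory(0) argument is what lets the pre-built aggregate run over list items: the scalar wrapper unnests the list and exposes each item as field 0 of a one-field tuple, which is exactly the column the aggregate's argument evaluator reads. A rough sketch of that evaluation loop, assuming the standard IAggregateEvaluator/IUnnestingEvaluator interfaces and asterixdb's SingleFieldFrameTupleReference; the local names aggEval, scanCollection, inputTuple, and result are illustrative, and this is not GenericScalarAggregateFunction's actual code:

    // itemTuple exposes one value as field 0, matching ColumnAccessEvalFactory(0) above.
    SingleFieldFrameTupleReference itemTuple = new SingleFieldFrameTupleReference();
    IPointable item = VoidPointable.FACTORY.createPointable();

    aggEval.init();                      // IAggregateEvaluator built by aggFuncFactory
    scanCollection.init(inputTuple);     // IUnnestingEvaluator over the list argument
    while (scanCollection.step(item)) {  // one list item per step
        itemTuple.reset(item.getByteArray(), item.getStartOffset(), item.getLength());
        aggEval.step(itemTuple);         // the aggregate reads the item from column 0
    }
    aggEval.finish(result);              // write out the scalar aggregate value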
Use of org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory in project asterixdb by apache.
The class NonTaggedDataFormat, method getFieldAccessEvaluatorFactory:
@SuppressWarnings("unchecked")
@Override
public IScalarEvaluatorFactory getFieldAccessEvaluatorFactory(ARecordType recType, List<String> fldName,
        int recordColumn) throws AlgebricksException {
    String[] names = recType.getFieldNames();
    int n = names.length;
    boolean fieldFound = false;
    IScalarEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(recordColumn);
    ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
    DataOutput dos = abvs.getDataOutput();
    IScalarEvaluatorFactory evalFactory = null;
    if (fldName.size() == 1) {
        for (int i = 0; i < n; i++) {
            if (names[i].equals(fldName.get(0))) {
                fieldFound = true;
                try {
                    AInt32 ai = new AInt32(i);
                    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos);
                } catch (HyracksDataException e) {
                    throw new AlgebricksException(e);
                }
                IScalarEvaluatorFactory fldIndexEvalFactory =
                        new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
                evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory, fldIndexEvalFactory, recType);
                return evalFactory;
            }
        }
    }
    if (fldName.size() > 1 || (!fieldFound && recType.isOpen())) {
        if (fldName.size() == 1) {
            AString as = new AString(fldName.get(0));
            try {
                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(as.getType()).serialize(as, dos);
            } catch (HyracksDataException e) {
                throw new AlgebricksException(e);
            }
        } else {
            AOrderedList as = new AOrderedList(fldName);
            try {
                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(as.getType()).serialize(as, dos);
            } catch (HyracksDataException e) {
                throw new AlgebricksException(e);
            }
        }
        // Wrap the serialized field name as a constant evaluator; it is the second argument of the by-name accessor.
        IScalarEvaluatorFactory fldNameEvalFactory =
                new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
        IScalarEvaluatorFactory[] factories = new IScalarEvaluatorFactory[2];
        factories[0] = recordEvalFactory;
        factories[1] = fldNameEvalFactory;
        if (fldName.size() > 1) {
            evalFactory = new FieldAccessNestedEvalFactory(recordEvalFactory, recType, fldName);
        } else {
            evalFactory = FieldAccessByNameDescriptor.FACTORY.createFunctionDescriptor().createEvaluatorFactory(factories);
        }
        return evalFactory;
    } else {
        throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
    }
}
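Taken together, the method picks one of three access paths, all of which read the record through the same ColumnAccessEvalFactory(recordColumn). In shorthand (a summary of the code above, not additional project code):

    // closed record, single name -> FieldAccessByIndexEvalFactory(record, constant(serialized AInt32 index), recType)
    // open record, single name   -> FieldAccessByNameDescriptor evaluator over { record, constant(serialized AString name) }
    // nested path (size > 1)     -> FieldAccessNestedEvalFactory(record, recType, fldName)
    // where record = new ColumnAccessEvalFactory(recordColumn)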
Use of org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory in project asterixdb by apache.
The class SecondaryIndexOperationsHelper, method createFilterNullsSelectOp:
public AlgebricksMetaOperatorDescriptor createFilterNullsSelectOp(JobSpecification spec, int numSecondaryKeyFields,
        RecordDescriptor secondaryRecDesc) throws AlgebricksException {
    IScalarEvaluatorFactory[] andArgsEvalFactories = new IScalarEvaluatorFactory[numSecondaryKeyFields];
    NotDescriptor notDesc = new NotDescriptor();
    IsUnknownDescriptor isUnknownDesc = new IsUnknownDescriptor();
    for (int i = 0; i < numSecondaryKeyFields; i++) {
        // Access column i, and apply 'is not null'.
        ColumnAccessEvalFactory columnAccessEvalFactory = new ColumnAccessEvalFactory(i);
        IScalarEvaluatorFactory isUnknownEvalFactory =
                isUnknownDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { columnAccessEvalFactory });
        IScalarEvaluatorFactory notEvalFactory =
                notDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { isUnknownEvalFactory });
        andArgsEvalFactories[i] = notEvalFactory;
    }
    IScalarEvaluatorFactory selectCond;
    if (numSecondaryKeyFields > 1) {
        // Create conjunctive condition where all secondary index keys must satisfy 'is not null'.
        AndDescriptor andDesc = new AndDescriptor();
        selectCond = andDesc.createEvaluatorFactory(andArgsEvalFactories);
    } else {
        selectCond = andArgsEvalFactories[0];
    }
    StreamSelectRuntimeFactory select =
            new StreamSelectRuntimeFactory(selectCond, null, BinaryBooleanInspector.FACTORY, false, -1, null);
    AlgebricksMetaOperatorDescriptor asterixSelectOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
            new IPushRuntimeFactory[] { select }, new RecordDescriptor[] { secondaryRecDesc });
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixSelectOp,
            primaryPartitionConstraint);
    return asterixSelectOp;
}
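For numSecondaryKeyFields key columns, the factories assembled above evaluate a predicate of the following shape over each input tuple, where $i stands for the evaluator built from ColumnAccessEvalFactory(i) (shorthand, not project syntax):

    not(is-unknown($0)) and not(is-unknown($1)) and ... and not(is-unknown($numSecondaryKeyFields - 1))

The and(...) wrapper is omitted when there is only a single secondary key column, as the else branch shows.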