Use of org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment in project asterixdb by apache.
The class InsertDeleteUpsertPOperator, method contributeRuntimeOperator:
@SuppressWarnings("unchecked")
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op;
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification spec = builder.getJobSpec();
    // The input record descriptor is built from the *input* operator's type environment.
    RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints;
    if (operation == Kind.INSERT) {
        runtimeAndConstraints = mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, keys, payload,
                additionalFilteringKeys, additionalNonFilteringFields, inputDesc, context, spec, false);
    } else if (operation == Kind.DELETE) {
        runtimeAndConstraints = mp.getDeleteRuntime(dataSource, propagatedSchema, typeEnv, keys, payload,
                additionalFilteringKeys, inputDesc, context, spec);
    } else if (operation == Kind.UPSERT) {
        runtimeAndConstraints = mp.getUpsertRuntime(dataSource, inputSchemas[0], typeEnv, keys, payload,
                additionalFilteringKeys, additionalNonFilteringFields, inputDesc, context, spec);
    } else {
        throw new AlgebricksException("Unsupported Operation " + operation);
    }
    builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
    builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
    ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, insertDeleteOp, 0);
}
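All five snippets on this page share this job-generation skeleton: the operator's own type environment (context.getTypeEnvironment(op)) describes its output variables, while the record descriptor for incoming frames is built from the type environment of the input operator. A minimal standalone sketch of that idiom follows; the class and method names are ours, not part of Algebricks, and the import paths are assumed from the usual algebricks-core layout.

import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOperatorSchema;
import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenContext;
import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;

public final class JobGenSketches {

    private JobGenSketches() {
    }

    // Hypothetical helper: builds the RecordDescriptor for an operator's i-th
    // input exactly as the snippets on this page do inline -- the *input*
    // operator's type environment is what describes the frames this operator
    // will receive, not the operator's own environment.
    public static RecordDescriptor inputRecordDescriptor(JobGenContext context, ILogicalOperator op,
            IOperatorSchema[] inputSchemas, int i) throws AlgebricksException {
        ILogicalOperator input = op.getInputs().get(i).getValue();
        return JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(input), inputSchemas[i], context);
    }
}

The metadata provider then receives typeEnv, so the storage layer can resolve the types of the key and payload variables when it wires up the insert, delete, or upsert runtime.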
Use of org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment in project asterixdb by apache.
The class MicroPreclusteredGroupByPOperator, method contributeRuntimeOperator:
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    int[] keys = JobGenHelper.variablesToFieldIndexes(columnList, inputSchemas[0]);
    GroupByOperator gby = (GroupByOperator) op;
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    int[] fdColumns = getFdColumns(gby, inputSchemas[0]);
    // Compile the nested subplans and set the group-by operator's schema accordingly.
    AlgebricksPipeline[] subplans = compileSubplans(inputSchemas[0], gby, opSchema, context);
    IAggregatorDescriptorFactory aggregatorFactory =
            new NestedPlansAccumulatingAggregatorFactory(subplans, keys, fdColumns);
    IBinaryComparatorFactory[] comparatorFactories =
            JobGenHelper.variablesToAscBinaryComparatorFactories(columnList, env, context);
    RecordDescriptor recordDescriptor =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    MicroPreClusteredGroupRuntimeFactory runtime = new MicroPreClusteredGroupRuntimeFactory(keys,
            comparatorFactories, aggregatorFactory, inputRecordDesc, recordDescriptor, null);
    builder.contributeMicroOperator(gby, runtime, recordDescriptor);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
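Two things distinguish this variant. First, contributeMicroOperator registers the group-by as a push-runtime factory fused into the surrounding pipeline, instead of the standalone Hyracks operator that contributeHyracksOperator produced in the previous snippet. Second, JobGenHelper.variablesToFieldIndexes resolves each grouping variable to its column position in the input schema. A hedged sketch of that mapping, using only the IOperatorSchema.findVariable call the sort snippet below also relies on (the helper class is hypothetical):

import java.util.Collection;

import org.apache.hyracks.algebricks.core.algebra.base.IOperatorSchema;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;

public final class FieldIndexSketch {

    private FieldIndexSketch() {
    }

    // Maps each logical variable to its field (column) index in the given
    // schema. Assumption: findVariable returns the variable's position, the
    // same way the InMemoryStableSortPOperator snippet uses it for sortFields.
    public static int[] fieldIndexes(Collection<LogicalVariable> vars, IOperatorSchema schema) {
        int[] indexes = new int[vars.size()];
        int i = 0;
        for (LogicalVariable v : vars) {
            indexes[i++] = schema.findVariable(v);
        }
        return indexes;
    }
}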
Use of org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment in project asterixdb by apache.
The class InMemoryHashJoinPOperator, method contributeRuntimeOperator:
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
    int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    IBinaryHashFunctionFactory[] hashFunFactories =
            JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
    IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
    int i = 0;
    IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
    // One ascending comparator per join key, typed via the variable type environment.
    for (LogicalVariable v : keysLeftBranch) {
        Object t = env.getVarType(v);
        comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
    }
    IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider = context.getPredicateEvaluatorFactoryProvider();
    IPredicateEvaluatorFactory predEvaluatorFactory = (predEvaluatorFactoryProvider == null ? null
            : predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight));
    RecordDescriptor recDescriptor =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
    IOperatorDescriptorRegistry spec = builder.getJobSpec();
    IOperatorDescriptor opDesc;
    switch (kind) {
        case INNER: {
            opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, hashFunFactories,
                    comparatorFactories, recDescriptor, tableSize, predEvaluatorFactory, memSizeInFrames);
            break;
        }
        case LEFT_OUTER: {
            // Missing writers fill in MISSING values for unmatched right-branch fields.
            IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1].getSize()];
            for (int j = 0; j < nonMatchWriterFactories.length; j++) {
                nonMatchWriterFactories[j] = context.getMissingWriterFactory();
            }
            opDesc = new InMemoryHashJoinOperatorDescriptor(spec, keysLeft, keysRight, hashFunFactories,
                    comparatorFactories, predEvaluatorFactory, recDescriptor, true, nonMatchWriterFactories,
                    tableSize, memSizeInFrames);
            break;
        }
        default: {
            throw new NotImplementedException();
        }
    }
    contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
    ILogicalOperator src1 = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src1, 0, op, 0);
    ILogicalOperator src2 = op.getInputs().get(1).getValue();
    builder.contributeGraphEdge(src2, 0, op, 1);
}
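The manual loop over keysLeftBranch performs the same work that JobGenHelper.variablesToAscBinaryComparatorFactories did in the group-by snippet: look up each variable's type in the IVariableTypeEnvironment, then ask the provider for a comparator specialized to that type. Factored out as a hedged standalone sketch (the class and method names are ours; import paths assumed from the algebricks layout):

import java.util.List;

import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;

public final class ComparatorSketch {

    private ComparatorSketch() {
    }

    // One ascending comparator factory per variable: resolve the variable's
    // type through the type environment, then ask the provider for a typed
    // comparator, just as the join snippet above does inline.
    public static IBinaryComparatorFactory[] ascComparators(List<LogicalVariable> vars,
            IVariableTypeEnvironment env, IBinaryComparatorFactoryProvider bcfp) throws AlgebricksException {
        IBinaryComparatorFactory[] factories = new IBinaryComparatorFactory[vars.size()];
        int i = 0;
        for (LogicalVariable v : vars) {
            Object t = env.getVarType(v); // may throw if the variable is unknown to this environment
            factories[i++] = bcfp.getBinaryComparatorFactory(t, true); // true => ascending order
        }
        return factories;
    }
}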
Use of org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment in project asterixdb by apache.
The class InMemoryStableSortPOperator, method contributeRuntimeOperator:
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor recDescriptor =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    int i = 0;
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    for (OrderColumn oc : sortColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[i] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        OrderKind order = oc.getOrder();
        // A normalized key computer is only requested for the leading sort column.
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, order == OrderKind.ASC);
        }
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, oc.getOrder() == OrderKind.ASC);
        i++;
    }
    IPushRuntimeFactory runtime = new InMemorySortRuntimeFactory(sortFields, nkcf, comps, null);
    builder.contributeMicroOperator(op, runtime, recDescriptor);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
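Two details are worth noting. The sort is again contributed as a micro operator, i.e. a push runtime running inside its parent pipeline. And the normalized key computer factory is requested only for the leading sort column (i == 0): a normalized key in Hyracks is a fixed-length integer prefix used to short-circuit full binary comparisons, and since comparisons are decided column by column, only the first sort column benefits from one.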
Use of org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment in project asterixdb by apache.
The class IndexInsertDeleteUpsertPOperator, method contributeRuntimeOperator:
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    IndexInsertDeleteUpsertOperator insertDeleteUpsertOp = (IndexInsertDeleteUpsertOperator) op;
    IMetadataProvider mp = context.getMetadataProvider();
    JobSpecification spec = builder.getJobSpec();
    RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
            context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints;
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(insertDeleteUpsertOp);
    if (insertDeleteUpsertOp.getOperation() == Kind.INSERT) {
        runtimeAndConstraints = mp.getIndexInsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv,
                primaryKeys, secondaryKeys, additionalFilteringKeys, filterExpr, inputDesc, context, spec, false);
    } else if (insertDeleteUpsertOp.getOperation() == Kind.DELETE) {
        runtimeAndConstraints = mp.getIndexDeleteRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv,
                primaryKeys, secondaryKeys, additionalFilteringKeys, filterExpr, inputDesc, context, spec);
    } else if (insertDeleteUpsertOp.getOperation() == Kind.UPSERT) {
        runtimeAndConstraints = mp.getIndexUpsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv,
                primaryKeys, secondaryKeys, additionalFilteringKeys, filterExpr, prevSecondaryKeys,
                prevAdditionalFilteringKey, inputDesc, context, spec);
    } else {
        // Guard against dereferencing a null pair below (mirrors the check in
        // InsertDeleteUpsertPOperator at the top of this page).
        throw new AlgebricksException("Unsupported Operation " + insertDeleteUpsertOp.getOperation());
    }
    builder.contributeHyracksOperator(insertDeleteUpsertOp, runtimeAndConstraints.first);
    builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
    ILogicalOperator src = insertDeleteUpsertOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, insertDeleteUpsertOp, 0);
}
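Unlike the primary-path InsertDeleteUpsertPOperator at the top of the page, the UPSERT branch here also threads through prevSecondaryKeys and prevAdditionalFilteringKey: these carry the previous record's key values, so that the secondary-index runtime can remove the stale index entry before inserting the new one. The trailing else guard matters for the same reason as in the first snippet; without it, an unexpected operation kind would leave runtimeAndConstraints unassigned and the very next line would fail with a NullPointerException.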