Use of org.apache.hyracks.api.dataflow.IOperatorDescriptor in project asterixdb by apache.
In the class JobBuilder, the method setPartitionConstraintsTopdown:
private void setPartitionConstraintsTopdown(OperatorDescriptorId opId,
        Map<IConnectorDescriptor, TargetConstraint> tgtConstraints, IOperatorDescriptor parentOp) {
    List<IConnectorDescriptor> opInputs = jobSpec.getOperatorInputMap().get(opId);
    AlgebricksPartitionConstraint opConstraint;
    IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(opId);
    if (opInputs != null) {
        for (IConnectorDescriptor conn : opInputs) {
            ConnectorDescriptorId cid = conn.getConnectorId();
            org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p =
                    jobSpec.getConnectorOperatorMap().get(cid);
            IOperatorDescriptor src = p.getLeft().getLeft();
            TargetConstraint constraint = tgtConstraints.get(conn);
            if (constraint != null) {
                if (constraint == TargetConstraint.SAME_COUNT) {
                    // The source must run with the same partition count as this operator:
                    // copy the constraint downward if the source has none yet.
                    opConstraint = partitionConstraintMap.get(opDesc);
                    if (partitionConstraintMap.get(src) == null) {
                        if (opConstraint != null) {
                            partitionConstraintMap.put(src, opConstraint);
                            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, src,
                                    opConstraint);
                        }
                    }
                }
            }
            // Post-order DFS: recurse into the source operator of each input connector.
            setPartitionConstraintsTopdown(src.getOperatorId(), tgtConstraints, opDesc);
        }
    }
}
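The SAME_COUNT branch only copies a constraint from an operator to its source when the source does not have one yet. Below is a minimal, self-contained sketch of that rule (hypothetical names; plain Java maps stand in for Hyracks operator descriptors; not taken from the AsterixDB sources):

import java.util.HashMap;
import java.util.Map;

public final class SameCountPropagationSketch {

    // Copies the target's constraint onto the source if the source has none yet,
    // mirroring the SAME_COUNT case above. Keys stand in for IOperatorDescriptor.
    static <K, C> void propagateSameCount(Map<K, C> constraintMap, K source, K target) {
        C targetConstraint = constraintMap.get(target);
        if (targetConstraint != null && constraintMap.get(source) == null) {
            constraintMap.put(source, targetConstraint);
        }
    }

    public static void main(String[] args) {
        Map<String, Integer> partitionCounts = new HashMap<>();
        partitionCounts.put("join", 4);                      // downstream operator runs on 4 partitions
        propagateSameCount(partitionCounts, "scan", "join"); // scan feeds join through a SAME_COUNT connector
        System.out.println(partitionCounts.get("scan"));     // prints 4
    }
}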
Use of org.apache.hyracks.api.dataflow.IOperatorDescriptor in project asterixdb by apache.
In the class JobBuilder, the method findOpDescForAlgebraicOp:
private IOperatorDescriptor findOpDescForAlgebraicOp(ILogicalOperator op) throws AlgebricksException {
    // First try the direct mapping from logical operators to Hyracks operators.
    IOperatorDescriptor hOpDesc = hyracksOps.get(op);
    if (hOpDesc != null) {
        return hOpDesc;
    }
    // Otherwise the logical operator was fused into a meta operator; resolve through that mapping.
    Integer metaOpKey = algebraicOpBelongingToMetaAsterixOp.get(op);
    if (metaOpKey == null) {
        throw new AlgebricksException("Could not generate operator descriptor for operator " + op);
    }
    return metaAsterixOps.get(metaOpKey);
}
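A standalone illustration of this two-level lookup (plain strings instead of operator descriptors; all operator names and ids below are made up): a logical operator resolves either to its own Hyracks operator, or to the meta operator that several logical operators were fused into.

import java.util.HashMap;
import java.util.Map;

public final class MetaOpLookupSketch {

    public static void main(String[] args) {
        // Direct mapping: logical ops that got their own Hyracks operator.
        Map<String, String> hyracksOps = new HashMap<>();
        hyracksOps.put("data-scan", "BTreeSearchOperatorDescriptor#2");

        // Fused mapping: logical ops absorbed into a meta operator, keyed by a meta-op id.
        Map<String, Integer> belongsToMetaOp = Map.of("assign", 7, "stream-project", 7);
        Map<Integer, String> metaOps = Map.of(7, "AlgebricksMetaOperatorDescriptor#7");

        for (String logicalOp : new String[] { "data-scan", "assign", "stream-project" }) {
            String desc = hyracksOps.get(logicalOp);
            if (desc == null) {
                desc = metaOps.get(belongsToMetaOp.get(logicalOp)); // fall back to the meta operator
            }
            System.out.println(logicalOp + " -> " + desc);
        }
    }
}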
Use of org.apache.hyracks.api.dataflow.IOperatorDescriptor in project asterixdb by apache.
In the class JobBuilder, the method buildSpec:
@Override
public void buildSpec(List<ILogicalOperator> roots) throws AlgebricksException {
    buildAsterixComponents();
    Map<IConnectorDescriptor, TargetConstraint> tgtConstraints = setupConnectors();
    for (ILogicalOperator r : roots) {
        IOperatorDescriptor opDesc = findOpDescForAlgebraicOp(r);
        jobSpec.addRoot(opDesc);
    }
    setAllPartitionConstraints(tgtConstraints);
}
Use of org.apache.hyracks.api.dataflow.IOperatorDescriptor in project asterixdb by apache.
In the class BulkloadPOperator, the method contributeRuntimeOperator:
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op;
assert insertDeleteOp.getOperation() == Kind.INSERT;
assert insertDeleteOp.isBulkload();
IMetadataProvider mp = context.getMetadataProvider();
IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload, additionalFilteringKeys, additionalNonFilterVars, inputDesc, context, spec, true);
builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, insertDeleteOp, 0);
}
Use of org.apache.hyracks.api.dataflow.IOperatorDescriptor in project asterixdb by apache.
In the class DataSourceScanPOperator, the method contributeRuntimeOperator:
@SuppressWarnings("unchecked")
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
DataSourceScanOperator scan = (DataSourceScanOperator) op;
IMetadataProvider mp = context.getMetadataProvider();
IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
List<LogicalVariable> vars = scan.getVariables();
List<LogicalVariable> projectVars = scan.getProjectVariables();
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = mp.getScannerRuntime(dataSource, vars, projectVars, scan.isProjectPushed(), scan.getMinFilterVars(), scan.getMaxFilterVars(), opSchema, typeEnv, context, builder.getJobSpec(), implConfig);
builder.contributeHyracksOperator(scan, p.first);
if (p.second != null) {
builder.contributeAlgebricksPartitionConstraint(p.first, p.second);
}
ILogicalOperator srcExchange = scan.getInputs().get(0).getValue();
builder.contributeGraphEdge(srcExchange, 0, scan, 0);
}
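Both physical operators above follow the same contribution pattern once the metadata provider has produced a runtime/constraint pair: register the Hyracks operator, attach its partition constraint if one was returned, and wire an edge from the logical operator's first input. A condensed sketch of that shared pattern is shown below (a hypothetical helper, not part of the AsterixDB sources; it uses only builder calls that appear in the snippets above, and the package names are taken from the Algebricks/Hyracks modules and should be verified against the project version in use).

import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;

final class RuntimeContributionSketch {

    // Hypothetical helper: the caller has already asked the metadata provider for the
    // runtime/constraint pair (e.g. via getInsertRuntime or getScannerRuntime above).
    static void contribute(IHyracksJobBuilder builder, ILogicalOperator logicalOp,
            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraint) {
        builder.contributeHyracksOperator(logicalOp, runtimeAndConstraint.first);
        if (runtimeAndConstraint.second != null) {
            builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraint.first, runtimeAndConstraint.second);
        }
        ILogicalOperator src = logicalOp.getInputs().get(0).getValue();
        builder.contributeGraphEdge(src, 0, logicalOp, 0);
    }
}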