Use of org.apache.hyracks.api.dataflow.IConnectorDescriptor in project asterixdb by apache:
the createConnectorDescriptor method of class HashPartitionExchangePOperator.
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int[] keys = new int[hashFields.size()];
    IBinaryHashFunctionFactory[] hashFunctionFactories = new IBinaryHashFunctionFactory[hashFields.size()];
    int i = 0;
    IBinaryHashFunctionFactoryProvider hashFunProvider = context.getBinaryHashFunctionFactoryProvider();
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    // Resolve each hash variable to its column index in the operator schema and
    // pick a type-appropriate binary hash function for it.
    for (LogicalVariable v : hashFields) {
        keys[i] = opSchema.findVariable(v);
        hashFunctionFactories[i] = hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
        ++i;
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(keys, hashFunctionFactories);
    // M:N hash partitioning; no target constraint is imposed (null).
    IConnectorDescriptor conn = new MToNPartitioningConnectorDescriptor(spec, tpcf);
    return new Pair<>(conn, null);
}
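At runtime, the FieldHashPartitionComputerFactory built above hashes a tuple's key fields and maps the hash onto one of the consumer partitions. A minimal self-contained sketch of that mapping, with illustrative names and Object tuples standing in for the binary frame data the real factory operates on:

import java.util.Objects;

public class HashPartitionSketch {
    // Map a tuple's key fields to one of nConsumers target partitions.
    static int choosePartition(Object[] tuple, int[] keys, int nConsumers) {
        int h = 0;
        for (int k : keys) {
            h = h * 31 + Objects.hashCode(tuple[k]); // combine per-field hashes
        }
        // Mask the sign bit so the modulo result is always non-negative.
        return (h & Integer.MAX_VALUE) % nConsumers;
    }

    public static void main(String[] args) {
        Object[] tuple = { "user42", 7, "2024-01-01" };
        int[] keys = { 0 }; // partition on the first field only
        // Equal keys always map to the same consumer, which is what makes
        // hash partitioning usable for joins and group-bys downstream.
        System.out.println(choosePartition(tuple, keys, 4));
    }
}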
Use of org.apache.hyracks.api.dataflow.IConnectorDescriptor in project asterixdb by apache:
the createConnectorDescriptor method of class HashPartitionMergeExchangePOperator.
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int[] keys = new int[partitionFields.size()];
    IBinaryHashFunctionFactory[] hashFunctionFactories = new IBinaryHashFunctionFactory[partitionFields.size()];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    {
        // Same key/hash-function setup as the non-merging hash exchange above.
        int i = 0;
        IBinaryHashFunctionFactoryProvider hashFunProvider = context.getBinaryHashFunctionFactoryProvider();
        for (LogicalVariable v : partitionFields) {
            keys[i] = opSchema.findVariable(v);
            hashFunctionFactories[i] = hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
            ++i;
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(keys, hashFunctionFactories);
    // Additionally collect the sort fields and comparators so the receive side
    // can merge the already-sorted incoming streams.
    int n = orderColumns.size();
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[n];
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    int j = 0;
    for (OrderColumn oc : orderColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[j] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comparatorFactories[j] = bcfp.getBinaryComparatorFactory(type, oc.getOrder() == OrderKind.ASC);
        // A normalized key computer (for cheap comparisons) is derived from the
        // first sort column only.
        if (j == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, oc.getOrder() == OrderKind.ASC);
        }
        j++;
    }
    IConnectorDescriptor conn =
            new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comparatorFactories, nkcf);
    return new Pair<>(conn, null);
}
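On the receive side, a merging connector can assume each sender's stream is already sorted on sortFields, so producing globally sorted output per partition reduces to a k-way merge driven by the comparators. A self-contained sketch of that merge idea, with hypothetical names; the real connector merges binary frames, not Java objects:

import java.util.*;

public class KWayMergeSketch {
    static <T> List<T> merge(List<Iterator<T>> sortedStreams, Comparator<T> cmp) {
        // Priority queue keyed by each stream's current head element.
        PriorityQueue<Map.Entry<T, Iterator<T>>> pq =
                new PriorityQueue<>((a, b) -> cmp.compare(a.getKey(), b.getKey()));
        for (Iterator<T> it : sortedStreams) {
            if (it.hasNext()) {
                pq.add(Map.entry(it.next(), it));
            }
        }
        List<T> out = new ArrayList<>();
        while (!pq.isEmpty()) {
            // Emit the smallest head, then refill from the stream it came from.
            Map.Entry<T, Iterator<T>> e = pq.poll();
            out.add(e.getKey());
            if (e.getValue().hasNext()) {
                pq.add(Map.entry(e.getValue().next(), e.getValue()));
            }
        }
        return out;
    }

    public static void main(String[] args) {
        List<Iterator<Integer>> streams = List.of(
                List.of(1, 4, 9).iterator(),
                List.of(2, 3, 8).iterator());
        System.out.println(merge(streams, Comparator.naturalOrder())); // [1, 2, 3, 4, 8, 9]
    }
}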
Use of org.apache.hyracks.api.dataflow.IConnectorDescriptor in project asterixdb by apache:
the contributeRuntimeOperator method of class RandomPartitionExchangePOperator.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    // Create the connector and register it with its target constraint, then
    // wire the operator to its single input in the job graph.
    Pair<IConnectorDescriptor, TargetConstraint> connPair =
            createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
    builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
Use of org.apache.hyracks.api.dataflow.IConnectorDescriptor in project asterixdb by apache:
the contributeRuntimeOperator method of class AbstractExchangePOperator.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    // Identical wiring to RandomPartitionExchangePOperator above: the connector
    // itself is the only subclass-specific piece.
    Pair<IConnectorDescriptor, TargetConstraint> connPair =
            createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
    builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
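The two snippets above show the template-method split: the base class fixes how an exchange contributes itself to the job graph, while subclasses vary only in which connector createConnectorDescriptor returns. A simplified stand-alone sketch of that pattern, using stand-in types rather than the actual Algebricks interfaces:

import java.util.List;

abstract class ExchangeSketch {
    // The base class owns the wiring, written exactly once for all exchanges.
    final void contributeRuntimeOperator(List<String> jobGraph) {
        String connector = createConnectorDescriptor(); // subclass hook
        jobGraph.add("connect source -> op via " + connector);
    }

    // Each concrete exchange decides only which connector to build.
    abstract String createConnectorDescriptor();
}

class HashExchangeSketch extends ExchangeSketch {
    @Override
    String createConnectorDescriptor() {
        return "MToNPartitioningConnector(hashFields)";
    }
}

class RandomExchangeSketch extends ExchangeSketch {
    @Override
    String createConnectorDescriptor() {
        return "MToNRandomConnector";
    }
}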
Use of org.apache.hyracks.api.dataflow.IConnectorDescriptor in project asterixdb by apache:
the computePartitionCounts method of class ActivityClusterPlanner.
private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac) throws HyracksException {
    // Ask the constraint solver for a partition count per operator descriptor.
    PartitionConstraintSolver solver = executor.getSolver();
    Set<LValueConstraintExpression> lValues = new HashSet<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
    }
    solver.solve(lValues);
    // Validate the solver's answers: every count must be a positive number.
    Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<>();
    for (LValueConstraintExpression lv : lValues) {
        Object value = solver.getValue(lv);
        if (value == null) {
            throw new HyracksException("No value found for " + lv);
        }
        if (!(value instanceof Number)) {
            throw new HyracksException(
                    "Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
        }
        int nParts = ((Number) value).intValue();
        if (nParts <= 0) {
            throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
        }
        nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), nParts);
    }
    // For each activity, derive the partition counts of its producers (inputs)
    // and consumers (outputs) by following each connector to the activity on
    // the other side.
    Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        int nParts = nPartMap.get(anId.getOperatorDescriptorId());
        int[] nInputPartitions = null;
        List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
        if (inputs != null) {
            nInputPartitions = new int[inputs.size()];
            for (int i = 0; i < nInputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
                ActivityId aid = ac.getProducerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nInputPartitions[i] = nPartInt;
            }
        }
        int[] nOutputPartitions = null;
        List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
        if (outputs != null) {
            nOutputPartitions = new int[outputs.size()];
            for (int i = 0; i < nOutputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
                ActivityId aid = ac.getConsumerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nOutputPartitions[i] = nPartInt;
            }
        }
        ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
        activityPartsMap.put(anId, apd);
    }
    return activityPartsMap;
}
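To make the bookkeeping concrete, here is a hypothetical stand-alone walk-through of the second half of the method: operator-level counts come back from the solver, and an activity's input partition array is filled by following each input connector to its producer's operator. All names and values below are invented for illustration:

import java.util.*;

public class PartitionCountSketch {
    public static void main(String[] args) {
        // Pretend the solver resolved these operator-level counts.
        Map<String, Integer> nPartMap = Map.of("scan", 4, "join", 2);

        // The "join" activity has one input connector whose producer is "scan".
        List<String> inputProducerOps = List.of("scan");

        int[] nInputPartitions = new int[inputProducerOps.size()];
        for (int i = 0; i < nInputPartitions.length; i++) {
            Integer n = nPartMap.get(inputProducerOps.get(i));
            if (n == null || n <= 0) { // mirrors the null / <= 0 checks above
                throw new IllegalStateException("Unsatisfiable partition count for "
                        + inputProducerOps.get(i));
            }
            nInputPartitions[i] = n;
        }
        // The join thus expects data from 4 producer partitions on its input.
        System.out.println(Arrays.toString(nInputPartitions)); // prints [4]
    }
}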