Use of org.apache.hyracks.api.constraints.expressions.PartitionCountExpression in project asterixdb by apache.
In class ActivityClusterPlanner, the method computePartitionCounts:
private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac) throws HyracksException {
    PartitionConstraintSolver solver = executor.getSolver();
    // Ask the solver for the partition count of every operator in the cluster.
    Set<LValueConstraintExpression> lValues = new HashSet<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
    }
    solver.solve(lValues);
    Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<>();
    for (LValueConstraintExpression lv : lValues) {
        Object value = solver.getValue(lv);
        if (value == null) {
            throw new HyracksException("No value found for " + lv);
        }
        if (!(value instanceof Number)) {
            throw new HyracksException(
                    "Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
        }
        int nParts = ((Number) value).intValue();
        if (nParts <= 0) {
            throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
        }
        nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), nParts);
    }
    // For each activity, record its own partition count plus the partition counts
    // of the activities that feed its inputs and consume its outputs.
    Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        int nParts = nPartMap.get(anId.getOperatorDescriptorId());
        int[] nInputPartitions = null;
        List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
        if (inputs != null) {
            nInputPartitions = new int[inputs.size()];
            for (int i = 0; i < nInputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
                ActivityId aid = ac.getProducerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nInputPartitions[i] = nPartInt;
            }
        }
        int[] nOutputPartitions = null;
        List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
        if (outputs != null) {
            nOutputPartitions = new int[outputs.size()];
            for (int i = 0; i < nOutputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
                ActivityId aid = ac.getConsumerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nOutputPartitions[i] = nPartInt;
            }
        }
        ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
        activityPartsMap.put(anId, apd);
    }
    return activityPartsMap;
}
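The solver above can only resolve a PartitionCountExpression that has been bound somewhere, typically by a user constraint registered on the JobSpecification. A minimal sketch of binding such a count, assuming an already-built JobSpecification named spec and an operator descriptor op (both names are illustrative):

import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.job.JobSpecification;

// Sketch only: binds an explicit partition count so that the solver in
// computePartitionCounts can resolve the operator's PartitionCountExpression.
public class PartitionCountConstraintSketch {
    static void constrain(JobSpecification spec, IOperatorDescriptor op, int nParts) {
        // Hand-built form of the constraint: PARTITION_COUNT(op) == nParts.
        spec.addUserConstraint(new Constraint(
                new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(nParts)));
        // The helper is the usual shorthand for the same constraint:
        // PartitionConstraintHelper.addPartitionCountConstraint(spec, op, nParts);
    }
}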
Use of org.apache.hyracks.api.constraints.expressions.PartitionCountExpression in project asterixdb by apache.
In class JobSpecification, the method toJSON:
@SuppressWarnings("incomplete-switch")
public ObjectNode toJSON() throws IOException {
    ObjectMapper om = new ObjectMapper();
    ObjectNode jjob = om.createObjectNode();
    ArrayNode jopArray = om.createArrayNode();
    for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
        ObjectNode op = e.getValue().toJSON();
        if (!userConstraints.isEmpty()) {
            // Add operator partition constraints to each JSON operator.
            ObjectNode pcObject = om.createObjectNode();
            ObjectNode pleObject = om.createObjectNode();
            Iterator<Constraint> test = userConstraints.iterator();
            while (test.hasNext()) {
                Constraint constraint = test.next();
                switch (constraint.getLValue().getTag()) {
                    case PARTITION_COUNT:
                        PartitionCountExpression pce = (PartitionCountExpression) constraint.getLValue();
                        if (e.getKey() == pce.getOperatorDescriptorId()) {
                            pcObject.put("count", getConstraintExpressionRValue(constraint));
                        }
                        break;
                    case PARTITION_LOCATION:
                        PartitionLocationExpression ple = (PartitionLocationExpression) constraint.getLValue();
                        if (e.getKey() == ple.getOperatorDescriptorId()) {
                            pleObject.put(Integer.toString(ple.getPartition()),
                                    getConstraintExpressionRValue(constraint));
                        }
                        break;
                }
            }
            if (pleObject.size() > 0) {
                pcObject.set("location", pleObject);
            }
            if (pcObject.size() > 0) {
                op.set("partition-constraints", pcObject);
            }
        }
        jopArray.add(op);
    }
    jjob.set("operators", jopArray);
    ArrayNode jcArray = om.createArrayNode();
    for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
        ObjectNode conn = om.createObjectNode();
        Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection =
                connectorOpMap.get(e.getKey());
        if (connection != null) {
            conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
            conn.put("in-operator-port", connection.getLeft().getRight().intValue());
            conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
            conn.put("out-operator-port", connection.getRight().getRight().intValue());
        }
        conn.set("connector", e.getValue().toJSON());
        jcArray.add(conn);
    }
    jjob.set("connectors", jcArray);
    return jjob;
}
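As a rough usage sketch (assuming an existing JobSpecification named spec whose operators and user constraints have already been added), the partition-count and partition-location constraints end up nested under each operator's "partition-constraints" field:

import java.io.IOException;

import com.fasterxml.jackson.databind.node.ObjectNode;

import org.apache.hyracks.api.job.JobSpecification;

// Sketch only: serialize a job and print the operator array, where toJSON
// attaches a "partition-constraints" object ("count" plus "location") per operator.
public class JobSpecJsonSketch {
    static void dump(JobSpecification spec) throws IOException {
        ObjectNode json = spec.toJSON();
        System.out.println(json.get("operators").toString());
    }
}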
Use of org.apache.hyracks.api.constraints.expressions.PartitionCountExpression in project asterixdb by apache.
In class OneToOneConnectorDescriptor, the method contributeSchedulingConstraints:
@Override
public void contributeSchedulingConstraints(IConstraintAcceptor constraintAcceptor, ActivityCluster ac,
        ICCServiceContext serviceCtx) {
    OperatorDescriptorId consumer = ac.getConsumerActivity(getConnectorId()).getOperatorDescriptorId();
    OperatorDescriptorId producer = ac.getProducerActivity(getConnectorId()).getOperatorDescriptorId();
    // A one-to-one connector cannot repartition data, so the consumer's
    // partition count is constrained to equal the producer's.
    constraintAcceptor.addConstraint(
            new Constraint(new PartitionCountExpression(consumer), new PartitionCountExpression(producer)));
}
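The same equality between two operators' partition counts can also be registered directly on a job, outside any connector. A minimal sketch assuming a JobSpecification named spec and two operator descriptors (all names are illustrative):

import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.job.JobSpecification;

// Sketch only: mirrors the constraint contributed by OneToOneConnectorDescriptor,
// forcing the solver to assign both operators the same partition count.
public class EqualPartitionCountSketch {
    static void tieCounts(JobSpecification spec, IOperatorDescriptor producer, IOperatorDescriptor consumer) {
        spec.addUserConstraint(new Constraint(
                new PartitionCountExpression(consumer.getOperatorId()),
                new PartitionCountExpression(producer.getOperatorId())));
    }
}

Equating the two PartitionCountExpressions is what keeps the producer and consumer deployable at the same degree of parallelism when they are linked one-to-one.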