Use of org.apache.hyracks.api.dataflow.ActivityId in project asterixdb by apache.
The class UnionAllOperatorDescriptor, method contributeActivities.
@Override
public void contributeActivities(IActivityGraphBuilder builder) {
    UnionActivityNode uba = new UnionActivityNode(new ActivityId(getOperatorId(), 0));
    builder.addActivity(this, uba);
    for (int i = 0; i < inputArity; ++i) {
        builder.addSourceEdge(i, uba, i);
    }
    builder.addTargetEdge(0, uba, 0);
}
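For contrast, here is a minimal sketch (not from the AsterixDB sources) of how a hypothetical single-activity operator would construct its ActivityId and wire its edges, using only the IActivityGraphBuilder calls shown above; MyActivityNode and the operator class are invented names, and the local id 0 simply distinguishes the activity within its operator.

// Hypothetical single-activity operator, for illustration only.
// The builder calls mirror those used by UnionAllOperatorDescriptor above.
@Override
public void contributeActivities(IActivityGraphBuilder builder) {
    // One ActivityId per activity: (operator id, local id within the operator).
    ActivityId aid = new ActivityId(getOperatorId(), 0);
    MyActivityNode node = new MyActivityNode(aid);
    builder.addActivity(this, node);
    // Operator input 0 feeds the activity's input 0 ...
    builder.addSourceEdge(0, node, 0);
    // ... and the activity's output 0 becomes the operator's output 0.
    builder.addTargetEdge(0, node, 0);
}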
Use of org.apache.hyracks.api.dataflow.ActivityId in project asterixdb by apache.
The class OptimizedHybridHashJoinOperatorDescriptor, method contributeActivities.
@Override
public void contributeActivities(IActivityGraphBuilder builder) {
    ActivityId buildAid = new ActivityId(odId, BUILD_AND_PARTITION_ACTIVITY_ID);
    ActivityId probeAid = new ActivityId(odId, PARTITION_AND_JOIN_ACTIVITY_ID);
    PartitionAndBuildActivityNode phase1 = new PartitionAndBuildActivityNode(buildAid, probeAid);
    ProbeAndJoinActivityNode phase2 = new ProbeAndJoinActivityNode(probeAid, buildAid);
    builder.addActivity(this, phase1);
    builder.addSourceEdge(1, phase1, 0);
    builder.addActivity(this, phase2);
    builder.addSourceEdge(0, phase2, 0);
    builder.addBlockingEdge(phase1, phase2);
    builder.addTargetEdge(0, phase2, 0);
}
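Here the build side (operator input 1) feeds phase1 and the probe side (operator input 0) feeds phase2, and the blocking edge keeps phase2 from starting until phase1 has completed. Each activity is also handed the other's ActivityId so that, at runtime, the probe side can locate state the build side produced. The following sketch is only an illustration of that pattern, assuming a hypothetical stateByActivity map; it is not the Hyracks task-state API, but it relies on the fact that ActivityId works as a map key, as the SuperActivity and ActivityCluster examples below do.

// Sketch only: a hypothetical probe-side component that remembers the build
// phase's ActivityId so it can look up state the build phase registered.
// The stateByActivity map is an illustrative stand-in, not a Hyracks API.
import java.util.Map;
import org.apache.hyracks.api.dataflow.ActivityId;

class ProbeSideSketch {
    private final ActivityId probeAid;
    private final ActivityId buildAid;
    private final Map<ActivityId, Object> stateByActivity;

    ProbeSideSketch(ActivityId probeAid, ActivityId buildAid, Map<ActivityId, Object> stateByActivity) {
        this.probeAid = probeAid;
        this.buildAid = buildAid;
        this.stateByActivity = stateByActivity;
    }

    Object lookupBuildState() {
        // ActivityId is usable as a map key, which is what lets the probe
        // phase find whatever the build phase stored under buildAid.
        return stateByActivity.get(buildAid);
    }
}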
Use of org.apache.hyracks.api.dataflow.ActivityId in project asterixdb by apache.
The class ActivityCluster, method toJSON.
public JsonNode toJSON() {
    ObjectMapper om = new ObjectMapper();
    ArrayNode jans = om.createArrayNode();
    ObjectNode jac = om.createObjectNode();
    for (IActivity an : activities.values()) {
        ObjectNode jan = om.createObjectNode();
        jan.put("id", an.getActivityId().toString());
        jan.put("java-class", an.getClass().getName());
        List<IConnectorDescriptor> inputs = activityInputMap.get(an.getActivityId());
        if (inputs != null) {
            ArrayNode jInputs = om.createArrayNode();
            for (int i = 0; i < inputs.size(); ++i) {
                ObjectNode jInput = om.createObjectNode();
                jInput.put("input-port", i);
                jInput.put("connector-id", inputs.get(i).getConnectorId().toString());
                jInputs.add(jInput);
            }
            jan.set("inputs", jInputs);
        }
        List<IConnectorDescriptor> outputs = activityOutputMap.get(an.getActivityId());
        if (outputs != null) {
            ArrayNode jOutputs = om.createArrayNode();
            for (int i = 0; i < outputs.size(); ++i) {
                ObjectNode jOutput = om.createObjectNode();
                jOutput.put("output-port", i);
                jOutput.put("connector-id", outputs.get(i).getConnectorId().toString());
                jOutputs.add(jOutput);
            }
            jan.set("outputs", jOutputs);
        }
        Set<ActivityId> blockers = getBlocked2BlockerMap().get(an.getActivityId());
        if (blockers != null) {
            ArrayNode jDeps = om.createArrayNode();
            for (ActivityId blocker : blockers) {
                jDeps.add(blocker.toString());
            }
            jan.set("depends-on", jDeps);
        }
        jans.add(jan);
    }
    jac.set("activities", jans);
    return jac;
}
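The method serializes each activity by its ActivityId string plus its input, output, and blocker edges. As a rough illustration, calling it on a cluster with one activity would print something like the shape sketched in the comments below; the field names come from the put/set calls above, but the identifier values are invented and the 'cluster' variable is assumed to be an ActivityCluster obtained elsewhere.

// Hypothetical usage of toJSON(); 'cluster' is an ActivityCluster instance.
JsonNode json = cluster.toJSON();
System.out.println(json.toPrettyString());
// Expected shape (identifier strings invented for illustration):
// {
//   "activities" : [ {
//     "id" : "ANID:ODID:0:1",
//     "java-class" : "org.example.SomeActivityNode",
//     "inputs" : [ { "input-port" : 0, "connector-id" : "CDID:0" } ],
//     "outputs" : [ { "output-port" : 0, "connector-id" : "CDID:1" } ],
//     "depends-on" : [ "ANID:ODID:0:0" ]
//   } ]
// }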
Use of org.apache.hyracks.api.dataflow.ActivityId in project asterixdb by apache.
The class SuperActivity, method createPushRuntime.
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, final IRecordDescriptorProvider recordDescProvider,
        final int partition, final int nPartitions) throws HyracksDataException {
    final Map<ActivityId, IActivity> startActivities = new HashMap<ActivityId, IActivity>();
    Map<ActivityId, IActivity> activities = getActivityMap();
    for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
        /**
         * extract start activities
         */
        List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
        if (conns == null || conns.isEmpty()) {
            startActivities.put(entry.getKey(), entry.getValue());
        }
    }
    /**
     * wrap a RecordDescriptorProvider for the super activity
     */
    IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {

        @Override
        public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
            if (startActivities.get(aid) != null) {
                /**
                 * if the activity is a start (input boundary) activity
                 */
                int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
                if (superActivityInputChannel >= 0) {
                    return recordDescProvider.getInputRecordDescriptor(activityId, superActivityInputChannel);
                }
            }
            if (SuperActivity.this.getActivityMap().get(aid) != null) {
                /**
                 * if the activity is an internal activity of the super activity
                 */
                IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
                return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
            /**
             * the following is for the case where the activity is in other SuperActivities
             */
            ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
            for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
                ActivityCluster ac = entry.getValue();
                for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
                    SuperActivity sa = (SuperActivity) saEntry.getValue();
                    if (sa.getActivityMap().get(aid) != null) {
                        List<IConnectorDescriptor> conns = sa.getActivityInputMap().get(aid);
                        if (conns != null && conns.size() >= inputIndex) {
                            IConnectorDescriptor conn = conns.get(inputIndex);
                            return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                        } else {
                            int superActivityInputChannel = sa.getClusterInputIndex(Pair.of(aid, inputIndex));
                            if (superActivityInputChannel >= 0) {
                                return recordDescProvider.getInputRecordDescriptor(sa.getActivityId(), superActivityInputChannel);
                            }
                        }
                    }
                }
            }
            return null;
        }

        @Override
        public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
            /**
             * if the activity is an output-boundary activity
             */
            int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
            if (superActivityOutputChannel >= 0) {
                return recordDescProvider.getOutputRecordDescriptor(activityId, superActivityOutputChannel);
            }
            if (SuperActivity.this.getActivityMap().get(aid) != null) {
                /**
                 * if the activity is an internal activity of the super activity
                 */
                IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
                return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
            /**
             * the following is for the case where the activity is in other SuperActivities
             */
            ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
            for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
                ActivityCluster ac = entry.getValue();
                for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
                    SuperActivity sa = (SuperActivity) saEntry.getValue();
                    if (sa.getActivityMap().get(aid) != null) {
                        List<IConnectorDescriptor> conns = sa.getActivityOutputMap().get(aid);
                        if (conns != null && conns.size() >= outputIndex) {
                            IConnectorDescriptor conn = conns.get(outputIndex);
                            return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                        } else {
                            superActivityOutputChannel = sa.getClusterOutputIndex(Pair.of(aid, outputIndex));
                            if (superActivityOutputChannel >= 0) {
                                return recordDescProvider.getOutputRecordDescriptor(sa.getActivityId(), superActivityOutputChannel);
                            }
                        }
                    }
                }
            }
            return null;
        }
    };
    return new SuperActivityOperatorNodePushable(this, startActivities, ctx, wrappedRecDescProvider, partition, nPartitions);
}
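The wrapped provider resolves a record descriptor for an (ActivityId, port) pair in three steps: if the activity is a boundary (start or end) activity of this super activity, it delegates to the outer provider using the super activity's own channel index; if the activity is internal, it reads the descriptor off the connector map; otherwise it searches the other SuperActivities in the graph. The boundary lookups rely on getClusterInputIndex/getClusterOutputIndex returning -1 for non-boundary ports. The sketch below only illustrates that keying idea with a Pair of (ActivityId, port); the class and method bodies are invented, and it assumes the Pair in use is Apache Commons Lang's, which is what Pair.of above suggests.

// Illustrative sketch of the boundary mapping that getClusterInputIndex()
// implies: (inner activity id, inner input port) -> super-activity input channel.
// Names are invented for illustration; this is not the Hyracks implementation.
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.dataflow.ActivityId;

class BoundaryIndexSketch {
    private final Map<Pair<ActivityId, Integer>, Integer> clusterInputIndex = new HashMap<>();

    void mapInput(ActivityId innerAid, int innerPort, int superChannel) {
        clusterInputIndex.put(Pair.of(innerAid, innerPort), superChannel);
    }

    int getClusterInputIndex(Pair<ActivityId, Integer> key) {
        Integer channel = clusterInputIndex.get(key);
        // -1 signals "not a boundary input", matching the >= 0 checks above.
        return channel == null ? -1 : channel;
    }
}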
Use of org.apache.hyracks.api.dataflow.ActivityId in project asterixdb by apache.
The class ActivityClusterGraphBuilder, method merge.
private void merge(Map<ActivityId, Set<ActivityId>> eqSetMap, Set<Set<ActivityId>> eqSets, ActivityId t1, ActivityId t2) {
    Set<ActivityId> stage1 = eqSetMap.get(t1);
    Set<ActivityId> stage2 = eqSetMap.get(t2);
    Set<ActivityId> mergedSet = new HashSet<ActivityId>();
    mergedSet.addAll(stage1);
    mergedSet.addAll(stage2);
    eqSets.remove(stage1);
    eqSets.remove(stage2);
    eqSets.add(mergedSet);
    for (ActivityId t : mergedSet) {
        eqSetMap.put(t, mergedSet);
    }
}
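A hedged usage sketch (not from the AsterixDB sources): seed two singleton equivalence sets, one per ActivityId, and merge them. It assumes OperatorDescriptorId exposes an int constructor, as it does elsewhere in Hyracks; adjust if the API differs.

// Illustration only: set up eqSetMap/eqSets and merge two activities' sets.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;

ActivityId a = new ActivityId(new OperatorDescriptorId(0), 0);
ActivityId b = new ActivityId(new OperatorDescriptorId(1), 0);

Map<ActivityId, Set<ActivityId>> eqSetMap = new HashMap<>();
Set<Set<ActivityId>> eqSets = new HashSet<>();
for (ActivityId t : new ActivityId[] { a, b }) {
    Set<ActivityId> singleton = new HashSet<>();
    singleton.add(t);
    eqSetMap.put(t, singleton);
    eqSets.add(singleton);
}

merge(eqSetMap, eqSets, a, b);
// Afterwards eqSets holds the single set {a, b}, and both map entries point to it.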