Use of org.apache.hyracks.api.partitions.PartitionId in project asterixdb by apache.
The class ProfilingPartitionWriterFactory, method createFrameWriter.
@Override
public IFrameWriter createFrameWriter(final int receiverIndex) throws HyracksDataException {
    final IFrameWriter writer = new ConnectorSenderProfilingFrameWriter(ctx,
            delegate.createFrameWriter(receiverIndex), cd.getConnectorId(), senderIndex, receiverIndex);
    return new IFrameWriter() {
        private long openTime;

        private long closeTime;

        private final MultiResolutionEventProfiler mrep = new MultiResolutionEventProfiler(N_SAMPLES);

        @Override
        public void open() throws HyracksDataException {
            openTime = System.currentTimeMillis();
            writer.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            mrep.reportEvent();
            writer.nextFrame(buffer);
        }

        @Override
        public void fail() throws HyracksDataException {
            writer.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            closeTime = System.currentTimeMillis();
            try {
                ((Task) ctx).setPartitionSendProfile(new PartitionProfile(
                        new PartitionId(ctx.getJobletContext().getJobId(), cd.getConnectorId(), senderIndex,
                                receiverIndex),
                        openTime, closeTime, mrep));
            } finally {
                writer.close();
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            writer.flush();
        }
    };
}
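The snippet above is a straightforward decorator: the anonymous writer delegates every call, stamps the partition's open and close times, and publishes a profile keyed by PartitionId when the partition closes. A minimal self-contained sketch of the same shape follows; SimpleWriter, SimpleProfile, and ProfilingWriter are hypothetical stand-ins for the Hyracks interfaces, not the real API.

import java.nio.ByteBuffer;

// Hypothetical minimal stand-ins for IFrameWriter and PartitionProfile.
interface SimpleWriter {
    void open();
    void nextFrame(ByteBuffer buffer);
    void close();
}

record SimpleProfile(long openTime, long closeTime, long frameCount) {}

class ProfilingWriter implements SimpleWriter {
    private final SimpleWriter delegate;
    private long openTime;
    private long frameCount;

    ProfilingWriter(SimpleWriter delegate) {
        this.delegate = delegate;
    }

    @Override
    public void open() {
        openTime = System.currentTimeMillis(); // stamp before delegating, as above
        delegate.open();
    }

    @Override
    public void nextFrame(ByteBuffer buffer) {
        frameCount++; // the real code reports to a MultiResolutionEventProfiler here
        delegate.nextFrame(buffer);
    }

    @Override
    public void close() {
        long closeTime = System.currentTimeMillis();
        try {
            System.out.println(new SimpleProfile(openTime, closeTime, frameCount));
        } finally {
            delegate.close(); // mirror the try/finally so the delegate always closes
        }
    }

    public static void main(String[] args) {
        SimpleWriter sink = new SimpleWriter() {
            public void open() {}
            public void nextFrame(ByteBuffer buffer) {}
            public void close() {}
        };
        ProfilingWriter w = new ProfilingWriter(sink);
        w.open();
        w.nextFrame(ByteBuffer.allocate(8));
        w.close(); // prints the collected profile
    }
}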
Use of org.apache.hyracks.api.partitions.PartitionId in project asterixdb by apache.
The class StartTasksWork, method createInputChannels.
/**
 * Create a list of known channels for each input connector
 *
 * @param td
 *            the task attempt descriptor
 * @param inputs
 *            the input connector descriptors
 * @return a list of known channels, one for each connector
 * @throws UnknownHostException
 */
private List<List<PartitionChannel>> createInputChannels(TaskAttemptDescriptor td, List<IConnectorDescriptor> inputs)
        throws UnknownHostException {
    NetworkAddress[][] inputAddresses = td.getInputPartitionLocations();
    List<List<PartitionChannel>> channelsForInputConnectors = new ArrayList<>();
    if (inputAddresses != null) {
        for (int i = 0; i < inputAddresses.length; i++) {
            List<PartitionChannel> channels = new ArrayList<>();
            if (inputAddresses[i] != null) {
                for (int j = 0; j < inputAddresses[i].length; j++) {
                    NetworkAddress networkAddress = inputAddresses[i][j];
                    PartitionId pid = new PartitionId(jobId, inputs.get(i).getConnectorId(), j,
                            td.getTaskAttemptId().getTaskId().getPartition());
                    PartitionChannel channel = new PartitionChannel(pid,
                            new NetworkInputChannel(ncs.getNetworkManager(),
                                    new InetSocketAddress(InetAddress.getByAddress(networkAddress.lookupIpAddress()),
                                            networkAddress.getPort()),
                                    pid, 5));
                    channels.add(channel);
                }
            }
            channelsForInputConnectors.add(channels);
        }
    }
    return channelsForInputConnectors;
}
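The indexing convention is the key detail: for input connector i, address j names the sender partition, while the receiver index is always this task's own partition. A small self-contained sketch of the same double loop follows; InputChannelSketch and its Pid record are hypothetical simplifications of PartitionId and PartitionChannel, not the Hyracks types.

import java.util.ArrayList;
import java.util.List;

public class InputChannelSketch {
    // Hypothetical stand-in: a (connector, sender, receiver) triple, like PartitionId.
    record Pid(int connector, int sender, int receiver) {}

    static List<List<Pid>> wire(String[][] addresses, int myPartition) {
        List<List<Pid>> result = new ArrayList<>();
        if (addresses == null) {
            return result; // mirror the null guard in createInputChannels
        }
        for (int i = 0; i < addresses.length; i++) {            // one list per input connector
            List<Pid> channels = new ArrayList<>();
            if (addresses[i] != null) {
                for (int j = 0; j < addresses[i].length; j++) { // one channel per sender
                    channels.add(new Pid(i, j, myPartition));
                }
            }
            result.add(channels);
        }
        return result;
    }

    public static void main(String[] args) {
        // Two connectors; the first has two senders, the second has one.
        String[][] addrs = { { "nc1:5000", "nc2:5000" }, { "nc1:5001" } };
        System.out.println(wire(addrs, 3));
        // [[Pid[connector=0, sender=0, receiver=3], Pid[connector=0, sender=1, receiver=3]],
        //  [Pid[connector=1, sender=0, receiver=3]]]
    }
}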
Use of org.apache.hyracks.api.partitions.PartitionId in project asterixdb by apache.
The class PartitionMatchMaker, method removePartitionRequests.
public void removePartitionRequests(Set<PartitionId> partitionIds, final Set<TaskAttemptId> taIds) {
    LOGGER.info("Removing partition requests: " + partitionIds);
    IEntryFilter<PartitionRequest> filter = new IEntryFilter<PartitionRequest>() {
        @Override
        public boolean matches(PartitionRequest o) {
            return taIds.contains(o.getRequestingTaskAttemptId());
        }
    };
    for (PartitionId pid : partitionIds) {
        List<PartitionRequest> requests = partitionRequests.get(pid);
        if (requests != null) {
            removeEntries(requests, filter);
            if (requests.isEmpty()) {
                partitionRequests.remove(pid);
            }
        }
    }
}
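The same remove-then-prune pattern can be written generically: filter each bucket, then drop the bucket from the map once it empties. A hedged, self-contained sketch using plain JDK collections (PruneSketch and prune are illustrative names; the JDK's removeIf plays the role of removeEntries with an IEntryFilter):

import java.util.*;
import java.util.function.Predicate;

public class PruneSketch {
    // Remove every value matching 'doomed' from the buckets named by 'keys',
    // dropping a bucket entirely once it becomes empty.
    static <K, V> void prune(Map<K, List<V>> buckets, Set<K> keys, Predicate<V> doomed) {
        for (K key : keys) {
            List<V> bucket = buckets.get(key);
            if (bucket != null) {
                bucket.removeIf(doomed);
                if (bucket.isEmpty()) {
                    buckets.remove(key);
                }
            }
        }
    }

    public static void main(String[] args) {
        Map<String, List<Integer>> requests = new HashMap<>();
        requests.put("p0", new ArrayList<>(List.of(1, 2, 3)));
        requests.put("p1", new ArrayList<>(List.of(2)));
        prune(requests, Set.of("p0", "p1"), v -> v == 2);
        System.out.println(requests); // {p0=[1, 3]} -- p1 was emptied and removed
    }
}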
Use of org.apache.hyracks.api.partitions.PartitionId in project asterixdb by apache.
The class NonDeterministicChannelReader, method notifyDataAvailability.
@Override
public synchronized void notifyDataAvailability(IInputChannel channel, int nFrames) {
    PartitionId pid = (PartitionId) channel.getAttachment();
    int senderIndex = pid.getSenderIndex();
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.fine("Data available: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex
                + " receiver: " + pid.getReceiverIndex());
    }
    availableFrameCounts[senderIndex] += nFrames;
    frameAvailability.set(senderIndex);
    notifyAll();
}
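This is the producer half of a classic monitor: a channel thread bumps the per-sender frame count, sets that sender's bit, and wakes any reader blocked on the same lock. A minimal sketch of both halves follows, assuming a fixed number of senders; AvailabilityMonitor and awaitAnySender are hypothetical names, and the real reader additionally tracks end-of-stream and failure state.

import java.util.BitSet;

class AvailabilityMonitor {
    private final int[] availableFrameCounts;
    private final BitSet frameAvailability = new BitSet();

    AvailabilityMonitor(int nSenders) {
        availableFrameCounts = new int[nSenders];
    }

    // Producer side: called by a channel when frames arrive (cf. notifyDataAvailability).
    synchronized void notifyDataAvailability(int senderIndex, int nFrames) {
        availableFrameCounts[senderIndex] += nFrames;
        frameAvailability.set(senderIndex);
        notifyAll();
    }

    // Consumer side: block until some sender has data, then claim one frame from it.
    synchronized int awaitAnySender() throws InterruptedException {
        int sender;
        while ((sender = frameAvailability.nextSetBit(0)) < 0) {
            wait();
        }
        if (--availableFrameCounts[sender] == 0) {
            frameAvailability.clear(sender); // nothing left from this sender for now
        }
        return sender;
    }

    public static void main(String[] args) throws InterruptedException {
        AvailabilityMonitor m = new AvailabilityMonitor(4);
        m.notifyDataAvailability(2, 1);
        System.out.println("frame from sender " + m.awaitAnySender()); // prints 2
    }
}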
Use of org.apache.hyracks.api.partitions.PartitionId in project asterixdb by apache.
The class ActivityClusterPlanner, method computeTaskClusters.
private TaskCluster[] computeTaskClusters(ActivityCluster ac, JobRun jobRun,
        Map<ActivityId, ActivityPlan> activityPlanMap) {
    Set<ActivityId> activities = ac.getActivityMap().keySet();
    Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity =
            computeTaskConnectivity(jobRun, activityPlanMap, activities);
    TaskCluster[] taskClusters = ac.getActivityClusterGraph().isUseConnectorPolicyForScheduling()
            ? buildConnectorPolicyAwareTaskClusters(ac, activityPlanMap, taskConnectivity)
            : buildConnectorPolicyUnawareTaskClusters(ac, activityPlanMap);
    for (TaskCluster tc : taskClusters) {
        Set<TaskCluster> tcDependencyTaskClusters = tc.getDependencyTaskClusters();
        for (Task ts : tc.getTasks()) {
            TaskId tid = ts.getTaskId();
            List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
            if (cInfoList != null) {
                for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
                    Task targetTS = activityPlanMap.get(p.getLeft().getActivityId())
                            .getTasks()[p.getLeft().getPartition()];
                    TaskCluster targetTC = targetTS.getTaskCluster();
                    if (targetTC != tc) {
                        ConnectorDescriptorId cdId = p.getRight();
                        PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(),
                                p.getLeft().getPartition());
                        tc.getProducedPartitions().add(pid);
                        targetTC.getRequiredPartitions().add(pid);
                        partitionProducingTaskClusterMap.put(pid, tc);
                    }
                }
            }
            for (TaskId dTid : ts.getDependencies()) {
                TaskCluster dTC = getTaskCluster(dTid);
                dTC.getDependentTaskClusters().add(tc);
                tcDependencyTaskClusters.add(dTC);
            }
        }
    }
    return taskClusters;
}
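The key bookkeeping is symmetric: every cross-cluster connection yields one PartitionId recorded as produced by the source cluster, required by the target cluster, and indexed in a producer lookup map. A hedged sketch of just that wiring follows; WiringSketch, Cluster, and Pid are hypothetical simplifications of TaskCluster and PartitionId, not the planner's real types.

import java.util.*;

public class WiringSketch {
    record Pid(int connector, int senderPartition, int receiverPartition) {}

    static class Cluster {
        final Set<Pid> produced = new HashSet<>();
        final Set<Pid> required = new HashSet<>();
    }

    // Register one cross-cluster edge, mirroring the loop body of computeTaskClusters.
    static void wire(Cluster source, Cluster target, Pid pid, Map<Pid, Cluster> producerMap) {
        if (source != target) {           // intra-cluster edges need no partition
            source.produced.add(pid);
            target.required.add(pid);
            producerMap.put(pid, source); // cf. partitionProducingTaskClusterMap
        }
    }

    public static void main(String[] args) {
        Cluster a = new Cluster(), b = new Cluster();
        Map<Pid, Cluster> producers = new HashMap<>();
        wire(a, b, new Pid(0, 0, 1), producers);
        wire(a, a, new Pid(0, 0, 0), producers); // same cluster: skipped
        System.out.println(a.produced + " " + b.required); // both hold the cross-cluster pid
    }
}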