Search in sources:

Example 41 with JobId

Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.

From the class DatasetPartitionManager, the method createDatasetPartitionWriter:

@Override
public IFrameWriter createDatasetPartitionWriter(IHyracksTaskContext ctx, ResultSetId rsId, boolean orderedResult, boolean asyncMode, int partition, int nPartitions) throws HyracksException {
    DatasetPartitionWriter dpw;
    JobId jobId = ctx.getJobletContext().getJobId();
    synchronized (this) {
        dpw = new DatasetPartitionWriter(ctx, this, jobId, rsId, asyncMode, orderedResult, partition, nPartitions, datasetMemoryManager, fileFactory);
        ResultSetMap rsIdMap = (ResultSetMap) partitionResultStateMap.computeIfAbsent(jobId, k -> new ResultSetMap());
        ResultState[] resultStates = rsIdMap.createOrGetResultStates(rsId, nPartitions);
        resultStates[partition] = dpw.getResultState();
    }
    LOGGER.fine("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
    return dpw;
}
Also used: DefaultDeallocatableRegistry(org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry) ResultStateSweeper(org.apache.hyracks.control.common.dataset.ResultStateSweeper) Executor(java.util.concurrent.Executor) IDatasetStateRecord(org.apache.hyracks.api.dataset.IDatasetStateRecord) Set(java.util.Set) IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) IDatasetPartitionManager(org.apache.hyracks.api.dataset.IDatasetPartitionManager) WorkspaceFileFactory(org.apache.hyracks.control.nc.io.WorkspaceFileFactory) IWorkspaceFileFactory(org.apache.hyracks.api.io.IWorkspaceFileFactory) Logger(java.util.logging.Logger) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) LinkedHashMap(java.util.LinkedHashMap) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) NodeControllerService(org.apache.hyracks.control.nc.NodeControllerService) JobId(org.apache.hyracks.api.job.JobId) Map(java.util.Map) ResultSetId(org.apache.hyracks.api.dataset.ResultSetId)
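
A minimal, self-contained sketch of the bookkeeping idiom used above. The types ResultState and ResultSetMap here are hypothetical stand-ins, not the actual AsterixDB classes; the point is the nested computeIfAbsent: the first writer for a job creates the shared per-job entry, and later writers for the same job reuse it.

import java.util.HashMap;
import java.util.Map;

public class PerJobResultStateSketch {
    // Hypothetical stand-ins for the real ResultState / ResultSetMap classes.
    static class ResultState {
    }

    static class ResultSetMap {
        private final Map<Integer, ResultState[]> states = new HashMap<>();

        ResultState[] createOrGetResultStates(int rsId, int nPartitions) {
            // One array of per-partition states per result set id.
            return states.computeIfAbsent(rsId, k -> new ResultState[nPartitions]);
        }
    }

    private final Map<Long, ResultSetMap> partitionResultStateMap = new HashMap<>();

    synchronized ResultState registerWriter(long jobId, int rsId, int partition, int nPartitions) {
        // Same pattern as createDatasetPartitionWriter: create the per-job entry on demand.
        ResultSetMap rsIdMap = partitionResultStateMap.computeIfAbsent(jobId, k -> new ResultSetMap());
        ResultState[] resultStates = rsIdMap.createOrGetResultStates(rsId, nPartitions);
        ResultState state = new ResultState();
        resultStates[partition] = state;
        return state;
    }
}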

Example 42 with JobId

Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.

From the class NetworkManager, the method readInitialMessage:

private static PartitionId readInitialMessage(ByteBuffer buffer) {
    JobId jobId = new JobId(buffer.getLong());
    ConnectorDescriptorId cdid = new ConnectorDescriptorId(buffer.getInt());
    int senderIndex = buffer.getInt();
    int receiverIndex = buffer.getInt();
    return new PartitionId(jobId, cdid, senderIndex, receiverIndex);
}
Also used: ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) PartitionId(org.apache.hyracks.api.partitions.PartitionId) JobId(org.apache.hyracks.api.job.JobId)
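
For context, a hypothetical encoder for the same wire layout (illustrative only, not the actual NetworkManager code): it writes the four fields that readInitialMessage consumes, in the same order and with the same widths.

import java.nio.ByteBuffer;

class InitialMessageSketch {
    // Hypothetical counterpart to readInitialMessage above.
    static ByteBuffer writeInitialMessage(long jobId, int connectorId, int senderIndex, int receiverIndex) {
        ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES + 3 * Integer.BYTES);
        buffer.putLong(jobId);       // the JobId travels as its long value
        buffer.putInt(connectorId);  // the ConnectorDescriptorId travels as its int value
        buffer.putInt(senderIndex);
        buffer.putInt(receiverIndex);
        buffer.flip();               // ready to be read back by readInitialMessage
        return buffer;
    }
}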

Example 43 with JobId

Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.

From the class Waiter, the method print:

public PrintWriter print(PrintWriter pw) {
    for (JobId jId : getJobIds()) {
        pw.print(jId.toString());
        pw.print(" - ");
        pw.println(String.valueOf(getDatasetJobRecord(jId)));
    }
    pw.flush();
    return pw;
}
Also used: JobId(org.apache.hyracks.api.job.JobId)
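
A self-contained sketch of the same reporting idiom, with a plain Map standing in for the job-id-to-record lookup (getJobIds() and getDatasetJobRecord(jId) are replaced here by hypothetical data):

import java.io.PrintWriter;
import java.util.LinkedHashMap;
import java.util.Map;

class JobReportSketch {
    public static void main(String[] args) {
        // Hypothetical mapping standing in for getJobIds() / getDatasetJobRecord(jId).
        Map<Long, String> records = new LinkedHashMap<>();
        records.put(1L, "SUCCESS");
        records.put(2L, "RUNNING");

        PrintWriter pw = new PrintWriter(System.out);
        for (Map.Entry<Long, String> e : records.entrySet()) {
            pw.print(e.getKey());    // stands in for jId.toString()
            pw.print(" - ");
            pw.println(String.valueOf(e.getValue()));
        }
        pw.flush(); // PrintWriter(System.out) is not auto-flushing, so flush explicitly
    }
}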

Example 44 with JobId

Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.

From the class JobExecutor, the method startTasks:

private void startTasks(Map<String, List<TaskAttemptDescriptor>> taskAttemptMap) throws HyracksException {
    final DeploymentId deploymentId = jobRun.getDeploymentId();
    final JobId jobId = jobRun.getJobId();
    final ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = new HashMap<>(jobRun.getConnectorPolicyMap());
    INodeManager nodeManager = ccs.getNodeManager();
    try {
        byte[] acgBytes = predistributed ? null : JavaSerializationUtils.serialize(acg);
        for (Map.Entry<String, List<TaskAttemptDescriptor>> entry : taskAttemptMap.entrySet()) {
            String nodeId = entry.getKey();
            final List<TaskAttemptDescriptor> taskDescriptors = entry.getValue();
            final NodeControllerState node = nodeManager.getNodeControllerState(nodeId);
            if (node != null) {
                node.getActiveJobIds().add(jobRun.getJobId());
                boolean changed = jobRun.getParticipatingNodeIds().add(nodeId);
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.fine("Starting: " + taskDescriptors + " at " + entry.getKey());
                }
                byte[] jagBytes = changed ? acgBytes : null;
                node.getNodeController().startTasks(deploymentId, jobId, jagBytes, taskDescriptors, connectorPolicies, jobRun.getFlags());
            }
        }
    } catch (Exception e) {
        throw new HyracksException(e);
    }
}
Also used: INodeManager(org.apache.hyracks.control.cc.cluster.INodeManager) DeploymentId(org.apache.hyracks.api.deployment.DeploymentId) HashMap(java.util.HashMap) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) TaskAttemptDescriptor(org.apache.hyracks.control.common.job.TaskAttemptDescriptor) ActivityClusterGraph(org.apache.hyracks.api.job.ActivityClusterGraph) ArrayList(java.util.ArrayList) List(java.util.List) NodeControllerState(org.apache.hyracks.control.cc.NodeControllerState) Map(java.util.Map) JobId(org.apache.hyracks.api.job.JobId)
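
The detail worth noting above is that acgBytes, the serialized ActivityClusterGraph, is shipped to a node only the first time that node joins the job: Set.add returns true exactly once per element, and a null payload means the node already has the graph. A minimal illustrative sketch of that idiom, with hypothetical names:

import java.util.HashSet;
import java.util.Set;

class GraphShippingSketch {
    // Hypothetical stand-in for jobRun.getParticipatingNodeIds().
    private final Set<String> participatingNodeIds = new HashSet<>();

    byte[] payloadFor(String nodeId, byte[] acgBytes) {
        boolean firstTime = participatingNodeIds.add(nodeId); // true only on first insertion
        // Ship the (potentially large) serialized graph once per node; null tells the
        // node controller that it already holds the activity cluster graph.
        return firstTime ? acgBytes : null;
    }
}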

Example 45 with JobId

Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.

From the class JobManager, the method prepareComplete:

@Override
public void prepareComplete(JobRun run, JobStatus status, List<Exception> exceptions) throws HyracksException {
    checkJob(run);
    if (status == JobStatus.FAILURE_BEFORE_EXECUTION) {
        run.setPendingStatus(JobStatus.FAILURE, exceptions);
        finalComplete(run);
        return;
    }
    JobId jobId = run.getJobId();
    HyracksException caughtException = null;
    if (run.getPendingStatus() != null && run.getCleanupPendingNodeIds().isEmpty()) {
        finalComplete(run);
        return;
    }
    if (run.getPendingStatus() != null) {
        LOGGER.warning("Ignoring duplicate cleanup for JobRun with id: " + jobId);
        return;
    }
    Set<String> targetNodes = run.getParticipatingNodeIds();
    run.getCleanupPendingNodeIds().addAll(targetNodes);
    if (run.getPendingStatus() != JobStatus.FAILURE && run.getPendingStatus() != JobStatus.TERMINATED) {
        run.setPendingStatus(status, exceptions);
    }
    if (targetNodes != null && !targetNodes.isEmpty()) {
        INodeManager nodeManager = ccs.getNodeManager();
        Set<String> toDelete = new HashSet<>();
        for (String n : targetNodes) {
            NodeControllerState ncs = nodeManager.getNodeControllerState(n);
            try {
                if (ncs == null) {
                    toDelete.add(n);
                } else {
                    ncs.getNodeController().cleanUpJoblet(jobId, status);
                }
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
                if (caughtException == null) {
                    caughtException = new HyracksException(e);
                } else {
                    caughtException.addSuppressed(e);
                }
            }
        }
        targetNodes.removeAll(toDelete);
        run.getCleanupPendingNodeIds().removeAll(toDelete);
        if (run.getCleanupPendingNodeIds().isEmpty()) {
            finalComplete(run);
        }
    } else {
        finalComplete(run);
    }
    // rethrow caught exceptions, if any
    if (caughtException != null) {
        throw caughtException;
    }
}
Also used: INodeManager(org.apache.hyracks.control.cc.cluster.INodeManager) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) NodeControllerState(org.apache.hyracks.control.cc.NodeControllerState) JobId(org.apache.hyracks.api.job.JobId) InvocationTargetException(java.lang.reflect.InvocationTargetException) HashSet(java.util.HashSet)
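
The cleanup loop above uses a collect-then-rethrow pattern: the first failure becomes the exception that is eventually thrown, later failures are attached with addSuppressed, and cleanup still reaches every node before anything propagates. A self-contained sketch of the same pattern over hypothetical cleanup tasks:

import java.util.List;

class CleanupSketch {
    // Mirrors the per-node loop in prepareComplete, but over plain Runnables.
    static void cleanUpAll(List<Runnable> cleanupTasks) throws Exception {
        Exception caught = null;
        for (Runnable task : cleanupTasks) {
            try {
                task.run();
            } catch (Exception e) {
                if (caught == null) {
                    caught = e;              // the first failure wins
                } else {
                    caught.addSuppressed(e); // later failures are recorded, not lost
                }
            }
        }
        if (caught != null) {
            throw caught; // rethrow only after every task has been attempted
        }
    }
}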

Aggregations

JobId (org.apache.hyracks.api.job.JobId): 57
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 17
IHyracksClientConnection (org.apache.hyracks.api.client.IHyracksClientConnection): 13
HyracksConnection (org.apache.hyracks.api.client.HyracksConnection): 11
HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 10
CmdLineParser (org.kohsuke.args4j.CmdLineParser): 9
Test (org.junit.Test): 5
IOException (java.io.IOException): 4
ArrayList (java.util.ArrayList): 4
HashSet (java.util.HashSet): 4
Map (java.util.Map): 4
NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState): 4
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 4
Joblet (org.apache.hyracks.control.nc.Joblet): 4
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 3
HashMap (java.util.HashMap): 3
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
InvocationTargetException (java.lang.reflect.InvocationTargetException): 2
RemoteException (java.rmi.RemoteException): 2