Search in sources :

Example 1 with IDatasetPartitionManager

use of org.apache.hyracks.api.dataset.IDatasetPartitionManager in project asterixdb by apache.

Source of the run() method in class AbortTasksWork.

/**
 * Aborts the given set of task attempts for {@code jobId} on this node.
 * First aborts any dataset result readers for the job, then aborts each
 * still-registered task; logs a warning if the joblet is already gone
 * (all tasks completed or failed).
 */
@Override
public void run() {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Aborting Tasks: " + jobId + ":" + tasks);
    }
    IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
    if (dpm != null) {
        // Use the reference that was null-checked above; re-fetching the
        // manager could return null and NPE if it is swapped out concurrently.
        dpm.abortReader(jobId);
    }
    Joblet ji = ncs.getJobletMap().get(jobId);
    if (ji != null) {
        Map<TaskAttemptId, Task> taskMap = ji.getTaskMap();
        for (TaskAttemptId taId : tasks) {
            // A task may have finished between scheduling this work and now.
            Task task = taskMap.get(taId);
            if (task != null) {
                task.abort();
            }
        }
    } else {
        LOGGER.log(Level.WARNING, "Joblet couldn't be found. Tasks of job " + jobId + " have all either completed or failed");
    }
}
Also used : IDatasetPartitionManager(org.apache.hyracks.api.dataset.IDatasetPartitionManager) Task(org.apache.hyracks.control.nc.Task) TaskAttemptId(org.apache.hyracks.api.dataflow.TaskAttemptId) Joblet(org.apache.hyracks.control.nc.Joblet)

Example 2 with IDatasetPartitionManager

use of org.apache.hyracks.api.dataset.IDatasetPartitionManager in project asterixdb by apache.

Source of the createPushRuntime() method in class ResultWriterOperatorDescriptor.

/**
 * Creates the sink runtime that serializes incoming tuples into frames and
 * pushes them to a dataset partition writer obtained from the task context's
 * {@link IDatasetPartitionManager}.
 */
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) throws HyracksDataException {
    final IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
    // Reusable output frame; serialized result tuples are appended into it
    // via the print stream / frame output stream pair below.
    final IFrame frame = new VSizeFrame(ctx);
    final FrameOutputStream frameOutputStream = new FrameOutputStream(ctx.getInitialFrameSize());
    frameOutputStream.reset(frame, true);
    PrintStream printStream = new PrintStream(frameOutputStream);
    final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
    final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc, printStream);
    final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(outRecordDesc);
    return new AbstractUnaryInputSinkOperatorNodePushable() {

        private IFrameWriter datasetPartitionWriter;

        // Set by fail(); suppresses the final flush in close() so a failed
        // run does not emit a partial frame.
        private boolean failed = false;

        @Override
        public void open() throws HyracksDataException {
            try {
                // NOTE(review): dpm is not null-checked here (unlike other
                // call sites in this file) — presumably a task context always
                // has a partition manager; confirm.
                datasetPartitionWriter = dpm.createDatasetPartitionWriter(ctx, rsId, ordered, asyncMode, partition, nPartitions);
                datasetPartitionWriter.open();
                resultSerializer.init();
            } catch (HyracksException e) {
                throw HyracksDataException.create(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            frameTupleAccessor.reset(buffer);
            for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                if (!frameOutputStream.appendTuple()) {
                    // Output frame is full: flush it downstream, then
                    // re-serialize and re-append the tuple that did not fit.
                    frameOutputStream.flush(datasetPartitionWriter);
                    resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                    frameOutputStream.appendTuple();
                }
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            failed = true;
            // NOTE(review): if open() threw before assigning the writer,
            // this would NPE — existing behavior, left unchanged.
            datasetPartitionWriter.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                // Flush any buffered tuples, but only on the success path.
                if (!failed && frameOutputStream.getTupleCount() > 0) {
                    frameOutputStream.flush(datasetPartitionWriter);
                }
            } catch (Exception e) {
                // Propagate the flush failure after notifying the writer.
                datasetPartitionWriter.fail();
                throw e;
            } finally {
                datasetPartitionWriter.close();
            }
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("{ ");
            sb.append("\"rsId\": \"").append(rsId).append("\", ");
            sb.append("\"ordered\": ").append(ordered).append(", ");
            sb.append("\"asyncMode\": ").append(asyncMode).append(" }");
            return sb.toString();
        }
    };
}
Also used : PrintStream(java.io.PrintStream) IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) IFrame(org.apache.hyracks.api.comm.IFrame) IResultSerializer(org.apache.hyracks.api.dataflow.value.IResultSerializer) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ByteBuffer(java.nio.ByteBuffer) VSizeFrame(org.apache.hyracks.api.comm.VSizeFrame) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IOException(java.io.IOException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) IDatasetPartitionManager(org.apache.hyracks.api.dataset.IDatasetPartitionManager) AbstractUnaryInputSinkOperatorNodePushable(org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable) FrameOutputStream(org.apache.hyracks.dataflow.common.comm.io.FrameOutputStream) FrameTupleAccessor(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor)

Example 3 with IDatasetPartitionManager

use of org.apache.hyracks.api.dataset.IDatasetPartitionManager in project asterixdb by apache.

Source of the run() method in class NotifyTaskFailureWork.

/**
 * Notifies the cluster controller that this task has failed with the
 * collected exceptions, after aborting any dataset readers for the job.
 * The task is always removed from its joblet, even if notification fails.
 */
@Override
public void run() {
    try {
        JobId jobId = task.getJobletContext().getJobId();
        IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
        if (dpm != null) {
            dpm.abortReader(jobId);
        }
        ncs.getClusterController().notifyTaskFailure(jobId, task.getTaskAttemptId(), ncs.getId(), exceptions);
    } catch (Exception e) {
        // Best-effort: a failure while reporting the failure must not kill
        // this work thread; the task is still removed below.
        // TODO(review): route through a logger instead of stderr — no logger
        // field is visible in this snippet to use here.
        e.printStackTrace();
    }
    task.getJoblet().removeTask(task);
}
Also used : IDatasetPartitionManager(org.apache.hyracks.api.dataset.IDatasetPartitionManager) JobId(org.apache.hyracks.api.job.JobId)

Aggregations

IDatasetPartitionManager (org.apache.hyracks.api.dataset.IDatasetPartitionManager)3 IOException (java.io.IOException)1 PrintStream (java.io.PrintStream)1 ByteBuffer (java.nio.ByteBuffer)1 IFrame (org.apache.hyracks.api.comm.IFrame)1 IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter)1 VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame)1 TaskAttemptId (org.apache.hyracks.api.dataflow.TaskAttemptId)1 IResultSerializer (org.apache.hyracks.api.dataflow.value.IResultSerializer)1 RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor)1 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)1 HyracksException (org.apache.hyracks.api.exceptions.HyracksException)1 JobId (org.apache.hyracks.api.job.JobId)1 Joblet (org.apache.hyracks.control.nc.Joblet)1 Task (org.apache.hyracks.control.nc.Task)1 FrameOutputStream (org.apache.hyracks.dataflow.common.comm.io.FrameOutputStream)1 FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor)1 AbstractUnaryInputSinkOperatorNodePushable (org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable)1