Search in sources :

Example 1 with IPartitionCollector

use of org.apache.hyracks.api.comm.IPartitionCollector in project asterixdb by apache.

Source: class MToNPartitioningMergingConnectorDescriptor, method createPartitionCollector.

@Override
public IPartitionCollector createPartitionCollector(IHyracksTaskContext ctx, RecordDescriptor recordDesc, int index, int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
    // Instantiate one comparator per sort key from the configured factories.
    final IBinaryComparator[] keyComparators = new IBinaryComparator[comparatorFactories.length];
    for (int k = 0; k < keyComparators.length; k++) {
        keyComparators[k] = comparatorFactories[k].createBinaryComparator();
    }
    // The normalized-key computer is optional; absent when no factory was configured.
    final INormalizedKeyComputer normalizedKeyComputer;
    if (nkcFactory == null) {
        normalizedKeyComputer = null;
    } else {
        normalizedKeyComputer = nkcFactory.createNormalizedKeyComputer();
    }
    final IPartitionBatchManager batchManager = new NonDeterministicPartitionBatchManager(nProducerPartitions);
    // Merge-reader that produces a single sorted stream out of all producer partitions.
    final IFrameReader merger = new SortMergeFrameReader(ctx, nProducerPartitions, nProducerPartitions, sortFields, keyComparators, normalizedKeyComputer, recordDesc, batchManager);
    // This consumer expects input from every producer partition [0, nProducerPartitions).
    final BitSet expected = new BitSet();
    expected.set(0, nProducerPartitions);
    return new PartitionCollector(ctx, getConnectorId(), index, expected, merger, batchManager);
}
Also used : INormalizedKeyComputer(org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer) SortMergeFrameReader(org.apache.hyracks.dataflow.std.collectors.SortMergeFrameReader) IFrameReader(org.apache.hyracks.api.comm.IFrameReader) IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) PartitionCollector(org.apache.hyracks.dataflow.std.collectors.PartitionCollector) IPartitionBatchManager(org.apache.hyracks.dataflow.std.collectors.IPartitionBatchManager) BitSet(java.util.BitSet) IBinaryComparator(org.apache.hyracks.api.dataflow.value.IBinaryComparator) NonDeterministicPartitionBatchManager(org.apache.hyracks.dataflow.std.collectors.NonDeterministicPartitionBatchManager)

Example 2 with IPartitionCollector

use of org.apache.hyracks.api.comm.IPartitionCollector in project asterixdb by apache.

Source: class OneToOneConnectorDescriptor, method createPartitionCollector.

@Override
public IPartitionCollector createPartitionCollector(IHyracksTaskContext ctx, RecordDescriptor recordDesc, int index, int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
    // One-to-one wiring: consumer `index` receives exclusively from producer `index`.
    final BitSet expected = new BitSet(nProducerPartitions);
    expected.set(index);
    final NonDeterministicChannelReader channels = new NonDeterministicChannelReader(nProducerPartitions, expected);
    final NonDeterministicFrameReader frames = new NonDeterministicFrameReader(channels);
    return new PartitionCollector(ctx, getConnectorId(), index, expected, frames, channels);
}
Also used : IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) PartitionCollector(org.apache.hyracks.dataflow.std.collectors.PartitionCollector) NonDeterministicFrameReader(org.apache.hyracks.dataflow.std.collectors.NonDeterministicFrameReader) BitSet(java.util.BitSet) NonDeterministicChannelReader(org.apache.hyracks.dataflow.std.collectors.NonDeterministicChannelReader)

Example 3 with IPartitionCollector

use of org.apache.hyracks.api.comm.IPartitionCollector in project asterixdb by apache.

Source: class StartTasksWork, method run.

@Override
public void run() {
    // Instantiates and starts every task attempt described in taskDescriptors:
    // builds the joblet, wires each task's input collectors and output partition
    // writers from the activity cluster graph, then starts the task.
    Task task = null;
    try {
        NCServiceContext serviceCtx = ncs.getContext();
        Joblet joblet = getOrCreateLocalJoblet(deploymentId, jobId, serviceCtx, acgBytes);
        final ActivityClusterGraph acg = joblet.getActivityClusterGraph();
        // Resolves record descriptors by looking up the connector attached to the
        // given activity's input/output port in the activity cluster graph.
        IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() {

            @Override
            public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityOutputMap().get(aid).get(outputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }

            @Override
            public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityInputMap().get(aid).get(inputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
        };
        for (TaskAttemptDescriptor td : taskDescriptors) {
            TaskAttemptId taId = td.getTaskAttemptId();
            TaskId tid = taId.getTaskId();
            ActivityId aid = tid.getActivityId();
            ActivityCluster ac = acg.getActivityMap().get(aid);
            IActivity han = ac.getActivityMap().get(aid);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Initializing " + taId + " -> " + han);
            }
            final int partition = tid.getPartition();
            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(aid);
            task = new Task(joblet, taId, han.getClass().getName(), ncs.getExecutor(), ncs, createInputChannels(td, inputs));
            IOperatorNodePushable operator = han.createPushRuntime(task, rdp, partition, td.getPartitionCount());
            // One collector per input connector feeds frames into the operator.
            List<IPartitionCollector> collectors = new ArrayList<>();
            if (inputs != null) {
                for (int i = 0; i < inputs.size(); ++i) {
                    IConnectorDescriptor conn = inputs.get(i);
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("input: " + i + ": " + conn.getConnectorId());
                    }
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IPartitionCollector collector = createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
                    collectors.add(collector);
                }
            }
            // One partitioner/frame-writer per output connector carries the operator's output.
            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(aid);
            if (outputs != null) {
                for (int i = 0; i < outputs.size(); ++i) {
                    final IConnectorDescriptor conn = outputs.get(i);
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    IPartitionWriterFactory pwFactory = createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("output: " + i + ": " + conn.getConnectorId());
                    }
                    IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition, td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
                    operator.setOutputFrameWriter(i, writer, recordDesc);
                }
            }
            task.setTaskRuntime(collectors.toArray(new IPartitionCollector[collectors.size()]), operator);
            joblet.addTask(task);
            task.start();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure starting a task", e);
        // notify cc of start task failure
        List<Exception> exceptions = new ArrayList<>();
        // BUGFIX: include the actual cause in the failure report; previously an
        // empty exception list was sent, so the CC saw a failure with no reason.
        exceptions.add(e);
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, task, exceptions));
    }
}
Also used : IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) Task(org.apache.hyracks.control.nc.Task) TaskId(org.apache.hyracks.api.dataflow.TaskId) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ArrayList(java.util.ArrayList) IRecordDescriptorProvider(org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider) Joblet(org.apache.hyracks.control.nc.Joblet) IActivity(org.apache.hyracks.api.dataflow.IActivity) NCServiceContext(org.apache.hyracks.control.nc.application.NCServiceContext) INCServiceContext(org.apache.hyracks.api.application.INCServiceContext) List(java.util.List) ArrayList(java.util.ArrayList) IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) TaskAttemptId(org.apache.hyracks.api.dataflow.TaskAttemptId) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) IPartitionWriterFactory(org.apache.hyracks.api.comm.IPartitionWriterFactory) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) UnknownHostException(java.net.UnknownHostException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ActivityCluster(org.apache.hyracks.api.job.ActivityCluster) TaskAttemptDescriptor(org.apache.hyracks.control.common.job.TaskAttemptDescriptor) ActivityClusterGraph(org.apache.hyracks.api.job.ActivityClusterGraph) IOperatorNodePushable(org.apache.hyracks.api.dataflow.IOperatorNodePushable)

Example 4 with IPartitionCollector

use of org.apache.hyracks.api.comm.IPartitionCollector in project asterixdb by apache.

Source: class Task, method run.

@Override
public void run() {
    // Executes this task: initializes the operator, drains every input collector
    // (input 0 on this thread, the rest on executor threads), then reports task
    // completion or failure to the node controller's work queue.
    Thread ct = Thread.currentThread();
    String threadName = ct.getName();
    // the thread is not escaped from interruption.
    if (!addPendingThread(ct)) {
        // Task was aborted before it could start; report failure and bail out.
        exceptions.add(new InterruptedException("Task " + getTaskAttemptId() + " was aborted!"));
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
        return;
    }
    ct.setName(displayName + ":" + taskAttemptId + ":" + 0);
    try {
        try {
            operator.initialize();
            if (collectors.length > 0) {
                // Semaphore starts with n-1 permits; each helper thread (for inputs
                // 1..n-1) takes one permit before being scheduled and releases it when
                // done, so acquiring all n-1 permits below waits for every helper.
                final Semaphore sem = new Semaphore(collectors.length - 1);
                for (int i = 1; i < collectors.length; ++i) {
                    final IPartitionCollector collector = collectors[i];
                    final IFrameWriter writer = operator.getInputFrameWriter(i);
                    sem.acquire();
                    final int cIdx = i;
                    executorService.execute(new Runnable() {

                        @Override
                        public void run() {
                            Thread thread = Thread.currentThread();
                            // the thread is not escaped from interruption.
                            if (!addPendingThread(thread)) {
                                return;
                            }
                            String oldName = thread.getName();
                            thread.setName(displayName + ":" + taskAttemptId + ":" + cIdx);
                            thread.setPriority(Thread.MIN_PRIORITY);
                            try {
                                pushFrames(collector, inputChannelsFromConnectors.get(cIdx), writer);
                            } catch (HyracksDataException e) {
                                // Record the failure; the main thread reports all
                                // accumulated exceptions after the task winds down.
                                synchronized (Task.this) {
                                    exceptions.add(e);
                                }
                            } finally {
                                thread.setName(oldName);
                                sem.release();
                                removePendingThread(thread);
                            }
                        }
                    });
                }
                try {
                    // Input 0 is drained on the task's own thread.
                    pushFrames(collectors[0], inputChannelsFromConnectors.get(0), operator.getInputFrameWriter(0));
                } finally {
                    // Block until every helper thread has released its permit,
                    // so the operator is not deinitialized while inputs are live.
                    sem.acquire(collectors.length - 1);
                }
            }
        } finally {
            operator.deinitialize();
        }
        NodeControllerService ncs = joblet.getNodeController();
        ncs.getWorkQueue().schedule(new NotifyTaskCompleteWork(ncs, this));
    } catch (Exception e) {
        exceptions.add(e);
    } finally {
        // Always restore the thread name, release task resources, and
        // deregister this thread, regardless of success or failure.
        ct.setName(threadName);
        close();
        removePendingThread(ct);
    }
    if (!exceptions.isEmpty()) {
        // Forward any failures (from this thread or helpers) to the CC.
        // NOTE(review): printStackTrace bypasses the logging framework —
        // consider routing through a logger instead; confirm project convention.
        for (Exception e : exceptions) {
            e.printStackTrace();
        }
        NodeControllerService ncs = joblet.getNodeController();
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
    }
}
Also used : IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) Semaphore(java.util.concurrent.Semaphore) NotifyTaskCompleteWork(org.apache.hyracks.control.nc.work.NotifyTaskCompleteWork) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) NotifyTaskFailureWork(org.apache.hyracks.control.nc.work.NotifyTaskFailureWork)

Example 5 with IPartitionCollector

use of org.apache.hyracks.api.comm.IPartitionCollector in project asterixdb by apache.

Source: class LocalityAwareMToNPartitioningConnectorDescriptor, method createPartitionCollector.

/**
 * Creates the collector for {@code receiverIndex}: it accepts frames only from
 * those producer partitions that the locality map connects to this receiver.
 *
 * @see org.apache.hyracks.api.dataflow.IConnectorDescriptor#createPartitionCollector(
 *      org.apache.hyracks.api.context.IHyracksTaskContext,
 *      org.apache.hyracks.api.dataflow.value.RecordDescriptor, int, int, int)
 */
@Override
public IPartitionCollector createPartitionCollector(IHyracksTaskContext ctx, RecordDescriptor recordDesc, int receiverIndex, int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
    // Mark each producer partition that the locality map wires to this receiver.
    final BitSet expected = new BitSet(nProducerPartitions);
    int producer = 0;
    while (producer < nProducerPartitions) {
        if (localityMap.isConnected(producer, receiverIndex, nConsumerPartitions)) {
            expected.set(producer);
        }
        producer++;
    }
    final NonDeterministicChannelReader channels = new NonDeterministicChannelReader(nProducerPartitions, expected);
    final NonDeterministicFrameReader frames = new NonDeterministicFrameReader(channels);
    return new PartitionCollector(ctx, getConnectorId(), receiverIndex, expected, frames, channels);
}
Also used : IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) PartitionCollector(org.apache.hyracks.dataflow.std.collectors.PartitionCollector) NonDeterministicFrameReader(org.apache.hyracks.dataflow.std.collectors.NonDeterministicFrameReader) BitSet(java.util.BitSet) NonDeterministicChannelReader(org.apache.hyracks.dataflow.std.collectors.NonDeterministicChannelReader)

Aggregations

IPartitionCollector (org.apache.hyracks.api.comm.IPartitionCollector)7 BitSet (java.util.BitSet)5 PartitionCollector (org.apache.hyracks.dataflow.std.collectors.PartitionCollector)5 NonDeterministicChannelReader (org.apache.hyracks.dataflow.std.collectors.NonDeterministicChannelReader)4 NonDeterministicFrameReader (org.apache.hyracks.dataflow.std.collectors.NonDeterministicFrameReader)4 IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter)2 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)2 HyracksException (org.apache.hyracks.api.exceptions.HyracksException)2 UnknownHostException (java.net.UnknownHostException)1 ArrayList (java.util.ArrayList)1 List (java.util.List)1 Semaphore (java.util.concurrent.Semaphore)1 INCServiceContext (org.apache.hyracks.api.application.INCServiceContext)1 IFrameReader (org.apache.hyracks.api.comm.IFrameReader)1 IPartitionWriterFactory (org.apache.hyracks.api.comm.IPartitionWriterFactory)1 ActivityId (org.apache.hyracks.api.dataflow.ActivityId)1 IActivity (org.apache.hyracks.api.dataflow.IActivity)1 IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor)1 IOperatorNodePushable (org.apache.hyracks.api.dataflow.IOperatorNodePushable)1 TaskAttemptId (org.apache.hyracks.api.dataflow.TaskAttemptId)1