Example 41 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.

the class ExecutionPlan method initialize.

public void initialize(NodeEngine nodeEngine, long jobId, long executionId, SnapshotContext snapshotContext) {
    this.nodeEngine = nodeEngine;
    this.executionId = executionId;
    initProcSuppliers();
    initDag();
    this.ptionArrgmt = new PartitionArrangement(partitionOwners, nodeEngine.getThisAddress());
    JetInstance instance = getJetInstance(nodeEngine);
    for (VertexDef vertex : vertices) {
        Collection<? extends Processor> processors = createProcessors(vertex, vertex.localParallelism());
        // create StoreSnapshotTasklet and the queues to it
        QueuedPipe<Object>[] snapshotQueues = new QueuedPipe[vertex.localParallelism()];
        Arrays.setAll(snapshotQueues, i -> new OneToOneConcurrentArrayQueue<>(SNAPSHOT_QUEUE_SIZE));
        ConcurrentConveyor<Object> ssConveyor = ConcurrentConveyor.concurrentConveyor(null, snapshotQueues);
        StoreSnapshotTasklet ssTasklet = new StoreSnapshotTasklet(snapshotContext, jobId,
                new ConcurrentInboundEdgeStream(ssConveyor, 0, 0, lastSnapshotId, true, -1,
                        "ssFrom:" + vertex.name()),
                nodeEngine, vertex.name(), vertex.isHigherPriorityUpstream());
        tasklets.add(ssTasklet);
        int localProcessorIdx = 0;
        for (Processor p : processors) {
            int globalProcessorIndex = vertex.getProcIdxOffset() + localProcessorIdx;
            String loggerName = createLoggerName(p.getClass().getName(), vertex.name(), globalProcessorIndex);
            ProcCtx context = new ProcCtx(instance, nodeEngine.getSerializationService(),
                    nodeEngine.getLogger(loggerName), vertex.name(), globalProcessorIndex,
                    jobConfig.getProcessingGuarantee(), vertex.localParallelism(), vertex.totalParallelism());
            String probePrefix = String.format("jet.job.%s.%s#%d", idToString(executionId), vertex.name(), localProcessorIdx);
            ((NodeEngineImpl) nodeEngine).getMetricsRegistry().scanAndRegister(p, probePrefix);
            // createOutboundEdgeStreams() populates localConveyorMap and edgeSenderConveyorMap.
            // Also populates instance fields: senderMap, receiverMap, tasklets.
            List<OutboundEdgeStream> outboundStreams = createOutboundEdgeStreams(vertex, localProcessorIdx);
            List<InboundEdgeStream> inboundStreams = createInboundEdgeStreams(vertex, localProcessorIdx);
            OutboundCollector snapshotCollector = new ConveyorCollector(ssConveyor, localProcessorIdx, null);
            ProcessorTasklet processorTasklet = new ProcessorTasklet(context, p, inboundStreams, outboundStreams,
                    snapshotContext, snapshotCollector, jobConfig.getMaxWatermarkRetainMillis());
            tasklets.add(processorTasklet);
            this.processors.add(p);
            localProcessorIdx++;
        }
    }
    // receiverMap was populated by createOutboundEdgeStreams(); flatten its nested maps into a single list
    List<ReceiverTasklet> allReceivers = receiverMap.values().stream()
            .flatMap(o -> o.values().stream())
            .flatMap(a -> a.values().stream())
            .collect(toList());
    tasklets.addAll(allReceivers);
}
Also used : Arrays(java.util.Arrays) SnapshotContext(com.hazelcast.jet.impl.execution.SnapshotContext) ConcurrentConveyor.concurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor.concurrentConveyor) Processor(com.hazelcast.jet.core.Processor) Address(com.hazelcast.nio.Address) OutboundCollector.compositeCollector(com.hazelcast.jet.impl.execution.OutboundCollector.compositeCollector) Util.writeList(com.hazelcast.jet.impl.util.Util.writeList) Util.idToString(com.hazelcast.jet.impl.util.Util.idToString) ProcessorTasklet(com.hazelcast.jet.impl.execution.ProcessorTasklet) Collectors.toMap(java.util.stream.Collectors.toMap) ConcurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor) Map(java.util.Map) Util.memoize(com.hazelcast.jet.impl.util.Util.memoize) ObjectDataInput(com.hazelcast.nio.ObjectDataInput) InboundEdgeStream(com.hazelcast.jet.impl.execution.InboundEdgeStream) Collection(java.util.Collection) Util.getJetInstance(com.hazelcast.jet.impl.util.Util.getJetInstance) JobConfig(com.hazelcast.jet.config.JobConfig) Set(java.util.Set) ConcurrentInboundEdgeStream(com.hazelcast.jet.impl.execution.ConcurrentInboundEdgeStream) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) Util.readList(com.hazelcast.jet.impl.util.Util.readList) List(java.util.List) DEFAULT_QUEUE_SIZE(com.hazelcast.jet.config.EdgeConfig.DEFAULT_QUEUE_SIZE) StoreSnapshotTasklet(com.hazelcast.jet.impl.execution.StoreSnapshotTasklet) ObjectDataOutput(com.hazelcast.nio.ObjectDataOutput) IntStream(java.util.stream.IntStream) IdentifiedDataSerializable(com.hazelcast.nio.serialization.IdentifiedDataSerializable) JetInstance(com.hazelcast.jet.JetInstance) OutboundEdgeStream(com.hazelcast.jet.impl.execution.OutboundEdgeStream) RoutingPolicy(com.hazelcast.jet.core.Edge.RoutingPolicy) SenderTasklet(com.hazelcast.jet.impl.execution.SenderTasklet) HashMap(java.util.HashMap) Supplier(java.util.function.Supplier) ProcSupplierCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcSupplierCtx) ArrayList(java.util.ArrayList) JetException(com.hazelcast.jet.JetException) ConveyorCollector(com.hazelcast.jet.impl.execution.ConveyorCollector) ReceiverTasklet(com.hazelcast.jet.impl.execution.ReceiverTasklet) ILogger(com.hazelcast.logging.ILogger) ProcessorSupplier(com.hazelcast.jet.core.ProcessorSupplier) QueuedPipe(com.hazelcast.internal.util.concurrent.QueuedPipe) JetConfig(com.hazelcast.jet.config.JetConfig) NodeEngineImpl(com.hazelcast.spi.impl.NodeEngineImpl) OneToOneConcurrentArrayQueue(com.hazelcast.internal.util.concurrent.OneToOneConcurrentArrayQueue) Tasklet(com.hazelcast.jet.impl.execution.Tasklet) ProcCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcCtx) IPartitionService(com.hazelcast.spi.partition.IPartitionService) IOException(java.io.IOException) NodeEngine(com.hazelcast.spi.NodeEngine) ConveyorCollectorWithPartition(com.hazelcast.jet.impl.execution.ConveyorCollectorWithPartition) Collectors.toList(java.util.stream.Collectors.toList) OutboundCollector(com.hazelcast.jet.impl.execution.OutboundCollector) JetService(com.hazelcast.jet.impl.JetService) ProcessingGuarantee(com.hazelcast.jet.config.ProcessingGuarantee)
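
The ProcCtx built in this loop backs the Processor.Context that each processor receives in init(). As a minimal, hedged sketch (not part of ExecutionPlan; the class name is made up), a processor can read that identity information back through the public API roughly like this:

import javax.annotation.Nonnull;
import com.hazelcast.jet.core.AbstractProcessor;

// Hypothetical processor that logs the identity data which ExecutionPlan.initialize()
// packs into each ProcCtx (vertex name, global index, parallelism).
class WhoAmIP extends AbstractProcessor {
    @Override
    protected void init(@Nonnull Context context) {
        getLogger().info("vertex=" + context.vertexName()
                + " globalIndex=" + context.globalProcessorIndex()
                + " localParallelism=" + context.localParallelism()
                + " totalParallelism=" + context.totalParallelism());
    }
}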

Example 42 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.

the class Reducers method buildCombiner.

private static <T> Vertex buildCombiner(DAG dag, Vertex accumulate, BinaryOperator<T> combiner) {
    DistributedSupplier<Processor> supplier = () -> new CombineP<>(combiner);
    Vertex combine = dag.newVertex("combine", supplier).localParallelism(1);
    dag.edge(between(accumulate, combine).distributed().allToOne());
    return combine;
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) Processor(com.hazelcast.jet.core.Processor) CombineP(com.hazelcast.jet.stream.impl.processor.CombineP)
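
CombineP itself lives in the internal jet-stream module. As a rough, hypothetical stand-in written against the public core API (class and field names are invented here), a combiner processor of this shape could look like the following. Because the edge is distributed().allToOne() and the vertex uses localParallelism(1), a single instance of it sees every partial result.

import java.util.function.BinaryOperator;
import com.hazelcast.jet.core.AbstractProcessor;

// Hypothetical combiner: folds all incoming partial results with the supplied
// operator and emits the single combined value once the input is exhausted.
class SimpleCombineP<T> extends AbstractProcessor {
    private final BinaryOperator<T> combiner;
    private T acc;

    SimpleCombineP(BinaryOperator<T> combiner) {
        this.combiner = combiner;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected boolean tryProcess(int ordinal, Object item) {
        acc = acc == null ? (T) item : combiner.apply(acc, (T) item);
        return true;
    }

    @Override
    public boolean complete() {
        // Emit the combined value; returns false (and is retried) if the outbox is full.
        return acc == null || tryEmit(acc);
    }
}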

Example 43 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.

the class CollectorReducer method buildCombiner.

static <A, R> Vertex buildCombiner(DAG dag, Vertex accumulatorVertex, Object combiner) {
    DistributedSupplier<Processor> processorSupplier = getCombinerSupplier(combiner);
    Vertex combinerVertex = dag.newVertex("combiner", processorSupplier).localParallelism(1);
    dag.edge(between(accumulatorVertex, combinerVertex).distributed().allToOne());
    return combinerVertex;
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) Processor(com.hazelcast.jet.core.Processor)
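
Both this combiner and the one in the previous example are wired with a distributed all-to-one edge. A hedged, self-contained sketch of that wiring in isolation (the class name, vertex names and the no-op suppliers are placeholders, not the real accumulate/combine processors):

import com.hazelcast.jet.core.DAG;
import com.hazelcast.jet.core.Vertex;
import static com.hazelcast.jet.core.Edge.between;
import static com.hazelcast.jet.core.processor.Processors.noopP;

public class AllToOneSketch {
    public static DAG build() {
        DAG dag = new DAG();
        Vertex accumulate = dag.newVertex("accumulate", noopP());
        // allToOne() routes every item, cluster-wide, to one processor instance,
        // which is why the combiner vertex is pinned to localParallelism(1).
        Vertex combine = dag.newVertex("combine", noopP()).localParallelism(1);
        dag.edge(between(accumulate, combine).distributed().allToOne());
        return dag;
    }
}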

Example 44 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.

the class StreamEventJournalPTest method when_lostItems_afterRestore.

@Test
public void when_lostItems_afterRestore() {
    TestOutbox outbox = new TestOutbox(new int[] { 16 }, 16);
    final Processor p = supplier.get();
    p.init(outbox, new TestProcessorContext());
    List<Object> output = new ArrayList<>();
    assertTrueEventually(() -> {
        assertFalse("Processor should never complete", p.complete());
        outbox.drainQueueAndReset(0, output, true);
        assertTrue("consumed different number of items than expected", output.size() == 0);
    }, 3);
    assertTrueEventually(() -> {
        assertTrue("Processor did not finish snapshot", p.saveToSnapshot());
    }, 3);
    // overflow journal
    fillJournal(CAPACITY_PER_PARTITION + 1);
    List<Entry> snapshotItems = new ArrayList<>();
    outbox.drainSnapshotQueueAndReset(snapshotItems, false);
    System.out.println("Restoring journal");
    // restore from snapshot
    assertRestore(snapshotItems);
}
Also used : Entry(java.util.Map.Entry) Processor(com.hazelcast.jet.core.Processor) TestOutbox(com.hazelcast.jet.core.test.TestOutbox) ArrayList(java.util.ArrayList) TestProcessorContext(com.hazelcast.jet.core.test.TestProcessorContext) Test(org.junit.Test)
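
The saveToSnapshot()/drainSnapshotQueueAndReset() round trip exercised above works for any processor that emits its state to the snapshot. A minimal, hedged sketch (not the event-journal source under test; the class name and snapshot key are illustrative):

import com.hazelcast.jet.core.AbstractProcessor;

// Hypothetical processor: counts items and keeps the count in snapshots.
class CountingP extends AbstractProcessor {
    private long count;

    @Override
    protected boolean tryProcess(int ordinal, Object item) {
        count++;
        return true;
    }

    @Override
    public boolean saveToSnapshot() {
        // Returns false if the snapshot outbox is full; Jet calls saveToSnapshot() again.
        // The key choice is illustrative; real sources typically key by partition id.
        return tryEmitToSnapshot("count", count);
    }

    @Override
    protected void restoreFromSnapshot(Object key, Object value) {
        count = (Long) value;
    }
}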

Example 45 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.

the class StreamEventJournalPTest method when_lostItems.

@Test
public void when_lostItems() {
    TestOutbox outbox = new TestOutbox(new int[] { 16 }, 16);
    Processor p = supplier.get();
    p.init(outbox, new TestProcessorContext());
    // overflow the journal
    fillJournal(CAPACITY_PER_PARTITION + 1);
    // fill and consume
    List<Object> actual = new ArrayList<>();
    assertTrueEventually(() -> {
        assertFalse("Processor should never complete", p.complete());
        outbox.drainQueueAndReset(0, actual, true);
        assertTrue("consumed different number of items than expected", actual.size() == JOURNAL_CAPACITY);
    }, 3);
}
Also used : Processor(com.hazelcast.jet.core.Processor) TestOutbox(com.hazelcast.jet.core.test.TestOutbox) ArrayList(java.util.ArrayList) TestProcessorContext(com.hazelcast.jet.core.test.TestProcessorContext) Test(org.junit.Test)
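
For readers new to the harness used in these two tests, this is a minimal, self-contained sketch of driving a Processor by hand with TestInbox, TestOutbox and TestProcessorContext. The mapping processor and the item values are placeholders; the real tests drive the event-journal source instead.

import java.util.ArrayList;
import java.util.List;
import com.hazelcast.jet.core.Processor;
import com.hazelcast.jet.core.processor.Processors;
import com.hazelcast.jet.core.test.TestInbox;
import com.hazelcast.jet.core.test.TestOutbox;
import com.hazelcast.jet.core.test.TestProcessorContext;

public class ManualProcessorDrive {
    public static void main(String[] args) {
        // A trivial built-in mapping processor stands in for the source under test.
        Processor p = Processors.mapP((Integer i) -> i * 2).get();
        TestOutbox outbox = new TestOutbox(new int[] { 16 }, 16);
        p.init(outbox, new TestProcessorContext());

        TestInbox inbox = new TestInbox();
        inbox.add(21);
        p.process(0, inbox);

        List<Object> out = new ArrayList<>();
        outbox.drainQueueAndReset(0, out, false);
        System.out.println(out); // expected: [42]
    }
}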

Aggregations

Processor (com.hazelcast.jet.core.Processor) 49
Test (org.junit.Test) 24
ArrayList (java.util.ArrayList) 22
TestProcessorContext (com.hazelcast.jet.core.test.TestProcessorContext) 17
QuickTest (com.hazelcast.test.annotation.QuickTest) 17
ParallelJVMTest (com.hazelcast.test.annotation.ParallelJVMTest) 16
TestOutbox (com.hazelcast.jet.core.test.TestOutbox) 14
List (java.util.List) 13
Nonnull (javax.annotation.Nonnull) 13
TestInbox (com.hazelcast.jet.core.test.TestInbox) 11
Watermark (com.hazelcast.jet.core.Watermark) 10
Collection (java.util.Collection) 9
Collections.singletonList (java.util.Collections.singletonList) 9
Entry (java.util.Map.Entry) 9
ProcessorSupplier (com.hazelcast.jet.core.ProcessorSupplier) 8
FunctionEx (com.hazelcast.function.FunctionEx) 7
Job (com.hazelcast.jet.Job) 7
JobConfig (com.hazelcast.jet.config.JobConfig) 7
DAG (com.hazelcast.jet.core.DAG) 7
Arrays (java.util.Arrays) 6