Use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.
The class ExecutionPlan, method initialize().
public void initialize(NodeEngine nodeEngine, long jobId, long executionId, SnapshotContext snapshotContext) {
    this.nodeEngine = nodeEngine;
    this.executionId = executionId;
    initProcSuppliers();
    initDag();
    this.ptionArrgmt = new PartitionArrangement(partitionOwners, nodeEngine.getThisAddress());
    JetInstance instance = getJetInstance(nodeEngine);
    for (VertexDef vertex : vertices) {
        Collection<? extends Processor> processors = createProcessors(vertex, vertex.localParallelism());

        // create StoreSnapshotTasklet and the queues to it
        QueuedPipe<Object>[] snapshotQueues = new QueuedPipe[vertex.localParallelism()];
        Arrays.setAll(snapshotQueues, i -> new OneToOneConcurrentArrayQueue<>(SNAPSHOT_QUEUE_SIZE));
        ConcurrentConveyor<Object> ssConveyor = ConcurrentConveyor.concurrentConveyor(null, snapshotQueues);
        StoreSnapshotTasklet ssTasklet = new StoreSnapshotTasklet(snapshotContext, jobId,
                new ConcurrentInboundEdgeStream(ssConveyor, 0, 0, lastSnapshotId, true, -1,
                        "ssFrom:" + vertex.name()),
                nodeEngine, vertex.name(), vertex.isHigherPriorityUpstream());
        tasklets.add(ssTasklet);

        int localProcessorIdx = 0;
        for (Processor p : processors) {
            int globalProcessorIndex = vertex.getProcIdxOffset() + localProcessorIdx;
            String loggerName = createLoggerName(p.getClass().getName(), vertex.name(), globalProcessorIndex);
            ProcCtx context = new ProcCtx(instance, nodeEngine.getSerializationService(),
                    nodeEngine.getLogger(loggerName), vertex.name(), globalProcessorIndex,
                    jobConfig.getProcessingGuarantee(), vertex.localParallelism(), vertex.totalParallelism());
            String probePrefix = String.format("jet.job.%s.%s#%d",
                    idToString(executionId), vertex.name(), localProcessorIdx);
            ((NodeEngineImpl) nodeEngine).getMetricsRegistry().scanAndRegister(p, probePrefix);

            // createOutboundEdgeStreams() populates localConveyorMap and edgeSenderConveyorMap.
            // Also populates instance fields: senderMap, receiverMap, tasklets.
            List<OutboundEdgeStream> outboundStreams = createOutboundEdgeStreams(vertex, localProcessorIdx);
            List<InboundEdgeStream> inboundStreams = createInboundEdgeStreams(vertex, localProcessorIdx);

            OutboundCollector snapshotCollector = new ConveyorCollector(ssConveyor, localProcessorIdx, null);
            ProcessorTasklet processorTasklet = new ProcessorTasklet(context, p, inboundStreams, outboundStreams,
                    snapshotContext, snapshotCollector, jobConfig.getMaxWatermarkRetainMillis());
            tasklets.add(processorTasklet);
            this.processors.add(p);
            localProcessorIdx++;
        }
    }
    List<ReceiverTasklet> allReceivers = receiverMap.values().stream()
            .flatMap(o -> o.values().stream())
            .flatMap(a -> a.values().stream())
            .collect(toList());
    tasklets.addAll(allReceivers);
}
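For context, each Processor created above is driven by its ProcessorTasklet: the tasklet feeds it inbound items and drains its outbox, retrying any cooperative call that returns false. A minimal sketch of the kind of Processor this plan wires up (the class name and transformation are hypothetical, not from this project; AbstractProcessor is Jet's convenience base class):

import com.hazelcast.jet.core.AbstractProcessor;

// Hypothetical example processor, for illustration only.
public class UpperCaseP extends AbstractProcessor {
    @Override
    protected boolean tryProcess(int ordinal, Object item) {
        // tryEmit() returns false when the outbox is full; returning false
        // here makes the tasklet call tryProcess() again with the same item.
        return tryEmit(((String) item).toUpperCase());
    }
}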
Use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.
The class Reducers, method buildCombiner().
private static <T> Vertex buildCombiner(DAG dag, Vertex accumulate, BinaryOperator<T> combiner) {
    DistributedSupplier<Processor> supplier = () -> new CombineP<>(combiner);
    Vertex combine = dag.newVertex("combine", supplier).localParallelism(1);
    dag.edge(between(accumulate, combine).distributed().allToOne());
    return combine;
}
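CombineP itself is not shown in this excerpt. A plausible sketch of such a combining processor, assuming it folds the partial results arriving from every accumulate processor and emits one value once its input is exhausted (the body is illustrative, not the actual source):

import java.util.function.BinaryOperator;
import com.hazelcast.jet.core.AbstractProcessor;

// Illustrative sketch only; the real CombineP may differ.
class CombineP<T> extends AbstractProcessor {
    private final BinaryOperator<T> combiner;
    private T result;

    CombineP(BinaryOperator<T> combiner) {
        this.combiner = combiner;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected boolean tryProcess(int ordinal, Object item) {
        // fold each incoming partial result into the running value
        result = result == null ? (T) item : combiner.apply(result, (T) item);
        return true;
    }

    @Override
    public boolean complete() {
        // emit the final value; tryEmit() returns false while the outbox
        // is full, in which case complete() is simply retried
        return result == null || tryEmit(result);
    }
}

Note the design choice in buildCombiner(): localParallelism(1) together with the distributed().allToOne() edge routes every partial result to a single processor instance cluster-wide, which is what makes the reduction global.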
Use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.
The class CollectorReducer, method buildCombiner().
static <A, R> Vertex buildCombiner(DAG dag, Vertex accumulatorVertex, Object combiner) {
    DistributedSupplier<Processor> processorSupplier = getCombinerSupplier(combiner);
    Vertex combinerVertex = dag.newVertex("combiner", processorSupplier).localParallelism(1);
    dag.edge(between(accumulatorVertex, combinerVertex).distributed().allToOne());
    return combinerVertex;
}
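A hedged usage sketch of where this helper sits in a complete DAG; accumulatorSupplier and collector are assumptions about the surrounding reducer code, not names from this excerpt:

// Hypothetical wiring around buildCombiner().
DAG dag = new DAG();
Vertex accumulator = dag.newVertex("accumulator", accumulatorSupplier);
Vertex combiner = buildCombiner(dag, accumulator, collector.combiner());
// attach a sink to 'combiner' to receive the single reduced result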
Use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.
The class StreamEventJournalPTest, method when_lostItems_afterRestore().
@Test
public void when_lostItems_afterRestore() {
    TestOutbox outbox = new TestOutbox(new int[] {16}, 16);
    final Processor p = supplier.get();
    p.init(outbox, new TestProcessorContext());
    List<Object> output = new ArrayList<>();
    assertTrueEventually(() -> {
        assertFalse("Processor should never complete", p.complete());
        outbox.drainQueueAndReset(0, output, true);
        assertEquals("consumed different number of items than expected", 0, output.size());
    }, 3);
    assertTrueEventually(() -> assertTrue("Processor did not finish snapshot", p.saveToSnapshot()), 3);

    // overflow the journal
    fillJournal(CAPACITY_PER_PARTITION + 1);

    List<Entry> snapshotItems = new ArrayList<>();
    outbox.drainSnapshotQueueAndReset(snapshotItems, false);
    System.out.println("Restoring journal");

    // restore from snapshot
    assertRestore(snapshotItems);
}
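assertRestore() is a helper outside this excerpt. A plausible sketch, assuming it replays the snapshotted journal offsets into a fresh processor through the standard restoreFromSnapshot()/finishSnapshotRestore() contract (illustrative only, not the actual test code):

// Illustrative sketch of the assertRestore helper; the real one may differ.
private void assertRestore(List<Entry> snapshotItems) throws Exception {
    Processor restored = supplier.get();
    TestOutbox newOutbox = new TestOutbox(new int[] {16}, 16);
    restored.init(newOutbox, new TestProcessorContext());

    TestInbox inbox = new TestInbox();
    inbox.addAll(snapshotItems);                   // replay the saved offsets
    restored.restoreFromSnapshot(inbox);
    assertTrue(restored.finishSnapshotRestore());

    // from here the test can drive complete() and drain the outbox,
    // as the surrounding tests do
}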
Use of com.hazelcast.jet.core.Processor in project hazelcast-jet by hazelcast.
The class StreamEventJournalPTest, method when_lostItems().
@Test
public void when_lostItems() {
    TestOutbox outbox = new TestOutbox(new int[] {16}, 16);
    Processor p = supplier.get();
    p.init(outbox, new TestProcessorContext());

    // overflow the journal
    fillJournal(CAPACITY_PER_PARTITION + 1);

    // fill and consume
    List<Object> actual = new ArrayList<>();
    assertTrueEventually(() -> {
        assertFalse("Processor should never complete", p.complete());
        outbox.drainQueueAndReset(0, actual, true);
        assertEquals("consumed different number of items than expected", JOURNAL_CAPACITY, actual.size());
    }, 3);
}
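fillJournal() is likewise a helper not shown here. A plausible sketch, assuming the test writes to a journaled IMap so that exceeding the per-partition ring-buffer capacity drops the oldest events before the processor reads them (map and PARTITION_COUNT are assumptions about the test fixture):

// Illustrative sketch of the fillJournal helper; the real one may differ.
private void fillJournal(int countPerPartition) {
    for (int i = 0; i < countPerPartition * PARTITION_COUNT; i++) {
        map.put(i, i); // each put appends an event to the map's journal
    }
}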