Search in sources :

Example 11 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast by hazelcast.

From the class UpdateProcessorSupplier, method get.

@Nonnull
@Override
public Collection<? extends Processor> get(int count) {
    // One update processor per requested unit of local parallelism.
    List<Processor> result = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        // Copy the field into a local before building the lambda —
        // presumably so the serialized service function captures only the
        // String, not this supplier instance; confirm against serialization path.
        String localMapName = this.mapName;
        Processor updateProcessor = new AsyncTransformUsingServiceBatchedP<>(
                ServiceFactories.nonSharedService(SecuredFunctions.iMapFn(localMapName)),
                null,
                MAX_CONCURRENT_OPS,
                MAX_BATCH_SIZE,
                (IMap<Object, Object> map, List<JetSqlRow> rows) -> update(rows, map));
        result.add(updateProcessor);
    }
    return result;
}
Also used : IMap(com.hazelcast.map.IMap) Processor(com.hazelcast.jet.core.Processor) ArrayList(java.util.ArrayList) ArrayList(java.util.ArrayList) Collections.singletonList(java.util.Collections.singletonList) List(java.util.List) AsyncTransformUsingServiceBatchedP(com.hazelcast.jet.impl.processor.AsyncTransformUsingServiceBatchedP) Nonnull(javax.annotation.Nonnull)

Example 12 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast by hazelcast.

From the class ExecutionPlan, method initialize.

/**
 * A method called on the members as part of the InitExecutionOperation.
 * Creates tasklets, inboxes/outboxes and connects these to make them ready
 * for a later StartExecutionOperation.
 * <p>
 * Side effects: populates {@code tasklets}, {@code processors},
 * {@code memberConnections} and (via the create*EdgeStreams helpers, per the
 * comments below) {@code senderMap}/{@code receiverMap} and the conveyor maps.
 *
 * @param nodeEngine              node engine of this member
 * @param jobId                   id of the job being initialized
 * @param executionId             id of this particular execution attempt
 * @param snapshotContext         shared context for snapshot save/restore
 * @param tempDirectories         per-job temporary directories passed to processor contexts
 * @param jobSerializationService serialization service scoped to this job
 * @throws TopologyChangedException if a remote job participant is no longer connected
 */
public void initialize(NodeEngineImpl nodeEngine, long jobId, long executionId, @Nonnull SnapshotContext snapshotContext, ConcurrentHashMap<String, File> tempDirectories, InternalSerializationService jobSerializationService) {
    this.nodeEngine = nodeEngine;
    this.jobClassLoaderService = ((JetServiceBackend) nodeEngine.getService(JetServiceBackend.SERVICE_NAME)).getJobClassLoaderService();
    this.executionId = executionId;
    // Set up processor suppliers and the DAG structures first; everything
    // below (vertices, edges, partition arrangement) depends on them.
    initProcSuppliers(jobId, tempDirectories, jobSerializationService);
    initDag(jobSerializationService);
    this.ptionArrgmt = new PartitionArrangement(partitionAssignment, nodeEngine.getThisAddress());
    Set<Integer> higherPriorityVertices = VertexDef.getHigherPriorityVertices(vertices);
    // Cache a connection to every remote participant up front; a missing
    // connection means the topology changed since the job was planned, so fail fast.
    for (Address destAddr : remoteMembers.get()) {
        Connection conn = getMemberConnection(nodeEngine, destAddr);
        if (conn == null) {
            throw new TopologyChangedException("no connection to job participant: " + destAddr);
        }
        memberConnections.put(destAddr, conn);
    }
    for (VertexDef vertex : vertices) {
        // Light jobs run without a per-processor class loader (null here).
        ClassLoader processorClassLoader = isLightJob ? null : jobClassLoaderService.getProcessorClassLoader(jobId, vertex.name());
        Collection<? extends Processor> processors = doWithClassLoader(processorClassLoader, () -> createProcessors(vertex, vertex.localParallelism()));
        String jobPrefix = prefix(jobConfig.getName(), jobId, vertex.name());
        // create StoreSnapshotTasklet and the queues to it
        ConcurrentConveyor<Object> ssConveyor = null;
        if (!isLightJob) {
            // Note that we create the snapshot queues for all non-light jobs, even if they don't have
            // processing guarantee enabled, because in EE one can request a snapshot also for
            // non-snapshotted jobs.
            @SuppressWarnings("unchecked") QueuedPipe<Object>[] snapshotQueues = new QueuedPipe[vertex.localParallelism()];
            Arrays.setAll(snapshotQueues, i -> new OneToOneConcurrentArrayQueue<>(SNAPSHOT_QUEUE_SIZE));
            ssConveyor = ConcurrentConveyor.concurrentConveyor(null, snapshotQueues);
            ILogger storeSnapshotLogger = prefixedLogger(nodeEngine.getLogger(StoreSnapshotTasklet.class), jobPrefix);
            StoreSnapshotTasklet ssTasklet = new StoreSnapshotTasklet(snapshotContext, ConcurrentInboundEdgeStream.create(ssConveyor, 0, 0, true, jobPrefix + "/ssFrom", null), new AsyncSnapshotWriterImpl(nodeEngine, snapshotContext, vertex.name(), memberIndex, memberCount, jobSerializationService), storeSnapshotLogger, vertex.name(), higherPriorityVertices.contains(vertex.vertexId()));
            tasklets.add(ssTasklet);
        }
        int localProcessorIdx = 0;
        for (Processor processor : processors) {
            // Global index is unique across all members: member offset + local index.
            int globalProcessorIndex = memberIndex * vertex.localParallelism() + localProcessorIdx;
            String processorPrefix = prefix(jobConfig.getName(), jobId, vertex.name(), globalProcessorIndex);
            ILogger logger = prefixedLogger(nodeEngine.getLogger(processor.getClass()), processorPrefix);
            ProcCtx context = new ProcCtx(nodeEngine, jobId, executionId, getJobConfig(), logger, vertex.name(), localProcessorIdx, globalProcessorIndex, isLightJob, partitionAssignment, vertex.localParallelism(), memberIndex, memberCount, tempDirectories, jobSerializationService, subject, processorClassLoader);
            // createOutboundEdgeStreams() populates localConveyorMap and edgeSenderConveyorMap.
            // Also populates instance fields: senderMap, receiverMap, tasklets.
            List<OutboundEdgeStream> outboundStreams = createOutboundEdgeStreams(vertex, localProcessorIdx, jobPrefix, jobSerializationService);
            List<InboundEdgeStream> inboundStreams = createInboundEdgeStreams(vertex, localProcessorIdx, jobPrefix, globalProcessorIndex);
            OutboundCollector snapshotCollector = ssConveyor == null ? null : new ConveyorCollector(ssConveyor, localProcessorIdx, null);
            // vertices which are only used for snapshot restore will not be marked as "source=true" in metrics
            // also do not consider snapshot restore edges for determining source tag
            boolean isSource = vertex.inboundEdges().stream().allMatch(EdgeDef::isSnapshotRestoreEdge) && !vertex.isSnapshotVertex();
            ProcessorTasklet processorTasklet = new ProcessorTasklet(context, nodeEngine.getExecutionService().getExecutor(TASKLET_INIT_CLOSE_EXECUTOR_NAME), jobSerializationService, processor, inboundStreams, outboundStreams, snapshotContext, snapshotCollector, isSource);
            tasklets.add(processorTasklet);
            this.processors.add(processor);
            localProcessorIdx++;
        }
    }
    // Flatten the nested receiver map (populated by createInboundEdgeStreams above)
    // and register every ReceiverTasklet for execution.
    List<ReceiverTasklet> allReceivers = receiverMap.values().stream().flatMap(o -> o.values().stream()).flatMap(a -> a.values().stream()).collect(toList());
    tasklets.addAll(allReceivers);
}
Also used : Address(com.hazelcast.cluster.Address) ImdgUtil.getMemberConnection(com.hazelcast.jet.impl.util.ImdgUtil.getMemberConnection) Arrays(java.util.Arrays) SnapshotContext(com.hazelcast.jet.impl.execution.SnapshotContext) Collections.unmodifiableList(java.util.Collections.unmodifiableList) ConcurrentConveyor.concurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor.concurrentConveyor) Processor(com.hazelcast.jet.core.Processor) OutboundCollector.compositeCollector(com.hazelcast.jet.impl.execution.OutboundCollector.compositeCollector) ObjectWithPartitionId(com.hazelcast.jet.impl.util.ObjectWithPartitionId) ProcessorTasklet(com.hazelcast.jet.impl.execution.ProcessorTasklet) ImdgUtil(com.hazelcast.jet.impl.util.ImdgUtil) Collectors.toMap(java.util.stream.Collectors.toMap) ConcurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor) Map(java.util.Map) Util.memoize(com.hazelcast.jet.impl.util.Util.memoize) SerializationServiceAware(com.hazelcast.internal.serialization.SerializationServiceAware) DISTRIBUTE_TO_ALL(com.hazelcast.jet.core.Edge.DISTRIBUTE_TO_ALL) ObjectDataInput(com.hazelcast.nio.ObjectDataInput) TASKLET_INIT_CLOSE_EXECUTOR_NAME(com.hazelcast.jet.impl.execution.TaskletExecutionService.TASKLET_INIT_CLOSE_EXECUTOR_NAME) InboundEdgeStream(com.hazelcast.jet.impl.execution.InboundEdgeStream) PrefixedLogger.prefix(com.hazelcast.jet.impl.util.PrefixedLogger.prefix) Collection(java.util.Collection) IPartitionService(com.hazelcast.internal.partition.IPartitionService) JobConfig(com.hazelcast.jet.config.JobConfig) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ConcurrentInboundEdgeStream(com.hazelcast.jet.impl.execution.ConcurrentInboundEdgeStream) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) List(java.util.List) Stream(java.util.stream.Stream) DEFAULT_QUEUE_SIZE(com.hazelcast.jet.config.EdgeConfig.DEFAULT_QUEUE_SIZE) 
StoreSnapshotTasklet(com.hazelcast.jet.impl.execution.StoreSnapshotTasklet) ObjectDataOutput(com.hazelcast.nio.ObjectDataOutput) TopologyChangedException(com.hazelcast.jet.core.TopologyChangedException) IntStream(java.util.stream.IntStream) ComparatorEx(com.hazelcast.function.ComparatorEx) IdentifiedDataSerializable(com.hazelcast.nio.serialization.IdentifiedDataSerializable) ImdgUtil.writeList(com.hazelcast.jet.impl.util.ImdgUtil.writeList) OutboundEdgeStream(com.hazelcast.jet.impl.execution.OutboundEdgeStream) RoutingPolicy(com.hazelcast.jet.core.Edge.RoutingPolicy) Util.doWithClassLoader(com.hazelcast.jet.impl.util.Util.doWithClassLoader) SenderTasklet(com.hazelcast.jet.impl.execution.SenderTasklet) HashMap(java.util.HashMap) Supplier(java.util.function.Supplier) ProcSupplierCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcSupplierCtx) ArrayList(java.util.ArrayList) PrefixedLogger.prefixedLogger(com.hazelcast.jet.impl.util.PrefixedLogger.prefixedLogger) JetException(com.hazelcast.jet.JetException) ConveyorCollector(com.hazelcast.jet.impl.execution.ConveyorCollector) ReceiverTasklet(com.hazelcast.jet.impl.execution.ReceiverTasklet) ILogger(com.hazelcast.logging.ILogger) InternalSerializationService(com.hazelcast.internal.serialization.InternalSerializationService) Nonnull(javax.annotation.Nonnull) ProcessorSupplier(com.hazelcast.jet.core.ProcessorSupplier) QueuedPipe(com.hazelcast.internal.util.concurrent.QueuedPipe) IntFunction(java.util.function.IntFunction) JetConfig(com.hazelcast.jet.config.JetConfig) NodeEngineImpl(com.hazelcast.spi.impl.NodeEngineImpl) OneToOneConcurrentArrayQueue(com.hazelcast.internal.util.concurrent.OneToOneConcurrentArrayQueue) Connection(com.hazelcast.internal.nio.Connection) Tasklet(com.hazelcast.jet.impl.execution.Tasklet) ProcCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcCtx) AsyncSnapshotWriterImpl(com.hazelcast.jet.impl.util.AsyncSnapshotWriterImpl) IOException(java.io.IOException) 
ConveyorCollectorWithPartition(com.hazelcast.jet.impl.execution.ConveyorCollectorWithPartition) Subject(javax.security.auth.Subject) File(java.io.File) ImdgUtil.readList(com.hazelcast.jet.impl.util.ImdgUtil.readList) Collectors.toList(java.util.stream.Collectors.toList) OutboundCollector(com.hazelcast.jet.impl.execution.OutboundCollector) JobClassLoaderService(com.hazelcast.jet.impl.JobClassLoaderService) ProcessingGuarantee(com.hazelcast.jet.config.ProcessingGuarantee) JetServiceBackend(com.hazelcast.jet.impl.JetServiceBackend) Processor(com.hazelcast.jet.core.Processor) Address(com.hazelcast.cluster.Address) AsyncSnapshotWriterImpl(com.hazelcast.jet.impl.util.AsyncSnapshotWriterImpl) OutboundCollector(com.hazelcast.jet.impl.execution.OutboundCollector) ConveyorCollector(com.hazelcast.jet.impl.execution.ConveyorCollector) ProcCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcCtx) InboundEdgeStream(com.hazelcast.jet.impl.execution.InboundEdgeStream) ConcurrentInboundEdgeStream(com.hazelcast.jet.impl.execution.ConcurrentInboundEdgeStream) Util.doWithClassLoader(com.hazelcast.jet.impl.util.Util.doWithClassLoader) ILogger(com.hazelcast.logging.ILogger) StoreSnapshotTasklet(com.hazelcast.jet.impl.execution.StoreSnapshotTasklet) ProcessorTasklet(com.hazelcast.jet.impl.execution.ProcessorTasklet) ImdgUtil.getMemberConnection(com.hazelcast.jet.impl.util.ImdgUtil.getMemberConnection) Connection(com.hazelcast.internal.nio.Connection) OutboundEdgeStream(com.hazelcast.jet.impl.execution.OutboundEdgeStream) ReceiverTasklet(com.hazelcast.jet.impl.execution.ReceiverTasklet) TopologyChangedException(com.hazelcast.jet.core.TopologyChangedException) QueuedPipe(com.hazelcast.internal.util.concurrent.QueuedPipe)

Example 13 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast by hazelcast.

From the class WriteJdbcP, method metaSupplier.

/**
 * Use {@link SinkProcessors#writeJdbcP}.
 */
public static <T> ProcessorMetaSupplier metaSupplier(@Nullable String jdbcUrl, @Nonnull String updateQuery, @Nonnull SupplierEx<? extends CommonDataSource> dataSourceSupplier, @Nonnull BiConsumerEx<? super PreparedStatement, ? super T> bindFn, boolean exactlyOnce, int batchLimit) {
    // Validate user-supplied functions and the batch limit before constructing anything.
    checkSerializable(dataSourceSupplier, "newConnectionFn");
    checkSerializable(bindFn, "bindFn");
    checkPositive(batchLimit, "batchLimit");
    ProcessorSupplier supplier = new ProcessorSupplier() {

        // Created per member in init(); transient, so it is not serialized
        // along with this supplier.
        private transient CommonDataSource dataSource;

        @Override
        public void init(@Nonnull Context context) {
            dataSource = dataSourceSupplier.get();
        }

        @Nonnull
        @Override
        public Collection<? extends Processor> get(int count) {
            // All processors on this member share the single data source.
            return IntStream.range(0, count)
                    .mapToObj(index -> new WriteJdbcP<>(updateQuery, dataSource, bindFn, exactlyOnce, batchLimit))
                    .collect(Collectors.toList());
        }

        @Override
        public List<Permission> permissions() {
            return singletonList(ConnectorPermission.jdbc(jdbcUrl, ACTION_WRITE));
        }
    };
    return ProcessorMetaSupplier.preferLocalParallelismOne(ConnectorPermission.jdbc(jdbcUrl, ACTION_WRITE), supplier);
}
Also used : Processor(com.hazelcast.jet.core.Processor) Nonnull(javax.annotation.Nonnull) Collection(java.util.Collection) Collections.singletonList(java.util.Collections.singletonList) List(java.util.List) ProcessorSupplier(com.hazelcast.jet.core.ProcessorSupplier) CommonDataSource(javax.sql.CommonDataSource)

Example 14 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast by hazelcast.

From the class ProcessorTasklet, method init.

@Override
public void init() {
    ManagedContext managedCtx = serializationService.getManagedContext();
    if (managedCtx != null) {
        // If the processor is wrapped, initialize the wrapped instance, not the wrapper.
        boolean isWrapped = processor instanceof ProcessorWrapper;
        Processor target;
        if (isWrapped) {
            target = ((ProcessorWrapper) processor).getWrapped();
        } else {
            target = processor;
        }
        Object managed = null;
        try {
            managed = managedCtx.initialize(target);
            target = (Processor) managed;
        } catch (ClassCastException e) {
            // The managed context returned something that is not a Processor.
            throw new IllegalArgumentException(String.format("The initialized object(%s) should be an instance of %s", managed, Processor.class), e);
        }
        // Put the (possibly replaced) instance back where it came from.
        if (isWrapped) {
            ((ProcessorWrapper) processor).setWrapped(target);
        } else {
            processor = target;
        }
    }
    try {
        // Run the processor's own init under its designated class loader.
        doWithClassLoader(context.classLoader(), () -> processor.init(outbox, context));
    } catch (Exception e) {
        throw sneakyThrow(e);
    }
}
Also used : Processor(com.hazelcast.jet.core.Processor) ProcessorWrapper(com.hazelcast.jet.impl.processor.ProcessorWrapper) ManagedContext(com.hazelcast.core.ManagedContext) JetException(com.hazelcast.jet.JetException)

Example 15 with Processor

use of com.hazelcast.jet.core.Processor in project hazelcast by hazelcast.

From the class JobMetrics_MiscTest, test method when_jobSuspended_andMetricsNotStored_then_onlyPeriodicMetricsReturned.

@Test
public void when_jobSuspended_andMetricsNotStored_then_onlyPeriodicMetricsReturned() throws Throwable {
    // Build a trivial two-vertex DAG: a mock source feeding a no-output processor.
    DAG dag = new DAG();
    Vertex first = dag.newVertex("v1", TestProcessors.MockP::new);
    Vertex second = dag.newVertex("v2", (SupplierEx<Processor>) TestProcessors.NoOutputSourceP::new);
    dag.edge(between(first, second));
    // init: metrics collection on, but saving metrics after completion off
    JobConfig jobConfig = new JobConfig();
    jobConfig.setMetricsEnabled(true);
    jobConfig.setStoreMetricsAfterJobCompletion(false);
    Job job = hz().getJet().newJob(dag, jobConfig);
    // when: the job is running
    TestProcessors.NoOutputSourceP.executionStarted.await();
    // then: periodic metrics are present
    assertJobStatusEventually(job, JobStatus.RUNNING);
    assertTrueEventually(() -> assertJobHasMetrics(job, false));
    // when: the job is suspended
    job.suspend();
    // then: no metrics while suspended
    assertJobStatusEventually(job, SUSPENDED);
    assertTrueEventually(() -> assertEmptyJobMetrics(job, false));
    // when: the job is resumed
    job.resume();
    // then: periodic metrics come back
    assertJobStatusEventually(job, RUNNING);
    assertTrueEventually(() -> assertJobHasMetrics(job, false));
    // when: the job runs to completion
    TestProcessors.NoOutputSourceP.proceedLatch.countDown();
    job.join();
    // then: nothing was stored, so the completed job reports no metrics
    assertJobStatusEventually(job, JobStatus.COMPLETED);
    assertEmptyJobMetrics(job, false);
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) TestProcessors(com.hazelcast.jet.core.TestProcessors) Processor(com.hazelcast.jet.core.Processor) MockP(com.hazelcast.jet.core.TestProcessors.MockP) DAG(com.hazelcast.jet.core.DAG) Job(com.hazelcast.jet.Job) JobConfig(com.hazelcast.jet.config.JobConfig) ParallelJVMTest(com.hazelcast.test.annotation.ParallelJVMTest) QuickTest(com.hazelcast.test.annotation.QuickTest) Test(org.junit.Test)

Aggregations

Processor (com.hazelcast.jet.core.Processor)49 Test (org.junit.Test)24 ArrayList (java.util.ArrayList)22 TestProcessorContext (com.hazelcast.jet.core.test.TestProcessorContext)17 QuickTest (com.hazelcast.test.annotation.QuickTest)17 ParallelJVMTest (com.hazelcast.test.annotation.ParallelJVMTest)16 TestOutbox (com.hazelcast.jet.core.test.TestOutbox)14 List (java.util.List)13 Nonnull (javax.annotation.Nonnull)13 TestInbox (com.hazelcast.jet.core.test.TestInbox)11 Watermark (com.hazelcast.jet.core.Watermark)10 Collection (java.util.Collection)9 Collections.singletonList (java.util.Collections.singletonList)9 Entry (java.util.Map.Entry)9 ProcessorSupplier (com.hazelcast.jet.core.ProcessorSupplier)8 FunctionEx (com.hazelcast.function.FunctionEx)7 Job (com.hazelcast.jet.Job)7 JobConfig (com.hazelcast.jet.config.JobConfig)7 DAG (com.hazelcast.jet.core.DAG)7 Arrays (java.util.Arrays)6