
Example 6 with JobConfig

Use of com.hazelcast.jet.config.JobConfig in project hazelcast by hazelcast.

Class JobClassLoaderService, method createProcessorClassLoaders.

private Map<String, ClassLoader> createProcessorClassLoaders(long jobId, JobConfig jobConfig, ClassLoader parent) {
    logger.fine("Create processor classloader map for job " + idToString(jobId));
    String customLibDir = nodeEngine.getProperties().getString(ClusterProperty.PROCESSOR_CUSTOM_LIB_DIR);
    Map<String, ClassLoader> classLoaderMap = new HashMap<>();
    for (Entry<String, List<String>> entry : jobConfig.getCustomClassPaths().entrySet()) {
        List<URL> list = entry.getValue().stream().map(jar -> {
            try {
                Path path = Paths.get(customLibDir, jar);
                return path.toUri().toURL();
            } catch (MalformedURLException e) {
                throw new JetException(e);
            }
        }).collect(Collectors.toList());
        URL[] urls = list.toArray(new URL[] {});
        classLoaderMap.put(entry.getKey(), new ChildFirstClassLoader(urls, parent));
    }
    return unmodifiableMap(classLoaderMap);
}
Also used : URL(java.net.URL) LoggingUtil.logFinest(com.hazelcast.jet.impl.util.LoggingUtil.logFinest) HashMap(java.util.HashMap) JetDelegatingClassLoader(com.hazelcast.jet.impl.deployment.JetDelegatingClassLoader) JetException(com.hazelcast.jet.JetException) ILogger(com.hazelcast.logging.ILogger) Map(java.util.Map) ChildFirstClassLoader(com.hazelcast.jet.impl.deployment.ChildFirstClassLoader) Nonnull(javax.annotation.Nonnull) Path(java.nio.file.Path) EnumSet(java.util.EnumSet) JetConfig(com.hazelcast.jet.config.JetConfig) HazelcastException(com.hazelcast.core.HazelcastException) NodeEngine(com.hazelcast.spi.impl.NodeEngine) MalformedURLException(java.net.MalformedURLException) JobConfig(com.hazelcast.jet.config.JobConfig) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) IOException(java.io.IOException) PrivilegedAction(java.security.PrivilegedAction) Collectors(java.util.stream.Collectors) ClusterProperty(com.hazelcast.spi.properties.ClusterProperty) List(java.util.List) Util.idToString(com.hazelcast.jet.Util.idToString) Paths(java.nio.file.Paths) LoggingUtil.logFine(com.hazelcast.jet.impl.util.LoggingUtil.logFine) Entry(java.util.Map.Entry) JetClassLoader(com.hazelcast.jet.impl.deployment.JetClassLoader) Collections.unmodifiableMap(java.util.Collections.unmodifiableMap) AccessController(java.security.AccessController) Collections(java.util.Collections) Util(com.hazelcast.jet.Util)
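
For context, a minimal, hedged sketch of how the per-vertex custom classpath map consumed by createProcessorClassLoaders is typically populated when submitting a job. JobConfig.addCustomClasspath is part of the public API, but the vertex name and jar file names below are made-up examples; the jars are expected to already sit in the directory configured by ClusterProperty.PROCESSOR_CUSTOM_LIB_DIR on each member.

import com.hazelcast.jet.config.JobConfig;

// Sketch only: register jars for the vertex named "kafkaSource" (hypothetical name).
// Each member resolves these file names against its processor custom-lib directory.
JobConfig jobConfig = new JobConfig();
jobConfig.addCustomClasspath("kafkaSource", "kafka-clients.jar");
jobConfig.addCustomClasspath("kafkaSource", "custom-serializers.jar");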

Example 7 with JobConfig

Use of com.hazelcast.jet.config.JobConfig in project hazelcast by hazelcast.

Class Util, method copyMapUsingJob.

// used in jet-enterprise
@SuppressWarnings("WeakerAccess")
public static CompletableFuture<Void> copyMapUsingJob(HazelcastInstance instance, int queueSize, String sourceMap, String targetMap) {
    DAG dag = new DAG();
    Vertex source = dag.newVertex("readMap(" + sourceMap + ')', readMapP(sourceMap));
    Vertex sink = dag.newVertex("writeMap(" + targetMap + ')', writeMapP(targetMap));
    dag.edge(between(source, sink).setConfig(new EdgeConfig().setQueueSize(queueSize)));
    JobConfig jobConfig = new JobConfig().setName("copy-" + sourceMap + "-to-" + targetMap);
    return instance.getJet().newJob(dag, jobConfig).getFuture();
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) EdgeConfig(com.hazelcast.jet.config.EdgeConfig) DAG(com.hazelcast.jet.core.DAG) JobConfig(com.hazelcast.jet.config.JobConfig)
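
A hedged usage sketch of the helper above: copyMapUsingJob is an internal utility (note the jet-enterprise comment), so treat this as an illustration rather than a supported public API. The instance setup, queue size, and map names are assumptions.

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
// assuming a static import of copyMapUsingJob from the internal Util class shown above

HazelcastInstance hz = Hazelcast.newHazelcastInstance();
hz.getMap("sourceMap").put(1, "one");
// Copy "sourceMap" into "targetMap" with an edge queue size of 1024 and wait for the job to finish.
copyMapUsingJob(hz, 1024, "sourceMap", "targetMap").join();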

Example 8 with JobConfig

Use of com.hazelcast.jet.config.JobConfig in project hazelcast by hazelcast.

Class StreamKafkaPTest, method when_duplicateTopicsProvide_then_uniqueTopicsSubscribed.

@Test
public void when_duplicateTopicsProvide_then_uniqueTopicsSubscribed() {
    HazelcastInstance[] instances = instances();
    assertClusterSizeEventually(2, instances);
    // need a new topic because we want only 2 partitions
    String topic = randomString();
    kafkaTestSupport.createTopic(topic, 2);
    Pipeline p = Pipeline.create();
    // Pass the same topic twice
    p.readFrom(KafkaSources.kafka(properties(), topic, topic)).withoutTimestamps().setLocalParallelism(1).writeTo(Sinks.list("sink"));
    JobConfig config = new JobConfig();
    Job job = instances[0].getJet().newJob(p, config);
    assertJobStatusEventually(job, JobStatus.RUNNING, 10);
    int messageCount = 1000;
    for (int i = 0; i < messageCount; i++) {
        kafkaTestSupport.produce(topic, i, Integer.toString(i));
    }
    IList<Object> list = instances[0].getList("sink");
    try {
        // Wait for all messages
        assertTrueEventually(() -> assertThat(list).hasSize(messageCount), 15);
        // Check that no more messages arrive (i.e. no duplicates)
        assertTrueAllTheTime(() -> assertThat(list).hasSize(messageCount), 1);
    } finally {
        job.cancel();
    }
}
Also used : HazelcastInstance(com.hazelcast.core.HazelcastInstance) Job(com.hazelcast.jet.Job) JobConfig(com.hazelcast.jet.config.JobConfig) Pipeline(com.hazelcast.jet.pipeline.Pipeline) ParallelJVMTest(com.hazelcast.test.annotation.ParallelJVMTest) QuickTest(com.hazelcast.test.annotation.QuickTest) Test(org.junit.Test)
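
The properties() helper called in this test is not reproduced on this page. As a rough, hedged sketch, a Kafka consumer configuration for such a test usually looks like the following; the broker address and deserializer choices are assumptions, not the test's actual helper.

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

private static Properties properties() {
    Properties props = new Properties();
    // Address of the broker started by kafkaTestSupport - assumed value.
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    // Start from the beginning of the topic unless a test overrides it (Example 9 sets "latest").
    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    return props;
}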

Example 9 with JobConfig

Use of com.hazelcast.jet.config.JobConfig in project hazelcast by hazelcast.

Class StreamKafkaPTest, method when_partitionAddedWhileJobDown_then_consumedFromBeginning.

@Test
public void when_partitionAddedWhileJobDown_then_consumedFromBeginning() throws Exception {
    IList<Entry<Integer, String>> sinkList = instance().getList("sinkList");
    Pipeline p = Pipeline.create();
    Properties properties = properties();
    properties.setProperty("auto.offset.reset", "latest");
    p.readFrom(KafkaSources.<Integer, String>kafka(properties, topic1Name)).withoutTimestamps().writeTo(Sinks.list(sinkList));
    Job job = instance().getJet().newJob(p, new JobConfig().setProcessingGuarantee(EXACTLY_ONCE));
    assertTrueEventually(() -> {
        // This might add multiple `0` events to the topic - we need to do this because the source starts from
        // the latest position and we don't exactly know when it starts, so we try repeatedly
        kafkaTestSupport.produce(topic1Name, 0, "0").get();
        assertFalse(sinkList.isEmpty());
        assertEquals(entry(0, "0"), sinkList.get(0));
    });
    job.suspend();
    assertJobStatusEventually(job, JobStatus.SUSPENDED);
    // Note that the job might not have consumed all the zeroes from the topic at this point
    // When
    kafkaTestSupport.setPartitionCount(topic1Name, INITIAL_PARTITION_COUNT + 2);
    // We produce to a partition that didn't exist during the previous job execution.
    // The job must start reading the new partition from the beginning, otherwise it would miss this item.
    Entry<Integer, String> event = produceEventToNewPartition(INITIAL_PARTITION_COUNT);
    job.resume();
    // All events after the resume will be loaded: the non-consumed zeroes, and the possibly multiple
    // events added in produceEventToNewPartition(). But they must include the event added to the new partition.
    assertTrueEventually(() -> assertThat(sinkList).contains(event));
}
Also used : Entry(java.util.Map.Entry) SimpleImmutableEntry(java.util.AbstractMap.SimpleImmutableEntry) Properties(java.util.Properties) Job(com.hazelcast.jet.Job) JobConfig(com.hazelcast.jet.config.JobConfig) Pipeline(com.hazelcast.jet.pipeline.Pipeline) ParallelJVMTest(com.hazelcast.test.annotation.ParallelJVMTest) QuickTest(com.hazelcast.test.annotation.QuickTest) Test(org.junit.Test)
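
produceEventToNewPartition is another helper not shown here. The sketch below is only a guess at its shape, written against the plain Kafka producer API: it writes a single record directly into the newly added partition and returns the entry so the test can assert it reaches the sink. The broker address and value format are assumptions.

import java.util.Map.Entry;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import static com.hazelcast.jet.Util.entry;

// Hypothetical reconstruction of the test helper.
private Entry<Integer, String> produceEventToNewPartition(int partition) throws Exception {
    Properties props = new Properties();
    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    try (KafkaProducer<Integer, String> producer = new KafkaProducer<>(props)) {
        String value = "new-partition-" + partition;
        // Target the new partition explicitly so the record cannot land on an old one.
        producer.send(new ProducerRecord<>(topic1Name, partition, partition, value)).get();
        return entry(partition, value);
    }
}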

Example 10 with JobConfig

Use of com.hazelcast.jet.config.JobConfig in project hazelcast by hazelcast.

Class StreamKafkaPTest, method integrationTest.

private void integrationTest(ProcessingGuarantee guarantee) throws Exception {
    int messageCount = 20;
    HazelcastInstance[] instances = new HazelcastInstance[2];
    Arrays.setAll(instances, i -> createHazelcastInstance());
    Pipeline p = Pipeline.create();
    p.readFrom(KafkaSources.kafka(properties(), topic1Name, topic2Name)).withoutTimestamps().writeTo(Sinks.list("sink"));
    JobConfig config = new JobConfig();
    config.setProcessingGuarantee(guarantee);
    config.setSnapshotIntervalMillis(500);
    Job job = instances[0].getJet().newJob(p, config);
    sleepSeconds(3);
    for (int i = 0; i < messageCount; i++) {
        kafkaTestSupport.produce(topic1Name, i, Integer.toString(i));
        kafkaTestSupport.produce(topic2Name, i - messageCount, Integer.toString(i - messageCount));
    }
    IList<Object> list = instances[0].getList("sink");
    assertTrueEventually(() -> {
        assertEquals(messageCount * 2, list.size());
        for (int i = 0; i < messageCount; i++) {
            Entry<Integer, String> entry1 = createEntry(i);
            Entry<Integer, String> entry2 = createEntry(i - messageCount);
            assertTrue("missing entry: " + entry1, list.contains(entry1));
            assertTrue("missing entry: " + entry2, list.contains(entry2));
        }
    }, 15);
    if (guarantee != ProcessingGuarantee.NONE) {
        // wait until a new snapshot appears
        JobRepository jr = new JobRepository(instances[0]);
        long currentMax = jr.getJobExecutionRecord(job.getId()).snapshotId();
        assertTrueEventually(() -> {
            JobExecutionRecord jobExecutionRecord = jr.getJobExecutionRecord(job.getId());
            assertNotNull("jobExecutionRecord == null", jobExecutionRecord);
            long newMax = jobExecutionRecord.snapshotId();
            assertTrue("no snapshot produced", newMax > currentMax);
            System.out.println("snapshot " + newMax + " found, previous was " + currentMax);
        });
        // Bring down one member. The job should restart and drain the additional items
        // (and possibly re-emit some of the previous items as duplicates).
        instances[1].getLifecycleService().terminate();
        Thread.sleep(500);
        for (int i = messageCount; i < 2 * messageCount; i++) {
            kafkaTestSupport.produce(topic1Name, i, Integer.toString(i));
            kafkaTestSupport.produce(topic2Name, i - messageCount, Integer.toString(i - messageCount));
        }
        assertTrueEventually(() -> {
            assertTrue("Not all messages were received", list.size() >= messageCount * 4);
            for (int i = 0; i < 2 * messageCount; i++) {
                Entry<Integer, String> entry1 = createEntry(i);
                Entry<Integer, String> entry2 = createEntry(i - messageCount);
                assertTrue("missing entry: " + entry1.toString(), list.contains(entry1));
                assertTrue("missing entry: " + entry2.toString(), list.contains(entry2));
            }
        }, 10);
    }
    assertFalse(job.getFuture().isDone());
    // cancel the job
    job.cancel();
    assertTrueEventually(() -> assertTrue(job.getFuture().isDone()));
}
Also used : JobRepository(com.hazelcast.jet.impl.JobRepository) JobExecutionRecord(com.hazelcast.jet.impl.JobExecutionRecord) JobConfig(com.hazelcast.jet.config.JobConfig) Pipeline(com.hazelcast.jet.pipeline.Pipeline) HazelcastInstance(com.hazelcast.core.HazelcastInstance) Job(com.hazelcast.jet.Job)
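
createEntry is likewise a helper that does not appear on this page. Assuming it simply pairs a key with its decimal string form (which is what the producer loop above writes), a minimal version would be:

import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Map.Entry;

// Assumed shape: key i paired with Integer.toString(i), matching kafkaTestSupport.produce(topic, i, Integer.toString(i)).
private static Entry<Integer, String> createEntry(int i) {
    return new SimpleImmutableEntry<>(i, Integer.toString(i));
}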

Aggregations

JobConfig (com.hazelcast.jet.config.JobConfig) 248
Test (org.junit.Test) 194
Job (com.hazelcast.jet.Job) 160
ParallelJVMTest (com.hazelcast.test.annotation.ParallelJVMTest) 111
QuickTest (com.hazelcast.test.annotation.QuickTest) 109
Pipeline (com.hazelcast.jet.pipeline.Pipeline) 68
HazelcastInstance (com.hazelcast.core.HazelcastInstance) 64
MockPS (com.hazelcast.jet.core.TestProcessors.MockPS) 46
Assert.assertEquals (org.junit.Assert.assertEquals) 43
Category (org.junit.experimental.categories.Category) 43
DAG (com.hazelcast.jet.core.DAG) 41
JobRepository (com.hazelcast.jet.impl.JobRepository) 40
List (java.util.List) 36
NoOutputSourceP (com.hazelcast.jet.core.TestProcessors.NoOutputSourceP) 35
Config (com.hazelcast.config.Config) 33
Assert.assertTrue (org.junit.Assert.assertTrue) 32
ArrayList (java.util.ArrayList) 30
RUNNING (com.hazelcast.jet.core.JobStatus.RUNNING) 27
Sinks (com.hazelcast.jet.pipeline.Sinks) 27
RunWith (org.junit.runner.RunWith) 27