
Example 1 with Job

Use of com.hazelcast.jet.Job in project hazelcast-jet by hazelcast.

From the class KafkaSinkTest, method when_recordLingerEnabled_then_sentOnCompletion.

@Test
public void when_recordLingerEnabled_then_sentOnCompletion() throws Exception {
    // When
    properties.setProperty("linger.ms", "3600000"); // 1 hour
    // Given
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.<Entry<String, String>>batchFromProcessor("source",
            ProcessorMetaSupplier.of(ProcessorWithEntryAndLatch::new)))
     .drainTo(KafkaSinks.kafka(properties, topic));
    Job job = instance.newJob(p);
    // the event should not appear in the topic due to linger.ms
    try (KafkaConsumer<String, String> consumer = createConsumer(brokerConnectionString, topic)) {
        assertTrueAllTheTime(() -> assertEquals(0, consumer.poll(100).count()), 2);
    }
    // Then
    ProcessorWithEntryAndLatch.isDone = true;
    job.join();
    System.out.println("Job finished");
    assertTopicContentsEventually(singletonMap("k", "v"), false);
}
Also used: Entry (java.util.Map.Entry), Job (com.hazelcast.jet.Job), Pipeline (com.hazelcast.jet.pipeline.Pipeline), Test (org.junit.Test)
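
The test relies on a properties object and a brokerConnectionString prepared elsewhere in the test fixture. A minimal sketch of what that producer configuration might look like, assuming plain String keys and values and the standard Kafka producer settings; the real fixture may differ:

// Hypothetical producer configuration for the snippet above; not taken from the project.
// Assumes: java.util.Properties, org.apache.kafka.common.serialization.StringSerializer.
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", brokerConnectionString);
properties.setProperty("key.serializer", StringSerializer.class.getCanonicalName());
properties.setProperty("value.serializer", StringSerializer.class.getCanonicalName());
// The test then raises "linger.ms" to one hour, so records are only flushed
// when the job completes and the producer is closed.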

Example 2 with Job

Use of com.hazelcast.jet.Job in project hazelcast-jet by hazelcast.

From the class StreamKafkaPTest, method integrationTest.

private void integrationTest(ProcessingGuarantee guarantee) throws Exception {
    int messageCount = 20;
    JetInstance[] instances = new JetInstance[2];
    Arrays.setAll(instances, i -> createJetMember());
    Pipeline p = Pipeline.create();
    p.drawFrom(KafkaSources.kafka(properties, topic1Name, topic2Name)).drainTo(Sinks.list("sink"));
    JobConfig config = new JobConfig();
    config.setProcessingGuarantee(guarantee);
    config.setSnapshotIntervalMillis(500);
    Job job = instances[0].newJob(p, config);
    sleepAtLeastSeconds(3);
    for (int i = 0; i < messageCount; i++) {
        produce(topic1Name, i, Integer.toString(i));
        produce(topic2Name, i - messageCount, Integer.toString(i - messageCount));
    }
    IList<Object> list = instances[0].getList("sink");
    assertTrueEventually(() -> {
        assertEquals(messageCount * 2, list.size());
        for (int i = 0; i < messageCount; i++) {
            Entry<Integer, String> entry1 = createEntry(i);
            Entry<Integer, String> entry2 = createEntry(i - messageCount);
            assertTrue("missing entry: " + entry1, list.contains(entry1));
            assertTrue("missing entry: " + entry2, list.contains(entry2));
        }
    }, 5);
    if (guarantee != ProcessingGuarantee.NONE) {
        // wait until the items are consumed and a new snapshot appears
        assertTrueEventually(() -> assertTrue(list.size() == messageCount * 2));
        IMapJet<Long, Object> snapshotsMap = instances[0].getMap(SnapshotRepository.snapshotsMapName(job.getId()));
        Long currentMax = maxSuccessfulSnapshot(snapshotsMap);
        assertTrueEventually(() -> {
            Long newMax = maxSuccessfulSnapshot(snapshotsMap);
            assertTrue("no snapshot produced", newMax != null && !newMax.equals(currentMax));
            System.out.println("snapshot " + newMax + " found, previous was " + currentMax);
        });
        // Bring down one member. The job should restart and drain the additional items
        // (and possibly re-emit some of the previous items as duplicates).
        instances[1].shutdown();
        Thread.sleep(500);
        for (int i = messageCount; i < 2 * messageCount; i++) {
            produce(topic1Name, i, Integer.toString(i));
            produce(topic2Name, i - messageCount, Integer.toString(i - messageCount));
        }
        assertTrueEventually(() -> {
            assertTrue("Not all messages were received", list.size() >= messageCount * 4);
            for (int i = 0; i < 2 * messageCount; i++) {
                Entry<Integer, String> entry1 = createEntry(i);
                Entry<Integer, String> entry2 = createEntry(i - messageCount);
                assertTrue("missing entry: " + entry1.toString(), list.contains(entry1));
                assertTrue("missing entry: " + entry2.toString(), list.contains(entry2));
            }
        }, 10);
    }
    assertFalse(job.getFuture().isDone());
    // cancel the job
    job.cancel();
    assertTrueEventually(() -> assertTrue(job.getFuture().isDone()));
}
Also used: JetInstance (com.hazelcast.jet.JetInstance), JobConfig (com.hazelcast.jet.config.JobConfig), Pipeline (com.hazelcast.jet.pipeline.Pipeline), Job (com.hazelcast.jet.Job)
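
The produce and createEntry helpers are defined elsewhere in the test class. A hypothetical sketch of how they could be implemented, assuming a KafkaProducer<Integer, String> field named producer configured against the test broker; the actual helpers in the repository may differ:

// Hypothetical helpers for the snippet above; the real test implementations may differ.
// Assumes: org.apache.kafka.clients.producer.KafkaProducer / ProducerRecord,
// java.util.AbstractMap.SimpleImmutableEntry, java.util.Map.Entry.
private void produce(String topic, int key, String value) {
    // fire-and-forget send; the test only asserts on what eventually lands in the sink list
    producer.send(new ProducerRecord<>(topic, key, value));
}

private static Entry<Integer, String> createEntry(int i) {
    return new SimpleImmutableEntry<>(i, Integer.toString(i));
}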

Example 3 with Job

Use of com.hazelcast.jet.Job in project hazelcast-jet by hazelcast.

From the class SinksTest, method mapWithEntryProcessor_when_entryIsLocked_then_entryIsNotUpdated.

@Test
public void mapWithEntryProcessor_when_entryIsLocked_then_entryIsNotUpdated() {
    // Given
    srcMap.put("key", 1);
    srcMap.lock("key");
    // When
    p.drawFrom(Sources.<String, Integer>map(srcName))
     .drainTo(Sinks.mapWithEntryProcessor(srcName, Entry::getKey,
             entry -> new IncrementEntryProcessor<>(10)));
    Job job = jet().newJob(p);
    // Then
    assertTrueEventually(() -> assertEquals(RUNNING, job.getStatus()));
    assertEquals(1, srcMap.size());
    assertEquals(1, srcMap.get("key").intValue());
    srcMap.unlock("key");
    assertTrueEventually(() -> assertEquals(11, srcMap.get("key").intValue()), 10);
    job.join();
}
Also used: BeforeClass (org.junit.BeforeClass), IntStream.range (java.util.stream.IntStream.range), IMapJet (com.hazelcast.jet.IMapJet), CacheSimpleConfig (com.hazelcast.config.CacheSimpleConfig), Util.entry (com.hazelcast.jet.Util.entry), DataSerializable (com.hazelcast.nio.serialization.DataSerializable), ClientConfig (com.hazelcast.client.config.ClientConfig), ObjectDataInput (com.hazelcast.nio.ObjectDataInput), Job (com.hazelcast.jet.Job), Config (com.hazelcast.config.Config), HazelcastInstance (com.hazelcast.core.HazelcastInstance), AfterClass (org.junit.AfterClass), AbstractStage.transformOf (com.hazelcast.jet.impl.pipeline.AbstractStage.transformOf), Set (java.util.Set), Assert.assertTrue (org.junit.Assert.assertTrue), IOException (java.io.IOException), Test (org.junit.Test), List (java.util.List), Collectors.toList (java.util.stream.Collectors.toList), IMap (com.hazelcast.core.IMap), Hazelcast (com.hazelcast.core.Hazelcast), ICache (com.hazelcast.cache.ICache), AbstractEntryProcessor (com.hazelcast.map.AbstractEntryProcessor), Entry (java.util.Map.Entry), ObjectDataOutput (com.hazelcast.nio.ObjectDataOutput), RUNNING (com.hazelcast.jet.core.JobStatus.RUNNING), Assert.assertEquals (org.junit.Assert.assertEquals)
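
The IncrementEntryProcessor referenced above is a test helper that is not shown here. A hypothetical sketch of such a processor, assuming Hazelcast's AbstractEntryProcessor and DataSerializable (both appear in the import list above); the actual class in the test suite may differ:

// Hypothetical sketch of the IncrementEntryProcessor used above; the real test class may differ.
static class IncrementEntryProcessor<K> extends AbstractEntryProcessor<K, Integer>
        implements DataSerializable {

    private int delta;

    IncrementEntryProcessor() {
    }

    IncrementEntryProcessor(int delta) {
        this.delta = delta;
    }

    @Override
    public Object process(Entry<K, Integer> entry) {
        // add delta to the current value; runs on the partition owning the key,
        // which is why the locked entry above is only updated after unlock()
        entry.setValue(entry.getValue() + delta);
        return null;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeInt(delta);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        delta = in.readInt();
    }
}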

Example 4 with Job

Use of com.hazelcast.jet.Job in project hazelcast-jet by hazelcast.

From the class SourcesTest, method fileChanges.

@Test
@Ignore("Changes on the file is not reflected as an event from the File System, needs more investigation")
public void fileChanges() throws Exception {
    // Given
    File directory = createTempDirectory();
    // this is a pre-existing file, should not be picked up
    File file = new File(directory, randomName());
    appendToFile(file, "hello", "pre-existing");
    sleepAtLeastMillis(50);
    // When
    p.drawFrom(Sources.fileWatcher(directory.getPath())).drainTo(sink);
    Job job = jet().newJob(p);
    // wait for the processor to initialize
    assertTrueEventually(() -> assertEquals(JobStatus.RUNNING, job.getStatus()));
    // Then
    // pre-existing file should not be picked up
    assertEquals(0, sinkList.size());
    appendToFile(file, "third line");
    // now, only new line should be picked up
    int nodeCount = jet().getCluster().getMembers().size();
    assertTrueEventually(() -> assertEquals(nodeCount, sinkList.size()));
}
Also used: Job (com.hazelcast.jet.Job), File (java.io.File), Ignore (org.junit.Ignore), Test (org.junit.Test)
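
The createTempDirectory and appendToFile helpers come from the test support code. A hypothetical sketch using standard java.nio.file APIs; the real helpers may differ:

// Hypothetical stand-ins for the helpers referenced above; not taken from the project.
// Assumes: java.io.File, java.io.IOException, java.nio.file.Files,
// java.nio.file.StandardOpenOption, java.util.Arrays.
private static File createTempDirectory() throws IOException {
    return Files.createTempDirectory("file-watcher-test").toFile();
}

private static void appendToFile(File file, String... lines) throws IOException {
    // create the file if missing, otherwise append the given lines to the end
    Files.write(file.toPath(), Arrays.asList(lines),
            StandardOpenOption.CREATE, StandardOpenOption.APPEND);
}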

Example 5 with Job

Use of com.hazelcast.jet.Job in project hazelcast-jet by hazelcast.

From the class StreamUtil, method executeJob.

public static void executeJob(StreamContext context, DAG dag) {
    JobConfig jobConfig = context.getJobConfig() != null ? context.getJobConfig() : new JobConfig();
    Job job = context.getJetInstance().newJob(dag, jobConfig);
    try {
        job.getFuture().get();
    } catch (InterruptedException | ExecutionException e) {
        throw rethrow(e);
    }
    context.getStreamListeners().forEach(Runnable::run);
}
Also used: Job (com.hazelcast.jet.Job), ExecutionException (java.util.concurrent.ExecutionException), JobConfig (com.hazelcast.jet.config.JobConfig)
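
The blocking pattern above (job.getFuture().get() plus exception rethrowing) can also be written with Job.join(), the call used in the earlier examples, which blocks until completion and wraps any failure in an unchecked exception. A minimal sketch, assuming the same StreamContext and DAG:

// Equivalent sketch of the method body using Job.join(); assumes the same context, dag
// and jobConfig as above.
Job job = context.getJetInstance().newJob(dag, jobConfig);
job.join();
context.getStreamListeners().forEach(Runnable::run);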

Aggregations

Job (com.hazelcast.jet.Job): 509
Test (org.junit.Test): 434
ParallelJVMTest (com.hazelcast.test.annotation.ParallelJVMTest): 248
QuickTest (com.hazelcast.test.annotation.QuickTest): 238
JobConfig (com.hazelcast.jet.config.JobConfig): 175
HazelcastInstance (com.hazelcast.core.HazelcastInstance): 154
MockPS (com.hazelcast.jet.core.TestProcessors.MockPS): 129
Pipeline (com.hazelcast.jet.pipeline.Pipeline): 111
NoOutputSourceP (com.hazelcast.jet.core.TestProcessors.NoOutputSourceP): 83
DAG (com.hazelcast.jet.core.DAG): 77
Category (org.junit.experimental.categories.Category): 71
Assert.assertEquals (org.junit.Assert.assertEquals): 70
Assert.assertTrue (org.junit.Assert.assertTrue): 66
List (java.util.List): 63
Vertex (com.hazelcast.jet.core.Vertex): 62
JobRepository (com.hazelcast.jet.impl.JobRepository): 62
NightlyTest (com.hazelcast.test.annotation.NightlyTest): 59
RunWith (org.junit.runner.RunWith): 58
Before (org.junit.Before): 56
MockP (com.hazelcast.jet.core.TestProcessors.MockP): 54
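
The most frequent co-occurrences above (JobConfig, Pipeline, JetInstance/HazelcastInstance) correspond to the typical submission pattern shown in the examples. A minimal, self-contained sketch of that pattern, assuming the pre-4.0 Jet API used throughout these examples (Jet.newJetInstance(), drawFrom/drainTo, Sources.list/Sinks.list); the names "input" and "sink" are illustrative only:

// Illustrative sketch of the typical Job lifecycle; not taken from the project.
// Assumes: com.hazelcast.jet.Jet, com.hazelcast.jet.JetInstance, com.hazelcast.jet.Job,
// com.hazelcast.jet.config.JobConfig, com.hazelcast.jet.config.ProcessingGuarantee,
// com.hazelcast.jet.pipeline.Pipeline, Sources and Sinks.
JetInstance instance = Jet.newJetInstance();
try {
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.list("input"))   // assumed pre-populated IList source
     .drainTo(Sinks.list("sink"));

    JobConfig config = new JobConfig();
    // snapshotting matters mainly for streaming jobs; shown because JobConfig
    // appears so often alongside Job in the aggregation above
    config.setProcessingGuarantee(ProcessingGuarantee.EXACTLY_ONCE);
    config.setSnapshotIntervalMillis(500);

    Job job = instance.newJob(p, config);
    job.join();                         // block until the batch job completes
} finally {
    Jet.shutdownAll();
}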