Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
The class SpringServiceFactoriesTest, method testMapBatchUsingSpringBean.
@Test
public void testMapBatchUsingSpringBean() {
Pipeline pipeline = Pipeline.create();
pipeline.readFrom(TestSources.items(1L, 2L, 3L, 4L, 5L, 6L))
        .mapUsingService(bean("calculator"), Calculator::multiply)
        .writeTo(assertAnyOrder(asList(-1L, -2L, -3L, -4L, -5L, -6L)));
jet.newJob(pipeline).join();
}
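The calculator bean resolved by bean("calculator") is not shown on this page. As a rough sketch, inferred from the asserted outputs (items 1..6 become -1..-6, and the filter tests below keep only even values), the bean might look like the following; the actual class in the Hazelcast test suite may differ:

import org.springframework.stereotype.Component;

// Hypothetical reconstruction of the "calculator" Spring bean used by these tests.
// multiply(...) negates its argument and filter(...) keeps even values, which
// matches the results asserted in the pipelines above and below.
@Component("calculator")
public class Calculator {

    public long multiply(long value) {
        return -value;
    }

    public boolean filter(long value) {
        return value % 2 == 0;
    }
}

Because mapUsingService and filterUsingService pass the service instance as the first argument, the unbound references Calculator::multiply and Calculator::filter fit the expected (service, item) signatures.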
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
The class SpringServiceFactoriesTest, method testFilterStreamUsingSpringBean.
@Test
public void testFilterStreamUsingSpringBean() {
Pipeline pipeline = Pipeline.create();
pipeline.readFrom(TestSources.itemStream(100))
        .withNativeTimestamps(0)
        .map(SimpleEvent::sequence)
        .filterUsingService(bean("calculator"), Calculator::filter)
        .writeTo(assertCollectedEventually(10, c -> {
            assertTrue(c.size() > 100);
            c.forEach(i -> assertEquals(0, i % 2));
        }));
Job job = jet.newJob(pipeline);
assertJobCompleted(job);
}
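The assertJobCompleted helper belongs to the test class and is not part of this excerpt. The assertCollectedEventually sink stops a streaming job by throwing an AssertionCompletedException once its assertion has passed, so the helper presumably verifies that the job failed with exactly that exception. A minimal sketch of such a check, assuming the JUnit 4 assertions already used on this page:

import java.util.concurrent.CompletionException;
import com.hazelcast.jet.Job;
import com.hazelcast.jet.pipeline.test.AssertionCompletedException;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

// Hypothetical version of the assertJobCompleted helper: success means the job
// terminated with the AssertionCompletedException thrown by assertCollectedEventually.
private static void assertJobCompleted(Job job) {
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job completed with unexpected error: " + e.getCause(),
                errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}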
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
The class SpringServiceFactoriesTest, method testFilterBatchUsingSpringBean.
@Test
public void testFilterBatchUsingSpringBean() {
Pipeline pipeline = Pipeline.create();
pipeline.readFrom(TestSources.items(1L, 2L, 3L, 4L, 5L, 6L))
        .filterUsingService(bean("calculator"), Calculator::filter)
        .writeTo(assertAnyOrder(asList(2L, 4L, 6L)));
jet.newJob(pipeline).join();
}
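The bean(...) factory used in all three Spring tests is presumably brought in by a static import along the lines of:

// Assumed static import; the exact package depends on the Hazelcast version in use.
import static com.hazelcast.spring.jet.JetSpringServiceFactories.bean;

It creates a ServiceFactory that looks the named bean up in the Spring application context on each member, so the pipeline stages above can call methods on the locally resolved bean instance.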
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
The class StreamKafkaPTest, method when_duplicateTopicsProvide_then_uniqueTopicsSubscribed.
@Test
public void when_duplicateTopicsProvide_then_uniqueTopicsSubscribed() {
HazelcastInstance[] instances = instances();
assertClusterSizeEventually(2, instances);
// need a new topic because we want only 2 partitions
String topic = randomString();
kafkaTestSupport.createTopic(topic, 2);
Pipeline p = Pipeline.create();
// Pass the same topic twice
p.readFrom(KafkaSources.kafka(properties(), topic, topic))
 .withoutTimestamps()
 .setLocalParallelism(1)
 .writeTo(Sinks.list("sink"));
JobConfig config = new JobConfig();
Job job = instances[0].getJet().newJob(p, config);
assertJobStatusEventually(job, JobStatus.RUNNING, 10);
int messageCount = 1000;
for (int i = 0; i < messageCount; i++) {
kafkaTestSupport.produce(topic, i, Integer.toString(i));
}
IList<Object> list = instances[0].getList("sink");
try {
// Wait for all messages
assertTrueEventually(() -> assertThat(list).hasSize(messageCount), 15);
// Check that no further messages arrive (i.e. no duplicates)
assertTrueAllTheTime(() -> assertThat(list).hasSize(messageCount), 1);
} finally {
job.cancel();
}
}
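Both Kafka tests read their consumer configuration from a properties() helper that is not part of this excerpt. A minimal sketch of the kind of configuration KafkaSources.kafka expects here, assuming the Integer keys and String values implied by produce(topic, i, Integer.toString(i)); the broker address is an assumption, since the real suite obtains it from kafkaTestSupport:

import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

// Hypothetical stand-in for the properties() helper used by these tests.
private Properties properties() {
    Properties props = new Properties();
    props.setProperty("bootstrap.servers", "localhost:9092"); // assumed broker address
    props.setProperty("key.deserializer", IntegerDeserializer.class.getCanonicalName());
    props.setProperty("value.deserializer", StringDeserializer.class.getCanonicalName());
    props.setProperty("auto.offset.reset", "earliest");
    return props;
}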
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
The class StreamKafkaPTest, method when_projectionFunctionProvided_thenAppliedToReadRecords.
@Test
public void when_projectionFunctionProvided_thenAppliedToReadRecords() {
int messageCount = 20;
Pipeline p = Pipeline.create();
p.readFrom(KafkaSources.<Integer, String, String>kafka(properties(), rec -> rec.value() + "-x", topic1Name))
 .withoutTimestamps()
 .writeTo(Sinks.list("sink"));
instance().getJet().newJob(p);
sleepAtLeastSeconds(3);
for (int i = 0; i < messageCount; i++) {
kafkaTestSupport.produce(topic1Name, i, Integer.toString(i));
}
IList<String> list = instance().getList("sink");
assertTrueEventually(() -> {
assertEquals(messageCount, list.size());
for (int i = 0; i < messageCount; i++) {
String value = i + "-x";
assertTrue("missing entry: " + value, list.contains(value));
}
}, 5);
}
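The three type parameters of KafkaSources.<Integer, String, String>kafka(...) are the record key type, the record value type, and the type produced by the projection. Written out with explicit types (a sketch, not part of the original test), the inline projection above looks like this:

import com.hazelcast.function.FunctionEx;
import org.apache.kafka.clients.consumer.ConsumerRecord;

// The projection receives each raw ConsumerRecord and emits only the transformed
// value, so keys and record metadata never reach the sink list.
FunctionEx<ConsumerRecord<Integer, String>, String> projectionFn =
        rec -> rec.value() + "-x";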