Example usage of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST from the hazelcast-jet project (by hazelcast),
taken from class WindowAggregateTransform_IntegrationTest, method test_aggregate2_with_aggregateBuilder:
@Test
public void test_aggregate2_with_aggregateBuilder() {
    // Populate both journaled source maps; the map key doubles as the event
    // timestamp, and the entry at t=10 forces the earlier windows to close.
    IMap<Long, String> map = instance.getMap("source");
    map.put(0L, "foo");
    map.put(2L, "baz");
    map.put(10L, "flush-item");
    IMap<Long, String> map2 = instance.getMap("source1");
    map2.put(0L, "faa");
    map2.put(2L, "buu");
    map2.put(10L, "flush-item");

    Pipeline p = Pipeline.create();
    // Chain addTimestamps into the assignment instead of calling it on a
    // separate line and discarding its result — the pipeline API is fluent,
    // and this matches the style used by the other tests in this class.
    StreamStage<Entry<Long, String>> stage1 =
            p.drawFrom(Sources.<Long, String>mapJournal("source1", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0);
    WindowAggregateBuilder<Entry<Long, String>> b =
            p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .window(WindowDefinition.tumbling(2))
             .aggregateBuilder();
    Tag<Entry<Long, String>> tag0 = b.tag0();
    Tag<Entry<Long, String>> tag1 = b.add(stage1);
    b.build(AggregateOperation
                    .withCreate(TwoBags::twoBags)
                    .andAccumulate(tag0, (acc, item0) -> acc.bag0().add(item0))
                    .andAccumulate(tag1, (acc, item1) -> acc.bag1().add(item1))
                    .andCombine(TwoBags::combineWith)
                    .andDeduct(TwoBags::deduct)
                    .andFinish(TwoBags::finish))
     .peek()
     .drainTo(Sinks.list("sink"));

    instance.newJob(p);
    // Expect one result per tumbling window: [0,2) combines "foo"/"faa",
    // [2,4) combines "baz"/"buu"; the t=10 items only flush the windows.
    assertTrueEventually(() -> assertEquals(
            listToString(asList(
                    new TimestampedItem<>(2, TwoBags.twoBags(
                            asList(entry(0L, "foo")), asList(entry(0L, "faa")))),
                    new TimestampedItem<>(4, TwoBags.twoBags(
                            asList(entry(2L, "baz")), asList(entry(2L, "buu")))))),
            listToString(instance.getHazelcastInstance().getList("sink"))), 5);
}
Example usage of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST from the hazelcast-jet project (by hazelcast),
taken from class WindowGroupTransform_IntegrationTest, method testWindowDefinition:
@Test
public void testWindowDefinition() {
    // A stage built with a given window definition must report back that
    // exact definition from windowDefinition().
    SlidingWindowDef windowDef = WindowDefinition.tumbling(2);
    Pipeline p = Pipeline.create();
    StageWithGroupingAndWindow<Entry<Long, String>, Character> stage =
            p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
             .groupingKey(entry -> entry.getValue().charAt(0))
             .window(windowDef);
    assertEquals(windowDef, stage.windowDefinition());
}
Example usage of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST from the hazelcast-jet project (by hazelcast),
taken from class WindowGroupTransform_IntegrationTest, method testSliding_windowFirst_aggregate3:
@Test
public void testSliding_windowFirst_aggregate3() {
    // Fill three journaled source maps; each key is the event timestamp and
    // the t=10 "flush-item" closes the earlier windows.
    IMap<Long, String> src0 = instance.getMap("source");
    src0.put(0L, "foo");
    src0.put(2L, "taz");
    src0.put(10L, "flush-item");
    IMap<Long, String> src1 = instance.getMap("source1");
    src1.put(0L, "faa");
    src1.put(2L, "tuu");
    src1.put(10L, "flush-item");
    IMap<Long, String> src2 = instance.getMap("source2");
    src2.put(0L, "fzz");
    src2.put(2L, "tcc");
    src2.put(10L, "flush-item");

    Pipeline p = Pipeline.create();
    StreamStageWithGrouping<Entry<Long, String>, Character> stage1 =
            p.drawFrom(Sources.<Long, String>mapJournal("source1", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .groupingKey(entry -> entry.getValue().charAt(0));
    StreamStageWithGrouping<Entry<Long, String>, Character> stage2 =
            p.drawFrom(Sources.<Long, String>mapJournal("source2", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .groupingKey(entry -> entry.getValue().charAt(0));
    // Window first, then group by the first character and co-aggregate the
    // three sources into ThreeBags per (window, key).
    p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
     .addTimestamps(Entry::getKey, 0)
     .window(WindowDefinition.tumbling(2))
     .groupingKey(entry -> entry.getValue().charAt(0))
     .aggregate3(stage1, stage2, toThreeBags())
     .peek()
     .drainTo(Sinks.list("sink"));

    instance.newJob(p);
    // Window [0,2) groups under 'f', window [2,4) under 't'; each bag holds
    // the matching entry from its source.
    assertTrueEventually(() -> assertEquals(
            listToString(asList(
                    new TimestampedEntry<>(2, 'f', ThreeBags.threeBags(
                            asList(entry(0L, "foo")),
                            asList(entry(0L, "faa")),
                            asList(entry(0L, "fzz")))),
                    new TimestampedEntry<>(4, 't', ThreeBags.threeBags(
                            asList(entry(2L, "taz")),
                            asList(entry(2L, "tuu")),
                            asList(entry(2L, "tcc")))))),
            listToString(instance.getHazelcastInstance().getList("sink"))), 5);
}
Example usage of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST from the hazelcast-jet project (by hazelcast),
taken from class HazelcastConnectorTest, method when_streamCache_withFilterAndProjection:
@Test
public void when_streamCache_withFilterAndProjection() {
    // Stream the cache's event journal, dropping the event whose key is 0
    // and projecting every remaining event to its key.
    DAG dag = new DAG();
    Vertex sourceVertex = dag.newVertex("source",
            SourceProcessors.<Integer, Integer, Integer>streamCacheP(
                    streamSourceName,
                    event -> !event.getKey().equals(0),
                    EventJournalCacheEvent::getKey,
                    START_FROM_OLDEST,
                    wmGenParams(i -> i, limitingLag(0), noThrottling(), 10_000)));
    Vertex sinkVertex = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(sourceVertex, sinkVertex));

    Job job = jetInstance.newJob(dag);
    ICacheJet<Integer, Integer> sourceCache =
            jetInstance.getCacheManager().getCache(streamSourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i));

    // Everything except key 0 should reach the sink list.
    assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName));
    assertFalse(jetInstance.getList(streamSinkName).contains(0));
    assertTrue(jetInstance.getList(streamSinkName).contains(1));
    job.cancel();
}
Example usage of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST from the hazelcast project (by hazelcast),
taken from class AsyncTransformUsingServiceBatchP_IntegrationTest, method test_pipelineApi_mapNotPartitioned:
@Test
public void test_pipelineApi_mapNotPartitioned() {
    // Read the map's event journal, batch-transform the new values
    // asynchronously without partitioning, and write results to the sink list.
    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(Sources.mapJournal(journaledMap, START_FROM_OLDEST,
                    EventJournalMapEvent::getNewValue, alwaysTrue()))
            .withoutTimestamps()
            .mapUsingServiceAsyncBatched(serviceFactory, 128,
                    transformNotPartitionedFn(i -> i + "-1"))
            .setLocalParallelism(2)
            .writeTo(Sinks.list(sinkList));

    instance().getJet().newJob(pipeline, jobConfig);
    assertResult(i -> Stream.of(i + "-1"), NUM_ITEMS);
}
Aggregations