Use of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST in project hazelcast by hazelcast.
The class HazelcastRemoteConnectorTest, method when_streamRemoteMap_withPredicateAndProjection:
@Test
public void when_streamRemoteMap_withPredicateAndProjection() {
    DAG dag = new DAG();
    Vertex source = dag.newVertex(SOURCE_NAME, SourceProcessors.<Integer, Integer, Integer>streamRemoteMapP(
            SOURCE_NAME, clientConfig,
            event -> event.getKey() != 0,
            EventJournalMapEvent::getKey,
            START_FROM_OLDEST,
            eventTimePolicy(i -> i, limitingLag(0), 1, 0, 10_000)));
    Vertex sink = dag.newVertex(SINK_NAME, writeListP(SINK_NAME));
    dag.edge(between(source, sink));

    Job job = localHz.getJet().newJob(dag);
    populateMap(remoteHz.getMap(SOURCE_NAME));

    assertSizeEventually(ITEM_COUNT - 1, localHz.getList(SINK_NAME));
    assertFalse(localHz.getList(SINK_NAME).contains(0));
    assertTrue(localHz.getList(SINK_NAME).contains(1));
    job.cancel();
}
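streamRemoteMapP reads the remote map's event journal, so the journal has to be enabled for SOURCE_NAME on the remote cluster, otherwise the entries written by populateMap never reach the job. A minimal sketch of the remote-member configuration, assuming the Hazelcast 5.x config API; the capacity value is illustrative:

import com.hazelcast.config.Config;

// Sketch only: without an event journal on the remote member, streamRemoteMapP(SOURCE_NAME, ...)
// has no events to read. The capacity is an illustrative value, not taken from the test.
static Config remoteMemberConfig(String sourceName) {
    Config config = new Config();
    config.getMapConfig(sourceName)
          .getEventJournalConfig()
          .setEnabled(true)
          .setCapacity(10_000);
    return config;
}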
Use of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST in project hazelcast-jet by hazelcast.
The class WindowAggregateTransform_IntegrationTest, method testSession:
@Test
public void testSession() {
    IMap<Long, String> map = instance.getMap("source");
    map.put(0L, "foo");
    map.put(3L, "bar");
    map.put(4L, "baz");
    map.put(10L, "flush-item");

    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
     .addTimestamps(Entry::getKey, 0)
     .window(WindowDefinition.session(2))
     .aggregate(toSet(), (winStart, winEnd, result) -> new WindowResult<>(winStart, winEnd, "", result))
     .drainTo(Sinks.list("sink"));

    instance.newJob(p);

    assertTrueEventually(() -> assertEquals(
            listToString(asList(
                    new WindowResult<>(0, 2, "", set(entry(0L, "foo"))),
                    new WindowResult<>(3, 6, "", set(entry(3L, "bar"), entry(4L, "baz"))))),
            listToString(instance.getHazelcastInstance().getList("sink"))), 5);
}
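With session(2), the entry at timestamp 0 forms one window and the entries at 3 and 4 merge into another; the item at timestamp 10 only advances the watermark so both windows get emitted. The entry, set and listToString helpers used by the assertion are not shown in the snippet; a plausible sketch of them, with names and sorting behavior assumed rather than taken from the project (sorting the rendered items makes the comparison independent of arrival order):

import java.util.AbstractMap;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical test helpers matching how the assertions on this page use them.
static <K, V> Map.Entry<K, V> entry(K key, V value) {
    return new AbstractMap.SimpleImmutableEntry<>(key, value);
}

@SafeVarargs
static <T> Set<T> set(T... items) {
    return new HashSet<>(Arrays.asList(items));
}

static String listToString(List<?> list) {
    return list.stream()
               .map(Object::toString)
               .sorted()
               .collect(Collectors.joining("\n"));
}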
Use of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST in project hazelcast-jet by hazelcast.
The class WindowGroupTransform_IntegrationTest, method testSliding_windowFirst_aggregate2:
@Test
public void testSliding_windowFirst_aggregate2() {
    IMap<Long, String> map = instance.getMap("source");
    // key is timestamp
    map.put(0L, "foo");
    map.put(2L, "taz");
    map.put(10L, "flush-item");
    IMap<Long, String> map2 = instance.getMap("source1");
    // key is timestamp
    map2.put(0L, "faa");
    map2.put(2L, "tuu");
    map2.put(10L, "flush-item");

    Pipeline p = Pipeline.create();
    StreamStageWithGrouping<Entry<Long, String>, Character> stage1 =
            p.drawFrom(Sources.<Long, String>mapJournal("source1", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .groupingKey(entry -> entry.getValue().charAt(0));
    p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
     .addTimestamps(Entry::getKey, 0)
     .window(WindowDefinition.tumbling(2))
     .groupingKey(entry -> entry.getValue().charAt(0))
     .aggregate2(stage1, toTwoBags())
     .peek()
     .drainTo(Sinks.list("sink"));

    instance.newJob(p);

    assertTrueEventually(() -> {
        assertEquals(
                listToString(asList(
                        new TimestampedEntry<>(2, 'f', TwoBags.twoBags(asList(entry(0L, "foo")), asList(entry(0L, "faa")))),
                        new TimestampedEntry<>(4, 't', TwoBags.twoBags(asList(entry(2L, "taz")), asList(entry(2L, "tuu")))))),
                listToString(instance.getHazelcastInstance().getList("sink")));
    }, 5);
}
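toTwoBags() is a test helper that is not shown here. A plausible shape, modeled on the ThreeBags builder used in the next example; the no-arg twoBags factory, the bag0/bag1 accessors, combineWith and finish are assumptions inferred from how TwoBags and ThreeBags are used above:

// Hypothetical sketch of toTwoBags(): bag0 collects items from the windowed stage,
// bag1 collects items from stage1, mirroring the ThreeBags aggregate operation below.
static AggregateOperation2<Entry<Long, String>, Entry<Long, String>,
        TwoBags<Entry<Long, String>, Entry<Long, String>>,
        TwoBags<Entry<Long, String>, Entry<Long, String>>> toTwoBags() {
    return AggregateOperation
            .withCreate(TwoBags::<Entry<Long, String>, Entry<Long, String>>twoBags)
            .<Entry<Long, String>>andAccumulate0((acc, item) -> acc.bag0().add(item))
            .<Entry<Long, String>>andAccumulate1((acc, item) -> acc.bag1().add(item))
            .andCombine(TwoBags::combineWith)
            .andFinish(TwoBags::finish);
}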
Use of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST in project hazelcast-jet by hazelcast.
The class WindowGroupTransform_IntegrationTest, method testSliding_windowFirst_aggregate3_with_aggregateBuilder:
@Test
public void testSliding_windowFirst_aggregate3_with_aggregateBuilder() {
    IMap<Long, String> map = instance.getMap("source");
    // key is timestamp
    map.put(0L, "foo");
    map.put(2L, "taz");
    map.put(10L, "flush-item");
    IMap<Long, String> map1 = instance.getMap("source1");
    // key is timestamp
    map1.put(0L, "faa");
    map1.put(2L, "tuu");
    map1.put(10L, "flush-item");
    IMap<Long, String> map2 = instance.getMap("source2");
    // key is timestamp
    map2.put(0L, "fzz");
    map2.put(2L, "tcc");
    map2.put(10L, "flush-item");

    Pipeline p = Pipeline.create();
    StreamStageWithGrouping<Entry<Long, String>, Character> stage1 =
            p.drawFrom(Sources.<Long, String>mapJournal("source1", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .groupingKey(entry -> entry.getValue().charAt(0));
    StreamStageWithGrouping<Entry<Long, String>, Character> stage2 =
            p.drawFrom(Sources.<Long, String>mapJournal("source2", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .groupingKey(entry -> entry.getValue().charAt(0));
    WindowGroupAggregateBuilder<Entry<Long, String>, Character> b =
            p.drawFrom(Sources.<Long, String>mapJournal("source", START_FROM_OLDEST))
             .addTimestamps(Entry::getKey, 0)
             .window(WindowDefinition.tumbling(2))
             .groupingKey(entry -> entry.getValue().charAt(0))
             .aggregateBuilder();
    Tag<Entry<Long, String>> tag0 = b.tag0();
    Tag<Entry<Long, String>> tag1 = b.add(stage1);
    Tag<Entry<Long, String>> tag2 = b.add(stage2);
    b.build(AggregateOperation
            .withCreate(ThreeBags::threeBags)
            .andAccumulate(tag0, (acc, item0) -> acc.bag0().add(item0))
            .andAccumulate(tag1, (acc, item1) -> acc.bag1().add(item1))
            .andAccumulate(tag2, (acc, item2) -> acc.bag2().add(item2))
            .andCombine(ThreeBags::combineWith)
            .andDeduct(ThreeBags::deduct)
            .andFinish(ThreeBags::finish))
     .peek()
     .drainTo(Sinks.list("sink"));

    instance.newJob(p);

    assertTrueEventually(() -> {
        assertEquals(
                listToString(asList(
                        new TimestampedEntry<>(2, 'f', ThreeBags.threeBags(
                                asList(entry(0L, "foo")), asList(entry(0L, "faa")), asList(entry(0L, "fzz")))),
                        new TimestampedEntry<>(4, 't', ThreeBags.threeBags(
                                asList(entry(2L, "taz")), asList(entry(2L, "tuu")), asList(entry(2L, "tcc")))))),
                listToString(instance.getHazelcastInstance().getList("sink")));
    }, 5);
}
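All of the mapJournal sources in these hazelcast-jet tests require the event journal to be enabled for the source maps on the embedded member. A minimal setup sketch, assuming the Hazelcast 3.x EventJournalConfig API that this Jet generation used; the wildcard map name and capacity are illustrative, not taken from the tests:

import com.hazelcast.config.EventJournalConfig;
import com.hazelcast.jet.Jet;
import com.hazelcast.jet.JetInstance;
import com.hazelcast.jet.config.JetConfig;

// Sketch only: enable the event journal for every map named "source*" before creating the instance,
// so Sources.mapJournal(...) has a journal to read from.
static JetInstance newInstanceWithJournaledSourceMaps() {
    JetConfig config = new JetConfig();
    config.getHazelcastConfig().addEventJournalConfig(
            new EventJournalConfig()
                    .setMapName("source*")
                    .setEnabled(true)
                    .setCapacity(10_000));
    return Jet.newJetInstance(config);
}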
Use of com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST in project hazelcast-jet by hazelcast.
The class HazelcastConnectorTest, method when_streamMap_withFilterAndProjection:
@Test
public void when_streamMap_withFilterAndProjection() {
    DAG dag = new DAG();
    Vertex source = dag.newVertex("source", SourceProcessors.<Integer, Integer, Integer>streamMapP(
            streamSourceName,
            event -> event.getKey() != 0,
            EventJournalMapEvent::getKey,
            START_FROM_OLDEST,
            wmGenParams(i -> i, limitingLag(0), noThrottling(), 10_000)));
    Vertex sink = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(source, sink));

    Job job = jetInstance.newJob(dag);
    IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i));

    assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName));
    assertFalse(jetInstance.getList(streamSinkName).contains(0));
    assertTrue(jetInstance.getList(streamSinkName).contains(1));
    job.cancel();
}
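The same filtered and projected journal stream can also be expressed with the Pipeline API, which is how the other examples on this page read their maps. A rough equivalent, assuming a mapJournal overload that takes the predicate and projection in this order (the parameter order varied between Jet releases, so treat this as a sketch rather than the test's actual code):

// Sketch of a Pipeline-API equivalent of the DAG above: drop key 0, project events to their keys,
// and stream from the oldest journal entry into the sink list.
Pipeline p = Pipeline.create();
p.drawFrom(Sources.<Integer, Integer, Integer>mapJournal(
        streamSourceName,
        event -> event.getKey() != 0,
        EventJournalMapEvent::getKey,
        START_FROM_OLDEST))
 .addTimestamps(i -> i, 0)
 .drainTo(Sinks.list(streamSinkName));
jetInstance.newJob(p);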