Example usage of com.hazelcast.jet.core.Vertex in the hazelcast project,
from the class HazelcastConnectorTest, method when_readMap_withProjectionToNull_then_nullsSkipped.
@Test
public void when_readMap_withProjectionToNull_then_nullsSkipped() {
    // Fill the source map: even keys carry a null projected value, odd keys
    // carry their decimal string form.
    IMap<Integer, Entry<Integer, String>> map = instance().getMap(sourceName);
    for (int i = 0; i < ENTRY_COUNT; i++) {
        map.put(i, entry(i, i % 2 == 0 ? null : String.valueOf(i)));
    }

    // Read the map projecting onto the "value" attribute; entries whose
    // projection is null must be skipped by the source, not emitted.
    DAG dag = new DAG();
    Vertex readVertex = dag.newVertex("source",
            readMapP(sourceName, new TruePredicate<>(), Projections.singleAttribute("value")));
    Vertex writeVertex = dag.newVertex("sink", writeListP(sinkName));
    dag.edge(between(readVertex, writeVertex));

    instance().getJet().newJob(dag).join();

    checkContents_projectedToNull(sinkName);
}
Example usage of com.hazelcast.jet.core.Vertex in the hazelcast project,
from the class HazelcastConnectorTest, method when_streamMap_withFilterAndProjection.
@Test
public void when_streamMap_withFilterAndProjection() {
    // Stream the map's event journal, dropping events for key 0 and
    // projecting each event down to just its key.
    DAG dag = new DAG();
    Vertex journalVertex = dag.newVertex("source",
            SourceProcessors.<Integer, Integer, Integer>streamMapP(
                    streamSourceName,
                    event -> event.getKey() != 0,
                    EventJournalMapEvent::getKey,
                    START_FROM_OLDEST,
                    eventTimePolicy(i -> i, limitingLag(0), 1, 0, 10_000)));
    Vertex listVertex = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(journalVertex, listVertex));

    Job job = instance().getJet().newJob(dag);

    IMap<Integer, Integer> map = instance().getMap(streamSourceName);
    for (int i = 0; i < ENTRY_COUNT; i++) {
        map.put(i, i);
    }

    // Key 0 is filtered out, so exactly ENTRY_COUNT - 1 keys reach the sink.
    IList<Integer> results = instance().getList(streamSinkName);
    assertSizeEventually(ENTRY_COUNT - 1, results);
    assertFalse(results.contains(0));
    assertTrue(results.contains(1));

    job.cancel();
}
Example usage of com.hazelcast.jet.core.Vertex in the hazelcast project,
from the class HazelcastConnectorTest, method test_defaultFilter_cacheJournal.
@Test
public void test_defaultFilter_cacheJournal() {
    // Stream the cache's event journal with the default event filter, which
    // passes ADDED and UPDATED events but drops REMOVED events.
    DAG dag = new DAG();
    Vertex journalVertex = dag.newVertex("source",
            streamCacheP(streamSourceName, START_FROM_OLDEST,
                    eventTimePolicy(Entry<Integer, Integer>::getValue, limitingLag(0), 1, 0, 10_000)));
    Vertex listVertex = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(journalVertex, listVertex));

    Job job = instance().getJet().newJob(dag);

    ICache<Object, Object> cache = instance().getCacheManager().getCache(streamSourceName);
    cache.put(1, 1);    // ADDED   -> passes the default filter
    cache.remove(1);    // REMOVED -> dropped by the default filter
    cache.put(1, 2);    // UPDATED -> passes the default filter

    // Only the ADDED and UPDATED events reach the sink, in journal order.
    IList<Entry<Integer, Integer>> sinkList = instance().getList(streamSinkName);
    assertTrueEventually(() -> {
        assertEquals(2, sinkList.size());
        Entry<Integer, Integer> added = sinkList.get(0);
        assertEquals(Integer.valueOf(1), added.getKey());
        assertEquals(Integer.valueOf(1), added.getValue());
        Entry<Integer, Integer> updated = sinkList.get(1);
        assertEquals(Integer.valueOf(1), updated.getKey());
        assertEquals(Integer.valueOf(2), updated.getValue());
    });

    job.cancel();
}
Example usage of com.hazelcast.jet.core.Vertex in the hazelcast project,
from the class HazelcastConnectorTest, method when_streamCache.
@Test
public void when_streamCache() {
    // Stream every event from the cache journal into a sink list.
    DAG dag = new DAG();
    Vertex journalVertex = dag.newVertex("source",
            streamCacheP(streamSourceName, START_FROM_OLDEST,
                    eventTimePolicy(Entry<Integer, Integer>::getValue, limitingLag(0), 1, 0, 10_000)));
    Vertex listVertex = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(journalVertex, listVertex));

    Job job = instance().getJet().newJob(dag);

    ICache<Integer, Integer> cache = instance().getCacheManager().getCache(streamSourceName);
    for (int i = 0; i < ENTRY_COUNT; i++) {
        cache.put(i, i);
    }

    // Every put produces one journal event, so the sink ends up with ENTRY_COUNT items.
    assertSizeEventually(ENTRY_COUNT, instance().getList(streamSinkName));

    job.cancel();
}
Example usage of com.hazelcast.jet.core.Vertex in the hazelcast project,
from the class HazelcastConnectorTest, method when_readMap_and_writeMap.
@Test
public void when_readMap_and_writeMap() {
    // Seed the source map with ENTRY_COUNT identity entries (key == value).
    IMap<Integer, Integer> map = instance().getMap(sourceName);
    for (int i = 0; i < ENTRY_COUNT; i++) {
        map.put(i, i);
    }

    // Pipe the whole source map into the sink map.
    DAG dag = new DAG();
    Vertex readVertex = dag.newVertex("source", readMapP(sourceName));
    Vertex writeVertex = dag.newVertex("sink", writeMapP(sinkName));
    dag.edge(between(readVertex, writeVertex));

    instance().getJet().newJob(dag).join();

    // Every source entry must land in the sink map.
    assertEquals(ENTRY_COUNT, instance().getMap(sinkName).size());
}
Aggregations