Example usage of com.hazelcast.map.EventJournalMapEvent from the hazelcast/hazelcast project.
From the class HazelcastConnectorTest, method when_streamMap_withFilterAndProjection:
@Test
public void when_streamMap_withFilterAndProjection() {
    // Two-vertex DAG: map event-journal source (filters out key 0, projects each
    // event to its key) feeding a list sink.
    DAG dag = new DAG();
    Vertex journalSource = dag.newVertex("source",
            SourceProcessors.<Integer, Integer, Integer>streamMapP(
                    streamSourceName,
                    event -> event.getKey() != 0,          // drop entries with key 0
                    EventJournalMapEvent::getKey,          // project event -> key
                    START_FROM_OLDEST,
                    eventTimePolicy(i -> i, limitingLag(0), 1, 0, 10_000)));
    Vertex listSink = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(journalSource, listSink));

    Job job = instance().getJet().newJob(dag);

    // Populate the source map after the job starts; the journal streams the puts.
    IMap<Integer, Integer> sourceMap = instance().getMap(streamSourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i));

    // Key 0 is filtered out, so exactly ENTRY_COUNT - 1 items reach the sink.
    assertSizeEventually(ENTRY_COUNT - 1, instance().getList(streamSinkName));
    assertFalse(instance().getList(streamSinkName).contains(0));
    assertTrue(instance().getList(streamSinkName).contains(1));

    job.cancel();
}
Example usage of com.hazelcast.map.EventJournalMapEvent from the hazelcast/hazelcast project.
From the class StreamEventJournalPTest, method setUp:
@Before
public void setUp() {
    // Small test instance with a fixed partition count and the event journal
    // enabled for every map.
    Config config = smallInstanceConfig();
    config.setProperty(PARTITION_COUNT.getName(), String.valueOf(NUM_PARTITIONS));
    config.getMapConfig("*")
          .getEventJournalConfig()
          .setEnabled(true)
          .setCapacity(JOURNAL_CAPACITY);

    instance = createHazelcastInstance(config);
    map = (MapProxyImpl<String, Integer>) instance.<String, Integer>getMap("test");

    // Processor under test covers all partitions, passes every event through,
    // and projects each event to its new value.
    List<Integer> partitions = IntStream.range(0, NUM_PARTITIONS).boxed().collect(toList());
    supplier = () -> new StreamEventJournalP<>(
            map, partitions, e -> true, EventJournalMapEvent::getNewValue,
            START_FROM_OLDEST, false, noEventTime());

    // Deterministic keys landing in partitions 0 and 1.
    key0 = generateKeyForPartition(instance, 0);
    key1 = generateKeyForPartition(instance, 1);
}
Example usage of com.hazelcast.map.EventJournalMapEvent from the hazelcast/hazelcast project.
From the class HazelcastRemoteConnectorTest, method when_streamRemoteMap_withPredicateAndProjection:
@Test
public void when_streamRemoteMap_withPredicateAndProjection() {
    // Two-vertex DAG: remote-map event-journal source (filters out key 0,
    // projects each event to its key) feeding a list sink.
    DAG dag = new DAG();
    Vertex remoteSource = dag.newVertex(SOURCE_NAME,
            SourceProcessors.<Integer, Integer, Integer>streamRemoteMapP(
                    SOURCE_NAME,
                    clientConfig,
                    event -> event.getKey() != 0,          // drop entries with key 0
                    EventJournalMapEvent::getKey,          // project event -> key
                    START_FROM_OLDEST,
                    eventTimePolicy(i -> i, limitingLag(0), 1, 0, 10_000)));
    Vertex listSink = dag.newVertex(SINK_NAME, writeListP(SINK_NAME));
    dag.edge(between(remoteSource, listSink));

    Job job = localHz.getJet().newJob(dag);

    // Populate the map on the remote cluster; the local job streams its journal.
    populateMap(remoteHz.getMap(SOURCE_NAME));

    // Key 0 is filtered out, so exactly ITEM_COUNT - 1 items reach the sink.
    assertSizeEventually(ITEM_COUNT - 1, localHz.getList(SINK_NAME));
    assertFalse(localHz.getList(SINK_NAME).contains(0));
    assertTrue(localHz.getList(SINK_NAME).contains(1));

    job.cancel();
}
Aggregations