Use of com.hazelcast.jet.pipeline.test.AssertionCompletedException in project hazelcast by hazelcast.
Example: the timestampsAndWatermarks() method of the class KinesisIntegrationTest.
@Test
@Category(SerialTest.class)
public void timestampsAndWatermarks() {
    HELPER.createStream(1);
    sendMessages();
    try {
        Pipeline pipeline = Pipeline.create();
        pipeline.readFrom(kinesisSource().build())
                .withNativeTimestamps(0)
                .window(WindowDefinition.sliding(500, 100))
                .aggregate(counting())
                .apply(assertCollectedEventually(ASSERT_TRUE_EVENTUALLY_TIMEOUT, windowResults -> {
                    // more than one window means the watermark is advancing
                    assertTrue(windowResults.size() > 1);
                }));
        hz().getJet().newJob(pipeline).join();
        fail("Expected exception not thrown");
    } catch (CompletionException ce) {
        Throwable cause = peel(ce);
        assertTrue(cause instanceof JetException);
        assertTrue(cause.getCause() instanceof AssertionCompletedException);
    }
}
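The same assertion flow works without any Kinesis infrastructure. Below is a minimal, self-contained sketch using the built-in test source TestSources.itemStream; it assumes a running HazelcastInstance named hz and static imports of AggregateOperations.counting and Assertions.assertCollectedEventually, and the 10-second timeout is an illustrative choice, not taken from the test above.

Pipeline pipeline = Pipeline.create();
pipeline.readFrom(TestSources.itemStream(100))        // 100 synthetic events per second
        .withIngestionTimestamps()
        .window(WindowDefinition.sliding(500, 100))
        .aggregate(counting())
        .apply(Assertions.assertCollectedEventually(10, windowResults ->
                assertTrue("expected more than one window", windowResults.size() > 1)));
try {
    hz.getJet().newJob(pipeline).join();
    fail("Expected exception not thrown");
} catch (CompletionException ce) {
    // once the assertion passes, the sink stops the job by throwing
    // AssertionCompletedException, which surfaces through join()
    assertTrue(ce.getCause().getMessage()
            .contains(AssertionCompletedException.class.getName()));
}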
Use of com.hazelcast.jet.pipeline.test.AssertionCompletedException in project hazelcast by hazelcast.
Example: the multiple_nodes() method of the class OrderedProcessingMultipleMemberTest.
@Test
public void multiple_nodes() {
    // because of mapJournal's capacity, increasing this value too much can break the test
    int itemCount = 250;
    int keyCount = 8;
    String mapName = "test-map-" + idx;
    StreamStage<Map.Entry<Long, Long>> srcStage = p
            .readFrom(Sources.<Long, Long>mapJournal(mapName, JournalInitialPosition.START_FROM_OLDEST))
            .withoutTimestamps();
    StreamStage<Map.Entry<Long, Long>> applied = srcStage.apply(transform);
    applied.groupingKey(Map.Entry::getKey)
           .mapStateful(() -> create(keyCount), this::orderValidator)
           .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
               assertTrue("Not all items were received", itemCount <= list.size());
               assertFalse("There are reordered items in the list", list.contains(false));
           }));
    IMap<Long, Long> testMap = instances[0].getMap(mapName);
    LongStream.range(0, itemCount)
              .boxed()
              .forEachOrdered(i -> testMap.put(i % keyCount, i));
    Job job = instances[0].newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        testMap.clear();
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
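The create(keyCount) and orderValidator helpers are defined elsewhere in the test class and not shown in this snippet. A plausible minimal sketch of what they might look like, assuming the state is a per-key table of the last value seen and that the validator emits false for any out-of-order value (the assertion above checks list.contains(false)):

// Hypothetical reconstruction: track the last value observed for each key.
private static long[] create(int keyCount) {
    long[] lastSeen = new long[keyCount];
    Arrays.fill(lastSeen, Long.MIN_VALUE);
    return lastSeen;
}

// The keyed mapStateful variant passes (state, key, item) to the mapping function.
private Boolean orderValidator(long[] lastSeen, Long key, Map.Entry<Long, Long> entry) {
    int k = key.intValue();
    long value = entry.getValue();
    boolean inOrder = value > lastSeen[k];   // per-key values must be strictly increasing
    lastSeen[k] = value;
    return inOrder;
}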
Use of com.hazelcast.jet.pipeline.test.AssertionCompletedException in project hazelcast by hazelcast.
Example: the when_merge_applied_partial_orders_are_preserved() method of the class OrderedProcessingMergingStagesTest.
@Test
public void when_merge_applied_partial_orders_are_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 4;
    // Generate monotonically increasing items that are distinct for each generator.
    GeneratorFunction<Long> generator1 = (ts, seq) -> generatorCount * seq;
    GeneratorFunction<Long> generator2 = (ts, seq) -> generatorCount * seq + 1;
    GeneratorFunction<Long> generator3 = (ts, seq) -> generatorCount * seq + 2;
    GeneratorFunction<Long> generator4 = (ts, seq) -> generatorCount * seq + 3;
    // The expected output sequence of each generator.
    List<Long> sequence1 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i).boxed().collect(toList());
    List<Long> sequence2 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 1).boxed().collect(toList());
    List<Long> sequence3 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 2).boxed().collect(toList());
    List<Long> sequence4 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 3).boxed().collect(toList());
    StreamStage<Long> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps()
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    StreamStage<Long> srcStage2 = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator3, generator4)))
            .withIngestionTimestamps()
            .setLocalParallelism(LOW_LOCAL_PARALLELISM);
    StreamStage<Long> merged = srcStage.merge(srcStage2)
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    // After the merge, each generator's items must still arrive in their original order.
    merged.filter(i -> i % generatorCount == 0)
          .writeTo(AssertionSinks.assertCollectedEventually(60,
                  list -> Assert.assertArrayEquals(list.toArray(), sequence1.toArray())));
    merged.filter(i -> i % generatorCount == 1)
          .writeTo(AssertionSinks.assertCollectedEventually(60,
                  list -> Assert.assertArrayEquals(list.toArray(), sequence2.toArray())));
    merged.filter(i -> i % generatorCount == 2)
          .writeTo(AssertionSinks.assertCollectedEventually(60,
                  list -> Assert.assertArrayEquals(list.toArray(), sequence3.toArray())));
    merged.filter(i -> i % generatorCount == 3)
          .writeTo(AssertionSinks.assertCollectedEventually(60,
                  list -> Assert.assertArrayEquals(list.toArray(), sequence4.toArray())));
    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
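Every example in this class ends with the same try/join/catch verification. A sketch of a helper method (hypothetical, not part of the Hazelcast test utilities) that would factor it out:

private static void expectAssertionCompleted(Job job) {
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        // the assertion sink signals success by failing the job with
        // AssertionCompletedException, whose name appears in the cause message
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}

With it, each test body would end in a single call: expectAssertionCompleted(hz.getJet().newJob(p));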
Use of com.hazelcast.jet.pipeline.test.AssertionCompletedException in project hazelcast by hazelcast.
Example: the when_innerJoin2_applied_primary_stream_order_is_preserved() method of the class OrderedProcessingMergingStagesTest.
@Test
public void when_innerJoin2_applied_primary_stream_order_is_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 2;
    // Generate monotonically increasing items that are distinct for each generator.
    GeneratorFunction<Map.Entry<Long, Long>> generator1 = (ts, seq) -> Util.entry(0L, generatorCount * seq);
    GeneratorFunction<Map.Entry<Long, Long>> generator2 = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);
    StreamStage<Map.Entry<Long, Long>> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps()
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    BatchStage<Map.Entry<Long, Long>> batchStage = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
    BatchStage<Map.Entry<Long, Long>> batchStage2 = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
    StreamStage<Map.Entry<Long, Long>> joined = srcStage
            .innerHashJoin2(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    batchStage2, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    (primary, stg1, stg2) -> primary)
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    joined.groupingKey(Map.Entry::getKey)
          .mapStateful(() -> create(generatorCount), this::orderValidator)
          .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
              assertTrue("Not all items were received", validatedItemCountPerGenerator <= list.size());
              assertFalse("There are reordered items in the list", list.contains(false));
          }));
    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
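In the test above, the join function (primary, stg1, stg2) -> primary discards the batch-side matches because only the ordering of the primary stream is under test. An illustrative variant (same API, hypothetical combining logic) that actually uses the joined values might look like this:

StreamStage<Map.Entry<Long, Long>> enriched = srcStage
        .innerHashJoin2(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                batchStage2, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                // combine the stream item with its two batch-side matches
                (primary, match1, match2) -> Util.entry(primary.getKey(),
                        primary.getValue() + match1.getValue() + match2.getValue()));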
Use of com.hazelcast.jet.pipeline.test.AssertionCompletedException in project hazelcast by hazelcast.
Example: the when_hashJoin_applied_primary_stream_order_is_preserved() method of the class OrderedProcessingMergingStagesTest.
@Test
public void when_hashJoin_applied_primary_stream_order_is_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 2;
    // Generate monotonically increasing items that are distinct for each generator.
    GeneratorFunction<Map.Entry<Long, Long>> generator1 = (ts, seq) -> Util.entry(0L, generatorCount * seq);
    GeneratorFunction<Map.Entry<Long, Long>> generator2 = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);
    StreamStage<Map.Entry<Long, Long>> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps()
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    BatchStage<Map.Entry<Long, Long>> batchStage = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
    StreamStage<Map.Entry<Long, Long>> joined = srcStage
            .hashJoin(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    (primary, stage) -> primary)
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    joined.groupingKey(Map.Entry::getKey)
          .mapStateful(() -> create(generatorCount), this::orderValidator)
          .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
              assertTrue("Not all items were received", validatedItemCountPerGenerator <= list.size());
              assertFalse("There are reordered items in the list", list.contains(false));
          }));
    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
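Unlike innerHashJoin2 in the previous example, plain hashJoin is an outer join: a primary item with no match on the batch side still passes through, with null supplied for the missing match. A hedged sketch (hypothetical combining logic, same API) of defensive null handling in the join function:

StreamStage<Map.Entry<Long, Long>> joined = srcStage
        .hashJoin(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                (primary, match) -> match == null
                        ? primary                      // unmatched: pass the item through as-is
                        : Util.entry(primary.getKey(),
                                primary.getValue() + match.getValue()));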