Use of com.hazelcast.jet.pipeline.test.GeneratorFunction in project hazelcast by hazelcast.
From the class OrderedStreamProcessingTest, method when_source_is_parallel_2.
@Test
public void when_source_is_parallel_2() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 4;
    // Generate monotonic increasing items that are distinct for each generator.
    GeneratorFunction<Long> generator1 = (ts, seq) -> generatorCount * seq;
    GeneratorFunction<Long> generator2 = (ts, seq) -> generatorCount * seq + 1;
    GeneratorFunction<Long> generator3 = (ts, seq) -> generatorCount * seq + 2;
    GeneratorFunction<Long> generator4 = (ts, seq) -> generatorCount * seq + 3;

    List<Long> sequence1 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i).boxed().collect(toList());
    List<Long> sequence2 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 1).boxed().collect(toList());
    List<Long> sequence3 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 2).boxed().collect(toList());
    List<Long> sequence4 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 3).boxed().collect(toList());

    StreamStage<Long> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator,
                    Arrays.asList(generator1, generator2, generator3, generator4)))
            .withIngestionTimestamps();
    StreamStage<Long> applied = srcStage.apply(transform);

    applied.filter(i -> i % generatorCount == 0)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence1.toArray())));
    applied.filter(i -> i % generatorCount == 1)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence2.toArray())));
    applied.filter(i -> i % generatorCount == 2)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence3.toArray())));
    applied.filter(i -> i % generatorCount == 3)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence4.toArray())));

    Job job = jet.newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
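For context, a GeneratorFunction receives the event timestamp and a monotonically increasing sequence number and returns the item to emit. Below is a minimal standalone sketch, not taken from the project, that plugs a single generator into TestSources.itemsStream; the class name GeneratorFunctionSketch and the rate of 10 items per second are illustrative assumptions. The tests on this page use itemsParallel(...) instead, which runs several generators side by side.

import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.pipeline.test.GeneratorFunction;
import com.hazelcast.jet.pipeline.test.TestSources;

public class GeneratorFunctionSketch {
    public static void main(String[] args) {
        // A GeneratorFunction maps (timestamp, sequence) to the item to emit;
        // this one emits the even numbers 0, 2, 4, ...
        GeneratorFunction<Long> evens = (timestamp, sequence) -> 2 * sequence;

        Pipeline p = Pipeline.create();
        p.readFrom(TestSources.itemsStream(10, evens)) // 10 items per second
         .withIngestionTimestamps()
         .writeTo(Sinks.logger());
        // Submit with jet.newJob(p) or hz.getJet().newJob(p), as the tests on this page do.
    }
}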
Use of com.hazelcast.jet.pipeline.test.GeneratorFunction in project hazelcast by hazelcast.
From the class OrderedStreamProcessingTest, method when_source_is_parallel.
@Test
public void when_source_is_parallel() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 4;
    // Generate monotonic increasing items that are distinct for each generator.
    GeneratorFunction<Long> generator1 = (ts, seq) -> generatorCount * seq;
    GeneratorFunction<Long> generator2 = (ts, seq) -> generatorCount * seq + 1;
    GeneratorFunction<Long> generator3 = (ts, seq) -> generatorCount * seq + 2;
    GeneratorFunction<Long> generator4 = (ts, seq) -> generatorCount * seq + 3;

    StreamStage<Long> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator,
                    Arrays.asList(generator1, generator2, generator3, generator4)))
            .withIngestionTimestamps();
    StreamStage<Long> applied = srcStage.apply(transform);

    applied.mapStateful(() -> create(generatorCount), this::orderValidator)
            .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
                assertTrue("when", validatedItemCountPerGenerator <= list.size());
                assertFalse("There is some reordered items in the list", list.contains(false));
            }));

    Job job = jet.newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
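The helpers create(generatorCount) and orderValidator referenced above belong to the test class and are not shown on this page. A hypothetical reconstruction of their shape, assuming the state is simply the last item seen per generator, could look like the following; the names and implementation are guesses, not the project's actual code.

// Hypothetical stand-ins (assumption): the state remembers the last item seen per
// generator, and the validator maps each item to false when it breaks the
// per-generator ascending order.
private long[] create(int generatorCount) {
    long[] lastSeen = new long[generatorCount];
    java.util.Arrays.fill(lastSeen, Long.MIN_VALUE);
    return lastSeen;
}

private boolean orderValidator(long[] lastSeen, Long item) {
    int generator = (int) (item % lastSeen.length); // items encode their generator as item % generatorCount
    boolean ordered = item > lastSeen[generator];   // must be strictly increasing within a generator
    lastSeen[generator] = item;
    return ordered;
}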
Use of com.hazelcast.jet.pipeline.test.GeneratorFunction in project hazelcast by hazelcast.
From the class OrderedProcessingMergingStagesTest, method when_hashJoin2_applied_primary_stream_order_is_preserved.
@Test
public void when_hashJoin2_applied_primary_stream_order_is_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 2;
    // Generate monotonic increasing items that are distinct for each generator.
    GeneratorFunction<Map.Entry<Long, Long>> generator1 = (ts, seq) -> Util.entry(0L, generatorCount * seq);
    GeneratorFunction<Map.Entry<Long, Long>> generator2 = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);

    StreamStage<Map.Entry<Long, Long>> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps();
    BatchStage<Map.Entry<Long, Long>> batchStage = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
    BatchStage<Map.Entry<Long, Long>> batchStage2 = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));

    StreamStage<Map.Entry<Long, Long>> joined = srcStage
            .hashJoin2(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    batchStage2, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    (primary, stg1, stg2) -> primary)
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);

    joined.groupingKey(Map.Entry::getKey)
            .mapStateful(() -> create(generatorCount), this::orderValidator)
            .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
                assertTrue("when", validatedItemCountPerGenerator <= list.size());
                assertFalse("There is some reordered items in the list", list.contains(false));
            }));

    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
Use of com.hazelcast.jet.pipeline.test.GeneratorFunction in project hazelcast by hazelcast.
From the class OrderedProcessingMergingStagesTest, method when_innerJoin_applied_primary_stream_order_is_preserved.
@Test
public void when_innerJoin_applied_primary_stream_order_is_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 2;
    // Generate monotonic increasing items that are distinct for each generator.
    GeneratorFunction<Map.Entry<Long, Long>> generator1 = (ts, seq) -> Util.entry(0L, generatorCount * seq);
    GeneratorFunction<Map.Entry<Long, Long>> generator2 = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);

    StreamStage<Map.Entry<Long, Long>> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps()
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    BatchStage<Map.Entry<Long, Long>> batchStage = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));

    StreamStage<Map.Entry<Long, Long>> joined = srcStage
            .innerHashJoin(batchStage, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
                    (primary, stg) -> primary)
            .setLocalParallelism(LOW_LOCAL_PARALLELISM);

    joined.groupingKey(Map.Entry::getKey)
            .mapStateful(() -> create(generatorCount), this::orderValidator)
            .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
                assertTrue("when", validatedItemCountPerGenerator <= list.size());
                assertFalse("There is some reordered items in the list", list.contains(false));
            }));

    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
Use of com.hazelcast.jet.pipeline.test.GeneratorFunction in project hazelcast by hazelcast.
From the class OrderedProcessingMergingStagesTest, method when_merge_applied_partial_orders_are_preserved.
@Test
public void when_merge_applied_partial_orders_are_preserved() {
    int validatedItemCountPerGenerator = ITEM_COUNT;
    int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
    int generatorCount = 4;
    // Generate monotonic increasing items that are distinct for each generator.
    GeneratorFunction<Long> generator1 = (ts, seq) -> generatorCount * seq;
    GeneratorFunction<Long> generator2 = (ts, seq) -> generatorCount * seq + 1;
    GeneratorFunction<Long> generator3 = (ts, seq) -> generatorCount * seq + 2;
    GeneratorFunction<Long> generator4 = (ts, seq) -> generatorCount * seq + 3;

    List<Long> sequence1 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i).boxed().collect(toList());
    List<Long> sequence2 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 1).boxed().collect(toList());
    List<Long> sequence3 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 2).boxed().collect(toList());
    List<Long> sequence4 = LongStream.range(0, validatedItemCountPerGenerator)
            .map(i -> generatorCount * i + 3).boxed().collect(toList());

    StreamStage<Long> srcStage = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator1, generator2)))
            .withIngestionTimestamps()
            .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
    StreamStage<Long> srcStage2 = p
            .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(generator3, generator4)))
            .withIngestionTimestamps()
            .setLocalParallelism(LOW_LOCAL_PARALLELISM);
    StreamStage<Long> merged = srcStage.merge(srcStage2).setLocalParallelism(HIGH_LOCAL_PARALLELISM);

    merged.filter(i -> i % generatorCount == 0)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence1.toArray())));
    merged.filter(i -> i % generatorCount == 1)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence2.toArray())));
    merged.filter(i -> i % generatorCount == 2)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence3.toArray())));
    merged.filter(i -> i % generatorCount == 3)
            .writeTo(AssertionSinks.assertCollectedEventually(60,
                    list -> Assert.assertArrayEquals(list.toArray(), sequence4.toArray())));

    Job job = hz.getJet().newJob(p);
    try {
        job.join();
        fail("Job should have completed with an AssertionCompletedException, but completed normally");
    } catch (CompletionException e) {
        String errorMsg = e.getCause().getMessage();
        assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: "
                + e.getCause(), errorMsg.contains(AssertionCompletedException.class.getName()));
    }
}
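A side note on why i % generatorCount can be used as the filter key in these tests: generator k emits generatorCount * seq + k, so each generator's items occupy a distinct residue class and each filtered sub-stream is exactly that generator's strictly increasing sequence. A minimal standalone check of that arithmetic, illustrative only and not part of the project:

public class ResidueCheck {
    public static void main(String[] args) {
        int generatorCount = 4;
        for (long seq = 0; seq < 1_000; seq++) {
            for (int k = 0; k < generatorCount; k++) {
                long item = generatorCount * seq + k;
                // Every item produced by generator k falls into residue class k.
                if (item % generatorCount != k) {
                    throw new AssertionError("unexpected residue for " + item);
                }
            }
        }
        System.out.println("each generator maps to exactly one residue class");
    }
}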