Example usage of java.util.concurrent.CompletionException from the Apache HBase project, taken from class TestThreadLocalPoolMap, method testLocality.
@Test
public void testLocality() throws ExecutionException, InterruptedException {
  // Each worker thread must always get back the value it created for the shared key,
  // i.e. the pool entry is thread-local.
  String sharedKey = "key";
  AtomicInteger counter = new AtomicInteger();
  Runnable task = () -> {
    String ownValue = Integer.toString(counter.getAndIncrement());
    int round = 0;
    while (round < 3) {
      String fetched;
      try {
        fetched = poolMap.getOrCreate(sharedKey, () -> ownValue);
      } catch (IOException e) {
        // Runnable cannot throw checked exceptions; rethrow unchecked so the
        // failure propagates through the CompletableFuture.
        throw new CompletionException(e);
      }
      assertEquals(ownValue, fetched);
      Thread.yield();
      round++;
    }
  };
  CompletableFuture<Void> first = CompletableFuture.runAsync(task);
  CompletableFuture<Void> second = CompletableFuture.runAsync(task);
  /* test for successful completion */
  first.get();
  second.get();
  // One pooled entry per worker thread.
  assertEquals(2, poolMap.values().size());
}
Example usage of java.util.concurrent.CompletionException from the Apache HBase project, taken from class TestRoundRobinPoolMap, method testMultiThreadedRoundRobin.
@Test
public void testMultiThreadedRoundRobin() throws ExecutionException, InterruptedException {
  // Two threads each create POOL_SIZE values; round-robin reuse means every
  // pooled value must be handed out exactly twice in total.
  String sharedKey = "key";
  AtomicInteger counter = new AtomicInteger();
  List<String> observed = Collections.synchronizedList(new ArrayList<>());
  Runnable task = () -> {
    try {
      for (int round = 0; round < POOL_SIZE; round++) {
        String fresh = Integer.toString(counter.getAndIncrement());
        observed.add(poolMap.getOrCreate(sharedKey, () -> fresh));
        Thread.yield();
      }
    } catch (IOException e) {
      // Surface the checked exception through the CompletableFuture.
      throw new CompletionException(e);
    }
  };
  CompletableFuture<Void> first = CompletableFuture.runAsync(task);
  CompletableFuture<Void> second = CompletableFuture.runAsync(task);
  /* test for successful completion */
  first.get();
  second.get();
  assertEquals(POOL_SIZE, poolMap.values().size());
  /* check every elements occur twice */
  Collections.sort(observed);
  for (int i = 0; i + 1 < observed.size(); i += 2) {
    assertEquals(observed.get(i), observed.get(i + 1));
  }
  // No stragglers beyond the POOL_SIZE pairs.
  assertEquals(2 * POOL_SIZE, observed.size());
}
Example usage of java.util.concurrent.CompletionException from the Hazelcast project, taken from class OrderedProcessingMergingStagesTest, method when_hashJoin2_applied_primary_stream_order_is_preserved.
@Test
public void when_hashJoin2_applied_primary_stream_order_is_preserved() {
  int validatedItemCountPerGenerator = ITEM_COUNT;
  int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
  int generatorCount = 2;
  // Generate monotonic increasing items that are distinct for each generator.
  GeneratorFunction<Map.Entry<Long, Long>> evenSource = (ts, seq) -> Util.entry(0L, generatorCount * seq);
  GeneratorFunction<Map.Entry<Long, Long>> oddSource = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);
  StreamStage<Map.Entry<Long, Long>> source = p
      .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(evenSource, oddSource)))
      .withIngestionTimestamps();
  // Two small enrichment sides with one entry per key.
  BatchStage<Map.Entry<Long, Long>> enrichA = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
  BatchStage<Map.Entry<Long, Long>> enrichB = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
  // The join output is the primary item itself, so ordering is checkable downstream.
  StreamStage<Map.Entry<Long, Long>> joinedStream = source
      .hashJoin2(
          enrichA, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
          enrichB, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey),
          (primary, stg1, stg2) -> primary)
      .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
  joinedStream
      .groupingKey(Map.Entry::getKey)
      .mapStateful(() -> create(generatorCount), this::orderValidator)
      .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
        assertTrue("when", validatedItemCountPerGenerator <= list.size());
        assertFalse("There is some reordered items in the list", list.contains(false));
      }));
  Job job = hz.getJet().newJob(p);
  try {
    job.join();
    fail("Job should have completed with an AssertionCompletedException, but completed normally");
  } catch (CompletionException e) {
    // assertCollectedEventually signals success by throwing; verify the cause type.
    String errorMsg = e.getCause().getMessage();
    assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: " + e.getCause(),
        errorMsg.contains(AssertionCompletedException.class.getName()));
  }
}
Example usage of java.util.concurrent.CompletionException from the Hazelcast project, taken from class OrderedProcessingMergingStagesTest, method when_innerJoin_applied_primary_stream_order_is_preserved.
@Test
public void when_innerJoin_applied_primary_stream_order_is_preserved() {
  int validatedItemCountPerGenerator = ITEM_COUNT;
  int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
  int generatorCount = 2;
  // Generate monotonic increasing items that are distinct for each generator.
  GeneratorFunction<Map.Entry<Long, Long>> evenSource = (ts, seq) -> Util.entry(0L, generatorCount * seq);
  GeneratorFunction<Map.Entry<Long, Long>> oddSource = (ts, seq) -> Util.entry(1L, generatorCount * seq + 1);
  StreamStage<Map.Entry<Long, Long>> source = p
      .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(evenSource, oddSource)))
      .withIngestionTimestamps()
      .setLocalParallelism(HIGH_LOCAL_PARALLELISM);
  // Enrichment side has exactly one entry per key so the inner join drops nothing.
  BatchStage<Map.Entry<Long, Long>> enrichment = p.readFrom(TestSources.items(Util.entry(0L, 0L), Util.entry(1L, 0L)));
  StreamStage<Map.Entry<Long, Long>> joinedStream = source
      .innerHashJoin(enrichment, JoinClause.onKeys(Map.Entry::getKey, Map.Entry::getKey), (primary, stg) -> primary)
      .setLocalParallelism(LOW_LOCAL_PARALLELISM);
  joinedStream
      .groupingKey(Map.Entry::getKey)
      .mapStateful(() -> create(generatorCount), this::orderValidator)
      .writeTo(AssertionSinks.assertCollectedEventually(60, list -> {
        assertTrue("when", validatedItemCountPerGenerator <= list.size());
        assertFalse("There is some reordered items in the list", list.contains(false));
      }));
  Job job = hz.getJet().newJob(p);
  try {
    job.join();
    fail("Job should have completed with an AssertionCompletedException, but completed normally");
  } catch (CompletionException e) {
    // assertCollectedEventually signals success by throwing; verify the cause type.
    String errorMsg = e.getCause().getMessage();
    assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: " + e.getCause(),
        errorMsg.contains(AssertionCompletedException.class.getName()));
  }
}
Example usage of java.util.concurrent.CompletionException from the Hazelcast project, taken from class OrderedStreamProcessingTest, method when_source_is_parallel_2.
@Test
public void when_source_is_parallel_2() {
  int validatedItemCountPerGenerator = ITEM_COUNT;
  int eventsPerSecondPerGenerator = 5 * ITEM_COUNT;
  int generatorCount = 4;
  // Generate monotonic increasing items that are distinct for each generator:
  // generator k emits generatorCount * seq + k.
  GeneratorFunction<Long> gen0 = (ts, seq) -> generatorCount * seq;
  GeneratorFunction<Long> gen1 = (ts, seq) -> generatorCount * seq + 1;
  GeneratorFunction<Long> gen2 = (ts, seq) -> generatorCount * seq + 2;
  GeneratorFunction<Long> gen3 = (ts, seq) -> generatorCount * seq + 3;
  // expected.get(k) is the ordered sequence generator k is supposed to produce.
  List<List<Long>> expected = new ArrayList<>();
  for (int offset = 0; offset < generatorCount; offset++) {
    long shift = offset;
    expected.add(LongStream.range(0, validatedItemCountPerGenerator)
        .map(i -> generatorCount * i + shift)
        .boxed()
        .collect(toList()));
  }
  StreamStage<Long> srcStage = p
      .readFrom(itemsParallel(eventsPerSecondPerGenerator, Arrays.asList(gen0, gen1, gen2, gen3)))
      .withIngestionTimestamps();
  StreamStage<Long> applied = srcStage.apply(transform);
  // Split the output by residue class and check each sub-stream kept its order.
  for (int offset = 0; offset < generatorCount; offset++) {
    int residue = offset;
    List<Long> expectedSeq = expected.get(offset);
    applied.filter(i -> i % generatorCount == residue)
        .writeTo(AssertionSinks.assertCollectedEventually(60,
            list -> Assert.assertArrayEquals(list.toArray(), expectedSeq.toArray())));
  }
  Job job = jet.newJob(p);
  try {
    job.join();
    fail("Job should have completed with an AssertionCompletedException, but completed normally");
  } catch (CompletionException e) {
    // assertCollectedEventually signals success by throwing; verify the cause type.
    String errorMsg = e.getCause().getMessage();
    assertTrue("Job was expected to complete with AssertionCompletedException, but completed with: " + e.getCause(),
        errorMsg.contains(AssertionCompletedException.class.getName()));
  }
}
Aggregations