Example usage of cz.o2.proxima.direct.storage.ListBatchReader in the proxima-platform project by O2-Czech-Republic, taken from the method testBatchLogReadWithLimit of the class BatchLogReadTest.
/**
 * Verifies that reading from a batch log with an element limit stops after the
 * requested number of elements, even though the reader holds many more.
 */
@Test(timeout = 60000)
public void testBatchLogReadWithLimit() {
final int limit = 50;
final int numElements = 1000;
final List<StreamElement> input = createInput(numElements);
final ListBatchReader reader = ListBatchReader.of(direct.getContext(), input);
testReadingFromBatchLogMany(
    limit, BatchLogRead.of(Collections.singletonList(this.data), limit, repo, reader));
}
Example usage of cz.o2.proxima.direct.storage.ListBatchReader in the proxima-platform project by O2-Czech-Republic, taken from the method testReadingFromBatchLog of the class BatchLogReadTest.
/**
 * Smoke test: reading a single element from a batch log backed by a
 * {@link ListBatchReader} completes successfully.
 */
@Test(timeout = 30000)
public void testReadingFromBatchLog() {
// Named 'elements' so the local input does not shadow the attribute field this.data.
final List<StreamElement> elements = createInput(1);
final ListBatchReader reader = ListBatchReader.of(context, elements);
testReadingFromBatchLog(Collections.singletonList(this.data), reader);
}
Example usage of cz.o2.proxima.direct.storage.ListBatchReader in the proxima-platform project by O2-Czech-Republic, taken from the method testRunAndClose of the class BatchLogSourceFunctionTest.
// Verifies the source-function life cycle: open the operator test harness, run the
// source on a background thread, cancel it, close the harness, and confirm the run
// thread exits without throwing. The exact order of the calls below matters.
@Test
void testRunAndClose() throws Exception {
final Repository repository = Repository.ofTest(ConfigFactory.parseString(MODEL));
final AttributeDescriptor<?> attribute = repository.getEntity("test").getAttribute("data");
// Source function backed by an empty partitioned reader, wrapped in an
// offset-tracking reader so read positions can be recorded for checkpoints.
final BatchLogSourceFunction<StreamElement> sourceFunction = new BatchLogSourceFunction<StreamElement>(repository.asFactory(), Collections.singletonList(attribute), ResultExtractor.identity()) {
@Override
BatchLogReader createLogReader(List<AttributeDescriptor<?>> attributeDescriptors) {
final DirectDataOperator direct = repository.getOrCreateOperator(DirectDataOperator.class);
final ListBatchReader reader = ListBatchReader.ofPartitioned(direct.getContext());
return OffsetTrackingBatchLogReader.of(reader);
}
};
final AbstractStreamOperatorTestHarness<StreamElement> testHarness = createTestHarness(sourceFunction, 1, 0);
testHarness.initializeEmptyState();
testHarness.open();
// Run the source on a separate thread; CheckedThread re-throws any failure on sync().
final CheckedThread runThread = new CheckedThread("run") {
@Override
public void go() throws Exception {
sourceFunction.run(new TestSourceContext<StreamElement>() {
@Override
public void collect(StreamElement element) {
// No-op: the reader is empty, so no elements are expected here.
}
});
}
};
runThread.start();
// Block until the source is actually running before cancelling it.
sourceFunction.awaitRunning();
sourceFunction.cancel();
testHarness.close();
// Make sure run thread finishes normally.
runThread.sync();
}
Example usage of cz.o2.proxima.direct.storage.ListBatchReader in the proxima-platform project by O2-Czech-Republic, taken from the method testObserveOffsets of the class BatchLogReaderTest.
/**
 * Reading with explicit starting offsets must skip the already-consumed prefix of
 * each partition and deliver exactly the remaining elements.
 */
@Test
public void testObserveOffsets() throws InterruptedException {
final List<StreamElement> first = newPartition("first_", 100);
final List<StreamElement> second = newPartition("second_", 80);
final List<StreamElement> third = newPartition("third_", 60);
final ListBatchReader reader =
    ListBatchReader.ofPartitioned(direct.getContext(), Arrays.asList(first, second, third));
final BlockingQueue<String> observedKeys = new LinkedBlockingQueue<>();
final CountDownLatch completed = new CountDownLatch(1);
// Start mid-way through partition 0 and partition 2; partition 1 is not read at all.
reader.observeOffsets(
    Arrays.asList(Offset.of(Partition.of(0), 50, false), Offset.of(Partition.of(2), 40, false)),
    Collections.singletonList(attr),
    new BatchLogObserver() {
      @Override
      public boolean onNext(StreamElement element) {
        // Each key must be seen exactly once.
        assertTrue(observedKeys.add(element.getKey()));
        return true;
      }

      @Override
      public void onCompleted() {
        completed.countDown();
      }
    });
completed.await();
// Expected: the suffix of partition 0 from index 50 and of partition 2 from index 40.
final Set<String> expected = new HashSet<>();
first.subList(50, first.size()).forEach(element -> expected.add(element.getKey()));
third.subList(40, third.size()).forEach(element -> expected.add(element.getKey()));
assertEquals(expected, new HashSet<>(observedKeys));
}
Example usage of cz.o2.proxima.direct.storage.ListBatchReader in the proxima-platform project by O2-Czech-Republic, taken from the method testObserveReadOffset of the class BatchLogReaderTest.
/**
 * Observing whole partitions must expose strictly increasing element offsets, and the
 * final offset of each partition must carry the terminal index with the last flag set.
 */
@Test
public void testObserveReadOffset() throws InterruptedException {
final List<StreamElement> first = newPartition("first_", 10);
final List<StreamElement> second = newPartition("second_", 20);
final List<StreamElement> third = newPartition("third_", 30);
final ListBatchReader reader =
    ListBatchReader.ofPartitioned(direct.getContext(), Arrays.asList(first, second, third));
final ConcurrentMap<Partition, Offset> lastSeen = new ConcurrentHashMap<>();
final CountDownLatch completed = new CountDownLatch(1);
reader.observe(
    Arrays.asList(Partition.of(0), Partition.of(1), Partition.of(2)),
    Collections.singletonList(attr),
    new BatchLogObserver() {
      @Override
      public boolean onNext(StreamElement element, OnNextContext context) {
        lastSeen.merge(
            context.getPartition(),
            context.getOffset(),
            (previous, current) -> {
              // Offsets within a single partition must strictly increase.
              assertTrue(previous.getElementIndex() < current.getElementIndex());
              return current;
            });
        return true;
      }

      @Override
      public void onCompleted() {
        completed.countDown();
      }
    });
completed.await();
// The last observed offset per partition points at the final element (size - 1, last=true).
assertEquals(Offset.of(Partition.of(0), 9, true), lastSeen.get(Partition.of(0)));
assertEquals(Offset.of(Partition.of(1), 19, true), lastSeen.get(Partition.of(1)));
assertEquals(Offset.of(Partition.of(2), 29, true), lastSeen.get(Partition.of(2)));
}
Aggregations