use of cz.o2.proxima.repository.AttributeDescriptor in project proxima-platform by O2-Czech-Republic.
Usage in class HBaseLogReader, method observe:
@Override
public ObserveHandle observe(
    List<Partition> partitions,
    List<AttributeDescriptor<?>> attributes,
    BatchLogObserver observer) {

  // Wrap the observer in a context that tracks termination and
  // exposes the cancellation handle returned to the caller.
  final TerminationContext context = new TerminationContext(observer);
  observeInternal(partitions, attributes, observer, context);
  return context.asObserveHandle();
}
use of cz.o2.proxima.repository.AttributeDescriptor in project proxima-platform by O2-Czech-Republic.
Usage in class HadoopBatchLogReader, method observe:
@Override
public ObserveHandle observe(
    List<Partition> partitions,
    List<AttributeDescriptor<?>> attributes,
    BatchLogObserver observer) {

  // The context observes lifecycle/termination of the asynchronous read
  // and is the source of the handle handed back to the caller.
  final TerminationContext context = new TerminationContext(observer);
  observeInternal(partitions, attributes, observer, context);
  return context.asObserveHandle();
}
use of cz.o2.proxima.repository.AttributeDescriptor in project proxima-platform by O2-Czech-Republic.
Usage in class BeamStreamTest, method testInterruptible:
@Test(timeout = 10000)
public void testInterruptible() throws InterruptedException {
  // Build an endless (unbounded) stream over the "armed" attribute and
  // verify that signalling the interrupt supplier terminates collection.
  Repository repo = Repository.ofTest(ConfigFactory.load("test-reference.conf"));
  BeamDataOperator op = repo.getOrCreateOperator(BeamDataOperator.class);
  EntityDescriptor gateway = repo.getEntity("gateway");
  AttributeDescriptor<?> armed = gateway.getAttribute("armed");

  // SynchronousQueue gives a rendezvous: put() below blocks until the
  // stream's interrupt supplier calls take().
  SynchronousQueue<Boolean> interrupt = new SynchronousQueue<>();
  Stream<StreamElement> stream =
      BeamStream.stream(
          op,
          Position.OLDEST,
          false,
          true,
          interrupt::take,
          BeamStream::createPipelineDefault,
          armed);

  CountDownLatch finished = new CountDownLatch(1);
  Thread collector =
      new Thread(
          () -> {
            // collect endless stream; unblocks only after interruption
            stream.collect();
            finished.countDown();
          });
  collector.start();

  // hand over the termination signal to the stream's interrupt supplier
  interrupt.put(true);
  // wait for the pipeline to shut down
  finished.await();
  // make sonar happy
  assertTrue(true);
}
use of cz.o2.proxima.repository.AttributeDescriptor in project proxima-platform by O2-Czech-Republic.
Usage in class AttributeFamilyProxyDataDescriptorTest, method testReadingFromProxy:
@Test
public void testReadingFromProxy() {
  // Write one element through the proxied attribute's writer, then verify
  // a batch-snapshot read through the proxy sees exactly that one element.
  EntityDescriptor proxied = repo.getEntity("proxied");
  AttributeDescriptor<byte[]> event = proxied.getAttribute("event.*");

  OnlineAttributeWriter writer =
      direct
          .getWriter(event)
          .orElseThrow(() -> new IllegalArgumentException("Missing writer for " + event));
  // fire-and-forget write; the commit callback is intentionally a no-op here
  writer.write(newEvent(proxied, event), (succ, exc) -> {});

  Pipeline pipeline = Pipeline.create();
  PCollection<StreamElement> snapshot = beam.getBatchSnapshot(pipeline, event);
  PCollection<Long> counted = snapshot.apply(Count.globally());
  PAssert.that(counted).containsInAnyOrder(1L);
  assertNotNull(pipeline.run());
}
use of cz.o2.proxima.repository.AttributeDescriptor in project proxima-platform by O2-Czech-Republic.
Usage in class BeamDataOperatorTest, method testStreamFromOldestWithKafkaTest:
@Test
public void testStreamFromOldestWithKafkaTest() {
  // Override the event storage to kafka-test, write numElements upserts,
  // then verify reading the stream from OLDEST yields all of them.
  Config config =
      ConfigFactory.parseMap(
              Collections.singletonMap(
                  "attributeFamilies.event-storage-stream.storage", "kafka-test://dummy/events"))
          .withFallback(ConfigFactory.load("test-reference.conf"));
  Repository repo = Repository.ofTest(config);
  EntityDescriptor event = repo.getEntity("event");
  AttributeDescriptor<?> data = event.getAttribute("data");
  int numElements = 10000;
  long now = System.currentTimeMillis();
  try (DirectDataOperator direct = repo.getOrCreateOperator(DirectDataOperator.class);
      BeamDataOperator operator = repo.getOrCreateOperator(BeamDataOperator.class)) {

    // Hoisted out of the loop: the writer lookup is loop-invariant, and the
    // original re-resolved it (Optional + orElseThrow) on every one of the
    // 10 000 iterations.
    OnlineAttributeWriter writer =
        direct
            .getWriter(data)
            .orElseThrow(() -> new IllegalStateException("Missing writer for data"));

    for (int i = 0; i < numElements; i++) {
      // distinct timestamps (now + i) keep element ordering deterministic;
      // commit callback intentionally ignored in this test
      writer.write(
          StreamElement.upsert(
              event,
              data,
              UUID.randomUUID().toString(),
              UUID.randomUUID().toString(),
              data.getName(),
              now + i,
              new byte[] {}),
          (succ, exc) -> {});
    }

    Pipeline p = Pipeline.create();
    PCollection<StreamElement> input = operator.getStream(p, Position.OLDEST, true, true, data);
    PCollection<Long> count = input.apply(Count.globally());
    PAssert.that(count).containsInAnyOrder(Collections.singletonList((long) numElements));
    assertNotNull(p.run());
  }
}
Aggregations