Use of cz.o2.proxima.util.TestUtils.createTestFamily in project proxima-platform by O2-Czech-Republic.
Class HadoopStorageTest, method testObserveCancel:
@Test(timeout = 5000L)
public void testObserveCancel() throws InterruptedException {
  // Rolling disabled (-1): a single watermark advance is enough to flush the bulk file.
  Map<String, Object> cfg = cfg(HadoopDataAccessor.HADOOP_ROLL_INTERVAL, -1);
  HadoopDataAccessor accessor =
      new HadoopDataAccessor(TestUtils.createTestFamily(entity, uri, cfg));

  // Write one element and wait until the commit callback confirms the flush.
  CountDownLatch written = new CountDownLatch(1);
  writeOneElement(
          accessor,
          (success, error) -> {
            assertTrue(success);
            assertNull(error);
            written.countDown();
          })
      .updateWatermark(Long.MAX_VALUE);
  written.await();

  BatchLogReader reader = accessor.getBatchLogReader(direct.getContext()).orElse(null);
  assertNotNull(reader);
  List<Partition> partitions = reader.getPartitions();
  assertEquals(1, partitions.size());

  // Close the observe handle from inside onNext and verify onCancelled fires.
  CountDownLatch cancelled = new CountDownLatch(1);
  AtomicReference<ObserveHandle> observeHandle = new AtomicReference<>();
  observeHandle.set(
      reader.observe(
          partitions,
          Collections.singletonList(attribute),
          new BatchLogObserver() {

            @Override
            public boolean onNext(StreamElement element) {
              // Cancel the observation as soon as the first element arrives.
              observeHandle.get().close();
              return true;
            }

            @Override
            public void onCompleted() {
              fail("onCompleted should not have been called");
            }

            @Override
            public void onCancelled() {
              cancelled.countDown();
            }

            @Override
            public boolean onError(Throwable error) {
              // NOTE(review): cancellation is presumably surfaced as an error by some
              // reader implementations, so errors are folded into onCancelled here.
              onCancelled();
              return true;
            }
          }));
  cancelled.await();
}
Use of cz.o2.proxima.util.TestUtils.createTestFamily in project proxima-platform by O2-Czech-Republic.
Class HadoopStorageTest, method testWriteElementNotYetFlushed:
@Test(timeout = 5_000L)
public void testWriteElementNotYetFlushed() throws InterruptedException {
  // Roll interval of 1000 ms keeps the written element in a temporary (_tmp) file
  // until the watermark is advanced explicitly below.
  Map<String, Object> cfg = cfg(HadoopDataAccessor.HADOOP_ROLL_INTERVAL, 1000);
  HadoopDataAccessor accessor =
      new HadoopDataAccessor(TestUtils.createTestFamily(entity, uri, cfg));

  CountDownLatch flushed = new CountDownLatch(1);
  BulkAttributeWriter writer =
      writeOneElement(
          accessor,
          ((success, error) -> {
            if (error != null) {
              log.error("Failed to flush write", error);
            }
            assertTrue("Error in flush " + error, success);
            assertNull(error);
            flushed.countDown();
          }));

  // Before the flush only a single temporary file may exist, and the reader
  // must not expose any partition yet.
  assertTrue(root.exists());
  List<File> files = listRecursively(root);
  assertEquals("Expected single file in " + files, 1, files.size());
  assertTrue(Iterables.getOnlyElement(files).getAbsolutePath().contains("_tmp"));

  BatchLogReader reader = accessor.getBatchLogReader(direct.getContext()).orElse(null);
  assertNotNull(reader);
  List<Partition> partitions = reader.getPartitions();
  assertTrue("Expected empty partitions, got " + partitions, partitions.isEmpty());

  // advance watermark to flush
  writer.updateWatermark(Long.MAX_VALUE);
  flushed.await();

  // After the flush exactly one partition appears and the element is readable.
  partitions = reader.getPartitions();
  assertEquals(1, partitions.size());
  BlockingQueue<StreamElement> received = new SynchronousQueue<>();
  reader.observe(
      partitions,
      Collections.singletonList(attribute),
      new BatchLogObserver() {

        @Override
        public boolean onNext(StreamElement element) {
          ExceptionUtils.unchecked(() -> received.put(element));
          return true;
        }
      });
  assertNotNull(received.take());
}
Use of cz.o2.proxima.util.TestUtils.createTestFamily in project proxima-platform by O2-Czech-Republic.
Class HadoopStorageTest, method testWriteElementJson:
@Test(timeout = 5000L)
public void testWriteElementJson() throws InterruptedException {
  // Rolling disabled (-1) and output format switched to JSON.
  Map<String, Object> cfg =
      cfg(HadoopDataAccessor.HADOOP_ROLL_INTERVAL, -1, "hadoop.format", "json");
  HadoopDataAccessor accessor =
      new HadoopDataAccessor(TestUtils.createTestFamily(entity, uri, cfg));

  // Write one element and wait for the flush confirmation.
  CountDownLatch flushed = new CountDownLatch(1);
  BulkAttributeWriter writer =
      writeOneElement(
          accessor,
          ((success, error) -> {
            assertTrue(success);
            assertNull(error);
            flushed.countDown();
          }));
  writer.updateWatermark(Long.MAX_VALUE);
  flushed.await();

  // The flushed output must be a single finalized (non-temporary) file.
  assertTrue(root.exists());
  List<File> files = listRecursively(root);
  assertEquals("Expected single file in " + files, 1, files.size());
  assertFalse(Iterables.getOnlyElement(files).getAbsolutePath().contains("_tmp"));

  // The JSON-formatted element must be readable back through the batch reader.
  BatchLogReader reader = accessor.getBatchLogReader(direct.getContext()).orElse(null);
  assertNotNull(reader);
  List<Partition> partitions = reader.getPartitions();
  assertEquals(1, partitions.size());
  BlockingQueue<StreamElement> received = new SynchronousQueue<>();
  reader.observe(
      partitions,
      Collections.singletonList(attribute),
      new BatchLogObserver() {

        @Override
        public boolean onNext(StreamElement element) {
          ExceptionUtils.unchecked(() -> received.put(element));
          return true;
        }
      });
  assertNotNull(received.take());
}
Use of cz.o2.proxima.util.TestUtils.createTestFamily in project proxima-platform by O2-Czech-Republic.
Class HadoopStorageTest, method testHashCodeAndEquals:
@Test
public void testHashCodeAndEquals() {
  // Two default-constructed storages must be equal with equal hash codes.
  TestUtils.assertHashCodeAndEquals(new HadoopStorage(), new HadoopStorage());
  // Renamed from `entity`: the original local shadowed the test-class field of
  // the same name, which is confusing and error-prone.
  final EntityDescriptor dummyEntity = EntityDescriptor.newBuilder().setName("dummy").build();
  // Two accessors built from the same family must likewise be equal.
  final AttributeFamilyDescriptor family =
      TestUtils.createTestFamily(dummyEntity, URI.create("hdfs://host:9000/path"));
  TestUtils.assertHashCodeAndEquals(new HadoopDataAccessor(family), new HadoopDataAccessor(family));
}
Use of cz.o2.proxima.util.TestUtils.createTestFamily in project proxima-platform by O2-Czech-Republic.
Class GCloudStorageAccessorTest, method testNamingConventionWithBucketAndNoPath:
@Test
public void testNamingConventionWithBucketAndNoPath() {
  // A bucket-only URI (no path component) must still yield date-partitioned names
  // rooted at "/": 1500000000000 ms is 2017-07-14 UTC.
  final URI bucketOnly = URI.create("gs://bucket");
  final GCloudStorageAccessor accessor =
      new GCloudStorageAccessor(TestUtils.createTestFamily(entity, bucketOnly));
  final NamingConvention convention = accessor.getNamingConvention();
  assertTrue(convention.nameOf(1500000000000L).startsWith("/2017/07/"));
}
Aggregations