Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class ZkCoordinatorTest, method testInjector:
@Test
public void testInjector() throws Exception {
  Injector injector = Guice.createInjector(new Module() {
    @Override
    public void configure(Binder binder) {
      binder.bind(ObjectMapper.class).toInstance(jsonMapper);
      binder.bind(SegmentLoaderConfig.class).toInstance(new SegmentLoaderConfig() {
        @Override
        public File getInfoDir() {
          return infoDir;
        }

        @Override
        public int getNumLoadingThreads() {
          return 5;
        }

        @Override
        public int getAnnounceIntervalMillis() {
          return 50;
        }
      });
      binder.bind(ZkPathsConfig.class).toInstance(new ZkPathsConfig() {
        @Override
        public String getBase() {
          return "/druid";
        }
      });
      binder.bind(DruidServerMetadata.class).toInstance(new DruidServerMetadata("dummyServer", "dummyHost", 0, "dummyType", "normal", 0));
      binder.bind(DataSegmentAnnouncer.class).toInstance(announcer);
      binder.bind(CuratorFramework.class).toInstance(curator);
      binder.bind(ServerManager.class).toInstance(serverManager);
      binder.bind(ScheduledExecutorFactory.class).toInstance(ScheduledExecutors.createFactory(new Lifecycle()));
    }
  });
  ZkCoordinator zkCoordinator = injector.getInstance(ZkCoordinator.class);

  List<DataSegment> segments = Lists.newLinkedList();
  for (int i = 0; i < COUNT; ++i) {
    segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-01")));
    segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-02")));
    segments.add(makeSegment("test" + i, "2", new Interval("P1d/2011-04-02")));
    segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-01")));
    segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-02")));
  }
  Collections.sort(segments);
  for (DataSegment segment : segments) {
    writeSegmentToCache(segment);
  }
  checkCache(segments);
  Assert.assertTrue(serverManager.getDataSourceCounts().isEmpty());

  zkCoordinator.start();
  Assert.assertFalse(serverManager.getDataSourceCounts().isEmpty());
  for (int i = 0; i < COUNT; ++i) {
    Assert.assertEquals(3L, serverManager.getDataSourceCounts().get("test" + i).longValue());
    Assert.assertEquals(2L, serverManager.getDataSourceCounts().get("test_two" + i).longValue());
  }
  Assert.assertEquals(5 * COUNT, announceCount.get());
  zkCoordinator.stop();

  for (DataSegment segment : segments) {
    deleteSegmentFromCache(segment);
  }
  Assert.assertEquals(0, infoDir.listFiles().length);
  Assert.assertTrue(infoDir.delete());
}
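The makeSegment helper is not included in this snippet. A minimal sketch of what such a factory might look like, using the DataSegment builder; the helper body, the placeholder loadSpec, and the zero size are assumptions for illustration, not the project's actual implementation:

// Hypothetical sketch of the makeSegment helper used above (not the
// project's actual code): builds a DataSegment for the given dataSource,
// version and interval via DataSegment.builder().
private DataSegment makeSegment(String dataSource, String version, Interval interval) {
  return DataSegment.builder()
                    .dataSource(dataSource)
                    .interval(interval)
                    .version(version)
                    .loadSpec(ImmutableMap.<String, Object>of("type", "test")) // placeholder loadSpec
                    .size(0) // size is irrelevant for this test
                    .build();
}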
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class ZkCoordinatorTest, method checkCache:
private void checkCache(List<DataSegment> segments) throws IOException {
  Assert.assertTrue(infoDir.exists());
  File[] files = infoDir.listFiles();

  List<File> sortedFiles = Lists.newArrayList(files);
  Collections.sort(sortedFiles);

  Assert.assertEquals(segments.size(), sortedFiles.size());
  for (int i = 0; i < sortedFiles.size(); i++) {
    DataSegment segment = jsonMapper.readValue(sortedFiles.get(i), DataSegment.class);
    Assert.assertEquals(segments.get(i), segment);
  }
}
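The writeSegmentToCache counterpart is also not shown. Under the assumption that the info dir simply holds one JSON-serialized DataSegment per file, which is the shape checkCache reads back above, a minimal sketch might look like this (the helper body and the identifier-based file naming are assumptions):

// Hypothetical counterpart to checkCache (not the project's actual code):
// serializes each segment as JSON into its own file under infoDir, so that
// checkCache can deserialize each file and compare it to the original.
private void writeSegmentToCache(DataSegment segment) throws IOException {
  File segmentInfoFile = new File(infoDir, segment.getIdentifier());
  jsonMapper.writeValue(segmentInfoFile, segment);
}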
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class LocalDataSegmentFinderTest, method testFindSegments:
@Test
public void testFindSegments() throws SegmentLoadingException, IOException {
  final LocalDataSegmentFinder localDataSegmentFinder = new LocalDataSegmentFinder(mapper);
  final Set<DataSegment> segments = localDataSegmentFinder.findSegments(dataSourceDir.getAbsolutePath(), false);
  Assert.assertEquals(5, segments.size());

  DataSegment updatedSegment1 = null;
  DataSegment updatedSegment2 = null;
  DataSegment updatedSegment3 = null;
  DataSegment updatedSegment4_0 = null;
  DataSegment updatedSegment4_1 = null;
  for (DataSegment dataSegment : segments) {
    if (dataSegment.getIdentifier().equals(SEGMENT_1.getIdentifier())) {
      updatedSegment1 = dataSegment;
    } else if (dataSegment.getIdentifier().equals(SEGMENT_2.getIdentifier())) {
      updatedSegment2 = dataSegment;
    } else if (dataSegment.getIdentifier().equals(SEGMENT_3.getIdentifier())) {
      updatedSegment3 = dataSegment;
    } else if (dataSegment.getIdentifier().equals(SEGMENT_4_0.getIdentifier())) {
      updatedSegment4_0 = dataSegment;
    } else if (dataSegment.getIdentifier().equals(SEGMENT_4_1.getIdentifier())) {
      updatedSegment4_1 = dataSegment;
    } else {
      Assert.fail("Unexpected segment");
    }
  }
  Assert.assertEquals(descriptor1.getAbsolutePath(), getDescriptorPath(updatedSegment1));
  Assert.assertEquals(descriptor2.getAbsolutePath(), getDescriptorPath(updatedSegment2));
  Assert.assertEquals(descriptor3.getAbsolutePath(), getDescriptorPath(updatedSegment3));
  Assert.assertEquals(descriptor4_0.getAbsolutePath(), getDescriptorPath(updatedSegment4_0));
  Assert.assertEquals(descriptor4_1.getAbsolutePath(), getDescriptorPath(updatedSegment4_1));

  final String serializedSegment1 = mapper.writeValueAsString(updatedSegment1);
  final String serializedSegment2 = mapper.writeValueAsString(updatedSegment2);
  final String serializedSegment3 = mapper.writeValueAsString(updatedSegment3);
  final String serializedSegment4_0 = mapper.writeValueAsString(updatedSegment4_0);
  final String serializedSegment4_1 = mapper.writeValueAsString(updatedSegment4_1);

  // since updateDescriptor was not enabled, descriptor.json still has stale information
  Assert.assertNotEquals(serializedSegment1, FileUtils.readFileToString(descriptor1));
  Assert.assertNotEquals(serializedSegment2, FileUtils.readFileToString(descriptor2));
  Assert.assertNotEquals(serializedSegment3, FileUtils.readFileToString(descriptor3));
  Assert.assertNotEquals(serializedSegment4_0, FileUtils.readFileToString(descriptor4_0));
  Assert.assertNotEquals(serializedSegment4_1, FileUtils.readFileToString(descriptor4_1));

  // enable updateDescriptor so that descriptor.json is updated to reflect the new loadSpec
  final Set<DataSegment> segments2 = localDataSegmentFinder.findSegments(dataSourceDir.getAbsolutePath(), true);
  Assert.assertEquals(segments, segments2);
  Assert.assertEquals(serializedSegment1, FileUtils.readFileToString(descriptor1));
  Assert.assertEquals(serializedSegment2, FileUtils.readFileToString(descriptor2));
  Assert.assertEquals(serializedSegment3, FileUtils.readFileToString(descriptor3));
  Assert.assertEquals(serializedSegment4_0, FileUtils.readFileToString(descriptor4_0));
  Assert.assertEquals(serializedSegment4_1, FileUtils.readFileToString(descriptor4_1));
}
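The getDescriptorPath helper is not shown in this snippet. A plausible sketch, assuming a local loadSpec whose "path" entry points at index.zip with descriptor.json sitting next to it (the helper body is an assumption, not quoted from the test class):

// Hypothetical sketch of getDescriptorPath (not the project's actual code):
// derives the descriptor.json location from the segment's loadSpec, which
// for a local segment points at the index.zip in the same directory.
private String getDescriptorPath(DataSegment segment) {
  final File indexZip = new File(String.valueOf(segment.getLoadSpec().get("path")));
  return new File(indexZip.getParentFile(), "descriptor.json").getAbsolutePath();
}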
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class LocalDataSegmentPusherTest, method testPush:
@Test
public void testPush() throws IOException {
  /* dataSegment - used to build the loadSpec and the outDir (the local deep-storage location in this case).
     dataSegmentFiles - the directory holding the segment files (version.bin, meta.smoosh and xxxxx.smoosh). */
  final DataSegment dataSegment2 = dataSegment.withVersion("v2");
  DataSegment returnSegment1 = localDataSegmentPusher.push(dataSegmentFiles, dataSegment);
  DataSegment returnSegment2 = localDataSegmentPusher.push(dataSegmentFiles, dataSegment2);

  Assert.assertNotNull(returnSegment1);
  Assert.assertEquals(dataSegment, returnSegment1);
  Assert.assertNotNull(returnSegment2);
  Assert.assertEquals(dataSegment2, returnSegment2);
  Assert.assertNotEquals(DataSegmentPusherUtil.getStorageDir(dataSegment), DataSegmentPusherUtil.getStorageDir(dataSegment2));

  for (DataSegment returnSegment : ImmutableList.of(returnSegment1, returnSegment2)) {
    File outDir = new File(config.getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(returnSegment));
    File indexZip = new File(outDir, "index.zip");
    File descriptorJson = new File(outDir, "descriptor.json");
    Assert.assertTrue(indexZip.exists());
    Assert.assertTrue(descriptorJson.exists());
  }
}
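For reference, a sketch of the storage-dir convention the assertNotEquals above relies on, assuming the conventional dataSource/interval/version/partitionNum layout; this is an illustration, not quoted from DataSegmentPusherUtil:

// Sketch of the assumed storage-dir layout (not DataSegmentPusherUtil's
// actual code): the path encodes dataSource, interval, version and partition
// number, so bumping the version to "v2" is enough to land the two pushes
// in distinct directories.
static String storageDirSketch(DataSegment segment) {
  return String.join("/",
      segment.getDataSource(),
      segment.getInterval().getStart() + "_" + segment.getInterval().getEnd(),
      segment.getVersion(),
      String.valueOf(segment.getShardSpec().getPartitionNum()));
}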
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class SegmentLoaderLocalCacheManagerTest, method testRetrySuccessAtSecondLocation:
@Test
public void testRetrySuccessAtSecondLocation() throws Exception {
  final List<StorageLocationConfig> locations = Lists.newArrayList();

  final StorageLocationConfig locationConfig = new StorageLocationConfig();
  final File localStorageFolder = tmpFolder.newFolder("local_storage_folder");
  // mock can't write in first location
  localStorageFolder.setWritable(false);
  locationConfig.setPath(localStorageFolder);
  locationConfig.setMaxSize(1000000000L);
  locations.add(locationConfig);

  final StorageLocationConfig locationConfig2 = new StorageLocationConfig();
  final File localStorageFolder2 = tmpFolder.newFolder("local_storage_folder2");
  locationConfig2.setPath(localStorageFolder2);
  locationConfig2.setMaxSize(10000000L);
  locations.add(locationConfig2);

  manager = new SegmentLoaderLocalCacheManager(
      TestHelper.getTestIndexIO(),
      new SegmentLoaderConfig().withLocations(locations),
      jsonMapper
  );

  final File segmentSrcFolder = tmpFolder.newFolder("segmentSrcFolder");
  final DataSegment segmentToDownload = dataSegmentWithInterval("2014-10-20T00:00:00Z/P1D").withLoadSpec(
      ImmutableMap.<String, Object>of(
          "type", "local",
          "path", segmentSrcFolder.getCanonicalPath()
                  + "/test_segment_loader"
                  + "/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z"
                  + "/0/index.zip"
      )
  );
  // manually create a local segment under segmentSrcFolder
  final File localSegmentFile = new File(
      segmentSrcFolder,
      "test_segment_loader/2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z/2015-05-27T03:38:35.683Z/0"
  );
  localSegmentFile.mkdirs();
  final File indexZip = new File(localSegmentFile, "index.zip");
  indexZip.createNewFile();

  Assert.assertFalse("Expect cache miss before downloading segment", manager.isSegmentLoaded(segmentToDownload));
  File segmentFile = manager.getSegmentFiles(segmentToDownload);
  // the first location is read-only, so the manager falls back to the second one
  Assert.assertTrue(segmentFile.getAbsolutePath().contains("/local_storage_folder2/"));
  Assert.assertTrue("Expect cache hit after downloading segment", manager.isSegmentLoaded(segmentToDownload));

  manager.cleanup(segmentToDownload);
  Assert.assertFalse("Expect cache miss after dropping segment", manager.isSegmentLoaded(segmentToDownload));
}
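The dataSegmentWithInterval helper is not shown here. A minimal sketch under the assumption that it just builds a bare test segment for the given interval, with the dataSource and version inferred from the paths used in the test above; the helper body itself is an assumption, and the loadSpec is attached afterwards via withLoadSpec:

// Hypothetical sketch of dataSegmentWithInterval (not the project's actual
// code): builds a bare test segment for the given interval string; the
// local loadSpec is attached later via withLoadSpec, as in the test above.
private DataSegment dataSegmentWithInterval(String intervalStr) {
  return DataSegment.builder()
                    .dataSource("test_segment_loader")
                    .interval(new Interval(intervalStr))
                    .version("2015-05-27T03:38:35.683Z")
                    .size(0) // size is irrelevant for this test
                    .build();
}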