Usage of org.apache.druid.segment.loading.SegmentLoaderConfig in the druid-io/druid project.
From class IndexTaskTest, the setup method:
@Before
public void setup() throws IOException {
  appenderatorsManager = new TestAppenderatorsManager();

  // Root the segment cache in a fresh temp directory so each test run is isolated.
  final File segmentCacheDir = temporaryFolder.newFolder();

  // Loader config exposing a single storage location at the temp dir
  // (null size / null freeSpacePercent → the config's defaults apply).
  final SegmentLoaderConfig loaderConfig = new SegmentLoaderConfig() {
    @Override
    public List<StorageLocationConfig> getLocations() {
      return Collections.singletonList(new StorageLocationConfig(segmentCacheDir, null, null));
    }
  };

  segmentCacheManager = new SegmentLocalCacheManager(loaderConfig, jsonMapper);
  taskRunner = new TestTaskRunner();
}
Usage of org.apache.druid.segment.loading.SegmentLoaderConfig in the druid-io/druid project.
From class DruidServerConfigTest, the testServerMaxSizePrecedence method:
@Test
public void testServerMaxSizePrecedence() throws Exception {
  // maxSize of 0 in the JSON: the effective max size should fall back to the
  // total size of the injected SegmentLoaderConfig storage locations.
  String serverConfigWithDefaultSizeStr = "{\"maxSize\":0,\"tier\":\"_default_tier\",\"priority\":0," + "\"hiddenProperties\":[\"druid.metadata.storage.connector.password\"," + "\"druid.s3.accessKey\",\"druid.s3.secretKey\"]}\n";
  // Explicit non-zero maxSize: should take precedence over the storage-location total.
  String serverConfigWithNonDefaultSizeStr = "{\"maxSize\":123456,\"tier\":\"_default_tier\",\"priority\":0," + "\"hiddenProperties\":[\"druid.metadata.storage.connector.password\"," + "\"druid.s3.accessKey\",\"druid.s3.secretKey\"]}\n";

  // Inject a SegmentLoaderConfig with one 10 GB storage location so that
  // deserialization can derive a max size from it.
  final List<StorageLocationConfig> locations = new ArrayList<>();
  final StorageLocationConfig locationConfig1 = new StorageLocationConfig(testSegmentCacheDir1, 10000000000L, null);
  locations.add(locationConfig1);
  mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper()).addValue(SegmentLoaderConfig.class, new SegmentLoaderConfig().withLocations(locations)));

  // Round-trip (deserialize → serialize → deserialize) so the assertions also
  // verify the derived/explicit maxSize survives serialization.
  DruidServerConfig serverConfigWithDefaultSize = mapper.readValue(mapper.writeValueAsString(mapper.readValue(serverConfigWithDefaultSizeStr, DruidServerConfig.class)), DruidServerConfig.class);
  DruidServerConfig serverConfigWithNonDefaultSize = mapper.readValue(mapper.writeValueAsString(mapper.readValue(serverConfigWithNonDefaultSizeStr, DruidServerConfig.class)), DruidServerConfig.class);

  // Fix: JUnit's assertEquals takes (expected, actual); the original had the
  // arguments swapped, which produces misleading failure messages.
  Assert.assertEquals(10000000000L, serverConfigWithDefaultSize.getMaxSize());
  Assert.assertEquals(123456L, serverConfigWithNonDefaultSize.getMaxSize());
}
Usage of org.apache.druid.segment.loading.SegmentLoaderConfig in the druid-io/druid project.
From class SegmentLoadDropHandlerTest, the setUp method:
@Before
public void setUp() throws IOException {
  // Create the on-disk storage location used for segment info files.
  // NOTE(review): the checked IOException is wrapped in a RuntimeException even
  // though the method also declares `throws IOException`; the wrapping is
  // redundant but preserved as-is.
  try {
    testStorageLocation = new TestStorageLocation(temporaryFolder);
    infoDir = testStorageLocation.getInfoDir();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }

  // Single storage location shared by the loader configs built below.
  locations = Collections.singletonList(testStorageLocation.toStorageLocationConfig());

  // Collects runnables handed to the stubbed scheduler (see
  // scheduledExecutorFactory below) so tests can execute them explicitly.
  scheduledRunnable = new ArrayList<>();

  segmentCacheManager = new CacheTestSegmentCacheManager();
  segmentLoader = new CacheTestSegmentLoader();
  segmentManager = new SegmentManager(segmentLoader);

  // Recording announcer: tracks exactly which segments are currently announced
  // and keeps a running net announce/unannounce count for assertions.
  segmentsAnnouncedByMe = new ConcurrentSkipListSet<>();
  announceCount = new AtomicInteger(0);
  announcer = new DataSegmentAnnouncer() {
    @Override
    public void announceSegment(DataSegment segment) {
      segmentsAnnouncedByMe.add(segment);
      announceCount.incrementAndGet();
    }

    @Override
    public void unannounceSegment(DataSegment segment) {
      segmentsAnnouncedByMe.remove(segment);
      announceCount.decrementAndGet();
    }

    @Override
    public void announceSegments(Iterable<DataSegment> segments) {
      for (DataSegment segment : segments) {
        segmentsAnnouncedByMe.add(segment);
      }
      announceCount.addAndGet(Iterables.size(segments));
    }

    @Override
    public void unannounceSegments(Iterable<DataSegment> segments) {
      for (DataSegment segment : segments) {
        segmentsAnnouncedByMe.remove(segment);
      }
      announceCount.addAndGet(-Iterables.size(segments));
    }
  };

  // Standard config: 5 loading threads, 50 ms announce interval, segments
  // dropped with no delay.
  segmentLoaderConfig = new SegmentLoaderConfig() {
    @Override
    public File getInfoDir() {
      return testStorageLocation.getInfoDir();
    }

    @Override
    public int getNumLoadingThreads() {
      return 5;
    }

    @Override
    public int getAnnounceIntervalMillis() {
      return 50;
    }

    @Override
    public List<StorageLocationConfig> getLocations() {
      return locations;
    }

    @Override
    public int getDropSegmentDelayMillis() {
      return 0;
    }
  };

  // Same as segmentLoaderConfig but with an announce interval of 0
  // (presumably disabling interval-based batch announcing — confirm against
  // SegmentLoadDropHandler).
  noAnnouncerSegmentLoaderConfig = new SegmentLoaderConfig() {
    @Override
    public File getInfoDir() {
      return testStorageLocation.getInfoDir();
    }

    @Override
    public int getNumLoadingThreads() {
      return 5;
    }

    @Override
    public int getAnnounceIntervalMillis() {
      return 0;
    }

    @Override
    public List<StorageLocationConfig> getLocations() {
      return locations;
    }

    @Override
    public int getDropSegmentDelayMillis() {
      return 0;
    }
  };

  // Config that overrides no getInfoDir()/getLocations(), so the
  // SegmentLoaderConfig defaults apply for those.
  segmentLoaderConfigNoLocations = new SegmentLoaderConfig() {
    @Override
    public int getNumLoadingThreads() {
      return 5;
    }

    @Override
    public int getAnnounceIntervalMillis() {
      return 50;
    }

    @Override
    public int getDropSegmentDelayMillis() {
      return 0;
    }
  };

  // Executor factory whose schedule() merely records the command in
  // scheduledRunnable instead of running it; tests invoke run() on each
  // captured runnable explicitly rather than relying on timing.
  scheduledExecutorFactory = new ScheduledExecutorFactory() {
    @Override
    public ScheduledExecutorService create(int corePoolSize, String nameFormat) {
      return new ScheduledThreadPoolExecutor(corePoolSize, Execs.makeThreadFactory(nameFormat)) {
        @Override
        public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
          scheduledRunnable.add(command);
          return null; // callers in these tests never use the returned future
        }
      };
    }
  };

  segmentLoadDropHandler = new SegmentLoadDropHandler(jsonMapper, segmentLoaderConfig, announcer, Mockito.mock(DataSegmentServerAnnouncer.class), segmentManager, segmentCacheManager, scheduledExecutorFactory.create(5, "SegmentLoadDropHandlerTest-[%d]"), new ServerTypeConfig(ServerType.HISTORICAL));
}
Usage of org.apache.druid.segment.loading.SegmentLoaderConfig in the druid-io/druid project.
From class SegmentLoadDropHandlerTest, the testStartStop method:
@Test
public void testStartStop() throws Exception {
  // Loader config for the handler under test: 5 loading threads, the shared
  // storage locations, and a 50 ms announce interval.
  final SegmentLoaderConfig loaderConfig = new SegmentLoaderConfig() {
    @Override
    public File getInfoDir() {
      return infoDir;
    }

    @Override
    public int getNumLoadingThreads() {
      return 5;
    }

    @Override
    public int getAnnounceIntervalMillis() {
      return 50;
    }

    @Override
    public List<StorageLocationConfig> getLocations() {
      return locations;
    }
  };
  final SegmentLoadDropHandler loadDropHandler = new SegmentLoadDropHandler(jsonMapper, loaderConfig, announcer, Mockito.mock(DataSegmentServerAnnouncer.class), segmentManager, segmentCacheManager, new ServerTypeConfig(ServerType.HISTORICAL));

  // Seed the info cache: 3 segments per "test<i>" datasource and 2 per
  // "test_two<i>", for COUNT values of i.
  final Set<DataSegment> cachedSegments = new HashSet<>();
  for (int i = 0; i < COUNT; ++i) {
    cachedSegments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-01")));
    cachedSegments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-02")));
    cachedSegments.add(makeSegment("test" + i, "2", Intervals.of("P1d/2011-04-02")));
    cachedSegments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-01")));
    cachedSegments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-02")));
  }
  for (DataSegment cached : cachedSegments) {
    testStorageLocation.writeSegmentInfoToCache(cached);
  }
  testStorageLocation.checkInfoCache(cachedSegments);

  // Before start() nothing is loaded; after start() the cached segments are
  // loaded and counted per datasource.
  Assert.assertTrue(segmentManager.getDataSourceCounts().isEmpty());
  loadDropHandler.start();
  Assert.assertFalse(segmentManager.getDataSourceCounts().isEmpty());
  for (int i = 0; i < COUNT; ++i) {
    Assert.assertEquals(3L, segmentManager.getDataSourceCounts().get("test" + i).longValue());
    Assert.assertEquals(2L, segmentManager.getDataSourceCounts().get("test_two" + i).longValue());
  }
  Assert.assertEquals(5 * COUNT, announceCount.get());
  loadDropHandler.stop();

  // Clean up the info cache and verify the directory is left empty.
  for (DataSegment cached : cachedSegments) {
    testStorageLocation.deleteSegmentInfoFromCache(cached);
  }
  Assert.assertEquals(0, infoDir.listFiles().length);
  Assert.assertTrue(infoDir.delete());
}
Aggregations