Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From the class QueryRunnerBasedOnClusteredClientTestBase, method prepareCluster.
/**
 * Spins up {@code numServers} simple servers, each announcing one segment. Servers are paired:
 * every two consecutive servers serve the two partitions of the same one-hour interval starting
 * at 2000-01-01T00.
 */
protected void prepareCluster(int numServers) {
// Two partitions per hourly interval, so 24 servers cover at most 12 hours of 2000-01-01.
Preconditions.checkArgument(numServers < 25, "Cannot be larger than 24");
for (int serverIndex = 0; serverIndex < numServers; serverIndex++) {
final int partitionNum = serverIndex % 2;
final int hourIndex = serverIndex / 2;
final Interval hourlyInterval = Intervals.of("2000-01-01T%02d/PT1H", hourIndex);
final DataSegment dataSegment = newSegment(hourlyInterval, partitionNum, 2);
// Server ids are 1-based.
addServer(SimpleServerView.createServer(serverIndex + 1), dataSegment, generateSegment(dataSegment));
}
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From the class RetryQueryRunnerTest, method dropSegmentFromServerAndAddNewServerForSegment.
/**
 * Unannounces one segment from {@code fromServer} and registers a brand-new server (id 11) that
 * serves that same segment and queryable index. Updates the server view as a side effect.
 */
private void dropSegmentFromServerAndAddNewServerForSegment(DruidServer fromServer) {
final NonnullPair<DataSegment, QueryableIndex> dropped = unannounceSegmentFromServer(fromServer);
// Re-announce the dropped segment (lhs) and its index (rhs) on a fresh server.
addServer(SimpleServerView.createServer(11), dropped.lhs, dropped.rhs);
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From the class OmniDataSegmentKillerTest, method testKillSegmentUnknowType (the "Unknow" spelling is in the original method name).
/**
 * Verifies that killing a segment whose loadSpec "type" has no registered killer fails with a
 * {@link SegmentLoadingException} carrying a descriptive message.
 */
@Test
public void testKillSegmentUnknowType() {
// A segment whose loadSpec declares a type no killer is bound for.
final DataSegment segment = Mockito.mock(DataSegment.class);
Mockito.when(segment.getLoadSpec()).thenReturn(ImmutableMap.of("type", "unknown-type"));
final Injector injector = createInjector(null);
final OmniDataSegmentKiller segmentKiller = injector.getInstance(OmniDataSegmentKiller.class);
// BUG FIX: the original passed the expected exception text as assertThrows' first String
// parameter, which is the *failure* message shown when no exception is thrown — the thrown
// message was never actually verified. Capture the exception and check its message explicitly.
final SegmentLoadingException e = Assert.assertThrows(
SegmentLoadingException.class,
() -> segmentKiller.kill(segment)
);
// NOTE(review): message taken verbatim from the original assertion text — confirm it matches
// OmniDataSegmentKiller's exact format.
Assert.assertEquals("Unknown loader type[unknown-type]. Known types are [explode]", e.getMessage());
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From the class SegmentLocalCacheManagerConcurrencyTest, method testGetSegment.
/**
 * Submits four concurrent segment-load requests and expects every one to fail: the segments'
 * load specs point at directories that do not contain valid segment files, so
 * {@code manager.getSegmentFiles} throws a {@link SegmentLoadingException}, surfaced through the
 * futures as an {@link ExecutionException}.
 */
@Test
public void testGetSegment() throws IOException, ExecutionException, InterruptedException {
final File localStorageFolder = tmpFolder.newFolder("local_storage_folder");
final List<DataSegment> segmentsToLoad = new ArrayList<>(4);
final Interval interval = Intervals.of("2019-01-01/P1D");
for (int partitionId = 0; partitionId < 4; partitionId++) {
final String segmentPath = Paths.get(localStorageFolder.getCanonicalPath(), dataSource, StringUtils.format("%s_%s", interval.getStart().toString(), interval.getEnd().toString()), segmentVersion, String.valueOf(partitionId)).toString();
// NOTE(review): segmentPath is already absolute (it starts from localStorageFolder's
// canonical path), yet it is passed as the child of localStorageFolder below, which nests
// the storage path inside itself. This looks deliberate — the test expects loading to fail
// with "Failed to load segment" — confirm before "fixing" the path construction.
// manually create a local segment under localStorageFolder
final File localSegmentFile = new File(localStorageFolder, segmentPath);
FileUtils.mkdirp(localSegmentFile);
final File indexZip = new File(localSegmentFile, "index.zip");
// Return value intentionally ignored: the temp folder is fresh, so the file cannot pre-exist.
indexZip.createNewFile();
final DataSegment segment = newSegment(interval, partitionId).withLoadSpec(ImmutableMap.of("type", "local", "path", localSegmentFile.getAbsolutePath()));
segmentsToLoad.add(segment);
}
// Kick off all loads concurrently. Use Future<?> rather than the raw Future type.
final List<Future<?>> futures = new ArrayList<>(segmentsToLoad.size());
for (DataSegment segment : segmentsToLoad) {
futures.add(executorService.submit(() -> manager.getSegmentFiles(segment)));
}
// The first future.get() is expected to throw; expectations are set before waiting.
expectedException.expect(ExecutionException.class);
expectedException.expectCause(CoreMatchers.instanceOf(SegmentLoadingException.class));
expectedException.expectMessage("Failed to load segment");
for (Future<?> future : futures) {
future.get();
}
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From the class SqlSegmentsMetadataManagerTest, method testMarkAsUnusedAllSegmentsInDataSource.
/**
 * Verifies that marking every segment of a datasource as unused removes that datasource from the
 * set of datasources with used segments.
 */
@Test(timeout = 60_000)
public void testMarkAsUnusedAllSegmentsInDataSource() throws IOException, InterruptedException {
sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
sqlSegmentsMetadataManager.poll();
Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
// Publish a single segment for a brand-new datasource and wait for a poll to pick it up.
final String dataSourceName = "wikipedia2";
final DataSegment publishedSegment = createNewSegment1(dataSourceName);
publisher.publishSegment(publishedSegment);
awaitDataSourceAppeared(dataSourceName);
// Exactly the one published segment should flip to unused.
Assert.assertEquals(1, sqlSegmentsMetadataManager.markAsUnusedAllSegmentsInDataSource(dataSourceName));
// With no used segments left, the datasource should vanish from the used-segment view.
awaitDataSourceDisappeared(dataSourceName);
Assert.assertNull(sqlSegmentsMetadataManager.getImmutableDataSourceWithUsedSegments(dataSourceName));
}
Aggregations