Example 96 with DataSegment

use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

the class HttpIndexingServiceClientTest method testCompact.

@Test
public void testCompact() throws Exception {
    DataSegment segment = new DataSegment(
            "test",
            Intervals.of("2015-04-12/2015-04-13"),
            "1",
            ImmutableMap.of("bucket", "bucket", "path", "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"),
            null,
            null,
            NoneShardSpec.instance(),
            0,
            1);
    Capture captureTask = EasyMock.newCapture();
    HttpResponse response = EasyMock.createMock(HttpResponse.class);
    EasyMock.expect(response.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
    EasyMock.expect(response.getContent()).andReturn(new BigEndianHeapChannelBuffer(0));
    EasyMock.replay(response);
    StringFullResponseHolder responseHolder = new StringFullResponseHolder(response, StandardCharsets.UTF_8)
            .addChunk(jsonMapper.writeValueAsString(ImmutableMap.of("task", "aaa")));
    EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/task"))
            .andReturn(new Request(HttpMethod.POST, new URL("http://localhost:8090/druid/indexer/v1/task")))
            .anyTimes();
    EasyMock.expect(druidLeaderClient.go(EasyMock.anyObject(Request.class)))
            .andReturn(responseHolder)
            .anyTimes();
    EasyMock.expect(mockMapper.writeValueAsBytes(EasyMock.capture(captureTask)))
            .andReturn(new byte[] { 1, 2, 3 })
            .anyTimes();
    EasyMock.expect(mockMapper.readValue(EasyMock.anyString(), EasyMock.eq(JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)))
            .andReturn(ImmutableMap.of())
            .anyTimes();
    EasyMock.replay(druidLeaderClient, mockMapper);
    HttpIndexingServiceClient httpIndexingServiceClient = new HttpIndexingServiceClient(mockMapper, druidLeaderClient);
    try {
        httpIndexingServiceClient.compactSegments("test-compact", ImmutableList.of(segment), 50, null, null, null, null, null, null, null);
    } catch (Exception e) {
        // Ignore the IllegalStateException: the taskId is generated internally, so the returned task id will fail the check
        Assert.assertEquals(IllegalStateException.class.getName(), e.getCause().getClass().getName());
    }
    ClientCompactionTaskQuery taskQuery = (ClientCompactionTaskQuery) captureTask.getValue();
    Assert.assertNull(taskQuery.getIoConfig().getInputSpec().getSha256OfSortedSegmentIds());
}
Also used : StringFullResponseHolder(org.apache.druid.java.util.http.client.response.StringFullResponseHolder) Request(org.apache.druid.java.util.http.client.Request) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) BigEndianHeapChannelBuffer(org.jboss.netty.buffer.BigEndianHeapChannelBuffer) DataSegment(org.apache.druid.timeline.DataSegment) Capture(org.easymock.Capture) URL(java.net.URL) ExpectedException(org.junit.rules.ExpectedException) Test(org.junit.Test)
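
The nine positional constructor arguments above are easy to misread: they are (dataSource, interval, version, loadSpec, dimensions, metrics, shardSpec, binaryVersion, size). A minimal sketch, not part of the original test, building the same segment with DataSegment.builder() so each argument is named:

    // Same segment as above, but with every constructor argument named.
    DataSegment segment = DataSegment.builder()
            .dataSource("test")
            .interval(Intervals.of("2015-04-12/2015-04-13"))
            .version("1")
            .loadSpec(ImmutableMap.of(
                    "bucket", "bucket",
                    "path", "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"))
            .shardSpec(NoneShardSpec.instance())
            .binaryVersion(0)
            .size(1)
            .build();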

Example 97 with DataSegment

use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

the class MetadataSegmentView method poll.

private void poll() {
    log.info("polling published segments from coordinator");
    final JsonParserIterator<SegmentWithOvershadowedStatus> metadataSegments =
            getMetadataSegments(coordinatorDruidLeaderClient, jsonMapper, segmentWatcherConfig.getWatchedDataSources());
    final ImmutableSortedSet.Builder<SegmentWithOvershadowedStatus> builder = ImmutableSortedSet.naturalOrder();
    while (metadataSegments.hasNext()) {
        final SegmentWithOvershadowedStatus segment = metadataSegments.next();
        final DataSegment interned = DataSegmentInterner.intern(segment.getDataSegment());
        final SegmentWithOvershadowedStatus segmentWithOvershadowedStatus = new SegmentWithOvershadowedStatus(interned, segment.isOvershadowed());
        builder.add(segmentWithOvershadowedStatus);
    }
    publishedSegments = builder.build();
    cachePopulated.countDown();
}
Also used : SegmentWithOvershadowedStatus(org.apache.druid.timeline.SegmentWithOvershadowedStatus) ImmutableSortedSet(com.google.common.collect.ImmutableSortedSet) DataSegment(org.apache.druid.timeline.DataSegment)
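
DataSegmentInterner.intern is the point of this example: every poll deserializes a fresh DataSegment for each published segment, and interning collapses those equal copies into one canonical instance so repeated polls do not grow the heap. A minimal sketch of the pattern, assuming Guava's Interners (SegmentInternerSketch is illustrative, not Druid API):

import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import org.apache.druid.timeline.DataSegment;

public class SegmentInternerSketch {
    // A weak interner hands back the canonical instance for any DataSegment
    // equal to one seen before; unreferenced duplicates can then be GC'd.
    private static final Interner<DataSegment> INTERNER = Interners.newWeakInterner();

    public static DataSegment intern(DataSegment segment) {
        return INTERNER.intern(segment);
    }
}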

Example 98 with DataSegment

use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

the class BroadcastSegmentIndexedTableTest method setup.

@Before
public void setup() throws IOException, SegmentLoadingException {
    final ObjectMapper mapper = new DefaultObjectMapper();
    mapper.registerModule(new SegmentizerModule());
    final IndexIO indexIO = new IndexIO(mapper, () -> 0);
    mapper.setInjectableValues(new InjectableValues.Std()
            .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
            .addValue(ObjectMapper.class.getName(), mapper)
            .addValue(IndexIO.class, indexIO)
            .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT));
    final IndexMerger indexMerger = new IndexMergerV9(mapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
    Interval testInterval = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
    IncrementalIndex data = TestIndex.makeRealtimeIndex("druid.sample.numeric.tsv");
    File segment = new File(temporaryFolder.newFolder(), "segment");
    File persisted = indexMerger.persist(data, testInterval, segment, new IndexSpec(), null);
    File factoryJson = new File(persisted, "factory.json");
    Assert.assertTrue(factoryJson.exists());
    SegmentizerFactory factory = mapper.readValue(factoryJson, SegmentizerFactory.class);
    Assert.assertTrue(factory instanceof MMappedQueryableSegmentizerFactory);
    DataSegment dataSegment = new DataSegment(
            DATASOURCE, testInterval, DateTimes.nowUtc().toString(),
            ImmutableMap.of(), columnNames, ImmutableList.of(),
            null, null, segment.getTotalSpace());
    backingSegment = (QueryableIndexSegment) factory.factorize(dataSegment, segment, false, SegmentLazyLoadFailCallback.NOOP);
    columnNames = ImmutableList.<String>builder().add(ColumnHolder.TIME_COLUMN_NAME).addAll(backingSegment.asQueryableIndex().getColumnNames()).build();
    broadcastTable = new BroadcastSegmentIndexedTable(backingSegment, keyColumns, dataSegment.getVersion());
}
Also used : IndexMerger(org.apache.druid.segment.IndexMerger) IndexSpec(org.apache.druid.segment.IndexSpec) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) IndexMergerV9(org.apache.druid.segment.IndexMergerV9) MMappedQueryableSegmentizerFactory(org.apache.druid.segment.loading.MMappedQueryableSegmentizerFactory) SegmentizerFactory(org.apache.druid.segment.loading.SegmentizerFactory) InjectableValues(com.fasterxml.jackson.databind.InjectableValues) DataSegment(org.apache.druid.timeline.DataSegment) TestExprMacroTable(org.apache.druid.query.expression.TestExprMacroTable) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) IndexIO(org.apache.druid.segment.IndexIO) SegmentizerModule(org.apache.druid.jackson.SegmentizerModule) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) File(java.io.File) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Interval(org.joda.time.Interval) Before(org.junit.Before)
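
The factory.json round trip above is how Druid decides at load time how to open a persisted segment directory: persist() writes the file, and the deserialized SegmentizerFactory performs the mapping. For the default MMappedQueryableSegmentizerFactory this is essentially a direct mmap; a minimal sketch, assuming the indexIO instance and segment directory from setup():

    // Memory-map the persisted directory directly instead of going through
    // factory.json; SegmentId.dummy supplies a placeholder identifier.
    QueryableIndex index = indexIO.loadIndex(segment);
    QueryableIndexSegment mapped = new QueryableIndexSegment(index, SegmentId.dummy(DATASOURCE));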

Example 99 with DataSegment

use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

the class SegmentPublisherHelperTest method testAnnotateCorePartitionSetSizeForSingleDimensionShardSpec.

@Test
public void testAnnotateCorePartitionSetSizeForSingleDimensionShardSpec() {
    final Set<DataSegment> segments = ImmutableSet.of(
            newSegment(new BuildingSingleDimensionShardSpec(0, "dim", null, "ccc", 0)),
            newSegment(new BuildingSingleDimensionShardSpec(1, "dim", null, "ccc", 1)),
            newSegment(new BuildingSingleDimensionShardSpec(2, "dim", null, "ccc", 2)));
    final Set<DataSegment> annotated = SegmentPublisherHelper.annotateShardSpec(segments);
    for (DataSegment segment : annotated) {
        Assert.assertSame(SingleDimensionShardSpec.class, segment.getShardSpec().getClass());
        final SingleDimensionShardSpec shardSpec = (SingleDimensionShardSpec) segment.getShardSpec();
        Assert.assertEquals(3, shardSpec.getNumCorePartitions());
    }
}
Also used : BuildingSingleDimensionShardSpec(org.apache.druid.timeline.partition.BuildingSingleDimensionShardSpec) DataSegment(org.apache.druid.timeline.DataSegment) SingleDimensionShardSpec(org.apache.druid.timeline.partition.SingleDimensionShardSpec) Test(org.junit.Test)
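
annotateShardSpec can fill in the core partition set size only after all segments of the time chunk are collected, which is why the building specs exist at all. A minimal sketch, not from the test, of what the annotation amounts to for a single spec (assuming BuildingShardSpec#convert, which the building specs expose):

    BuildingSingleDimensionShardSpec building =
            new BuildingSingleDimensionShardSpec(1, "dim", null, "ccc", 1);
    // Once the set size (here 3) is known, the building spec becomes final:
    SingleDimensionShardSpec finalSpec = building.convert(3);
    // finalSpec.getNumCorePartitions() == 3, matching the assertions above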

Example 100 with DataSegment

use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

the class SegmentPublisherHelperTest method testAnnotateCorePartitionSetSizeForHashNumberedShardSpec.

@Test
public void testAnnotateCorePartitionSetSizeForHashNumberedShardSpec() {
    final Set<DataSegment> segments = ImmutableSet.of(
            newSegment(new BuildingHashBasedNumberedShardSpec(0, 0, 3, null, HashPartitionFunction.MURMUR3_32_ABS, new ObjectMapper())),
            newSegment(new BuildingHashBasedNumberedShardSpec(1, 1, 3, null, HashPartitionFunction.MURMUR3_32_ABS, new ObjectMapper())),
            newSegment(new BuildingHashBasedNumberedShardSpec(2, 2, 3, null, HashPartitionFunction.MURMUR3_32_ABS, new ObjectMapper())));
    final Set<DataSegment> annotated = SegmentPublisherHelper.annotateShardSpec(segments);
    for (DataSegment segment : annotated) {
        Assert.assertSame(HashBasedNumberedShardSpec.class, segment.getShardSpec().getClass());
        final HashBasedNumberedShardSpec shardSpec = (HashBasedNumberedShardSpec) segment.getShardSpec();
        Assert.assertEquals(3, shardSpec.getNumCorePartitions());
    }
}
Also used : HashBasedNumberedShardSpec(org.apache.druid.timeline.partition.HashBasedNumberedShardSpec) BuildingHashBasedNumberedShardSpec(org.apache.druid.timeline.partition.BuildingHashBasedNumberedShardSpec) DataSegment(org.apache.druid.timeline.DataSegment) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)
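
The three specs above differ only in their partition and bucket ids. A minimal sketch, not from the test, generating the same set in a loop so the shared bucket count of 3 appears once (newSegment is the test's own helper; IntStream and Collectors are from java.util.stream):

    // Illustrative only: build the same set with a stream, reusing one mapper.
    final ObjectMapper shardSpecMapper = new ObjectMapper();
    final Set<DataSegment> segments = IntStream.range(0, 3)
            .mapToObj(i -> newSegment(new BuildingHashBasedNumberedShardSpec(
                    i, i, 3, null, HashPartitionFunction.MURMUR3_32_ABS, shardSpecMapper)))
            .collect(Collectors.toSet());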

Aggregations

DataSegment (org.apache.druid.timeline.DataSegment): 612
Test (org.junit.Test): 386
ArrayList (java.util.ArrayList): 161
Interval (org.joda.time.Interval): 158
File (java.io.File): 138
Map (java.util.Map): 110
List (java.util.List): 108
ImmutableList (com.google.common.collect.ImmutableList): 77
IOException (java.io.IOException): 77
HashMap (java.util.HashMap): 74
ImmutableMap (com.google.common.collect.ImmutableMap): 72
NumberedShardSpec (org.apache.druid.timeline.partition.NumberedShardSpec): 68
HashSet (java.util.HashSet): 58
TaskStatus (org.apache.druid.indexer.TaskStatus): 53
Collectors (java.util.stream.Collectors): 52
Set (java.util.Set): 50
CountDownLatch (java.util.concurrent.CountDownLatch): 50
ISE (org.apache.druid.java.util.common.ISE): 50
SegmentId (org.apache.druid.timeline.SegmentId): 47
LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 45