
Example 1 with RetryPolicyConfig

Use of org.apache.druid.indexing.common.RetryPolicyConfig in project druid by druid-io.

From the class ClientCompactionTaskQuerySerdeTest, method testCompactionTaskToClientCompactionTaskQuery.

@Test
public void testCompactionTaskToClientCompactionTaskQuery() throws IOException {
    final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
    final CompactionTask.Builder builder = new CompactionTask.Builder("datasource", new SegmentCacheManagerFactory(mapper), new RetryPolicyFactory(new RetryPolicyConfig()));
    final CompactionTask task = builder
        .inputSpec(new CompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"), true)
        .tuningConfig(new ParallelIndexTuningConfig(
            null, null, null, 40000, 2000L, null, null, null,
            new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
            new DynamicPartitionsSpec(100, 30000L),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
            2, null, null, 1000L,
            TmpFileSegmentWriteOutMediumFactory.instance(),
            null, 100, 5, 1000L, new Duration(3000L), 7, 1000, 100,
            null, null, null, null, null, null
        ))
        .granularitySpec(new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true))
        .dimensionsSpec(
            DimensionsSpec.builder()
                .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim")))
                .setDimensionExclusions(ImmutableList.of("__time", "val"))
                .build()
        )
        .metricsSpec(new AggregatorFactory[] { new CountAggregatorFactory("cnt") })
        .transformSpec(new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)))
        .build();
    final ClientCompactionTaskQuery expected = new ClientCompactionTaskQuery(
        task.getId(),
        "datasource",
        new ClientCompactionIOConfig(
            new ClientCompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"),
            true
        ),
        new ClientCompactionTaskQueryTuningConfig(
            100, 40000, 2000L, 30000L,
            new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
            new DynamicPartitionsSpec(100, 30000L),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS),
            new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO),
            2, 1000L,
            TmpFileSegmentWriteOutMediumFactory.instance(),
            100, 5, 1000L, new Duration(3000L), 7, 1000, 100
        ),
        new ClientCompactionTaskGranularitySpec(Granularities.DAY, Granularities.HOUR, true),
        new ClientCompactionTaskDimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
        new AggregatorFactory[] { new CountAggregatorFactory("cnt") },
        new ClientCompactionTaskTransformSpec(new SelectorDimFilter("dim1", "foo", null)),
        new HashMap<>()
    );
    final byte[] json = mapper.writeValueAsBytes(task);
    final ClientCompactionTaskQuery actual = (ClientCompactionTaskQuery) mapper.readValue(json, ClientTaskQuery.class);
    Assert.assertEquals(expected, actual);
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) ClientCompactionIOConfig(org.apache.druid.client.indexing.ClientCompactionIOConfig) ClientTaskQuery(org.apache.druid.client.indexing.ClientTaskQuery) ClientCompactionTaskQueryTuningConfig(org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig) SegmentsSplitHintSpec(org.apache.druid.data.input.SegmentsSplitHintSpec) ClientCompactionIntervalSpec(org.apache.druid.client.indexing.ClientCompactionIntervalSpec) DefaultBitmapSerdeFactory(org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) ClientCompactionTaskQuery(org.apache.druid.client.indexing.ClientCompactionTaskQuery) ParallelIndexTuningConfig(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) RetryPolicyConfig(org.apache.druid.indexing.common.RetryPolicyConfig) ClientCompactionIntervalSpec(org.apache.druid.client.indexing.ClientCompactionIntervalSpec) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) Duration(org.joda.time.Duration) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ClientCompactionTaskDimensionsSpec(org.apache.druid.client.indexing.ClientCompactionTaskDimensionsSpec) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)
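
In this test, RetryPolicyConfig is constructor plumbing for the CompactionTask.Builder rather than the behavior under test. For context, here is a minimal sketch of how a policy built from this config typically drives a retry loop; the methods makeRetryPolicy and getAndIncrementRetryDelay, and the null-on-exhaustion convention, are assumptions based on how RemoteTaskActionClient (Example 5 below) consumes the factory, not something shown on this page.

import org.apache.druid.indexing.common.RetryPolicy;
import org.apache.druid.indexing.common.RetryPolicyConfig;
import org.apache.druid.indexing.common.RetryPolicyFactory;
import org.joda.time.Duration;

import java.util.concurrent.Callable;

public class RetryLoopSketch {
    static <T> T callWithRetries(Callable<T> call) throws Exception {
        final RetryPolicy retryPolicy = new RetryPolicyFactory(new RetryPolicyConfig()).makeRetryPolicy();
        while (true) {
            try {
                return call.call();
            } catch (Exception e) {
                // Assumed behavior: the policy hands out the next backoff duration,
                // or null once its retry threshold is exceeded.
                final Duration delay = retryPolicy.getAndIncrementRetryDelay();
                if (delay == null) {
                    throw e;
                }
                Thread.sleep(delay.getMillis());
            }
        }
    }
}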

Example 2 with RetryPolicyConfig

Use of org.apache.druid.indexing.common.RetryPolicyConfig in project druid by druid-io.

From the class IngestSegmentFirehoseFactoryTimelineTest, method constructorFeeder.

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() {
    final List<TestCase> testCases = ImmutableList.of(
        /* Adjacent segments */
        tc("2000/2000T02", 3, 7,
            ds("2000/2000T01", "v1", 0, ir("2000", 1), ir("2000T00:01", 2)),
            ds("2000T01/2000T02", "v1", 0, ir("2000T01", 4))),
        /* 1H segment overlaid on top of 2H segment */
        tc("2000/2000T02", 3, 7,
            ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)),
            ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4))),
        /* 1H segment overlaid on top of 1D segment */
        tc("2000/2000-01-02", 4, 23,
            ds("2000/2000-01-02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8), ir("2000T02", 16)),
            ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4))),
        /* Segment set with two segments for the same interval */
        tc("2000/2000T02", 4, 15,
            ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)),
            ds("2000/2000T02", "v1", 1, ir("2000T01:01", 4))),
        /* Segment wider than desired interval */
        tc("2000T01/2000T02", 1, 2,
            ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4))),
        tc("2000T02/2000T04", 2, 12,
            ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4)),
            ds("2000T03/2000T04", "v1", 0, ir("2000T03", 8))));
    final List<Object[]> constructors = new ArrayList<>();
    for (final TestCase testCase : testCases) {
        SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
        EasyMock.replay(notifierFactory);
        final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
        final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
        final CoordinatorClient cc = new CoordinatorClient(null, null) {

            @Override
            public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
                // Expect the interval we asked for
                if (intervals.equals(ImmutableList.of(testCase.interval))) {
                    return ImmutableSet.copyOf(testCase.segments);
                } else {
                    throw new IllegalArgumentException("BAD");
                }
            }

            @Override
            public DataSegment fetchUsedSegment(String dataSource, String segmentId) {
                return testCase.segments.stream().filter(s -> s.getId().toString().equals(segmentId)).findAny().get();
            }
        };
        final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory(
            DATA_SOURCE,
            testCase.interval,
            null,
            TrueDimFilter.instance(),
            Arrays.asList(DIMENSIONS),
            Arrays.asList(METRICS),
            // Split as much as possible
            1L,
            INDEX_IO,
            cc,
            slf,
            retryPolicyFactory
        );
        constructors.add(new Object[] { testCase.toString(), factory, testCase.tmpDir, testCase.expectedCount, testCase.expectedSum, testCase.segments.size() });
    }
    return constructors;
}
Also used : RetryPolicyConfig(org.apache.druid.indexing.common.RetryPolicyConfig) ArrayList(java.util.ArrayList) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) DataSegment(org.apache.druid.timeline.DataSegment) SegmentHandoffNotifierFactory(org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory) CoordinatorClient(org.apache.druid.client.coordinator.CoordinatorClient) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList)
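
Both examples so far build the factory from a default RetryPolicyConfig. When a test needs faster or fewer retries, the config can presumably be tuned before the factory is built; the fluent setters below (setMinWait, setMaxWait, setMaxRetryCount) are an assumption about the config's API rather than something taken from this page.

import org.apache.druid.indexing.common.RetryPolicyConfig;
import org.apache.druid.indexing.common.RetryPolicyFactory;
import org.joda.time.Period;

// Assumed fluent setters: keep waits short so a failing test finishes quickly.
final RetryPolicyFactory fastRetryFactory = new RetryPolicyFactory(
    new RetryPolicyConfig()
        .setMinWait(Period.millis(100))
        .setMaxWait(Period.millis(500))
        .setMaxRetryCount(2)
);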

Example 3 with RetryPolicyConfig

Use of org.apache.druid.indexing.common.RetryPolicyConfig in project druid by druid-io.

From the class IngestSegmentFirehoseFactoryTest, method constructorFeeder.

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(JodaUtils.MIN_INSTANT)
        .withDimensionsSpec(ROW_PARSER)
        .withMetrics(
            new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
            new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
        )
        .build();
    final IncrementalIndex index = new OnheapIncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER)
        .build();
    for (Integer i = 0; i < MAX_ROWS; ++i) {
        index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
    }
    FileUtils.mkdirp(PERSIST_DIR);
    INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null);
    final CoordinatorClient cc = new CoordinatorClient(null, null) {

        @Override
        public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
            return ImmutableSet.copyOf(SEGMENT_SET);
        }
    };
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(notifierFactory);
    final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
    final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
    Collection<Object[]> values = new ArrayList<>();
    for (InputRowParser parser : Arrays.<InputRowParser>asList(
        ROW_PARSER,
        new MapInputRowParser(new JSONParseSpec(
            new TimestampSpec(TIME_COLUMN, "auto", null),
            DimensionsSpec.builder()
                .setDimensionExclusions(ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME))
                .build(),
            null, null, null
        ))
    )) {
        for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
            for (List<String> metric_names : Arrays.<List<String>>asList(null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
                for (Boolean wrapInCombining : Arrays.asList(false, true)) {
                    final IngestSegmentFirehoseFactory isfFactory = new IngestSegmentFirehoseFactory(
                        TASK.getDataSource(),
                        Intervals.ETERNITY,
                        null,
                        new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
                        dim_names,
                        metric_names,
                        null,
                        INDEX_IO,
                        cc,
                        slf,
                        retryPolicyFactory
                    );
                    final FirehoseFactory factory = wrapInCombining
                        ? new CombiningFirehoseFactory(ImmutableList.of(isfFactory))
                        : isfFactory;
                    values.add(new Object[] {
                        StringUtils.format(
                            "DimNames[%s]MetricNames[%s]ParserDimNames[%s]WrapInCombining[%s]",
                            dim_names == null ? "null" : "dims",
                            metric_names == null ? "null" : "metrics",
                            parser == ROW_PARSER ? "dims" : "null",
                            wrapInCombining
                        ),
                        factory,
                        parser
                    });
                }
            }
        }
    }
    return values;
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) MapInputRowParser(org.apache.druid.data.input.impl.MapInputRowParser) CombiningFirehoseFactory(org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory) FirehoseFactory(org.apache.druid.data.input.FirehoseFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) ArrayList(java.util.ArrayList) DataSegment(org.apache.druid.timeline.DataSegment) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) CoordinatorClient(org.apache.druid.client.coordinator.CoordinatorClient) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) IncrementalIndexSchema(org.apache.druid.segment.incremental.IncrementalIndexSchema) CombiningFirehoseFactory(org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) RetryPolicyConfig(org.apache.druid.indexing.common.RetryPolicyConfig) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) SegmentHandoffNotifierFactory(org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory) InputRowParser(org.apache.druid.data.input.impl.InputRowParser) MapInputRowParser(org.apache.druid.data.input.impl.MapInputRowParser)
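
The four nested loops above emit one Object[] per combination of parser, dimension list, metric list, and combining flag. For readers unfamiliar with JUnit 4's Parameterized runner, a self-contained, hypothetical consumer of such rows looks like this (names are illustrative only):

import java.util.Collection;
import java.util.Collections;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class ParameterizedSketchTest {
    private final String description;

    // JUnit instantiates the class once per Object[] row, passing the
    // row's elements positionally to this constructor.
    public ParameterizedSketchTest(String description) {
        this.description = description;
    }

    // The "{0}" pattern names each test run after the row's first element.
    @Parameterized.Parameters(name = "{0}")
    public static Collection<Object[]> data() {
        return Collections.singletonList(new Object[] { "example" });
    }

    @Test
    public void testOneCombination() {
        // assertions for this particular combination go here
    }
}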

Example 4 with RetryPolicyConfig

Use of org.apache.druid.indexing.common.RetryPolicyConfig in project druid by druid-io.

From the class AbstractParallelIndexSupervisorTaskTest, method prepareObjectMapper.

public void prepareObjectMapper(ObjectMapper objectMapper, IndexIO indexIO) {
    final TaskConfig taskConfig = new TaskConfig(
        null, null, null, null, null, false, null, null, null, false, false,
        TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name()
    );
    objectMapper.setInjectableValues(
        new InjectableValues.Std()
            .addValue(ExprMacroTable.class, LookupEnabledTestExprMacroTable.INSTANCE)
            .addValue(IndexIO.class, indexIO)
            .addValue(ObjectMapper.class, objectMapper)
            .addValue(ChatHandlerProvider.class, new NoopChatHandlerProvider())
            .addValue(AuthConfig.class, new AuthConfig())
            .addValue(AuthorizerMapper.class, null)
            .addValue(RowIngestionMetersFactory.class, new DropwizardRowIngestionMetersFactory())
            .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
            .addValue(AuthorizerMapper.class, new AuthorizerMapper(ImmutableMap.of()))
            .addValue(AppenderatorsManager.class, TestUtils.APPENDERATORS_MANAGER)
            .addValue(LocalDataSegmentPuller.class, new LocalDataSegmentPuller())
            .addValue(CoordinatorClient.class, coordinatorClient)
            .addValue(SegmentCacheManagerFactory.class, new SegmentCacheManagerFactory(objectMapper))
            .addValue(RetryPolicyFactory.class, new RetryPolicyFactory(new RetryPolicyConfig()))
            .addValue(TaskConfig.class, taskConfig)
    );
    objectMapper.registerSubtypes(
        new NamedType(ParallelIndexSupervisorTask.class, ParallelIndexSupervisorTask.TYPE),
        new NamedType(CompactionTask.CompactionTuningConfig.class, CompactionTask.CompactionTuningConfig.TYPE),
        new NamedType(SinglePhaseSubTask.class, SinglePhaseSubTask.TYPE),
        new NamedType(PartialHashSegmentGenerateTask.class, PartialHashSegmentGenerateTask.TYPE),
        new NamedType(PartialRangeSegmentGenerateTask.class, PartialRangeSegmentGenerateTask.TYPE),
        new NamedType(PartialGenericSegmentMergeTask.class, PartialGenericSegmentMergeTask.TYPE),
        new NamedType(PartialDimensionDistributionTask.class, PartialDimensionDistributionTask.TYPE),
        new NamedType(PartialDimensionCardinalityTask.class, PartialDimensionCardinalityTask.TYPE)
    );
}
Also used : RetryPolicyConfig(org.apache.druid.indexing.common.RetryPolicyConfig) NoopChatHandlerProvider(org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider) NamedType(com.fasterxml.jackson.databind.jsontype.NamedType) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) TaskConfig(org.apache.druid.indexing.common.config.TaskConfig) AuthConfig(org.apache.druid.server.security.AuthConfig) DataSegment(org.apache.druid.timeline.DataSegment) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) LookupEnabledTestExprMacroTable(org.apache.druid.query.expression.LookupEnabledTestExprMacroTable) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) DropwizardRowIngestionMetersFactory(org.apache.druid.indexing.common.stats.DropwizardRowIngestionMetersFactory) LocalDataSegmentPuller(org.apache.druid.segment.loading.LocalDataSegmentPuller) AuthorizerMapper(org.apache.druid.server.security.AuthorizerMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) RowIngestionMetersFactory(org.apache.druid.segment.incremental.RowIngestionMetersFactory) DropwizardRowIngestionMetersFactory(org.apache.druid.indexing.common.stats.DropwizardRowIngestionMetersFactory)
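
Each addValue call registers an object that Jackson supplies wherever a deserialized class declares a matching @JacksonInject parameter; this is how tasks read back from JSON regain non-serialized collaborators such as the RetryPolicyFactory. A minimal, self-contained illustration with hypothetical types:

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectableValuesSketch {
    static class Helper {}

    static class Widget {
        final String name;
        final Helper helper; // not in the JSON; supplied via InjectableValues

        @JsonCreator
        Widget(@JsonProperty("name") String name, @JacksonInject Helper helper) {
            this.name = name;
            this.helper = helper;
        }
    }

    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();
        mapper.setInjectableValues(new InjectableValues.Std().addValue(Helper.class, new Helper()));
        final Widget widget = mapper.readValue("{\"name\":\"x\"}", Widget.class);
        System.out.println(widget.name + " " + (widget.helper != null)); // prints: x true
    }
}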

Example 5 with RetryPolicyConfig

Use of org.apache.druid.indexing.common.RetryPolicyConfig in project druid by druid-io.

From the class RemoteTaskActionClientTest, method testSubmitSimple.

@Test
public void testSubmitSimple() throws Exception {
    Request request = new Request(HttpMethod.POST, new URL("http://localhost:1234/xx"));
    EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action")).andReturn(request);
    // Return status code 200 and a result list of size 1.
    Map<String, Object> responseBody = new HashMap<>();
    final List<TaskLock> expectedLocks = Collections.singletonList(new TimeChunkLock(TaskLockType.SHARED, "groupId", "dataSource", Intervals.of("2019/2020"), "version", 0));
    responseBody.put("result", expectedLocks);
    String strResult = objectMapper.writeValueAsString(responseBody);
    final HttpResponse response = EasyMock.createNiceMock(HttpResponse.class);
    EasyMock.expect(response.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
    EasyMock.expect(response.getContent()).andReturn(new BigEndianHeapChannelBuffer(0));
    EasyMock.replay(response);
    StringFullResponseHolder responseHolder = new StringFullResponseHolder(response, StandardCharsets.UTF_8).addChunk(strResult);
    // set up mocks
    EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
    EasyMock.replay(druidLeaderClient);
    Task task = NoopTask.create("id", 0);
    RemoteTaskActionClient client = new RemoteTaskActionClient(task, druidLeaderClient, new RetryPolicyFactory(new RetryPolicyConfig()), objectMapper);
    final List<TaskLock> locks = client.submit(new LockListAction());
    Assert.assertEquals(expectedLocks, locks);
    EasyMock.verify(druidLeaderClient);
}
Also used : Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) RetryPolicyConfig(org.apache.druid.indexing.common.RetryPolicyConfig) HashMap(java.util.HashMap) TimeChunkLock(org.apache.druid.indexing.common.TimeChunkLock) Request(org.apache.druid.java.util.http.client.Request) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) BigEndianHeapChannelBuffer(org.jboss.netty.buffer.BigEndianHeapChannelBuffer) RetryPolicyFactory(org.apache.druid.indexing.common.RetryPolicyFactory) URL(java.net.URL) StringFullResponseHolder(org.apache.druid.java.util.http.client.response.StringFullResponseHolder) TaskLock(org.apache.druid.indexing.common.TaskLock) Test(org.junit.Test)
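
The mock choreography here follows EasyMock's record, replay, verify cycle. Stripped of the HTTP details, the skeleton is as follows; ExampleService is a hypothetical interface, not part of Druid.

import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;

public class EasyMockCycleTest {
    interface ExampleService {
        String call(String input);
    }

    @Test
    public void testRecordReplayVerify() {
        // Record the expected interaction, switch the mock to replay mode,
        // exercise the code under test, then verify the expectation was met.
        ExampleService service = EasyMock.createNiceMock(ExampleService.class);
        EasyMock.expect(service.call("in")).andReturn("out");
        EasyMock.replay(service);
        Assert.assertEquals("out", service.call("in")); // stands in for the code under test
        EasyMock.verify(service);
    }
}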

Aggregations

RetryPolicyConfig (org.apache.druid.indexing.common.RetryPolicyConfig): 5
RetryPolicyFactory (org.apache.druid.indexing.common.RetryPolicyFactory): 5
SegmentCacheManagerFactory (org.apache.druid.indexing.common.SegmentCacheManagerFactory): 4
DataSegment (org.apache.druid.timeline.DataSegment): 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
CoordinatorClient (org.apache.druid.client.coordinator.CoordinatorClient): 2
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 2
IndexSpec (org.apache.druid.segment.IndexSpec): 2
SegmentHandoffNotifierFactory (org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory): 2
Test (org.junit.Test): 2
NamedType (com.fasterxml.jackson.databind.jsontype.NamedType): 1
URL (java.net.URL): 1
HashMap (java.util.HashMap): 1
ClientCompactionIOConfig (org.apache.druid.client.indexing.ClientCompactionIOConfig): 1
ClientCompactionIntervalSpec (org.apache.druid.client.indexing.ClientCompactionIntervalSpec): 1
ClientCompactionTaskDimensionsSpec (org.apache.druid.client.indexing.ClientCompactionTaskDimensionsSpec): 1
ClientCompactionTaskGranularitySpec (org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec): 1