
Example 6 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class IndexIngestionSpecTest, method testFirehoseAndInputFormat:

@Test
public void testFirehoseAndInputFormat() {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot use firehose and inputFormat together.");
    final IndexIngestionSpec spec = new IndexIngestionSpec(
        new DataSchema("dataSource", new TimestampSpec(null, null, null), DimensionsSpec.EMPTY,
            new AggregatorFactory[0], new ArbitraryGranularitySpec(Granularities.NONE, null), null),
        new IndexIOConfig(new NoopFirehoseFactory(), null, new NoopInputFormat(), null, null),
        null
    );
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec), IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), NoopFirehoseFactory (org.apache.druid.data.input.impl.NoopFirehoseFactory), NoopInputFormat (org.apache.druid.data.input.impl.NoopInputFormat), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec), Test (org.junit.Test)
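
These tests only need a placeholder granularity spec (Granularities.NONE with no intervals). For reference, here is a minimal sketch, not taken from the project's sources, of how ArbitraryGranularitySpec is constructed with explicit, possibly irregular bucket intervals; the class name ArbitraryGranularitySpecSketch and the interval strings are made up for illustration.

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;

public class ArbitraryGranularitySpecSketch {
    public static void main(String[] args) {
        // Unlike UniformGranularitySpec, the segment buckets are given directly as intervals
        // rather than derived from a segmentGranularity. Interval values are hypothetical.
        ArbitraryGranularitySpec spec = new ArbitraryGranularitySpec(
            Granularities.NONE,                          // query granularity: keep raw timestamps
            ImmutableList.of(
                Intervals.of("2012-01-01/2012-01-03"),
                Intervals.of("2012-02-01/2012-02-10")
            )
        );
        System.out.println(spec.getQueryGranularity() + ", rollup=" + spec.isRollup());
    }
}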

Example 7 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class IndexIngestionSpecTest, method testFirehoseAndInputSource:

@Test
public void testFirehoseAndInputSource() {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("At most one of [Property{name='firehose', value=NoopFirehoseFactory{}}, Property{name='inputSource'");
    final IndexIngestionSpec spec = new IndexIngestionSpec(
        new DataSchema("dataSource", new TimestampSpec(null, null, null), DimensionsSpec.EMPTY,
            new AggregatorFactory[0], new ArbitraryGranularitySpec(Granularities.NONE, null), null),
        new IndexIOConfig(new NoopFirehoseFactory(), new NoopInputSource(), null, null, null),
        null
    );
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec), IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig), NoopInputSource (org.apache.druid.data.input.impl.NoopInputSource), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), NoopFirehoseFactory (org.apache.druid.data.input.impl.NoopFirehoseFactory), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec), Test (org.junit.Test)

Example 8 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class MaterializedViewSupervisorSpec, method createTask:

public HadoopIndexTask createTask(Interval interval, String version, List<DataSegment> segments) {
    String taskId = StringUtils.format("%s_%s_%s", TASK_PREFIX, dataSourceName, DateTimes.nowUtc());
    // generate parser
    Map<String, Object> parseSpec = new HashMap<>();
    parseSpec.put("format", "timeAndDims");
    parseSpec.put("dimensionsSpec", dimensionsSpec);
    Map<String, Object> parser = new HashMap<>();
    parser.put("type", "map");
    parser.put("parseSpec", parseSpec);
    // generate HadoopTuningConfig
    HadoopTuningConfig tuningConfigForTask = new HadoopTuningConfig(
        tuningConfig.getWorkingPath(), version, tuningConfig.getPartitionsSpec(),
        tuningConfig.getShardSpecs(), tuningConfig.getIndexSpec(),
        tuningConfig.getIndexSpecForIntermediatePersists(), tuningConfig.getAppendableIndexSpec(),
        tuningConfig.getMaxRowsInMemory(), tuningConfig.getMaxBytesInMemory(),
        tuningConfig.isLeaveIntermediate(), tuningConfig.isCleanupOnFailure(),
        tuningConfig.isOverwriteFiles(), tuningConfig.isIgnoreInvalidRows(),
        tuningConfig.getJobProperties(), tuningConfig.isCombineText(), tuningConfig.getUseCombiner(),
        tuningConfig.getMaxRowsInMemory(), tuningConfig.getNumBackgroundPersistThreads(),
        tuningConfig.isForceExtendableShardSpecs(),
        /* useExplicitVersion */ true,
        tuningConfig.getUserAllowedHadoopPrefix(), tuningConfig.isLogParseExceptions(),
        tuningConfig.getMaxParseExceptions(), tuningConfig.isUseYarnRMJobStatusFallback(),
        tuningConfig.getAwaitSegmentAvailabilityTimeoutMillis());
    // generate granularity
    ArbitraryGranularitySpec granularitySpec = new ArbitraryGranularitySpec(Granularities.NONE, ImmutableList.of(interval));
    // generate DataSchema
    DataSchema dataSchema = new DataSchema(dataSourceName, parser, aggregators, granularitySpec, TransformSpec.NONE, objectMapper);
    // generate DatasourceIngestionSpec
    DatasourceIngestionSpec datasourceIngestionSpec = new DatasourceIngestionSpec(
        baseDataSource, null, ImmutableList.of(interval), segments, null, null, null, false, null);
    // generate HadoopIOConfig
    Map<String, Object> inputSpec = new HashMap<>();
    inputSpec.put("type", "dataSource");
    inputSpec.put("ingestionSpec", datasourceIngestionSpec);
    HadoopIOConfig hadoopIOConfig = new HadoopIOConfig(inputSpec, null, null);
    // generate HadoopIngestionSpec
    HadoopIngestionSpec spec = new HadoopIngestionSpec(dataSchema, hadoopIOConfig, tuningConfigForTask);
    // generate HadoopIndexTask
    HadoopIndexTask task = new HadoopIndexTask(
        taskId, spec, hadoopCoordinates, hadoopDependencyCoordinates, classpathPrefix,
        objectMapper, context, authorizerMapper, chatHandlerProvider);
    return task;
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), DatasourceIngestionSpec (org.apache.druid.indexer.hadoop.DatasourceIngestionSpec), HadoopIngestionSpec (org.apache.druid.indexer.HadoopIngestionSpec), HashMap (java.util.HashMap), HadoopTuningConfig (org.apache.druid.indexer.HadoopTuningConfig), ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec), HadoopIndexTask (org.apache.druid.indexing.common.task.HadoopIndexTask), HadoopIOConfig (org.apache.druid.indexer.HadoopIOConfig)
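
The granularity spec built above covers exactly the interval being materialized. As a rough illustration, not part of the project's sources, serializing such a spec with Druid's DefaultObjectMapper shows the "arbitrary"-typed granularitySpec JSON that the Hadoop task ends up carrying; the class name GranularitySpecJsonSketch and the interval string are hypothetical.

import com.google.common.collect.ImmutableList;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;

public class GranularitySpecJsonSketch {
    public static void main(String[] args) throws Exception {
        // Same shape as the spec created in createTask(): NONE query granularity plus
        // a single explicit interval (hypothetical here).
        ArbitraryGranularitySpec granularitySpec = new ArbitraryGranularitySpec(
            Granularities.NONE,
            ImmutableList.of(Intervals.of("2021-01-01/2021-01-02"))
        );
        // Prints JSON roughly of the form
        // {"type":"arbitrary", ..., "rollup":true, "intervals":["2021-01-01T00:00:00.000Z/..."]}
        System.out.println(new DefaultObjectMapper().writeValueAsString(granularitySpec));
    }
}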

Example 9 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class IndexIngestionSpecTest, method testParserAndInputFormat:

@Test
public void testParserAndInputFormat() {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot use parser and inputSource together. Try using inputFormat instead of parser.");
    final IndexIngestionSpec spec = new IndexIngestionSpec(
        new DataSchema("dataSource", ImmutableMap.of("fake", "parser map"), new AggregatorFactory[0],
            new ArbitraryGranularitySpec(Granularities.NONE, null), null, null),
        new IndexIOConfig(null, new NoopInputSource(), new NoopInputFormat(), null, null),
        null
    );
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec), IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig), NoopInputSource (org.apache.druid.data.input.impl.NoopInputSource), NoopInputFormat (org.apache.druid.data.input.impl.NoopInputFormat), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec), Test (org.junit.Test)

Example 10 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class IndexIngestionSpecTest, method testParserAndInputSource:

@Test
public void testParserAndInputSource() {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot use parser and inputSource together.");
    final IndexIngestionSpec spec = new IndexIngestionSpec(
        new DataSchema("dataSource", ImmutableMap.of("fake", "parser map"), new AggregatorFactory[0],
            new ArbitraryGranularitySpec(Granularities.NONE, null), null, null),
        new IndexIOConfig(null, new NoopInputSource(), null, null, null),
        null
    );
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec), IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig), NoopInputSource (org.apache.druid.data.input.impl.NoopInputSource), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec), Test (org.junit.Test)
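
For contrast with the four failing combinations above, here is a hedged sketch, not one of the project's tests, of the combination these checks steer users toward: an inputSource plus inputFormat in the IndexIOConfig, with neither a firehose nor a legacy parser in the DataSchema, which constructs without throwing. The class name ValidIndexIngestionSpecSketch is made up; NoopInputSource and NoopInputFormat are the same test helpers used in the examples above.

import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.NoopInputFormat;
import org.apache.druid.data.input.impl.NoopInputSource;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig;
import org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;

public class ValidIndexIngestionSpecSketch {
    public static void main(String[] args) {
        final IndexIngestionSpec spec = new IndexIngestionSpec(
            new DataSchema("dataSource", new TimestampSpec(null, null, null), DimensionsSpec.EMPTY,
                new AggregatorFactory[0], new ArbitraryGranularitySpec(Granularities.NONE, null), null),
            // inputSource + inputFormat, no firehose, no parser: no IllegalArgumentException
            new IndexIOConfig(null, new NoopInputSource(), new NoopInputFormat(), null, null),
            null
        );
        System.out.println(spec.getIOConfig());
    }
}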

Aggregations

Classes most often used together with ArbitraryGranularitySpec across these examples (usage counts):

ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec): 19
Test (org.junit.Test): 17
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 15
IdUtilsTest (org.apache.druid.common.utils.IdUtilsTest): 11
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 11
JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec): 10
StringInputRowParser (org.apache.druid.data.input.impl.StringInputRowParser): 9
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 9
DataSchema (org.apache.druid.segment.indexing.DataSchema): 7
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 6
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 5
IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig): 4
IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec): 4
NoopInputSource (org.apache.druid.data.input.impl.NoopInputSource): 3
NoopFirehoseFactory (org.apache.druid.data.input.impl.NoopFirehoseFactory): 2
NoopInputFormat (org.apache.druid.data.input.impl.NoopInputFormat): 2
TypeReference (com.fasterxml.jackson.core.type.TypeReference): 1
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
BufferedWriter (java.io.BufferedWriter): 1
File (java.io.File): 1