Example 11 with MapInputRowParser

Use of org.apache.druid.data.input.impl.MapInputRowParser in project druid by druid-io.

From the class EventReceiverFirehoseIdleTest, method setUp:

@Before
public void setUp() {
    req = EasyMock.createMock(HttpServletRequest.class);
    eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(
        SERVICE_NAME, CAPACITY, MAX_IDLE_TIME, null,
        new DefaultObjectMapper(), new DefaultObjectMapper(),
        register, AuthTestUtils.TEST_AUTHORIZER_MAPPER
    );
    // Connect with a MapInputRowParser that reads the "timestamp" field and a single dimension "d1".
    firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect(
        new MapInputRowParser(new JSONParseSpec(
            new TimestampSpec("timestamp", "auto", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
            null, null, null
        )),
        null
    );
}
Also used: HttpServletRequest (javax.servlet.http.HttpServletRequest), MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec), DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper), JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec), Before (org.junit.Before)
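As a quick illustration of what the parser configured in setUp does, here is a minimal sketch (not taken from the Druid test) that feeds one hypothetical event map through an identically configured MapInputRowParser. It assumes the parseBatch(Map) method exposed by InputRowParser in recent Druid versions.

// Minimal sketch, assuming InputRowParser.parseBatch(Map) is available in this Druid version.
MapInputRowParser parser = new MapInputRowParser(new JSONParseSpec(
    new TimestampSpec("timestamp", "auto", null),
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
    null, null, null
));
// Hypothetical event: one timestamp plus the "d1" dimension declared above.
InputRow row = parser.parseBatch(
    ImmutableMap.<String, Object>of("timestamp", "2000-01-01T00:00:00Z", "d1", "v1")
).get(0);
// row.getDimension("d1") should then return ["v1"].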

Example 12 with MapInputRowParser

Use of org.apache.druid.data.input.impl.MapInputRowParser in project druid by druid-io.

From the class DefaultOfflineAppenderatorFactoryTest, method testBuild:

@Test
public void testBuild() throws IOException, SegmentNotWritableException {
    Injector injector = Initialization.makeInjectorWithModules(GuiceInjectors.makeStartupInjector(), ImmutableList.<Module>of(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool");
            binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999);
            binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
            binder.bind(DruidProcessingConfig.class).toInstance(new DruidProcessingConfig() {

                @Override
                public String getFormatString() {
                    return "processing-%s";
                }

                @Override
                public int intermediateComputeSizeBytes() {
                    return 100 * 1024 * 1024;
                }

                @Override
                public int getNumThreads() {
                    return 1;
                }

                @Override
                public int columnCacheSizeBytes() {
                    return 25 * 1024 * 1024;
                }
            });
            binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class);
        }
    }));
    ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);
    // Deserialize an "offline" AppenderatorFactory from JSON.
    AppenderatorFactory defaultOfflineAppenderatorFactory =
        objectMapper.readerFor(AppenderatorFactory.class).readValue("{\"type\":\"offline\"}");
    // Serialize a MapInputRowParser to a Map so it can be embedded in the DataSchema.
    final Map<String, Object> parserMap = objectMapper.convertValue(
        new MapInputRowParser(new JSONParseSpec(new TimestampSpec("ts", "auto", null), DimensionsSpec.EMPTY, null, null, null)),
        Map.class
    );
    DataSchema schema = new DataSchema(
        "dataSourceName",
        parserMap,
        new AggregatorFactory[] { new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met") },
        new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null),
        null,
        objectMapper
    );
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
        null, 75000, null, null, null, null, temporaryFolder.newFolder(),
        null, null, null, null, null, null, 0, 0, null, null, null, null, null
    );
    Appenderator appenderator = defaultOfflineAppenderatorFactory.build(schema, tuningConfig, new FireDepartmentMetrics());
    try {
        Assert.assertEquals("dataSourceName", appenderator.getDataSource());
        Assert.assertEquals(null, appenderator.startJob());
        SegmentIdWithShardSpec identifier = new SegmentIdWithShardSpec("dataSourceName", Intervals.of("2000/2001"), "A", new LinearShardSpec(0));
        Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "bar", 1), null);
        Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "baz", 1), null);
        Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
        appenderator.close();
        Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
    } finally {
        appenderator.close();
    }
}
Also used: ColumnConfig (org.apache.druid.segment.column.ColumnConfig), MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser), LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig), DataSchema (org.apache.druid.segment.indexing.DataSchema), Binder (com.google.inject.Binder), UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec), FireDepartmentMetrics (org.apache.druid.segment.realtime.FireDepartmentMetrics), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), Injector (com.google.inject.Injector), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), Module (com.google.inject.Module), DruidProcessingConfig (org.apache.druid.query.DruidProcessingConfig), JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Test (org.junit.Test)
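StreamAppenderatorTest.ir(...) is a test helper that builds an InputRow from a timestamp string, a dimension value, and a metric value. A rough approximation using MapBasedInputRow is sketched below; the dimension name "dim" is an assumption, while the metric field "met" matches the LongSumAggregatorFactory("met", "met") in the schema above.

// Assumed approximation of StreamAppenderatorTest.ir("2000", "bar", 1); not the helper's actual code.
InputRow row = new MapBasedInputRow(
    DateTimes.of("2000"),                                      // timestamp string parsed to a DateTime
    ImmutableList.of("dim"),                                   // assumed dimension name
    ImmutableMap.<String, Object>of("dim", "bar", "met", 1)    // event map: dimension value plus the "met" metric
);

Each such row bumps getRowsInMemory() by one until the appenderator is closed, which is what the assertions in testBuild check.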

Example 13 with MapInputRowParser

Use of org.apache.druid.data.input.impl.MapInputRowParser in project hive by apache.

From the class DruidOutputFormat, method getHiveRecordWriter:

@Override
public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath, Class<? extends Writable> valueClass, boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException {
    final int targetNumShardsPerGranularity = Integer.parseUnsignedInt(tableProperties.getProperty(Constants.DRUID_TARGET_SHARDS_PER_GRANULARITY, "0"));
    final int maxPartitionSize = targetNumShardsPerGranularity > 0 ? -1 : HiveConf.getIntVar(jc, HiveConf.ConfVars.HIVE_DRUID_MAX_PARTITION_SIZE);
    // If datasource is in the table properties, it is an INSERT/INSERT OVERWRITE as the datasource
    // name was already persisted. Otherwise, it is a CT/CTAS and we need to get the name from the
    // job properties that are set by configureOutputJobProperties in the DruidStorageHandler
    final String dataSource = tableProperties.getProperty(Constants.DRUID_DATA_SOURCE) == null ? jc.get(Constants.DRUID_DATA_SOURCE) : tableProperties.getProperty(Constants.DRUID_DATA_SOURCE);
    final String segmentDirectory = jc.get(DruidConstants.DRUID_SEGMENT_INTERMEDIATE_DIRECTORY);
    final GranularitySpec granularitySpec = DruidStorageHandlerUtils.getGranularitySpec(jc, tableProperties);
    final String columnNameProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMNS);
    final String columnTypeProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    if (StringUtils.isEmpty(columnNameProperty) || StringUtils.isEmpty(columnTypeProperty)) {
        throw new IllegalStateException(String.format("List of columns names [%s] or columns type [%s] is/are not present", columnNameProperty, columnTypeProperty));
    }
    ArrayList<String> columnNames = Lists.newArrayList(columnNameProperty.split(","));
    if (!columnNames.contains(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)) {
        throw new IllegalStateException("Timestamp column (' " + DruidConstants.DEFAULT_TIMESTAMP_COLUMN + "') not specified in create table; list of columns is : " + tableProperties.getProperty(serdeConstants.LIST_COLUMNS));
    }
    ArrayList<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    Pair<List<DimensionSchema>, AggregatorFactory[]> dimensionsAndAggregates = DruidStorageHandlerUtils.getDimensionsAndAggregates(columnNames, columnTypes);
    // Build a MapInputRowParser over the Hive timestamp column plus Druid's granularity and shard-key columns.
    final InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
        new TimestampSpec(DruidConstants.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
        new DimensionsSpec(
            dimensionsAndAggregates.lhs,
            Lists.newArrayList(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, Constants.DRUID_SHARD_KEY_COL_NAME),
            null
        )
    ));
    // Serialize the parser to a Map so it can be embedded in the DataSchema.
    Map<String, Object> inputParser = DruidStorageHandlerUtils.JSON_MAPPER.convertValue(
        inputRowParser,
        new TypeReference<Map<String, Object>>() {}
    );
    final DataSchema dataSchema = new DataSchema(
        Preconditions.checkNotNull(dataSource, "Data source name is null"),
        inputParser,
        dimensionsAndAggregates.rhs,
        granularitySpec,
        null,
        DruidStorageHandlerUtils.JSON_MAPPER
    );
    final String workingPath = jc.get(DruidConstants.DRUID_JOB_WORKING_DIRECTORY);
    final String version = jc.get(DruidConstants.DRUID_SEGMENT_VERSION);
    String basePersistDirectory = HiveConf.getVar(jc, HiveConf.ConfVars.HIVE_DRUID_BASE_PERSIST_DIRECTORY);
    if (Strings.isNullOrEmpty(basePersistDirectory)) {
        basePersistDirectory = System.getProperty("java.io.tmpdir");
    }
    Integer maxRowInMemory = HiveConf.getIntVar(jc, HiveConf.ConfVars.HIVE_DRUID_MAX_ROW_IN_MEMORY);
    IndexSpec indexSpec = DruidStorageHandlerUtils.getIndexSpec(jc);
    RealtimeTuningConfig realtimeTuningConfig = new RealtimeTuningConfig(
        maxRowInMemory, null, null, null,
        new File(basePersistDirectory, dataSource),
        new CustomVersioningPolicy(version),
        null, null, null, indexSpec, null, true, 0, 0, true, null, 0L, null, null
    );
    LOG.debug(String.format("running with Data schema [%s] ", dataSchema));
    return new DruidRecordWriter(dataSchema, realtimeTuningConfig, DruidStorageHandlerUtils.createSegmentPusherForDirectory(segmentDirectory, jc), maxPartitionSize, new Path(workingPath, SEGMENTS_DESCRIPTOR_DIR_NAME), finalOutPath.getFileSystem(jc));
}
Also used: IndexSpec (org.apache.druid.segment.IndexSpec), MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser), TimeAndDimsParseSpec (org.apache.druid.data.input.impl.TimeAndDimsParseSpec), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), ArrayList (java.util.ArrayList), List (java.util.List), Path (org.apache.hadoop.fs.Path), RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), DataSchema (org.apache.druid.segment.indexing.DataSchema), GranularitySpec (org.apache.druid.segment.indexing.granularity.GranularitySpec), DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec), InputRowParser (org.apache.druid.data.input.impl.InputRowParser), CustomVersioningPolicy (org.apache.druid.segment.realtime.plumber.CustomVersioningPolicy), Map (java.util.Map), File (java.io.File)
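For context, the Map produced by convertValue above is how the parser travels inside the DataSchema, and Druid later rebuilds the parser from it. A hedged sanity-check sketch, not part of getHiveRecordWriter; it assumes DruidStorageHandlerUtils.JSON_MAPPER knows Druid's polymorphic parser types (which it needs for the DataSchema to work at all):

// Assumed round-trip: the serialized parser map should deserialize back into an equivalent InputRowParser.
InputRowParser roundTripped = DruidStorageHandlerUtils.JSON_MAPPER.convertValue(inputParser, InputRowParser.class);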

Aggregations

MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser) 13
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec) 13
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec) 10
TimeAndDimsParseSpec (org.apache.druid.data.input.impl.TimeAndDimsParseSpec) 8
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory) 7
DataSchema (org.apache.druid.segment.indexing.DataSchema) 6
JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec) 5
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 5
RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig) 5
UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) 5
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 4
ArrayList (java.util.ArrayList) 4
InputRowParser (org.apache.druid.data.input.impl.InputRowParser) 4
File (java.io.File) 3
List (java.util.List) 3
Map (java.util.Map) 3
FirehoseFactory (org.apache.druid.data.input.FirehoseFactory) 3
FloatDimensionSchema (org.apache.druid.data.input.impl.FloatDimensionSchema) 3
LongDimensionSchema (org.apache.druid.data.input.impl.LongDimensionSchema) 3
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper) 3