Use of org.apache.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From class EventReceiverFirehoseTest, method setUp:
@Before
public void setUp() {
  req = EasyMock.createMock(HttpServletRequest.class);
  eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(
      SERVICE_NAME,
      CAPACITY,
      MAX_IDLE_TIME_MILLIS,
      null,
      new DefaultObjectMapper(),
      new DefaultObjectMapper(),
      register,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER
  );
  firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect(
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("timestamp", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
              null,
              null,
              null
          )
      ),
      null
  );
}
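For context, a minimal sketch of the same JSONParseSpec wiring in isolation. The sample event and the expected result in the comments are illustrative assumptions, not part of the test above; the five constructor arguments are the timestamp spec, the dimensions spec, an optional flattenSpec, an optional featureSpec, and the keepNullColumns flag.

JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("timestamp", "auto", null),   // timestamp column; "auto" detects the format
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
    null,   // flattenSpec: no nested-field flattening
    null,   // featureSpec: no extra Jackson parser features
    null    // keepNullColumns: use the default
);
StringInputRowParser parser = new StringInputRowParser(spec, "UTF-8");
// Hypothetical sample event of the shape this firehose would receive.
InputRow row = parser.parse("{\"timestamp\":\"2021-01-01T00:00:00Z\",\"d1\":\"v1\"}");
// row.getDimension("d1") would be ["v1"].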
Use of org.apache.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From class FireDepartmentTest, method testSerde:
@Test
public void testSerde() throws Exception {
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
  FireDepartment schema = new FireDepartment(
      new DataSchema(
          "foo",
          jsonMapper.convertValue(
              new StringInputRowParser(
                  new JSONParseSpec(
                      new TimestampSpec("timestamp", "auto", null),
                      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2"))),
                      null,
                      null,
                      null
                  ),
                  null
              ),
              Map.class
          ),
          new AggregatorFactory[] { new CountAggregatorFactory("count") },
          new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
          null,
          jsonMapper
      ),
      new RealtimeIOConfig(
          null,
          new RealtimePlumberSchool(
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              NoopJoinableFactory.INSTANCE,
              TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()),
              TestHelper.getTestIndexIO(),
              MapCache.create(0),
              NO_CACHE_CONFIG,
              new CachePopulatorStats(),
              TestHelper.makeJsonMapper()
          )
      ),
      RealtimeTuningConfig.makeDefaultTuningConfig(new File("/tmp/nonexistent"))
  );
  String json = jsonMapper.writeValueAsString(schema);
  FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);
  Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
  Assert.assertEquals("/tmp/nonexistent", schema.getTuningConfig().getBasePersistDirectory().toString());
}
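The same round-trip idea can be shown on the parse spec alone. A minimal sketch, assuming DefaultObjectMapper resolves the "format": "json" subtype for ParseSpec (the full FireDepartment serde above relies on the same polymorphic deserialization):

ObjectMapper mapper = new DefaultObjectMapper();
ParseSpec spec = new JSONParseSpec(
    new TimestampSpec("timestamp", "auto", null),
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2"))),
    null, null, null
);
// Serializes to something like {"format":"json","timestampSpec":{...},"dimensionsSpec":{...},...}
String json = mapper.writeValueAsString(spec);
ParseSpec roundTripped = mapper.readValue(json, ParseSpec.class);
// ParseSpec implementations define equals(), so the round trip can be asserted directly.
Assert.assertEquals(spec, roundTripped);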
Use of org.apache.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From class MultiValuedDimensionTest, method setup:
@Before
public void setup() throws Exception {
  incrementalIndex = new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(5000)
      .build();
  StringInputRowParser parser = new StringInputRowParser(
      new CSVParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags", "othertags"))),
          "\t",
          ImmutableList.of("timestamp", "product", "tags", "othertags"),
          false,
          0
      ),
      "UTF-8"
  );
  String[] rows = new String[] {
      "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3,u1\tu2",
      "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5,u3\tu4",
      "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7,u1\tu5",
      "2011-01-14T00:00:00.000Z,product_4,\"\",u2"
  };
  for (String row : rows) {
    incrementalIndex.add(parser.parse(row));
  }
  persistedSegmentDir = FileUtils.createTempDir();
  TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
            .persist(incrementalIndex, persistedSegmentDir, new IndexSpec(), null);
  queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
  StringInputRowParser parserNullSampler = new StringInputRowParser(
      new JSONParseSpec(
          new TimestampSpec("time", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags", "othertags")))
      ),
      "UTF-8"
  );
  incrementalIndexNullSampler = new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(5000)
      .build();
  String[] rowsNullSampler = new String[] {
      "{\"time\":\"2011-01-13T00:00:00.000Z\",\"product\":\"product_1\",\"tags\":[],\"othertags\":[\"u1\", \"u2\"]}",
      "{\"time\":\"2011-01-12T00:00:00.000Z\",\"product\":\"product_2\",\"othertags\":[\"u3\", \"u4\"]}",
      "{\"time\":\"2011-01-14T00:00:00.000Z\",\"product\":\"product_3\",\"tags\":[\"\"],\"othertags\":[\"u1\", \"u5\"]}",
      "{\"time\":\"2011-01-15T00:00:00.000Z\",\"product\":\"product_4\",\"tags\":[\"t1\", \"t2\", \"\"],\"othertags\":[\"u6\", \"u7\"]}",
      "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_5\",\"tags\":[],\"othertags\":[]}",
      "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_6\"}",
      "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_7\",\"othertags\":[]}",
      "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_8\",\"tags\":[\"\"],\"othertags\":[]}"
  };
  for (String row : rowsNullSampler) {
    incrementalIndexNullSampler.add(parserNullSampler.parse(row));
  }
  persistedSegmentDirNullSampler = FileUtils.createTempDir();
  TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
            .persist(incrementalIndexNullSampler, persistedSegmentDirNullSampler, new IndexSpec(), null);
  queryableIndexNullSampler = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDirNullSampler);
}
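What the JSON parser does with the array-valued fields above, sketched on a single hypothetical row (the values in the comments describe the expected behavior, not assertions from this test):

InputRow row = parserNullSampler.parse(
    "{\"time\":\"2011-01-13T00:00:00.000Z\",\"product\":\"p1\",\"tags\":[\"t1\",\"t2\"]}"
);
// A JSON array maps to a multi-value dimension: row.getDimension("tags") -> ["t1", "t2"].
// A key that is absent entirely ("othertags" here) yields an empty list for that dimension.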
Use of org.apache.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From class ProtobufInputRowParserTest, method setUp:
@Before
public void setUp() {
  parseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )),
      new JSONPathSpec(
          true,
          Lists.newArrayList(
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
          )
      ),
      null,
      null
  );
  flatParseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )),
      null,
      null,
      null
  );
  flatParseSpecWithComplexTimestamp = new JSONParseSpec(
      new TimestampSpec("otherTimestamp", "iso", null),
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )),
      null,
      null,
      null
  );
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}
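The flattenSpec is the interesting part of this setup. A sketch of the same JSONPathSpec in isolation, annotated field by field; the example input documents in the comments are hypothetical, and passing null for the dimension list is assumed here to mean schemaless discovery:

JSONParseSpec nested = new JSONParseSpec(
    new TimestampSpec("timestamp", "iso", null),
    new DimensionsSpec(null),   // null: let dimensions be discovered
    new JSONPathSpec(
        true,   // useFieldDiscovery: also pick up top-level primitive fields
        Lists.newArrayList(
            // exposes the root-level "eventType" field under the same name
            new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
            // extracts the value at {"foo":{"bar": ...}} as the column "foobar"
            new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
            // extracts the "bar" field of the first element of the "bar" array as "bar0"
            new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
        )
    ),
    null,
    null
);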
Use of org.apache.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From class IngestSegmentFirehoseFactoryTest, method constructorFeeder:
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> constructorFeeder() throws IOException {
  final IndexSpec indexSpec = new IndexSpec();
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(JodaUtils.MIN_INSTANT)
      .withDimensionsSpec(ROW_PARSER)
      .withMetrics(
          new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
          new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
      )
      .build();
  final IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(MAX_ROWS * MAX_SHARD_NUMBER)
      .build();
  for (Integer i = 0; i < MAX_ROWS; ++i) {
    index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));
  }
  FileUtils.mkdirp(PERSIST_DIR);
  INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null);
  final CoordinatorClient cc = new CoordinatorClient(null, null) {
    @Override
    public Collection<DataSegment> fetchUsedSegmentsInDataSourceForIntervals(String dataSource, List<Interval> intervals) {
      return ImmutableSet.copyOf(SEGMENT_SET);
    }
  };
  SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
  EasyMock.replay(notifierFactory);
  final SegmentCacheManagerFactory slf = new SegmentCacheManagerFactory(MAPPER);
  final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig());
  Collection<Object[]> values = new ArrayList<>();
  for (InputRowParser parser : Arrays.<InputRowParser>asList(
      ROW_PARSER,
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec(TIME_COLUMN, "auto", null),
              DimensionsSpec.builder().setDimensionExclusions(ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME)).build(),
              null,
              null,
              null
          )
      )
  )) {
    for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
      for (List<String> metric_names : Arrays.<List<String>>asList(null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
        for (Boolean wrapInCombining : Arrays.asList(false, true)) {
          final IngestSegmentFirehoseFactory isfFactory = new IngestSegmentFirehoseFactory(
              TASK.getDataSource(),
              Intervals.ETERNITY,
              null,
              new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
              dim_names,
              metric_names,
              null,
              INDEX_IO,
              cc,
              slf,
              retryPolicyFactory
          );
          final FirehoseFactory factory = wrapInCombining
              ? new CombiningFirehoseFactory(ImmutableList.of(isfFactory))
              : isfFactory;
          values.add(new Object[] {
              StringUtils.format(
                  "DimNames[%s]MetricNames[%s]ParserDimNames[%s]WrapInCombining[%s]",
                  dim_names == null ? "null" : "dims",
                  metric_names == null ? "null" : "metrics",
                  parser == ROW_PARSER ? "dims" : "null",
                  wrapInCombining
              ),
              factory,
              parser
          });
        }
      }
    }
  }
  return values;
}
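The second parser in the outer loop relies on schemaless discovery plus exclusions. A sketch of it in isolation, reusing the test's constants and its buildRow helper; the comment describes the expected discovery behavior:

InputRowParser<Map<String, Object>> parser = new MapInputRowParser(
    new JSONParseSpec(
        new TimestampSpec(TIME_COLUMN, "auto", null),
        DimensionsSpec.builder()
                      .setDimensionExclusions(ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME))
                      .build(),
        null,
        null,
        null
    )
);
// With no explicit dimension list, every map key except the timestamp column
// and the excluded columns becomes a discovered string dimension.
InputRow row = parser.parseBatch(buildRow(0L)).get(0);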