Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
In class FireDepartmentTest, method testSerde:
@Test
public void testSerde() throws Exception {
ObjectMapper jsonMapper = new DefaultObjectMapper();
jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
FireDepartment schema = new FireDepartment(
    new DataSchema(
        "foo",
        jsonMapper.convertValue(
            new StringInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec("timestamp", "auto", null),
                    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2"))),
                    null,
                    null,
                    null
                ),
                null
            ),
            Map.class
        ),
        new AggregatorFactory[] { new CountAggregatorFactory("count") },
        new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
        null,
        jsonMapper
    ),
    new RealtimeIOConfig(
        null,
        new RealtimePlumberSchool(
            null, null, null, null, null, null, null,
            NoopJoinableFactory.INSTANCE,
            TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()),
            TestHelper.getTestIndexIO(),
            MapCache.create(0),
            NO_CACHE_CONFIG,
            new CachePopulatorStats(),
            TestHelper.makeJsonMapper()
        )
    ),
    RealtimeTuningConfig.makeDefaultTuningConfig(new File("/tmp/nonexistent"))
);
String json = jsonMapper.writeValueAsString(schema);
FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);
Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
Assert.assertEquals("/tmp/nonexistent", schema.getTuningConfig().getBasePersistDirectory().toString());
}
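For reference, the TimestampSpec portion of that round trip can be exercised on its own. The following is a minimal standalone sketch, not part of FireDepartmentTest; the method name testTimestampSpecSerde is illustrative, and it assumes the same DefaultObjectMapper setup as above.
@Test
public void testTimestampSpecSerde() throws Exception {
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    TimestampSpec spec = new TimestampSpec("timestamp", "auto", null);
    String json = jsonMapper.writeValueAsString(spec);
    TimestampSpec roundTripped = jsonMapper.readValue(json, TimestampSpec.class);
    // TimestampSpec implements equals(), so a lossless round trip compares equal.
    Assert.assertEquals(spec, roundTripped);
}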
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
In class QueryableIndexColumnCapabilitiesTest, method setup:
@BeforeClass
public static void setup() throws IOException {
MapInputRowParser parser = new MapInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("time", "auto", null),
        new DimensionsSpec(
            ImmutableList.<DimensionSchema>builder()
                .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1", "d2")))
                .add(new DoubleDimensionSchema("d3"))
                .add(new FloatDimensionSchema("d4"))
                .add(new LongDimensionSchema("d5"))
                .build()
        )
    )
);
AggregatorFactory[] metricsSpecs = new AggregatorFactory[] {
    new CountAggregatorFactory("cnt"),
    new DoubleSumAggregatorFactory("m1", "d3"),
    new FloatSumAggregatorFactory("m2", "d4"),
    new LongSumAggregatorFactory("m3", "d5"),
    new HyperUniquesAggregatorFactory("m4", "d1")
};
List<InputRow> rows = new ArrayList<>();
Map<String, Object> event = ImmutableMap.<String, Object>builder()
    .put("time", DateTimes.nowUtc().getMillis())
    .put("d1", "some string")
    .put("d2", ImmutableList.of("some", "list"))
    .put("d3", 1.234)
    .put("d4", 1.234f)
    .put("d5", 10L)
    .build();
rows.add(Iterables.getOnlyElement(parser.parseBatch(event)));
IndexBuilder builder = IndexBuilder.create()
    .rows(rows)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX = builder.buildIncrementalIndex();
MMAP_INDEX = builder.buildMMappedIndex();
List<InputRow> rowsWithNulls = new ArrayList<>();
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(event)));
Map<String, Object> eventWithNulls = new HashMap<>();
eventWithNulls.put("time", DateTimes.nowUtc().getMillis());
eventWithNulls.put("d1", null);
eventWithNulls.put("d2", ImmutableList.of());
eventWithNulls.put("d3", null);
eventWithNulls.put("d4", null);
eventWithNulls.put("d5", null);
rowsWithNulls.add(Iterables.getOnlyElement(parser.parseBatch(eventWithNulls)));
IndexBuilder builderWithNulls = IndexBuilder.create()
    .rows(rowsWithNulls)
    .schema(
        new IncrementalIndexSchema.Builder()
            .withMetrics(metricsSpecs)
            .withDimensionsSpec(parser)
            .withRollup(false)
            .build()
    )
    .tmpDir(temporaryFolder.newFolder());
INC_INDEX_WITH_NULLS = builderWithNulls.buildIncrementalIndex();
MMAP_INDEX_WITH_NULLS = builderWithNulls.buildMMappedIndex();
}
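A minimal standalone sketch of the parsing step in isolation, using a simplified version of the parser configuration above that keeps only the d1 dimension; the event literal below is illustrative.
MapInputRowParser parser = new MapInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("time", "auto", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1")))
    )
);
// The "auto" format accepts both ISO-8601 strings and epoch millis for the "time" column.
Map<String, Object> event = ImmutableMap.of(
    "time", DateTimes.of("2011-01-12T00:00:00.000Z").getMillis(),
    "d1", "some string"
);
InputRow row = Iterables.getOnlyElement(parser.parseBatch(event));
Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), row.getTimestamp());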
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
In class TestIndex, method makeRealtimeIndex:
public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup, boolean bitmap) {
final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
    .withTimestampSpec(new TimestampSpec("ds", "auto", null))
    .withDimensionsSpec(bitmap ? DIMENSIONS_SPEC : DIMENSIONS_SPEC_NO_BITMAPS)
    .withVirtualColumns(VIRTUAL_COLUMNS)
    .withMetrics(METRIC_AGGS)
    .withRollup(rollup)
    .build();
final IncrementalIndex retVal = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(10000).build();
try {
return loadIncrementalIndex(retVal, source);
} catch (Exception e) {
if (rollup) {
realtimeIndex = null;
} else {
noRollupRealtimeIndex = null;
}
throw new RuntimeException(e);
}
}
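When rows reach the index as already-parsed InputRow objects (as loadIncrementalIndex does here), their timestamps are already materialized; the TimestampSpec set on the schema is instead carried along with the index. A minimal standalone sketch of that, assuming IncrementalIndex.getMetadata() exposes the spec via Metadata.getTimestampSpec() as exercised in MetadataTest below:
TimestampSpec timestampSpec = new TimestampSpec("ds", "auto", null);
IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
    .withTimestampSpec(timestampSpec)
    .withMetrics(new CountAggregatorFactory("count"))
    .withRollup(true)
    .build();
IncrementalIndex index = new OnheapIncrementalIndex.Builder()
    .setIndexSchema(schema)
    .setMaxRowCount(10000)
    .build();
// The spec travels with the segment metadata rather than re-parsing already-parsed rows.
Assert.assertEquals(timestampSpec, index.getMetadata().getTimestampSpec());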
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
In class MetadataTest, method testMerge:
@Test
public void testMerge() {
Assert.assertNull(Metadata.merge(null, null));
Assert.assertNull(Metadata.merge(ImmutableList.of(), null));
List<Metadata> metadataToBeMerged = new ArrayList<>();
metadataToBeMerged.add(null);
Assert.assertNull(Metadata.merge(metadataToBeMerged, null));
// sanity merge check
AggregatorFactory[] aggs = new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "f") };
final Metadata m1 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
final Metadata m2 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
final Metadata m3 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.TRUE);
final Metadata merged = new Metadata(Collections.singletonMap("k", "v"), new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "n") }, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));
// merge check with one metadata being null
metadataToBeMerged.clear();
metadataToBeMerged.add(m1);
metadataToBeMerged.add(m2);
metadataToBeMerged.add(null);
final Metadata merged2 = new Metadata(Collections.singletonMap("k", "v"), null, null, null, null);
Assert.assertEquals(merged2, Metadata.merge(metadataToBeMerged, null));
// merge check with client explicitly providing merged aggregators
AggregatorFactory[] explicitAggs = new AggregatorFactory[] { new DoubleMaxAggregatorFactory("x", "y") };
final Metadata merged3 = new Metadata(Collections.singletonMap("k", "v"), explicitAggs, null, null, null);
Assert.assertEquals(merged3, Metadata.merge(metadataToBeMerged, explicitAggs));
final Metadata merged4 = new Metadata(Collections.singletonMap("k", "v"), explicitAggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, null);
Assert.assertEquals(merged4, Metadata.merge(ImmutableList.of(m3, m2), explicitAggs));
}
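The flip side, not covered above, is that merge() also drops the TimestampSpec when the inputs disagree on it, just as it drops rollup when merging m3 with m2. A minimal standalone sketch of that case; the "other_column" spec and the variable names are illustrative.
AggregatorFactory[] aggs = new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "f") };
final Metadata withDsSpec = new Metadata(
    Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
final Metadata withOtherSpec = new Metadata(
    Collections.singletonMap("k", "v"), aggs, new TimestampSpec("other_column", "iso", null), Granularities.ALL, Boolean.FALSE);
// Conflicting specs cannot be reconciled, so the merged metadata should carry no TimestampSpec.
Assert.assertNull(Metadata.merge(ImmutableList.of(withDsSpec, withOtherSpec), null).getTimestampSpec());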
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
In class MultiValuedDimensionTest, method setup:
@Before
public void setup() throws Exception {
incrementalIndex = new OnheapIncrementalIndex.Builder().setSimpleTestingIndexSchema(new CountAggregatorFactory("count")).setMaxRowCount(5000).build();
StringInputRowParser parser = new StringInputRowParser(
    new CSVParseSpec(
        new TimestampSpec("timestamp", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags", "othertags"))),
        "\t",
        ImmutableList.of("timestamp", "product", "tags", "othertags"),
        false,
        0
    ),
    "UTF-8"
);
String[] rows = new String[] {
    "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3,u1\tu2",
    "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5,u3\tu4",
    "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7,u1\tu5",
    "2011-01-14T00:00:00.000Z,product_4,\"\",u2"
};
for (String row : rows) {
incrementalIndex.add(parser.parse(row));
}
persistedSegmentDir = FileUtils.createTempDir();
TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory).persist(incrementalIndex, persistedSegmentDir, new IndexSpec(), null);
queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
StringInputRowParser parserNullSampler = new StringInputRowParser(
    new JSONParseSpec(
        new TimestampSpec("time", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags", "othertags")))
    ),
    "UTF-8"
);
incrementalIndexNullSampler = new OnheapIncrementalIndex.Builder().setSimpleTestingIndexSchema(new CountAggregatorFactory("count")).setMaxRowCount(5000).build();
String[] rowsNullSampler = new String[] {
    "{\"time\":\"2011-01-13T00:00:00.000Z\",\"product\":\"product_1\",\"tags\":[],\"othertags\":[\"u1\", \"u2\"]}",
    "{\"time\":\"2011-01-12T00:00:00.000Z\",\"product\":\"product_2\",\"othertags\":[\"u3\", \"u4\"]}",
    "{\"time\":\"2011-01-14T00:00:00.000Z\",\"product\":\"product_3\",\"tags\":[\"\"],\"othertags\":[\"u1\", \"u5\"]}",
    "{\"time\":\"2011-01-15T00:00:00.000Z\",\"product\":\"product_4\",\"tags\":[\"t1\", \"t2\", \"\"],\"othertags\":[\"u6\", \"u7\"]}",
    "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_5\",\"tags\":[],\"othertags\":[]}",
    "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_6\"}",
    "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_7\",\"othertags\":[]}",
    "{\"time\":\"2011-01-16T00:00:00.000Z\",\"product\":\"product_8\",\"tags\":[\"\"],\"othertags\":[]}"
};
for (String row : rowsNullSampler) {
incrementalIndexNullSampler.add(parserNullSampler.parse(row));
}
persistedSegmentDirNullSampler = FileUtils.createTempDir();
TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory).persist(incrementalIndexNullSampler, persistedSegmentDirNullSampler, new IndexSpec(), null);
queryableIndexNullSampler = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDirNullSampler);
}
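A minimal standalone sketch of the CSV path in isolation, using the same CSVParseSpec configuration as in the setup above; the single-row parse and assertions below are illustrative, not part of the test class.
StringInputRowParser parser = new StringInputRowParser(
    new CSVParseSpec(
        new TimestampSpec("timestamp", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags", "othertags"))),
        "\t",
        ImmutableList.of("timestamp", "product", "tags", "othertags"),
        false,
        0
    ),
    "UTF-8"
);
// The first CSV column is parsed by the "iso" TimestampSpec; "\t" is the list delimiter for multi-value dimensions.
InputRow row = parser.parse("2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3,u1\tu2");
Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), row.getTimestamp());
Assert.assertEquals(ImmutableList.of("t1", "t2", "t3"), row.getDimension("tags"));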