Example usage of org.apache.druid.data.input.impl.DimensionsSpec in the druid-io/druid project,
from the class IndexGeneratorCombinerTest, method setUp:
@Before
public void setUp() throws Exception {
  // Build the ingestion spec piecewise so each component is readable at a glance.
  final StringInputRowParser parser = new StringInputRowParser(
      new TimeAndDimsParseSpec(
          new TimestampSpec("timestamp", "yyyyMMddHH", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host", "keywords")))
      ),
      null
  );
  final AggregatorFactory[] aggs = new AggregatorFactory[]{
      new LongSumAggregatorFactory("visited_sum", "visited"),
      new HyperUniquesAggregatorFactory("unique_hosts", "host")
  };
  final DataSchema dataSchema = new DataSchema(
      "website",
      HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(parser, Map.class),
      aggs,
      new UniformGranularitySpec(
          Granularities.DAY,
          Granularities.NONE,
          ImmutableList.of(Intervals.of("2010/2011"))
      ),
      null,
      HadoopDruidIndexerConfig.JSON_MAPPER
  );
  final HadoopIOConfig ioConfig = new HadoopIOConfig(
      ImmutableMap.of("paths", "/tmp/dummy", "type", "static"),
      null,
      "/tmp/dummy"
  );
  final HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
      new HadoopIngestionSpec(
          dataSchema,
          ioConfig,
          HadoopTuningConfig.makeDefaultTuningConfig().withWorkingPath("/tmp/work").withVersion("ver")
      )
  );

  // The combiner reads the serialized indexer config back out of the Hadoop
  // Configuration in its setup(), so round-trip it through JSON here.
  final Configuration hadoopConfig = new Configuration();
  hadoopConfig.set(
      HadoopDruidIndexerConfig.CONFIG_PROPERTY,
      HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(config)
  );

  final Reducer.Context context = EasyMock.createMock(Reducer.Context.class);
  EasyMock.expect(context.getConfiguration()).andReturn(hadoopConfig);
  EasyMock.replay(context);

  aggregators = config.getSchema().getDataSchema().getAggregators();
  combiner = new IndexGeneratorJob.IndexGeneratorCombiner();
  combiner.setup(context);
}
Example usage of org.apache.druid.data.input.impl.DimensionsSpec in the druid-io/druid project,
from the class IndexGeneratorCombinerTest, method testMultipleRowsMerged:
@Test
public void testMultipleRowsMerged() throws Exception {
  long timestamp = System.currentTimeMillis();

  Bucket bucket = new Bucket(0, DateTimes.utc(timestamp), 0);
  SortableBytes keySortableBytes = new SortableBytes(bucket.toGroupKey(), new byte[0]);
  BytesWritable key = keySortableBytes.toBytesWritable();

  DimensionsSpec dimensionsSpec = new DimensionsSpec(
      Arrays.asList(
          new StringDimensionSchema("host"),
          new StringDimensionSchema("keywords")
      )
  );
  Map<String, InputRowSerde.IndexSerdeTypeHelper> typeHelperMap = InputRowSerde.getTypeHelperMap(dimensionsSpec);

  // Two rows with the same timestamp and "keywords" dims but different hosts.
  // Note "host" is not in either row's dimension list, so after merging the
  // combined row has no "host" dimension values (asserted below).
  InputRow row1 = new MapBasedInputRow(
      timestamp,
      ImmutableList.of("keywords"),
      ImmutableMap.of("host", "host1", "keywords", Arrays.asList("foo", "bar"), "visited", 10)
  );
  InputRow row2 = new MapBasedInputRow(
      timestamp,
      ImmutableList.of("keywords"),
      ImmutableMap.of("host", "host2", "keywords", Arrays.asList("foo", "bar"), "visited", 5)
  );
  List<BytesWritable> rows = Lists.newArrayList(
      new BytesWritable(InputRowSerde.toBytes(typeHelperMap, row1, aggregators).getSerializedRow()),
      new BytesWritable(InputRowSerde.toBytes(typeHelperMap, row2, aggregators).getSerializedRow())
  );

  Reducer.Context context = EasyMock.createNiceMock(Reducer.Context.class);
  Capture<BytesWritable> captureKey = Capture.newInstance();
  Capture<BytesWritable> captureVal = Capture.newInstance();
  context.write(EasyMock.capture(captureKey), EasyMock.capture(captureVal));
  EasyMock.replay(context);

  combiner.reduce(key, rows, context);

  EasyMock.verify(context);
  // The combiner must forward the reduce key unchanged — assertSame makes the
  // identity expectation explicit and yields a useful message on failure
  // (the original assertTrue(a == b) reported only "expected true").
  Assert.assertSame(key, captureKey.getValue());

  InputRow capturedRow = InputRowSerde.fromBytes(typeHelperMap, captureVal.getValue().getBytes(), aggregators);
  Assert.assertEquals(Arrays.asList("host", "keywords"), capturedRow.getDimensions());
  Assert.assertEquals(ImmutableList.of(), capturedRow.getDimension("host"));
  Assert.assertEquals(Arrays.asList("bar", "foo"), capturedRow.getDimension("keywords"));
  // visited_sum: 10 + 5 from the two merged rows.
  Assert.assertEquals(15, capturedRow.getMetric("visited_sum").longValue());
  // unique_hosts saw "host1" and "host2" -> estimated cardinality ~2.
  Assert.assertEquals(
      2.0,
      (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow.getRaw("unique_hosts"), false),
      0.001
  );
}
Example usage of org.apache.druid.data.input.impl.DimensionsSpec in the druid-io/druid project,
from the class InputRowSerdeTest, method testDimensionNullOrDefaultForNumerics:
@Test
public void testDimensionNullOrDefaultForNumerics() {
// Null values for every dimension except the multi-valued string d2; checks
// how InputRowSerde encodes nulls under both null-handling modes.
HashMap<String, Object> eventWithNulls = new HashMap<>();
eventWithNulls.put("d1", null);
eventWithNulls.put("d2", Arrays.asList("d2v1", "d2v2"));
eventWithNulls.put("d3", null);
eventWithNulls.put("d4", null);
eventWithNulls.put("d5", null);
InputRow in = new MapBasedInputRow(timestamp, dims, eventWithNulls);
// d1/d2 string, d3 long, d4 float, d5 double — exercises every numeric type helper.
DimensionsSpec dimensionsSpec = new DimensionsSpec(Arrays.asList(new StringDimensionSchema("d1"), new StringDimensionSchema("d2"), new LongDimensionSchema("d3"), new FloatDimensionSchema("d4"), new DoubleDimensionSchema("d5")));
// Serialize with no aggregators so the payload is dimensions-only (plus a
// 1-byte aggregator-count suffix).
byte[] result = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(dimensionsSpec), in, new AggregatorFactory[0]).getSerializedRow();
if (NullHandling.replaceWithDefault()) {
// Default-replacement mode: nulls are written as typed default values
// (0 for numerics), so each numeric dimension occupies its full width.
long expected = 0;
// timestamp bytes + dims length
expected += 9;
// dim_non_existing writes: 1 16 1 bytes
expected += 18;
// d1: writes 1 2 1 bytes
expected += 4;
// d2: writes 1 2 1 1 4 1 4 bytes
expected += 14;
// d3: writes 1 2 8 bytes
expected += 11;
// d4: writes 1 2 4 bytes
expected += 7;
// d5: writes 1 2 8 bytes
expected += 11;
// writes aggregator length
expected += 1;
Assert.assertEquals(expected, result.length);
// The byte offsets below are derived from the per-dimension widths tallied
// above: d3's 8-byte long payload at [48,56), d4's 4-byte float at [59,63),
// d5's 8-byte double at [66,74) — all zeroed defaults.
Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0 }, Arrays.copyOfRange(result, 48, 56));
Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0 }, Arrays.copyOfRange(result, 59, 63));
Assert.assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0 }, Arrays.copyOfRange(result, 66, 74));
} else {
// SQL-compatible null mode: each null numeric collapses to a single
// IS_NULL marker byte instead of a full-width default (hence 4 bytes
// each for d3/d4/d5 rather than 11/7/11).
long expected = 9 + 18 + 4 + 14 + 4 + 4 + 4 + 1;
Assert.assertEquals(expected, result.length);
// Offsets 48/52/56 are the null-marker positions for d3, d4 and d5
// under the compact encoding.
Assert.assertEquals(result[48], NullHandling.IS_NULL_BYTE);
Assert.assertEquals(result[52], NullHandling.IS_NULL_BYTE);
Assert.assertEquals(result[56], NullHandling.IS_NULL_BYTE);
}
}
Example usage of org.apache.druid.data.input.impl.DimensionsSpec in the druid-io/druid project,
from the class InputRowSerdeTest, method testDimensionParseExceptions:
@Test
public void testDimensionParseExceptions() {
  final InputRow row = new MapBasedInputRow(timestamp, dims, event);
  final AggregatorFactory[] factories = new AggregatorFactory[]{
      new LongSumAggregatorFactory("m2out", "m2")
  };

  // The event's "d1" holds the string "d1v", which cannot be coerced to any
  // numeric dimension type; each schema should surface exactly one
  // type-specific parse error message.
  final DimensionsSpec longSpec =
      new DimensionsSpec(Collections.singletonList(new LongDimensionSchema("d1")));
  InputRowSerde.SerializeResult serialized =
      InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(longSpec), row, factories);
  Assert.assertEquals(
      Collections.singletonList("could not convert value [d1v] to long"),
      serialized.getParseExceptionMessages()
  );

  final DimensionsSpec floatSpec =
      new DimensionsSpec(Collections.singletonList(new FloatDimensionSchema("d1")));
  serialized = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(floatSpec), row, factories);
  Assert.assertEquals(
      Collections.singletonList("could not convert value [d1v] to float"),
      serialized.getParseExceptionMessages()
  );

  final DimensionsSpec doubleSpec =
      new DimensionsSpec(Collections.singletonList(new DoubleDimensionSchema("d1")));
  serialized = InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(doubleSpec), row, factories);
  Assert.assertEquals(
      Collections.singletonList("could not convert value [d1v] to double"),
      serialized.getParseExceptionMessages()
  );
}
Example usage of org.apache.druid.data.input.impl.DimensionsSpec in the druid-io/druid project,
from the class InputRowSerdeTest, method testThrowParseExceptions:
@Test
public void testThrowParseExceptions() {
  final InputRow row = new MapBasedInputRow(timestamp, dims, event);

  // The last factory sums "m3", a string-valued column, so serialization
  // should report exactly one parse failure while the others succeed.
  final AggregatorFactory[] factories = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory("agg_non_existing", "agg_non_existing_in"),
      new DoubleSumAggregatorFactory("m1out", "m1"),
      new LongSumAggregatorFactory("m2out", "m2"),
      new HyperUniquesAggregatorFactory("m3out", "m3"),
      // Unparseable from String to Long
      new LongSumAggregatorFactory("unparseable", "m3")
  };

  final DimensionsSpec spec = new DimensionsSpec(
      Arrays.asList(
          new StringDimensionSchema("d1"),
          new StringDimensionSchema("d2"),
          new LongDimensionSchema("d3"),
          new FloatDimensionSchema("d4"),
          new DoubleDimensionSchema("d5")
      )
  );

  final InputRowSerde.SerializeResult serialized =
      InputRowSerde.toBytes(InputRowSerde.getTypeHelperMap(spec), row, factories);
  Assert.assertEquals(
      Collections.singletonList("Unable to parse value[m3v] for field[m3]"),
      serialized.getParseExceptionMessages()
  );
}
Aggregations