Use of io.druid.data.input.impl.TimeAndDimsParseSpec in project druid by druid-io.
The class OrcHadoopInputRowParserTest, method testTypeFromParseSpec.
@Test
public void testTypeFromParseSpec() {
ParseSpec parseSpec = new TimeAndDimsParseSpec(
    new TimestampSpec("timestamp", "auto", null),
    new DimensionsSpec(
        ImmutableList.<DimensionSchema>of(
            new StringDimensionSchema("col1"),
            new StringDimensionSchema("col2")
        ),
        null,
        null
    )
);
String typeString = OrcHadoopInputRowParser.typeStringFromParseSpec(parseSpec);
String expected = "struct<timestamp:string,col1:string,col2:string>";
Assert.assertEquals(expected, typeString);
}
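For context, typeStringFromParseSpec evidently builds the ORC struct signature from the timestamp column plus the dimension names, defaulting every column to string. Below is a minimal standalone sketch of that assembly logic; the class and helper names are illustrative assumptions, not Druid's actual implementation.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class TypeStringSketch {
    // Hypothetical helper: joins the timestamp column and the dimensions into
    // an ORC struct signature, treating every column as string by default.
    static String typeString(String timestampColumn, List<String> dimensions) {
        String columns = dimensions.stream()
            .map(dim -> dim + ":string")
            .collect(Collectors.joining(","));
        return "struct<" + timestampColumn + ":string," + columns + ">";
    }

    public static void main(String[] args) {
        // Prints the same value the test above expects:
        // struct<timestamp:string,col1:string,col2:string>
        System.out.println(typeString("timestamp", Arrays.asList("col1", "col2")));
    }
}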
Use of io.druid.data.input.impl.TimeAndDimsParseSpec in project druid by druid-io.
The class IndexGeneratorCombinerTest, method setUp.
@Before
public void setUp() throws Exception {
// Build a full Hadoop ingestion spec whose StringInputRowParser is backed by a TimeAndDimsParseSpec.
HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
    new HadoopIngestionSpec(
        new DataSchema(
            "website",
            HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                new StringInputRowParser(
                    new TimeAndDimsParseSpec(
                        new TimestampSpec("timestamp", "yyyyMMddHH", null),
                        new DimensionsSpec(
                            DimensionsSpec.getDefaultSchemas(ImmutableList.of("host", "keywords")),
                            null,
                            null
                        )
                    ),
                    null
                ),
                Map.class
            ),
            new AggregatorFactory[] {
                new LongSumAggregatorFactory("visited_sum", "visited"),
                new HyperUniquesAggregatorFactory("unique_hosts", "host")
            },
            new UniformGranularitySpec(
                Granularities.DAY,
                Granularities.NONE,
                ImmutableList.of(Interval.parse("2010/2011"))
            ),
            HadoopDruidIndexerConfig.JSON_MAPPER
        ),
        new HadoopIOConfig(ImmutableMap.<String, Object>of("paths", "/tmp/dummy", "type", "static"), null, "/tmp/dummy"),
        HadoopTuningConfig.makeDefaultTuningConfig().withWorkingPath("/tmp/work").withVersion("ver")
    )
);
Configuration hadoopConfig = new Configuration();
hadoopConfig.set(HadoopDruidIndexerConfig.CONFIG_PROPERTY, HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(config));
Reducer.Context context = EasyMock.createMock(Reducer.Context.class);
EasyMock.expect(context.getConfiguration()).andReturn(hadoopConfig);
EasyMock.replay(context);
aggregators = config.getSchema().getDataSchema().getAggregators();
combiner = new IndexGeneratorJob.IndexGeneratorCombiner();
combiner.setup(context);
}
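Because the parse spec above is buried several constructor calls deep, here is the same construction unpacked into named intermediates for readability; the variable names and inline comments are illustrative, not part of the original test.

// The parser from setUp(), with each piece pulled out and named.
TimestampSpec timestampSpec = new TimestampSpec("timestamp", "yyyyMMddHH", null);
DimensionsSpec dimensionsSpec = new DimensionsSpec(
    DimensionsSpec.getDefaultSchemas(ImmutableList.of("host", "keywords")),
    null, // dimension exclusions
    null  // spatial dimensions
);
StringInputRowParser rowParser = new StringInputRowParser(
    new TimeAndDimsParseSpec(timestampSpec, dimensionsSpec),
    null // encoding
);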
Use of io.druid.data.input.impl.TimeAndDimsParseSpec in project druid by druid-io.
The class ProtoBufInputRowParserTest, method testParse.
/*
* eventType = 1;
*
* required uint64 id = 2;
* required string timestamp = 3;
* optional uint32 someOtherId = 4;
* optional bool isValid = 5;
* optional string description = 6;
*
* optional float someFloatColumn = 7;
* optional uint32 someIntColumn = 8;
* optional uint64 someLongColumn = 9;
*/
@Test
public void testParse() throws Exception {
//configure parser with desc file
ProtoBufInputRowParser parser = new ProtoBufInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("timestamp", "iso", null),
        new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(Arrays.asList(DIMENSIONS)),
            Arrays.<String>asList(),
            null
        )
    ),
    "prototest.desc"
);
//create binary of proto test event
DateTime dateTime = new DateTime(2012, 7, 12, 9, 30);
ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder()
    .setDescription("description")
    .setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE)
    .setId(4711L)
    .setIsValid(true)
    .setSomeOtherId(4712)
    .setTimestamp(dateTime.toString())
    .setSomeFloatColumn(47.11F)
    .setSomeIntColumn(815)
    .setSomeLongColumn(816L)
    .build();
ByteArrayOutputStream out = new ByteArrayOutputStream();
event.writeTo(out);
InputRow row = parser.parse(ByteBuffer.wrap(out.toByteArray()));
System.out.println(row);
assertEquals(Arrays.asList(DIMENSIONS), row.getDimensions());
assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
assertDimensionEquals(row, "id", "4711");
assertDimensionEquals(row, "isValid", "true");
assertDimensionEquals(row, "someOtherId", "4712");
assertDimensionEquals(row, "description", "description");
assertDimensionEquals(row, "eventType", ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE.name());
assertEquals(47.11F, row.getFloatMetric("someFloatColumn"), 0.0);
assertEquals(815.0F, row.getFloatMetric("someIntColumn"), 0.0);
assertEquals(816.0F, row.getFloatMetric("someLongColumn"), 0.0);
}
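The assertDimensionEquals helper is not shown above; the following is a plausible sketch consistent with how it is called, though the real helper in ProtoBufInputRowParserTest may differ.

// Asserts that the dimension holds exactly one value equal to `expected`.
private void assertDimensionEquals(InputRow row, String dimension, String expected) {
    List<String> values = row.getDimension(dimension);
    assertEquals(1, values.size());
    assertEquals(expected, values.get(0));
}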
Use of io.druid.data.input.impl.TimeAndDimsParseSpec in project druid by druid-io.
The class OrcHadoopInputRowParserTest, method testSerde.
@Test
public void testSerde() throws IOException {
String parserString = "{\n"
    + "  \"type\": \"orc\",\n"
    + "  \"parseSpec\": {\n"
    + "    \"format\": \"timeAndDims\",\n"
    + "    \"timestampSpec\": {\n"
    + "      \"column\": \"timestamp\",\n"
    + "      \"format\": \"auto\"\n"
    + "    },\n"
    + "    \"dimensionsSpec\": {\n"
    + "      \"dimensions\": [\n"
    + "        \"col1\",\n"
    + "        \"col2\"\n"
    + "      ],\n"
    + "      \"dimensionExclusions\": [],\n"
    + "      \"spatialDimensions\": []\n"
    + "    }\n"
    + "  },\n"
    + "  \"typeString\": \"struct<timestamp:string,col1:string,col2:array<string>,val1:float>\"\n"
    + "}";
InputRowParser parser = mapper.readValue(parserString, InputRowParser.class);
InputRowParser expected = new OrcHadoopInputRowParser(
    new TimeAndDimsParseSpec(
        new TimestampSpec("timestamp", "auto", null),
        new DimensionsSpec(
            ImmutableList.<DimensionSchema>of(
                new StringDimensionSchema("col1"),
                new StringDimensionSchema("col2")
            ),
            null,
            null
        )
    ),
    "struct<timestamp:string,col1:string,col2:array<string>,val1:float>"
);
Assert.assertEquals(expected, parser);
}
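As a usage note, the same mapper can serialize a parser back to JSON, so a quick round-trip check is possible. This is a sketch under the assumption that mapper is a Jackson ObjectMapper configured for polymorphic InputRowParser serde, as the test implies.

// Round-trip: serialize the expected parser and read it back.
String json = mapper.writeValueAsString(expected);
InputRowParser roundTripped = mapper.readValue(json, InputRowParser.class);
Assert.assertEquals(expected, roundTripped);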