Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class MultiValuedDimensionTest, method setupClass:
@BeforeClass
public static void setupClass() throws Exception {
  incrementalIndex = new OnheapIncrementalIndex(
      0,
      Granularities.NONE,
      new AggregatorFactory[]{ new CountAggregatorFactory("count") },
      true,
      true,
      true,
      5000
  );
  StringInputRowParser parser = new StringInputRowParser(
      new CSVParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags")), null, null),
          "\t",
          ImmutableList.of("timestamp", "product", "tags")
      ),
      "UTF-8"
  );
  String[] rows = new String[]{
      "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3",
      "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5",
      "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7",
      "2011-01-14T00:00:00.000Z,product_4"
  };
  for (String row : rows) {
    incrementalIndex.add(parser.parse(row));
  }
  persistedSegmentDir = Files.createTempDir();
  TestHelper.getTestIndexMerger().persist(incrementalIndex, persistedSegmentDir, new IndexSpec());
  queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
}
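For reference, a minimal sketch of what the TimestampSpec built above does on its own; the map literal and variable names here are illustrative, not part of the test:

// TimestampSpec(column, format, missingValue): reads the "timestamp" field
// from a row map and parses it as ISO-8601. The null third argument is the
// fallback value used when the column is absent.
TimestampSpec timestampSpec = new TimestampSpec("timestamp", "iso", null);
DateTime timestamp = timestampSpec.extractTimestamp(
    ImmutableMap.<String, Object>of("timestamp", "2011-01-12T00:00:00.000Z")
);
// timestamp is the Joda DateTime 2011-01-12T00:00:00.000Z.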
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ProtoBufInputRowParserTest, method testParse:
/*
* eventType = 1;
*
* required uint64 id = 2;
* required string timestamp = 3;
* optional uint32 someOtherId = 4;
* optional bool isValid = 5;
* optional string description = 6;
*
* optional float someFloatColumn = 7;
* optional uint32 someIntColumn = 8;
* optional uint64 someLongColumn = 9;
*/
@Test
public void testParse() throws Exception {
  // configure parser with desc file
  ProtoBufInputRowParser parser = new ProtoBufInputRowParser(
      new TimeAndDimsParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList(DIMENSIONS)), Arrays.<String>asList(), null)
      ),
      "prototest.desc"
  );
  // create binary of proto test event
  DateTime dateTime = new DateTime(2012, 7, 12, 9, 30);
  ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder()
      .setDescription("description")
      .setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE)
      .setId(4711L)
      .setIsValid(true)
      .setSomeOtherId(4712)
      .setTimestamp(dateTime.toString())
      .setSomeFloatColumn(47.11F)
      .setSomeIntColumn(815)
      .setSomeLongColumn(816L)
      .build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  event.writeTo(out);
  InputRow row = parser.parse(ByteBuffer.wrap(out.toByteArray()));
  System.out.println(row);
  assertEquals(Arrays.asList(DIMENSIONS), row.getDimensions());
  assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
  assertDimensionEquals(row, "id", "4711");
  assertDimensionEquals(row, "isValid", "true");
  assertDimensionEquals(row, "someOtherId", "4712");
  assertDimensionEquals(row, "description", "description");
  assertDimensionEquals(row, "eventType", ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE.name());
  assertEquals(47.11F, row.getFloatMetric("someFloatColumn"), 0.0);
  assertEquals(815.0F, row.getFloatMetric("someIntColumn"), 0.0);
  assertEquals(816.0F, row.getFloatMetric("someLongColumn"), 0.0);
}
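The assertions above use an assertDimensionEquals helper that this excerpt omits; a plausible reconstruction from its call sites (the actual test's version may differ):

// Hypothetical helper: asserts the dimension holds exactly one value
// equal to `expected`.
private void assertDimensionEquals(InputRow row, String dimension, String expected) {
  List<String> values = row.getDimension(dimension);
  assertEquals(1, values.size());
  assertEquals(expected, values.get(0));
}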
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class DefaultOfflineAppenderatorFactoryTest, method testBuild:
@Test
public void testBuild() throws IOException, SegmentNotWritableException {
  Injector injector = Initialization.makeInjectorWithModules(
      GuiceInjectors.makeStartupInjector(),
      ImmutableList.<Module>of(new Module() {
        @Override
        public void configure(Binder binder) {
          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool");
          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999);
          binder.bind(DruidProcessingConfig.class).toInstance(new DruidProcessingConfig() {
            @Override
            public String getFormatString() {
              return "processing-%s";
            }

            @Override
            public int intermediateComputeSizeBytes() {
              return 100 * 1024 * 1024;
            }

            @Override
            public int getNumThreads() {
              return 1;
            }

            @Override
            public int columnCacheSizeBytes() {
              return 25 * 1024 * 1024;
            }
          });
          binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class);
        }
      })
  );
  ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);
  AppenderatorFactory defaultOfflineAppenderatorFactory =
      objectMapper.reader(AppenderatorFactory.class).readValue("{\"type\":\"offline\"}");
  final Map<String, Object> parserMap = objectMapper.convertValue(
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("ts", "auto", null),
              new DimensionsSpec(null, null, null),
              null,
              null
          )
      ),
      Map.class
  );
  DataSchema schema = new DataSchema(
      "dataSourceName",
      parserMap,
      new AggregatorFactory[]{
          new CountAggregatorFactory("count"),
          new LongSumAggregatorFactory("met", "met")
      },
      new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null),
      objectMapper
  );
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      75000, null, null, temporaryFolder.newFolder(), null, null, null, null, null, null, 0, 0, null, null
  );
  try (Appenderator appenderator = defaultOfflineAppenderatorFactory.build(schema, tuningConfig, new FireDepartmentMetrics())) {
    Assert.assertEquals("dataSourceName", appenderator.getDataSource());
    Assert.assertEquals(null, appenderator.startJob());
    SegmentIdentifier identifier = new SegmentIdentifier(
        "dataSourceName",
        new Interval("2000/2001"),
        "A",
        new LinearShardSpec(0)
    );
    Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
    appenderator.add(identifier, AppenderatorTest.IR("2000", "bar", 1), Suppliers.ofInstance(Committers.nil()));
    Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
    appenderator.add(identifier, AppenderatorTest.IR("2000", "baz", 1), Suppliers.ofInstance(Committers.nil()));
    Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
    appenderator.close();
    Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
  }
}
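AppenderatorTest.IR is a row-building helper not shown in this excerpt; a hedged sketch of what it plausibly returns, judging from how it is called above (the dimension and metric names are assumptions):

// Hypothetical sketch of AppenderatorTest.IR(ts, dim, met): a one-dimension,
// one-metric in-memory row. The real helper may differ.
static InputRow IR(String ts, String dim, long met) {
  return new MapBasedInputRow(
      new DateTime(ts).getMillis(),
      ImmutableList.of("dim"),
      ImmutableMap.<String, Object>of("dim", dim, "met", met)
  );
}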
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class EventReceiverFirehoseTest, method setUp:
@Before
public void setUp() throws Exception {
  req = EasyMock.createMock(HttpServletRequest.class);
  eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(
      SERVICE_NAME,
      CAPACITY,
      null,
      new DefaultObjectMapper(),
      new DefaultObjectMapper(),
      register
  );
  firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect(
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("timestamp", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1")), null, null),
              null,
              null
          )
      )
  );
}
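Unlike the "iso" specs in the earlier examples, this TimestampSpec uses the "auto" format, which detects the timestamp representation per row. A minimal sketch, with illustrative event maps that are not part of the test:

// "auto" lets one spec accept both ISO-8601 strings and epoch milliseconds.
MapInputRowParser parser = new MapInputRowParser(
    new JSONParseSpec(
        new TimestampSpec("timestamp", "auto", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1")), null, null),
        null,
        null
    )
);
InputRow fromIso = parser.parse(ImmutableMap.<String, Object>of("timestamp", "2016-01-01T00:00:00Z", "d1", "v1"));
InputRow fromMillis = parser.parse(ImmutableMap.<String, Object>of("timestamp", 1451606400000L, "d1", "v1"));
// Both rows resolve to the same instant.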
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class EventReceiverFirehoseTest, method testDuplicateRegistering:
@Test(expected = ISE.class)
public void testDuplicateRegistering() throws IOException {
  EventReceiverFirehoseFactory eventReceiverFirehoseFactory2 = new EventReceiverFirehoseFactory(
      SERVICE_NAME,
      CAPACITY,
      null,
      new DefaultObjectMapper(),
      new DefaultObjectMapper(),
      register
  );
  EventReceiverFirehoseFactory.EventReceiverFirehose firehose2 =
      (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory2.connect(
          new MapInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "auto", null),
                  new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1")), null, null),
                  null,
                  null
              )
          )
      );
}
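The expected ISE arises because both factories share SERVICE_NAME and the same register, so the second connect() hits an already-taken registration. A hedged sketch of that check, not the factory's actual code:

// Hypothetical illustration of the duplicate-registration guard; the real
// EventReceiverFirehoseFactory/ChatHandlerProvider code may differ in details.
if (chatHandlerProvider.get(serviceName).isPresent()) {
  throw new ISE("Service [%s] is already registered!", serviceName);
}
chatHandlerProvider.register(serviceName, firehose);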