Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class SinkTest, method testSwap.
@Test
public void testSwap() throws Exception {
  final DataSchema schema = new DataSchema(
      "test",
      new TimestampSpec(null, null, null),
      DimensionsSpec.EMPTY,
      new AggregatorFactory[]{ new CountAggregatorFactory("rows") },
      new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
      null
  );
  final Interval interval = Intervals.of("2013-01-01/2013-01-02");
  final String version = DateTimes.nowUtc().toString();
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      null, 100, null, null, new Period("P1Y"), null, null, null, null,
      null, null, null, null, 0, 0, null, null, null, null, null
  );
  final Sink sink = new Sink(
      interval,
      schema,
      tuningConfig.getShardSpec(),
      version,
      tuningConfig.getAppendableIndexSpec(),
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.getMaxBytesInMemoryOrDefault(),
      true,
      tuningConfig.getDedupColumn()
  );

  // Add one row so the sink opens its first hydrant.
  sink.add(new InputRow() {
    @Override
    public List<String> getDimensions() {
      return new ArrayList<>();
    }

    @Override
    public long getTimestampFromEpoch() {
      return DateTimes.of("2013-01-01").getMillis();
    }

    @Override
    public DateTime getTimestamp() {
      return DateTimes.of("2013-01-01");
    }

    @Override
    public List<String> getDimension(String dimension) {
      return new ArrayList<>();
    }

    @Override
    public Number getMetric(String metric) {
      return 0;
    }

    @Override
    public Object getRaw(String dimension) {
      return null;
    }

    @Override
    public int compareTo(Row o) {
      return 0;
    }
  }, false);

  FireHydrant currHydrant = sink.getCurrHydrant();
  Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), currHydrant.getIndex().getInterval());

  // Swap the current hydrant out, then add a second row to the fresh one.
  FireHydrant swapHydrant = sink.swap();
  sink.add(new InputRow() {
    @Override
    public List<String> getDimensions() {
      return new ArrayList<>();
    }

    @Override
    public long getTimestampFromEpoch() {
      return DateTimes.of("2013-01-01").getMillis();
    }

    @Override
    public DateTime getTimestamp() {
      return DateTimes.of("2013-01-01");
    }

    @Override
    public List<String> getDimension(String dimension) {
      return new ArrayList<>();
    }

    @Override
    public Number getMetric(String metric) {
      return 0;
    }

    @Override
    public Object getRaw(String dimension) {
      return null;
    }

    @Override
    public int compareTo(Row o) {
      return 0;
    }
  }, false);

  Assert.assertEquals(currHydrant, swapHydrant);
  Assert.assertNotSame(currHydrant, sink.getCurrHydrant());
  Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval());
  Assert.assertEquals(2, Iterators.size(sink.iterator()));
}
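The anonymous InputRow above exists only to carry a timestamp; a more compact way to build such a row is MapBasedInputRow, which the testDedup snippet below already uses. A minimal sketch, assuming Guava's ImmutableList and ImmutableMap are imported:

// Sketch: an empty row at 2013-01-01, equivalent for this test's purposes
// to the hand-rolled anonymous InputRow (no dimensions, no metrics).
InputRow row = new MapBasedInputRow(
    DateTimes.of("2013-01-01"),  // row timestamp
    ImmutableList.of(),          // no dimensions
    ImmutableMap.of()            // empty event map
);
sink.add(row, false);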
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class SinkTest, method testDedup.
@Test
public void testDedup() throws Exception {
  final DataSchema schema = new DataSchema(
      "test",
      new TimestampSpec(null, null, null),
      DimensionsSpec.EMPTY,
      new AggregatorFactory[]{ new CountAggregatorFactory("rows") },
      new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
      null
  );
  final Interval interval = Intervals.of("2013-01-01/2013-01-02");
  final String version = DateTimes.nowUtc().toString();
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      null, 100, null, null, new Period("P1Y"), null, null, null, null,
      null, null, null, null, 0, 0, null, null, null, null, "dedupColumn"
  );
  final Sink sink = new Sink(
      interval,
      schema,
      tuningConfig.getShardSpec(),
      version,
      tuningConfig.getAppendableIndexSpec(),
      tuningConfig.getMaxRowsInMemory(),
      tuningConfig.getMaxBytesInMemoryOrDefault(),
      true,
      tuningConfig.getDedupColumn()
  );

  // First occurrence of dedupColumn value "v1": accepted.
  int rows = sink.add(new MapBasedInputRow(DateTimes.of("2013-01-01"),
      ImmutableList.of("field", "dedupColumn"),
      ImmutableMap.of("field1", "value1", "dedupColumn", "v1")), false).getRowCount();
  Assert.assertTrue(rows > 0);

  // dedupColumn is null: rows without a dedup value are accepted.
  rows = sink.add(new MapBasedInputRow(DateTimes.of("2013-01-01"),
      ImmutableList.of("field", "dedupColumn"),
      ImmutableMap.of("field1", "value2")), false).getRowCount();
  Assert.assertTrue(rows > 0);

  // dedupColumn is null again: still accepted.
  rows = sink.add(new MapBasedInputRow(DateTimes.of("2013-01-01"),
      ImmutableList.of("field", "dedupColumn"),
      ImmutableMap.of("field1", "value3")), false).getRowCount();
  Assert.assertTrue(rows > 0);

  // New dedupColumn value "v2": accepted.
  rows = sink.add(new MapBasedInputRow(DateTimes.of("2013-01-01"),
      ImmutableList.of("field", "dedupColumn"),
      ImmutableMap.of("field1", "value4", "dedupColumn", "v2")), false).getRowCount();
  Assert.assertTrue(rows > 0);

  // Duplicate dedupColumn value "v1": the add is rejected.
  rows = sink.add(new MapBasedInputRow(DateTimes.of("2013-01-01"),
      ImmutableList.of("field", "dedupColumn"),
      ImmutableMap.of("field1", "value5", "dedupColumn", "v1")), false).getRowCount();
  Assert.assertEquals(-2, rows);
}
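Both tests pass new TimestampSpec(null, null, null), leaning on the spec's built-in defaults. A hedged sketch of what that means, assuming the defaults are column "timestamp", format "auto", and no missing-value fallback (worth verifying against your Druid version):

// Sketch: these two specs should be interchangeable under the
// assumed defaults of column "timestamp" and format "auto".
TimestampSpec implicit = new TimestampSpec(null, null, null);
TimestampSpec explicit = new TimestampSpec("timestamp", "auto", null);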
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class EventReceiverFirehoseTest, method setUp.
@Before
public void setUp() {
  req = EasyMock.createMock(HttpServletRequest.class);
  eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(
      SERVICE_NAME,
      CAPACITY,
      MAX_IDLE_TIME_MILLIS,
      null,
      new DefaultObjectMapper(),
      new DefaultObjectMapper(),
      register,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER
  );
  firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect(
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("timestamp", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("d1"))),
              null,
              null,
              null
          )
      ),
      null
  );
}
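The JSONParseSpec above routes each event's "timestamp" field through a TimestampSpec with the "auto" format, which detects ISO strings as well as epoch millis. A small sketch, assuming TimestampSpec#extractTimestamp(Map) and Guava's ImmutableMap:

TimestampSpec spec = new TimestampSpec("timestamp", "auto", null);
// "auto" should handle both representations of the same instant.
DateTime fromIso = spec.extractTimestamp(ImmutableMap.of("timestamp", "2013-01-01T00:00:00Z"));
DateTime fromMillis = spec.extractTimestamp(ImmutableMap.of("timestamp", 1356998400000L));
// Both are expected to resolve to 2013-01-01T00:00:00.000Z.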
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class UnifiedIndexerAppenderatorsManagerTest, method setup.
@Before
public void setup() {
  appenderatorConfig = EasyMock.createMock(AppenderatorConfig.class);
  EasyMock.expect(appenderatorConfig.getMaxPendingPersists()).andReturn(0);
  EasyMock.expect(appenderatorConfig.isSkipBytesInMemoryOverheadCheck()).andReturn(false);
  EasyMock.replay(appenderatorConfig);
  appenderator = manager.createClosedSegmentsOfflineAppenderatorForTask(
      "taskId",
      new DataSchema(
          "myDataSource",
          new TimestampSpec("__time", "millis", null),
          null,
          null,
          new UniformGranularitySpec(Granularities.HOUR, Granularities.HOUR, false, Collections.emptyList()),
          null
      ),
      appenderatorConfig,
      new FireDepartmentMetrics(),
      new NoopDataSegmentPusher(),
      TestHelper.makeJsonMapper(),
      TestHelper.getTestIndexIO(),
      TestHelper.getTestIndexMergerV9(OnHeapMemorySegmentWriteOutMediumFactory.instance()),
      new NoopRowIngestionMeters(),
      new ParseExceptionHandler(new NoopRowIngestionMeters(), false, 0, 0),
      true
  );
}
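Unlike the "auto" specs above, this TimestampSpec uses the "millis" format, so "__time" is read strictly as epoch milliseconds rather than auto-detected. A sketch under the same extractTimestamp assumption:

TimestampSpec millisSpec = new TimestampSpec("__time", "millis", null);
// The value is taken as epoch millis; a date string here would not parse.
DateTime t = millisSpec.extractTimestamp(ImmutableMap.of("__time", 1356998400000L));
// Expected: 2013-01-01T00:00:00.000Z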
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class LocalFirehoseFactoryTest, method testConnect.
@Test
public void testConnect() throws IOException {
  try (final Firehose firehose = factory.connect(
      new StringInputRowParser(
          new CSVParseSpec(
              new TimestampSpec("timestamp", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a"))),
              ",",
              Arrays.asList("timestamp", "a"),
              false,
              0
          ),
          StandardCharsets.UTF_8.name()
      ),
      null
  )) {
    final List<Row> rows = new ArrayList<>();
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
    Assert.assertEquals(5, rows.size());
    rows.sort(Comparator.comparing(Row::getTimestamp));
    for (int i = 0; i < 5; i++) {
      final List<String> dimVals = rows.get(i).getDimension("a");
      Assert.assertEquals(1, dimVals.size());
      Assert.assertEquals(i + "th test file", dimVals.get(0));
    }
  }
}
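To see the parse step in isolation, the same StringInputRowParser can be fed a single CSV line directly. A minimal sketch, assuming StringInputRowParser#parse(String):

StringInputRowParser parser = new StringInputRowParser(
    new CSVParseSpec(
        new TimestampSpec("timestamp", "auto", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a"))),
        ",",
        Arrays.asList("timestamp", "a"),
        false,
        0
    ),
    StandardCharsets.UTF_8.name()
);
// One CSV line -> one InputRow; the "timestamp" column supplies the row time.
InputRow row = parser.parse("2013-01-01T00:00:00Z,0th test file");
// row.getTimestamp()    -> 2013-01-01T00:00:00.000Z
// row.getDimension("a") -> ["0th test file"]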