Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
From the class TransformerTest, method testTransformTimeColumn.
@Test
public void testTransformTimeColumn()
{
  final Transformer transformer = new Transformer(new TransformSpec(
      null,
      ImmutableList.of(new ExpressionTransform("__time", "timestamp_shift(__time, 'P1D', -2)", TestExprMacroTable.INSTANCE))
  ));
  final DateTime now = DateTimes.nowUtc();
  final InputRow row = new MapBasedInputRow(now, ImmutableList.of("dim"), ImmutableMap.of("__time", now, "dim", false));
  final InputRow actual = transformer.transform(row);
  Assert.assertNotNull(actual);
  Assert.assertEquals(now.minusDays(2), actual.getTimestamp());
}
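The third argument to timestamp_shift is the step multiplier for the period, so 'P1D' with step -2 moves the timestamp two days into the past, matching the now.minusDays(2) assertion. A minimal sketch of the same pattern with a forward shift (the 'P1M' expression and the assertion are illustrative, not part of the original test):

final Transformer monthShift = new Transformer(new TransformSpec(
    null,
    ImmutableList.of(new ExpressionTransform("__time", "timestamp_shift(__time, 'P1M', 1)", TestExprMacroTable.INSTANCE))
));
final DateTime now = DateTimes.nowUtc();
final InputRow shifted = monthShift.transform(
    new MapBasedInputRow(now, ImmutableList.of("dim"), ImmutableMap.of("__time", now, "dim", "x"))
);
// timestamp_shift with step 1 should land exactly one period (one month) ahead.
Assert.assertEquals(now.plusMonths(1), shifted.getTimestamp());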
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
From the class TransformerTest, method testTransformWithStringTransformOnListColumnThrowingException.
@Ignore("Disabled until https://github.com/apache/druid/issues/9824 is fixed")
@Test
public void testTransformWithStringTransformOnListColumnThrowingException() {
final Transformer transformer = new Transformer(new TransformSpec(null, ImmutableList.of(new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE))));
final InputRow row = new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", ImmutableList.of(10, 20, 100)));
final InputRow actual = transformer.transform(row);
Assert.assertNotNull(actual);
Assert.assertEquals(ImmutableList.of("dim"), actual.getDimensions());
// Unlike for querying, Druid doesn't explode multi-valued columns automatically for ingestion.
expectedException.expect(AssertionError.class);
actual.getRaw("dim");
}
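The transform itself succeeds; the failure only surfaces when getRaw evaluates strlen against the multi-valued list, which is why the test is ignored pending apache/druid#9824. For contrast, a sketch (assumed, not taken from the test class) of the same transform on a single-valued string, where evaluation is expected to yield the string's length as a long:

final Transformer strlenTransformer = new Transformer(new TransformSpec(
    null,
    ImmutableList.of(new ExpressionTransform("dim", "strlen(dim)", TestExprMacroTable.INSTANCE))
));
final InputRow ok = strlenTransformer.transform(
    new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", "druid"))
);
// The expression evaluates lazily at read time; strlen("druid") is 5.
Assert.assertEquals(5L, ok.getRaw("dim"));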
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
From the class HyperUniquesSerdeForTest, method getExtractor.
@Override
public ComplexMetricExtractor getExtractor()
{
  return new ComplexMetricExtractor()
  {
    @Override
    public Class<HyperLogLogCollector> extractedClass()
    {
      return HyperLogLogCollector.class;
    }

    @Override
    public HyperLogLogCollector extractValue(InputRow inputRow, String metricName)
    {
      Object rawValue = inputRow.getRaw(metricName);
      if (rawValue instanceof HyperLogLogCollector) {
        return (HyperLogLogCollector) rawValue;
      } else {
        HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
        List<String> dimValues = inputRow.getDimension(metricName);
        if (dimValues == null) {
          return collector;
        }
        for (String dimensionValue : dimValues) {
          collector.add(hashFn.hashBytes(StringUtils.toUtf8(dimensionValue)).asBytes());
        }
        return collector;
      }
    }
  };
}
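The extractor passes an already-built HyperLogLogCollector through unchanged; for any other value it hashes each stringified dimension value into a fresh collector, and returns an empty collector when the column is absent. A hypothetical call site, assuming the test serde is constructed with a Guava hash function serving as the hashFn used above:

ComplexMetricExtractor extractor = new HyperUniquesSerdeForTest(Hashing.murmur3_128()).getExtractor();
InputRow row = new MapBasedInputRow(
    DateTimes.nowUtc(),
    ImmutableList.of("user"),
    ImmutableMap.of("user", "alice")
);
// One distinct value was hashed in, so the estimate should be close to 1.
HyperLogLogCollector collector = (HyperLogLogCollector) extractor.extractValue(row, "user");
System.out.println(collector.estimateCardinality());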
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
From the class TransformSpecTest, method testTransformTimeFromOtherFields.
@Test
public void testTransformTimeFromOtherFields()
{
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(new ExpressionTransform("__time", "(a + b) * 3600000", TestExprMacroTable.INSTANCE))
  );
  Assert.assertEquals(ImmutableSet.of("a", "b"), transformSpec.getRequiredColumns());
  final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
  final InputRow row = parser.parseBatch(ROW1).get(0);
  Assert.assertNotNull(row);
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z"), row.getTimestamp());
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z").getMillis(), row.getTimestampFromEpoch());
}
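The expression derives the epoch timestamp directly from other fields: getRequiredColumns reports a and b because __time now depends on them, and the expected instant implies that a + b evaluates to 5 in ROW1. The worked arithmetic behind the assertion (the field values themselves are not shown in this excerpt, only their sum is implied):

// (a + b) hours converted to epoch milliseconds: 5 * 3,600,000 = 18,000,000.
long millis = 5L * 3_600_000L;
Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z").getMillis(), millis);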
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
From the class IndexMergerTestBase, method persistAndLoad.
private QueryableIndex persistAndLoad(List<DimensionSchema> schema, InputRow... rows) throws IOException
{
  IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null, new DimensionsSpec(schema));
  for (InputRow row : rows) {
    toPersist.add(row);
  }
  final File tempDir = temporaryFolder.newFolder();
  return closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null)));
}
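The helper round-trips rows through the segment format: it buffers them in an in-memory IncrementalIndex, persists that index to a temporary directory, and loads the result back as a QueryableIndex registered with the test's closer for cleanup. A hypothetical call site (the schema and row here are illustrative assumptions):

QueryableIndex index = persistAndLoad(
    ImmutableList.of(new StringDimensionSchema("dim")),
    new MapBasedInputRow(DateTimes.nowUtc(), ImmutableList.of("dim"), ImmutableMap.of("dim", "value"))
);
// One row went in, so one row should come back from the persisted segment.
Assert.assertEquals(1, index.getNumRows());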