Use of io.druid.data.input.InputRow in project druid by druid-io.
Class BenchmarkDataGeneratorTest, method testNormal:
@Test
public void testNormal() throws Exception {
  List<BenchmarkColumnSchema> schemas = new ArrayList<>();
  RowValueTracker tracker = new RowValueTracker();
  schemas.add(BenchmarkColumnSchema.makeNormal("dimA", ValueType.FLOAT, false, 1, null, 8.0, 1.0, false));
  schemas.add(BenchmarkColumnSchema.makeNormal("dimB", ValueType.STRING, false, 1, 0.50, 88.0, 2.0, false));
  BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, 0, 0, 1000.0);
  for (int i = 0; i < 100; i++) {
    InputRow row = dataGenerator.nextRow();
    //System.out.println("N-ROW: " + row);
    tracker.addRow(row);
  }
  tracker.printStuff();
}
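The makeNormal() arguments above are positional and easy to misread. A minimal annotated sketch of the second schema, assuming parameter roles inferred from the call site (the names in the comments are not taken from the BenchmarkColumnSchema source):

// Annotated version of the "dimB" schema above; the parameter roles in the
// comments are assumptions inferred from the call site, not from Druid source.
BenchmarkColumnSchema normalCol = BenchmarkColumnSchema.makeNormal(
    "dimB",            // column name
    ValueType.STRING,  // generated value type
    false,             // isMetric: false makes this a dimension column
    1,                 // values generated per row
    0.50,              // null probability: roughly half the rows get null
    88.0,              // mean of the normal distribution
    2.0,               // standard deviation
    false              // rounding flag for generated values
);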
Use of io.druid.data.input.InputRow in project druid by druid-io.
Class BenchmarkDataGeneratorTest, method testIntervalBasedTimeGeneration:
@Test
public void testIntervalBasedTimeGeneration() throws Exception {
  List<BenchmarkColumnSchema> schemas = new ArrayList<>();
  schemas.add(BenchmarkColumnSchema.makeEnumeratedSequential("dimB", ValueType.STRING, false, 1, null, Arrays.<Object>asList("Hello", "World", "Foo", "Bar")));
  // 100 rows spread across a wide time interval.
  BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100);
  for (int i = 0; i < 100; i++) {
    InputRow row = dataGenerator.nextRow();
    //System.out.println("S-ROW: " + row);
  }
  // The same row count crammed into a one-millisecond interval.
  BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 50001), 100);
  for (int i = 0; i < 100; i++) {
    InputRow row = dataGenerator2.nextRow();
    //System.out.println("S2-ROW: " + row);
  }
}
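The Interval constructor distributes the requested row count across the given Joda time range. A minimal sketch of how the generated timestamps could be checked, using only InputRow.getTimestampFromEpoch() and the generator from the test above; the assertion is not part of the original test:

// Sketch: every generated timestamp should land inside the configured interval.
BenchmarkDataGenerator generator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100);
for (int i = 0; i < 100; i++) {
  long timestamp = generator.nextRow().getTimestampFromEpoch();
  Assert.assertTrue(timestamp >= 50000 && timestamp < 600000);
}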
Use of io.druid.data.input.InputRow in project druid by druid-io.
Class IndexBuilder, method buildIncrementalIndexWithRows:
private static IncrementalIndex buildIncrementalIndexWithRows(IncrementalIndexSchema schema, int maxRows, Iterable<InputRow> rows) {
  Preconditions.checkNotNull(schema, "schema");
  final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex(schema, true, maxRows);
  for (InputRow row : rows) {
    try {
      incrementalIndex.add(row);
    } catch (IndexSizeExceededException e) {
      // Rethrow the checked exception as unchecked; overflow is a hard failure here.
      throw Throwables.propagate(e);
    }
  }
  return incrementalIndex;
}
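A hypothetical caller for the helper above. MapBasedInputRow(timestamp, dimensions, event) is a concrete InputRow implementation, but the IncrementalIndexSchema.Builder chain and the CountAggregatorFactory metric are assumptions about the surrounding API; and since the method is private, it would normally be reached through IndexBuilder's public builder methods rather than called directly:

// Hypothetical usage sketch; the schema Builder calls are assumptions and
// may differ by Druid version.
IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
    .build();
List<InputRow> rows = ImmutableList.<InputRow>of(
    new MapBasedInputRow(
        1000L,                                       // row timestamp in millis
        ImmutableList.of("dim"),                     // dimension names
        ImmutableMap.<String, Object>of("dim", "a")  // raw event map
    )
);
IncrementalIndex index = buildIncrementalIndexWithRows(schema, 1000, rows);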
Use of io.druid.data.input.InputRow in project druid by druid-io.
Class InputRowParserSerdeTest, method testMapInputRowParserSerde:
@Test
public void testMapInputRowParserSerde() throws Exception {
  final MapInputRowParser parser = new MapInputRowParser(
      new JSONParseSpec(
          new TimestampSpec("timeposix", "posix", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), ImmutableList.of("baz"), null),
          null,
          null
      )
  );
  // Round-trip the parser through JSON serialization before using it.
  final MapInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsBytes(parser), MapInputRowParser.class);
  final InputRow parsed = parser2.parse(ImmutableMap.<String, Object>of("foo", "x", "bar", "y", "qux", "z", "timeposix", "1"));
  // "qux" is absent from the dimensions list, so only "foo" and "bar" survive.
  Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
  Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
  Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
  // "posix" timestamps are in seconds, so "1" becomes 1000 ms since the epoch.
  Assert.assertEquals(1000, parsed.getTimestampFromEpoch());
}
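The final assertion works because the "posix" format interprets the field as seconds since the epoch. A small sketch isolating that behavior with TimestampSpec alone; calling extractTimestamp() directly like this is an assumption about the API:

// "posix" reads seconds, so the string "1" parses to 1000 ms since the epoch.
TimestampSpec posixSpec = new TimestampSpec("timeposix", "posix", null);
DateTime parsedTime = posixSpec.extractTimestamp(ImmutableMap.<String, Object>of("timeposix", "1"));
Assert.assertEquals(1000L, parsedTime.getMillis());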
Use of io.druid.data.input.InputRow in project druid by druid-io.
Class InputRowParserSerdeTest, method testCharsetParseHelper:
private InputRow testCharsetParseHelper(Charset charset) throws Exception {
  final StringInputRowParser parser = new StringInputRowParser(
      new JSONParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), null, null),
          null,
          null
      ),
      charset.name()
  );
  // Round-trip through serialization, then parse bytes encoded with the given charset.
  final ByteBufferInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsBytes(parser), ByteBufferInputRowParser.class);
  return parser2.parse(ByteBuffer.wrap("{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"3000\"}".getBytes(charset)));
}
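A hedged usage sketch for the helper, exercising it with two charsets; the assertions mirror the map-parser test above rather than the project's actual charset test:

for (Charset charset : Arrays.asList(Charsets.UTF_8, Charsets.UTF_16)) {
  InputRow parsed = testCharsetParseHelper(charset);
  Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
  Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
  // "3000" under the "iso" format parses as the year 3000, not an epoch value.
}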