
Example 66 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From class BenchmarkDataGeneratorTest, method testNormal:

@Test
public void testNormal() throws Exception {
    List<BenchmarkColumnSchema> schemas = new ArrayList<>();
    RowValueTracker tracker = new RowValueTracker();
    schemas.add(BenchmarkColumnSchema.makeNormal("dimA", ValueType.FLOAT, false, 1, null, 8.0, 1.0, false));
    schemas.add(BenchmarkColumnSchema.makeNormal("dimB", ValueType.STRING, false, 1, 0.50, 88.0, 2.0, false));
    BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, 0, 0, 1000.0);
    for (int i = 0; i < 100; i++) {
        InputRow row = dataGenerator.nextRow();
        //System.out.println("N-ROW: " + row);
        tracker.addRow(row);
    }
    tracker.printStuff();
}
Also used: ArrayList (java.util.ArrayList), BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator), InputRow (io.druid.data.input.InputRow), BenchmarkColumnSchema (io.druid.benchmark.datagen.BenchmarkColumnSchema), Test (org.junit.Test)
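A minimal sketch, not part of the listed test, of how the generated rows could be inspected through the InputRow interface. The argument order of makeNormal noted in the comment is an assumption read off the call above, and the variable names are illustrative.

// Sketch only: assumes the dataGenerator built in the test above is in scope.
// The makeNormal call appears to take (name, type, isMetric, rowSize,
// nullProbability, mean, stddev, useRounding), so "dimA" should carry values
// drawn from a normal distribution with mean 8.0 and stddev 1.0.
InputRow row = dataGenerator.nextRow();
List<String> dimA = row.getDimension("dimA");   // dimension values are exposed as strings
float value = Float.parseFloat(dimA.get(0));    // one value per row (rowSize = 1)
long timestamp = row.getTimestampFromEpoch();   // milliseconds since the epoch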

Example 67 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From class BenchmarkDataGeneratorTest, method testIntervalBasedTimeGeneration:

@Test
public void testIntervalBasedTimeGeneration() throws Exception {
    List<BenchmarkColumnSchema> schemas = new ArrayList<>();
    schemas.add(BenchmarkColumnSchema.makeEnumeratedSequential("dimB", ValueType.STRING, false, 1, null, Arrays.<Object>asList("Hello", "World", "Foo", "Bar")));
    BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100);
    for (int i = 0; i < 100; i++) {
        InputRow row = dataGenerator.nextRow();
    //System.out.println("S-ROW: " + row);
    }
    BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 50001), 100);
    for (int i = 0; i < 100; i++) {
        InputRow row = dataGenerator2.nextRow();
    //System.out.println("S2-ROW: " + row);
    }
}
Also used: ArrayList (java.util.ArrayList), BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator), InputRow (io.druid.data.input.InputRow), BenchmarkColumnSchema (io.druid.benchmark.datagen.BenchmarkColumnSchema), Interval (org.joda.time.Interval), Test (org.junit.Test)
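A small sketch, an assumption rather than part of the original test, of one way to make the interval-based behavior explicit: every timestamp produced by the generator should lie inside the configured Interval.

// Sketch only: reuses the schemas list and seed from the test above.
Interval interval = new Interval(50000, 600000);
BenchmarkDataGenerator generator = new BenchmarkDataGenerator(schemas, 9999, interval, 100);
for (int i = 0; i < 100; i++) {
    InputRow generated = generator.nextRow();
    Assert.assertTrue(interval.contains(generated.getTimestampFromEpoch()));
}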

Example 68 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From class IndexBuilder, method buildIncrementalIndexWithRows:

private static IncrementalIndex buildIncrementalIndexWithRows(IncrementalIndexSchema schema, int maxRows, Iterable<InputRow> rows) {
    Preconditions.checkNotNull(schema, "schema");
    final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex(schema, true, maxRows);
    for (InputRow row : rows) {
        try {
            incrementalIndex.add(row);
        } catch (IndexSizeExceededException e) {
            throw Throwables.propagate(e);
        }
    }
    return incrementalIndex;
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), InputRow (io.druid.data.input.InputRow), IndexSizeExceededException (io.druid.segment.incremental.IndexSizeExceededException)
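A hypothetical caller of the helper above. The schema, the count aggregator, and the row contents are assumptions added for illustration; they are not taken from IndexBuilder itself.

// Sketch only: schema, aggregator, and row contents are invented for illustration.
IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
    .build();
List<InputRow> rows = ImmutableList.<InputRow>of(
    new MapBasedInputRow(
        System.currentTimeMillis(),
        ImmutableList.of("dim1"),
        ImmutableMap.<String, Object>of("dim1", "value1")
    )
);
IncrementalIndex index = buildIncrementalIndexWithRows(schema, 1000, rows);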

Example 69 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From class InputRowParserSerdeTest, method testMapInputRowParserSerde:

@Test
public void testMapInputRowParserSerde() throws Exception {
    final MapInputRowParser parser = new MapInputRowParser(new JSONParseSpec(new TimestampSpec("timeposix", "posix", null), new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), ImmutableList.of("baz"), null), null, null));
    final MapInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsBytes(parser), MapInputRowParser.class);
    final InputRow parsed = parser2.parse(ImmutableMap.<String, Object>of("foo", "x", "bar", "y", "qux", "z", "timeposix", "1"));
    Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
    Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
    Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
    Assert.assertEquals(1000, parsed.getTimestampFromEpoch());
}
Also used: InputRow (io.druid.data.input.InputRow), Test (org.junit.Test)
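Two points worth noting, with an illustrative check that is not part of the original test: the "posix" timestamp format is interpreted as seconds since the epoch, which is why the value "1" yields 1000 in the epoch-millis assertion above, and because the DimensionsSpec declares only "foo" and "bar", the extra "qux" field is not reported as a dimension.

// Illustrative only, assuming the parsed row from the test above is in scope.
Assert.assertFalse(parsed.getDimensions().contains("qux"));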

Example 70 with InputRow

Use of io.druid.data.input.InputRow in project druid by druid-io.

From class InputRowParserSerdeTest, method testCharsetParseHelper:

private InputRow testCharsetParseHelper(Charset charset) throws Exception {
    final StringInputRowParser parser = new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "iso", null), new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), null, null), null, null), charset.name());
    final ByteBufferInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsBytes(parser), ByteBufferInputRowParser.class);
    final InputRow parsed = parser2.parse(ByteBuffer.wrap("{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"3000\"}".getBytes(charset)));
    return parsed;
}
Also used: ByteBufferInputRowParser (io.druid.data.input.ByteBufferInputRowParser), InputRow (io.druid.data.input.InputRow)
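A hypothetical call site for the helper above; the enclosing test presumably invokes it with several charsets. The charset and the assertions below are assumptions that simply mirror the JSON payload embedded in the helper.

// Sketch only: UTF-8 chosen as an example charset.
InputRow parsed = testCharsetParseHelper(StandardCharsets.UTF_8);
Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));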

Aggregations

InputRow (io.druid.data.input.InputRow): 81 usages
Test (org.junit.Test): 35 usages
MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 24 usages
BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator): 22 usages
File (java.io.File): 18 usages
Setup (org.openjdk.jmh.annotations.Setup): 15 usages
HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde): 14 usages
Firehose (io.druid.data.input.Firehose): 12 usages
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 12 usages
IndexSpec (io.druid.segment.IndexSpec): 11 usages
ArrayList (java.util.ArrayList): 11 usages
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 10 usages
DateTime (org.joda.time.DateTime): 10 usages
QueryableIndex (io.druid.segment.QueryableIndex): 9 usages
IOException (java.io.IOException): 9 usages
BenchmarkColumnSchema (io.druid.benchmark.datagen.BenchmarkColumnSchema): 8 usages
Interval (org.joda.time.Interval): 8 usages
ParseException (io.druid.java.util.common.parsers.ParseException): 7 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 6 usages
DataSegment (io.druid.timeline.DataSegment): 5 usages