Search in sources :

Example 11 with InputSourceReader

use of org.apache.druid.data.input.InputSourceReader in project druid by druid-io.

The method sample of the class InputSourceSampler.

/**
 * Reads up to {@code samplerConfig.getNumRows()} rows from the given input source and returns a
 * {@link SamplerResponse} describing, per row, the raw input values, the parsed result, and any
 * parse errors encountered either while reading or while indexing.
 *
 * @param inputSource   source to sample; required
 * @param inputFormat   may be null only if {@code inputSource.needsFormat()} is false or a parser is specified
 * @param dataSchema    schema used to parse/transform rows; falls back to {@code DEFAULT_DATA_SCHEMA} when null
 * @param samplerConfig row limit / timeout settings; falls back to {@code SamplerConfig.empty()} when null
 * @throws SamplerException wrapping any failure during reading or indexing
 */
public SamplerResponse sample(final InputSource inputSource, // inputFormat can be null only if inputSource.needsFormat() = false or parser is specified.
@Nullable final InputFormat inputFormat, @Nullable final DataSchema dataSchema, @Nullable final SamplerConfig samplerConfig) {
    Preconditions.checkNotNull(inputSource, "inputSource required");
    if (inputSource.needsFormat()) {
        Preconditions.checkNotNull(inputFormat, "inputFormat required");
    }
    // Substitute defaults so the rest of the method never deals with nulls.
    final DataSchema nonNullDataSchema = dataSchema == null ? DEFAULT_DATA_SCHEMA : dataSchema;
    final SamplerConfig nonNullSamplerConfig = samplerConfig == null ? SamplerConfig.empty() : samplerConfig;
    final Closer closer = Closer.create();
    // Temp dir for any intermediate files the reader spills; deleted when the closer is closed.
    final File tempDir = FileUtils.createTempDir();
    closer.register(() -> FileUtils.deleteDirectory(tempDir));
    try {
        final InputSourceReader reader = buildReader(nonNullSamplerConfig, nonNullDataSchema, inputSource, inputFormat, tempDir);
        // closer1 aliases closer so the temp-dir cleanup runs via try-with-resources,
        // after the iterator and the index have been closed (resources close in reverse order).
        try (final CloseableIterator<InputRowListPlusRawValues> iterator = reader.sample();
            final IncrementalIndex index = buildIncrementalIndex(nonNullSamplerConfig, nonNullDataSchema);
            final Closer closer1 = closer) {
            List<SamplerResponseRow> responseRows = new ArrayList<>(nonNullSamplerConfig.getNumRows());
            int numRowsIndexed = 0;
            while (responseRows.size() < nonNullSamplerConfig.getNumRows() && iterator.hasNext()) {
                final InputRowListPlusRawValues inputRowListPlusRawValues = iterator.next();
                final List<Map<String, Object>> rawColumnsList = inputRowListPlusRawValues.getRawValuesList();
                final ParseException parseException = inputRowListPlusRawValues.getParseException();
                if (parseException != null) {
                    if (rawColumnsList != null) {
                        // add all rows to response
                        responseRows.addAll(rawColumnsList.stream().map(rawColumns -> new SamplerResponseRow(rawColumns, null, true, parseException.getMessage())).collect(Collectors.toList()));
                    } else {
                        // no data parsed, add one response row
                        responseRows.add(new SamplerResponseRow(null, null, true, parseException.getMessage()));
                    }
                    continue;
                }
                List<InputRow> inputRows = inputRowListPlusRawValues.getInputRows();
                if (inputRows == null) {
                    continue;
                }
                for (int i = 0; i < inputRows.size(); i++) {
                    // InputRowListPlusRawValues guarantees the size of rawColumnsList and inputRows are the same
                    Map<String, Object> rawColumns = rawColumnsList == null ? null : rawColumnsList.get(i);
                    InputRow row = inputRows.get(i);
                    // keep the index of the row to be added to responseRows for further use
                    final int rowIndex = responseRows.size();
                    // SamplerInputRow carries rowIndex as a metric so rows coming back out of the
                    // index can be matched to their slot in responseRows (see the loop below).
                    IncrementalIndexAddResult addResult = index.add(new SamplerInputRow(row, rowIndex), true);
                    if (addResult.hasParseException()) {
                        responseRows.add(new SamplerResponseRow(rawColumns, null, true, addResult.getParseException().getMessage()));
                    } else {
                        // store the raw value; will be merged with the data from the IncrementalIndex later
                        responseRows.add(new SamplerResponseRow(rawColumns, null, null, null));
                        numRowsIndexed++;
                    }
                }
            }
            // Walk the indexed rows and fill in the "parsed" view for each successfully indexed row.
            // The internal ordering column is removed so it does not leak into the response.
            final List<String> columnNames = index.getColumnNames();
            columnNames.remove(SamplerInputRow.SAMPLER_ORDERING_COLUMN);
            for (Row row : index) {
                Map<String, Object> parsed = new LinkedHashMap<>();
                parsed.put(ColumnHolder.TIME_COLUMN_NAME, row.getTimestampFromEpoch());
                columnNames.forEach(k -> parsed.put(k, row.getRaw(k)));
                // sortKey is the original rowIndex recorded above; null means the row was rolled up
                // into another row (NOTE(review): presumed — confirm rollup semantics with SamplerInputRow).
                Number sortKey = row.getMetric(SamplerInputRow.SAMPLER_ORDERING_COLUMN);
                if (sortKey != null) {
                    responseRows.set(sortKey.intValue(), responseRows.get(sortKey.intValue()).withParsed(parsed));
                }
            }
            // make sure size of responseRows meets the input
            if (responseRows.size() > nonNullSamplerConfig.getNumRows()) {
                responseRows = responseRows.subList(0, nonNullSamplerConfig.getNumRows());
            }
            int numRowsRead = responseRows.size();
            // Rows that were neither parsed nor flagged unparseable are filtered out of the response.
            return new SamplerResponse(numRowsRead, numRowsIndexed, responseRows.stream().filter(Objects::nonNull).filter(x -> x.getParsed() != null || x.isUnparseable() != null).collect(Collectors.toList()));
        }
    } catch (Exception e) {
        throw new SamplerException(e, "Failed to sample data: %s", e.getMessage());
    }
}
Also used : ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Closer(org.apache.druid.java.util.common.io.Closer) InputRowListPlusRawValues(org.apache.druid.data.input.InputRowListPlusRawValues) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) SamplerResponse(org.apache.druid.client.indexing.SamplerResponse) ParseException(org.apache.druid.java.util.common.parsers.ParseException) DataSchema(org.apache.druid.segment.indexing.DataSchema) InputSourceReader(org.apache.druid.data.input.InputSourceReader) TimedShutoffInputSourceReader(org.apache.druid.data.input.impl.TimedShutoffInputSourceReader) IncrementalIndexAddResult(org.apache.druid.segment.incremental.IncrementalIndexAddResult) InputRow(org.apache.druid.data.input.InputRow) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) ParseException(org.apache.druid.java.util.common.parsers.ParseException) Row(org.apache.druid.data.input.Row) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) InputRow(org.apache.druid.data.input.InputRow) File(java.io.File) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map)

Example 12 with InputSourceReader

use of org.apache.druid.data.input.InputSourceReader in project druid by druid-io.

The method buildReader of the class InputSourceSampler.

/**
 * Builds the reader used for sampling: derives the row schema from the data schema, optionally
 * wraps the reader with a timed shutoff, and finally applies the schema's transformSpec.
 *
 * @param inputFormat may be null when the input source does not need a format
 */
private InputSourceReader buildReader(SamplerConfig samplerConfig, DataSchema dataSchema, InputSource inputSource, @Nullable InputFormat inputFormat, File tempDir) {
    final InputRowSchema rowSchema = InputRowSchemas.fromDataSchema(dataSchema);
    InputSourceReader sampleReader = inputSource.reader(rowSchema, inputFormat, tempDir);
    final int timeoutMs = samplerConfig.getTimeoutMs();
    if (timeoutMs > 0) {
        // Cap how long sampling may run: the wrapper shuts the reader off at the deadline.
        sampleReader = new TimedShutoffInputSourceReader(sampleReader, DateTimes.nowUtc().plusMillis(timeoutMs));
    }
    // Apply the schema's transformSpec so sampled rows reflect configured transforms/filters.
    return dataSchema.getTransformSpec().decorate(sampleReader);
}
Also used : InputSourceReader(org.apache.druid.data.input.InputSourceReader) TimedShutoffInputSourceReader(org.apache.druid.data.input.impl.TimedShutoffInputSourceReader) InputRowSchema(org.apache.druid.data.input.InputRowSchema) TimedShutoffInputSourceReader(org.apache.druid.data.input.impl.TimedShutoffInputSourceReader)

Example 13 with InputSourceReader

use of org.apache.druid.data.input.InputSourceReader in project druid by druid-io.

The method testMultipleSplits of the class SqlInputSourceTest.

@Test
public void testMultipleSplits() throws Exception {
    derbyConnector = derbyConnectorRule.getConnector();
    SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    testUtils.createAndUpdateTable(TABLE_NAME_2, 10);
    try {
        final File tempDir = createFirehoseTmpDir("testMultipleSplit");
        SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST2, true, testUtils.getDerbyFirehoseConnector(), mapper);
        InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
        final List<Row> rows = new ArrayList<>();
        // Close the iterator so the underlying JDBC resources are released even if an
        // assertion below fails (the original leaked it).
        try (CloseableIterator<InputRow> resultIterator = sqlReader.read()) {
            while (resultIterator.hasNext()) {
                rows.add(resultIterator.next());
            }
        }
        assertResult(rows, SQLLIST2);
    } finally {
        // Drop the tables even on failure, matching the cleanup style of testSample.
        testUtils.dropTable(TABLE_NAME_1);
        testUtils.dropTable(TABLE_NAME_2);
    }
}
Also used : InputSourceReader(org.apache.druid.data.input.InputSourceReader) InputRow(org.apache.druid.data.input.InputRow) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) InputRow(org.apache.druid.data.input.InputRow) File(java.io.File) Test(org.junit.Test)

Example 14 with InputSourceReader

use of org.apache.druid.data.input.InputSourceReader in project druid by druid-io.

The method testSingleSplit of the class SqlInputSourceTest.

@Test
public void testSingleSplit() throws Exception {
    derbyConnector = derbyConnectorRule.getConnector();
    SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    try {
        final File tempDir = createFirehoseTmpDir("testSingleSplit");
        SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST1, true, testUtils.getDerbyFirehoseConnector(), mapper);
        InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
        final List<Row> rows = new ArrayList<>();
        // Close the iterator so the underlying JDBC resources are released even if an
        // assertion below fails (the original leaked it).
        try (CloseableIterator<InputRow> resultIterator = sqlReader.read()) {
            while (resultIterator.hasNext()) {
                rows.add(resultIterator.next());
            }
        }
        assertResult(rows, SQLLIST1);
    } finally {
        // Drop the table even on failure, matching the cleanup style of testSample.
        testUtils.dropTable(TABLE_NAME_1);
    }
}
Also used : InputSourceReader(org.apache.druid.data.input.InputSourceReader) InputRow(org.apache.druid.data.input.InputRow) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) InputRow(org.apache.druid.data.input.InputRow) File(java.io.File) Test(org.junit.Test)

Example 15 with InputSourceReader

use of org.apache.druid.data.input.InputSourceReader in project druid by druid-io.

The method testSample of the class SqlInputSourceTest.

@Test
public void testSample() throws Exception {
    derbyConnector = derbyConnectorRule.getConnector();
    SqlTestUtils testUtils = new SqlTestUtils(derbyConnector);
    testUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    try {
        // Fixed copy-paste defect: the temp dir was named "testSingleSplit" in this test.
        final File tempDir = createFirehoseTmpDir("testSample");
        SqlInputSource sqlInputSource = new SqlInputSource(SQLLIST1, true, testUtils.getDerbyFirehoseConnector(), mapper);
        InputSourceReader sqlReader = sqlInputSource.fixedFormatReader(INPUT_ROW_SCHEMA, tempDir);
        final List<InputRowListPlusRawValues> rows = new ArrayList<>();
        // Close the sampling iterator so the underlying JDBC resources are released even if
        // an assertion fails mid-iteration (the original leaked it).
        try (CloseableIterator<InputRowListPlusRawValues> resultIterator = sqlReader.sample()) {
            while (resultIterator.hasNext()) {
                InputRowListPlusRawValues row = resultIterator.next();
                Assert.assertNull(row.getParseException());
                rows.add(row);
            }
        }
        assertResult(rows.stream().flatMap(r -> r.getInputRows().stream()).collect(Collectors.toList()), SQLLIST1);
    } finally {
        testUtils.dropTable(TABLE_NAME_1);
    }
}
Also used : InputSourceReader(org.apache.druid.data.input.InputSourceReader) InputRowListPlusRawValues(org.apache.druid.data.input.InputRowListPlusRawValues) ArrayList(java.util.ArrayList) File(java.io.File) Test(org.junit.Test)

Aggregations

InputSourceReader (org.apache.druid.data.input.InputSourceReader)15 InputRow (org.apache.druid.data.input.InputRow)12 Test (org.junit.Test)12 InputRowSchema (org.apache.druid.data.input.InputRowSchema)10 TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec)9 CsvInputFormat (org.apache.druid.data.input.impl.CsvInputFormat)8 DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec)8 InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest)8 ArrayList (java.util.ArrayList)6 File (java.io.File)5 Map (java.util.Map)3 Row (org.apache.druid.data.input.Row)3 Collection (java.util.Collection)2 Collections (java.util.Collections)2 List (java.util.List)2 ThreadLocalRandom (java.util.concurrent.ThreadLocalRandom)2 Nullable (javax.annotation.Nullable)2 InputRowListPlusRawValues (org.apache.druid.data.input.InputRowListPlusRawValues)2 TimedShutoffInputSourceReader (org.apache.druid.data.input.impl.TimedShutoffInputSourceReader)2 JacksonInject (com.fasterxml.jackson.annotation.JacksonInject)1