
Example 11 with ParseException

Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.

From the class MapInputRowParser, the parse method:

@Override
public InputRow parse(Map<String, Object> theMap) {
    final List<String> dimensions = parseSpec.getDimensionsSpec().hasCustomDimensions()
        ? parseSpec.getDimensionsSpec().getDimensionNames()
        : Lists.newArrayList(
            Sets.difference(theMap.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions())
        );
    final DateTime timestamp;
    try {
        timestamp = parseSpec.getTimestampSpec().extractTimestamp(theMap);
        if (timestamp == null) {
            final String input = theMap.toString();
            throw new NullPointerException(String.format("Null timestamp in input: %s", input.length() < 100 ? input : input.substring(0, 100) + "..."));
        }
    } catch (Exception e) {
        throw new ParseException(e, "Unparseable timestamp found!");
    }
    return new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap);
}
Also used : ParseException(io.druid.java.util.common.parsers.ParseException) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) DateTime(org.joda.time.DateTime)
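
As a rough usage sketch (not part of the Druid source), the helper below assumes an already-configured MapInputRowParser and shows one way a caller might drop rows whose timestamps cannot be extracted instead of failing the whole batch. The class name LenientMapParsing and the parseAll method are hypothetical, and the io.druid.data.input.impl package for MapInputRowParser is an assumption based on Druid 0.9.x-era layout.

import io.druid.data.input.InputRow;
import io.druid.data.input.impl.MapInputRowParser;
import io.druid.java.util.common.parsers.ParseException;

import java.util.List;
import java.util.Map;

// Hypothetical helper: parse a batch of event maps, counting rather than
// propagating rows that fail timestamp extraction.
public class LenientMapParsing {

    public static int parseAll(MapInputRowParser parser, List<Map<String, Object>> events, List<InputRow> out) {
        int unparseable = 0;
        for (Map<String, Object> event : events) {
            try {
                // parse() throws ParseException when the timestamp is missing or malformed
                out.add(parser.parse(event));
            } catch (ParseException e) {
                // skip the bad row; callers that want strict behaviour can rethrow instead
                unparseable++;
            }
        }
        return unparseable;
    }
}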

Example 12 with ParseException

Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.

From the class StringInputRowParser, the buildStringKeyMap method:

private Map<String, Object> buildStringKeyMap(ByteBuffer input) {
    int payloadSize = input.remaining();
    if (chars == null || chars.remaining() < payloadSize) {
        chars = CharBuffer.allocate(payloadSize);
    }
    final CoderResult coderResult = charset.newDecoder()
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE)
        .decode(input, chars, true);
    Map<String, Object> theMap;
    if (coderResult.isUnderflow()) {
        chars.flip();
        try {
            theMap = parseString(chars.toString());
        } finally {
            chars.clear();
        }
    } else {
        throw new ParseException("Failed with CoderResult[%s]", coderResult);
    }
    return theMap;
}
Also used : ParseException(io.druid.java.util.common.parsers.ParseException) CoderResult(java.nio.charset.CoderResult)
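
To show the decoding pattern in isolation, here is a minimal, self-contained JDK-only sketch (not Druid code): it decodes a UTF-8 payload the same way, replacing malformed bytes rather than failing, and checks for the non-underflow case where the Druid parser would throw ParseException.

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;

public class DecodeSketch {
    public static void main(String[] args) {
        // 0xFF is not valid UTF-8, so it will be replaced with U+FFFD
        ByteBuffer input = ByteBuffer.wrap(new byte[]{'h', 'i', (byte) 0xFF});
        // UTF-8 never produces more chars than input bytes, so this capacity is enough
        CharBuffer chars = CharBuffer.allocate(input.remaining());
        CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder()
            .onMalformedInput(CodingErrorAction.REPLACE)
            .onUnmappableCharacter(CodingErrorAction.REPLACE);
        CoderResult result = decoder.decode(input, chars, true);
        if (!result.isUnderflow()) {
            // this is the branch where the Druid parser throws ParseException
            throw new IllegalStateException("decode failed: " + result);
        }
        chars.flip();
        System.out.println(chars.toString()); // prints "hi" followed by the replacement char
    }
}

The Druid method reuses a single CharBuffer across calls and clears it in a finally block; the sketch allocates a fresh buffer for clarity.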

Example 13 with ParseException

Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.

From the class ReplayableFirehoseFactoryTest, the testReplayableFirehoseWithoutReportParseExceptions method:

@Test
public void testReplayableFirehoseWithoutReportParseExceptions() throws Exception {
    final boolean[] hasMore = { true };
    replayableFirehoseFactory = new ReplayableFirehoseFactory(delegateFactory, false, 10000, 3, mapper);
    expect(delegateFactory.connect(parser)).andReturn(delegateFirehose);
    expect(delegateFirehose.hasMore()).andAnswer(new IAnswer<Boolean>() {

        @Override
        public Boolean answer() throws Throwable {
            return hasMore[0];
        }
    }).anyTimes();
    expect(delegateFirehose.nextRow())
        .andReturn(testRows.get(0))
        .andReturn(testRows.get(1))
        .andThrow(new ParseException("unparseable!"))
        .andAnswer(new IAnswer<InputRow>() {

        @Override
        public InputRow answer() throws Throwable {
            hasMore[0] = false;
            return testRows.get(2);
        }
    });
    delegateFirehose.close();
    replayAll();
    List<InputRow> rows = Lists.newArrayList();
    try (Firehose firehose = replayableFirehoseFactory.connect(parser)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    Assert.assertEquals(testRows, rows);
    verifyAll();
}
Also used : IAnswer(org.easymock.IAnswer) Firehose(io.druid.data.input.Firehose) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) InputRow(io.druid.data.input.InputRow) ReplayableFirehoseFactory(io.druid.segment.realtime.firehose.ReplayableFirehoseFactory) ParseException(io.druid.java.util.common.parsers.ParseException) Test(org.junit.Test)
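
The behaviour the test exercises, namely that a ParseException from the delegate is swallowed when reportParseExceptions is false, can be sketched generically as follows. This is an illustrative reconstruction of the pattern, not the actual ReplayableFirehoseFactory code; the class and method names are hypothetical.

import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
import io.druid.java.util.common.parsers.ParseException;

import java.util.ArrayList;
import java.util.List;

// Hypothetical drain loop: read every row from a firehose, either skipping or
// propagating unparseable rows depending on the flag.
public class FirehoseDrain {

    public static List<InputRow> drain(Firehose firehose, boolean reportParseExceptions) {
        List<InputRow> rows = new ArrayList<>();
        while (firehose.hasMore()) {
            try {
                rows.add(firehose.nextRow());
            } catch (ParseException e) {
                if (reportParseExceptions) {
                    // strict mode: surface the bad row to the caller
                    throw e;
                }
                // lenient mode: drop the row and keep reading, as the test above expects
            }
        }
        return rows;
    }
}

With reportParseExceptions set to true such a loop would fail on the third nextRow() call mocked above; with false it returns all three testRows, which is what the assertion checks.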

Example 14 with ParseException

Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.

From the class SchemaRegistryBasedAvroBytesDecoder, the parse method:

@Override
public GenericRecord parse(ByteBuffer bytes) {
    try {
        // ignore first \0 byte
        bytes.get();
        // extract schema registry id
        int id = bytes.getInt();
        int length = bytes.limit() - 1 - 4;
        int offset = bytes.position() + bytes.arrayOffset();
        Schema schema = registry.getByID(id);
        DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
        return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
    } catch (Exception e) {
        throw new ParseException(e, "Fail to decode avro message!");
    }
}
Also used : GenericDatumReader(org.apache.avro.generic.GenericDatumReader) Schema(org.apache.avro.Schema) ParseException(io.druid.java.util.common.parsers.ParseException) GenericRecord(org.apache.avro.generic.GenericRecord)
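
The byte layout the decoder expects (one magic byte, a 4-byte schema-registry id, then the Avro payload) is what the limit() - 1 - 4 arithmetic above accounts for. The framing helper below is an illustrative sketch of that layout, not Druid or Confluent code; the class and method names are made up for the example.

import java.nio.ByteBuffer;

// Hypothetical producer-side framing matching what parse() reads:
// [0x00 magic byte][int32 schema id][avro-encoded record bytes]
public class WireFormatSketch {

    public static ByteBuffer frame(int schemaId, byte[] avroPayload) {
        ByteBuffer buf = ByteBuffer.allocate(1 + 4 + avroPayload.length);
        buf.put((byte) 0x00);   // skipped by bytes.get() in parse()
        buf.putInt(schemaId);   // consumed by bytes.getInt()
        buf.put(avroPayload);   // remaining bytes: length = limit - 1 - 4
        buf.flip();
        return buf;
    }
}

A buffer built this way is heap-backed with arrayOffset() == 0, which is consistent with the decoder's use of bytes.array() and bytes.arrayOffset().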

Example 15 with ParseException

Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.

From the class SchemaRepoBasedAvroBytesDecoder, the parse method:

@Override
public GenericRecord parse(ByteBuffer bytes) {
    Pair<SUBJECT, ID> subjectAndId = subjectAndIdConverter.getSubjectAndId(bytes);
    Schema schema = typedRepository.getSchema(subjectAndId.lhs, subjectAndId.rhs);
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
    ByteBufferInputStream inputStream = new ByteBufferInputStream(Collections.singletonList(bytes));
    try {
        return reader.read(null, DecoderFactory.get().binaryDecoder(inputStream, null));
    } catch (IOException e) {
        throw new ParseException(e, "Fail to decode avro message!");
    }
}
Also used : GenericDatumReader(org.apache.avro.generic.GenericDatumReader) Schema(org.apache.avro.Schema) ByteBufferInputStream(org.apache.avro.util.ByteBufferInputStream) IOException(java.io.IOException) ParseException(io.druid.java.util.common.parsers.ParseException) GenericRecord(org.apache.avro.generic.GenericRecord)
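
Setting the schema-repo lookup aside, the Avro half of this method is the standard GenericDatumReader pattern. Below is a minimal sketch assuming the writer schema is already in hand; the class and method names are illustrative.

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;

// Hypothetical helper: decode one GenericRecord from raw Avro bytes using a
// known writer schema, mirroring the reader.read(...) call above.
public class AvroDecodeSketch {

    public static GenericRecord decode(Schema writerSchema, byte[] avroBytes) throws IOException {
        DatumReader<GenericRecord> reader = new GenericDatumReader<>(writerSchema);
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(avroBytes), null);
        return reader.read(null, decoder);
    }
}

DecoderFactory.get().binaryDecoder(...) also accepts a byte array with an offset and length, which is the variant used in Example 14.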

Aggregations

ParseException (io.druid.java.util.common.parsers.ParseException)15 InputRow (io.druid.data.input.InputRow)6 ISE (io.druid.java.util.common.ISE)5 IOException (java.io.IOException)4 GenericDatumReader (org.apache.avro.generic.GenericDatumReader)4 GenericRecord (org.apache.avro.generic.GenericRecord)4 Firehose (io.druid.data.input.Firehose)3 MapBasedInputRow (io.druid.data.input.MapBasedInputRow)3 AggregatorFactory (io.druid.query.aggregation.AggregatorFactory)3 RealtimeIOConfig (io.druid.segment.indexing.RealtimeIOConfig)3 Map (java.util.Map)3 ByteBufferInputStream (org.apache.avro.util.ByteBufferInputStream)3 DateTime (org.joda.time.DateTime)3 Supplier (com.google.common.base.Supplier)2 ImmutableMap (com.google.common.collect.ImmutableMap)2 Committer (io.druid.data.input.Committer)2 SegmentTransactionalInsertAction (io.druid.indexing.common.actions.SegmentTransactionalInsertAction)2 Aggregator (io.druid.query.aggregation.Aggregator)2 FireDepartment (io.druid.segment.realtime.FireDepartment)2 RealtimeMetricsMonitor (io.druid.segment.realtime.RealtimeMetricsMonitor)2