Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.
In class MapInputRowParser, method parse:
@Override
public InputRow parse(Map<String, Object> theMap)
{
  final List<String> dimensions = parseSpec.getDimensionsSpec().hasCustomDimensions()
                                  ? parseSpec.getDimensionsSpec().getDimensionNames()
                                  : Lists.newArrayList(
                                      Sets.difference(theMap.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions())
                                  );

  final DateTime timestamp;
  try {
    timestamp = parseSpec.getTimestampSpec().extractTimestamp(theMap);
    if (timestamp == null) {
      final String input = theMap.toString();
      throw new NullPointerException(
          String.format("Null timestamp in input: %s", input.length() < 100 ? input : input.substring(0, 100) + "...")
      );
    }
  }
  catch (Exception e) {
    throw new ParseException(e, "Unparseable timestamp found!");
  }

  return new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap);
}
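The method never hands back a row without a timestamp: a missing or malformed timestamp always surfaces as ParseException, so a caller only has one exception type to handle per row. Below is a minimal caller sketch, not taken from the Druid codebase; it assumes `parser` is an already-configured MapInputRowParser whose TimestampSpec reads a hypothetical "ts" column, and it uses Guava's ImmutableMap for brevity.

// Hypothetical caller: count and skip unparseable rows instead of failing the batch.
Map<String, Object> goodRow = ImmutableMap.<String, Object>of("ts", "2017-01-01T00:00:00Z", "page", "home");
Map<String, Object> badRow = ImmutableMap.<String, Object>of("page", "home"); // no timestamp column
int unparseable = 0;
for (Map<String, Object> row : Arrays.asList(goodRow, badRow)) {
  try {
    InputRow inputRow = parser.parse(row);
    // hand inputRow to the indexing task ...
  } catch (ParseException e) {
    unparseable++; // skip this row; optionally log e.getMessage()
  }
}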
Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.
In class StringInputRowParser, method buildStringKeyMap:
private Map<String, Object> buildStringKeyMap(ByteBuffer input)
{
  int payloadSize = input.remaining();

  if (chars == null || chars.remaining() < payloadSize) {
    chars = CharBuffer.allocate(payloadSize);
  }

  final CoderResult coderResult = charset.newDecoder()
                                         .onMalformedInput(CodingErrorAction.REPLACE)
                                         .onUnmappableCharacter(CodingErrorAction.REPLACE)
                                         .decode(input, chars, true);

  Map<String, Object> theMap;
  if (coderResult.isUnderflow()) {
    chars.flip();
    try {
      theMap = parseString(chars.toString());
    }
    finally {
      chars.clear();
    }
  } else {
    throw new ParseException("Failed with CoderResult[%s]", coderResult);
  }
  return theMap;
}
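The decode-and-check step is plain java.nio: malformed or unmappable bytes are replaced with U+FFFD rather than aborting, and anything other than an UNDERFLOW result is treated as a failed parse. A standalone sketch of the same pattern outside Druid (the input bytes below are illustrative; 0xFF is not valid UTF-8):

ByteBuffer input = ByteBuffer.wrap(new byte[]{'a', (byte) 0xFF, 'b'});
CharBuffer chars = CharBuffer.allocate(input.remaining());
CoderResult result = StandardCharsets.UTF_8.newDecoder()
    .onMalformedInput(CodingErrorAction.REPLACE)
    .onUnmappableCharacter(CodingErrorAction.REPLACE)
    .decode(input, chars, true);
if (result.isUnderflow()) {
  chars.flip();
  System.out.println(chars.toString()); // "a\uFFFDb" - the bad byte is replaced, not fatal
} else {
  throw new ParseException("Failed with CoderResult[%s]", result);
}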
Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.
In class ReplayableFirehoseFactoryTest, method testReplayableFirehoseWithoutReportParseExceptions:
@Test
public void testReplayableFirehoseWithoutReportParseExceptions() throws Exception
{
  final boolean[] hasMore = {true};
  replayableFirehoseFactory = new ReplayableFirehoseFactory(delegateFactory, false, 10000, 3, mapper);

  expect(delegateFactory.connect(parser)).andReturn(delegateFirehose);
  expect(delegateFirehose.hasMore()).andAnswer(
      new IAnswer<Boolean>()
      {
        @Override
        public Boolean answer() throws Throwable
        {
          return hasMore[0];
        }
      }
  ).anyTimes();
  expect(delegateFirehose.nextRow())
      .andReturn(testRows.get(0))
      .andReturn(testRows.get(1))
      .andThrow(new ParseException("unparseable!"))
      .andAnswer(
          new IAnswer<InputRow>()
          {
            @Override
            public InputRow answer() throws Throwable
            {
              hasMore[0] = false;
              return testRows.get(2);
            }
          }
      );
  delegateFirehose.close();
  replayAll();

  List<InputRow> rows = Lists.newArrayList();
  try (Firehose firehose = replayableFirehoseFactory.connect(parser)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  Assert.assertEquals(testRows, rows);
  verifyAll();
}
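The shared hasMore flag is what lets the mocked firehose "run dry" after the last programmed nextRow() answer, while the ParseException in the middle of the sequence exercises the path where reportParseExceptions is false. A stripped-down sketch of the same EasyMock technique against a hypothetical interface (the interface and row values are illustrative only):

// Hypothetical helper interface, declared at class level in a real test:
// interface RowSource { boolean hasMore(); String nextRow(); }

final boolean[] hasMore = {true};
RowSource source = EasyMock.createMock(RowSource.class);
EasyMock.expect(source.hasMore()).andAnswer(new IAnswer<Boolean>() {
  @Override
  public Boolean answer() throws Throwable {
    return hasMore[0]; // re-evaluated on every call, unlike a fixed andReturn(true)
  }
}).anyTimes();
EasyMock.expect(source.nextRow())
    .andReturn("row1")
    .andThrow(new ParseException("unparseable!")) // second call fails, mirroring the test above
    .andAnswer(new IAnswer<String>() {
      @Override
      public String answer() throws Throwable {
        hasMore[0] = false; // flip the flag so the read loop terminates after this row
        return "row2";
      }
    });
EasyMock.replay(source);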
Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.
In class SchemaRegistryBasedAvroBytesDecoder, method parse:
@Override
public GenericRecord parse(ByteBuffer bytes)
{
  try {
    // ignore first \0 byte
    bytes.get();
    // extract schema registry id
    int id = bytes.getInt();
    int length = bytes.limit() - 1 - 4;
    int offset = bytes.position() + bytes.arrayOffset();
    Schema schema = registry.getByID(id);
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
  }
  catch (Exception e) {
    throw new ParseException(e, "Fail to decode avro message!");
  }
}
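The offset and length arithmetic assumes the Confluent wire format: one magic byte, a 4-byte big-endian schema id, then the Avro payload. A small plain-JDK sketch with made-up values showing how that arithmetic lands on the payload (registry.getByID(id) would resolve the writer schema in the real decoder):

byte[] payload = new byte[]{10, 20, 30};             // stand-in for Avro-encoded bytes
ByteBuffer bytes = ByteBuffer.allocate(1 + 4 + payload.length);
bytes.put((byte) 0x0);                               // magic byte
bytes.putInt(42);                                    // schema registry id
bytes.put(payload);
bytes.flip();

bytes.get();                                         // skip magic byte
int id = bytes.getInt();                             // 42
int length = bytes.limit() - 1 - 4;                  // 3: everything after the 5-byte header
int offset = bytes.position() + bytes.arrayOffset(); // index of the first payload byte in bytes.array()
assert id == 42 && length == payload.length && bytes.array()[offset] == 10;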
Use of io.druid.java.util.common.parsers.ParseException in project druid by druid-io.
In class SchemaRepoBasedAvroBytesDecoder, method parse:
@Override
public GenericRecord parse(ByteBuffer bytes)
{
  Pair<SUBJECT, ID> subjectAndId = subjectAndIdConverter.getSubjectAndId(bytes);
  Schema schema = typedRepository.getSchema(subjectAndId.lhs, subjectAndId.rhs);
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  ByteBufferInputStream inputStream = new ByteBufferInputStream(Collections.singletonList(bytes));
  try {
    return reader.read(null, DecoderFactory.get().binaryDecoder(inputStream, null));
  }
  catch (IOException e) {
    throw new ParseException(e, "Fail to decode avro message!");
  }
}
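Unlike the registry-based decoder, this variant resolves the schema from a schema-repo subject/id pair extracted from the buffer, and only the actual Avro read can fail with IOException. A standalone decode sketch using the Avro API only, without the repository plumbing; the one-byte buffer below is the hand-built binary encoding of a record whose single int field holds the value 1:

Schema schema = new Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"Rec\",\"fields\":[{\"name\":\"x\",\"type\":\"int\"}]}"
);
ByteBuffer bytes = ByteBuffer.wrap(new byte[]{2});   // Avro zig-zag varint encoding of int 1
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
InputStream in = new ByteBufferInputStream(Collections.singletonList(bytes));
try {
  GenericRecord record = reader.read(null, DecoderFactory.get().binaryDecoder(in, null));
  System.out.println(record.get("x"));               // 1
} catch (IOException e) {
  throw new ParseException(e, "Fail to decode avro message!");
}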