Use of org.apache.druid.java.util.common.parsers.ParseException in project druid by druid-io.
Class JsonReader, method parseInputRows:
/**
 * Parses one intermediate row (a string that may contain one or more concatenated JSON objects)
 * into a list of {@link InputRow}s.
 *
 * @throws ParseException if the text is not well-formed JSON, or if parsing yields no rows
 * @throws IOException    if the underlying parser cannot be created
 */
@Override
protected List<InputRow> parseInputRows(String intermediateRow) throws IOException, ParseException {
final List<InputRow> inputRows;
try (JsonParser parser = jsonFactory.createParser(intermediateRow)) {
// MappingIterator lazily reads successive JsonNodes from the same parser.
final MappingIterator<JsonNode> delegate = mapper.readValues(parser, JsonNode.class);
// toList() forces iteration here, inside the try, so iteration failures are caught below.
inputRows = FluentIterable.from(() -> delegate).transform(jsonNode -> MapInputRowParser.parse(inputRowSchema, flattener.flatten(jsonNode))).toList();
} catch (RuntimeException e) {
// JsonParseException will be thrown from MappingIterator#hasNext or MappingIterator#next when input json text is ill-formed
if (e.getCause() instanceof JsonParseException) {
throw new ParseException(intermediateRow, e, "Unable to parse row [%s]", intermediateRow);
}
// throw unknown exception
throw e;
}
// An intermediate row that parsed cleanly but produced nothing is still a parse failure.
if (CollectionUtils.isNullOrEmpty(inputRows)) {
throw new ParseException(intermediateRow, "Unable to parse [%s] as the intermediateRow resulted in empty input row", intermediateRow);
}
return inputRows;
}
Use of org.apache.druid.java.util.common.parsers.ParseException in project druid by druid-io.
Class MapInputRowParser, method parse:
/**
 * Builds a {@link MapBasedInputRow} from a raw event map, extracting the timestamp via the
 * given {@link TimestampSpec} and the dimension list via the given {@link DimensionsSpec}.
 *
 * @throws ParseException if the timestamp cannot be extracted, is null, or falls outside
 *                        the representable (ETERNITY) range
 */
@VisibleForTesting
static InputRow parse(TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec, Map<String, Object> theMap) throws ParseException {
final List<String> dimensions = findDimensions(dimensionsSpec, theMap);
final DateTime eventTimestamp;
try {
eventTimestamp = timestampSpec.extractTimestamp(theMap);
} catch (Exception e) {
// Render the raw event only on the failure path; it may be expensive to stringify.
final String rawEvent = rawMapToPrint(theMap);
throw new ParseException(rawEvent, e, "Timestamp[%s] is unparseable! Event: %s", timestampSpec.getRawTimestamp(theMap), rawEvent);
}
if (eventTimestamp == null) {
final String rawEvent = rawMapToPrint(theMap);
throw new ParseException(rawEvent, "Timestamp[%s] is unparseable! Event: %s", timestampSpec.getRawTimestamp(theMap), rawEvent);
}
// Timestamps outside ETERNITY cannot be represented as a long epoch millis value.
if (!Intervals.ETERNITY.contains(eventTimestamp)) {
final String rawEvent = rawMapToPrint(theMap);
throw new ParseException(rawEvent, "Encountered row with timestamp[%s] that cannot be represented as a long: [%s]", eventTimestamp, rawEvent);
}
return new MapBasedInputRow(eventTimestamp, dimensions, theMap);
}
Use of org.apache.druid.java.util.common.parsers.ParseException in project druid by druid-io.
Class InfluxParser, method parseToMap:
/**
 * Parses a single InfluxDB line-protocol record into an ordered map containing the
 * measurement name, tags, fields, and (if present) the timestamp.
 *
 * @throws ParseException on syntax errors, multi-line input, or a non-whitelisted measurement
 */
@Nullable
@Override
public Map<String, Object> parseToMap(String input) {
// Run the generated ANTLR lexer/parser over the raw line.
final CharStream stream = new ANTLRInputStream(input);
final InfluxLineProtocolLexer protocolLexer = new InfluxLineProtocolLexer(stream);
final InfluxLineProtocolParser protocolParser = new InfluxLineProtocolParser(new CommonTokenStream(protocolLexer));
// Parsing must happen before the syntax-error check so the error count is populated.
final List<InfluxLineProtocolParser.LineContext> parsedLines = protocolParser.lines().line();
if (protocolParser.getNumberOfSyntaxErrors() != 0) {
throw new ParseException(null, "Unable to parse line.");
}
if (parsedLines.size() != 1) {
throw new ParseException(null, "Multiple lines present; unable to parse more than one per record.");
}
final InfluxLineProtocolParser.LineContext theLine = parsedLines.get(0);
final String measurementName = parseIdentifier(theLine.identifier());
if (!checkWhitelist(measurementName)) {
throw new ParseException(null, "Metric [%s] not whitelisted.", measurementName);
}
// LinkedHashMap preserves insertion order: measurement, tags, fields, timestamp.
final Map<String, Object> result = new LinkedHashMap<>();
result.put(MEASUREMENT_KEY, measurementName);
if (theLine.tag_set() != null) {
theLine.tag_set().tag_pair().forEach(pair -> parseTag(pair, result));
}
theLine.field_set().field_pair().forEach(pair -> parseField(pair, result));
if (theLine.timestamp() != null) {
parseTimestamp(theLine.timestamp().getText(), result);
}
return result;
}
Use of org.apache.druid.java.util.common.parsers.ParseException in project druid by druid-io.
Class ProtobufInputRowParser, method parseBatch:
/**
 * Decodes a Protobuf-encoded message into a single-element list of {@link InputRow}.
 * Flat specs read fields directly off the decoded message; otherwise the message is
 * rendered to JSON and run through the delegate parser.
 *
 * @throws ParseException if the message cannot be decoded or its timestamp extracted
 */
@Override
public List<InputRow> parseBatch(ByteBuffer input) {
if (parser == null) {
// Lazily build the delegate parser from the spec on first use.
parser = parseSpec.makeParser();
}
Map<String, Object> record;
DateTime timestamp;
if (isFlatSpec) {
try {
DynamicMessage message = protobufBytesDecoder.parse(input);
// Use JSON field names as keys so flat specs match the JSON-path form below.
record = CollectionUtils.mapKeys(message.getAllFields(), k -> k.getJsonName());
timestamp = this.timestampSpec.extractTimestamp(record);
} catch (Exception ex) {
throw new ParseException(null, ex, "Protobuf message could not be parsed");
}
} else {
try {
DynamicMessage message = protobufBytesDecoder.parse(input);
String json = JsonFormat.printer().print(message);
record = parser.parseToMap(json);
timestamp = this.timestampSpec.extractTimestamp(record);
} catch (Exception e) {
// Catch Exception (not just InvalidProtocolBufferException) for consistency with the
// flat branch above: parseToMap may return null (it is @Nullable) and extractTimestamp
// may throw, and those failures should also surface as a ParseException rather than
// escape as a raw RuntimeException.
throw new ParseException(null, e, "Protobuf message could not be parsed");
}
}
final List<String> dimensions;
if (!this.dimensions.isEmpty()) {
dimensions = this.dimensions;
} else {
// No explicit dimensions configured: use every record key that is not excluded.
dimensions = Lists.newArrayList(Sets.difference(record.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions()));
}
return ImmutableList.of(new MapBasedInputRow(timestamp, dimensions, record));
}
Use of org.apache.druid.java.util.common.parsers.ParseException in project druid by druid-io.
Class ParseExceptionHandlerTest, method testMetricWhenAllConfigurationsAreTurnedOff:
/**
 * Verifies that with logging disabled and no save/fail limits, every handled
 * ParseException is still counted by the unparseable-rows meter.
 */
@Test
public void testMetricWhenAllConfigurationsAreTurnedOff() {
final ParseException parseException = new ParseException(null, "test");
final RowIngestionMeters meters = new SimpleRowIngestionMeters();
final ParseExceptionHandler handler = new ParseExceptionHandler(meters, false, Integer.MAX_VALUE, 0);
for (int i = 0; i < 100; i++) {
handler.handle(parseException);
// The meter must tick exactly once per handled exception.
Assert.assertEquals(i + 1, meters.getUnparseable());
}
}
Aggregations