Example use of org.apache.drill.exec.store.easy.json.loader.JsonLoaderOptions in the Apache Drill project:
the parseAndWrite method of the JsonMessageReader class (Kafka storage plugin).
/**
 * Parses the JSON payload of one Kafka record into the current row, then
 * appends the Kafka metadata columns (topic, partition, offset, timestamp,
 * message key) and saves the row.
 *
 * @param record      the consumed Kafka record supplying the metadata values
 * @param recordArray the raw JSON message bytes to parse
 */
private void parseAndWrite(ConsumerRecord<?, ?> record, byte[] recordArray) {
  stream.setValue(new ByteArrayInputStream(recordArray));
  if (kafkaJsonLoader == null) {
    // Lazily created on the first record: the loader binds to the re-settable
    // stream wrapper, so a single loader instance serves the whole scan.
    kafkaJsonLoader = createJsonLoader();
  }
  RowSetLoader rowWriter = resultSetLoader.writer();
  rowWriter.start();
  if (kafkaJsonLoader.parser().next()) {
    writeValue(rowWriter, MetaDataField.KAFKA_TOPIC, record.topic());
    writeValue(rowWriter, MetaDataField.KAFKA_PARTITION_ID, record.partition());
    writeValue(rowWriter, MetaDataField.KAFKA_OFFSET, record.offset());
    writeValue(rowWriter, MetaDataField.KAFKA_TIMESTAMP, record.timestamp());
    // Key may legitimately be absent; write null rather than NPE on toString().
    writeValue(rowWriter, MetaDataField.KAFKA_MSG_KEY, record.key() != null ? record.key().toString() : null);
    rowWriter.save();
  }
}

/**
 * Builds the Kafka JSON loader configured from the plugin's read options.
 * NOTE(review): {@code options(...)} appears to replace the options object
 * installed by {@code standardOptions(...)}, making the latter call
 * redundant here — call order preserved pending confirmation against
 * {@code JsonLoaderBuilder}.
 */
private KafkaJsonLoader createJsonLoader() {
  JsonLoaderOptions jsonLoaderOptions = new JsonLoaderOptions();
  jsonLoaderOptions.allTextMode = readOptions.isAllTextMode();
  jsonLoaderOptions.readNumbersAsDouble = readOptions.isReadNumbersAsDouble();
  jsonLoaderOptions.skipMalformedRecords = readOptions.isSkipInvalidRecords();
  jsonLoaderOptions.allowNanInf = readOptions.isAllowNanInf();
  jsonLoaderOptions.enableEscapeAnyChar = readOptions.isAllowEscapeAnyChar();
  // Invalid-records flag governs both record- and document-level skipping.
  jsonLoaderOptions.skipMalformedDocument = readOptions.isSkipInvalidRecords();
  return (KafkaJsonLoader) new KafkaJsonLoader.KafkaJsonLoaderBuilder()
      .resultSetLoader(resultSetLoader)
      .standardOptions(negotiator.queryOptions())
      .options(jsonLoaderOptions)
      .errorContext(negotiator.parentErrorContext())
      .fromStream(() -> stream)
      .build();
}
Example use of org.apache.drill.exec.store.easy.json.loader.JsonLoaderOptions in the Apache Drill project:
the open method of the HttpBatchReader class (HTTP storage plugin).
/**
 * Opens the HTTP scan: issues the request, wires the response stream into a
 * JSON loader, and registers an error context that tags failures with the
 * request URL.
 *
 * @param negotiator schema negotiator supplying config, options, and the
 *                   result set loader
 * @return true — this reader always has (at most) one batch source to read
 */
@Override
public boolean open(SchemaNegotiator negotiator) {
  // Result set loader setup
  String tempDirPath = negotiator.drillConfig().getString(ExecConstants.DRILL_TMP_DIR);
  HttpUrl url = buildUrl();
  logger.debug("Final URL: {}", url);
  // Attach the request URL to any error raised downstream of this reader.
  CustomErrorContext errorContext = new ChildErrorContext(negotiator.parentErrorContext()) {
    @Override
    public void addContext(UserException.Builder builder) {
      super.addContext(builder);
      builder.addContext("URL", url.toString());
    }
  };
  negotiator.setErrorContext(errorContext);

  // Http client setup
  SimpleHttp http = SimpleHttp.builder()
      .scanDefn(subScan)
      .url(url)
      .tempDir(new File(tempDirPath))
      .proxyConfig(proxySettings(negotiator.drillConfig(), url))
      .errorContext(errorContext)
      .build();

  // JSON loader setup
  resultSetLoader = negotiator.build();
  if (implicitColumnsAreProjected()) {
    implicitColumns = new ImplicitColumns(resultSetLoader.writer());
    buildImplicitColumns();
  }

  InputStream inStream = http.getInputStream();
  try {
    // Inside the try so a failure here still closes the stream (previously a
    // throw from populateImplicitFieldMap leaked the open connection).
    populateImplicitFieldMap(http);
    JsonLoaderBuilder jsonBuilder = new JsonLoaderBuilder()
        .implicitFields(implicitColumns)
        .resultSetLoader(resultSetLoader)
        .maxRows(maxRecords)
        .dataPath(subScan.tableSpec().connectionConfig().dataPath())
        .errorContext(errorContext)
        .fromStream(inStream);
    // Exactly one of options()/standardOptions() is applied: plugin-level
    // JSON options, when configured, take precedence over standard session
    // options. (A duplicate standardOptions() call formerly in the chain
    // above was dead code — whichever of these calls runs replaced it.)
    if (subScan.tableSpec().connectionConfig().jsonOptions() != null) {
      JsonLoaderOptions jsonOptions = subScan.tableSpec().connectionConfig().jsonOptions().getJsonOptions(negotiator.queryOptions());
      jsonBuilder.options(jsonOptions);
    } else {
      jsonBuilder.standardOptions(negotiator.queryOptions());
    }
    jsonLoader = jsonBuilder.build();
  } catch (Throwable t) {
    // Paranoia: ensure stream is closed if anything goes wrong.
    // After this, the JSON loader owns the stream and will close it.
    AutoCloseables.closeSilently(inStream);
    throw t;
  }
  return true;
}
Aggregations: other usages of JsonLoaderOptions across the project.