Example usage of org.apache.drill.exec.store.easy.json.reader.CountingJsonReader from project drill (fork: axbaretto).
Source: the setup method of class JSONRecordReader.
/**
 * Prepares this record reader: opens the input stream (when reading from a
 * file path), creates the vector writer, and picks the JSON reader variant.
 */
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
  try {
    // A null hadoopPath means the input does not come from a file on the
    // file system, so there is nothing to open.
    if (hadoopPath != null) {
      this.stream = fileSystem.openPossiblyCompressedStream(hadoopPath);
    }

    this.writer = new VectorContainerWriter(output, unionEnabled);

    if (!isSkipQuery()) {
      // Full read: materialize the selected columns with the configured
      // text/number handling options.
      this.jsonReader = new JsonReader.Builder(fragmentContext.getManagedBuffer())
          .schemaPathColumns(ImmutableList.copyOf(getColumns()))
          .allTextMode(enableAllTextMode)
          .skipOuterList(true)
          .readNumbersAsDouble(readNumbersAsDouble)
          .enableNanInf(enableNanInf)
          .build();
    } else {
      // Skip query (no columns needed): a counting reader is sufficient.
      this.jsonReader = new CountingJsonReader(fragmentContext.getManagedBuffer(), enableNanInf);
    }

    setupParser();
  } catch (final Exception e) {
    // Funnel every setup failure through the operator's error handler.
    handleAndRaise("Failure reading JSON file", e);
  }
}
Example usage of org.apache.drill.exec.store.easy.json.reader.CountingJsonReader from project drill (upstream: apache).
Source: the setup method of class JSONRecordReader.
/**
 * Prepares this record reader for execution: opens the (possibly compressed)
 * input stream when a file path is set, creates the vector container writer,
 * and selects the JSON reader implementation.
 *
 * @param context the operator context supplied by the framework
 * @param output  mutator used to create the output value vectors
 * @throws ExecutionSetupException if any part of the setup fails
 */
@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
  try {
    if (hadoopPath != null) {
      stream = fileSystem.openPossiblyCompressedStream(hadoopPath);
    }
    writer = new VectorContainerWriter(output, unionEnabled);
    if (isSkipQuery()) {
      // Skip queries do not need column values, only record counts.
      jsonReader = new CountingJsonReader(fragmentContext.getManagedBuffer(), enableNanInf, enableEscapeAnyChar);
    } else {
      // Consistent bare field access (the original mixed `this.jsonReader`
      // with bare `stream`/`writer`); builder chain split for readability.
      jsonReader = new JsonReader.Builder(fragmentContext.getManagedBuffer())
          .schemaPathColumns(ImmutableList.copyOf(getColumns()))
          .allTextMode(enableAllTextMode)
          .skipOuterList(true)
          .readNumbersAsDouble(readNumbersAsDouble)
          .enableNanInf(enableNanInf)
          .enableEscapeAnyChar(enableEscapeAnyChar)
          .build();
    }
    setupParser();
  } catch (Exception e) {
    // Route all setup failures through the operator's standard error path.
    handleAndRaise("Failure reading JSON file", e);
  }
}
Aggregations