Usage of org.apache.druid.data.input.impl.FileEntity in the druid project (druid-io):
the method createReader of class AvroOCFReaderTest.
/**
 * Builds an {@link InputEntityReader} over a freshly generated Avro OCF file,
 * using the supplied mapper and optional reader schema.
 */
private InputEntityReader createReader(ObjectMapper mapper, Map<String, Object> readerSchema) throws Exception {
  // Generate a sample Avro record and persist it to a temporary OCF file.
  final GenericRecord datum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
  final File avroFile = AvroHadoopInputRowParserTest.createAvroFile(datum);

  // Row schema: "timestamp" column with auto-detected format, single dimension "eventType".
  final TimestampSpec tsSpec = new TimestampSpec("timestamp", "auto", null);
  final DimensionsSpec dimsSpec = new DimensionsSpec(
      DimensionsSpec.getDefaultSchemas(ImmutableList.of("eventType"))
  );
  final InputRowSchema rowSchema = new InputRowSchema(tsSpec, dimsSpec, ColumnsFilter.all());

  // Wrap the file as an input entity and hand everything to the OCF format.
  final AvroOCFInputFormat format = new AvroOCFInputFormat(mapper, null, readerSchema, null, null);
  final FileEntity fileEntity = new FileEntity(avroFile);
  return format.createReader(rowSchema, fileEntity, temporaryFolder.newFolder());
}
Usage of org.apache.druid.data.input.impl.FileEntity in the druid project (druid-io):
the method createReader of class BaseParquetReaderTest.
/**
 * Builds an {@link InputEntityReader} for the given Parquet file path using a
 * fresh Hadoop {@link Configuration}; no temporary directory is supplied.
 */
InputEntityReader createReader(String parquetFile, InputRowSchema schema, JSONPathSpec flattenSpec, boolean binaryAsString) {
  final ParquetInputFormat format = new ParquetInputFormat(flattenSpec, binaryAsString, new Configuration());
  final FileEntity source = new FileEntity(new File(parquetFile));
  return format.createReader(schema, source, null);
}
Usage of org.apache.druid.data.input.impl.FileEntity in the druid project (druid-io):
the method createReader of class OrcReaderTest.
/**
 * Builds an {@link InputEntityReader} for the given ORC data file, assembling
 * the row schema from the supplied timestamp and dimensions specs.
 */
private InputEntityReader createReader(TimestampSpec timestampSpec, DimensionsSpec dimensionsSpec, InputFormat inputFormat, String dataFile) throws IOException {
  // Wrap the on-disk file as the input entity to read from.
  final FileEntity source = new FileEntity(new File(dataFile));
  final InputRowSchema rowSchema = new InputRowSchema(timestampSpec, dimensionsSpec, ColumnsFilter.all());
  return inputFormat.createReader(rowSchema, source, temporaryFolder.newFolder());
}
Aggregations