Use of org.apache.hadoop.hive.ql.io.orc.OrcSerde in project presto by prestodb.
In class TestOrcReaderPositions, the createSequentialFile method:
private static void createSequentialFile(File file, int count) throws IOException, SerDeException {
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);
    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);
    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }
    writer.close(false);
}
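This helper only writes the file; the Presto test then reads it back through Presto's own ORC reader. As a rough, self-contained sketch of verifying the output with the plain Hive ORC API instead (verifySequentialFile is a hypothetical name, and the sketch assumes org.apache.hadoop.fs.Path, org.apache.hadoop.hive.ql.io.orc.OrcFile, Reader, RecordReader, and TestNG's assertEquals are imported):

// Hypothetical check that the file contains exactly `count` rows,
// using the Hive ORC reader rather than Presto's reader.
private static void verifySequentialFile(File file, int count) throws IOException {
    Reader reader = OrcFile.createReader(new Path(file.toURI()), OrcFile.readerOptions(new Configuration()));
    assertEquals(reader.getNumberOfRows(), count);
    RecordReader rows = reader.rows();
    int i = 0;
    Object row = null;
    while (rows.hasNext()) {
        // each row is a single-field struct whose BIGINT column holds the value i
        row = rows.next(row);
        i++;
    }
    rows.close();
    assertEquals(i, count);
}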
Use of org.apache.hadoop.hive.ql.io.orc.OrcSerde in project presto by prestodb.
In class TestOrcReaderPositions, the createMultiStripeFile method:
// write 5 stripes of 20 values each: (0,3,6,..,57), (60,..,117), .., (..297)
private static void createMultiStripeFile(File file) throws IOException, ReflectiveOperationException, SerDeException {
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, ImmutableList.of(BIGINT, VARCHAR));
    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector(ImmutableList.of(BIGINT, VARCHAR));
    Object row = objectInspector.create();
    StructField bigintField = objectInspector.getAllStructFieldRefs().get(0);
    StructField varcharField = objectInspector.getAllStructFieldRefs().get(1);
    for (int i = 0; i < 300; i += 3) {
        if ((i > 0) && (i % 60 == 0)) {
            flushWriter(writer);
        }
        objectInspector.setStructFieldData(row, bigintField, (long) i);
        objectInspector.setStructFieldData(row, varcharField, String.valueOf(i));
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }
    writer.close(false);
}
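Here flushWriter forces the ORC writer to close the current stripe, so the comment's layout of 5 stripes with 20 rows each should hold. A minimal sketch of checking that layout with the Hive ORC reader (the direct use of OrcFile/Reader/StripeInformation is an assumption and not how the Presto test itself verifies stripes; exact stripe boundaries can also depend on writer settings):

// Hypothetical stripe-layout check for the file produced above.
Reader reader = OrcFile.createReader(new Path(file.toURI()), OrcFile.readerOptions(new Configuration()));
assertEquals(reader.getNumberOfRows(), 100);
int stripeCount = 0;
for (StripeInformation stripe : reader.getStripes()) {
    // each flush should have closed a stripe of 20 rows
    assertEquals(stripe.getNumberOfRows(), 20);
    stripeCount++;
}
assertEquals(stripeCount, 5);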
Use of org.apache.hadoop.hive.ql.io.orc.OrcSerde in project presto by prestodb.
In class TestOrcMapNullKey, the createSingleColumnMapFileWithNullValues method:
private static TempFile createSingleColumnMapFileWithNullValues(Type mapType, Map<Long, Long> map) throws IOException {
    OrcSerde serde = new OrcSerde();
    TempFile tempFile = new TempFile();
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, mapType);
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", mapType);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);
    objectInspector.setStructFieldData(row, field, map);
    Writable record = serde.serialize(row, objectInspector);
    writer.write(record);
    writer.close(false);
    return tempFile;
}