Use of org.apache.hadoop.hive.serde2.Serializer in project presto by prestodb.
The class OrcTester, method writeOrcFileColumnOld:
/**
 * Writes every value from {@code values} as a single-column row through the legacy
 * (Hive) ORC writer path and returns the size of the resulting file.
 *
 * <p>When writing DWRF, the writer's low-memory flag is flipped once row 142_345 is
 * reached, to exercise the low-memory code path in tests.
 *
 * @param outputFile            destination file; its length is measured after close
 * @param format                output format; DWRF selects the facebook OrcSerde
 * @param recordWriter          writer that receives each serialized row
 * @param columnObjectInspector inspector describing the single column's type
 * @param values                raw column values; each is preprocessed before writing
 * @return the final file size as a succinct DataSize
 */
public static DataSize writeOrcFileColumnOld(File outputFile, Format format, RecordWriter recordWriter, ObjectInspector columnObjectInspector, Iterator<?> values) throws Exception {
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", columnObjectInspector);
    Object row = objectInspector.create();
    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    TypeInfo typeInfo = getTypeInfoFromTypeString(columnObjectInspector.getTypeName());
    // Create the serializer once up front; the format does not change per row, so
    // allocating a new OrcSerde on every iteration (as before) was wasted work.
    @SuppressWarnings("deprecation")
    Serializer serde = (DWRF == format) ? new com.facebook.hive.orc.OrcSerde() : new OrcSerde();
    int i = 0;
    while (values.hasNext()) {
        Object value = values.next();
        // Convert the value into the representation the old writer expects for this type.
        value = preprocessWriteValueOld(typeInfo, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);
        // Trigger the DWRF low-memory path exactly once, at this specific row index.
        if (DWRF == format && i == 142_345) {
            setDwrfLowMemoryFlag(recordWriter);
        }
        Writable record = serde.serialize(row, objectInspector);
        recordWriter.write(record);
        i++;
    }
    recordWriter.close(false);
    return succinctBytes(outputFile.length());
}
Use of org.apache.hadoop.hive.serde2.Serializer in project presto by prestodb.
The class TestOrcReaderPositions, method createMultiStripeFile:
// Writes 5 stripes of 20 values each: (0,3,6,..,57), (60,..,117), .., (..297).
private static void createMultiStripeFile(File file) throws IOException, ReflectiveOperationException, SerDeException {
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);
    @SuppressWarnings("deprecation")
    Serializer serde = new OrcSerde();
    SettableStructObjectInspector rowInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = rowInspector.create();
    StructField column = rowInspector.getAllStructFieldRefs().get(0);
    // Values run 0..297 in steps of 3; every 20 values (i.e., each multiple of 60,
    // except the very first value) the writer is flushed to start a new stripe.
    for (int value = 0; value < 300; value += 3) {
        boolean stripeBoundary = (value > 0) && (value % 60 == 0);
        if (stripeBoundary) {
            flushWriter(writer);
        }
        rowInspector.setStructFieldData(row, column, (long) value);
        writer.write(serde.serialize(row, rowInspector));
    }
    writer.close(false);
}
Use of org.apache.hadoop.hive.serde2.Serializer in project presto by prestodb.
The class TestOrcReaderPositions, method createSequentialFile:
// Writes a single-column ORC file containing the longs 0, 1, ..., count - 1, in order.
private static void createSequentialFile(File file, int count) throws IOException, ReflectiveOperationException, SerDeException {
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);
    @SuppressWarnings("deprecation")
    Serializer serde = new OrcSerde();
    SettableStructObjectInspector rowInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = rowInspector.create();
    StructField column = rowInspector.getAllStructFieldRefs().get(0);
    for (int value = 0; value < count; value++) {
        rowInspector.setStructFieldData(row, column, (long) value);
        writer.write(serde.serialize(row, rowInspector));
    }
    writer.close(false);
}
Use of org.apache.hadoop.hive.serde2.Serializer in project presto by prestodb.
The class RcFileTester, method writeRcFileColumnOld:
/**
 * Writes every value from {@code values} as a single-column row through the legacy
 * (Hive) RCFile writer path and returns the resulting file's size.
 */
private static DataSize writeRcFileColumnOld(File outputFile, Format format, Compression compression, Type type, Iterator<?> values) throws Exception {
    ObjectInspector columnInspector = getJavaObjectInspector(type);
    RecordWriter writer = createRcFileWriterOld(outputFile, compression, columnInspector);

    SettableStructObjectInspector rowInspector = createSettableStructObjectInspector("test", columnInspector);
    Object row = rowInspector.create();
    List<StructField> columns = ImmutableList.copyOf(rowInspector.getAllStructFieldRefs());

    // The old serializer is configured through a JobConf plus table properties
    // naming the single "test" column and its type.
    Properties tableProperties = new Properties();
    tableProperties.setProperty("columns", "test");
    tableProperties.setProperty("columns.types", rowInspector.getTypeName());
    @SuppressWarnings("deprecation")
    Serializer serializer = format.createSerializer();
    serializer.initialize(new JobConf(false), tableProperties);

    while (values.hasNext()) {
        Object cell = preprocessWriteValueOld(type, values.next());
        rowInspector.setStructFieldData(row, columns.get(0), cell);
        writer.write(serializer.serialize(row, rowInspector));
    }
    writer.close(false);

    return new DataSize(outputFile.length(), BYTE).convertToMostSuccinctDataSize();
}
Aggregations