Usage of org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector in the Apache Hive project:
class VectorDeserializeRow, method convertStructRowColumn.
/**
 * Converts the struct value at {@code batchIndex} of a struct-typed column vector
 * into a standard Hive struct object via the field's settable object inspector.
 *
 * @param colVector  the column vector holding the struct data; must be a StructColumnVector
 * @param batchIndex the row position within the current batch
 * @param field      descriptor carrying the object inspector and per-child field helpers
 * @return the struct object created and populated through the inspector
 * @throws IOException if deserializing a child field fails
 */
private Object convertStructRowColumn(ColumnVector colVector, int batchIndex, Field field) throws IOException {
final SettableStructObjectInspector structOI = (SettableStructObjectInspector) field.objectInspector;
final List<? extends StructField> structFieldRefs = structOI.getAllStructFieldRefs();
final StructComplexTypeHelper helper = (StructComplexTypeHelper) field.getComplexHelper();
final Field[] childFields = helper.getFields();
final StructColumnVector structVector = (StructColumnVector) colVector;
final Object result = structOI.create();
int childIndex = 0;
for (final Field childField : childFields) {
final Object childValue = convertComplexFieldRowColumn(structVector.fields[childIndex], batchIndex, childField);
structOI.setStructFieldData(result, structFieldRefs.get(childIndex), childValue);
childIndex++;
}
// Tell the reader this variable-length complex value is fully consumed.
deserializeRead.finishComplexVariableFieldsType();
return result;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector in the Presto (prestodb) project:
class ParquetTester, method writeParquetColumn.
/**
 * Writes one Parquet file from per-field value iterators: rows are emitted until
 * any iterator is exhausted, each row being assembled through the settable
 * inspector and serialized with {@link ParquetHiveSerDe}.
 *
 * @param jobConf              Hadoop job configuration passed to the writer and serde
 * @param outputFile           destination file; its final length is the return value
 * @param compressionCodecName codec; UNCOMPRESSED disables compression
 * @param tableProperties      Hive table properties for writer/serde initialization
 * @param objectInspector      inspector used to build and serialize each row
 * @param valuesByField        one iterator of cell values per struct field
 * @param parquetSchema        optional explicit Parquet message type
 * @param singleLevelArray     whether to use single-level array encoding
 * @return the size of the written file
 * @throws Exception if writing or serialization fails
 */
private static DataSize writeParquetColumn(JobConf jobConf, File outputFile, CompressionCodecName compressionCodecName, Properties tableProperties, SettableStructObjectInspector objectInspector, Iterator<?>[] valuesByField, Optional<MessageType> parquetSchema, boolean singleLevelArray) throws Exception {
RecordWriter recordWriter = new TestMapredParquetOutputFormat(parquetSchema, singleLevelArray).getHiveRecordWriter(jobConf, new Path(outputFile.toURI()), Text.class, compressionCodecName != UNCOMPRESSED, tableProperties, () -> {
});
Object row = objectInspector.create();
List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
// Fix: the serde was previously constructed and initialized on EVERY loop
// iteration although it is loop-invariant; create and initialize it once.
ParquetHiveSerDe serde = new ParquetHiveSerDe();
serde.initialize(jobConf, tableProperties, null);
while (stream(valuesByField).allMatch(Iterator::hasNext)) {
for (int field = 0; field < fields.size(); field++) {
Object value = valuesByField[field].next();
objectInspector.setStructFieldData(row, fields.get(field), value);
}
Writable record = serde.serialize(row, objectInspector);
recordWriter.write(record);
}
recordWriter.close(false);
return succinctBytes(outputFile.length());
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector in the Presto (prestodb) project:
class OrcTester, method writeOrcFileColumnHive.
/**
 * Serializes the given column values with the Hive serializer for {@code format}
 * and writes every resulting record through {@code recordWriter}.
 *
 * @param outputFile   destination file; its final length is the return value
 * @param format       target format, supplying the serializer (DWRF gets special handling)
 * @param recordWriter writer receiving each serialized record; closed on completion
 * @param types        column types, one per struct field
 * @param values       column-major cell values: values.get(column).get(row)
 * @return the size of the written file
 * @throws Exception if serialization or writing fails
 */
private static DataSize writeOrcFileColumnHive(File outputFile, Format format, RecordWriter recordWriter, List<Type> types, List<List<?>> values) throws Exception {
SettableStructObjectInspector inspector = createSettableStructObjectInspector(types);
Object row = inspector.create();
List<StructField> structFields = ImmutableList.copyOf(inspector.getAllStructFieldRefs());
Serializer serializer = format.createSerializer();
int rowCount = values.get(0).size();
for (int rowIndex = 0; rowIndex < rowCount; rowIndex++) {
for (int column = 0; column < types.size(); column++) {
Object cell = preprocessWriteValueHive(types.get(column), values.get(column).get(rowIndex));
inspector.setStructFieldData(row, structFields.get(column), cell);
}
// Exercise the DWRF low-memory code path partway through the write.
if (DWRF == format && rowIndex == 142_345) {
setDwrfLowMemoryFlag(recordWriter);
}
recordWriter.write(serializer.serialize(row, inspector));
}
recordWriter.close(false);
return succinctBytes(outputFile.length());
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector in the Presto (prestodb) project:
class AbstractTestOrcReader, method createTempFile.
/**
 * Creates a temporary ORC file containing {@code nRecords} identical rows with a
 * single BIGINT column whose value is always 1.
 *
 * @param nRecords number of (identical) rows to write
 * @return the temp file handle wrapping the written ORC file
 * @throws IOException    if writing fails
 * @throws SerDeException if serializing the row fails
 */
private static TempFile createTempFile(int nRecords) throws IOException, SerDeException {
TempFile tempFile = new TempFile();
RecordWriter recordWriter = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, BIGINT);
@SuppressWarnings("deprecation") Serializer serializer = new OrcSerde();
SettableStructObjectInspector inspector = createSettableStructObjectInspector("test", BIGINT);
Object row = inspector.create();
StructField onlyField = inspector.getAllStructFieldRefs().get(0);
inspector.setStructFieldData(row, onlyField, 1L);
// Every row is identical, so serialize once and reuse the record.
Writable record = serializer.serialize(row, inspector);
int remaining = nRecords;
while (remaining > 0) {
recordWriter.write(record);
remaining--;
}
recordWriter.close(false);
return tempFile;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector in the Presto (prestodb) project:
class TestOrcReaderMemoryUsage, method createSingleColumnFileWithNullValues.
/**
 * Write a file that contains a number of rows with 1 BIGINT column, and some rows have null values.
 * Every tenth row (index divisible by 10) holds null; all others hold the row index as a long.
 *
 * @param rows number of rows to write
 * @return the temp file handle wrapping the written ORC file
 * @throws IOException                  if writing fails
 * @throws ReflectiveOperationException if toggling internal writer state fails
 * @throws SerDeException               if serializing a row fails
 */
private static TempFile createSingleColumnFileWithNullValues(int rows) throws IOException, ReflectiveOperationException, SerDeException {
Serializer serializer = new OrcSerde();
TempFile tempFile = new TempFile();
FileSinkOperator.RecordWriter recordWriter = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, BIGINT);
SettableStructObjectInspector inspector = createSettableStructObjectInspector("test", BIGINT);
Object row = inspector.create();
StructField onlyField = inspector.getAllStructFieldRefs().get(0);
for (int rowIndex = 0; rowIndex < rows; rowIndex++) {
// Null on every tenth row; otherwise the row index itself.
Object cell = (rowIndex % 10 == 0) ? null : (long) rowIndex;
inspector.setStructFieldData(row, onlyField, cell);
recordWriter.write(serializer.serialize(row, inspector));
}
recordWriter.close(false);
return tempFile;
}
Aggregations