Use of org.apache.hadoop.hive.ql.io.orc.OrcProto.Type in the Apache NiFi project.
The writeTypes method of the OrcFlowFileWriter class.
private static void writeTypes(OrcProto.Footer.Builder builder, TreeWriter treeWriter) {
  OrcProto.Type.Builder type = OrcProto.Type.newBuilder();
  switch (treeWriter.inspector.getCategory()) {
    case PRIMITIVE:
      switch (((PrimitiveObjectInspector) treeWriter.inspector).getPrimitiveCategory()) {
        case BOOLEAN:
          type.setKind(OrcProto.Type.Kind.BOOLEAN);
          break;
        case BYTE:
          type.setKind(OrcProto.Type.Kind.BYTE);
          break;
        case SHORT:
          type.setKind(OrcProto.Type.Kind.SHORT);
          break;
        case INT:
          type.setKind(OrcProto.Type.Kind.INT);
          break;
        case LONG:
          type.setKind(OrcProto.Type.Kind.LONG);
          break;
        case FLOAT:
          type.setKind(OrcProto.Type.Kind.FLOAT);
          break;
        case DOUBLE:
          type.setKind(OrcProto.Type.Kind.DOUBLE);
          break;
        case STRING:
          type.setKind(OrcProto.Type.Kind.STRING);
          break;
        case CHAR:
          // The char length needs to be written to the file and should be
          // available from the object inspector
          CharTypeInfo charTypeInfo = (CharTypeInfo) ((PrimitiveObjectInspector) treeWriter.inspector).getTypeInfo();
          type.setKind(OrcProto.Type.Kind.CHAR);
          type.setMaximumLength(charTypeInfo.getLength());
          break;
        case VARCHAR:
          // The varchar length needs to be written to the file and should be
          // available from the object inspector
          VarcharTypeInfo typeInfo = (VarcharTypeInfo) ((PrimitiveObjectInspector) treeWriter.inspector).getTypeInfo();
          type.setKind(OrcProto.Type.Kind.VARCHAR);
          type.setMaximumLength(typeInfo.getLength());
          break;
        case BINARY:
          type.setKind(OrcProto.Type.Kind.BINARY);
          break;
        case TIMESTAMP:
          type.setKind(OrcProto.Type.Kind.TIMESTAMP);
          break;
        case DATE:
          type.setKind(OrcProto.Type.Kind.DATE);
          break;
        case DECIMAL:
          // Decimal needs both precision and scale recorded in the footer
          DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) ((PrimitiveObjectInspector) treeWriter.inspector).getTypeInfo();
          type.setKind(OrcProto.Type.Kind.DECIMAL);
          type.setPrecision(decTypeInfo.precision());
          type.setScale(decTypeInfo.scale());
          break;
        default:
          throw new IllegalArgumentException("Unknown primitive category: "
              + ((PrimitiveObjectInspector) treeWriter.inspector).getPrimitiveCategory());
      }
      break;
    case LIST:
      // A list has a single child type: its element
      type.setKind(OrcProto.Type.Kind.LIST);
      type.addSubtypes(treeWriter.childrenWriters[0].id);
      break;
    case MAP:
      // A map has two child types: key and value
      type.setKind(OrcProto.Type.Kind.MAP);
      type.addSubtypes(treeWriter.childrenWriters[0].id);
      type.addSubtypes(treeWriter.childrenWriters[1].id);
      break;
    case STRUCT:
      // A struct records one subtype id and one field name per member
      type.setKind(OrcProto.Type.Kind.STRUCT);
      for (TreeWriter child : treeWriter.childrenWriters) {
        type.addSubtypes(child.id);
      }
      for (StructField field : ((StructTreeWriter) treeWriter).fields) {
        type.addFieldNames(field.getFieldName());
      }
      break;
    case UNION:
      type.setKind(OrcProto.Type.Kind.UNION);
      for (TreeWriter child : treeWriter.childrenWriters) {
        type.addSubtypes(child.id);
      }
      break;
    default:
      throw new IllegalArgumentException("Unknown category: " + treeWriter.inspector.getCategory());
  }
  // Append this type, then recurse so each child's type follows in pre-order
  builder.addTypes(type);
  for (TreeWriter child : treeWriter.childrenWriters) {
    writeTypes(builder, child);
  }
}
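The recursion flattens the type tree into the footer in pre-order, so each type's position in the list matches the column id assigned to its TreeWriter. As a minimal sketch of that layout (TypeListSketch is a hypothetical class name; only the OrcProto protobuf builders already used above are assumed), here is the equivalent hand-built type list for a schema of struct<name:string,age:int>:

import org.apache.hadoop.hive.ql.io.orc.OrcProto;

// Hypothetical illustration: builds by hand the flattened type list that
// writeTypes would emit for struct<name:string,age:int>.
// Pre-order column ids: 0 = the struct itself, 1 = name, 2 = age.
public class TypeListSketch {
  public static void main(String[] args) {
    OrcProto.Footer.Builder footer = OrcProto.Footer.newBuilder();

    // Root struct (id 0) lists its children's ids and field names
    footer.addTypes(OrcProto.Type.newBuilder()
        .setKind(OrcProto.Type.Kind.STRUCT)
        .addSubtypes(1)
        .addSubtypes(2)
        .addFieldNames("name")
        .addFieldNames("age"));

    // Children follow in pre-order: string (id 1), then int (id 2)
    footer.addTypes(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRING));
    footer.addTypes(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.INT));

    System.out.println(footer.build());
  }
}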