Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project Hive by Apache.
The class DruidSerDe, method initFromMetaDataQuery:
private void initFromMetaDataQuery(final Configuration configuration, final Properties properties) throws SerDeException {
  final List<String> columnNames = new ArrayList<>();
  final List<PrimitiveTypeInfo> columnTypes = new ArrayList<>();
  final List<ObjectInspector> inspectors = new ArrayList<>();
  String dataSource = properties.getProperty(Constants.DRUID_DATA_SOURCE);
  if (dataSource == null) {
    throw new SerDeException("Druid data source not specified; use " + Constants.DRUID_DATA_SOURCE + " in table properties");
  }
  SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
  builder.dataSource(dataSource);
  builder.merge(true);
  builder.analysisTypes();
  SegmentMetadataQuery query = builder.build();
  // Execute query in Druid
  String address = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
  if (org.apache.commons.lang3.StringUtils.isEmpty(address)) {
    throw new SerDeException("Druid broker address not specified in configuration");
  }
  // Infer schema
  SegmentAnalysis schemaInfo;
  try {
    schemaInfo = submitMetadataRequest(address, query);
  } catch (IOException e) {
    throw new SerDeException(e);
  }
  for (Entry<String, ColumnAnalysis> columnInfo : schemaInfo.getColumns().entrySet()) {
    if (columnInfo.getKey().equals(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN)) {
      // Special handling for timestamp column
      // field name
      columnNames.add(columnInfo.getKey());
      // field type
      PrimitiveTypeInfo type = tsTZTypeInfo;
      columnTypes.add(type);
      inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type));
      continue;
    }
    // field name
    columnNames.add(columnInfo.getKey());
    // field type
    PrimitiveTypeInfo type = DruidSerDeUtils.convertDruidToHiveType(columnInfo.getValue().getType());
    columnTypes.add(type instanceof TimestampLocalTZTypeInfo ? tsTZTypeInfo : type);
    inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type));
  }
  columns = columnNames.toArray(new String[columnNames.size()]);
  types = columnTypes.toArray(new PrimitiveTypeInfo[columnTypes.size()]);
  inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
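The pattern above is: collect the column names, pick a writable primitive inspector for each column's type, and let getStandardStructObjectInspector combine the two parallel lists into the row-level inspector that the SerDe stores in its inspector field. A minimal standalone sketch of that last step follows; the class name StructOISketch and the three columns (__time, page, added) are invented for illustration and do not come from the project.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class StructOISketch {
  public static void main(String[] args) {
    // Hypothetical column layout standing in for the schema inferred from Druid.
    List<String> columnNames = Arrays.asList("__time", "page", "added");
    // One writable primitive inspector per column, in the same order as the names.
    List<ObjectInspector> inspectors = Arrays.asList(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.stringTypeInfo),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.longTypeInfo));

    // The struct inspector describes the whole row: field names, field types and accessors.
    StructObjectInspector inspector =
        ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
    System.out.println(inspector.getTypeName()); // e.g. struct<__time:timestamp,page:string,added:bigint>
  }
}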
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project Hive by Apache.
The class SerdeRandomRowSource, method getObjectInspector:
private ObjectInspector getObjectInspector(TypeInfo typeInfo) {
  ObjectInspector objectInspector;
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      {
        final PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) typeInfo;
        objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveType);
      }
      break;
    case MAP:
      {
        final MapTypeInfo mapType = (MapTypeInfo) typeInfo;
        final MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
        objectInspector = mapInspector;
      }
      break;
    case LIST:
      {
        final ListTypeInfo listType = (ListTypeInfo) typeInfo;
        final ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(getObjectInspector(listType.getListElementTypeInfo()));
        objectInspector = listInspector;
      }
      break;
    case STRUCT:
      {
        final StructTypeInfo structType = (StructTypeInfo) typeInfo;
        final List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
        final List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
        for (TypeInfo fieldType : fieldTypes) {
          fieldInspectors.add(getObjectInspector(fieldType));
        }
        final StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(structType.getAllStructFieldNames(), fieldInspectors);
        objectInspector = structInspector;
      }
      break;
    case UNION:
      {
        final UnionTypeInfo unionType = (UnionTypeInfo) typeInfo;
        final List<TypeInfo> fieldTypes = unionType.getAllUnionObjectTypeInfos();
        final List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
        for (TypeInfo fieldType : fieldTypes) {
          fieldInspectors.add(getObjectInspector(fieldType));
        }
        final UnionObjectInspector unionInspector = ObjectInspectorFactory.getStandardUnionObjectInspector(fieldInspectors);
        objectInspector = unionInspector;
      }
      break;
    default:
      throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
  }
  Preconditions.checkState(objectInspector != null);
  return objectInspector;
}
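This recursion hand-builds, for every TypeInfo category, the same kind of standard writable inspectors that TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo produces in one call. A minimal sketch of that shortcut follows; the class name and the nested type string are invented for illustration.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class RecursiveOISketch {
  public static void main(String[] args) {
    // Hypothetical nested Hive type string: a struct containing a list and a map of structs.
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(
        "struct<id:int,tags:array<string>,props:map<string,struct<a:int,b:double>>>");

    // Recursively builds standard list/map/struct/union inspectors with writable primitive
    // leaves, matching what the switch statement above assembles by hand.
    ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    System.out.println(oi.getTypeName());
  }
}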
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project Hive by Apache.
The class ObjectInspectorConverters, method getConvertedOI:
/**
 * Utility function to convert from one object inspector type to another.
 * The output object inspector type should have all fields as settableOI type.
 * The above condition can be violated only if equalsCheck is true and inputOI is
 * equal to outputOI.
 * @param inputOI : input object inspector
 * @param outputOI : output object inspector
 * @param oiSettableProperties : The object inspector to isSettable mapping used to cache
 *                               intermediate results.
 * @param equalsCheck : Do we need to check if the inputOI and outputOI are the same?
 *                      true : If they are the same, we return the object inspector directly.
 *                      false : Do not perform an equality check on inputOI and outputOI
 * @return : The output object inspector containing all settable fields. The return value
 *           can contain non-settable fields only if inputOI equals outputOI and equalsCheck is
 *           true.
 */
public static ObjectInspector getConvertedOI(ObjectInspector inputOI, ObjectInspector outputOI, Map<ObjectInspector, Boolean> oiSettableProperties, boolean equalsCheck) {
  // 1. If equalsCheck is true and the inputOI is the same as the outputOI, OR
  // 2. If the outputOI has all fields settable, return it
  if ((equalsCheck && inputOI.equals(outputOI)) || ObjectInspectorUtils.hasAllFieldsSettable(outputOI, oiSettableProperties) == true) {
    return outputOI;
  }
  // T is settable recursively, i.e. all the nested fields are also settable.
  switch (outputOI.getCategory()) {
    case PRIMITIVE:
      // Create a writable object inspector for primitive type and return it.
      PrimitiveObjectInspector primOutputOI = (PrimitiveObjectInspector) outputOI;
      return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primOutputOI.getTypeInfo());
    case STRUCT:
      StructObjectInspector structOutputOI = (StructObjectInspector) outputOI;
      // Create a standard settable struct object inspector.
      List<? extends StructField> listFields = structOutputOI.getAllStructFieldRefs();
      List<String> structFieldNames = new ArrayList<String>(listFields.size());
      List<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>(listFields.size());
      for (StructField listField : listFields) {
        structFieldNames.add(listField.getFieldName());
        // We need to make sure that the underlying fields are settable as well.
        // Hence, the recursive call for each field.
        // Note that equalsCheck is false while invoking getConvertedOI() because
        // we need to bypass the initial inputOI.equals(outputOI) check.
        structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(), listField.getFieldObjectInspector(), oiSettableProperties, false));
      }
      return ObjectInspectorFactory.getStandardStructObjectInspector(structFieldNames, structFieldObjectInspectors);
    case LIST:
      ListObjectInspector listOutputOI = (ListObjectInspector) outputOI;
      // We need to make sure that the list element type is settable.
      return ObjectInspectorFactory.getStandardListObjectInspector(getConvertedOI(listOutputOI.getListElementObjectInspector(), listOutputOI.getListElementObjectInspector(), oiSettableProperties, false));
    case MAP:
      MapObjectInspector mapOutputOI = (MapObjectInspector) outputOI;
      // We need to make sure that the key type and the value types are settable.
      return ObjectInspectorFactory.getStandardMapObjectInspector(getConvertedOI(mapOutputOI.getMapKeyObjectInspector(), mapOutputOI.getMapKeyObjectInspector(), oiSettableProperties, false), getConvertedOI(mapOutputOI.getMapValueObjectInspector(), mapOutputOI.getMapValueObjectInspector(), oiSettableProperties, false));
    case UNION:
      UnionObjectInspector unionOutputOI = (UnionObjectInspector) outputOI;
      // Create a standard settable union object inspector.
      List<ObjectInspector> unionListFields = unionOutputOI.getObjectInspectors();
      List<ObjectInspector> unionFieldObjectInspectors = new ArrayList<ObjectInspector>(unionListFields.size());
      for (ObjectInspector listField : unionListFields) {
        // We need to make sure that all the fields associated with the union are settable.
        unionFieldObjectInspectors.add(getConvertedOI(listField, listField, oiSettableProperties, false));
      }
      return ObjectInspectorFactory.getStandardUnionObjectInspector(unionFieldObjectInspectors);
    default:
      // Unsupported in-memory structure.
      throw new RuntimeException("Hive internal error: conversion of " + inputOI.getTypeName() + " to " + outputOI.getTypeName() + " not supported yet.");
  }
}
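Callers typically pair getConvertedOI with ObjectInspectorConverters.getConverter: the first picks an output inspector whose fields are all settable, the second builds a Converter that writes input values into that representation. A minimal sketch with a simple primitive pair follows; the class name and the sample value are invented, and the expected behaviour is described on a best-effort basis.

import java.util.HashMap;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConvertedOISketch {
  public static void main(String[] args) {
    // Hypothetical pair: rows arrive as Java Strings, output should use the writable (Text) form.
    ObjectInspector inputOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;

    // Ask for an output inspector whose fields are all settable; the writable string
    // inspector already qualifies, so it should come back unchanged.
    ObjectInspector convertedOI = ObjectInspectorConverters.getConvertedOI(
        inputOI, outputOI, new HashMap<ObjectInspector, Boolean>(), true);

    // Build a converter from the input representation into the settable output inspector
    // and convert a sample value (a Java String into a Hadoop Text).
    Converter converter = ObjectInspectorConverters.getConverter(inputOI, convertedOI);
    Object converted = converter.convert("hello");
    System.out.println(convertedOI.getTypeName() + " -> " + converted);
  }
}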
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project Hive by Apache.
The class VectorExpressionWriterFactory, method processVectorInspector:
/**
 * Creates the value writers for a struct object inspector.
 * Creates an appropriate output object inspector.
 */
public static void processVectorInspector(StructObjectInspector structObjInspector, SingleOIDClosure closure) throws HiveException {
  List<? extends StructField> fields = structObjInspector.getAllStructFieldRefs();
  VectorExpressionWriter[] writers = new VectorExpressionWriter[fields.size()];
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>(writers.length);
  ArrayList<String> columnNames = new ArrayList<String>();
  int i = 0;
  for (StructField field : fields) {
    ObjectInspector fieldObjInsp = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()));
    writers[i] = VectorExpressionWriterFactory.genVectorExpressionWritable(fieldObjInsp);
    columnNames.add(field.getFieldName());
    oids.add(writers[i].getObjectInspector());
    i++;
  }
  ObjectInspector objectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
  closure.assign(writers, objectInspector);
}
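Because the result is delivered through a callback, the caller receives both the per-field writers and the rebuilt struct inspector in one shot. A minimal sketch of how that callback might be consumed follows; it assumes SingleOIDClosure is the public nested interface of VectorExpressionWriterFactory with the assign(writers, objectInspector) method used above, and the class name and two-column layout are invented.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class VectorWriterSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical two-column row shape built from writable primitive inspectors.
    List<String> names = Arrays.asList("id", "name");
    List<ObjectInspector> ois = Arrays.asList(
        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    StructObjectInspector structOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);

    // The closure receives one writer per struct field plus the matching output struct inspector.
    VectorExpressionWriterFactory.processVectorInspector(structOI,
        new VectorExpressionWriterFactory.SingleOIDClosure() {
          @Override
          public void assign(VectorExpressionWriter[] writers, ObjectInspector objectInspector) {
            System.out.println(writers.length + " writers, output OI: " + objectInspector.getTypeName());
          }
        });
  }
}

In Hive's vectorized operators the closure typically just stores both arguments in fields of the operator for later use.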
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project Hive by Apache.
The class VectorExpressionWriterFactory, method processVectorExpressions:
/**
 * Creates the value writers for a column vector expression list.
 * Creates an appropriate output object inspector.
 */
public static void processVectorExpressions(List<ExprNodeDesc> nodesDesc, List<String> columnNames, SingleOIDClosure closure) throws HiveException {
  VectorExpressionWriter[] writers = getExpressionWriters(nodesDesc);
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>(writers.length);
  for (int i = 0; i < writers.length; ++i) {
    oids.add(writers[i].getObjectInspector());
  }
  ObjectInspector objectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
  closure.assign(writers, objectInspector);
}