Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class VectorUDAFAvgDecimal, method initPartialResultInspector:
private void initPartialResultInspector() {
  // The output type of the vectorized partial aggregate must match the
  // expected type for the row-mode aggregation.
  // For decimal, the sum field keeps the same number of integer digits and gains 4 more decimal digits.
  DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
  this.sumScale = (short) dtiSum.scale();
  this.sumPrecision = (short) dtiSum.precision();
  List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
  foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
  foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));
  List<String> fname = new ArrayList<String>();
  fname.add("count");
  fname.add("sum");
  soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
}
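The struct built here is consumed as an ordinary standard-struct row, i.e. a List of field values in declaration order. The following standalone sketch (not Hive source; the class name and values are illustrative, and double stands in for the decimal sum to keep it short) shows how such a count/sum partial result can be read back through the same kind of inspector:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;

public class PartialAvgInspectorSketch {
  public static void main(String[] args) {
    // Same shape as above: a long "count" field and a "sum" field
    // (double here instead of decimal, purely for brevity).
    List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
    foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    StandardStructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("count", "sum"), foi);

    // A standard struct row is simply a List (or Object[]) in field order.
    List<Object> partial = Arrays.<Object>asList(new LongWritable(3), new DoubleWritable(42.0));

    StructField countField = soi.getStructFieldRef("count");
    StructField sumField = soi.getStructFieldRef("sum");
    long count = ((LongWritable) soi.getStructFieldData(partial, countField)).get();
    double sum = ((DoubleWritable) soi.getStructFieldData(partial, sumField)).get();
    System.out.println("avg = " + (sum / count)); // avg = 14.0
  }
}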
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class DynamicSerDe, method dynamicSerDeStructBaseToObjectInspector:
public static ObjectInspector dynamicSerDeStructBaseToObjectInspector(DynamicSerDeTypeBase bt) throws SerDeException {
  if (bt.isList()) {
    return ObjectInspectorFactory.getStandardListObjectInspector(
        dynamicSerDeStructBaseToObjectInspector(((DynamicSerDeTypeList) bt).getElementType()));
  } else if (bt.isMap()) {
    DynamicSerDeTypeMap btMap = (DynamicSerDeTypeMap) bt;
    return ObjectInspectorFactory.getStandardMapObjectInspector(
        dynamicSerDeStructBaseToObjectInspector(btMap.getKeyType()),
        dynamicSerDeStructBaseToObjectInspector(btMap.getValueType()));
  } else if (bt.isPrimitive()) {
    PrimitiveTypeEntry pte = PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(bt.getRealType());
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pte.primitiveCategory);
  } else {
    // Must be a struct.
    DynamicSerDeStructBase btStruct = (DynamicSerDeStructBase) bt;
    DynamicSerDeFieldList fieldList = btStruct.getFieldList();
    DynamicSerDeField[] fields = fieldList.getChildren();
    ArrayList<String> fieldNames = new ArrayList<String>(fields.length);
    ArrayList<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fields.length);
    for (DynamicSerDeField field : fields) {
      fieldNames.add(field.name);
      fieldObjectInspectors.add(dynamicSerDeStructBaseToObjectInspector(field.getFieldType().getMyType()));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldObjectInspectors);
  }
}
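The recursion above bottoms out in the standard factory calls, so nested thrift types map directly onto nested standard inspectors. As a point of reference, here is a minimal hand-built sketch (illustrative, not DynamicSerDe code) of the inspector this method would produce for a list<map<string,i32>> field:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class NestedInspectorSketch {
  public static void main(String[] args) {
    // map<string,i32> -> a standard map inspector over Java primitive inspectors
    // (matching the Java inspectors used in the isPrimitive() branch above).
    ObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    // list<map<string,i32>> -> a standard list inspector around the element inspector.
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(mapOI);
    System.out.println(listOI.getTypeName()); // prints the Hive type name: array<map<string,int>>
  }
}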
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class SerdeRandomRowSource, method chooseSchema:
private void chooseSchema() {
  HashSet<Integer> hashSet = null;
  boolean allTypes;
  boolean onlyOne = (r.nextInt(100) == 7);
  if (onlyOne) {
    columnCount = 1;
    allTypes = false;
  } else {
    allTypes = r.nextBoolean();
    if (allTypes) {
      // One of each type.
      columnCount = possibleHiveTypeNames.length;
      hashSet = new HashSet<Integer>();
    } else {
      columnCount = 1 + r.nextInt(20);
    }
  }
  typeNames = new ArrayList<String>(columnCount);
  primitiveCategories = new PrimitiveCategory[columnCount];
  primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
  primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  List<String> columnNames = new ArrayList<String>(columnCount);
  for (int c = 0; c < columnCount; c++) {
    columnNames.add(String.format("col%d", c));
    String typeName;
    if (onlyOne) {
      typeName = possibleHiveTypeNames[r.nextInt(possibleHiveTypeNames.length)];
    } else {
      int typeNum;
      if (allTypes) {
        // Draw until we find a type that has not been used yet.
        while (true) {
          typeNum = r.nextInt(possibleHiveTypeNames.length);
          Integer typeNumInteger = Integer.valueOf(typeNum);
          if (!hashSet.contains(typeNumInteger)) {
            hashSet.add(typeNumInteger);
            break;
          }
        }
      } else {
        typeNum = r.nextInt(possibleHiveTypeNames.length);
      }
      typeName = possibleHiveTypeNames[typeNum];
    }
    if (typeName.equals("char")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("char(%d)", maxLength);
    } else if (typeName.equals("varchar")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("varchar(%d)", maxLength);
    } else if (typeName.equals("decimal")) {
      typeName = String.format("decimal(%d,%d)", HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
    }
    PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    primitiveTypeInfos[c] = primitiveTypeInfo;
    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
    primitiveCategories[c] = primitiveCategory;
    primitiveObjectInspectorList.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo));
    typeNames.add(typeName);
  }
  rowStructObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, primitiveObjectInspectorList);
}
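A short usage sketch (not part of SerdeRandomRowSource; names and values are illustrative) of how a row struct inspector like the one built above is consumed: standard struct rows are plain Lists in column order, and each column value is the Writable matching its inspector:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class RowInspectorUsageSketch {
  public static void main(String[] args) {
    // A two-column schema comparable to what chooseSchema() generates,
    // using writable inspectors as the method above does.
    List<String> columnNames = Arrays.asList("col0", "col1");
    List<ObjectInspector> columnOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    StandardStructObjectInspector rowOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);

    // Rows for a standard struct inspector are plain Lists in column order.
    List<Object> row = Arrays.<Object>asList(new IntWritable(7), new Text("seven"));
    for (StructField field : rowOI.getAllStructFieldRefs()) {
      System.out.println(field.getFieldName() + " = " + rowOI.getStructFieldData(row, field));
    }
    // col0 = 7
    // col1 = seven
  }
}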
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class VectorRandomRowSource, method getObjectInspector:
private ObjectInspector getObjectInspector(TypeInfo typeInfo) {
  final ObjectInspector objectInspector;
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    {
      final PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) typeInfo;
      objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveType);
    }
    break;
  case MAP:
    {
      final MapTypeInfo mapType = (MapTypeInfo) typeInfo;
      final MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(
          getObjectInspector(mapType.getMapKeyTypeInfo()),
          getObjectInspector(mapType.getMapValueTypeInfo()));
      objectInspector = mapInspector;
    }
    break;
  case LIST:
    {
      final ListTypeInfo listType = (ListTypeInfo) typeInfo;
      final ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(
          getObjectInspector(listType.getListElementTypeInfo()));
      objectInspector = listInspector;
    }
    break;
  case STRUCT:
    {
      final StructTypeInfo structType = (StructTypeInfo) typeInfo;
      final List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
      final List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
      for (TypeInfo fieldType : fieldTypes) {
        fieldInspectors.add(getObjectInspector(fieldType));
      }
      final StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
          structType.getAllStructFieldNames(), fieldInspectors);
      objectInspector = structInspector;
    }
    break;
  case UNION:
    {
      final UnionTypeInfo unionType = (UnionTypeInfo) typeInfo;
      final List<TypeInfo> fieldTypes = unionType.getAllUnionObjectTypeInfos();
      final List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
      for (TypeInfo fieldType : fieldTypes) {
        fieldInspectors.add(getObjectInspector(fieldType));
      }
      final UnionObjectInspector unionInspector = ObjectInspectorFactory.getStandardUnionObjectInspector(fieldInspectors);
      objectInspector = unionInspector;
    }
    break;
  default:
    throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
  }
  Preconditions.checkState(objectInspector != null);
  return objectInspector;
}
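For comparison, Hive's TypeInfoUtils offers what should be an equivalent one-call conversion, getStandardWritableObjectInspectorFromTypeInfo; a minimal sketch (illustrative, not VectorRandomRowSource code):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoConversionSketch {
  public static void main(String[] args) {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(
        "struct<a:int,b:array<string>,c:map<string,double>>");
    // One-call equivalent of the recursive conversion above (writable inspectors).
    ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    System.out.println(oi.getTypeName()); // struct<a:int,b:array<string>,c:map<string,double>>
  }
}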
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
From class VectorRandomRowSource, method chooseSchema:
private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) {
  HashSet<Integer> hashSet = null;
  final boolean allTypes;
  final boolean onlyOne = (r.nextInt(100) == 7);
  if (onlyOne) {
    columnCount = 1;
    allTypes = false;
  } else {
    allTypes = r.nextBoolean();
    if (allTypes) {
      switch (supportedTypes) {
      case ALL:
        columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
        break;
      case ALL_EXCEPT_MAP:
        columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
        break;
      case PRIMITIVES:
        columnCount = possibleHivePrimitiveTypeNames.length;
        break;
      }
      hashSet = new HashSet<Integer>();
    } else {
      columnCount = 1 + r.nextInt(20);
    }
  }
  typeNames = new ArrayList<String>(columnCount);
  categories = new Category[columnCount];
  typeInfos = new TypeInfo[columnCount];
  objectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  primitiveCategories = new PrimitiveCategory[columnCount];
  primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
  primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  List<String> columnNames = new ArrayList<String>(columnCount);
  for (int c = 0; c < columnCount; c++) {
    columnNames.add(String.format("col%d", c));
    final String typeName;
    if (onlyOne) {
      typeName = getRandomTypeName(supportedTypes);
    } else {
      int typeNum;
      if (allTypes) {
        int maxTypeNum = 0;
        switch (supportedTypes) {
        case PRIMITIVES:
          maxTypeNum = possibleHivePrimitiveTypeNames.length;
          break;
        case ALL_EXCEPT_MAP:
          maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
          break;
        case ALL:
          maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
          break;
        }
        // Draw until we find a type that has not been used yet.
        while (true) {
          typeNum = r.nextInt(maxTypeNum);
          Integer typeNumInteger = Integer.valueOf(typeNum);
          if (!hashSet.contains(typeNumInteger)) {
            hashSet.add(typeNumInteger);
            break;
          }
        }
      } else {
        if (supportedTypes == SupportedTypes.PRIMITIVES || r.nextInt(10) != 0) {
          typeNum = r.nextInt(possibleHivePrimitiveTypeNames.length);
        } else {
          typeNum = possibleHivePrimitiveTypeNames.length + r.nextInt(possibleHiveComplexTypeNames.length);
          if (supportedTypes == SupportedTypes.ALL_EXCEPT_MAP) {
            typeNum--;
          }
        }
      }
      if (typeNum < possibleHivePrimitiveTypeNames.length) {
        typeName = possibleHivePrimitiveTypeNames[typeNum];
      } else {
        typeName = possibleHiveComplexTypeNames[typeNum - possibleHivePrimitiveTypeNames.length];
      }
    }
    String decoratedTypeName = getDecoratedTypeName(typeName, supportedTypes, 0, maxComplexDepth);
    final TypeInfo typeInfo;
    try {
      typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(decoratedTypeName);
    } catch (Exception e) {
      throw new RuntimeException("Cannot convert type name " + decoratedTypeName + " to a type", e);
    }
    typeInfos[c] = typeInfo;
    final Category category = typeInfo.getCategory();
    categories[c] = category;
    ObjectInspector objectInspector = getObjectInspector(typeInfo);
    switch (category) {
    case PRIMITIVE:
      {
        final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
        objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
        primitiveTypeInfos[c] = primitiveTypeInfo;
        PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
        primitiveCategories[c] = primitiveCategory;
        primitiveObjectInspectorList.add(objectInspector);
      }
      break;
    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
      // Complex columns keep a placeholder in the primitive-inspector list.
      primitiveObjectInspectorList.add(null);
      break;
    default:
      throw new RuntimeException("Unexpected category " + category);
    }
    objectInspectorList.add(objectInspector);
    typeNames.add(decoratedTypeName);
  }
  rowStructObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, objectInspectorList);
  alphabets = new String[columnCount];
}
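As a quick, illustrative check (not Hive source; the two-column schema is an assumption standing in for a randomly chosen one), the struct inspector built this way echoes the chosen schema back through its field refs:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class SchemaEchoSketch {
  public static void main(String[] args) {
    // One primitive column and one complex column, as chooseSchema() can mix.
    List<String> columnNames = Arrays.asList("col0", "col1");
    List<ObjectInspector> columnOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector));
    StructObjectInspector rowOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);

    // The inspector reports back the schema it was built from.
    for (StructField field : rowOI.getAllStructFieldRefs()) {
      System.out.println(field.getFieldName() + " : " + field.getFieldObjectInspector().getTypeName());
    }
    // col0 : int
    // col1 : array<string>
  }
}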