Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class VectorExpressionWriterFactory, method processVectorInspector.
/**
 * Creates the value writers for a struct object inspector.
 * Creates an appropriate output object inspector.
 */
public static void processVectorInspector(StructObjectInspector structObjInspector,
    SingleOIDClosure closure) throws HiveException {
  List<? extends StructField> fields = structObjInspector.getAllStructFieldRefs();
  VectorExpressionWriter[] writers = new VectorExpressionWriter[fields.size()];
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>(writers.length);
  ArrayList<String> columnNames = new ArrayList<String>();
  int i = 0;
  for (StructField field : fields) {
    // Normalize each field to its standard writable object inspector before
    // compiling the per-column writer.
    ObjectInspector fieldObjInsp = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
        TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()));
    writers[i] = VectorExpressionWriterFactory.genVectorExpressionWritable(fieldObjInsp);
    columnNames.add(field.getFieldName());
    oids.add(writers[i].getObjectInspector());
    i++;
  }
  ObjectInspector objectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
  closure.assign(writers, objectInspector);
}
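A minimal sketch of how a caller might consume processVectorInspector, assuming only the SingleOIDClosure interface with the assign signature invoked above; the variable names are illustrative:

// Hypothetical caller: capture the writers and the standard struct
// object inspector handed back through the closure.
final VectorExpressionWriter[][] writersRef = new VectorExpressionWriter[1][];
final ObjectInspector[] outputOiRef = new ObjectInspector[1];
VectorExpressionWriterFactory.processVectorInspector(rowStructObjectInspector,
    new SingleOIDClosure() {
      @Override
      public void assign(VectorExpressionWriter[] writers, ObjectInspector objectInspector) {
        writersRef[0] = writers;
        outputOiRef[0] = objectInspector;
      }
    });
// writersRef[0][i] can now turn column i of a vectorized batch back into
// a writable value described by outputOiRef[0].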
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class VectorExpressionWriterFactory, method genVectorExpressionWritable.
/**
 * Compiles the appropriate vector expression writer based on the
 * output type of a vector expression.
 */
public static VectorExpressionWriter genVectorExpressionWritable(VectorExpression vecExpr)
    throws HiveException {
  TypeInfo outputTypeInfo = vecExpr.getOutputTypeInfo();
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  return genVectorExpressionWritable(objectInspector);
}
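Both overloads funnel a TypeInfo into a standard writable object inspector. A small, hedged illustration of what that resolution yields for a primitive type; the comments describe the expected behavior, not verified output:

// Sketch: resolving a primitive TypeInfo to its writable-backed inspector.
ObjectInspector longOI =
    TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.longTypeInfo);
// longOI is a writable-backed primitive inspector; the writer compiled
// from it via genVectorExpressionWritable(longOI) emits LongWritable values.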
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class VectorRandomRowSource, method randomRow.
public Object[] randomRow() {
  final Object[] row = new Object[columnCount];
  if (generationSpecList == null) {
    for (int c = 0; c < columnCount; c++) {
      row[c] = randomWritable(c);
    }
  } else {
    for (int c = 0; c < columnCount; c++) {
      GenerationSpec generationSpec = generationSpecList.get(c);
      GenerationSpec.GenerationKind generationKind = generationSpec.getGenerationKind();
      Object object;
      switch (generationKind) {
      case SAME_TYPE:
        object = randomWritable(c, generationSpec.getColumnAllowNulls());
        break;
      case OMIT_GENERATION:
        object = null;
        break;
      case STRING_FAMILY:
        {
          TypeInfo typeInfo = generationSpec.getTypeInfo();
          StringGenerationOption stringGenerationOption = generationSpec.getStringGenerationOption();
          object = randomStringFamily(r, typeInfo, stringGenerationOption, true);
        }
        break;
      case STRING_FAMILY_OTHER_TYPE_VALUE:
        {
          TypeInfo typeInfo = generationSpec.getTypeInfo();
          TypeInfo otherTypeTypeInfo = generationSpec.getSourceTypeInfo();
          object = randomStringFamilyOtherTypeValue(r, typeInfo, otherTypeTypeInfo, true);
        }
        break;
      case TIMESTAMP_MILLISECONDS:
        {
          LongWritable longWritable = (LongWritable) randomWritable(c);
          if (longWritable != null) {
            // Resample until the milliseconds value falls within a four-digit year.
            while (true) {
              long longValue = longWritable.get();
              if (longValue >= MIN_FOUR_DIGIT_YEAR_MILLIS && longValue <= MAX_FOUR_DIGIT_YEAR_MILLIS) {
                break;
              }
              longWritable.set(
                  (Long) VectorRandomRowSource.randomPrimitiveObject(r, TypeInfoFactory.longTypeInfo));
            }
          }
          object = longWritable;
        }
        break;
      case VALUE_LIST:
        {
          List<Object> valueList = generationSpec.getValueList();
          final int valueCount = valueList.size();
          TypeInfo typeInfo = generationSpec.getTypeInfo();
          Category category = typeInfo.getCategory();
          switch (category) {
          case PRIMITIVE:
          case STRUCT:
            object = valueList.get(r.nextInt(valueCount));
            break;
          case LIST:
            {
              final int elementCount = r.nextInt(valueCount);
              ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
              TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
              final ObjectInspector elementObjectInspector =
                  TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(elementTypeInfo);
              List<Object> list = new ArrayList<Object>(elementCount);
              for (int i = 0; i < elementCount; i++) {
                Object elementWritable = randomWritable(elementTypeInfo, elementObjectInspector, allowNull);
                list.add(elementWritable);
              }
              object = list;
            }
            break;
          case MAP:
            {
              final int elementCount = r.nextInt(valueCount);
              MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
              TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
              final ObjectInspector valueObjectInspector =
                  TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(valueTypeInfo);
              Map<Object, Object> map = new HashMap<Object, Object>(elementCount);
              for (int i = 0; i < elementCount; i++) {
                Object key = valueList.get(r.nextInt(valueCount));
                Object valueWritable = randomWritable(valueTypeInfo, valueObjectInspector, allowNull);
                // Keep the first value generated for a duplicate key.
                if (!map.containsKey(key)) {
                  map.put(key, valueWritable);
                }
              }
              object = map;
            }
            break;
          default:
            throw new RuntimeException("Unexpected category " + category);
          }
        }
        break;
      default:
        throw new RuntimeException("Unexpected generationKind " + generationKind);
      }
      row[c] = object;
    }
  }
  return row;
}
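The TIMESTAMP_MILLISECONDS branch resamples until the candidate lands in the four-digit-year window. A standalone sketch of that rejection-sampling idiom; the method name and bounds are illustrative, not Hive's:

// Redraw a random long until it falls inside [min, max]. Practical only
// when the accepted window is not vanishingly small relative to the full
// long range; the four-digit-year window above is wide enough for this.
static long sampleInRange(java.util.Random r, long min, long max) {
  long value = r.nextLong();
  while (value < min || value > max) {
    value = r.nextLong();
  }
  return value;
}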
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class AggregationBase, method extractResultObjects.
private static void extractResultObjects(VectorizedRowBatch outputBatch, short[] keys,
    VectorExtractRow resultVectorExtractRow, TypeInfo outputTypeInfo, Object[] scratchRow,
    Object[] results) {
  final boolean isPrimitive = (outputTypeInfo instanceof PrimitiveTypeInfo);
  ObjectInspector objectInspector;
  if (isPrimitive) {
    objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  } else {
    objectInspector = null;
  }
  for (int batchIndex = 0; batchIndex < outputBatch.size; batchIndex++) {
    resultVectorExtractRow.extractRow(outputBatch, batchIndex, scratchRow);
    if (isPrimitive) {
      // Deep-copy the writable so reuse of the scratch row on the next
      // iteration cannot overwrite the stored result.
      Object copyResult = ObjectInspectorUtils.copyToStandardObject(
          scratchRow[0], objectInspector, ObjectInspectorCopyOption.WRITABLE);
      results[keys[batchIndex]] = copyResult;
    } else {
      results[keys[batchIndex]] = scratchRow[0];
    }
  }
}
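The WRITABLE copy option matters here because resultVectorExtractRow writes into the same scratch row on every batch iteration; without a deep copy, later rows would clobber earlier results. A hedged sketch of the same call on a single cell, where extractedCell is a hypothetical stand-in for scratchRow[0]:

// Deep-copy one extracted primitive so later scratch-row reuse cannot
// overwrite it. extractedCell stands in for scratchRow[0].
ObjectInspector doubleOI =
    TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.doubleTypeInfo);
Object safeCopy = ObjectInspectorUtils.copyToStandardObject(
    extractedCell, doubleOI, ObjectInspectorCopyOption.WRITABLE);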
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
The class AggregationBase, method verifyAggregationResults.
protected void verifyAggregationResults(TypeInfo typeInfo, TypeInfo outputTypeInfo,
    int maxKeyCount, GenericUDAFEvaluator.Mode udafEvaluatorMode, Object[] resultsArray) {
  // Row-mode results are the expected baseline.
  Object[] expectedResults = (Object[]) resultsArray[0];
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  for (int v = 1; v < AggregationTestMode.count; v++) {
    Object[] vectorResults = (Object[]) resultsArray[v];
    for (short key = 0; key < maxKeyCount + 1; key++) {
      Object expectedResult = expectedResults[key];
      Object vectorResult = vectorResults[key];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Key " + key + " typeName " + typeInfo.getTypeName() +
              " outputTypeName " + outputTypeInfo.getTypeName() + " " +
              AggregationTestMode.values()[v] +
              " result is NULL " +
              (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) +
              " udafEvaluatorMode " + udafEvaluatorMode);
        }
      } else {
        if (!compareObjects(expectedResult, vectorResult, outputTypeInfo, objectInspector)) {
          Assert.fail("Key " + key + " typeName " + typeInfo.getTypeName() +
              " outputTypeName " + outputTypeInfo.getTypeName() + " " +
              AggregationTestMode.values()[v] +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " udafEvaluatorMode " + udafEvaluatorMode);
        }
      }
    }
  }
}
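The null handling above reduces to a symmetry check: two nulls match, exactly one null fails, and only the both-non-null case reaches compareObjects. A compact sketch of that predicate; the helper name is illustrative:

// True when expected and actual agree on nullness: both null or both
// non-null. Exactly one null is a mismatch.
static boolean nullnessMatches(Object expected, Object actual) {
  return (expected == null) == (actual == null);
}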