Example use of org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector in the Apache Hive project: the setOutputCol method of the VectorUDFAdaptor class.
/**
 * Assigns a single evaluated UDF result into row {@code i} of the output column vector.
 *
 * Dispatches on {@code outputOI} (the output ObjectInspector) to decide how to unwrap
 * {@code value}: each branch accepts either the already-unwrapped Java value (e.g. a plain
 * {@code Integer}) or the corresponding Writable, which is unwrapped via the inspector.
 *
 * NOTE(review): assumes {@code value} is non-null — every branch dereferences it, and the
 * final error message calls {@code value.getClass()}. Presumably callers handle SQL NULL
 * before reaching here; TODO confirm.
 *
 * @param colVec the output vector; its concrete type must match {@code outputOI}
 *               (BytesColumnVector for string-family types, LongColumnVector for
 *               integral/boolean/date, DoubleColumnVector for float/double, etc.)
 * @param i      the row index within the batch to write
 * @param value  the UDF result, either a Java object or its Writable wrapper
 */
private void setOutputCol(ColumnVector colVec, int i, Object value) {
/* Depending on the output type, get the value, cast the result to the
* correct type if needed, and assign the result into the output vector.
*/
if (outputOI instanceof WritableStringObjectInspector) {
BytesColumnVector bv = (BytesColumnVector) colVec;
Text t;
if (value instanceof String) {
t = new Text((String) value);
} else {
t = ((WritableStringObjectInspector) outputOI).getPrimitiveWritableObject(value);
}
bv.setVal(i, t.getBytes(), 0, t.getLength());
} else if (outputOI instanceof WritableHiveCharObjectInspector) {
WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI;
int maxLength = ((CharTypeInfo) writableHiveCharObjectOI.getTypeInfo()).getLength();
BytesColumnVector bv = (BytesColumnVector) colVec;
HiveCharWritable hiveCharWritable;
if (value instanceof HiveCharWritable) {
hiveCharWritable = ((HiveCharWritable) value);
} else {
hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value);
}
Text t = hiveCharWritable.getTextValue();
// In vector mode, we stored CHAR as unpadded.
StringExpr.rightTrimAndTruncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength);
} else if (outputOI instanceof WritableHiveVarcharObjectInspector) {
WritableHiveVarcharObjectInspector writableHiveVarcharObjectOI = (WritableHiveVarcharObjectInspector) outputOI;
int maxLength = ((VarcharTypeInfo) writableHiveVarcharObjectOI.getTypeInfo()).getLength();
BytesColumnVector bv = (BytesColumnVector) colVec;
HiveVarcharWritable hiveVarcharWritable;
if (value instanceof HiveVarcharWritable) {
hiveVarcharWritable = ((HiveVarcharWritable) value);
} else {
hiveVarcharWritable = writableHiveVarcharObjectOI.getPrimitiveWritableObject(value);
}
Text t = hiveVarcharWritable.getTextValue();
// VARCHAR is only truncated to its declared max length, never trimmed.
StringExpr.truncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength);
} else if (outputOI instanceof WritableIntObjectInspector) {
// int/long/short/byte/boolean/date all land in a LongColumnVector.
LongColumnVector lv = (LongColumnVector) colVec;
if (value instanceof Integer) {
lv.vector[i] = (Integer) value;
} else {
lv.vector[i] = ((WritableIntObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableLongObjectInspector) {
LongColumnVector lv = (LongColumnVector) colVec;
if (value instanceof Long) {
lv.vector[i] = (Long) value;
} else {
lv.vector[i] = ((WritableLongObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableDoubleObjectInspector) {
DoubleColumnVector dv = (DoubleColumnVector) colVec;
if (value instanceof Double) {
dv.vector[i] = (Double) value;
} else {
dv.vector[i] = ((WritableDoubleObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableFloatObjectInspector) {
// FLOAT is widened to double for the vector representation.
DoubleColumnVector dv = (DoubleColumnVector) colVec;
if (value instanceof Float) {
dv.vector[i] = (Float) value;
} else {
dv.vector[i] = ((WritableFloatObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableShortObjectInspector) {
LongColumnVector lv = (LongColumnVector) colVec;
if (value instanceof Short) {
lv.vector[i] = (Short) value;
} else {
lv.vector[i] = ((WritableShortObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableByteObjectInspector) {
LongColumnVector lv = (LongColumnVector) colVec;
if (value instanceof Byte) {
lv.vector[i] = (Byte) value;
} else {
lv.vector[i] = ((WritableByteObjectInspector) outputOI).get(value);
}
} else if (outputOI instanceof WritableTimestampObjectInspector) {
TimestampColumnVector tv = (TimestampColumnVector) colVec;
Timestamp ts;
if (value instanceof Timestamp) {
ts = (Timestamp) value;
} else {
ts = ((WritableTimestampObjectInspector) outputOI).getPrimitiveJavaObject(value);
}
tv.set(i, ts);
} else if (outputOI instanceof WritableDateObjectInspector) {
// DATE is stored as a day offset (days since epoch) in a LongColumnVector.
LongColumnVector lv = (LongColumnVector) colVec;
Date ts;
if (value instanceof Date) {
ts = (Date) value;
} else {
ts = ((WritableDateObjectInspector) outputOI).getPrimitiveJavaObject(value);
}
long l = DateWritable.dateToDays(ts);
lv.vector[i] = l;
} else if (outputOI instanceof WritableBooleanObjectInspector) {
// Booleans are encoded as 1/0 in the long vector.
LongColumnVector lv = (LongColumnVector) colVec;
if (value instanceof Boolean) {
lv.vector[i] = (Boolean) value ? 1 : 0;
} else {
lv.vector[i] = ((WritableBooleanObjectInspector) outputOI).get(value) ? 1 : 0;
}
} else if (outputOI instanceof WritableHiveDecimalObjectInspector) {
DecimalColumnVector dcv = (DecimalColumnVector) colVec;
if (value instanceof HiveDecimal) {
dcv.set(i, (HiveDecimal) value);
} else {
HiveDecimal hd = ((WritableHiveDecimalObjectInspector) outputOI).getPrimitiveJavaObject(value);
dcv.set(i, hd);
}
} else if (outputOI instanceof WritableBinaryObjectInspector) {
// NOTE(review): unlike the other branches, BINARY casts value directly to
// BytesWritable with no alternate unwrapping path — presumably the UDF always
// returns a BytesWritable here; TODO confirm.
BytesWritable bw = (BytesWritable) value;
BytesColumnVector bv = (BytesColumnVector) colVec;
bv.setVal(i, bw.getBytes(), 0, bw.getLength());
} else {
throw new RuntimeException("Unhandled object type " + outputOI.getTypeName() + " inspector class " + outputOI.getClass().getName() + " value class " + value.getClass().getName());
}
}
Example use of org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector in the Apache Hive project: the getWritableObject method of the VectorRandomRowSource class.
/**
 * Wraps a plain Java value into its Writable representation for the given column.
 *
 * Dispatches on the column's PrimitiveCategory and uses the column's ObjectInspector
 * (from {@code primitiveObjectInspectorList}) to create the Writable. For the
 * parameterized types CHAR, VARCHAR, and DECIMAL, a fresh inspector is constructed
 * from the column's PrimitiveTypeInfo so the type parameters (length/precision/scale)
 * are honored; BINARY always uses the shared factory inspector.
 *
 * @param column                       index of the column being converted
 * @param object                       the plain Java value to wrap (cast per category;
 *                                     primitive categories use unboxing casts)
 * @param primitiveObjectInspectorList per-column writable ObjectInspectors
 * @param primitiveCategories          per-column primitive categories
 * @param primitiveTypeInfos           per-column type infos (used for CHAR/VARCHAR/DECIMAL)
 * @return the Writable wrapper produced by the matching inspector
 * @throws Error if the category is not one of the handled primitive categories
 */
public static Object getWritableObject(int column, Object object, List<ObjectInspector> primitiveObjectInspectorList, PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos) {
ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
PrimitiveCategory primitiveCategory = primitiveCategories[column];
PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
switch(primitiveCategory) {
case BOOLEAN:
return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
case BYTE:
return ((WritableByteObjectInspector) objectInspector).create((byte) object);
case SHORT:
return ((WritableShortObjectInspector) objectInspector).create((short) object);
case INT:
return ((WritableIntObjectInspector) objectInspector).create((int) object);
case LONG:
return ((WritableLongObjectInspector) objectInspector).create((long) object);
case DATE:
return ((WritableDateObjectInspector) objectInspector).create((Date) object);
case FLOAT:
return ((WritableFloatObjectInspector) objectInspector).create((float) object);
case DOUBLE:
return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
case STRING:
return ((WritableStringObjectInspector) objectInspector).create((String) object);
case CHAR:
{
// CHAR carries a max length, so build an inspector from this column's type info.
WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
return writableCharObjectInspector.create((HiveChar) object);
}
case VARCHAR:
{
// VARCHAR likewise needs its declared max length from the type info.
WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
return writableVarcharObjectInspector.create((HiveVarchar) object);
}
case BINARY:
return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
case TIMESTAMP:
return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
case INTERVAL_YEAR_MONTH:
return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
case INTERVAL_DAY_TIME:
return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
case DECIMAL:
{
// DECIMAL needs precision/scale from the type info.
WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
HiveDecimalWritable result = (HiveDecimalWritable) writableDecimalObjectInspector.create((HiveDecimal) object);
return result;
}
default:
throw new Error("Unknown primitive category " + primitiveCategory);
}
}
Example use of org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector in the Apache Hive project: the deserializeDeltaFileRow method of the TestStreaming class.
// Assumes stored data schema = [acid fields],string,int,string
// return array of 6 fields, where the last field has the actual data
/**
 * Deserializes one delta-file row into a six-element Object array.
 *
 * Assumes the stored data schema is [acid fields],string,int,string: fields 0 and 2 are
 * ints, fields 1, 3, and 4 are longs, and field 5 is a struct holding the actual data,
 * which is decoded into a SampleRec by deserializeInner.
 *
 * @param row       the serialized row object
 * @param inspector the struct inspector describing the row's six fields
 * @return { int, long, int, long, long, SampleRec } (boxed)
 */
private static Object[] deserializeDeltaFileRow(Object row, StructObjectInspector inspector) {
  List<? extends StructField> refs = inspector.getAllStructFieldRefs();
  // Pull the raw per-field data out of the row first.
  Object d0 = inspector.getStructFieldData(row, refs.get(0));
  Object d1 = inspector.getStructFieldData(row, refs.get(1));
  Object d2 = inspector.getStructFieldData(row, refs.get(2));
  Object d3 = inspector.getStructFieldData(row, refs.get(3));
  Object d4 = inspector.getStructFieldData(row, refs.get(4));
  Object d5 = inspector.getStructFieldData(row, refs.get(5));
  // Then unwrap each field with its own inspector.
  int v0 = ((WritableIntObjectInspector) refs.get(0).getFieldObjectInspector()).get(d0);
  long v1 = ((WritableLongObjectInspector) refs.get(1).getFieldObjectInspector()).get(d1);
  int v2 = ((WritableIntObjectInspector) refs.get(2).getFieldObjectInspector()).get(d2);
  long v3 = ((WritableLongObjectInspector) refs.get(3).getFieldObjectInspector()).get(d3);
  long v4 = ((WritableLongObjectInspector) refs.get(4).getFieldObjectInspector()).get(d4);
  // The last field carries the payload; decode it into a SampleRec.
  SampleRec v5 =
      deserializeInner(d5, (StructObjectInspector) refs.get(5).getFieldObjectInspector());
  return new Object[] { v0, v1, v2, v3, v4, v5 };
}
Example use of org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector in the Apache Hive project: the doIndexOnRandomDataType method of the TestVectorIndex class.
/**
 * Exercises the GenericUDFIndex UDF (list/map element access) against randomly generated
 * rows, comparing row-mode evaluation against the vector adaptor and native vector
 * expression modes.
 *
 * Fixes in this revision: removed a leftover debug statement
 * ({@code System.out.println("here")}) and an unused local
 * ({@code GenerationSpec.createSameType(typeInfo)} whose result was never added to the
 * generation spec list).
 *
 * @param elementRootTypeName root type for the list element / map value; decorated randomly
 * @param allowNulls          whether generated rows may contain nulls
 * @param isScalarIndex       if true, the index is a constant expression; otherwise a column
 * @return true if all modes agree for every row; false if a sub-test reported failure
 *         (mismatches raise Assert.fail instead)
 */
private boolean doIndexOnRandomDataType(Random random, boolean isList, String keyTypeName,
    String elementRootTypeName, boolean allowNulls, boolean isScalarIndex) throws Exception {

  String elementTypeName = VectorRandomRowSource.getDecoratedTypeName(
      random, elementRootTypeName, SupportedTypes.ALL,
      /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 3);
  TypeInfo elementTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(elementTypeName);
  ObjectInspector elementObjectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(elementTypeInfo);

  // ----------------------------------------------------------------------------------------------
  // Lists are always indexed by int; maps by the caller-supplied key type.
  final TypeInfo keyTypeInfo;
  if (isList) {
    keyTypeInfo = TypeInfoFactory.intTypeInfo;
  } else {
    keyTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(keyTypeName);
  }
  final ObjectInspector keyObjectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(keyTypeInfo);

  // An example key writable, used only to pick the right WritableComparator class.
  Object exampleObject =
      (isList
          ? ((WritableIntObjectInspector) keyObjectInspector).create(0)
          : VectorRandomRowSource.randomWritable(
              random, keyTypeInfo, keyObjectInspector, DataTypePhysicalVariation.NONE,
              /* allowNull */ false));
  WritableComparator writableComparator =
      WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());

  // Build a pool of distinct candidate keys (allKeyList), then sample a subset (keyList).
  final int allKeyCount = 10 + random.nextInt(10);
  final int keyCount = 5 + random.nextInt(allKeyCount / 2);
  List<Object> allKeyList = new ArrayList<Object>(allKeyCount);
  Set<Object> allKeyTreeSet = new TreeSet<Object>(writableComparator);
  int fillAllKeyCount = 0;
  while (fillAllKeyCount < allKeyCount) {
    Object object;
    if (isList) {
      WritableIntObjectInspector writableOI = (WritableIntObjectInspector) keyObjectInspector;
      int index = random.nextInt(keyCount);
      object = writableOI.create(index);
      // On collision, retry with an arbitrary int, or deliberately out-of-range
      // values (-1 or keyCount) to exercise index-out-of-bounds behavior.
      while (allKeyTreeSet.contains(object)) {
        index = (random.nextBoolean() ? random.nextInt() : (random.nextBoolean() ? -1 : keyCount));
        object = writableOI.create(index);
      }
    } else {
      do {
        object = VectorRandomRowSource.randomWritable(
            random, keyTypeInfo, keyObjectInspector, DataTypePhysicalVariation.NONE,
            /* allowNull */ false);
      } while (allKeyTreeSet.contains(object));
    }
    allKeyList.add(object);
    allKeyTreeSet.add(object);
    fillAllKeyCount++;
  }
  List<Object> keyList = new ArrayList<Object>();
  Set<Object> keyTreeSet = new TreeSet<Object>(writableComparator);
  int fillKeyCount = 0;
  while (fillKeyCount < keyCount) {
    Object newKey = allKeyList.get(random.nextInt(allKeyCount));
    if (keyTreeSet.contains(newKey)) {
      continue;
    }
    keyList.add(newKey);
    keyTreeSet.add(newKey);
    fillKeyCount++;
  }

  // ----------------------------------------------------------------------------------------------
  // Assemble the collection type under test: list<element> or map<key, element>.
  final TypeInfo typeInfo;
  if (isList) {
    ListTypeInfo listTypeInfo = new ListTypeInfo();
    listTypeInfo.setListElementTypeInfo(elementTypeInfo);
    typeInfo = listTypeInfo;
  } else {
    MapTypeInfo mapTypeInfo = new MapTypeInfo();
    mapTypeInfo.setMapKeyTypeInfo(keyTypeInfo);
    mapTypeInfo.setMapValueTypeInfo(elementTypeInfo);
    typeInfo = mapTypeInfo;
  }
  final String typeName = typeInfo.getTypeName();
  final ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);

  // ----------------------------------------------------------------------------------------------
  // Build the expression: either a key column or a constant scalar index.
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  int columnNum = 1;
  ExprNodeDesc keyColExpr;
  if (!isScalarIndex) {
    generationSpecList.add(GenerationSpec.createValueList(keyTypeInfo, keyList));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + columnNum++;
    columns.add(columnName);
    keyColExpr = new ExprNodeColumnDesc(keyTypeInfo, columnName, "table", false);
  } else {
    Object scalarWritable = keyList.get(random.nextInt(keyCount));
    final Object scalarObject =
        VectorRandomRowSource.getNonWritableObject(scalarWritable, keyTypeInfo, keyObjectInspector);
    keyColExpr = new ExprNodeConstantDesc(keyTypeInfo, scalarObject);
  }
  /*
  System.out.println("*DEBUG* typeName " + typeName);
  System.out.println("*DEBUG* keyColExpr " + keyColExpr.toString());
  System.out.println("*DEBUG* keyList " + keyList.toString());
  System.out.println("*DEBUG* allKeyList " + allKeyList.toString());
  */
  generationSpecList.add(GenerationSpec.createValueList(typeInfo, keyList));
  explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  String columnName = "col" + columnNum++;
  columns.add(columnName);
  ExprNodeDesc listOrMapColExpr;
  listOrMapColExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
  children.add(listOrMapColExpr);
  children.add(keyColExpr);

  // Generate the random row batches.
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(
      random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ allowNulls,
      /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);

  // Initialize the index UDF with the collection and key inspectors.
  final GenericUDF udf = new GenericUDFIndex();
  ObjectInspector[] argumentOIs = new ObjectInspector[2];
  argumentOIs[0] = objectInspector;
  argumentOIs[1] = keyObjectInspector;
  final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(elementTypeInfo, udf, children);

  // Evaluate in each test mode, collecting per-row results.
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[IndexTestMode.count][];
  for (int i = 0; i < IndexTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    IndexTestMode indexTestMode = IndexTestMode.values()[i];
    switch (indexTestMode) {
    case ROW_MODE:
      if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows,
          rowSource.rowStructObjectInspector(), elementObjectInspector, outputTypeInfo,
          resultObjects)) {
        return false;
      }
      break;
    case ADAPTOR:
    case VECTOR_EXPRESSION:
      if (!doVectorCastTest(typeInfo, columns, columnNames, rowSource.typeInfos(),
          rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, indexTestMode,
          batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
        return false;
      }
      break;
    default:
      throw new RuntimeException("Unexpected IF statement test mode " + indexTestMode);
    }
  }

  // Row-mode (index 0) is the reference; every vector mode must match it exactly.
  for (int i = 0; i < rowCount; i++) {
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < IndexTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      IndexTestMode indexTestMode = IndexTestMode.values()[v];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
        }
      }
    }
  }
  return true;
}
Example use of org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector in the Apache Hive project: the deserializeDeltaFileRow method of the TestStreaming class.
// Assumes stored data schema = [acid fields],string,int,string
// return array of 6 fields, where the last field has the actual data
/**
 * Deserializes one delta-file row into a six-element Object array.
 *
 * Assumes the stored data schema is [acid fields],string,int,string: fields 0 and 2
 * are ints, fields 1, 3, and 4 are longs, and field 5 is a struct holding the actual
 * data, decoded into a SampleRec by deserializeInner.
 *
 * @param row       the serialized row object
 * @param inspector the struct inspector describing the row's six fields
 * @return { int, long, int, long, long, SampleRec } (boxed)
 */
private static Object[] deserializeDeltaFileRow(Object row, StructObjectInspector inspector) {
List<? extends StructField> fields = inspector.getAllStructFieldRefs();
// Per-field inspectors, cast to the expected writable primitive types.
WritableIntObjectInspector f0ins = (WritableIntObjectInspector) fields.get(0).getFieldObjectInspector();
WritableLongObjectInspector f1ins = (WritableLongObjectInspector) fields.get(1).getFieldObjectInspector();
WritableIntObjectInspector f2ins = (WritableIntObjectInspector) fields.get(2).getFieldObjectInspector();
WritableLongObjectInspector f3ins = (WritableLongObjectInspector) fields.get(3).getFieldObjectInspector();
WritableLongObjectInspector f4ins = (WritableLongObjectInspector) fields.get(4).getFieldObjectInspector();
StructObjectInspector f5ins = (StructObjectInspector) fields.get(5).getFieldObjectInspector();
// Extract and unwrap each field's value.
int f0 = f0ins.get(inspector.getStructFieldData(row, fields.get(0)));
long f1 = f1ins.get(inspector.getStructFieldData(row, fields.get(1)));
int f2 = f2ins.get(inspector.getStructFieldData(row, fields.get(2)));
long f3 = f3ins.get(inspector.getStructFieldData(row, fields.get(3)));
long f4 = f4ins.get(inspector.getStructFieldData(row, fields.get(4)));
// Field 5 carries the payload struct; decode it into a SampleRec.
SampleRec f5 = deserializeInner(inspector.getStructFieldData(row, fields.get(5)), f5ins);
return new Object[] { f0, f1, f2, f3, f4, f5 };
}
Aggregations