Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
The class TypedBytesSerDe, method initialize.
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
  // We can get the table definition from tbl.
  serializeBytesWritable = new BytesWritable();
  barrStr = new NonSyncDataOutputBuffer();
  tbOut = new TypedBytesWritableOutput(barrStr);
  inBarrStr = new NonSyncDataInputBuffer();
  tbIn = new TypedBytesWritableInput(inBarrStr);
  // Read the configuration parameters.
  String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
  String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
  final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
      ? tbl.getProperty(serdeConstants.COLUMN_NAME_DELIMITER)
      : String.valueOf(SerDeUtils.COMMA);
  columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
  columnTypes = null;
  if (columnTypeProperty.length() == 0) {
    columnTypes = new ArrayList<TypeInfo>();
  } else {
    columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
  }
  assert columnNames.size() == columnTypes.size();
  numColumns = columnNames.size();
  // All columns have to be primitive.
  for (int c = 0; c < numColumns; c++) {
    if (columnTypes.get(c).getCategory() != Category.PRIMITIVE) {
      throw new SerDeException(getClass().getName()
          + " only accepts primitive columns, but column[" + c + "] named "
          + columnNames.get(c) + " has category " + columnTypes.get(c).getCategory());
    }
  }
  // Constructing the row ObjectInspector: each column gets the standard
  // writable ObjectInspector for its declared primitive type.
  List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
  for (int c = 0; c < numColumns; c++) {
    columnOIs.add(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(columnTypes.get(c)));
  }
  // StandardStruct uses ArrayList to store the row.
  rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);
  // Construct the row object, which will be reused for all rows.
  row = new ArrayList<Object>(numColumns);
  for (int c = 0; c < numColumns; c++) {
    row.add(null);
  }
}
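For context, a minimal sketch of how this initializer might be driven. The property keys are the real serdeConstants names the method reads; the column names and types and the demo class itself are invented, and the two-argument initialize signature matches the snippet above (newer Hive versions use a different SerDe initialization API).
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe;
import org.apache.hadoop.hive.serde.serdeConstants;

public class TypedBytesSerDeInitDemo {
  public static void main(String[] args) throws Exception {
    Properties tbl = new Properties();
    // Two primitive columns; a list/map/struct type here would make
    // initialize() throw the SerDeException seen above.
    tbl.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int:string");
    TypedBytesSerDe serde = new TypedBytesSerDe();
    serde.initialize(new Configuration(), tbl);
    // Prints the struct type built from the two columns.
    System.out.println(serde.getObjectInspector().getTypeName());
  }
}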
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
The class TypedBytesSerDe, method deserializeField.
static Object deserializeField(TypedBytesWritableInput in, TypeInfo type, Object reuse)
    throws IOException {
  // Read the type.
  Class<? extends Writable> writableType = in.readType();
  if (writableType != null && writableType.isAssignableFrom(NullWritable.class)) {
    // Indicates that the recorded value is null.
    return null;
  }
  switch (type.getCategory()) {
  case PRIMITIVE: {
    PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
    switch (ptype.getPrimitiveCategory()) {
    case VOID: {
      return null;
    }
    case BOOLEAN: {
      BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
      r = in.readBoolean(r);
      return r;
    }
    case BYTE: {
      ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
      r = in.readByte(r);
      return r;
    }
    case SHORT: {
      ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
      r = in.readShort(r);
      return r;
    }
    case INT: {
      IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
      r = in.readInt(r);
      return r;
    }
    case LONG: {
      LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
      r = in.readLong(r);
      return r;
    }
    case FLOAT: {
      FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
      r = in.readFloat(r);
      return r;
    }
    case DOUBLE: {
      DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
      r = in.readDouble(r);
      return r;
    }
    case STRING: {
      Text r = reuse == null ? new Text() : (Text) reuse;
      r = in.readText(r);
      return r;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
    }
    }
  }
  // Currently, deserialization of complex types is not supported.
  case LIST:
  case MAP:
  case STRUCT:
  default: {
    throw new RuntimeException("Unsupported category: " + type.getCategory());
  }
  }
}
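A small round-trip sketch of the wire pattern deserializeField() reads: a type marker followed by the raw payload. This is an illustration, not the SerDe's own path; the buffers are plain Hadoop DataOutputBuffer/DataInputBuffer rather than the NonSyncData*Buffer variants used above, and the demo class name is invented.
import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableOutput;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class TypedBytesRoundTrip {
  public static void main(String[] args) throws Exception {
    // Encode an int in typed-bytes form: a type code followed by the value.
    DataOutputBuffer out = new DataOutputBuffer();
    new TypedBytesWritableOutput(out).write(new IntWritable(42));

    // Decode with the same call sequence deserializeField() uses:
    // readType() consumes the type marker, readInt() fills the reused Writable.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    TypedBytesWritableInput tbIn = new TypedBytesWritableInput(in);
    tbIn.readType();
    IntWritable reuse = new IntWritable();
    System.out.println(tbIn.readInt(reuse)); // 42
  }
}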
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
The class GenericUDFBaseCompare, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  argumentOIs = arguments;
  for (int i = 0; i < arguments.length; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " is found");
    }
  }
  if (TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]).equals(TypeInfoFactory.stringTypeInfo)
      && TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]).equals(TypeInfoFactory.stringTypeInfo)) {
    soi0 = (StringObjectInspector) arguments[0];
    soi1 = (StringObjectInspector) arguments[1];
    if (soi0.preferWritable() || soi1.preferWritable()) {
      compareType = CompareType.COMPARE_TEXT;
    } else {
      compareType = CompareType.COMPARE_STRING;
    }
  } else if (TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]).equals(TypeInfoFactory.intTypeInfo)
      && TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]).equals(TypeInfoFactory.intTypeInfo)) {
    compareType = CompareType.COMPARE_INT;
    ioi0 = (IntObjectInspector) arguments[0];
    ioi1 = (IntObjectInspector) arguments[1];
  } else if (TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]).equals(TypeInfoFactory.longTypeInfo)
      && TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]).equals(TypeInfoFactory.longTypeInfo)) {
    compareType = CompareType.COMPARE_LONG;
    loi0 = (LongObjectInspector) arguments[0];
    loi1 = (LongObjectInspector) arguments[1];
  } else if (TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]).equals(TypeInfoFactory.byteTypeInfo)
      && TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]).equals(TypeInfoFactory.byteTypeInfo)) {
    compareType = CompareType.COMPARE_BYTE;
    byoi0 = (ByteObjectInspector) arguments[0];
    byoi1 = (ByteObjectInspector) arguments[1];
  } else if (TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]).equals(TypeInfoFactory.booleanTypeInfo)
      && TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]).equals(TypeInfoFactory.booleanTypeInfo)) {
    compareType = CompareType.COMPARE_BOOL;
    boi0 = (BooleanObjectInspector) arguments[0];
    boi1 = (BooleanObjectInspector) arguments[1];
  } else {
    TypeInfo oiTypeInfo0 = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]);
    TypeInfo oiTypeInfo1 = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[1]);
    if (oiTypeInfo0 == oiTypeInfo1 || TypeInfoUtils.doPrimitiveCategoriesMatch(oiTypeInfo0, oiTypeInfo1)) {
      compareType = CompareType.SAME_TYPE;
    } else {
      compareType = CompareType.NEED_CONVERT;
      // Note: this local TypeInfo shadows the compareType field set just above.
      TypeInfo compareType = FunctionRegistry.getCommonClassForComparison(oiTypeInfo0, oiTypeInfo1);
      // For now, we always convert to double if we can't find a common type.
      compareOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
          (compareType == null) ? TypeInfoFactory.doubleTypeInfo : compareType);
      converter0 = ObjectInspectorConverters.getConverter(arguments[0], compareOI);
      converter1 = ObjectInspectorConverters.getConverter(arguments[1], compareOI);
    }
  }
  return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
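To see the specialization in action, this sketch runs the equality subclass GenericUDFOPEqual over two int inspectors, so the initialize() above takes the COMPARE_INT branch; the demo class name is invented.
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class CompareUdfDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFOPEqual eq = new GenericUDFOPEqual();
    ObjectInspector[] ois = {
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    // Both sides are int, so the int branch above is taken.
    eq.initialize(ois);
    GenericUDF.DeferredObject[] row = {
        new GenericUDF.DeferredJavaObject(new IntWritable(3)),
        new GenericUDF.DeferredJavaObject(new IntWritable(3)) };
    // The result inspector is writableBooleanObjectInspector,
    // so a BooleanWritable comes back; prints true.
    System.out.println(eq.evaluate(row));
  }
}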
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
The class GenericUDFBaseUnary, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException(opName + " requires one argument.");
  }
  Category category = arguments[0].getCategory();
  if (category != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a "
        + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
        + category.toString().toLowerCase() + " is found");
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())
      && (inputOI.getTypeInfo() != TypeInfoFactory.intervalDayTimeTypeInfo)
      && (inputOI.getTypeInfo() != TypeInfoFactory.intervalYearMonthTypeInfo)) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a numeric or interval type, but "
        + inputOI.getTypeName() + " is found");
  }
  PrimitiveTypeInfo resultTypeInfo = deriveResultTypeInfo(inputOI.getTypeInfo());
  resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
  converter = ObjectInspectorConverters.getConverter(inputOI, resultOI);
  return resultOI;
}
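As a concrete caller, a sketch using the unary-minus subclass GenericUDFOPNegative: an int argument passes the numeric check above. The demo class name is invented.
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class UnaryUdfDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFOPNegative neg = new GenericUDFOPNegative();
    ObjectInspector[] ois = { PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    // int is numeric, so initialize() returns the writable int inspector.
    neg.initialize(ois);
    GenericUDF.DeferredObject[] row = {
        new GenericUDF.DeferredJavaObject(new IntWritable(5)) };
    System.out.println(neg.evaluate(row)); // -5
  }
}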
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
The class GenericUDFNullif, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  argumentOIs = arguments;
  checkArgsSize(arguments, 2, 2);
  returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
  returnOIResolver.update(arguments[0]);
  boolean isPrimitive = (arguments[0] instanceof PrimitiveObjectInspector);
  if (isPrimitive) {
    PrimitiveObjectInspector primitive0 = (PrimitiveObjectInspector) arguments[0];
    PrimitiveObjectInspector primitive1 = (PrimitiveObjectInspector) arguments[1];
    PrimitiveGrouping pcat0 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(primitive0.getPrimitiveCategory());
    PrimitiveGrouping pcat1 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(primitive1.getPrimitiveCategory());
    if (pcat0 == PrimitiveGrouping.VOID_GROUP) {
      throw new UDFArgumentTypeException(0,
          "NULLIF may not accept types belonging to " + pcat0 + " as first argument");
    }
    if (pcat1 != PrimitiveGrouping.VOID_GROUP && pcat0 != pcat1) {
      throw new UDFArgumentTypeException(1, "The expressions after NULLIF should belong to the same category: \""
          + pcat0 + "\" is expected but \"" + pcat1 + "\" is found");
    }
  } else {
    String typeName0 = arguments[0].getTypeName();
    String typeName1 = arguments[1].getTypeName();
    if (!typeName0.equals(typeName1)) {
      throw new UDFArgumentTypeException(1, "The expressions after NULLIF should all have the same type: \""
          + typeName0 + "\" is expected but \"" + typeName1 + "\" is found");
    }
  }
  return returnOIResolver.get();
}
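A usage sketch for the validation above: two int arguments share a primitive grouping, so initialize() succeeds, and NULLIF of two equal values evaluates to null. The demo class name is invented.
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNullif;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class NullifDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFNullif nullif = new GenericUDFNullif();
    ObjectInspector[] ois = {
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    // Same primitive grouping on both sides, so no UDFArgumentTypeException.
    nullif.initialize(ois);
    GenericUDF.DeferredObject[] row = {
        new GenericUDF.DeferredJavaObject(new IntWritable(7)),
        new GenericUDF.DeferredJavaObject(new IntWritable(7)) };
    System.out.println(nullif.evaluate(row)); // null, because the two sides are equal
  }
}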