Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFTrunc, method initializeDate.
private ObjectInspector initializeDate(ObjectInspector[] arguments) throws UDFArgumentLengthException, UDFArgumentTypeException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
}
if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as first argument");
}
if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as second argument");
}
ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
switch(inputType1) {
case STRING:
case VARCHAR:
case CHAR:
case VOID:
inputType1 = PrimitiveCategory.STRING;
textConverter1 = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
break;
case TIMESTAMP:
timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
case DATE:
dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
break;
default:
throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
}
inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
throw new UDFArgumentTypeException(1, "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
}
inputType2 = PrimitiveCategory.STRING;
if (arguments[1] instanceof ConstantObjectInspector) {
Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
fmtInput = obj != null ? obj.toString() : null;
} else {
textConverter2 = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
return outputOI;
}
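The ConstantObjectInspector branch at the end is what lets trunc() capture a literal format string once at initialization instead of converting it on every row. A minimal driver sketch of that path (not from the Hive sources; the class name is illustrative, and it assumes the (PrimitiveTypeInfo, value) overload of getPrimitiveWritableConstantObjectInspector plus the fact that initialize() routes a string/string argument pair into initializeDate):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class TruncDateInitSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFTrunc udf = new GenericUDFTrunc();
    // First argument: a non-constant string column inspector.
    ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    // Second argument: a literal "MM" wrapped in a ConstantObjectInspector, so
    // initializeDate() reads fmtInput once and never allocates textConverter2.
    ObjectInspector fmtOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("MM"));
    ObjectInspector outputOI = udf.initialize(new ObjectInspector[] { dateOI, fmtOI });
    System.out.println(outputOI.getTypeName()); // string
  }
}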
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFLikeAll, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length < 2) {
throw new UDFArgumentLengthException("The like all operator requires at least one pattern for matching, got " + (arguments.length - 1));
}
inputTypes = new PrimitiveCategory[arguments.length];
converters = new Converter[arguments.length];
/**
 * Expects string or null arguments.
 */
for (int idx = 0; idx < arguments.length; idx++) {
checkArgPrimitive(arguments, idx);
checkArgGroups(arguments, idx, inputTypes, PrimitiveGrouping.STRING_GROUP, PrimitiveGrouping.VOID_GROUP);
PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory();
if (arguments[idx] instanceof ConstantObjectInspector && idx != 0) {
Object constValue = ((ConstantObjectInspector) arguments[idx]).getWritableConstantValue();
if (!isConstantNullPatternContain && constValue == null) {
isConstantNullPatternContain = true;
}
} else if (idx != 0 && isAllPatternsConstant) {
isAllPatternsConstant = false;
}
converters[idx] = ObjectInspectorConverters.getConverter(arguments[idx], getOutputOI(inputType));
inputTypes[idx] = inputType;
}
return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
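Because initialize() probes each pattern argument for ConstantObjectInspector, an all-literal pattern list is detected up front (isAllPatternsConstant) and a constant NULL pattern is flagged once (isConstantNullPatternContain) rather than per row. A hedged usage sketch, again assuming the (PrimitiveTypeInfo, value) constant-inspector factory overload; the class name is illustrative:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLikeAll;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Text;

public class LikeAllSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFLikeAll udf = new GenericUDFLikeAll();
    // Position 0 is the value column; positions 1..n are the patterns.
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    // Constant patterns: initialize() sees ConstantObjectInspector instances
    // at positions 1..n, so isAllPatternsConstant stays true.
    ObjectInspector p1 = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("%ive%"));
    ObjectInspector p2 = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("h%"));
    udf.initialize(new ObjectInspector[] { valueOI, p1, p2 });
    DeferredObject[] row = new DeferredObject[] {
        new DeferredJavaObject(new Text("hive")),
        new DeferredJavaObject(new Text("%ive%")),
        new DeferredJavaObject(new Text("h%")) };
    BooleanWritable result = (BooleanWritable) udf.evaluate(row);
    System.out.println(result.get()); // true: "hive" matches both patterns
  }
}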
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class VectorizationContext, method getStructInExpression.
private VectorExpression getStructInExpression(List<ExprNodeDesc> childExpr, ExprNodeDesc colExpr, TypeInfo colTypeInfo, List<ExprNodeDesc> inChildren, VectorExpressionDescriptor.Mode mode, TypeInfo returnType) throws HiveException {
VectorExpression expr = null;
StructTypeInfo structTypeInfo = (StructTypeInfo) colTypeInfo;
ArrayList<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
final int fieldCount = fieldTypeInfos.size();
ColumnVector.Type[] fieldVectorColumnTypes = new ColumnVector.Type[fieldCount];
InConstantType[] fieldInConstantTypes = new InConstantType[fieldCount];
for (int f = 0; f < fieldCount; f++) {
TypeInfo fieldTypeInfo = fieldTypeInfos.get(f);
// Only primitive fields are supported for now.
if (fieldTypeInfo.getCategory() != Category.PRIMITIVE) {
return null;
}
// We are going to serialize using the 4 basic types.
ColumnVector.Type fieldVectorColumnType = getColumnVectorTypeFromTypeInfo(fieldTypeInfo);
fieldVectorColumnTypes[f] = fieldVectorColumnType;
// We currently evaluate the IN (..) constants in special ways.
PrimitiveCategory fieldPrimitiveCategory = ((PrimitiveTypeInfo) fieldTypeInfo).getPrimitiveCategory();
InConstantType inConstantType = getInConstantTypeFromPrimitiveCategory(fieldPrimitiveCategory);
fieldInConstantTypes[f] = inConstantType;
}
Output buffer = new Output();
BinarySortableSerializeWrite binarySortableSerializeWrite = new BinarySortableSerializeWrite(fieldCount);
final int inChildrenCount = inChildren.size();
byte[][] serializedInChildren = new byte[inChildrenCount][];
try {
for (int i = 0; i < inChildrenCount; i++) {
final ExprNodeDesc node = inChildren.get(i);
final Object[] constants;
if (node instanceof ExprNodeConstantDesc) {
ExprNodeConstantDesc constNode = (ExprNodeConstantDesc) node;
ConstantObjectInspector output = constNode.getWritableObjectInspector();
constants = ((List<?>) output.getWritableConstantValue()).toArray();
} else {
ExprNodeGenericFuncDesc exprNode = (ExprNodeGenericFuncDesc) node;
ExprNodeEvaluator<?> evaluator = ExprNodeEvaluatorFactory.get(exprNode);
ObjectInspector output = evaluator.initialize(exprNode.getWritableObjectInspector());
constants = (Object[]) evaluator.evaluate(null);
}
binarySortableSerializeWrite.set(buffer);
for (int f = 0; f < fieldCount; f++) {
Object constant = constants[f];
if (constant == null) {
binarySortableSerializeWrite.writeNull();
} else {
InConstantType inConstantType = fieldInConstantTypes[f];
switch(inConstantType) {
case STRING_FAMILY:
{
byte[] bytes;
if (constant instanceof Text) {
Text text = (Text) constant;
bytes = text.getBytes();
binarySortableSerializeWrite.writeString(bytes, 0, text.getLength());
} else {
throw new HiveException("Unexpected constant String type " + constant.getClass().getSimpleName());
}
}
break;
case INT_FAMILY:
{
long value;
if (constant instanceof IntWritable) {
value = ((IntWritable) constant).get();
} else if (constant instanceof LongWritable) {
value = ((LongWritable) constant).get();
} else {
throw new HiveException("Unexpected constant Long type " + constant.getClass().getSimpleName());
}
binarySortableSerializeWrite.writeLong(value);
}
break;
case FLOAT_FAMILY:
{
double value;
if (constant instanceof DoubleWritable) {
value = ((DoubleWritable) constant).get();
} else {
throw new HiveException("Unexpected constant Double type " + constant.getClass().getSimpleName());
}
binarySortableSerializeWrite.writeDouble(value);
}
break;
// UNDONE...
case DATE:
case TIMESTAMP:
case DECIMAL:
default:
throw new RuntimeException("Unexpected IN constant type " + inConstantType.name());
}
}
}
serializedInChildren[i] = Arrays.copyOfRange(buffer.getData(), 0, buffer.getLength());
}
} catch (Exception e) {
throw new HiveException(e);
}
// Create a single child representing the scratch column where we will
// generate the serialized keys of the batch.
int scratchBytesCol = ocm.allocateOutputColumn(TypeInfoFactory.stringTypeInfo);
Class<?> cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterStructColumnInList.class : StructColumnInList.class);
expr = createVectorExpression(cl, null, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
((IStringInExpr) expr).setInListValues(serializedInChildren);
((IStructInExpr) expr).setScratchBytesColumn(scratchBytesCol);
((IStructInExpr) expr).setStructColumnExprs(this, colExpr.getChildren(), fieldVectorColumnTypes);
return expr;
}
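The serialization loop above reduces each struct constant in the IN list to a single binary-sortable byte key, so the vectorized expression can match rows by comparing byte arrays instead of walking struct fields. The following standalone sketch replays that step for a hypothetical struct constant (100, "abc"), using only the serde2 calls already visible in the method; the class name is illustrative:

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;
import org.apache.hadoop.io.Text;

public class StructKeySketch {
  public static void main(String[] args) throws Exception {
    // Serialize the struct constant (100, "abc") the way the IN-list entries
    // are serialized: one binary-sortable key per struct, two fields here.
    BinarySortableSerializeWrite write = new BinarySortableSerializeWrite(2);
    Output buffer = new Output();
    write.set(buffer);
    write.writeLong(100L); // INT_FAMILY fields are widened to long
    Text text = new Text("abc");
    write.writeString(text.getBytes(), 0, text.getLength()); // STRING_FAMILY field
    byte[] key = Arrays.copyOfRange(buffer.getData(), 0, buffer.getLength());
    System.out.println(key.length + " bytes"); // the serialized IN-list key
  }
}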
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFTrunc, method initializeNumber.
private ObjectInspector initializeNumber(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length < 1 || arguments.length > 2) {
throw new UDFArgumentLengthException("TRUNC requires one or two argument, got " + arguments.length);
}
if (arguments[0].getCategory() != Category.PRIMITIVE) {
throw new UDFArgumentTypeException(0, "TRUNC input only takes primitive types, got " + arguments[0].getTypeName());
}
inputOI = (PrimitiveObjectInspector) arguments[0];
if (arguments.length == 2) {
if (arguments[1].getCategory() != Category.PRIMITIVE) {
throw new UDFArgumentTypeException(1, "TRUNC second argument only takes primitive types, got " + arguments[1].getTypeName());
}
inputScaleOI = (PrimitiveObjectInspector) arguments[1];
inputSacleConst = arguments[1] instanceof ConstantObjectInspector;
if (inputSacleConst) {
try {
Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
fmtInput = obj != null ? obj.toString() : null;
scale = Integer.parseInt(fmtInput);
} catch (Exception e) {
throw new UDFArgumentException("TRUNC input only takes integer values, got " + fmtInput);
}
} else {
switch(inputScaleOI.getPrimitiveCategory()) {
case BYTE:
byteConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
break;
case SHORT:
shortConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
break;
case INT:
intConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
break;
case LONG:
longConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
break;
default:
throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes integer values");
}
}
}
inputType1 = inputOI.getPrimitiveCategory();
ObjectInspector outputOI = null;
switch(inputType1) {
case DECIMAL:
outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
break;
case VOID:
case BYTE:
case SHORT:
case INT:
case LONG:
case FLOAT:
case DOUBLE:
outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
break;
default:
throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
}
return outputOI;
}
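When the scale argument is a literal, the inputSacleConst branch parses it a single time; otherwise a per-row converter is selected by the scale argument's primitive category. A minimal sketch of the constant-scale path (the (PrimitiveTypeInfo, value) constant-inspector factory overload is assumed, as before, along with standard trunc semantics for the printed result; the class name is illustrative):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class TruncNumberSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFTrunc udf = new GenericUDFTrunc();
    ObjectInspector numOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    // A literal scale: inputSacleConst becomes true and the scale is parsed once.
    ObjectInspector scaleOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(1));
    udf.initialize(new ObjectInspector[] { numOI, scaleOI });
    DeferredObject[] row = new DeferredObject[] {
        new DeferredJavaObject(new DoubleWritable(1234.567)),
        new DeferredJavaObject(new IntWritable(1)) };
    System.out.println(udf.evaluate(row)); // 1234.5
  }
}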
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFStruct, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
int numFields = arguments.length;
ret = new Object[numFields];
ArrayList<String> fname = new ArrayList<String>(numFields);
for (int f = 1; f <= numFields; f++) {
fname.add("col" + f);
}
boolean constantStruct = true;
for (int i = 0; i < arguments.length; i++) {
ObjectInspector oi = arguments[i];
constantStruct &= (oi.getCategory() == Category.PRIMITIVE) && (oi instanceof ConstantObjectInspector);
if (constantStruct) {
// nested complex types trigger Kryo issue #216 in plan deserialization
ret[i] = ((ConstantObjectInspector) oi).getWritableConstantValue();
}
}
if (constantStruct) {
return ObjectInspectorFactory.getStandardConstantStructObjectInspector(fname, Arrays.asList(arguments), Arrays.asList(ret));
} else {
return ObjectInspectorFactory.getStandardStructObjectInspector(fname, Arrays.asList(arguments));
}
}
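When every input is a constant primitive, initialize() returns a constant struct inspector, which lets the planner fold the whole struct() call at compile time (the comment about Kryo issue #216 explains why this is restricted to primitive fields). A short sketch of that folding, under the same factory-overload assumption; the class name is illustrative:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class ConstantStructSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFStruct udf = new GenericUDFStruct();
    ObjectInspector a = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(7));
    ObjectInspector b = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("x"));
    ObjectInspector out = udf.initialize(new ObjectInspector[] { a, b });
    // Both inputs are constant primitives, so the result is itself constant.
    System.out.println(out instanceof ConstantObjectInspector); // true
    System.out.println(out.getTypeName()); // struct<col1:int,col2:string>
  }
}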