Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
From the class TypeCheckProcFactory, method toPrimitiveConstDesc.
private static ExprNodeConstantDesc toPrimitiveConstDesc(ColumnInfo colInfo, ObjectInspector inspector) {
  PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
  Object constant = ((ConstantObjectInspector) inspector).getWritableConstantValue();
  ExprNodeConstantDesc constantExpr =
      new ExprNodeConstantDesc(colInfo.getType(), poi.getPrimitiveJavaObject(constant));
  constantExpr.setFoldedFromCol(colInfo.getInternalName());
  return constantExpr;
}
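For context, here is a minimal, hypothetical driver (the class name ConstantOIDemo is ours, not Hive's) showing the same unwrap step in isolation; it assumes the hive-serde and hadoop-common jars are on the classpath, and builds the constant inspector with the same factory call used in getConstantObjectInspector below.

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class ConstantOIDemo {
  public static void main(String[] args) {
    // A constant OI that always reports the writable value 42.
    ConstantObjectInspector coi =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(42));
    // Unwrap the writable constant into a plain Java object,
    // exactly as toPrimitiveConstDesc does above.
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) coi;
    Object javaValue = poi.getPrimitiveJavaObject(coi.getWritableConstantValue());
    System.out.println(javaValue); // prints 42
  }
}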
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
From the class RexNodeConverter, method convert.
protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
  RexBuilder rexBuilder = cluster.getRexBuilder();
  RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
  PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
  RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
  PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
  ConstantObjectInspector coi = literal.getWritableObjectInspector();
  Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
  RexNode calciteLiteral = null;
  // If value is null, the type should also be VOID.
  if (value == null) {
    hiveTypeCategory = PrimitiveCategory.VOID;
  }
  // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
  switch (hiveTypeCategory) {
    case BOOLEAN:
      calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
      break;
    case BYTE:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
      break;
    case SHORT:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
      break;
    case INT:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
      break;
    case LONG:
      calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
      break;
    // TODO: is Decimal an exact numeric or approximate numeric?
    case DECIMAL:
      if (value instanceof HiveDecimal) {
        value = ((HiveDecimal) value).bigDecimalValue();
      } else if (value instanceof Decimal128) {
        value = ((Decimal128) value).toBigDecimal();
      }
      if (value == null) {
        // For now, we will not run CBO in the presence of invalid decimal literals.
        throw new CalciteSemanticException("Expression " + literal.getExprString()
            + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
        // TODO: return createNullLiteral(literal);
      }
      BigDecimal bd = (BigDecimal) value;
      BigInteger unscaled = bd.unscaledValue();
      int precision = unscaled.abs().toString().length();
      int scale = bd.scale();
      RelDataType relType;
      if (precision > scale) {
        // bd is at least 1 in absolute value
        relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, precision, scale);
      } else {
        // bd is less than 1 in absolute value
        relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, scale + 1, scale);
      }
      calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
      break;
    case FLOAT:
      calciteLiteral = rexBuilder.makeApproxLiteral(
          new BigDecimal(Float.toString((Float) value)), calciteDataType);
      break;
    case DOUBLE:
      // TODO: The best solution is to support NaN in expression reduction.
      if (Double.isNaN((Double) value)) {
        throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
      }
      calciteLiteral = rexBuilder.makeApproxLiteral(
          new BigDecimal(Double.toString((Double) value)), calciteDataType);
      break;
    case CHAR:
      if (value instanceof HiveChar) {
        value = ((HiveChar) value).getValue();
      }
      calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
      break;
    case VARCHAR:
      if (value instanceof HiveVarchar) {
        value = ((HiveVarchar) value).getValue();
      }
      calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
      break;
    case STRING:
      calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
      break;
    case DATE:
      final Calendar cal = Calendar.getInstance(Locale.getDefault());
      cal.setTime((Date) value);
      calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
      break;
    case TIMESTAMP:
      final TimestampString tsString;
      if (value instanceof Calendar) {
        tsString = TimestampString.fromCalendarFields((Calendar) value);
      } else {
        final Timestamp ts = (Timestamp) value;
        final Calendar calt = Calendar.getInstance(Locale.getDefault());
        calt.setTimeInMillis(ts.getTime());
        tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
      }
      // Must call makeLiteral, not makeTimestampLiteral,
      // to have the RexBuilder.roundTime logic kick in.
      calciteLiteral = rexBuilder.makeLiteral(tsString,
          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP,
              rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)),
          false);
      break;
    case TIMESTAMPLOCALTZ:
      final TimestampString tsLocalTZString;
      if (value == null) {
        tsLocalTZString = null;
      } else {
        Instant i = ((TimestampTZ) value).getZonedDateTime().toInstant();
        tsLocalTZString = TimestampString.fromMillisSinceEpoch(i.toEpochMilli()).withNanos(i.getNano());
      }
      calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(tsLocalTZString,
          rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
      break;
    case INTERVAL_YEAR_MONTH:
      // Calcite year-month literal value is months as BigDecimal
      BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
      calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
          new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
      break;
    case INTERVAL_DAY_TIME:
      // Calcite day-time interval is millis value as BigDecimal
      // Seconds converted to millis
      BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
      // Nanos converted to millis
      BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
      calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
          new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
      break;
    case VOID:
      calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
          cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
      break;
    case BINARY:
    case UNKNOWN:
    default:
      throw new RuntimeException("Unsupported literal");
  }
  return calciteLiteral;
}
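To make the precision/scale derivation in the DECIMAL branch concrete, here is a self-contained sketch using only the JDK (the class name is hypothetical): the precision is the digit count of the unscaled value, and values below 1 in absolute value get one extra digit of precision to account for the leading zero.

import java.math.BigDecimal;

public class DecimalTypeDemo {
  public static void main(String[] args) {
    for (String s : new String[] {"12.5", "0.25", "-0.005"}) {
      BigDecimal bd = new BigDecimal(s);
      // Same derivation as the DECIMAL branch above.
      int precision = bd.unscaledValue().abs().toString().length();
      int scale = bd.scale();
      String type = precision > scale
          ? "DECIMAL(" + precision + "," + scale + ")"   // abs(bd) >= 1
          : "DECIMAL(" + (scale + 1) + "," + scale + ")"; // abs(bd) < 1
      System.out.println(s + " -> " + type);
      // 12.5 -> DECIMAL(3,1); 0.25 -> DECIMAL(3,2); -0.005 -> DECIMAL(4,3)
    }
  }
}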
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
From the class ObjectInspectorUtils, method getConstantObjectInspector.
public static ConstantObjectInspector getConstantObjectInspector(ObjectInspector oi, Object value) {
  if (oi instanceof ConstantObjectInspector) {
    return (ConstantObjectInspector) oi;
  }
  ObjectInspector writableOI = getStandardObjectInspector(oi, ObjectInspectorCopyOption.WRITABLE);
  Object writableValue = value == null ? value
      : ObjectInspectorConverters.getConverter(oi, writableOI).convert(value);
  switch (writableOI.getCategory()) {
    case PRIMITIVE:
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          poi.getTypeInfo(), writableValue);
    case LIST:
      ListObjectInspector loi = (ListObjectInspector) oi;
      return ObjectInspectorFactory.getStandardConstantListObjectInspector(
          getStandardObjectInspector(loi.getListElementObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          (List<?>) writableValue);
    case MAP:
      MapObjectInspector moi = (MapObjectInspector) oi;
      return ObjectInspectorFactory.getStandardConstantMapObjectInspector(
          getStandardObjectInspector(moi.getMapKeyObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          getStandardObjectInspector(moi.getMapValueObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          (Map<?, ?>) writableValue);
    case STRUCT:
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      List<String> fieldNames = new ArrayList<String>(fields.size());
      List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fields.size());
      for (StructField f : fields) {
        fieldNames.add(f.getFieldName());
        fieldObjectInspectors.add(
            getStandardObjectInspector(f.getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE));
      }
      if (value != null && (writableValue.getClass().isArray())) {
        writableValue = java.util.Arrays.asList((Object[]) writableValue);
      }
      return ObjectInspectorFactory.getStandardConstantStructObjectInspector(
          fieldNames, fieldObjectInspectors, (List<?>) writableValue);
    default:
      throw new IllegalArgumentException(writableOI.getCategory() + " not yet supported for constant OI");
  }
}
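A short usage sketch of this utility, assuming hive-serde and hadoop-common on the classpath (the demo class is hypothetical): wrapping a standard list inspector together with a concrete value yields a constant list inspector whose writable value has already been converted.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConstantListOIDemo {
  public static void main(String[] args) {
    // A standard (non-constant) list<int> inspector over Java Integers.
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    // The utility converts the Java value to writables and returns a
    // constant list inspector carrying it.
    ConstantObjectInspector coi = ObjectInspectorUtils.getConstantObjectInspector(
        listOI, Arrays.asList(1, 2, 3));
    System.out.println(coi.getWritableConstantValue()); // [1, 2, 3]
  }
}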
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
From the class ConstantPropagateProcFactory, method evaluateFunction.
/**
 * Evaluate a UDF over constant arguments.
 *
 * @param udf UDF object
 * @param exprs the current (possibly already folded) argument expressions
 * @param oldExprs the original argument expressions, used to detect type changes
 * @return the evaluated ExprNodeConstantDesc, or null if the expression cannot be
 *         evaluated (not all parameters are constants)
 */
private static ExprNodeDesc evaluateFunction(GenericUDF udf, List<ExprNodeDesc> exprs,
    List<ExprNodeDesc> oldExprs) {
  DeferredJavaObject[] arguments = new DeferredJavaObject[exprs.size()];
  ObjectInspector[] argois = new ObjectInspector[exprs.size()];
  for (int i = 0; i < exprs.size(); i++) {
    ExprNodeDesc desc = exprs.get(i);
    if (desc instanceof ExprNodeConstantDesc) {
      ExprNodeConstantDesc constant = (ExprNodeConstantDesc) exprs.get(i);
      if (!constant.getTypeInfo().equals(oldExprs.get(i).getTypeInfo())) {
        constant = typeCast(constant, oldExprs.get(i).getTypeInfo());
        if (constant == null) {
          return null;
        }
      }
      if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE) {
        // nested complex types cannot be folded cleanly
        return null;
      }
      Object value = constant.getValue();
      PrimitiveTypeInfo pti = (PrimitiveTypeInfo) constant.getTypeInfo();
      Object writableValue = null == value ? value
          : PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti).getPrimitiveWritableObject(value);
      arguments[i] = new DeferredJavaObject(writableValue);
      argois[i] = ObjectInspectorUtils.getConstantObjectInspector(constant.getWritableObjectInspector(), writableValue);
    } else if (desc instanceof ExprNodeGenericFuncDesc) {
      ExprNodeDesc evaluatedFn = foldExpr((ExprNodeGenericFuncDesc) desc);
      if (null == evaluatedFn || !(evaluatedFn instanceof ExprNodeConstantDesc)) {
        return null;
      }
      ExprNodeConstantDesc constant = (ExprNodeConstantDesc) evaluatedFn;
      if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE) {
        // nested complex types cannot be folded cleanly
        return null;
      }
      Object writableValue = PrimitiveObjectInspectorFactory
          .getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) constant.getTypeInfo())
          .getPrimitiveWritableObject(constant.getValue());
      arguments[i] = new DeferredJavaObject(writableValue);
      argois[i] = ObjectInspectorUtils.getConstantObjectInspector(constant.getWritableObjectInspector(), writableValue);
    } else {
      return null;
    }
  }
  try {
    ObjectInspector oi = udf.initialize(argois);
    Object o = udf.evaluate(arguments);
    if (LOG.isDebugEnabled()) {
      LOG.debug(udf.getClass().getName() + "(" + exprs + ")=" + o);
    }
    if (o == null) {
      return new ExprNodeConstantDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(oi), o);
    }
    Class<?> clz = o.getClass();
    if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      TypeInfo typeInfo = poi.getTypeInfo();
      o = poi.getPrimitiveJavaObject(o);
      if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME)
          || typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME)
          || typeInfo.getTypeName().contains(serdeConstants.CHAR_TYPE_NAME)
          || typeInfo.getTypeName().contains(serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME)) {
        return new ExprNodeConstantDesc(typeInfo, o);
      }
    } else if (udf instanceof GenericUDFStruct && oi instanceof StandardConstantStructObjectInspector) {
      // do not fold named_struct, only struct()
      ConstantObjectInspector coi = (ConstantObjectInspector) oi;
      TypeInfo structType = TypeInfoUtils.getTypeInfoFromObjectInspector(coi);
      return new ExprNodeConstantDesc(structType, ObjectInspectorUtils.copyToStandardJavaObject(o, coi));
    } else if (!PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(clz)) {
      if (LOG.isErrorEnabled()) {
        LOG.error("Unable to evaluate " + udf + ". Return value unrecognizable.");
      }
      return null;
    } else {
      // fall through
    }
    String constStr = null;
    if (arguments.length == 1 && FunctionRegistry.isOpCast(udf)) {
      // remember original string representation of constant.
      constStr = arguments[0].get().toString();
    }
    return new ExprNodeConstantDesc(o).setFoldedFromVal(constStr);
  } catch (HiveException e) {
    LOG.error("Evaluating function " + udf.getClass() + " failed in Constant Propagation Optimizer.");
    throw new RuntimeException(e);
  }
}
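The folding flow above can be exercised directly. Here is a minimal, hypothetical driver (assuming hive-exec and its dependencies on the classpath; GenericUDFConcat stands in for any foldable UDF) that mimics what evaluateFunction does: constant object inspectors go to initialize, and the deferred argument values go to evaluate.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class FoldConcatDemo {
  public static void main(String[] args) throws HiveException {
    GenericUDFConcat udf = new GenericUDFConcat();
    // Constant OIs tell initialize() that both arguments are known up front.
    ObjectInspector[] argOIs = {
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector, new Text("foo")),
        ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector, new Text("bar"))
    };
    udf.initialize(argOIs);
    // Evaluate over the same constant values, wrapped as deferred objects.
    Object result = udf.evaluate(new DeferredJavaObject[] {
        new DeferredJavaObject(new Text("foo")),
        new DeferredJavaObject(new Text("bar"))
    });
    System.out.println(result); // foobar
  }
}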
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
From the class GenericUDFSortArrayByField, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
  returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
  /*
   * This UDF requires a minimum of 2 arguments: array name and field name.
   */
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException("SORT_ARRAY_BY requires minimum 2 arguments, got " + arguments.length);
  }
  /*
   * The first argument must be an array.
   */
  switch (arguments[0].getCategory()) {
    case LIST:
      listObjectInspector = (ListObjectInspector) arguments[0];
      break;
    default:
      throw new UDFArgumentTypeException(0, "Argument 1 of function SORT_ARRAY_BY must be "
          + serdeConstants.LIST_TYPE_NAME + ", but " + arguments[0].getTypeName() + " was found.");
  }
  /*
   * Elements inside the first (array) argument must be structs.
   */
  switch (listObjectInspector.getListElementObjectInspector().getCategory()) {
    case STRUCT:
      structObjectInspector = (StructObjectInspector) listObjectInspector.getListElementObjectInspector();
      break;
    default:
      throw new UDFArgumentTypeException(0, "Element[s] of first argument array in function SORT_ARRAY_BY must be "
          + serdeConstants.STRUCT_TYPE_NAME + ", but " + listObjectInspector.getTypeName() + " was found.");
  }
  /*
   * All sort-field name arguments and the sort-order argument must be of string type.
   */
  converters = new Converter[arguments.length];
  inputTypes = new PrimitiveCategory[arguments.length];
  fields = new StructField[arguments.length - 1];
  noOfInputFields = arguments.length - 1;
  for (int i = 1; i < arguments.length; i++) {
    checkArgPrimitive(arguments, i);
    checkArgGroups(arguments, i, inputTypes, PrimitiveGrouping.STRING_GROUP);
    if (arguments[i] instanceof ConstantObjectInspector) {
      String fieldName = getConstantStringValue(arguments, i);
      /*
       * Check whether a sorting order (ASC, DESC) has been specified in the last argument.
       */
      if (i != 1 && (i == arguments.length - 1)
          && (fieldName.trim().toUpperCase().equals(SORT_ORDER_TYPE.ASC.name())
              || fieldName.trim().toUpperCase().equals(SORT_ORDER_TYPE.DESC.name()))) {
        sortOrder = SORT_ORDER_TYPE.valueOf(fieldName.trim().toUpperCase());
        noOfInputFields -= 1;
        continue;
      }
      fields[i - 1] = structObjectInspector.getStructFieldRef(getConstantStringValue(arguments, i));
    }
    obtainStringConverter(arguments, i, inputTypes, converters);
  }
  ObjectInspector returnOI = returnOIResolver.get(structObjectInspector);
  converters[0] = ObjectInspectorConverters.getConverter(structObjectInspector, returnOI);
  return ObjectInspectorFactory.getStandardListObjectInspector(structObjectInspector);
}
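At the SQL level this corresponds to calls such as sort_array_by(array_of_structs, 'age', 'DESC'). A hypothetical driver (assuming hive-exec on the classpath; the struct layout and class name are ours) showing why the constant inspectors matter: only when the field-name argument arrives as a ConstantObjectInspector can initialize resolve the struct field reference up front.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSortArrayByField;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class SortArrayByInitDemo {
  public static void main(String[] args) throws Exception {
    // Element type: struct<name:string, age:int>
    StructObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("name", "age"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector));
    // First argument: array<struct<name:string,age:int>>
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(structOI);
    // Field name and sort order are passed as constant OIs, mirroring how the
    // planner hands constant literals to the UDF.
    ObjectInspector fieldOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector, new Text("age"));
    ObjectInspector orderOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector, new Text("DESC"));
    GenericUDFSortArrayByField udf = new GenericUDFSortArrayByField();
    ObjectInspector returnOI = udf.initialize(new ObjectInspector[] {listOI, fieldOI, orderOI});
    System.out.println(returnOI.getTypeName()); // array<struct<name:string,age:int>>
  }
}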