Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
Class MyTestPrimitiveClass, method getPrimitiveWritableObject.
public Object getPrimitiveWritableObject(int index, PrimitiveTypeInfo primitiveTypeInfo) {
  int field = 0;
  if (index == field++) {
    return (myBool == null ? null : PrimitiveObjectInspectorFactory.writableBooleanObjectInspector.create((boolean) myBool));
  } else if (index == field++) {
    return (myByte == null ? null : PrimitiveObjectInspectorFactory.writableByteObjectInspector.create((byte) myByte));
  } else if (index == field++) {
    return (myShort == null ? null : PrimitiveObjectInspectorFactory.writableShortObjectInspector.create((short) myShort));
  } else if (index == field++) {
    return (myInt == null ? null : PrimitiveObjectInspectorFactory.writableIntObjectInspector.create((int) myInt));
  } else if (index == field++) {
    return (myLong == null ? null : PrimitiveObjectInspectorFactory.writableLongObjectInspector.create((long) myLong));
  } else if (index == field++) {
    return (myFloat == null ? null : PrimitiveObjectInspectorFactory.writableFloatObjectInspector.create((float) myFloat));
  } else if (index == field++) {
    return (myDouble == null ? null : PrimitiveObjectInspectorFactory.writableDoubleObjectInspector.create((double) myDouble));
  } else if (index == field++) {
    return (myString == null ? null : PrimitiveObjectInspectorFactory.writableStringObjectInspector.create(myString));
  } else if (index == field++) {
    if (myHiveChar == null) {
      return null;
    }
    // char needs an inspector qualified with the type's maximum length.
    CharTypeInfo charTypeInfo = (CharTypeInfo) primitiveTypeInfo;
    WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector(charTypeInfo);
    return writableCharObjectInspector.create(myHiveChar);
  } else if (index == field++) {
    if (myHiveVarchar == null) {
      return null;
    }
    VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) primitiveTypeInfo;
    WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector(varcharTypeInfo);
    return writableVarcharObjectInspector.create(myHiveVarchar);
  } else if (index == field++) {
    return (myBinary == null ? null : PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create(myBinary));
  } else if (index == field++) {
    if (myDecimal == null) {
      return null;
    }
    // decimal likewise needs an inspector qualified with precision and scale.
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
    WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector(decimalTypeInfo);
    return writableDecimalObjectInspector.create(myDecimal);
  } else if (index == field++) {
    return (myDate == null ? null : PrimitiveObjectInspectorFactory.writableDateObjectInspector.create(myDate));
  } else if (index == field++) {
    return (myTimestamp == null ? null : PrimitiveObjectInspectorFactory.writableTimestampObjectInspector.create(myTimestamp));
  } else if (index == field++) {
    return (myIntervalYearMonth == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector.create(myIntervalYearMonth));
  } else if (index == field++) {
    return (myIntervalDayTime == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.create(myIntervalDayTime));
  } else {
    throw new Error("Field " + index + " not handled");
  }
}
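The decimal branch above is the one that actually consumes the passed-in PrimitiveTypeInfo: the inspector must be constructed with the field's precision and scale. A minimal standalone sketch of that branch, with an illustrative decimal(11, 4) type and a made-up value rather than anything from the test class:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;

// Illustrative precision/scale and value.
DecimalTypeInfo decimalTypeInfo = new DecimalTypeInfo(11, 4);
HiveDecimal decimal = HiveDecimal.create("1234567.8901");

// The inspector carries (11, 4); create() wraps the value in a writable
// that reflects that qualified decimal type.
WritableHiveDecimalObjectInspector inspector =
    new WritableHiveDecimalObjectInspector(decimalTypeInfo);
Object writable = inspector.create(decimal);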
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
Class VectorizationContext, method getImplicitCastExpression.
/**
* The GenericUDFs might need their children output to be cast to the given castType.
* This method returns a cast expression that would achieve the required casting.
*/
private ExprNodeDesc getImplicitCastExpression(GenericUDF udf, ExprNodeDesc child, TypeInfo castType) throws HiveException {
  TypeInfo inputTypeInfo = child.getTypeInfo();
  String inputTypeString = inputTypeInfo.getTypeName();
  String castTypeString = castType.getTypeName();
  if (inputTypeString.equals(castTypeString)) {
    // Nothing to be done
    return null;
  }
  boolean inputTypeDecimal = false;
  boolean castTypeDecimal = false;
  if (decimalTypePattern.matcher(inputTypeString).matches()) {
    inputTypeDecimal = true;
  }
  if (decimalTypePattern.matcher(castTypeString).matches()) {
    castTypeDecimal = true;
  }
  if (castTypeDecimal && !inputTypeDecimal) {
    if (needsImplicitCastForDecimal(udf)) {
      // Cast the input to decimal.
      // If castType is decimal, try not to lose precision for numeric types.
      castType = updatePrecision(inputTypeInfo, (DecimalTypeInfo) castType);
      GenericUDFToDecimal castToDecimalUDF = new GenericUDFToDecimal();
      castToDecimalUDF.setTypeInfo(castType);
      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
      children.add(child);
      ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, castToDecimalUDF, children);
      return desc;
    }
  } else if (!castTypeDecimal && inputTypeDecimal) {
    if (needsImplicitCastForDecimal(udf)) {
      // Cast decimal input to returnType
      GenericUDF genericUdf = getGenericUDFForCast(castType);
      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
      children.add(child);
      ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
      return desc;
    }
  } else {
    // Casts to exact types, including long to double etc., are needed in some special cases.
    if (udf instanceof GenericUDFCoalesce || udf instanceof GenericUDFNvl || udf instanceof GenericUDFElt) {
      GenericUDF genericUdf = getGenericUDFForCast(castType);
      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
      children.add(child);
      ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
      return desc;
    }
  }
  return null;
}
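To see what the first (cast-to-decimal) branch builds, here is a hedged standalone sketch that wraps an int-typed constant in the same GenericUDFToDecimal cast. The target type decimal(12, 2) and the constant 42 are illustrative, and the updatePrecision adjustment is skipped:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Illustrative target type and child expression.
DecimalTypeInfo castType = TypeInfoFactory.getDecimalTypeInfo(12, 2);
ExprNodeDesc child = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 42);

// Same wrapping as the branch above: a GenericUDFToDecimal node whose
// output type is the decimal target type.
GenericUDFToDecimal toDecimal = new GenericUDFToDecimal();
toDecimal.setTypeInfo(castType);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(child);
ExprNodeDesc castExpr = new ExprNodeGenericFuncDesc(castType, toDecimal, children);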
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
Class TypeConverter, method convert.
public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
  RelDataType convertedType = null;
  switch (type.getPrimitiveCategory()) {
    case VOID:
      convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
      break;
    case BOOLEAN:
      convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
      break;
    case BYTE:
      convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
      break;
    case SHORT:
      convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
      break;
    case INT:
      convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
      break;
    case LONG:
      convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
      break;
    case FLOAT:
      convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
      break;
    case DOUBLE:
      convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
      break;
    case STRING:
      convertedType = dtFactory.createTypeWithCharsetAndCollation(
          dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      break;
    case DATE:
      convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
      break;
    case TIMESTAMP:
      convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
      break;
    case INTERVAL_YEAR_MONTH:
      convertedType = dtFactory.createSqlIntervalType(
          new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
      break;
    case INTERVAL_DAY_TIME:
      convertedType = dtFactory.createSqlIntervalType(
          new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
      break;
    case BINARY:
      convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
      break;
    case DECIMAL:
      // decimal is the one numeric type that carries type parameters.
      DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
      convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
      break;
    case VARCHAR:
      convertedType = dtFactory.createTypeWithCharsetAndCollation(
          dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      break;
    case CHAR:
      convertedType = dtFactory.createTypeWithCharsetAndCollation(
          dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      break;
    case UNKNOWN:
      convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
      break;
  }
  if (null == convertedType) {
    throw new RuntimeException("Unsupported Type : " + type.getTypeName());
  }
  return dtFactory.createTypeWithNullability(convertedType, true);
}
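As an illustration of the DECIMAL branch, a hedged snippet converting Hive's decimal(10, 2) into a Calcite type; the dtFactory here is assumed to be whatever RelDataTypeFactory the caller has at hand, and the (10, 2) values are illustrative:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Illustrative precision and scale.
DecimalTypeInfo decimalType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
RelDataType relType = TypeConverter.convert(decimalType, dtFactory);
// The result is a SqlTypeName.DECIMAL with precision 10 and scale 2, made
// nullable by the createTypeWithNullability call at the end of convert().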
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
Class DDLSemanticAnalyzer, method getTypeName.
public static String getTypeName(ASTNode node) throws SemanticException {
  int token = node.getType();
  String typeName;
  // datetime type isn't currently supported
  if (token == HiveParser.TOK_DATETIME) {
    throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
  }
  switch (token) {
    case HiveParser.TOK_CHAR:
      CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(node);
      typeName = charTypeInfo.getQualifiedName();
      break;
    case HiveParser.TOK_VARCHAR:
      VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(node);
      typeName = varcharTypeInfo.getQualifiedName();
      break;
    case HiveParser.TOK_DECIMAL:
      DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(node);
      typeName = decTypeInfo.getQualifiedName();
      break;
    default:
      typeName = TokenToTypeName.get(token);
  }
  return typeName;
}
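For the TOK_DECIMAL branch, the qualified name carries the precision and scale parsed from the AST. A short example with illustrative values:

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

DecimalTypeInfo decTypeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
String typeName = decTypeInfo.getQualifiedName();  // "decimal(10,2)"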
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
Class VectorUDAFAvgDecimal, method initPartialResultInspector.
private void initPartialResultInspector() {
  // The output type of the vectorized partial aggregate must match the
  // expected type for the row-mode aggregation. For decimal, the sum type
  // keeps the same number of integer digits and adds 4 more decimal digits.
  DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
  this.sumScale = (short) dtiSum.scale();
  this.sumPrecision = (short) dtiSum.precision();
  List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
  foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
  foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));
  List<String> fname = new ArrayList<String>();
  fname.add("count");
  fname.add("sum");
  soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
}
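A hedged, standalone version of the same (count, sum) struct inspector; the sum type decimal(20, 4) is an illustrative stand-in for whatever deriveSumFieldTypeInfo returns for a given input:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Illustrative sum type; the real code derives it from the input type.
DecimalTypeInfo dtiSum = TypeInfoFactory.getDecimalTypeInfo(20, 4);

List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
fieldInspectors.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
fieldInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));

// A struct inspector over the two partial-aggregate fields.
StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("count", "sum"), fieldInspectors);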