Example 16 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From class MyTestPrimitiveClass, the method getPrimitiveWritableObject:

public Object getPrimitiveWritableObject(int index, PrimitiveTypeInfo primitiveTypeInfo) {
    // Walk the fields in declaration order: each comparison increments 'field',
    // so index 0 is the boolean field, 1 the byte field, and so on.
    int field = 0;
    if (index == field++) {
        return (myBool == null ? null : PrimitiveObjectInspectorFactory.writableBooleanObjectInspector.create((boolean) myBool));
    } else if (index == field++) {
        return (myByte == null ? null : PrimitiveObjectInspectorFactory.writableByteObjectInspector.create((byte) myByte));
    } else if (index == field++) {
        return (myShort == null ? null : PrimitiveObjectInspectorFactory.writableShortObjectInspector.create((short) myShort));
    } else if (index == field++) {
        return (myInt == null ? null : PrimitiveObjectInspectorFactory.writableIntObjectInspector.create((int) myInt));
    } else if (index == field++) {
        return (myLong == null ? null : PrimitiveObjectInspectorFactory.writableLongObjectInspector.create((long) myLong));
    } else if (index == field++) {
        return (myFloat == null ? null : PrimitiveObjectInspectorFactory.writableFloatObjectInspector.create((float) myFloat));
    } else if (index == field++) {
        return (myDouble == null ? null : PrimitiveObjectInspectorFactory.writableDoubleObjectInspector.create((double) myDouble));
    } else if (index == field++) {
        return (myString == null ? null : PrimitiveObjectInspectorFactory.writableStringObjectInspector.create(myString));
    } else if (index == field++) {
        if (myHiveChar == null) {
            return null;
        }
        CharTypeInfo charTypeInfo = (CharTypeInfo) primitiveTypeInfo;
        WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector(charTypeInfo);
        return writableCharObjectInspector.create(myHiveChar);
    } else if (index == field++) {
        if (myHiveVarchar == null) {
            return null;
        }
        VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) primitiveTypeInfo;
        WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector(varcharTypeInfo);
        return writableVarcharObjectInspector.create(myHiveVarchar);
    } else if (index == field++) {
        return (myBinary == null ? null : PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create(myBinary));
    } else if (index == field++) {
        if (myDecimal == null) {
            return null;
        }
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
        WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector(decimalTypeInfo);
        return writableDecimalObjectInspector.create(myDecimal);
    } else if (index == field++) {
        return (myDate == null ? null : PrimitiveObjectInspectorFactory.writableDateObjectInspector.create(myDate));
    } else if (index == field++) {
        return (myTimestamp == null ? null : PrimitiveObjectInspectorFactory.writableTimestampObjectInspector.create(myTimestamp));
    } else if (index == field++) {
        return (myIntervalYearMonth == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector.create(myIntervalYearMonth));
    } else if (index == field++) {
        return (myIntervalDayTime == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.create(myIntervalDayTime));
    } else {
        throw new Error("Field " + " field not handled");
    }
}
Also used: WritableHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector)
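A hedged usage sketch for the decimal branch above: counting the field++ comparisons, the decimal field lands at index 11, and the DecimalTypeInfo argument supplies the precision and scale for the writable inspector. The index and the decimal(10,2) parameters here are illustrative assumptions, not part of the original test.

MyTestPrimitiveClass t = new MyTestPrimitiveClass();
// Index 11 reaches the decimal branch (0 = boolean, 1 = byte, ..., 10 = binary, 11 = decimal).
DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2); // assumed precision/scale
Object writable = t.getPrimitiveWritableObject(11, decimalTypeInfo);
// Expected: a HiveDecimalWritable wrapping myDecimal, or null if myDecimal is null.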

Example 17 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From class VectorizationContext, the method getImplicitCastExpression:

/**
   * The GenericUDFs might need their children's output to be cast to the given castType.
   * This method returns a cast expression that would achieve the required casting,
   * or null if no cast is needed.
   */
private ExprNodeDesc getImplicitCastExpression(GenericUDF udf, ExprNodeDesc child, TypeInfo castType) throws HiveException {
    TypeInfo inputTypeInfo = child.getTypeInfo();
    String inputTypeString = inputTypeInfo.getTypeName();
    String castTypeString = castType.getTypeName();
    if (inputTypeString.equals(castTypeString)) {
        // Nothing to be done
        return null;
    }
    boolean inputTypeDecimal = false;
    boolean castTypeDecimal = false;
    if (decimalTypePattern.matcher(inputTypeString).matches()) {
        inputTypeDecimal = true;
    }
    if (decimalTypePattern.matcher(castTypeString).matches()) {
        castTypeDecimal = true;
    }
    if (castTypeDecimal && !inputTypeDecimal) {
        if (needsImplicitCastForDecimal(udf)) {
            // Cast the input to decimal
            // If castType is decimal, try not to lose precision for numeric types.
            castType = updatePrecision(inputTypeInfo, (DecimalTypeInfo) castType);
            GenericUDFToDecimal castToDecimalUDF = new GenericUDFToDecimal();
            castToDecimalUDF.setTypeInfo(castType);
            List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
            children.add(child);
            ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, castToDecimalUDF, children);
            return desc;
        }
    } else if (!castTypeDecimal && inputTypeDecimal) {
        if (needsImplicitCastForDecimal(udf)) {
            // Cast decimal input to returnType
            GenericUDF genericUdf = getGenericUDFForCast(castType);
            List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
            children.add(child);
            ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
            return desc;
        }
    } else {
        // Casts to exact types including long to double etc. are needed in some special cases.
        if (udf instanceof GenericUDFCoalesce || udf instanceof GenericUDFNvl || udf instanceof GenericUDFElt) {
            GenericUDF genericUdf = getGenericUDFForCast(castType);
            List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
            children.add(child);
            ExprNodeDesc desc = new ExprNodeGenericFuncDesc(castType, genericUdf, children);
            return desc;
        }
    }
    return null;
}
Also used: ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) VectorUDAFMaxString(org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxString) VectorUDAFMinString(org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinString) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) BaseCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) List(java.util.List) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
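To make the decimal branch concrete, here is a hedged sketch of the cast expression it builds for an int-typed child and a decimal target. The column name "c1", table alias "t", and the decimal(12,2) target are illustrative assumptions; as in the method above, this assumes a surrounding method declared to throw HiveException, and it skips the updatePrecision adjustment the real code applies first.

// Assumed inputs, for illustration only.
ExprNodeDesc child = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "t", false);
TypeInfo castType = TypeInfoFactory.getDecimalTypeInfo(12, 2);
// The castTypeDecimal branch wraps the child in a GenericUDFToDecimal:
GenericUDFToDecimal castToDecimalUDF = new GenericUDFToDecimal();
castToDecimalUDF.setTypeInfo(castType);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(child);
ExprNodeDesc cast = new ExprNodeGenericFuncDesc(castType, castToDecimalUDF, children);
// 'cast' now evaluates CAST(c1 AS DECIMAL(12,2)) wherever 'child' was used.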

Example 18 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From class TypeConverter, the method convert:

public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    RelDataType convertedType = null;
    switch(type.getPrimitiveCategory()) {
        case VOID:
            convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case DATE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
            break;
        case INTERVAL_YEAR_MONTH:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            break;
        case BINARY:
            convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
            convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
            break;
        case VARCHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case CHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case UNKNOWN:
            convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
    }
    if (null == convertedType) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(convertedType, true);
}
Also used: DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataType(org.apache.calcite.rel.type.RelDataType)
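A hedged usage sketch of the DECIMAL case: Hive's decimal(10,2) maps to a Calcite DECIMAL with the same precision and scale, made nullable by the final createTypeWithNullability call. The dtFactory here is assumed to be available from the surrounding Calcite planner context.

PrimitiveTypeInfo hiveType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
RelDataType calciteType = TypeConverter.convert(hiveType, dtFactory); // dtFactory assumed in scope
// Expected: calciteType.getSqlTypeName() == SqlTypeName.DECIMAL,
// precision 10, scale 2, and isNullable() == true.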

Example 19 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From class DDLSemanticAnalyzer, the method getTypeName:

public static String getTypeName(ASTNode node) throws SemanticException {
    int token = node.getType();
    String typeName;
    // datetime type isn't currently supported
    if (token == HiveParser.TOK_DATETIME) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
    }
    switch(token) {
        case HiveParser.TOK_CHAR:
            CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(node);
            typeName = charTypeInfo.getQualifiedName();
            break;
        case HiveParser.TOK_VARCHAR:
            VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(node);
            typeName = varcharTypeInfo.getQualifiedName();
            break;
        case HiveParser.TOK_DECIMAL:
            DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(node);
            typeName = decTypeInfo.getQualifiedName();
            break;
        default:
            typeName = TokenToTypeName.get(token);
    }
    return typeName;
}
Also used: DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
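For reference, a small sketch of the qualified names the parameterized branches return; these are the standard Hive type-name forms produced by getQualifiedName().

TypeInfoFactory.getDecimalTypeInfo(10, 2).getQualifiedName(); // "decimal(10,2)"
TypeInfoFactory.getCharTypeInfo(5).getQualifiedName();        // "char(5)"
TypeInfoFactory.getVarcharTypeInfo(20).getQualifiedName();    // "varchar(20)"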

Example 20 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From class VectorUDAFAvgDecimal, the method initPartialResultInspector:

private void initPartialResultInspector() {
    // the output type of the vectorized partial aggregate must match the
    // expected type for the row-mode aggregation
    // For decimal, the type is "same number of integer digits and 4 more decimal digits"
    DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
    this.sumScale = (short) dtiSum.scale();
    this.sumPrecision = (short) dtiSum.precision();
    List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
    foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));
    List<String> fname = new ArrayList<String>();
    fname.add("count");
    fname.add("sum");
    soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
}
Also used: DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList)
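A hedged illustration of the derivation comment above: for an input of decimal(10,2), the partial sum keeps the eight integer digits and gains four fractional digits, so deriveSumFieldTypeInfo is expected to return decimal(14,6), subject to Hive's maximum decimal precision of 38.

DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(10, 2);
short sumPrecision = (short) dtiSum.precision(); // expected 14 (8 integer digits + 6 fractional)
short sumScale = (short) dtiSum.scale();         // expected 6 (input scale 2 + 4)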

Aggregations

DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 40
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 14
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 14
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 13
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 12
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 12
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 11
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 10
IntWritable (org.apache.hadoop.io.IntWritable): 10
Test (org.junit.Test): 10
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 9
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 9
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 9
Timestamp (java.sql.Timestamp): 8
Date (java.sql.Date): 7
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 7
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 7
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 7
BytesWritable (org.apache.hadoop.io.BytesWritable): 7
FloatWritable (org.apache.hadoop.io.FloatWritable): 7