
Example 76 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project presto by prestodb.

Class GenericHiveRecordCursor, method parseDecimalColumn:

private void parseDecimalColumn(int column) {
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
    } else {
        Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
        checkState(fieldValue != null, "fieldValue should not be null");
        HiveDecimal decimal = (HiveDecimal) fieldValue;
        DecimalType columnType = (DecimalType) types[column];
        // Rescale the unscaled value from the decimal's own scale to the column's declared scale
        BigInteger unscaledDecimal = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());
        if (columnType.isShort()) {
            // Short decimals (precision <= 18) fit in a long
            longs[column] = unscaledDecimal.longValue();
        } else {
            // Long decimals are encoded into a Slice
            slices[column] = Decimals.encodeUnscaledValue(unscaledDecimal);
        }
        nulls[column] = false;
    }
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), DecimalType (com.facebook.presto.spi.type.DecimalType), BigInteger (java.math.BigInteger), SerDeUtils.getBlockObject (com.facebook.presto.hive.util.SerDeUtils.getBlockObject), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
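
The interesting step above is rescaling the unscaled BigInteger from the decimal's own scale to the column's declared scale. The standalone sketch below illustrates those semantics; this rescale is a hypothetical stand-in for Presto's helper, which additionally handles rounding and overflow checking:

import java.math.BigInteger;

public class RescaleSketch {
    // Illustrative only: shift an unscaled value between decimal scales
    // by multiplying or dividing by the appropriate power of ten.
    static BigInteger rescale(BigInteger unscaled, int fromScale, int toScale) {
        if (toScale >= fromScale) {
            return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
        }
        // Downscaling truncates here; the real helper must decide how to round
        return unscaled.divide(BigInteger.TEN.pow(fromScale - toScale));
    }

    public static void main(String[] args) {
        // 12.5 at scale 1 is the unscaled value 125; at scale 3 it becomes 12500 (12.500)
        System.out.println(rescale(BigInteger.valueOf(125), 1, 3)); // 12500
    }
}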

Example 77 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project phoenix by apache.

Class PhoenixSerializer, method serialize:

public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType) throws SerDeException {
    pResultWritable.clear();
    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();
    if (LOG.isTraceEnabled()) {
        LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " + values);
    }
    int fieldCount = columnCount;
    if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
        fieldCount++;
    }
    for (int i = 0; i < fieldCount; i++) {
        if (fieldList.size() <= i) {
            break;
        }
        StructField structField = fieldList.get(i);
        if (LOG.isTraceEnabled()) {
            LOG.trace("structField[" + i + "] : " + structField);
        }
        if (structField != null) {
            Object fieldValue = structInspector.getStructFieldData(values, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            String fieldName = structField.getFieldName();
            if (LOG.isTraceEnabled()) {
                LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " + fieldOI);
            }
            Object value = null;
            switch(fieldOI.getCategory()) {
                case PRIMITIVE:
                    value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(fieldValue);
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Field " + fieldName + "[" + i + "] : " + value + "(" + value.getClass() + ")");
                    }
                    if (value instanceof HiveDecimal) {
                        value = ((HiveDecimal) value).bigDecimalValue();
                    } else if (value instanceof HiveChar) {
                        value = ((HiveChar) value).getValue().trim();
                    }
                    pResultWritable.add(value);
                    break;
                case LIST:
                    // Arrays in INSERT statements are not supported yet
                    break;
                case STRUCT:
                    if (dmlType == DmlType.DELETE) {
                        // For UPDATE/DELETE, the first value is a
                        // struct<transactionid:bigint,bucketid:int,rowid:bigint,primaryKey:binary>
                        List<Object> fieldValueList = ((StandardStructObjectInspector) fieldOI).getStructFieldsDataAsList(fieldValue);
                        // convert to map from binary of primary key.
                        @SuppressWarnings("unchecked") Map<String, Object> primaryKeyMap = (Map<String, Object>) PhoenixStorageHandlerUtil.toMap(((BytesWritable) fieldValueList.get(3)).getBytes());
                        for (Object pkValue : primaryKeyMap.values()) {
                            pResultWritable.add(pkValue);
                        }
                    }
                    break;
                default:
                    throw new SerDeException("Phoenix unsupported column type: " + fieldOI.getCategory());
            }
        }
    }
    return pResultWritable;
}
Also used: StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), BytesWritable (org.apache.hadoop.io.BytesWritable), StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Map (java.util.Map), SerDeException (org.apache.hadoop.hive.serde2.SerDeException)
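
The PRIMITIVE branch above normalizes two Hive-specific types before handing values to Phoenix: HiveDecimal becomes a plain BigDecimal, and HiveChar, which is fixed-length and space-padded, is trimmed. A minimal standalone sketch of both conversions (assuming only the Hive common type classes on the classpath):

import java.math.BigDecimal;

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class PrimitiveConversionSketch {
    public static void main(String[] args) {
        // HiveDecimal -> BigDecimal, exactly as in the PRIMITIVE branch
        BigDecimal bd = HiveDecimal.create("123.45").bigDecimalValue();
        System.out.println(bd); // 123.45

        // HiveChar pads to its declared length; trim() drops the padding
        HiveChar ch = new HiveChar("abc", 10);
        System.out.println("[" + ch.getValue().trim() + "]"); // [abc]
    }
}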

Example 78 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class VectorizationContext, method castConstantToDouble:

private Double castConstantToDouble(Object scalar, TypeInfo type) throws HiveException {
    if (null == scalar) {
        return null;
    }
    PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
    String typename = type.getTypeName();
    switch(ptinfo.getPrimitiveCategory()) {
        case FLOAT:
        case DOUBLE:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            return ((Number) scalar).doubleValue();
        case DECIMAL:
            HiveDecimal decimalVal = (HiveDecimal) scalar;
            return decimalVal.doubleValue();
        default:
            throw new HiveException("Unsupported type " + typename + " for cast to Double");
    }
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
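
DECIMAL constants arrive as HiveDecimal, which does not share the Number.doubleValue() path, so they get their own branch. A short sketch of the two paths; note that converting a decimal to double can lose precision for values a double cannot represent exactly:

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class CastToDoubleSketch {
    public static void main(String[] args) {
        // Integer and floating-point scalars share the Number.doubleValue() path
        Number intScalar = 42;
        System.out.println(intScalar.doubleValue()); // 42.0

        // DECIMAL scalars take the HiveDecimal path
        HiveDecimal dec = HiveDecimal.create("12.5");
        System.out.println(dec.doubleValue()); // 12.5
    }
}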

Example 79 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class VectorizationContext, method castConstantToString:

private String castConstantToString(Object scalar, TypeInfo type) throws HiveException {
    if (null == scalar) {
        return null;
    }
    PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
    String typename = type.getTypeName();
    switch(ptinfo.getPrimitiveCategory()) {
        case FLOAT:
        case DOUBLE:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            return ((Number) scalar).toString();
        case DECIMAL:
            HiveDecimal decimalVal = (HiveDecimal) scalar;
            return decimalVal.toString();
        default:
            throw new HiveException("Unsupported type " + typename + " for cast to String");
    }
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
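
The string cast is the precision-safe counterpart to the double cast above: HiveDecimal.toString() renders the exact decimal value rather than a rounded approximation. A minimal sketch:

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class CastToStringSketch {
    public static void main(String[] args) {
        HiveDecimal dec = HiveDecimal.create("12345678901234567890.123456789");
        System.out.println(dec.toString());    // exact: 12345678901234567890.123456789
        System.out.println(dec.doubleValue()); // the nearest double, not the exact value
    }
}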

Example 80 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class VectorizationContext, method getCastToDecimal:

private VectorExpression getCastToDecimal(List<ExprNodeDesc> childExpr, TypeInfo returnType) throws HiveException {
    ExprNodeDesc child = childExpr.get(0);
    String inputType = childExpr.get(0).getTypeString();
    if (child instanceof ExprNodeConstantDesc) {
        // Return a constant vector expression
        Object constantValue = ((ExprNodeConstantDesc) child).getValue();
        HiveDecimal decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
        return getConstantVectorExpression(decimalValue, returnType, VectorExpressionDescriptor.Mode.PROJECTION);
    }
    if (isIntFamily(inputType)) {
        return createVectorExpression(CastLongToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
    } else if (isFloatFamily(inputType)) {
        return createVectorExpression(CastDoubleToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
    } else if (decimalTypePattern.matcher(inputType).matches()) {
        if (child instanceof ExprNodeColumnDesc) {
            int colIndex = getInputColumnIndex((ExprNodeColumnDesc) child);
            DataTypePhysicalVariation dataTypePhysicalVariation = getDataTypePhysicalVariation(colIndex);
            if (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64) {
                // Do Decimal64 conversion instead.
                return createDecimal64ToDecimalConversion(colIndex, returnType);
            } else {
                return createVectorExpression(CastDecimalToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
            }
        } else {
            return createVectorExpression(CastDecimalToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        }
    } else if (isStringFamily(inputType)) {
        return createVectorExpression(CastStringToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
    } else if (inputType.equals("timestamp")) {
        return createVectorExpression(CastTimestampToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
    }
    return null;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
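
The routing above is keyed entirely off the input type's string name, so parameterized decimal types are matched by a prefix pattern rather than string equality. The sketch below shows that check; the pattern is an assumption about the shape of VectorizationContext's decimalTypePattern, not a copy of it:

import java.util.regex.Pattern;

public class DecimalRoutingSketch {
    // Hypothetical equivalent of decimalTypePattern: any type name starting with "decimal"
    private static final Pattern DECIMAL_TYPE = Pattern.compile("decimal.*", Pattern.CASE_INSENSITIVE);

    public static void main(String[] args) {
        System.out.println(DECIMAL_TYPE.matcher("decimal(10,2)").matches()); // true
        System.out.println(DECIMAL_TYPE.matcher("double").matches());        // false
        System.out.println(DECIMAL_TYPE.matcher("timestamp").matches());     // false
    }
}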

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 94
Test (org.junit.Test): 28
Timestamp (java.sql.Timestamp): 24
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 23
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 22
Text (org.apache.hadoop.io.Text): 22
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 21
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 21
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 20
Date (java.sql.Date): 19
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18
BytesWritable (org.apache.hadoop.io.BytesWritable): 17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 15
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 15
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 15
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 14
IntWritable (org.apache.hadoop.io.IntWritable): 14
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 13
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 13