
Example 81 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project presto by prestodb.

From the class GenericHiveRecordCursor, method parseDecimalColumn.

private void parseDecimalColumn(int column) {
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
    } else {
        Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
        checkState(fieldValue != null, "fieldValue should not be null");
        HiveDecimal decimal = (HiveDecimal) fieldValue;
        DecimalType columnType = (DecimalType) types[column];
        // Rescale the unscaled value from the decimal's own scale to the column's declared scale
        BigInteger unscaledDecimal = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());
        if (columnType.isShort()) {
            // Short decimals fit their unscaled value in a long
            longs[column] = unscaledDecimal.longValue();
        } else {
            // Long decimals store the unscaled value encoded in a Slice
            slices[column] = Decimals.encodeUnscaledValue(unscaledDecimal);
        }
        nulls[column] = false;
    }
}
Also used: HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) DecimalType(com.facebook.presto.spi.type.DecimalType) BigInteger(java.math.BigInteger) SerDeUtils.getBlockObject(com.facebook.presto.hive.util.SerDeUtils.getBlockObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
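
The interesting step above is rescale, which converts the unscaled value from the decimal's own scale to the scale declared for the column. A minimal standalone sketch of what such a rescaling helper can look like, assuming half-up rounding when the scale shrinks (the helper statically imported by the original class may round and handle overflow differently):

import java.math.BigInteger;

static BigInteger rescaleSketch(BigInteger unscaled, int fromScale, int toScale) {
    if (toScale == fromScale) {
        return unscaled;
    }
    if (toScale > fromScale) {
        // Growing the scale appends zeros: 123 at scale 1 (12.3) becomes 12300 at scale 3
        return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    // Shrinking the scale divides by a power of ten, rounding half up
    BigInteger divisor = BigInteger.TEN.pow(fromScale - toScale);
    BigInteger[] divRem = unscaled.divideAndRemainder(divisor);
    BigInteger result = divRem[0];
    if (divRem[1].abs().multiply(BigInteger.valueOf(2)).compareTo(divisor) >= 0) {
        result = result.add(BigInteger.valueOf(unscaled.signum()));
    }
    return result;
}

// rescaleSketch(BigInteger.valueOf(126), 2, 1) -> 13     (1.26 rounds to 1.3)
// rescaleSketch(BigInteger.valueOf(123), 1, 3) -> 12300  (12.3 widens to 12.300)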

Example 82 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

From the class MultiValueBoundaryScanner, method isEqual.

@Override
public boolean isEqual(Object v1, Object v2) {
    // Coerce both boundary values to HiveDecimal via the expression's ObjectInspector
    HiveDecimal d1 = PrimitiveObjectInspectorUtils.getHiveDecimal(v1, (PrimitiveObjectInspector) expressionDef.getOI());
    HiveDecimal d2 = PrimitiveObjectInspectorUtils.getHiveDecimal(v2, (PrimitiveObjectInspector) expressionDef.getOI());
    if (d1 != null && d2 != null) {
        return d1.equals(d2);
    }
    // Otherwise they are equal only if both are null
    return d1 == null && d2 == null;
}
Also used: HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal)
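
This is the standard null-safe equality pattern: two nulls compare equal, and a null never equals a non-null. A standalone sketch of the same contract (HiveDecimal.create and equals are real APIs; the checkEqual name is invented for this illustration):

import org.apache.hadoop.hive.common.type.HiveDecimal;

static boolean checkEqual(HiveDecimal d1, HiveDecimal d2) {
    if (d1 != null && d2 != null) {
        return d1.equals(d2);
    }
    // Equal only when both sides are null
    return d1 == null && d2 == null;
}

// checkEqual(HiveDecimal.create("1.5"), HiveDecimal.create("1.5")) -> true
// checkEqual(null, null)                                           -> true
// checkEqual(HiveDecimal.create("1.5"), null)                      -> false

Note that HiveDecimal normalizes trailing zeros on creation, so values such as 1.50 and 1.5 should also compare equal.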

Example 83 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project phoenix by apache.

From the class PhoenixSerializer, method serialize.

public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType) throws SerDeException {
    pResultWritable.clear();
    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();
    if (LOG.isTraceEnabled()) {
        LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " + values);
    }
    int fieldCount = columnCount;
    if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
        // UPDATE/DELETE rows carry an extra leading struct identifying the row
        fieldCount++;
    }
    for (int i = 0; i < fieldCount; i++) {
        if (fieldList.size() <= i) {
            break;
        }
        StructField structField = fieldList.get(i);
        if (LOG.isTraceEnabled()) {
            LOG.trace("structField[" + i + "] : " + structField);
        }
        if (structField != null) {
            Object fieldValue = structInspector.getStructFieldData(values, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            String fieldName = structField.getFieldName();
            if (LOG.isTraceEnabled()) {
                LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " + fieldOI);
            }
            Object value = null;
            switch (fieldOI.getCategory()) {
                case PRIMITIVE:
                    value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(fieldValue);
                    if (LOG.isTraceEnabled()) {
                        // Guard against NPE: getPrimitiveJavaObject can return null
                        LOG.trace("Field " + fieldName + "[" + i + "] : " + value + (value == null ? "" : " (" + value.getClass() + ")"));
                    }
                    // Unwrap Hive-specific types into plain Java values for Phoenix
                    if (value instanceof HiveDecimal) {
                        value = ((HiveDecimal) value).bigDecimalValue();
                    } else if (value instanceof HiveChar) {
                        value = ((HiveChar) value).getValue().trim();
                    }
                    pResultWritable.add(value);
                    break;
                case LIST:
                    // Arrays are not supported in INSERT statements yet
                    break;
                case STRUCT:
                    if (dmlType == DmlType.DELETE) {
                        // For UPDATE/DELETE, the first value is a
                        // struct<transactionid:bigint,bucketid:int,rowid:bigint,primaryKey:binary>
                        List<Object> fieldValueList = ((StandardStructObjectInspector) fieldOI).getStructFieldsDataAsList(fieldValue);
                        // Decode the binary primary key (index 3) into a map of column name to value
                        @SuppressWarnings("unchecked") Map<String, Object> primaryKeyMap = (Map<String, Object>) PhoenixStorageHandlerUtil.toMap(((BytesWritable) fieldValueList.get(3)).getBytes());
                        for (Object pkValue : primaryKeyMap.values()) {
                            pResultWritable.add(pkValue);
                        }
                    }
                    break;
                default:
                    // Fail loudly on unsupported column types
                    throw new SerDeException("Phoenix Unsupported column type: " + fieldOI.getCategory());
            }
        }
    }
    return pResultWritable;
}
Also used: StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BytesWritable(org.apache.hadoop.io.BytesWritable) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Map(java.util.Map) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
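
The PRIMITIVE branch is the part of serialize that touches HiveDecimal: Hive's wrapper types are unwrapped into plain Java values before being handed to Phoenix. A minimal standalone sketch of just that conversion (bigDecimalValue and getValue are real HiveDecimal/HiveChar APIs; the toPhoenixValue name is invented for this illustration):

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;

static Object toPhoenixValue(Object value) {
    if (value instanceof HiveDecimal) {
        // Phoenix works with java.math.BigDecimal for decimal columns
        return ((HiveDecimal) value).bigDecimalValue();
    }
    if (value instanceof HiveChar) {
        // HiveChar.getValue() returns the value padded to the declared width, hence the trim
        return ((HiveChar) value).getValue().trim();
    }
    return value;
}

// toPhoenixValue(HiveDecimal.create("12.34")) -> BigDecimal 12.34
// toPhoenixValue(new HiveChar("abc", 10))     -> "abc"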

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 83 uses
Test (org.junit.Test): 28 uses
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 24 uses
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18 uses
Text (org.apache.hadoop.io.Text): 16 uses
Timestamp (java.sql.Timestamp): 15 uses
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 15 uses
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 15 uses
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 14 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13 uses
Date (java.sql.Date): 11 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 11 uses
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 10 uses
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 10 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 9 uses
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 9 uses
IntWritable (org.apache.hadoop.io.IntWritable): 9 uses
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 8 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 8 uses