Use of org.apache.hadoop.hive.common.type.HiveDecimal in project presto (prestodb): class GenericHiveRecordCursor, method parseDecimalColumn.
/**
 * Loads the decimal value of the given column from the current row into this
 * cursor's per-column caches, recording null-ness and marking the column loaded.
 */
private void parseDecimalColumn(int column) {
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
        return;
    }
    Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
    checkState(fieldValue != null, "fieldValue should not be null");
    HiveDecimal decimal = (HiveDecimal) fieldValue;
    DecimalType columnType = (DecimalType) types[column];
    // Rescale the unscaled value from the source decimal's scale to the column's declared scale.
    BigInteger rescaled = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());
    if (columnType.isShort()) {
        // Short decimals fit in a long.
        longs[column] = rescaled.longValue();
    } else {
        slices[column] = Decimals.encodeUnscaledValue(rescaled);
    }
    nulls[column] = false;
}
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project phoenix (apache): class PhoenixSerializer, method serialize.
/**
 * Serializes a Hive row into {@code pResultWritable} as Phoenix column values.
 *
 * <p>Primitive fields are unwrapped to their Java values (HiveDecimal to
 * BigDecimal, HiveChar to a trimmed String). For DELETE statements the leading
 * struct field (the ACID row identifier) is decoded and its primary-key values
 * are appended instead. LIST fields are ignored (arrays are not yet supported
 * in insert statements).
 *
 * @param values the row object to serialize
 * @param objInspector a StructObjectInspector describing {@code values}
 * @param dmlType the DML operation being performed
 * @return the populated {@code pResultWritable}
 * @throws UnsupportedOperationException if a field has an unsupported category
 */
public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType) {
    pResultWritable.clear();
    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();
    if (LOG.isTraceEnabled()) {
        LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " + values);
    }
    // UPDATE/DELETE rows carry one extra leading field beyond the declared columns.
    int fieldCount = columnCount;
    if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
        fieldCount++;
    }
    for (int i = 0; i < fieldCount; i++) {
        if (fieldList.size() <= i) {
            break;
        }
        StructField structField = fieldList.get(i);
        if (LOG.isTraceEnabled()) {
            LOG.trace("structField[" + i + "] : " + structField);
        }
        if (structField != null) {
            Object fieldValue = structInspector.getStructFieldData(values, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            String fieldName = structField.getFieldName();
            if (LOG.isTraceEnabled()) {
                LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " + fieldOI);
            }
            Object value = null;
            switch(fieldOI.getCategory()) {
                case PRIMITIVE:
                    value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(fieldValue);
                    if (LOG.isTraceEnabled()) {
                        // FIX: guard against NPE — getPrimitiveJavaObject may return null.
                        LOG.trace("Field " + fieldName + "[" + i + "] : " + value + "("
                                + (value == null ? "null" : value.getClass()) + ")");
                    }
                    // Unwrap Hive wrapper types into plain Java values Phoenix understands.
                    if (value instanceof HiveDecimal) {
                        value = ((HiveDecimal) value).bigDecimalValue();
                    } else if (value instanceof HiveChar) {
                        value = ((HiveChar) value).getValue().trim();
                    }
                    pResultWritable.add(value);
                    break;
                case LIST:
                    // No support for arrays in insert statement yet
                    break;
                case STRUCT:
                    if (dmlType == DmlType.DELETE) {
                        // When update/delete, first value is struct<transactionid:bigint,
                        // bucketid:int,rowid:bigint,primaryKey:binary>>
                        List<Object> fieldValueList = ((StandardStructObjectInspector) fieldOI).getStructFieldsDataAsList(fieldValue);
                        // convert to map from binary of primary key.
                        @SuppressWarnings("unchecked") Map<String, Object> primaryKeyMap = (Map<String, Object>) PhoenixStorageHandlerUtil.toMap(((BytesWritable) fieldValueList.get(3)).getBytes());
                        for (Object pkValue : primaryKeyMap.values()) {
                            pResultWritable.add(pkValue);
                        }
                    }
                    break;
                default:
                    // BUG FIX: the SerDeException was previously constructed but never
                    // thrown, silently skipping unsupported categories. The method
                    // declares no checked exceptions, so throw an unchecked one.
                    throw new UnsupportedOperationException(
                            "Phoenix Unsupported column type: " + fieldOI.getCategory());
            }
        }
    }
    return pResultWritable;
}
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive (apache): class VectorizationContext, method castConstantToDouble.
/**
 * Casts a constant scalar of the given primitive type to a {@link Double}.
 *
 * @param scalar the constant value; may be null
 * @param type the Hive type of the constant (must be a PrimitiveTypeInfo)
 * @return the value as a Double, or null if {@code scalar} is null
 * @throws HiveException if the type is not castable to double
 */
private Double castConstantToDouble(Object scalar, TypeInfo type) throws HiveException {
    if (scalar == null) {
        return null;
    }
    PrimitiveTypeInfo primitiveInfo = (PrimitiveTypeInfo) type;
    switch (primitiveInfo.getPrimitiveCategory()) {
        case FLOAT:
        case DOUBLE:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            // All numeric primitives share the Number interface.
            return ((Number) scalar).doubleValue();
        case DECIMAL:
            return ((HiveDecimal) scalar).doubleValue();
        default:
            throw new HiveException("Unsupported type " + type.getTypeName() + " for cast to Double");
    }
}
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive (apache): class VectorizationContext, method castConstantToString.
/**
 * Casts a constant scalar of the given primitive type to its String form.
 *
 * @param scalar the constant value; may be null
 * @param type the Hive type of the constant (must be a PrimitiveTypeInfo)
 * @return the value rendered as a String, or null if {@code scalar} is null
 * @throws HiveException if the type is not castable to String
 */
private String castConstantToString(Object scalar, TypeInfo type) throws HiveException {
    if (scalar == null) {
        return null;
    }
    PrimitiveTypeInfo primitiveInfo = (PrimitiveTypeInfo) type;
    switch (primitiveInfo.getPrimitiveCategory()) {
        case FLOAT:
        case DOUBLE:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            // Numeric primitives render via Number.toString().
            return ((Number) scalar).toString();
        case DECIMAL:
            return ((HiveDecimal) scalar).toString();
        default:
            throw new HiveException("Unsupported type " + type.getTypeName() + " for cast to String");
    }
}
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive (apache): class VectorizationContext, method getCastToDecimal.
/**
 * Builds a vectorized CAST-to-decimal expression for the given child expression.
 *
 * <p>Constants are folded immediately into a constant vector expression; otherwise
 * the input's type family selects the concrete cast expression class. Decimal
 * inputs additionally check for the DECIMAL_64 physical representation, which
 * needs a dedicated conversion. Returns null when the input type is not handled
 * here. NOTE(review): the order of the type-family checks appears intentional
 * (decimal pattern before string family) — preserve it.
 *
 * @param childExpr expression list whose first element is the cast operand
 * @param returnType the target decimal type
 * @return the cast expression, or null if the input type is unsupported
 * @throws HiveException if constant folding or expression creation fails
 */
private VectorExpression getCastToDecimal(List<ExprNodeDesc> childExpr, TypeInfo returnType) throws HiveException {
ExprNodeDesc child = childExpr.get(0);
String inputType = childExpr.get(0).getTypeString();
if (child instanceof ExprNodeConstantDesc) {
// Return a constant vector expression
Object constantValue = ((ExprNodeConstantDesc) child).getValue();
HiveDecimal decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
return getConstantVectorExpression(decimalValue, returnType, VectorExpressionDescriptor.Mode.PROJECTION);
}
if (isIntFamily(inputType)) {
return createVectorExpression(CastLongToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isFloatFamily(inputType)) {
return createVectorExpression(CastDoubleToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (decimalTypePattern.matcher(inputType).matches()) {
// Decimal-to-decimal: columns may be stored in the DECIMAL_64 physical form,
// which requires a dedicated conversion path.
if (child instanceof ExprNodeColumnDesc) {
int colIndex = getInputColumnIndex((ExprNodeColumnDesc) child);
DataTypePhysicalVariation dataTypePhysicalVariation = getDataTypePhysicalVariation(colIndex);
if (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64) {
// Do Decimal64 conversion instead.
return createDecimal64ToDecimalConversion(colIndex, returnType);
} else {
return createVectorExpression(CastDecimalToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
} else {
// Non-column decimal expression: regular decimal-to-decimal cast.
return createVectorExpression(CastDecimalToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (inputType.equals("timestamp")) {
return createVectorExpression(CastTimestampToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
// Unsupported input type — caller is expected to handle null.
return null;
}
Aggregations