Use of org.apache.hadoop.hive.common.type.HiveDecimal in project presto by prestodb.
The class GenericHiveRecordCursor, method parseDecimalColumn:
private void parseDecimalColumn(int column)
{
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
    }
    else {
        Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
        checkState(fieldValue != null, "fieldValue should not be null");

        HiveDecimal decimal = (HiveDecimal) fieldValue;
        DecimalType columnType = (DecimalType) types[column];

        // Rescale the unscaled value from the scale in the file to the declared column scale.
        BigInteger unscaledDecimal = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());

        if (columnType.isShort()) {
            // Short decimals fit in a Java long.
            longs[column] = unscaledDecimal.longValue();
        }
        else {
            // Long decimals are stored as a Slice-encoded unscaled value.
            slices[column] = Decimals.encodeUnscaledValue(unscaledDecimal);
        }
        nulls[column] = false;
    }
}
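The rescale step is the key detail: the unscaled value read from the file may carry a different scale than the column declares, so it is shifted by a power of ten before being stored. A minimal sketch of such a helper, under the assumption that scaling down simply truncates (the actual helper in the cursor may round or reject scale reduction instead):

import java.math.BigInteger;

public final class RescaleSketch {
    // Adjust an unscaled decimal value from fromScale to toScale by a power of ten.
    // Assumption: truncates when scaling down; the real helper may behave differently.
    static BigInteger rescale(BigInteger unscaled, int fromScale, int toScale) {
        if (fromScale == toScale) {
            return unscaled;
        }
        BigInteger factor = BigInteger.TEN.pow(Math.abs(toScale - fromScale));
        return toScale > fromScale ? unscaled.multiply(factor) : unscaled.divide(factor);
    }

    public static void main(String[] args) {
        // 12.3 stored with scale 1 (unscaled value 123), rescaled for a column with scale 3.
        System.out.println(rescale(BigInteger.valueOf(123), 1, 3)); // prints 12300
    }
}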
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.
The class MultiValueBoundaryScanner, method isEqual:
@Override
public boolean isEqual(Object v1, Object v2) {
  HiveDecimal d1 = PrimitiveObjectInspectorUtils.getHiveDecimal(v1,
      (PrimitiveObjectInspector) expressionDef.getOI());
  HiveDecimal d2 = PrimitiveObjectInspectorUtils.getHiveDecimal(v2,
      (PrimitiveObjectInspector) expressionDef.getOI());
  if (d1 != null && d2 != null) {
    return d1.equals(d2);
  }
  // True only if both are null.
  return d1 == null && d2 == null;
}
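The two-step null check gives the comparison a total equality contract: two nulls compare equal, a null never equals a non-null, and otherwise the decimal comparison decides. The same pattern isolated as a plain-Java sketch, with BigDecimal standing in for HiveDecimal; compareTo is used so 1.0 and 1.00 compare equal, which roughly matches HiveDecimal's trailing-zero normalization:

import java.math.BigDecimal;

public final class NullSafeEquals {
    // Same contract as isEqual above: both null -> equal; exactly one null -> not equal;
    // otherwise delegate to the value comparison.
    static boolean isEqual(BigDecimal d1, BigDecimal d2) {
        if (d1 != null && d2 != null) {
            return d1.compareTo(d2) == 0;
        }
        return d1 == null && d2 == null;
    }

    public static void main(String[] args) {
        System.out.println(isEqual(null, null));                                    // true
        System.out.println(isEqual(new BigDecimal("1.0"), null));                   // false
        System.out.println(isEqual(new BigDecimal("1.0"), new BigDecimal("1.00"))); // true
    }
}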
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project phoenix by apache.
The class PhoenixSerializer, method serialize:
public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType)
        throws SerDeException {
    pResultWritable.clear();

    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();

    if (LOG.isTraceEnabled()) {
        LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " + values);
    }

    int fieldCount = columnCount;
    if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
        // UPDATE/DELETE rows carry one extra leading row-identifier field.
        fieldCount++;
    }

    for (int i = 0; i < fieldCount; i++) {
        if (fieldList.size() <= i) {
            break;
        }

        StructField structField = fieldList.get(i);
        if (LOG.isTraceEnabled()) {
            LOG.trace("structField[" + i + "] : " + structField);
        }

        if (structField != null) {
            Object fieldValue = structInspector.getStructFieldData(values, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            String fieldName = structField.getFieldName();

            if (LOG.isTraceEnabled()) {
                LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " + fieldOI);
            }

            Object value = null;
            switch (fieldOI.getCategory()) {
                case PRIMITIVE:
                    value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(fieldValue);
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Field " + fieldName + "[" + i + "] : " + value + " ("
                                + (value == null ? "null" : value.getClass()) + ")");
                    }
                    // Unwrap Hive wrapper types into the plain Java types Phoenix expects.
                    if (value instanceof HiveDecimal) {
                        value = ((HiveDecimal) value).bigDecimalValue();
                    } else if (value instanceof HiveChar) {
                        value = ((HiveChar) value).getValue().trim();
                    }
                    pResultWritable.add(value);
                    break;
                case LIST:
                    // Arrays in INSERT statements are not supported yet.
                    break;
                case STRUCT:
                    if (dmlType == DmlType.DELETE) {
                        // For UPDATE/DELETE, the first value is a
                        // struct<transactionid:bigint,bucketid:int,rowid:bigint,primaryKey:binary>.
                        List<Object> fieldValueList = ((StandardStructObjectInspector) fieldOI)
                                .getStructFieldsDataAsList(fieldValue);
                        // Decode the binary primary key into a column-to-value map.
                        @SuppressWarnings("unchecked")
                        Map<String, Object> primaryKeyMap = (Map<String, Object>) PhoenixStorageHandlerUtil
                                .toMap(((BytesWritable) fieldValueList.get(3)).getBytes());
                        for (Object pkValue : primaryKeyMap.values()) {
                            pResultWritable.add(pkValue);
                        }
                    }
                    break;
                default:
                    // Fail fast on unsupported column types.
                    throw new SerDeException("Phoenix Unsupported column type: " + fieldOI.getCategory());
            }
        }
    }
    return pResultWritable;
}
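The PRIMITIVE branch is where HiveDecimal matters: Hive's wrapper types are unwrapped into plain Java values before being added to the writable. That conversion step as a standalone, hedged sketch (only the HiveDecimal/HiveChar handling from above; the surrounding serializer machinery is omitted, and UnwrapSketch/unwrap are illustrative names, not Phoenix API):

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public final class UnwrapSketch {
    // Unwrap Hive wrapper types into plain Java values, as the PRIMITIVE case does.
    static Object unwrap(Object value) {
        if (value instanceof HiveDecimal) {
            return ((HiveDecimal) value).bigDecimalValue(); // BigDecimal view of the decimal
        }
        if (value instanceof HiveChar) {
            return ((HiveChar) value).getValue().trim();    // drop CHAR(n) blank padding
        }
        return value; // other primitives pass through unchanged
    }

    public static void main(String[] args) {
        Object dec = unwrap(HiveDecimal.create(new BigDecimal("12.34")));
        Object chr = unwrap(new HiveChar("abc", 10));
        System.out.println(dec + " / " + chr); // 12.34 / abc
    }
}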