Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
The class TypeQualifiers, method fromTypeInfo.
public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
  TypeQualifiers result = null;
  if (pti instanceof VarcharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((VarcharTypeInfo) pti).getLength());
  } else if (pti instanceof CharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((CharTypeInfo) pti).getLength());
  } else if (pti instanceof DecimalTypeInfo) {
    result = new TypeQualifiers();
    result.setPrecision(((DecimalTypeInfo) pti).precision());
    result.setScale(((DecimalTypeInfo) pti).scale());
  }
  return result;
}
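A short usage sketch (an editor's illustration; the concrete precision and scale are assumptions, not taken from the Hive source): a decimal(10,2) type yields qualifiers carrying its precision and scale, while a plain int type carries no qualifiers and yields null.
public static void fromTypeInfoExample() {
  // decimal(10,2) -> qualifiers with precision 10 and scale 2
  TypeQualifiers qualifiers = TypeQualifiers.fromTypeInfo(new DecimalTypeInfo(10, 2));
  // int carries no length, precision, or scale qualifiers -> null
  TypeQualifiers none = TypeQualifiers.fromTypeInfo(TypeInfoFactory.intTypeInfo);
}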
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
The class HiveDecimalUtils, method enforcePrecisionScale.
public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable, DecimalTypeInfo typeInfo) {
  if (writable == null) {
    return null;
  }
  HiveDecimalWritable result = new HiveDecimalWritable(writable);
  result.mutateEnforcePrecisionScale(typeInfo.precision(), typeInfo.scale());
  return (result.isSet() ? result : null);
}
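A usage sketch of the helper above (the values are illustrative assumptions): a target type with enough precision rounds excess fractional digits, while one with too few integer digits cannot represent the value at all and comes back null.
public static void enforceExample() {
  HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("123.456"));
  // decimal(5,2): the value fits once rounded to two fractional digits -> 123.46
  HiveDecimalWritable rounded = HiveDecimalUtils.enforcePrecisionScale(input, new DecimalTypeInfo(5, 2));
  // decimal(3,2): only one integer digit is allowed, so 123.456 cannot fit -> null
  HiveDecimalWritable overflow = HiveDecimalUtils.enforcePrecisionScale(input, new DecimalTypeInfo(3, 2));
}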
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
The class SerdeRandomRowSource, method randomObject.
public Object randomObject(int column) {
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
  case BOOLEAN:
    // nextInt(2) yields 0 or 1, so both true and false occur
    return Boolean.valueOf(r.nextInt(2) == 1);
  case BYTE:
    return Byte.valueOf((byte) r.nextInt());
  case SHORT:
    return Short.valueOf((short) r.nextInt());
  case INT:
    return Integer.valueOf(r.nextInt());
  case LONG:
    return Long.valueOf(r.nextLong());
  case DATE:
    return RandomTypeUtil.getRandDate(r);
  case FLOAT:
    return Float.valueOf(r.nextFloat() * 10 - 5);
  case DOUBLE:
    return Double.valueOf(r.nextDouble() * 10 - 5);
  case STRING:
    return RandomTypeUtil.getRandString(r);
  case CHAR:
    return getRandHiveChar(r, (CharTypeInfo) primitiveTypeInfo);
  case VARCHAR:
    return getRandHiveVarchar(r, (VarcharTypeInfo) primitiveTypeInfo);
  case BINARY:
    return getRandBinary(r, 1 + r.nextInt(100));
  case TIMESTAMP:
    return RandomTypeUtil.getRandTimestamp(r);
  case INTERVAL_YEAR_MONTH:
    return getRandIntervalYearMonth(r);
  case INTERVAL_DAY_TIME:
    return getRandIntervalDayTime(r);
  case DECIMAL:
    {
      HiveDecimal dec = getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
      return dec;
    }
  default:
    throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
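getRandHiveDecimal is called above but not shown on this page. A minimal sketch of what such a helper could look like (an assumption, not the Hive implementation), using only the DecimalTypeInfo accessors seen elsewhere in these examples:
public static HiveDecimal getRandHiveDecimal(Random r, DecimalTypeInfo decimalTypeInfo) {
  int precision = decimalTypeInfo.precision();
  int scale = decimalTypeInfo.scale();
  StringBuilder sb = new StringBuilder();
  if (r.nextBoolean()) {
    sb.append('-');
  }
  // integer part: at most (precision - scale) digits, capped so the bound stays in int range
  int integerDigits = Math.min(precision - scale, 9);
  sb.append(r.nextInt((int) Math.pow(10, integerDigits)));
  if (scale > 0) {
    sb.append('.');
    for (int i = 0; i < scale; i++) {
      sb.append(r.nextInt(10));
    }
  }
  return HiveDecimal.create(sb.toString());
}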
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
The class SerdeRandomRowSource, method getWritableObject.
public Object getWritableObject(int column, Object object) {
  ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
  case BOOLEAN:
    return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
  case BYTE:
    return ((WritableByteObjectInspector) objectInspector).create((byte) object);
  case SHORT:
    return ((WritableShortObjectInspector) objectInspector).create((short) object);
  case INT:
    return ((WritableIntObjectInspector) objectInspector).create((int) object);
  case LONG:
    return ((WritableLongObjectInspector) objectInspector).create((long) object);
  case DATE:
    return ((WritableDateObjectInspector) objectInspector).create((Date) object);
  case FLOAT:
    return ((WritableFloatObjectInspector) objectInspector).create((float) object);
  case DOUBLE:
    return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
  case STRING:
    return ((WritableStringObjectInspector) objectInspector).create((String) object);
  case CHAR:
    {
      WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
      return writableCharObjectInspector.create((HiveChar) object);
    }
  case VARCHAR:
    {
      WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
      return writableVarcharObjectInspector.create((HiveVarchar) object);
    }
  case BINARY:
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
  case TIMESTAMP:
    return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
  case INTERVAL_YEAR_MONTH:
    return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
  case INTERVAL_DAY_TIME:
    return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
  case DECIMAL:
    {
      WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
      return writableDecimalObjectInspector.create((HiveDecimal) object);
    }
  default:
    throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
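The DECIMAL branch above builds a type-aware object inspector on the fly. A standalone sketch of that same round trip (the concrete value, precision, and scale are illustrative assumptions):
public static void writableDecimalExample() {
  DecimalTypeInfo typeInfo = new DecimalTypeInfo(10, 2);
  WritableHiveDecimalObjectInspector inspector = new WritableHiveDecimalObjectInspector(typeInfo);
  // wraps the HiveDecimal in a HiveDecimalWritable, as the DECIMAL branch above does
  Object writable = inspector.create(HiveDecimal.create("12345678.91"));
}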
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.
The class AvroDeserializer, method deserializePrimitive.
private Object deserializePrimitive(Object datum, Schema fileSchema, Schema recordSchema, PrimitiveTypeInfo columnType) throws AvroSerdeException {
  switch (columnType.getPrimitiveCategory()) {
  case STRING:
    // Avro hands strings back as Utf8 objects; work around that by converting to a java.lang.String
    return datum.toString();
  case BINARY:
    if (recordSchema.getType() == Type.FIXED) {
      Fixed fixed = (Fixed) datum;
      return fixed.bytes();
    } else if (recordSchema.getType() == Type.BYTES) {
      return AvroSerdeUtils.getBytesFromByteBuffer((ByteBuffer) datum);
    } else {
      throw new AvroSerdeException("Unexpected Avro schema for Binary TypeInfo: " + recordSchema.getType());
    }
  case DECIMAL:
    if (fileSchema == null) {
      throw new AvroSerdeException("File schema is missing for decimal field. Reader schema is " + columnType);
    }
    int scale = 0;
    try {
      scale = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).asInt();
    } catch (Exception ex) {
      throw new AvroSerdeException("Failed to obtain scale value from file schema: " + fileSchema, ex);
    }
    HiveDecimal dec = AvroSerdeUtils.getHiveDecimalFromByteBuffer((ByteBuffer) datum, scale);
    JavaHiveDecimalObjectInspector oi = (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector((DecimalTypeInfo) columnType);
    return oi.set(null, dec);
  case CHAR:
    if (fileSchema == null) {
      throw new AvroSerdeException("File schema is missing for char field. Reader schema is " + columnType);
    }
    int maxLength = 0;
    try {
      maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
    } catch (Exception ex) {
      throw new AvroSerdeException("Failed to obtain maxLength value for char field from file schema: " + fileSchema, ex);
    }
    String str = datum.toString();
    HiveChar hc = new HiveChar(str, maxLength);
    return hc;
  case VARCHAR:
    if (fileSchema == null) {
      throw new AvroSerdeException("File schema is missing for varchar field. Reader schema is " + columnType);
    }
    maxLength = 0;
    try {
      maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
    } catch (Exception ex) {
      throw new AvroSerdeException("Failed to obtain maxLength value for varchar field from file schema: " + fileSchema, ex);
    }
    str = datum.toString();
    HiveVarchar hvc = new HiveVarchar(str, maxLength);
    return hvc;
  case DATE:
    if (recordSchema.getType() != Type.INT) {
      throw new AvroSerdeException("Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
    }
    return new Date(DateWritable.daysToMillis((Integer) datum));
  case TIMESTAMP:
    if (recordSchema.getType() != Type.LONG) {
      throw new AvroSerdeException("Unexpected Avro schema for Timestamp TypeInfo: " + recordSchema.getType());
    }
    return new Timestamp((Long) datum);
  default:
    return datum;
  }
}
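For context: Avro's decimal encoding stores the unscaled value as big-endian two's-complement bytes and keeps the scale in the schema. A hedged sketch of the conversion AvroSerdeUtils.getHiveDecimalFromByteBuffer is expected to perform, reconstructed from the Avro specification rather than from the Hive source:
public static HiveDecimal decimalFromAvroBytes(java.nio.ByteBuffer buffer, int scale) {
  byte[] bytes = new byte[buffer.remaining()];
  buffer.get(bytes); // big-endian two's-complement unscaled value
  java.math.BigInteger unscaled = new java.math.BigInteger(bytes);
  return HiveDecimal.create(new java.math.BigDecimal(unscaled, scale));
}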