Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class SerdeRandomRowSource, method getWritableObject.
public Object getWritableObject(int column, Object object) {
  ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
  case BOOLEAN:
    return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
  case BYTE:
    return ((WritableByteObjectInspector) objectInspector).create((byte) object);
  case SHORT:
    return ((WritableShortObjectInspector) objectInspector).create((short) object);
  case INT:
    return ((WritableIntObjectInspector) objectInspector).create((int) object);
  case LONG:
    return ((WritableLongObjectInspector) objectInspector).create((long) object);
  case DATE:
    return ((WritableDateObjectInspector) objectInspector).create((Date) object);
  case FLOAT:
    return ((WritableFloatObjectInspector) objectInspector).create((float) object);
  case DOUBLE:
    return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
  case STRING:
    return ((WritableStringObjectInspector) objectInspector).create((String) object);
  case CHAR:
    {
      // CHAR carries a length parameter, so a fresh inspector is built from the type info.
      WritableHiveCharObjectInspector writableCharObjectInspector =
          new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
      return writableCharObjectInspector.create((HiveChar) object);
    }
  case VARCHAR:
    {
      // VARCHAR likewise takes its maximum length from the VarcharTypeInfo.
      WritableHiveVarcharObjectInspector writableVarcharObjectInspector =
          new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
      return writableVarcharObjectInspector.create((HiveVarchar) object);
    }
  case BINARY:
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
  case TIMESTAMP:
    return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
  case INTERVAL_YEAR_MONTH:
    return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
  case INTERVAL_DAY_TIME:
    return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
  case DECIMAL:
    {
      // DECIMAL is parameterized by precision and scale, taken from the DecimalTypeInfo.
      WritableHiveDecimalObjectInspector writableDecimalObjectInspector =
          new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
      return writableDecimalObjectInspector.create((HiveDecimal) object);
    }
  default:
    throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
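A minimal standalone sketch of the VARCHAR branch above. The class name and demo values are illustrative only; the calls themselves (TypeInfoFactory.getVarcharTypeInfo, WritableHiveVarcharObjectInspector.create) are the same APIs the snippet relies on.

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class VarcharWritableSketch {
  public static void main(String[] args) {
    // varchar(10): an inspector parameterized with the declared maximum length.
    WritableHiveVarcharObjectInspector oi =
        new WritableHiveVarcharObjectInspector(TypeInfoFactory.getVarcharTypeInfo(10));
    // The HiveVarchar constructor already truncates to the given length.
    HiveVarchar value = new HiveVarchar("hello hive varchar", 10);
    HiveVarcharWritable writable = (HiveVarcharWritable) oi.create(value);
    System.out.println(writable.getHiveVarchar().getValue()); // "hello hive"
  }
}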
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class WritableHiveVarcharObjectInspector, method copyObject.
@Override
public Object copyObject(Object o) {
  if (o == null) {
    return null;
  }
  if (o instanceof Text) {
    // Convert a Text value, enforcing the declared varchar length.
    String str = ((Text) o).toString();
    HiveVarcharWritable hcw = new HiveVarcharWritable();
    hcw.set(str, ((VarcharTypeInfo) typeInfo).getLength());
    return hcw;
  }
  HiveVarcharWritable writable = (HiveVarcharWritable) o;
  if (doesWritableMatchTypeParams(writable)) {
    // Already within the length bound: return a defensive copy.
    return new HiveVarcharWritable(writable);
  }
  // Otherwise re-bound the value to this inspector's type parameters.
  return getWritableWithParams(writable);
}
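A hedged sketch of the two live paths through copyObject, assuming a varchar(5) inspector; the class and variable names are illustrative:

import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class CopyObjectSketch {
  public static void main(String[] args) {
    WritableHiveVarcharObjectInspector oi =
        new WritableHiveVarcharObjectInspector(TypeInfoFactory.getVarcharTypeInfo(5));
    // Text input: converted and bounded to varchar(5).
    Object fromText = oi.copyObject(new Text("truncate me"));
    // Writable already within the bound: copyObject still returns a fresh instance.
    HiveVarcharWritable matching = new HiveVarcharWritable();
    matching.set("abc", 5);
    Object copy = oi.copyObject(matching);
    System.out.println(copy != matching); // true: defensive copy
  }
}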
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class WritableHiveVarcharObjectInspector, method getPrimitiveWritableObject.
@Override
public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
  // Check the input object's length; if it doesn't match the type parameters,
  // then output a new writable with the correct params.
  if (o == null) {
    return null;
  }
  if (o instanceof Text) {
    String str = ((Text) o).toString();
    HiveVarcharWritable hcw = new HiveVarcharWritable();
    hcw.set(str, ((VarcharTypeInfo) typeInfo).getLength());
    return hcw;
  }
  HiveVarcharWritable writable = (HiveVarcharWritable) o;
  if (doesWritableMatchTypeParams(writable)) {
    return writable;
  }
  return getWritableWithParams(writable);
}
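The design difference from copyObject is worth spelling out: when the writable already satisfies the type parameters, this method hands back the same instance rather than a copy. A sketch under the same assumed varchar(5) setup:

import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class GetPrimitiveWritableSketch {
  public static void main(String[] args) {
    WritableHiveVarcharObjectInspector oi =
        new WritableHiveVarcharObjectInspector(TypeInfoFactory.getVarcharTypeInfo(5));
    HiveVarcharWritable in = new HiveVarcharWritable();
    in.set("ok", 5);
    // A matching writable comes back as-is; no defensive copy is made.
    HiveVarcharWritable out = oi.getPrimitiveWritableObject(in);
    System.out.println(out == in); // true
  }
}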
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class TypeInfoToSchema, method createAvroPrimitive.
private Schema createAvroPrimitive(TypeInfo typeInfo) {
  PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
  Schema schema;
  switch (primitiveTypeInfo.getPrimitiveCategory()) {
  case STRING:
    schema = Schema.create(Schema.Type.STRING);
    break;
  case CHAR:
    schema = AvroSerdeUtils.getSchemaFor("{"
        + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\","
        + "\"logicalType\":\"" + AvroSerDe.CHAR_TYPE_NAME + "\","
        + "\"maxLength\":" + ((CharTypeInfo) typeInfo).getLength() + "}");
    break;
  case VARCHAR:
    schema = AvroSerdeUtils.getSchemaFor("{"
        + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\","
        + "\"logicalType\":\"" + AvroSerDe.VARCHAR_TYPE_NAME + "\","
        + "\"maxLength\":" + ((VarcharTypeInfo) typeInfo).getLength() + "}");
    break;
  case BINARY:
    schema = Schema.create(Schema.Type.BYTES);
    break;
  case BYTE:
    schema = Schema.create(Schema.Type.INT);
    break;
  case SHORT:
    schema = Schema.create(Schema.Type.INT);
    break;
  case INT:
    schema = Schema.create(Schema.Type.INT);
    break;
  case LONG:
    schema = Schema.create(Schema.Type.LONG);
    break;
  case FLOAT:
    schema = Schema.create(Schema.Type.FLOAT);
    break;
  case DOUBLE:
    schema = Schema.create(Schema.Type.DOUBLE);
    break;
  case BOOLEAN:
    schema = Schema.create(Schema.Type.BOOLEAN);
    break;
  case DECIMAL:
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
    String precision = String.valueOf(decimalTypeInfo.precision());
    String scale = String.valueOf(decimalTypeInfo.scale());
    schema = AvroSerdeUtils.getSchemaFor("{"
        + "\"type\":\"bytes\","
        + "\"logicalType\":\"decimal\","
        + "\"precision\":" + precision + ","
        + "\"scale\":" + scale + "}");
    break;
  case DATE:
    schema = AvroSerdeUtils.getSchemaFor("{"
        + "\"type\":\"" + AvroSerDe.AVRO_INT_TYPE_NAME + "\","
        + "\"logicalType\":\"" + AvroSerDe.DATE_TYPE_NAME + "\"}");
    break;
  case TIMESTAMP:
    schema = AvroSerdeUtils.getSchemaFor("{"
        + "\"type\":\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\","
        + "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"}");
    break;
  case VOID:
    schema = Schema.create(Schema.Type.NULL);
    break;
  default:
    throw new UnsupportedOperationException(typeInfo + " is not supported.");
  }
  return schema;
}
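For the VARCHAR branch specifically, the assembled JSON can be checked with plain Avro. The literal schema string below is reconstructed by hand from the branch above (taking AvroSerDe.AVRO_STRING_TYPE_NAME as "string" and AvroSerDe.VARCHAR_TYPE_NAME as "varchar"), so treat it as an illustration rather than generated output:

import org.apache.avro.Schema;

public class VarcharAvroSchemaSketch {
  public static void main(String[] args) {
    // The JSON the VARCHAR branch would assemble for a varchar(10) column.
    String json = "{\"type\":\"string\",\"logicalType\":\"varchar\",\"maxLength\":10}";
    Schema schema = new Schema.Parser().parse(json);
    System.out.println(schema.getType());                  // STRING
    System.out.println(schema.getProp("logicalType"));     // varchar
    System.out.println(schema.getObjectProp("maxLength")); // 10
  }
}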
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class TypeQualifiers, method fromTypeInfo.
public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
  TypeQualifiers result = null;
  if (pti instanceof VarcharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((VarcharTypeInfo) pti).getLength());
  } else if (pti instanceof CharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((CharTypeInfo) pti).getLength());
  } else if (pti instanceof DecimalTypeInfo) {
    result = new TypeQualifiers();
    result.setPrecision(((DecimalTypeInfo) pti).precision());
    result.setScale(((DecimalTypeInfo) pti).scale());
  }
  return result;
}
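A short sketch of how fromTypeInfo maps parameterized types into qualifiers. The getter names are assumed counterparts of the setters shown above, and the TypeQualifiers package (org.apache.hive.service.cli) is an assumption:

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hive.service.cli.TypeQualifiers;

public class TypeQualifiersSketch {
  public static void main(String[] args) {
    // varchar(50) yields only a character maximum length.
    TypeQualifiers vq = TypeQualifiers.fromTypeInfo(TypeInfoFactory.getVarcharTypeInfo(50));
    System.out.println(vq.getCharacterMaximumLength()); // 50
    // decimal(12,2) yields precision and scale instead.
    TypeQualifiers dq = TypeQualifiers.fromTypeInfo(TypeInfoFactory.getDecimalTypeInfo(12, 2));
    System.out.println(dq.getPrecision() + "," + dq.getScale()); // 12,2
  }
}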