Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class TypeInfoFactory, method createPrimitiveTypeInfo.
/**
 * Create a PrimitiveTypeInfo instance for the given full name of the type. The returned
 * type is one of the parameterized type infos, such as CharTypeInfo, VarcharTypeInfo,
 * or DecimalTypeInfo.
 *
 * @param fullName fully qualified name of the type, e.g. "char(10)" or "decimal(10,2)"
 * @return a parameterized PrimitiveTypeInfo instance, or null if the name carries no type parameters
 */
private static PrimitiveTypeInfo createPrimitiveTypeInfo(String fullName) {
  String baseName = TypeInfoUtils.getBaseName(fullName);
  PrimitiveTypeEntry typeEntry =
      PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(baseName);
  if (null == typeEntry) {
    throw new RuntimeException("Unknown type " + fullName);
  }
  TypeInfoUtils.PrimitiveParts parts = TypeInfoUtils.parsePrimitiveParts(fullName);
  if (parts.typeParams == null || parts.typeParams.length < 1) {
    return null;
  }
  switch (typeEntry.primitiveCategory) {
    case CHAR:
      if (parts.typeParams.length != 1) {
        return null;
      }
      return new CharTypeInfo(Integer.valueOf(parts.typeParams[0]));
    case VARCHAR:
      if (parts.typeParams.length != 1) {
        return null;
      }
      return new VarcharTypeInfo(Integer.valueOf(parts.typeParams[0]));
    case DECIMAL:
      if (parts.typeParams.length != 2) {
        return null;
      }
      return new DecimalTypeInfo(Integer.valueOf(parts.typeParams[0]),
          Integer.valueOf(parts.typeParams[1]));
    default:
      return null;
  }
}
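This method is private; callers typically reach it through the public TypeInfoFactory.getPrimitiveTypeInfo(String) lookup. A minimal sketch of the round trip, assuming that public entry point and standard Hive type names:

  // Parameterized type names resolve to parameterized TypeInfo instances.
  CharTypeInfo charInfo =
      (CharTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("char(10)");
  assert charInfo.getLength() == 10;

  DecimalTypeInfo decInfo =
      (DecimalTypeInfo) TypeInfoFactory.getPrimitiveTypeInfo("decimal(10,2)");
  assert decInfo.precision() == 10 && decInfo.scale() == 2;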
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class SerdeRandomRowSource, method randomObject.
public Object randomObject(int column) {
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
    case BOOLEAN:
      // nextInt(2) yields 0 or 1; nextInt(1) would always return 0 and never produce true.
      return Boolean.valueOf(r.nextInt(2) == 1);
    case BYTE:
      return Byte.valueOf((byte) r.nextInt());
    case SHORT:
      return Short.valueOf((short) r.nextInt());
    case INT:
      return Integer.valueOf(r.nextInt());
    case LONG:
      return Long.valueOf(r.nextLong());
    case DATE:
      return RandomTypeUtil.getRandDate(r);
    case FLOAT:
      return Float.valueOf(r.nextFloat() * 10 - 5);
    case DOUBLE:
      return Double.valueOf(r.nextDouble() * 10 - 5);
    case STRING:
      return RandomTypeUtil.getRandString(r);
    case CHAR:
      return getRandHiveChar(r, (CharTypeInfo) primitiveTypeInfo);
    case VARCHAR:
      return getRandHiveVarchar(r, (VarcharTypeInfo) primitiveTypeInfo);
    case BINARY:
      return getRandBinary(r, 1 + r.nextInt(100));
    case TIMESTAMP:
      return RandomTypeUtil.getRandTimestamp(r);
    case INTERVAL_YEAR_MONTH:
      return getRandIntervalYearMonth(r);
    case INTERVAL_DAY_TIME:
      return getRandIntervalDayTime(r);
    case DECIMAL:
      return getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
    default:
      throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
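The CHAR branch delegates to a helper that bounds the random value by the declared char(n) length. A hedged sketch of what such a helper can look like (the exact body in Hive may differ; the HiveChar(String, int) constructor enforces the maximum length):

  public static HiveChar getRandHiveChar(Random r, CharTypeInfo charTypeInfo) {
    // Pick a maximum length no greater than the declared char(n) length, then
    // wrap a random string in a HiveChar, which truncates to that maximum.
    int maxLength = 1 + r.nextInt(charTypeInfo.getLength());
    return new HiveChar(RandomTypeUtil.getRandString(r), maxLength);
  }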
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class SerdeRandomRowSource, method getWritableObject.
public Object getWritableObject(int column, Object object) {
  ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
    case BOOLEAN:
      return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
    case BYTE:
      return ((WritableByteObjectInspector) objectInspector).create((byte) object);
    case SHORT:
      return ((WritableShortObjectInspector) objectInspector).create((short) object);
    case INT:
      return ((WritableIntObjectInspector) objectInspector).create((int) object);
    case LONG:
      return ((WritableLongObjectInspector) objectInspector).create((long) object);
    case DATE:
      return ((WritableDateObjectInspector) objectInspector).create((Date) object);
    case FLOAT:
      return ((WritableFloatObjectInspector) objectInspector).create((float) object);
    case DOUBLE:
      return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
    case STRING:
      return ((WritableStringObjectInspector) objectInspector).create((String) object);
    case CHAR: {
      WritableHiveCharObjectInspector writableCharObjectInspector =
          new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
      return writableCharObjectInspector.create((HiveChar) object);
    }
    case VARCHAR: {
      WritableHiveVarcharObjectInspector writableVarcharObjectInspector =
          new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
      return writableVarcharObjectInspector.create((HiveVarchar) object);
    }
    case BINARY:
      return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
    case TIMESTAMP:
      return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
    case INTERVAL_YEAR_MONTH:
      return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
    case INTERVAL_DAY_TIME:
      return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
    case DECIMAL: {
      WritableHiveDecimalObjectInspector writableDecimalObjectInspector =
          new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
      return writableDecimalObjectInspector.create((HiveDecimal) object);
    }
    default:
      throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
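Taken together, the two methods generate a random standard-Java value and then box it in the matching Hadoop Writable via the column's ObjectInspector. An illustrative pairing, where source and col are a hypothetical configured SerdeRandomRowSource and a valid column index:

  // Generate a random plain-Java value for the column's declared type...
  Object javaValue = source.randomObject(col);
  // ...then wrap it in the corresponding Writable for serde round-trip tests.
  Object writable = source.getWritableObject(col, javaValue);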
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class TypeInfoToSchema, method createAvroPrimitive.
private Schema createAvroPrimitive(TypeInfo typeInfo) {
  PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
  Schema schema;
  switch (primitiveTypeInfo.getPrimitiveCategory()) {
    case STRING:
      schema = Schema.create(Schema.Type.STRING);
      break;
    case CHAR:
      schema = AvroSerdeUtils.getSchemaFor("{"
          + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\","
          + "\"logicalType\":\"" + AvroSerDe.CHAR_TYPE_NAME + "\","
          + "\"maxLength\":" + ((CharTypeInfo) typeInfo).getLength() + "}");
      break;
    case VARCHAR:
      schema = AvroSerdeUtils.getSchemaFor("{"
          + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\","
          + "\"logicalType\":\"" + AvroSerDe.VARCHAR_TYPE_NAME + "\","
          + "\"maxLength\":" + ((VarcharTypeInfo) typeInfo).getLength() + "}");
      break;
    case BINARY:
      schema = Schema.create(Schema.Type.BYTES);
      break;
    case BYTE:
    case SHORT:
    case INT:
      schema = Schema.create(Schema.Type.INT);
      break;
    case LONG:
      schema = Schema.create(Schema.Type.LONG);
      break;
    case FLOAT:
      schema = Schema.create(Schema.Type.FLOAT);
      break;
    case DOUBLE:
      schema = Schema.create(Schema.Type.DOUBLE);
      break;
    case BOOLEAN:
      schema = Schema.create(Schema.Type.BOOLEAN);
      break;
    case DECIMAL:
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
      String precision = String.valueOf(decimalTypeInfo.precision());
      String scale = String.valueOf(decimalTypeInfo.scale());
      schema = AvroSerdeUtils.getSchemaFor("{"
          + "\"type\":\"bytes\","
          + "\"logicalType\":\"decimal\","
          + "\"precision\":" + precision + ","
          + "\"scale\":" + scale + "}");
      break;
    case DATE:
      schema = AvroSerdeUtils.getSchemaFor("{"
          + "\"type\":\"" + AvroSerDe.AVRO_INT_TYPE_NAME + "\","
          + "\"logicalType\":\"" + AvroSerDe.DATE_TYPE_NAME + "\"}");
      break;
    case TIMESTAMP:
      schema = AvroSerdeUtils.getSchemaFor("{"
          + "\"type\":\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\","
          + "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"}");
      break;
    case VOID:
      schema = Schema.create(Schema.Type.NULL);
      break;
    default:
      throw new UnsupportedOperationException(typeInfo + " is not supported.");
  }
  return schema;
}
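For a Hive char(10) column, and assuming AvroSerDe.AVRO_STRING_TYPE_NAME resolves to "string" and AvroSerDe.CHAR_TYPE_NAME to "char", the CHAR branch builds an Avro schema like:

  {"type":"string","logicalType":"char","maxLength":10}

The maxLength attribute carries the CharTypeInfo length through the Avro schema so the declared width survives the Hive-to-Avro conversion.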
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class TypeQualifiers, method fromTypeInfo.
public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
  TypeQualifiers result = null;
  if (pti instanceof VarcharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((VarcharTypeInfo) pti).getLength());
  } else if (pti instanceof CharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((CharTypeInfo) pti).getLength());
  } else if (pti instanceof DecimalTypeInfo) {
    result = new TypeQualifiers();
    result.setPrecision(((DecimalTypeInfo) pti).precision());
    result.setScale(((DecimalTypeInfo) pti).scale());
  }
  return result;
}
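Note that fromTypeInfo returns null for types that carry no qualifiers. A brief illustration, assuming the CharTypeInfo(int) constructor, the TypeInfoFactory.intTypeInfo constant, and a getter matching the setCharacterMaximumLength setter shown above:

  // char(20) carries a maximum length; plain int carries no qualifiers.
  TypeQualifiers charQualifiers = TypeQualifiers.fromTypeInfo(new CharTypeInfo(20));
  // charQualifiers.getCharacterMaximumLength() == 20
  TypeQualifiers none = TypeQualifiers.fromTypeInfo(TypeInfoFactory.intTypeInfo);
  // none == null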