Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class HBaseUtils, method deserializeDbNameTableNameFromPartitionKey.
private static List<String> deserializeDbNameTableNameFromPartitionKey(byte[] key, Configuration conf) {
  // The first two fields of every partition key are the database name and the
  // table name, both serialized as strings.
  StringBuffer names = new StringBuffer();
  names.append("dbName,tableName,");
  StringBuffer types = new StringBuffer();
  types.append("string,string,");
  BinarySortableSerDe serDe = new BinarySortableSerDe();
  Properties props = new Properties();
  props.setProperty(serdeConstants.LIST_COLUMNS, names.toString());
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types.toString());
  try {
    serDe.initialize(conf, props);
    // Deserialize the key and keep only the leading (dbName, tableName) pair.
    List deserializedKeys = ((List) serDe.deserialize(new BytesWritable(key))).subList(0, 2);
    List<String> keys = new ArrayList<>();
    for (int i = 0; i < deserializedKeys.size(); i++) {
      Object deserializedKey = deserializedKeys.get(i);
      if (deserializedKey == null) {
        throw new RuntimeException("Can't have a null dbname or tablename");
      } else {
        // Convert the writable string produced by the serde into a java String.
        TypeInfo inputType = TypeInfoUtils.getTypeInfoFromTypeString("string");
        ObjectInspector inputOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(inputType);
        Converter converter = ObjectInspectorConverters.getConverter(inputOI,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        keys.add((String) converter.convert(deserializedKey));
      }
    }
    return keys;
  } catch (SerDeException e) {
    throw new RuntimeException("Error when deserializing key", e);
  }
}
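The conversion inside the loop is a general pattern worth isolating. Below is a minimal, self-contained sketch (the class name and test value are ours, not Hive's) of how ObjectInspectorConverters turns a writable Text, the representation a serde hands back, into a plain java String:

  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
  import org.apache.hadoop.io.Text;

  public class ConverterSketch {
    public static void main(String[] args) {
      // Inspector for the writable representation of a Hive string.
      TypeInfo stringType = TypeInfoUtils.getTypeInfoFromTypeString("string");
      ObjectInspector writableOI =
          TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(stringType);
      // Converter from writable Text to java String.
      Converter toJavaString = ObjectInspectorConverters.getConverter(
          writableOI, PrimitiveObjectInspectorFactory.javaStringObjectInspector);
      System.out.println(toJavaString.convert(new Text("default"))); // prints: default
    }
  }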
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class HBaseUtils, method buildPartitionKey.
static byte[] buildPartitionKey(String dbName, String tableName, List<String> partTypes,
    List<String> partVals, boolean endPrefix) {
  // Convert each partition value from its string form to the java object
  // matching the declared partition column type before serializing the key.
  Object[] components = new Object[partVals.size()];
  for (int i = 0; i < partVals.size(); i++) {
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(partTypes.get(i));
    ObjectInspector outputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType);
    Converter converter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
    components[i] = converter.convert(partVals.get(i));
  }
  return buildSerializedPartitionKey(dbName, tableName, partTypes, components, endPrefix);
}
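For a single value, the per-element conversion above looks like this (a sketch; the int column type and the value "12" are illustrative):

  // Convert the partition value "12" for an int-typed partition column.
  TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString("int");
  ObjectInspector outputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType);
  Converter converter = ObjectInspectorConverters.getConverter(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
  Object component = converter.convert("12"); // a java.lang.Integer with value 12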
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class HiveParquetSchemaTestUtils, method testConversion.
public static void testConversion(final String columnNamesStr, final String columnsTypeStr,
    final String actualSchema) throws Exception {
  final List<String> columnNames = createHiveColumnsFrom(columnNamesStr);
  final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr);
  final MessageType messageTypeFound = HiveSchemaConverter.convert(columnNames, columnTypes);
  final MessageType expectedMT = MessageTypeParser.parseMessageType(actualSchema);
  assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + actualSchema,
      expectedMT, messageTypeFound);
  // The original types must be checked manually because PrimitiveType.equals ignores them.
  List<Type> expectedFields = expectedMT.getFields();
  List<Type> actualFields = messageTypeFound.getFields();
  for (int i = 0, n = expectedFields.size(); i < n; ++i) {
    OriginalType exp = expectedFields.get(i).getOriginalType();
    OriginalType act = actualFields.get(i).getOriginalType();
    assertEquals("Original types of the field do not match", exp, act);
  }
}
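A hedged example of how the helper might be invoked; the column names, types, and expected Parquet schema string below are ours. Hive's converter maps int to an optional int32, maps string to an optional binary annotated UTF8, and names the resulting message hive_schema:

  testConversion(
      "id,name",
      "int,string",
      "message hive_schema {\n"
          + "  optional int32 id;\n"
          + "  optional binary name (UTF8);\n"
          + "}\n");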
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class GenericUDTFGetSplits, method convertTypeString.
private TypeDesc convertTypeString(String typeString) throws HiveException {
  TypeDesc typeDesc;
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
  Preconditions.checkState(typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE,
      "Unsupported non-primitive type " + typeString);
  // Map each Hive primitive category onto the corresponding TypeDesc,
  // preserving length for char/varchar and precision/scale for decimal.
  switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
    case BOOLEAN:
      typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN);
      break;
    case BYTE:
      typeDesc = new TypeDesc(TypeDesc.Type.TINYINT);
      break;
    case SHORT:
      typeDesc = new TypeDesc(TypeDesc.Type.SMALLINT);
      break;
    case INT:
      typeDesc = new TypeDesc(TypeDesc.Type.INT);
      break;
    case LONG:
      typeDesc = new TypeDesc(TypeDesc.Type.BIGINT);
      break;
    case FLOAT:
      typeDesc = new TypeDesc(TypeDesc.Type.FLOAT);
      break;
    case DOUBLE:
      typeDesc = new TypeDesc(TypeDesc.Type.DOUBLE);
      break;
    case STRING:
      typeDesc = new TypeDesc(TypeDesc.Type.STRING);
      break;
    case CHAR:
      CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength());
      break;
    case VARCHAR:
      VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength());
      break;
    case DATE:
      typeDesc = new TypeDesc(TypeDesc.Type.DATE);
      break;
    case TIMESTAMP:
      typeDesc = new TypeDesc(TypeDesc.Type.TIMESTAMP);
      break;
    case BINARY:
      typeDesc = new TypeDesc(TypeDesc.Type.BINARY);
      break;
    case DECIMAL:
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL,
          decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
      break;
    default:
      throw new HiveException("Unsupported type " + typeString);
  }
  return typeDesc;
}
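The parameterized cases are the reason the method downcasts typeInfo. A quick sketch of what getTypeInfoFromTypeString yields for a decimal type string (the precision and scale values are illustrative):

  // "decimal(10,2)" parses to a DecimalTypeInfo carrying precision and scale.
  DecimalTypeInfo decimalTypeInfo =
      (DecimalTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("decimal(10,2)");
  System.out.println(decimalTypeInfo.getPrecision()); // 10
  System.out.println(decimalTypeInfo.getScale());     // 2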
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project druid by druid-io.
The class OrcHadoopInputRowParser, method getTablePropertiesFromStructTypeInfo.
public static Properties getTablePropertiesFromStructTypeInfo(StructTypeInfo structTypeInfo) {
  Properties table = new Properties();
  // "columns" and "columns.types" are the standard serde properties Hive serdes expect.
  table.setProperty("columns", StringUtils.join(structTypeInfo.getAllStructFieldNames(), ","));
  table.setProperty("columns.types", StringUtils.join(
      Lists.transform(structTypeInfo.getAllStructFieldTypeInfos(), new Function<TypeInfo, String>() {
        @Nullable
        @Override
        public String apply(@Nullable TypeInfo typeInfo) {
          return typeInfo.getTypeName();
        }
      }), ","));
  return table;
}
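A minimal usage sketch (the struct type string is ours): parsing a type string with TypeInfoUtils yields the StructTypeInfo this method expects, and the resulting Properties carry the two serde keys:

  StructTypeInfo structTypeInfo = (StructTypeInfo)
      TypeInfoUtils.getTypeInfoFromTypeString("struct<ts:bigint,host:string>");
  Properties props = OrcHadoopInputRowParser.getTablePropertiesFromStructTypeInfo(structTypeInfo);
  System.out.println(props.getProperty("columns"));       // ts,host
  System.out.println(props.getProperty("columns.types")); // bigint,string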