Example 11 with TypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

In the class HBaseUtils, the method desierliazeDbNameTableNameFromPartitionKey (the misspelled name is taken verbatim from the Hive source):

private static List<String> desierliazeDbNameTableNameFromPartitionKey(byte[] key, Configuration conf) {
    // The partition key is BinarySortable-encoded; its leading fields are the
    // database name and the table name, both declared as string columns here.
    StringBuffer names = new StringBuffer();
    names.append("dbName,tableName,");
    StringBuffer types = new StringBuffer();
    types.append("string,string,");
    BinarySortableSerDe serDe = new BinarySortableSerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, names.toString());
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types.toString());
    try {
        serDe.initialize(conf, props);
        // Decode the key and keep only the (dbName, tableName) pair.
        List deserializedkeys = ((List) serDe.deserialize(new BytesWritable(key))).subList(0, 2);
        List<String> keys = new ArrayList<>();
        for (int i = 0; i < deserializedkeys.size(); i++) {
            Object deserializedKey = deserializedkeys.get(i);
            if (deserializedKey == null) {
                throw new RuntimeException("Can't have a null dbname or tablename");
            } else {
                // The SerDe hands back writables (e.g. Text); convert each one
                // to a plain java.lang.String via an ObjectInspector converter.
                TypeInfo inputType = TypeInfoUtils.getTypeInfoFromTypeString("string");
                ObjectInspector inputOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(inputType);
                Converter converter = ObjectInspectorConverters.getConverter(inputOI, PrimitiveObjectInspectorFactory.javaStringObjectInspector);
                keys.add((String) converter.convert(deserializedKey));
            }
        }
        return keys;
    } catch (SerDeException e) {
        throw new RuntimeException("Error when deserialize key", e);
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) BinarySortableSerDe(org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe) ArrayList(java.util.ArrayList) BytesWritable(org.apache.hadoop.io.BytesWritable) ByteString(com.google.protobuf.ByteString) Properties(java.util.Properties) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) List(java.util.List) ArrayList(java.util.ArrayList) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
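
A minimal standalone sketch of the TypeInfo -> ObjectInspector -> Converter pattern used above: it converts a Hadoop Text writable (what BinarySortableSerDe produces for string columns) into a plain java.lang.String. The class name WritableToStringDemo is made up for illustration.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;

public class WritableToStringDemo {
    public static void main(String[] args) {
        // Look up the TypeInfo for "string" and derive its writable-backed inspector.
        TypeInfo stringType = TypeInfoUtils.getTypeInfoFromTypeString("string");
        ObjectInspector writableOI =
                TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(stringType);
        // Build a converter from the writable representation to java.lang.String.
        Converter toJavaString = ObjectInspectorConverters.getConverter(
                writableOI, PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // BinarySortableSerDe yields Text for string columns; the converter unwraps it.
        String value = (String) toJavaString.convert(new Text("default"));
        System.out.println(value); // prints "default"
    }
}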

Example 12 with TypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

In the class HBaseUtils, the method buildPartitionKey:

static byte[] buildPartitionKey(String dbName, String tableName, List<String> partTypes, List<String> partVals, boolean endPrefix) {
    Object[] components = new Object[partVals.size()];
    for (int i = 0; i < partVals.size(); i++) {
        // Partition values arrive as strings; convert each to the Java object
        // matching its declared column type before serializing the key.
        TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(partTypes.get(i));
        ObjectInspector outputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType);
        Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
        components[i] = converter.convert(partVals.get(i));
    }
    return buildSerializedPartitionKey(dbName, tableName, partTypes, components, endPrefix);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
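
A hedged sketch of just the per-value conversion step from buildPartitionKey, since buildSerializedPartitionKey is internal to HBaseUtils and not shown here. PartitionValueConversionDemo and the sample values are made up for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PartitionValueConversionDemo {
    public static void main(String[] args) {
        // Declared partition column types and the raw string values to convert.
        List<String> partTypes = Arrays.asList("int", "date");
        List<String> partVals = Arrays.asList("2021", "2021-01-15");
        Object[] components = new Object[partVals.size()];
        for (int i = 0; i < partVals.size(); i++) {
            TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(partTypes.get(i));
            ObjectInspector outputOI =
                    TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(expectedType);
            Converter converter = ObjectInspectorConverters.getConverter(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
            components[i] = converter.convert(partVals.get(i));
        }
        // components[0] is an Integer; components[1] is a date object whose exact
        // class depends on the Hive version (java.sql.Date on Hive 2.x).
        System.out.println(Arrays.toString(components));
    }
}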

Example 13 with TypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

In the class HiveParquetSchemaTestUtils, the method testConversion:

public static void testConversion(final String columnNamesStr, final String columnsTypeStr, final String actualSchema) throws Exception {
    final List<String> columnNames = createHiveColumnsFrom(columnNamesStr);
    final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr);
    final MessageType messageTypeFound = HiveSchemaConverter.convert(columnNames, columnTypes);
    final MessageType expectedMT = MessageTypeParser.parseMessageType(actualSchema);
    assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + actualSchema, expectedMT, messageTypeFound);
    // Check the original types manually, since PrimitiveType.equals ignores them
    List<Type> expectedFields = expectedMT.getFields();
    List<Type> actualFields = messageTypeFound.getFields();
    for (int i = 0, n = expectedFields.size(); i < n; ++i) {
        OriginalType exp = expectedFields.get(i).getOriginalType();
        OriginalType act = actualFields.get(i).getOriginalType();
        assertEquals("Original types of the field do not match", exp, act);
    }
}
Also used : OriginalType(org.apache.parquet.schema.OriginalType) MessageType(org.apache.parquet.schema.MessageType) Type(org.apache.parquet.schema.Type) OriginalType(org.apache.parquet.schema.OriginalType) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType)
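
The createHiveColumnsFrom and createHiveTypeInfoFrom helpers are not shown in this excerpt. Assuming they take comma-separated name and type strings, the sketch below shows how Hive's own parser can produce equivalent inputs; SchemaInputParsingDemo is a hypothetical class name.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class SchemaInputParsingDemo {
    public static void main(String[] args) {
        // Column names: a plain comma split is enough.
        List<String> columnNames = Arrays.asList("id,name,attrs".split(","));
        // Column types: TypeInfoUtils understands nested types, so
        // "map<string,int>" is one entry even though it contains a comma.
        List<TypeInfo> columnTypes =
                TypeInfoUtils.getTypeInfosFromTypeString("int,string,map<string,int>");
        System.out.println(columnNames); // [id, name, attrs]
        System.out.println(columnTypes); // [int, string, map<string,int>]
    }
}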

Example 14 with TypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

In the class GenericUDTFGetSplits, the method convertTypeString:

private TypeDesc convertTypeString(String typeString) throws HiveException {
    TypeDesc typeDesc;
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
    // Only primitive Hive types have an LLAP TypeDesc equivalent.
    Preconditions.checkState(typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE, "Unsupported non-primitive type " + typeString);
    switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BOOLEAN:
            typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN);
            break;
        case BYTE:
            typeDesc = new TypeDesc(TypeDesc.Type.TINYINT);
            break;
        case SHORT:
            typeDesc = new TypeDesc(TypeDesc.Type.SMALLINT);
            break;
        case INT:
            typeDesc = new TypeDesc(TypeDesc.Type.INT);
            break;
        case LONG:
            typeDesc = new TypeDesc(TypeDesc.Type.BIGINT);
            break;
        case FLOAT:
            typeDesc = new TypeDesc(TypeDesc.Type.FLOAT);
            break;
        case DOUBLE:
            typeDesc = new TypeDesc(TypeDesc.Type.DOUBLE);
            break;
        case STRING:
            typeDesc = new TypeDesc(TypeDesc.Type.STRING);
            break;
        case CHAR:
            CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength());
            break;
        case VARCHAR:
            VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength());
            break;
        case DATE:
            typeDesc = new TypeDesc(TypeDesc.Type.DATE);
            break;
        case TIMESTAMP:
            typeDesc = new TypeDesc(TypeDesc.Type.TIMESTAMP);
            break;
        case BINARY:
            typeDesc = new TypeDesc(TypeDesc.Type.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
            break;
        default:
            throw new HiveException("Unsupported type " + typeString);
    }
    return typeDesc;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) TypeDesc(org.apache.hadoop.hive.llap.TypeDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
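
A standalone sketch of the same parse-then-dispatch pattern. TypeDesc lives in Hive's LLAP module, so this version only prints the PrimitiveCategory and the char/varchar/decimal parameters instead of building TypeDesc objects; PrimitiveCategoryDemo is a made-up class name.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class PrimitiveCategoryDemo {
    public static void main(String[] args) {
        for (String s : new String[] { "int", "varchar(64)", "decimal(10,2)" }) {
            TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(s);
            // Mirror the precondition above: only primitives are supported.
            if (t.getCategory() != ObjectInspector.Category.PRIMITIVE) {
                throw new IllegalStateException("Unsupported non-primitive type " + s);
            }
            switch (((PrimitiveTypeInfo) t).getPrimitiveCategory()) {
                case VARCHAR:
                    // Parameterized types carry their parameters on the TypeInfo.
                    System.out.println("varchar, length=" + ((VarcharTypeInfo) t).getLength());
                    break;
                case DECIMAL:
                    DecimalTypeInfo d = (DecimalTypeInfo) t;
                    System.out.println("decimal(" + d.getPrecision() + "," + d.getScale() + ")");
                    break;
                default:
                    System.out.println(((PrimitiveTypeInfo) t).getPrimitiveCategory());
            }
        }
    }
}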

Example 15 with TypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project druid by druid-io.

In the class OrcHadoopInputRowParser, the method getTablePropertiesFromStructTypeInfo:

public static Properties getTablePropertiesFromStructTypeInfo(StructTypeInfo structTypeInfo) {
    Properties table = new Properties();
    // "columns" carries the comma-joined field names; "columns.types" the
    // matching comma-joined Hive type names, as the ORC SerDe expects them.
    table.setProperty("columns", StringUtils.join(structTypeInfo.getAllStructFieldNames(), ","));
    table.setProperty("columns.types", StringUtils.join(Lists.transform(structTypeInfo.getAllStructFieldTypeInfos(), new Function<TypeInfo, String>() {

        @Nullable
        @Override
        public String apply(@Nullable TypeInfo typeInfo) {
            return typeInfo.getTypeName();
        }
    }), ","));
    return table;
}
Also used : Properties(java.util.Properties) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) Nullable(javax.annotation.Nullable)
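
A hedged usage sketch: build a StructTypeInfo by hand with TypeInfoFactory and derive the same "columns" / "columns.types" properties. The import package io.druid.data.input.orc is an assumption based on the druid-io era of the project (later Druid releases moved to org.apache.druid), and StructTablePropertiesDemo is a made-up class name.

import java.util.Arrays;
import java.util.Properties;
import io.druid.data.input.orc.OrcHadoopInputRowParser; // assumed package; may differ by Druid version
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructTablePropertiesDemo {
    public static void main(String[] args) {
        // getStructTypeInfo returns a TypeInfo; narrow it for the call below.
        StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(
                Arrays.asList("ts", "dim", "metric"),
                TypeInfoUtils.getTypeInfosFromTypeString("timestamp,string,bigint"));
        Properties table =
                OrcHadoopInputRowParser.getTablePropertiesFromStructTypeInfo(structTypeInfo);
        System.out.println(table.getProperty("columns"));       // ts,dim,metric
        System.out.println(table.getProperty("columns.types")); // timestamp,string,bigint
    }
}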

Aggregations

TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 516
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 287
ArrayList (java.util.ArrayList): 202
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 193
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 167
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 151
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 148
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 138
Test (org.junit.Test): 135
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 107
UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo): 78
HashMap (java.util.HashMap): 74
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 71
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 69
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 67
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 63
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 61
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 59
List (java.util.List): 54
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 53