Search in sources :

Example 86 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

From the class DeserializeRead, the method allocateCurrentWritable:

/*
 * Ensures that the reusable "current" writable objects needed to read the given type
 * have been allocated.
 *
 * Simple primitives (long, double, int, ...) are read straight into primitive current*
 * members and need no allocation here; the object-backed primitive types (Date,
 * Timestamp, the intervals and Decimal) are read into a lazily created writable that
 * this method allocates on first use.  Complex types are handled by recursing into
 * their child types.
 */
private void allocateCurrentWritable(TypeInfo typeInfo) {
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            allocateCurrentPrimitiveWritable((PrimitiveTypeInfo) typeInfo);
            break;
        case LIST:
            allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
            break;
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            allocateCurrentWritable(mapTypeInfo.getMapKeyTypeInfo());
            allocateCurrentWritable(mapTypeInfo.getMapValueTypeInfo());
            break;
        case STRUCT:
            for (TypeInfo structFieldTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
                allocateCurrentWritable(structFieldTypeInfo);
            }
            break;
        case UNION:
            for (TypeInfo unionFieldTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
                allocateCurrentWritable(unionFieldTypeInfo);
            }
            break;
        default:
            throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
    }
}

/*
 * Lazily allocates the writable for one primitive type.  Only the primitive categories
 * that are read through an object (rather than a plain primitive current* member) need
 * anything created here; everything else intentionally falls through to the no-op
 * default.
 */
private void allocateCurrentPrimitiveWritable(PrimitiveTypeInfo primitiveTypeInfo) {
    switch (primitiveTypeInfo.getPrimitiveCategory()) {
        case DATE:
            if (currentDateWritable == null) {
                currentDateWritable = new DateWritableV2();
            }
            break;
        case TIMESTAMP:
            if (currentTimestampWritable == null) {
                currentTimestampWritable = new TimestampWritableV2();
            }
            break;
        case INTERVAL_YEAR_MONTH:
            if (currentHiveIntervalYearMonthWritable == null) {
                currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
            }
            break;
        case INTERVAL_DAY_TIME:
            if (currentHiveIntervalDayTimeWritable == null) {
                currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
            }
            break;
        case DECIMAL:
            if (currentHiveDecimalWritable == null) {
                currentHiveDecimalWritable = new HiveDecimalWritable();
            }
            break;
        default:
            // All other primitives are read into plain primitive current* members.
            break;
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)

Example 87 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

From the class TeradataBinarySerde, the method serializeField:

/**
 * Writes one field value to {@code out} in the Teradata binary record format.
 *
 * Fixed-width primitives (BYTE/SHORT/INT/LONG/DOUBLE) are written directly; a null
 * value is encoded as zero.  Variable-width and object-backed types (VARCHAR, CHAR,
 * TIMESTAMP, DATE, DECIMAL, BINARY) are delegated to the corresponding writer on
 * {@code out}, passing the (possibly null) writable through — presumably the writer
 * encodes null there; confirm against TeradataBinaryDataOutputStream.
 *
 * @param objectForField the field value to serialize; may be null
 * @param oi             the ObjectInspector used to extract the value
 * @param ti             the column's TypeInfo, used for length/precision checks on
 *                       CHAR/VARCHAR/DECIMAL
 * @throws IOException    if writing to the underlying stream fails
 * @throws SerDeException if the category or primitive category is unsupported
 */
private void serializeField(Object objectForField, ObjectInspector oi, TypeInfo ti) throws IOException, SerDeException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
            switch(poi.getPrimitiveCategory()) {
                // Teradata Type: BYTEINT
                case BYTE:
                    ByteObjectInspector boi = (ByteObjectInspector) poi;
                    // Null is encoded as 0.
                    byte b = 0;
                    if (objectForField != null) {
                        b = boi.get(objectForField);
                    }
                    out.write(b);
                    return;
                // Teradata Type: SMALLINT
                case SHORT:
                    ShortObjectInspector spoi = (ShortObjectInspector) poi;
                    short s = 0;
                    if (objectForField != null) {
                        s = spoi.get(objectForField);
                    }
                    out.writeShort(s);
                    return;
                // Teradata Type: INT
                case INT:
                    IntObjectInspector ioi = (IntObjectInspector) poi;
                    int i = 0;
                    if (objectForField != null) {
                        i = ioi.get(objectForField);
                    }
                    out.writeInt(i);
                    return;
                // Teradata Type: BIGINT
                case LONG:
                    LongObjectInspector loi = (LongObjectInspector) poi;
                    long l = 0;
                    if (objectForField != null) {
                        l = loi.get(objectForField);
                    }
                    out.writeLong(l);
                    return;
                // Teradata Type: FLOAT
                case DOUBLE:
                    DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                    double d = 0;
                    if (objectForField != null) {
                        d = doi.get(objectForField);
                    }
                    out.writeDouble(d);
                    return;
                // Teradata Type: VARCHAR
                case VARCHAR:
                    HiveVarcharObjectInspector hvoi = (HiveVarcharObjectInspector) poi;
                    HiveVarcharWritable hv = hvoi.getPrimitiveWritableObject(objectForField);
                    // assert the length of varchar record fits into the table definition
                    if (hv != null) {
                        assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength();
                    }
                    out.writeVarChar(hv);
                    return;
                // Teradata Type: TIMESTAMP
                case TIMESTAMP:
                    TimestampObjectInspector tsoi = (TimestampObjectInspector) poi;
                    TimestampWritableV2 ts = tsoi.getPrimitiveWritableObject(objectForField);
                    // Byte width depends on the configured timestamp precision.
                    out.writeTimestamp(ts, getTimeStampByteNum(timestampPrecision));
                    return;
                // Teradata Type: DATE
                case DATE:
                    DateObjectInspector dtoi = (DateObjectInspector) poi;
                    DateWritableV2 dw = dtoi.getPrimitiveWritableObject(objectForField);
                    out.writeDate(dw);
                    return;
                // Teradata Type: CHAR
                case CHAR:
                    HiveCharObjectInspector coi = (HiveCharObjectInspector) poi;
                    HiveCharWritable hc = coi.getPrimitiveWritableObject(objectForField);
                    // assert the length of char record fits into the table definition
                    if (hc != null) {
                        assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength();
                    }
                    // CHAR is fixed-width: declared length times the bytes-per-char of the charset.
                    out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength());
                    return;
                // Teradata Type: DECIMAL
                case DECIMAL:
                    DecimalTypeInfo dtype = (DecimalTypeInfo) ti;
                    int precision = dtype.precision();
                    int scale = dtype.scale();
                    HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi;
                    HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField);
                    // assert the precision of decimal record fits into the table definition
                    if (hd != null) {
                        assert (dtype.getPrecision() >= hd.precision());
                    }
                    out.writeDecimal(hd, getDecimalByteNum(precision), scale);
                    return;
                // Teradata Type: VARBYTE
                case BINARY:
                    BinaryObjectInspector bnoi = (BinaryObjectInspector) poi;
                    BytesWritable byw = bnoi.getPrimitiveWritableObject(objectForField);
                    out.writeVarByte(byw);
                    return;
                default:
                    throw new SerDeException("Unrecognized type: " + poi.getPrimitiveCategory());
            }
        // Currently, serialization of complex types is not supported
        case LIST:
        case MAP:
        case STRUCT:
        default:
            throw new SerDeException("Unrecognized type: " + oi.getCategory());
    }
}
Also used : LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)

Example 88 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

From the class RegexSerDe, the method initialize:

/**
 * Initializes the SerDe from the table/partition properties.
 *
 * Reads and compiles the mandatory {@code input.regex} property into the row-matching
 * pattern, then builds a StandardStruct row ObjectInspector over the declared columns
 * (primitive column types only) and pre-allocates the reusable row/output buffers.
 *
 * @throws SerDeException if {@code input.regex} is missing or any column has a
 *                        non-primitive type
 */
@Override
public void initialize(Configuration configuration, Properties tableProperties, Properties partitionProperties) throws SerDeException {
    super.initialize(configuration, tableProperties, partitionProperties);
    numColumns = getColumnNames().size();
    // Read the configuration parameters.
    inputRegex = properties.getProperty(INPUT_REGEX);
    // NOTE(review): despite the constant's name, "true" here enables case-INsensitive
    // matching — confirm against the documented meaning of the table property.
    boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(properties.getProperty(INPUT_REGEX_CASE_SENSITIVE));
    // output format string is not supported anymore, warn user of deprecation
    if (null != properties.getProperty("output.format.string")) {
        log.warn("output.format.string has been deprecated");
    }
    // Compile the row-matching pattern.  Regex flags are bit masks and must be combined
    // with bitwise OR ('+' happens to work only while no flag is ever added twice).
    if (inputRegex != null) {
        inputPattern = Pattern.compile(inputRegex, Pattern.DOTALL | (inputRegexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
    } else {
        // Reset any pattern from a previous initialization before failing.
        inputPattern = null;
        throw new SerDeException("This table does not have serde property \"input.regex\"!");
    }
    /* Constructing the row ObjectInspector:
     * The row consists of some set of primitive columns, each column will
     * be a java object of primitive type.
     */
    List<ObjectInspector> columnOIs = new ArrayList<>(numColumns);
    for (int c = 0; c < numColumns; c++) {
        TypeInfo typeInfo = getColumnTypes().get(c);
        if (typeInfo instanceof PrimitiveTypeInfo) {
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) typeInfo;
            AbstractPrimitiveJavaObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
            columnOIs.add(oi);
        } else {
            // Only primitive column types are supported by this SerDe.
            throw new SerDeException(getClass().getName() + " doesn't allow column [" + c + "] named " + getColumnNames().get(c) + " with type " + typeInfo);
        }
    }
    // StandardStruct uses ArrayList to store the row.
    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(getColumnNames(), columnOIs, Lists.newArrayList(Splitter.on('\0').split(properties.getProperty("columns.comments"))));
    // Constructing the row object, etc, which will be reused for all rows.
    row = new ArrayList<>(Collections.nCopies(numColumns, null));
    outputFields = new Object[numColumns];
    outputRowText = new Text();
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) AbstractPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector) ArrayList(java.util.ArrayList) AbstractPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector) Text(org.apache.hadoop.io.Text) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 89 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

From the class SerDeUtils, the method buildJSONString:

/**
 * Recursively appends a JSON-style rendering of {@code o} to {@code sb}.
 *
 * String-like and temporal primitives are double-quoted (strings, chars and varchars
 * additionally escaped); numeric primitives are appended bare.  LIST/MAP/STRUCT/UNION
 * recurse into their children, using {@code JSON_NULL} as the null marker for nested
 * values while {@code nullStr} is used only at this level.
 *
 * @param sb      target builder the rendering is appended to
 * @param o       value to render; may be null
 * @param oi      inspector describing {@code o}
 * @param nullStr text appended when {@code o} is null at this level
 */
static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi, String nullStr) {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
                if (o == null) {
                    sb.append(nullStr);
                } else {
                    switch(poi.getPrimitiveCategory()) {
                        case BOOLEAN:
                            {
                                boolean b = ((BooleanObjectInspector) poi).get(o);
                                sb.append(b ? "true" : "false");
                                break;
                            }
                        case BYTE:
                            {
                                sb.append(((ByteObjectInspector) poi).get(o));
                                break;
                            }
                        case SHORT:
                            {
                                sb.append(((ShortObjectInspector) poi).get(o));
                                break;
                            }
                        case INT:
                            {
                                sb.append(((IntObjectInspector) poi).get(o));
                                break;
                            }
                        case LONG:
                            {
                                sb.append(((LongObjectInspector) poi).get(o));
                                break;
                            }
                        case FLOAT:
                            {
                                sb.append(((FloatObjectInspector) poi).get(o));
                                break;
                            }
                        case DOUBLE:
                            {
                                sb.append(((DoubleObjectInspector) poi).get(o));
                                break;
                            }
                        case STRING:
                            {
                                sb.append('"');
                                sb.append(escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o)));
                                sb.append('"');
                                break;
                            }
                        case CHAR:
                            {
                                sb.append('"');
                                sb.append(escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString()));
                                sb.append('"');
                                break;
                            }
                        case VARCHAR:
                            {
                                sb.append('"');
                                sb.append(escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString()));
                                sb.append('"');
                                break;
                            }
                        case DATE:
                            {
                                sb.append('"');
                                sb.append(((DateObjectInspector) poi).getPrimitiveWritableObject(o));
                                sb.append('"');
                                break;
                            }
                        case TIMESTAMP:
                            {
                                sb.append('"');
                                sb.append(((TimestampObjectInspector) poi).getPrimitiveWritableObject(o));
                                sb.append('"');
                                break;
                            }
                        case TIMESTAMPLOCALTZ:
                            {
                                sb.append('"');
                                sb.append(((TimestampLocalTZObjectInspector) poi).getPrimitiveWritableObject(o));
                                sb.append('"');
                                break;
                            }
                        case BINARY:
                            {
                                // Cast poi (same object as oi) for consistency with the
                                // other primitive cases.
                                // NOTE(review): binary is appended unquoted and unescaped
                                // — this matches the pre-existing behavior.
                                BytesWritable bw = ((BinaryObjectInspector) poi).getPrimitiveWritableObject(o);
                                Text txt = new Text();
                                txt.set(bw.getBytes(), 0, bw.getLength());
                                sb.append(txt.toString());
                                break;
                            }
                        case DECIMAL:
                            {
                                sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(o));
                                break;
                            }
                        case INTERVAL_YEAR_MONTH:
                            {
                                sb.append(((HiveIntervalYearMonthObjectInspector) poi).getPrimitiveJavaObject(o));
                                break;
                            }
                        case INTERVAL_DAY_TIME:
                            {
                                sb.append(((HiveIntervalDayTimeObjectInspector) poi).getPrimitiveJavaObject(o));
                                break;
                            }
                        default:
                            throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
                    }
                }
                break;
            }
        case LIST:
            {
                ListObjectInspector loi = (ListObjectInspector) oi;
                ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
                List<?> olist = loi.getList(o);
                if (olist == null) {
                    sb.append(nullStr);
                } else {
                    sb.append(LBRACKET);
                    for (int i = 0; i < olist.size(); i++) {
                        if (i > 0) {
                            sb.append(COMMA);
                        }
                        buildJSONString(sb, olist.get(i), listElementObjectInspector, JSON_NULL);
                    }
                    sb.append(RBRACKET);
                }
                break;
            }
        case MAP:
            {
                MapObjectInspector moi = (MapObjectInspector) oi;
                ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
                ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
                Map<?, ?> omap = moi.getMap(o);
                if (omap == null) {
                    sb.append(nullStr);
                } else {
                    sb.append(LBRACE);
                    boolean first = true;
                    for (Object entry : omap.entrySet()) {
                        if (first) {
                            first = false;
                        } else {
                            sb.append(COMMA);
                        }
                        Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
                        buildJSONString(sb, e.getKey(), mapKeyObjectInspector, JSON_NULL);
                        sb.append(COLON);
                        buildJSONString(sb, e.getValue(), mapValueObjectInspector, JSON_NULL);
                    }
                    sb.append(RBRACE);
                }
                break;
            }
        case STRUCT:
            {
                StructObjectInspector soi = (StructObjectInspector) oi;
                List<? extends StructField> structFields = soi.getAllStructFieldRefs();
                if (o == null) {
                    sb.append(nullStr);
                } else {
                    sb.append(LBRACE);
                    for (int i = 0; i < structFields.size(); i++) {
                        if (i > 0) {
                            sb.append(COMMA);
                        }
                        // Struct field names are quoted JSON keys.
                        sb.append(QUOTE);
                        sb.append(structFields.get(i).getFieldName());
                        sb.append(QUOTE);
                        sb.append(COLON);
                        buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), structFields.get(i).getFieldObjectInspector(), JSON_NULL);
                    }
                    sb.append(RBRACE);
                }
                break;
            }
        case UNION:
            {
                // Unions render as {tag:value}.
                UnionObjectInspector uoi = (UnionObjectInspector) oi;
                if (o == null) {
                    sb.append(nullStr);
                } else {
                    sb.append(LBRACE);
                    sb.append(uoi.getTag(o));
                    sb.append(COLON);
                    buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)), JSON_NULL);
                    sb.append(RBRACE);
                }
                break;
            }
        default:
            throw new RuntimeException("Unknown type in ObjectInspector!");
    }
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) TimestampLocalTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) HiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) HiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector) 
ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) List(java.util.List) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) Map(java.util.Map) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 90 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

From the class BinarySortableDeserializeRead, the method createField:

/*
 * Builds the Field descriptor tree for one TypeInfo: primitives record their primitive
 * category; complex types recurse to build a child Field per element/key/value/member.
 */
private Field createField(TypeInfo typeInfo) {
    final Field result = new Field();
    final Category fieldCategory = typeInfo.getCategory();
    result.category = fieldCategory;
    result.typeInfo = typeInfo;
    switch (fieldCategory) {
        case PRIMITIVE:
            result.primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            break;
        case LIST:
            // Single child: the element type.
            result.children = new Field[] { createField(((ListTypeInfo) typeInfo).getListElementTypeInfo()) };
            break;
        case MAP:
            // Two children: key type then value type.
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            result.children = new Field[] { createField(mapTypeInfo.getMapKeyTypeInfo()), createField(mapTypeInfo.getMapValueTypeInfo()) };
            break;
        case STRUCT:
            List<TypeInfo> structFieldTypeInfos = ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos();
            result.count = structFieldTypeInfos.size();
            result.children = createFields(structFieldTypeInfos.toArray(new TypeInfo[0]));
            break;
        case UNION:
            List<TypeInfo> unionObjectTypeInfos = ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos();
            // Unions are read as a (tag, value) pair, hence the fixed count of 2.
            result.count = 2;
            result.children = createFields(unionObjectTypeInfos.toArray(new TypeInfo[0]));
            break;
        default:
            throw new RuntimeException();
    }
    return result;
}
Also used : Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)83 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)75 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)74 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)64 ArrayList (java.util.ArrayList)54 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)52 ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)47 StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)46 MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)45 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)44 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)43 ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector)38 BytesWritable (org.apache.hadoop.io.BytesWritable)36 Text (org.apache.hadoop.io.Text)35 MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector)34 List (java.util.List)30 Map (java.util.Map)30 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)30 UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)30 UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)27