Example 21 with PRIMITIVE

Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project flink by apache.

The class GenericUDFLegacyGroupingID, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // we accept two arguments: the new GROUPING__ID and the number of GBY expressions
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Expect 2 arguments but actually got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "First argument should be primitive type");
    }
    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "Second argument should be primitive type");
    }
    groupingIdOI = (PrimitiveObjectInspector) arguments[0];
    if (groupingIdOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.LONG) {
        throw new UDFArgumentTypeException(0, "First argument should be a long");
    }
    PrimitiveObjectInspector numExprOI = (PrimitiveObjectInspector) arguments[1];
    if (numExprOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.INT) {
        throw new UDFArgumentTypeException(1, "Second argument should be an int");
    }
    if (!(numExprOI instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Second argument should be a constant");
    }
    numExprs = PrimitiveObjectInspectorUtils.getInt(((ConstantObjectInspector) numExprOI).getWritableConstantValue(), numExprOI);
    if (numExprs < 1 || numExprs > 64) {
        throw new UDFArgumentException("Number of GROUP BY expressions out of range: " + numExprs);
    }
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)
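
For context, a minimal driver sketch of the checks above, run inside a method declared to throw UDFArgumentException. The factory calls are standard Hive serde2 APIs (plus TypeInfoFactory and IntWritable); the UDF instance and the constant value 3 are illustrative assumptions, not part of the original example.

// A plain long inspector and a constant int inspector: the only
// combination initialize() accepts (the concrete values are assumptions)
ObjectInspector groupingIdArg = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
ObjectInspector numExprsArg = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(3));
GenericUDFLegacyGroupingID udf = new GenericUDFLegacyGroupingID();
// Passes every check above and returns writableLongObjectInspector
ObjectInspector returnOI = udf.initialize(new ObjectInspector[] { groupingIdArg, numExprsArg });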

Example 22 with PRIMITIVE

Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project flink by apache.

The class HiveParserTypeCheckProcFactory, method toExprNodeDesc.

private static ExprNodeDesc toExprNodeDesc(ColumnInfo colInfo) {
    ObjectInspector inspector = colInfo.getObjectInspector();
    if (inspector instanceof ConstantObjectInspector && inspector instanceof PrimitiveObjectInspector) {
        PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
        Object constant = ((ConstantObjectInspector) inspector).getWritableConstantValue();
        return new ExprNodeConstantDesc(colInfo.getType(), poi.getPrimitiveJavaObject(constant));
    }
    // non-constant or non-primitive constants
    ExprNodeColumnDesc column = new ExprNodeColumnDesc(colInfo);
    column.setSkewedCol(colInfo.isSkewedCol());
    return column;
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)
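
A small sketch of the constant-folding test this method relies on. The factory method is a standard Hive serde2 API (plus org.apache.hadoop.io.Text); the string constant "hello" is an assumed example.

// A constant primitive inspector satisfies both instanceof checks, so
// toExprNodeDesc would fold the column into an ExprNodeConstantDesc
// rather than emitting an ExprNodeColumnDesc
ObjectInspector oi = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("hello"));
boolean foldable = oi instanceof ConstantObjectInspector && oi instanceof PrimitiveObjectInspector; // true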

Example 23 with PRIMITIVE

Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project haivvreo by jghoman.

The class AvroObjectInspectorGenerator, method createObjectInspectorWorker.

private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
    // Note: nullable Avro unions ([T, null]) are resolved to the underlying type
    // at deserialization, so the object inspector will never see the actual union.
    if (!supportedCategories(ti))
        throw new HaivvreoException("Don't yet support this type: " + ti);
    ObjectInspector result;
    switch(ti.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
            result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti.getPrimitiveCategory());
            break;
        case STRUCT:
            StructTypeInfo sti = (StructTypeInfo) ti;
            ArrayList<ObjectInspector> ois = new ArrayList<ObjectInspector>(sti.getAllStructFieldTypeInfos().size());
            for (TypeInfo typeInfo : sti.getAllStructFieldTypeInfos()) {
                ois.add(createObjectInspectorWorker(typeInfo));
            }
            result = ObjectInspectorFactory.getStandardStructObjectInspector(sti.getAllStructFieldNames(), ois);
            break;
        case MAP:
            MapTypeInfo mti = (MapTypeInfo) ti;
            result = ObjectInspectorFactory.getStandardMapObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING), createObjectInspectorWorker(mti.getMapValueTypeInfo()));
            break;
        case LIST:
            ListTypeInfo ati = (ListTypeInfo) ti;
            result = ObjectInspectorFactory.getStandardListObjectInspector(createObjectInspectorWorker(ati.getListElementTypeInfo()));
            break;
        case UNION:
            UnionTypeInfo uti = (UnionTypeInfo) ti;
            List<TypeInfo> allUnionObjectTypeInfos = uti.getAllUnionObjectTypeInfos();
            List<ObjectInspector> unionObjectInspectors = new ArrayList<ObjectInspector>(allUnionObjectTypeInfos.size());
            for (TypeInfo typeInfo : allUnionObjectTypeInfos) {
                unionObjectInspectors.add(createObjectInspectorWorker(typeInfo));
            }
            result = ObjectInspectorFactory.getStandardUnionObjectInspector(unionObjectInspectors);
            break;
        default:
            throw new HaivvreoException("No Hive categories matched: " + ti);
    }
    return result;
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ArrayList (java.util.ArrayList)
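
To make the recursion concrete, this is the inspector tree the worker would assemble for a map<string,array<int>> TypeInfo, built here directly from the same standard serde2 factories (the type itself is an assumed example, not from the original).

// PRIMITIVE branch for the innermost element type
ObjectInspector intOI = PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.INT);
// LIST branch wraps the recursively built element inspector
ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(intOI);
// MAP branch: keys are fixed to STRING, values recurse
ObjectInspector stringOI = PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
ObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(stringOI, listOI);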

Example 24 with PRIMITIVE

Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

The class AccumuloRowSerializer, method serializeRowId.

/**
 * Serialize an Accumulo rowid
 */
protected byte[] serializeRowId(Object rowId, StructField rowIdField, ColumnMapping rowIdMapping) throws IOException {
    if (rowId == null) {
        throw new IOException("Accumulo rowId cannot be NULL");
    }
    // Reset the buffer we're going to use
    output.reset();
    ObjectInspector rowIdFieldOI = rowIdField.getFieldObjectInspector();
    String rowIdMappingType = rowIdMapping.getColumnType();
    TypeInfo rowIdTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(rowIdMappingType);
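    // The Hive field is non-primitive but the Accumulo row-id mapping expects a
    // primitive; in that case fall back to a JSON rendering of the complex value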
    if (!rowIdFieldOI.getCategory().equals(ObjectInspector.Category.PRIMITIVE) && rowIdTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE) {
        // we always serialize the String type using the escaped algorithm for LazyString
        writeString(output, SerDeUtils.getJSONString(rowId, rowIdFieldOI), PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return output.toByteArray();
    }
    // use the serialization option switch to write primitive values as either a variable
    // length UTF8 string or a fixed width bytes if serializing in binary format
    getSerializedValue(rowIdFieldOI, rowId, output, rowIdMapping);
    return output.toByteArray();
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), IOException (java.io.IOException), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
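
A sketch of the category-mismatch guard in isolation. The utility calls are standard hive-serde APIs; the list-of-strings field and the "string" mapping are assumed examples.

// Non-primitive Hive field inspector vs. a primitive Accumulo mapping type
ObjectInspector fieldOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
TypeInfo mappedType = TypeInfoUtils.getTypeInfoFromTypeString("string");
// true here, so serializeRowId would write SerDeUtils.getJSONString(...) instead
// of the normal primitive serialization path
boolean fallbackToJson = !fieldOI.getCategory().equals(ObjectInspector.Category.PRIMITIVE)
        && mappedType.getCategory() == ObjectInspector.Category.PRIMITIVE;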

Example 25 with PRIMITIVE

Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

The class AccumuloRowSerializer, method writeWithLevel.

/**
 * Recursively serialize an Object using its {@link ObjectInspector}, respecting the
 * separators defined by the {@link LazySerDeParameters}.
 * @param oi ObjectInspector for the current object
 * @param value The current object
 * @param output The buffer the serialized value is written to
 * @param mapping The mapping for this Hive column
 * @param level The current level/offset for the SerDe separator
 * @throws IOException
 */
protected void writeWithLevel(ObjectInspector oi, Object value, ByteStream.Output output, ColumnMapping mapping, int level) throws IOException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            if (mapping.getEncoding() == ColumnEncoding.BINARY) {
                this.writeBinary(output, value, (PrimitiveObjectInspector) oi);
            } else {
                this.writeString(output, value, (PrimitiveObjectInspector) oi);
            }
            return;
        case LIST:
            char separator = (char) serDeParams.getSeparators()[level];
            ListObjectInspector loi = (ListObjectInspector) oi;
            List<?> list = loi.getList(value);
            ObjectInspector eoi = loi.getListElementObjectInspector();
            if (list == null) {
                log.debug("No objects found when serializing list");
                return;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        output.write(separator);
                    }
                    writeWithLevel(eoi, list.get(i), output, mapping, level + 1);
                }
            }
            return;
        case MAP:
            char sep = (char) serDeParams.getSeparators()[level];
            char keyValueSeparator = (char) serDeParams.getSeparators()[level + 1];
            MapObjectInspector moi = (MapObjectInspector) oi;
            ObjectInspector koi = moi.getMapKeyObjectInspector();
            ObjectInspector voi = moi.getMapValueObjectInspector();
            Map<?, ?> map = moi.getMap(value);
            if (map == null) {
                log.debug("No object found when serializing map");
                return;
            } else {
                boolean first = true;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    if (first) {
                        first = false;
                    } else {
                        output.write(sep);
                    }
                    writeWithLevel(koi, entry.getKey(), output, mapping, level + 2);
                    output.write(keyValueSeparator);
                    writeWithLevel(voi, entry.getValue(), output, mapping, level + 2);
                }
            }
            return;
        case STRUCT:
            sep = (char) serDeParams.getSeparators()[level];
            StructObjectInspector soi = (StructObjectInspector) oi;
            List<? extends StructField> fields = soi.getAllStructFieldRefs();
            list = soi.getStructFieldsDataAsList(value);
            if (list == null) {
                log.debug("No object found when serializing struct");
                return;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        output.write(sep);
                    }
                    writeWithLevel(fields.get(i).getFieldObjectInspector(), list.get(i), output, mapping, level + 1);
                }
            }
            return;
        default:
            throw new RuntimeException("Unknown category type: " + oi.getCategory());
    }
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), Map (java.util.Map)
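
To make the level arithmetic concrete, a hedged sketch of the default separator table writeWithLevel indexes into. It assumes hive-serde's LazySerDeParameters (plus java.util.Properties, org.apache.hadoop.conf.Configuration, and serdeConstants), runs inside a method declared to throw SerDeException, and the table properties are illustrative.

// Build SerDe parameters for a single map<string,int> column (assumed schema)
Properties tbl = new Properties();
tbl.setProperty(serdeConstants.LIST_COLUMNS, "c0");
tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "map<string,int>");
LazySerDeParameters params = new LazySerDeParameters(new Configuration(), tbl, "example");
byte[] seps = params.getSeparators();
// By default seps[0] = '\1', seps[1] = '\2', seps[2] = '\3'; a map serialized at
// level 1 joins entries with seps[1] and each key/value pair with seps[2],
// matching the level and level + 1 lookups in the MAP branch above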

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 83 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 75 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 74 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 64 uses
ArrayList (java.util.ArrayList): 54 uses
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 52 uses
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 47 uses
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 46 uses
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 45 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 44 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 43 uses
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 38 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 36 uses
Text (org.apache.hadoop.io.Text): 35 uses
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 34 uses
List (java.util.List): 30 uses
Map (java.util.Map): 30 uses
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 30 uses
UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo): 30 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 27 uses