Example 46 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class VectorRandomRowSource method chooseSchema.

private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) {
    HashSet<Integer> hashSet = null;
    final boolean allTypes;
    final boolean onlyOne = (r.nextInt(100) == 7);
    if (onlyOne) {
        columnCount = 1;
        allTypes = false;
    } else {
        allTypes = r.nextBoolean();
        if (allTypes) {
            switch(supportedTypes) {
                case ALL:
                    columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
                    break;
                case ALL_EXCEPT_MAP:
                    columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
                    break;
                case PRIMITIVES:
                    columnCount = possibleHivePrimitiveTypeNames.length;
                    break;
            }
            hashSet = new HashSet<Integer>();
        } else {
            columnCount = 1 + r.nextInt(20);
        }
    }
    typeNames = new ArrayList<String>(columnCount);
    categories = new Category[columnCount];
    typeInfos = new TypeInfo[columnCount];
    objectInspectorList = new ArrayList<ObjectInspector>(columnCount);
    primitiveCategories = new PrimitiveCategory[columnCount];
    primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
    primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
    List<String> columnNames = new ArrayList<String>(columnCount);
    for (int c = 0; c < columnCount; c++) {
        columnNames.add(String.format("col%d", c));
        final String typeName;
        if (onlyOne) {
            typeName = getRandomTypeName(supportedTypes);
        } else {
            int typeNum;
            if (allTypes) {
                int maxTypeNum = 0;
                switch(supportedTypes) {
                    case PRIMITIVES:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length;
                        break;
                    case ALL_EXCEPT_MAP:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
                        break;
                    case ALL:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
                        break;
                }
                while (true) {
                    typeNum = r.nextInt(maxTypeNum);
                    // Set.add returns false for a duplicate, so loop until an unused type number is drawn.
                    if (hashSet.add(typeNum)) {
                        break;
                    }
                }
            } else {
                if (supportedTypes == SupportedTypes.PRIMITIVES || r.nextInt(10) != 0) {
                    typeNum = r.nextInt(possibleHivePrimitiveTypeNames.length);
                } else {
                    typeNum = possibleHivePrimitiveTypeNames.length + r.nextInt(possibleHiveComplexTypeNames.length);
                    if (supportedTypes == SupportedTypes.ALL_EXCEPT_MAP) {
                        typeNum--;
                    }
                }
            }
            if (typeNum < possibleHivePrimitiveTypeNames.length) {
                typeName = possibleHivePrimitiveTypeNames[typeNum];
            } else {
                typeName = possibleHiveComplexTypeNames[typeNum - possibleHivePrimitiveTypeNames.length];
            }
        }
        String decoratedTypeName = getDecoratedTypeName(typeName, supportedTypes, 0, maxComplexDepth);
        final TypeInfo typeInfo;
        try {
            typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(decoratedTypeName);
        } catch (Exception e) {
            throw new RuntimeException("Cannot convert type name " + decoratedTypeName + " to a type " + e);
        }
        typeInfos[c] = typeInfo;
        final Category category = typeInfo.getCategory();
        categories[c] = category;
        ObjectInspector objectInspector = getObjectInspector(typeInfo);
        switch(category) {
            case PRIMITIVE:
                {
                    final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                    objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
                    primitiveTypeInfos[c] = primitiveTypeInfo;
                    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
                    primitiveCategories[c] = primitiveCategory;
                    primitiveObjectInspectorList.add(objectInspector);
                }
                break;
            case LIST:
            case MAP:
            case STRUCT:
            case UNION:
                primitiveObjectInspectorList.add(null);
                break;
            default:
                throw new RuntimeException("Unexpected catagory " + category);
        }
        objectInspectorList.add(objectInspector);
        typeNames.add(decoratedTypeName);
    }
    rowStructObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, objectInspectorList);
    alphabets = new String[columnCount];
}
Also used : WritableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector) WritableByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableByteObjectInspector) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StandardListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) WritableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) WritableHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) WritableHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) WritableBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector) WritableTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector) WritableHiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector) WritableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector) StandardMapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector) WritableHiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector) WritableFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector) WritableLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector) StandardUnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector) WritableDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector) WritableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) ArrayList(java.util.ArrayList) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HashSet(java.util.HashSet)
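
For orientation, here is a minimal standalone sketch of the factory call these examples center on: parse a Hive type string into a PrimitiveTypeInfo, then ask the factory for the matching writable-backed inspector. The type string "int" and the class name are illustrative choices, not taken from the Hive source above.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class WritableOiSketch {
    public static void main(String[] args) {
        // Parse a Hive type string into its TypeInfo; "int" is an arbitrary example.
        PrimitiveTypeInfo intInfo =
            (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("int");
        // The factory returns the inspector whose primitive objects are Hadoop
        // Writables; for "int" that is a WritableIntObjectInspector over IntWritable.
        ObjectInspector oi =
            PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(intInfo);
        System.out.println(oi.getTypeName()); // prints "int"
    }
}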

Example 47 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class SparkDynamicPartitionPruner method prunePartitionSingleSource.

private void prunePartitionSingleSource(SourceInfo info, MapWork work) throws HiveException {
    Set<Object> values = info.values;
    // Strip the targetId prefix from the column name.
    String columnName = info.columnName.substring(info.columnName.indexOf(':') + 1);
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(info.columnType));
    ObjectInspectorConverters.Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(Collections.singletonList(columnName), Collections.singletonList(oi));
    @SuppressWarnings("rawtypes") ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(info.partKey);
    eval.initialize(soi);
    applyFilterToPartitions(work, converter, eval, columnName, values);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ObjectInspectorConverters(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator)
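
The pruning pattern above hinges on converting string-form partition values into the column's writable type. A minimal sketch of just that conversion, assuming an int partition column (the type string and value are illustrative):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitionValueConvertSketch {
    public static void main(String[] args) {
        // Target inspector for the partition column's declared type.
        ObjectInspector oi = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
        // Partition values arrive as Java strings; the converter produces the
        // writable form the expression evaluator expects.
        ObjectInspectorConverters.Converter converter = ObjectInspectorConverters
            .getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
        Object converted = converter.convert("42"); // an IntWritable holding 42
        System.out.println(converted);
    }
}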

Example 48 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class MapJoinTestDescription method computeDerived.

public void computeDerived() {
    bigTableColumnNamesList = Arrays.asList(bigTableColumnNames);
    bigTableKeyColumnNames = new String[bigTableKeyColumnNums.length];
    bigTableKeyTypeInfos = new TypeInfo[bigTableKeyColumnNums.length];
    for (int i = 0; i < bigTableKeyColumnNums.length; i++) {
        bigTableKeyColumnNames[i] = bigTableColumnNames[bigTableKeyColumnNums[i]];
        bigTableKeyTypeInfos[i] = bigTableTypeInfos[bigTableKeyColumnNums[i]];
    }
    smallTableValueColumnNamesList = Arrays.asList(smallTableValueColumnNames);
    bigTableObjectInspectors = new ObjectInspector[bigTableTypeInfos.length];
    for (int i = 0; i < bigTableTypeInfos.length; i++) {
        bigTableObjectInspectors[i] = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector((PrimitiveTypeInfo) bigTableTypeInfos[i]);
    }
    bigTableObjectInspectorsList = Arrays.asList(bigTableObjectInspectors);
    smallTableObjectInspectors = new ObjectInspector[smallTableValueTypeInfos.length];
    smallTablePrimitiveCategories = new PrimitiveCategory[smallTableValueTypeInfos.length];
    smallTableValuePrimitiveTypeInfos = new PrimitiveTypeInfo[smallTableValueTypeInfos.length];
    for (int i = 0; i < smallTableValueTypeInfos.length; i++) {
        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) smallTableValueTypeInfos[i];
        smallTableObjectInspectors[i] = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
        smallTablePrimitiveCategories[i] = primitiveTypeInfo.getPrimitiveCategory();
        smallTableValuePrimitiveTypeInfos[i] = primitiveTypeInfo;
    }
    smallTableObjectInspectorsList = Arrays.asList(smallTableObjectInspectors);
    bigTableStandardObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(bigTableColumnNamesList, Arrays.asList((ObjectInspector[]) bigTableObjectInspectors));
    smallTableStandardObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(smallTableValueColumnNamesList, Arrays.asList((ObjectInspector[]) smallTableObjectInspectors));
    inputObjectInspectors = new ObjectInspector[] { bigTableStandardObjectInspector, smallTableStandardObjectInspector };
    int outputLength = bigTableRetainColumnNums.length + smallTableRetainKeyColumnNums.length + smallTableRetainValueColumnNums.length;
    outputColumnNames = createOutputColumnNames(outputLength);
    outputTypeInfos = new TypeInfo[outputLength];
    int outputIndex = 0;
    for (int i = 0; i < bigTableRetainColumnNums.length; i++) {
        outputTypeInfos[outputIndex++] = bigTableTypeInfos[bigTableRetainColumnNums[i]];
    }
    for (int i = 0; i < smallTableRetainValueColumnNums.length; i++) {
        outputTypeInfos[outputIndex++] = smallTableValueTypeInfos[smallTableRetainValueColumnNums[i]];
    }
    outputObjectInspectors = new ObjectInspector[outputLength];
    for (int i = 0; i < outputLength; i++) {
        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) outputTypeInfos[i];
        outputObjectInspectors[i] = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
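
The same per-column pattern, reduced to its core: map each PrimitiveTypeInfo to a writable object inspector, then wrap the columns in a standard struct inspector. A minimal sketch; the column names and types are illustrative assumptions.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class StructOiSketch {
    public static void main(String[] args) {
        // Two-column row shape: (id bigint, name string).
        PrimitiveTypeInfo[] typeInfos = {
            TypeInfoFactory.longTypeInfo,
            TypeInfoFactory.stringTypeInfo
        };
        ObjectInspector[] ois = new ObjectInspector[typeInfos.length];
        for (int i = 0; i < typeInfos.length; i++) {
            ois[i] = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfos[i]);
        }
        StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("id", "name"), Arrays.asList(ois));
        System.out.println(soi.getTypeName()); // e.g. struct<id:bigint,name:string>
    }
}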

Example 49 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class TestVectorMapJoinFastRowHashMap method addAndVerifyRows.

private void addAndVerifyRows(VectorRandomRowSource valueSource, Object[][] rows, VectorMapJoinFastHashTable map, HashTableKeyType hashTableKeyType, VerifyFastRowHashMap verifyTable, String[] keyTypeNames, boolean doClipping, boolean useExactBytes) throws HiveException, IOException, SerDeException {
    final int keyCount = keyTypeNames.length;
    PrimitiveTypeInfo[] keyPrimitiveTypeInfos = new PrimitiveTypeInfo[keyCount];
    PrimitiveCategory[] keyPrimitiveCategories = new PrimitiveCategory[keyCount];
    ArrayList<ObjectInspector> keyPrimitiveObjectInspectorList = new ArrayList<ObjectInspector>(keyCount);
    for (int i = 0; i < keyCount; i++) {
        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(keyTypeNames[i]);
        keyPrimitiveTypeInfos[i] = primitiveTypeInfo;
        PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
        keyPrimitiveCategories[i] = primitiveCategory;
        keyPrimitiveObjectInspectorList.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo));
    }
    boolean[] keyColumnSortOrderIsDesc = new boolean[keyCount];
    Arrays.fill(keyColumnSortOrderIsDesc, false);
    byte[] keyColumnNullMarker = new byte[keyCount];
    Arrays.fill(keyColumnNullMarker, BinarySortableSerDe.ZERO);
    byte[] keyColumnNotNullMarker = new byte[keyCount];
    Arrays.fill(keyColumnNotNullMarker, BinarySortableSerDe.ONE);
    BinarySortableSerializeWrite keySerializeWrite = new BinarySortableSerializeWrite(keyColumnSortOrderIsDesc, keyColumnNullMarker, keyColumnNotNullMarker);
    TypeInfo[] valueTypeInfos = valueSource.typeInfos();
    final int columnCount = valueTypeInfos.length;
    SerializeWrite valueSerializeWrite = new LazyBinarySerializeWrite(columnCount);
    final int count = rows.length;
    for (int i = 0; i < count; i++) {
        Object[] valueRow = rows[i];
        Output valueOutput = new Output();
        ((LazyBinarySerializeWrite) valueSerializeWrite).set(valueOutput);
        for (int index = 0; index < columnCount; index++) {
            VerifyFastRow.serializeWrite(valueSerializeWrite, valueTypeInfos[index], valueRow[index]);
        }
        byte[] value = Arrays.copyOf(valueOutput.getData(), valueOutput.getLength());
        // Add a new key or add a value to an existing key?
        byte[] key;
        if (random.nextBoolean() || verifyTable.getCount() == 0) {
            Object[] keyRow = VectorRandomRowSource.randomWritablePrimitiveRow(keyCount, random, keyPrimitiveTypeInfos);
            Output keyOutput = new Output();
            keySerializeWrite.set(keyOutput);
            for (int index = 0; index < keyCount; index++) {
                VerifyFastRow.serializeWrite(keySerializeWrite, keyPrimitiveTypeInfos[index], keyRow[index]);
            }
            key = Arrays.copyOf(keyOutput.getData(), keyOutput.getLength());
            verifyTable.add(key, keyRow, value, valueRow);
        } else {
            key = verifyTable.addRandomExisting(value, valueRow, random);
        }
        // Wrap the serialized key and value bytes and insert them into the hash table.
        BytesWritable keyWritable = new BytesWritable(key);
        BytesWritable valueWritable = new BytesWritable(value);
        map.putRow(keyWritable, valueWritable);
    // verifyTable.verify(map);
    }
    verifyTable.verify(map, hashTableKeyType, valueTypeInfos, doClipping, useExactBytes, random);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList) LazyBinarySerializeWrite(org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinarySerializeWrite) BytesWritable(org.apache.hadoop.io.BytesWritable) BinarySortableSerializeWrite(org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) Output(org.apache.hadoop.hive.serde2.ByteStream.Output) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) SerializeWrite(org.apache.hadoop.hive.serde2.fast.SerializeWrite)
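
For the value-serialization half of this test, a stripped-down sketch: write one int field through LazyBinarySerializeWrite into an Output buffer and copy out the bytes, as the loop above does per row. The single-column layout and value are illustrative assumptions.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinarySerializeWrite;

public class ValueSerializeSketch {
    public static void main(String[] args) throws Exception {
        // One-column value row holding a single int.
        LazyBinarySerializeWrite writer = new LazyBinarySerializeWrite(1);
        Output out = new Output();
        // Point the writer at a fresh output buffer, then emit the field.
        writer.set(out);
        writer.writeInt(42);
        // Copy exactly the written bytes, as the test does before wrapping
        // them in a BytesWritable.
        byte[] value = Arrays.copyOf(out.getData(), out.getLength());
        System.out.println(value.length + " bytes");
    }
}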

Example 50 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class DynamicPartitionPruner method prunePartitionSingleSource.

@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
    if (si.skipPruning.get()) {
        // in this case we've determined that there's too much data
        // to prune dynamically.
        LOG.info("Skip pruning on " + source + ", column " + si.columnName);
        return;
    }
    Set<Object> values = si.values;
    String columnName = si.columnName;
    if (LOG.isDebugEnabled()) {
        StringBuilder sb = new StringBuilder("Pruning ");
        sb.append(columnName);
        sb.append(" with ");
        for (Object value : values) {
            sb.append(value == null ? null : value.toString());
            sb.append(", ");
        }
        LOG.debug(sb.toString());
    }
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
    Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(Collections.singletonList(columnName), Collections.singletonList(oi));
    @SuppressWarnings("rawtypes") ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
    eval.initialize(soi);
    applyFilterToPartitions(converter, eval, columnName, values);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
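
To see the evaluator wiring in isolation, a minimal sketch that builds the single-column struct inspector, initializes an evaluator over it, and evaluates one row. A bare ExprNodeColumnDesc stands in for the real partition-key expression, and the column name "part_col" is an illustrative assumption.

import java.util.Collections;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class PruneEvalSketch {
    public static void main(String[] args) throws Exception {
        // Single-column struct mirroring the pruner's setup.
        ObjectInspector oi = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
        StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
            Collections.singletonList("part_col"), Collections.singletonList(oi));
        // A column reference as the expression; real pruning uses si.partKey here.
        @SuppressWarnings("rawtypes")
        ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(
            new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "part_col", null, true));
        eval.initialize(soi);
        // Evaluate against one row; the result is that row's partition value.
        Object result = eval.evaluate(Collections.singletonList((Object) new IntWritable(7)));
        System.out.println(result); // prints 7
    }
}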

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 77
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 63
Test (org.junit.Test) 45
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) 35
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) 35
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 24
ArrayList (java.util.ArrayList) 20
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) 20
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) 18
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException) 17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) 14
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) 13
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 12
WritableHiveCharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) 9
WritableHiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector) 9
WritableHiveVarcharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) 9
WritableStringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector) 9
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) 9
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) 8
WritableBooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector) 8