
Example 16 with PrimitiveTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From class RegexSerDe, method deserialize.

@Override
public Object deserialize(Writable blob) throws SerDeException {
    Text rowText = (Text) blob;
    Matcher m = inputPattern.matcher(rowText.toString());
    if (m.groupCount() != numColumns) {
        throw new SerDeException("Number of matching groups doesn't match the number of columns");
    }
    // If the line does not match, ignore it and return a row with all nulls.
    if (!m.matches()) {
        unmatchedRowsCount++;
        if (!alreadyLoggedNoMatch) {
            // Report the row if it's the first time
            LOG.warn("" + unmatchedRowsCount + " unmatched rows are found: " + rowText);
            alreadyLoggedNoMatch = true;
        }
        return null;
    }
    // Otherwise, return the row.
    for (int c = 0; c < numColumns; c++) {
        try {
            String t = m.group(c + 1);
            TypeInfo typeInfo = columnTypes.get(c);
            // Convert the column to the correct type when needed and set in row obj
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) typeInfo;
            switch(pti.getPrimitiveCategory()) {
                case STRING:
                    row.set(c, t);
                    break;
                case BYTE:
                    Byte b;
                    b = Byte.valueOf(t);
                    row.set(c, b);
                    break;
                case SHORT:
                    Short s;
                    s = Short.valueOf(t);
                    row.set(c, s);
                    break;
                case INT:
                    Integer i;
                    i = Integer.valueOf(t);
                    row.set(c, i);
                    break;
                case LONG:
                    Long l;
                    l = Long.valueOf(t);
                    row.set(c, l);
                    break;
                case FLOAT:
                    Float f;
                    f = Float.valueOf(t);
                    row.set(c, f);
                    break;
                case DOUBLE:
                    Double d;
                    d = Double.valueOf(t);
                    row.set(c, d);
                    break;
                case BOOLEAN:
                    Boolean bool;
                    bool = Boolean.valueOf(t);
                    row.set(c, bool);
                    break;
                case TIMESTAMP:
                    Timestamp ts;
                    ts = Timestamp.valueOf(t);
                    row.set(c, ts);
                    break;
                case DATE:
                    Date date;
                    date = Date.valueOf(t);
                    row.set(c, date);
                    break;
                case DECIMAL:
                    HiveDecimal bd = HiveDecimal.create(t);
                    row.set(c, bd);
                    break;
                case CHAR:
                    HiveChar hc = new HiveChar(t, ((CharTypeInfo) typeInfo).getLength());
                    row.set(c, hc);
                    break;
                case VARCHAR:
                    HiveVarchar hv = new HiveVarchar(t, ((VarcharTypeInfo) typeInfo).getLength());
                    row.set(c, hv);
                    break;
                default:
                    throw new SerDeException("Unsupported type " + typeInfo);
            }
        } catch (RuntimeException e) {
            partialMatchedRowsCount++;
            if (!alreadyLoggedPartialMatch) {
                // Report the row if it's the first such row
                LOG.warn("" + partialMatchedRowsCount + " partially unmatched rows are found, " + " cannot find group " + c + ": " + rowText);
                alreadyLoggedPartialMatch = true;
            }
            row.set(c, null);
        }
    }
    return row;
}
Also used : Matcher(java.util.regex.Matcher) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Timestamp(java.sql.Timestamp) Date(java.sql.Date) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal)
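
The following is a minimal, hypothetical usage sketch (not part of the Hive source) showing how this deserialize path can be driven end to end. The column names, regex, and sample input line are invented for illustration; it assumes the initialize(Configuration, Properties) entry point shown in Example 17 below.

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.RegexSerDe;
import org.apache.hadoop.io.Text;

public class RegexSerDeUsageSketch {
    public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        // Hypothetical table definition: host STRING, status INT
        tbl.setProperty(serdeConstants.LIST_COLUMNS, "host,status");
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string:int");
        // One '\0'-separated comment slot per column; initialize() reads this property.
        tbl.setProperty("columns.comments", "\0");
        tbl.setProperty("input.regex", "(\\S+)\\s+(\\d+)");

        RegexSerDe serde = new RegexSerDe();
        serde.initialize(new Configuration(), tbl);

        // Each matched group is converted according to its column's PrimitiveTypeInfo.
        Object row = serde.deserialize(new Text("example.com 200"));
        System.out.println(row);   // expected: [example.com, 200]
    }
}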

Example 17 with PrimitiveTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From class RegexSerDe, method initialize.

@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    // We can get the table definition from tbl.
    // Read the configuration parameters
    inputRegex = tbl.getProperty(INPUT_REGEX);
    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(tbl.getProperty(INPUT_REGEX_CASE_SENSITIVE));
    // output format string is not supported anymore, warn user of deprecation
    if (null != tbl.getProperty("output.format.string")) {
        LOG.warn("output.format.string has been deprecated");
    }
    // Parse the configuration parameters
    if (inputRegex != null) {
        inputPattern = Pattern.compile(inputRegex, Pattern.DOTALL + (inputRegexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
    } else {
        inputPattern = null;
        throw new SerDeException("This table does not have serde property \"input.regex\"!");
    }
    final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl.getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
    List<String> columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
    columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    assert columnNames.size() == columnTypes.size();
    numColumns = columnNames.size();
    /* Constructing the row ObjectInspector:
     * The row consists of some set of primitive columns, each column will
     * be a java object of primitive type.
     */
    List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
    for (int c = 0; c < numColumns; c++) {
        TypeInfo typeInfo = columnTypes.get(c);
        if (typeInfo instanceof PrimitiveTypeInfo) {
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) columnTypes.get(c);
            AbstractPrimitiveJavaObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
            columnOIs.add(oi);
        } else {
            throw new SerDeException(getClass().getName() + " doesn't allow column [" + c + "] named " + columnNames.get(c) + " with type " + columnTypes.get(c));
        }
    }
    // StandardStruct uses ArrayList to store the row.
    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs, Lists.newArrayList(Splitter.on('\0').split(tbl.getProperty("columns.comments"))));
    row = new ArrayList<Object>(numColumns);
    // Constructing the row object, etc, which will be reused for all rows.
    for (int c = 0; c < numColumns; c++) {
        row.add(null);
    }
    outputFields = new Object[numColumns];
    outputRowText = new Text();
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) AbstractPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
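
As a standalone illustration of the ObjectInspector construction above, the short sketch below (hypothetical column names and type string, not from the Hive source) parses a type string into TypeInfos and builds the same kind of standard struct ObjectInspector from Java primitive inspectors.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class RowInspectorSketch {
    public static void main(String[] args) {
        // Hypothetical column definitions, mirroring what initialize() reads from table properties.
        List<String> names = Arrays.asList("host", "status", "latency");
        List<TypeInfo> types = TypeInfoUtils.getTypeInfosFromTypeString("string:int:double");

        List<ObjectInspector> ois = new ArrayList<>(types.size());
        for (TypeInfo t : types) {
            // Only primitive column types are supported, exactly as in RegexSerDe.initialize().
            ois.add(PrimitiveObjectInspectorFactory
                .getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) t));
        }

        StructObjectInspector rowOI =
            ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
        System.out.println(rowOI.getTypeName());   // struct<host:string,status:int,latency:double>
    }
}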

Example 18 with PrimitiveTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From class TypeInfoToSchema, method createAvroMap.

private Schema createAvroMap(TypeInfo typeInfo) {
    TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
    if (((PrimitiveTypeInfo) keyTypeInfo).getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING) {
        throw new UnsupportedOperationException("Key of Map can only be a String");
    }
    TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
    Schema valueSchema = createAvroSchema(valueTypeInfo);
    return Schema.createMap(valueSchema);
}
Also used : MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) Schema(org.apache.avro.Schema) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
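
A small, hypothetical sketch (not from the Hive source) of what createAvroMap does for a Hive map<string,int>: Avro map schemas always key by string, which is why only a STRING key category is accepted. The value schema is created directly here, whereas TypeInfoToSchema recurses through createAvroSchema.

import org.apache.avro.Schema;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class AvroMapSketch {
    public static void main(String[] args) {
        // Hive type map<string,int>, built via TypeInfoFactory for illustration.
        MapTypeInfo mapType = (MapTypeInfo) TypeInfoFactory.getMapTypeInfo(
            TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo);

        // The same key check as in createAvroMap: Avro maps only support string keys.
        PrimitiveTypeInfo keyType = (PrimitiveTypeInfo) mapType.getMapKeyTypeInfo();
        if (keyType.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING) {
            throw new UnsupportedOperationException("Key of Map can only be a String");
        }

        Schema valueSchema = Schema.create(Schema.Type.INT);
        Schema mapSchema = Schema.createMap(valueSchema);
        System.out.println(mapSchema);   // {"type":"map","values":"int"}
    }
}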

Example 19 with PrimitiveTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From class VerifyFast, method verifyDeserializeRead.

public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
    boolean isNull;
    isNull = !deserializeRead.readNextField();
    if (isNull) {
        if (writable != null) {
            TestCase.fail("Field reports null but object is not null (class " + writable.getClass().getName() + ", " + writable.toString() + ")");
        }
        return;
    } else if (writable == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                boolean value = deserializeRead.currentBoolean;
                if (!(writable instanceof BooleanWritable)) {
                    TestCase.fail("Boolean expected writable not Boolean");
                }
                boolean expected = ((BooleanWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case BYTE:
            {
                byte value = deserializeRead.currentByte;
                if (!(writable instanceof ByteWritable)) {
                    TestCase.fail("Byte expected writable not Byte");
                }
                byte expected = ((ByteWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                }
            }
            break;
        case SHORT:
            {
                short value = deserializeRead.currentShort;
                if (!(writable instanceof ShortWritable)) {
                    TestCase.fail("Short expected writable not Short");
                }
                short expected = ((ShortWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case INT:
            {
                int value = deserializeRead.currentInt;
                if (!(writable instanceof IntWritable)) {
                    TestCase.fail("Integer expected writable not Integer");
                }
                int expected = ((IntWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case LONG:
            {
                long value = deserializeRead.currentLong;
                if (!(writable instanceof LongWritable)) {
                    TestCase.fail("Long expected writable not Long");
                }
                long expected = ((LongWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case FLOAT:
            {
                float value = deserializeRead.currentFloat;
                if (!(writable instanceof FloatWritable)) {
                    TestCase.fail("Float expected writable not Float");
                }
                float expected = ((FloatWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case DOUBLE:
            {
                double value = deserializeRead.currentDouble;
                if (!(writable instanceof DoubleWritable)) {
                    TestCase.fail("Double expected writable not Double");
                }
                double expected = ((DoubleWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case STRING:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                String expected = ((Text) writable).toString();
                if (!string.equals(expected)) {
                    TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                }
            }
            break;
        case CHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
                if (!hiveChar.equals(expected)) {
                    TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                }
            }
            break;
        case VARCHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
                if (!hiveVarchar.equals(expected)) {
                    TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                }
            }
            break;
        case DECIMAL:
            {
                HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                if (value == null) {
                    TestCase.fail("Decimal field evaluated to NULL");
                }
                HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
                if (!value.equals(expected)) {
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                    int precision = decimalTypeInfo.getPrecision();
                    int scale = decimalTypeInfo.getScale();
                    TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                }
            }
            break;
        case DATE:
            {
                Date value = deserializeRead.currentDateWritable.get();
                Date expected = ((DateWritable) writable).get();
                if (!value.equals(expected)) {
                    TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case TIMESTAMP:
            {
                Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                Timestamp expected = ((TimestampWritable) writable).getTimestamp();
                if (!value.equals(expected)) {
                    TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case BINARY:
            {
                byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                BytesWritable bytesWritable = (BytesWritable) writable;
                byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                if (byteArray.length != expected.length) {
                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                }
                for (int b = 0; b < byteArray.length; b++) {
                    if (byteArray[b] != expected[b]) {
                        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                    }
                }
            }
            break;
        default:
            throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
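
The verification above is one long dispatch on PrimitiveCategory. The simplified, standalone sketch below (a hypothetical helper, not the Hive test code) shows the same pattern in isolation for two categories: pick the branch from the PrimitiveTypeInfo, then compare the Writable's value against an expected plain Java value.

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class PrimitiveCompareSketch {

    // Simplified stand-in for the verify logic: dispatch on the primitive category
    // and compare the Writable's value against the expected plain Java value.
    static boolean matches(PrimitiveTypeInfo typeInfo, Writable writable, Object expected) {
        switch (typeInfo.getPrimitiveCategory()) {
            case BOOLEAN:
                return writable instanceof BooleanWritable
                    && ((BooleanWritable) writable).get() == (Boolean) expected;
            case INT:
                return writable instanceof IntWritable
                    && ((IntWritable) writable).get() == (Integer) expected;
            default:
                throw new IllegalArgumentException(
                    "Category not handled in this sketch: " + typeInfo.getPrimitiveCategory());
        }
    }

    public static void main(String[] args) {
        System.out.println(matches((PrimitiveTypeInfo) TypeInfoFactory.intTypeInfo,
            new IntWritable(42), 42));                    // true
        System.out.println(matches((PrimitiveTypeInfo) TypeInfoFactory.booleanTypeInfo,
            new BooleanWritable(true), false));           // false
    }
}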

Example 20 with PrimitiveTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From class MyTestPrimitiveClass, method getPrimitiveWritableObject.

public Object getPrimitiveWritableObject(int index, PrimitiveTypeInfo primitiveTypeInfo) {
    int field = 0;
    if (index == field++) {
        return (myBool == null ? null : PrimitiveObjectInspectorFactory.writableBooleanObjectInspector.create((boolean) myBool));
    } else if (index == field++) {
        return (myByte == null ? null : PrimitiveObjectInspectorFactory.writableByteObjectInspector.create((byte) myByte));
    } else if (index == field++) {
        return (myShort == null ? null : PrimitiveObjectInspectorFactory.writableShortObjectInspector.create((short) myShort));
    } else if (index == field++) {
        return (myInt == null ? null : PrimitiveObjectInspectorFactory.writableIntObjectInspector.create((int) myInt));
    } else if (index == field++) {
        return (myLong == null ? null : PrimitiveObjectInspectorFactory.writableLongObjectInspector.create((long) myLong));
    } else if (index == field++) {
        return (myFloat == null ? null : PrimitiveObjectInspectorFactory.writableFloatObjectInspector.create((float) myFloat));
    } else if (index == field++) {
        return (myDouble == null ? null : PrimitiveObjectInspectorFactory.writableDoubleObjectInspector.create((double) myDouble));
    } else if (index == field++) {
        return (myString == null ? null : PrimitiveObjectInspectorFactory.writableStringObjectInspector.create(myString));
    } else if (index == field++) {
        if (myHiveChar == null) {
            return null;
        }
        CharTypeInfo charTypeInfo = (CharTypeInfo) primitiveTypeInfo;
        WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector(charTypeInfo);
        return writableCharObjectInspector.create(myHiveChar);
    } else if (index == field++) {
        if (myHiveVarchar == null) {
            return null;
        }
        VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) primitiveTypeInfo;
        WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector(varcharTypeInfo);
        return writableVarcharObjectInspector.create(myHiveVarchar);
    } else if (index == field++) {
        return (myBinary == null ? null : PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create(myBinary));
    } else if (index == field++) {
        if (myDecimal == null) {
            return null;
        }
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
        WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector(decimalTypeInfo);
        return writableDecimalObjectInspector.create(myDecimal);
    } else if (index == field++) {
        return (myDate == null ? null : PrimitiveObjectInspectorFactory.writableDateObjectInspector.create(myDate));
    } else if (index == field++) {
        return (myTimestamp == null ? null : PrimitiveObjectInspectorFactory.writableTimestampObjectInspector.create(myTimestamp));
    } else if (index == field++) {
        return (myIntervalYearMonth == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector.create(myIntervalYearMonth));
    } else if (index == field++) {
        return (myIntervalDayTime == null ? null : PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.create(myIntervalDayTime));
    } else {
        throw new Error("Field " + index + " not handled");
    }
}
Also used : WritableHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector)
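
For context, a brief, hypothetical sketch (not from the Hive source) of the pattern used above: fixed-width types can reuse the shared factory inspectors, while parameterized types such as char(n) and decimal(p,s) need a writable ObjectInspector constructed from their specific TypeInfo.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class WritableInspectorSketch {
    public static void main(String[] args) {
        // Fixed-width types need no type parameters, so the shared factory inspector suffices.
        Object intWritable = PrimitiveObjectInspectorFactory.writableIntObjectInspector.create(7);

        // char(10): the inspector carries the length from the CharTypeInfo.
        CharTypeInfo charType = TypeInfoFactory.getCharTypeInfo(10);
        Object charWritable = new WritableHiveCharObjectInspector(charType)
            .create(new HiveChar("abc", charType.getLength()));

        // decimal(10,2): the inspector carries precision and scale from the DecimalTypeInfo.
        DecimalTypeInfo decType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        Object decWritable = new WritableHiveDecimalObjectInspector(decType)
            .create(HiveDecimal.create("12.34"));

        System.out.println(intWritable + " " + charWritable + " " + decWritable);
    }
}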

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 110 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 41 usages
ArrayList (java.util.ArrayList): 37 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 33 usages
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 26 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 25 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 23 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 20 usages
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 19 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 18 usages
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 18 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 15 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 15 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 15 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 14 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 14 usages
IntWritable (org.apache.hadoop.io.IntWritable): 13 usages
Text (org.apache.hadoop.io.Text): 13 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 11 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 11 usages