
Example 26 with CharTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.

the class VectorAssignRow method initTargetEntry.

/*
   * Initialize one column's target related arrays.
   */
private void initTargetEntry(int logicalColumnIndex, int projectionColumnNum, TypeInfo typeInfo) {
    isConvert[logicalColumnIndex] = false;
    projectionColumnNums[logicalColumnIndex] = projectionColumnNum;
    Category category = typeInfo.getCategory();
    targetCategories[logicalColumnIndex] = category;
    if (category == Category.PRIMITIVE) {
        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
        PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
        targetPrimitiveCategories[logicalColumnIndex] = primitiveCategory;
        switch(primitiveCategory) {
            case CHAR:
                maxLengths[logicalColumnIndex] = ((CharTypeInfo) primitiveTypeInfo).getLength();
                break;
            case VARCHAR:
                maxLengths[logicalColumnIndex] = ((VarcharTypeInfo) primitiveTypeInfo).getLength();
                break;
            default:
                // No additional data type specific setting.
                break;
        }
    }
}
Also used : Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
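For orientation, here is a minimal, self-contained sketch of what the CHAR/VARCHAR branch above computes: the declared maximum length carried by the type info. The class and method names (MaxLengthSketch, maxLengthOf) are illustrative only and are not part of VectorAssignRow.

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class MaxLengthSketch {

    // Returns the declared maximum length for char(n)/varchar(n), or -1 for other primitives.
    static int maxLengthOf(PrimitiveTypeInfo typeInfo) {
        switch (typeInfo.getPrimitiveCategory()) {
            case CHAR:
                return ((CharTypeInfo) typeInfo).getLength();
            case VARCHAR:
                return ((VarcharTypeInfo) typeInfo).getLength();
            default:
                // No additional data type specific length.
                return -1;
        }
    }

    public static void main(String[] args) {
        System.out.println(maxLengthOf(TypeInfoFactory.getCharTypeInfo(10)));    // 10
        System.out.println(maxLengthOf(TypeInfoFactory.getVarcharTypeInfo(25))); // 25
        System.out.println(maxLengthOf(TypeInfoFactory.intTypeInfo));            // -1
    }
}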

Example 27 with CharTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.

the class VectorDeserializeRow method initSourceEntry.

/*
   * Initialize one column's source deserialization related arrays.
   */
private void initSourceEntry(int logicalColumnIndex, int projectionColumnNum, TypeInfo sourceTypeInfo) {
    isConvert[logicalColumnIndex] = false;
    projectionColumnNums[logicalColumnIndex] = projectionColumnNum;
    Category sourceCategory = sourceTypeInfo.getCategory();
    sourceCategories[logicalColumnIndex] = sourceCategory;
    if (sourceCategory == Category.PRIMITIVE) {
        PrimitiveTypeInfo sourcePrimitiveTypeInfo = (PrimitiveTypeInfo) sourceTypeInfo;
        PrimitiveCategory sourcePrimitiveCategory = sourcePrimitiveTypeInfo.getPrimitiveCategory();
        sourcePrimitiveCategories[logicalColumnIndex] = sourcePrimitiveCategory;
        switch(sourcePrimitiveCategory) {
            case CHAR:
                maxLengths[logicalColumnIndex] = ((CharTypeInfo) sourcePrimitiveTypeInfo).getLength();
                break;
            case VARCHAR:
                maxLengths[logicalColumnIndex] = ((VarcharTypeInfo) sourcePrimitiveTypeInfo).getLength();
                break;
            default:
                // No additional data type specific setting.
                break;
        }
    } else {
        // We don't currently support complex types.
        Preconditions.checkState(false);
    }
}
Also used : Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
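The same length lookup can be driven from a type string, which is how a column type such as varchar(50) typically arrives at a deserializer. The sketch below is illustrative (the class name SourceEntrySketch is not from Hive); it parses the string with TypeInfoUtils and applies the branch shown above.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class SourceEntrySketch {
    public static void main(String[] args) {
        // Parse the column type as it would appear in table metadata.
        TypeInfo sourceTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString("varchar(50)");
        if (sourceTypeInfo.getCategory() != Category.PRIMITIVE) {
            // Mirrors the Preconditions check above: complex types are not handled here.
            throw new IllegalStateException("complex types not supported");
        }
        PrimitiveTypeInfo primitive = (PrimitiveTypeInfo) sourceTypeInfo;
        int maxLength;
        switch (primitive.getPrimitiveCategory()) {
            case CHAR:
                maxLength = ((CharTypeInfo) primitive).getLength();
                break;
            case VARCHAR:
                maxLength = ((VarcharTypeInfo) primitive).getLength();
                break;
            default:
                // Fixed-width or non-string primitive: no max length to record.
                maxLength = -1;
                break;
        }
        System.out.println(maxLength); // 50
    }
}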

Example 28 with CharTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.

the class TestVectorizationContext method testIfConditionalExprs.

/**
   * Test that correct VectorExpression classes are chosen for the
   * IF (expr1, expr2, expr3) conditional expression for integer, double,
   * boolean, timestamp, string, char, and varchar input types. expr1 is always an input column expression
   * of type long. expr2 and expr3 can be column expressions or constants of other types
   * but must have the same type.
   */
@Test
public void testIfConditionalExprs() throws HiveException {
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
    ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Long.class, "col2", "table", false);
    ExprNodeColumnDesc col3Expr = new ExprNodeColumnDesc(Long.class, "col3", "table", false);
    ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc(new Integer(1));
    ExprNodeConstantDesc constDesc3 = new ExprNodeConstantDesc(new Integer(2));
    // long column/column IF
    GenericUDFIf udf = new GenericUDFIf();
    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(col1Expr);
    children1.add(col2Expr);
    children1.add(col3Expr);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1);
    List<String> columns = new ArrayList<String>();
    columns.add("col0");
    columns.add("col1");
    columns.add("col2");
    columns.add("col3");
    VectorizationContext vc = new VectorizationContext("name", columns);
    VectorExpression ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongColumn);
    // long column/scalar IF
    children1.set(2, new ExprNodeConstantDesc(1L));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongScalar);
    // long scalar/scalar IF
    children1.set(1, new ExprNodeConstantDesc(1L));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongScalar);
    // long scalar/column IF
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongColumn);
    // test for double type
    col2Expr = new ExprNodeColumnDesc(Double.class, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(Double.class, "col3", "table", false);
    // double column/column IF
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleColumnDoubleColumn);
    // double column/scalar IF
    children1.set(2, new ExprNodeConstantDesc(1D));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleColumnDoubleScalar);
    // double scalar/scalar IF
    children1.set(1, new ExprNodeConstantDesc(1D));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarDoubleScalar);
    // double scalar/column IF
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarDoubleColumn);
    // double scalar/long column IF
    children1.set(2, new ExprNodeColumnDesc(Long.class, "col3", "table", false));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarLongColumn);
    // Additional combinations of (long,double)X(column,scalar) for each of the second
    // and third arguments are omitted. We have coverage of all the source templates
    // already.
    // test for timestamp type
    col2Expr = new ExprNodeColumnDesc(Timestamp.class, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(Timestamp.class, "col3", "table", false);
    // timestamp column/column IF
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprTimestampColumnColumn);
    // timestamp column/scalar IF where scalar is really a CAST of a constant to timestamp.
    ExprNodeGenericFuncDesc f = new ExprNodeGenericFuncDesc();
    f.setGenericUDF(new GenericUDFTimestamp());
    f.setTypeInfo(TypeInfoFactory.timestampTypeInfo);
    List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
    f.setChildren(children2);
    children2.add(new ExprNodeConstantDesc("2013-11-05 00:00:00.000"));
    children1.set(2, f);
    ve = vc.getVectorExpression(exprDesc);
    // We check for two different classes below because initially the result
    // is IfExprTimestampColumnColumn but in the future if the system is enhanced
    // with constant folding then the result will be IfExprTimestampColumnScalar.
    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass() || IfExprTimestampColumnScalar.class == ve.getClass());
    // timestamp scalar/scalar
    children1.set(1, f);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass() || IfExprTimestampScalarScalar.class == ve.getClass());
    // timestamp scalar/column
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass() || IfExprTimestampScalarColumn.class == ve.getClass());
    // test for boolean type
    col2Expr = new ExprNodeColumnDesc(Boolean.class, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(Boolean.class, "col3", "table", false);
    // column/column
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongColumn);
    // column/scalar IF
    children1.set(2, new ExprNodeConstantDesc(true));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongScalar);
    // scalar/scalar IF
    children1.set(1, new ExprNodeConstantDesc(true));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongScalar);
    // scalar/column IF
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongColumn);
    // test for string type
    constDesc2 = new ExprNodeConstantDesc("Alpha");
    constDesc3 = new ExprNodeConstantDesc("Bravo");
    col2Expr = new ExprNodeColumnDesc(String.class, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(String.class, "col3", "table", false);
    // column/column
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
    // column/scalar
    children1.set(2, constDesc3);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnStringScalar);
    // scalar/scalar
    children1.set(1, constDesc2);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringScalarStringScalar);
    // scalar/column
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringScalarStringGroupColumn);
    // test for CHAR type
    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
    constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
    constDesc3 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
    col2Expr = new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(charTypeInfo, "col3", "table", false);
    // column/column
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
    // column/scalar
    children1.set(2, constDesc3);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnCharScalar);
    // scalar/scalar
    children1.set(1, constDesc2);
    //    ve = vc.getVectorExpression(exprDesc);
    //    assertTrue(ve instanceof IfExprCharScalarCharScalar);
    // scalar/column
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprCharScalarStringGroupColumn);
    // test for VARCHAR type
    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
    constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
    constDesc3 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
    col2Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col2", "table", false);
    col3Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col3", "table", false);
    // column/column
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
    // column/scalar
    children1.set(2, constDesc3);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprStringGroupColumnVarCharScalar);
    // scalar/scalar
    children1.set(1, constDesc2);
    //    ve = vc.getVectorExpression(exprDesc);
    //    assertTrue(ve instanceof IfExprVarCharScalarVarCharScalar);
    // scalar/column
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprVarCharScalarStringGroupColumn);
}
Also used : IfExprStringGroupColumnVarCharScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar) GenericUDFIf(org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) IfExprCharScalarStringGroupColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn) ArrayList(java.util.ArrayList) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) IfExprStringGroupColumnStringGroupColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn) GenericUDFTimestamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp) IfExprLongColumnLongScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongScalar) VectorUDFUnixTimeStampTimestamp(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampTimestamp) VectorUDFYearTimestamp(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp) GenericUDFTimestamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp) Timestamp(java.sql.Timestamp) IfExprLongScalarLongScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLongScalar) IfExprDoubleScalarDoubleColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleColumn) IfExprLongColumnLongColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprLongColumnLongColumn) IfExprTimestampColumnColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnColumn) IfExprStringGroupColumnCharScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) IfExprDoubleColumnDoubleScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) IfExprVarCharScalarStringGroupColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn) IfExprLongScalarLongColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLongColumn) IfExprStringScalarStringGroupColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BRoundWithNumDigitsDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.BRoundWithNumDigitsDoubleToDouble) FuncRoundDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble) FuncBRoundDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncBRoundDoubleToDouble) FuncLogWithBaseDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble) FuncLogWithBaseLongToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble) FuncPowerDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble) FuncLnDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble) FuncSinDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble) RoundWithNumDigitsDoubleToDouble(org.apache.hadoop.hive.ql.exec.vector.expressions.RoundWithNumDigitsDoubleToDouble) 
IfExprDoubleColumnDoubleColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprDoubleColumnDoubleColumn) IfExprStringGroupColumnStringScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar) IfExprDoubleScalarDoubleScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar) IfExprDoubleScalarLongColumn(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarLongColumn) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) DynamicValueVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.DynamicValueVectorExpression) IfExprStringScalarStringScalar(org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar) Test(org.junit.Test)
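The CHAR portion of the test can be condensed into a small standalone sketch. The class name IfCharSketch is illustrative; everything else reuses the same Hive classes the test imports, and per the assertions above a char column/scalar IF should resolve to IfExprStringGroupColumnCharScalar.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class IfCharSketch {
    public static void main(String[] args) throws Exception {
        CharTypeInfo charTypeInfo = new CharTypeInfo(10);
        // IF(col1, char column, char constant), as in the char column/scalar case of the test.
        List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
        children.add(new ExprNodeColumnDesc(Long.class, "col1", "table", false));
        children.add(new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false));
        children.add(new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10)));
        ExprNodeGenericFuncDesc ifDesc =
            new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFIf(), children);

        List<String> columns = new ArrayList<String>();
        columns.add("col0");
        columns.add("col1");
        columns.add("col2");
        VectorizationContext vc = new VectorizationContext("name", columns);

        VectorExpression ve = vc.getVectorExpression(ifDesc);
        // Per the assertion above, this should print IfExprStringGroupColumnCharScalar.
        System.out.println(ve.getClass().getSimpleName());
    }
}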

Example 29 with CharTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.

the class TestGenericUDFCeil method testChar.

@Test
public void testChar() throws HiveException {
    GenericUDFCeil udf = new GenericUDFCeil();
    HiveChar vc = new HiveChar("-32300.004747", 12);
    HiveCharWritable input = new HiveCharWritable(vc);
    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(-32300L, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)
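As a hedged variation on the same pattern (not taken from the Hive test suite), the sketch below feeds a positive char(12) value through GenericUDFCeil; CEIL should round it up to the next whole number.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCeil;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.LongWritable;

public class CeilCharSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFCeil udf = new GenericUDFCeil();
        CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
        HiveCharWritable input = new HiveCharWritable(new HiveChar("32300.004747", 12));
        ObjectInspector[] inputOIs =
            { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
        DeferredObject[] evalArgs = { new DeferredJavaObject(input) };
        PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
        LongWritable res = (LongWritable) udf.evaluate(evalArgs);
        // CEIL of 32300.004747 rounds up, so this should print the long (bigint) result 32301.
        System.out.println(oi.getTypeInfo() + " " + res.get());
    }
}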

Example 30 with CharTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.

the class StatsUtils method getAvgColLenOf.

/**
   * Get the raw data size of variable length data types
   * @param conf
   *          - hive conf
   * @param oi
   *          - object inspector
   * @param colType
   *          - column type
   * @return raw data size
   */
public static long getAvgColLenOf(HiveConf conf, ObjectInspector oi, String colType) {
    long configVarLen = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_MAX_VARIABLE_LENGTH);
    String colTypeLowCase = colType.toLowerCase();
    if (colTypeLowCase.equals(serdeConstants.STRING_TYPE_NAME)) {
        // constant string projection Ex: select "hello" from table
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof StringObjectInspector) {
            // return the variable length from config
            return configVarLen;
        }
    } else if (colTypeLowCase.startsWith(serdeConstants.VARCHAR_TYPE_NAME)) {
        // constant varchar projection
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof HiveVarcharObjectInspector) {
            VarcharTypeInfo type = (VarcharTypeInfo) ((HiveVarcharObjectInspector) oi).getTypeInfo();
            return type.getLength();
        }
    } else if (colTypeLowCase.startsWith(serdeConstants.CHAR_TYPE_NAME)) {
        // constant char projection
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof HiveCharObjectInspector) {
            CharTypeInfo type = (CharTypeInfo) ((HiveCharObjectInspector) oi).getTypeInfo();
            return type.getLength();
        }
    } else if (colTypeLowCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
        // constant byte arrays
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            BytesWritable constantValue = (BytesWritable) coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.getLength();
        } else if (oi instanceof BinaryObjectInspector) {
            // return the variable length from config
            return configVarLen;
        }
    } else {
        // complex types (map, list, struct, union)
        return getSizeOfComplexTypes(conf, oi);
    }
    throw new IllegalArgumentException("Size requested for unknown type: " + colType + " OI: " + oi.getTypeName());
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) BytesWritable(org.apache.hadoop.io.BytesWritable) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector)
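A minimal sketch of the char branch in action (the class name AvgColLenSketch and the char(20) value are illustrative): build a writable object inspector for char(20) and ask getAvgColLenOf for the average column length, which should come back as the declared length of 20.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.stats.StatsUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class AvgColLenSketch {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Writable object inspector for char(20); it implements HiveCharObjectInspector,
        // so the char branch above returns the declared length rather than the config default.
        ObjectInspector charOI =
            PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                TypeInfoFactory.getCharTypeInfo(20));
        System.out.println(StatsUtils.getAvgColLenOf(conf, charOI, "char(20)")); // 20
    }
}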

Aggregations

CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)24
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)19
HiveChar (org.apache.hadoop.hive.common.type.HiveChar)17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)13
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)13
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)12
Timestamp (java.sql.Timestamp)11
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)9
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)9
Text (org.apache.hadoop.io.Text)9
Test (org.junit.Test)9
Date (java.sql.Date)8
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable)8
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)7
ArrayList (java.util.ArrayList)5
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)5
BytesWritable (org.apache.hadoop.io.BytesWritable)5
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)4
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)4
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)4