Example 36 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class VectorAssignRow method assignRowColumn.

private void assignRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, Object object) {
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    switch(targetTypeInfo.getCategory()) {
        case PRIMITIVE:
            {
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        if (object instanceof Boolean) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((Boolean) object) ? 1 : 0);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
                        }
                        break;
                    case BYTE:
                        if (object instanceof Byte) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Byte) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ByteWritable) object).get();
                        }
                        break;
                    case SHORT:
                        if (object instanceof Short) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Short) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ShortWritable) object).get();
                        }
                        break;
                    case INT:
                        if (object instanceof Integer) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Integer) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((IntWritable) object).get();
                        }
                        break;
                    case LONG:
                        if (object instanceof Long) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Long) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((LongWritable) object).get();
                        }
                        break;
                    case TIMESTAMP:
                        if (object instanceof Timestamp) {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((Timestamp) object));
                        } else {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((TimestampWritable) object).getTimestamp());
                        }
                        break;
                    case DATE:
                        if (object instanceof Date) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = DateWritable.dateToDays((Date) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((DateWritable) object).getDays();
                        }
                        break;
                    case FLOAT:
                        if (object instanceof Float) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Float) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((FloatWritable) object).get();
                        }
                        break;
                    case DOUBLE:
                        if (object instanceof Double) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Double) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((DoubleWritable) object).get();
                        }
                        break;
                    case BINARY:
                        {
                            if (object instanceof byte[]) {
                                byte[] bytes = (byte[]) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                BytesWritable bw = (BytesWritable) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
                            }
                        }
                        break;
                    case STRING:
                        {
                            if (object instanceof String) {
                                String string = (String) object;
                                byte[] bytes = string.getBytes();
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                Text tw = (Text) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (object instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) object;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (object instanceof HiveChar) {
                                hiveChar = (HiveChar) object;
                            } else {
                                hiveChar = ((HiveCharWritable) object).getHiveChar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            // We store CHAR in vector row batch with padding stripped.
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        if (object instanceof HiveDecimal) {
                            ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
                        } else {
                            ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        if (object instanceof HiveIntervalYearMonth) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonth) object).getTotalMonths();
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        if (object instanceof HiveIntervalDayTime) {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, (HiveIntervalDayTime) object);
                        } else {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
            }
            break;
        case LIST:
            {
                final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                final ListTypeInfo listTypeInfo = (ListTypeInfo) targetTypeInfo;
                final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                final List list = (List) object;
                final int size = list.size();
                final int childCount = listColumnVector.childCount;
                listColumnVector.offsets[batchIndex] = childCount;
                listColumnVector.lengths[batchIndex] = size;
                listColumnVector.childCount = childCount + size;
                listColumnVector.child.ensureSize(childCount + size, true);
                for (int i = 0; i < size; i++) {
                    assignRowColumn(listColumnVector.child, childCount + i, elementTypeInfo, list.get(i));
                }
            }
            break;
        case MAP:
            {
                final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                final Map<Object, Object> map = (Map<Object, Object>) object;
                final int size = map.size();
                int childCount = mapColumnVector.childCount;
                mapColumnVector.offsets[batchIndex] = childCount;
                mapColumnVector.lengths[batchIndex] = size;
                mapColumnVector.keys.ensureSize(childCount + size, true);
                mapColumnVector.values.ensureSize(childCount + size, true);
                for (Map.Entry<Object, Object> entry : map.entrySet()) {
                    assignRowColumn(mapColumnVector.keys, childCount, mapTypeInfo.getMapKeyTypeInfo(), entry.getKey());
                    assignRowColumn(mapColumnVector.values, childCount, mapTypeInfo.getMapValueTypeInfo(), entry.getValue());
                    childCount++;
                }
                mapColumnVector.childCount = childCount;
            }
            break;
        case STRUCT:
            {
                final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                final List<TypeInfo> targetFieldTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                final int size = targetFieldTypeInfos.size();
                if (object instanceof List) {
                    final List struct = (List) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), struct.get(i));
                    }
                } else {
                    final Object[] array = (Object[]) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), array[i]);
                    }
                }
            }
            break;
        case UNION:
            {
                final StandardUnion union = (StandardUnion) object;
                final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final byte tag = union.getTag();
                unionColumnVector.tags[batchIndex] = tag;
                assignRowColumn(unionColumnVector.fields[tag], batchIndex, objectTypeInfos.get(tag), union.getObject());
            }
            break;
        default:
            throw new RuntimeException("Category " + targetTypeInfo.getCategory().name() + " not supported");
    }
    /*
     * We always set the null flag to false when there is a value.
     */
    columnVector.isNull[batchIndex] = false;
}
Also used: HiveChar (org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), Timestamp (java.sql.Timestamp), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), List (java.util.List), LongWritable (org.apache.hadoop.io.LongWritable), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), Date (java.sql.Date), FloatWritable (org.apache.hadoop.io.FloatWritable), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), BooleanWritable (org.apache.hadoop.io.BooleanWritable), StandardUnion (org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion), Map (java.util.Map)
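
As a minimal standalone sketch of the DECIMAL branch above, the following assigns a HiveDecimal into a DecimalColumnVector and then clears the null flag, the same two steps assignRowColumn performs for a decimal column. The precision and scale (10, 2) and the class name are illustrative assumptions, not values taken from the example.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class DecimalAssignSketch {
    public static void main(String[] args) {
        // A batch-sized column vector for decimal(10,2) values.
        DecimalColumnVector col =
                new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE, 10, 2);
        HiveDecimal value = HiveDecimal.create("123.45");
        int batchIndex = 0;
        // set() enforces the column's precision/scale; a value that cannot
        // be represented is recorded as null instead.
        col.set(batchIndex, value);
        // As in assignRowColumn, a successful assignment clears the null flag.
        col.isNull[batchIndex] = false;
        System.out.println(col.vector[batchIndex]);  // 123.45
    }
}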

Example 37 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class VectorizationContext method getInExpression.

/**
 * Create a filter or boolean-valued expression for column IN ( <list-of-constants> )
 */
private VectorExpression getInExpression(List<ExprNodeDesc> childExpr, VectorExpressionDescriptor.Mode mode, TypeInfo returnType) throws HiveException {
    ExprNodeDesc colExpr = childExpr.get(0);
    List<ExprNodeDesc> inChildren = childExpr.subList(1, childExpr.size());
    String colType = colExpr.getTypeString();
    colType = VectorizationContext.mapTypeNameSynonyms(colType);
    TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(colType);
    Category category = colTypeInfo.getCategory();
    if (category == Category.STRUCT) {
        return getStructInExpression(childExpr, colExpr, colTypeInfo, inChildren, mode, returnType);
    } else if (category != Category.PRIMITIVE) {
        return null;
    }
    // prepare arguments for createVectorExpression
    List<ExprNodeDesc> childrenForInList = evaluateCastOnConstants(inChildren);
    /* This method assumes that the IN list has no NULL entries. That is enforced elsewhere,
     * in the Vectorizer class. If NULL is passed in as a list entry, behavior is not defined.
     * If in the future, NULL values are allowed in the IN list, be sure to handle 3-valued
     * logic correctly. E.g. NOT (col IN (null)) should be considered UNKNOWN, so that would
     * become FALSE in the WHERE clause, and cause the row in question to be filtered out.
     * See the discussion in Jira HIVE-5583.
     */
    VectorExpression expr = null;
    // Validate the IN items are only constants.
    for (ExprNodeDesc inListChild : childrenForInList) {
        if (!(inListChild instanceof ExprNodeConstantDesc)) {
            throw new HiveException("Vectorizing IN expression only supported for constant values");
        }
    }
    // determine class
    Class<?> cl = null;
    // This assumes all IN-list entries share the column's type; non-vectorized execution validates that explicitly during UDF init.
    if (isIntFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterLongColumnInList.class : LongColumnInList.class);
        long[] inVals = new long[childrenForInList.size()];
        for (int i = 0; i != inVals.length; i++) {
            inVals[i] = getIntFamilyScalarAsLong((ExprNodeConstantDesc) childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((ILongInExpr) expr).setInListValues(inVals);
    } else if (isTimestampFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterTimestampColumnInList.class : TimestampColumnInList.class);
        Timestamp[] inVals = new Timestamp[childrenForInList.size()];
        for (int i = 0; i != inVals.length; i++) {
            inVals[i] = getTimestampScalar(childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((ITimestampInExpr) expr).setInListValues(inVals);
    } else if (isStringFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterStringColumnInList.class : StringColumnInList.class);
        byte[][] inVals = new byte[childrenForInList.size()][];
        for (int i = 0; i != inVals.length; i++) {
            inVals[i] = getStringScalarAsByteArray((ExprNodeConstantDesc) childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((IStringInExpr) expr).setInListValues(inVals);
    } else if (isFloatFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterDoubleColumnInList.class : DoubleColumnInList.class);
        double[] inValsD = new double[childrenForInList.size()];
        for (int i = 0; i != inValsD.length; i++) {
            inValsD[i] = getNumericScalarAsDouble(childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((IDoubleInExpr) expr).setInListValues(inValsD);
    } else if (isDecimalFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterDecimalColumnInList.class : DecimalColumnInList.class);
        HiveDecimal[] inValsD = new HiveDecimal[childrenForInList.size()];
        for (int i = 0; i != inValsD.length; i++) {
            inValsD[i] = (HiveDecimal) getVectorTypeScalarValue((ExprNodeConstantDesc) childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((IDecimalInExpr) expr).setInListValues(inValsD);
    } else if (isDateFamily(colType)) {
        cl = (mode == VectorExpressionDescriptor.Mode.FILTER ? FilterLongColumnInList.class : LongColumnInList.class);
        long[] inVals = new long[childrenForInList.size()];
        for (int i = 0; i != inVals.length; i++) {
            inVals[i] = (Long) getVectorTypeScalarValue((ExprNodeConstantDesc) childrenForInList.get(i));
        }
        expr = createVectorExpression(cl, childExpr.subList(0, 1), VectorExpressionDescriptor.Mode.PROJECTION, returnType);
        ((ILongInExpr) expr).setInListValues(inVals);
    }
    // If no vectorizable type family matched, expr is still null, which causes execution to fall back to row mode.
    return expr;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), BaseCharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
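
The block comment in getInExpression warns that NULL entries in the IN list would require three-valued logic. A small sketch of why, using a Java Boolean with null standing in for SQL UNKNOWN (the class and helper names are hypothetical):

public class ThreeValuedLogicSketch {

    // SQL NOT over three-valued logic: NOT UNKNOWN stays UNKNOWN.
    static Boolean not3(Boolean b) {
        return b == null ? null : !b;
    }

    public static void main(String[] args) {
        Boolean colInNullList = null;           // col IN (NULL) is UNKNOWN
        Boolean negated = not3(colInNullList);  // NOT UNKNOWN is still UNKNOWN
        // A WHERE clause keeps a row only when the predicate is exactly TRUE,
        // so NOT (col IN (NULL)) filters the row out, as the comment describes.
        System.out.println(Boolean.TRUE.equals(negated));  // false
    }
}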

Example 38 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class VectorizedRowBatchCtx method addPartitionColsToBatch.

public void addPartitionColsToBatch(ColumnVector[] cols, Object[] partitionValues) {
    if (partitionValues != null) {
        for (int i = 0; i < partitionColumnCount; i++) {
            Object value = partitionValues[i];
            int colIndex = dataColumnCount + i;
            String partitionColumnName = rowColumnNames[colIndex];
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) rowColumnTypeInfos[colIndex];
            switch(primitiveTypeInfo.getPrimitiveCategory()) {
                case BOOLEAN:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Boolean) value ? 1 : 0);
                        }
                    }
                    break;
                case BYTE:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Byte) value);
                        }
                    }
                    break;
                case SHORT:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Short) value);
                        }
                    }
                    break;
                case INT:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Integer) value);
                        }
                    }
                    break;
                case LONG:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Long) value);
                        }
                    }
                    break;
                case DATE:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(DateWritable.dateToDays((Date) value));
                        }
                    }
                    break;
                case TIMESTAMP:
                    {
                        TimestampColumnVector lcv = (TimestampColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Timestamp) value);
                        }
                    }
                    break;
                case INTERVAL_YEAR_MONTH:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(((HiveIntervalYearMonth) value).getTotalMonths());
                        }
                    }
                    break;
                case INTERVAL_DAY_TIME:
                    {
                        IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) cols[colIndex];
                        if (value == null) {
                            icv.noNulls = false;
                            icv.isNull[0] = true;
                            icv.isRepeating = true;
                        } else {
                            icv.fill(((HiveIntervalDayTime) value));
                        }
                    }
                    break;
                case FLOAT:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Float) value);
                        }
                    }
                    break;
                case DOUBLE:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Double) value);
                        }
                    }
                    break;
                case DECIMAL:
                    {
                        DecimalColumnVector dv = (DecimalColumnVector) cols[colIndex];
                        if (value == null) {
                            dv.noNulls = false;
                            dv.isNull[0] = true;
                            dv.isRepeating = true;
                        } else {
                            dv.fill((HiveDecimal) value);
                        }
                    }
                    break;
                case BINARY:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) cols[colIndex];
                        byte[] bytes = (byte[]) value;
                        if (bytes == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            bcv.fill(bytes);
                        }
                    }
                    break;
                case STRING:
                case CHAR:
                case VARCHAR:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) cols[colIndex];
                        // Guard against a null partition value before calling toString().
                        if (value == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            bcv.fill(value.toString().getBytes());
                        }
                    }
                    break;
                default:
                    throw new RuntimeException("Unable to recognize the partition type " + primitiveTypeInfo.getPrimitiveCategory() + " for column " + partitionColumnName);
            }
        }
    }
}
Also used: Timestamp (java.sql.Timestamp), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
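
The fill() pattern repeated in every branch above is worth isolating: a partition column holds one constant for the whole batch, so the value is stored once and the vector is marked repeating. A minimal sketch for the DECIMAL case, with assumed precision and scale:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class PartitionFillSketch {
    public static void main(String[] args) {
        DecimalColumnVector dv =
                new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE, 10, 2);
        // fill() writes the value at index 0 and sets isRepeating, so every
        // row in the batch reads the same partition constant.
        dv.fill(HiveDecimal.create("19.99"));
        System.out.println(dv.isRepeating);  // true
        System.out.println(dv.vector[0]);    // 19.99
    }
}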

Example 39 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class DecimalColumnInList method transientInit.

@Override
public void transientInit() throws HiveException {
    super.transientInit();
    inSet = new HashSet<HiveDecimalWritable>(inListValues.length);
    for (HiveDecimal val : inListValues) {
        inSet.add(new HiveDecimalWritable(val));
    }
}
Also used: HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)

Example 40 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class FilterDecimalColumnInList method transientInit.

@Override
public void transientInit() throws HiveException {
    super.transientInit();
    inSet = new HashSet<HiveDecimalWritable>(inListValues.length);
    for (HiveDecimal val : inListValues) {
        inSet.add(new HiveDecimalWritable(val));
    }
}
Also used: HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
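
Examples 39 and 40 build the lookup set from HiveDecimalWritable rather than raw HiveDecimal, presumably so that values read from a DecimalColumnVector, which stores HiveDecimalWritable entries, can be tested directly. A hedged sketch of the resulting membership test outside any vectorized expression (the class name is hypothetical):

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalInSetSketch {
    public static void main(String[] args) {
        HiveDecimal[] inListValues = {
                HiveDecimal.create("1.50"), HiveDecimal.create("2.25")
        };
        // Same construction as transientInit() above: wrap each constant.
        Set<HiveDecimalWritable> inSet = new HashSet<>(inListValues.length);
        for (HiveDecimal val : inListValues) {
            inSet.add(new HiveDecimalWritable(val));
        }
        // Per-row membership test, as the expression's evaluate() would run.
        HiveDecimal colValue = HiveDecimal.create("2.25");
        System.out.println(inSet.contains(new HiveDecimalWritable(colValue)));  // true
    }
}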

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 94 usages
Test (org.junit.Test): 28 usages
Timestamp (java.sql.Timestamp): 24 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 23 usages
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 22 usages
Text (org.apache.hadoop.io.Text): 22 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 21 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 21 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 20 usages
Date (java.sql.Date): 19 usages
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 17 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16 usages
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 15 usages
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 15 usages
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 15 usages
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 14 usages
IntWritable (org.apache.hadoop.io.IntWritable): 14 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 13 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 13 usages