
Example 16 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class VectorExtractRow method extractRowColumn.

public Object extractRowColumn(ColumnVector colVector, TypeInfo typeInfo, ObjectInspector objectInspector, int batchIndex) {
    if (colVector == null) {
        // The planner will not include unneeded columns for reading, but other parts of execution may ask for them.
        return null;
    }
    final int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
    if (!colVector.noNulls && colVector.isNull[adjustedIndex]) {
        return null;
    }
    final Category category = typeInfo.getCategory();
    switch(category) {
        case PRIMITIVE:
            {
                final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
                final Writable primitiveWritable = VectorizedBatchUtil.getPrimitiveWritable(primitiveCategory);
                switch(primitiveCategory) {
                    case VOID:
                        return null;
                    case BOOLEAN:
                        ((BooleanWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex] == 0 ? false : true);
                        return primitiveWritable;
                    case BYTE:
                        ((ByteWritable) primitiveWritable).set((byte) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case SHORT:
                        ((ShortWritable) primitiveWritable).set((short) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INT:
                        ((IntWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case LONG:
                        ((LongWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case TIMESTAMP:
                        // Convert from the java.sql.Timestamp used by vectorization to the serializable org.apache.hadoop.hive.common.type.Timestamp.
                        java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
                        Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
                        ((TimestampWritableV2) primitiveWritable).set(serializableTS);
                        return primitiveWritable;
                    case DATE:
                        ((DateWritableV2) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case FLOAT:
                        ((FloatWritable) primitiveWritable).set((float) ((DoubleColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case DOUBLE:
                        ((DoubleWritable) primitiveWritable).set(((DoubleColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case BINARY:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            BytesWritable bytesWritable = (BytesWritable) primitiveWritable;
                            if (bytes == null || length == 0) {
                                if (length > 0) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                                bytesWritable.set(EMPTY_BYTES, 0, 0);
                            } else {
                                bytesWritable.set(bytes, start, length);
                            }
                            return primitiveWritable;
                        }
                    case STRING:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            if (bytes == null || length == 0) {
                                if (length > 0) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                                ((Text) primitiveWritable).set(EMPTY_BYTES, 0, 0);
                            } else {
                                // Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
                                ((Text) primitiveWritable).set(bytes, start, length);
                            }
                            return primitiveWritable;
                        }
                    case VARCHAR:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            final HiveVarcharWritable hiveVarcharWritable = (HiveVarcharWritable) primitiveWritable;
                            if (bytes == null || length == 0) {
                                if (length > 0) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                                hiveVarcharWritable.set(EMPTY_STRING, -1);
                            } else {
                                final int adjustedLength = StringExpr.truncate(bytes, start, length, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                                if (adjustedLength == 0) {
                                    hiveVarcharWritable.set(EMPTY_STRING, -1);
                                } else {
                                    hiveVarcharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), -1);
                                }
                            }
                            return primitiveWritable;
                        }
                    case CHAR:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            final HiveCharWritable hiveCharWritable = (HiveCharWritable) primitiveWritable;
                            final int maxLength = ((CharTypeInfo) primitiveTypeInfo).getLength();
                            if (bytes == null || length == 0) {
                                if (length > 0) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                                hiveCharWritable.set(EMPTY_STRING, maxLength);
                            } else {
                                final int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, ((CharTypeInfo) primitiveTypeInfo).getLength());
                                if (adjustedLength == 0) {
                                    hiveCharWritable.set(EMPTY_STRING, maxLength);
                                } else {
                                    hiveCharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), maxLength);
                                }
                            }
                            return primitiveWritable;
                        }
                    case DECIMAL:
                        if (colVector instanceof Decimal64ColumnVector) {
                            Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) colVector;
                            ((HiveDecimalWritable) primitiveWritable).deserialize64(dec64ColVector.vector[adjustedIndex], dec64ColVector.scale);
                        } else {
                            // The HiveDecimalWritable set method will quickly copy the deserialized decimal writable fields.
                            ((HiveDecimalWritable) primitiveWritable).set(((DecimalColumnVector) colVector).vector[adjustedIndex]);
                        }
                        return primitiveWritable;
                    case INTERVAL_YEAR_MONTH:
                        ((HiveIntervalYearMonthWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INTERVAL_DAY_TIME:
                        ((HiveIntervalDayTimeWritable) primitiveWritable).set(((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(adjustedIndex));
                        return primitiveWritable;
                    default:
                        throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
                }
            }
        case LIST:
            {
                final ListColumnVector listColumnVector = (ListColumnVector) colVector;
                final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                final ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
                final int offset = (int) listColumnVector.offsets[adjustedIndex];
                final int size = (int) listColumnVector.lengths[adjustedIndex];
                final List list = new ArrayList();
                for (int i = 0; i < size; i++) {
                    list.add(extractRowColumn(listColumnVector.child, listTypeInfo.getListElementTypeInfo(), listObjectInspector.getListElementObjectInspector(), offset + i));
                }
                return list;
            }
        case MAP:
            {
                final MapColumnVector mapColumnVector = (MapColumnVector) colVector;
                final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                final MapObjectInspector mapObjectInspector = (MapObjectInspector) objectInspector;
                final int offset = (int) mapColumnVector.offsets[adjustedIndex];
                final int size = (int) mapColumnVector.lengths[adjustedIndex];
                final Map<Object, Object> map = new LinkedHashMap<Object, Object>();
                for (int i = 0; i < size; i++) {
                    final Object key = extractRowColumn(mapColumnVector.keys, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), offset + i);
                    final Object value = extractRowColumn(mapColumnVector.values, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), offset + i);
                    map.put(key, value);
                }
                return map;
            }
        case STRUCT:
            {
                final StructColumnVector structColumnVector = (StructColumnVector) colVector;
                final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                final StandardStructObjectInspector structInspector = (StandardStructObjectInspector) objectInspector;
                final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                final int size = fieldTypeInfos.size();
                final List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
                final Object struct = structInspector.create();
                for (int i = 0; i < size; i++) {
                    final StructField structField = structFields.get(i);
                    final TypeInfo fieldTypeInfo = fieldTypeInfos.get(i);
                    final Object value = extractRowColumn(structColumnVector.fields[i], fieldTypeInfo, structField.getFieldObjectInspector(), adjustedIndex);
                    structInspector.setStructFieldData(struct, structField, value);
                }
                return struct;
            }
        case UNION:
            {
                final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final UnionObjectInspector unionInspector = (UnionObjectInspector) objectInspector;
                final List<ObjectInspector> unionInspectors = unionInspector.getObjectInspectors();
                final UnionColumnVector unionColumnVector = (UnionColumnVector) colVector;
                final byte tag = (byte) unionColumnVector.tags[adjustedIndex];
                final Object object = extractRowColumn(unionColumnVector.fields[tag], objectTypeInfos.get(tag), unionInspectors.get(tag), adjustedIndex);
                final StandardUnion standardUnion = new StandardUnion();
                standardUnion.setTag(tag);
                standardUnion.setObject(object);
                return standardUnion;
            }
        default:
            throw new RuntimeException("Category " + category.name() + " not supported");
    }
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) ArrayList(java.util.ArrayList) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) ArrayList(java.util.ArrayList) List(java.util.List) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
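In the SHORT branch above, the column data is carried in a LongColumnVector and narrowed to a short on extraction. Below is a minimal standalone sketch of that pattern; the class name and the hand-built vector are illustrative only, not part of Hive.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ExtractShortSketch {
    public static void main(String[] args) {
        // SHORT columns ride in a LongColumnVector; extraction narrows the long value to a short.
        LongColumnVector colVector = new LongColumnVector(4);
        colVector.vector[0] = 123;
        colVector.noNulls = true;

        int batchIndex = 0;
        int adjustedIndex = colVector.isRepeating ? 0 : batchIndex;
        ShortWritable writable = new ShortWritable();
        if (colVector.noNulls || !colVector.isNull[adjustedIndex]) {
            writable.set((short) colVector.vector[adjustedIndex]);
        }
        System.out.println(writable.get()); // prints 123
    }
}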

Example 17 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class JoinUtil method computeMapJoinValues.

/**
 * Return the value as a standard object. StandardObject can be inspected by a
 * standard ObjectInspector.
 */
public static Object[] computeMapJoinValues(Object row, List<ExprNodeEvaluator> valueFields, List<ObjectInspector> valueFieldsOI, List<ExprNodeEvaluator> filters, List<ObjectInspector> filtersOI, int[] filterMap) throws HiveException {
    // Compute the keys
    Object[] nr;
    if (filterMap != null) {
        nr = new Object[valueFields.size() + 1];
        // add whether the row is filtered or not.
        nr[valueFields.size()] = new ShortWritable(isFiltered(row, filters, filtersOI, filterMap));
    } else {
        nr = new Object[valueFields.size()];
    }
    for (int i = 0; i < valueFields.size(); i++) {
        nr[i] = ObjectInspectorUtils.copyToStandardObject(valueFields.get(i).evaluate(row), valueFieldsOI.get(i), ObjectInspectorCopyOption.WRITABLE);
    }
    return nr;
}
Also used : ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable)
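When a filterMap is supplied, the extra trailing slot holds the filter result as a ShortWritable bitmask. The sketch below shows how a caller might read that tag back; the array contents are made up for illustration, and treating a zero bitmask as "no filter rejected the row" is an assumption about the tag's encoding.

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class FilterTagSketch {
    public static void main(String[] args) {
        // Pretend computeMapJoinValues returned two value columns plus the trailing filter tag.
        Object[] nr = new Object[] { "v0", "v1", new ShortWritable((short) 0) };
        short filterTag = ((ShortWritable) nr[nr.length - 1]).get();
        // Assumption: a zero bitmask means the row passed every join filter.
        System.out.println("passes all filters: " + (filterTag == 0));
    }
}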

Example 18 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class RecordReaderImpl method nextShort.

static ShortWritable nextShort(ColumnVector vector, int row, Object previous) {
    if (vector.isRepeating) {
        row = 0;
    }
    if (vector.noNulls || !vector.isNull[row]) {
        ShortWritable result;
        if (previous == null || previous.getClass() != ShortWritable.class) {
            result = new ShortWritable();
        } else {
            result = (ShortWritable) previous;
        }
        result.set((short) ((LongColumnVector) vector).vector[row]);
        return result;
    } else {
        return null;
    }
}
Also used : ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable)
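The previous argument exists so the reader can reuse one ShortWritable across rows instead of allocating a new object per value. Here is a small sketch of that reuse pattern outside RecordReaderImpl; the loop and names are illustrative.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ReuseWritableSketch {
    public static void main(String[] args) {
        LongColumnVector vector = new LongColumnVector(3);
        vector.vector[0] = 1;
        vector.vector[1] = 2;
        vector.vector[2] = 3;
        vector.noNulls = true;

        Object previous = null;
        for (int row = 0; row < 3; row++) {
            ShortWritable result;
            if (previous == null || previous.getClass() != ShortWritable.class) {
                result = new ShortWritable(); // allocate only on the first row
            } else {
                result = (ShortWritable) previous; // reuse on later rows
            }
            result.set((short) vector.vector[row]);
            previous = result;
            System.out.println(result.get());
        }
    }
}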

Example 19 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class VectorAssignRow method assignRowColumn.

private void assignRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, Object object) {
    if (object == null) {
        assignNullRowColumn(columnVector, batchIndex, targetTypeInfo);
        return;
    }
    switch(targetTypeInfo.getCategory()) {
        case PRIMITIVE:
            {
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        if (object instanceof Boolean) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((Boolean) object) ? 1 : 0);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
                        }
                        break;
                    case BYTE:
                        if (object instanceof Byte) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Byte) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ByteWritable) object).get();
                        }
                        break;
                    case SHORT:
                        if (object instanceof Short) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Short) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ShortWritable) object).get();
                        }
                        break;
                    case INT:
                        if (object instanceof Integer) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Integer) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((IntWritable) object).get();
                        }
                        break;
                    case LONG:
                        if (object instanceof Long) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Long) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((LongWritable) object).get();
                        }
                        break;
                    case TIMESTAMP:
                        if (object instanceof Timestamp) {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((Timestamp) object).toSqlTimestamp());
                        } else {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((TimestampWritableV2) object).getTimestamp().toSqlTimestamp());
                        }
                        break;
                    case DATE:
                        if (object instanceof Date) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = DateWritableV2.dateToDays((Date) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((DateWritableV2) object).getDays();
                        }
                        break;
                    case FLOAT:
                        if (object instanceof Float) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Float) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((FloatWritable) object).get();
                        }
                        break;
                    case DOUBLE:
                        if (object instanceof Double) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Double) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((DoubleWritable) object).get();
                        }
                        break;
                    case BINARY:
                        {
                            if (object instanceof byte[]) {
                                byte[] bytes = (byte[]) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                BytesWritable bw = (BytesWritable) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
                            }
                        }
                        break;
                    case STRING:
                        {
                            if (object instanceof String) {
                                String string = (String) object;
                                byte[] bytes = string.getBytes();
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                Text tw = (Text) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (object instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) object;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (object instanceof HiveChar) {
                                hiveChar = (HiveChar) object;
                            } else {
                                hiveChar = ((HiveCharWritable) object).getHiveChar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            // We store CHAR in vector row batch with padding stripped.
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        if (columnVector instanceof DecimalColumnVector) {
                            if (object instanceof HiveDecimal) {
                                ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
                            } else {
                                ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
                            }
                        } else {
                            if (object instanceof HiveDecimal) {
                                ((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
                            } else {
                                ((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        if (object instanceof HiveIntervalYearMonth) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonth) object).getTotalMonths();
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        if (object instanceof HiveIntervalDayTime) {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, (HiveIntervalDayTime) object);
                        } else {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
            }
            break;
        case LIST:
            {
                final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                final ListTypeInfo listTypeInfo = (ListTypeInfo) targetTypeInfo;
                final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                final List list = (List) object;
                final int size = list.size();
                final int childCount = listColumnVector.childCount;
                listColumnVector.offsets[batchIndex] = childCount;
                listColumnVector.lengths[batchIndex] = size;
                listColumnVector.childCount = childCount + size;
                listColumnVector.child.ensureSize(childCount + size, true);
                for (int i = 0; i < size; i++) {
                    assignRowColumn(listColumnVector.child, childCount + i, elementTypeInfo, list.get(i));
                }
            }
            break;
        case MAP:
            {
                final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                final Map<Object, Object> map = (Map<Object, Object>) object;
                final int size = map.size();
                int childCount = mapColumnVector.childCount;
                mapColumnVector.offsets[batchIndex] = childCount;
                mapColumnVector.lengths[batchIndex] = size;
                mapColumnVector.keys.ensureSize(childCount + size, true);
                mapColumnVector.values.ensureSize(childCount + size, true);
                for (Map.Entry<Object, Object> entry : map.entrySet()) {
                    assignRowColumn(mapColumnVector.keys, childCount, mapTypeInfo.getMapKeyTypeInfo(), entry.getKey());
                    assignRowColumn(mapColumnVector.values, childCount, mapTypeInfo.getMapValueTypeInfo(), entry.getValue());
                    childCount++;
                }
                mapColumnVector.childCount = childCount;
            }
            break;
        case STRUCT:
            {
                final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                final List<TypeInfo> targetFieldTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                final int size = targetFieldTypeInfos.size();
                if (object instanceof List) {
                    final List struct = (List) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), struct.get(i));
                    }
                } else {
                    final Object[] array = (Object[]) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), array[i]);
                    }
                }
            }
            break;
        case UNION:
            {
                final StandardUnion union = (StandardUnion) object;
                final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final byte tag = union.getTag();
                unionColumnVector.tags[batchIndex] = tag;
                assignRowColumn(unionColumnVector.fields[tag], batchIndex, objectTypeInfos.get(tag), union.getObject());
            }
            break;
        default:
            throw new RuntimeException("Category " + targetTypeInfo.getCategory().name() + " not supported");
    }
    /*
     * We always set the null flag to false when there is a value.
     */
    columnVector.isNull[batchIndex] = false;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) List(java.util.List) ArrayList(java.util.ArrayList) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(org.apache.hadoop.hive.common.type.Date) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) Map(java.util.Map) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
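The SHORT branch above is the inverse of extraction: a boxed Short or a ShortWritable is widened into the long array of a LongColumnVector. A minimal sketch of that assignment, with illustrative names and a hand-built vector:

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class AssignShortSketch {
    public static void main(String[] args) {
        LongColumnVector columnVector = new LongColumnVector(4);
        int batchIndex = 0;
        Object object = new ShortWritable((short) 7);

        // Accept either the boxed Short or the ShortWritable form, as assignRowColumn does.
        if (object instanceof Short) {
            columnVector.vector[batchIndex] = (Short) object;
        } else {
            columnVector.vector[batchIndex] = ((ShortWritable) object).get();
        }
        columnVector.isNull[batchIndex] = false;
        System.out.println(columnVector.vector[batchIndex]); // prints 7
    }
}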

Example 20 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class GenericUDFTrunc method evaluateNumber.

private Object evaluateNumber(DeferredObject[] arguments) throws HiveException, UDFArgumentTypeException {
    if (arguments[0] == null) {
        return null;
    }
    Object input = arguments[0].get();
    if (input == null) {
        return null;
    }
    if (arguments.length == 2 && arguments[1] != null && arguments[1].get() != null && !inputSacleConst) {
        Object scaleObj = null;
        switch(inputScaleOI.getPrimitiveCategory()) {
            case BYTE:
                scaleObj = byteConverter.convert(arguments[1].get());
                scale = ((ByteWritable) scaleObj).get();
                break;
            case SHORT:
                scaleObj = shortConverter.convert(arguments[1].get());
                scale = ((ShortWritable) scaleObj).get();
                break;
            case INT:
                scaleObj = intConverter.convert(arguments[1].get());
                scale = ((IntWritable) scaleObj).get();
                break;
            case LONG:
                scaleObj = longConverter.convert(arguments[1].get());
                long l = ((LongWritable) scaleObj).get();
                if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
                    throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
                }
                scale = (int) l;
            default:
                break;
        }
    }
    switch(inputType1) {
        case VOID:
            return null;
        case DECIMAL:
            HiveDecimalWritable decimalWritable = (HiveDecimalWritable) inputOI.getPrimitiveWritableObject(input);
            HiveDecimal dec = trunc(decimalWritable.getHiveDecimal(), scale);
            if (dec == null) {
                return null;
            }
            return new HiveDecimalWritable(dec);
        case BYTE:
            ByteWritable byteWritable = (ByteWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return byteWritable;
            } else {
                return new ByteWritable((byte) trunc(byteWritable.get(), scale));
            }
        case SHORT:
            ShortWritable shortWritable = (ShortWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return shortWritable;
            } else {
                return new ShortWritable((short) trunc(shortWritable.get(), scale));
            }
        case INT:
            IntWritable intWritable = (IntWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return intWritable;
            } else {
                return new IntWritable((int) trunc(intWritable.get(), scale));
            }
        case LONG:
            LongWritable longWritable = (LongWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return longWritable;
            } else {
                return new LongWritable(trunc(longWritable.get(), scale));
            }
        case FLOAT:
            float f = ((FloatWritable) inputOI.getPrimitiveWritableObject(input)).get();
            return new FloatWritable((float) trunc(f, scale));
        case DOUBLE:
            return trunc(((DoubleWritable) inputOI.getPrimitiveWritableObject(input)), scale);
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
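For the SHORT case above, a negative scale zeroes out digits to the left of the decimal point, so TRUNC(1234, -2) yields 1200. The plain-Java sketch below mirrors that documented result for a positive input; it is not the private trunc helper itself, and behavior for negative inputs is not asserted here.

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class TruncScaleSketch {
    public static void main(String[] args) {
        ShortWritable input = new ShortWritable((short) 1234);
        int scale = -2; // negative scale truncates digits left of the decimal point

        double pow = Math.pow(10, -scale); // 100
        ShortWritable result = new ShortWritable((short) (Math.floor(input.get() / pow) * pow));
        System.out.println(result.get()); // prints 1200
    }
}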

Aggregations

ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 92 usages
IntWritable (org.apache.hadoop.io.IntWritable): 61 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 53 usages
LongWritable (org.apache.hadoop.io.LongWritable): 53 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 48 usages
Test (org.junit.Test): 47 usages
Text (org.apache.hadoop.io.Text): 42 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 40 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 37 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 29 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 28 usages
ArrayList (java.util.ArrayList): 25 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 23 usages
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 18 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 17 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 17 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17 usages
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 17 usages
Writable (org.apache.hadoop.io.Writable): 17 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 15 usages