
Example 96 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class VectorColumnAssignFactory, method buildObjectAssign.

public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch, int outColIndex, PrimitiveCategory category) throws HiveException {
    VectorColumnAssign outVCA = null;
    ColumnVector destCol = outputBatch.cols[outColIndex];
    if (destCol == null) {
        switch(category) {
            case VOID:
                outVCA = new VectorLongColumnAssign() {

                    // This is a dummy assigner
                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        // This is a no-op: there is no column to assign to, and val is expected to be null
                        assert (val == null);
                    }
                };
                break;
            default:
                throw new HiveException("Incompatible (null) vector column and primitive category " + category);
        }
    } else if (destCol instanceof LongColumnVector) {
        switch(category) {
            case BOOLEAN:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            BooleanWritable bw = (BooleanWritable) val;
                            assignLong(bw.get() ? 1 : 0, destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case BYTE:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            ByteWritable bw = (ByteWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case SHORT:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            ShortWritable bw = (ShortWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INT:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            IntWritable bw = (IntWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case LONG:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            LongWritable bw = (LongWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case TIMESTAMP:
                outVCA = new VectorTimestampColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            assignTimestamp((TimestampWritable) val, destIndex);
                        }
                    }
                }.init(outputBatch, (TimestampColumnVector) destCol);
                break;
            case DATE:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            DateWritable bw = (DateWritable) val;
                            assignLong(bw.getDays(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INTERVAL_YEAR_MONTH:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val;
                            assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INTERVAL_DAY_TIME:
                outVCA = new VectorIntervalDayTimeColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
                            assignIntervalDayTime(bw.getHiveIntervalDayTime(), destIndex);
                        }
                    }
                }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Long vector column and primitive category " + category);
        }
    } else if (destCol instanceof DoubleColumnVector) {
        switch(category) {
            case DOUBLE:
                outVCA = new VectorDoubleColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            DoubleWritable bw = (DoubleWritable) val;
                            assignDouble(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (DoubleColumnVector) destCol);
                break;
            case FLOAT:
                outVCA = new VectorDoubleColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            FloatWritable bw = (FloatWritable) val;
                            assignDouble(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (DoubleColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Double vector column and primitive category " + category);
        }
    } else if (destCol instanceof BytesColumnVector) {
        switch(category) {
            case BINARY:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            BytesWritable bw = (BytesWritable) val;
                            byte[] bytes = bw.getBytes();
                            assignBytes(bytes, 0, bw.getLength(), destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case STRING:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            Text bw = (Text) val;
                            byte[] bytes = bw.getBytes();
                            assignBytes(bytes, 0, bw.getLength(), destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case VARCHAR:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (val instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) val;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) val).getHiveVarchar();
                            }
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            assignBytes(bytes, 0, bytes.length, destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case CHAR:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (val instanceof HiveChar) {
                                hiveChar = (HiveChar) val;
                            } else {
                                hiveChar = ((HiveCharWritable) val).getHiveChar();
                            }
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            assignBytes(bytes, 0, bytes.length, destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Bytes vector column and primitive category " + category);
        }
    } else if (destCol instanceof DecimalColumnVector) {
        switch(category) {
            case DECIMAL:
                outVCA = new VectorDecimalColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            if (val instanceof HiveDecimal) {
                                assignDecimal((HiveDecimal) val, destIndex);
                            } else {
                                assignDecimal((HiveDecimalWritable) val, destIndex);
                            }
                        }
                    }
                }.init(outputBatch, (DecimalColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Decimal vector column and primitive category " + category);
        }
    } else {
        throw new HiveException("Unknown vector column type " + destCol.getClass().getName());
    }
    return outVCA;
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException), HiveChar(org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable), HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable(org.apache.hadoop.io.LongWritable), ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable(org.apache.hadoop.io.IntWritable), DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable), HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), BytesWritable(org.apache.hadoop.io.BytesWritable), Text(org.apache.hadoop.io.Text), HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar), HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), FloatWritable(org.apache.hadoop.io.FloatWritable), BooleanWritable(org.apache.hadoop.io.BooleanWritable)
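
For context, here is a minimal usage sketch. It is not taken from the Hive sources: the package locations of VectorColumnAssign and VectorColumnAssignFactory, and the exact wiring, are assumptions inferred from the factory method above.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssign;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.io.IntWritable;

public class BuildObjectAssignSketch {
    public static void main(String[] args) throws HiveException {
        // One-column batch whose column 0 is long-backed, as INT columns are.
        VectorizedRowBatch batch = new VectorizedRowBatch(1);
        batch.cols[0] = new LongColumnVector();
        // Ask the factory for an assigner that accepts IntWritable objects.
        VectorColumnAssign assigner =
                VectorColumnAssignFactory.buildObjectAssign(batch, 0, PrimitiveCategory.INT);
        assigner.assignObjectValue(new IntWritable(42), 0); // IntWritable -> long slot 0
        assigner.assignObjectValue(null, 1);                // null routes to assignNull
        batch.size = 2;
    }
}

The factory dispatches on both the destination ColumnVector subtype and the PrimitiveCategory, so this single entry point covers every branch shown in the method above.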

Example 97 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class VectorDeserializeRow, method convertRowColumn.

/**
   * Convert one row column value that is the current value in deserializeRead.
   *
   * We deserialize into a writable and then pass that writable to an instance of VectorAssignRow
   * to convert the writable to the target data type and assign it into the VectorizedRowBatch.
   *
   * @param batch the target VectorizedRowBatch
   * @param batchIndex the row index within the batch
   * @param logicalColumnIndex the logical (projected) column index
   * @throws IOException if reading the current deserialized value fails
   */
private void convertRowColumn(VectorizedRowBatch batch, int batchIndex, int logicalColumnIndex) throws IOException {
    final int projectionColumnNum = projectionColumnNums[logicalColumnIndex];
    Writable convertSourceWritable = convertSourceWritables[logicalColumnIndex];
    switch(sourceCategories[logicalColumnIndex]) {
        case PRIMITIVE:
            {
                switch(sourcePrimitiveCategories[logicalColumnIndex]) {
                    case VOID:
                        convertSourceWritable = null;
                        break;
                    case BOOLEAN:
                        ((BooleanWritable) convertSourceWritable).set(deserializeRead.currentBoolean);
                        break;
                    case BYTE:
                        ((ByteWritable) convertSourceWritable).set(deserializeRead.currentByte);
                        break;
                    case SHORT:
                        ((ShortWritable) convertSourceWritable).set(deserializeRead.currentShort);
                        break;
                    case INT:
                        ((IntWritable) convertSourceWritable).set(deserializeRead.currentInt);
                        break;
                    case LONG:
                        ((LongWritable) convertSourceWritable).set(deserializeRead.currentLong);
                        break;
                    case TIMESTAMP:
                        ((TimestampWritable) convertSourceWritable).set(deserializeRead.currentTimestampWritable);
                        break;
                    case DATE:
                        ((DateWritable) convertSourceWritable).set(deserializeRead.currentDateWritable);
                        break;
                    case FLOAT:
                        ((FloatWritable) convertSourceWritable).set(deserializeRead.currentFloat);
                        break;
                    case DOUBLE:
                        ((DoubleWritable) convertSourceWritable).set(deserializeRead.currentDouble);
                        break;
                    case BINARY:
                        if (deserializeRead.currentBytes == null) {
                            LOG.info("null binary entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                        }
                        ((BytesWritable) convertSourceWritable).set(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength);
                        break;
                    case STRING:
                        if (deserializeRead.currentBytes == null) {
                            throw new RuntimeException("null string entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                        }
                        // Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
                        ((Text) convertSourceWritable).set(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength);
                        break;
                    case VARCHAR:
                        {
                            // Truncate the raw bytes to the declared VARCHAR maximum length before setting the writable.
                            if (deserializeRead.currentBytes == null) {
                                throw new RuntimeException("null varchar entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.truncate(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength, maxLengths[logicalColumnIndex]);
                            ((HiveVarcharWritable) convertSourceWritable).set(new String(deserializeRead.currentBytes, deserializeRead.currentBytesStart, adjustedLength, Charsets.UTF_8), -1);
                        }
                        break;
                    case CHAR:
                        {
                            // Right-trim trailing blanks and truncate to the declared CHAR maximum length before setting the writable.
                            if (deserializeRead.currentBytes == null) {
                                throw new RuntimeException("null char entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.rightTrimAndTruncate(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength, maxLengths[logicalColumnIndex]);
                            ((HiveCharWritable) convertSourceWritable).set(new String(deserializeRead.currentBytes, deserializeRead.currentBytesStart, adjustedLength, Charsets.UTF_8), -1);
                        }
                        break;
                    case DECIMAL:
                        ((HiveDecimalWritable) convertSourceWritable).set(deserializeRead.currentHiveDecimalWritable);
                        break;
                    case INTERVAL_YEAR_MONTH:
                        ((HiveIntervalYearMonthWritable) convertSourceWritable).set(deserializeRead.currentHiveIntervalYearMonthWritable);
                        break;
                    case INTERVAL_DAY_TIME:
                        ((HiveIntervalDayTimeWritable) convertSourceWritable).set(deserializeRead.currentHiveIntervalDayTimeWritable);
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + sourcePrimitiveCategories[logicalColumnIndex] + " not supported");
                }
            }
            break;
        default:
            throw new RuntimeException("Category " + sourceCategories[logicalColumnIndex] + " not supported");
    }
    /*
     * Convert our source object we just read into the target object and store that in the
     * VectorizedRowBatch.
     */
    convertVectorAssignRow.assignConvertRowColumn(batch, batchIndex, logicalColumnIndex, convertSourceWritable);
}
Also used: ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable), DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable), Writable(org.apache.hadoop.io.Writable), LongWritable(org.apache.hadoop.io.LongWritable), HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), BytesWritable(org.apache.hadoop.io.BytesWritable), TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable(org.apache.hadoop.io.IntWritable), HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), BooleanWritable(org.apache.hadoop.io.BooleanWritable), HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), FloatWritable(org.apache.hadoop.io.FloatWritable)
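
The javadoc's key point is the writable-reuse pattern: one preallocated writable per column is mutated in place for every row, avoiding per-row allocation, and is then handed to the converting assigner. Below is a standalone sketch of that pattern; the WritableReuseSketch class and its assignConverted helper are hypothetical stand-ins for convertVectorAssignRow.assignConvertRowColumn.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class WritableReuseSketch {
    // One scratch writable per column, reused across all rows.
    private final HiveDecimalWritable scratch = new HiveDecimalWritable();

    void convertDecimalColumn(HiveDecimal currentValue, int batchIndex) {
        scratch.set(currentValue);            // overwrite in place, no new object per row
        assignConverted(batchIndex, scratch); // hand the same instance to the converter
    }

    private void assignConverted(int batchIndex, HiveDecimalWritable value) {
        // In VectorDeserializeRow this is convertVectorAssignRow.assignConvertRowColumn(...).
        System.out.println("row " + batchIndex + " -> " + value.getHiveDecimal());
    }
}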

Example 98 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project presto by prestodb.

The class ParquetShortDecimalColumnReader, method readValue.

@Override
protected void readValue(BlockBuilder blockBuilder, Type type) {
    if (definitionLevel == columnDescriptor.getMaxDefinitionLevel()) {
        long decimalValue;
        if (columnDescriptor.getType().equals(INT32)) {
            HiveDecimalWritable hiveDecimalWritable = new HiveDecimalWritable(HiveDecimal.create(valuesReader.readInteger()));
            decimalValue = getShortDecimalValue(hiveDecimalWritable, ((DecimalType) type).getScale());
        } else if (columnDescriptor.getType().equals(INT64)) {
            HiveDecimalWritable hiveDecimalWritable = new HiveDecimalWritable(HiveDecimal.create(valuesReader.readLong()));
            decimalValue = getShortDecimalValue(hiveDecimalWritable, ((DecimalType) type).getScale());
        } else {
            decimalValue = getShortDecimalValue(valuesReader.readBytes().getBytes());
        }
        type.writeLong(blockBuilder, decimalValue);
    } else {
        blockBuilder.appendNull();
    }
}
Also used: HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), DecimalType(com.facebook.presto.spi.type.DecimalType)
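
getShortDecimalValue is a Presto helper not shown in this excerpt. A plausible reading, sketched below under that assumption, is that it rescales the decimal to the column's scale and returns the unscaled value as a long; the toShortDecimal method and the BigDecimal round-trip are illustrative, not Presto's implementation.

import java.math.RoundingMode;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class ShortDecimalSketch {
    static long toShortDecimal(HiveDecimalWritable writable, int targetScale) {
        HiveDecimal decimal = writable.getHiveDecimal();
        // Rescale to the column's scale, then take the unscaled long representation.
        return decimal.bigDecimalValue()
                .setScale(targetScale, RoundingMode.HALF_UP)
                .unscaledValue()
                .longValueExact();
    }

    public static void main(String[] args) {
        HiveDecimalWritable w = new HiveDecimalWritable(HiveDecimal.create(1234)); // INT32 path
        // For a DECIMAL(10, 2) column: 1234 -> unscaled 123400, i.e. 1234.00.
        System.out.println(toShortDecimal(w, 2));
    }
}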

Example 99 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class VectorPTFEvaluatorDecimalMin, method evaluateGroupBatch.

public void evaluateGroupBatch(VectorizedRowBatch batch, boolean isLastGroupBatch) {
    evaluateInputExpr(batch);
    // Determine minimum of all non-null decimal column values; maintain isGroupResultNull.
    // We do not filter when PTF is in reducer.
    Preconditions.checkState(!batch.selectedInUse);
    final int size = batch.size;
    if (size == 0) {
        return;
    }
    DecimalColumnVector decimalColVector = ((DecimalColumnVector) batch.cols[inputColumnNum]);
    if (decimalColVector.isRepeating) {
        if (decimalColVector.noNulls || !decimalColVector.isNull[0]) {
            if (isGroupResultNull) {
                min.set(decimalColVector.vector[0]);
                isGroupResultNull = false;
            } else {
                HiveDecimalWritable repeatedMin = decimalColVector.vector[0];
                if (repeatedMin.compareTo(min) == -1) {
                    min.set(repeatedMin);
                }
            }
        }
    } else if (decimalColVector.noNulls) {
        HiveDecimalWritable[] vector = decimalColVector.vector;
        if (isGroupResultNull) {
            min.set(vector[0]);
            isGroupResultNull = false;
        } else {
            final HiveDecimalWritable dec = vector[0];
            if (dec.compareTo(min) == -1) {
                min.set(dec);
            }
        }
        for (int i = 1; i < size; i++) {
            final HiveDecimalWritable dec = vector[i];
            if (dec.compareTo(min) == -1) {
                min.set(dec);
            }
        }
    } else {
        boolean[] batchIsNull = decimalColVector.isNull;
        int i = 0;
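        // Skip leading null entries; if every row in the batch is null, there is nothing to do.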
        while (batchIsNull[i]) {
            if (++i >= size) {
                return;
            }
        }
        HiveDecimalWritable[] vector = decimalColVector.vector;
        if (isGroupResultNull) {
            min.set(vector[i++]);
            isGroupResultNull = false;
        } else {
            final HiveDecimalWritable dec = vector[i++];
            if (dec.compareTo(min) == -1) {
                min.set(dec);
            }
        }
        for (; i < size; i++) {
            if (!batchIsNull[i]) {
                final HiveDecimalWritable dec = vector[i];
                if (dec.compareTo(min) == -1) {
                    min.set(dec);
                }
            }
        }
    }
}
Also used: DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
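
All three branches above (repeating, no-nulls, has-nulls) reduce to the same running-min update: compare each candidate against a single mutable accumulator and overwrite it in place with set(). A self-contained sketch of that core pattern, with hand-built inputs:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalMinSketch {
    public static void main(String[] args) {
        HiveDecimalWritable[] vector = {
            new HiveDecimalWritable(HiveDecimal.create("3.50")),
            new HiveDecimalWritable(HiveDecimal.create("1.25")),
            new HiveDecimalWritable(HiveDecimal.create("2.00")),
        };
        // Mutable accumulator, updated in place; no allocation inside the scan loop.
        HiveDecimalWritable min = new HiveDecimalWritable();
        min.set(vector[0]);
        for (int i = 1; i < vector.length; i++) {
            if (vector[i].compareTo(min) < 0) { // same test as compareTo(min) == -1 above
                min.set(vector[i]);
            }
        }
        System.out.println(min.getHiveDecimal()); // prints 1.25
    }
}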

Example 100 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class DruidSerDe, method deserialize.

@Override
public Object deserialize(Writable writable) throws SerDeException {
    final DruidWritable input = (DruidWritable) writable;
    final List<Object> output = Lists.newArrayListWithExpectedSize(columns.length);
    for (int i = 0; i < columns.length; i++) {
        final Object value = input.getValue().get(columns[i]);
        if (value == null) {
            output.add(null);
            continue;
        }
        switch(types[i].getPrimitiveCategory()) {
            case TIMESTAMP:
                output.add(new TimestampWritable(Timestamp.valueOf(ZonedDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()), tsTZTypeInfo.timeZone()).format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")).toString())));
                break;
            case TIMESTAMPLOCALTZ:
                output.add(new TimestampLocalTZWritable(new TimestampTZ(ZonedDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()), ((TimestampLocalTZTypeInfo) types[i]).timeZone()))));
                break;
            case BYTE:
                output.add(new ByteWritable(((Number) value).byteValue()));
                break;
            case SHORT:
                output.add(new ShortWritable(((Number) value).shortValue()));
                break;
            case INT:
                output.add(new IntWritable(((Number) value).intValue()));
                break;
            case LONG:
                output.add(new LongWritable(((Number) value).longValue()));
                break;
            case FLOAT:
                output.add(new FloatWritable(((Number) value).floatValue()));
                break;
            case DOUBLE:
                output.add(new DoubleWritable(((Number) value).doubleValue()));
                break;
            case DECIMAL:
                output.add(new HiveDecimalWritable(HiveDecimal.create(((Number) value).doubleValue())));
                break;
            case CHAR:
                output.add(new HiveCharWritable(new HiveChar(value.toString(), ((CharTypeInfo) types[i]).getLength())));
                break;
            case VARCHAR:
                output.add(new HiveVarcharWritable(new HiveVarchar(value.toString(), ((VarcharTypeInfo) types[i]).getLength())));
                break;
            case STRING:
                output.add(new Text(value.toString()));
                break;
            case BOOLEAN:
                output.add(new BooleanWritable(Boolean.valueOf(value.toString())));
                break;
            default:
                throw new SerDeException("Unknown type: " + types[i].getPrimitiveCategory());
        }
    }
    return output;
}
Also used: HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveChar(org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable), HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo), DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable), Text(org.apache.hadoop.io.Text), HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar), ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable), TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ), FloatWritable(org.apache.hadoop.io.FloatWritable), BooleanWritable(org.apache.hadoop.io.BooleanWritable), TimestampLocalTZWritable(org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable), LongWritable(org.apache.hadoop.io.LongWritable), ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable(org.apache.hadoop.io.IntWritable), SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
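
Note that the DECIMAL branch routes the value through a double before creating the HiveDecimalWritable. A minimal sketch of the DECIMAL and LONG mappings; the input values are hypothetical, not Druid's wire format:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.io.LongWritable;

public class DruidValueMappingSketch {
    public static void main(String[] args) {
        Number decimalValue = 12.345d; // as returned by input.getValue().get(column)
        Number longValue = 42L;
        // DECIMAL: routed through HiveDecimal.create(double); going through double
        // can lose precision for values not exactly representable in binary floating point.
        HiveDecimalWritable dec =
                new HiveDecimalWritable(HiveDecimal.create(decimalValue.doubleValue()));
        // LONG: a straight narrowing to the primitive long.
        LongWritable lng = new LongWritable(longValue.longValue());
        System.out.println(dec.getHiveDecimal() + " / " + lng.get());
    }
}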

Aggregations

HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 185
Test (org.junit.Test): 42
LongWritable (org.apache.hadoop.io.LongWritable): 39
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 36
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 35
IntWritable (org.apache.hadoop.io.IntWritable): 35
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 34
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 31
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 30
Text (org.apache.hadoop.io.Text): 30
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 28
BytesWritable (org.apache.hadoop.io.BytesWritable): 28
FloatWritable (org.apache.hadoop.io.FloatWritable): 28
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 27
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 27
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 27
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 26
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 26
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 26
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 25