Search in sources :

Example 31 with DecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

In class TestVectorExpressionWriters, method testSetterDecimal:

/**
 * Verifies that the VectorExpressionWriter for the given decimal type can
 * populate values from a DecimalColumnVector, including handling of null rows.
 */
private void testSetterDecimal(DecimalTypeInfo type) throws HiveException {
    // Build a randomly populated decimal column vector, then force row 2 to be null.
    DecimalColumnVector column =
            VectorizedRowGroupGenUtil.generateDecimalColumnVector(type, true, false, this.vectorSize, new Random(10));
    column.isNull[2] = true;
    VectorExpressionWriter writer = getWriter(type);
    Object[] results = new Object[this.vectorSize];
    for (int row = 0; row < vectorSize; row++) {
        // setValue() must tolerate a null "previous value" argument.
        results[row] = writer.setValue(null, column, row);
        if (results[row] == null) {
            // A null result is only acceptable for rows flagged null in the vector.
            Assert.assertTrue(column.isNull[row]);
        } else {
            Writable expected = getWritableValue(type, column.vector[row].getHiveDecimal());
            Assert.assertEquals(expected, results[row]);
        }
    }
}
Also used : DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) Random(java.util.Random) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable)

Example 32 with DecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

In class TestVectorExpressionWriters, method testVectorExpressionWriterDecimal:

@Test
public void testVectorExpressionWriterDecimal() throws HiveException {
    // Exercise the writer path with the maximum-precision Hive decimal (38, 18).
    testWriterDecimal(TypeInfoFactory.getDecimalTypeInfo(38, 18));
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) Test(org.junit.Test)

Example 33 with DecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

In class TestVectorExpressionWriters, method testVectorExpressionSetterDecimal:

@Test
public void testVectorExpressionSetterDecimal() throws HiveException {
    // Exercise the setter path with the maximum-precision Hive decimal (38, 18).
    testSetterDecimal(TypeInfoFactory.getDecimalTypeInfo(38, 18));
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) Test(org.junit.Test)

Example 34 with DecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

In class VectorUDAFAvgDecimal, method init:

/**
 * Initializes the aggregation from its descriptor: records the precision and
 * scale of the first input expression, then sets up the partial-result inspector.
 */
@Override
public void init(AggregationDesc desc) throws HiveException {
    // The input is expected to be a decimal expression; capture its precision/scale.
    final DecimalTypeInfo inputType =
            (DecimalTypeInfo) desc.getParameters().get(0).getTypeInfo();
    this.inputPrecision = (short) inputType.precision();
    this.inputScale = (short) inputType.scale();
    initPartialResultInspector();
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Example 35 with DecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

In class TypeInfoToSchema, method createAvroPrimitive:

/**
 * Maps a Hive primitive {@link TypeInfo} to the corresponding Avro {@link Schema}.
 *
 * <p>Types with no direct Avro equivalent are encoded with Avro logical-type
 * annotations: CHAR/VARCHAR as annotated strings carrying a {@code maxLength},
 * DECIMAL as {@code bytes} with {@code precision}/{@code scale}, DATE as an
 * annotated int, and TIMESTAMP as an annotated long.
 *
 * @param typeInfo a Hive primitive type; the caller must guarantee it is a
 *                 {@link PrimitiveTypeInfo}
 * @return the Avro schema representing the type
 * @throws UnsupportedOperationException if the primitive category has no Avro mapping
 */
private Schema createAvroPrimitive(TypeInfo typeInfo) {
    PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
    Schema schema;
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case STRING:
            schema = Schema.create(Schema.Type.STRING);
            break;
        case CHAR:
            schema = AvroSerdeUtils.getSchemaFor("{" + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\"," + "\"logicalType\":\"" + AvroSerDe.CHAR_TYPE_NAME + "\"," + "\"maxLength\":" + ((CharTypeInfo) typeInfo).getLength() + "}");
            break;
        case VARCHAR:
            schema = AvroSerdeUtils.getSchemaFor("{" + "\"type\":\"" + AvroSerDe.AVRO_STRING_TYPE_NAME + "\"," + "\"logicalType\":\"" + AvroSerDe.VARCHAR_TYPE_NAME + "\"," + "\"maxLength\":" + ((VarcharTypeInfo) typeInfo).getLength() + "}");
            break;
        case BINARY:
            schema = Schema.create(Schema.Type.BYTES);
            break;
        // Avro has no 8- or 16-bit integer types, so BYTE and SHORT widen to INT.
        case BYTE:
        case SHORT:
        case INT:
            schema = Schema.create(Schema.Type.INT);
            break;
        case LONG:
            schema = Schema.create(Schema.Type.LONG);
            break;
        case FLOAT:
            schema = Schema.create(Schema.Type.FLOAT);
            break;
        case DOUBLE:
            schema = Schema.create(Schema.Type.DOUBLE);
            break;
        case BOOLEAN:
            schema = Schema.create(Schema.Type.BOOLEAN);
            break;
        case DECIMAL:
            // Decimals are carried as raw bytes plus the standard Avro "decimal"
            // logical-type annotation with the Hive type's precision and scale.
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            String precision = String.valueOf(decimalTypeInfo.precision());
            String scale = String.valueOf(decimalTypeInfo.scale());
            schema = AvroSerdeUtils.getSchemaFor("{" + "\"type\":\"bytes\"," + "\"logicalType\":\"decimal\"," + "\"precision\":" + precision + "," + "\"scale\":" + scale + "}");
            break;
        case DATE:
            schema = AvroSerdeUtils.getSchemaFor("{" + "\"type\":\"" + AvroSerDe.AVRO_INT_TYPE_NAME + "\"," + "\"logicalType\":\"" + AvroSerDe.DATE_TYPE_NAME + "\"}");
            break;
        case TIMESTAMP:
            schema = AvroSerdeUtils.getSchemaFor("{" + "\"type\":\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\"," + "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"}");
            break;
        case VOID:
            schema = Schema.create(Schema.Type.NULL);
            break;
        default:
            throw new UnsupportedOperationException(typeInfo + " is not supported.");
    }
    return schema;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) Schema(org.apache.avro.Schema) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Aggregations

DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)53 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)23 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)21 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)16 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)16 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)15 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)15 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)15 IntWritable (org.apache.hadoop.io.IntWritable)15 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)14 Date (java.sql.Date)13 Timestamp (java.sql.Timestamp)13 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)13 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)12 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)12 BooleanWritable (org.apache.hadoop.io.BooleanWritable)12 BytesWritable (org.apache.hadoop.io.BytesWritable)12 FloatWritable (org.apache.hadoop.io.FloatWritable)12 LongWritable (org.apache.hadoop.io.LongWritable)12 ArrayList (java.util.ArrayList)11