Search in sources :

Example 61 with TypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

From the class TestVectorExpressionWriters, method testSetterLong.

/**
 * Verifies that the writer obtained for a long-backed {@code type} handles
 * setValue() correctly: non-null rows must round-trip to the expected
 * Writable, and null results must correspond to null column entries.
 *
 * @param type the Hive type to obtain a VectorExpressionWriter for
 * @throws HiveException if the writer cannot be created or fails
 */
private void testSetterLong(TypeInfo type) throws HiveException {
    LongColumnVector column = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
    column.isNull[3] = true;
    Object[] results = new Object[this.vectorSize];
    VectorExpressionWriter writer = getWriter(type);
    for (int row = 0; row < vectorSize; row++) {
        // setValue() must tolerate a null target object; results[row] starts null.
        results[row] = writer.setValue(results[row], column, row);
        if (results[row] == null) {
            // A null result is only legal for rows flagged null in the column.
            Assert.assertTrue(column.isNull[row]);
        } else {
            Writable expected = getWritableValue(type, column.vector[row]);
            Assert.assertEquals(expected, results[row]);
        }
    }
}
Also used : Random(java.util.Random) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)

Example 62 with TypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

From the class TestVectorExpressionWriters, method testSetterText.

/**
 * Verifies that the writer obtained for a text-backed {@code type} handles
 * setValue() correctly against a BytesColumnVector mixing populated and
 * null rows: non-null results must match the bytes stored in the column,
 * and null results must correspond to rows flagged null.
 *
 * @param type the Hive type to obtain a VectorExpressionWriter for
 * @throws HiveException if the writer cannot be created or fails
 */
private void testSetterText(TypeInfo type) throws HiveException {
    Text alpha = new Text("alpha");
    Text beta = new Text("beta");
    BytesColumnVector column = new BytesColumnVector(vectorSize);
    column.noNulls = false;
    column.initBuffer();
    // Alternate populated and null rows: 0, 2, 4 set; 1, 3 null.
    column.setVal(0, alpha.getBytes(), 0, alpha.getLength());
    column.isNull[1] = true;
    column.setVal(2, beta.getBytes(), 0, beta.getLength());
    column.isNull[3] = true;
    column.setVal(4, alpha.getBytes(), 0, alpha.getLength());
    Object[] results = new Object[this.vectorSize];
    VectorExpressionWriter writer = getWriter(type);
    for (int row = 0; row < vectorSize; row++) {
        // setValue() must tolerate a null target object; results[row] starts null.
        Writable written = (Writable) writer.setValue(results[row], column, row);
        if (written == null) {
            // A null result is only legal for rows flagged null in the column.
            Assert.assertTrue(column.isNull[row]);
        } else {
            byte[] stored = new byte[column.length[row]];
            System.arraycopy(column.vector[row], column.start[row], stored, 0, column.length[row]);
            Writable expected = getWritableValue(type, stored);
            Assert.assertEquals(expected, written);
        }
    }
}
Also used : BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Text(org.apache.hadoop.io.Text)

Example 63 with TypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

From the class TestVectorExpressionWriters, method testWriterTimestamp.

/**
 * Verifies writeValue() for timestamp columns: every non-null row must
 * produce a Writable equal to the expected TimestampWritable built from
 * the generated source values, and a null result is only legal for rows
 * flagged null in the column vector.
 *
 * @param type the Hive type to obtain a VectorExpressionWriter for
 * @throws HiveException if the writer cannot be created or fails
 */
private void testWriterTimestamp(TypeInfo type) throws HiveException {
    Timestamp[] timestampValues = new Timestamp[vectorSize];
    TimestampColumnVector tcv = VectorizedRowGroupGenUtil.generateTimestampColumnVector(true, false, vectorSize, new Random(10), timestampValues);
    tcv.isNull[3] = true;
    VectorExpressionWriter vew = getWriter(type);
    for (int i = 0; i < vectorSize; i++) {
        Writable w = (Writable) vew.writeValue(tcv, i);
        if (w != null) {
            Writable expected = getWritableValue(type, timestampValues[i]);
            // assertEquals reports both values on failure, unlike the original
            // assertTrue(t1.equals(t2)), which also needed redundant casts.
            Assert.assertEquals(expected, w);
        } else {
            Assert.assertTrue(tcv.isNull[i]);
        }
    }
}
Also used : TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) Random(java.util.Random) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Timestamp(java.sql.Timestamp)

Example 64 with TypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

From the class TestVectorExpressionWriters, method testSetterDouble.

/**
 * Verifies that the writer obtained for a double-backed {@code type} handles
 * setValue() correctly: non-null rows must round-trip to the expected
 * Writable, and null results must correspond to null column entries.
 *
 * @param type the Hive type to obtain a VectorExpressionWriter for
 * @throws HiveException if the writer cannot be created or fails
 */
private void testSetterDouble(TypeInfo type) throws HiveException {
    DoubleColumnVector column = VectorizedRowGroupGenUtil.generateDoubleColumnVector(true, false, this.vectorSize, new Random(10));
    column.isNull[2] = true;
    Object[] results = new Object[this.vectorSize];
    VectorExpressionWriter writer = getWriter(type);
    for (int row = 0; row < vectorSize; row++) {
        // setValue() must tolerate a null target object; results[row] starts null.
        results[row] = writer.setValue(results[row], column, row);
        if (results[row] == null) {
            // A null result is only legal for rows flagged null in the column.
            Assert.assertTrue(column.isNull[row]);
        } else {
            Writable expected = getWritableValue(type, column.vector[row]);
            Assert.assertEquals(expected, results[row]);
        }
    }
}
Also used : DoubleColumnVector(org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector) Random(java.util.Random) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable)

Example 65 with TypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.

From the class HiveParquetSchemaTestUtils, method testConversion.

/**
 * Asserts that converting the given Hive columns to a Parquet schema yields
 * {@code actualSchema}. After the whole-message comparison, each top-level
 * field's original (logical) type is compared individually, because
 * PrimitiveType.equals() does not take the original type into account.
 *
 * @param columnNamesStr comma-separated Hive column names
 * @param columnsTypeStr comma-separated Hive column type strings
 * @param actualSchema   the Parquet message-type text the conversion must produce
 * @throws Exception if parsing or conversion fails
 */
public static void testConversion(final String columnNamesStr, final String columnsTypeStr, final String actualSchema) throws Exception {
    final List<String> columnNames = createHiveColumnsFrom(columnNamesStr);
    final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr);
    final MessageType messageTypeFound = HiveSchemaConverter.convert(columnNames, columnTypes);
    final MessageType expectedMT = MessageTypeParser.parseMessageType(actualSchema);
    assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + actualSchema, expectedMT, messageTypeFound);
    // Required to check the original types manually as PrimitiveType.equals does not care about it
    List<Type> expectedFields = expectedMT.getFields();
    List<Type> actualFields = messageTypeFound.getFields();
    for (int i = 0, n = expectedFields.size(); i < n; ++i) {
        OriginalType exp = expectedFields.get(i).getOriginalType();
        OriginalType act = actualFields.get(i).getOriginalType();
        // Name the offending field: the generic message gave no hint which of
        // the n fields mismatched.
        assertEquals("Original types do not match for field " + expectedFields.get(i).getName() + " (index " + i + ")", exp, act);
    }
}
Also used : OriginalType(org.apache.parquet.schema.OriginalType) MessageType(org.apache.parquet.schema.MessageType) Type(org.apache.parquet.schema.Type) OriginalType(org.apache.parquet.schema.OriginalType) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType)

Aggregations

TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)292 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)181 StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)144 ArrayList (java.util.ArrayList)124 ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)97 MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)91 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)89 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)77 UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)61 Test (org.junit.Test)54 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)53 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)50 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)44 Text (org.apache.hadoop.io.Text)41 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)39 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)36 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)35 List (java.util.List)33 HashMap (java.util.HashMap)32 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)32