Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
From the class TestVectorExpressionWriters, method testSetterLong.
private void testSetterLong(TypeInfo type) throws HiveException {
  LongColumnVector lcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
      vectorSize, new Random(10));
  lcv.isNull[3] = true;
  Object[] values = new Object[this.vectorSize];
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    // setValue() should be able to handle null input
    values[i] = null;
    values[i] = vew.setValue(values[i], lcv, i);
    if (values[i] != null) {
      Writable expected = getWritableValue(type, lcv.vector[i]);
      Assert.assertEquals(expected, values[i]);
    } else {
      Assert.assertTrue(lcv.isNull[i]);
    }
  }
}
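The getWriter(type) helper is not shown in this snippet. A minimal sketch of what it might look like, assuming VectorExpressionWriterFactory.genVectorExpressionWritable(ExprNodeDesc) is used to build the writer for a column of the given type; the column and table names are placeholders:

import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

// Sketch: derive a VectorExpressionWriter from a TypeInfo by wrapping the
// type in a column expression; "col" and "table" are placeholder names.
private VectorExpressionWriter getWriter(TypeInfo type) throws HiveException {
  ExprNodeColumnDesc columnDesc = new ExprNodeColumnDesc(type, "col", "table", false);
  return VectorExpressionWriterFactory.genVectorExpressionWritable(columnDesc);
}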
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
From the class TestVectorExpressionWriters, method testSetterText.
private void testSetterText(TypeInfo type) throws HiveException {
  Text t1 = new Text("alpha");
  Text t2 = new Text("beta");
  BytesColumnVector bcv = new BytesColumnVector(vectorSize);
  bcv.noNulls = false;
  bcv.initBuffer();
  bcv.setVal(0, t1.getBytes(), 0, t1.getLength());
  bcv.isNull[1] = true;
  bcv.setVal(2, t2.getBytes(), 0, t2.getLength());
  bcv.isNull[3] = true;
  bcv.setVal(4, t1.getBytes(), 0, t1.getLength());
  Object[] values = new Object[this.vectorSize];
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    // setValue() should be able to handle null input
    values[i] = null;
    Writable w = (Writable) vew.setValue(values[i], bcv, i);
    if (w != null) {
      byte[] val = new byte[bcv.length[i]];
      System.arraycopy(bcv.vector[i], bcv.start[i], val, 0, bcv.length[i]);
      Writable expected = getWritableValue(type, val);
      Assert.assertEquals(expected, w);
    } else {
      Assert.assertTrue(bcv.isNull[i]);
    }
  }
}
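A hedged example of how this helper might be driven; the @Test method name is hypothetical, but TypeInfoFactory's string-family constants and factory methods are the standard way to obtain the TypeInfo arguments:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.junit.Test;

// Hypothetical driver: exercise the setter path for string-backed types,
// all of which are stored in a BytesColumnVector.
@Test
public void testSetterTextForStringTypes() throws HiveException {
  testSetterText(TypeInfoFactory.stringTypeInfo);
  testSetterText(TypeInfoFactory.getVarcharTypeInfo(10));
  testSetterText(TypeInfoFactory.getCharTypeInfo(10));
}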
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
From the class TestVectorExpressionWriters, method testWriterTimestamp.
private void testWriterTimestamp(TypeInfo type) throws HiveException {
  Timestamp[] timestampValues = new Timestamp[vectorSize];
  TimestampColumnVector tcv = VectorizedRowGroupGenUtil.generateTimestampColumnVector(true, false,
      vectorSize, new Random(10), timestampValues);
  tcv.isNull[3] = true;
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    Writable w = (Writable) vew.writeValue(tcv, i);
    if (w != null) {
      Writable expected = getWritableValue(type, timestampValues[i]);
      TimestampWritable t1 = (TimestampWritable) expected;
      TimestampWritable t2 = (TimestampWritable) w;
      Assert.assertTrue(t1.equals(t2));
    } else {
      Assert.assertTrue(tcv.isNull[i]);
    }
  }
}
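A sketch of a driver for this helper, again with a hypothetical @Test method name; timestamp columns are the ones stored in a TimestampColumnVector, so a single invocation covers this writer path:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.junit.Test;

// Hypothetical driver: run the writer test against the timestamp type.
@Test
public void testVectorExpressionWriterTimestamp() throws HiveException {
  testWriterTimestamp(TypeInfoFactory.timestampTypeInfo);
}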
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
From the class TestVectorExpressionWriters, method testSetterDouble.
private void testSetterDouble(TypeInfo type) throws HiveException {
  DoubleColumnVector dcv = VectorizedRowGroupGenUtil.generateDoubleColumnVector(true, false,
      this.vectorSize, new Random(10));
  dcv.isNull[2] = true;
  Object[] values = new Object[this.vectorSize];
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    // setValue() should be able to handle null input
    values[i] = null;
    values[i] = vew.setValue(values[i], dcv, i);
    if (values[i] != null) {
      Writable expected = getWritableValue(type, dcv.vector[i]);
      Assert.assertEquals(expected, values[i]);
    } else {
      Assert.assertTrue(dcv.isNull[i]);
    }
  }
}
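A possible driver, with a hypothetical @Test method name; both double and float columns are stored in a DoubleColumnVector, so each type exercises the same setter path:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.junit.Test;

// Hypothetical driver: exercise the setter path for floating-point types.
@Test
public void testSetterDoubleAndFloat() throws HiveException {
  testSetterDouble(TypeInfoFactory.doubleTypeInfo);
  testSetterDouble(TypeInfoFactory.floatTypeInfo);
}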
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
From the class HiveParquetSchemaTestUtils, method testConversion.
public static void testConversion(final String columnNamesStr, final String columnsTypeStr,
    final String actualSchema) throws Exception {
  final List<String> columnNames = createHiveColumnsFrom(columnNamesStr);
  final List<TypeInfo> columnTypes = createHiveTypeInfoFrom(columnsTypeStr);
  final MessageType messageTypeFound = HiveSchemaConverter.convert(columnNames, columnTypes);
  final MessageType expectedMT = MessageTypeParser.parseMessageType(actualSchema);
  assertEquals("converting " + columnNamesStr + ": " + columnsTypeStr + " to " + actualSchema,
      expectedMT, messageTypeFound);
  // Check the original types manually, as PrimitiveType.equals() does not compare them.
  List<Type> expectedFields = expectedMT.getFields();
  List<Type> actualFields = messageTypeFound.getFields();
  for (int i = 0, n = expectedFields.size(); i < n; ++i) {
    OriginalType exp = expectedFields.get(i).getOriginalType();
    OriginalType act = actualFields.get(i).getOriginalType();
    assertEquals("Original types of the field do not match", exp, act);
  }
}
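A sketch of a call to this utility, assuming HiveSchemaConverter names the resulting Parquet message "hive_schema" and maps Hive's int to an optional int32 and string to an optional UTF8-annotated binary; the column names and expected schema string are illustrative:

// Hypothetical invocation: two Hive columns and the Parquet schema they
// are expected to convert to.
HiveParquetSchemaTestUtils.testConversion(
    "a,b",
    "int:string",
    "message hive_schema {\n"
        + "  optional int32 a;\n"
        + "  optional binary b (UTF8);\n"
        + "}");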