Example usage of org.apache.hadoop.hive.serde2.SerDeException in the Apache Hive project: class TestLazyBinaryColumnarSerDe, method testLazyBinaryColumnarSerDeWithEmptyBinary.
/**
 * Verifies that LazyBinaryColumnarSerDe refuses to serialize a non-null,
 * zero-length binary field: the serialize() call must throw a
 * RuntimeException with the documented advisory message. All other fields
 * of the struct are populated (including empty string/list/map, which ARE
 * allowed) so that only the empty byte[] can trigger the failure.
 */
public void testLazyBinaryColumnarSerDeWithEmptyBinary() throws SerDeException {
    StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory.getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA);
    String cols = ObjectInspectorUtils.getFieldNames(oi);
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, cols);
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi));
    LazyBinaryColumnarSerDe serde = new LazyBinaryColumnarSerDe();
    SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);
    OuterStruct outerStruct = new OuterStruct();
    outerStruct.mByte = 101;
    outerStruct.mShort = 2002;
    outerStruct.mInt = 3003;
    // Uppercase 'L' suffix: a lowercase 'l' is easily misread as the digit '1'.
    outerStruct.mLong = 4004L;
    outerStruct.mFloat = 5005.01f;
    outerStruct.mDouble = 6006.001d;
    outerStruct.mString = "";
    outerStruct.mBA = new byte[] {};
    outerStruct.mArray = new ArrayList<InnerStruct>();
    outerStruct.mMap = new TreeMap<String, InnerStruct>();
    outerStruct.mStruct = new InnerStruct(180018, 190019L);
    try {
        serde.serialize(outerStruct, oi);
    } catch (RuntimeException re) {
        // assertEquals takes (expected, actual); the original had them swapped,
        // which yields a misleading diff when the assertion fails.
        assertEquals("LazyBinaryColumnarSerde cannot serialize a non-null " + "zero length binary field. Consider using either LazyBinarySerde or ColumnarSerde.", re.getMessage());
        return;
    }
    // fail() always fails; a bare 'assert false' is silently skipped unless
    // the JVM is started with -ea.
    fail("Expected serialize() to throw for a non-null zero-length binary field");
}
Example usage of org.apache.hadoop.hive.serde2.SerDeException in the Apache Hive project: class TestAvroObjectInspectorGenerator, method primitiveTypesWorkCorrectly.
/**
 * Checks that AvroObjectInspectorGenerator maps every Avro primitive type
 * (string, int, boolean, long, float, double, null) to the expected Hive
 * column name and TypeInfo, and that the resulting ObjectInspector is a
 * struct whose fields line up one-to-one with the schema's fields.
 *
 * @throws SerDeException if the generator rejects the schema
 */
@Test
public void primitiveTypesWorkCorrectly() throws SerDeException {
    final String bunchOfPrimitives = "{\n"
        + " \"namespace\": \"testing\",\n"
        + " \"name\": \"PrimitiveTypes\",\n"
        + " \"type\": \"record\",\n"
        + " \"fields\": [\n"
        + " {\n"
        + " \"name\":\"aString\",\n"
        + " \"type\":\"string\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"anInt\",\n"
        + " \"type\":\"int\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"aBoolean\",\n"
        + " \"type\":\"boolean\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"aLong\",\n"
        + " \"type\":\"long\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"aFloat\",\n"
        + " \"type\":\"float\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"aDouble\",\n"
        + " \"type\":\"double\"\n"
        + " },\n"
        + " {\n"
        + " \"name\":\"aNull\",\n"
        + " \"type\":\"null\"\n"
        + " }\n"
        + " ]\n"
        + "}";
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(AvroSerdeUtils.getSchemaFor(bunchOfPrimitives));
    String[] expectedColumnNames = { "aString", "anInt", "aBoolean", "aLong", "aFloat", "aDouble", "aNull" };
    verifyColumnNames(expectedColumnNames, aoig.getColumnNames());
    TypeInfo[] expectedColumnTypes = { STRING, INT, BOOLEAN, LONG, FLOAT, DOUBLE, VOID };
    verifyColumnTypes(expectedColumnTypes, aoig.getColumnTypes());
    // Rip apart the object inspector, making sure we got what we expect.
    final ObjectInspector oi = aoig.getObjectInspector();
    assertTrue(oi instanceof StandardStructObjectInspector);
    final StandardStructObjectInspector ssoi = (StandardStructObjectInspector) oi;
    List<? extends StructField> structFields = ssoi.getAllStructFieldRefs();
    assertEquals(expectedColumnNames.length, structFields.size());
    for (int i = 0; i < expectedColumnNames.length; i++) {
        // Locale.ROOT: the default-locale toLowerCase() breaks under e.g. the
        // Turkish locale, where "anInt" lowercases to "an\u0131nt" (dotless i).
        assertEquals("Column names don't match", expectedColumnNames[i].toLowerCase(java.util.Locale.ROOT), structFields.get(i).getFieldName());
        assertEquals("Column types don't match", expectedColumnTypes[i].getTypeName(), structFields.get(i).getFieldObjectInspector().getTypeName());
    }
}
Example usage of org.apache.hadoop.hive.serde2.SerDeException in the Apache Hive project: class TestAvroSerde, method initializeDoesNotReuseSchemasFromConf.
/**
 * Ensures AvroSerDe initialization ignores any stale schema already present
 * in the Configuration and instead installs the schema supplied via the
 * table Properties. Hive re-uses the Configuration object it passes in, so
 * a leftover schema from a previous table must not leak through.
 *
 * @throws SerDeException if SerDe initialization fails
 */
@Test
public void initializeDoesNotReuseSchemasFromConf() throws SerDeException {
    // Hive will re-use the Configuration object that it passes in to be
    // initialized. Therefore we need to make sure we don't look for any
    // old schemas within it.
    Configuration conf = new Configuration();
    conf.set(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName(), originalSchema.toString(false));
    Properties props = new Properties();
    // setProperty rather than the inherited Hashtable.put: it is the
    // String-typed API and matches how the other tests in this file build
    // their Properties.
    props.setProperty(AvroTableProperties.SCHEMA_LITERAL.getPropName(), newSchemaString);
    AvroSerDe asd = new AvroSerDe();
    SerDeUtils.initializeSerDe(asd, conf, props, null);
    // Verify that the schema now within the configuration is the one passed
    // in via the properties
    assertEquals(newSchema, AvroSerdeUtils.getSchemaFor(conf.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName())));
}
Example usage of org.apache.hadoop.hive.serde2.SerDeException in the Apache Hive project: class TestAvroDeserializer, method unionTester.
/**
 * Round-trips {@code record} (validated against writer schema {@code ws})
 * through serialization, then deserializes it with reader schema {@code rs}.
 * The record must carry exactly one field, a union named "aunion"; the
 * resolved union value and its inspector are returned for the caller to
 * assert on.
 *
 * @param ws writer schema the record was built against
 * @param rs reader schema used for deserialization
 * @param record the single-field record to round-trip
 * @return the union's object inspector, its resolved value, and the raw union
 * @throws SerDeException if deserialization fails
 * @throws IOException if the serialize/deserialize round-trip fails
 */
private ResultPair unionTester(Schema ws, Schema rs, GenericData.Record record) throws SerDeException, IOException {
    assertTrue(GENERIC_DATA.validate(ws, record));
    AvroGenericRecordWritable writable = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator generator = new AvroObjectInspectorGenerator(rs);
    AvroDeserializer deserializer = new AvroDeserializer();
    ArrayList<Object> deserializedRow = (ArrayList<Object>) deserializer.deserialize(generator.getColumnNames(), generator.getColumnTypes(), writable, rs);
    assertEquals(1, deserializedRow.size());
    StandardStructObjectInspector structInspector = (StandardStructObjectInspector) generator.getObjectInspector();
    List<? extends StructField> fieldRefs = structInspector.getAllStructFieldRefs();
    assertEquals(1, fieldRefs.size());
    StructField unionField = fieldRefs.get(0);
    assertEquals("aunion", unionField.getFieldName());
    Object rawUnion = structInspector.getStructFieldData(deserializedRow, unionField);
    assertTrue(unionField.getFieldObjectInspector() instanceof UnionObjectInspector);
    UnionObjectInspector unionInspector = (UnionObjectInspector) unionField.getFieldObjectInspector();
    Object resolvedValue = unionInspector.getField(rawUnion);
    return new ResultPair(unionInspector, resolvedValue, rawUnion);
}
Example usage of org.apache.hadoop.hive.serde2.SerDeException in the Apache Hive project: class TestAvroDeserializer, method canDeserializeBytes.
/**
 * Verifies that an Avro "bytes" field survives a serialize/deserialize
 * round-trip: first by inspecting the raw deserialized row, then by going
 * through the ObjectInspector API the way Hive itself would.
 *
 * @throws SerDeException if deserialization fails
 * @throws IOException if the serialize/deserialize round-trip fails
 */
@Test
public void canDeserializeBytes() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.BYTES_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    // Explicit charset: a bare getBytes() depends on the platform default
    // encoding. The literal is ASCII, so UTF-8 preserves the original bytes.
    byte[] bytes = "ANANCIENTBLUEBOX".getBytes(java.nio.charset.StandardCharsets.UTF_8);
    ByteBuffer bb = ByteBuffer.wrap(bytes);
    bb.rewind();
    record.put("bytesField", bb);
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object byteObject = row.get(0);
    assertTrue(byteObject instanceof byte[]);
    byte[] outBytes = (byte[]) byteObject;
    // Verify the raw object that's been created. Assert equal lengths first:
    // without this, a truncated result would silently pass the loop below.
    assertEquals(bytes.length, outBytes.length);
    for (int i = 0; i < bytes.length; i++) {
        assertEquals(bytes[i], outBytes[i]);
    }
    // Now go the correct way, through objectinspectors
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    StructField fieldRef = oi.getStructFieldRef("bytesField");
    outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
    // Equal lengths again: the original loop indexed bytes[] by
    // outBytes.length, which would throw (not fail) if outBytes were longer.
    assertEquals(bytes.length, outBytes.length);
    for (int i = 0; i < outBytes.length; i++) {
        assertEquals(bytes[i], outBytes[i]);
    }
}
Aggregations