Use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project haivvreo by jghoman.
From the class TestAvroDeserializer, the method canDeserializeEnums:
// Enums are one of two types we fudge for Hive. Enums go in, Strings come out.
@Test
public void canDeserializeEnums() throws SerDeException, IOException {
  Schema s = Schema.parse(TestAvroObjectInspectorGenerator.ENUM_SCHEMA);
  GenericData.Record record = new GenericData.Record(s);
  record.put("baddies", "DALEKS");
  assertTrue(GENERIC_DATA.validate(s, record));
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  ArrayList<Object> row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  assertEquals(1, fieldRefs.size());
  StructField fieldRef = fieldRefs.get(0);
  assertEquals("baddies", fieldRef.getFieldName());
  Object theStringObject = oi.getStructFieldData(row, fieldRef);
  assertTrue(fieldRef.getFieldObjectInspector() instanceof StringObjectInspector);
  StringObjectInspector soi = (StringObjectInspector) fieldRef.getFieldObjectInspector();
  String finalValue = soi.getPrimitiveJavaObject(theStringObject);
  assertEquals("DALEKS", finalValue);
}
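For context, ENUM_SCHEMA is a constant defined in TestAvroObjectInspectorGenerator whose single field is an Avro enum. The sketch below is illustrative only (the record name, enum name, and extra symbol are assumptions, not the test's exact constant); it shows the shape of such a schema and why the test can put the symbol in as a plain string and read it back through a StringObjectInspector.

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;

public class EnumSchemaSketch {
  public static void main(String[] args) {
    // Illustrative schema only; the real ENUM_SCHEMA constant lives in
    // TestAvroObjectInspectorGenerator and may use different names and symbols.
    Schema enumSchema = Schema.parse(
        "{\"type\": \"record\", \"name\": \"enumTest\", \"fields\": ["
        + "{\"name\": \"baddies\", \"type\": {\"type\": \"enum\","
        + " \"name\": \"villains\", \"symbols\": [\"DALEKS\", \"CYBERMEN\"]}}]}");
    GenericData.Record r = new GenericData.Record(enumSchema);
    // As in the test above, the enum value is supplied as its symbol string;
    // Hive sees the column as a string rather than a distinct enum type.
    r.put("baddies", "DALEKS");
    System.out.println(r.get("baddies")); // prints DALEKS
  }
}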
Use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project haivvreo by jghoman.
From the class TestAvroDeserializer, the method canDeserializeVoidType:
@Test
public void canDeserializeVoidType() throws IOException, SerDeException {
  // A record whose only field has Avro type "null" maps to Hive's void type.
  String schemaString = "{\n" + " \"type\": \"record\", \n" + " \"name\": \"nullTest\",\n" + " \"fields\" : [\n" + " {\"name\": \"isANull\", \"type\": \"null\"}\n" + " ]\n" + "}";
  Schema s = Schema.parse(schemaString);
  GenericData.Record record = new GenericData.Record(s);
  record.put("isANull", null);
  assertTrue(GENERIC_DATA.validate(s, record));
  AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  AvroDeserializer de = new AvroDeserializer();
  ArrayList<Object> row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
  assertEquals(1, row.size());
  Object theVoidObject = row.get(0);
  assertNull(theVoidObject);
  StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
  StructField fieldRef = oi.getStructFieldRef("isANull");
  Object shouldBeNull = oi.getStructFieldData(row, fieldRef);
  assertNull(shouldBeNull);
  assertTrue(fieldRef.getFieldObjectInspector() instanceof VoidObjectInspector);
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project haivvreo by jghoman.
From the class TestAvroSerde, the method verifyErrorSchemaReturned:
private void verifyErrorSchemaReturned(Properties props) throws SerDeException {
  AvroSerDe asd = new AvroSerDe();
  asd.initialize(new Configuration(), props);
  // When the schema can't be resolved, the SerDe exposes the SIGNAL_BAD_SCHEMA
  // marker schema instead, whose field names make the problem obvious from Hive.
  assertTrue(asd.getObjectInspector() instanceof StandardStructObjectInspector);
  StandardStructObjectInspector oi = (StandardStructObjectInspector) asd.getObjectInspector();
  List<? extends StructField> allStructFieldRefs = oi.getAllStructFieldRefs();
  assertEquals(SchemaResolutionProblem.SIGNAL_BAD_SCHEMA.getFields().size(), allStructFieldRefs.size());
  StructField firstField = allStructFieldRefs.get(0);
  assertTrue(firstField.toString().contains("error_error_error_error_error_error_error"));
  // Both deserialization and serialization should refuse to proceed with a bad schema.
  try {
    Writable mock = Mockito.mock(Writable.class);
    asd.deserialize(mock);
    fail("Should have thrown a BadSchemaException");
  } catch (BadSchemaException bse) {
    // good
  }
  try {
    Object o = Mockito.mock(Object.class);
    ObjectInspector mockOI = Mockito.mock(ObjectInspector.class);
    asd.serialize(o, mockOI);
    fail("Should have thrown a BadSchemaException");
  } catch (BadSchemaException bse) {
    // good
  }
}
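A hedged sketch of how this helper might be driven from a test (an assumption, not the actual call sites in TestAvroSerde; the property key shown is the avro.schema.url key used by Hive's Avro SerDe, and haivvreo's own constant for the URL property may differ): pointing the schema URL at an unreachable location is one way to force the SerDe onto the SIGNAL_BAD_SCHEMA marker schema that this method checks for.

// Hypothetical caller inside the same test class; the property key is an assumption.
Properties props = new Properties();
props.put("avro.schema.url", "http://example.invalid/no-such-schema.avsc");
verifyErrorSchemaReturned(props);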
Use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project hive by apache.
From the class VectorizedBatchUtil, the method convertToStandardStructObjectInspector:
public static StandardStructObjectInspector convertToStandardStructObjectInspector(StructObjectInspector structObjectInspector) throws HiveException {
  List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>();
  ArrayList<String> columnNames = new ArrayList<String>();
  for (StructField field : fields) {
    // Re-derive each field's type from its type name, then pick the standard
    // writable object inspector for that type; the column names are preserved.
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName());
    ObjectInspector standardWritableObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    oids.add(standardWritableObjectInspector);
    columnNames.add(field.getFieldName());
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
}
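A minimal usage sketch (not from the Hive source; the column names and types are illustrative): build a Java-object-backed struct inspector and convert it. The field names survive the conversion, while each field's inspector is replaced by the standard writable inspector for the same type.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConvertExample {
  public static void main(String[] args) throws HiveException {
    List<String> names = Arrays.asList("id", "name");
    List<ObjectInspector> javaOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    StandardStructObjectInspector javaStructOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, javaOIs);
    // Same column names, but the fields are now backed by writable inspectors
    // (LongWritable, Text) instead of the Java-object ones above.
    StandardStructObjectInspector writableStructOI =
        VectorizedBatchUtil.convertToStandardStructObjectInspector(javaStructOI);
    System.out.println(writableStructOI.getTypeName()); // struct<id:bigint,name:string>
  }
}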
Use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project hive by apache.
From the class MyTestPrimitiveClass, the method getRowInspector:
public StructObjectInspector getRowInspector(PrimitiveTypeInfo[] primitiveTypeInfos) {
  List<String> columnNames = new ArrayList<String>(primitiveCount);
  List<ObjectInspector> primitiveObjectInspectorList = new ArrayList<ObjectInspector>(primitiveCount);
  for (int index = 0; index < MyTestPrimitiveClass.primitiveCount; index++) {
    // One synthetic column per primitive, each backed by the writable object
    // inspector for its primitive category.
    columnNames.add(String.format("col%d", index));
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[index];
    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
    primitiveObjectInspectorList.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveCategory));
  }
  StandardStructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, primitiveObjectInspectorList);
  return rowOI;
}
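A minimal sketch of how a row inspector like this is consumed (an assumed, self-contained example rather than part of MyTestPrimitiveClass; the column names and values are illustrative): the row is simply a List of writables in column order, and individual fields are read back through the struct inspector.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class RowInspectorUsage {
  public static void main(String[] args) {
    List<String> names = Arrays.asList("col0", "col1");
    List<ObjectInspector> ois = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    StandardStructObjectInspector rowOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);

    // A row is a list of writables, one per column, in declaration order.
    ArrayList<Object> row = new ArrayList<Object>(
        Arrays.asList(new IntWritable(42), new Text("hello")));

    StructField col1 = rowOI.getStructFieldRef("col1");
    Text value = (Text) rowOI.getStructFieldData(row, col1);
    System.out.println(value); // prints hello
  }
}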