Use of org.apache.hadoop.hive.serde2.objectinspector.StructField in project hive by apache.
From the class TestProtocolBuffersObjectInspectors, method testProtocolBuffersObjectInspectors:
public void testProtocolBuffersObjectInspectors() throws Throwable {
  try {
    ObjectInspector oi1 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class,
        ObjectInspectorFactory.ObjectInspectorOptions.PROTOCOL_BUFFERS);
    ObjectInspector oi2 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class,
        ObjectInspectorFactory.ObjectInspectorOptions.PROTOCOL_BUFFERS);
    assertEquals(oi1, oi2);
    // metadata
    assertEquals(Category.STRUCT, oi1.getCategory());
    StructObjectInspector soi = (StructObjectInspector) oi1;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    assertEquals(5, fields.size());
    assertEquals(fields.get(0), soi.getStructFieldRef("aint_"));
    // null
    for (int i = 0; i < fields.size(); i++) {
      assertNull(soi.getStructFieldData(null, fields.get(i)));
    }
    // real object
    List<Integer> c2 = Arrays.asList(new Integer[] { 1, 2, 3 });
    List<String> c3 = Arrays.asList(new String[] { "one", "two" });
    List<IntString> c4 = new ArrayList<IntString>();
    Complex c = new Complex(1, "test", c2, c3, c4);
    assertEquals(1, soi.getStructFieldData(c, fields.get(0)));
    assertEquals("test", soi.getStructFieldData(c, fields.get(1)));
    assertEquals(c2, soi.getStructFieldData(c, fields.get(2)));
    assertEquals(c3, soi.getStructFieldData(c, fields.get(3)));
    assertEquals(c4, soi.getStructFieldData(c, fields.get(4)));
    ArrayList<Object> cfields = new ArrayList<Object>();
    for (int i = 0; i < 5; i++) {
      cfields.add(soi.getStructFieldData(c, fields.get(i)));
    }
    assertEquals(cfields, soi.getStructFieldsDataAsList(c));
    // sub fields
    assertEquals(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        fields.get(0).getFieldObjectInspector());
    assertEquals(PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        fields.get(1).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector),
        fields.get(2).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector),
        fields.get(3).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        ObjectInspectorFactory.getReflectionObjectInspector(IntString.class,
            ObjectInspectorFactory.ObjectInspectorOptions.PROTOCOL_BUFFERS)),
        fields.get(4).getFieldObjectInspector());
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
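The assertions above exercise the generic StructField access pattern: resolve the field refs, then pull data per field through the StructObjectInspector. As a minimal, hedged sketch independent of the protobuf-generated Complex class, the same traversal can be written for any struct and inspector pair (the method and variable names below are illustrative, not from the Hive tests):

// Minimal sketch (illustrative, not from the Hive source): walk any struct
// through its StructObjectInspector and print each field's name, type and value.
static void dumpStruct(Object struct, StructObjectInspector soi) {
  for (StructField field : soi.getAllStructFieldRefs()) {
    Object data = soi.getStructFieldData(struct, field);
    System.out.println(field.getFieldName() + " ("
        + field.getFieldObjectInspector().getTypeName() + ") = " + data);
  }
}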
Use of org.apache.hadoop.hive.serde2.objectinspector.StructField in project hive by apache.
From the class TestThriftObjectInspectors, method testThriftObjectInspectors:
public void testThriftObjectInspectors() throws Throwable {
  try {
    ObjectInspector oi1 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class,
        ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    ObjectInspector oi2 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class,
        ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    assertEquals(oi1, oi2);
    // metadata
    assertEquals(Category.STRUCT, oi1.getCategory());
    StructObjectInspector soi = (StructObjectInspector) oi1;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    assertEquals(10, fields.size());
    assertEquals(fields.get(0), soi.getStructFieldRef("aint"));
    // null
    for (int i = 0; i < fields.size(); i++) {
      assertNull(soi.getStructFieldData(null, fields.get(i)));
    }
    ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(PropValueUnion.class,
        ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    assertNotNull(oi.toString());
    // real object
    Complex c = new Complex();
    c.setAint(1);
    c.setAString("test");
    List<Integer> c2 = Arrays.asList(new Integer[] { 1, 2, 3 });
    c.setLint(c2);
    List<String> c3 = Arrays.asList(new String[] { "one", "two" });
    c.setLString(c3);
    List<IntString> c4 = new ArrayList<IntString>();
    c.setLintString(c4);
    c.setMStringString(null);
    c.setAttributes(null);
    c.setUnionField1(null);
    c.setUnionField2(null);
    c.setUnionField3(null);
    assertEquals(1, soi.getStructFieldData(c, fields.get(0)));
    assertEquals("test", soi.getStructFieldData(c, fields.get(1)));
    assertEquals(c2, soi.getStructFieldData(c, fields.get(2)));
    assertEquals(c3, soi.getStructFieldData(c, fields.get(3)));
    assertEquals(c4, soi.getStructFieldData(c, fields.get(4)));
    assertNull(soi.getStructFieldData(c, fields.get(5)));
    assertNull(soi.getStructFieldData(c, fields.get(6)));
    assertNull(soi.getStructFieldData(c, fields.get(7)));
    assertNull(soi.getStructFieldData(c, fields.get(8)));
    assertNull(soi.getStructFieldData(c, fields.get(9)));
    ArrayList<Object> cfields = new ArrayList<Object>();
    for (int i = 0; i < 10; i++) {
      cfields.add(soi.getStructFieldData(c, fields.get(i)));
    }
    assertEquals(cfields, soi.getStructFieldsDataAsList(c));
    // sub fields
    assertEquals(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        fields.get(0).getFieldObjectInspector());
    assertEquals(PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        fields.get(1).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector),
        fields.get(2).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector),
        fields.get(3).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        ObjectInspectorFactory.getReflectionObjectInspector(IntString.class,
            ObjectInspectorFactory.ObjectInspectorOptions.THRIFT)),
        fields.get(4).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector),
        fields.get(5).getFieldObjectInspector());
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
Use of org.apache.hadoop.hive.serde2.objectinspector.StructField in project haivvreo by jghoman.
From the class TestAvroObjectInspectorGenerator, method primitiveTypesWorkCorrectly:
@Test
public void primitiveTypesWorkCorrectly() throws SerDeException {
  final String bunchOfPrimitives = "{\n" + " \"namespace\": \"testing\",\n"
      + " \"name\": \"PrimitiveTypes\",\n" + " \"type\": \"record\",\n" + " \"fields\": [\n"
      + " {\n" + " \"name\":\"aString\",\n" + " \"type\":\"string\"\n" + " },\n"
      + " {\n" + " \"name\":\"anInt\",\n" + " \"type\":\"int\"\n" + " },\n"
      + " {\n" + " \"name\":\"aBoolean\",\n" + " \"type\":\"boolean\"\n" + " },\n"
      + " {\n" + " \"name\":\"aLong\",\n" + " \"type\":\"long\"\n" + " },\n"
      + " {\n" + " \"name\":\"aFloat\",\n" + " \"type\":\"float\"\n" + " },\n"
      + " {\n" + " \"name\":\"aDouble\",\n" + " \"type\":\"double\"\n" + " },\n"
      + " {\n" + " \"name\":\"aNull\",\n" + " \"type\":\"null\"\n" + " }\n"
      + " ]\n" + "}";
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(Schema.parse(bunchOfPrimitives));
  String[] expectedColumnNames = { "aString", "anInt", "aBoolean", "aLong", "aFloat", "aDouble", "aNull" };
  verifyColumnNames(expectedColumnNames, aoig.getColumnNames());
  TypeInfo[] expectedColumnTypes = { STRING, INT, BOOLEAN, LONG, FLOAT, DOUBLE, VOID };
  verifyColumnTypes(expectedColumnTypes, aoig.getColumnTypes());
  // Rip apart the object inspector, making sure we got what we expect.
  final ObjectInspector oi = aoig.getObjectInspector();
  assertTrue(oi instanceof StandardStructObjectInspector);
  final StandardStructObjectInspector ssoi = (StandardStructObjectInspector) oi;
  List<? extends StructField> structFields = ssoi.getAllStructFieldRefs();
  assertEquals(expectedColumnNames.length, structFields.size());
  for (int i = 0; i < expectedColumnNames.length; i++) {
    assertEquals("Column names don't match",
        expectedColumnNames[i].toLowerCase(), structFields.get(i).getFieldName());
    assertEquals("Column types don't match",
        expectedColumnTypes[i].getTypeName(), structFields.get(i).getFieldObjectInspector().getTypeName());
  }
}
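The STRING, INT, BOOLEAN, LONG, FLOAT, DOUBLE and VOID constants are not defined in this excerpt; in the test class they presumably resolve to primitive TypeInfo instances, roughly along these lines (a sketch of plausible declarations, not the verbatim haivvreo code):

// Plausible declarations for the constants referenced above (assumption, not
// copied from the haivvreo test class).
private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo("string");
private static final TypeInfo INT = TypeInfoFactory.getPrimitiveTypeInfo("int");
private static final TypeInfo BOOLEAN = TypeInfoFactory.getPrimitiveTypeInfo("boolean");
private static final TypeInfo LONG = TypeInfoFactory.getPrimitiveTypeInfo("long");
private static final TypeInfo FLOAT = TypeInfoFactory.getPrimitiveTypeInfo("float");
private static final TypeInfo DOUBLE = TypeInfoFactory.getPrimitiveTypeInfo("double");
private static final TypeInfo VOID = TypeInfoFactory.getPrimitiveTypeInfo("void");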
Use of org.apache.hadoop.hive.serde2.objectinspector.StructField in project druid by druid-io.
From the class OrcHadoopInputRowParser, method parse:
@Override
public InputRow parse(OrcStruct input) {
  Map<String, Object> map = Maps.newHashMap();
  List<? extends StructField> fields = oip.getAllStructFieldRefs();
  for (StructField field : fields) {
    ObjectInspector objectInspector = field.getFieldObjectInspector();
    switch (objectInspector.getCategory()) {
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) objectInspector;
        map.put(field.getFieldName(),
            primitiveObjectInspector.getPrimitiveJavaObject(oip.getStructFieldData(input, field)));
        break;
      // array case - only 1-depth array supported yet
      case LIST:
        ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
        map.put(field.getFieldName(),
            getListObject(listObjectInspector, oip.getStructFieldData(input, field)));
        break;
      default:
        break;
    }
  }
  TimestampSpec timestampSpec = parseSpec.getTimestampSpec();
  DateTime dateTime = timestampSpec.extractTimestamp(map);
  return new MapBasedInputRow(dateTime, dimensions, map);
}
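The getListObject helper referenced above is not part of this excerpt; a minimal sketch of what such a one-depth list unwrapping might look like, assuming it lives in the same parser class with its existing imports (the body and naming are assumptions, not the Druid implementation):

// Hypothetical sketch of a one-depth list unwrapper (not the Druid source):
// converts a LIST-typed field into a plain Java List, unwrapping primitive elements.
private static List<Object> getListObject(ListObjectInspector listObjectInspector, Object listObject) {
  if (listObject == null) {
    return null;
  }
  List<Object> result = new ArrayList<>();
  ObjectInspector elementInspector = listObjectInspector.getListElementObjectInspector();
  for (Object element : listObjectInspector.getList(listObject)) {
    if (elementInspector.getCategory() == ObjectInspector.Category.PRIMITIVE) {
      result.add(((PrimitiveObjectInspector) elementInspector).getPrimitiveJavaObject(element));
    } else {
      result.add(element);
    }
  }
  return result;
}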
Use of org.apache.hadoop.hive.serde2.objectinspector.StructField in project hive by apache.
From the class HBaseRowSerializer, method serializeKeyField:
byte[] serializeKeyField(Object keyValue, StructField keyField, ColumnMapping keyMapping) throws IOException {
  if (keyValue == null) {
    throw new IOException("HBase row key cannot be NULL");
  }
  ObjectInspector keyFieldOI = keyField.getFieldObjectInspector();
  if (!keyFieldOI.getCategory().equals(ObjectInspector.Category.PRIMITIVE)
      && keyMapping.isCategory(ObjectInspector.Category.PRIMITIVE)) {
    // we always serialize the String type using the escaped algorithm for LazyString
    return serialize(SerDeUtils.getJSONString(keyValue, keyFieldOI),
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, 1, false);
  }
  // use the serialization option switch to write primitive values as either a variable
  // length UTF8 string or a fixed width bytes if serializing in binary format
  boolean writeBinary = keyMapping.binaryStorage.get(0);
  return serialize(keyValue, keyFieldOI, 1, writeBinary);
}
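For context, a caller would typically resolve the key StructField from the row's StructObjectInspector before invoking serializeKeyField; a hypothetical caller-side sketch (the method name, keyColumnName and row parameters are illustrative, not from the Hive HBase handler):

// Hypothetical caller-side sketch (not the Hive source): look up the row-key
// field on the row's StructObjectInspector, extract its data, and serialize it.
byte[] serializeRowKey(Object row, StructObjectInspector rowOI,
    String keyColumnName, ColumnMapping keyMapping) throws IOException {
  StructField keyField = rowOI.getStructFieldRef(keyColumnName);
  Object keyValue = rowOI.getStructFieldData(row, keyField);
  return serializeKeyField(keyValue, keyField, keyMapping);
}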