Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project haivvreo by jghoman.
Class SchemaToTypeInfo, method generateArrayTypeInfo.
private static TypeInfo generateArrayTypeInfo(Schema schema) throws HaivvreoException {
  assert schema.getType().equals(Schema.Type.ARRAY);
  Schema itemsType = schema.getElementType();
  TypeInfo itemsTypeInfo = generateTypeInfo(itemsType);
  return TypeInfoFactory.getListTypeInfo(itemsTypeInfo);
}
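For reference, a minimal sketch of what the factory call above yields, using the same TypeInfoFactory class; the "int" element type is just an illustrative choice. Note that Hive names list types "array<...>".

// The array schema's element type becomes the element TypeInfo of a Hive list type.
TypeInfo elementType = TypeInfoFactory.getPrimitiveTypeInfo("int");
TypeInfo listType = TypeInfoFactory.getListTypeInfo(elementType);
System.out.println(listType.getTypeName()); // prints "array<int>", Hive's name for a list of ints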
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project haivvreo by jghoman.
Class SchemaToTypeInfo, method generateMapTypeInfo.
/**
 * Generate a TypeInfo for an Avro Map. This is made slightly simpler by the fact
 * that Avro only allows maps with string keys.
 */
private static TypeInfo generateMapTypeInfo(Schema schema) throws HaivvreoException {
  assert schema.getType().equals(Schema.Type.MAP);
  Schema valueType = schema.getValueType();
  TypeInfo ti = generateTypeInfo(valueType);
  return TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo("string"), ti);
}
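A small illustrative sketch of what the map branch produces (the "int" value type is chosen arbitrarily); because Avro map keys are always strings, the key side of the Hive type is fixed to string.

TypeInfo keyType = TypeInfoFactory.getPrimitiveTypeInfo("string");
TypeInfo valueType = TypeInfoFactory.getPrimitiveTypeInfo("int");
TypeInfo mapType = TypeInfoFactory.getMapTypeInfo(keyType, valueType);
System.out.println(mapType.getTypeName()); // prints "map<string,int>"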
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project haivvreo by jghoman.
Class TestAvroSerializer, method serializeAndDeserialize.
/**
 * Verify that we can round-trip an Avro value: take one, run it through
 * the deserialization process, and then serialize it again.
 */
private GenericRecord serializeAndDeserialize(String recordValue, String fieldName, Object fieldValue) throws SerDeException, IOException {
  Schema s = buildSchema(recordValue);
  GenericData.Record r = new GenericData.Record(s);
  r.put(fieldName, fieldValue);
  AvroSerializer as = new AvroSerializer();
  AvroDeserializer ad = new AvroDeserializer();
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
  ObjectInspector oi = aoig.getObjectInspector();
  List<String> columnNames = aoig.getColumnNames();
  List<TypeInfo> columnTypes = aoig.getColumnTypes();
  AvroGenericRecordWritable agrw = Utils.serializeAndDeserializeRecord(r);
  Object obj = ad.deserialize(columnNames, columnTypes, agrw, s);
  Writable result = as.serialize(obj, oi, columnNames, columnTypes, s);
  assertTrue(result instanceof AvroGenericRecordWritable);
  GenericRecord r2 = ((AvroGenericRecordWritable) result).getRecord();
  assertEquals(s, r2.getSchema());
  return r2;
}
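A hypothetical test built on this helper might look like the sketch below; the exact field-declaration string that buildSchema expects, and the field name "int1", are assumptions made for illustration.

@Test
public void canRoundTripAnIntField() throws SerDeException, IOException {
  // Round-trip a record with a single int field and check that the value survives.
  GenericRecord r = serializeAndDeserialize("{ \"name\":\"int1\", \"type\":\"int\" }", "int1", 42);
  assertEquals(42, r.get("int1"));
}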
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project haivvreo by jghoman.
Class TestAvroObjectInspectorGenerator, method primitiveTypesWorkCorrectly.
@Test
public void primitiveTypesWorkCorrectly() throws SerDeException {
  final String bunchOfPrimitives = "{\n" +
      "  \"namespace\": \"testing\",\n" +
      "  \"name\": \"PrimitiveTypes\",\n" +
      "  \"type\": \"record\",\n" +
      "  \"fields\": [\n" +
      "    {\"name\":\"aString\", \"type\":\"string\"},\n" +
      "    {\"name\":\"anInt\", \"type\":\"int\"},\n" +
      "    {\"name\":\"aBoolean\", \"type\":\"boolean\"},\n" +
      "    {\"name\":\"aLong\", \"type\":\"long\"},\n" +
      "    {\"name\":\"aFloat\", \"type\":\"float\"},\n" +
      "    {\"name\":\"aDouble\", \"type\":\"double\"},\n" +
      "    {\"name\":\"aNull\", \"type\":\"null\"}\n" +
      "  ]\n" +
      "}";
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(Schema.parse(bunchOfPrimitives));
  String[] expectedColumnNames = { "aString", "anInt", "aBoolean", "aLong", "aFloat", "aDouble", "aNull" };
  verifyColumnNames(expectedColumnNames, aoig.getColumnNames());
  TypeInfo[] expectedColumnTypes = { STRING, INT, BOOLEAN, LONG, FLOAT, DOUBLE, VOID };
  verifyColumnTypes(expectedColumnTypes, aoig.getColumnTypes());
  // Rip apart the object inspector, making sure we got what we expect.
  final ObjectInspector oi = aoig.getObjectInspector();
  assertTrue(oi instanceof StandardStructObjectInspector);
  final StandardStructObjectInspector ssoi = (StandardStructObjectInspector) oi;
  List<? extends StructField> structFields = ssoi.getAllStructFieldRefs();
  assertEquals(expectedColumnNames.length, structFields.size());
  for (int i = 0; i < expectedColumnNames.length; i++) {
    assertEquals("Column names don't match", expectedColumnNames[i].toLowerCase(), structFields.get(i).getFieldName());
    assertEquals("Column types don't match", expectedColumnTypes[i].getTypeName(), structFields.get(i).getFieldObjectInspector().getTypeName());
  }
}
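As a standalone sketch of the mapping this test asserts: the LONG and VOID constants used above correspond to Hive's "bigint" and "void" type names. The single-field schema below is illustrative, not taken from the project's tests.

AvroObjectInspectorGenerator aoig2 = new AvroObjectInspectorGenerator(Schema.parse(
    "{\"type\":\"record\", \"name\":\"OneLong\", \"fields\":[{\"name\":\"aLong\", \"type\":\"long\"}]}"));
// Avro's long maps to Hive's bigint.
System.out.println(aoig2.getColumnTypes().get(0).getTypeName()); // expected: "bigint"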
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project druid by druid-io.
Class OrcHadoopInputRowParser, method getTablePropertiesFromStructTypeInfo.
public static Properties getTablePropertiesFromStructTypeInfo(StructTypeInfo structTypeInfo) {
  Properties table = new Properties();
  table.setProperty("columns", StringUtils.join(structTypeInfo.getAllStructFieldNames(), ","));
  table.setProperty("columns.types", StringUtils.join(
      Lists.transform(structTypeInfo.getAllStructFieldTypeInfos(), new Function<TypeInfo, String>() {

        @Nullable
        @Override
        public String apply(@Nullable TypeInfo typeInfo) {
          return typeInfo.getTypeName();
        }
      }), ","));
  return table;
}
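A hypothetical usage sketch: build a StructTypeInfo from a Hive type string with org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils and read back the generated table properties. The field names below are made up for illustration.

// Derive the "columns"/"columns.types" table properties from a struct type string.
StructTypeInfo rowType = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(
    "struct<ts:bigint,dim1:string,metric1:double>");
Properties props = getTablePropertiesFromStructTypeInfo(rowType);
System.out.println(props.getProperty("columns"));       // ts,dim1,metric1
System.out.println(props.getProperty("columns.types")); // bigint,string,double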