Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory in project hive by apache.
From the class TestDruidSerDe, method serializeObject:
private static void serializeObject(Properties properties, DruidSerDe serDe, Object[] rowObject,
    DruidWritable druidWritable) throws SerDeException {
  // Build OI with timestamp granularity column
  final List<String> columnNames = new ArrayList<>(Utilities.getColumnNames(properties));
  columnNames.add(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME);
  final List<PrimitiveTypeInfo> columnTypes = Utilities.getColumnTypes(properties).stream()
      .map(TypeInfoFactory::getPrimitiveTypeInfo)
      .collect(Collectors.toList());
  columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"));
  List<ObjectInspector> inspectors = columnTypes.stream()
      .map(PrimitiveObjectInspectorFactory::getPrimitiveWritableObjectInspector)
      .collect(Collectors.toList());
  ObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
  // Serialize the row with the DruidSerDe
  DruidWritable writable = (DruidWritable) serDe.serialize(rowObject, inspector);
  // Check that the serialized writable matches the expected DruidWritable
  assertEquals(druidWritable.getValue().size(), writable.getValue().size());
  for (Entry<String, Object> e : druidWritable.getValue().entrySet()) {
    assertEquals(e.getValue(), writable.getValue().get(e.getKey()));
  }
}
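
For context, the snippet above relies on TypeInfoFactory to resolve Hive type name strings (such as "timestamp") into PrimitiveTypeInfo instances, which are then mapped to writable ObjectInspectors. The following is a minimal standalone sketch of that lookup pattern; the class name TypeInfoFactoryExample and the chosen type names are illustrative, not part of the Hive test above.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Illustrative class; not part of the Hive codebase.
public class TypeInfoFactoryExample {
  public static void main(String[] args) {
    // Look up PrimitiveTypeInfo instances by Hive type name, as serializeObject
    // does for the timestamp granularity column.
    List<PrimitiveTypeInfo> types = Arrays.asList("string", "bigint", "timestamp").stream()
        .map(TypeInfoFactory::getPrimitiveTypeInfo)
        .collect(Collectors.toList());

    // TypeInfoFactory also exposes constants for the common primitive types.
    PrimitiveTypeInfo ts = TypeInfoFactory.timestampTypeInfo;
    System.out.println(ts.equals(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"))); // true

    // Each PrimitiveTypeInfo can be turned into a writable ObjectInspector,
    // mirroring the inspector construction in the test method.
    List<ObjectInspector> inspectors = types.stream()
        .map(PrimitiveObjectInspectorFactory::getPrimitiveWritableObjectInspector)
        .collect(Collectors.toList());
    System.out.println(inspectors.size()); // 3
  }
}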