Usage of org.apache.hadoop.hive.druid.serde.DruidSerDe in the project "hive" by Apache.
From the class TestDruidSerDe, method testDruidSerializer:
/**
 * Verifies the default serialization path of the Druid SerDe: a row object
 * covering every supported column type must serialize into the expected
 * Druid writable.
 *
 * @throws IOException
 * @throws IllegalAccessException
 * @throws IllegalArgumentException
 * @throws SecurityException
 * @throws NoSuchFieldException
 * @throws JsonMappingException
 * @throws JsonParseException
 * @throws InvocationTargetException
 * @throws NoSuchMethodException
 */
@Test
public void testDruidSerializer() throws SerDeException, JsonParseException, JsonMappingException, NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, IOException, InterruptedException, NoSuchMethodException, InvocationTargetException {
// Build table properties for a mixed source covering all column types,
// then create and initialize the SerDe against them.
final DruidSerDe serDe = new DruidSerDe();
final Configuration conf = new Configuration();
final Properties tbl = createPropertiesSource(COLUMN_NAMES, COLUMN_TYPES);
SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
// Round-trip check: the row object must serialize to the expected writable.
serializeObject(tbl, serDe, ROW_OBJECT, DRUID_WRITABLE);
}
Usage of org.apache.hadoop.hive.druid.serde.DruidSerDe in the project "hive" by Apache.
From the class DruidVectorizedWrapper, method createAndInitializeSerde:
/**
 * Creates a {@link DruidSerDe} and initializes it with the table properties
 * carried by the {@link MapWork} attached to the given job configuration.
 *
 * @param jobConf job configuration from which the map work is resolved
 * @return a fully initialized Druid SerDe
 * @throws RuntimeException if the map work is absent, no partition descriptor
 *         provides table properties, or SerDe initialization fails
 */
private static DruidSerDe createAndInitializeSerde(Configuration jobConf) {
final DruidSerDe serDe = new DruidSerDe();
final MapWork mapWork = Preconditions.checkNotNull(Utilities.getMapWork(jobConf), "Map work is null");
// Every partition of this map work refers to the same table descriptor,
// so any partition's properties are sufficient for initialization.
final Properties properties = mapWork.getPartitionDescs()
    .stream()
    .map(partitionDesc -> partitionDesc.getTableDesc().getProperties())
    .findAny()
    .orElseThrow(() -> new RuntimeException("Cannot find table properties in the map work"));
try {
serDe.initialize(jobConf, properties, null);
} catch (SerDeException e) {
// Preserve the cause so initialization failures remain diagnosable upstream.
throw new RuntimeException("Cannot initialize the SerDe", e);
}
return serDe;
}
Aggregations