Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache: class DruidSerDe, method inferSchema.
/* Select query */
private void inferSchema(SelectQuery query, List<String> columnNames, List<PrimitiveTypeInfo> columnTypes,
    String address) throws SerDeException {
  // Timestamp column
  columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
  columnTypes.add(TypeInfoFactory.timestampTypeInfo);
  // Dimension columns
  for (DimensionSpec ds : query.getDimensions()) {
    columnNames.add(ds.getOutputName());
    columnTypes.add(TypeInfoFactory.stringTypeInfo);
  }
  // The type of the metric columns is not explicit in the query, so we need to
  // issue a segment metadata query to discover it
  SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
  builder.dataSource(query.getDataSource());
  builder.merge(true);
  builder.analysisTypes();
  SegmentMetadataQuery metadataQuery = builder.build();
  // Execute the metadata query against Druid
  SegmentAnalysis schemaInfo;
  try {
    schemaInfo = submitMetadataRequest(address, metadataQuery);
  } catch (IOException e) {
    throw new SerDeException(e);
  }
  if (schemaInfo == null) {
    throw new SerDeException("Connected to Druid but could not retrieve datasource information");
  }
  // Metric columns: map each Druid type to its Hive counterpart
  for (String metric : query.getMetrics()) {
    columnNames.add(metric);
    columnTypes.add(DruidSerDeUtils.convertDruidToHiveType(schemaInfo.getColumns().get(metric).getType()));
  }
}
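The interesting pattern here is how failures are normalized: the low-level IOException from the metadata call and a null response are both surfaced as SerDeException, the exception type callers of a SerDe expect. A minimal, self-contained sketch of that pattern, where fetchRemoteSchema is a hypothetical stand-in for submitMetadataRequest and the payload is a placeholder:

import java.io.IOException;
import org.apache.hadoop.hive.serde2.SerDeException;

public class SchemaFetchExample {

  // Hypothetical stand-in for DruidSerDe#submitMetadataRequest
  private static String fetchRemoteSchema(String address) throws IOException {
    if (address == null || address.isEmpty()) {
      throw new IOException("no Druid broker address configured");
    }
    return "{\"columns\":{}}"; // placeholder payload
  }

  public static String fetchSchema(String address) throws SerDeException {
    String schemaInfo;
    try {
      schemaInfo = fetchRemoteSchema(address);
    } catch (IOException e) {
      // Low-level I/O failures are wrapped in the SerDe-layer exception type
      throw new SerDeException(e);
    }
    if (schemaInfo == null) {
      // An empty answer is also surfaced as a SerDeException
      throw new SerDeException("Connected to Druid but could not retrieve datasource information");
    }
    return schemaInfo;
  }

  public static void main(String[] args) throws SerDeException {
    System.out.println(fetchSchema("localhost:8082"));
  }
}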
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache: class HBaseSerDeParameters, method createValueFactories.
private List<HBaseValueFactory> createValueFactories(Configuration conf, Properties tbl) throws SerDeException {
  List<HBaseValueFactory> valueFactories = new ArrayList<HBaseValueFactory>();
  try {
    for (int i = 0; i < columnMappings.size(); i++) {
      String serType = getSerializationType(conf, tbl, columnMappings.getColumnsMapping()[i]);
      if (AVRO_SERIALIZATION_TYPE.equals(serType)) {
        // Avro-serialized column: resolve its schema and build an Avro factory
        Schema schema = getSchema(conf, tbl, columnMappings.getColumnsMapping()[i]);
        valueFactories.add(new AvroHBaseValueFactory(i, schema));
      } else if (STRUCT_SERIALIZATION_TYPE.equals(serType)) {
        // Struct-serialized column: a serializer class must be configured
        String structValueClassName = tbl.getProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS);
        if (structValueClassName == null) {
          throw new IllegalArgumentException(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS
              + " must be set for hbase columns of type [" + STRUCT_SERIALIZATION_TYPE + "]");
        }
        Class<?> structValueClass = loadClass(structValueClassName, job);
        valueFactories.add(new StructHBaseValueFactory(i, structValueClass));
      } else {
        // Everything else gets the default factory
        valueFactories.add(new DefaultHBaseValueFactory(i));
      }
    }
  } catch (Exception e) {
    throw new SerDeException(e);
  }
  return valueFactories;
}
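The core of the method is a per-column dispatch on the declared serialization type, with an unconditional fallback to the default factory, and any failure along the way collapsed into a SerDeException. A simplified sketch of that dispatch shape; the ValueFactory interface and the factory classes below are hypothetical stand-ins for Hive's HBaseValueFactory hierarchy, not the real API:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class TypeDispatchExample {

  // Hypothetical, simplified stand-ins for Hive's HBaseValueFactory hierarchy
  interface ValueFactory { String describe(); }
  static class AvroFactory implements ValueFactory { public String describe() { return "avro"; } }
  static class StructFactory implements ValueFactory { public String describe() { return "struct"; } }
  static class DefaultFactory implements ValueFactory { public String describe() { return "default"; } }

  static List<ValueFactory> createValueFactories(List<String> serTypes) {
    List<ValueFactory> factories = new ArrayList<>();
    for (String serType : serTypes) {
      // Constant-first equals keeps the comparison null-safe, as in the original
      if ("avro".equals(serType)) {
        factories.add(new AvroFactory());
      } else if ("struct".equals(serType)) {
        factories.add(new StructFactory());
      } else {
        // Unknown or missing type: fall back to the default factory
        factories.add(new DefaultFactory());
      }
    }
    return factories;
  }

  public static void main(String[] args) {
    for (ValueFactory f : createValueFactories(Arrays.asList("avro", null, "struct"))) {
      System.out.println(f.describe());
    }
  }
}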
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache: class TestHBaseSerDe, method deserializeAndSerialize.
private void deserializeAndSerialize(HBaseSerDe serDe, Result r, Put p, Object[] expectedFieldsData) throws SerDeException {
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  assertEquals(9, fieldRefs.size());
  // Deserialize
  Object row = serDe.deserialize(new ResultWritable(r));
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
    if (fieldData != null) {
      fieldData = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    }
    assertEquals("Field " + i, expectedFieldsData[i], fieldData);
  }
  // Serialize
  assertEquals(PutWritable.class, serDe.getSerializedClass());
  PutWritable serializedPut = (PutWritable) serDe.serialize(row, oi);
  assertEquals("Serialized data", p.toString(), String.valueOf(serializedPut.getPut()));
}
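The test drives a full round trip: deserialize an HBase Result into a lazy row, check each field against expectations, then serialize it back and compare with the expected Put. The same round-trip shape applies to any Hive SerDe. A minimal sketch using LazySimpleSerDe with comma-delimited text; note that the two-argument initialize shown here is the pre-Hive-4 signature, and newer releases use a three-argument overload:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class SerDeRoundTripExample {
  public static void main(String[] args) throws Exception {
    LazySimpleSerDe serDe = new LazySimpleSerDe();
    Properties tbl = new Properties();
    tbl.setProperty(serdeConstants.LIST_COLUMNS, "key,value");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,int");
    tbl.setProperty(serdeConstants.FIELD_DELIM, ",");
    serDe.initialize(new Configuration(), tbl);
    // Deserialize a delimited line into a lazy row, then serialize it back
    Object row = serDe.deserialize(new Text("hello,42"));
    ObjectInspector oi = serDe.getObjectInspector();
    Writable roundTripped = serDe.serialize(row, oi);
    System.out.println(roundTripped); // expect: hello,42
  }
}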
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache: class TestHBaseSerDe, method deserializeAndSerializeHiveStructColumnFamily.
private void deserializeAndSerializeHiveStructColumnFamily(HBaseSerDe serDe, Result r, Put p,
    Object[] expectedFieldsData, int[] expectedMapSize, List<Object> expectedQualifiers,
    Object notPresentKey) throws SerDeException, IOException {
  StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
  Object row = serDe.deserialize(new ResultWritable(r));
  int k = 0;
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
    assertNotNull(fieldData);
    if (fieldData instanceof LazyPrimitive<?, ?>) {
      assertEquals(expectedFieldsData[i], ((LazyPrimitive<?, ?>) fieldData).getWritableObject());
    } else if (fieldData instanceof LazyHBaseCellMap) {
      for (int j = 0; j < ((LazyHBaseCellMap) fieldData).getMapSize(); j++) {
        assertEquals(expectedFieldsData[k + 1],
            ((LazyHBaseCellMap) fieldData).getMapValueElement(expectedQualifiers.get(k)).toString().trim());
        k++;
      }
      assertEquals(expectedMapSize[i - 1], ((LazyHBaseCellMap) fieldData).getMapSize());
      // Make sure that the unwanted key is not present in the map
      assertNull(((LazyHBaseCellMap) fieldData).getMapValueElement(notPresentKey));
    } else {
      fail("Error: field data not an instance of LazyPrimitive<?, ?> or LazyHBaseCellMap");
    }
  }
  SerDeUtils.getJSONString(row, soi);
  // Now serialize
  Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
  assertNotNull(put);
}
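Tests like this need hand-built Result and Put fixtures. A hedged sketch of constructing them with the HBase client API; the row key, column family, qualifier, and value below are illustrative, and Put.addColumn is the HBase 1.x+ name (older releases use Put.add):

import java.util.Arrays;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseFixtureExample {
  public static void main(String[] args) {
    byte[] rowKey = Bytes.toBytes("test-row1");
    byte[] cf = Bytes.toBytes("cola");
    byte[] qualifier = Bytes.toBytes("map_key");
    byte[] value = Bytes.toBytes("map_value");

    // A Result as a scan would return it: one Cell per (family, qualifier)
    Cell cell = new KeyValue(rowKey, cf, qualifier, value);
    Result r = Result.create(Arrays.asList(cell));

    // The Put the SerDe is expected to reproduce on serialization
    Put p = new Put(rowKey);
    p.addColumn(cf, qualifier, value);

    System.out.println(r);
    System.out.println(p);
  }
}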
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache: class DelimitedInputWriter, method encode.
@Override
public Object encode(byte[] record) throws SerializationError {
  try {
    BytesWritable blob = new BytesWritable();
    blob.set(record, 0, record.length);
    return serde.deserialize(blob);
  } catch (SerDeException e) {
    throw new SerializationError("Unable to convert byte[] record into Object", e);
  }
}
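This pattern inverts the earlier ones: instead of wrapping an IOException in a SerDeException, the streaming writer catches SerDeException and rethrows it as the streaming API's own SerializationError, preserving the cause chain. A self-contained sketch of that translation; the SerializationError class and RecordDecoder interface below are hypothetical stand-ins for org.apache.hive.hcatalog.streaming.SerializationError and the SerDe call:

import org.apache.hadoop.hive.serde2.SerDeException;

public class ErrorTranslationExample {

  // Hypothetical stand-in for the streaming API's SerializationError
  static class SerializationError extends Exception {
    SerializationError(String msg, Throwable cause) { super(msg, cause); }
  }

  // Hypothetical stand-in for the SerDe's deserialize call
  interface RecordDecoder { Object decode(byte[] record) throws SerDeException; }

  public static Object encode(RecordDecoder decoder, byte[] record) throws SerializationError {
    try {
      return decoder.decode(record);
    } catch (SerDeException e) {
      // Translate the SerDe-layer exception into the API's own error type,
      // keeping the original cause for diagnostics
      throw new SerializationError("Unable to convert byte[] record into Object", e);
    }
  }
}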