Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class TestLazyBinaryColumnarSerDe, method testLazyBinaryColumnarSerDeWithEmptyBinary:
public void testLazyBinaryColumnarSerDeWithEmptyBinary() throws SerDeException {
  StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory
      .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA);
  String cols = ObjectInspectorUtils.getFieldNames(oi);
  Properties props = new Properties();
  props.setProperty(serdeConstants.LIST_COLUMNS, cols);
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi));
  LazyBinaryColumnarSerDe serde = new LazyBinaryColumnarSerDe();
  SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);
  OuterStruct outerStruct = new OuterStruct();
  outerStruct.mByte = 101;
  outerStruct.mShort = 2002;
  outerStruct.mInt = 3003;
  outerStruct.mLong = 4004L;
  outerStruct.mFloat = 5005.01f;
  outerStruct.mDouble = 6006.001d;
  outerStruct.mString = "";
  outerStruct.mBA = new byte[] {};
  outerStruct.mArray = new ArrayList<InnerStruct>();
  outerStruct.mMap = new TreeMap<String, InnerStruct>();
  outerStruct.mStruct = new InnerStruct(180018, 190019L);
  try {
    serde.serialize(outerStruct, oi);
  } catch (RuntimeException re) {
    assertEquals(re.getMessage(), "LazyBinaryColumnarSerde cannot serialize a non-null "
        + "zero length binary field. Consider using either LazyBinarySerde or ColumnarSerde.");
    return;
  }
  assert false;
}
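For contrast, a minimal happy-path sketch, assuming mBA is given at least one byte so the same serde accepts the row; the variable names serialized, deserialized, and outOI are illustrative and not taken from the original test:

  // Sketch only: with a non-empty binary field the round trip is expected to succeed.
  outerStruct.mBA = new byte[] { 1 };
  Writable serialized = serde.serialize(outerStruct, oi);
  Object deserialized = serde.deserialize(serialized);
  StructObjectInspector outOI = (StructObjectInspector) serde.getObjectInspector();
  for (StructField fieldRef : outOI.getAllStructFieldRefs()) {
    // Each field of the deserialized row is reachable through the serde's own inspector.
    System.out.println(fieldRef.getFieldName() + " = "
        + outOI.getStructFieldData(deserialized, fieldRef));
  }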
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class SQLOperation, method decodeFromString:
private RowSet decodeFromString(List<Object> rows, RowSet rowSet)
    throws SQLException, SerDeException {
  getSerDe();
  StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
  List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
  Object[] deserializedFields = new Object[fieldRefs.size()];
  Object rowObj;
  ObjectInspector fieldOI;
  int protocol = getProtocolVersion().getValue();
  // Deserialize each row string, then convert every field to a Thrift payload
  // for the given protocol version.
  for (Object rowString : rows) {
    try {
      rowObj = serde.deserialize(new BytesWritable(((String) rowString).getBytes("UTF-8")));
    } catch (UnsupportedEncodingException e) {
      throw new SerDeException(e);
    }
    for (int i = 0; i < fieldRefs.size(); i++) {
      StructField fieldRef = fieldRefs.get(i);
      fieldOI = fieldRef.getFieldObjectInspector();
      Object fieldData = soi.getStructFieldData(rowObj, fieldRef);
      deserializedFields[i] = SerDeUtils.toThriftPayload(fieldData, fieldOI, protocol);
    }
    rowSet.addRow(deserializedFields);
  }
  return rowSet;
}
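Since the charset name is hard-coded, the UnsupportedEncodingException can never actually be thrown; a small variation, assuming Java 7+ and java.nio.charset.StandardCharsets, drops the checked exception and the surrounding try/catch:

  // Sketch: Charset-based getBytes needs no try/catch.
  rowObj = serde.deserialize(
      new BytesWritable(((String) rowString).getBytes(StandardCharsets.UTF_8)));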
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class HBaseRowSerializer, method serialize:
private boolean serialize(Object obj, ObjectInspector objInspector, int level, ByteStream.Output ss)
    throws IOException {
  // Recursively writes obj to ss as delimited text; separators is indexed by nesting level,
  // so deeper levels use later delimiters.
  switch (objInspector.getCategory()) {
    case PRIMITIVE:
      LazyUtils.writePrimitiveUTF8(ss, obj, (PrimitiveObjectInspector) objInspector,
          escaped, escapeChar, needsEscape);
      return true;
    case LIST:
      char separator = (char) separators[level];
      ListObjectInspector loi = (ListObjectInspector) objInspector;
      List<?> list = loi.getList(obj);
      ObjectInspector eoi = loi.getListElementObjectInspector();
      if (list == null) {
        return false;
      } else {
        for (int i = 0; i < list.size(); i++) {
          if (i > 0) {
            ss.write(separator);
          }
          serialize(list.get(i), eoi, level + 1, ss);
        }
      }
      return true;
    case MAP:
      char sep = (char) separators[level];
      char keyValueSeparator = (char) separators[level + 1];
      MapObjectInspector moi = (MapObjectInspector) objInspector;
      ObjectInspector koi = moi.getMapKeyObjectInspector();
      ObjectInspector voi = moi.getMapValueObjectInspector();
      Map<?, ?> map = moi.getMap(obj);
      if (map == null) {
        return false;
      } else {
        boolean first = true;
        for (Map.Entry<?, ?> entry : map.entrySet()) {
          if (first) {
            first = false;
          } else {
            ss.write(sep);
          }
          serialize(entry.getKey(), koi, level + 2, ss);
          if (entry.getValue() != null) {
            ss.write(keyValueSeparator);
            serialize(entry.getValue(), voi, level + 2, ss);
          }
        }
      }
      return true;
    case STRUCT:
      sep = (char) separators[level];
      StructObjectInspector soi = (StructObjectInspector) objInspector;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      list = soi.getStructFieldsDataAsList(obj);
      if (list == null) {
        return false;
      } else {
        for (int i = 0; i < list.size(); i++) {
          if (i > 0) {
            ss.write(sep);
          }
          serialize(list.get(i), fields.get(i).getFieldObjectInspector(), level + 1, ss);
        }
      }
      return true;
    case UNION: {
      // union type currently not totally supported. See HIVE-2390
      return false;
    }
    default:
      throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
  }
}
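As a rough illustration of how the level-indexed separators play out, assuming the default Hive delimiters ('\001' at level 0, '\002' at level 1, '\003' at level 2), which a given HBase table is not required to use:

  // Hypothetical row: struct { name = "x", tags = ["a", "b"], props = {"k" -> "v"} }
  String expected =
      "x" + '\001'                  // struct fields joined with separators[0]
    + "a" + '\002' + "b" + '\001'   // list elements joined with separators[1]
    + "k" + '\003' + "v";           // map key and value joined with separators[2]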
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class TestHBaseSerDe, method deserializeAndSerialize:
private void deserializeAndSerialize(HBaseSerDe serDe, Result r, Put p,
    Object[] expectedFieldsData) throws SerDeException {
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  assertEquals(9, fieldRefs.size());
  // Deserialize
  Object row = serDe.deserialize(new ResultWritable(r));
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
    if (fieldData != null) {
      fieldData = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    }
    assertEquals("Field " + i, expectedFieldsData[i], fieldData);
  }
  // Serialize
  assertEquals(PutWritable.class, serDe.getSerializedClass());
  PutWritable serializedPut = (PutWritable) serDe.serialize(row, oi);
  assertEquals("Serialized data", p.toString(), String.valueOf(serializedPut.getPut()));
}
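The helper assumes the HBaseSerDe was initialized earlier in the test; a minimal sketch of that setup follows, with illustrative column names and an hbase.columns.mapping value that do not match the nine-column schema the assertion above expects:

  // Sketch only: initialize an HBaseSerDe before exercising deserializeAndSerialize.
  Properties tbl = new Properties();
  tbl.setProperty(serdeConstants.LIST_COLUMNS, "key,a,b");
  tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string:int:string");
  tbl.setProperty("hbase.columns.mapping", ":key,cf:a,cf:b");
  HBaseSerDe serDe = new HBaseSerDe();
  SerDeUtils.initializeSerDe(serDe, new Configuration(), tbl, null);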
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class TestHBaseSerDe, method deserializeAndSerializeHiveStructColumnFamily:
private void deserializeAndSerializeHiveStructColumnFamily(HBaseSerDe serDe, Result r, Put p,
    Object[] expectedFieldsData, int[] expectedMapSize, List<Object> expectedQualifiers,
    Object notPresentKey) throws SerDeException, IOException {
  StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
  Object row = serDe.deserialize(new ResultWritable(r));
  int k = 0;
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
    assertNotNull(fieldData);
    if (fieldData instanceof LazyPrimitive<?, ?>) {
      assertEquals(expectedFieldsData[i], ((LazyPrimitive<?, ?>) fieldData).getWritableObject());
    } else if (fieldData instanceof LazyHBaseCellMap) {
      for (int j = 0; j < ((LazyHBaseCellMap) fieldData).getMapSize(); j++) {
        assertEquals(expectedFieldsData[k + 1],
            ((LazyHBaseCellMap) fieldData).getMapValueElement(expectedQualifiers.get(k))
                .toString().trim());
        k++;
      }
      assertEquals(expectedMapSize[i - 1], ((LazyHBaseCellMap) fieldData).getMapSize());
      // Make sure that the unwanted key is not present in the map
      assertNull(((LazyHBaseCellMap) fieldData).getMapValueElement(notPresentKey));
    } else {
      fail("Error: field data not an instance of LazyPrimitive<?, ?> or LazyHBaseCellMap");
    }
  }
  SerDeUtils.getJSONString(row, soi);
  // Now serialize
  Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
  assertNotNull(put);
}
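The bare SerDeUtils.getJSONString call above discards its return value and appears to exist only to exercise that code path; when debugging, the rendered row can also be inspected, as a quick sketch:

  // Sketch: render the deserialized row as JSON for logging.
  String json = SerDeUtils.getJSONString(row, soi);
  System.out.println(json);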