Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class JsonSerDe, method serialize.
/**
 * Given an object and object inspector pair, traverse the object
 * and generate a Text representation of the object.
 */
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  StringBuilder sb = new StringBuilder();
  try {
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> structFields = soi.getAllStructFieldRefs();
    assert (columnNames.size() == structFields.size());
    if (obj == null) {
      sb.append("null");
    } else {
      sb.append(SerDeUtils.LBRACE);
      for (int i = 0; i < structFields.size(); i++) {
        if (i > 0) {
          sb.append(SerDeUtils.COMMA);
        }
        appendWithQuotes(sb, columnNames.get(i));
        sb.append(SerDeUtils.COLON);
        buildJSONString(sb, soi.getStructFieldData(obj, structFields.get(i)),
            structFields.get(i).getFieldObjectInspector());
      }
      sb.append(SerDeUtils.RBRACE);
    }
  } catch (IOException e) {
    LOG.warn("Error generating json text from object.", e);
    throw new SerDeException(e);
  }
  return new Text(sb.toString());
}
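For orientation, here is a minimal sketch of driving this serialize method directly. It assumes a JsonSerDe instance (serde) already initialized with two columns, name:string and age:int; the standard struct inspector and positional row layout below are illustrative assumptions, not part of the snippet above.

// A minimal, hedged sketch: serde is assumed to be a JsonSerDe initialized
// with columns "name,age" of types "string,int".
List<String> names = Arrays.asList("name", "age");
List<ObjectInspector> inspectors = Arrays.<ObjectInspector>asList(
    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
    PrimitiveObjectInspectorFactory.javaIntObjectInspector);
StructObjectInspector rowOI =
    ObjectInspectorFactory.getStandardStructObjectInspector(names, inspectors);
// Under the standard struct inspector, a row is a positional list of field values.
Object row = Arrays.asList((Object) "alice", 30);
Text json = (Text) serde.serialize(row, rowOI);
// Expected output: {"name":"alice","age":30}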
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class JsonSerDe, method buildJSONString.
// TODO : code section copied over from SerDeUtils because of non-standard JSON production there,
// which should use quotes for all field names. We should fix it there, and then remove this copy.
// See http://jackson.codehaus.org/1.7.3/javadoc/org/codehaus/jackson/JsonParser.Feature.html#ALLOW_UNQUOTED_FIELD_NAMES
// for details. Trying to configure Jackson to tolerate unquoted field names doesn't seem to work
// (compilation failure when attempting to use that feature), so the production itself has to change.
// Also, this method throws IOException when BINARY is detected.
private static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) throws IOException {
  switch (oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      if (o == null) {
        sb.append("null");
      } else {
        switch (poi.getPrimitiveCategory()) {
          case BOOLEAN: {
            boolean b = ((BooleanObjectInspector) poi).get(o);
            sb.append(b ? "true" : "false");
            break;
          }
          case BYTE: {
            sb.append(((ByteObjectInspector) poi).get(o));
            break;
          }
          case SHORT: {
            sb.append(((ShortObjectInspector) poi).get(o));
            break;
          }
          case INT: {
            sb.append(((IntObjectInspector) poi).get(o));
            break;
          }
          case LONG: {
            sb.append(((LongObjectInspector) poi).get(o));
            break;
          }
          case FLOAT: {
            sb.append(((FloatObjectInspector) poi).get(o));
            break;
          }
          case DOUBLE: {
            sb.append(((DoubleObjectInspector) poi).get(o));
            break;
          }
          case STRING: {
            String s = SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o));
            appendWithQuotes(sb, s);
            break;
          }
          case BINARY: {
            throw new IOException("JsonSerDe does not support BINARY type");
          }
          case DATE: {
            Date d = ((DateObjectInspector) poi).getPrimitiveJavaObject(o);
            appendWithQuotes(sb, d.toString());
            break;
          }
          case TIMESTAMP: {
            Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(o);
            appendWithQuotes(sb, t.toString());
            break;
          }
          case DECIMAL: {
            sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(o));
            break;
          }
          case VARCHAR: {
            String s = SerDeUtils.escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
            appendWithQuotes(sb, s);
            break;
          }
          case CHAR: {
            // This should use HiveChar.getPaddedValue() but it's protected; currently (v0.13)
            // HiveChar.toString() returns getPaddedValue().
            String s = SerDeUtils.escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
            appendWithQuotes(sb, s);
            break;
          }
          default:
            throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
        }
      }
      break;
    }
    case LIST: {
      ListObjectInspector loi = (ListObjectInspector) oi;
      ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
      List<?> olist = loi.getList(o);
      if (olist == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACKET);
        for (int i = 0; i < olist.size(); i++) {
          if (i > 0) {
            sb.append(SerDeUtils.COMMA);
          }
          buildJSONString(sb, olist.get(i), listElementObjectInspector);
        }
        sb.append(SerDeUtils.RBRACKET);
      }
      break;
    }
    case MAP: {
      MapObjectInspector moi = (MapObjectInspector) oi;
      ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
      ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
      Map<?, ?> omap = moi.getMap(o);
      if (omap == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        boolean first = true;
        for (Object entry : omap.entrySet()) {
          if (first) {
            first = false;
          } else {
            sb.append(SerDeUtils.COMMA);
          }
          Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
          StringBuilder keyBuilder = new StringBuilder();
          buildJSONString(keyBuilder, e.getKey(), mapKeyObjectInspector);
          String keyString = keyBuilder.toString().trim();
          if ((!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE)) {
            appendWithQuotes(sb, keyString);
          } else {
            sb.append(keyString);
          }
          sb.append(SerDeUtils.COLON);
          buildJSONString(sb, e.getValue(), mapValueObjectInspector);
        }
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    case STRUCT: {
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> structFields = soi.getAllStructFieldRefs();
      if (o == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        for (int i = 0; i < structFields.size(); i++) {
          if (i > 0) {
            sb.append(SerDeUtils.COMMA);
          }
          appendWithQuotes(sb, structFields.get(i).getFieldName());
          sb.append(SerDeUtils.COLON);
          buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)),
              structFields.get(i).getFieldObjectInspector());
        }
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    case UNION: {
      UnionObjectInspector uoi = (UnionObjectInspector) oi;
      if (o == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        sb.append(uoi.getTag(o));
        sb.append(SerDeUtils.COLON);
        buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)));
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    default:
      throw new RuntimeException("Unknown type in ObjectInspector!");
  }
}
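A notable detail in the MAP branch: JSON object keys must be strings, so a non-string key is first rendered into a scratch buffer and then re-emitted with quotes when it did not come out quoted. A small hedged sketch of the effect (buildJSONString is private; the direct call is for illustration only):

MapObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
    PrimitiveObjectInspectorFactory.javaStringObjectInspector);
Map<Integer, String> m = new HashMap<Integer, String>();
m.put(1, "x");
StringBuilder sb = new StringBuilder();
buildJSONString(sb, m, mapOI);
// The int key renders as the bare token 1, is detected as unquoted,
// and is wrapped in quotes, yielding valid JSON: {"1":"x"}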
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class InternalUtil, method getObjectInspector.
private static ObjectInspector getObjectInspector(TypeInfo type) throws IOException {
  switch (type.getCategory()) {
    case PRIMITIVE:
      PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
    case MAP:
      MapTypeInfo mapType = (MapTypeInfo) type;
      MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(
          getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
      return mapInspector;
    case LIST:
      ListTypeInfo listType = (ListTypeInfo) type;
      ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(
          getObjectInspector(listType.getListElementTypeInfo()));
      return listInspector;
    case STRUCT:
      StructTypeInfo structType = (StructTypeInfo) type;
      List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
      List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
      for (TypeInfo fieldType : fieldTypes) {
        fieldInspectors.add(getObjectInspector(fieldType));
      }
      StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
          structType.getAllStructFieldNames(), fieldInspectors);
      return structInspector;
    default:
      throw new IOException("Unknown field schema type");
  }
}
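A minimal sketch of exercising this recursion, assuming access to the private method; TypeInfoUtils.getTypeInfoFromTypeString parses a Hive type string into the TypeInfo tree that the method walks:

TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,tags:array<string>>");
ObjectInspector oi = getObjectInspector(t);
// For a STRUCT this yields a standard StructObjectInspector whose field inspectors
// come from the same recursion: a Java int inspector for id, and a standard list
// inspector over Java string inspectors for tags.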
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class TestLazyHBaseObject, method testLazyHBaseRow3.
/**
 * Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
 * HBase column family/column qualifier pairs. The column types are primitive and fields
 * are stored in binary format in HBase.
 * @throws SerDeException
 */
public void testLazyHBaseRow3() throws SerDeException {
  List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
      "string,int,tinyint,smallint,bigint,float,double,string,boolean");
  List<String> fieldNames = Arrays.asList(new String[] {
      "key", "c_int", "c_byte", "c_short", "c_long", "c_float", "c_double", "c_string", "c_bool" });
  Text nullSequence = new Text("\\N");
  String hbaseColumnsMapping = ":key#str,cf-int:cq-int#bin,cf-byte:cq-byte#bin,"
      + "cf-short:cq-short#bin,cf-long:cq-long#bin,cf-float:cq-float#bin,cf-double:cq-double#bin,"
      + "cf-string:cq-string#str,cf-bool:cq-bool#bin";
  ColumnMappings columnMappings = null;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
  } catch (SerDeException e) {
    fail(e.toString());
  }
  ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
  for (int i = 0; i < columnsMapping.length; i++) {
    ColumnMapping colMap = columnsMapping[i];
    if (i == 0 || i == 7) {
      colMap.binaryStorage.add(false);
    } else {
      colMap.binaryStorage.add(true);
    }
  }
  ObjectInspector oi = LazyFactory.createLazyStructInspector(fieldNames, fieldTypeInfos,
      new byte[] { ' ', ':', '=' }, nullSequence, false, false, (byte) 0);
  LazyHBaseRow o = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);
  byte[] rowKey = "row-key".getBytes();
  List<KeyValue> kvs = new ArrayList<KeyValue>();
  byte[] value;
  for (int i = 1; i < columnsMapping.length; i++) {
    switch (i) {
      case 1:
        value = Bytes.toBytes(1);
        break;
      case 2:
        value = new byte[] { (byte) 1 };
        break;
      case 3:
        value = Bytes.toBytes((short) 1);
        break;
      case 4:
        value = Bytes.toBytes((long) 1);
        break;
      case 5:
        value = Bytes.toBytes(1.0F);
        break;
      case 6:
        value = Bytes.toBytes(1.0);
        break;
      case 7:
        value = "Hadoop, Hive, with HBase storage handler.".getBytes();
        break;
      case 8:
        value = Bytes.toBytes(true);
        break;
      default:
        throw new RuntimeException("Not expected: " + i);
    }
    ColumnMapping colMap = columnsMapping[i];
    kvs.add(new KeyValue(rowKey, colMap.familyNameBytes, colMap.qualifierNameBytes, value));
  }
  Collections.sort(kvs, KeyValue.COMPARATOR);
  Result result = new Result(kvs);
  o.init(result);
  List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = ((StructObjectInspector) oi).getStructFieldData(o, fieldRefs.get(i));
    assert (fieldData != null);
    assert (fieldData instanceof LazyPrimitive<?, ?>);
    Writable writable = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    switch (i) {
      case 0:
        Text text = new Text("row-key");
        assertEquals(text, writable);
        break;
      case 1:
        IntWritable iw = new IntWritable(1);
        assertEquals(iw, writable);
        break;
      case 2:
        ByteWritable bw = new ByteWritable((byte) 1);
        assertEquals(bw, writable);
        break;
      case 3:
        ShortWritable sw = new ShortWritable((short) 1);
        assertEquals(sw, writable);
        break;
      case 4:
        LongWritable lw = new LongWritable(1);
        assertEquals(lw, writable);
        break;
      case 5:
        FloatWritable fw = new FloatWritable(1.0F);
        assertEquals(fw, writable);
        break;
      case 6:
        DoubleWritable dw = new DoubleWritable(1.0);
        assertEquals(dw, writable);
        break;
      case 7:
        Text t = new Text("Hadoop, Hive, with HBase storage handler.");
        assertEquals(t, writable);
        break;
      case 8:
        BooleanWritable boolWritable = new BooleanWritable(true);
        assertEquals(boolWritable, writable);
        break;
      default:
        fail("Error: Unanticipated value in deserializing fields for HBaseSerDe.");
        break;
    }
  }
}
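The #bin suffixes in the column mapping are what make this test work: they tell the lazy deserializer to decode each cell with HBase's native binary encodings rather than as UTF-8 text. A small illustration of the round trip for the c_int column (Bytes is org.apache.hadoop.hbase.util.Bytes):

byte[] cell = Bytes.toBytes(1);   // 4-byte big-endian encoding: [0, 0, 0, 1]
int decoded = Bytes.toInt(cell);  // 1, surfaced above as IntWritable(1)
// The :key and cf-string columns are mapped with #str, so their bytes are
// compared as plain UTF-8 Text instead.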
Use of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector in project hive by apache.
The class TestSerDe, method serialize.
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  if (objInspector.getCategory() != Category.STRUCT) {
    throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: "
        + objInspector.getTypeName());
  }
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < fields.size(); i++) {
    if (i > 0) {
      sb.append(separator);
    }
    Object column = soi.getStructFieldData(obj, fields.get(i));
    if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
      // For a primitive object, serialize to plain string
      sb.append(column == null ? nullString : column.toString());
    } else {
      // For a complex object, serialize to JSON format
      sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
    }
  }
  serializeCache.set(sb.toString());
  return serializeCache;
}
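A minimal usage sketch, assuming a TestSerDe instance (testSerDe) initialized with a tab separator and columns id:int, tags:array<string>; the inspector construction mirrors the struct handling above:

StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("id", "tags"),
    Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector)));
Object row = Arrays.asList((Object) 1, Arrays.asList("a", "b"));
Writable out = testSerDe.serialize(row, rowOI);
// Expected text: 1<TAB>["a","b"] -- the primitive field uses toString(),
// the complex field goes through SerDeUtils.getJSONString.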