Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector in project hive by apache.
The class ObjectInspectorUtils, method compare.
/**
* Compare two objects with their respective ObjectInspectors.
* If nullValueOpt is MAXVALUE, null is treated as the maximum value.
* If nullValueOpt is MINVALUE, null is treated as the minimum value.
*/
public static int compare(Object o1, ObjectInspector oi1, Object o2, ObjectInspector oi2, MapEqualComparer mapEqualComparer, NullValueOption nullValueOpt) {
if (oi1.getCategory() != oi2.getCategory()) {
return oi1.getCategory().compareTo(oi2.getCategory());
}
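// nullCmpRtn is returned when only o1 is null, and its negation when only o2 is null:
// MAXVALUE sorts nulls after all non-null values, MINVALUE sorts them before.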
int nullCmpRtn = -1;
switch(nullValueOpt) {
case MAXVALUE:
nullCmpRtn = 1;
break;
case MINVALUE:
nullCmpRtn = -1;
break;
}
if (o1 == null) {
return o2 == null ? 0 : nullCmpRtn;
} else if (o2 == null) {
return -nullCmpRtn;
}
switch(oi1.getCategory()) {
case PRIMITIVE:
{
PrimitiveObjectInspector poi1 = ((PrimitiveObjectInspector) oi1);
PrimitiveObjectInspector poi2 = ((PrimitiveObjectInspector) oi2);
if (poi1.getPrimitiveCategory() != poi2.getPrimitiveCategory()) {
return poi1.getPrimitiveCategory().compareTo(poi2.getPrimitiveCategory());
}
switch(poi1.getPrimitiveCategory()) {
case VOID:
return 0;
case BOOLEAN:
{
int v1 = ((BooleanObjectInspector) poi1).get(o1) ? 1 : 0;
int v2 = ((BooleanObjectInspector) poi2).get(o2) ? 1 : 0;
return v1 - v2;
}
case BYTE:
{
int v1 = ((ByteObjectInspector) poi1).get(o1);
int v2 = ((ByteObjectInspector) poi2).get(o2);
return v1 - v2;
}
case SHORT:
{
int v1 = ((ShortObjectInspector) poi1).get(o1);
int v2 = ((ShortObjectInspector) poi2).get(o2);
return v1 - v2;
}
case INT:
{
int v1 = ((IntObjectInspector) poi1).get(o1);
int v2 = ((IntObjectInspector) poi2).get(o2);
return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
}
case LONG:
{
long v1 = ((LongObjectInspector) poi1).get(o1);
long v2 = ((LongObjectInspector) poi2).get(o2);
return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
}
case FLOAT:
{
float v1 = ((FloatObjectInspector) poi1).get(o1);
float v2 = ((FloatObjectInspector) poi2).get(o2);
// The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
if (v1 == 0.0f && v2 == 0.0f) {
return 0;
} else {
// Float.compare() treats -0.0 and 0.0 as different
return Float.compare(v1, v2);
}
}
case DOUBLE:
{
double v1 = ((DoubleObjectInspector) poi1).get(o1);
double v2 = ((DoubleObjectInspector) poi2).get(o2);
// The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
if (v1 == 0.0d && v2 == 0.0d) {
return 0;
} else {
// Double.compare() treats -0.0 and 0.0 as different
return Double.compare(v1, v2);
}
}
case STRING:
{
if (poi1.preferWritable() || poi2.preferWritable()) {
Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1 : t1.compareTo(t2));
} else {
String s1 = (String) poi1.getPrimitiveJavaObject(o1);
String s2 = (String) poi2.getPrimitiveJavaObject(o2);
return s1 == null ? (s2 == null ? 0 : -1) : (s2 == null ? 1 : s1.compareTo(s2));
}
}
case CHAR:
{
HiveCharWritable t1 = ((HiveCharObjectInspector) poi1).getPrimitiveWritableObject(o1);
HiveCharWritable t2 = ((HiveCharObjectInspector) poi2).getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
case VARCHAR:
{
HiveVarcharWritable t1 = ((HiveVarcharObjectInspector) poi1).getPrimitiveWritableObject(o1);
HiveVarcharWritable t2 = ((HiveVarcharObjectInspector) poi2).getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
case BINARY:
{
BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
return bw1.compareTo(bw2);
}
case DATE:
{
DateWritable d1 = ((DateObjectInspector) poi1).getPrimitiveWritableObject(o1);
DateWritable d2 = ((DateObjectInspector) poi2).getPrimitiveWritableObject(o2);
return d1.compareTo(d2);
}
case TIMESTAMP:
{
TimestampWritable t1 = ((TimestampObjectInspector) poi1).getPrimitiveWritableObject(o1);
TimestampWritable t2 = ((TimestampObjectInspector) poi2).getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
case INTERVAL_YEAR_MONTH:
{
HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1).getPrimitiveWritableObject(o1);
HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2).getPrimitiveWritableObject(o2);
return i1.compareTo(i2);
}
case INTERVAL_DAY_TIME:
{
HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1).getPrimitiveWritableObject(o1);
HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2).getPrimitiveWritableObject(o2);
return i1.compareTo(i2);
}
case DECIMAL:
{
HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1).getPrimitiveWritableObject(o1);
HiveDecimalWritable t2 = ((HiveDecimalObjectInspector) poi2).getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
default:
{
throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory());
}
}
}
case STRUCT:
{
StructObjectInspector soi1 = (StructObjectInspector) oi1;
StructObjectInspector soi2 = (StructObjectInspector) oi2;
List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
int minimum = Math.min(fields1.size(), fields2.size());
for (int i = 0; i < minimum; i++) {
int r = compare(soi1.getStructFieldData(o1, fields1.get(i)), fields1.get(i).getFieldObjectInspector(), soi2.getStructFieldData(o2, fields2.get(i)), fields2.get(i).getFieldObjectInspector(), mapEqualComparer, nullValueOpt);
if (r != 0) {
return r;
}
}
return fields1.size() - fields2.size();
}
case LIST:
{
ListObjectInspector loi1 = (ListObjectInspector) oi1;
ListObjectInspector loi2 = (ListObjectInspector) oi2;
int minimum = Math.min(loi1.getListLength(o1), loi2.getListLength(o2));
for (int i = 0; i < minimum; i++) {
int r = compare(loi1.getListElement(o1, i), loi1.getListElementObjectInspector(), loi2.getListElement(o2, i), loi2.getListElementObjectInspector(), mapEqualComparer, nullValueOpt);
if (r != 0) {
return r;
}
}
return loi1.getListLength(o1) - loi2.getListLength(o2);
}
case MAP:
{
if (mapEqualComparer == null) {
throw new RuntimeException("Compare on map type not supported!");
} else {
return mapEqualComparer.compare(o1, (MapObjectInspector) oi1, o2, (MapObjectInspector) oi2);
}
}
case UNION:
{
UnionObjectInspector uoi1 = (UnionObjectInspector) oi1;
UnionObjectInspector uoi2 = (UnionObjectInspector) oi2;
byte tag1 = uoi1.getTag(o1);
byte tag2 = uoi2.getTag(o2);
if (tag1 != tag2) {
return tag1 - tag2;
}
return compare(uoi1.getField(o1), uoi1.getObjectInspectors().get(tag1), uoi2.getField(o2), uoi2.getObjectInspectors().get(tag2), mapEqualComparer, nullValueOpt);
}
default:
throw new RuntimeException("Compare on unknown type: " + oi1.getCategory());
}
}
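For orientation, below is a minimal hypothetical caller of this method. It orders a null against a Java String through the standard java string inspector, treating null as the minimum value. It assumes NullValueOption is the nested enum referenced in the signature above; the class name and values are purely illustrative.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CompareSketch {
  public static void main(String[] args) {
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // MINVALUE sorts nulls first, so comparing null against "abc" yields a negative result (-1 here).
    int r = ObjectInspectorUtils.compare(null, stringOI, "abc", stringOI,
        null /* mapEqualComparer: not needed for primitives */,
        ObjectInspectorUtils.NullValueOption.MINVALUE);
    System.out.println(r);
  }
}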
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector in project hive by apache.
The class GenericUDFInBloomFilter, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
// Return null if either of the arguments is null
if (arguments[0].get() == null || arguments[1].get() == null) {
return null;
}
if (!initializedBloomFilter) {
// Set up the bloom filter once
try {
BytesWritable bw = (BytesWritable) arguments[1].get();
byte[] bytes = new byte[bw.getLength()];
System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
bloomFilter = BloomFilter.deserialize(new ByteArrayInputStream(bytes));
} catch (IOException e) {
throw new HiveException(e);
}
initializedBloomFilter = true;
}
// Check if the value is in the bloom filter
switch(((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
case BOOLEAN:
boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vBoolean ? 1 : 0);
case BYTE:
byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vByte);
case SHORT:
short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vShort);
case INT:
int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vInt);
case LONG:
long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testLong(vLong);
case FLOAT:
float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testDouble(vFloat);
case DOUBLE:
double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
return bloomFilter.testDouble(vDouble);
case DECIMAL:
HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
int startIdx = vDecimal.toBytes(scratchBuffer);
return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
case DATE:
DateWritable vDate = ((DateObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testLong(vDate.getDays());
case TIMESTAMP:
Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).getPrimitiveJavaObject(arguments[0].get());
return bloomFilter.testLong(vTimeStamp.getTime());
case CHAR:
Text vChar = ((HiveCharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
case VARCHAR:
Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getTextValue();
return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
case STRING:
Text vString = ((StringObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
case BINARY:
BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
default:
throw new UDFArgumentTypeException(0, "Bad primitive category " + ((PrimitiveObjectInspector) valObjectInspector).getPrimitiveCategory());
}
}
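The lazy setup above deserializes the second argument into a bloom filter once and then probes it for every row. Below is a rough, self-contained sketch of that round trip, assuming org.apache.hive.common.util.BloomFilter offers a constructor taking an expected entry count and a static serialize counterpart to the deserialize call used above; values are illustrative.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hive.common.util.BloomFilter;

public class BloomFilterRoundTripSketch {
  public static void main(String[] args) throws IOException {
    BloomFilter bf = new BloomFilter(1000); // expected number of entries
    bf.addLong(42L);
    // Serialize to bytes, as the producer of this filter argument would.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BloomFilter.serialize(out, bf);
    // Deserialize and probe, mirroring the lazy setup in evaluate().
    BloomFilter copy = BloomFilter.deserialize(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(copy.testLong(42L)); // true
    System.out.println(copy.testLong(7L));  // usually false; false positives are possible
  }
}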
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector in project hive by apache.
The class SerDeUtils, method buildJSONString.
static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi, String nullStr) {
switch(oi.getCategory()) {
case PRIMITIVE:
{
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
if (o == null) {
sb.append(nullStr);
} else {
switch(poi.getPrimitiveCategory()) {
case BOOLEAN:
{
boolean b = ((BooleanObjectInspector) poi).get(o);
sb.append(b ? "true" : "false");
break;
}
case BYTE:
{
sb.append(((ByteObjectInspector) poi).get(o));
break;
}
case SHORT:
{
sb.append(((ShortObjectInspector) poi).get(o));
break;
}
case INT:
{
sb.append(((IntObjectInspector) poi).get(o));
break;
}
case LONG:
{
sb.append(((LongObjectInspector) poi).get(o));
break;
}
case FLOAT:
{
sb.append(((FloatObjectInspector) poi).get(o));
break;
}
case DOUBLE:
{
sb.append(((DoubleObjectInspector) poi).get(o));
break;
}
case STRING:
{
sb.append('"');
sb.append(escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o)));
sb.append('"');
break;
}
case CHAR:
{
sb.append('"');
sb.append(escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString()));
sb.append('"');
break;
}
case VARCHAR:
{
sb.append('"');
sb.append(escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString()));
sb.append('"');
break;
}
case DATE:
{
sb.append('"');
sb.append(((DateObjectInspector) poi).getPrimitiveWritableObject(o));
sb.append('"');
break;
}
case TIMESTAMP:
{
sb.append('"');
sb.append(((TimestampObjectInspector) poi).getPrimitiveWritableObject(o));
sb.append('"');
break;
}
case BINARY:
{
BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
Text txt = new Text();
txt.set(bw.getBytes(), 0, bw.getLength());
sb.append(txt.toString());
break;
}
case DECIMAL:
{
sb.append(((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o));
break;
}
case INTERVAL_YEAR_MONTH:
{
sb.append(((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveJavaObject(o));
break;
}
case INTERVAL_DAY_TIME:
{
sb.append(((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveJavaObject(o));
break;
}
default:
throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
}
}
break;
}
case LIST:
{
ListObjectInspector loi = (ListObjectInspector) oi;
ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
List<?> olist = loi.getList(o);
if (olist == null) {
sb.append(nullStr);
} else {
sb.append(LBRACKET);
for (int i = 0; i < olist.size(); i++) {
if (i > 0) {
sb.append(COMMA);
}
buildJSONString(sb, olist.get(i), listElementObjectInspector, JSON_NULL);
}
sb.append(RBRACKET);
}
break;
}
case MAP:
{
MapObjectInspector moi = (MapObjectInspector) oi;
ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
Map<?, ?> omap = moi.getMap(o);
if (omap == null) {
sb.append(nullStr);
} else {
sb.append(LBRACE);
boolean first = true;
for (Object entry : omap.entrySet()) {
if (first) {
first = false;
} else {
sb.append(COMMA);
}
Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
buildJSONString(sb, e.getKey(), mapKeyObjectInspector, JSON_NULL);
sb.append(COLON);
buildJSONString(sb, e.getValue(), mapValueObjectInspector, JSON_NULL);
}
sb.append(RBRACE);
}
break;
}
case STRUCT:
{
StructObjectInspector soi = (StructObjectInspector) oi;
List<? extends StructField> structFields = soi.getAllStructFieldRefs();
if (o == null) {
sb.append(nullStr);
} else {
sb.append(LBRACE);
for (int i = 0; i < structFields.size(); i++) {
if (i > 0) {
sb.append(COMMA);
}
sb.append(QUOTE);
sb.append(structFields.get(i).getFieldName());
sb.append(QUOTE);
sb.append(COLON);
buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), structFields.get(i).getFieldObjectInspector(), JSON_NULL);
}
sb.append(RBRACE);
}
break;
}
case UNION:
{
UnionObjectInspector uoi = (UnionObjectInspector) oi;
if (o == null) {
sb.append(nullStr);
} else {
sb.append(LBRACE);
sb.append(uoi.getTag(o));
sb.append(COLON);
buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)), JSON_NULL);
sb.append(RBRACE);
}
break;
}
default:
throw new RuntimeException("Unknown type in ObjectInspector!");
}
}
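Callers normally reach buildJSONString through the public SerDeUtils.getJSONString entry point; LBRACKET, RBRACE, COLON, and the other constants above are single-character delimiters defined on SerDeUtils. Below is a minimal sketch of that entry point, assuming the standard struct and primitive inspector factories; field names and values are illustrative.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class JsonStringSketch {
  public static void main(String[] args) {
    // A struct<name:string, score:double> described by a standard struct inspector.
    ObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("name", "score"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector));
    Object row = Arrays.asList("alice", 3.5);
    // Delegates to buildJSONString, printing something like {"name":"alice","score":3.5}
    System.out.println(SerDeUtils.getJSONString(row, structOI));
  }
}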
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector in project hive by apache.
The class JsonSerDe, method buildJSONString.
// TODO: This code section is copied over from SerDeUtils because the JSON produced there is
// non-standard: it should use quotes for all field names. We should fix that in SerDeUtils, and then remove this copy.
// See http://jackson.codehaus.org/1.7.3/javadoc/org/codehaus/jackson/JsonParser.Feature.html#ALLOW_UNQUOTED_FIELD_NAMES
// for details - trying to enable Jackson to ignore that doesn't seem to work (compilation failure
// when attempting to use that feature), so the production itself has to change here.
// Also, this throws IOException when a BINARY type is detected.
private static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) throws IOException {
switch(oi.getCategory()) {
case PRIMITIVE:
{
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
if (o == null) {
sb.append("null");
} else {
switch(poi.getPrimitiveCategory()) {
case BOOLEAN:
{
boolean b = ((BooleanObjectInspector) poi).get(o);
sb.append(b ? "true" : "false");
break;
}
case BYTE:
{
sb.append(((ByteObjectInspector) poi).get(o));
break;
}
case SHORT:
{
sb.append(((ShortObjectInspector) poi).get(o));
break;
}
case INT:
{
sb.append(((IntObjectInspector) poi).get(o));
break;
}
case LONG:
{
sb.append(((LongObjectInspector) poi).get(o));
break;
}
case FLOAT:
{
sb.append(((FloatObjectInspector) poi).get(o));
break;
}
case DOUBLE:
{
sb.append(((DoubleObjectInspector) poi).get(o));
break;
}
case STRING:
{
String s = SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o));
appendWithQuotes(sb, s);
break;
}
case BINARY:
{
throw new IOException("JsonSerDe does not support BINARY type");
}
case DATE:
Date d = ((DateObjectInspector) poi).getPrimitiveJavaObject(o);
appendWithQuotes(sb, d.toString());
break;
case TIMESTAMP:
{
Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(o);
appendWithQuotes(sb, t.toString());
break;
}
case DECIMAL:
sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(o));
break;
case VARCHAR:
{
String s = SerDeUtils.escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
appendWithQuotes(sb, s);
break;
}
case CHAR:
{
// This should use HiveChar.getPaddedValue(), but it's protected; currently (v0.13)
// HiveChar.toString() returns getPaddedValue().
String s = SerDeUtils.escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
appendWithQuotes(sb, s);
break;
}
default:
throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
}
}
break;
}
case LIST:
{
ListObjectInspector loi = (ListObjectInspector) oi;
ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
List<?> olist = loi.getList(o);
if (olist == null) {
sb.append("null");
} else {
sb.append(SerDeUtils.LBRACKET);
for (int i = 0; i < olist.size(); i++) {
if (i > 0) {
sb.append(SerDeUtils.COMMA);
}
buildJSONString(sb, olist.get(i), listElementObjectInspector);
}
sb.append(SerDeUtils.RBRACKET);
}
break;
}
case MAP:
{
MapObjectInspector moi = (MapObjectInspector) oi;
ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
Map<?, ?> omap = moi.getMap(o);
if (omap == null) {
sb.append("null");
} else {
sb.append(SerDeUtils.LBRACE);
boolean first = true;
for (Object entry : omap.entrySet()) {
if (first) {
first = false;
} else {
sb.append(SerDeUtils.COMMA);
}
Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
StringBuilder keyBuilder = new StringBuilder();
buildJSONString(keyBuilder, e.getKey(), mapKeyObjectInspector);
String keyString = keyBuilder.toString().trim();
if ((!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE)) {
appendWithQuotes(sb, keyString);
} else {
sb.append(keyString);
}
sb.append(SerDeUtils.COLON);
buildJSONString(sb, e.getValue(), mapValueObjectInspector);
}
sb.append(SerDeUtils.RBRACE);
}
break;
}
case STRUCT:
{
StructObjectInspector soi = (StructObjectInspector) oi;
List<? extends StructField> structFields = soi.getAllStructFieldRefs();
if (o == null) {
sb.append("null");
} else {
sb.append(SerDeUtils.LBRACE);
for (int i = 0; i < structFields.size(); i++) {
if (i > 0) {
sb.append(SerDeUtils.COMMA);
}
appendWithQuotes(sb, structFields.get(i).getFieldName());
sb.append(SerDeUtils.COLON);
buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), structFields.get(i).getFieldObjectInspector());
}
sb.append(SerDeUtils.RBRACE);
}
break;
}
case UNION:
{
UnionObjectInspector uoi = (UnionObjectInspector) oi;
if (o == null) {
sb.append("null");
} else {
sb.append(SerDeUtils.LBRACE);
sb.append(uoi.getTag(o));
sb.append(SerDeUtils.COLON);
buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)));
sb.append(SerDeUtils.RBRACE);
}
break;
}
default:
throw new RuntimeException("Unknown type in ObjectInspector!");
}
}
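appendWithQuotes is not shown in this excerpt; judging from the call sites, it is presumably a small helper along the lines of the sketch below. This is a hypothetical reconstruction, not the project's actual code; SerDeUtils.QUOTE is the double-quote character used throughout the snippet.

import org.apache.hadoop.hive.serde2.SerDeUtils;

public class AppendWithQuotesSketch {
  // Hypothetical reconstruction of the helper used above; the real JsonSerDe code may differ.
  static StringBuilder appendWithQuotes(StringBuilder sb, String value) {
    return sb == null ? null
        : sb.append(SerDeUtils.QUOTE).append(value).append(SerDeUtils.QUOTE);
  }

  public static void main(String[] args) {
    System.out.println(appendWithQuotes(new StringBuilder(), "field")); // "field"
  }
}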
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector in project hive by apache.
The class LazyBinarySerDe, method serialize.
/**
* A recursive function that serializes an object to a byte buffer based on its
* object inspector.
*
* @param byteStream
* the byte stream storing the serialization data
* @param obj
* the object to serialize
* @param objInspector
* the object inspector
* @param skipLengthPrefix a boolean indicating whether a length prefix is
* needed for list/map/struct
* @param warnedOnceNullMapKey a boolean indicating whether a warning
* has been issued once already when encountering null map keys
*/
public static void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector, boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) throws SerDeException {
// do nothing for null object
if (null == obj) {
return;
}
switch(objInspector.getCategory()) {
case PRIMITIVE:
{
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
switch(poi.getPrimitiveCategory()) {
case VOID:
{
return;
}
case BOOLEAN:
{
boolean v = ((BooleanObjectInspector) poi).get(obj);
byteStream.write((byte) (v ? 1 : 0));
return;
}
case BYTE:
{
ByteObjectInspector boi = (ByteObjectInspector) poi;
byte v = boi.get(obj);
byteStream.write(v);
return;
}
case SHORT:
{
ShortObjectInspector spoi = (ShortObjectInspector) poi;
short v = spoi.get(obj);
byteStream.write((byte) (v >> 8));
byteStream.write((byte) (v));
return;
}
case INT:
{
IntObjectInspector ioi = (IntObjectInspector) poi;
int v = ioi.get(obj);
LazyBinaryUtils.writeVInt(byteStream, v);
return;
}
case LONG:
{
LongObjectInspector loi = (LongObjectInspector) poi;
long v = loi.get(obj);
LazyBinaryUtils.writeVLong(byteStream, v);
return;
}
case FLOAT:
{
FloatObjectInspector foi = (FloatObjectInspector) poi;
int v = Float.floatToIntBits(foi.get(obj));
byteStream.write((byte) (v >> 24));
byteStream.write((byte) (v >> 16));
byteStream.write((byte) (v >> 8));
byteStream.write((byte) (v));
return;
}
case DOUBLE:
{
DoubleObjectInspector doi = (DoubleObjectInspector) poi;
LazyBinaryUtils.writeDouble(byteStream, doi.get(obj));
return;
}
case STRING:
{
StringObjectInspector soi = (StringObjectInspector) poi;
Text t = soi.getPrimitiveWritableObject(obj);
serializeText(byteStream, t, skipLengthPrefix);
return;
}
case CHAR:
{
HiveCharObjectInspector hcoi = (HiveCharObjectInspector) poi;
Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
serializeText(byteStream, t, skipLengthPrefix);
return;
}
case VARCHAR:
{
HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
serializeText(byteStream, t, skipLengthPrefix);
return;
}
case BINARY:
{
BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
BytesWritable bw = baoi.getPrimitiveWritableObject(obj);
int length = bw.getLength();
if (!skipLengthPrefix) {
LazyBinaryUtils.writeVInt(byteStream, length);
} else {
if (length == 0) {
throw new RuntimeException("LazyBinaryColumnarSerde cannot serialize a non-null zero " + "length binary field. Consider using either LazyBinarySerde or ColumnarSerde.");
}
}
byteStream.write(bw.getBytes(), 0, length);
return;
}
case DATE:
{
DateWritable d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
writeDateToByteStream(byteStream, d);
return;
}
case TIMESTAMP:
{
TimestampObjectInspector toi = (TimestampObjectInspector) poi;
TimestampWritable t = toi.getPrimitiveWritableObject(obj);
t.writeToByteStream(byteStream);
return;
}
case INTERVAL_YEAR_MONTH:
{
HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi).getPrimitiveWritableObject(obj);
intervalYearMonth.writeToByteStream(byteStream);
return;
}
case INTERVAL_DAY_TIME:
{
HiveIntervalDayTimeWritable intervalDayTime = ((HiveIntervalDayTimeObjectInspector) poi).getPrimitiveWritableObject(obj);
intervalDayTime.writeToByteStream(byteStream);
return;
}
case DECIMAL:
{
HiveDecimalObjectInspector bdoi = (HiveDecimalObjectInspector) poi;
HiveDecimalWritable t = bdoi.getPrimitiveWritableObject(obj);
if (t == null) {
return;
}
writeToByteStream(byteStream, t);
return;
}
default:
{
throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
}
}
}
case LIST:
{
ListObjectInspector loi = (ListObjectInspector) objInspector;
ObjectInspector eoi = loi.getListElementObjectInspector();
int byteSizeStart = 0;
int listStart = 0;
if (!skipLengthPrefix) {
// 1/ reserve space for the byte size of the list,
// which is an integer and takes four bytes
byteSizeStart = byteStream.getLength();
byteStream.reserve(4);
listStart = byteStream.getLength();
}
// 2/ write the size of the list as a VInt
int size = loi.getListLength(obj);
LazyBinaryUtils.writeVInt(byteStream, size);
// 3/ write the null bytes
byte nullByte = 0;
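// Each element contributes one bit (1 = non-null), packed least-significant bit first;
// the byte is flushed after every eighth element or at the last element.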
for (int eid = 0; eid < size; eid++) {
// set the bit to 1 if an element is not null
if (null != loi.getListElement(obj, eid)) {
nullByte |= 1 << (eid % 8);
}
// flush the null byte after every eighth element or at the last element
if (7 == eid % 8 || eid == size - 1) {
byteStream.write(nullByte);
nullByte = 0;
}
}
// 4/ write element by element from the list
for (int eid = 0; eid < size; eid++) {
serialize(byteStream, loi.getListElement(obj, eid), eoi, false, warnedOnceNullMapKey);
}
if (!skipLengthPrefix) {
// 5/ update the list byte size
int listEnd = byteStream.getLength();
int listSize = listEnd - listStart;
writeSizeAtOffset(byteStream, byteSizeStart, listSize);
}
return;
}
case MAP:
{
MapObjectInspector moi = (MapObjectInspector) objInspector;
ObjectInspector koi = moi.getMapKeyObjectInspector();
ObjectInspector voi = moi.getMapValueObjectInspector();
Map<?, ?> map = moi.getMap(obj);
int byteSizeStart = 0;
int mapStart = 0;
if (!skipLengthPrefix) {
// 1/ reserve space for the byte size of the map,
// which is an integer and takes four bytes
byteSizeStart = byteStream.getLength();
byteStream.reserve(4);
mapStart = byteStream.getLength();
}
// 2/ write the size of the map as a VInt
int size = map.size();
LazyBinaryUtils.writeVInt(byteStream, size);
// 3/ write the null bytes
int b = 0;
byte nullByte = 0;
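// Each entry contributes two bits, one for the key and one for the value (1 = non-null);
// the byte is flushed after every fourth entry (eight bits) or after the last entry.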
for (Map.Entry<?, ?> entry : map.entrySet()) {
// set the bit to 1 if a key is not null
if (null != entry.getKey()) {
nullByte |= 1 << (b % 8);
} else if (warnedOnceNullMapKey != null) {
if (!warnedOnceNullMapKey.value) {
LOG.warn("Null map key encountered! Ignoring similar problems.");
}
warnedOnceNullMapKey.value = true;
}
b++;
// set the bit to 1 if a value is not null
if (null != entry.getValue()) {
nullByte |= 1 << (b % 8);
}
b++;
// flush the null byte every eight bits or after the last key-value pair
if (0 == b % 8 || b == size * 2) {
byteStream.write(nullByte);
nullByte = 0;
}
}
// 4/ write key-value pairs one by one
for (Map.Entry<?, ?> entry : map.entrySet()) {
serialize(byteStream, entry.getKey(), koi, false, warnedOnceNullMapKey);
serialize(byteStream, entry.getValue(), voi, false, warnedOnceNullMapKey);
}
if (!skipLengthPrefix) {
// 5/ update the byte size of the map
int mapEnd = byteStream.getLength();
int mapSize = mapEnd - mapStart;
writeSizeAtOffset(byteStream, byteSizeStart, mapSize);
}
return;
}
case STRUCT:
case UNION:
{
int byteSizeStart = 0;
int typeStart = 0;
if (!skipLengthPrefix) {
// 1/ reserve space for the byte size of the struct,
// which is an integer and takes four bytes
byteSizeStart = byteStream.getLength();
byteStream.reserve(4);
typeStart = byteStream.getLength();
}
if (ObjectInspector.Category.STRUCT.equals(objInspector.getCategory())) {
// 2/ serialize the struct
serializeStruct(byteStream, obj, (StructObjectInspector) objInspector, warnedOnceNullMapKey);
} else {
// 2/ serialize the union
serializeUnion(byteStream, obj, (UnionObjectInspector) objInspector, warnedOnceNullMapKey);
}
if (!skipLengthPrefix) {
// 3/ update the byte size of the struct
int typeEnd = byteStream.getLength();
int typeSize = typeEnd - typeStart;
writeSizeAtOffset(byteStream, byteSizeStart, typeSize);
}
return;
}
default:
{
throw new RuntimeException("Unrecognized type: " + objInspector.getCategory());
}
}
}
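As a rough illustration of driving this method directly, the sketch below serializes a two-column row with a ByteStream.Output (which implements RandomAccessOutput) and a standard struct inspector, passing null for the warnedOnceNullMapKey flag, which the method tolerates. The class name and values are illustrative only.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class LazyBinarySerializeSketch {
  public static void main(String[] args) throws SerDeException {
    StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "name"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
    Object row = Arrays.asList(7L, "seven");
    ByteStream.Output out = new ByteStream.Output();
    // skipLengthPrefix=true omits the 4-byte size prefix used for nested list/map/struct values.
    LazyBinarySerDe.serialize(out, row, rowOI, true /* skipLengthPrefix */,
        null /* warnedOnceNullMapKey */);
    System.out.println("serialized " + out.getLength() + " bytes");
  }
}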