Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector in project hive by apache.
The class DynamicSerDeTypei32, method serialize.
@Override
public void serialize(Object o, ObjectInspector oi, TProtocol oprot)
    throws TException, SerDeException, NoSuchFieldException, IllegalAccessException {
  IntObjectInspector poi = (IntObjectInspector) oi;
  oprot.writeI32(poi.get(o));
}
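Both of Hive's stock int inspectors satisfy the cast above; which one applies depends on whether the row object carries a java.lang.Integer or a Hadoop IntWritable. A minimal sketch of that unwrapping step (the factory fields are real Hive API; the values are invented):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

IntObjectInspector javaOi = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
IntObjectInspector writableOi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
int a = javaOi.get(Integer.valueOf(42));     // unwraps the boxed Integer: 42
int b = writableOi.get(new IntWritable(42)); // unwraps the Writable: 42
// oprot.writeI32(a) would then emit the raw i32, exactly as serialize() does above.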
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector in project presto by prestodb.
The class HiveBucketing, method getHiveBucket.
public static Optional<HiveBucket> getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount) {
  try {
    @SuppressWarnings("resource")
    GenericUDFHash udf = new GenericUDFHash();
    ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
    DeferredObject[] deferredObjects = new DeferredObject[columnBindings.size()];
    int i = 0;
    for (Entry<ObjectInspector, Object> entry : columnBindings) {
      objectInspectors[i] = getJavaObjectInspector(entry.getKey());
      deferredObjects[i] = getJavaDeferredObject(entry.getValue(), entry.getKey());
      i++;
    }
    ObjectInspector udfInspector = udf.initialize(objectInspectors);
    IntObjectInspector inspector = (IntObjectInspector) udfInspector;
    Object result = udf.evaluate(deferredObjects);
    HiveKey hiveKey = new HiveKey();
    hiveKey.setHashCode(inspector.get(result));
    int bucketNumber = new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
    return Optional.of(new HiveBucket(bucketNumber, bucketCount));
  } catch (HiveException e) {
    log.debug(e, "Error evaluating bucket number");
    return Optional.empty();
  }
}
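A hypothetical call-site sketch for getHiveBucket (HiveBucketing and HiveBucket are the Presto classes above; the column values and bucket count are invented, while the factory inspectors are real Hive API):

import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Arrays;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

List<Entry<ObjectInspector, Object>> bindings = Arrays.<Entry<ObjectInspector, Object>>asList(
    new SimpleImmutableEntry<>(PrimitiveObjectInspectorFactory.javaIntObjectInspector, 42),
    new SimpleImmutableEntry<>(PrimitiveObjectInspectorFactory.javaStringObjectInspector, "acme"));
// Hash the two column values into one of 32 buckets.
Optional<HiveBucket> bucket = HiveBucketing.getHiveBucket(bindings, 32);

Note the error handling: a HiveException is logged only at debug level and surfaces as Optional.empty(), so callers must treat an absent result as "bucket unknown" rather than as a hard failure.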
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector in project hive by apache.
The class JsonSerDe, method buildJSONString.
// TODO: code section copied over from SerDeUtils because the non-standard JSON production there
// does not quote all field names. We should fix that in SerDeUtils and then remove this copy.
// See http://jackson.codehaus.org/1.7.3/javadoc/org/codehaus/jackson/JsonParser.Feature.html#ALLOW_UNQUOTED_FIELD_NAMES
// for details - trying to enable Jackson to tolerate unquoted field names doesn't seem to work
// (compilation failure when attempting to use that feature), so the production itself has to change.
// Also, this method throws IOException when BINARY data is detected.
private static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) throws IOException {
  switch (oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      if (o == null) {
        sb.append("null");
      } else {
        switch (poi.getPrimitiveCategory()) {
          case BOOLEAN: {
            boolean b = ((BooleanObjectInspector) poi).get(o);
            sb.append(b ? "true" : "false");
            break;
          }
          case BYTE: {
            sb.append(((ByteObjectInspector) poi).get(o));
            break;
          }
          case SHORT: {
            sb.append(((ShortObjectInspector) poi).get(o));
            break;
          }
          case INT: {
            sb.append(((IntObjectInspector) poi).get(o));
            break;
          }
          case LONG: {
            sb.append(((LongObjectInspector) poi).get(o));
            break;
          }
          case FLOAT: {
            sb.append(((FloatObjectInspector) poi).get(o));
            break;
          }
          case DOUBLE: {
            sb.append(((DoubleObjectInspector) poi).get(o));
            break;
          }
          case STRING: {
            String s = SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o));
            appendWithQuotes(sb, s);
            break;
          }
          case BINARY: {
            byte[] b = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
            Text txt = new Text();
            txt.set(b, 0, b.length);
            appendWithQuotes(sb, SerDeUtils.escapeString(txt.toString()));
            break;
          }
          case DATE: {
            Date d = ((DateObjectInspector) poi).getPrimitiveJavaObject(o);
            appendWithQuotes(sb, d.toString());
            break;
          }
          case TIMESTAMP: {
            Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(o);
            appendWithQuotes(sb, t.toString());
            break;
          }
          case DECIMAL: {
            sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(o));
            break;
          }
          case VARCHAR: {
            String s = SerDeUtils.escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
            appendWithQuotes(sb, s);
            break;
          }
          case CHAR: {
            // This should use HiveChar.getPaddedValue(), but it's protected; currently (v0.13)
            // HiveChar.toString() returns getPaddedValue().
            String s = SerDeUtils.escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
            appendWithQuotes(sb, s);
            break;
          }
          default:
            throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
        }
      }
      break;
    }
    case LIST: {
      ListObjectInspector loi = (ListObjectInspector) oi;
      ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
      List<?> olist = loi.getList(o);
      if (olist == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACKET);
        for (int i = 0; i < olist.size(); i++) {
          if (i > 0) {
            sb.append(SerDeUtils.COMMA);
          }
          buildJSONString(sb, olist.get(i), listElementObjectInspector);
        }
        sb.append(SerDeUtils.RBRACKET);
      }
      break;
    }
    case MAP: {
      MapObjectInspector moi = (MapObjectInspector) oi;
      ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
      ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
      Map<?, ?> omap = moi.getMap(o);
      if (omap == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        boolean first = true;
        for (Object entry : omap.entrySet()) {
          if (first) {
            first = false;
          } else {
            sb.append(SerDeUtils.COMMA);
          }
          Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
          StringBuilder keyBuilder = new StringBuilder();
          buildJSONString(keyBuilder, e.getKey(), mapKeyObjectInspector);
          String keyString = keyBuilder.toString().trim();
          if ((!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE)) {
            appendWithQuotes(sb, keyString);
          } else {
            sb.append(keyString);
          }
          sb.append(SerDeUtils.COLON);
          buildJSONString(sb, e.getValue(), mapValueObjectInspector);
        }
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    case STRUCT: {
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> structFields = soi.getAllStructFieldRefs();
      if (o == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        for (int i = 0; i < structFields.size(); i++) {
          if (i > 0) {
            sb.append(SerDeUtils.COMMA);
          }
          appendWithQuotes(sb, structFields.get(i).getFieldName());
          sb.append(SerDeUtils.COLON);
          buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), structFields.get(i).getFieldObjectInspector());
        }
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    case UNION: {
      UnionObjectInspector uoi = (UnionObjectInspector) oi;
      if (o == null) {
        sb.append("null");
      } else {
        sb.append(SerDeUtils.LBRACE);
        sb.append(uoi.getTag(o));
        sb.append(SerDeUtils.COLON);
        buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)));
        sb.append(SerDeUtils.RBRACE);
      }
      break;
    }
    default:
      throw new RuntimeException("Unknown type in ObjectInspector!");
  }
}
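The STRUCT branch above drives everything through the generic inspector API. A minimal standalone sketch of that traversal with stock Hive inspectors (field names and row data are invented); the comment notes the JSON buildJSONString would emit for the same row:

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("id", "name"),
    Arrays.asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector));
Object row = Arrays.asList(7, "alice");
for (StructField f : soi.getAllStructFieldRefs()) {
  // buildJSONString recurses here with f.getFieldObjectInspector();
  // for this row it would emit {"id":7,"name":"alice"}
  System.out.println(f.getFieldName() + " = " + soi.getStructFieldData(row, f));
}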
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector in project presto by prestodb.
The class TestDataWritableWriter, method writePrimitive.
/**
* It writes the primitive value to the Parquet RecordConsumer.
*
* @param value The object that contains the primitive value.
* @param inspector The object inspector used to get the correct value type.
*/
private void writePrimitive(final Object value, final PrimitiveObjectInspector inspector) {
  if (value == null) {
    return;
  }
  switch (inspector.getPrimitiveCategory()) {
    case VOID:
      return;
    case DOUBLE:
      recordConsumer.addDouble(((DoubleObjectInspector) inspector).get(value));
      break;
    case BOOLEAN:
      recordConsumer.addBoolean(((BooleanObjectInspector) inspector).get(value));
      break;
    case FLOAT:
      recordConsumer.addFloat(((FloatObjectInspector) inspector).get(value));
      break;
    case BYTE:
      recordConsumer.addInteger(((ByteObjectInspector) inspector).get(value));
      break;
    case INT:
      recordConsumer.addInteger(((IntObjectInspector) inspector).get(value));
      break;
    case LONG:
      recordConsumer.addLong(((LongObjectInspector) inspector).get(value));
      break;
    case SHORT:
      recordConsumer.addInteger(((ShortObjectInspector) inspector).get(value));
      break;
    case STRING:
      String v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(value);
      recordConsumer.addBinary(Binary.fromString(v));
      break;
    case CHAR:
      String vChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(value).getStrippedValue();
      recordConsumer.addBinary(Binary.fromString(vChar));
      break;
    case VARCHAR:
      String vVarchar = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(value).getValue();
      recordConsumer.addBinary(Binary.fromString(vVarchar));
      break;
    case BINARY:
      byte[] vBinary = ((BinaryObjectInspector) inspector).getPrimitiveJavaObject(value);
      recordConsumer.addBinary(Binary.fromByteArray(vBinary));
      break;
    case TIMESTAMP:
      Timestamp ts = ((TimestampObjectInspector) inspector).getPrimitiveJavaObject(value);
      recordConsumer.addBinary(NanoTimeUtils.getNanoTime(ts, false).toBinary());
      break;
    case DECIMAL:
      HiveDecimal vDecimal = ((HiveDecimal) inspector.getPrimitiveJavaObject(value));
      DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inspector.getTypeInfo();
      recordConsumer.addBinary(decimalToBinary(vDecimal, decTypeInfo));
      break;
    case DATE:
      Date vDate = ((DateObjectInspector) inspector).getPrimitiveJavaObject(value);
      recordConsumer.addInteger(DateWritable.dateToDays(vDate));
      break;
    default:
      throw new IllegalArgumentException("Unsupported primitive data type: " + inspector.getPrimitiveCategory());
  }
}
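One detail worth calling out: BYTE, SHORT, and INT all funnel into recordConsumer.addInteger, because Parquet's narrowest integer physical type is INT32. A minimal sketch of the widening (the factory inspectors are real Hive API; the literal values are invented):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

int fromByte = PrimitiveObjectInspectorFactory.javaByteObjectInspector.get((byte) 7);    // byte widened to int
int fromShort = PrimitiveObjectInspectorFactory.javaShortObjectInspector.get((short) 7); // short widened to int
int fromInt = PrimitiveObjectInspectorFactory.javaIntObjectInspector.get(7);             // already an int
// each result would be passed to recordConsumer.addInteger(...) unchanged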
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector in project hive by apache.
The class KuduSerDe, method serialize.
/**
* Serialize an object by navigating inside the Object with the ObjectInspector.
*/
@Override
public KuduWritable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {
  Preconditions.checkArgument(objectInspector.getCategory() == Category.STRUCT);
  StructObjectInspector soi = (StructObjectInspector) objectInspector;
  List<Object> writableObj = soi.getStructFieldsDataAsList(obj);
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  PartialRow row = schema.newPartialRow();
  for (int i = 0; i < schema.getColumnCount(); i++) {
    StructField field = fields.get(i);
    Object value = writableObj.get(i);
    if (value == null) {
      row.setNull(i);
    } else {
      Type type = schema.getColumnByIndex(i).getType();
      ObjectInspector inspector = field.getFieldObjectInspector();
      switch (type) {
        case BOOL:
          boolean boolVal = ((BooleanObjectInspector) inspector).get(value);
          row.addBoolean(i, boolVal);
          break;
        case INT8:
          byte byteVal = ((ByteObjectInspector) inspector).get(value);
          row.addByte(i, byteVal);
          break;
        case INT16:
          short shortVal = ((ShortObjectInspector) inspector).get(value);
          row.addShort(i, shortVal);
          break;
        case INT32:
          int intVal = ((IntObjectInspector) inspector).get(value);
          row.addInt(i, intVal);
          break;
        case INT64:
          long longVal = ((LongObjectInspector) inspector).get(value);
          row.addLong(i, longVal);
          break;
        case UNIXTIME_MICROS:
          // Calling toSqlTimestamp and using the addTimestamp API ensures we properly
          // convert Hive localDateTime to UTC.
          java.sql.Timestamp timestampVal =
              ((TimestampObjectInspector) inspector).getPrimitiveJavaObject(value).toSqlTimestamp();
          row.addTimestamp(i, timestampVal);
          break;
        case DECIMAL:
          HiveDecimal decimalVal = ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(value);
          row.addDecimal(i, decimalVal.bigDecimalValue());
          break;
        case FLOAT:
          float floatVal = ((FloatObjectInspector) inspector).get(value);
          row.addFloat(i, floatVal);
          break;
        case DOUBLE:
          double doubleVal = ((DoubleObjectInspector) inspector).get(value);
          row.addDouble(i, doubleVal);
          break;
        case STRING:
          String stringVal = ((StringObjectInspector) inspector).getPrimitiveJavaObject(value);
          row.addString(i, stringVal);
          break;
        case BINARY:
          byte[] bytesVal = ((BinaryObjectInspector) inspector).getPrimitiveJavaObject(value);
          row.addBinary(i, bytesVal);
          break;
        default:
          throw new SerDeException("Unsupported column type: " + type.name());
      }
    }
  }
  return new KuduWritable(row);
}
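The dispatch above relies on each field inspector matching the declared Kudu column type. A reduced, hypothetical sketch of just the INT32 arm (schema is assumed to be an org.apache.kudu.Schema whose column 0 is declared INT32; the IntWritable is the representation Hive typically hands a SerDe for an INT column):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.kudu.client.PartialRow;

PartialRow row = schema.newPartialRow();
IntObjectInspector inspector = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
Object value = new IntWritable(123);
row.addInt(0, inspector.get(value)); // unwraps the IntWritable and writes column 0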