Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector in project hive by apache.
The class DruidSerDe, method serialize.
@Override
public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
  if (objectInspector.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: " + objectInspector.getTypeName());
  }
  // Prepare the field ObjectInspectors
  StructObjectInspector soi = (StructObjectInspector) objectInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> values = soi.getStructFieldsDataAsList(o);
  // Serialize each column value into a map keyed by column name
  final Map<String, Object> value = new HashMap<>();
  for (int i = 0; i < columns.length; i++) {
    if (values.get(i) == null) {
      // null, we just add it
      value.put(columns[i], null);
      continue;
    }
    final Object res;
    switch (types[i].getPrimitiveCategory()) {
      case TIMESTAMP:
        res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getTime();
        break;
      case TIMESTAMPLOCALTZ:
        res = ((TimestampLocalTZObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getZonedDateTime().toInstant().toEpochMilli();
        break;
      case BYTE:
        res = ((ByteObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case SHORT:
        res = ((ShortObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case INT:
        res = ((IntObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case LONG:
        res = ((LongObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case FLOAT:
        res = ((FloatObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case DOUBLE:
        res = ((DoubleObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      case DECIMAL:
        res = ((HiveDecimalObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).doubleValue();
        break;
      case CHAR:
        res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getValue();
        break;
      case VARCHAR:
        res = ((HiveVarcharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i)).getValue();
        break;
      case STRING:
        res = ((StringObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i));
        break;
      case BOOLEAN:
        res = ((BooleanObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
        break;
      default:
        throw new SerDeException("Unknown type: " + types[i].getPrimitiveCategory());
    }
    value.put(columns[i], res);
  }
  // Extract the partition keys: segment granularity and partition key, if any.
  // Segment granularity has to come first.
  final int granularityFieldIndex = columns.length;
  assert values.size() > granularityFieldIndex;
  Preconditions.checkArgument(fields.get(granularityFieldIndex).getFieldName().equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME));
  value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, ((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(granularityFieldIndex)).getTime());
  if (values.size() == columns.length + 2) {
    // Then the partition number, if any.
    final int partitionNumPos = granularityFieldIndex + 1;
    Preconditions.checkArgument(fields.get(partitionNumPos).getFieldName().equals(Constants.DRUID_SHARD_KEY_COL_NAME), String.format("expecting to encounter %s but was %s", Constants.DRUID_SHARD_KEY_COL_NAME, fields.get(partitionNumPos).getFieldName()));
    value.put(Constants.DRUID_SHARD_KEY_COL_NAME, ((LongObjectInspector) fields.get(partitionNumPos).getFieldObjectInspector()).get(values.get(partitionNumPos)));
  }
  return new DruidWritable(value);
}
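To make the STRING branch above concrete, here is a minimal hedged sketch (not part of DruidSerDe itself): a StringObjectInspector converts a field's internal representation, typically a Text writable, into the plain java.lang.String that ends up in the Druid row map.

// Stock inspector for Text-backed string fields, from PrimitiveObjectInspectorFactory.
StringObjectInspector soi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
// Converts the internal Text writable into a java.lang.String ("druid-dimension").
String s = soi.getPrimitiveJavaObject(new Text("druid-dimension"));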
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector in project hive by apache.
The class GenericUDFDBOutput, method evaluate.
/**
 * @return 0 on success, 1 on an SQL error while executing the statement, 2 on a connection failure
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  url = ((StringObjectInspector) argumentOI[0]).getPrimitiveJavaObject(arguments[0].get());
  user = ((StringObjectInspector) argumentOI[1]).getPrimitiveJavaObject(arguments[1].get());
  pass = ((StringObjectInspector) argumentOI[2]).getPrimitiveJavaObject(arguments[2].get());
  try {
    connection = DriverManager.getConnection(url, user, pass);
  } catch (SQLException ex) {
    LOG.error("Driver loading or connection issue", ex);
    result.set(2);
  }
  if (connection != null) {
    try {
      PreparedStatement ps = connection.prepareStatement(((StringObjectInspector) argumentOI[3]).getPrimitiveJavaObject(arguments[3].get()));
      for (int i = 4; i < arguments.length; ++i) {
        PrimitiveObjectInspector poi = ((PrimitiveObjectInspector) argumentOI[i]);
        ps.setObject(i - 3, poi.getPrimitiveJavaObject(arguments[i].get()));
      }
      ps.execute();
      ps.close();
      result.set(0);
    } catch (SQLException e) {
      LOG.error("Underlying SQL exception", e);
      result.set(1);
    } finally {
      try {
        connection.close();
      } catch (Exception ex) {
        LOG.error("Underlying SQL exception during close", ex);
      }
    }
  }
  return result;
}
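A hedged sketch of driving this UDF programmatically; the JDBC URL, credentials, table, and bind value below are placeholders, and a matching JDBC driver must already be on the classpath. initialize receives one string inspector per argument: url, user, password, the parameterized statement, then the bind values.

// Inside a method that declares throws HiveException / UDFArgumentException.
GenericUDFDBOutput udf = new GenericUDFDBOutput();
ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
udf.initialize(new ObjectInspector[] { stringOI, stringOI, stringOI, stringOI, stringOI });
// Arguments 0-2 are connection info, 3 is the statement, 4+ are bind values.
Object rc = udf.evaluate(new GenericUDF.DeferredObject[] {
    new GenericUDF.DeferredJavaObject("jdbc:derby:memory:demo;create=true"), // placeholder URL
    new GenericUDF.DeferredJavaObject("app"), // placeholder user
    new GenericUDF.DeferredJavaObject(""), // placeholder password
    new GenericUDF.DeferredJavaObject("INSERT INTO t VALUES (?)"), // placeholder statement
    new GenericUDF.DeferredJavaObject("hello") }); // rc is an IntWritable: 0, 1, or 2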
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector in project hive by apache.
The class ColumnStatisticsObjTranslator, method unpackPrimitiveObject.
private static void unpackPrimitiveObject(ObjectInspector oi, Object o, String fieldName, ColumnStatisticsObj statsObj) throws UnsupportedDoubleException {
  if (o == null) {
    return;
  }
  // First infer the type of object
  if (fieldName.equals("columntype")) {
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    String s = ((StringObjectInspector) poi).getPrimitiveJavaObject(o);
    ColumnStatisticsData statsData = new ColumnStatisticsData();
    if (s.equalsIgnoreCase("long")) {
      LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
      statsData.setLongStats(longStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("double")) {
      DoubleColumnStatsDataInspector doubleStats = new DoubleColumnStatsDataInspector();
      statsData.setDoubleStats(doubleStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("string")) {
      StringColumnStatsDataInspector stringStats = new StringColumnStatsDataInspector();
      statsData.setStringStats(stringStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("boolean")) {
      BooleanColumnStatsData booleanStats = new BooleanColumnStatsData();
      statsData.setBooleanStats(booleanStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("binary")) {
      BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
      statsData.setBinaryStats(binaryStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("decimal")) {
      DecimalColumnStatsDataInspector decimalStats = new DecimalColumnStatsDataInspector();
      statsData.setDecimalStats(decimalStats);
      statsObj.setStatsData(statsData);
    } else if (s.equalsIgnoreCase("date")) {
      DateColumnStatsDataInspector dateStats = new DateColumnStatsDataInspector();
      statsData.setDateStats(dateStats);
      statsObj.setStatsData(statsData);
    }
  } else {
    // invoke the right unpack method depending on data type of the column
    if (statsObj.getStatsData().isSetBooleanStats()) {
      unpackBooleanStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetLongStats()) {
      unpackLongStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDoubleStats()) {
      unpackDoubleStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetStringStats()) {
      unpackStringStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetBinaryStats()) {
      unpackBinaryStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDecimalStats()) {
      unpackDecimalStats(oi, o, fieldName, statsObj);
    } else if (statsObj.getStatsData().isSetDateStats()) {
      unpackDateStats(oi, o, fieldName, statsObj);
    }
  }
}
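The else branch relies on a property of the Thrift union behind ColumnStatisticsData: setting one stats variant makes exactly that variant's isSet*() accessor return true, which is what routes each subsequent field to the right unpack method. A minimal sketch of that contract:

ColumnStatisticsData statsData = new ColumnStatisticsData();
statsData.setLongStats(new LongColumnStatsDataInspector());
// Only the branch that was set reports as present.
assert statsData.isSetLongStats();
assert !statsData.isSetDoubleStats();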
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector in project hive by apache.
The class StatsUtils, method getAvgColLenOf.
/**
 * Get the raw data size of variable length data types
 * @param conf
 *          - hive conf
 * @param oi
 *          - object inspector
 * @param colType
 *          - column type
 * @return raw data size
 */
public static long getAvgColLenOf(HiveConf conf, ObjectInspector oi, String colType) {
  long configVarLen = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_MAX_VARIABLE_LENGTH);
  String colTypeLowCase = colType.toLowerCase();
  if (colTypeLowCase.equals(serdeConstants.STRING_TYPE_NAME)) {
    // constant string projection Ex: select "hello" from table
    if (oi instanceof ConstantObjectInspector) {
      ConstantObjectInspector coi = (ConstantObjectInspector) oi;
      // if writable constant is null then return size 0
      Object constantValue = coi.getWritableConstantValue();
      return constantValue == null ? 0 : constantValue.toString().length();
    } else if (oi instanceof StringObjectInspector) {
      // return the variable length from config
      return configVarLen;
    }
  } else if (colTypeLowCase.startsWith(serdeConstants.VARCHAR_TYPE_NAME)) {
    // constant varchar projection
    if (oi instanceof ConstantObjectInspector) {
      ConstantObjectInspector coi = (ConstantObjectInspector) oi;
      // if writable constant is null then return size 0
      Object constantValue = coi.getWritableConstantValue();
      return constantValue == null ? 0 : constantValue.toString().length();
    } else if (oi instanceof HiveVarcharObjectInspector) {
      VarcharTypeInfo type = (VarcharTypeInfo) ((HiveVarcharObjectInspector) oi).getTypeInfo();
      return type.getLength();
    }
  } else if (colTypeLowCase.startsWith(serdeConstants.CHAR_TYPE_NAME)) {
    // constant char projection
    if (oi instanceof ConstantObjectInspector) {
      ConstantObjectInspector coi = (ConstantObjectInspector) oi;
      // if writable constant is null then return size 0
      Object constantValue = coi.getWritableConstantValue();
      return constantValue == null ? 0 : constantValue.toString().length();
    } else if (oi instanceof HiveCharObjectInspector) {
      CharTypeInfo type = (CharTypeInfo) ((HiveCharObjectInspector) oi).getTypeInfo();
      return type.getLength();
    }
  } else if (colTypeLowCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
    // constant byte arrays
    if (oi instanceof ConstantObjectInspector) {
      ConstantObjectInspector coi = (ConstantObjectInspector) oi;
      // if writable constant is null then return size 0
      BytesWritable constantValue = (BytesWritable) coi.getWritableConstantValue();
      return constantValue == null ? 0 : constantValue.getLength();
    } else if (oi instanceof BinaryObjectInspector) {
      // return the variable length from config
      return configVarLen;
    }
  } else {
    // complex types (map, list, struct, union)
    return getSizeOfComplexTypes(conf, oi);
  }
  throw new IllegalArgumentException("Size requested for unknown type: " + colType + " OI: " + oi.getTypeName());
}
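A hedged usage sketch: for a column declared varchar(50) the method returns the type's declared maximum length, while a plain (non-constant) string column falls back to the hive.stats.max.variable.length setting read at the top of the method.

HiveConf conf = new HiveConf();
// Java object inspector for varchar(50); it is not a ConstantObjectInspector,
// so the varchar branch returns the declared length.
ObjectInspector varcharOI = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
    TypeInfoFactory.getVarcharTypeInfo(50));
long avgLen = StatsUtils.getAvgColLenOf(conf, varcharOI, "varchar(50)"); // 50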
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector in project hive by apache.
The class BinarySortableSerDe, method serialize.
static void serialize(ByteStream.Output buffer, Object o, ObjectInspector oi, boolean invert, byte nullMarker, byte notNullMarker) throws SerDeException {
  // Is this field a null?
  if (o == null) {
    writeByte(buffer, nullMarker, invert);
    return;
  }
  // This field is not a null.
  writeByte(buffer, notNullMarker, invert);
  switch (oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      switch (poi.getPrimitiveCategory()) {
        case VOID: {
          return;
        }
        case BOOLEAN: {
          boolean v = ((BooleanObjectInspector) poi).get(o);
          writeByte(buffer, (byte) (v ? 2 : 1), invert);
          return;
        }
        case BYTE: {
          ByteObjectInspector boi = (ByteObjectInspector) poi;
          byte v = boi.get(o);
          writeByte(buffer, (byte) (v ^ 0x80), invert);
          return;
        }
        case SHORT: {
          ShortObjectInspector spoi = (ShortObjectInspector) poi;
          short v = spoi.get(o);
          serializeShort(buffer, v, invert);
          return;
        }
        case INT: {
          IntObjectInspector ioi = (IntObjectInspector) poi;
          int v = ioi.get(o);
          serializeInt(buffer, v, invert);
          return;
        }
        case LONG: {
          LongObjectInspector loi = (LongObjectInspector) poi;
          long v = loi.get(o);
          serializeLong(buffer, v, invert);
          return;
        }
        case FLOAT: {
          FloatObjectInspector foi = (FloatObjectInspector) poi;
          serializeFloat(buffer, foi.get(o), invert);
          return;
        }
        case DOUBLE: {
          DoubleObjectInspector doi = (DoubleObjectInspector) poi;
          serializeDouble(buffer, doi.get(o), invert);
          return;
        }
        case STRING: {
          StringObjectInspector soi = (StringObjectInspector) poi;
          Text t = soi.getPrimitiveWritableObject(o);
          serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
          return;
        }
        case CHAR: {
          HiveCharObjectInspector hcoi = (HiveCharObjectInspector) poi;
          HiveCharWritable hc = hcoi.getPrimitiveWritableObject(o);
          // Trailing spaces should be ignored for char comparisons.
          // So write stripped values for this SerDe.
          Text t = hc.getStrippedValue();
          serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
          return;
        }
        case VARCHAR: {
          HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
          HiveVarcharWritable hc = hcoi.getPrimitiveWritableObject(o);
          // use varchar's text field directly
          Text t = hc.getTextValue();
          serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
          return;
        }
        case BINARY: {
          BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
          BytesWritable ba = baoi.getPrimitiveWritableObject(o);
          byte[] toSer = new byte[ba.getLength()];
          System.arraycopy(ba.getBytes(), 0, toSer, 0, ba.getLength());
          serializeBytes(buffer, toSer, ba.getLength(), invert);
          return;
        }
        case DATE: {
          DateObjectInspector doi = (DateObjectInspector) poi;
          int v = doi.getPrimitiveWritableObject(o).getDays();
          serializeInt(buffer, v, invert);
          return;
        }
        case TIMESTAMP: {
          TimestampObjectInspector toi = (TimestampObjectInspector) poi;
          TimestampWritable t = toi.getPrimitiveWritableObject(o);
          serializeTimestampWritable(buffer, t, invert);
          return;
        }
        case TIMESTAMPLOCALTZ: {
          TimestampLocalTZObjectInspector toi = (TimestampLocalTZObjectInspector) poi;
          TimestampLocalTZWritable t = toi.getPrimitiveWritableObject(o);
          serializeTimestampTZWritable(buffer, t, invert);
          return;
        }
        case INTERVAL_YEAR_MONTH: {
          HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi;
          HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o);
          serializeHiveIntervalYearMonth(buffer, intervalYearMonth, invert);
          return;
        }
        case INTERVAL_DAY_TIME: {
          HiveIntervalDayTimeObjectInspector ioi = (HiveIntervalDayTimeObjectInspector) poi;
          HiveIntervalDayTime intervalDayTime = ioi.getPrimitiveJavaObject(o);
          serializeHiveIntervalDayTime(buffer, intervalDayTime, invert);
          return;
        }
        case DECIMAL: {
          HiveDecimalObjectInspector boi = (HiveDecimalObjectInspector) poi;
          HiveDecimal dec = boi.getPrimitiveJavaObject(o);
          serializeHiveDecimal(buffer, dec, invert);
          return;
        }
        default: {
          throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
        }
      }
    }
    case LIST: {
      ListObjectInspector loi = (ListObjectInspector) oi;
      ObjectInspector eoi = loi.getListElementObjectInspector();
      // \1 followed by each element
      int size = loi.getListLength(o);
      for (int eid = 0; eid < size; eid++) {
        writeByte(buffer, (byte) 1, invert);
        serialize(buffer, loi.getListElement(o, eid), eoi, invert, nullMarker, notNullMarker);
      }
      // and \0 to terminate
      writeByte(buffer, (byte) 0, invert);
      return;
    }
    case MAP: {
      MapObjectInspector moi = (MapObjectInspector) oi;
      ObjectInspector koi = moi.getMapKeyObjectInspector();
      ObjectInspector voi = moi.getMapValueObjectInspector();
      // \1 followed by each key and then each value
      Map<?, ?> map = moi.getMap(o);
      for (Map.Entry<?, ?> entry : map.entrySet()) {
        writeByte(buffer, (byte) 1, invert);
        serialize(buffer, entry.getKey(), koi, invert, nullMarker, notNullMarker);
        serialize(buffer, entry.getValue(), voi, invert, nullMarker, notNullMarker);
      }
      // and \0 to terminate
      writeByte(buffer, (byte) 0, invert);
      return;
    }
    case STRUCT: {
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
        serialize(buffer, soi.getStructFieldData(o, fields.get(i)), fields.get(i).getFieldObjectInspector(), invert, nullMarker, notNullMarker);
      }
      return;
    }
    case UNION: {
      UnionObjectInspector uoi = (UnionObjectInspector) oi;
      byte tag = uoi.getTag(o);
      writeByte(buffer, tag, invert);
      serialize(buffer, uoi.getField(o), uoi.getObjectInspectors().get(tag), invert, nullMarker, notNullMarker);
      return;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + oi.getCategory());
    }
  }
}
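The point of this encoding is that the serialized bytes sort the same way the original values do under unsigned byte comparison. A minimal sketch for the STRING case, hedged on two counts: serialize is package-private, so this assumes same-package access, and the 0/1 markers stand in for the serde's usual null/not-null bytes.

// Inside a method that declares throws SerDeException.
ByteStream.Output a = new ByteStream.Output();
ByteStream.Output b = new ByteStream.Output();
StringObjectInspector soi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
BinarySortableSerDe.serialize(a, new Text("apple"), soi, false, (byte) 0, (byte) 1);
BinarySortableSerDe.serialize(b, new Text("banana"), soi, false, (byte) 0, (byte) 1);
// Comparing the first a.getLength()/b.getLength() bytes of a.getData() and
// b.getData() as unsigned values orders them the same as the strings themselves.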