Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector in project hive by apache.
The class HBaseRowSerializer, method serialize:
private boolean serialize(Object obj, ObjectInspector objInspector, int level,
    ByteStream.Output ss) throws IOException {
  switch (objInspector.getCategory()) {
  case PRIMITIVE:
    LazyUtils.writePrimitiveUTF8(ss, obj, (PrimitiveObjectInspector) objInspector,
        escaped, escapeChar, needsEscape);
    return true;
  case LIST:
    char separator = (char) separators[level];
    ListObjectInspector loi = (ListObjectInspector) objInspector;
    List<?> list = loi.getList(obj);
    ObjectInspector eoi = loi.getListElementObjectInspector();
    if (list == null) {
      return false;
    } else {
      // Elements are joined by the separator assigned to this nesting level.
      for (int i = 0; i < list.size(); i++) {
        if (i > 0) {
          ss.write(separator);
        }
        serialize(list.get(i), eoi, level + 1, ss);
      }
    }
    return true;
  case MAP:
    char sep = (char) separators[level];
    char keyValueSeparator = (char) separators[level + 1];
    MapObjectInspector moi = (MapObjectInspector) objInspector;
    ObjectInspector koi = moi.getMapKeyObjectInspector();
    ObjectInspector voi = moi.getMapValueObjectInspector();
    Map<?, ?> map = moi.getMap(obj);
    if (map == null) {
      return false;
    } else {
      boolean first = true;
      for (Map.Entry<?, ?> entry : map.entrySet()) {
        if (first) {
          first = false;
        } else {
          ss.write(sep);
        }
        serialize(entry.getKey(), koi, level + 2, ss);
        // A null value is skipped entirely: only the key is written, with no
        // key-value separator after it.
        if (entry.getValue() != null) {
          ss.write(keyValueSeparator);
          serialize(entry.getValue(), voi, level + 2, ss);
        }
      }
    }
    return true;
  case STRUCT:
    sep = (char) separators[level];
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    list = soi.getStructFieldsDataAsList(obj);
    if (list == null) {
      return false;
    } else {
      for (int i = 0; i < list.size(); i++) {
        if (i > 0) {
          ss.write(sep);
        }
        serialize(list.get(i), fields.get(i).getFieldObjectInspector(), level + 1, ss);
      }
    }
    return true;
  case UNION:
    {
      // Union type is currently not fully supported. See HIVE-2390.
      return false;
    }
  default:
    throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
  }
}
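As a companion to the PRIMITIVE branch above, here is a minimal, self-contained sketch (not from the Hive source; the class name PrimitiveUtf8SerializeSketch and the sample value are invented for illustration) that drives LazyUtils.writePrimitiveUTF8 directly with a standard string object inspector, using the same escaped=false defaults that getConstantVal further down this page uses:

import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class PrimitiveUtf8SerializeSketch {
  public static void main(String[] args) throws Exception {
    // Standard inspector for java.lang.String values.
    PrimitiveObjectInspector stringOI =
        PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    ByteStream.Output out = new ByteStream.Output();
    // escaped=false, escapeChar=0, needsEscape=null: no escaping is applied,
    // mirroring the defaults used by getConstantVal() below.
    LazyUtils.writePrimitiveUTF8(out, "hello", stringOI, false, (byte) 0, null);
    System.out.println(new String(out.getData(), 0, out.getLength(), "UTF-8"));
  }
}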
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector in project hive by apache.
The class HiveHBaseTableInputFormat, method getTimestampVal:
private long getTimestampVal(IndexSearchCondition sc) throws IOException {
  long timestamp;
  try {
    ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(sc.getConstantDesc());
    ObjectInspector inspector = eval.initialize(null);
    Object value = eval.evaluate(null);
    if (inspector instanceof LongObjectInspector) {
      timestamp = ((LongObjectInspector) inspector).get(value);
    } else {
      PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
      timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
    }
  } catch (HiveException e) {
    throw new IOException(e);
  }
  return timestamp;
}
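The fallback branch relies on PrimitiveObjectInspectorUtils.getTimestamp to coerce any primitive value into a timestamp. Below is a hedged, standalone sketch of that conversion (the class name and sample literal are invented; this assumes the older Hive API in which getTimestamp returns java.sql.Timestamp, matching the getTime() call above):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class TimestampValSketch {
  public static void main(String[] args) {
    PrimitiveObjectInspector stringOI =
        PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // A string constant is parsed into a timestamp the same way the
    // fallback branch above handles non-long inspectors.
    long millis = PrimitiveObjectInspectorUtils
        .getTimestamp("2017-01-01 00:00:00", stringOI)
        .getTime();
    System.out.println(millis);
  }
}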
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector in project hive by apache.
The class HiveHBaseTableInputFormat, method getConstantVal:
private byte[] getConstantVal(Object writable, PrimitiveObjectInspector poi,
    boolean isKeyBinary) throws IOException {
  if (!isKeyBinary) {
    // The key is stored in text format, so get the bytes representation of the
    // constant in text format as well.
    byte[] startRow;
    ByteStream.Output serializeStream = new ByteStream.Output();
    LazyUtils.writePrimitiveUTF8(serializeStream, writable, poi, false, (byte) 0, null);
    startRow = new byte[serializeStream.getLength()];
    System.arraycopy(serializeStream.getData(), 0, startRow, 0, serializeStream.getLength());
    return startRow;
  }
  PrimitiveCategory pc = poi.getPrimitiveCategory();
  switch (pc) {
  case INT:
    return Bytes.toBytes(((IntWritable) writable).get());
  case BOOLEAN:
    return Bytes.toBytes(((BooleanWritable) writable).get());
  case LONG:
    return Bytes.toBytes(((LongWritable) writable).get());
  case FLOAT:
    return Bytes.toBytes(((FloatWritable) writable).get());
  case DOUBLE:
    return Bytes.toBytes(((DoubleWritable) writable).get());
  case SHORT:
    return Bytes.toBytes(((ShortWritable) writable).get());
  case STRING:
    return Bytes.toBytes(((Text) writable).toString());
  case BYTE:
    return Bytes.toBytes(((ByteWritable) writable).get());
  default:
    throw new IOException("Type not supported " + pc);
  }
}
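The binary branch leans on HBase's Bytes utility, which encodes fixed-width values in big-endian order so the resulting row-key bytes compare consistently with the numeric values. A small round-trip sketch (the class name is invented for illustration):

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;

public class ConstantValSketch {
  public static void main(String[] args) {
    IntWritable writable = new IntWritable(42);
    // 4-byte big-endian encoding, as produced by the INT case above.
    byte[] key = Bytes.toBytes(writable.get());
    // Round-trips back to 42.
    System.out.println(Bytes.toInt(key));
  }
}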
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector in project hive by apache.
The class GenericUDAFSumList, method getEvaluator:
@Override
public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException {
  ObjectInspector[] inspectors = info.getParameterObjectInspectors();
  if (inspectors.length != 1) {
    throw new UDFArgumentTypeException(inspectors.length - 1, "Exactly one argument is expected.");
  }
  if (inspectors[0].getCategory() != ObjectInspector.Category.LIST) {
    throw new UDFArgumentTypeException(0, "Argument should be a list type");
  }
  ListObjectInspector listOI = (ListObjectInspector) inspectors[0];
  ObjectInspector elementOI = listOI.getListElementObjectInspector();
  if (elementOI.getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + elementOI.getTypeName() + " is passed.");
  }
  // The element's primitive category is fetched here, but the evaluator
  // returned below always sums as long regardless of it.
  PrimitiveObjectInspector.PrimitiveCategory pcat =
      ((PrimitiveObjectInspector) elementOI).getPrimitiveCategory();
  return new GenericUDAFSumLong();
}
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector in project hive by apache.
The class FilterOperator, method process:
@Override
public void process(Object row, int tag) throws HiveException {
  ObjectInspector rowInspector = inputObjInspectors[tag];
  if (conditionInspector == null) {
    conditionInspector = (PrimitiveObjectInspector) conditionEvaluator.initialize(rowInspector);
  }
  // Set the comparison in the IOContext and the type of the UDF.
  if (conf.isSortedFilter() && ioContext.useSorted()) {
    if (!(conditionEvaluator instanceof ExprNodeGenericFuncEvaluator)) {
      LOG.error("Attempted to use the fact data is sorted when the conditionEvaluator is not "
          + "of type ExprNodeGenericFuncEvaluator");
      ioContext.setUseSorted(false);
      return;
    } else {
      ioContext.setComparison(((ExprNodeGenericFuncEvaluator) conditionEvaluator).compare(row));
    }
    if (ioContext.getGenericUDFClassName() == null) {
      ioContext.setGenericUDFClassName(
          ((ExprNodeGenericFuncEvaluator) conditionEvaluator).genericUDF.getClass().getName());
    }
    // If we are currently searching the data for a place to begin, do not return data yet.
    if (ioContext.isBinarySearching()) {
      consecutiveSearches++;
      // If the search has been running for a while, send a progress heartbeat in
      // order to avoid timeout.
      if (((consecutiveSearches % heartbeatInterval) == 0) && (reporter != null)) {
        reporter.progress();
      }
      return;
    }
  }
  Object condition = conditionEvaluator.evaluate(row);
  // No rows are forwarded while the binary search is in progress; the flag stays set
  // until a starting point for a linear scan has been identified, at which point this
  // value is unset.
  if (ioContext.isBinarySearching()) {
    return;
  }
  Boolean ret = (Boolean) conditionInspector.getPrimitiveJavaObject(condition);
  if (Boolean.TRUE.equals(ret)) {
    forward(row, rowInspector);
  }
}
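The final unwrapping step above is a plain PrimitiveObjectInspector conversion. A trivial sketch (invented class name; the Boolean literal stands in for the result of conditionEvaluator.evaluate(row)):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FilterConditionSketch {
  public static void main(String[] args) {
    PrimitiveObjectInspector boolOI =
        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
    Object condition = Boolean.TRUE; // stands in for conditionEvaluator.evaluate(row)
    // The inspector converts the evaluated condition to a Java Boolean,
    // exactly as the last lines of process() do.
    Boolean ret = (Boolean) boolOI.getPrimitiveJavaObject(condition);
    System.out.println(Boolean.TRUE.equals(ret)); // true: the row would be forwarded
  }
}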