Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
From the class WindowingTableFunction, method iterator:
@SuppressWarnings("rawtypes")
@Override
public Iterator<Object> iterator(PTFPartitionIterator<Object> pItr) throws HiveException {
  WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
  ArrayList<Object> output = new ArrayList<Object>();
  List<?>[] outputFromPivotFunctions = new List<?>[wTFnDef.getWindowFunctions().size()];
  ArrayList<Integer> wFnsWithWindows = new ArrayList<Integer>();
  PTFPartition iPart = pItr.getPartition();
  int i = 0;
  for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
    boolean processWindow = processWindow(wFn.getWindowFrame());
    pItr.reset();
    if (!processWindow && !wFn.isPivotResult()) {
      Object out = evaluateFunctionOnPartition(wFn, iPart);
      output.add(out);
    } else if (wFn.isPivotResult()) {
      GenericUDAFEvaluator streamingEval = wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
      if (streamingEval != null && streamingEval instanceof ISupportStreamingModeForWindowing) {
        ISupportStreamingModeForWindowing strEval = (ISupportStreamingModeForWindowing) streamingEval;
        if (strEval.getRowsRemainingAfterTerminate() == 0) {
          wFn.setWFnEval(streamingEval);
          if (wFn.getOI() instanceof ListObjectInspector) {
            ListObjectInspector listOI = (ListObjectInspector) wFn.getOI();
            wFn.setOI(listOI.getListElementObjectInspector());
          }
          output.add(null);
          wFnsWithWindows.add(i);
        } else {
          outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
          output.add(null);
        }
      } else {
        outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
        output.add(null);
      }
    } else {
      output.add(null);
      wFnsWithWindows.add(i);
    }
    i++;
  }
  for (i = 0; i < iPart.getOutputOI().getAllStructFieldRefs().size(); i++) {
    output.add(null);
  }
  if (wTFnDef.getRankLimit() != -1) {
    rnkLimitDef = new RankLimit(wTFnDef.getRankLimit(), wTFnDef.getRankLimitFunction(), wTFnDef.getWindowFunctions());
  }
  return new WindowingIterator(iPart, output, outputFromPivotFunctions,
      ArrayUtils.toPrimitive(wFnsWithWindows.toArray(new Integer[wFnsWithWindows.size()])));
}
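The ListObjectInspector-specific step above is the unwrapping of a list-typed function output to its element inspector (wFn.setOI(listOI.getListElementObjectInspector())). The following is a minimal, standalone sketch of that unwrapping pattern; the object inspector and data used here are illustrative and not taken from the Hive code above.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ListOIUnwrapExample {
  public static void main(String[] args) {
    // A list<int> object inspector over plain Java objects.
    ObjectInspector oi = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);

    if (oi instanceof ListObjectInspector) {
      ListObjectInspector listOI = (ListObjectInspector) oi;
      // Same unwrapping pattern as in the snippet above: list OI -> element OI.
      ObjectInspector elementOI = listOI.getListElementObjectInspector();
      System.out.println("element type: " + elementOI.getTypeName());

      // Read list data through the inspector instead of casting the raw object directly.
      List<Integer> data = Arrays.asList(1, 2, 3);
      System.out.println("length: " + listOI.getListLength(data));
      System.out.println("first element: " + listOI.getListElement(data, 0));
    }
  }
}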
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
From the class ObjectInspectorConverters, method getConvertedOI:
/**
 * Utility function to convert from one object inspector type to another.
 * The output object inspector type should have all fields as settableOI type.
 * The above condition can be violated only if equalsCheck is true and inputOI is
 * equal to outputOI.
 * @param inputOI : input object inspector
 * @param outputOI : output object inspector
 * @param oiSettableProperties : The object inspector to isSettable mapping used to cache
 *                               intermediate results.
 * @param equalsCheck : Do we need to check if the inputOI and outputOI are the same?
 *                      true : If they are the same, we return the object inspector directly.
 *                      false : Do not perform an equality check on inputOI and outputOI.
 * @return : The output object inspector containing all settable fields. The return value
 *           can contain non-settable fields only if inputOI equals outputOI and equalsCheck
 *           is true.
 */
public static ObjectInspector getConvertedOI(ObjectInspector inputOI, ObjectInspector outputOI,
    Map<ObjectInspector, Boolean> oiSettableProperties, boolean equalsCheck) {
  // If inputOI equals outputOI (and the caller asked for the equality check), or if the
  // outputOI already has all fields settable, return outputOI as-is.
  if ((equalsCheck && inputOI.equals(outputOI))
      || ObjectInspectorUtils.hasAllFieldsSettable(outputOI, oiSettableProperties)) {
    return outputOI;
  }
  // Otherwise, build an object inspector that is settable recursively, i.e. all the nested
  // fields are also settable.
  switch (outputOI.getCategory()) {
    case PRIMITIVE:
      // Create a writable object inspector for the primitive type and return it.
      PrimitiveObjectInspector primOutputOI = (PrimitiveObjectInspector) outputOI;
      return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primOutputOI.getTypeInfo());
    case STRUCT:
      StructObjectInspector structOutputOI = (StructObjectInspector) outputOI;
      // Create a standard settable struct object inspector.
      List<? extends StructField> listFields = structOutputOI.getAllStructFieldRefs();
      List<String> structFieldNames = new ArrayList<String>(listFields.size());
      List<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>(listFields.size());
      for (StructField listField : listFields) {
        structFieldNames.add(listField.getFieldName());
        // We need to make sure that the underlying fields are settable as well.
        // Hence, the recursive call for each field.
        // Note that equalsCheck is false while invoking getConvertedOI() because
        // we need to bypass the initial inputOI.equals(outputOI) check.
        structFieldObjectInspectors.add(getConvertedOI(listField.getFieldObjectInspector(),
            listField.getFieldObjectInspector(), oiSettableProperties, false));
      }
      return ObjectInspectorFactory.getStandardStructObjectInspector(structFieldNames, structFieldObjectInspectors);
    case LIST:
      ListObjectInspector listOutputOI = (ListObjectInspector) outputOI;
      // We need to make sure that the list element type is settable.
      return ObjectInspectorFactory.getStandardListObjectInspector(
          getConvertedOI(listOutputOI.getListElementObjectInspector(),
              listOutputOI.getListElementObjectInspector(), oiSettableProperties, false));
    case MAP:
      MapObjectInspector mapOutputOI = (MapObjectInspector) outputOI;
      // We need to make sure that both the key type and the value type are settable.
      return ObjectInspectorFactory.getStandardMapObjectInspector(
          getConvertedOI(mapOutputOI.getMapKeyObjectInspector(),
              mapOutputOI.getMapKeyObjectInspector(), oiSettableProperties, false),
          getConvertedOI(mapOutputOI.getMapValueObjectInspector(),
              mapOutputOI.getMapValueObjectInspector(), oiSettableProperties, false));
    case UNION:
      UnionObjectInspector unionOutputOI = (UnionObjectInspector) outputOI;
      // Create a standard settable union object inspector.
      List<ObjectInspector> unionListFields = unionOutputOI.getObjectInspectors();
      List<ObjectInspector> unionFieldObjectInspectors = new ArrayList<ObjectInspector>(unionListFields.size());
      for (ObjectInspector listField : unionListFields) {
        // We need to make sure that all the fields associated with the union are settable.
        unionFieldObjectInspectors.add(getConvertedOI(listField, listField, oiSettableProperties, false));
      }
      return ObjectInspectorFactory.getStandardUnionObjectInspector(unionFieldObjectInspectors);
    default:
      // Unsupported in-memory structure.
      throw new RuntimeException("Hive internal error: conversion of " + inputOI.getTypeName()
          + " to " + outputOI.getTypeName() + " not supported yet.");
  }
}
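For context, the sketch below shows one way this method can be called directly. It is illustrative only: the chosen object inspectors are assumptions for the example, not part of the Hive code above, and the result depends on whether the input OI is already fully settable.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class GetConvertedOIExample {
  public static void main(String[] args) {
    // A list<string> object inspector over plain Java objects.
    ObjectInspector outputOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    Map<ObjectInspector, Boolean> cache = new HashMap<ObjectInspector, Boolean>();
    // equalsCheck = false skips the inputOI.equals(outputOI) shortcut; if outputOI is
    // already fully settable it may still be returned as-is.
    ObjectInspector settableOI =
        ObjectInspectorConverters.getConvertedOI(outputOI, outputOI, cache, false);

    System.out.println("converted OI type: " + settableOI.getTypeName());
  }
}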
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
From the class ObjectInspectorUtils, method hasAllFieldsSettable:
/**
 *
 * @param oi - Input object inspector
 * @param oiSettableProperties - Lookup map used to cache results (pass null to disable caching)
 * @return - true if (1) oi is an instance of a settable<DataType>OI and (2) all of the embedded
 *           object inspectors are instances of settable<DataType>OI; false otherwise.
 */
public static boolean hasAllFieldsSettable(ObjectInspector oi, Map<ObjectInspector, Boolean> oiSettableProperties) {
  // If the result is already present in the cache, return it.
  if (oiSettableProperties != null && oiSettableProperties.containsKey(oi)) {
    return oiSettableProperties.get(oi).booleanValue();
  }
  // If the top-level object inspector is non-settable, return false.
  if (!isInstanceOfSettableOI(oi)) {
    return setOISettablePropertiesMap(oi, oiSettableProperties, false);
  }
  boolean returnValue = true;
  switch (oi.getCategory()) {
    case PRIMITIVE:
      break;
    case STRUCT:
      StructObjectInspector structOutputOI = (StructObjectInspector) oi;
      List<? extends StructField> listFields = structOutputOI.getAllStructFieldRefs();
      for (StructField listField : listFields) {
        if (!hasAllFieldsSettable(listField.getFieldObjectInspector(), oiSettableProperties)) {
          returnValue = false;
          break;
        }
      }
      break;
    case LIST:
      ListObjectInspector listOutputOI = (ListObjectInspector) oi;
      returnValue = hasAllFieldsSettable(listOutputOI.getListElementObjectInspector(), oiSettableProperties);
      break;
    case MAP:
      MapObjectInspector mapOutputOI = (MapObjectInspector) oi;
      returnValue = hasAllFieldsSettable(mapOutputOI.getMapKeyObjectInspector(), oiSettableProperties)
          && hasAllFieldsSettable(mapOutputOI.getMapValueObjectInspector(), oiSettableProperties);
      break;
    case UNION:
      UnionObjectInspector unionOutputOI = (UnionObjectInspector) oi;
      List<ObjectInspector> unionListFields = unionOutputOI.getObjectInspectors();
      for (ObjectInspector listField : unionListFields) {
        if (!hasAllFieldsSettable(listField, oiSettableProperties)) {
          returnValue = false;
          break;
        }
      }
      break;
    default:
      throw new RuntimeException("Hive internal error inside hasAllFieldsSettable : "
          + oi.getTypeName() + " not supported yet.");
  }
  return setOISettablePropertiesMap(oi, oiSettableProperties, returnValue);
}
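As a quick illustration of the cache parameter, the sketch below checks a standard list<string> inspector twice with the same map. The chosen object inspector is an assumption for the example; the second call can be answered from the cache without re-walking the OI tree.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class HasAllFieldsSettableExample {
  public static void main(String[] args) {
    // list<string> built from the standard factory, with a writable string element inspector.
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);

    // The map caches per-OI results; passing null disables caching.
    Map<ObjectInspector, Boolean> cache = new HashMap<ObjectInspector, Boolean>();
    System.out.println("all fields settable: "
        + ObjectInspectorUtils.hasAllFieldsSettable(listOI, cache));

    // Repeated check with the same cache hits the map instead of recursing again.
    System.out.println("cached result: "
        + ObjectInspectorUtils.hasAllFieldsSettable(listOI, cache));
  }
}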
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
From the class ObjectInspectorUtils, method getConstantObjectInspector:
public static ConstantObjectInspector getConstantObjectInspector(ObjectInspector oi, Object value) {
  if (oi instanceof ConstantObjectInspector) {
    return (ConstantObjectInspector) oi;
  }
  ObjectInspector writableOI = getStandardObjectInspector(oi, ObjectInspectorCopyOption.WRITABLE);
  Object writableValue =
      value == null ? value : ObjectInspectorConverters.getConverter(oi, writableOI).convert(value);
  switch (writableOI.getCategory()) {
    case PRIMITIVE:
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          poi.getTypeInfo(), writableValue);
    case LIST:
      ListObjectInspector loi = (ListObjectInspector) oi;
      return ObjectInspectorFactory.getStandardConstantListObjectInspector(
          getStandardObjectInspector(loi.getListElementObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          (List<?>) writableValue);
    case MAP:
      MapObjectInspector moi = (MapObjectInspector) oi;
      return ObjectInspectorFactory.getStandardConstantMapObjectInspector(
          getStandardObjectInspector(moi.getMapKeyObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          getStandardObjectInspector(moi.getMapValueObjectInspector(), ObjectInspectorCopyOption.WRITABLE),
          (Map<?, ?>) writableValue);
    case STRUCT:
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      List<String> fieldNames = new ArrayList<String>(fields.size());
      List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fields.size());
      for (StructField f : fields) {
        fieldNames.add(f.getFieldName());
        fieldObjectInspectors.add(
            getStandardObjectInspector(f.getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE));
      }
      if (value != null && writableValue.getClass().isArray()) {
        writableValue = java.util.Arrays.asList((Object[]) writableValue);
      }
      return ObjectInspectorFactory.getStandardConstantStructObjectInspector(
          fieldNames, fieldObjectInspectors, (List<?>) writableValue);
    default:
      throw new IllegalArgumentException(writableOI.getCategory() + " not yet supported for constant OI");
  }
}
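A small usage sketch for the LIST branch above; the element type and value are illustrative assumptions. The returned constant object inspector carries the converted (writable) value alongside the type information.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConstantListOIExample {
  public static void main(String[] args) {
    // A list<int> OI over plain Java objects; the value is converted to writables internally.
    ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    List<Integer> value = Arrays.asList(1, 2, 3);

    ConstantObjectInspector constOI = ObjectInspectorUtils.getConstantObjectInspector(listOI, value);
    System.out.println(constOI.getTypeName() + " = " + constOI.getWritableConstantValue());
  }
}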
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
From the class ObjectInspectorUtils, method compare:
/**
 * Compare two objects with their respective ObjectInspectors.
 * If nullValueOpt is MAXVALUE, treat null as the maximum value.
 * If nullValueOpt is MINVALUE, treat null as the minimum value.
 */
public static int compare(Object o1, ObjectInspector oi1, Object o2, ObjectInspector oi2,
    MapEqualComparer mapEqualComparer, NullValueOption nullValueOpt) {
  if (oi1.getCategory() != oi2.getCategory()) {
    return oi1.getCategory().compareTo(oi2.getCategory());
  }
  int nullCmpRtn = -1;
  switch (nullValueOpt) {
    case MAXVALUE:
      nullCmpRtn = 1;
      break;
    case MINVALUE:
      nullCmpRtn = -1;
      break;
  }
  if (o1 == null) {
    return o2 == null ? 0 : nullCmpRtn;
  } else if (o2 == null) {
    return -nullCmpRtn;
  }
  switch (oi1.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi1 = ((PrimitiveObjectInspector) oi1);
      PrimitiveObjectInspector poi2 = ((PrimitiveObjectInspector) oi2);
      if (poi1.getPrimitiveCategory() != poi2.getPrimitiveCategory()) {
        return poi1.getPrimitiveCategory().compareTo(poi2.getPrimitiveCategory());
      }
      switch (poi1.getPrimitiveCategory()) {
        case VOID:
          return 0;
        case BOOLEAN: {
          int v1 = ((BooleanObjectInspector) poi1).get(o1) ? 1 : 0;
          int v2 = ((BooleanObjectInspector) poi2).get(o2) ? 1 : 0;
          return v1 - v2;
        }
        case BYTE: {
          int v1 = ((ByteObjectInspector) poi1).get(o1);
          int v2 = ((ByteObjectInspector) poi2).get(o2);
          return v1 - v2;
        }
        case SHORT: {
          int v1 = ((ShortObjectInspector) poi1).get(o1);
          int v2 = ((ShortObjectInspector) poi2).get(o2);
          return v1 - v2;
        }
        case INT: {
          int v1 = ((IntObjectInspector) poi1).get(o1);
          int v2 = ((IntObjectInspector) poi2).get(o2);
          return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case LONG: {
          long v1 = ((LongObjectInspector) poi1).get(o1);
          long v2 = ((LongObjectInspector) poi2).get(o2);
          return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case FLOAT: {
          float v1 = ((FloatObjectInspector) poi1).get(o1);
          float v2 = ((FloatObjectInspector) poi2).get(o2);
          // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
          if (v1 == 0.0f && v2 == 0.0f) {
            return 0;
          } else {
            // Float.compare() treats -0.0 and 0.0 as different.
            return Float.compare(v1, v2);
          }
        }
        case DOUBLE: {
          double v1 = ((DoubleObjectInspector) poi1).get(o1);
          double v2 = ((DoubleObjectInspector) poi2).get(o2);
          // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
          if (v1 == 0.0d && v2 == 0.0d) {
            return 0;
          } else {
            // Double.compare() treats -0.0 and 0.0 as different.
            return Double.compare(v1, v2);
          }
        }
        case STRING: {
          if (poi1.preferWritable() || poi2.preferWritable()) {
            Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
            Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
            return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1 : t1.compareTo(t2));
          } else {
            String s1 = (String) poi1.getPrimitiveJavaObject(o1);
            String s2 = (String) poi2.getPrimitiveJavaObject(o2);
            return s1 == null ? (s2 == null ? 0 : -1) : (s2 == null ? 1 : s1.compareTo(s2));
          }
        }
        case CHAR: {
          HiveCharWritable t1 = ((HiveCharObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveCharWritable t2 = ((HiveCharObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case VARCHAR: {
          HiveVarcharWritable t1 = ((HiveVarcharObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveVarcharWritable t2 = ((HiveVarcharObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case BINARY: {
          BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
          BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return bw1.compareTo(bw2);
        }
        case DATE: {
          DateWritable d1 = ((DateObjectInspector) poi1).getPrimitiveWritableObject(o1);
          DateWritable d2 = ((DateObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return d1.compareTo(d2);
        }
        case TIMESTAMP: {
          TimestampWritable t1 = ((TimestampObjectInspector) poi1).getPrimitiveWritableObject(o1);
          TimestampWritable t2 = ((TimestampObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case INTERVAL_YEAR_MONTH: {
          HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return i1.compareTo(i2);
        }
        case INTERVAL_DAY_TIME: {
          HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return i1.compareTo(i2);
        }
        case DECIMAL: {
          HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveDecimalWritable t2 = ((HiveDecimalObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        default: {
          throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory());
        }
      }
    }
    case STRUCT: {
      StructObjectInspector soi1 = (StructObjectInspector) oi1;
      StructObjectInspector soi2 = (StructObjectInspector) oi2;
      List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
      List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
      int minimum = Math.min(fields1.size(), fields2.size());
      for (int i = 0; i < minimum; i++) {
        int r = compare(soi1.getStructFieldData(o1, fields1.get(i)), fields1.get(i).getFieldObjectInspector(),
            soi2.getStructFieldData(o2, fields2.get(i)), fields2.get(i).getFieldObjectInspector(),
            mapEqualComparer, nullValueOpt);
        if (r != 0) {
          return r;
        }
      }
      return fields1.size() - fields2.size();
    }
    case LIST: {
      ListObjectInspector loi1 = (ListObjectInspector) oi1;
      ListObjectInspector loi2 = (ListObjectInspector) oi2;
      int minimum = Math.min(loi1.getListLength(o1), loi2.getListLength(o2));
      for (int i = 0; i < minimum; i++) {
        int r = compare(loi1.getListElement(o1, i), loi1.getListElementObjectInspector(),
            loi2.getListElement(o2, i), loi2.getListElementObjectInspector(),
            mapEqualComparer, nullValueOpt);
        if (r != 0) {
          return r;
        }
      }
      return loi1.getListLength(o1) - loi2.getListLength(o2);
    }
    case MAP: {
      if (mapEqualComparer == null) {
        throw new RuntimeException("Compare on map type not supported!");
      } else {
        return mapEqualComparer.compare(o1, (MapObjectInspector) oi1, o2, (MapObjectInspector) oi2);
      }
    }
    case UNION: {
      UnionObjectInspector uoi1 = (UnionObjectInspector) oi1;
      UnionObjectInspector uoi2 = (UnionObjectInspector) oi2;
      byte tag1 = uoi1.getTag(o1);
      byte tag2 = uoi2.getTag(o2);
      if (tag1 != tag2) {
        return tag1 - tag2;
      }
      return compare(uoi1.getField(o1), uoi1.getObjectInspectors().get(tag1),
          uoi2.getField(o2), uoi2.getObjectInspectors().get(tag2), mapEqualComparer, nullValueOpt);
    }
    default:
      throw new RuntimeException("Compare on unknown type: " + oi1.getCategory());
  }
}
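A brief sketch of comparing two lists through this API. The object inspectors and values are illustrative; NullValueOption is assumed to be the enum nested in ObjectInspectorUtils (adjust the import if it lives elsewhere), and the map comparer may be null here because no map type is involved.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.NullValueOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CompareListsExample {
  public static void main(String[] args) {
    ListObjectInspector loi = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    List<String> a = Arrays.asList("a", "b");
    List<String> b = Arrays.asList("a", "c");

    // Element-wise comparison, then length as a tie-breaker, as in the LIST branch above.
    int r = ObjectInspectorUtils.compare(a, loi, b, loi, null, NullValueOption.MINVALUE);
    System.out.println("compare result: " + r); // expected to be negative, since "b" < "c"
  }
}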