Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
The class DynamicSerDeTypeSet, method serialize.
@Override
public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException, IllegalAccessException {
  ListObjectInspector loi = (ListObjectInspector) oi;
  Set<Object> set = (Set<Object>) o;
  DynamicSerDeTypeBase mt = getElementType();
  tset = new TSet(mt.getType(), set.size());
  oprot.writeSetBegin(tset);
  for (Object element : set) {
    mt.serialize(element, loi.getListElementObjectInspector(), oprot);
  }
  // in theory, the below call isn't needed in non thrift_mode, but let's not
  // get too crazy
  oprot.writeSetEnd();
}
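Nothing here is set-specific on the inspector side: Hive has no dedicated set ObjectInspector, so the Thrift set is described through a list ObjectInspector and the serializer only consults its element inspector and the set size. A minimal, illustrative caller-side setup, using the standard ObjectInspector factories rather than the DynamicSerDe classes above:

// Illustrative sketch only: pair a Java Set with a standard list ObjectInspector
// over its element type, which is all the serializer above needs.
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class SetAsListOIDemo {
  public static void main(String[] args) {
    ListObjectInspector loi = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    Set<Object> set = new HashSet<>();
    set.add(1);
    set.add(2);
    System.out.println(loi.getListElementObjectInspector().getTypeName()); // int
    System.out.println("elements to serialize: " + set.size());            // 2
  }
}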
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
The class WindowingTableFunction, method iterator.
@SuppressWarnings("rawtypes")
@Override
public Iterator<Object> iterator(PTFPartitionIterator<Object> pItr) throws HiveException {
  WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
  ArrayList<Object> output = new ArrayList<Object>();
  List<?>[] outputFromPivotFunctions = new List<?>[wTFnDef.getWindowFunctions().size()];
  ArrayList<Integer> wFnsWithWindows = new ArrayList<Integer>();
  PTFPartition iPart = pItr.getPartition();
  int i = 0;
  for (WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
    boolean processWindow = processWindow(wFn.getWindowFrame());
    pItr.reset();
    if (!processWindow && !wFn.isPivotResult()) {
      Object out = evaluateFunctionOnPartition(wFn, iPart);
      output.add(out);
    } else if (wFn.isPivotResult()) {
      GenericUDAFEvaluator streamingEval = wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
      if (streamingEval != null && streamingEval instanceof ISupportStreamingModeForWindowing) {
        ISupportStreamingModeForWindowing strEval = (ISupportStreamingModeForWindowing) streamingEval;
        if (strEval.getRowsRemainingAfterTerminate() == 0) {
          wFn.setWFnEval(streamingEval);
          if (wFn.getOI() instanceof ListObjectInspector) {
            ListObjectInspector listOI = (ListObjectInspector) wFn.getOI();
            wFn.setOI(listOI.getListElementObjectInspector());
          }
          output.add(null);
          wFnsWithWindows.add(i);
        } else {
          outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
          output.add(null);
        }
      } else {
        outputFromPivotFunctions[i] = (List) evaluateFunctionOnPartition(wFn, iPart);
        output.add(null);
      }
    } else {
      output.add(null);
      wFnsWithWindows.add(i);
    }
    i++;
  }
  for (i = 0; i < iPart.getOutputOI().getAllStructFieldRefs().size(); i++) {
    output.add(null);
  }
  if (wTFnDef.getRankLimit() != -1) {
    rnkLimitDef = new RankLimit(wTFnDef.getRankLimit(), wTFnDef.getRankLimitFunction(), wTFnDef.getWindowFunctions());
  }
  return new WindowingIterator(iPart, output, outputFromPivotFunctions,
      ArrayUtils.toPrimitive(wFnsWithWindows.toArray(new Integer[wFnsWithWindows.size()])));
}
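The ListObjectInspector-specific step above is the OI narrowing for streaming pivot results: when the evaluator can emit rows incrementally, the function's per-partition list output is reported one element at a time, so the list OI is swapped for its element OI. A stripped-down sketch of just that narrowing, using a hypothetical helper name and the standard inspector factories:

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UnwrapListOIDemo {
  // If the OI describes a list, return its element OI; otherwise return it unchanged.
  static ObjectInspector unwrapIfList(ObjectInspector oi) {
    return (oi instanceof ListObjectInspector)
        ? ((ListObjectInspector) oi).getListElementObjectInspector()
        : oi;
  }

  public static void main(String[] args) {
    ObjectInspector listOfLongs = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector);
    System.out.println(unwrapIfList(listOfLongs).getTypeName()); // bigint
  }
}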
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project druid by druid-io.
The class OrcHadoopInputRowParser, method parse.
@Override
public InputRow parse(OrcStruct input) {
  Map<String, Object> map = Maps.newHashMap();
  List<? extends StructField> fields = oip.getAllStructFieldRefs();
  for (StructField field : fields) {
    ObjectInspector objectInspector = field.getFieldObjectInspector();
    switch (objectInspector.getCategory()) {
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) objectInspector;
        map.put(field.getFieldName(), primitiveObjectInspector.getPrimitiveJavaObject(oip.getStructFieldData(input, field)));
        break;
      case LIST:
        // array case - only 1-depth array supported yet
        ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
        map.put(field.getFieldName(), getListObject(listObjectInspector, oip.getStructFieldData(input, field)));
        break;
      default:
        break;
    }
  }
  TimestampSpec timestampSpec = parseSpec.getTimestampSpec();
  DateTime dateTime = timestampSpec.extractTimestamp(map);
  return new MapBasedInputRow(dateTime, dimensions, map);
}
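The getListObject helper called for LIST fields is not shown in this snippet. A plausible sketch of what such a helper might do, assuming it simply flattens a one-level list via the element inspector; this is an illustration, not the project's actual implementation:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;

public class ListFieldFlattener {
  // Hypothetical stand-in for getListObject(...): turns a one-level list field
  // into plain Java values using the element ObjectInspector.
  static List<Object> getListObject(ListObjectInspector listOI, Object data) {
    if (data == null) {
      return null;
    }
    ObjectInspector elemOI = listOI.getListElementObjectInspector();
    List<Object> out = new ArrayList<>();
    for (int i = 0; i < listOI.getListLength(data); i++) {
      Object element = listOI.getListElement(data, i);
      if (elemOI.getCategory() == ObjectInspector.Category.PRIMITIVE) {
        out.add(((PrimitiveObjectInspector) elemOI).getPrimitiveJavaObject(element));
      } else {
        // nested kinds are left as-is, matching the "only 1-depth array" comment above
        out.add(element);
      }
    }
    return out;
  }
}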
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
The class GenericUDAFSumList, method getEvaluator.
@Override
public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException {
  ObjectInspector[] inspectors = info.getParameterObjectInspectors();
  if (inspectors.length != 1) {
    throw new UDFArgumentTypeException(inspectors.length - 1, "Exactly one argument is expected.");
  }
  if (inspectors[0].getCategory() != ObjectInspector.Category.LIST) {
    throw new UDFArgumentTypeException(0, "Argument should be a list type");
  }
  ListObjectInspector listOI = (ListObjectInspector) inspectors[0];
  ObjectInspector elementOI = listOI.getListElementObjectInspector();
  if (elementOI.getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + elementOI.getTypeName() + " is passed.");
  }
  PrimitiveObjectInspector.PrimitiveCategory pcat = ((PrimitiveObjectInspector) elementOI).getPrimitiveCategory();
  return new GenericUDAFSumLong();
}
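The two category checks above are exactly what an array-typed argument must satisfy: the parameter OI must be a LIST, and its element OI must be PRIMITIVE. A small illustration, independent of the UDAF, showing how a standard array<double> inspector passes both checks:

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class SumListArgCheckDemo {
  public static void main(String[] args) {
    ObjectInspector arg = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
    System.out.println(arg.getCategory());   // LIST  -> passes the first check
    ObjectInspector elem = ((ListObjectInspector) arg).getListElementObjectInspector();
    System.out.println(elem.getCategory());  // PRIMITIVE -> passes the second check
    System.out.println(elem.getTypeName());  // double
  }
}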
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project asterixdb by apache.
The class HiveRecordParser, method parseOrderedList.
private void parseOrderedList(AOrderedListType aOrderedListType, Object obj, ListObjectInspector foi) throws HyracksDataException {
  OrderedListBuilder orderedListBuilder = getOrderedListBuilder();
  IAType itemType = null;
  if (aOrderedListType != null)
    itemType = aOrderedListType.getItemType();
  orderedListBuilder.reset(aOrderedListType);
  int n = foi.getListLength(obj);
  for (int i = 0; i < n; i++) {
    Object element = foi.getListElement(obj, i);
    ObjectInspector eoi = foi.getListElementObjectInspector();
    if (element == null) {
      throw new RuntimeDataException(ErrorCode.PARSER_HIVE_NULL_VALUE_IN_LIST);
    }
    parseItem(itemType, element, eoi, listItemBuffer.getDataOutput(), true);
    orderedListBuilder.addItem(listItemBuffer);
  }
  orderedListBuilder.write(fieldValueBuffer.getDataOutput(), true);
}
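The loop above is the canonical ListObjectInspector traversal: getListLength for the size, getListElement for each raw element, and getListElementObjectInspector to interpret the elements. A self-contained sketch of the same pattern over a plain Java list with a standard string inspector:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ListWalkDemo {
  public static void main(String[] args) {
    ListObjectInspector foi = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    List<String> data = Arrays.asList("a", "b", "c");
    PrimitiveObjectInspector eoi =
        (PrimitiveObjectInspector) foi.getListElementObjectInspector();
    for (int i = 0; i < foi.getListLength(data); i++) {
      Object element = foi.getListElement(data, i);
      System.out.println(eoi.getPrimitiveJavaObject(element)); // a, b, c
    }
  }
}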