Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class GenericUDFMacro, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkNotNull(colTypes, "colTypes");
  checkNotNull(arguments, "arguments");
  checkNotNull(bodyDesc, "bodyDesc");
  if (colTypes.size() != arguments.length) {
    throw new UDFArgumentLengthException(
        "The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
  }
  try {
    body = ExprNodeEvaluatorFactory.get(bodyDesc);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
  // One converter per argument, so caller-supplied inspectors are converted
  // to the standard writable inspectors the macro body expects.
  converters = new ObjectInspectorConverters.Converter[arguments.length];
  ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
  for (int index = 0; index < arguments.length; ++index) {
    ObjectInspector objectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
    colObjectInspectors.add(objectInspector);
    converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
  }
  evaluatedArguments = new ArrayList<Object>(arguments.length);
  // Wrap the macro's formal parameters in a single struct inspector; the body
  // evaluator is initialized against that struct as if it were an input row.
  ObjectInspector structOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
  try {
    return body.initialize(structOI);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
}
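The factory call itself is easy to exercise in isolation. A minimal, self-contained sketch (the field names and inspectors below are illustrative, not from the Hive source) that builds a standard struct inspector and walks its fields:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructOIDemo {
  public static void main(String[] args) {
    // Field names and field inspectors are passed as two parallel lists.
    List<String> names = Arrays.asList("name", "age");
    List<ObjectInspector> ois = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    StructObjectInspector soi =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
    // Prints "struct<name:string,age:int>", then one line per field.
    System.out.println(soi.getTypeName());
    for (StructField f : soi.getAllStructFieldRefs()) {
      System.out.println(f.getFieldName() + " : " + f.getFieldObjectInspector().getTypeName());
    }
  }
}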
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class GenericUDTFGetSplits, method initialize.
@Override
public StructObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  LOG.debug("initializing GenericUDTFGetSplits");
  if (SessionState.get() == null || SessionState.get().getConf() == null) {
    throw new IllegalStateException("Cannot run get splits outside HS2");
  }
  LOG.debug("Initialized conf, jc and metastore connection");
  // GET_SPLITS(query, numSplits): validate the argument count and types.
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("The function GET_SPLITS accepts 2 arguments.");
  } else if (!(arguments[0] instanceof StringObjectInspector)) {
    LOG.error("Got " + arguments[0].getTypeName() + " instead of string.");
    throw new UDFArgumentTypeException(0, "\"string\" is expected at function GET_SPLITS, "
        + "but \"" + arguments[0].getTypeName() + "\" is found");
  } else if (!(arguments[1] instanceof IntObjectInspector)) {
    LOG.error("Got " + arguments[1].getTypeName() + " instead of int.");
    throw new UDFArgumentTypeException(1, "\"int\" is expected at function GET_SPLITS, "
        + "but \"" + arguments[1].getTypeName() + "\" is found");
  }
  stringOI = (StringObjectInspector) arguments[0];
  intOI = (IntObjectInspector) arguments[1];
  // The UDTF emits rows with a single binary column named "split".
  List<String> names = Arrays.asList("split");
  List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
      PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector);
  StructObjectInspector outputOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
  LOG.debug("done initializing GenericUDTFGetSplits");
  return outputOI;
}
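The returned struct fixes the UDTF's output schema to one binary column named split, so every row the operator later forwards must be an Object[] matching that shape. A hedged sketch of a minimal UDTF wired up the same way (the class and the dummy bytes are hypothetical, not part of Hive):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class BytesColumnUDTFDemo extends GenericUDTF {
  @Override
  public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    // Same shape as above: one output column, "split", of Hive type binary.
    List<String> names = Arrays.asList("split");
    List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
  }

  @Override
  public void process(Object[] args) throws HiveException {
    // Every forwarded row must match the declared struct: an Object[] with one
    // byte[] entry for the "split" column. The bytes here are placeholders;
    // GenericUDTFGetSplits forwards serialized input splits instead.
    forward(new Object[] { new byte[] { 0x01, 0x02 } });
  }

  @Override
  public void close() throws HiveException {
  }
}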
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class GenericUDTFStack, method initialize.
@Override
public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
  if (args.length < 2) {
    throw new UDFArgumentException("STACK() expects at least two arguments.");
  }
  if (!(args[0] instanceof ConstantObjectInspector)) {
    throw new UDFArgumentException("The first argument to STACK() must be a constant integer (got "
        + args[0].getTypeName() + " instead).");
  }
  numRows = (IntWritable) ((ConstantObjectInspector) args[0]).getWritableConstantValue();
  if (numRows == null || numRows.get() < 1) {
    throw new UDFArgumentException("STACK() expects its first argument to be >= 1.");
  }
  // Divide and round up.
  numCols = (args.length - 1 + numRows.get() - 1) / numRows.get();
  // The values are laid out row-major: the value for row ii, column jj is
  // argument ii * numCols + jj + 1. Each column's values must resolve to a common type.
  for (int jj = 0; jj < numCols; ++jj) {
    returnOIResolvers.add(new ReturnObjectInspectorResolver());
    for (int ii = 0; ii < numRows.get(); ++ii) {
      int index = ii * numCols + jj + 1;
      if (index < args.length && !returnOIResolvers.get(jj).update(args[index])) {
        throw new UDFArgumentException("Argument " + (jj + 1) + "'s type ("
            + args[jj + 1].getTypeName() + ") should be equal to argument " + index
            + "'s type (" + args[index].getTypeName() + ")");
      }
    }
  }
  forwardObj = new Object[numCols];
  for (int ii = 0; ii < args.length; ++ii) {
    argOIs.add(args[ii]);
  }
  // One output column per resolved column type: col0, col1, ...
  ArrayList<String> fieldNames = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (int ii = 0; ii < numCols; ++ii) {
    fieldNames.add("col" + ii);
    fieldOIs.add(returnOIResolvers.get(ii).get());
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
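The index arithmetic is easiest to see with concrete numbers. A small standalone sketch (illustrative only, not Hive code) of how STACK(2, 'a', 1, 'b', 2) maps its four values onto two rows of two columns:

public class StackIndexDemo {
  public static void main(String[] args) {
    // STACK(2, 'a', 1, 'b', 2): 4 values in 2 rows -> numCols = (4 + 2 - 1) / 2 = 2.
    int numRows = 2;
    int numCols = (4 + numRows - 1) / numRows;
    for (int ii = 0; ii < numRows; ++ii) {
      for (int jj = 0; jj < numCols; ++jj) {
        // Row 0 reads arguments 1 and 2 ('a', 1); row 1 reads 3 and 4 ('b', 2).
        System.out.println("row " + ii + ", col" + jj + " <- argument " + (ii * numCols + jj + 1));
      }
    }
  }
}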
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class MatchPath, method createSelectListOI.
protected static StructObjectInspector createSelectListOI(MatchPath evaluator, PTFInputDef inpDef) {
  StructObjectInspector inOI = inpDef.getOutputShape().getOI();
  ArrayList<String> inputColumnNames = new ArrayList<String>();
  ArrayList<String> selectListNames = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  // Keep only the input fields that have a mapped input column name.
  for (StructField f : inOI.getAllStructFieldRefs()) {
    String inputColName = evaluator.inputColumnNamesMap.get(f.getFieldName());
    if (inputColName != null) {
      inputColumnNames.add(inputColName);
      selectListNames.add(f.getFieldName());
      fieldOIs.add(f.getFieldObjectInspector());
    }
  }
  // The path attribute is a list of structs: one struct per row on the matched path.
  StandardListObjectInspector pathAttrOI =
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames, fieldOIs));
  ArrayList<ObjectInspector> selectFieldOIs = new ArrayList<ObjectInspector>();
  selectFieldOIs.addAll(fieldOIs);
  selectFieldOIs.add(pathAttrOI);
  selectListNames.add(MatchPath.PATHATTR_NAME);
  // Select list = the input columns followed by the path attribute column.
  return ObjectInspectorFactory.getStandardStructObjectInspector(selectListNames, selectFieldOIs);
}
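Nesting the struct inspector inside a list inspector yields a column of Hive type array<struct<...>>. A minimal hedged sketch of the same nesting, with made-up field names rather than MatchPath's real ones:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class PathAttrOIDemo {
  public static void main(String[] args) {
    List<String> rowFields = Arrays.asList("symbol", "price");
    List<ObjectInspector> rowOIs = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    StandardListObjectInspector pathAttrOI =
        ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorFactory.getStandardStructObjectInspector(rowFields, rowOIs));
    // Prints: array<struct<symbol:string,price:double>>
    System.out.println(pathAttrOI.getTypeName());
  }
}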
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector in project hive by apache.
The class TestGenericUDFSortArray, method testSortStruct.
@Test
public void testSortStruct() throws HiveException {
  // The input is a list of struct<f1:string,f2:double,f3:date,f4:array<int>>.
  ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
      ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1", "f2", "f3", "f4"),
          asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
              PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
              PrimitiveObjectInspectorFactory.writableDateObjectInspector,
              ObjectInspectorFactory.getStandardListObjectInspector(
                  PrimitiveObjectInspectorFactory.writableIntObjectInspector)))) };
  udf.initialize(inputOIs);
  // Note: new Date(y, m, d) is the deprecated java.sql.Date constructor; only relative order matters here.
  Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
  Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
  Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(5)));
  Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
  // Structs compare field by field, so the expected ascending order is i4, i3, i1, i2.
  runAndVerify(asList(i1, i2, i3, i4), asList(i4, i3, i1, i2));
}