Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From class TestVectorizer, method testAggregateOnUDF:
@Test
public void testAggregateOnUDF() throws HiveException {
  ExprNodeColumnDesc colExprA = new ExprNodeColumnDesc(Integer.class, "col1", "T", false);
  ExprNodeColumnDesc colExprB = new ExprNodeColumnDesc(Integer.class, "col2", "T", false);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(colExprA);
  ExprNodeGenericFuncDesc exprNodeDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFAbs(), children);
  ArrayList<ExprNodeDesc> params = new ArrayList<ExprNodeDesc>();
  params.add(exprNodeDesc);
  List<ObjectInspector> paramOIs = new ArrayList<ObjectInspector>();
  paramOIs.add(exprNodeDesc.getWritableObjectInspector());
  AggregationDesc aggDesc = new AggregationDesc("sum",
      FunctionRegistry.getGenericUDAFEvaluator("sum", paramOIs, false, false),
      params, false, GenericUDAFEvaluator.Mode.PARTIAL1);
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  outputColumnNames.add("_col0");
  GroupByDesc desc = new GroupByDesc();
  desc.setVectorDesc(new VectorGroupByDesc());
  desc.setOutputColumnNames(outputColumnNames);
  ArrayList<AggregationDesc> aggDescList = new ArrayList<AggregationDesc>();
  aggDescList.add(aggDesc);
  desc.setAggregators(aggDescList);
  ArrayList<ExprNodeDesc> grpByKeys = new ArrayList<ExprNodeDesc>();
  grpByKeys.add(colExprB);
  desc.setKeys(grpByKeys);
  Operator<? extends OperatorDesc> gbyOp = OperatorFactory.get(new CompilationOpContext(), desc);
  desc.setMode(GroupByDesc.Mode.HASH);
  Vectorizer v = new Vectorizer();
  v.testSetCurrentBaseWork(new MapWork());
  Assert.assertTrue(v.validateMapWorkOperator(gbyOp, null, false));
  // vContext is a VectorizationContext field of TestVectorizer, initialized in the test's setup.
  VectorGroupByOperator vectorOp =
      (VectorGroupByOperator) v.vectorizeOperator(gbyOp, vContext, false, null);
  Assert.assertEquals(VectorUDAFSumLong.class, vectorOp.getAggregators()[0].getClass());
  VectorUDAFSumLong udaf = (VectorUDAFSumLong) vectorOp.getAggregators()[0];
  Assert.assertEquals(FuncAbsLongToLong.class, udaf.getInputExpression().getClass());
}
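For context: TypeInfoFactory.intTypeInfo is the shared PrimitiveTypeInfo singleton describing Hive's int type, which is why the UDF tests further down can compare oi.getTypeInfo() against it directly. A minimal standalone sketch of that relationship (the class name IntTypeInfoSketch is ours, not from Hive; the API calls are the standard serde2 ones):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class IntTypeInfoSketch {
  public static void main(String[] args) {
    // intTypeInfo is a cached singleton, so all call sites share one instance.
    PrimitiveTypeInfo intType = TypeInfoFactory.intTypeInfo;
    System.out.println(intType.getTypeName()); // "int"

    // A TypeInfo maps to a matching writable ObjectInspector, the same
    // pairing the UDF tests on this page rely on.
    ObjectInspector oi =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(intType);
    System.out.println(oi.getTypeName()); // also "int"
  }
}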
From class SemanticAnalyzer, method getPartitionColsFromBucketColsForUpdateDelete:
// We have to set up the bucketing columns differently for updates and deletes,
// as they always use the ROW__ID column.
private ArrayList<ExprNodeDesc> getPartitionColsFromBucketColsForUpdateDelete(
    Operator input, boolean convert) throws SemanticException {
  //return genConvertCol(dest, qb, tab, table_desc, input, Arrays.asList(0), convert);
  // For updates and deletes the bucketing column is always the first column,
  // and it isn't in the table info. So rather than asking the table for it,
  // we'll construct it ourselves and send it back. This is based on the work done in
  // genConvertCol below.
  ColumnInfo rowField = opParseCtx.get(input).getRowResolver().getColumnInfos().get(0);
  TypeInfo rowFieldTypeInfo = rowField.getType();
  ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
      rowField.getInternalName(), rowField.getTabAlias(), true);
  if (convert) {
    column = ParseUtils.createConversionCast(column, TypeInfoFactory.intTypeInfo);
  }
  ArrayList<ExprNodeDesc> rlist = new ArrayList<ExprNodeDesc>(1);
  rlist.add(column);
  return rlist;
}
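When convert is true, the ParseUtils.createConversionCast call wraps the column in a cast to int. A rough, self-contained sketch of the expression shape that produces, built here with an explicit GenericUDFBridge instead of going through ParseUtils (the class name, the column/table names, and the use of intTypeInfo as a stand-in for the real ROW__ID struct type are all ours):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.UDFToInteger;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ConversionCastSketch {
  public static void main(String[] args) {
    // Stand-in for the first row-resolver column; in real update/delete
    // plans this is ROW__ID, whose type is a struct rather than int.
    ExprNodeDesc column =
        new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "ROW__ID", "t", true);

    // UDFToInteger wrapped in a bridge is the UDF form of CAST(x AS INT).
    GenericUDFBridge toInt =
        new GenericUDFBridge("int", false, UDFToInteger.class.getName());

    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(column);
    ExprNodeDesc cast =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, toInt, children);
    System.out.println(cast.getExprString()); // e.g. int(ROW__ID)
  }
}

The reason casting ROW__ID to int makes sense at all is that, in the Hive versions these snippets come from, UDFToInteger has an evaluate overload for the ROW__ID record identifier that returns its bucket id, which is exactly the value update/delete plans partition on.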
From class TestGenericUDFOPNegative, method testInt:
@Test
public void testInt() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();
  IntWritable input = new IntWritable(747);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableIntObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.intTypeInfo, oi.getTypeInfo());
  IntWritable res = (IntWritable) udf.evaluate(args);
  Assert.assertEquals(-747, res.get());
}
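A small variation in the style of the same test class (the method name and values are ours) showing that the return ObjectInspector tracks the argument type: negating a double reports doubleTypeInfo instead of intTypeInfo. DoubleWritable here is Hive's org.apache.hadoop.hive.serde2.io.DoubleWritable, and the other imports match the test above.

@Test
public void testDoubleSketch() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();
  DoubleWritable input = new DoubleWritable(1.5);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDoubleObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // The result type follows the argument type: double in, double out.
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(-1.5, res.get(), 1e-9);
}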
From class TestGenericUDFInternalInterval, method testInvalidString:
@Test(expected = UDFArgumentException.class)
public void testInvalidString() throws Exception {
  try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    DeferredObject[] args = {
        new DeferredJavaObject(new ByteWritable((byte) 4)),
        new DeferredJavaObject(new Text("invalid")) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
    // should emit an exception
    udf.evaluate(args);
  }
}
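For contrast, a condensed happy-path sketch modeled on the same test class's day-interval case (our method name and literal values, reusing the imports of the test above): a parsable string yields an interval-day-time result instead of an exception.

@Test
public void testDayIntervalSketch() throws Exception {
  try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    DeferredObject[] args = {
        new DeferredJavaObject(new ByteWritable((byte) 4)),
        new DeferredJavaObject(new Text("8")) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
    // "8" parses as an 8-day interval
    HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
    Assert.assertEquals(8, res.getHiveIntervalDayTime().getDays());
  }
}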