use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.
the class TestVectorUDFAdaptor method testLongUDF.
@Test
public void testLongUDF() {

  // create a syntax tree for a simple function call "longudf(col0)"
  ExprNodeGenericFuncDesc funcDesc;
  TypeInfo typeInfo = TypeInfoFactory.longTypeInfo;
  GenericUDFBridge genericUDFBridge = new GenericUDFBridge("longudf", false,
      LongUDF.class.getName());
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(typeInfo, "col0", "tablename", false);
  children.add(colDesc);
  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[1];
  argDescs[0] = new VectorUDFArgDesc();
  argDescs[0].setVariable(0);
  funcDesc = new ExprNodeGenericFuncDesc(typeInfo, genericUDFBridge,
      genericUDFBridge.getUdfName(), children);

  // create the adaptor for this function call to work in vector mode
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 1, "Long", argDescs);
  } catch (HiveException e) {
    // We should never get here.
    assertTrue(false);
  }

  VectorizedRowBatch b = getBatchLongInLongOut();
  vudf.evaluate(b);

  // verify output
  LongColumnVector out = (LongColumnVector) b.cols[1];
  assertEquals(1000, out.vector[0]);
  assertEquals(1001, out.vector[1]);
  assertEquals(1002, out.vector[2]);
  assertTrue(out.noNulls);
  assertFalse(out.isRepeating);

  // with nulls
  b = getBatchLongInLongOut();
  out = (LongColumnVector) b.cols[1];
  b.cols[0].noNulls = false;
  vudf.evaluate(b);
  assertFalse(out.noNulls);
  assertEquals(1000, out.vector[0]);
  assertEquals(1001, out.vector[1]);
  assertTrue(out.isNull[2]);
  assertFalse(out.isRepeating);

  // with repeating
  b = getBatchLongInLongOut();
  out = (LongColumnVector) b.cols[1];
  b.cols[0].isRepeating = true;
  vudf.evaluate(b);
  // The implementation may or may not set isRepeating on the output column;
  // that is implementation-defined, so accept either behavior.
  assertTrue(b.cols[1].isRepeating && out.vector[0] == 1000
      || !b.cols[1].isRepeating && out.vector[2] == 1000);
  assertEquals(3, b.size);
}
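The helper getBatchLongInLongOut() is not shown on this page. The sketch below is a plausible reconstruction consistent with the assertions above, assuming LongUDF adds 1000 to its input and that row 2's isNull entry is pre-marked so it takes effect once the test clears noNulls; treat the values and layout as assumptions, not the actual Hive source.

private VectorizedRowBatch getBatchLongInLongOut() {
  // Two columns: cols[0] holds the input longs, cols[1] receives the UDF output.
  VectorizedRowBatch b = new VectorizedRowBatch(2);
  LongColumnVector in = new LongColumnVector();
  LongColumnVector out = new LongColumnVector();
  b.cols[0] = in;
  b.cols[1] = out;
  in.vector[0] = 0;
  in.vector[1] = 1;
  in.vector[2] = 2;
  // Pre-marked null; only honored after the test sets noNulls = false.
  in.isNull[2] = true;
  in.noNulls = true;
  b.size = 3;
  return b;
}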
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.
the class TestStreamingMin method minLong.
public void minLong(Iterator<Long> inVals, int inSz, int numPreceding, int numFollowing,
    Iterator<Long> outVals) throws HiveException {
  GenericUDAFMin fnR = new GenericUDAFMin();
  TypeInfo[] inputTypes = { TypeInfoFactory.longTypeInfo };
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
  LongWritable[] in = new LongWritable[1];
  in[0] = new LongWritable();
  TestStreamingSum._agg(fnR, inputTypes, inVals, TypeHandler.LongHandler, in, inputOIs,
      inSz, numPreceding, numFollowing, outVals);
}
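A hypothetical invocation of minLong (the data and frame below are illustrative, not from the Hive test suite), assuming outVals supplies the expected minimum for each row over the frame [i - numPreceding, i + numFollowing], truncated at the partition edges:

// Frame: ROWS BETWEEN 2 PRECEDING AND CURRENT ROW, over 5 hypothetical rows.
List<Long> in = Arrays.asList(5L, 3L, 4L, 1L, 2L);
// Expected streaming minima per row: min over rows [i - 2, i].
List<Long> expectedMins = Arrays.asList(5L, 3L, 3L, 1L, 1L);
new TestStreamingMin().minLong(in.iterator(), 5, 2, 0, expectedMins.iterator());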
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.
the class TestStreamingSum method sumLong.
public void sumLong(Iterator<Long> inVals, int inSz, int numPreceding, int numFollowing,
    Iterator<Long> outVals) throws HiveException {
  GenericUDAFSum fnR = new GenericUDAFSum();
  TypeInfo[] inputTypes = { TypeInfoFactory.longTypeInfo };
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
  LongWritable[] in = new LongWritable[1];
  in[0] = new LongWritable();
  _agg(fnR, inputTypes, inVals, TypeHandler.LongHandler, in, inputOIs, inSz,
      numPreceding, numFollowing, outVals);
}
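The same calling convention with a symmetric frame, again with hypothetical data, assuming windows are truncated at the edges:

// Frame: ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING, over 5 hypothetical rows.
List<Long> in = Arrays.asList(1L, 2L, 3L, 4L, 5L);
// Expected streaming sums per row: sum over rows [i - 1, i + 1].
List<Long> expectedSums = Arrays.asList(3L, 6L, 9L, 12L, 9L);
new TestStreamingSum().sumLong(in.iterator(), 5, 1, 1, expectedSums.iterator());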
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.
the class TestGenericUDFOPNegative method testLong.
@Test
public void testLong() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();
  LongWritable input = new LongWritable(3234747);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
  LongWritable res = (LongWritable) udf.evaluate(args);
  Assert.assertEquals(-3234747L, res.get());
}
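One edge case the test does not cover: if GenericUDFOPNegative negates with plain Java arithmetic (an assumption about its internals), Long.MIN_VALUE has no positive counterpart and wraps back to itself under Java's two's-complement semantics:

long v = 3234747L;
assert -v == -3234747L; // the case the test exercises
// Overflow edge: Long.MIN_VALUE negates to itself in Java.
assert -Long.MIN_VALUE == Long.MIN_VALUE;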
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.
the class MapJoinMultiKeyBenchBase method doSetup.
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();
  long seed = 2543;

  // 100,000.
  int rowCount = 100000;

  String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] {
      TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.longTypeInfo,
      TypeInfoFactory.stringTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0, 1, 2 };
  String[] smallTableValueColumnNames = new String[] { "sv1" };
  TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
  int[] bigTableRetainColumnNums = new int[] { 0, 1, 2 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0 };
  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
  setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums,
      smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
      smallTableGenerationParameters);
}
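An aside on TypeInfoFactory.longTypeInfo itself: TypeInfoFactory caches primitive TypeInfos, so the static field and a lookup by Hive's SQL type name ("bigint" for a Java long) should yield the same cached instance. A minimal sketch:

TypeInfo byField = TypeInfoFactory.longTypeInfo;
TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo("bigint");
// Primitive TypeInfos are cached, so this should be the identical object.
assert byField == byName;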