Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in the Apache Hive project.
From class TestGenericUDFSha2, method testShaNullStr:
public void testShaNullStr() throws HiveException {
    // When the bit-length argument is a NULL constant, sha2() must return NULL
    // regardless of the input string.
    GenericUDFSha2 sha2 = new GenericUDFSha2();
    IntWritable nullLen = null;
    ObjectInspector strOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector lenOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, nullLen);
    sha2.initialize(new ObjectInspector[] { strOI, lenOI });
    runAndVerifyStr("ABC", nullLen, null, sha2);
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in the Apache Hive project.
From class TestGenericUDFSha2, method testSha200Bin:
public void testSha200Bin() throws HiveException {
    // 200 is not a valid SHA-2 digest length (valid: 0, 224, 256, 384, 512),
    // so sha2() is expected to yield NULL for binary input.
    GenericUDFSha2 sha2 = new GenericUDFSha2();
    IntWritable badLen = new IntWritable(200);
    ObjectInspector binOI = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    ObjectInspector lenOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, badLen);
    sha2.initialize(new ObjectInspector[] { binOI, lenOI });
    runAndVerifyBin(new byte[] { 65, 66, 67 }, badLen, null, sha2);
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in the Apache Hive project.
From class TestGenericUDFSha2, method testSha256Bin:
public void testSha256Bin() throws HiveException {
    // Exercises sha2(binary, 256): known digests for "ABC", the empty byte
    // array, and NULL input.
    GenericUDFSha2 sha2 = new GenericUDFSha2();
    IntWritable len256 = new IntWritable(256);
    ObjectInspector binOI = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    ObjectInspector lenOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, len256);
    sha2.initialize(new ObjectInspector[] { binOI, lenOI });
    // SHA-256("ABC")
    runAndVerifyBin(new byte[] { 65, 66, 67 }, len256,
        "b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78", sha2);
    // SHA-256 of the empty input
    runAndVerifyBin(new byte[0], len256,
        "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", sha2);
    // NULL input propagates to a NULL result
    runAndVerifyBin(null, len256, null, sha2);
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in the Apache Hive project.
From class TestGenericUDFMacro, method setup:
@Before
public void setup() throws Exception {
    // Builds the fixture for a macro "fixed_number(x int, y int)" whose body is
    // the precomputed constant x + y.
    name = "fixed_number";

    // Macro formal parameters: two int columns, x and y.
    colNames = new ArrayList<String>();
    colTypes = new ArrayList<TypeInfo>();
    colNames.add("x");
    colTypes.add(TypeInfoFactory.intTypeInfo);
    colNames.add("y");
    colTypes.add(TypeInfoFactory.intTypeInfo);

    // Concrete argument values and the expected macro result.
    x = new IntWritable(1);
    y = new IntWritable(2);
    expected = x.get() + y.get();
    bodyDesc = new ExprNodeConstantDesc(expected);

    // Constant object inspectors wrapping x and y for initialize().
    inspectors = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory
            .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, x),
        PrimitiveObjectInspectorFactory
            .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, y) };
    arguments = new DeferredObject[] { new DeferredJavaObject(x), new DeferredJavaObject(y) };
}
Usage of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in the Apache Hive project.
From class TestLazyAccumuloMap, method testBinaryIntMap:
@Test
public void testBinaryIntMap() throws SerDeException, IOException {
    // Populate one Accumulo row whose column family "cf1" holds three
    // binary-encoded int->int entries: 1->2, 2->4, 3->6.
    AccumuloHiveRow row = new AccumuloHiveRow("row");
    for (int k = 1; k <= 3; k++) {
        row.add(new Text("cf1"), new Text(toBytes(k)), toBytes(2 * k));
    }

    // Column mapping declaring both key and value as BINARY-encoded ints
    // (note: this is a map<int,int>, not int->String).
    HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping(
        "cf1", null, ColumnEncoding.BINARY, ColumnEncoding.BINARY, "column",
        TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo,
            TypeInfoFactory.intTypeInfo).toString());

    Text nullSequence = new Text("\\N");
    ObjectInspector mapOI = LazyFactory.createLazyObjectInspector(
        TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0),
        new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);

    LazyAccumuloMap lazyMap = new LazyAccumuloMap((LazyMapObjectInspector) mapOI);
    lazyMap.init(row, mapping);

    // Every key must be present and map to twice its value.
    Assert.assertEquals(3, lazyMap.getMapSize());
    for (int k = 1; k <= 3; k++) {
        Object value = lazyMap.getMapValueElement(new IntWritable(k));
        Assert.assertNotNull(value);
        Assert.assertEquals(new IntWritable(2 * k), ((LazyInteger) value).getWritableObject());
    }
}
Aggregations