Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFSha2, method testShaNullBin:
public void testShaNullBin() throws HiveException {
  GenericUDFSha2 udf = new GenericUDFSha2();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  IntWritable lenWr = null;
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.intTypeInfo, lenWr);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyBin(new byte[] { 65, 66, 67 }, lenWr, null, udf);
}
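The helper runAndVerifyBin is not shown on this page. Below is a hypothetical reconstruction of what it plausibly does, assuming the usual GenericUDF evaluation pattern with DeferredJavaObject; the method name and argument order come from the calls above, but the body and assertion messages are assumptions, not the actual Hive source.

// Class-level imports the helper would need inside TestGenericUDFSha2:
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSha2;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.junit.Assert;

// Hypothetical sketch of the private helper used by the tests above.
private void runAndVerifyBin(byte[] binV, IntWritable lenV, String expResult, GenericUDFSha2 udf)
    throws HiveException {
  // Wrap the raw bytes (or null) the way the writable binary ObjectInspector expects.
  DeferredObject valueObj0 = new DeferredJavaObject(binV != null ? new BytesWritable(binV) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(lenV);
  DeferredObject[] args = { valueObj0, valueObj1 };
  Text output = (Text) udf.evaluate(args);
  if (expResult == null) {
    Assert.assertNull(output);
  } else {
    Assert.assertEquals("sha2() test", expResult, output.toString());
  }
}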
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFSha2, method testSha0Bin:
public void testSha0Bin() throws HiveException {
  GenericUDFSha2 udf = new GenericUDFSha2();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  IntWritable lenWr = new IntWritable(0);
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.intTypeInfo, lenWr);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyBin(new byte[] { 65, 66, 67 }, lenWr, "b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78", udf);
  runAndVerifyBin(new byte[0], lenWr, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", udf);
  // null
  runAndVerifyBin(null, lenWr, null, udf);
}
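The expected hex strings asserted above can be cross-checked outside Hive. The following standalone sketch (plain JDK only, not part of the test class) recomputes the digest of the bytes {65, 66, 67} with java.security.MessageDigest, illustrating that the length-0 case is verified against an ordinary SHA-256 digest.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha256Check {
  public static void main(String[] args) throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    // Digest of the bytes {65, 66, 67}, i.e. "ABC".
    byte[] digest = md.digest("ABC".getBytes(StandardCharsets.UTF_8));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
      hex.append(String.format("%02x", b));
    }
    // Prints b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78,
    // matching the constant asserted in testSha0Bin above.
    System.out.println(hex);
  }
}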
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestGenericUDFSha2, method testSha256Str:
public void testSha256Str() throws HiveException {
  GenericUDFSha2 udf = new GenericUDFSha2();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  IntWritable lenWr = new IntWritable(256);
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.intTypeInfo, lenWr);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyStr("ABC", lenWr, "b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78", udf);
  runAndVerifyStr("", lenWr, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", udf);
  // null
  runAndVerifyStr(null, lenWr, null, udf);
}
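As with the binary case, the runAndVerifyStr helper is not shown here. A hypothetical sketch, mirroring the runAndVerifyBin reconstruction earlier on this page but wrapping the input in a Text; again, only the method name and call sites are taken from the snippets, the body is an assumption.

// Hypothetical sketch of the string-flavored helper inside TestGenericUDFSha2.
private void runAndVerifyStr(String str, IntWritable lenV, String expResult, GenericUDFSha2 udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new Text(str) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(lenV);
  DeferredObject[] args = { valueObj0, valueObj1 };
  Text output = (Text) udf.evaluate(args);
  if (expResult == null) {
    Assert.assertNull(output);
  } else {
    Assert.assertEquals("sha2() test", expResult, output.toString());
  }
}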
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class TestAccumuloRowSerializer, method testBinarySerialization:
@Test
public void testBinarySerialization() throws IOException, SerDeException {
  List<String> columns = Arrays.asList("row", "cq1", "cq2", "cq3");
  List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo,
      TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
  List<String> typeNames = new ArrayList<String>(types.size());
  for (TypeInfo type : types) {
    typeNames.add(type.getTypeName());
  }
  Properties tableProperties = new Properties();
  tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:cq1#b,cf:cq2#b,cf:cq3");
  tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
  tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
  tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
  AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
      tableProperties, AccumuloSerDe.class.getSimpleName());
  LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
  LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(
      columns, types, serDeParams.getSeparators(), serDeParams.getNullSequence(),
      serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
  AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
      accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(), accumuloSerDeParams.getRowIdFactory());
  // Create the LazyStruct from the LazyStruct...Inspector
  LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(oi);
  ByteArrayRef byteRef = new ByteArrayRef();
  byteRef.setData(new byte[] { 'r', 'o', 'w', '1', ' ', '1', '0', ' ', '2', '0', ' ', 'v', 'a', 'l', 'u', 'e' });
  obj.init(byteRef, 0, byteRef.getData().length);
  Mutation m = (Mutation) serializer.serialize(obj, oi);
  Assert.assertArrayEquals("row1".getBytes(), m.getRow());
  List<ColumnUpdate> updates = m.getUpdates();
  Assert.assertEquals(3, updates.size());
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  ColumnUpdate update = updates.get(0);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
  out.writeInt(10);
  Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
  update = updates.get(1);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
  baos.reset();
  out.writeInt(20);
  Assert.assertArrayEquals(baos.toByteArray(), update.getValue());
  update = updates.get(2);
  Assert.assertEquals("cf", new String(update.getColumnFamily()));
  Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
  Assert.assertEquals("value", new String(update.getValue()));
}
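The #b suffix in the column mapping tells the serializer to store cq1 and cq2 in binary form, and the test builds its expected values with DataOutputStream.writeInt. A small standalone sketch (plain JDK, nothing Hive- or Accumulo-specific) showing that those expected bytes are simply the big-endian four-byte encoding of the int:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

public class BinaryIntEncodingCheck {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    out.writeInt(10);  // same call the test uses to build its expected value

    // DataOutputStream writes ints big-endian, which is also ByteBuffer's default order.
    byte[] expected = ByteBuffer.allocate(4).putInt(10).array();
    System.out.println(Arrays.equals(expected, baos.toByteArray()));  // true: {0, 0, 0, 10}
  }
}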
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.
From the class MapJoinMultiKeyBenchBase, method doSetup:
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();
  long seed = 2543;
  // 100,000.
  int rowCount = 100000;
  String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] {
      TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo, TypeInfoFactory.stringTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0, 1, 2 };
  String[] smallTableValueColumnNames = new String[] { "sv1" };
  TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
  int[] bigTableRetainColumnNums = new int[] { 0, 1, 2 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0 };
  SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
  setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos,
      bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
      smallTableGenerationParameters);
}
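The big-table schema above hard-codes the TypeInfoFactory singletons (intTypeInfo, longTypeInfo, stringTypeInfo). A minimal standalone sketch, independent of the benchmark class, showing that these singletons compare equal to the TypeInfos obtained by parsing the Hive type names, which is why the two styles can be mixed when building schemas:

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoCheck {
  public static void main(String[] args) {
    // The factory exposes cached TypeInfo instances for the primitive types...
    TypeInfo fromFactory = TypeInfoFactory.intTypeInfo;
    // ...and parsing the Hive type name yields an equal TypeInfo.
    TypeInfo fromName = TypeInfoUtils.getTypeInfoFromTypeString("int");
    System.out.println(fromFactory.equals(fromName));  // true
    System.out.println(fromFactory.getTypeName());     // "int"
  }
}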