Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFSortArrayByField, method testSortTupleArrayStructOrderDESC.
@Test
public void testSortTupleArrayStructOrderDESC() throws HiveException {
  List<ObjectInspector> tuple = new ArrayList<ObjectInspector>();
  tuple.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  tuple.add(ObjectInspectorFactory.getStandardListObjectInspector(
      PrimitiveObjectInspectorFactory.writableIntObjectInspector));
  // Constant arguments: the struct field to sort by and the sort order.
  ObjectInspector field = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, new Text("Scores"));
  ObjectInspector orderField = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, new Text("desc"));
  ObjectInspector[] inputOIs = {
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(asList("Student", "Scores"), tuple)),
      field, orderField };
  udf.initialize(inputOIs);
  Object i1 = asList(new Text("Foo"), asList(new IntWritable(4), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
  Object i2 = asList(new Text("Boo"), asList(new IntWritable(2), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
  Object i3 = asList(new Text("Tom"), asList(new IntWritable(10), new IntWritable(3), new IntWritable(2), new IntWritable(1)));
  GenericUDF.DeferredJavaObject[] args = {
      new GenericUDF.DeferredJavaObject(asList(i1, i2, i3)),
      new GenericUDF.DeferredJavaObject(field),
      new GenericUDF.DeferredJavaObject(orderField) };
  // Descending sort on "Scores" puts Tom (10) first, then Foo (4), then Boo (2).
  runAndVerify(args, asList(i3, i1, i2));
}
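The runAndVerify helper is defined elsewhere in the test class and is not shown in this snippet. A minimal sketch of what it presumably does, assuming the udf field holds the GenericUDFSortArrayByField instance under test:

// Hypothetical sketch of the runAndVerify helper referenced above; the real helper in
// TestGenericUDFSortArrayByField may differ in its exact assertions.
private void runAndVerify(GenericUDF.DeferredJavaObject[] args, List<Object> expected) throws HiveException {
  Object result = udf.evaluate(args);   // udf is assumed to be the UDF instance under test
  Assert.assertEquals("sort_array_by output", expected, result);
}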
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class MapJoinOneLongKeyBenchBase, method doSetup.
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();
  long seed = 2543;
  // 10,000,000 rows in the big table.
  int rowCount = 10000000;
  String[] bigTableColumnNames = new String[] { "number1" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.longTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0 };
  String[] smallTableValueColumnNames = new String[] { "sv1", "sv2" };
  TypeInfo[] smallTableValueTypeInfos =
      new TypeInfo[] { TypeInfoFactory.dateTypeInfo, TypeInfoFactory.stringTypeInfo };
  int[] bigTableRetainColumnNums = new int[] { 0 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0, 1 };
  SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
  setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums,
      smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
      smallTableGenerationParameters);
}
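doSetup itself carries no JMH annotations; a concrete benchmark subclass would typically call it from a @Setup method before measurement. A minimal hedged sketch, where the enum constants INNER and NATIVE_VECTOR_FAST are assumptions about what the two parameter enums provide, not values taken from the original benchmark:

// Hedged sketch only: wiring doSetup into a JMH @Setup method of a concrete subclass.
@Setup
public void setup() throws Exception {
  doSetup(VectorMapJoinVariation.INNER, MapJoinTestImplementation.NATIVE_VECTOR_FAST);
}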
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class MapJoinOneStringKeyBenchBase, method doSetup.
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();
  long seed = 2543;
  // 100,000 rows in the big table.
  int rowCount = 100000;
  String[] bigTableColumnNames = new String[] { "b1" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0 };
  String[] smallTableValueColumnNames = new String[] { "sv1", "sv2" };
  TypeInfo[] smallTableValueTypeInfos =
      new TypeInfo[] { TypeInfoFactory.dateTypeInfo, TypeInfoFactory.timestampTypeInfo };
  int[] bigTableRetainColumnNums = new int[] { 0 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0, 1 };
  SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
  setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums,
      smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
      smallTableGenerationParameters);
}
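The TypeInfoFactory constants used for both schemas can also be obtained from Hive type-name strings. A small illustrative sketch (not part of the benchmark) using TypeInfoUtils:

// Illustration only: primitive TypeInfos parsed from type names resolve to the same cached
// primitive instances that TypeInfoFactory exposes as constants.
TypeInfo stringType = TypeInfoUtils.getTypeInfoFromTypeString("string");       // TypeInfoFactory.stringTypeInfo
TypeInfo dateType = TypeInfoUtils.getTypeInfoFromTypeString("date");           // TypeInfoFactory.dateTypeInfo
TypeInfo timestampType = TypeInfoUtils.getTypeInfoFromTypeString("timestamp"); // TypeInfoFactory.timestampTypeInfo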
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestVectorGenericDateExpressions, method testDateSubColScalar.
@Test
public void testDateSubColScalar() throws HiveException {
  // Run the shared add/sub helper in subtraction mode for every supported first-argument type.
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    testDateAddColScalar(colType1, false);
  }
  // An unparseable date string in the input column should produce a NULL output.
  VectorExpression udf = new VectorUDFDateSubColScalar(0, 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
  byte[] bytes = "error".getBytes(utf8);
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
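dateTimestampStringTypes is a field of the test class that is not shown in this snippet. A hedged guess at its definition, since the loop only needs the three first-argument categories date_sub accepts:

// Assumed definition of the loop driver above; the real field in
// TestVectorGenericDateExpressions may be declared differently.
private final List<PrimitiveCategory> dateTimestampStringTypes =
    Arrays.asList(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP, PrimitiveCategory.STRING);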
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestVectorGenericDateExpressions, method testDateSubScalarCol.
@Test
public void testDateSubScalarCol() throws HiveException {
  // Run the shared add/sub helper in subtraction mode for every supported scalar type.
  for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
    testDateAddScalarCol(scalarType1, false);
  }
  // An unparseable scalar date should produce a NULL output for the row.
  VectorExpression udf = new VectorUDFDateSubScalarCol("error".getBytes(utf8), 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new LongColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
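For contrast with the NULL-producing case above, a hedged happy-path sketch with a well-formed scalar date; the literal date, the day count, and the reuse of the original type-info pattern are illustrative assumptions, not part of the original test:

// Hedged sketch, not from the original test: a parseable scalar date should leave the
// output row non-NULL.
VectorExpression ok = new VectorUDFDateSubScalarCol("2019-01-10".getBytes(utf8), 0, 1);
ok.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
ok.transientInit();
VectorizedRowBatch b = new VectorizedRowBatch(2, 1);
b.cols[0] = new LongColumnVector(1);
b.cols[1] = new LongColumnVector(1);
((LongColumnVector) b.cols[0]).vector[0] = 1;   // subtract one day
ok.evaluate(b);
Assert.assertFalse(b.cols[1].isNull[0]);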