Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
In class TestMapJoinOperator, method doTestString0.
public boolean doTestString0(long seed, int hiveConfVariation,
    VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinPlanVariation mapJoinPlanVariation) throws Exception {

  int rowCount = 10;

  HiveConf hiveConf = new HiveConf();
  if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) {
    return true;
  }

  TypeInfo[] bigTableTypeInfos = null;
  int[] bigTableKeyColumnNums = null;
  TypeInfo[] smallTableValueTypeInfos = null;
  int[] smallTableRetainKeyColumnNums = null;

  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();

  MapJoinTestDescription testDesc = null;
  MapJoinTestData testData = null;

  // One plain STRING key column.
  bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
  bigTableKeyColumnNums = new int[] { 0 };
  smallTableRetainKeyColumnNums = new int[] { 0 };
  smallTableValueTypeInfos =
      new TypeInfo[] { TypeInfoFactory.dateTypeInfo, TypeInfoFactory.timestampTypeInfo };

  // ----------------------------------------------------------------------------------------------

  testDesc = new MapJoinTestDescription(
      hiveConf, vectorMapJoinVariation,
      bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueTypeInfos, smallTableRetainKeyColumnNums,
      smallTableGenerationParameters, mapJoinPlanVariation);

  if (!goodTestVariation(testDesc)) {
    return false;
  }

  // Prepare data. Good for ANY implementation variation.
  testData = new MapJoinTestData(rowCount, testDesc, seed);

  executeTest(testDesc, testData, "testString0");

  return false;
}
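The boolean returned above signals whether the hiveConfVariation index was out of range (true), not whether the test passed. A minimal driver sketch, assuming only that contract (the method name doTestString and loop shape are illustrative, not taken from the snippet):

public void doTestString(long seed, VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinPlanVariation mapJoinPlanVariation) throws Exception {
  int hiveConfVariation = 0;
  boolean done = false;
  while (!done) {
    // doTestString0 returns true once addNonLongHiveConfVariation rejects the index,
    // i.e. all configuration variations have been tried.
    done = doTestString0(seed, hiveConfVariation, vectorMapJoinVariation, mapJoinPlanVariation);
    hiveConfVariation++;
  }
}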
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
In class TestMapJoinOperator, method doTestLong1.
public boolean doTestLong1(long seed, int rowCount, int hiveConfVariation,
    VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinPlanVariation mapJoinPlanVariation) throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) {
    return true;
  }

  TypeInfo[] bigTableTypeInfos = null;
  int[] bigTableKeyColumnNums = null;
  TypeInfo[] smallTableValueTypeInfos = null;
  int[] smallTableRetainKeyColumnNums = null;

  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();

  MapJoinTestDescription testDesc = null;
  MapJoinTestData testData = null;

  // Big Table: int key, long value; Small Table: no key retained, string value
  bigTableTypeInfos =
      new TypeInfo[] { TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo };
  bigTableKeyColumnNums = new int[] { 0 };
  smallTableRetainKeyColumnNums = new int[] {};
  smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };

  testDesc = new MapJoinTestDescription(
      hiveConf, vectorMapJoinVariation,
      bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueTypeInfos, smallTableRetainKeyColumnNums,
      smallTableGenerationParameters, mapJoinPlanVariation);

  if (!goodTestVariation(testDesc)) {
    return false;
  }

  testData = new MapJoinTestData(rowCount, testDesc, seed);

  executeTest(testDesc, testData, "testLong1");

  return false;
}
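For context on the field this page tracks: TypeInfoFactory.stringTypeInfo is the shared descriptor for Hive's STRING primitive type, here used as the small-table value type. A small standalone sketch (not part of the test class) showing how it relates to name-based lookup and to ObjectInspectors:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StringTypeInfoSketch {
  public static void main(String[] args) {
    // The static field and the name-based lookup both describe Hive's STRING type.
    TypeInfo byField = TypeInfoFactory.stringTypeInfo;
    TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo("string");

    System.out.println(byField.getTypeName());   // string
    System.out.println(byField.equals(byName));  // true

    // A writable ObjectInspector derived from the type info, as test harnesses
    // typically need when inspecting generated rows.
    ObjectInspector oi =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(byField);
    System.out.println(oi.getCategory());        // PRIMITIVE
  }
}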
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
In class TestMapJoinOperator, method doTestLong3_NoRegularKeys.
public boolean doTestLong3_NoRegularKeys(long seed, int rowCount, int hiveConfVariation,
    VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinPlanVariation mapJoinPlanVariation) throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) {
    return true;
  }

  TypeInfo[] bigTableTypeInfos = null;
  int[] bigTableKeyColumnNums = null;
  TypeInfo[] smallTableValueTypeInfos = null;
  int[] smallTableRetainKeyColumnNums = null;

  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.NO_REGULAR_SMALL_KEYS);

  MapJoinTestDescription testDesc = null;
  MapJoinTestData testData = null;

  // Big Table: int key, string value; Small Table: key retained, decimal value
  bigTableTypeInfos =
      new TypeInfo[] { TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo };
  bigTableKeyColumnNums = new int[] { 0 };
  smallTableRetainKeyColumnNums = new int[] { 0 };
  smallTableValueTypeInfos = new TypeInfo[] { new DecimalTypeInfo(38, 18) };

  testDesc = new MapJoinTestDescription(
      hiveConf, vectorMapJoinVariation,
      bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueTypeInfos, smallTableRetainKeyColumnNums,
      smallTableGenerationParameters, mapJoinPlanVariation);

  if (!goodTestVariation(testDesc)) {
    return false;
  }

  testData = new MapJoinTestData(rowCount, testDesc, seed);

  executeTest(testDesc, testData, "doTestLong3_NoRegularKeys");

  return false;
}
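The small-table value type here is decimal(38,18), constructed directly with new DecimalTypeInfo(38, 18). As a side note (not part of the test), the equivalent type info can also be obtained through the factory; a brief sketch:

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalTypeInfoSketch {
  public static void main(String[] args) {
    // Direct construction, as in the test above.
    DecimalTypeInfo direct = new DecimalTypeInfo(38, 18);

    // Factory lookup of the same precision/scale.
    TypeInfo viaFactory = TypeInfoFactory.getDecimalTypeInfo(38, 18);

    System.out.println(direct.getTypeName());      // decimal(38,18)
    System.out.println(direct.getPrecision());     // 38
    System.out.println(direct.getScale());         // 18
    System.out.println(direct.equals(viaFactory)); // true
  }
}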
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
In class TestMapJoinOperator, method doTestMultiKey2.
public boolean doTestMultiKey2(long seed, int hiveConfVariation,
    VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinPlanVariation mapJoinPlanVariation) throws Exception {

  int rowCount = 10;

  HiveConf hiveConf = new HiveConf();
  if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) {
    return true;
  }

  TypeInfo[] bigTableTypeInfos = null;
  int[] bigTableKeyColumnNums = null;
  TypeInfo[] smallTableValueTypeInfos = null;
  int[] smallTableRetainKeyColumnNums = null;

  SmallTableGenerationParameters smallTableGenerationParameters =
      new SmallTableGenerationParameters();

  MapJoinTestDescription testDesc = null;
  MapJoinTestData testData = null;

  // Three key columns.
  bigTableTypeInfos = new TypeInfo[] {
      TypeInfoFactory.longTypeInfo, TypeInfoFactory.shortTypeInfo, TypeInfoFactory.stringTypeInfo };
  bigTableKeyColumnNums = new int[] { 0, 1, 2 };
  smallTableRetainKeyColumnNums = new int[] { 0, 1, 2 };
  smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };

  // ----------------------------------------------------------------------------------------------

  testDesc = new MapJoinTestDescription(
      hiveConf, vectorMapJoinVariation,
      bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueTypeInfos, smallTableRetainKeyColumnNums,
      smallTableGenerationParameters, mapJoinPlanVariation);

  if (!goodTestVariation(testDesc)) {
    return false;
  }

  // Prepare data. Good for ANY implementation variation.
  testData = new MapJoinTestData(rowCount, testDesc, seed);

  executeTest(testDesc, testData, "testMultiKey0");

  return false;
}
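The multi-key case above passes three separate key TypeInfos (bigint, smallint, string). A related standalone sketch (not used by the test) shows how such a composite key could also be described as a single struct TypeInfo; the field names key0..key2 are made up for the example:

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class MultiKeyTypeInfoSketch {
  public static void main(String[] args) {
    TypeInfo keyStruct = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("key0", "key1", "key2"),
        Arrays.<TypeInfo>asList(
            TypeInfoFactory.longTypeInfo,
            TypeInfoFactory.shortTypeInfo,
            TypeInfoFactory.stringTypeInfo));

    // Prints something like struct<key0:bigint,key1:smallint,key2:string>
    System.out.println(keyStruct.getTypeName());
  }
}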
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
In class TestVectorUDFAdaptor, method testGenericUDF.
// Test the UDF adaptor for a generic UDF (as opposed to a legacy UDF).
@Test
public void testGenericUDF() throws HiveException {

  // Create a syntax tree for a function call 'myisnull(col0, "UNKNOWN")'.
  ExprNodeGenericFuncDesc funcDesc;
  GenericUDF genericUDF = new GenericUDFIsNull();
  TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;

  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
  children.add(new ExprNodeConstantDesc(typeInfoStr, "UNKNOWN"));

  VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[2];
  for (int i = 0; i < 2; i++) {
    argDescs[i] = new VectorUDFArgDesc();
  }
  argDescs[0].setVariable(0);
  argDescs[1].setConstant((ExprNodeConstantDesc) children.get(1));

  funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDF, "myisnull", children);

  // Create the adaptor for this function call to work in vector mode.
  VectorUDFAdaptor vudf = null;
  try {
    vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
  } catch (HiveException e) {
    // We should never get here.
    assertTrue(false);
  }

  VectorizedRowBatch b;
  byte[] red = null;
  byte[] unknown = null;
  try {
    red = "red".getBytes("UTF-8");
    unknown = "UNKNOWN".getBytes("UTF-8");
  } catch (Exception e) {
    // UTF-8 is always supported, so this cannot happen.
  }

  BytesColumnVector out;

  // With nulls.
  b = getBatchStrDblLongWithStrOut();
  b.cols[0].noNulls = false;
  // Set the 1st entry to null.
  b.cols[0].isNull[0] = true;
  vudf.evaluate(b);
  out = (BytesColumnVector) b.cols[3];

  // Verify outputs: row 1 passes "red" through, row 0 (null input) yields "UNKNOWN".
  int cmp = StringExpr.compare(red, 0, red.length, out.vector[1], out.start[1], out.length[1]);
  assertEquals(0, cmp);
  cmp = StringExpr.compare(unknown, 0, unknown.length, out.vector[0], out.start[0], out.length[0]);
  assertEquals(0, cmp);

  // The output entry should not be null for a null input for this particular generic UDF.
  assertTrue(out.noNulls || !out.isNull[0]);
}
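The helper getBatchStrDblLongWithStrOut is referenced but not shown in this snippet. Below is a hedged sketch of what such a helper might construct, assuming column 0 is the string input, columns 1 and 2 are double/long fillers, and column 3 is the string output the adaptor writes to; the row values are illustrative, chosen so that row 1 holds "red" as the assertions expect.

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class BatchSketch {
  // Assumed shape only; the real helper lives in the Hive test sources.
  static VectorizedRowBatch getBatchStrDblLongWithStrOutSketch() {
    VectorizedRowBatch b = new VectorizedRowBatch(4);
    BytesColumnVector strCol = new BytesColumnVector();
    b.cols[0] = strCol;                    // string input column
    b.cols[1] = new DoubleColumnVector();  // filler column
    b.cols[2] = new LongColumnVector();    // filler column
    b.cols[3] = new BytesColumnVector();   // string output column written by the adaptor

    byte[] red = "red".getBytes(StandardCharsets.UTF_8);
    strCol.initBuffer();
    strCol.setVal(0, red, 0, red.length);  // row 0: the test later marks this entry NULL
    strCol.setVal(1, red, 0, red.length);  // row 1: "red", compared against the output

    b.size = 2;
    return b;
  }
}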