Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
Class TestVectorCastStatement, method doRowCastTest.
private boolean doRowCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo,
    List<String> columns, List<ExprNodeDesc> children,
    Object[][] randomRows, ObjectInspector rowInspector, Object[] resultObjects)
        throws Exception {

  GenericUDF udf;
  try {
    udf = VectorizationContext.getGenericUDFForCast(targetTypeInfo);
  } catch (HiveException e) {
    // No cast UDF exists for this target type; skip the test.
    return false;
  }

  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children);

  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " targetTypeInfo " + targetTypeInfo +
      " castStmtTestMode ROW_MODE" +
      " exprDesc " + exprDesc.toString());
  */

  HiveConf hiveConf = new HiveConf();
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  try {
    evaluator.initialize(rowInspector);
  } catch (HiveException e) {
    // The cast is not supported for this input type; skip the test.
    return false;
  }

  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);

  final int rowCount = randomRows.length;
  for (int i = 0; i < rowCount; i++) {
    Object[] row = randomRows[i];
    Object result = evaluator.evaluate(row);
    Object copyResult = ObjectInspectorUtils.copyToStandardObject(
        result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
    resultObjects[i] = copyResult;
  }
  return true;
}
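
The method above shows the general recipe for building a cast expression: ask VectorizationContext for the cast GenericUDF of the target type, then wrap it together with the input expressions in an ExprNodeGenericFuncDesc. A minimal standalone sketch of that recipe (hypothetical column name and types, not taken from the test; getGenericUDFForCast throws HiveException for unsupported targets):

static ExprNodeGenericFuncDesc buildCastToDoubleExpr() throws HiveException {
  // Hypothetical target type: cast to double.
  GenericUDF castUdf = VectorizationContext.getGenericUDFForCast(TypeInfoFactory.doubleTypeInfo);
  // Hypothetical input: an int column "col0" of table "table".
  List<ExprNodeDesc> castChildren = new ArrayList<ExprNodeDesc>();
  castChildren.add(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "col0", "table", false));
  return new ExprNodeGenericFuncDesc(TypeInfoFactory.doubleTypeInfo, castUdf, castChildren);
}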
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
Class TestVectorDateAddSub, method doDateAddSubTestsWithDiffColumnScalar.
private void doDateAddSubTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName,
    String integerTypeName, ColumnScalarMode columnScalarMode, boolean isAdd) throws Exception {

  TypeInfo dateTimeStringTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName);
  PrimitiveCategory dateTimeStringPrimitiveCategory =
      ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
  boolean isStringFamily =
      (dateTimeStringPrimitiveCategory == PrimitiveCategory.STRING ||
       dateTimeStringPrimitiveCategory == PrimitiveCategory.CHAR ||
       dateTimeStringPrimitiveCategory == PrimitiveCategory.VARCHAR);

  TypeInfo integerTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(integerTypeName);
  PrimitiveCategory integerPrimitiveCategory =
      ((PrimitiveTypeInfo) integerTypeInfo).getPrimitiveCategory();

  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();

  List<String> columns = new ArrayList<String>();
  int columnNum = 1;

  // First operand: the date/string-family value, as a column or a scalar.
  ExprNodeDesc col1Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
    if (!isStringFamily) {
      generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo));
    } else {
      generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(
          dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo));
    }
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar1Object;
    if (!isStringFamily) {
      scalar1Object = VectorRandomRowSource.randomPrimitiveObject(
          random, (PrimitiveTypeInfo) dateTimeStringTypeInfo);
    } else {
      scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(
          random, dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo, false);
    }
    col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo, scalar1Object);
  }

  // Second operand: the integer day count, as a column or a scalar.
  ExprNodeDesc col2Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    generationSpecList.add(GenerationSpec.createSameType(integerTypeInfo));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col2Expr = new ExprNodeColumnDesc(integerTypeInfo, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar2Object = VectorRandomRowSource.randomPrimitiveObject(
        random, (PrimitiveTypeInfo) integerTypeInfo);
    scalar2Object = smallerRange(random, integerPrimitiveCategory, /* wantWritable */ false);
    col2Expr = new ExprNodeConstantDesc(integerTypeInfo, scalar2Object);
  }

  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  children.add(col2Expr);

  // ----------------------------------------------------------------------------------------------

  String[] columnNames = columns.toArray(new String[0]);

  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);

  Object[][] randomRows = rowSource.randomRows(100000);

  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    // Fixup numbers to limit the range to 0 ... N-1.
    for (int i = 0; i < randomRows.length; i++) {
      Object[] row = randomRows[i];
      if (row[columnNum - 2] != null) {
        row[columnNum - 2] =
            smallerRange(random, integerPrimitiveCategory, /* wantWritable */ true);
      }
    }
  }

  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);

  String[] outputScratchTypeNames = new String[] { "date" };

  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
      columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0,
      /* virtualColumnCount */ 0, /* neededVirtualColumns */ null,
      outputScratchTypeNames, null);

  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[DateAddSubTestMode.count][];
  for (int i = 0; i < DateAddSubTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;

    GenericUDF udf = (isAdd ? new GenericUDFDateAdd() : new GenericUDFDateSub());
    ExprNodeGenericFuncDesc exprDesc =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.dateTypeInfo, udf, children);

    DateAddSubTestMode dateAddSubTestMode = DateAddSubTestMode.values()[i];
    switch (dateAddSubTestMode) {
      case ROW_MODE:
        doRowDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns, children,
            isAdd, exprDesc, randomRows, columnScalarMode,
            rowSource.rowStructObjectInspector(), resultObjects);
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        doVectorDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns,
            rowSource.typeInfos(), children, isAdd, exprDesc, dateAddSubTestMode,
            columnScalarMode, batchSource, batchContext, resultObjects);
        break;
      default:
        throw new RuntimeException("Unexpected date add/sub test mode " + dateAddSubTestMode);
    }
  }

  // Verify: row mode produces the expected value; every vector mode must match it.
  for (int i = 0; i < rowCount; i++) {
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < DateAddSubTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] +
              " isAdd " + isAdd + " " + columnScalarMode +
              " result is NULL " + (vectorResult == null) +
              " does not match row-mode expected result is NULL " + (expectedResult == null) +
              " row values " + Arrays.toString(randomRows[i]));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] +
              " isAdd " + isAdd + " " + columnScalarMode +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]));
        }
      }
    }
  }
}
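
Outside the test harness, the same expression can be assembled and prepared for row-mode evaluation in a few lines. A hedged sketch (hypothetical column name and a constant day count of 1):

static ExprNodeEvaluator buildDateAddEvaluator() throws HiveException {
  List<ExprNodeDesc> kids = new ArrayList<ExprNodeDesc>();
  kids.add(new ExprNodeColumnDesc(TypeInfoFactory.dateTypeInfo, "col0", "table", false));
  kids.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1));
  ExprNodeGenericFuncDesc dateAdd =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.dateTypeInfo, new GenericUDFDateAdd(), kids);
  // As in doRowCastTest above, the evaluator still needs
  // initialize(rowInspector) before evaluate(row) can be called.
  return ExprNodeEvaluatorFactory.get(dateAdd, new HiveConf());
}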
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
Class TestOperatorSignature, method getTsOp.
private Operator<TableScanDesc> getTsOp(int i) {
  Table tblMetadata = new Table("db", "table");
  TableScanDesc desc = new TableScanDesc("alias_" + cCtx.nextOperatorId(), tblMetadata);
  List<ExprNodeDesc> as = Lists.newArrayList(
      new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i)),
      new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false));
  ExprNodeGenericFuncDesc f1 = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, as);
  desc.setFilterExpr(f1);
  Operator<TableScanDesc> ts = OperatorFactory.get(cCtx, desc);
  return ts;
}
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
Class TestRuntimeStatsPersistence, method getTsOp.
private Operator<TableScanDesc> getTsOp(int i) {
  Table tblMetadata = new Table("db", "table");
  TableScanDesc desc = new TableScanDesc("alias", /*+ cCtx.nextOperatorId()*/ tblMetadata);
  List<ExprNodeDesc> as = Lists.newArrayList(
      new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i)),
      new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false));
  ExprNodeGenericFuncDesc f1 = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, as);
  desc.setFilterExpr(f1);
  Operator<TableScanDesc> ts = OperatorFactory.get(cCtx, desc);
  return ts;
}
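
Both getTsOp variants lean on two fields of the enclosing test class that are not shown in these excerpts. A hedged sketch of what they plausibly look like (hypothetical initialization; the actual tests choose their own UDF):

// Shared compilation context that hands out operator ids via nextOperatorId().
CompilationOpContext cCtx = new CompilationOpContext();
// Any GenericUDF suffices to form the filter expression; equality is just an example.
GenericUDF udf = new GenericUDFOPEqual();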
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
Class SharedWorkOptimizer, method extractConjsIgnoringDPPPreds.
private static Multiset<String> extractConjsIgnoringDPPPreds(ExprNodeDesc predicate) {
  List<ExprNodeDesc> conjsOp = ExprNodeDescUtils.split(predicate);
  Multiset<String> conjsOpString = TreeMultiset.create();
  for (int i = 0; i < conjsOp.size(); i++) {
    if (conjsOp.get(i) instanceof ExprNodeGenericFuncDesc) {
      ExprNodeGenericFuncDesc func = (ExprNodeGenericFuncDesc) conjsOp.get(i);
      if (GenericUDFInBloomFilter.class == func.getGenericUDF().getClass()) {
        // Skip dynamic partition pruning (DPP) bloom-filter probes.
        continue;
      } else if (GenericUDFBetween.class == func.getGenericUDF().getClass() &&
          (func.getChildren().get(2) instanceof ExprNodeDynamicValueDesc ||
           func.getChildren().get(3) instanceof ExprNodeDynamicValueDesc)) {
        // Skip BETWEEN predicates whose bounds are runtime dynamic values.
        continue;
      }
    } else if (conjsOp.get(i) instanceof ExprNodeDynamicListDesc) {
      // Skip dynamic IN-list predicates.
      continue;
    }
    conjsOpString.add(conjsOp.get(i).toString());
  }
  return conjsOpString;
}
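
The split-and-stringify core of this helper can be exercised in isolation. A small sketch (hypothetical column and constant, not taken from SharedWorkOptimizer):

static Multiset<String> conjunctStrings() {
  ExprNodeDesc c1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "t", false);
  ExprNodeDesc one = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1);
  ExprNodeGenericFuncDesc eq = new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(), Lists.newArrayList(c1, one));
  // ExprNodeDescUtils.split flattens nested AND predicates into a conjunct list;
  // here it simply yields the single equality.
  Multiset<String> strings = TreeMultiset.create();
  for (ExprNodeDesc conj : ExprNodeDescUtils.split(eq)) {
    strings.add(conj.toString());
  }
  return strings;
}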