Use of org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc in project hive by apache.
Class HiveIcebergStorageHandler, method collectColumnAndReplaceDummyValues.
/**
 * Recursively replaces every ExprNodeDynamicListDesc node with a dummy ExprNodeConstantDesc so we can test whether
 * the predicate converts to an Iceberg predicate when pruning the partitions later. Also collects the single column
 * name referenced in the filter.
 * <p>
 * The input node is modified in place; clone it first if the original must be preserved.
 * @param node The node we are traversing
 * @param foundColumn The column found so far, or null
 * @return The column name referenced by the filter, or null if none was found
 */
private String collectColumnAndReplaceDummyValues(ExprNodeDesc node, String foundColumn) {
  String column = foundColumn;
  List<ExprNodeDesc> children = node.getChildren();
  if (children != null && !children.isEmpty()) {
    ListIterator<ExprNodeDesc> iterator = children.listIterator();
    while (iterator.hasNext()) {
      ExprNodeDesc child = iterator.next();
      if (child instanceof ExprNodeDynamicListDesc) {
        Object dummy;
        switch (((PrimitiveTypeInfo) child.getTypeInfo()).getPrimitiveCategory()) {
          case INT:
          case SHORT:
            dummy = 1;
            break;
          case LONG:
            dummy = 1L;
            break;
          case TIMESTAMP:
          case TIMESTAMPLOCALTZ:
            dummy = new Timestamp();
            break;
          case CHAR:
          case VARCHAR:
          case STRING:
            dummy = "1";
            break;
          case DOUBLE:
          case FLOAT:
          case DECIMAL:
            dummy = 1.1;
            break;
          case DATE:
            dummy = new Date();
            break;
          case BOOLEAN:
            dummy = true;
            break;
          default:
            throw new UnsupportedOperationException(
                "Not supported primitive type in partition pruning: " + child.getTypeInfo());
        }
        iterator.set(new ExprNodeConstantDesc(child.getTypeInfo(), dummy));
      } else {
        String newColumn;
        if (child instanceof ExprNodeColumnDesc) {
          newColumn = ((ExprNodeColumnDesc) child).getColumn();
        } else {
          newColumn = collectColumnAndReplaceDummyValues(child, column);
        }
        if (column != null && newColumn != null && !newColumn.equals(column)) {
          throw new UnsupportedOperationException("Partition pruning does not support filtering on multiple columns");
        }
        if (column == null) {
          column = newColumn;
        }
      }
    }
  }
  return column;
}
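
The replace-with-dummy step is easier to see in isolation. The following is a minimal, self-contained sketch of the same technique, not Hive code: the Expr, DynamicList, and Constant classes are hypothetical stand-ins for ExprNodeDesc, ExprNodeDynamicListDesc, and ExprNodeConstantDesc, kept only to show the in-place child swap via ListIterator.set.

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

// Hypothetical stand-ins for the ExprNodeDesc hierarchy.
class Expr {
  final List<Expr> children = new ArrayList<>();
}
class DynamicList extends Expr {
  final String type;
  DynamicList(String type) { this.type = type; }
}
class Constant extends Expr {
  final Object value;
  Constant(Object value) { this.value = value; }
}

class DummySubstitution {
  // Same traversal shape as above: swap each dynamic-list child in place
  // for a type-appropriate dummy constant, recurse into everything else.
  static void replaceDummies(Expr node) {
    ListIterator<Expr> iterator = node.children.listIterator();
    while (iterator.hasNext()) {
      Expr child = iterator.next();
      if (child instanceof DynamicList) {
        Object dummy;
        switch (((DynamicList) child).type) {
          case "int":
            dummy = 1;
            break;
          case "string":
            dummy = "1";
            break;
          case "boolean":
            dummy = true;
            break;
          default:
            throw new UnsupportedOperationException("Unsupported type");
        }
        iterator.set(new Constant(dummy)); // in-place swap keeps the tree shape
      } else {
        replaceDummies(child);
      }
    }
  }
}

Because the swap is done in place, the subsequent conversion test sees a predicate with exactly the structure the real runtime values would produce.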
Use of org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc in project hive by apache.
Class SharedWorkOptimizer, method extractConjsIgnoringDPPPreds.
/**
 * Splits the predicate into its conjuncts and returns their string representations, skipping predicates that only
 * exist for dynamic partition pruning: bloom-filter membership probes, BETWEEN predicates whose bounds are dynamic
 * values, and dynamic IN lists.
 */
private static Multiset<String> extractConjsIgnoringDPPPreds(ExprNodeDesc predicate) {
  List<ExprNodeDesc> conjsOp = ExprNodeDescUtils.split(predicate);
  Multiset<String> conjsOpString = TreeMultiset.create();
  for (int i = 0; i < conjsOp.size(); i++) {
    if (conjsOp.get(i) instanceof ExprNodeGenericFuncDesc) {
      ExprNodeGenericFuncDesc func = (ExprNodeGenericFuncDesc) conjsOp.get(i);
      if (GenericUDFInBloomFilter.class == func.getGenericUDF().getClass()) {
        continue;
      } else if (GenericUDFBetween.class == func.getGenericUDF().getClass() &&
          (func.getChildren().get(2) instanceof ExprNodeDynamicValueDesc ||
              func.getChildren().get(3) instanceof ExprNodeDynamicValueDesc)) {
        continue;
      }
    } else if (conjsOp.get(i) instanceof ExprNodeDynamicListDesc) {
      continue;
    }
    conjsOpString.add(conjsOp.get(i).toString());
  }
  return conjsOpString;
}
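
Stripped of the Hive UDF classes, the skip logic reduces to the sketch below. It is an illustrative model, not the optimizer's code: conjuncts are plain strings and the isDppArtifact helper is made up, standing in for the GenericUDFInBloomFilter, dynamic-value BETWEEN, and ExprNodeDynamicListDesc checks above; only the Guava TreeMultiset usage matches the real method.

import com.google.common.collect.Multiset;
import com.google.common.collect.TreeMultiset;
import java.util.List;

class ConjunctExtraction {
  // Simplified model: a conjunct contributes to the result only if it is
  // not a runtime-only dynamic-partition-pruning artifact.
  static Multiset<String> extractConjuncts(List<String> conjuncts) {
    Multiset<String> result = TreeMultiset.create();
    for (String conjunct : conjuncts) {
      if (isDppArtifact(conjunct)) {
        continue; // runtime-only predicate: ignore when comparing plans
      }
      result.add(conjunct);
    }
    return result;
  }

  // Hypothetical helper standing in for the UDF-class checks above.
  private static boolean isDppArtifact(String conjunct) {
    return conjunct.contains("in_bloom_filter")
        || conjunct.contains("DynamicValue")
        || conjunct.contains("DynamicList");
  }
}

A multiset rather than a set appears deliberate: two filters then compare equal only when every conjunct occurs the same number of times, not merely when the same distinct conjuncts appear.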
Use of org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc in project hive by apache.
Class RedundantDynamicPruningConditionsRemoval, method collect.
/**
 * Recursively walks an AND tree and collects into the context (a) IN predicates whose value list is an
 * ExprNodeDynamicListDesc, paired with their parent node, and (b) column expressions that are compared against a
 * constant or against an expression that folds to a constant.
 */
private static void collect(ExprNodeDesc parent, ExprNodeDesc child, CollectContext listContext) {
  if (child instanceof ExprNodeGenericFuncDesc &&
      ((ExprNodeGenericFuncDesc) child).getGenericUDF() instanceof GenericUDFIn) {
    if (child.getChildren().get(1) instanceof ExprNodeDynamicListDesc) {
      listContext.dynamicListNodes.add(new Pair<ExprNodeDesc, ExprNodeDesc>(child, parent));
    }
    return;
  }
  if (child instanceof ExprNodeGenericFuncDesc &&
      ((ExprNodeGenericFuncDesc) child).getGenericUDF() instanceof GenericUDFBaseCompare &&
      child.getChildren().size() == 2) {
    ExprNodeDesc leftCol = child.getChildren().get(0);
    ExprNodeDesc rightCol = child.getChildren().get(1);
    ExprNodeColumnDesc leftColDesc = ExprNodeDescUtils.getColumnExpr(leftCol);
    if (leftColDesc != null) {
      boolean rightConstant = false;
      if (rightCol instanceof ExprNodeConstantDesc) {
        rightConstant = true;
      } else if (rightCol instanceof ExprNodeGenericFuncDesc) {
        ExprNodeDesc foldedExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) rightCol);
        rightConstant = foldedExpr != null;
      }
      if (rightConstant) {
        listContext.comparatorNodes.add(leftColDesc);
      }
    } else {
      ExprNodeColumnDesc rightColDesc = ExprNodeDescUtils.getColumnExpr(rightCol);
      if (rightColDesc != null) {
        boolean leftConstant = false;
        if (leftCol instanceof ExprNodeConstantDesc) {
          leftConstant = true;
        } else if (leftCol instanceof ExprNodeGenericFuncDesc) {
          ExprNodeDesc foldedExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) leftCol);
          leftConstant = foldedExpr != null;
        }
        if (leftConstant) {
          listContext.comparatorNodes.add(rightColDesc);
        }
      }
    }
    return;
  }
  if (FunctionRegistry.isOpAnd(child)) {
    for (ExprNodeDesc newChild : child.getChildren()) {
      collect(child, newChild, listContext);
    }
  }
}
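
Given the class name, the two collections presumably feed a later pass that drops a dynamic IN predicate when the same column is already pinned by a constant comparison. The sketch below is a guess at that relationship on a simplified model, with hypothetical names throughout.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

class RedundancyCheck {
  // Simplified model: a dynamic IN list on a column is redundant once that
  // column is already filtered by a constant comparison, e.g.
  // "ds = '2024-01-01' AND ds IN (DynamicList)" -- the IN prunes nothing
  // that the equality has not already pruned.
  static List<String> redundantDynamicLists(Set<String> columnsComparedToConstant,
      List<String> columnsWithDynamicList) {
    List<String> redundant = new ArrayList<>();
    for (String column : columnsWithDynamicList) {
      if (columnsComparedToConstant.contains(column)) {
        redundant.add(column);
      }
    }
    return redundant;
  }
}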
Use of org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc in project hive by apache.
Class DynamicPartitionPruner, method replaceDynamicLists.
/**
 * Recursively replaces each ExprNodeDynamicListDesc with the list of the actual values. As a result of this call
 * the original expression is modified, so that it can be pushed down to the TableScan to filter the data at the
 * source.
 * <p>
 * Clone the predicate first if the original must be preserved, since the node is modified in place.
 * @param node The node we are traversing
 * @param dynArgs The constant values we are substituting
 */
private void replaceDynamicLists(ExprNodeDesc node, Collection<ExprNodeConstantDesc> dynArgs) {
  List<ExprNodeDesc> children = node.getChildren();
  if (children != null && !children.isEmpty()) {
    ListIterator<ExprNodeDesc> iterator = children.listIterator();
    while (iterator.hasNext()) {
      ExprNodeDesc child = iterator.next();
      if (child instanceof ExprNodeDynamicListDesc) {
        iterator.remove();
        dynArgs.forEach(iterator::add);
      } else {
        replaceDynamicLists(child, dynArgs);
      }
    }
  }
}
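
The remove-then-add splice is the detail worth noting: ListIterator.add inserts before the implicit cursor, so the spliced-in values are not revisited and the loop continues cleanly past them. A standalone demonstration on plain strings (the names here are illustrative only):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class SpliceDemo {
  public static void main(String[] args) {
    List<String> children = new ArrayList<>(Arrays.asList("col", "DYNAMIC_LIST"));
    List<String> actualValues = Arrays.asList("'a'", "'b'", "'c'");

    ListIterator<String> it = children.listIterator();
    while (it.hasNext()) {
      if ("DYNAMIC_LIST".equals(it.next())) {
        it.remove();                   // drop the placeholder
        actualValues.forEach(it::add); // splice the real values in
      }
    }
    System.out.println(children); // [col, 'a', 'b', 'c']
  }
}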