Use of org.apache.drill.common.expression.LogicalExpression in project Drill by Apache.
The class ExpressionTreeMaterializer, method materializeFilterExpr.
public static LogicalExpression materializeFilterExpr(LogicalExpression expr, Map<SchemaPath, ColumnStatistics> fieldTypes,
    ErrorCollector errorCollector, FunctionLookupContext functionLookupContext) {
  final FilterMaterializeVisitor filterMaterializeVisitor = new FilterMaterializeVisitor(fieldTypes, errorCollector);
  LogicalExpression out = expr.accept(filterMaterializeVisitor, functionLookupContext);
  return out;
}
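Here the field types come from per-column statistics rather than a record batch, so the materialized filter can later be evaluated against parquet row group metadata. A minimal sketch of a call site, assuming filterExpr, columnStatisticsMap and functionImplementationRegistry are already in scope (as they are in ParquetGroupScan.applyFilter further below); variable names are illustrative:

  // Illustrative call site, not Drill source.
  ErrorCollector errorCollector = new ErrorCollectorImpl();
  LogicalExpression materialized = ExpressionTreeMaterializer.materializeFilterExpr(
      filterExpr, columnStatisticsMap, errorCollector, functionImplementationRegistry);
  if (errorCollector.hasErrors()) {
    return null;   // skip filter pushdown rather than proceed with a broken expression
  }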
Use of org.apache.drill.common.expression.LogicalExpression in project Drill by Apache.
The class FunctionGenerationHelper, method getOrderingComparator.
/**
 * Finds ordering comparator ("compare_to...") FunctionHolderExpression with
 * a specified ordering for NULL (and considering NULLS <i>equal</i>).
 * @param null_high whether NULL should compare as the lowest value (if
 *                  {@code false}) or the highest value (if {@code true})
 * @param left ...
 * @param right ...
 * @param registry ...
 * @return FunctionHolderExpression containing the found function implementation
 */
public static LogicalExpression getOrderingComparator(boolean null_high, HoldingContainer left, HoldingContainer right,
    FunctionImplementationRegistry registry) {
  final String comparator_name = null_high ? COMPARE_TO_NULLS_HIGH : COMPARE_TO_NULLS_LOW;
  if (!isComparableType(left.getMajorType()) || !isComparableType(right.getMajorType())) {
    throw new UnsupportedOperationException(formatCanNotCompareMsg(left.getMajorType(), right.getMajorType()));
  }
  LogicalExpression comparisonFunctionExpression = getFunctionExpression(comparator_name, Types.required(MinorType.INT), registry, left, right);
  ErrorCollector collector = new ErrorCollectorImpl();
  if (!isUnionType(left.getMajorType()) && !isUnionType(right.getMajorType())) {
    return ExpressionTreeMaterializer.materialize(comparisonFunctionExpression, null, collector, registry);
  } else {
    LogicalExpression typeComparisonFunctionExpression = getTypeComparisonFunction(comparisonFunctionExpression, left, right);
    return ExpressionTreeMaterializer.materialize(typeComparisonFunctionExpression, null, collector, registry);
  }
}
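Per the javadoc, the generated comparator treats NULLs as equal to each other, with null_high only deciding whether a null sorts above or below every non-null value. A standalone sketch of that contract on boxed values (an illustration of the semantics, not the generated Drill comparator):

  static int compareWithNulls(Integer left, Integer right, boolean nullHigh) {
    if (left == null && right == null) {
      return 0;                       // NULLS compare as equal
    }
    if (left == null) {
      return nullHigh ? 1 : -1;       // null is the highest value when nullHigh is true, lowest otherwise
    }
    if (right == null) {
      return nullHigh ? -1 : 1;
    }
    return left.compareTo(right);     // both non-null: ordinary comparison
  }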
Use of org.apache.drill.common.expression.LogicalExpression in project Drill by Apache.
The class AggPrelBase, method toDrill.
protected LogicalExpression toDrill(AggregateCall call, List<String> fn) {
  List<LogicalExpression> args = Lists.newArrayList();
  for (Integer i : call.getArgList()) {
    args.add(FieldReference.getWithQuotedRef(fn.get(i)));
  }
  // count(*) carries no arguments; fall back to the literal 1, i.e. count(1).
  if (args.isEmpty()) {
    args.add(new ValueExpressions.LongExpression(1L));
  }
  LogicalExpression expr = new FunctionCall(call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN);
  return expr;
}
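So COUNT(*) (an empty argument list) becomes count(1), while an aggregate over a column wraps a quoted field reference. A hedged sketch of the two resulting expressions, built with the same classes the method uses (the column name is made up for illustration):

  // count(*) -> count(1)
  List<LogicalExpression> countArgs = Lists.newArrayList();
  countArgs.add(new ValueExpressions.LongExpression(1L));
  LogicalExpression countStar = new FunctionCall("count", countArgs, ExpressionPosition.UNKNOWN);

  // sum(`n_nationkey`) -> sum over a quoted field reference
  List<LogicalExpression> sumArgs = Lists.newArrayList();
  sumArgs.add(FieldReference.getWithQuotedRef("n_nationkey"));
  LogicalExpression sumCol = new FunctionCall("sum", sumArgs, ExpressionPosition.UNKNOWN);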
Use of org.apache.drill.common.expression.LogicalExpression in project Drill by Apache.
The class ParquetFilterBuilder, method visitBooleanOperator.
@Override
public LogicalExpression visitBooleanOperator(BooleanOperator op, Set<LogicalExpression> value) {
  List<LogicalExpression> childPredicates = new ArrayList<>();
  String functionName = op.getName();
  for (LogicalExpression arg : op.args) {
    LogicalExpression childPredicate = arg.accept(this, value);
    if (childPredicate == null) {
      if (functionName.equals("booleanOr")) {
        // we can't include any leg of the OR if any of the predicates cannot be converted
        return null;
      }
    } else {
      childPredicates.add(childPredicate);
    }
  }
  if (childPredicates.size() == 0) {
    // no leg qualified; return null.
    return null;
  } else if (childPredicates.size() == 1) {
    // only one leg qualified; drop the boolean op and return the leg itself.
    return childPredicates.get(0);
  } else {
    if (functionName.equals("booleanOr")) {
      return new ParquetPredicates.OrPredicate(op.getName(), childPredicates, op.getPosition());
    } else {
      return new ParquetPredicates.AndPredicate(op.getName(), childPredicates, op.getPosition());
    }
  }
}
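The asymmetry between AND and OR is deliberate: dropping an unconvertible leg of an AND only makes the pushed-down predicate less selective (it may prune fewer row groups, but never prunes a wrong one), whereas keeping an OR while discarding one of its legs could wrongly drop row groups. A standalone sketch of that rule, with null standing for a leg that could not be converted (illustration only, not the Drill visitor):

  // Returns the usable legs, or null if nothing can be pushed down.
  static List<String> keepConvertibleLegs(String functionName, List<String> convertedLegs) {
    List<String> kept = new ArrayList<>();
    for (String leg : convertedLegs) {
      if (leg == null) {
        if (functionName.equals("booleanOr")) {
          return null;               // one unconvertible leg invalidates the whole OR
        }
        // booleanAnd: simply skip the unconvertible leg
      } else {
        kept.add(leg);
      }
    }
    return kept.isEmpty() ? null : kept;
  }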
Use of org.apache.drill.common.expression.LogicalExpression in project Drill by Apache.
The class ParquetGroupScan, method applyFilter.
public GroupScan applyFilter(LogicalExpression filterExpr, UdfUtilities udfUtilities,
    FunctionImplementationRegistry functionImplementationRegistry, OptionManager optionManager) {
  if (fileSet.size() == 1
      || !(parquetTableMetadata.isRowGroupPrunable())
      || rowGroupInfos.size() > optionManager.getOption(PlannerSettings.PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD)) {
    // Stop pruning when there is only a single parquet file, when the metadata does not
    // support row group level pruning, or when the # of row groups is beyond
    // PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD.
    return null;
  }
  final Set<SchemaPath> schemaPathsInExpr = filterExpr.accept(new ParquetRGFilterEvaluator.FieldReferenceFinder(), null);
  final List<RowGroupMetadata> qualifiedRGs = new ArrayList<>(parquetTableMetadata.getFiles().size());
  // HashSet keeps each fileName unique.
  Set<String> qualifiedFileNames = Sets.newHashSet();
  ParquetFilterPredicate filterPredicate = null;
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(optionManager, this.columns);
    Map<String, String> implicitColValues = columnExplorer.populateImplicitColumns(file.getPath(), selectionRoot);
    for (RowGroupMetadata rowGroup : file.getRowGroups()) {
      ParquetMetaStatCollector statCollector = new ParquetMetaStatCollector(parquetTableMetadata, rowGroup.getColumns(), implicitColValues);
      Map<SchemaPath, ColumnStatistics> columnStatisticsMap = statCollector.collectColStat(schemaPathsInExpr);
      if (filterPredicate == null) {
        ErrorCollector errorCollector = new ErrorCollectorImpl();
        LogicalExpression materializedFilter = ExpressionTreeMaterializer.materializeFilterExpr(
            filterExpr, columnStatisticsMap, errorCollector, functionImplementationRegistry);
        if (errorCollector.hasErrors()) {
          logger.error("{} error(s) encountered when materializing filter expression : {}",
              errorCollector.getErrorCount(), errorCollector.toErrorString());
          return null;
        }
        // logger.debug("materializedFilter : {}", ExpressionStringBuilder.toString(materializedFilter));
        Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter);
        filterPredicate = (ParquetFilterPredicate) ParquetFilterBuilder.buildParquetFilterPredicate(materializedFilter, constantBoundaries, udfUtilities);
        if (filterPredicate == null) {
          return null;
        }
      }
      if (ParquetRGFilterEvaluator.canDrop(filterPredicate, columnStatisticsMap, rowGroup.getRowCount())) {
        continue;
      }
      qualifiedRGs.add(rowGroup);
      // TODO : optimize when 1 file contains m row groups.
      qualifiedFileNames.add(file.getPath());
    }
  }
  if (qualifiedFileNames.size() == fileSet.size()) {
    // There is no reduction of rowGroups. Return the original groupScan.
    logger.debug("applyFilter does not have any pruning!");
    return null;
  } else if (qualifiedFileNames.size() == 0) {
    logger.warn("All rowgroups have been filtered out. Add back one to get schema from scanner");
    qualifiedFileNames.add(fileSet.iterator().next());
  }
  try {
    FileSelection newSelection = new FileSelection(null, Lists.newArrayList(qualifiedFileNames), getSelectionRoot(), cacheFileRoot, false);
    logger.info("applyFilter {} reduce parquet file # from {} to {}",
        ExpressionStringBuilder.toString(filterExpr), fileSet.size(), qualifiedFileNames.size());
    return this.clone(newSelection);
  } catch (IOException e) {
    logger.warn("Could not apply filter prune due to Exception : {}", e);
    return null;
  }
}
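The per-row-group drop decision is delegated to ParquetRGFilterEvaluator.canDrop, which evaluates the materialized predicate against the collected column statistics. A simplified standalone sketch of the underlying idea for a single "col > literal" comparison over min/max statistics (the RowGroupStats type is made up for illustration; the real evaluator handles the full predicate tree, null counts, and more types):

  // Hypothetical stand-in for one column's row-group statistics.
  static final class RowGroupStats {
    final long min;
    final long max;
    RowGroupStats(long min, long max) { this.min = min; this.max = max; }
  }

  // For "col > literal", a row group can be dropped when even its maximum value
  // fails the predicate, so no row in the group can possibly match.
  static boolean canDropGreaterThan(RowGroupStats stats, long literal) {
    return stats.max <= literal;
  }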