
Example 11 with TableScan

use of org.apache.calcite.rel.core.TableScan in project drill by apache.

In the class CassandraEnumerablePrelContext, the method getTablePath:

@Override
public String getTablePath(RelNode input) {
    TableScan scan = Objects.requireNonNull(DrillRelOptUtil.findScan(input));
    // The qualified name ends with the table name itself; drop it to keep only the schema path.
    List<String> qualifiedName = scan.getTable().getQualifiedName();
    return String.join(".", qualifiedName.subList(0, qualifiedName.size() - 1));
}
Also used : TableScan(org.apache.calcite.rel.core.TableScan)
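
The subList/join step is the whole trick here: the qualified name ends with the table itself, so dropping the last element yields the dotted schema path. A minimal standalone sketch of that logic, using a made-up Cassandra qualified name rather than anything from the Drill sources:

import java.util.Arrays;
import java.util.List;

public class TablePathDemo {
    public static void main(String[] args) {
        // Hypothetical qualified name: storage plugin, keyspace, table.
        List<String> qualifiedName = Arrays.asList("cassandra", "test_keyspace", "users");
        // Drop the trailing table name and join the rest with '.' to get the schema path.
        String tablePath = String.join(".", qualifiedName.subList(0, qualifiedName.size() - 1));
        System.out.println(tablePath); // prints "cassandra.test_keyspace"
    }
}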

Example 12 with TableScan

use of org.apache.calcite.rel.core.TableScan in project drill by apache.

In the class DrillRelMdSelectivity, the method getScanSelectivity:

private Double getScanSelectivity(RelNode rel, RelMetadataQuery mq, RexNode predicate) {
    double ROWCOUNT_UNKNOWN = -1.0;
    GroupScan scan = null;
    PlannerSettings settings = PrelUtil.getPlannerSettings(rel.getCluster().getPlanner());
    final RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
    if (rel instanceof DrillScanRel) {
        scan = ((DrillScanRel) rel).getGroupScan();
    } else if (rel instanceof ScanPrel) {
        scan = ((ScanPrel) rel).getGroupScan();
    }
    if (scan != null) {
        // Prefer row counts reported by the storage plugin's DbGroupScan when statistics are enabled.
        if (settings.isStatisticsEnabled() && scan instanceof DbGroupScan) {
            double filterRows = ((DbGroupScan) scan).getRowCount(predicate, rel);
            double totalRows = ((DbGroupScan) scan).getRowCount(null, rel);
            if (filterRows != ROWCOUNT_UNKNOWN && totalRows != ROWCOUNT_UNKNOWN && totalRows > 0) {
                return Math.min(1.0, filterRows / totalRows);
            }
        }
    }
    // Do not mess with statistics used for DBGroupScans.
    if (rel instanceof TableScan) {
        if (DrillRelOptUtil.guessRows(rel)) {
            return super.getSelectivity(rel, mq, predicate);
        }
        DrillTable table = Utilities.getDrillTable(rel.getTable());
        try {
            TableMetadata tableMetadata;
            if (table != null && (tableMetadata = table.getGroupScan().getTableMetadata()) != null && TableStatisticsKind.HAS_DESCRIPTIVE_STATISTICS.getValue(tableMetadata)) {
                List<SchemaPath> fieldNames;
                if (rel instanceof DrillScanRelBase) {
                    fieldNames = ((DrillScanRelBase) rel).getGroupScan().getColumns();
                } else {
                    fieldNames = rel.getRowType().getFieldNames().stream().map(SchemaPath::getSimplePath).collect(Collectors.toList());
                }
                return getScanSelectivityInternal(tableMetadata, predicate, fieldNames, rexBuilder);
            }
        } catch (IOException e) {
            // Fall back to the default estimate when table metadata cannot be read.
            return super.getSelectivity(rel, mq, predicate);
        }
    }
    return super.getSelectivity(rel, mq, predicate);
}
Also used : TableMetadata(org.apache.drill.metastore.metadata.TableMetadata) TableScan(org.apache.calcite.rel.core.TableScan) DrillScanRel(org.apache.drill.exec.planner.logical.DrillScanRel) ScanPrel(org.apache.drill.exec.planner.physical.ScanPrel) DrillTable(org.apache.drill.exec.planner.logical.DrillTable) PlannerSettings(org.apache.drill.exec.planner.physical.PlannerSettings) IOException(java.io.IOException) DbGroupScan(org.apache.drill.exec.physical.base.DbGroupScan) GroupScan(org.apache.drill.exec.physical.base.GroupScan) SchemaPath(org.apache.drill.common.expression.SchemaPath) DbGroupScan(org.apache.drill.exec.physical.base.DbGroupScan) DrillScanRelBase(org.apache.drill.exec.planner.common.DrillScanRelBase) RexBuilder(org.apache.calcite.rex.RexBuilder)
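
The core of the estimate is the clamped ratio min(1, filterRows / totalRows), with a fallback when either row count is unknown (-1). A self-contained sketch of just that arithmetic, with illustrative names rather than Drill APIs and a caller-supplied fallback selectivity:

public class SelectivityDemo {
    private static final double ROWCOUNT_UNKNOWN = -1.0;

    // Returns min(1, filterRows / totalRows), or the fallback when either count is unknown.
    static double scanSelectivity(double filterRows, double totalRows, double fallback) {
        if (filterRows != ROWCOUNT_UNKNOWN && totalRows != ROWCOUNT_UNKNOWN && totalRows > 0) {
            return Math.min(1.0, filterRows / totalRows);
        }
        return fallback;
    }

    public static void main(String[] args) {
        System.out.println(scanSelectivity(250, 1000, 0.25));   // 0.25
        System.out.println(scanSelectivity(-1.0, 1000, 0.25));  // unknown filter count -> 0.25
        System.out.println(scanSelectivity(5000, 1000, 0.25));  // clamped to 1.0
    }
}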

Example 13 with TableScan

use of org.apache.calcite.rel.core.TableScan in project drill by apache.

In the class DrillPushProjectIntoScanRule, the method onMatch:

@Override
public void onMatch(RelOptRuleCall call) {
    Project project = call.rel(0);
    TableScan scan = call.rel(1);
    try {
        if (scan.getRowType().getFieldList().isEmpty()) {
            return;
        }
        ProjectPushInfo projectPushInfo = DrillRelOptUtil.getFieldsInformation(scan.getRowType(), project.getProjects());
        if (!canPushProjectIntoScan(scan.getTable(), projectPushInfo) || skipScanConversion(projectPushInfo.createNewRowType(project.getCluster().getTypeFactory()), scan)) {
            // a trivial Project above the scan will be removed later by ProjectRemoveRule
            return;
        }
        TableScan newScan = createScan(scan, projectPushInfo);
        List<RexNode> newProjects = new ArrayList<>();
        // Rewrite each projection expression to reference the narrowed scan's fields.
        for (RexNode n : project.getChildExps()) {
            newProjects.add(n.accept(projectPushInfo.getInputReWriter()));
        }
        Project newProject = createProject(project, newScan, newProjects);
        if (ProjectRemoveRule.isTrivial(newProject)) {
            call.transformTo(newScan);
        } else {
            call.transformTo(newProject);
        }
    } catch (IOException e) {
        throw new DrillRuntimeException(e);
    }
}
Also used : Project(org.apache.calcite.rel.core.Project) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) TableScan(org.apache.calcite.rel.core.TableScan) EnumerableTableScan(org.apache.calcite.adapter.enumerable.EnumerableTableScan) ProjectPushInfo(org.apache.drill.exec.planner.common.DrillRelOptUtil.ProjectPushInfo) ArrayList(java.util.ArrayList) IOException(java.io.IOException) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) RexNode(org.apache.calcite.rex.RexNode)
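
DrillPushProjectIntoScanRule extends Calcite's RelOptRule, and the onMatch body above assumes the rule's operand tree binds the Project at index 0 and the TableScan at index 1. A bare-bones skeleton of that shape in plain Calcite, offered only as an illustration of the pattern (it is not the Drill class and performs no rewrite):

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.TableScan;

public class ProjectIntoScanRuleSketch extends RelOptRule {
    public ProjectIntoScanRuleSketch() {
        // Match a Project whose single input is a TableScan with no further children.
        super(operand(Project.class, operand(TableScan.class, none())), "ProjectIntoScanRuleSketch");
    }

    @Override
    public void onMatch(RelOptRuleCall call) {
        Project project = call.rel(0);   // index 0: the outer operand
        TableScan scan = call.rel(1);    // index 1: its child
        // A real rule would build a narrowed scan, rewrite the project expressions,
        // and hand the result to call.transformTo(...).
    }
}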

Example 14 with TableScan

use of org.apache.calcite.rel.core.TableScan in project drill by apache.

In the class MetastoreAnalyzeTableHandler, the method convertToDrel:

/**
 * Converts to Drill logical plan
 */
private DrillRel convertToDrel(RelNode relNode, SqlMetastoreAnalyzeTable sqlAnalyzeTable, DrillTableInfo drillTableInfo) throws ForemanSetupException, IOException {
    RelBuilder relBuilder = LOGICAL_BUILDER.create(relNode.getCluster(), null);
    DrillTable table = drillTableInfo.drillTable();
    AnalyzeInfoProvider analyzeInfoProvider = table.getGroupScan().getAnalyzeInfoProvider();
    List<String> schemaPath = drillTableInfo.schemaPath();
    String pluginName = schemaPath.get(0);
    String workspaceName = Strings.join(schemaPath.subList(1, schemaPath.size()), AbstractSchema.SCHEMA_SEPARATOR);
    String tableName = drillTableInfo.tableName();
    TableInfo tableInfo = TableInfo.builder()
        .name(tableName)
        .owner(table.getUserName())
        .type(analyzeInfoProvider.getTableTypeName())
        .storagePlugin(pluginName)
        .workspace(workspaceName)
        .build();
    ColumnNamesOptions columnNamesOptions = new ColumnNamesOptions(context.getOptions());
    List<String> segmentColumns = analyzeInfoProvider.getSegmentColumns(table, columnNamesOptions).stream().map(SchemaPath::getRootSegmentPath).collect(Collectors.toList());
    List<NamedExpression> segmentExpressions = segmentColumns.stream().map(partitionName -> new NamedExpression(SchemaPath.getSimplePath(partitionName), FieldReference.getWithQuotedRef(partitionName))).collect(Collectors.toList());
    List<MetadataInfo> rowGroupsInfo = Collections.emptyList();
    List<MetadataInfo> filesInfo = Collections.emptyList();
    Multimap<Integer, MetadataInfo> segments = ArrayListMultimap.create();
    BasicTablesRequests basicRequests;
    try {
        basicRequests = context.getMetastoreRegistry().get().tables().basicRequests();
    } catch (MetastoreException e) {
        logger.error("Error when obtaining Metastore instance for table {}", tableName, e);
        DrillRel convertedRelNode = convertToRawDrel(relBuilder.values(new String[] { MetastoreAnalyzeConstants.OK_FIELD_NAME, MetastoreAnalyzeConstants.SUMMARY_FIELD_NAME }, false, e.getMessage()).build());
        return new DrillScreenRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode);
    }
    MetadataType metadataLevel = getMetadataType(sqlAnalyzeTable);
    List<SchemaPath> interestingColumns = sqlAnalyzeTable.getFieldNames();
    MetastoreTableInfo metastoreTableInfo = basicRequests.metastoreTableInfo(tableInfo);
    List<MetadataInfo> allMetaToHandle = null;
    List<MetadataInfo> metadataToRemove = new ArrayList<>();
    // Check whether an incremental analyze can be performed for this table.
    if (metastoreTableInfo.isExists()) {
        RelNode finalRelNode = relNode;
        CheckedSupplier<TableScan, SqlUnsupportedException> tableScanSupplier = () -> DrillRelOptUtil.findScan(convertToDrel(finalRelNode.getInput(0)));
        MetadataInfoCollector metadataInfoCollector = analyzeInfoProvider.getMetadataInfoCollector(basicRequests, tableInfo, (FormatSelection) table.getSelection(), context.getPlannerSettings(), tableScanSupplier, interestingColumns, metadataLevel, segmentColumns.size());
        if (!metadataInfoCollector.isOutdated()) {
            DrillRel convertedRelNode = convertToRawDrel(relBuilder.values(new String[] { MetastoreAnalyzeConstants.OK_FIELD_NAME, MetastoreAnalyzeConstants.SUMMARY_FIELD_NAME }, false, "Table metadata is up to date, analyze wasn't performed.").build());
            return new DrillScreenRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode);
        }
        // updates scan to read updated / new files, pass removed files into metadata handler
        relNode = relNode.copy(relNode.getTraitSet(), Collections.singletonList(metadataInfoCollector.getPrunedScan()));
        filesInfo = metadataInfoCollector.getFilesInfo();
        segments = metadataInfoCollector.getSegmentsInfo();
        rowGroupsInfo = metadataInfoCollector.getRowGroupsInfo();
        allMetaToHandle = metadataInfoCollector.getAllMetaToHandle();
        metadataToRemove = metadataInfoCollector.getMetadataToRemove();
    }
    // Step 2: construct the plan that produces the analyze output
    DrillRel convertedRelNode = convertToRawDrel(relNode);
    boolean createNewAggregations = true;
    // List of columns for which statistics should be collected: interesting columns + segment columns
    List<SchemaPath> statisticsColumns = interestingColumns == null ? null : new ArrayList<>(interestingColumns);
    if (statisticsColumns != null) {
        segmentColumns.stream().map(SchemaPath::getSimplePath).forEach(statisticsColumns::add);
    }
    SchemaPath locationField = analyzeInfoProvider.getLocationField(columnNamesOptions);
    if (analyzeInfoProvider.supportsMetadataType(MetadataType.ROW_GROUP) && metadataLevel.includes(MetadataType.ROW_GROUP)) {
        MetadataHandlerContext handlerContext = MetadataHandlerContext.builder().tableInfo(tableInfo).metadataToHandle(rowGroupsInfo).metadataType(MetadataType.ROW_GROUP).depthLevel(segmentExpressions.size()).segmentColumns(segmentColumns).build();
        convertedRelNode = getRowGroupAggRelNode(segmentExpressions, convertedRelNode, createNewAggregations, statisticsColumns, handlerContext);
        createNewAggregations = false;
        locationField = SchemaPath.getSimplePath(MetastoreAnalyzeConstants.LOCATION_FIELD);
    }
    if (analyzeInfoProvider.supportsMetadataType(MetadataType.FILE) && metadataLevel.includes(MetadataType.FILE)) {
        MetadataHandlerContext handlerContext = MetadataHandlerContext.builder().tableInfo(tableInfo).metadataToHandle(filesInfo).metadataType(MetadataType.FILE).depthLevel(segmentExpressions.size()).segmentColumns(segmentColumns).build();
        convertedRelNode = getFileAggRelNode(segmentExpressions, convertedRelNode, createNewAggregations, statisticsColumns, locationField, handlerContext);
        locationField = SchemaPath.getSimplePath(MetastoreAnalyzeConstants.LOCATION_FIELD);
        createNewAggregations = false;
    }
    if (analyzeInfoProvider.supportsMetadataType(MetadataType.SEGMENT) && metadataLevel.includes(MetadataType.SEGMENT)) {
        for (int i = segmentExpressions.size(); i > 0; i--) {
            MetadataHandlerContext handlerContext = MetadataHandlerContext.builder().tableInfo(tableInfo).metadataToHandle(new ArrayList<>(segments.get(i - 1))).metadataType(MetadataType.SEGMENT).depthLevel(i).segmentColumns(segmentColumns.subList(0, i)).build();
            convertedRelNode = getSegmentAggRelNode(segmentExpressions, convertedRelNode, createNewAggregations, statisticsColumns, locationField, i, handlerContext);
            locationField = SchemaPath.getSimplePath(MetastoreAnalyzeConstants.LOCATION_FIELD);
            createNewAggregations = false;
        }
    }
    if (analyzeInfoProvider.supportsMetadataType(MetadataType.TABLE) && metadataLevel.includes(MetadataType.TABLE)) {
        MetadataHandlerContext handlerContext = MetadataHandlerContext.builder().tableInfo(tableInfo).metadataToHandle(Collections.emptyList()).metadataType(MetadataType.TABLE).depthLevel(segmentExpressions.size()).segmentColumns(segmentColumns).build();
        convertedRelNode = getTableAggRelNode(convertedRelNode, createNewAggregations, statisticsColumns, locationField, handlerContext);
    } else {
        throw new IllegalStateException("Analyze table with NONE level");
    }
    boolean useStatistics = context.getOptions().getOption(PlannerSettings.STATISTICS_USE);
    SqlNumericLiteral samplePercentLiteral = sqlAnalyzeTable.getSamplePercent();
    double samplePercent = samplePercentLiteral == null ? 100.0 : samplePercentLiteral.intValue(true);
    // Step 3: adds rel nodes for producing statistics analyze if required
    RelNode analyzeRel = useStatistics ? new DrillAnalyzeRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertToRawDrel(relNode), samplePercent) : convertToRawDrel(relBuilder.values(new String[] { "" }, "").build());
    MetadataControllerContext metadataControllerContext = MetadataControllerContext.builder()
        .tableInfo(tableInfo)
        .metastoreTableInfo(metastoreTableInfo)
        .location(((FormatSelection) table.getSelection()).getSelection().getSelectionRoot())
        .interestingColumns(interestingColumns)
        .segmentColumns(segmentColumns)
        .metadataToHandle(allMetaToHandle)
        .metadataToRemove(metadataToRemove)
        .analyzeMetadataLevel(metadataLevel)
        .build();
    convertedRelNode = new MetadataControllerRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode, analyzeRel, metadataControllerContext);
    return new DrillScreenRel(convertedRelNode.getCluster(), convertedRelNode.getTraitSet(), convertedRelNode);
}
Also used : MetadataType(org.apache.drill.metastore.metadata.MetadataType) Arrays(java.util.Arrays) UserException(org.apache.drill.common.exceptions.UserException) DrillRel(org.apache.drill.exec.planner.logical.DrillRel) LoggerFactory(org.slf4j.LoggerFactory) MetadataInfo(org.apache.drill.metastore.metadata.MetadataInfo) ArrayListMultimap(org.apache.drill.shaded.guava.com.google.common.collect.ArrayListMultimap) MetadataHandlerContext(org.apache.drill.exec.metastore.analyze.MetadataHandlerContext) PhysicalOperator(org.apache.drill.exec.physical.base.PhysicalOperator) SqlNode(org.apache.calcite.sql.SqlNode) RelBuilder(org.apache.calcite.tools.RelBuilder) FieldReference(org.apache.drill.common.expression.FieldReference) Multimap(org.apache.drill.shaded.guava.com.google.common.collect.Multimap) SqlSelect(org.apache.calcite.sql.SqlSelect) BasicTablesRequests(org.apache.drill.metastore.components.tables.BasicTablesRequests) SchemaPath(org.apache.drill.common.expression.SchemaPath) MetastoreAnalyzeConstants(org.apache.drill.exec.metastore.analyze.MetastoreAnalyzeConstants) Collectors(java.util.stream.Collectors) List(java.util.List) ValidationException(org.apache.calcite.tools.ValidationException) ForemanSetupException(org.apache.drill.exec.work.foreman.ForemanSetupException) SqlNumericLiteral(org.apache.calcite.sql.SqlNumericLiteral) MetadataInfoCollector(org.apache.drill.exec.metastore.analyze.MetadataInfoCollector) ExecConstants(org.apache.drill.exec.ExecConstants) SqlMetastoreAnalyzeTable(org.apache.drill.exec.planner.sql.parser.SqlMetastoreAnalyzeTable) MetadataAggregateContext(org.apache.drill.exec.metastore.analyze.MetadataAggregateContext) TableScan(org.apache.calcite.rel.core.TableScan) TableInfo(org.apache.drill.metastore.metadata.TableInfo) MetastoreException(org.apache.drill.metastore.exceptions.MetastoreException) MetadataControllerContext(org.apache.drill.exec.metastore.analyze.MetadataControllerContext) Pointer(org.apache.drill.exec.util.Pointer) DrillTable(org.apache.drill.exec.planner.logical.DrillTable) ColumnNamesOptions(org.apache.drill.exec.metastore.ColumnNamesOptions) ArrayList(java.util.ArrayList) SqlLiteral(org.apache.calcite.sql.SqlLiteral) SqlUnsupportedException(org.apache.drill.exec.work.foreman.SqlUnsupportedException) NamedExpression(org.apache.drill.common.logical.data.NamedExpression) MetadataAggRel(org.apache.drill.exec.planner.logical.MetadataAggRel) DrillRelOptUtil(org.apache.drill.exec.planner.common.DrillRelOptUtil) FormatSelection(org.apache.drill.exec.store.dfs.FormatSelection) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) MetadataHandlerRel(org.apache.drill.exec.planner.logical.MetadataHandlerRel) CheckedSupplier(org.apache.drill.common.util.function.CheckedSupplier) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) Logger(org.slf4j.Logger) DrillAnalyzeRel(org.apache.drill.exec.planner.logical.DrillAnalyzeRel) IOException(java.io.IOException) RelNode(org.apache.calcite.rel.RelNode) Prel(org.apache.drill.exec.planner.physical.Prel) AbstractSchema(org.apache.drill.exec.store.AbstractSchema) PlannerSettings(org.apache.drill.exec.planner.physical.PlannerSettings) RelConversionException(org.apache.calcite.tools.RelConversionException) Strings(org.apache.parquet.Strings) DrillScreenRel(org.apache.drill.exec.planner.logical.DrillScreenRel) PhysicalPlan(org.apache.drill.exec.physical.PhysicalPlan) LOGICAL_BUILDER(org.apache.drill.exec.planner.logical.DrillRelFactories.LOGICAL_BUILDER) 
SqlNodeList(org.apache.calcite.sql.SqlNodeList) MetadataControllerRel(org.apache.drill.exec.planner.logical.MetadataControllerRel) MetastoreTableInfo(org.apache.drill.metastore.components.tables.MetastoreTableInfo) Collections(java.util.Collections) AnalyzeInfoProvider(org.apache.drill.exec.metastore.analyze.AnalyzeInfoProvider)
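
One small detail worth calling out: the handler splits drillTableInfo.schemaPath() into a storage-plugin name (the first element) and a workspace name (the remaining elements joined with the schema separator, normally a dot). A throwaway sketch of that split with a made-up path, using plain String.join instead of Drill's helpers:

import java.util.Arrays;
import java.util.List;

public class SchemaPathSplitDemo {
    public static void main(String[] args) {
        // Hypothetical schema path: plugin "dfs", nested workspace "tmp.nested".
        List<String> schemaPath = Arrays.asList("dfs", "tmp", "nested");
        String pluginName = schemaPath.get(0);
        String workspaceName = String.join(".", schemaPath.subList(1, schemaPath.size()));
        System.out.println(pluginName + " / " + workspaceName); // prints "dfs / tmp.nested"
    }
}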

Example 15 with TableScan

use of org.apache.calcite.rel.core.TableScan in project drill by apache.

In the class ParquetPruneScanRule, the method getFilterOnScanParquet:

public static RelOptRule getFilterOnScanParquet(OptimizerRulesContext optimizerRulesContext) {
    return new PruneScanRule(RelOptHelper.some(DrillFilterRel.class, RelOptHelper.any(DrillScanRel.class)), "PruneScanRule:Filter_On_Scan_Parquet", optimizerRulesContext) {

        @Override
        public PartitionDescriptor getPartitionDescriptor(PlannerSettings settings, TableScan scanRel) {
            return new ParquetPartitionDescriptor(settings, (DrillScanRel) scanRel);
        }

        @Override
        public boolean matches(RelOptRuleCall call) {
            final DrillScanRel scan = call.rel(1);
            GroupScan groupScan = scan.getGroupScan();
            // this rule is applicable only for parquet based partition pruning
            if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) {
                return groupScan instanceof AbstractParquetGroupScan && groupScan.supportsPartitionFilterPushdown() && !scan.partitionFilterPushdown();
            } else {
                return groupScan instanceof AbstractParquetGroupScan && groupScan.supportsPartitionFilterPushdown();
            }
        }

        @Override
        public void onMatch(RelOptRuleCall call) {
            final DrillFilterRel filterRel = call.rel(0);
            final DrillScanRel scanRel = call.rel(1);
            doOnMatch(call, filterRel, null, scanRel);
        }
    };
}
Also used : AbstractParquetGroupScan(org.apache.drill.exec.store.parquet.AbstractParquetGroupScan) GroupScan(org.apache.drill.exec.physical.base.GroupScan) TableScan(org.apache.calcite.rel.core.TableScan) DrillScanRel(org.apache.drill.exec.planner.logical.DrillScanRel) AbstractParquetGroupScan(org.apache.drill.exec.store.parquet.AbstractParquetGroupScan) PlannerSettings(org.apache.drill.exec.planner.physical.PlannerSettings) DrillFilterRel(org.apache.drill.exec.planner.logical.DrillFilterRel) RelOptRuleCall(org.apache.calcite.plan.RelOptRuleCall) ParquetPartitionDescriptor(org.apache.drill.exec.planner.ParquetPartitionDescriptor)
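
The factory method returns an anonymous PruneScanRule subclass; however it is produced, a RelOptRule like this is only useful once it is registered with a planner. A rough usage sketch, assuming a plain Calcite HepPlanner rather than Drill's own rule wiring ('rule' and 'inputPlan' are placeholders supplied by the caller):

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgram;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;

public class RuleApplicationSketch {
    // Applies a single rule to a plan with a heuristic planner and returns the rewritten plan.
    static RelNode apply(RelOptRule rule, RelNode inputPlan) {
        HepProgram program = new HepProgramBuilder()
            .addRuleInstance(rule)
            .build();
        HepPlanner planner = new HepPlanner(program);
        planner.setRoot(inputPlan);
        return planner.findBestExp();
    }
}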

Aggregations

TableScan (org.apache.calcite.rel.core.TableScan): 51
RelNode (org.apache.calcite.rel.RelNode): 19
ArrayList (java.util.ArrayList): 14
PlannerSettings (org.apache.drill.exec.planner.physical.PlannerSettings): 13
Project (org.apache.calcite.rel.core.Project): 12
DrillScanRel (org.apache.drill.exec.planner.logical.DrillScanRel): 11
Filter (org.apache.calcite.rel.core.Filter): 10
IOException (java.io.IOException): 9
RexNode (org.apache.calcite.rex.RexNode): 9
GroupScan (org.apache.drill.exec.physical.base.GroupScan): 9
RelOptRuleCall (org.apache.calcite.plan.RelOptRuleCall): 8
DrillFilterRel (org.apache.drill.exec.planner.logical.DrillFilterRel): 8
Aggregate (org.apache.calcite.rel.core.Aggregate): 7
LogicalProject (org.apache.calcite.rel.logical.LogicalProject): 7
RexBuilder (org.apache.calcite.rex.RexBuilder): 6
RelOptCluster (org.apache.calcite.plan.RelOptCluster): 5
RelShuttleImpl (org.apache.calcite.rel.RelShuttleImpl): 5
LogicalJoin (org.apache.calcite.rel.logical.LogicalJoin): 5
RelDataType (org.apache.calcite.rel.type.RelDataType): 5
SchemaPath (org.apache.drill.common.expression.SchemaPath): 5