Use of org.apache.phoenix.compile.QueryCompiler in project phoenix by apache.
Class QueryOptimizer, method optimize.
public QueryPlan optimize(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory) throws SQLException {
    QueryCompiler compiler = new QueryCompiler(statement, select, resolver, targetColumns, parallelIteratorFactory, new SequenceManager(statement));
    QueryPlan dataPlan = compiler.compile();
    return optimize(dataPlan, statement, targetColumns, parallelIteratorFactory);
}
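For context, a hedged sketch of driving this overload directly is shown below. It is not taken from the Phoenix sources: the JDBC connection, the query text, the empty target-column list, and the null ParallelIteratorFactory are assumptions for illustration, and the optimizer instance is assumed to be reachable through the connection's query services.

static QueryPlan optimizeExample(java.sql.Connection conn) throws SQLException {
    // Unwrap the Phoenix-specific statement from the plain JDBC statement.
    PhoenixStatement stmt = conn.createStatement().unwrap(PhoenixStatement.class);
    // Parse a SELECT and resolve its tables and columns, mirroring what the compiler needs.
    SelectStatement select = new SQLParser("SELECT * FROM T WHERE V1 = 'a'").parseQuery();
    ColumnResolver resolver = FromCompiler.getResolverForQuery(select, stmt.getConnection());
    // Hand the parsed statement to the optimizer; it compiles a data plan via QueryCompiler
    // and then chooses among the data plan and any applicable index plans.
    return stmt.getConnection().getQueryServices().getOptimizer()
            .optimize(stmt, select, resolver, Collections.<PDatum>emptyList(), null);
}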
Use of org.apache.phoenix.compile.QueryCompiler in project phoenix by apache.
Class QueryOptimizer, method addPlan.
private static QueryPlan addPlan(PhoenixStatement statement, SelectStatement select, PTable index, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory, QueryPlan dataPlan, boolean isHinted) throws SQLException {
    int nColumns = dataPlan.getProjector().getColumnCount();
    String tableAlias = dataPlan.getTableRef().getTableAlias();
    // double quote in case it's case sensitive
    String alias = tableAlias == null ? null : '"' + tableAlias + '"';
    String schemaName = index.getParentSchemaName().getString();
    schemaName = schemaName.length() == 0 ? null : '"' + schemaName + '"';
    String tableName = '"' + index.getTableName().getString() + '"';
    TableNode table = FACTORY.namedTable(alias, FACTORY.table(schemaName, tableName));
    SelectStatement indexSelect = FACTORY.select(select, table);
    ColumnResolver resolver = FromCompiler.getResolverForQuery(indexSelect, statement.getConnection());
    // We will or will not do tuple projection according to the data plan.
    boolean isProjected = dataPlan.getContext().getResolver().getTables().get(0).getTable().getType() == PTableType.PROJECTED;
    // Check index state of the now potentially updated index table to make sure it's active.
    if (PIndexState.ACTIVE.equals(resolver.getTables().get(0).getTable().getIndexState())) {
        try {
            // Translate nodes that match indexed expressions to the associated index column parse nodes.
            indexSelect = ParseNodeRewriter.rewrite(indexSelect, new IndexExpressionParseNodeRewriter(index, null, statement.getConnection(), indexSelect.getUdfParseNodes()));
            QueryCompiler compiler = new QueryCompiler(statement, indexSelect, resolver, targetColumns, parallelIteratorFactory, dataPlan.getContext().getSequenceManager(), isProjected);
            QueryPlan plan = compiler.compile();
            // If the query has no WHERE clause and the local index still needs columns from the
            // data table, skip this index: fetching the missing columns for every index row amounts
            // to scanning both the local index and the data table, so we don't use the index even
            // though the query has no where clause.
            if (index.getIndexType() == IndexType.LOCAL && indexSelect.getWhere() == null && !plan.getContext().getDataColumns().isEmpty()) {
                return null;
            }
            // Comparing projected column counts handles the wildcard case: the index must contain
            // all columns from the data table to be able to be used.
            if (plan.getTableRef().getTable().getIndexState() == PIndexState.ACTIVE) {
                if (plan.getProjector().getColumnCount() == nColumns) {
                    return plan;
                } else if (index.getIndexType() == IndexType.GLOBAL) {
                    String schemaNameStr = index.getSchemaName() == null ? null : index.getSchemaName().getString();
                    String tableNameStr = index.getTableName() == null ? null : index.getTableName().getString();
                    throw new ColumnNotFoundException(schemaNameStr, tableNameStr, null, "*");
                }
            }
        } catch (ColumnNotFoundException e) {
            /* Means that a column is being used that's not in our index.
             * Since we currently don't keep stats, we don't know the selectivity of the index.
             * For now, if this is a hinted plan, we will try rewriting the query as a subquery
             * (an illustrative end-to-end sketch follows this method); otherwise we just don't
             * use this index (as opposed to trying to join back from the index table to the
             * data table).
             */
            SelectStatement dataSelect = (SelectStatement) dataPlan.getStatement();
            ParseNode where = dataSelect.getWhere();
            if (isHinted && where != null) {
                StatementContext context = new StatementContext(statement, resolver);
                WhereConditionRewriter whereRewriter = new WhereConditionRewriter(FromCompiler.getResolver(dataPlan.getTableRef()), context);
                where = where.accept(whereRewriter);
                if (where != null) {
                    PTable dataTable = dataPlan.getTableRef().getTable();
                    List<PColumn> pkColumns = dataTable.getPKColumns();
                    List<AliasedNode> aliasedNodes = Lists.<AliasedNode>newArrayListWithExpectedSize(pkColumns.size());
                    List<ParseNode> nodes = Lists.<ParseNode>newArrayListWithExpectedSize(pkColumns.size());
                    boolean isSalted = dataTable.getBucketNum() != null;
                    boolean isTenantSpecific = dataTable.isMultiTenant() && statement.getConnection().getTenantId() != null;
                    int posOffset = (isSalted ? 1 : 0) + (isTenantSpecific ? 1 : 0);
                    for (int i = posOffset; i < pkColumns.size(); i++) {
                        PColumn column = pkColumns.get(i);
                        String indexColName = IndexUtil.getIndexColumnName(column);
                        ParseNode indexColNode = new ColumnParseNode(null, '"' + indexColName + '"', indexColName);
                        PDataType indexColType = IndexUtil.getIndexColumnDataType(column);
                        PDataType dataColType = column.getDataType();
                        if (indexColType != dataColType) {
                            indexColNode = FACTORY.cast(indexColNode, dataColType, null, null);
                        }
                        aliasedNodes.add(FACTORY.aliasedNode(null, indexColNode));
                        nodes.add(new ColumnParseNode(null, '"' + column.getName().getString() + '"'));
                    }
                    SelectStatement innerSelect = FACTORY.select(indexSelect.getFrom(), indexSelect.getHint(), false, aliasedNodes, where, null, null, null, null, null, indexSelect.getBindCount(), false, indexSelect.hasSequence(), Collections.<SelectStatement>emptyList(), indexSelect.getUdfParseNodes());
                    ParseNode outerWhere = FACTORY.in(nodes.size() == 1 ? nodes.get(0) : FACTORY.rowValueConstructor(nodes), FACTORY.subquery(innerSelect, false), false, true);
                    ParseNode extractedCondition = whereRewriter.getExtractedCondition();
                    if (extractedCondition != null) {
                        outerWhere = FACTORY.and(Lists.newArrayList(outerWhere, extractedCondition));
                    }
                    HintNode hint = HintNode.combine(HintNode.subtract(indexSelect.getHint(), new Hint[] { Hint.INDEX, Hint.NO_CHILD_PARENT_JOIN_OPTIMIZATION }), FACTORY.hint("NO_INDEX"));
                    SelectStatement query = FACTORY.select(dataSelect, hint, outerWhere);
                    ColumnResolver queryResolver = FromCompiler.getResolverForQuery(query, statement.getConnection());
                    query = SubqueryRewriter.transform(query, queryResolver, statement.getConnection());
                    queryResolver = FromCompiler.getResolverForQuery(query, statement.getConnection());
                    query = StatementNormalizer.normalize(query, queryResolver);
                    QueryPlan plan = new QueryCompiler(statement, query, queryResolver, targetColumns, parallelIteratorFactory, dataPlan.getContext().getSequenceManager(), isProjected).compile();
                    return plan;
                }
            }
        }
    }
    return null;
}
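The catch block above is easiest to see end to end from the SQL level: when the hinted index cannot supply every projected column, addPlan pushes the indexable filter into an inner SELECT over the index and points back to the data table through the primary key via an IN subquery, forcing the outer scan onto the data table with an added NO_INDEX hint. The sketch below is not from the Phoenix sources; the table, index, and column names, and the ZooKeeper quorum in the JDBC URL, are illustrative assumptions.

static void explainHintedIndexExample() throws SQLException {
    try (java.sql.Connection conn = java.sql.DriverManager.getConnection("jdbc:phoenix:localhost");
         java.sql.Statement s = conn.createStatement()) {
        s.execute("CREATE TABLE IF NOT EXISTS T (PK1 VARCHAR NOT NULL, PK2 VARCHAR NOT NULL, "
                + "V1 VARCHAR, V2 VARCHAR CONSTRAINT PK PRIMARY KEY (PK1, PK2))");
        // IDX indexes V1 but does not cover V2, so a query projecting V2 cannot be answered
        // from the index alone.
        s.execute("CREATE INDEX IF NOT EXISTS IDX ON T (V1)");
        // The INDEX hint asks the optimizer to use IDX anyway; the printed plan reflects
        // whatever rewrite the optimizer settled on for this query.
        try (java.sql.ResultSet rs = s.executeQuery(
                "EXPLAIN SELECT /*+ INDEX(T IDX) */ PK1, V2 FROM T WHERE V1 = 'a'")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}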