Use of org.apache.phoenix.parse.ColumnParseNode in project phoenix by apache.
The class JoinCompiler, method extractFromSelect:
private List<AliasedNode> extractFromSelect(List<AliasedNode> select, TableRef tableRef, ColumnResolver resolver) throws SQLException {
    List<AliasedNode> ret = new ArrayList<AliasedNode>();
    ColumnRefParseNodeVisitor visitor = new ColumnRefParseNodeVisitor(resolver, statement.getConnection());
    for (AliasedNode aliasedNode : select) {
        ParseNode node = aliasedNode.getNode();
        if (node instanceof TableWildcardParseNode) {
            TableName tableName = ((TableWildcardParseNode) node).getTableName();
            if (tableRef.equals(resolver.resolveTable(tableName.getSchemaName(), tableName.getTableName()))) {
                ret.clear();
                ret.add(aliasedNode);
                return ret;
            }
            continue;
        }
        node.accept(visitor);
        ColumnRefParseNodeVisitor.ColumnRefType type = visitor.getContentType(Collections.singletonList(tableRef));
        if (type == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
            ret.add(aliasedNode);
        } else if (type == ColumnRefParseNodeVisitor.ColumnRefType.COMPLEX) {
            for (Map.Entry<ColumnRef, ColumnParseNode> entry : visitor.getColumnRefMap().entrySet()) {
                if (entry.getKey().getTableRef().equals(tableRef)) {
                    ret.add(NODE_FACTORY.aliasedNode(null, entry.getValue()));
                }
            }
        }
        visitor.reset();
    }
    return ret;
}
Use of org.apache.phoenix.parse.ColumnParseNode in project phoenix by apache.
The class SubselectRewriter, method createAliasedNodesFromSubselect:
/**
 * Create new AliasedNodes that reference the subSelectStatement's select aliases.
 * @param subSelectStatement
 * @param rewrittenOrderByNodes
 * @return the new select list referencing the subquery aliases
 */
private List<AliasedNode> createAliasedNodesFromSubselect(SelectStatement subSelectStatement, ArrayList<OrderByNode> rewrittenOrderByNodes) throws SQLException {
    List<AliasedNode> selectAliasedNodes = subSelectStatement.getSelect();
    List<AliasedNode> newSelectAliasedNodes = new ArrayList<AliasedNode>(selectAliasedNodes.size());
    Map<ParseNode, Integer> rewrittenOrderByParseNodeToIndex = new HashMap<ParseNode, Integer>(rewrittenOrderByNodes.size());
    for (int index = 0; index < rewrittenOrderByNodes.size(); index++) {
        OrderByNode rewrittenOrderByNode = rewrittenOrderByNodes.get(index);
        rewrittenOrderByParseNodeToIndex.put(rewrittenOrderByNode.getNode(), Integer.valueOf(index));
    }
    for (AliasedNode selectAliasedNode : selectAliasedNodes) {
        String selectAliasName = selectAliasedNode.getAlias();
        ParseNode oldSelectAliasParseNode = selectAliasedNode.getNode();
        if (selectAliasName == null) {
            selectAliasName = SchemaUtil.normalizeIdentifier(oldSelectAliasParseNode.getAlias());
        }
        // Expressions such as sum(code) must have an alias.
        if (selectAliasName == null) {
            throw new SQLExceptionInfo.Builder(SQLExceptionCode.SUBQUERY_SELECT_LIST_COLUMN_MUST_HAS_ALIAS).setMessage("the subquery is:" + subSelectStatement).build().buildException();
        }
        ColumnParseNode newColumnParseNode = NODE_FACTORY.column(null, selectAliasName, selectAliasName);
        Integer index = rewrittenOrderByParseNodeToIndex.get(oldSelectAliasParseNode);
        if (index != null) {
            // Replace the rewrittenOrderByNode's child with newColumnParseNode.
            OrderByNode oldOrderByNode = rewrittenOrderByNodes.get(index);
            rewrittenOrderByNodes.set(index, NODE_FACTORY.orderBy(newColumnParseNode, oldOrderByNode.isNullsLast(), oldOrderByNode.isAscending()));
        }
        AliasedNode newSelectAliasNode = NODE_FACTORY.aliasedNode(null, newColumnParseNode);
        newSelectAliasedNodes.add(newSelectAliasNode);
    }
    return newSelectAliasedNodes;
}
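For reference, a minimal standalone sketch of the alias-to-column translation performed above; the class name SubselectAliasSketch and the alias TOTAL are hypothetical, while ParseNodeFactory, ColumnParseNode, and AliasedNode are the Phoenix classes used in the method:

import org.apache.phoenix.parse.AliasedNode;
import org.apache.phoenix.parse.ColumnParseNode;
import org.apache.phoenix.parse.ParseNodeFactory;

public class SubselectAliasSketch {
    public static void main(String[] args) {
        ParseNodeFactory factory = new ParseNodeFactory();
        // Hypothetical alias, e.g. from a subquery "SELECT SUM(code) AS total ...".
        String alias = "TOTAL";
        // Build a column node that refers back to the subquery's select alias,
        // as createAliasedNodesFromSubselect does for each select item.
        ColumnParseNode aliasRef = factory.column(null, alias, alias);
        // Wrap it so it can be placed in the outer statement's select list.
        AliasedNode outerSelectItem = factory.aliasedNode(null, aliasRef);
        System.out.println(aliasRef.getName());
    }
}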
Use of org.apache.phoenix.parse.ColumnParseNode in project phoenix by apache.
The class IndexStatementRewriter, method visit:
@Override
public ParseNode visit(ColumnParseNode node) throws SQLException {
    ColumnRef dataColRef = getResolver().resolveColumn(node.getSchemaName(), node.getTableName(), node.getName());
    PColumn dataCol = dataColRef.getColumn();
    TableRef dataTableRef = dataColRef.getTableRef();
    // Rewrite view constants as literals; the column may carry inherited
    // view constants if based on an UPDATABLE view.
    if (dataCol.getViewConstant() != null) {
        byte[] viewConstant = dataCol.getViewConstant();
        // Ignore last byte, as it's only there so we can have a way to differentiate null
        // from the absence of a value.
        ptr.set(viewConstant, 0, viewConstant.length - 1);
        Object literal = dataCol.getDataType().toObject(ptr);
        return new LiteralParseNode(literal, dataCol.getDataType());
    }
    TableName tName = getReplacedTableName(dataTableRef);
    if (multiTableRewriteMap != null && tName == null)
        return node;
    String indexColName = IndexUtil.getIndexColumnName(dataCol);
    ParseNode indexColNode = new ColumnParseNode(tName, '"' + indexColName + '"', node.getAlias());
    PDataType indexColType = IndexUtil.getIndexColumnDataType(dataCol);
    PDataType dataColType = dataColRef.getColumn().getDataType();
    // TODO: test case for this
    if (!isTopLevel() && indexColType != dataColType) {
        indexColNode = FACTORY.cast(indexColNode, dataColType, null, null);
    }
    return indexColNode;
}
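For reference, a minimal standalone sketch of the index-column quoting idiom used above; the family/column names CF and V1 and the class name are hypothetical, while IndexUtil and ColumnParseNode are the Phoenix classes involved:

import org.apache.phoenix.parse.ColumnParseNode;
import org.apache.phoenix.util.IndexUtil;

public class IndexColumnNodeSketch {
    public static void main(String[] args) {
        // Derive the index column name for a hypothetical data column V1 in family CF.
        String indexColName = IndexUtil.getIndexColumnName("CF", "V1");
        // Double-quote the generated name so its family separator survives identifier
        // normalization, mirroring the quoting in IndexStatementRewriter.visit() above.
        ColumnParseNode node = new ColumnParseNode(null, '"' + indexColName + '"', null);
        System.out.println(node.getName());
    }
}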
Use of org.apache.phoenix.parse.ColumnParseNode in project phoenix by apache.
The class QueryOptimizer, method addPlan:
private QueryPlan addPlan(PhoenixStatement statement, SelectStatement select, PTable index, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory, QueryPlan dataPlan, boolean isHinted) throws SQLException {
    int nColumns = dataPlan.getProjector().getColumnCount();
    String tableAlias = dataPlan.getTableRef().getTableAlias();
    // Double-quote the alias in case it's case sensitive.
    String alias = tableAlias == null ? null : '"' + tableAlias + '"';
    String schemaName = index.getParentSchemaName().getString();
    schemaName = schemaName.length() == 0 ? null : '"' + schemaName + '"';
    String tableName = '"' + index.getTableName().getString() + '"';
    TableNode table = FACTORY.namedTable(alias, FACTORY.table(schemaName, tableName), select.getTableSamplingRate());
    SelectStatement indexSelect = FACTORY.select(select, table);
    ColumnResolver resolver = FromCompiler.getResolverForQuery(indexSelect, statement.getConnection());
    // We will or will not do tuple projection according to the data plan.
    boolean isProjected = dataPlan.getContext().getResolver().getTables().get(0).getTable().getType() == PTableType.PROJECTED;
    // Check the index state of the now potentially updated index table to make sure it's active.
    TableRef indexTableRef = resolver.getTables().get(0);
    PTable indexTable = indexTableRef.getTable();
    PIndexState indexState = indexTable.getIndexState();
    Map<TableRef, QueryPlan> dataPlans = Collections.singletonMap(indexTableRef, dataPlan);
    if (indexState == PIndexState.ACTIVE || indexState == PIndexState.PENDING_ACTIVE || (indexState == PIndexState.PENDING_DISABLE && isUnderPendingDisableThreshold(indexTableRef.getCurrentTime(), indexTable.getIndexDisableTimestamp()))) {
        try {
            // Translate nodes that match indexed expressions to the associated column parse nodes.
            indexSelect = ParseNodeRewriter.rewrite(indexSelect, new IndexExpressionParseNodeRewriter(index, null, statement.getConnection(), indexSelect.getUdfParseNodes()));
            QueryCompiler compiler = new QueryCompiler(statement, indexSelect, resolver, targetColumns, parallelIteratorFactory, dataPlan.getContext().getSequenceManager(), isProjected, true, dataPlans);
            QueryPlan plan = compiler.compile();
            // Skip a local index when the query has no WHERE clause and columns still need
            // to be fetched from the data table for every row.
            if (index.getIndexType() == IndexType.LOCAL && indexSelect.getWhere() == null && !plan.getContext().getDataColumns().isEmpty()) {
                return null;
            }
            indexTableRef = plan.getTableRef();
            indexTable = indexTableRef.getTable();
            indexState = indexTable.getIndexState();
            // Comparing column counts handles the wildcard cases correctly: in that case the index
            // must contain all columns from the data table to be able to be used.
            if (indexState == PIndexState.ACTIVE || indexState == PIndexState.PENDING_ACTIVE || (indexState == PIndexState.PENDING_DISABLE && isUnderPendingDisableThreshold(indexTableRef.getCurrentTime(), indexTable.getIndexDisableTimestamp()))) {
                if (plan.getProjector().getColumnCount() == nColumns) {
                    return plan;
                } else if (index.getIndexType() == IndexType.GLOBAL) {
                    String schemaNameStr = index.getSchemaName() == null ? null : index.getSchemaName().getString();
                    String tableNameStr = index.getTableName() == null ? null : index.getTableName().getString();
                    throw new ColumnNotFoundException(schemaNameStr, tableNameStr, null, "*");
                }
            }
        } catch (ColumnNotFoundException e) {
            /* Means that a column is being used that's not in our index.
             * Since we currently don't keep stats, we don't know the selectivity of the index.
             * For now, if this is a hinted plan, we will try rewriting the query as a subquery;
             * otherwise we just don't use this index (as opposed to trying to join back from
             * the index table to the data table).
             */
            SelectStatement dataSelect = (SelectStatement) dataPlan.getStatement();
            ParseNode where = dataSelect.getWhere();
            if (isHinted && where != null) {
                StatementContext context = new StatementContext(statement, resolver);
                WhereConditionRewriter whereRewriter = new WhereConditionRewriter(FromCompiler.getResolver(dataPlan.getTableRef()), context);
                where = where.accept(whereRewriter);
                if (where != null) {
                    PTable dataTable = dataPlan.getTableRef().getTable();
                    List<PColumn> pkColumns = dataTable.getPKColumns();
                    List<AliasedNode> aliasedNodes = Lists.<AliasedNode>newArrayListWithExpectedSize(pkColumns.size());
                    List<ParseNode> nodes = Lists.<ParseNode>newArrayListWithExpectedSize(pkColumns.size());
                    boolean isSalted = dataTable.getBucketNum() != null;
                    boolean isTenantSpecific = dataTable.isMultiTenant() && statement.getConnection().getTenantId() != null;
                    int posOffset = (isSalted ? 1 : 0) + (isTenantSpecific ? 1 : 0);
                    for (int i = posOffset; i < pkColumns.size(); i++) {
                        PColumn column = pkColumns.get(i);
                        String indexColName = IndexUtil.getIndexColumnName(column);
                        ParseNode indexColNode = new ColumnParseNode(null, '"' + indexColName + '"', indexColName);
                        PDataType indexColType = IndexUtil.getIndexColumnDataType(column);
                        PDataType dataColType = column.getDataType();
                        if (indexColType != dataColType) {
                            indexColNode = FACTORY.cast(indexColNode, dataColType, null, null);
                        }
                        aliasedNodes.add(FACTORY.aliasedNode(null, indexColNode));
                        nodes.add(new ColumnParseNode(null, '"' + column.getName().getString() + '"'));
                    }
                    SelectStatement innerSelect = FACTORY.select(indexSelect.getFrom(), indexSelect.getHint(), false, aliasedNodes, where, null, null, null, null, null, indexSelect.getBindCount(), false, indexSelect.hasSequence(), Collections.<SelectStatement>emptyList(), indexSelect.getUdfParseNodes());
                    ParseNode outerWhere = FACTORY.in(nodes.size() == 1 ? nodes.get(0) : FACTORY.rowValueConstructor(nodes), FACTORY.subquery(innerSelect, false), false, true);
                    ParseNode extractedCondition = whereRewriter.getExtractedCondition();
                    if (extractedCondition != null) {
                        outerWhere = FACTORY.and(Lists.newArrayList(outerWhere, extractedCondition));
                    }
                    HintNode hint = HintNode.combine(HintNode.subtract(indexSelect.getHint(), new Hint[] { Hint.INDEX, Hint.NO_CHILD_PARENT_JOIN_OPTIMIZATION }), FACTORY.hint("NO_INDEX"));
                    SelectStatement query = FACTORY.select(dataSelect, hint, outerWhere);
                    ColumnResolver queryResolver = FromCompiler.getResolverForQuery(query, statement.getConnection());
                    query = SubqueryRewriter.transform(query, queryResolver, statement.getConnection());
                    queryResolver = FromCompiler.getResolverForQuery(query, statement.getConnection());
                    query = StatementNormalizer.normalize(query, queryResolver);
                    QueryPlan plan = new QueryCompiler(statement, query, queryResolver, targetColumns, parallelIteratorFactory, dataPlan.getContext().getSequenceManager(), isProjected, true, dataPlans).compile();
                    return plan;
                }
            }
        }
    }
    return null;
}
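To make the hinted fallback in the catch block above concrete, here is a rough illustration of the query shape it produces; the table and column names are hypothetical and the SQL strings are only indicative of the structure, not output generated by Phoenix:

public class HintedRewriteShape {
    // The original hinted query references a column that is not covered by the chosen index.
    static final String ORIGINAL =
        "SELECT /*+ INDEX(t idx) */ col1, col2 FROM t WHERE indexed_col = 'x' AND other_col = 'y'";
    // The rewrite keeps the data table in the outer query (NO_INDEX) and pushes the
    // conditions the index can evaluate into an IN subquery over the index table,
    // selecting the columns that map back to the data table's primary key.
    static final String REWRITTEN =
        "SELECT /*+ NO_INDEX */ col1, col2 FROM t"
        + " WHERE (pk1, pk2) IN (SELECT pk1, pk2 FROM idx WHERE indexed_col = 'x')"
        + " AND other_col = 'y'";

    public static void main(String[] args) {
        System.out.println(ORIGINAL);
        System.out.println(REWRITTEN);
    }
}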