Example 1 with LiteralParseNode

Use of org.apache.phoenix.parse.LiteralParseNode in project phoenix by apache.

From the class MetaDataEndpointImpl, the method addArgumentToFunction:

private void addArgumentToFunction(List<Cell> results, PName functionName, PName type, Cell[] functionKeyValues, List<FunctionArgument> arguments, short argPosition) throws SQLException {
    int i = 0;
    int j = 0;
    while (i < results.size() && j < FUNCTION_ARG_KV_COLUMNS.size()) {
        Cell kv = results.get(i);
        Cell searchKv = FUNCTION_ARG_KV_COLUMNS.get(j);
        int cmp = Bytes.compareTo(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength(),
                searchKv.getQualifierArray(), searchKv.getQualifierOffset(), searchKv.getQualifierLength());
        if (cmp == 0) {
            functionKeyValues[j++] = kv;
            i++;
        } else if (cmp > 0) {
            functionKeyValues[j++] = null;
        } else {
            // shouldn't happen - means unexpected KV in system table column row
            i++;
        }
    }
    Cell isArrayKv = functionKeyValues[IS_ARRAY_INDEX];
    boolean isArrayType = isArrayKv == null ? false : Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isArrayKv.getValueArray(), isArrayKv.getValueOffset(), isArrayKv.getValueLength()));
    Cell isConstantKv = functionKeyValues[IS_CONSTANT_INDEX];
    boolean isConstant = isConstantKv == null ? false : Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isConstantKv.getValueArray(), isConstantKv.getValueOffset(), isConstantKv.getValueLength()));
    Cell defaultValueKv = functionKeyValues[DEFAULT_VALUE_INDEX];
    String defaultValue = defaultValueKv == null ? null : (String) PVarchar.INSTANCE.toObject(defaultValueKv.getValueArray(), defaultValueKv.getValueOffset(), defaultValueKv.getValueLength());
    Cell minValueKv = functionKeyValues[MIN_VALUE_INDEX];
    String minValue = minValueKv == null ? null : (String) PVarchar.INSTANCE.toObject(minValueKv.getValueArray(), minValueKv.getValueOffset(), minValueKv.getValueLength());
    Cell maxValueKv = functionKeyValues[MAX_VALUE_INDEX];
    String maxValue = maxValueKv == null ? null : (String) PVarchar.INSTANCE.toObject(maxValueKv.getValueArray(), maxValueKv.getValueOffset(), maxValueKv.getValueLength());
    FunctionArgument arg = new FunctionArgument(type.getString(), isArrayType, isConstant,
            defaultValue == null ? null : LiteralExpression.newConstant((new LiteralParseNode(defaultValue)).getValue()),
            minValue == null ? null : LiteralExpression.newConstant((new LiteralParseNode(minValue)).getValue()),
            maxValue == null ? null : LiteralExpression.newConstant((new LiteralParseNode(maxValue)).getValue()),
            argPosition);
    arguments.add(arg);
}
Also used: ByteString(com.google.protobuf.ByteString) Cell(org.apache.hadoop.hbase.Cell) FunctionArgument(org.apache.phoenix.parse.PFunction.FunctionArgument) PTinyint(org.apache.phoenix.schema.types.PTinyint) PSmallint(org.apache.phoenix.schema.types.PSmallint) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode)
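
The essential LiteralParseNode pattern here is a round trip from a stored string to a constant expression: wrap the value in a LiteralParseNode, then pass its parsed value to LiteralExpression.newConstant. A minimal standalone sketch of that round trip; the helper class and method names are illustrative, not Phoenix API:

import java.sql.SQLException;

import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.parse.LiteralParseNode;

public class LiteralRoundTrip {

    // Mirrors the defaultValue/minValue/maxValue handling above: a stored string
    // becomes a LiteralParseNode, whose value backs a constant expression.
    static LiteralExpression toConstant(String stored) throws SQLException {
        if (stored == null) {
            return null;
        }
        LiteralParseNode node = new LiteralParseNode(stored);
        return LiteralExpression.newConstant(node.getValue());
    }
}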

Example 2 with LiteralParseNode

Use of org.apache.phoenix.parse.LiteralParseNode in project phoenix by apache.

From the class TraceQueryPlan, the method iterator:

@Override
public ResultIterator iterator(ParallelScanGrouper scanGrouper) throws SQLException {
    final PhoenixConnection conn = stmt.getConnection();
    if (conn.getTraceScope() == null && !traceStatement.isTraceOn()) {
        return ResultIterator.EMPTY_ITERATOR;
    }
    return new ResultIterator() {

        // Ensures next() returns the single trace-id row only once.
        private boolean first = true;

        @Override
        public void close() throws SQLException {
        }

        @Override
        public Tuple next() throws SQLException {
            if (!first)
                return null;
            TraceScope traceScope = conn.getTraceScope();
            if (traceStatement.isTraceOn()) {
                conn.setSampler(Tracing.getConfiguredSampler(traceStatement));
                if (conn.getSampler() == Sampler.NEVER) {
                    closeTraceScope(conn);
                }
                if (traceScope == null && !conn.getSampler().equals(Sampler.NEVER)) {
                    traceScope = Tracing.startNewSpan(conn, "Enabling trace");
                    if (traceScope.getSpan() != null) {
                        conn.setTraceScope(traceScope);
                    } else {
                        closeTraceScope(conn);
                    }
                }
            } else {
                closeTraceScope(conn);
                conn.setSampler(Sampler.NEVER);
            }
            if (traceScope == null || traceScope.getSpan() == null)
                return null;
            first = false;
            ImmutableBytesWritable ptr = new ImmutableBytesWritable();
            ParseNodeFactory factory = new ParseNodeFactory();
            LiteralParseNode literal = factory.literal(traceScope.getSpan().getTraceId());
            LiteralExpression expression = LiteralExpression.newConstant(literal.getValue(), PLong.INSTANCE, Determinism.ALWAYS);
            expression.evaluate(null, ptr);
            byte[] rowKey = ByteUtil.copyKeyBytesIfNecessary(ptr);
            Cell cell = CellUtil.createCell(rowKey, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
                    EnvironmentEdgeManager.currentTimeMillis(), Type.Put.getCode(), HConstants.EMPTY_BYTE_ARRAY);
            List<Cell> cells = new ArrayList<Cell>(1);
            cells.add(cell);
            return new ResultTuple(Result.create(cells));
        }

        private void closeTraceScope(final PhoenixConnection conn) {
            if (conn.getTraceScope() != null) {
                conn.getTraceScope().close();
                conn.setTraceScope(null);
            }
        }

        @Override
        public void explain(List<String> planSteps) {
        }
    };
}
Also used: PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) ResultTuple(org.apache.phoenix.schema.tuple.ResultTuple) ResultIterator(org.apache.phoenix.iterate.ResultIterator) TraceScope(org.apache.htrace.TraceScope) ArrayList(java.util.ArrayList) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) List(java.util.List) Cell(org.apache.hadoop.hbase.Cell) ParseNodeFactory(org.apache.phoenix.parse.ParseNodeFactory)
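
The LiteralParseNode work in next() is the trace-id encoding: a long trace id becomes a literal node via ParseNodeFactory, then a PLong constant expression whose evaluated bytes form the row key of the single returned tuple. That step in isolation, as a minimal sketch (the class and method names are illustrative):

import java.sql.SQLException;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.Determinism;
import org.apache.phoenix.expression.LiteralExpression;
import org.apache.phoenix.parse.LiteralParseNode;
import org.apache.phoenix.parse.ParseNodeFactory;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.util.ByteUtil;

public class TraceIdEncoding {

    // Encodes a trace id the same way the iterator above builds its row key:
    // literal parse node -> typed constant expression -> evaluated bytes.
    static byte[] encodeTraceId(long traceId) throws SQLException {
        ParseNodeFactory factory = new ParseNodeFactory();
        LiteralParseNode literal = factory.literal(traceId);
        LiteralExpression expression =
                LiteralExpression.newConstant(literal.getValue(), PLong.INSTANCE, Determinism.ALWAYS);
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        expression.evaluate(null, ptr);
        return ByteUtil.copyKeyBytesIfNecessary(ptr);
    }
}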

Example 3 with LiteralParseNode

Use of org.apache.phoenix.parse.LiteralParseNode in project phoenix by apache.

From the class OrderByCompiler, the method compile:

/**
 * Gets a list of columns in the ORDER BY clause
 * @param context the query context for tracking various states
 * associated with the given select statement
 * @param statement TODO
 * @param groupBy the list of columns in the GROUP BY clause
 * @param limit the row limit or null if no limit
 * @return the compiled ORDER BY clause
 * @throws SQLException
 */
public static OrderBy compile(StatementContext context, SelectStatement statement, GroupBy groupBy, Integer limit, Integer offset, RowProjector rowProjector, TupleProjector tupleProjector, boolean isInRowKeyOrder) throws SQLException {
    List<OrderByNode> orderByNodes = statement.getOrderBy();
    if (orderByNodes.isEmpty()) {
        return OrderBy.EMPTY_ORDER_BY;
    }
    // For ungrouped aggregates (aggregation without a GROUP BY), compile ORDER BY expressions against an empty GROUP BY
    ExpressionCompiler compiler;
    if (groupBy.isUngroupedAggregate()) {
        compiler = new ExpressionCompiler(context, GroupBy.EMPTY_GROUP_BY) {

            @Override
            protected Expression addExpression(Expression expression) {
                return expression;
            }

            @Override
            protected void addColumn(PColumn column) {
            }
        };
    } else {
        compiler = new ExpressionCompiler(context, groupBy);
    }
    // accumulate columns in ORDER BY
    OrderPreservingTracker tracker = new OrderPreservingTracker(context, groupBy, Ordering.ORDERED, orderByNodes.size(), tupleProjector);
    LinkedHashSet<OrderByExpression> orderByExpressions = Sets.newLinkedHashSetWithExpectedSize(orderByNodes.size());
    for (OrderByNode node : orderByNodes) {
        ParseNode parseNode = node.getNode();
        Expression expression = null;
        if (parseNode instanceof LiteralParseNode && ((LiteralParseNode) parseNode).getType() == PInteger.INSTANCE) {
            Integer index = (Integer) ((LiteralParseNode) parseNode).getValue();
            int size = rowProjector.getColumnProjectors().size();
            if (index > size || index <= 0) {
                throw new SQLExceptionInfo.Builder(SQLExceptionCode.PARAM_INDEX_OUT_OF_BOUND).build().buildException();
            }
            expression = rowProjector.getColumnProjector(index - 1).getExpression();
        } else {
            expression = node.getNode().accept(compiler);
            // Detect a mix of aggregates and non-aggregates (e.g. ORDER BY txns, SUM(txns))
            if (!expression.isStateless() && !compiler.isAggregate()) {
                if (statement.isAggregate() || statement.isDistinct()) {
                    // Detect ORDER BY not in SELECT DISTINCT: SELECT DISTINCT count(*) FROM t ORDER BY x
                    if (statement.isDistinct()) {
                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.ORDER_BY_NOT_IN_SELECT_DISTINCT).setMessage(expression.toString()).build().buildException();
                    }
                    ExpressionCompiler.throwNonAggExpressionInAggException(expression.toString());
                }
            }
        }
        if (!expression.isStateless()) {
            boolean isAscending = node.isAscending();
            boolean isNullsLast = node.isNullsLast();
            tracker.track(expression, isAscending ? SortOrder.ASC : SortOrder.DESC, isNullsLast);
            // Invert the direction for columns stored DESC, since this is the order they actually are in.
            if (expression.getSortOrder() == SortOrder.DESC) {
                isAscending = !isAscending;
            }
            OrderByExpression orderByExpression = new OrderByExpression(expression, isNullsLast, isAscending);
            orderByExpressions.add(orderByExpression);
        }
        compiler.reset();
    }
    // we can remove ORDER BY clauses in case of only COUNT(DISTINCT...) clauses
    if (orderByExpressions.isEmpty() || groupBy.isUngroupedAggregate()) {
        return OrderBy.EMPTY_ORDER_BY;
    }
    // If we're ordering by the order returned by the scan, we don't need an order by
    if (isInRowKeyOrder && tracker.isOrderPreserving()) {
        if (tracker.isReverse()) {
            // REV_ROW_KEY_ORDER_BY scan would not take effect for a projected table, so don't return it for such table types.
            if (context.getConnection().getQueryServices().getProps()
                    .getBoolean(QueryServices.USE_REVERSE_SCAN_ATTRIB, QueryServicesOptions.DEFAULT_USE_REVERSE_SCAN)
                    && !context.getScanRanges().useSkipScanFilter()
                    && context.getCurrentTable().getTable().getType() != PTableType.PROJECTED
                    && context.getCurrentTable().getTable().getType() != PTableType.SUBQUERY) {
                return OrderBy.REV_ROW_KEY_ORDER_BY;
            }
        } else {
            return OrderBy.FWD_ROW_KEY_ORDER_BY;
        }
    }
    return new OrderBy(Lists.newArrayList(orderByExpressions.iterator()));
}
Also used: OrderByNode(org.apache.phoenix.parse.OrderByNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) PColumn(org.apache.phoenix.schema.PColumn) PInteger(org.apache.phoenix.schema.types.PInteger) OrderByExpression(org.apache.phoenix.expression.OrderByExpression) Expression(org.apache.phoenix.expression.Expression) ParseNode(org.apache.phoenix.parse.ParseNode) SQLExceptionInfo(org.apache.phoenix.exception.SQLExceptionInfo)
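
The LiteralParseNode branch above implements ordinal ORDER BY positions: in a query such as SELECT a, b FROM t ORDER BY 2, the 2 parses to an integer literal that is resolved against the projected columns instead of being compiled as an expression. A minimal sketch of just the detection step; the helper class and method are illustrative:

import org.apache.phoenix.parse.LiteralParseNode;
import org.apache.phoenix.parse.ParseNode;
import org.apache.phoenix.schema.types.PInteger;

public class OrdinalOrderBy {

    // Returns the 1-based column position if the ORDER BY item is an integer
    // literal (e.g. ORDER BY 2), or null if it must be compiled as an expression.
    static Integer ordinalPosition(ParseNode parseNode) {
        if (parseNode instanceof LiteralParseNode
                && ((LiteralParseNode) parseNode).getType() == PInteger.INSTANCE) {
            return (Integer) ((LiteralParseNode) parseNode).getValue();
        }
        return null;
    }
}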

Example 4 with LiteralParseNode

Use of org.apache.phoenix.parse.LiteralParseNode in project phoenix by apache.

From the class IndexStatementRewriter, the method visit:

@Override
public ParseNode visit(ColumnParseNode node) throws SQLException {
    ColumnRef dataColRef = getResolver().resolveColumn(node.getSchemaName(), node.getTableName(), node.getName());
    PColumn dataCol = dataColRef.getColumn();
    TableRef dataTableRef = dataColRef.getTableRef();
    // Substitute a literal for the column when it has a view constant (set when based on an UPDATABLE view)
    if (dataCol.getViewConstant() != null) {
        byte[] viewConstant = dataCol.getViewConstant();
        // Ignore last byte, as it's only there so we can have a way to differentiate null
        // from the absence of a value.
        ptr.set(viewConstant, 0, viewConstant.length - 1);
        Object literal = dataCol.getDataType().toObject(ptr);
        return new LiteralParseNode(literal, dataCol.getDataType());
    }
    TableName tName = getReplacedTableName(dataTableRef);
    if (multiTableRewriteMap != null && tName == null)
        return node;
    String indexColName = IndexUtil.getIndexColumnName(dataCol);
    ParseNode indexColNode = new ColumnParseNode(tName, '"' + indexColName + '"', node.getAlias());
    PDataType indexColType = IndexUtil.getIndexColumnDataType(dataCol);
    PDataType dataColType = dataColRef.getColumn().getDataType();
    // TODO: test case for this
    if (!isTopLevel() && indexColType != dataColType) {
        indexColNode = FACTORY.cast(indexColNode, dataColType, null, null);
    }
    return indexColNode;
}
Also used: PColumn(org.apache.phoenix.schema.PColumn) TableName(org.apache.phoenix.parse.TableName) PDataType(org.apache.phoenix.schema.types.PDataType) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) WildcardParseNode(org.apache.phoenix.parse.WildcardParseNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode) TableWildcardParseNode(org.apache.phoenix.parse.TableWildcardParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) FamilyWildcardParseNode(org.apache.phoenix.parse.FamilyWildcardParseNode) ColumnRef(org.apache.phoenix.schema.ColumnRef) TableRef(org.apache.phoenix.schema.TableRef)
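
Here the LiteralParseNode replaces a column reference outright: a view constant has no corresponding index column, so the rewriter inlines its decoded value as a typed literal. The decoding step on its own, as a minimal sketch (the helper class and method are illustrative; the trailing-byte handling follows the method above):

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.parse.LiteralParseNode;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.types.PDataType;

public class ViewConstantLiteral {

    // Decodes a column's stored view constant into a typed literal node, ignoring
    // the last byte, which only distinguishes null from the absence of a value.
    static LiteralParseNode fromViewConstant(PColumn column) {
        byte[] viewConstant = column.getViewConstant();
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        ptr.set(viewConstant, 0, viewConstant.length - 1);
        PDataType dataType = column.getDataType();
        Object literal = dataType.toObject(ptr);
        return new LiteralParseNode(literal, dataType);
    }
}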

Example 5 with LiteralParseNode

Use of org.apache.phoenix.parse.LiteralParseNode in project phoenix by apache.

From the class UpsertCompiler, the method prependTenantAndViewConstants:

private static SelectStatement prependTenantAndViewConstants(PTable table, SelectStatement select, String tenantId, Set<PColumn> addViewColumns, boolean useServerTimestamp) {
    if ((!table.isMultiTenant() || tenantId == null) && table.getViewIndexId() == null && addViewColumns.isEmpty() && !useServerTimestamp) {
        return select;
    }
    List<AliasedNode> selectNodes = newArrayListWithCapacity(select.getSelect().size() + 1 + addViewColumns.size());
    if (table.getViewIndexId() != null) {
        selectNodes.add(new AliasedNode(null, new LiteralParseNode(table.getViewIndexId())));
    }
    if (table.isMultiTenant() && tenantId != null) {
        selectNodes.add(new AliasedNode(null, new LiteralParseNode(tenantId)));
    }
    selectNodes.addAll(select.getSelect());
    for (PColumn column : addViewColumns) {
        byte[] byteValue = column.getViewConstant();
        Object value = column.getDataType().toObject(byteValue, 0, byteValue.length - 1);
        selectNodes.add(new AliasedNode(null, new LiteralParseNode(value)));
    }
    if (useServerTimestamp) {
        PColumn rowTimestampCol = table.getPKColumns().get(table.getRowTimestampColPos());
        selectNodes.add(new AliasedNode(null, getNodeForRowTimestampColumn(rowTimestampCol)));
    }
    return SelectStatement.create(select, selectNodes);
}
Also used: PColumn(org.apache.phoenix.schema.PColumn) AliasedNode(org.apache.phoenix.parse.AliasedNode) LiteralParseNode(org.apache.phoenix.parse.LiteralParseNode)
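
In this compiler the LiteralParseNodes are built directly: each constant to prepend (view index id, tenant id, view-column value) is wrapped in an unaliased AliasedNode so it can join the select list. A minimal sketch of that wrapping; the helper class and method are illustrative:

import java.util.ArrayList;
import java.util.List;

import org.apache.phoenix.parse.AliasedNode;
import org.apache.phoenix.parse.LiteralParseNode;

public class PrependedConstants {

    // Wraps each constant value in an unaliased select item, as the method above
    // does for the view index id, tenant id, and view-column constants.
    static List<AliasedNode> constantSelectNodes(Object... constants) {
        List<AliasedNode> nodes = new ArrayList<>(constants.length);
        for (Object constant : constants) {
            nodes.add(new AliasedNode(null, new LiteralParseNode(constant)));
        }
        return nodes;
    }
}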

Aggregations

LiteralParseNode (org.apache.phoenix.parse.LiteralParseNode): 7
Cell (org.apache.hadoop.hbase.Cell): 3
PColumn (org.apache.phoenix.schema.PColumn): 3
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 2
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 2
ResultIterator (org.apache.phoenix.iterate.ResultIterator): 2
ParseNode (org.apache.phoenix.parse.ParseNode): 2
ParseNodeFactory (org.apache.phoenix.parse.ParseNodeFactory): 2
ResultTuple (org.apache.phoenix.schema.tuple.ResultTuple): 2
PDataType (org.apache.phoenix.schema.types.PDataType): 2
ByteString (com.google.protobuf.ByteString): 1
IOException (java.io.IOException): 1
SQLException (java.sql.SQLException): 1
Timestamp (java.sql.Timestamp): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
Path (org.apache.hadoop.fs.Path): 1
RemoteIterator (org.apache.hadoop.fs.RemoteIterator): 1