Search in sources:

Example 1 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class LiteralResultIteratorPlanTest, method createProjectedTableFromLiterals.

private TableRef createProjectedTableFromLiterals(Object[] row) {
    List<PColumn> columns = Lists.<PColumn>newArrayList();
    for (int i = 0; i < row.length; i++) {
        String name = ParseNodeFactory.createTempAlias();
        Expression expr = LiteralExpression.newConstant(row[i]);
        PName colName = PNameFactory.newName(name);
        columns.add(new PColumnImpl(colName, PNameFactory.newName(VALUE_COLUMN_FAMILY), expr.getDataType(), expr.getMaxLength(), expr.getScale(), expr.isNullable(), i, expr.getSortOrder(), null, null, false, name, false, false, colName.getBytes()));
    }
    try {
        PTable pTable = PTableImpl.makePTable(null, PName.EMPTY_NAME, PName.EMPTY_NAME, PTableType.SUBQUERY, null, MetaDataProtocol.MIN_TABLE_TIMESTAMP, PTable.INITIAL_SEQ_NUM, null, null, columns, null, null, Collections.<PTable>emptyList(), false, Collections.<PName>emptyList(), null, null, false, false, false, null, null, null, true, false, 0, 0L, false, null, false, ImmutableStorageScheme.ONE_CELL_PER_COLUMN, QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, EncodedCQCounter.NULL_COUNTER, true);
        TableRef sourceTable = new TableRef(pTable);
        List<ColumnRef> sourceColumnRefs = Lists.<ColumnRef>newArrayList();
        for (PColumn column : sourceTable.getTable().getColumns()) {
            sourceColumnRefs.add(new ColumnRef(sourceTable, column.getPosition()));
        }
        return new TableRef(TupleProjectionCompiler.createProjectedTable(sourceTable, sourceColumnRefs, false));
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}
Also used: PColumnImpl (org.apache.phoenix.schema.PColumnImpl), SQLException (java.sql.SQLException), PTable (org.apache.phoenix.schema.PTable), PColumn (org.apache.phoenix.schema.PColumn), Expression (org.apache.phoenix.expression.Expression), ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression), LiteralExpression (org.apache.phoenix.expression.LiteralExpression), PName (org.apache.phoenix.schema.PName), ColumnRef (org.apache.phoenix.schema.ColumnRef), TableRef (org.apache.phoenix.schema.TableRef)
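
For orientation, a minimal, hypothetical invocation of the helper above is sketched below; the literal values and variable names are illustrative assumptions, not part of the Phoenix test itself.

// Hypothetical call inside LiteralResultIteratorPlanTest: each literal in the row becomes one
// projected PColumnImpl, named with a generated temp alias and typed from its LiteralExpression.
Object[] row = new Object[] { 1, "foo", 2.5d };
TableRef literalTable = createProjectedTableFromLiterals(row);
// The projected table should expose one column per literal, i.e. row.length columns.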

Example 2 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class UnnestArrayPlanTest, method testUnnestArrays.

private void testUnnestArrays(PArrayDataType arrayType, List<Object[]> arrays, boolean withOrdinality) throws Exception {
    PDataType baseType = PDataType.fromTypeId(arrayType.getSqlType() - PDataType.ARRAY_TYPE_BASE);
    List<Tuple> tuples = toTuples(arrayType, arrays);
    LiteralResultIterationPlan subPlan = new LiteralResultIterationPlan(tuples, CONTEXT, SelectStatement.SELECT_ONE, TableRef.EMPTY_TABLE_REF, RowProjector.EMPTY_PROJECTOR, null, null, OrderBy.EMPTY_ORDER_BY, null);
    LiteralExpression dummy = LiteralExpression.newConstant(null, arrayType);
    RowKeyValueAccessor accessor = new RowKeyValueAccessor(Arrays.asList(dummy), 0);
    UnnestArrayPlan plan = new UnnestArrayPlan(subPlan, new RowKeyColumnExpression(dummy, accessor), withOrdinality);
    PName colName = PNameFactory.newName("ELEM");
    PColumn elemColumn = new PColumnImpl(colName, PNameFactory.newName(VALUE_COLUMN_FAMILY), baseType, null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false, false, colName.getBytes());
    colName = PNameFactory.newName("IDX");
    PColumn indexColumn = withOrdinality ? new PColumnImpl(colName, PNameFactory.newName(VALUE_COLUMN_FAMILY), PInteger.INSTANCE, null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false, false, colName.getBytes()) : null;
    List<PColumn> columns = withOrdinality ? Arrays.asList(elemColumn, indexColumn) : Arrays.asList(elemColumn);
    ProjectedColumnExpression elemExpr = new ProjectedColumnExpression(elemColumn, columns, 0, elemColumn.getName().getString());
    ProjectedColumnExpression indexExpr = withOrdinality ? new ProjectedColumnExpression(indexColumn, columns, 1, indexColumn.getName().getString()) : null;
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    ResultIterator iterator = plan.iterator();
    for (Object[] o : flatten(arrays)) {
        Tuple tuple = iterator.next();
        assertNotNull(tuple);
        assertTrue(elemExpr.evaluate(tuple, ptr));
        Object elem = baseType.toObject(ptr);
        assertEquals(o[0], elem);
        if (withOrdinality) {
            assertTrue(indexExpr.evaluate(tuple, ptr));
            Object index = PInteger.INSTANCE.toObject(ptr);
            assertEquals(o[1], index);
        }
    }
    assertNull(iterator.next());
}
Also used: PColumnImpl (org.apache.phoenix.schema.PColumnImpl), ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable), RowKeyValueAccessor (org.apache.phoenix.schema.RowKeyValueAccessor), LiteralExpression (org.apache.phoenix.expression.LiteralExpression), ResultIterator (org.apache.phoenix.iterate.ResultIterator), ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression), RowKeyColumnExpression (org.apache.phoenix.expression.RowKeyColumnExpression), PColumn (org.apache.phoenix.schema.PColumn), PDataType (org.apache.phoenix.schema.types.PDataType), PName (org.apache.phoenix.schema.PName), Tuple (org.apache.phoenix.schema.tuple.Tuple), SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple)
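
A hedged sketch of how this test helper might be driven follows; PIntegerArray.INSTANCE and the sample arrays are assumptions chosen for illustration, not taken from the original test class.

// Hypothetical invocation: unnest two integer arrays and keep the ordinality (index) column,
// so both the ELEM and IDX ProjectedColumnExpressions above are exercised.
List<Object[]> arrays = Arrays.<Object[]>asList(
        new Object[] { 1, 2, 3 },
        new Object[] { 4, 5 });
testUnnestArrays(PIntegerArray.INSTANCE, arrays, true);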

Example 3 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class UnionCompiler, method contructSchemaTable (the misspelled name is the actual identifier in the Phoenix source).

public static TableRef contructSchemaTable(PhoenixStatement statement, List<QueryPlan> plans, List<AliasedNode> selectNodes) throws SQLException {
    List<TargetDataExpression> targetTypes = checkProjectionNumAndExpressions(plans);
    for (int i = 0; i < plans.size(); i++) {
        QueryPlan subPlan = plans.get(i);
        TupleProjector projector = getTupleProjector(subPlan.getProjector(), targetTypes);
        subPlan = new TupleProjectionPlan(subPlan, projector, null);
        plans.set(i, subPlan);
    }
    QueryPlan plan = plans.get(0);
    List<PColumn> projectedColumns = new ArrayList<PColumn>();
    for (int i = 0; i < plan.getProjector().getColumnCount(); i++) {
        ColumnProjector colProj = plan.getProjector().getColumnProjector(i);
        String name = selectNodes == null ? colProj.getName() : selectNodes.get(i).getAlias();
        PName colName = PNameFactory.newName(name);
        PColumnImpl projectedColumn = new PColumnImpl(colName, UNION_FAMILY_NAME, targetTypes.get(i).getType(), targetTypes.get(i).getMaxLength(), targetTypes.get(i).getScale(), colProj.getExpression().isNullable(), i, targetTypes.get(i).getSortOrder(), 500, null, false, colProj.getExpression().toString(), false, false, colName.getBytes());
        projectedColumns.add(projectedColumn);
    }
    Long scn = statement.getConnection().getSCN();
    PTable tempTable = PTableImpl.makePTable(statement.getConnection().getTenantId(), UNION_SCHEMA_NAME, UNION_TABLE_NAME, PTableType.SUBQUERY, null, HConstants.LATEST_TIMESTAMP, scn == null ? HConstants.LATEST_TIMESTAMP : scn, null, null, projectedColumns, null, null, null, true, null, null, null, true, true, true, null, null, null, false, false, 0, 0L, SchemaUtil.isNamespaceMappingEnabled(PTableType.SUBQUERY, statement.getConnection().getQueryServices().getProps()), null, false, ImmutableStorageScheme.ONE_CELL_PER_COLUMN, QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, PTable.EncodedCQCounter.NULL_COUNTER, true);
    TableRef tableRef = new TableRef(null, tempTable, 0, false);
    return tableRef;
}
Also used: PColumnImpl (org.apache.phoenix.schema.PColumnImpl), TupleProjector (org.apache.phoenix.execute.TupleProjector), TupleProjectionPlan (org.apache.phoenix.execute.TupleProjectionPlan), ArrayList (java.util.ArrayList), PTable (org.apache.phoenix.schema.PTable), PColumn (org.apache.phoenix.schema.PColumn), PName (org.apache.phoenix.schema.PName), TableRef (org.apache.phoenix.schema.TableRef)
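
For context, a hypothetical call site is sketched below; the statement, plans, and selectNodes variables are assumed to come from an already-compiled UNION ALL query and are not defined here.

// Hypothetical: build the synthetic schema table that a UNION ALL plan projects into.
TableRef unionTableRef = UnionCompiler.contructSchemaTable(statement, plans, selectNodes);
// Each projected PColumnImpl carries the common target type computed across the sub-plans.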

Example 4 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class MetaDataEndpointImpl, method addColumnToTable.

private void addColumnToTable(List<Cell> results, PName colName, PName famName, Cell[] colKeyValues, List<PColumn> columns, boolean isSalted) {
    int i = 0;
    int j = 0;
    while (i < results.size() && j < COLUMN_KV_COLUMNS.size()) {
        Cell kv = results.get(i);
        Cell searchKv = COLUMN_KV_COLUMNS.get(j);
        int cmp = Bytes.compareTo(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength(), searchKv.getQualifierArray(), searchKv.getQualifierOffset(), searchKv.getQualifierLength());
        if (cmp == 0) {
            colKeyValues[j++] = kv;
            i++;
        } else if (cmp > 0) {
            colKeyValues[j++] = null;
        } else {
            // shouldn't happen - means unexpected KV in system table column row
            i++;
        }
    }
    if (colKeyValues[DATA_TYPE_INDEX] == null || colKeyValues[NULLABLE_INDEX] == null || colKeyValues[ORDINAL_POSITION_INDEX] == null) {
        throw new IllegalStateException("Didn't find all required key values in '" + colName.getString() + "' column metadata row");
    }
    Cell columnSizeKv = colKeyValues[COLUMN_SIZE_INDEX];
    Integer maxLength = columnSizeKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(columnSizeKv.getValueArray(), columnSizeKv.getValueOffset(), SortOrder.getDefault());
    Cell decimalDigitKv = colKeyValues[DECIMAL_DIGITS_INDEX];
    Integer scale = decimalDigitKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(decimalDigitKv.getValueArray(), decimalDigitKv.getValueOffset(), SortOrder.getDefault());
    Cell ordinalPositionKv = colKeyValues[ORDINAL_POSITION_INDEX];
    int position = PInteger.INSTANCE.getCodec().decodeInt(ordinalPositionKv.getValueArray(), ordinalPositionKv.getValueOffset(), SortOrder.getDefault()) + (isSalted ? 1 : 0);
    Cell nullableKv = colKeyValues[NULLABLE_INDEX];
    boolean isNullable = PInteger.INSTANCE.getCodec().decodeInt(nullableKv.getValueArray(), nullableKv.getValueOffset(), SortOrder.getDefault()) != ResultSetMetaData.columnNoNulls;
    Cell dataTypeKv = colKeyValues[DATA_TYPE_INDEX];
    PDataType dataType = PDataType.fromTypeId(PInteger.INSTANCE.getCodec().decodeInt(dataTypeKv.getValueArray(), dataTypeKv.getValueOffset(), SortOrder.getDefault()));
    // For backward compatibility.
    if (maxLength == null && dataType == PBinary.INSTANCE)
        dataType = PVarbinary.INSTANCE;
    Cell sortOrderKv = colKeyValues[SORT_ORDER_INDEX];
    SortOrder sortOrder = sortOrderKv == null ? SortOrder.getDefault() : SortOrder.fromSystemValue(PInteger.INSTANCE.getCodec().decodeInt(sortOrderKv.getValueArray(), sortOrderKv.getValueOffset(), SortOrder.getDefault()));
    Cell arraySizeKv = colKeyValues[ARRAY_SIZE_INDEX];
    Integer arraySize = arraySizeKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(arraySizeKv.getValueArray(), arraySizeKv.getValueOffset(), SortOrder.getDefault());
    Cell viewConstantKv = colKeyValues[VIEW_CONSTANT_INDEX];
    byte[] viewConstant = viewConstantKv == null ? null : viewConstantKv.getValue();
    Cell isViewReferencedKv = colKeyValues[IS_VIEW_REFERENCED_INDEX];
    boolean isViewReferenced = isViewReferencedKv != null && Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isViewReferencedKv.getValueArray(), isViewReferencedKv.getValueOffset(), isViewReferencedKv.getValueLength()));
    Cell columnDefKv = colKeyValues[COLUMN_DEF_INDEX];
    String expressionStr = columnDefKv == null ? null : (String) PVarchar.INSTANCE.toObject(columnDefKv.getValueArray(), columnDefKv.getValueOffset(), columnDefKv.getValueLength());
    Cell isRowTimestampKV = colKeyValues[IS_ROW_TIMESTAMP_INDEX];
    boolean isRowTimestamp = isRowTimestampKV == null ? false : Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isRowTimestampKV.getValueArray(), isRowTimestampKV.getValueOffset(), isRowTimestampKV.getValueLength()));
    boolean isPkColumn = famName == null || famName.getString() == null;
    Cell columnQualifierKV = colKeyValues[COLUMN_QUALIFIER_INDEX];
    // Older tables won't have column qualifier metadata present. To make things simpler, just set the
    // column qualifier bytes by using the column name.
    byte[] columnQualifierBytes = columnQualifierKV != null ? Arrays.copyOfRange(columnQualifierKV.getValueArray(), columnQualifierKV.getValueOffset(), columnQualifierKV.getValueOffset() + columnQualifierKV.getValueLength()) : (isPkColumn ? null : colName.getBytes());
    PColumn column = new PColumnImpl(colName, famName, dataType, maxLength, scale, isNullable, position - 1, sortOrder, arraySize, viewConstant, isViewReferenced, expressionStr, isRowTimestamp, false, columnQualifierBytes);
    columns.add(column);
}
Also used: PInteger (org.apache.phoenix.schema.types.PInteger), PColumn (org.apache.phoenix.schema.PColumn), PColumnImpl (org.apache.phoenix.schema.PColumnImpl), PDataType (org.apache.phoenix.schema.types.PDataType), SortOrder (org.apache.phoenix.schema.SortOrder), ByteString (com.google.protobuf.ByteString), Cell (org.apache.hadoop.hbase.Cell), PTinyint (org.apache.phoenix.schema.types.PTinyint), PSmallint (org.apache.phoenix.schema.types.PSmallint)
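
All five examples funnel into the same 15-argument PColumnImpl constructor. The sketch below restates the call from this example with the argument roles spelled out; the roles are inferred from the local variable names at the call sites here and in Example 5, not from PColumnImpl's own documentation, so treat it as a reading aid rather than an API reference.

// Same constructor call as above, with inferred argument roles noted per line.
PColumn column = new PColumnImpl(
        colName,                // column name (PName)
        famName,                // column family name (PK columns have no real family name)
        dataType,               // PDataType decoded from the DATA_TYPE cell
        maxLength,              // COLUMN_SIZE, or null
        scale,                  // DECIMAL_DIGITS, or null
        isNullable,             // derived from the NULLABLE cell
        position - 1,           // zero-based ordinal position (salt adjustment applied above)
        sortOrder,              // SORT_ORDER, defaulting to SortOrder.getDefault()
        arraySize,              // ARRAY_SIZE, or null
        viewConstant,           // VIEW_CONSTANT bytes, or null
        isViewReferenced,       // IS_VIEW_REFERENCED flag
        expressionStr,          // COLUMN_DEF expression string, or null
        isRowTimestamp,         // IS_ROW_TIMESTAMP flag
        false,                  // isDynamic (named via column.isDynamic() in Example 5)
        columnQualifierBytes);  // COLUMN_QUALIFIER bytes, or the column name for older tables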

Example 5 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class FromCompiler, method getResolverForCompiledDerivedTable.

public static ColumnResolver getResolverForCompiledDerivedTable(PhoenixConnection connection, TableRef tableRef, RowProjector projector) throws SQLException {
    List<PColumn> projectedColumns = new ArrayList<PColumn>();
    PTable table = tableRef.getTable();
    for (PColumn column : table.getColumns()) {
        Expression sourceExpression = projector.getColumnProjector(column.getPosition()).getExpression();
        PColumnImpl projectedColumn = new PColumnImpl(column.getName(), column.getFamilyName(), sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(), column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), column.isDynamic(), column.getColumnQualifierBytes());
        projectedColumns.add(projectedColumn);
    }
    PTable t = PTableImpl.makePTable(table, projectedColumns);
    return new SingleTableColumnResolver(connection, new TableRef(tableRef.getTableAlias(), t, tableRef.getLowerBoundTimeStamp(), tableRef.hasDynamicCols()));
}
Also used: PColumn (org.apache.phoenix.schema.PColumn), PColumnImpl (org.apache.phoenix.schema.PColumnImpl), Expression (org.apache.phoenix.expression.Expression), ArrayList (java.util.ArrayList), PTable (org.apache.phoenix.schema.PTable), TableRef (org.apache.phoenix.schema.TableRef)
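
A minimal, hypothetical call of the resolver factory above; the connection, tableRef, and rowProjector arguments are assumed to come from an earlier compilation of the derived-table subquery.

// Hypothetical: wrap an already-compiled derived table so that its projected expressions,
// re-typed as PColumnImpl instances, can be resolved by the outer query.
ColumnResolver resolver =
        FromCompiler.getResolverForCompiledDerivedTable(connection, tableRef, rowProjector);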

Aggregations

PColumn (org.apache.phoenix.schema.PColumn): 12 usages
PColumnImpl (org.apache.phoenix.schema.PColumnImpl): 12 usages
PName (org.apache.phoenix.schema.PName): 9 usages
PTable (org.apache.phoenix.schema.PTable): 6 usages
TableRef (org.apache.phoenix.schema.TableRef): 5 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 4 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4 usages
DataInputStream (java.io.DataInputStream): 4 usages
DataOutputStream (java.io.DataOutputStream): 4 usages
Expression (org.apache.phoenix.expression.Expression): 4 usages
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 4 usages
ArrayList (java.util.ArrayList): 3 usages
ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression): 3 usages
ColumnRef (org.apache.phoenix.schema.ColumnRef): 3 usages
Test (org.junit.Test): 3 usages
SQLException (java.sql.SQLException): 2 usages
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 2 usages
Pair (org.apache.hadoop.hbase.util.Pair): 2 usages
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 2 usages
PDataType (org.apache.phoenix.schema.types.PDataType): 2 usages