
Example 6 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class UnionCompiler, the method contructSchemaTable:

public static TableRef contructSchemaTable(PhoenixStatement statement, List<QueryPlan> plans, List<AliasedNode> selectNodes) throws SQLException {
    List<TargetDataExpression> targetTypes = checkProjectionNumAndExpressions(plans);
    for (int i = 0; i < plans.size(); i++) {
        QueryPlan subPlan = plans.get(i);
        TupleProjector projector = getTupleProjector(subPlan.getProjector(), targetTypes);
        subPlan = new TupleProjectionPlan(subPlan, projector, null);
        plans.set(i, subPlan);
    }
    QueryPlan plan = plans.get(0);
    List<PColumn> projectedColumns = new ArrayList<PColumn>();
    for (int i = 0; i < plan.getProjector().getColumnCount(); i++) {
        ColumnProjector colProj = plan.getProjector().getColumnProjector(i);
        String name = selectNodes == null ? colProj.getName() : selectNodes.get(i).getAlias();
        PName colName = PNameFactory.newName(name);
        PColumnImpl projectedColumn = new PColumnImpl(PNameFactory.newName(name), UNION_FAMILY_NAME,
            targetTypes.get(i).getType(), targetTypes.get(i).getMaxLength(), targetTypes.get(i).getScale(),
            colProj.getExpression().isNullable(), i, targetTypes.get(i).getSortOrder(), 500, null, false,
            colProj.getExpression().toString(), false, false, colName.getBytes());
        projectedColumns.add(projectedColumn);
    }
    Long scn = statement.getConnection().getSCN();
    PTable tempTable = PTableImpl.makePTable(statement.getConnection().getTenantId(), UNION_SCHEMA_NAME,
        UNION_TABLE_NAME, PTableType.SUBQUERY, null, HConstants.LATEST_TIMESTAMP,
        scn == null ? HConstants.LATEST_TIMESTAMP : scn, null, null, projectedColumns, null, null, null,
        true, null, null, null, true, true, true, null, null, null, false, false, 0, 0L,
        SchemaUtil.isNamespaceMappingEnabled(PTableType.SUBQUERY,
            statement.getConnection().getQueryServices().getProps()),
        null, false, ImmutableStorageScheme.ONE_CELL_PER_COLUMN,
        QualifierEncodingScheme.NON_ENCODED_QUALIFIERS, PTable.EncodedCQCounter.NULL_COUNTER, true);
    TableRef tableRef = new TableRef(null, tempTable, 0, false);
    return tableRef;
}
Also used : PColumnImpl(org.apache.phoenix.schema.PColumnImpl) TupleProjector(org.apache.phoenix.execute.TupleProjector) TupleProjectionPlan(org.apache.phoenix.execute.TupleProjectionPlan) ArrayList(java.util.ArrayList) PTable(org.apache.phoenix.schema.PTable) PColumn(org.apache.phoenix.schema.PColumn) PName(org.apache.phoenix.schema.PName) TableRef(org.apache.phoenix.schema.TableRef)
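
The 15-argument PColumnImpl constructor recurs in every example on this page, so a sketch with each argument labeled may help when reading the calls above. The parameter names below are inferred from how the call sites use them (in particular the variable names in the MetaDataEndpointImpl example further down); they are not quoted from the class itself, so treat them as an approximation rather than the authoritative signature:

// Sketch only: argument roles inferred from the call sites on this page, not from PColumnImpl's source.
PColumn sketch = new PColumnImpl(
    PNameFactory.newName("C1"),     // column name
    PNameFactory.newName("F1"),     // column family (null for PK columns)
    PVarchar.INSTANCE,              // data type
    null,                           // max length (null = unspecified)
    null,                           // scale
    true,                           // nullable
    0,                              // ordinal position (0-based)
    SortOrder.getDefault(),         // sort order
    null,                           // array size (null for non-array columns)
    null,                           // view constant bytes
    false,                          // is view referenced
    null,                           // column definition / expression string
    false,                          // is row timestamp
    false,                          // appears to be a dynamic-column flag in this version
    Bytes.toBytes("C1"));           // column qualifier bytes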

Example 7 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class MetaDataEndpointImpl, the method addColumnToTable:

private void addColumnToTable(List<Cell> results, PName colName, PName famName, Cell[] colKeyValues, List<PColumn> columns, boolean isSalted) {
    int i = 0;
    int j = 0;
    while (i < results.size() && j < COLUMN_KV_COLUMNS.size()) {
        Cell kv = results.get(i);
        Cell searchKv = COLUMN_KV_COLUMNS.get(j);
        int cmp = Bytes.compareTo(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength(), searchKv.getQualifierArray(), searchKv.getQualifierOffset(), searchKv.getQualifierLength());
        if (cmp == 0) {
            colKeyValues[j++] = kv;
            i++;
        } else if (cmp > 0) {
            colKeyValues[j++] = null;
        } else {
            // shouldn't happen - means unexpected KV in system table column row
            i++;
        }
    }
    if (colKeyValues[DATA_TYPE_INDEX] == null || colKeyValues[NULLABLE_INDEX] == null || colKeyValues[ORDINAL_POSITION_INDEX] == null) {
        throw new IllegalStateException("Didn't find all required key values in '" + colName.getString() + "' column metadata row");
    }
    Cell columnSizeKv = colKeyValues[COLUMN_SIZE_INDEX];
    Integer maxLength = columnSizeKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(columnSizeKv.getValueArray(), columnSizeKv.getValueOffset(), SortOrder.getDefault());
    Cell decimalDigitKv = colKeyValues[DECIMAL_DIGITS_INDEX];
    Integer scale = decimalDigitKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(decimalDigitKv.getValueArray(), decimalDigitKv.getValueOffset(), SortOrder.getDefault());
    Cell ordinalPositionKv = colKeyValues[ORDINAL_POSITION_INDEX];
    int position = PInteger.INSTANCE.getCodec().decodeInt(ordinalPositionKv.getValueArray(), ordinalPositionKv.getValueOffset(), SortOrder.getDefault()) + (isSalted ? 1 : 0);
    Cell nullableKv = colKeyValues[NULLABLE_INDEX];
    boolean isNullable = PInteger.INSTANCE.getCodec().decodeInt(nullableKv.getValueArray(), nullableKv.getValueOffset(), SortOrder.getDefault()) != ResultSetMetaData.columnNoNulls;
    Cell dataTypeKv = colKeyValues[DATA_TYPE_INDEX];
    PDataType dataType = PDataType.fromTypeId(PInteger.INSTANCE.getCodec().decodeInt(dataTypeKv.getValueArray(), dataTypeKv.getValueOffset(), SortOrder.getDefault()));
    // For backward compatibility.
    if (maxLength == null && dataType == PBinary.INSTANCE)
        dataType = PVarbinary.INSTANCE;
    Cell sortOrderKv = colKeyValues[SORT_ORDER_INDEX];
    SortOrder sortOrder = sortOrderKv == null ? SortOrder.getDefault() : SortOrder.fromSystemValue(PInteger.INSTANCE.getCodec().decodeInt(sortOrderKv.getValueArray(), sortOrderKv.getValueOffset(), SortOrder.getDefault()));
    Cell arraySizeKv = colKeyValues[ARRAY_SIZE_INDEX];
    Integer arraySize = arraySizeKv == null ? null : PInteger.INSTANCE.getCodec().decodeInt(arraySizeKv.getValueArray(), arraySizeKv.getValueOffset(), SortOrder.getDefault());
    Cell viewConstantKv = colKeyValues[VIEW_CONSTANT_INDEX];
    byte[] viewConstant = viewConstantKv == null ? null : viewConstantKv.getValue();
    Cell isViewReferencedKv = colKeyValues[IS_VIEW_REFERENCED_INDEX];
    boolean isViewReferenced = isViewReferencedKv != null && Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isViewReferencedKv.getValueArray(), isViewReferencedKv.getValueOffset(), isViewReferencedKv.getValueLength()));
    Cell columnDefKv = colKeyValues[COLUMN_DEF_INDEX];
    String expressionStr = columnDefKv == null ? null : (String) PVarchar.INSTANCE.toObject(columnDefKv.getValueArray(), columnDefKv.getValueOffset(), columnDefKv.getValueLength());
    Cell isRowTimestampKV = colKeyValues[IS_ROW_TIMESTAMP_INDEX];
    boolean isRowTimestamp = isRowTimestampKV == null ? false : Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isRowTimestampKV.getValueArray(), isRowTimestampKV.getValueOffset(), isRowTimestampKV.getValueLength()));
    boolean isPkColumn = famName == null || famName.getString() == null;
    Cell columnQualifierKV = colKeyValues[COLUMN_QUALIFIER_INDEX];
    // Older tables won't have column qualifier metadata present. To make things simpler, just set the
    // column qualifier bytes by using the column name.
    byte[] columnQualifierBytes = columnQualifierKV != null ? Arrays.copyOfRange(columnQualifierKV.getValueArray(), columnQualifierKV.getValueOffset(), columnQualifierKV.getValueOffset() + columnQualifierKV.getValueLength()) : (isPkColumn ? null : colName.getBytes());
    PColumn column = new PColumnImpl(colName, famName, dataType, maxLength, scale, isNullable, position - 1, sortOrder, arraySize, viewConstant, isViewReferenced, expressionStr, isRowTimestamp, false, columnQualifierBytes);
    columns.add(column);
}
Also used : PInteger(org.apache.phoenix.schema.types.PInteger) PColumn(org.apache.phoenix.schema.PColumn) PColumnImpl(org.apache.phoenix.schema.PColumnImpl) PDataType(org.apache.phoenix.schema.types.PDataType) SortOrder(org.apache.phoenix.schema.SortOrder) ByteString(com.google.protobuf.ByteString) Cell(org.apache.hadoop.hbase.Cell) PTinyint(org.apache.phoenix.schema.types.PTinyint) PSmallint(org.apache.phoenix.schema.types.PSmallint)
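
The loop at the top of addColumnToTable is a standard two-pointer merge: both the result Cells and the expected COLUMN_KV_COLUMNS are sorted by qualifier, so one pass pairs each expected qualifier with its Cell and leaves null where the stored column row has no value for it. A simplified restatement of just that alignment step (a sketch, not the Phoenix implementation; the helper name is made up):

// Sketch: align sorted result Cells against a sorted list of expected qualifiers.
// aligned[j] ends up holding the Cell for expected.get(j), or null if the row lacks it.
static Cell[] alignByQualifier(List<Cell> results, List<Cell> expected) {
    Cell[] aligned = new Cell[expected.size()];
    int i = 0, j = 0;
    while (i < results.size() && j < expected.size()) {
        Cell kv = results.get(i);
        Cell want = expected.get(j);
        int cmp = Bytes.compareTo(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength(),
            want.getQualifierArray(), want.getQualifierOffset(), want.getQualifierLength());
        if (cmp == 0) {
            // qualifier matches the expected column
            aligned[j++] = kv;
            i++;
        } else if (cmp > 0) {
            // the expected column has no Cell in this row
            aligned[j++] = null;
        } else {
            // unexpected qualifier; skip it
            i++;
        }
    }
    return aligned;
}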

Example 8 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class ConnectionQueryServicesImpl, the method addColumnQualifierColumn:

// Special method for adding the column qualifier column for 4.10. 
private PhoenixConnection addColumnQualifierColumn(PhoenixConnection oldMetaConnection, Long timestamp) throws SQLException {
    Properties props = PropertiesUtil.deepCopy(oldMetaConnection.getClientInfo());
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(timestamp));
    // Cannot go through DriverManager or you end up in an infinite loop because it'll call init again
    PhoenixConnection metaConnection = new PhoenixConnection(oldMetaConnection, this, props);
    PTable sysCatalogPTable = metaConnection.getTable(new PTableKey(null, PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME));
    int numColumns = sysCatalogPTable.getColumns().size();
    try (PreparedStatement mutateTable = metaConnection.prepareStatement(MetaDataClient.MUTATE_TABLE)) {
        mutateTable.setString(1, null);
        mutateTable.setString(2, SYSTEM_CATALOG_SCHEMA);
        mutateTable.setString(3, SYSTEM_CATALOG_TABLE);
        mutateTable.setString(4, PTableType.SYSTEM.getSerializedValue());
        mutateTable.setLong(5, sysCatalogPTable.getSequenceNumber() + 1);
        mutateTable.setInt(6, numColumns + 1);
        mutateTable.execute();
    }
    List<Mutation> tableMetadata = new ArrayList<>();
    tableMetadata.addAll(metaConnection.getMutationState().toMutations(metaConnection.getSCN()).next().getSecond());
    metaConnection.rollback();
    PColumn column = new PColumnImpl(PNameFactory.newName("COLUMN_QUALIFIER"), PNameFactory.newName(DEFAULT_COLUMN_FAMILY_NAME), PVarbinary.INSTANCE, null, null, true, numColumns, SortOrder.ASC, null, null, false, null, false, false, Bytes.toBytes("COLUMN_QUALIFIER"));
    String upsertColumnMetadata = "UPSERT INTO " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CATALOG_TABLE + "\"( " +
        TENANT_ID + "," + TABLE_SCHEM + "," + TABLE_NAME + "," + COLUMN_NAME + "," + COLUMN_FAMILY + "," +
        DATA_TYPE + "," + NULLABLE + "," + COLUMN_SIZE + "," + DECIMAL_DIGITS + "," + ORDINAL_POSITION + "," +
        SORT_ORDER + "," + DATA_TABLE_NAME + "," + ARRAY_SIZE + "," + VIEW_CONSTANT + "," + IS_VIEW_REFERENCED + "," +
        PK_NAME + "," + KEY_SEQ + "," + COLUMN_DEF + "," + IS_ROW_TIMESTAMP +
        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
    try (PreparedStatement colUpsert = metaConnection.prepareStatement(upsertColumnMetadata)) {
        colUpsert.setString(1, null);
        colUpsert.setString(2, SYSTEM_CATALOG_SCHEMA);
        colUpsert.setString(3, SYSTEM_CATALOG_TABLE);
        colUpsert.setString(4, "COLUMN_QUALIFIER");
        colUpsert.setString(5, DEFAULT_COLUMN_FAMILY);
        colUpsert.setInt(6, column.getDataType().getSqlType());
        colUpsert.setInt(7, ResultSetMetaData.columnNullable);
        colUpsert.setNull(8, Types.INTEGER);
        colUpsert.setNull(9, Types.INTEGER);
        colUpsert.setInt(10, sysCatalogPTable.getBucketNum() != null ? numColumns : (numColumns + 1));
        colUpsert.setInt(11, SortOrder.ASC.getSystemValue());
        colUpsert.setString(12, null);
        colUpsert.setNull(13, Types.INTEGER);
        colUpsert.setBytes(14, null);
        colUpsert.setBoolean(15, false);
        colUpsert.setString(16, sysCatalogPTable.getPKName() == null ? null : sysCatalogPTable.getPKName().getString());
        colUpsert.setNull(17, Types.SMALLINT);
        colUpsert.setNull(18, Types.VARCHAR);
        colUpsert.setBoolean(19, false);
        colUpsert.execute();
    }
    tableMetadata.addAll(metaConnection.getMutationState().toMutations(metaConnection.getSCN()).next().getSecond());
    metaConnection.rollback();
    metaConnection.getQueryServices().addColumn(tableMetadata, sysCatalogPTable, Collections.<String, List<Pair<String, Object>>>emptyMap(), Collections.<String>emptySet(), Lists.newArrayList(column));
    metaConnection.removeTable(null, SYSTEM_CATALOG_NAME, null, timestamp);
    ConnectionQueryServicesImpl.this.removeTable(null, SYSTEM_CATALOG_NAME, null, timestamp);
    clearCache();
    return metaConnection;
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) PColumnImpl(org.apache.phoenix.schema.PColumnImpl) ArrayList(java.util.ArrayList) PreparedStatement(java.sql.PreparedStatement) Properties(java.util.Properties) PTable(org.apache.phoenix.schema.PTable) PTinyint(org.apache.phoenix.schema.types.PTinyint) PUnsignedTinyint(org.apache.phoenix.schema.types.PUnsignedTinyint) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) PColumn(org.apache.phoenix.schema.PColumn) Mutation(org.apache.hadoop.hbase.client.Mutation) PTableKey(org.apache.phoenix.schema.PTableKey) Pair(org.apache.hadoop.hbase.util.Pair)
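
Worth noting is the pattern this method uses twice for pushing metadata changes outside the normal commit path: run the statements on the SCN-pinned connection, harvest the pending mutations from its MutationState, roll the connection back so nothing is committed client-side, and finally hand the collected mutations to addColumn. A minimal restatement of that harvesting step, using only the calls already present in the snippet above:

// Sketch of the harvest-then-rollback step used twice in addColumnQualifierColumn:
// collect the uncommitted mutations produced by the preceding statements, then discard
// the connection-local state so they are only applied through addColumn().
List<Mutation> harvested = new ArrayList<>();
harvested.addAll(metaConnection.getMutationState()
    .toMutations(metaConnection.getSCN()).next().getSecond());
metaConnection.rollback();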

Example 9 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class ColumnExpressionTest, the method testSerialization:

@Test
public void testSerialization() throws Exception {
    int maxLen = 30;
    int scale = 5;
    PName colName = PNameFactory.newName("c1");
    PColumn column = new PColumnImpl(colName, PNameFactory.newName("f1"), PDecimal.INSTANCE, maxLen, scale, true, 20, SortOrder.getDefault(), 0, null, false, null, false, false, colName.getBytes());
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();
    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertEquals(maxLen, colExp2.getMaxLength().intValue());
    assertEquals(scale, colExp2.getScale().intValue());
    assertEquals(PDecimal.INSTANCE, colExp2.getDataType());
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PColumnImpl(org.apache.phoenix.schema.PColumnImpl) ByteArrayInputStream(java.io.ByteArrayInputStream) DataOutputStream(java.io.DataOutputStream) PName(org.apache.phoenix.schema.PName) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream) Test(org.junit.Test)

Example 10 with PColumnImpl

Use of org.apache.phoenix.schema.PColumnImpl in project phoenix by apache.

From the class ColumnExpressionTest, the method testSerializationWithNullScaleAndMaxLength:

@Test
public void testSerializationWithNullScaleAndMaxLength() throws Exception {
    PName colName = PNameFactory.newName("c1");
    PColumn column = new PColumnImpl(colName, PNameFactory.newName("f1"), PDecimal.INSTANCE, null, null, true, 20, SortOrder.getDefault(), 0, null, false, null, false, false, colName.getBytes());
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();
    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertNull(colExp2.getMaxLength());
    assertNull(colExp2.getScale());
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PColumnImpl(org.apache.phoenix.schema.PColumnImpl) ByteArrayInputStream(java.io.ByteArrayInputStream) DataOutputStream(java.io.DataOutputStream) PName(org.apache.phoenix.schema.PName) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream) Test(org.junit.Test)
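
Both serialization tests follow the same Writable-style round trip: write the expression to a DataOutputStream, read it back into a fresh instance with readFields, then assert on the decoded metadata. A small helper capturing that round trip, as a sketch that uses only the calls already exercised by the tests (the helper name itself is made up):

// Hypothetical helper: serialize one ColumnExpression and decode it into a fresh instance.
private static <T extends ColumnExpression> T roundTrip(ColumnExpression in, T out) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    in.write(dOut);
    dOut.flush();
    byte[] bytes = baos.toByteArray();
    out.readFields(new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length)));
    return out;
}

// Example use in testSerialization: KeyValueColumnExpression colExp2 = roundTrip(colExp, new KeyValueColumnExpression());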

Aggregations

PColumn (org.apache.phoenix.schema.PColumn): 12
PColumnImpl (org.apache.phoenix.schema.PColumnImpl): 12
PName (org.apache.phoenix.schema.PName): 9
PTable (org.apache.phoenix.schema.PTable): 6
TableRef (org.apache.phoenix.schema.TableRef): 5
ByteArrayInputStream (java.io.ByteArrayInputStream): 4
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4
DataInputStream (java.io.DataInputStream): 4
DataOutputStream (java.io.DataOutputStream): 4
ArrayList (java.util.ArrayList): 4
Expression (org.apache.phoenix.expression.Expression): 4
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 4
ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression): 3
ColumnRef (org.apache.phoenix.schema.ColumnRef): 3
Test (org.junit.Test): 3
SQLException (java.sql.SQLException): 2
Cell (org.apache.hadoop.hbase.Cell): 2
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 2
Pair (org.apache.hadoop.hbase.util.Pair): 2
ResultIterator (org.apache.phoenix.iterate.ResultIterator): 2