Search in sources :

Example 46 with PColumn

Use of org.apache.phoenix.schema.PColumn in the Apache Phoenix project.

From the class PostIndexDDLCompiler, method compile.

/**
 * Compiles a MutationPlan that populates {@code indexTable} from its data table by
 * generating and compiling an UPSERT SELECT statement (with a NO_INDEX hint, so the
 * SELECT reads the data table directly rather than any index).
 *
 * @param indexTable the index table to populate
 * @return a mutation plan whose {@code execute()} first issues a DDL fence on the
 *         data table and then runs the compiled upsert
 * @throws SQLException if the generated upsert/select statement fails to compile
 */
public MutationPlan compile(final PTable indexTable) throws SQLException {
    /*
         * Compiles an UPSERT SELECT command to read from the data table and populate the index table
         */
    StringBuilder indexColumns = new StringBuilder();
    StringBuilder dataColumns = new StringBuilder();
    // Add the pk index columns
    List<PColumn> indexPKColumns = indexTable.getPKColumns();
    int nIndexPKColumns = indexTable.getPKColumns().size();
    boolean isSalted = indexTable.getBucketNum() != null;
    boolean isMultiTenant = connection.getTenantId() != null && indexTable.isMultiTenant();
    boolean isViewIndex = indexTable.getViewIndexId() != null;
    // Skip the synthetic leading PK columns (salt byte, tenant id, view index id):
    // they are not user data columns and must not appear in the generated statement.
    int posOffset = (isSalted ? 1 : 0) + (isMultiTenant ? 1 : 0) + (isViewIndex ? 1 : 0);
    for (int i = posOffset; i < nIndexPKColumns; i++) {
        PColumn col = indexPKColumns.get(i);
        String indexColName = col.getName().getString();
        // Escape backslashes in the column's expression string since it is embedded
        // verbatim into the generated SELECT statement.
        String dataColName = StringUtil.escapeBackslash(col.getExpressionStr());
        dataColumns.append(dataColName).append(",");
        indexColumns.append('"').append(indexColName).append("\",");
        indexColumnNames.add(indexColName);
        dataColumnNames.add(dataColName);
    }
    // Add the covered columns; columns with a view constant are excluded since their
    // values are fixed by the view definition rather than read from the data table.
    for (PColumnFamily family : indexTable.getColumnFamilies()) {
        for (PColumn col : family.getColumns()) {
            if (col.getViewConstant() == null) {
                String indexColName = col.getName().getString();
                String dataFamilyName = IndexUtil.getDataColumnFamilyName(indexColName);
                String dataColumnName = IndexUtil.getDataColumnName(indexColName);
                if (!dataFamilyName.equals("")) {
                    dataColumns.append('"').append(dataFamilyName).append("\".");
                }
                dataColumns.append('"').append(dataColumnName).append("\",");
                indexColumns.append('"').append(indexColName).append("\",");
                indexColumnNames.add(indexColName);
                dataColumnNames.add(dataColumnName);
            }
        }
    }
    final PTable dataTable = dataTableRef.getTable();
    // Trim the trailing commas. NOTE(review): assumes at least one column was
    // appended above; otherwise setLength(-1) would throw — TODO confirm callers
    // never pass an index with no projectable columns.
    dataColumns.setLength(dataColumns.length() - 1);
    indexColumns.setLength(indexColumns.length() - 1);
    String schemaName = dataTable.getSchemaName().getString();
    String tableName = indexTable.getTableName().getString();
    StringBuilder updateStmtStr = new StringBuilder();
    // NO_INDEX hint forces the SELECT side to read from the data table itself.
    updateStmtStr.append("UPSERT /*+ NO_INDEX */ INTO ").append(schemaName.length() == 0 ? "" : '"' + schemaName + "\".").append('"').append(tableName).append("\"(").append(indexColumns).append(") ");
    final StringBuilder selectQueryBuilder = new StringBuilder();
    selectQueryBuilder.append(" SELECT ").append(dataColumns).append(" FROM ").append(schemaName.length() == 0 ? "" : '"' + schemaName + "\".").append('"').append(dataTable.getTableName().getString()).append('"');
    this.selectQuery = selectQueryBuilder.toString();
    updateStmtStr.append(this.selectQuery);
    // NOTE(review): the statement is closed when this try block exits, before the
    // returned plan is executed — presumably the compiled plan does not require the
    // statement to remain open; confirm against PhoenixStatement semantics.
    try (final PhoenixStatement statement = new PhoenixStatement(connection)) {
        DelegateMutationPlan delegate = new DelegateMutationPlan(statement.compileMutation(updateStmtStr.toString())) {

            @Override
            public MutationState execute() throws SQLException {
                // Fence ensures prior DDL on the data table is visible before the
                // index population upsert runs.
                connection.getMutationState().commitDDLFence(dataTable);
                return super.execute();
            }
        };
        return delegate;
    }
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PColumnFamily(org.apache.phoenix.schema.PColumnFamily) PhoenixStatement(org.apache.phoenix.jdbc.PhoenixStatement) PTable(org.apache.phoenix.schema.PTable)

Example 47 with PColumn

Use of org.apache.phoenix.schema.PColumn in the Apache Phoenix project.

From the class ProjectionCompiler, method compile.

/**
 * Builds the projection for the scan
 * @param context query context kept between compilation of different query clauses
 * @param statement the SELECT statement whose select list is being compiled
 * @param groupBy compiled GROUP BY clause
 * @param targetColumns list of columns, parallel to aliasedNodes, that are being set for an
 * UPSERT SELECT statement. Used to coerce expression types to the expected target type.
 * @param where compiled WHERE clause; used to decide whether the empty key value
 * must be projected when the select list is not a wildcard
 * @return projector used to access row values during scan
 * @throws SQLException
 */
public static RowProjector compile(StatementContext context, SelectStatement statement, GroupBy groupBy, List<? extends PDatum> targetColumns, Expression where) throws SQLException {
    // Bookkeeping for the array-index optimization: column refs feeding array
    // functions, the functions themselves (new and original forms), and how often
    // each projected ref is used outside of those functions.
    List<KeyValueColumnExpression> arrayKVRefs = new ArrayList<KeyValueColumnExpression>();
    List<ProjectedColumnExpression> arrayProjectedColumnRefs = new ArrayList<ProjectedColumnExpression>();
    List<Expression> arrayKVFuncs = new ArrayList<Expression>();
    List<Expression> arrayOldFuncs = new ArrayList<Expression>();
    Map<Expression, Integer> arrayExpressionCounts = new HashMap<>();
    List<AliasedNode> aliasedNodes = statement.getSelect();
    // Setup projected columns in Scan
    SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy, arrayKVRefs, arrayKVFuncs, arrayExpressionCounts, arrayProjectedColumnRefs, arrayOldFuncs, statement);
    List<ExpressionProjector> projectedColumns = new ArrayList<ExpressionProjector>();
    ColumnResolver resolver = context.getResolver();
    TableRef tableRef = context.getCurrentTable();
    PTable table = tableRef.getTable();
    // Columns must be resolved against the table when it is not the first (driving)
    // table of the query, e.g. in joins.
    boolean resolveColumn = !tableRef.equals(resolver.getTables().get(0));
    boolean isWildcard = false;
    Scan scan = context.getScan();
    int index = 0;
    List<Expression> projectedExpressions = Lists.newArrayListWithExpectedSize(aliasedNodes.size());
    List<byte[]> projectedFamilies = Lists.newArrayListWithExpectedSize(aliasedNodes.size());
    for (AliasedNode aliasedNode : aliasedNodes) {
        ParseNode node = aliasedNode.getNode();
        // TODO: visitor?
        if (node instanceof WildcardParseNode) {
            // SELECT * — project every column of the current table (or, for a
            // rewritten index query, every index column).
            if (statement.isAggregate()) {
                ExpressionCompiler.throwNonAggExpressionInAggException(node.toString());
            }
            if (tableRef == TableRef.EMPTY_TABLE_REF) {
                throw new SQLExceptionInfo.Builder(SQLExceptionCode.NO_TABLE_SPECIFIED_FOR_WILDCARD_SELECT).build().buildException();
            }
            isWildcard = true;
            if (tableRef.getTable().getType() == PTableType.INDEX && ((WildcardParseNode) node).isRewrite()) {
                projectAllIndexColumns(context, tableRef, resolveColumn, projectedExpressions, projectedColumns, targetColumns);
            } else {
                projectAllTableColumns(context, tableRef, resolveColumn, projectedExpressions, projectedColumns, targetColumns);
            }
        } else if (node instanceof TableWildcardParseNode) {
            // SELECT t.* — project every column of the named table.
            TableName tName = ((TableWildcardParseNode) node).getTableName();
            TableRef tRef = resolver.resolveTable(tName.getSchemaName(), tName.getTableName());
            if (tRef.equals(tableRef)) {
                isWildcard = true;
            }
            if (tRef.getTable().getType() == PTableType.INDEX && ((TableWildcardParseNode) node).isRewrite()) {
                projectAllIndexColumns(context, tRef, true, projectedExpressions, projectedColumns, targetColumns);
            } else {
                projectAllTableColumns(context, tRef, true, projectedExpressions, projectedColumns, targetColumns);
            }
        } else if (node instanceof FamilyWildcardParseNode) {
            if (tableRef == TableRef.EMPTY_TABLE_REF) {
                throw new SQLExceptionInfo.Builder(SQLExceptionCode.NO_TABLE_SPECIFIED_FOR_WILDCARD_SELECT).build().buildException();
            }
            // Project everything for SELECT cf.*
            String cfName = ((FamilyWildcardParseNode) node).getName();
            // Delay projecting to scan, as when any other column in the column family gets
            // added to the scan, it overwrites that we want to project the entire column
            // family. Instead, we do the projection at the end.
            // TODO: consider having a ScanUtil.addColumn and ScanUtil.addFamily to work
            // around this, as this code depends on this function being the last place where
            // columns are projected (which is currently true, but could change).
            projectedFamilies.add(Bytes.toBytes(cfName));
            if (tableRef.getTable().getType() == PTableType.INDEX && ((FamilyWildcardParseNode) node).isRewrite()) {
                projectIndexColumnFamily(context, cfName, tableRef, resolveColumn, projectedExpressions, projectedColumns);
            } else {
                projectTableColumnFamily(context, cfName, tableRef, resolveColumn, projectedExpressions, projectedColumns);
            }
        } else {
            // Ordinary select item: compile the expression, coerce to the target
            // column type (for UPSERT SELECT), and derive its display name.
            Expression expression = node.accept(selectVisitor);
            projectedExpressions.add(expression);
            expression = coerceIfNecessary(index, targetColumns, expression);
            if (node instanceof BindParseNode) {
                context.getBindManager().addParamMetaData((BindParseNode) node, expression);
            }
            if (!node.isStateless()) {
                if (!selectVisitor.isAggregate() && statement.isAggregate()) {
                    ExpressionCompiler.throwNonAggExpressionInAggException(expression.toString());
                }
            }
            // Display name preference: explicit alias, then normalized node alias,
            // then the expression's own string form.
            String columnAlias = aliasedNode.getAlias() != null ? aliasedNode.getAlias() : SchemaUtil.normalizeIdentifier(aliasedNode.getNode().getAlias());
            boolean isCaseSensitive = aliasedNode.getAlias() != null ? aliasedNode.isCaseSensitve() : (columnAlias != null ? SchemaUtil.isCaseSensitive(aliasedNode.getNode().getAlias()) : selectVisitor.isCaseSensitive);
            String name = columnAlias == null ? expression.toString() : columnAlias;
            projectedColumns.add(new ExpressionProjector(name, tableRef.getTableAlias() == null ? (table.getName() == null ? "" : table.getName().getString()) : tableRef.getTableAlias(), expression, isCaseSensitive));
        }
        selectVisitor.reset();
        index++;
    }
    // Discard array-index bookkeeping for any projected ref that is also used
    // outside the array functions (count != 0): those columns must be fetched in
    // full, so the server-side array-index shortcut does not apply to them.
    // NOTE(review): count is unboxed here — assumes the visitor recorded a count
    // for every collected ref; confirm against SelectClauseVisitor.
    for (int i = arrayProjectedColumnRefs.size() - 1; i >= 0; i--) {
        Expression expression = arrayProjectedColumnRefs.get(i);
        Integer count = arrayExpressionCounts.get(expression);
        if (count != 0) {
            arrayKVRefs.remove(i);
            arrayKVFuncs.remove(i);
            arrayOldFuncs.remove(i);
        }
    }
    if (arrayKVFuncs.size() > 0 && arrayKVRefs.size() > 0) {
        // Ship the array-index metadata to the server via the scan, then rewrite
        // the projected expressions to read the server-computed array elements.
        serailizeArrayIndexInformationAndSetInScan(context, arrayKVFuncs, arrayKVRefs);
        KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
        for (Expression expression : arrayKVRefs) {
            builder.addField(expression);
        }
        KeyValueSchema kvSchema = builder.build();
        ValueBitSet arrayIndexesBitSet = ValueBitSet.newInstance(kvSchema);
        builder = new KeyValueSchemaBuilder(0);
        for (Expression expression : arrayKVFuncs) {
            builder.addField(expression);
        }
        KeyValueSchema arrayIndexesSchema = builder.build();
        Map<Expression, Expression> replacementMap = new HashMap<>();
        for (int i = 0; i < arrayOldFuncs.size(); i++) {
            Expression function = arrayKVFuncs.get(i);
            replacementMap.put(arrayOldFuncs.get(i), new ArrayIndexExpression(i, function.getDataType(), arrayIndexesBitSet, arrayIndexesSchema));
        }
        ReplaceArrayFunctionExpressionVisitor visitor = new ReplaceArrayFunctionExpressionVisitor(replacementMap);
        for (int i = 0; i < projectedColumns.size(); i++) {
            ExpressionProjector projector = projectedColumns.get(i);
            projectedColumns.set(i, new ExpressionProjector(projector.getName(), tableRef.getTableAlias() == null ? (table.getName() == null ? "" : table.getName().getString()) : tableRef.getTableAlias(), projector.getExpression().accept(visitor), projector.isCaseSensitive()));
        }
    }
    boolean isProjectEmptyKeyValue = false;
    if (isWildcard) {
        projectAllColumnFamilies(table, scan);
    } else {
        // The empty key value is needed unless the WHERE clause already forces
        // full-row evaluation on the server.
        isProjectEmptyKeyValue = where == null || LiteralExpression.isTrue(where) || where.requiresFinalEvaluation();
        for (byte[] family : projectedFamilies) {
            projectColumnFamily(table, scan, family);
        }
    }
    // TODO make estimatedByteSize more accurate by counting the joined columns.
    int estimatedKeySize = table.getRowKeySchema().getEstimatedValueLength();
    int estimatedByteSize = 0;
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
        try {
            PColumnFamily family = table.getColumnFamily(entry.getKey());
            if (entry.getValue() == null) {
                // Whole family projected: estimate over every column in it.
                for (PColumn column : family.getColumns()) {
                    Integer maxLength = column.getMaxLength();
                    int byteSize = column.getDataType().isFixedWidth() ? maxLength == null ? column.getDataType().getByteSize() : maxLength : RowKeySchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
                    estimatedByteSize += SizedUtil.KEY_VALUE_SIZE + estimatedKeySize + byteSize;
                }
            } else {
                // Specific qualifiers projected: estimate only those columns.
                for (byte[] cq : entry.getValue()) {
                    PColumn column = family.getPColumnForColumnQualifier(cq);
                    Integer maxLength = column.getMaxLength();
                    int byteSize = column.getDataType().isFixedWidth() ? maxLength == null ? column.getDataType().getByteSize() : maxLength : RowKeySchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
                    estimatedByteSize += SizedUtil.KEY_VALUE_SIZE + estimatedKeySize + byteSize;
                }
            }
        } catch (ColumnFamilyNotFoundException e) {
        // Ignore as this can happen for local indexes when the data table has a column family, but there are no covered columns in the family
        }
    }
    return new RowProjector(projectedColumns, Math.max(estimatedKeySize, estimatedByteSize), isProjectEmptyKeyValue, resolver.hasUDFs(), isWildcard);
}
Also used : NavigableSet(java.util.NavigableSet) HashMap(java.util.HashMap) KeyValueSchemaBuilder(org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder) ArrayList(java.util.ArrayList) FamilyWildcardParseNode(org.apache.phoenix.parse.FamilyWildcardParseNode) WildcardParseNode(org.apache.phoenix.parse.WildcardParseNode) TableWildcardParseNode(org.apache.phoenix.parse.TableWildcardParseNode) KeyValueSchemaBuilder(org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder) PTable(org.apache.phoenix.schema.PTable) PColumn(org.apache.phoenix.schema.PColumn) ReplaceArrayFunctionExpressionVisitor(org.apache.phoenix.expression.visitor.ReplaceArrayFunctionExpressionVisitor) FunctionParseNode(org.apache.phoenix.parse.FunctionParseNode) BindParseNode(org.apache.phoenix.parse.BindParseNode) ColumnParseNode(org.apache.phoenix.parse.ColumnParseNode) SequenceValueParseNode(org.apache.phoenix.parse.SequenceValueParseNode) FamilyWildcardParseNode(org.apache.phoenix.parse.FamilyWildcardParseNode) WildcardParseNode(org.apache.phoenix.parse.WildcardParseNode) TableWildcardParseNode(org.apache.phoenix.parse.TableWildcardParseNode) ParseNode(org.apache.phoenix.parse.ParseNode) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) KeyValueSchema(org.apache.phoenix.schema.KeyValueSchema) ValueBitSet(org.apache.phoenix.schema.ValueBitSet) TableWildcardParseNode(org.apache.phoenix.parse.TableWildcardParseNode) ProjectedColumnExpression(org.apache.phoenix.expression.ProjectedColumnExpression) AliasedNode(org.apache.phoenix.parse.AliasedNode) PColumnFamily(org.apache.phoenix.schema.PColumnFamily) ColumnFamilyNotFoundException(org.apache.phoenix.schema.ColumnFamilyNotFoundException) TableName(org.apache.phoenix.parse.TableName) FamilyWildcardParseNode(org.apache.phoenix.parse.FamilyWildcardParseNode) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) BaseTerminalExpression(org.apache.phoenix.expression.BaseTerminalExpression) 
Expression(org.apache.phoenix.expression.Expression) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) ProjectedColumnExpression(org.apache.phoenix.expression.ProjectedColumnExpression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) Scan(org.apache.hadoop.hbase.client.Scan) Map(java.util.Map) HashMap(java.util.HashMap) TableRef(org.apache.phoenix.schema.TableRef) BindParseNode(org.apache.phoenix.parse.BindParseNode)

Example 48 with PColumn

Use of org.apache.phoenix.schema.PColumn in the Apache Phoenix project.

From the class AppendOnlySchemaIT, method testAddColumns.

/**
 * Verifies that re-issuing CREATE VIEW IF NOT EXISTS with additional columns on an
 * APPEND_ONLY_SCHEMA base table appends the new columns while keeping the original
 * column order and nullability intact.
 *
 * @param sameClient when true both DDL statements go through one connection; when
 *                   false a second connection is used, exercising the metadata
 *                   cache (UPDATE_CACHE_FREQUENCY) path
 */
private void testAddColumns(boolean sameClient) throws Exception {
    Properties connProps = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection firstConn = DriverManager.getConnection(getUrl(), connProps);
        Connection secondConn = sameClient ? firstConn : DriverManager.getConnection(getUrl(), connProps)) {
        String baseTableName = generateUniqueName();
        String viewName = generateUniqueName();
        String partitionSeqName = generateUniqueName();
        // Sequence backing the base table's AUTO_PARTITION_SEQ.
        firstConn.createStatement().execute("CREATE SEQUENCE " + partitionSeqName + " CACHE 1");
        // Append-only base table with auto partitioning.
        firstConn.createStatement().execute("CREATE TABLE " + baseTableName + " (metricId INTEGER NOT NULL, metricVal1 DOUBLE, CONSTRAINT PK PRIMARY KEY(metricId))" + " APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=1, AUTO_PARTITION_SEQ=" + partitionSeqName);
        // Initial view definition with a single additional PK column.
        String viewDdl = "CREATE VIEW IF NOT EXISTS " + viewName + "( hostName varchar NOT NULL," + " CONSTRAINT HOSTNAME_PK PRIMARY KEY (hostName))" + " AS SELECT * FROM " + baseTableName + " UPDATE_CACHE_FREQUENCY=300000";
        firstConn.createStatement().execute(viewDdl);
        firstConn.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal1) VALUES('host1', 1.0)");
        firstConn.commit();
        // Re-issue the view DDL with one more pk column and one more regular column,
        // and with the pk columns listed in a different order. Since only appending
        // columns is allowed, the original ordering must be preserved.
        viewDdl = "CREATE VIEW IF NOT EXISTS " + viewName + "( instanceName varchar, hostName varchar, metricVal2 double, metricVal1 double" + " CONSTRAINT HOSTNAME_PK PRIMARY KEY (instancename, hostName))" + " AS SELECT * FROM " + baseTableName + " UPDATE_CACHE_FREQUENCY=300000";
        secondConn.createStatement().execute(viewDdl);
        secondConn.createStatement().execute("UPSERT INTO " + viewName + "(hostName, instanceName, metricVal1, metricval2) VALUES('host2', 'instance2', 21.0, 22.0)");
        secondConn.commit();
        firstConn.createStatement().execute("UPSERT INTO " + viewName + "(hostName, metricVal1) VALUES('host3', 3.0)");
        firstConn.commit();
        // Run the query before inspecting metadata; rows are verified afterwards.
        ResultSet results = secondConn.createStatement().executeQuery("SELECT * from " + viewName);
        // Check that the two columns were appended correctly.
        PTable viewTable = secondConn.unwrap(PhoenixConnection.class).getTable(new PTableKey(null, viewName));
        List<PColumn> pkCols = viewTable.getPKColumns();
        assertEquals(3, viewTable.getPKColumns().size());
        // Although the second CREATE VIEW reordered the pk, the original order stands.
        PColumn metricIdCol = pkCols.get(0);
        assertEquals("METRICID", metricIdCol.getName().getString());
        assertFalse(metricIdCol.isNullable());
        PColumn hostNameCol = pkCols.get(1);
        assertEquals("HOSTNAME", hostNameCol.getName().getString());
        // hostName stays NOT NULL even though the second DDL declared it nullable,
        // because append-only schemas only allow adding columns.
        assertFalse(hostNameCol.isNullable());
        PColumn instanceNameCol = pkCols.get(2);
        assertEquals("INSTANCENAME", instanceNameCol.getName().getString());
        assertTrue(instanceNameCol.isNullable());
        // Full column list: original columns first, appended columns last.
        List<PColumn> allCols = viewTable.getColumns();
        assertEquals("METRICID", allCols.get(0).getName().getString());
        assertEquals("METRICVAL1", allCols.get(1).getName().getString());
        assertEquals("HOSTNAME", allCols.get(2).getName().getString());
        assertEquals("INSTANCENAME", allCols.get(3).getName().getString());
        assertEquals("METRICVAL2", allCols.get(4).getName().getString());
        // Row upserted before the schema change: appended columns read as null/0.
        assertTrue(results.next());
        assertEquals(1, results.getInt(1));
        assertEquals(1.0, results.getDouble(2), 1e-6);
        assertEquals("host1", results.getString(3));
        assertEquals(null, results.getString(4));
        assertEquals(0.0, results.getDouble(5), 1e-6);
        // Row upserted after the schema change: all five columns populated.
        assertTrue(results.next());
        assertEquals(1, results.getInt(1));
        assertEquals(21.0, results.getDouble(2), 1e-6);
        assertEquals("host2", results.getString(3));
        assertEquals("instance2", results.getString(4));
        assertEquals(22.0, results.getDouble(5), 1e-6);
        // Third row, written through the first connection again.
        assertTrue(results.next());
        assertEquals(1, results.getInt(1));
        assertEquals(3.0, results.getDouble(2), 1e-6);
        assertEquals("host3", results.getString(3));
        assertEquals(null, results.getString(4));
        assertEquals(0.0, results.getDouble(5), 1e-6);
        assertFalse(results.next());
    }
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) Connection(java.sql.Connection) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) ResultSet(java.sql.ResultSet) Properties(java.util.Properties) PTableKey(org.apache.phoenix.schema.PTableKey) PTable(org.apache.phoenix.schema.PTable)

Example 49 with PColumn

Use of org.apache.phoenix.schema.PColumn in the Apache Phoenix project.

From the class AutoPartitionViewsIT, method testValidateAttributes.

/**
 * Verifies auto-partition view behavior: sequence type validation, the sequence
 * must exist before view creation, sequence exhaustion at Integer.MAX_VALUE fails
 * with CANNOT_COERCE_AUTO_PARTITION_ID, and each created view's statement,
 * isViewReferenced flag, and viewConstant bytes are set correctly — both on the
 * creating client and on a second client that resolves the views from the server.
 */
@Test
public void testValidateAttributes() throws SQLException {
    try (Connection conn = DriverManager.getConnection(getUrl());
        Connection viewConn1 = isMultiTenant ? DriverManager.getConnection(TENANT_SPECIFIC_URL1) : DriverManager.getConnection(getUrl());
        Connection viewConn2 = isMultiTenant ? DriverManager.getConnection(TENANT_SPECIFIC_URL1) : DriverManager.getConnection(getUrl())) {
        String tableName = generateUniqueName();
        String autoSeqName = generateUniqueName();
        // A VARCHAR partition column cannot receive a sequence value: expect failure.
        try {
            String ddl = String.format("CREATE TABLE " + tableName + " (%s metricId VARCHAR, val1 DOUBLE, val2 DOUBLE CONSTRAINT PK PRIMARY KEY( %s metricId)) %s", isMultiTenant ? "tenantId VARCHAR, " : "", isMultiTenant ? "tenantId, " : "", String.format(tableDDLOptions, autoSeqName));
            conn.createStatement().execute(ddl);
            fail("Sequence value must be castable to the auto partition id column data type");
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.SEQUENCE_NOT_CASTABLE_TO_AUTO_PARTITION_ID_COLUMN.getErrorCode(), e.getErrorCode());
        }
        // Valid definition: INTEGER partition column.
        String ddl = String.format("CREATE TABLE " + tableName + " (%s metricId INTEGER NOT NULL, val1 DOUBLE, val2 DOUBLE CONSTRAINT PK PRIMARY KEY( %s metricId)) %s", isMultiTenant ? "tenantId VARCHAR NOT NULL, " : "", isMultiTenant ? "tenantId, " : "", String.format(tableDDLOptions, autoSeqName));
        conn.createStatement().execute(ddl);
        String baseViewName = generateUniqueName();
        String metricView1 = baseViewName + "_VIEW1";
        String metricView2 = baseViewName + "_VIEW2";
        String metricView3 = baseViewName + "_VIEW3";
        String metricView4 = baseViewName + "_VIEW4";
        // Creating a view before the auto-partition sequence exists must fail.
        try {
            viewConn1.createStatement().execute("CREATE VIEW " + metricView1 + "  AS SELECT * FROM " + tableName);
            fail("Auto-partition sequence must be created before view is created");
        } catch (SequenceNotFoundException e) {
            // expected: sequence does not exist yet
        }
        // Start the sequence 2 below MAX_VALUE so the fourth view overflows it.
        conn.createStatement().execute("CREATE SEQUENCE " + autoSeqName + " start with " + (Integer.MAX_VALUE - 2) + " cache 1");
        viewConn1.createStatement().execute("CREATE VIEW " + metricView1 + " AS SELECT * FROM " + tableName + " WHERE val2=1.2");
        // create a view without a where clause
        viewConn1.createStatement().execute("CREATE VIEW " + metricView2 + " AS SELECT * FROM " + tableName);
        // create a view with a complex where clause
        viewConn1.createStatement().execute("CREATE VIEW " + metricView3 + " AS SELECT * FROM " + tableName + " WHERE val1=1.0 OR val2=2.0");
        // Fourth view would need a partition id past Integer.MAX_VALUE: expect failure.
        try {
            viewConn1.createStatement().execute("CREATE VIEW " + metricView4 + " AS SELECT * FROM " + tableName);
            fail("Creating a view with a partition id that is too large should fail");
        } catch (SQLException e) {
            assertEquals(SQLExceptionCode.CANNOT_COERCE_AUTO_PARTITION_ID.getErrorCode(), e.getErrorCode());
        }
        if (isMultiTenant) {
            // load tables into cache
            viewConn1.createStatement().execute("SELECT * FROM " + metricView1);
            viewConn1.createStatement().execute("SELECT * FROM " + metricView2);
            viewConn1.createStatement().execute("SELECT * FROM " + metricView3);
        }
        PhoenixConnection pconn = viewConn1.unwrap(PhoenixConnection.class);
        PTable view1 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView1));
        PTable view2 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView2));
        PTable view3 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView3));
        // verify the view statement was set correctly: each view's WHERE clause is
        // augmented with its assigned METRICID partition value
        String expectedViewStatement1 = "SELECT * FROM \"" + tableName + "\" WHERE VAL2 = 1.2 AND METRICID = " + (Integer.MAX_VALUE - 2);
        String expectedViewStatement2 = "SELECT * FROM \"" + tableName + "\" WHERE METRICID = " + (Integer.MAX_VALUE - 1);
        String expectedViewStatement3 = "SELECT * FROM \"" + tableName + "\" WHERE (VAL1 = 1.0 OR VAL2 = 2.0) AND METRICID = " + Integer.MAX_VALUE;
        assertEquals("Unexpected view statement", expectedViewStatement1, view1.getViewStatement());
        assertEquals("Unexpected view statement", expectedViewStatement2, view2.getViewStatement());
        assertEquals("Unexpected view statement", expectedViewStatement3, view3.getViewStatement());
        // verify isViewReferenced was set correctly; in the multi-tenant case the
        // partition column sits after the leading tenantId pk column
        int expectedParitionColIndex = isMultiTenant ? 1 : 0;
        PColumn partitionCol1 = view1.getColumns().get(expectedParitionColIndex);
        PColumn partitionCol2 = view2.getColumns().get(expectedParitionColIndex);
        PColumn partitionCol3 = view3.getColumns().get(expectedParitionColIndex);
        assertTrue("Partition column view referenced attribute should be true ", partitionCol1.isViewReferenced());
        assertTrue("Partition column view referenced attribute should be true ", partitionCol2.isViewReferenced());
        assertTrue("Partition column view referenced attribute should be true ", partitionCol3.isViewReferenced());
        // verify viewConstant was set correctly: the serialized INTEGER partition id
        // plus one trailing byte
        byte[] expectedPartition1 = new byte[Bytes.SIZEOF_INT + 1];
        PInteger.INSTANCE.toBytes(Integer.MAX_VALUE - 2, expectedPartition1, 0);
        byte[] expectedPartition2 = new byte[Bytes.SIZEOF_INT + 1];
        PInteger.INSTANCE.toBytes(Integer.MAX_VALUE - 1, expectedPartition2, 0);
        byte[] expectedPartition3 = new byte[Bytes.SIZEOF_INT + 1];
        PInteger.INSTANCE.toBytes(Integer.MAX_VALUE, expectedPartition3, 0);
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition1, partitionCol1.getViewConstant());
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition2, partitionCol2.getViewConstant());
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition3, partitionCol3.getViewConstant());
        // verify that the table was created correctly on the server by repeating the
        // checks through a second connection
        viewConn2.createStatement().execute("SELECT * FROM " + metricView1);
        viewConn2.createStatement().execute("SELECT * FROM " + metricView2);
        viewConn2.createStatement().execute("SELECT * FROM " + metricView3);
        pconn = viewConn2.unwrap(PhoenixConnection.class);
        view1 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView1));
        view2 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView2));
        view3 = pconn.getTable(new PTableKey(pconn.getTenantId(), metricView3));
        // verify the view statement was set correctly
        assertEquals("Unexpected view statement", expectedViewStatement1, view1.getViewStatement());
        assertEquals("Unexpected view statement", expectedViewStatement2, view2.getViewStatement());
        assertEquals("Unexpected view statement", expectedViewStatement3, view3.getViewStatement());
        // verify isViewReferenced was set correctly
        partitionCol1 = view1.getColumns().get(expectedParitionColIndex);
        partitionCol2 = view2.getColumns().get(expectedParitionColIndex);
        partitionCol3 = view3.getColumns().get(expectedParitionColIndex);
        assertTrue("Partition column view referenced attribute should be true ", partitionCol1.isViewReferenced());
        assertTrue("Partition column view referenced attribute should be true ", partitionCol2.isViewReferenced());
        assertTrue("Partition column view referenced attribute should be true ", partitionCol3.isViewReferenced());
        // verify viewConstant was set correctly
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition1, partitionCol1.getViewConstant());
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition2, partitionCol2.getViewConstant());
        assertArrayEquals("Unexpected Partition column view constant attribute", expectedPartition3, partitionCol3.getViewConstant());
    }
}
Also used : PColumn(org.apache.phoenix.schema.PColumn) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) SequenceNotFoundException(org.apache.phoenix.schema.SequenceNotFoundException) PTableKey(org.apache.phoenix.schema.PTableKey) PTable(org.apache.phoenix.schema.PTable) Test(org.junit.Test)

Example 50 with PColumn

Use of org.apache.phoenix.schema.PColumn in the Apache Phoenix project.

From the class IndexScrutiny, method scrutinizeIndex.

/**
 * Cross-checks an index table against its data table: for every row in the index,
 * looks up the corresponding data-table row by primary key and asserts that all
 * projected column values match, then asserts the two tables have the same row count.
 *
 * @param conn open Phoenix connection; not closed by this method
 * @param fullTableName fully qualified data table name
 * @param fullIndexName fully qualified index table name
 * @return the data table row count (equal to the index row count on success)
 * @throws SQLException if metadata lookup or any of the verification queries fail
 */
public static long scrutinizeIndex(Connection conn, String fullTableName, String fullIndexName) throws SQLException {
    PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
    PTable ptable = pconn.getTable(new PTableKey(pconn.getTenantId(), fullTableName));
    List<PColumn> tablePKColumns = ptable.getPKColumns();
    // Salted tables carry a leading salt-byte PK column that must not appear in queries.
    if (ptable.getBucketNum() != null) {
        tablePKColumns = tablePKColumns.subList(1, tablePKColumns.size());
    }
    PTable pindex = pconn.getTable(new PTableKey(pconn.getTenantId(), fullIndexName));
    List<PColumn> indexColumns = pindex.getColumns();
    // Similarly skip the index's salt byte and/or view-index-id leading columns.
    int indexColumnOffset = 0;
    if (pindex.getBucketNum() != null) {
        indexColumnOffset = 1;
    }
    if (pindex.getViewIndexId() != null) {
        indexColumnOffset++;
    }
    if (indexColumnOffset > 0) {
        indexColumns = indexColumns.subList(indexColumnOffset, indexColumns.size());
    }
    // Build the index-side SELECT: data-table PK columns first (cast from their index
    // representation back to the data-table SQL types), then the remaining columns in
    // the same order used for the data-side SELECT below.
    StringBuilder indexQueryBuf = new StringBuilder("SELECT ");
    for (PColumn dcol : tablePKColumns) {
        indexQueryBuf.append("CAST(\"" + IndexUtil.getIndexColumnName(dcol) + "\" AS " + dcol.getDataType().getSqlTypeName() + ")");
        indexQueryBuf.append(",");
    }
    // Index PK columns that are NOT data-table PK columns (i.e. indexed expressions).
    for (PColumn icol : indexColumns) {
        PColumn dcol = IndexUtil.getDataColumn(ptable, icol.getName().getString());
        if (SchemaUtil.isPKColumn(icol) && !SchemaUtil.isPKColumn(dcol)) {
            indexQueryBuf.append("CAST (\"" + icol.getName().getString() + "\" AS " + dcol.getDataType().getSqlTypeName() + ")");
            indexQueryBuf.append(",");
        }
    }
    // Covered (non-PK) index columns.
    for (PColumn icol : indexColumns) {
        if (!SchemaUtil.isPKColumn(icol)) {
            PColumn dcol = IndexUtil.getDataColumn(ptable, icol.getName().getString());
            indexQueryBuf.append("CAST (\"" + icol.getName().getString() + "\" AS " + dcol.getDataType().getSqlTypeName() + ")");
            indexQueryBuf.append(",");
        }
    }
    // Drop trailing comma.
    indexQueryBuf.setLength(indexQueryBuf.length() - 1);
    indexQueryBuf.append("\nFROM " + fullIndexName);
    // Build the data-side SELECT with the columns in the same order, filtered by a
    // row-value-constructor match on the PK so one data row is fetched per index row.
    StringBuilder tableQueryBuf = new StringBuilder("SELECT ");
    for (PColumn dcol : tablePKColumns) {
        tableQueryBuf.append("\"" + dcol.getName().getString() + "\"");
        tableQueryBuf.append(",");
    }
    for (PColumn icol : indexColumns) {
        PColumn dcol = IndexUtil.getDataColumn(ptable, icol.getName().getString());
        if (SchemaUtil.isPKColumn(icol) && !SchemaUtil.isPKColumn(dcol)) {
            if (dcol.getFamilyName() != null) {
                tableQueryBuf.append("\"" + dcol.getFamilyName().getString() + "\"");
                tableQueryBuf.append(".");
            }
            tableQueryBuf.append("\"" + dcol.getName().getString() + "\"");
            tableQueryBuf.append(",");
        }
    }
    for (PColumn icol : indexColumns) {
        if (!SchemaUtil.isPKColumn(icol)) {
            PColumn dcol = IndexUtil.getDataColumn(ptable, icol.getName().getString());
            if (dcol.getFamilyName() != null) {
                tableQueryBuf.append("\"" + dcol.getFamilyName().getString() + "\"");
                tableQueryBuf.append(".");
            }
            tableQueryBuf.append("\"" + dcol.getName().getString() + "\"");
            tableQueryBuf.append(",");
        }
    }
    tableQueryBuf.setLength(tableQueryBuf.length() - 1);
    tableQueryBuf.append("\nFROM " + fullTableName + "\nWHERE (");
    for (PColumn dcol : tablePKColumns) {
        tableQueryBuf.append("\"" + dcol.getName().getString() + "\"");
        tableQueryBuf.append(",");
    }
    tableQueryBuf.setLength(tableQueryBuf.length() - 1);
    tableQueryBuf.append(") = ((");
    for (int i = 0; i < tablePKColumns.size(); i++) {
        tableQueryBuf.append("?");
        tableQueryBuf.append(",");
    }
    tableQueryBuf.setLength(tableQueryBuf.length() - 1);
    tableQueryBuf.append("))");
    String tableQuery = tableQueryBuf.toString();
    String indexQuery = indexQueryBuf.toString();
    // try-with-resources: the original leaked the PreparedStatement, the Statement,
    // and both ResultSets; closing a Statement also closes its open ResultSets.
    try (PreparedStatement istmt = conn.prepareStatement(tableQuery);
            Statement stmt = conn.createStatement();
            ResultSet irs = stmt.executeQuery(indexQuery)) {
        ResultSetMetaData irsmd = irs.getMetaData();
        long icount = 0;
        while (irs.next()) {
            icount++;
            // Bind the PK values from the index row into the data-table lookup, and
            // keep a printable form of the PK for the assertion message.
            StringBuilder pkBuf = new StringBuilder("(");
            for (int i = 0; i < tablePKColumns.size(); i++) {
                PColumn dcol = tablePKColumns.get(i);
                int offset = i + 1;
                Object pkVal = irs.getObject(offset);
                PDataType pkType = PDataType.fromTypeId(irsmd.getColumnType(offset));
                istmt.setObject(offset, pkVal, dcol.getDataType().getSqlType());
                pkBuf.append(pkType.toStringLiteral(pkVal));
                pkBuf.append(",");
            }
            pkBuf.setLength(pkBuf.length() - 1);
            pkBuf.append(")");
            // One fresh data-table ResultSet per index row; close it each iteration.
            try (ResultSet drs = istmt.executeQuery()) {
                ResultSetMetaData drsmd = drs.getMetaData();
                assertTrue("Expected to find PK in data table: " + pkBuf, drs.next());
                // Column-by-column equality: both queries project in the same order.
                for (int i = 0; i < irsmd.getColumnCount(); i++) {
                    Object iVal = irs.getObject(i + 1);
                    PDataType iType = PDataType.fromTypeId(irsmd.getColumnType(i + 1));
                    Object dVal = drs.getObject(i + 1);
                    PDataType dType = PDataType.fromTypeId(drsmd.getColumnType(i + 1));
                    assertTrue("Expected equality for " + drsmd.getColumnName(i + 1) + ", but " + iType.toStringLiteral(iVal) + "!=" + dType.toStringLiteral(dVal), Objects.equal(iVal, dVal));
                }
            }
        }
        long dcount = TestUtil.getRowCount(conn, fullTableName);
        assertEquals("Expected data table row count to match", dcount, icount);
        return dcount;
    }
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) PreparedStatement(java.sql.PreparedStatement) PTable(org.apache.phoenix.schema.PTable) PColumn(org.apache.phoenix.schema.PColumn) ResultSetMetaData(java.sql.ResultSetMetaData) PDataType(org.apache.phoenix.schema.types.PDataType) ResultSet(java.sql.ResultSet) PTableKey(org.apache.phoenix.schema.PTableKey)

Aggregations

PColumn (org.apache.phoenix.schema.PColumn)101 PTable (org.apache.phoenix.schema.PTable)59 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)26 Expression (org.apache.phoenix.expression.Expression)21 TableRef (org.apache.phoenix.schema.TableRef)20 ArrayList (java.util.ArrayList)19 PName (org.apache.phoenix.schema.PName)18 ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable)17 LiteralExpression (org.apache.phoenix.expression.LiteralExpression)17 ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr)17 ColumnRef (org.apache.phoenix.schema.ColumnRef)17 Hint (org.apache.phoenix.parse.HintNode.Hint)14 PTableKey (org.apache.phoenix.schema.PTableKey)14 ColumnNotFoundException (org.apache.phoenix.schema.ColumnNotFoundException)13 PColumnFamily (org.apache.phoenix.schema.PColumnFamily)13 PSmallint (org.apache.phoenix.schema.types.PSmallint)13 SQLException (java.sql.SQLException)12 ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression)12 PColumnImpl (org.apache.phoenix.schema.PColumnImpl)12 Map (java.util.Map)11