
Example 11 with DataDescriptorGenerator

use of org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator in project derby by apache.

the class CreateTriggerConstantAction method executeConstantAction.

/**
 * This is the guts of the Execution-time logic for CREATE TRIGGER.
 *
 * @see ConstantAction#executeConstantAction
 *
 * @exception StandardException		Thrown on failure
 */
public void executeConstantAction(Activation activation) throws StandardException {
    SPSDescriptor whenspsd = null;
    SPSDescriptor actionspsd;
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();
    /*
		** Indicate that we are about to modify the data dictionary.
		** 
		** We tell the data dictionary we're done writing at the end of
		** the transaction.
		*/
    dd.startWriting(lcc);
    SchemaDescriptor triggerSd = getSchemaDescriptorForCreate(dd, activation, triggerSchemaName);
    if (spsCompSchemaId == null) {
        SchemaDescriptor def = lcc.getDefaultSchema();
        if (def.getUUID() == null) {
            // Descriptor for default schema is stale,
            // look it up in the dictionary
            def = dd.getSchemaDescriptor(def.getDescriptorName(), tc, false);
        }
        /* 
			** It is possible for spsCompSchemaId to be null.  For instance, 
			** the current schema may not have been physically created yet but 
			** it exists "virtually".  In this case, its UUID will have the 
			** value of null meaning that it is not persistent.  e.g.:   
			**
			** CONNECT 'db;create=true' user 'ernie';
			** CREATE TABLE bert.t1 (i INT);
			** CREATE TRIGGER bert.tr1 AFTER INSERT ON bert.t1 
			**    FOR EACH STATEMENT MODE DB2SQL 
			**    SELECT * FROM SYS.SYSTABLES;
			**
			** Note that in the above case, the trigger action statement has a 
			** null compilation schema.  A compilation schema with null value 
			** indicates that the trigger action statement text does not have 
			** any dependencies with the CURRENT SCHEMA.  This means:
			**
			** o  It is safe to compile this statement in any schema since 
			**    there is no dependency with the CURRENT SCHEMA. i.e.: All 
			**    relevant identifiers are qualified with a specific schema.
			**
			** o  The statement cache mechanism can utilize this piece of 
			**    information to enable better statement plan sharing across 
			**    connections in different schemas; thus, avoiding unnecessary 
			**    statement compilation.
			*/
        if (def != null)
            spsCompSchemaId = def.getUUID();
    }
    String tabName;
    if (triggerTable != null) {
        triggerTableId = triggerTable.getUUID();
        tabName = triggerTable.getName();
    } else
        tabName = "with UUID " + triggerTableId;
    /* We need to get table descriptor again.  We simply can't trust the
		 * one we got at compile time, the lock on system table was released
		 * when compile was done, and the table might well have been dropped.
		 */
    triggerTable = dd.getTableDescriptor(triggerTableId);
    if (triggerTable == null) {
        throw StandardException.newException(SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tabName);
    }
    /* Lock the table for DDL.  Otherwise during our execution, the table
		 * might be changed, even dropped.  Beetle 4269
		 */
    lockTableForDDL(tc, triggerTable.getHeapConglomerateId(), true);
    /* get triggerTable again for correctness, in case it's changed before
		 * the lock is acquired
		 */
    triggerTable = dd.getTableDescriptor(triggerTableId);
    if (triggerTable == null) {
        throw StandardException.newException(SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tabName);
    }
    /*
		** Send an invalidate on the table from which
		** the triggering event emanates.  This is
		** to make sure that DML statements on this table
		** will be recompiled.  Do this before we create
		** our trigger spses lest we invalidate them just
		** after creating them.
		*/
    dm.invalidateFor(triggerTable, DependencyManager.CREATE_TRIGGER, lcc);
    /*
		** Let's get our trigger id up front; we'll use it when
	 	** we create our spses.
		*/
    UUID tmpTriggerId = dd.getUUIDFactory().createUUID();
    actionSPSId = (actionSPSId == null) ? dd.getUUIDFactory().createUUID() : actionSPSId;
    if (whenSPSId == null && whenText != null) {
        whenSPSId = dd.getUUIDFactory().createUUID();
    }
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    /*
		** Create the trigger descriptor first so the trigger action
		** compilation can pick up the relevant trigger especially in 
		** the case of self triggering.
		*/
    TriggerDescriptor triggerd = ddg.newTriggerDescriptor(triggerSd, tmpTriggerId, triggerName, eventMask, isBefore, isRow, isEnabled, triggerTable, whenSPSId, actionSPSId, makeCreationTimestamp(dd), referencedCols, referencedColsInTriggerAction, originalActionText, referencingOld, referencingNew, oldReferencingName, newReferencingName, originalWhenText);
    dd.addDescriptor(triggerd, triggerSd, DataDictionary.SYSTRIGGERS_CATALOG_NUM, false, tc);
    /*	
		** If we have a WHEN action we create it now.
		*/
    if (whenText != null) {
        // The WHEN clause is just a search condition and not a full
        // SQL statement. Turn it into a VALUES statement.
        String whenValuesStmt = "VALUES " + whenText;
        whenspsd = createSPS(lcc, ddg, dd, tc, tmpTriggerId, triggerSd, whenSPSId, spsCompSchemaId, whenValuesStmt, true, triggerTable);
    }
    /*
		** Create the trigger action
		*/
    actionspsd = createSPS(lcc, ddg, dd, tc, tmpTriggerId, triggerSd, actionSPSId, spsCompSchemaId, actionText, false, triggerTable);
    /*
		** Make underlying spses dependent on the trigger.
		*/
    if (whenspsd != null) {
        dm.addDependency(triggerd, whenspsd, lcc.getContextManager());
    }
    dm.addDependency(triggerd, actionspsd, lcc.getContextManager());
    dm.addDependency(triggerd, triggerTable, lcc.getContextManager());
    // Make the trigger dependent on every provider referenced
    // from the triggered statement or the WHEN clause.
    for (ProviderInfo info : providerInfo) {
        Provider provider = (Provider) info.getDependableFinder().getDependable(dd, info.getObjectId());
        dm.addDependency(triggerd, provider, lcc.getContextManager());
    }
    // store trigger's dependency on various privileges in the dependency system
    storeViewTriggerDependenciesOnPrivileges(activation, triggerd);
}
Also used : DataDescriptorGenerator(org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator) SchemaDescriptor(org.apache.derby.iapi.sql.dictionary.SchemaDescriptor) ProviderInfo(org.apache.derby.iapi.sql.depend.ProviderInfo) LanguageConnectionContext(org.apache.derby.iapi.sql.conn.LanguageConnectionContext) DependencyManager(org.apache.derby.iapi.sql.depend.DependencyManager) DataDictionary(org.apache.derby.iapi.sql.dictionary.DataDictionary) TransactionController(org.apache.derby.iapi.store.access.TransactionController) UUID(org.apache.derby.catalog.UUID) SPSDescriptor(org.apache.derby.iapi.sql.dictionary.SPSDescriptor) TriggerDescriptor(org.apache.derby.iapi.sql.dictionary.TriggerDescriptor) Provider(org.apache.derby.iapi.sql.depend.Provider)
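
All of these examples follow the same skeleton: get the LanguageConnectionContext from the Activation, obtain the DataDictionary and TransactionController, call dd.startWriting(lcc), build the new descriptor through the DataDescriptorGenerator, and persist it with dd.addDescriptor(...). The sketch below restates only that skeleton; it is not Derby source, the class name and the buildDescriptor()/catalogNumber() placeholders are made up, and the import paths assume the 10.x iapi layout used in these examples.

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.Activation;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.TupleDescriptor;
import org.apache.derby.iapi.store.access.TransactionController;

// Minimal sketch of the pattern shared by the constant actions above.
abstract class DescriptorCreatingConstantActionSketch {

    public void executeConstantAction(Activation activation) throws StandardException {
        LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
        DataDictionary dd = lcc.getDataDictionary();
        TransactionController tc = lcc.getTransactionExecute();
        // Tell the data dictionary we are about to write to it; writing is
        // considered finished at the end of the transaction.
        dd.startWriting(lcc);
        // Every new*Descriptor factory method hangs off this generator.
        DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
        // Stand-in for ddg.newTriggerDescriptor(...), ddg.newViewDescriptor(...),
        // ddg.newRoleGrantDescriptor(...), ddg.newPermDescriptor(...), etc.
        TupleDescriptor descriptor = buildDescriptor(ddg, dd);
        // Register the new descriptor in the relevant system catalog.
        dd.addDescriptor(descriptor, null /* parent */, catalogNumber(), false /* duplicatesAllowed */, tc);
    }

    // Descriptor-specific construction; each real constant action inlines this.
    abstract TupleDescriptor buildDescriptor(DataDescriptorGenerator ddg, DataDictionary dd) throws StandardException;

    // e.g. DataDictionary.SYSTRIGGERS_CATALOG_NUM, SYSVIEWS_CATALOG_NUM, SYSROLES_CATALOG_NUM
    abstract int catalogNumber();
}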

Example 12 with DataDescriptorGenerator

use of org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator in project derby by apache.

the class CreateViewConstantAction method executeConstantAction.

// INTERFACE METHODS
/**
 *	This is the guts of the Execution-time logic for CREATE VIEW.
 *
 *	@see ConstantAction#executeConstantAction
 *
 * @exception StandardException		Thrown on failure
 */
public void executeConstantAction(Activation activation) throws StandardException {
    TableDescriptor td;
    UUID toid;
    ColumnDescriptor columnDescriptor;
    ViewDescriptor vd;
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();
    /*
		** Inform the data dictionary that we are about to write to it.
		** There are several calls to data dictionary "get" methods here
		** that might be done in "read" mode in the data dictionary, but
		** it seemed safer to do this whole operation in "write" mode.
		**
		** We tell the data dictionary we're done writing at the end of
		** the transaction.
		*/
    dd.startWriting(lcc);
    SchemaDescriptor sd = DDLConstantAction.getSchemaDescriptorForCreate(dd, activation, schemaName);
    /* Create a new table descriptor.
		 * (Pass in row locking, even though meaningless for views.)
		 */
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    td = ddg.newTableDescriptor(tableName, sd, tableType, TableDescriptor.ROW_LOCK_GRANULARITY);
    dd.addDescriptor(td, sd, DataDictionary.SYSTABLES_CATALOG_NUM, false, tc);
    toid = td.getUUID();
    // for each column, stuff system.column
    ColumnDescriptor[] cdlArray = new ColumnDescriptor[columnInfo.length];
    int index = 1;
    for (int ix = 0; ix < columnInfo.length; ix++) {
        columnDescriptor = new ColumnDescriptor(columnInfo[ix].name, index++, columnInfo[ix].dataType, columnInfo[ix].defaultValue, columnInfo[ix].defaultInfo, td, (UUID) null, columnInfo[ix].autoincStart, columnInfo[ix].autoincInc, columnInfo[ix].autoincCycle);
        cdlArray[ix] = columnDescriptor;
    }
    dd.addDescriptorArray(cdlArray, td, DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);
    // add columns to the column descriptor list.
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    for (int i = 0; i < cdlArray.length; i++) cdl.add(cdlArray[i]);
    /* Get and add a view descriptor */
    vd = ddg.newViewDescriptor(toid, tableName, viewText, checkOption, (compSchemaId == null) ? lcc.getDefaultSchema().getUUID() : compSchemaId);
    for (int ix = 0; ix < providerInfo.length; ix++) {
        /* We should always be able to find the Provider */
        Provider provider = (Provider) providerInfo[ix].getDependableFinder().getDependable(dd, providerInfo[ix].getObjectId());
        dm.addDependency(vd, provider, lcc.getContextManager());
    }
    // store view's dependency on various privileges in the dependency system
    storeViewTriggerDependenciesOnPrivileges(activation, vd);
    dd.addDescriptor(vd, sd, DataDictionary.SYSVIEWS_CATALOG_NUM, true, tc);
}
Also used : SchemaDescriptor(org.apache.derby.iapi.sql.dictionary.SchemaDescriptor) ColumnDescriptor(org.apache.derby.iapi.sql.dictionary.ColumnDescriptor) DependencyManager(org.apache.derby.iapi.sql.depend.DependencyManager) DataDictionary(org.apache.derby.iapi.sql.dictionary.DataDictionary) TableDescriptor(org.apache.derby.iapi.sql.dictionary.TableDescriptor) ViewDescriptor(org.apache.derby.iapi.sql.dictionary.ViewDescriptor) Provider(org.apache.derby.iapi.sql.depend.Provider) DataDescriptorGenerator(org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator) LanguageConnectionContext(org.apache.derby.iapi.sql.conn.LanguageConnectionContext) ColumnDescriptorList(org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList) UUID(org.apache.derby.catalog.UUID) TransactionController(org.apache.derby.iapi.store.access.TransactionController)

Example 13 with DataDescriptorGenerator

use of org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator in project derby by apache.

the class GrantRoleConstantAction method executeConstantAction.

// INTERFACE METHODS
/**
 *  This is the guts of the Execution-time logic for GRANT role.
 *
 *  @see ConstantAction#executeConstantAction
 *
 * @exception StandardException     Thrown on failure
 */
public void executeConstantAction(Activation activation) throws StandardException {
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    TransactionController tc = lcc.getTransactionExecute();
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    final String grantor = lcc.getCurrentUserId(activation);
    dd.startWriting(lcc);
    for (Iterator rIter = roleNames.iterator(); rIter.hasNext(); ) {
        String role = (String) rIter.next();
        if (role.equals(Authorizer.PUBLIC_AUTHORIZATION_ID)) {
            throw StandardException.newException(SQLState.AUTH_PUBLIC_ILLEGAL_AUTHORIZATION_ID);
        }
        for (Iterator gIter = grantees.iterator(); gIter.hasNext(); ) {
            String grantee = (String) gIter.next();
            // check that role exists
            RoleGrantDescriptor rdDef = dd.getRoleDefinitionDescriptor(role);
            if (rdDef == null) {
                throw StandardException.newException(SQLState.ROLE_INVALID_SPECIFICATION, role);
            }
            // Verify that the grantor is allowed to grant the role;
            // currently only the database owner may do so:
            if (grantor.equals(lcc.getDataDictionary().getAuthorizationDatabaseOwner())) {
                // All ok, we are database owner
                if (SanityManager.DEBUG) {
                    SanityManager.ASSERT(rdDef.getGrantee().equals(grantor), "expected database owner in role grant descriptor");
                    SanityManager.ASSERT(rdDef.isWithAdminOption(), "expected role definition to have ADMIN OPTION");
                }
            } else {
                throw StandardException.newException(SQLState.AUTH_ROLE_DBO_ONLY, "GRANT role");
            }
            // Has it already been granted?
            RoleGrantDescriptor rgd = dd.getRoleGrantDescriptor(role, grantee, grantor);
            if (rgd != null && withAdminOption && !rgd.isWithAdminOption()) {
                // NOTE: Never called yet, withAdminOption not yet
                // implemented.
                // Remove old descriptor and add a new one with admin
                // option: cf. SQL 2003, section 12.5, general rule 3
                rgd.drop(lcc);
                rgd.setWithAdminOption(true);
                dd.addDescriptor(rgd,
                                 null,  // parent
                                 DataDictionary.SYSROLES_CATALOG_NUM,
                                 false, // no duplicatesAllowed
                                 tc);
            } else if (rgd == null) {
                // Check if the grantee is a role (if not, it is a user)
                RoleGrantDescriptor granteeDef = dd.getRoleDefinitionDescriptor(grantee);
                if (granteeDef != null) {
                    checkCircularity(role, grantee, grantor, tc, dd);
                }
                rgd = ddg.newRoleGrantDescriptor(
                        dd.getUUIDFactory().createUUID(),
                        role,
                        grantee,
                        grantor,         // dbo for now
                        withAdminOption,
                        false);          // not definition
                dd.addDescriptor(rgd,
                                 null,  // parent
                                 DataDictionary.SYSROLES_CATALOG_NUM,
                                 false, // no duplicatesAllowed
                                 tc);
            }
        // else exists already, no need to add
        }
    }
}
Also used : DataDescriptorGenerator(org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator) LanguageConnectionContext(org.apache.derby.iapi.sql.conn.LanguageConnectionContext) Iterator(java.util.Iterator) RoleClosureIterator(org.apache.derby.iapi.sql.dictionary.RoleClosureIterator) DataDictionary(org.apache.derby.iapi.sql.dictionary.DataDictionary) TransactionController(org.apache.derby.iapi.store.access.TransactionController) RoleGrantDescriptor(org.apache.derby.iapi.sql.dictionary.RoleGrantDescriptor)

Example 14 with DataDescriptorGenerator

use of org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator in project derby by apache.

the class GenericPrivilegeInfo method executeGrantRevoke.

// /////////////////////////////////////////////////////////////////////////////////
// 
// PrivilegeInfo BEHAVIOR
// 
// /////////////////////////////////////////////////////////////////////////////////
/**
 *	This is the guts of the Execution-time logic for GRANT/REVOKE generic privileges.
 *
 * @param activation
 * @param grant true if grant, false if revoke
 * @param grantees a list of authorization ids (strings)
 *
 * @exception StandardException		Thrown on failure
 */
public void executeGrantRevoke(Activation activation, boolean grant, List grantees) throws StandardException {
    // Gather the connection context, data dictionary, and descriptor details needed below.
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    String currentUser = lcc.getCurrentUserId(activation);
    TransactionController tc = lcc.getTransactionExecute();
    SchemaDescriptor sd = _tupleDescriptor.getSchemaDescriptor();
    UUID objectID = _tupleDescriptor.getUUID();
    String objectTypeName = _tupleDescriptor.getObjectTypeName();
    // Check that the current user has permission to grant the privileges.
    checkOwnership(currentUser, (TupleDescriptor) _tupleDescriptor, sd, dd);
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    PermDescriptor permDesc = ddg.newPermDescriptor(null, objectTypeName, objectID, _privilege, currentUser, null, false);
    dd.startWriting(lcc);
    for (Iterator itr = grantees.iterator(); itr.hasNext(); ) {
        // Keep track to see if any privileges are revoked by a revoke
        // statement. If a privilege is not revoked, we need to raise a
        // warning.
        boolean privileges_revoked = false;
        String grantee = (String) itr.next();
        if (dd.addRemovePermissionsDescriptor(grant, permDesc, grantee, tc)) {
            // 
            // We fall in here if we are performing REVOKE.
            // 
            privileges_revoked = true;
            int invalidationType = _restrict ? DependencyManager.REVOKE_PRIVILEGE_RESTRICT : DependencyManager.REVOKE_PRIVILEGE;
            dd.getDependencyManager().invalidateFor(permDesc, invalidationType, lcc);
            // Now invalidate all GPSs referring to the object.
            dd.getDependencyManager().invalidateFor(_tupleDescriptor, invalidationType, lcc);
        }
        addWarningIfPrivilegeNotRevoked(activation, grant, privileges_revoked, grantee);
    }
}
Also used : DataDescriptorGenerator(org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator) SchemaDescriptor(org.apache.derby.iapi.sql.dictionary.SchemaDescriptor) LanguageConnectionContext(org.apache.derby.iapi.sql.conn.LanguageConnectionContext) Iterator(java.util.Iterator) DataDictionary(org.apache.derby.iapi.sql.dictionary.DataDictionary) TransactionController(org.apache.derby.iapi.store.access.TransactionController) UUID(org.apache.derby.catalog.UUID) PermDescriptor(org.apache.derby.iapi.sql.dictionary.PermDescriptor)

Example 15 with DataDescriptorGenerator

use of org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator in project derby by apache.

the class AlterTableConstantAction method dropColumnFromTable.

/**
 * Workhorse for dropping a column from a table.
 *
 * This routine drops a column from a table, taking care
 * to properly handle the various related schema objects.
 *
 * The syntax which gets you here is:
 *
 *   ALTER TABLE tbl DROP [COLUMN] col [CASCADE|RESTRICT]
 *
 * The keyword COLUMN is optional, and if you don't
 * specify CASCADE or RESTRICT, the default is CASCADE
 * (the default is chosen in the parser, not here).
 *
 * If you specify RESTRICT, then the column drop should be
 * rejected if it would cause a dependent schema object
 * to become invalid.
 *
 * If you specify CASCADE, then the column drop should
 * additionally drop other schema objects which have
 * become invalid.
 *
 * You may not drop the last (only) column in a table.
 *
 * Schema objects of interest include:
 *  - views
 *  - triggers
 *  - constraints
 *    - check constraints
 *    - primary key constraints
 *    - foreign key constraints
 *    - unique key constraints
 *    - not null constraints
 *  - privileges
 *  - indexes
 *  - default values
 *
 * Dropping a column may also change the column position
 * numbers of other columns in the table, which may require
 * fixup of schema objects (such as triggers and column
 * privileges) which refer to columns by column position number.
 *
 * Indexes are a bit interesting. The official SQL spec
 * doesn't talk about indexes; they are considered to be
 * an implementation-specific performance optimization.
 * The current Derby behavior is that:
 *  - CASCADE/RESTRICT doesn't matter for indexes
 *  - when a column is dropped, it is removed from any indexes
 *    which contain it.
 *  - if that column was the only column in the index, the
 *    entire index is dropped.
 *
 * @param   columnName the name of the column specification in the ALTER
 *						statement-- currently we allow only one.
 * @exception StandardException 	thrown on failure.
 */
private void dropColumnFromTable(String columnName) throws StandardException {
    boolean cascade = (behavior == StatementType.DROP_CASCADE);
    // drop any generated columns which reference this column
    ColumnDescriptorList generatedColumnList = td.getGeneratedColumns();
    int generatedColumnCount = generatedColumnList.size();
    ArrayList<String> cascadedDroppedColumns = new ArrayList<String>();
    for (int i = 0; i < generatedColumnCount; i++) {
        ColumnDescriptor generatedColumn = generatedColumnList.elementAt(i);
        String[] referencedColumnNames = generatedColumn.getDefaultInfo().getReferencedColumnNames();
        int referencedColumnCount = referencedColumnNames.length;
        for (int j = 0; j < referencedColumnCount; j++) {
            if (columnName.equals(referencedColumnNames[j])) {
                String generatedColumnName = generatedColumn.getColumnName();
                // The current generated column references the column
                // we're trying to drop.
                if (!cascade) {
                    // RESTRICT: reject the DROP COLUMN because a generated
                    // column depends on the column being dropped.
                    throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "GENERATED COLUMN", generatedColumnName);
                } else {
                    cascadedDroppedColumns.add(generatedColumnName);
                }
            }
        }
    }
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    int cascadedDrops = cascadedDroppedColumns.size();
    int sizeAfterCascadedDrops = td.getColumnDescriptorList().size() - cascadedDrops;
    // can NOT drop a column if it is the only one in the table
    if (sizeAfterCascadedDrops == 1) {
        throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), "THE *LAST* COLUMN " + columnName, "TABLE", td.getQualifiedName());
    }
    // now drop dependent generated columns
    for (int i = 0; i < cascadedDrops; i++) {
        String generatedColumnName = cascadedDroppedColumns.get(i);
        activation.addWarning(StandardException.newWarning(SQLState.LANG_GEN_COL_DROPPED, generatedColumnName, td.getName()));
        // 
        // We can only recurse 2 levels since a generation clause cannot
        // refer to other generated columns.
        // 
        dropColumnFromTable(generatedColumnName);
    }
    /*
         * Cascaded drops of dependent generated columns may require us to
         * rebuild the table descriptor.
         */
    td = dd.getTableDescriptor(tableId);
    ColumnDescriptor columnDescriptor = td.getColumnDescriptor(columnName);
    // We already verified this in bind, but do it again
    if (columnDescriptor == null) {
        throw StandardException.newException(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE, columnName, td.getQualifiedName());
    }
    int size = td.getColumnDescriptorList().size();
    droppedColumnPosition = columnDescriptor.getPosition();
    FormatableBitSet toDrop = new FormatableBitSet(size + 1);
    toDrop.set(droppedColumnPosition);
    td.setReferencedColumnMap(toDrop);
    dm.invalidateFor(td, (cascade ? DependencyManager.DROP_COLUMN : DependencyManager.DROP_COLUMN_RESTRICT), lcc);
    // If column has a default we drop the default and any dependencies
    if (columnDescriptor.getDefaultInfo() != null) {
        dm.clearDependencies(lcc, columnDescriptor.getDefaultDescriptor(dd));
    }
    // If the column being dropped is an identity column (at dictionary
    // version 10.11 or later), then we need to drop the system-generated
    // sequence backing it.
    if (columnDescriptor.isAutoincrement() && dd.checkVersion(DataDictionary.DD_VERSION_DERBY_10_11, null)) {
        DropTableConstantAction.dropIdentitySequence(dd, td, activation);
    }
    // Go through the triggers on this table: the dropped column may be one
    // of the trigger columns, or one of the trigger action columns which
    // are used through the REFERENCING clause.
    for (TriggerDescriptor trd : dd.getTriggerDescriptors(td)) {
        // If we find that the trigger is dependent on the column being
        // dropped because column is part of trigger columns list, then
        // we will give a warning or drop the trigger based on whether
        // ALTER TABLE DROP COLUMN is RESTRICT or CASCADE. In such a
        // case, no need to check if the trigger action columns referenced
        // through REFERENCING clause also used the column being dropped.
        boolean triggerDroppedAlready = false;
        int[] referencedCols = trd.getReferencedCols();
        if (referencedCols != null) {
            int refColLen = referencedCols.length, j;
            boolean changed = false;
            for (j = 0; j < refColLen; j++) {
                if (referencedCols[j] > droppedColumnPosition) {
                    // Trigger is not defined on the column being dropped
                    // but the column position of trigger column is changing
                    // because the position of the column being dropped is
                    // before the trigger column
                    changed = true;
                } else if (referencedCols[j] == droppedColumnPosition) {
                    // the trigger is defined on the column being dropped
                    if (cascade) {
                        trd.drop(lcc);
                        triggerDroppedAlready = true;
                        activation.addWarning(StandardException.newWarning(SQLState.LANG_TRIGGER_DROPPED, trd.getName(), td.getName()));
                    } else {
                        // we'd better give an error if we don't drop it;
                        // otherwise there would be unexpected behaviors
                        throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "TRIGGER", trd.getName());
                    }
                    break;
                }
            }
            // The trigger is not defined on the dropped column, but some of
            // its trigger columns sit after it, so their positions change
            // with the drop column.
            if (j == refColLen && changed) {
                dd.dropTriggerDescriptor(trd, tc);
                for (j = 0; j < refColLen; j++) {
                    if (referencedCols[j] > droppedColumnPosition)
                        referencedCols[j]--;
                }
                trd.setReferencedCols(referencedCols);
                dd.addDescriptor(trd, sd, DataDictionary.SYSTRIGGERS_CATALOG_NUM, false, tc);
            }
        }
        // If the trigger was already dropped in the loop above, then move
        // to the next trigger.
        if (triggerDroppedAlready)
            continue;
        // Column being dropped is not one of trigger columns. Check if
        // that column is getting used inside the trigger action through
        // REFERENCING clause. This can be tracked only for triggers
        // created in 10.7 and higher releases. Derby releases prior to
        // that did not keep track of trigger action columns used
        // through the REFERENCING clause.
        int[] referencedColsInTriggerAction = trd.getReferencedColsInTriggerAction();
        if (referencedColsInTriggerAction != null) {
            int refColInTriggerActionLen = referencedColsInTriggerAction.length, j;
            boolean changedColPositionInTriggerAction = false;
            for (j = 0; j < refColInTriggerActionLen; j++) {
                if (referencedColsInTriggerAction[j] > droppedColumnPosition) {
                    changedColPositionInTriggerAction = true;
                } else if (referencedColsInTriggerAction[j] == droppedColumnPosition) {
                    if (cascade) {
                        trd.drop(lcc);
                        triggerDroppedAlready = true;
                        activation.addWarning(StandardException.newWarning(SQLState.LANG_TRIGGER_DROPPED, trd.getName(), td.getName()));
                    } else {
                        // we'd better give an error if we don't drop it;
                        // otherwise there would be unexpected behaviors
                        throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "TRIGGER", trd.getName());
                    }
                    break;
                }
            }
            // The trigger action does not use the dropped column, but some of
            // the columns it references through the REFERENCING clause sit
            // after it, so their stored positions must be adjusted once the
            // column has been actually dropped from the table descriptor.
            if (j == refColInTriggerActionLen && changedColPositionInTriggerAction) {
                dd.dropTriggerDescriptor(trd, tc);
                for (j = 0; j < refColInTriggerActionLen; j++) {
                    if (referencedColsInTriggerAction[j] > droppedColumnPosition)
                        referencedColsInTriggerAction[j]--;
                }
                trd.setReferencedColsInTriggerAction(referencedColsInTriggerAction);
                dd.addDescriptor(trd, sd, DataDictionary.SYSTRIGGERS_CATALOG_NUM, false, tc);
            }
        }
    }
    ConstraintDescriptorList csdl = dd.getConstraintDescriptors(td);
    int csdl_size = csdl.size();
    ArrayList<ConstantAction> newCongloms = new ArrayList<ConstantAction>();
    // we want to remove referenced primary/unique keys in the second
    // round.  This will ensure that self-referential constraints will
    // work OK.
    int tbr_size = 0;
    ConstraintDescriptor[] toBeRemoved = new ConstraintDescriptor[csdl_size];
    // let's go downwards, don't want to get messed up while removing
    for (int i = csdl_size - 1; i >= 0; i--) {
        ConstraintDescriptor cd = csdl.elementAt(i);
        int[] referencedColumns = cd.getReferencedColumns();
        int numRefCols = referencedColumns.length, j;
        boolean changed = false;
        for (j = 0; j < numRefCols; j++) {
            if (referencedColumns[j] > droppedColumnPosition)
                changed = true;
            if (referencedColumns[j] == droppedColumnPosition)
                break;
        }
        if (j == numRefCols) {
            // column not referenced
            if ((cd instanceof CheckConstraintDescriptor) && changed) {
                dd.dropConstraintDescriptor(cd, tc);
                for (j = 0; j < numRefCols; j++) {
                    if (referencedColumns[j] > droppedColumnPosition)
                        referencedColumns[j]--;
                }
                ((CheckConstraintDescriptor) cd).setReferencedColumnsDescriptor(new ReferencedColumnsDescriptorImpl(referencedColumns));
                dd.addConstraintDescriptor(cd, tc);
            }
            continue;
        }
        if (!cascade) {
            // RESTRICT: reject the DROP COLUMN because this constraint
            // references the column being dropped.
            throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "CONSTRAINT", cd.getConstraintName());
        }
        if (cd instanceof ReferencedKeyConstraintDescriptor) {
            // restrict will raise an error in invalidate if referenced
            toBeRemoved[tbr_size++] = cd;
            continue;
        }
        // drop now in all other cases
        dm.invalidateFor(cd, DependencyManager.DROP_CONSTRAINT, lcc);
        dropConstraint(cd, td, newCongloms, activation, lcc, true);
        activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED, cd.getConstraintName(), td.getName()));
    }
    for (int i = tbr_size - 1; i >= 0; i--) {
        ConstraintDescriptor cd = toBeRemoved[i];
        dropConstraint(cd, td, newCongloms, activation, lcc, false);
        activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED, cd.getConstraintName(), td.getName()));
        if (cascade) {
            ConstraintDescriptorList fkcdl = dd.getForeignKeys(cd.getUUID());
            for (ConstraintDescriptor fkcd : fkcdl) {
                dm.invalidateFor(fkcd, DependencyManager.DROP_CONSTRAINT, lcc);
                dropConstraint(fkcd, td, newCongloms, activation, lcc, true);
                activation.addWarning(StandardException.newWarning(SQLState.LANG_CONSTRAINT_DROPPED, fkcd.getConstraintName(), fkcd.getTableDescriptor().getName()));
            }
        }
        dm.invalidateFor(cd, DependencyManager.DROP_CONSTRAINT, lcc);
        dm.clearDependencies(lcc, cd);
    }
    /* If there are new backing conglomerates which must be
		 * created to replace a dropped shared conglomerate
		 * (where the shared conglomerate was dropped as part
		 * of a "drop constraint" call above), then create them
		 * now.  We do this *after* dropping all dependent
		 * constraints because we don't want to waste time
		 * creating a new conglomerate if it's just going to be
		 * dropped again as part of another "drop constraint".
		 */
    createNewBackingCongloms(newCongloms, (long[]) null);
    /*
         * The work we've done above, specifically the possible
         * dropping of primary key, foreign key, and unique constraints
         * and their underlying indexes, may have affected the table
         * descriptor. By re-reading the table descriptor here, we
         * ensure that the compressTable code is working with an
         * accurate table descriptor. Without this line, we may get
         * conglomerate-not-found errors and the like due to our
         * stale table descriptor.
         */
    td = dd.getTableDescriptor(tableId);
    compressTable();
    ColumnDescriptorList tab_cdl = td.getColumnDescriptorList();
    // drop the column from syscolumns
    dd.dropColumnDescriptor(td.getUUID(), columnName, tc);
    ColumnDescriptor[] cdlArray = new ColumnDescriptor[size - columnDescriptor.getPosition()];
    // Re-register every column after the dropped one with its new
    // (shifted) position.
    for (int i = columnDescriptor.getPosition(), j = 0; i < size; i++, j++) {
        ColumnDescriptor cd = tab_cdl.elementAt(i);
        dd.dropColumnDescriptor(td.getUUID(), cd.getColumnName(), tc);
        cd.setPosition(i);
        if (cd.isAutoincrement()) {
            cd.setAutoinc_create_or_modify_Start_Increment(ColumnDefinitionNode.CREATE_AUTOINCREMENT);
        }
        cdlArray[j] = cd;
    }
    dd.addDescriptorArray(cdlArray, td, DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);
    // By this time, the column has been removed from the table descriptor.
    // Now, go through all the triggers and regenerate their trigger action
    // SPS and rebind the generated trigger action sql. If the trigger
    // action is using the dropped column, it will get detected here. If
    // not, then we will have generated the internal trigger action sql
    // which matches the trigger action sql provided by the user.
    // 
    // eg of positive test case
    // create table atdc_16_tab1 (a1 integer, b1 integer, c1 integer);
    // create table atdc_16_tab2 (a2 integer, b2 integer, c2 integer);
    // create trigger atdc_16_trigger_1
    // after update of b1 on atdc_16_tab1
    // REFERENCING NEW AS newt
    // for each row
    // update atdc_16_tab2 set c2 = newt.c1
    // The internal representation for the trigger action before the column
    // is dropped is as follows
    // update atdc_16_tab2 set c2 =
    // org.apache.derby.iapi.db.Factory::getTriggerExecutionContext().
    // getONewRow().getInt(3)
    // After a drop column such as
    // alter table atdc_16_tab1 drop column a1
    // the above internal representation of the trigger action sql is no
    // longer correct, because the column position of c1 in atdc_16_tab1
    // has changed from 3 to 2. The loop below will regenerate it and
    // change it to the following
    // update atdc_16_tab2 set c2 =
    // org.apache.derby.iapi.db.Factory::getTriggerExecutionContext().
    // getONewRow().getInt(2)
    // 
    // We could not do this before the actual column drop, because the
    // rebind would have still found the column being dropped in the
    // table descriptor and hence use of such a column in the trigger
    // action rebind would not have been caught.
    // For the table on which ALTER TABLE is getting performed, find out
    // all the SPSDescriptors that use that table as a provider. We are
    // looking for SPSDescriptors that have been created internally for
    // trigger action SPSes. Through those SPSDescriptors, we will be
    // able to get to the triggers dependent on the table being altered
    // Following will get all the dependent objects that are using
    // ALTER TABLE table as provider
    List<DependencyDescriptor> depsOnAlterTableList = dd.getProvidersDescriptorList(td.getObjectID().toString());
    for (DependencyDescriptor depOnAT : depsOnAlterTableList) {
        // Go through all the dependent objects on the table being altered
        DependableFinder dependent = depOnAT.getDependentFinder();
        // We only care about this dependent if it is a
        // stored prepared statement.
        if (dependent.getSQLObjectType().equals(Dependable.STORED_PREPARED_STATEMENT)) {
            // Look for all the dependent objects that are using this
            // stored prepared statement as provider. We are only
            // interested in dependents that are triggers.
            List<DependencyDescriptor> depsTrigger = dd.getProvidersDescriptorList(depOnAT.getUUID().toString());
            for (DependencyDescriptor depsTriggerDesc : depsTrigger) {
                DependableFinder providerIsTrigger = depsTriggerDesc.getDependentFinder();
                // it is a trigger
                if (providerIsTrigger.getSQLObjectType().equals(Dependable.TRIGGER)) {
                    // Drop and recreate the trigger after regenerating
                    // its trigger action plan. If the trigger action
                    // depends on the column being dropped, it will be
                    // caught here.
                    TriggerDescriptor trdToBeDropped = dd.getTriggerDescriptor(depsTriggerDesc.getUUID());
                    // First check for dependencies in the trigger's WHEN
                    // clause, if there is one.
                    UUID whenClauseId = trdToBeDropped.getWhenClauseId();
                    boolean gotDropped = false;
                    if (whenClauseId != null) {
                        gotDropped = columnDroppedAndTriggerDependencies(trdToBeDropped, whenClauseId, true, cascade, columnName);
                    }
                    // If the WHEN clause did not cause the trigger to be
                    // dropped, check the trigger action for dependencies.
                    if (!gotDropped) {
                        columnDroppedAndTriggerDependencies(trdToBeDropped, trdToBeDropped.getActionId(), false, cascade, columnName);
                    }
                }
            }
        }
    }
    // Adjust the column permissions rows in SYSCOLPERMS to reflect the
    // changed column positions due to the dropped column:
    dd.updateSYSCOLPERMSforDropColumn(td.getUUID(), tc, columnDescriptor);
    // remove column descriptor from table descriptor. this fixes up the
    // list in case we were called recursively in order to cascade-drop a
    // dependent generated column.
    tab_cdl.remove(td.getColumnDescriptor(columnName));
}
Also used : DependencyDescriptor(org.apache.derby.iapi.sql.dictionary.DependencyDescriptor) ArrayList(java.util.ArrayList) ReferencedColumnsDescriptorImpl(org.apache.derby.catalog.types.ReferencedColumnsDescriptorImpl) CheckConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.CheckConstraintDescriptor) DataDescriptorGenerator(org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator) ConstantAction(org.apache.derby.iapi.sql.execute.ConstantAction) DependableFinder(org.apache.derby.catalog.DependableFinder) ColumnDescriptorList(org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList) FormatableBitSet(org.apache.derby.iapi.services.io.FormatableBitSet) UUID(org.apache.derby.catalog.UUID) ColumnDescriptor(org.apache.derby.iapi.sql.dictionary.ColumnDescriptor) ConstraintDescriptorList(org.apache.derby.iapi.sql.dictionary.ConstraintDescriptorList) TriggerDescriptor(org.apache.derby.iapi.sql.dictionary.TriggerDescriptor) CheckConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.CheckConstraintDescriptor) ForeignKeyConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.ForeignKeyConstraintDescriptor) ConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.ConstraintDescriptor) ReferencedKeyConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.ReferencedKeyConstraintDescriptor) ReferencedKeyConstraintDescriptor(org.apache.derby.iapi.sql.dictionary.ReferencedKeyConstraintDescriptor)
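
A detail worth pulling out of Example 15: the fix-up loops for trigger columns, trigger action columns, and check constraint columns all apply the same renumbering rule after DROP COLUMN, namely that every 1-based column position greater than the dropped position shifts down by one (a reference equal to the dropped position has already been handled by dropping the object or raising an error). A standalone restatement of that rule, not Derby code, with a made-up helper name:

// Sketch of the position fix-up applied to trigger and constraint column
// references after DROP COLUMN. Derby performs this in place on the
// descriptor's int[] before re-adding the trigger or constraint descriptor.
static int[] shiftColumnPositions(int[] referencedCols, int droppedColumnPosition) {
    int[] adjusted = referencedCols.clone();
    for (int j = 0; j < adjusted.length; j++) {
        // Columns that sat after the dropped column move up by one position.
        if (adjusted[j] > droppedColumnPosition) {
            adjusted[j]--;
        }
    }
    return adjusted;
}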

Aggregations

DataDescriptorGenerator (org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator): 33 usages
UUID (org.apache.derby.catalog.UUID): 23 usages
DataValueDescriptor (org.apache.derby.iapi.types.DataValueDescriptor): 15 usages
SchemaDescriptor (org.apache.derby.iapi.sql.dictionary.SchemaDescriptor): 14 usages
LanguageConnectionContext (org.apache.derby.iapi.sql.conn.LanguageConnectionContext): 13 usages
DataDictionary (org.apache.derby.iapi.sql.dictionary.DataDictionary): 13 usages
TransactionController (org.apache.derby.iapi.store.access.TransactionController): 12 usages
TableDescriptor (org.apache.derby.iapi.sql.dictionary.TableDescriptor): 7 usages
DependencyManager (org.apache.derby.iapi.sql.depend.DependencyManager): 5 usages
ColumnDescriptor (org.apache.derby.iapi.sql.dictionary.ColumnDescriptor): 5 usages
ConglomerateDescriptor (org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor): 5 usages
Timestamp (java.sql.Timestamp): 4 usages
Iterator (java.util.Iterator): 4 usages
ColumnDescriptorList (org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList): 4 usages
ConstraintDescriptor (org.apache.derby.iapi.sql.dictionary.ConstraintDescriptor): 4 usages
Provider (org.apache.derby.iapi.sql.depend.Provider): 3 usages
SubKeyConstraintDescriptor (org.apache.derby.iapi.sql.dictionary.SubKeyConstraintDescriptor): 3 usages
Properties (java.util.Properties): 2 usages
ReferencedColumns (org.apache.derby.catalog.ReferencedColumns): 2 usages
TypeDescriptor (org.apache.derby.catalog.TypeDescriptor): 2 usages