use of org.apache.derby.iapi.sql.dictionary.SchemaDescriptor in project derby by apache.
the class AlterConstraintConstantAction method executeConstantAction.
/**
* This is the guts of the Execution-time logic for ALTER CONSTRAINT.
*
* @see ConstantAction#executeConstantAction
*
* @exception StandardException Thrown on failure
*/
public void executeConstantAction(Activation activation) throws StandardException {
final LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
final DataDictionary dd = lcc.getDataDictionary();
final DependencyManager dm = dd.getDependencyManager();
final TransactionController tc = lcc.getTransactionExecute();
/*
** Inform the data dictionary that we are about to write to it.
** There are several calls to data dictionary "get" methods here
** that might be done in "read" mode in the data dictionary, but
** it seemed safer to do this whole operation in "write" mode.
**
** We tell the data dictionary we're done writing at the end of
** the transaction.
*/
dd.startWriting(lcc);
final TableDescriptor td = dd.getTableDescriptor(tableId);
if (td == null) {
throw StandardException.newException(SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tableName);
}
/* Table gets locked in AlterTableConstantAction */
/*
** If the schema descriptor is null, then
** we must have just read ourselves in.
** So we will get the corresponding schema
** descriptor from the data dictionary.
*/
SchemaDescriptor tdSd = td.getSchemaDescriptor();
SchemaDescriptor constraintSd = constraintSchemaName == null ? tdSd : dd.getSchemaDescriptor(constraintSchemaName, tc, true);
/* Get the constraint descriptor for the index, along
* with an exclusive row lock on the row in sys.sysconstraints
* in order to ensure that no one else compiles against the
* index.
*/
final ConstraintDescriptor conDesc = dd.getConstraintDescriptorByName(td, constraintSd, constraintName, true);
if (conDesc == null) {
throw StandardException.newException(SQLState.LANG_DROP_OR_ALTER_NON_EXISTING_CONSTRAINT, constraintSd.getSchemaName() + "." + constraintName, td.getQualifiedName());
}
if (characteristics[2] != ConstraintDefinitionNode.ENFORCED_DEFAULT) {
dd.checkVersion(DataDictionary.DD_VERSION_DERBY_10_11, "DEFERRED CONSTRAINTS");
if (constraintType == DataDictionary.FOREIGNKEY_CONSTRAINT || constraintType == DataDictionary.NOTNULL_CONSTRAINT || !characteristics[2] /* not enforced */) {
// Remove when feature DERBY-532 is completed
if (!PropertyUtil.getSystemProperty("derby.constraintsTesting", "false").equals("true")) {
throw StandardException.newException(SQLState.NOT_IMPLEMENTED, "non-default enforcement");
}
}
}
// The first two characteristics are unused during ALTER CONSTRAINT; only
// enforcement can change.
conDesc.setEnforced(characteristics[2]);
int[] colsToSet = new int[1];
colsToSet[0] = SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_STATE;
dd.updateConstraintDescriptor(conDesc, conDesc.getUUID(), colsToSet, tc);
}
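For orientation, below is a rough JDBC-level sketch (not from the Derby sources) of the statement shape this constant action handles. The table and constraint names are hypothetical, and the ALTER CONSTRAINT syntax shown follows the 10.11 deferrable-constraints work; as the guard above shows, changing enforcement to NOT ENFORCED is still gated behind derby.constraintsTesting while DERBY-532 is incomplete, so a stock build reports it as not implemented.

// Hypothetical client-side sketch; names and URLs are made up for illustration.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class AlterConstraintSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection c = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
             Statement s = c.createStatement()) {
            s.executeUpdate("CREATE TABLE orders(id INT PRIMARY KEY, qty INT, "
                    + "CONSTRAINT qty_pos CHECK (qty > 0))");
            try {
                // This is the statement shape handled by
                // AlterConstraintConstantAction.executeConstantAction above.
                // Without derby.constraintsTesting=true the guarded branch
                // raises NOT_IMPLEMENTED ("non-default enforcement").
                s.executeUpdate("ALTER TABLE orders ALTER CONSTRAINT qty_pos NOT ENFORCED");
            } catch (SQLException notImplemented) {
                System.out.println(notImplemented.getSQLState() + ": " + notImplemented.getMessage());
            }
        }
    }
}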
use of org.apache.derby.iapi.sql.dictionary.SchemaDescriptor in project derby by apache.
the class AlterTableConstantAction method columnDroppedAndTriggerDependencies.
// For the trigger, get the trigger action sql provided by the user
// in the create trigger sql. This sql is saved in the system
// table. Since a column has been dropped from the trigger table,
// the trigger action sql may not be valid anymore. To establish
// that, we need to regenerate the internal representation of that
// sql and bind it again.
//
// This method is called both on the WHEN clause (if one exists) and the
// triggered SQL statement of the trigger action.
//
// Return true if the trigger was dropped by this method (if cascade is
// true and it turns out the trigger depends on the column being dropped),
// or false otherwise.
private boolean columnDroppedAndTriggerDependencies(TriggerDescriptor trd, UUID spsUUID, boolean isWhenClause, boolean cascade, String columnName) throws StandardException {
dd.dropTriggerDescriptor(trd, tc);
// Here we get the trigger action sql and use the parser to build
// the parse tree for it.
SchemaDescriptor compSchema = dd.getSchemaDescriptor(dd.getSPSDescriptor(spsUUID).getCompSchemaId(), null);
CompilerContext newCC = lcc.pushCompilerContext(compSchema);
Parser pa = newCC.getParser();
String originalSQL = isWhenClause ? trd.getWhenClauseText() : trd.getTriggerDefinition();
Visitable node = isWhenClause ? pa.parseSearchCondition(originalSQL) : pa.parseStatement(originalSQL);
lcc.popCompilerContext(newCC);
// Do not delete the following. We use this in the finally clause
// to determine whether the CompilerContext needs to be popped.
newCC = null;
try {
// Regenerate the internal representation of the trigger action
// sql using the ColumnReference classes in the parse tree. This
// will catch a dropped column being used in the trigger action
// sql through the REFERENCING clause (this can happen only for
// triggers created prior to 10.7. Triggers created with 10.7 and
// higher keep track of the trigger action columns used through
// the REFERENCING clause in a system table, so use of a dropped
// column is detected earlier in this method for such triggers).
//
// We might catch errors like the following during this step.
// Say the following pre-10.7 trigger exists in the system and
// the user is dropping column c11. During the regeneration of
// the internal trigger action sql format, we will catch that
// column oldt.c11 does not exist anymore:
// CREATE TRIGGER DERBY4998_SOFT_UPGRADE_RESTRICT_tr1
// AFTER UPDATE OF c12
// ON DERBY4998_SOFT_UPGRADE_RESTRICT REFERENCING OLD AS oldt
// FOR EACH ROW
// SELECT oldt.c11 from DERBY4998_SOFT_UPGRADE_RESTRICT
SPSDescriptor sps = isWhenClause ? trd.getWhenClauseSPS(lcc) : trd.getActionSPS(lcc);
int[] referencedColsInTriggerAction = new int[td.getNumberOfColumns()];
java.util.Arrays.fill(referencedColsInTriggerAction, -1);
String newText = dd.getTriggerActionString(node, trd.getOldReferencingName(), trd.getNewReferencingName(), originalSQL, trd.getReferencedCols(), referencedColsInTriggerAction, 0, trd.getTableDescriptor(), trd.getTriggerEventMask(), true, null, null);
if (isWhenClause) {
// The WHEN clause is not a full SQL statement, just a search
// condition, so we need to turn it into a statement in order
// to create an SPS.
newText = "VALUES " + newText;
}
sps.setText(newText);
// Now that we have the internal format of the trigger action sql,
// bind that sql to make sure that we are not using the column
// being dropped in the trigger action sql directly (i.e. not
// through the REFERENCING clause).
// e.g.
// create table atdc_12 (a integer, b integer);
// create trigger atdc_12_trigger_1 after update of a
// on atdc_12 for each row select a,b from atdc_12
// Drop one of the columns used in the trigger action
// alter table atdc_12 drop column b
// The following rebinding of the trigger action sql will catch
// the use of column b in trigger atdc_12_trigger_1.
newCC = lcc.pushCompilerContext(compSchema);
newCC.setReliability(CompilerContext.INTERNAL_SQL_LEGAL);
pa = newCC.getParser();
StatementNode stmtnode = (StatementNode) pa.parseStatement(newText);
// need a current dependent for bind
newCC.setCurrentDependent(sps.getPreparedStatement());
stmtnode.bindStatement();
} catch (StandardException se) {
// The rebind failed. If it failed because a column or table used by
// the trigger no longer exists (e.g. when a drop column such as
// ATDC_13_TAB3.c12 is issued), either drop the trigger (CASCADE)
// or raise a dependency error (RESTRICT).
if (se.getMessageId().equals(SQLState.LANG_COLUMN_NOT_FOUND) || (se.getMessageId().equals(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE) || (se.getMessageId().equals(SQLState.LANG_DB2_INVALID_COLS_SPECIFIED) || (se.getMessageId().equals(SQLState.LANG_TABLE_NOT_FOUND))))) {
if (cascade) {
trd.drop(lcc);
activation.addWarning(StandardException.newWarning(SQLState.LANG_TRIGGER_DROPPED, trd.getName(), td.getName()));
return true;
} else {
// we'd better give an error if we don't drop it
throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "TRIGGER", trd.getName());
}
} else
throw se;
} finally {
if (newCC != null)
lcc.popCompilerContext(newCC);
}
// If we are here, then it means that the column being dropped
// is not used in the trigger action.
//
// We have recreated the trigger action SPS and recollected the
// column positions for the trigger columns and the trigger action
// columns accessed through the REFERENCING clause, because
// dropping a column can affect the positions of the existing
// columns in the table. We save that in the system table.
dd.addDescriptor(trd, sd, DataDictionary.SYSTRIGGERS_CATALOG_NUM, false, tc);
return false;
}
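The atdc_12 scenario spelled out in the comments above can be reproduced from JDBC roughly as follows. This is a sketch, assuming an open embedded connection conn; it is not code from the Derby test suite.

// Sketch of the atdc_12 scenario from the comments above; `conn` is an
// assumed open embedded connection.
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

class DropColumnTriggerSketch {
    static void atdc12Scenario(Connection conn) throws SQLException {
        try (Statement s = conn.createStatement()) {
            s.executeUpdate("create table atdc_12 (a integer, b integer)");
            s.executeUpdate("create trigger atdc_12_trigger_1 after update of a "
                    + "on atdc_12 for each row select a, b from atdc_12");
            // With RESTRICT the rebind failure above surfaces as
            // LANG_PROVIDER_HAS_DEPENDENT_OBJECT; with CASCADE the trigger is
            // dropped, a LANG_TRIGGER_DROPPED warning is added, and
            // columnDroppedAndTriggerDependencies returns true.
            s.executeUpdate("alter table atdc_12 drop column b restrict");
        }
    }
}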
use of org.apache.derby.iapi.sql.dictionary.SchemaDescriptor in project derby by apache.
the class GenericLanguageConnectionContext method resetSchemaUsages.
/**
* @see LanguageConnectionContext#resetSchemaUsages(Activation activation,
* String schemaName)
*/
public void resetSchemaUsages(Activation activation, String schemaName) throws StandardException {
Activation parent = activation.getParentActivation();
SchemaDescriptor defaultSchema = getInitialDefaultSchemaDescriptor();
// walk SQL session context chain
while (parent != null) {
SQLSessionContext ssc = parent.getSQLSessionContextForChildren();
SchemaDescriptor s = ssc.getDefaultSchema();
if (SanityManager.DEBUG) {
SanityManager.ASSERT(s != null, "s should not be null here");
}
if (schemaName.equals(s.getSchemaName())) {
ssc.setDefaultSchema(defaultSchema);
}
parent = parent.getParentActivation();
}
// finally top level
SQLSessionContext top = getTopLevelSQLSessionContext();
SchemaDescriptor sd = top.getDefaultSchema();
if (SanityManager.DEBUG) {
SanityManager.ASSERT(sd != null, "sd should not be null here");
}
if (schemaName.equals(sd.getSchemaName())) {
top.setDefaultSchema(defaultSchema);
}
}
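A hypothetical caller-side sketch follows; the helper name afterSchemaDropped and the assumption that schema-drop processing is what invokes resetSchemaUsages are illustrative, not taken verbatim from the Derby sources.

// Hypothetical sketch: repoint every SQL session context whose default
// schema is the schema just dropped back to the initial default schema.
// Package names follow the classic Derby iapi layout.
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.Activation;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;

class SchemaDropHelper {
    // Assumed helper, not a real Derby entry point.
    static void afterSchemaDropped(LanguageConnectionContext lcc,
                                   Activation activation,
                                   String droppedSchemaName) throws StandardException {
        // Walks the parent-activation chain (nested routine invocations) and
        // the top-level SQLSessionContext, as implemented above.
        lcc.resetSchemaUsages(activation, droppedSchemaName);
    }
}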
use of org.apache.derby.iapi.sql.dictionary.SchemaDescriptor in project derby by apache.
the class GenericLanguageConnectionContext method initDefaultSchemaDescriptor.
/**
* Compute the initial default schema and set
* cachedInitialDefaultSchemaDescr accordingly.
*
* @return computed initial default schema value for this session
* @throws StandardException
*/
protected SchemaDescriptor initDefaultSchemaDescriptor() throws StandardException {
/*
** - If the database supports schemas and a schema with the
** same name as the user's name exists (i.e. has already been
** created using CREATE SCHEMA), the database will set the
** user's default schema to the schema with that name.
** - Else, set the default schema to APP.
*/
if (cachedInitialDefaultSchemaDescr == null) {
DataDictionary dd = getDataDictionary();
SchemaDescriptor sd = dd.getSchemaDescriptor(getSessionUserId(), getTransactionCompile(), false);
if (sd == null) {
sd = new SchemaDescriptor(dd, getSessionUserId(), getSessionUserId(), (UUID) null, false);
}
cachedInitialDefaultSchemaDescr = sd;
}
return cachedInitialDefaultSchemaDescr;
}
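The effect of this logic can be observed from the client side. A minimal sketch follows, with hypothetical user and database names: the session's initial default schema is named after the connecting user even before any CREATE SCHEMA has been issued, in which case the descriptor above is built in memory with a null UUID.

// Hypothetical client-side sketch; user and database names are made up.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class DefaultSchemaSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection c = DriverManager.getConnection(
                 "jdbc:derby:memory:demo;create=true;user=fred;password=secret");
             Statement s = c.createStatement();
             ResultSet rs = s.executeQuery("VALUES CURRENT SCHEMA")) {
            rs.next();
            // Prints FRED: the user's name, uppercased, even though
            // CREATE SCHEMA FRED has never been executed.
            System.out.println(rs.getString(1));
        }
    }
}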
use of org.apache.derby.iapi.sql.dictionary.SchemaDescriptor in project derby by apache.
the class GenericLanguageConnectionContext method setupSessionContextMinion.
private void setupSessionContextMinion(Activation a, boolean push, boolean definersRights, String definer) throws StandardException {
if (SanityManager.DEBUG) {
if (definersRights) {
SanityManager.ASSERT(push);
}
}
SQLSessionContext sc = a.setupSQLSessionContextForChildren(push);
if (definersRights) {
sc.setUser(definer);
} else {
// A priori: invoker's rights: Current user
sc.setUser(getCurrentUserId(a));
}
if (definersRights) {
// No role a priori. Cf. SQL 2008, section 10.4 <routine
// invocation>, GR 5 j) i) 1) B) "If the external security
// characteristic of R is DEFINER, then the top cell of the
// authorization stack of RSC is set to contain only the routine
// authorization identifier of R."
sc.setRole(null);
} else {
// Semantics for roles dictate (SQL 4.34.1.1 and 4.27.3.) that the
// role is initially inherited from the current session context
// when we run with INVOKER security characteristic.
sc.setRole(getCurrentRoleId(a));
}
if (definersRights) {
SchemaDescriptor sd = getDataDictionary().getSchemaDescriptor(definer, getTransactionExecute(), false);
if (sd == null) {
sd = new SchemaDescriptor(getDataDictionary(), definer, definer, (UUID) null, false);
}
sc.setDefaultSchema(sd);
} else {
// Inherit current default schema. The initial value of the
// default schema is implementation defined. In Derby we
// inherit it when we invoke stored procedures and functions.
sc.setDefaultSchema(getDefaultSchema(a));
}
final SQLSessionContext ssc = getCurrentSQLSessionContext(a);
sc.setDeferredAll(ssc.getDeferredAll());
sc.setConstraintModes(ssc.getConstraintModes());
StatementContext stmctx = getStatementContext();
// Since the statement is an invocation (iff push=true), it will now be
// associated with the pushed SQLSessionContext (and no longer just
// shares that of its caller, or of the top level). The statement
// contexts of nested connection statements will inherit this statement
// context, so the SQL session context is available through it when
// nested statements are compiled (and executed, for the most part).
// However, for dynamic result sets, the relevant statement context
// (originating result set) is no longer available for execution-time
// references to the SQL session context, so we rely on the activation
// of the caller for accessing it, cf. e.g. the overload variants of
// getDefaultSchema/setDefaultSchema. If such nested connections
// themselves turn out to be invocations, they in turn get a new
// SQLSessionContext associated with them, etc.
stmctx.setSQLSessionContext(sc);
}
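Which branch runs above is decided by the routine's EXTERNAL SECURITY characteristic. Below is a hedged DDL-side sketch; the procedure name and external Java class are hypothetical, and SQL authorization (derby.database.sqlAuthorization=true) is assumed, since Derby only accepts EXTERNAL SECURITY in that mode.

// Hypothetical DDL sketch; `conn` handling and the external Java class are assumed.
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

class RoutineSecuritySketch {
    static void createDefinerRightsProcedure(Connection conn) throws SQLException {
        try (Statement s = conn.createStatement()) {
            // EXTERNAL SECURITY DEFINER drives the definersRights branches above:
            // the pushed SQLSessionContext gets the definer as user, no role,
            // and the definer's schema as default schema. EXTERNAL SECURITY
            // INVOKER (the default) inherits user, role and default schema.
            s.executeUpdate(
                "CREATE PROCEDURE maint.purge_logs() "
              + "LANGUAGE JAVA PARAMETER STYLE JAVA MODIFIES SQL DATA "
              + "EXTERNAL NAME 'com.example.Maintenance.purgeLogs' "
              + "EXTERNAL SECURITY DEFINER");
        }
    }
}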