use of org.apache.derby.iapi.sql.conn.LanguageConnectionContext in project derby by apache.
the class CreateTriggerNode method bindStatement.
// accessors
// We inherit the generate() method from DDLStatementNode.
/**
* Bind this CreateTriggerNode. This means doing any static error
* checking that can be done before actually creating the trigger.
*
* @exception StandardException Thrown on error
*/
@Override
public void bindStatement() throws StandardException {
CompilerContext compilerContext = getCompilerContext();
DataDictionary dd = getDataDictionary();
/*
** Grab the current schema. We will use that for
** sps compilation
*/
LanguageConnectionContext lcc = getLanguageConnectionContext();
compSchemaDescriptor = lcc.getDefaultSchema();
/*
** Get and check the schema descriptor for this
** trigger. This check will throw the proper exception
** if someone tries to create a trigger in the SYS
** schema.
*/
triggerSchemaDescriptor = getSchemaDescriptor();
/*
** Get the trigger table.
*/
triggerTableDescriptor = getTableDescriptor(tableName);
// throw an exception if user is attempting to create a trigger on a temporary table
if (isSessionSchema(triggerTableDescriptor.getSchemaDescriptor())) {
throw StandardException.newException(SQLState.LANG_OPERATION_NOT_ALLOWED_ON_SESSION_SCHEMA_TABLES);
}
if (isPrivilegeCollectionRequired()) {
compilerContext.pushCurrentPrivType(Authorizer.TRIGGER_PRIV);
compilerContext.addRequiredTablePriv(triggerTableDescriptor);
compilerContext.popCurrentPrivType();
}
/*
** Regenerates the actionText and actionNode if necessary.
*/
boolean needInternalSQL = bindReferencesClause(dd);
// Get all the names of SQL objects referenced by the triggered
// SQL statement and the WHEN clause. Since some of the TableName
// nodes may be eliminated from the node tree during the bind phase,
// we collect the nodes before the nodes have been bound. The
// names will be used later when we normalize the trigger text
// that will be stored in the system tables.
SortedSet<TableName> actionNames = actionNode.getOffsetOrderedNodes(TableName.class);
SortedSet<TableName> whenNames = (whenClause != null) ? whenClause.getOffsetOrderedNodes(TableName.class) : null;
ProviderList prevAPL = compilerContext.getCurrentAuxiliaryProviderList();
ProviderList apl = new ProviderList();
lcc.pushTriggerTable(triggerTableDescriptor);
try {
compilerContext.setCurrentAuxiliaryProviderList(apl);
/*
** Bind the trigger action and the trigger
** when clause to make sure that they are
** ok. Note that we have already substituted
** in various replacements for OLD/NEW transition
** tables/variables and reparsed if necessary.
*/
if (needInternalSQL)
compilerContext.setReliability(CompilerContext.INTERNAL_SQL_LEGAL);
// Before triggers must not invoke procedures that modify SQL data, so tighten
// the reliability checked when binding the action (e.g. a CALL statement node).
if (isBefore)
compilerContext.setReliability(CompilerContext.MODIFIES_SQL_DATA_PROCEDURE_ILLEGAL);
actionNode.bindStatement();
if (whenClause != null) {
ContextManager cm = getContextManager();
whenClause = whenClause.bindExpression(new FromList(cm), new SubqueryList(cm), new ArrayList<AggregateNode>(0));
// The WHEN clause must be a BOOLEAN expression.
whenClause.checkIsBoolean();
}
} finally {
lcc.popTriggerTable(triggerTableDescriptor);
compilerContext.setCurrentAuxiliaryProviderList(prevAPL);
}
// Qualify identifiers before storing them (DERBY-5901/DERBY-6370).
qualifyNames(actionNames, whenNames);
/*
** Statement is dependent on the TableDescriptor
*/
compilerContext.createDependency(triggerTableDescriptor);
/*
** If there is a list of columns, then no duplicate columns,
** and all columns must be found.
*/
if (triggerCols != null && triggerCols.size() != 0) {
HashSet<String> columnNames = new HashSet<String>();
for (ResultColumn rc : triggerCols) {
if (!columnNames.add(rc.getName())) {
throw StandardException.newException(SQLState.LANG_DUPLICATE_COLUMN_IN_TRIGGER_UPDATE, rc.getName(), triggerName);
}
ColumnDescriptor cd = triggerTableDescriptor.getColumnDescriptor(rc.getName());
if (cd == null) {
throw StandardException.newException(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE, rc.getName(), tableName);
}
}
}
// Throw if the triggered SQL statement or the WHEN clause references a table in the SESSION schema.
if (referencesSessionSchema()) {
throw StandardException.newException(SQLState.LANG_OPERATION_NOT_ALLOWED_ON_SESSION_SCHEMA_TABLES);
}
DependencyManager dm = dd.getDependencyManager();
providerInfo = dm.getPersistentProviderInfos(apl);
dm.clearColumnInfoInProviders(apl);
}
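The bind above brackets its work with lcc.pushTriggerTable(...)/popTriggerTable(...) and a swapped-in auxiliary provider list, restoring both in a finally block. Below is a minimal, self-contained sketch of that save-set-restore idiom; the class, field, and method names are illustrative stand-ins, not Derby APIs.
import java.util.ArrayDeque;
import java.util.Deque;

final class BindScopeDemo {
    private final Deque<String> triggerTables = new ArrayDeque<String>();
    private String currentProviderList = "callers-list";

    void bindWithScopedState(String table) {
        String savedList = currentProviderList;      // remember the caller's list (prevAPL above)
        triggerTables.push(table);                   // analogous to lcc.pushTriggerTable(...)
        try {
            currentProviderList = "temporary-list";  // analogous to setCurrentAuxiliaryProviderList(apl)
            // ... bind the trigger action and WHEN clause here ...
        } finally {
            triggerTables.pop();                     // analogous to lcc.popTriggerTable(...)
            currentProviderList = savedList;         // analogous to restoring prevAPL
        }
    }
}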
use of org.apache.derby.iapi.sql.conn.LanguageConnectionContext in project derby by apache.
the class DMLModStatementNode method parseCheckConstraint.
/**
* Parse a check constraint and turn it into a query tree.
*
* @param checkConstraintText Text of CHECK CONSTRAINT.
* @param td The TableDescriptor for the table the constraint is on.
*
* @return The parsed check constraint as a query tree.
*
* @exception StandardException Thrown on failure
*/
public ValueNode parseCheckConstraint(String checkConstraintText, TableDescriptor td) throws StandardException {
Parser p;
ValueNode checkTree;
LanguageConnectionContext lcc = getLanguageConnectionContext();
/* Get a Statement to pass to the parser */
/* We're all set up to parse. We have to build a compile SQL statement
* before we can parse - we just have a WHERE clause right now.
* So, we goober up a SELECT * FROM table WHERE checkDefs.
*/
String select = "SELECT * FROM " + td.getQualifiedName() + " WHERE " + checkConstraintText;
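// For illustration only (hypothetical table and constraint text): with
// td.getQualifiedName() returning "APP"."EMPLOYEE" and checkConstraintText
// being "SALARY > 0", the synthesized statement would be:
//     SELECT * FROM "APP"."EMPLOYEE" WHERE SALARY > 0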
/*
** Get a new compiler context, so the parsing of the select statement
** doesn't mess up anything in the current context (it could clobber
** the ParameterValueSet, for example).
*/
CompilerContext newCC = lcc.pushCompilerContext();
p = newCC.getParser();
/* Finally, we can call the parser */
// Since this is always nested inside another SQL statement, the topLevel flag
// should be false.
Visitable qt = p.parseStatement(select);
if (SanityManager.DEBUG) {
if (!(qt instanceof CursorNode)) {
SanityManager.THROWASSERT("qt expected to be instanceof CursorNode, not " + qt.getClass().getName());
}
CursorNode cn = (CursorNode) qt;
if (!(cn.getResultSetNode() instanceof SelectNode)) {
SanityManager.THROWASSERT("cn.getResultSetNode() expected to be instanceof SelectNode, not " + cn.getResultSetNode().getClass().getName());
}
}
checkTree = ((SelectNode) ((CursorNode) qt).getResultSetNode()).getWhereClause();
lcc.popCompilerContext(newCC);
return checkTree;
}
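For context, the constraint text that a method like this re-parses at DML compile time comes from ordinary DDL. A minimal JDBC sketch follows; the connection URL, table, and constraint names are illustrative, and derby.jar is assumed to be on the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CheckConstraintDemo {
    public static void main(String[] args) throws Exception {
        // Illustrative embedded-Derby URL and object names; adjust for your environment.
        try (Connection c = DriverManager.getConnection("jdbc:derby:demoDB;create=true");
             Statement s = c.createStatement()) {
            s.executeUpdate("CREATE TABLE EMPLOYEE (ID INT PRIMARY KEY, SALARY INT)");
            // The text inside CHECK (...) is the kind of fragment a method like
            // parseCheckConstraint later wraps into "SELECT * FROM ... WHERE ...".
            s.executeUpdate("ALTER TABLE EMPLOYEE ADD CONSTRAINT CK_SALARY CHECK (SALARY > 0)");
            s.executeUpdate("INSERT INTO EMPLOYEE VALUES (1, 1000)");
        }
    }
}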
use of org.apache.derby.iapi.sql.conn.LanguageConnectionContext in project derby by apache.
the class CreateConstraintConstantAction method executeConstantAction.
// INTERFACE METHODS
/**
* This is the guts of the Execution-time logic for CREATE CONSTRAINT.
* <P>
* A constraint is represented as:
* <UL>
* <LI> ConstraintDescriptor.
* </UL>
* If a backing index is required then the index will
* be created through a CreateIndexConstantAction set up
* by the compiler.
* <BR>
* Dependencies are created as:
* <UL>
* <LI> ConstraintDescriptor depends on all the providers collected
* at compile time and passed into the constructor.
* <LI> For a FOREIGN KEY constraint ConstraintDescriptor depends
* on the ConstraintDescriptor for the referenced constraints
* and the privileges required to create the constraint.
* </UL>
*
* @see ConstraintDescriptor
* @see CreateIndexConstantAction
* @see ConstantAction#executeConstantAction
*
* @exception StandardException Thrown on failure
*/
public void executeConstantAction(Activation activation) throws StandardException {
ConglomerateDescriptor conglomDesc = null;
ConglomerateDescriptor[] conglomDescs = null;
ConstraintDescriptor conDesc = null;
TableDescriptor td = null;
UUID indexId = null;
String uniqueName;
String backingIndexName;
/* RESOLVE - blow off not null constraints for now (and probably for ever) */
if (constraintType == DataDictionary.NOTNULL_CONSTRAINT) {
return;
}
LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
DataDictionary dd = lcc.getDataDictionary();
DependencyManager dm = dd.getDependencyManager();
TransactionController tc = lcc.getTransactionExecute();
cf = lcc.getLanguageConnectionFactory().getClassFactory();
/*
** Inform the data dictionary that we are about to write to it.
** There are several calls to data dictionary "get" methods here
** that might be done in "read" mode in the data dictionary, but
** it seemed safer to do this whole operation in "write" mode.
**
** We tell the data dictionary we're done writing at the end of
** the transaction.
*/
dd.startWriting(lcc);
/* Table gets locked in AlterTableConstantAction */
/*
** If the schema descriptor is null, then
** we must have just read ourselves in.
** So we will get the corresponding schema
** descriptor from the data dictionary.
*/
SchemaDescriptor sd = dd.getSchemaDescriptor(schemaName, tc, true);
/* Try to get the TableDescriptor from
* the Activation. We will go to the
* DD if not there. (It should always be
* there except when in a target.)
*/
td = activation.getDDLTableDescriptor();
if (td == null) {
/* tableId will be non-null if adding a
* constraint to an existing table.
*/
if (tableId != null) {
td = dd.getTableDescriptor(tableId);
} else {
td = dd.getTableDescriptor(tableName, sd, tc);
}
if (td == null) {
throw StandardException.newException(SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tableName);
}
activation.setDDLTableDescriptor(td);
}
/* Generate the UUID for the backing index. This will become the
* constraint's name, if no name was specified.
*/
UUIDFactory uuidFactory = dd.getUUIDFactory();
UUID constrId = uuidFactory.createUUID();
/* Create the index, if there's one for this constraint */
if (indexAction != null) {
if (indexAction.getIndexName() == null) {
/* Set the index name */
backingIndexName = uuidFactory.createUUID().toString();
indexAction.setIndexName(backingIndexName);
} else {
backingIndexName = indexAction.getIndexName();
}
indexAction.setConstraintID(constrId);
/* Create the index */
indexAction.executeConstantAction(activation);
/* Get the conglomerate descriptor for the backing index */
conglomDescs = td.getConglomerateDescriptors();
for (int index = 0; index < conglomDescs.length; index++) {
conglomDesc = conglomDescs[index];
/* Check for conglomerate being an index first, since
* name is null for heap.
*/
if (conglomDesc.isIndex() && backingIndexName.equals(conglomDesc.getConglomerateName())) {
break;
}
}
if (SanityManager.DEBUG) {
SanityManager.ASSERT(conglomDesc != null, "conglomDesc is expected to be non-null after search for backing index");
SanityManager.ASSERT(conglomDesc.isIndex(), "conglomDesc is expected to be indexable after search for backing index");
SanityManager.ASSERT(conglomDesc.getConglomerateName().equals(backingIndexName), "conglomDesc name expected to be the same as backing index name after search for backing index");
}
indexId = conglomDesc.getUUID();
}
boolean[] defaults = new boolean[] { ConstraintDefinitionNode.DEFERRABLE_DEFAULT, ConstraintDefinitionNode.INITIALLY_DEFERRED_DEFAULT, ConstraintDefinitionNode.ENFORCED_DEFAULT };
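// By position: characteristics[0] = deferrable, characteristics[1] = initially deferred,
// characteristics[2] = enforced (mirroring the defaults array above).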
for (int i = 0; i < characteristics.length; i++) {
if (characteristics[i] != defaults[i]) {
dd.checkVersion(DataDictionary.DD_VERSION_DERBY_10_11, "DEFERRED CONSTRAINTS");
if (constraintType == DataDictionary.NOTNULL_CONSTRAINT || !characteristics[2] /* not enforced */) {
// Remove when feature DERBY-532 is completed
if (!PropertyUtil.getSystemProperty("derby.constraintsTesting", "false").equals("true")) {
throw StandardException.newException(SQLState.NOT_IMPLEMENTED, "non-default constraint characteristics");
}
}
}
}
/* Now, let's create the constraint descriptor */
DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
switch(constraintType) {
case DataDictionary.PRIMARYKEY_CONSTRAINT:
conDesc = ddg.newPrimaryKeyConstraintDescriptor(td, constraintName,
        characteristics[0],            // deferrable
        characteristics[1],            // initiallyDeferred
        genColumnPositions(td, false), // column positions (int[])
        constrId, indexId, sd,
        characteristics[2],            // enforced
        0);                            // referenceCount
dd.addConstraintDescriptor(conDesc, tc);
break;
case DataDictionary.UNIQUE_CONSTRAINT:
conDesc = ddg.newUniqueConstraintDescriptor(td, constraintName,
        characteristics[0],            // deferrable
        characteristics[1],            // initiallyDeferred
        genColumnPositions(td, false), // column positions (int[])
        constrId, indexId, sd,
        characteristics[2],            // enforced
        0);                            // referenceCount
dd.addConstraintDescriptor(conDesc, tc);
break;
case DataDictionary.CHECK_CONSTRAINT:
conDesc = ddg.newCheckConstraintDescriptor(td, constraintName,
        characteristics[0],  // deferrable
        characteristics[1],  // initiallyDeferred
        constrId, constraintText,
        new ReferencedColumnsDescriptorImpl(genColumnPositions(td, false)), // referenced columns (int[])
        sd,
        characteristics[2]); // enforced
dd.addConstraintDescriptor(conDesc, tc);
storeConstraintDependenciesOnPrivileges(activation, conDesc, null, providerInfo);
break;
case DataDictionary.FOREIGNKEY_CONSTRAINT:
ReferencedKeyConstraintDescriptor referencedConstraint = DDUtils.locateReferencedConstraint(dd, td, constraintName, columnNames, otherConstraintInfo);
DDUtils.validateReferentialActions(dd, td, constraintName, otherConstraintInfo, columnNames);
conDesc = ddg.newForeignKeyConstraintDescriptor(td, constraintName,
        characteristics[0],            // deferrable
        characteristics[1],            // initiallyDeferred
        genColumnPositions(td, false), // column positions (int[])
        constrId, indexId, sd, referencedConstraint,
        characteristics[2],            // enforced
        otherConstraintInfo.getReferentialActionDeleteRule(),
        otherConstraintInfo.getReferentialActionUpdateRule());
// try to create the constraint first, because it
// is expensive to do the bulk check, find obvious
// errors first
dd.addConstraintDescriptor(conDesc, tc);
/* No need to do check if we're creating a
* table.
*/
if ((!forCreateTable) && dd.activeConstraint(conDesc)) {
validateFKConstraint(activation, tc, dd, (ForeignKeyConstraintDescriptor) conDesc, referencedConstraint, ((CreateIndexConstantAction) indexAction).getIndexTemplateRow());
}
/* Create stored dependency on the referenced constraint */
dm.addDependency(conDesc, referencedConstraint, lcc.getContextManager());
// store the constraint's dependency on REFERENCES privileges in the dependency system
storeConstraintDependenciesOnPrivileges(activation, conDesc, referencedConstraint.getTableId(), providerInfo);
break;
case DataDictionary.MODIFY_CONSTRAINT:
throw StandardException.newException(SQLState.NOT_IMPLEMENTED, "ALTER CONSTRAINT");
default:
if (SanityManager.DEBUG) {
SanityManager.THROWASSERT("contraintType (" + constraintType + ") has unexpected value");
}
break;
}
/* Create stored dependencies for each provider */
if (providerInfo != null) {
for (int ix = 0; ix < providerInfo.length; ix++) {
Provider provider = null;
/* We should always be able to find the Provider */
provider = (Provider) providerInfo[ix].getDependableFinder().getDependable(dd, providerInfo[ix].getObjectId());
dm.addDependency(conDesc, provider, lcc.getContextManager());
}
}
/* Finally, invalidate off of the table descriptor(s)
* to ensure that any dependent statements get
* re-compiled.
*/
if (!forCreateTable) {
dm.invalidateFor(td, DependencyManager.CREATE_CONSTRAINT, lcc);
}
if (constraintType == DataDictionary.FOREIGNKEY_CONSTRAINT) {
if (SanityManager.DEBUG) {
SanityManager.ASSERT(conDesc != null, "conDesc expected to be non-null");
if (!(conDesc instanceof ForeignKeyConstraintDescriptor)) {
SanityManager.THROWASSERT("conDesc expected to be instance of ForeignKeyConstraintDescriptor, not " + conDesc.getClass().getName());
}
}
dm.invalidateFor(((ForeignKeyConstraintDescriptor) conDesc).getReferencedConstraint().getTableDescriptor(), DependencyManager.CREATE_CONSTRAINT, lcc);
}
this.constraintId = constrId;
}
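The characteristics check above compares the supplied boolean array against compile-time defaults position by position (deferrable, initially deferred, enforced) and gates any non-default combination behind a dictionary version check. A standalone sketch of that comparison follows; the default values are placeholders for illustration, not taken from ConstraintDefinitionNode.
public class ConstraintCharacteristicsDemo {
    // Placeholder defaults; Derby's actual defaults live in ConstraintDefinitionNode.
    static final boolean DEFERRABLE_DEFAULT = false;
    static final boolean INITIALLY_DEFERRED_DEFAULT = false;
    static final boolean ENFORCED_DEFAULT = true;

    static boolean hasNonDefaultCharacteristics(boolean[] characteristics) {
        boolean[] defaults = { DEFERRABLE_DEFAULT, INITIALLY_DEFERRED_DEFAULT, ENFORCED_DEFAULT };
        for (int i = 0; i < characteristics.length; i++) {
            if (characteristics[i] != defaults[i]) {
                return true; // the real code would then apply the version check / feature gate
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(hasNonDefaultCharacteristics(new boolean[] { false, false, true })); // false
        System.out.println(hasNonDefaultCharacteristics(new boolean[] { true, false, true }));  // true
    }
}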
use of org.apache.derby.iapi.sql.conn.LanguageConnectionContext in project derby by apache.
the class CreateSequenceConstantAction method executeConstantAction.
// INTERFACE METHODS
/**
* This is the guts of the Execution-time logic for CREATE SEQUENCE.
*
* @throws org.apache.derby.shared.common.error.StandardException
* Thrown on failure
* @see org.apache.derby.iapi.sql.execute.ConstantAction#executeConstantAction
*/
public void executeConstantAction(Activation activation) throws StandardException {
SchemaDescriptor schemaDescriptor;
LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
DataDictionary dd = lcc.getDataDictionary();
TransactionController tc = lcc.getTransactionExecute();
DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
dd.startWriting(lcc);
schemaDescriptor = DDLConstantAction.getSchemaDescriptorForCreate(dd, activation, _schemaName);
//
// Check if this sequence already exists. If it does, throw.
//
SequenceDescriptor seqDef = dd.getSequenceDescriptor(schemaDescriptor, _sequenceName);
if (seqDef != null) {
throw StandardException.newException(SQLState.LANG_OBJECT_ALREADY_EXISTS, seqDef.getDescriptorType(), _sequenceName);
}
seqDef = ddg.newSequenceDescriptor(schemaDescriptor, dd.getUUIDFactory().createUUID(), _sequenceName, _dataType,
        _initialValue, // current value
        _initialValue, _minValue, _maxValue, _stepValue,
        _cycle);       // whether the sequence can wrap around
dd.addDescriptor(seqDef,
        null,  // parent
        DataDictionary.SYSSEQUENCES_CATALOG_NUM,
        false, // duplicatesAllowed
        tc);
}
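The DDL that drives this constant action is an ordinary CREATE SEQUENCE statement. A minimal JDBC sketch follows; the connection URL and sequence name are illustrative, and derby.jar is assumed to be on the classpath.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateSequenceDemo {
    public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection("jdbc:derby:demoDB;create=true");
             Statement s = c.createStatement()) {
            // Executing this DDL is what ultimately reaches
            // CreateSequenceConstantAction.executeConstantAction.
            s.executeUpdate("CREATE SEQUENCE ORDER_SEQ AS INT START WITH 1 INCREMENT BY 1 NO CYCLE");
        }
    }
}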
use of org.apache.derby.iapi.sql.conn.LanguageConnectionContext in project derby by apache.
the class CreateTableConstantAction method executeConstantAction.
// INTERFACE METHODS
/**
* This is the guts of the Execution-time logic for CREATE TABLE.
*
* @see ConstantAction#executeConstantAction
*
* @exception StandardException Thrown on failure
*/
public void executeConstantAction(Activation activation) throws StandardException {
TableDescriptor td;
UUID toid;
SchemaDescriptor schemaDescriptor;
ColumnDescriptor columnDescriptor;
ExecRow template;
LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
DataDictionary dd = lcc.getDataDictionary();
DependencyManager dm = dd.getDependencyManager();
TransactionController tc = lcc.getTransactionExecute();
/* Mark the activation as being for create table */
activation.setForCreateTable();
// setup for create conglomerate call:
// o create row template to tell the store what type of rows this
// table holds.
// o create array of collation id's to tell collation id of each
// column in table.
template = RowUtil.getEmptyValueRow(columnInfo.length, lcc);
int[] collation_ids = new int[columnInfo.length];
for (int ix = 0; ix < columnInfo.length; ix++) {
ColumnInfo col_info = columnInfo[ix];
if (col_info.defaultValue != null) {
/* If there is a default value, use it, otherwise use null */
template.setColumn(ix + 1, col_info.defaultValue);
} else {
template.setColumn(ix + 1, col_info.dataType.getNull());
}
// get collation info for each column.
collation_ids[ix] = col_info.dataType.getCollationType();
}
/* create the conglomerate to hold the table's rows
* RESOLVE - If we ever have a conglomerate creator
* that lets us specify the conglomerate number then
* we will need to handle it here.
*/
long conglomId = tc.createConglomerate(
        "heap",                 // we're requesting a heap conglomerate
        template.getRowArray(), // row template
        null,                   // column sort order - not required for heap
        collation_ids,
        properties,             // properties
        tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE
                ? (TransactionController.IS_TEMPORARY | TransactionController.IS_KEPT)
                : TransactionController.IS_DEFAULT);
/*
** Inform the data dictionary that we are about to write to it.
** There are several calls to data dictionary "get" methods here
** that might be done in "read" mode in the data dictionary, but
** it seemed safer to do this whole operation in "write" mode.
**
** We tell the data dictionary we're done writing at the end of
** the transaction.
*/
if (tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE)
dd.startWriting(lcc);
SchemaDescriptor sd;
if (tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE)
sd = dd.getSchemaDescriptor(schemaName, tc, true);
else
sd = DDLConstantAction.getSchemaDescriptorForCreate(dd, activation, schemaName);
//
// Create a new table descriptor.
//
DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
if (tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) {
td = ddg.newTableDescriptor(tableName, sd, tableType, lockGranularity);
dd.addDescriptor(td, sd, DataDictionary.SYSTABLES_CATALOG_NUM, false, tc);
} else {
td = ddg.newTableDescriptor(tableName, sd, tableType, onCommitDeleteRows, onRollbackDeleteRows);
td.setUUID(dd.getUUIDFactory().createUUID());
}
toid = td.getUUID();
// Save the TableDescriptor off in the Activation
activation.setDDLTableDescriptor(td);
/* NOTE: We must write the columns out to the system
* tables before any of the conglomerates, including
* the heap, since we read the columns before the
* conglomerates when building a TableDescriptor.
* This will hopefully reduce the probability of
* a deadlock involving those system tables.
*/
// for each column, stuff system.column
int index = 1;
ColumnDescriptor[] cdlArray = new ColumnDescriptor[columnInfo.length];
for (int ix = 0; ix < columnInfo.length; ix++) {
UUID defaultUUID = columnInfo[ix].newDefaultUUID;
/* Generate a UUID for the default, if one exists
* and there is no default id yet.
*/
if (columnInfo[ix].defaultInfo != null && defaultUUID == null) {
defaultUUID = dd.getUUIDFactory().createUUID();
}
if (columnInfo[ix].autoincInc != 0) { // dealing with autoinc column
columnDescriptor = new ColumnDescriptor(columnInfo[ix].name, index++, columnInfo[ix].dataType, columnInfo[ix].defaultValue, columnInfo[ix].defaultInfo, td, defaultUUID, columnInfo[ix].autoincStart, columnInfo[ix].autoincInc, columnInfo[ix].autoinc_create_or_modify_Start_Increment, columnInfo[ix].autoincCycle);
// On 10.11+ dictionaries an identity (autoinc) column is backed by a system sequence, so create it here.
if (dd.checkVersion(DataDictionary.DD_VERSION_DERBY_10_11, null)) {
CreateSequenceConstantAction csca = makeCSCA(columnInfo[ix], TableDescriptor.makeSequenceName(toid));
csca.executeConstantAction(activation);
}
} else {
columnDescriptor = new ColumnDescriptor(columnInfo[ix].name, index++, columnInfo[ix].dataType, columnInfo[ix].defaultValue, columnInfo[ix].defaultInfo, td, defaultUUID, columnInfo[ix].autoincStart, columnInfo[ix].autoincInc, columnInfo[ix].autoincCycle);
}
cdlArray[ix] = columnDescriptor;
}
if (tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) {
dd.addDescriptorArray(cdlArray, td, DataDictionary.SYSCOLUMNS_CATALOG_NUM, false, tc);
}
// now add the column descriptors to the table.
ColumnDescriptorList cdl = td.getColumnDescriptorList();
for (int i = 0; i < cdlArray.length; i++) cdl.add(cdlArray[i]);
//
// Create a conglomerate descriptor with the conglomId filled in and
// add it.
//
// RESOLVE: Get information from the conglomerate descriptor which
// was provided.
//
ConglomerateDescriptor cgd = ddg.newConglomerateDescriptor(conglomId, null, false, null, false, null, toid, sd.getUUID());
if (tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) {
dd.addDescriptor(cgd, sd, DataDictionary.SYSCONGLOMERATES_CATALOG_NUM, false, tc);
}
// add the newly added conglomerate to the table descriptor
ConglomerateDescriptorList conglomList = td.getConglomerateDescriptorList();
conglomList.add(cgd);
/* Create any constraints */
if (constraintActions != null) {
/*
** Do everything but FK constraints first,
** then FK constraints on 2nd pass.
*/
for (int conIndex = 0; conIndex < constraintActions.length; conIndex++) {
// skip fks
if (!constraintActions[conIndex].isForeignKeyConstraint()) {
constraintActions[conIndex].executeConstantAction(activation);
}
}
for (int conIndex = 0; conIndex < constraintActions.length; conIndex++) {
// only foreign keys
if (constraintActions[conIndex].isForeignKeyConstraint()) {
constraintActions[conIndex].executeConstantAction(activation);
}
}
}
// Add any dependencies arising from the individual column definitions.
for (int ix = 0; ix < columnInfo.length; ix++) {
addColumnDependencies(lcc, dd, td, columnInfo[ix]);
}
//
// The table itself can depend on the user defined types of its columns.
//
adjustUDTDependencies(lcc, dd, td, columnInfo, false);
if (tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) {
lcc.addDeclaredGlobalTempTable(td);
}
// Indicate that the CREATE TABLE statement itself depends on the
// table it is creating. Normally such statement dependencies are
// added during compilation, but here we have a bootstrapping issue
// because the table doesn't exist until the CREATE TABLE statement
// has been executed, so we had to defer the creation of this
// dependency until now. (DERBY-4479)
dd.getDependencyManager().addDependency(activation.getPreparedStatement(), td, lcc.getContextManager());
}
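For reference, the two branches above (permanent table vs. session-private global temporary table) are driven by DDL like the following. A minimal JDBC sketch; the connection URL and object names are illustrative, derby.jar is assumed to be on the classpath, and the exact temporary-table clauses should be checked against the Derby reference manual.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateTableDemo {
    public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection("jdbc:derby:demoDB;create=true");
             Statement s = c.createStatement()) {
            // Permanent table: catalogued in SYSTABLES/SYSCOLUMNS/SYSCONGLOMERATES.
            s.executeUpdate("CREATE TABLE ORDERS (ID INT PRIMARY KEY, AMOUNT DECIMAL(10,2))");
            // Global temporary table: takes the GLOBAL_TEMPORARY_TABLE_TYPE branch,
            // which skips the system catalogs and registers the table with the LCC.
            s.executeUpdate("DECLARE GLOBAL TEMPORARY TABLE SESSION.TEMP_ORDERS "
                    + "(ID INT, AMOUNT DECIMAL(10,2)) ON COMMIT DELETE ROWS NOT LOGGED");
        }
    }
}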