Use of org.apache.derby.iapi.sql.dictionary.DataDictionary in project derby by apache.
The class GenericLanguageConnectionContext, method roleIsSettable.
/**
* @see LanguageConnectionContext#roleIsSettable(Activation a, String role)
*/
public boolean roleIsSettable(Activation a, String role) throws StandardException {
    DataDictionary dd = getDataDictionary();
    String dbo = dd.getAuthorizationDatabaseOwner();
    RoleGrantDescriptor grantDesc;
    String currentUser = getCurrentUserId(a);

    if (currentUser.equals(dbo)) {
        // The database owner may set any role that has been defined.
        grantDesc = dd.getRoleDefinitionDescriptor(role);
    } else {
        // Was the role granted to the current user directly?
        grantDesc = dd.getRoleGrantDescriptor(role, currentUser, dbo);
        if (grantDesc == null) {
            // or if not, via PUBLIC?
            grantDesc = dd.getRoleGrantDescriptor(role, Authorizer.PUBLIC_AUTHORIZATION_ID, dbo);
        }
    }
    return grantDesc != null;
}
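For context, here is a hedged sketch of the SQL-level behaviour this method backs: SET ROLE succeeds only when the role was granted to the current user or to PUBLIC, or when the current user is the database owner. The connection URL, user names, and role name below are made-up placeholders, and the sketch assumes a database already configured with authentication and SQL authorization (derby.database.sqlAuthorization=true); it is an illustration, not Derby test code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical illustration of roleIsSettable from the application side.
public class RoleIsSettableSketch {
    public static void main(String[] args) throws SQLException {
        // The database owner defines a role and grants it to one user.
        try (Connection dbo = DriverManager.getConnection(
                "jdbc:derby:rolesDb", "owner", "ownerpw");
             Statement s = dbo.createStatement()) {
            s.execute("CREATE ROLE reader");
            s.execute("GRANT reader TO alice");
        }

        // alice holds a grant, so roleIsSettable returns true and SET ROLE works.
        try (Connection alice = DriverManager.getConnection(
                "jdbc:derby:rolesDb", "alice", "alicepw");
             Statement s = alice.createStatement()) {
            s.execute("SET ROLE reader");
        }

        // bob has no grant, directly or via PUBLIC, so SET ROLE fails.
        try (Connection bob = DriverManager.getConnection(
                "jdbc:derby:rolesDb", "bob", "bobpw");
             Statement s = bob.createStatement()) {
            s.execute("SET ROLE reader");
        } catch (SQLException expected) {
            System.out.println("SET ROLE rejected: " + expected.getSQLState());
        }
    }
}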
Use of org.apache.derby.iapi.sql.dictionary.DataDictionary in project derby by apache.
The class GenericLanguageConnectionContext, method autoincrementFlushCache.
/**
* Flush the cache of autoincrement values being kept by the lcc.
* This will result in the autoincrement values being written to the
* SYSCOLUMNS table as well as recorded in the mapping used by
* lastAutoincrementValue.
*
* @param tableUUID the table whose autoincrement counters should be flushed
*
* @exception StandardException thrown on error.
* @see LanguageConnectionContext#lastAutoincrementValue
* @see GenericLanguageConnectionContext#lastAutoincrementValue
*/
public void autoincrementFlushCache(UUID tableUUID) throws StandardException {
    if (autoincrementCacheHashtable == null) {
        return;
    }
    if (autoincrementHT == null) {
        autoincrementHT = new HashMap<String, Long>();
    }

    DataDictionary dd = getDataDictionary();
    for (Iterator<String> it = autoincrementCacheHashtable.keySet().iterator(); it.hasNext(); ) {
        String key = it.next();
        AutoincrementCounter aic = autoincrementCacheHashtable.get(key);
        Long value = aic.getCurrentValue();
        aic.flushToDisk(getTransactionExecute(), dd, tableUUID);
        if (value != null) {
            autoincrementHT.put(key, value);
        }
    }
    autoincrementCacheHashtable.clear();
}
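To make the interaction with lastAutoincrementValue concrete, below is a minimal sketch of a hypothetical internal caller (not Derby source). It assumes access to the LanguageConnectionContext and the table's UUID; the schema, table, and column names are made up, and the StandardException import matches newer releases (older releases use org.apache.derby.iapi.error).

import org.apache.derby.catalog.UUID;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.shared.common.error.StandardException;

// Hypothetical helper, for illustration only.
class AutoincrementFlushSketch {
    void reportLastValue(LanguageConnectionContext lcc, UUID tableUUID)
            throws StandardException {
        // Write the cached counters back to SYSCOLUMNS; their last values
        // are remembered in the autoincrementHT mapping.
        lcc.autoincrementFlushCache(tableUUID);

        // lastAutoincrementValue consults that mapping (made-up identifiers).
        Long last = lcc.lastAutoincrementValue("APP", "ORDERS", "ORDER_ID");
        if (last != null) {
            System.out.println("last autoincrement value: " + last);
        }
    }
}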
Use of org.apache.derby.iapi.sql.dictionary.DataDictionary in project derby by apache.
The class GenericLanguageConnectionContext, method finishDDTransaction.
/**
* Finish the data dictionary transaction, if any.
*
* @exception StandardException Thrown on error
*/
private void finishDDTransaction() throws StandardException {
    /* Was the data dictionary put into write mode? */
    if (ddWriteMode) {
        DataDictionary dd = getDataDictionary();

        /* Tell the data dictionary that the transaction is finished */
        dd.transactionFinished();

        /* The data dictionary isn't in write mode any more */
        ddWriteMode = false;
    }
}
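The counterpart of this call is dd.startWriting(lcc), which the constant actions further down use before they update the system catalogs and which puts the dictionary into the write mode tracked by ddWriteMode. A rough sketch of the protocol as a whole (hypothetical helper, not Derby source; only calls that appear in the snippets on this page are used):

import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.shared.common.error.StandardException;

// Hypothetical outline of the data dictionary write-mode protocol.
class DataDictionaryWriteSketch {
    void updateCatalogs(LanguageConnectionContext lcc) throws StandardException {
        DataDictionary dd = lcc.getDataDictionary();

        // Put the data dictionary into write mode for this transaction.
        dd.startWriting(lcc);

        // ... catalog updates such as dd.addDescriptor(...) go here ...

        // When the transaction ends, the connection context's
        // finishDDTransaction() calls dd.transactionFinished() and
        // clears its ddWriteMode flag, taking the dictionary out of
        // write mode.
    }
}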
Use of org.apache.derby.iapi.sql.dictionary.DataDictionary in project derby by apache.
The class CreateConstraintConstantAction, method executeConstantAction.
// INTERFACE METHODS
/**
* This is the guts of the Execution-time logic for CREATE CONSTRAINT.
* <P>
* A constraint is represented as:
* <UL>
* <LI> ConstraintDescriptor.
* </UL>
* If a backing index is required then the index will
* be created through a CreateIndexConstantAction set up
* by the compiler.
* <BR>
* Dependencies are created as:
* <UL>
* <LI> ConstraintDescriptor depends on all the providers collected
* at compile time and passed into the constructor.
* <LI> For a FOREIGN KEY constraint ConstraintDescriptor depends
* on the ConstraintDescriptor for the referenced constraints
* and the privileges required to create the constraint.
* </UL>
*
* @see ConstraintDescriptor
* @see CreateIndexConstantAction
* @see ConstantAction#executeConstantAction
*
* @exception StandardException Thrown on failure
*/
public void executeConstantAction(Activation activation) throws StandardException {
    ConglomerateDescriptor conglomDesc = null;
    ConglomerateDescriptor[] conglomDescs = null;
    ConstraintDescriptor conDesc = null;
    TableDescriptor td = null;
    UUID indexId = null;
    String uniqueName;
    String backingIndexName;

    /* RESOLVE - blow off not null constraints for now (and probably for ever) */
    if (constraintType == DataDictionary.NOTNULL_CONSTRAINT) {
        return;
    }

    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    DependencyManager dm = dd.getDependencyManager();
    TransactionController tc = lcc.getTransactionExecute();
    cf = lcc.getLanguageConnectionFactory().getClassFactory();

    /*
    ** Inform the data dictionary that we are about to write to it.
    ** There are several calls to data dictionary "get" methods here
    ** that might be done in "read" mode in the data dictionary, but
    ** it seemed safer to do this whole operation in "write" mode.
    **
    ** We tell the data dictionary we're done writing at the end of
    ** the transaction.
    */
    dd.startWriting(lcc);
    /* Table gets locked in AlterTableConstantAction */

    /*
    ** If the schema descriptor is null, then
    ** we must have just read ourselves in.
    ** So we will get the corresponding schema
    ** descriptor from the data dictionary.
    */
    SchemaDescriptor sd = dd.getSchemaDescriptor(schemaName, tc, true);

    /* Try to get the TableDescriptor from
     * the Activation. We will go to the
     * DD if not there. (It should always be
     * there except when in a target.)
     */
    td = activation.getDDLTableDescriptor();
    if (td == null) {
        /* tableId will be non-null if adding a
         * constraint to an existing table.
         */
        if (tableId != null) {
            td = dd.getTableDescriptor(tableId);
        } else {
            td = dd.getTableDescriptor(tableName, sd, tc);
        }
        if (td == null) {
            throw StandardException.newException(SQLState.LANG_TABLE_NOT_FOUND_DURING_EXECUTION, tableName);
        }
        activation.setDDLTableDescriptor(td);
    }

    /* Generate the UUID for the backing index. This will become the
     * constraint's name, if no name was specified.
     */
    UUIDFactory uuidFactory = dd.getUUIDFactory();
    UUID constrId = uuidFactory.createUUID();
    /* Create the index, if there's one for this constraint */
    if (indexAction != null) {
        if (indexAction.getIndexName() == null) {
            /* Set the index name */
            backingIndexName = uuidFactory.createUUID().toString();
            indexAction.setIndexName(backingIndexName);
        } else {
            backingIndexName = indexAction.getIndexName();
        }
        indexAction.setConstraintID(constrId);

        /* Create the index */
        indexAction.executeConstantAction(activation);

        /* Get the conglomerate descriptor for the backing index */
        conglomDescs = td.getConglomerateDescriptors();
        for (int index = 0; index < conglomDescs.length; index++) {
            conglomDesc = conglomDescs[index];
            /* Check for conglomerate being an index first, since
             * name is null for heap.
             */
            if (conglomDesc.isIndex() && backingIndexName.equals(conglomDesc.getConglomerateName())) {
                break;
            }
        }

        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(conglomDesc != null,
                    "conglomDesc is expected to be non-null after search for backing index");
            SanityManager.ASSERT(conglomDesc.isIndex(),
                    "conglomDesc is expected to be indexable after search for backing index");
            SanityManager.ASSERT(conglomDesc.getConglomerateName().equals(backingIndexName),
                    "conglomDesc name expected to be the same as backing index name after search for backing index");
        }
        indexId = conglomDesc.getUUID();
    }
    boolean[] defaults = new boolean[] {
        ConstraintDefinitionNode.DEFERRABLE_DEFAULT,
        ConstraintDefinitionNode.INITIALLY_DEFERRED_DEFAULT,
        ConstraintDefinitionNode.ENFORCED_DEFAULT
    };
    for (int i = 0; i < characteristics.length; i++) {
        if (characteristics[i] != defaults[i]) {
            dd.checkVersion(DataDictionary.DD_VERSION_DERBY_10_11, "DEFERRED CONSTRAINTS");
            if (constraintType == DataDictionary.NOTNULL_CONSTRAINT
                    || !characteristics[2] /* not enforced */) {
                // Remove when feature DERBY-532 is completed
                if (!PropertyUtil.getSystemProperty("derby.constraintsTesting", "false").equals("true")) {
                    throw StandardException.newException(SQLState.NOT_IMPLEMENTED, "non-default constraint characteristics");
                }
            }
        }
    }
    /* Now, let's create the constraint descriptor */
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();
    switch (constraintType) {
        case DataDictionary.PRIMARYKEY_CONSTRAINT:
            conDesc = ddg.newPrimaryKeyConstraintDescriptor(
                    td,
                    constraintName,
                    characteristics[0],              // deferable
                    characteristics[1],              // initiallyDeferred
                    genColumnPositions(td, false),   // int[] column positions
                    constrId,
                    indexId,
                    sd,
                    characteristics[2],              // enforced
                    0);                              // referenceCount
            dd.addConstraintDescriptor(conDesc, tc);
            break;

        case DataDictionary.UNIQUE_CONSTRAINT:
            conDesc = ddg.newUniqueConstraintDescriptor(
                    td,
                    constraintName,
                    characteristics[0],              // deferable
                    characteristics[1],              // initiallyDeferred
                    genColumnPositions(td, false),   // int[] column positions
                    constrId,
                    indexId,
                    sd,
                    characteristics[2],              // enforced
                    0);                              // referenceCount
            dd.addConstraintDescriptor(conDesc, tc);
            break;

        case DataDictionary.CHECK_CONSTRAINT:
            conDesc = ddg.newCheckConstraintDescriptor(
                    td,
                    constraintName,
                    characteristics[0],              // deferable
                    characteristics[1],              // initiallyDeferred
                    constrId,
                    constraintText,
                    new ReferencedColumnsDescriptorImpl(genColumnPositions(td, false)),  // int[] column positions
                    sd,
                    characteristics[2]);             // enforced
            dd.addConstraintDescriptor(conDesc, tc);
            storeConstraintDependenciesOnPrivileges(activation, conDesc, null, providerInfo);
            break;

        case DataDictionary.FOREIGNKEY_CONSTRAINT:
            ReferencedKeyConstraintDescriptor referencedConstraint =
                    DDUtils.locateReferencedConstraint(dd, td, constraintName, columnNames, otherConstraintInfo);
            DDUtils.validateReferentialActions(dd, td, constraintName, otherConstraintInfo, columnNames);
            conDesc = ddg.newForeignKeyConstraintDescriptor(
                    td,
                    constraintName,
                    characteristics[0],              // deferable
                    characteristics[1],              // initiallyDeferred
                    genColumnPositions(td, false),   // int[] column positions
                    constrId,
                    indexId,
                    sd,
                    referencedConstraint,
                    characteristics[2],              // enforced
                    otherConstraintInfo.getReferentialActionDeleteRule(),
                    otherConstraintInfo.getReferentialActionUpdateRule());
            // Try to create the constraint first, because it is expensive
            // to do the bulk check; find obvious errors first.
            dd.addConstraintDescriptor(conDesc, tc);

            /* No need to do check if we're creating a
             * table.
             */
            if ((!forCreateTable) && dd.activeConstraint(conDesc)) {
                validateFKConstraint(activation, tc, dd,
                        (ForeignKeyConstraintDescriptor) conDesc,
                        referencedConstraint,
                        ((CreateIndexConstantAction) indexAction).getIndexTemplateRow());
            }

            /* Create stored dependency on the referenced constraint */
            dm.addDependency(conDesc, referencedConstraint, lcc.getContextManager());

            // Store the constraint's dependency on REFERENCES privileges in the dependency system.
            storeConstraintDependenciesOnPrivileges(activation, conDesc, referencedConstraint.getTableId(), providerInfo);
            break;

        case DataDictionary.MODIFY_CONSTRAINT:
            throw StandardException.newException(SQLState.NOT_IMPLEMENTED, "ALTER CONSTRAINT");

        default:
            if (SanityManager.DEBUG) {
                SanityManager.THROWASSERT("constraintType (" + constraintType + ") has unexpected value");
            }
            break;
    }
    /* Create stored dependencies for each provider */
    if (providerInfo != null) {
        for (int ix = 0; ix < providerInfo.length; ix++) {
            /* We should always be able to find the Provider */
            Provider provider = (Provider) providerInfo[ix].getDependableFinder()
                    .getDependable(dd, providerInfo[ix].getObjectId());
            dm.addDependency(conDesc, provider, lcc.getContextManager());
        }
    }

    /* Finally, invalidate off of the table descriptor(s)
     * to ensure that any dependent statements get
     * re-compiled.
     */
    if (!forCreateTable) {
        dm.invalidateFor(td, DependencyManager.CREATE_CONSTRAINT, lcc);
    }
    if (constraintType == DataDictionary.FOREIGNKEY_CONSTRAINT) {
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(conDesc != null, "conDesc expected to be non-null");
            if (!(conDesc instanceof ForeignKeyConstraintDescriptor)) {
                SanityManager.THROWASSERT("conDesc expected to be instance of ForeignKeyConstraintDescriptor, not "
                        + conDesc.getClass().getName());
            }
        }
        dm.invalidateFor(((ForeignKeyConstraintDescriptor) conDesc).getReferencedConstraint().getTableDescriptor(),
                DependencyManager.CREATE_CONSTRAINT, lcc);
    }
    this.constraintId = constrId;
}
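To tie this to user-visible DDL, here is a hedged JDBC sketch of statements that flow through this constant action; the database, table, and constraint names are made up, and this is an illustration rather than Derby test code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical demo of DDL handled by CreateConstraintConstantAction.
public class CreateConstraintSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection c = DriverManager.getConnection(
                "jdbc:derby:memory:consDb;create=true");
             Statement s = c.createStatement()) {

            s.execute("CREATE TABLE parent (id INT NOT NULL PRIMARY KEY)");
            s.execute("CREATE TABLE child (pid INT)");

            // Adding a FOREIGN KEY to an existing table: the action creates
            // the backing index, validates existing rows via
            // validateFKConstraint, and records a dependency on the
            // referenced PRIMARY KEY constraint.
            s.execute("ALTER TABLE child ADD CONSTRAINT child_fk "
                    + "FOREIGN KEY (pid) REFERENCES parent (id)");

            // Non-default characteristics (DEFERRABLE, INITIALLY DEFERRED,
            // NOT ENFORCED) go through the checkVersion and
            // derby.constraintsTesting logic shown above.
        }
    }
}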
Use of org.apache.derby.iapi.sql.dictionary.DataDictionary in project derby by apache.
The class CreateSequenceConstantAction, method executeConstantAction.
// INTERFACE METHODS
/**
* This is the guts of the Execution-time logic for CREATE SEQUENCE.
*
* @throws org.apache.derby.shared.common.error.StandardException
* Thrown on failure
* @see org.apache.derby.iapi.sql.execute.ConstantAction#executeConstantAction
*/
public void executeConstantAction(Activation activation) throws StandardException {
    SchemaDescriptor schemaDescriptor;
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionary dd = lcc.getDataDictionary();
    TransactionController tc = lcc.getTransactionExecute();
    DataDescriptorGenerator ddg = dd.getDataDescriptorGenerator();

    dd.startWriting(lcc);
    schemaDescriptor = DDLConstantAction.getSchemaDescriptorForCreate(dd, activation, _schemaName);

    //
    // Check if this sequence already exists. If it does, throw.
    //
    SequenceDescriptor seqDef = dd.getSequenceDescriptor(schemaDescriptor, _sequenceName);
    if (seqDef != null) {
        throw StandardException.newException(SQLState.LANG_OBJECT_ALREADY_EXISTS, seqDef.getDescriptorType(), _sequenceName);
    }

    seqDef = ddg.newSequenceDescriptor(
            schemaDescriptor,
            dd.getUUIDFactory().createUUID(),
            _sequenceName,
            _dataType,
            _initialValue,   // current value
            _initialValue,
            _minValue,
            _maxValue,
            _stepValue,
            _cycle);         // whether the sequence can wrap-around

    dd.addDescriptor(
            seqDef,
            null,            // parent
            DataDictionary.SYSSEQUENCES_CATALOG_NUM,
            false,           // duplicatesAllowed
            tc);
}
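And the corresponding user-level DDL for this action, again as a hedged sketch with made-up database and sequence names rather than Derby test code:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical demo of DDL handled by CreateSequenceConstantAction.
public class CreateSequenceSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection c = DriverManager.getConnection(
                "jdbc:derby:memory:seqDb;create=true");
             Statement s = c.createStatement()) {

            // Stored in SYSSEQUENCES via dd.addDescriptor(...) above.
            s.execute("CREATE SEQUENCE order_seq AS BIGINT "
                    + "START WITH 1 INCREMENT BY 1 NO CYCLE");

            // A second CREATE with the same name hits the
            // LANG_OBJECT_ALREADY_EXISTS check.
            try {
                s.execute("CREATE SEQUENCE order_seq");
            } catch (SQLException expected) {
                System.out.println("duplicate rejected: " + expected.getSQLState());
            }
        }
    }
}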