Use of org.apache.derby.iapi.sql.conn.LanguageConnectionFactory in project derby by apache.
The class DataDictionaryImpl, method boot.
/**
* Start-up method for this instance of the data dictionary.
*
* @param startParams The start-up parameters
*
* @exception StandardException Thrown if the module fails to start
*/
public void boot(boolean create, Properties startParams) throws StandardException {
softwareVersion = new DD_Version(this, DataDictionary.DD_VERSION_DERBY_10_15);
startupParameters = startParams;
uuidFactory = getMonitor().getUUIDFactory();
engineType = Monitor.getEngineType(startParams);
// Set the collation type of system schemas before we start loading
// the built-in schemas' SchemaDescriptor(s). This is because each
// SchemaDescriptor will look to the DataDictionary to get the correct
// collation type for itself. We can't load the SchemaDescriptor for the
// SESSION schema just yet because we do not know the collation type for
// user schemas yet. We will know the right collation for user schemas a
// little later in this boot method.
collationTypeOfSystemSchemas = StringDataValue.COLLATION_TYPE_UCS_BASIC;
getBuiltinSystemSchemas();
// REMIND: actually, we're supposed to get the DataValueFactory
// out of the connection context...this is a bit of a shortcut.
// We get the DataValueFactory early in order to help bootstrap the system catalogs.
LanguageConnectionFactory langConnFactory = (LanguageConnectionFactory) bootServiceModule(create, this, LanguageConnectionFactory.MODULE, startParams);
dvf = langConnFactory.getDataValueFactory();
exFactory = (ExecutionFactory) bootServiceModule(create, this, ExecutionFactory.MODULE, startParams);
// initialize the arrays of core and non-core tables
initializeCatalogInfo();
// indicate that we are in the process of booting
booting = true;
// set only if a subclass hasn't overridden this already
if (dataDescriptorGenerator == null) {
dataDescriptorGenerator = new DataDescriptorGenerator(this);
}
if (!create) {
// SYSTABLES
coreInfo[SYSTABLES_CORE_NUM].setHeapConglomerate(getBootParameter(startParams, CFG_SYSTABLES_ID, true));
coreInfo[SYSTABLES_CORE_NUM].setIndexConglomerate(SYSTABLESRowFactory.SYSTABLES_INDEX1_ID, getBootParameter(startParams, CFG_SYSTABLES_INDEX1_ID, true));
coreInfo[SYSTABLES_CORE_NUM].setIndexConglomerate(SYSTABLESRowFactory.SYSTABLES_INDEX2_ID, getBootParameter(startParams, CFG_SYSTABLES_INDEX2_ID, true));
// SYSCOLUMNS
coreInfo[SYSCOLUMNS_CORE_NUM].setHeapConglomerate(getBootParameter(startParams, CFG_SYSCOLUMNS_ID, true));
coreInfo[SYSCOLUMNS_CORE_NUM].setIndexConglomerate(SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID, getBootParameter(startParams, CFG_SYSCOLUMNS_INDEX1_ID, true));
// 2nd syscolumns index added in Xena, hence may not be there
coreInfo[SYSCOLUMNS_CORE_NUM].setIndexConglomerate(SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX2_ID, getBootParameter(startParams, CFG_SYSCOLUMNS_INDEX2_ID, false));
// SYSCONGLOMERATES
coreInfo[SYSCONGLOMERATES_CORE_NUM].setHeapConglomerate(getBootParameter(startParams, CFG_SYSCONGLOMERATES_ID, true));
coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX1_ID, getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX1_ID, true));
coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX2_ID, getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX2_ID, true));
coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX3_ID, getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX3_ID, true));
// SYSSCHEMAS
coreInfo[SYSSCHEMAS_CORE_NUM].setHeapConglomerate(getBootParameter(startParams, CFG_SYSSCHEMAS_ID, true));
coreInfo[SYSSCHEMAS_CORE_NUM].setIndexConglomerate(SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX1_ID, getBootParameter(startParams, CFG_SYSSCHEMAS_INDEX1_ID, true));
coreInfo[SYSSCHEMAS_CORE_NUM].setIndexConglomerate(SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX2_ID, getBootParameter(startParams, CFG_SYSSCHEMAS_INDEX2_ID, true));
}
String value = startParams.getProperty(Property.LANG_TD_CACHE_SIZE);
tdCacheSize = PropertyUtil.intPropertyValue(Property.LANG_TD_CACHE_SIZE, value, 0, Integer.MAX_VALUE, Property.LANG_TD_CACHE_SIZE_DEFAULT);
value = startParams.getProperty(Property.LANG_SPS_CACHE_SIZE);
stmtCacheSize = PropertyUtil.intPropertyValue(Property.LANG_SPS_CACHE_SIZE, value, 0, Integer.MAX_VALUE, Property.LANG_SPS_CACHE_SIZE_DEFAULT);
value = startParams.getProperty(Property.LANG_SEQGEN_CACHE_SIZE);
seqgenCacheSize = PropertyUtil.intPropertyValue(Property.LANG_SEQGEN_CACHE_SIZE, value, 0, Integer.MAX_VALUE, Property.LANG_SEQGEN_CACHE_SIZE_DEFAULT);
value = startParams.getProperty(Property.LANG_PERMISSIONS_CACHE_SIZE);
permissionsCacheSize = PropertyUtil.intPropertyValue(Property.LANG_PERMISSIONS_CACHE_SIZE, value, 0, Integer.MAX_VALUE, Property.LANG_PERMISSIONS_CACHE_SIZE_DEFAULT);
// See if automatic index statistics update is disabled through a
// system wide property. May be overridden by a database specific
// property later on.
// The default is that automatic index statistics update is enabled.
indexStatsUpdateDisabled = !PropertyUtil.getSystemBoolean(Property.STORAGE_AUTO_INDEX_STATS, true);
// See if we should enable logging of index stats activities.
indexStatsUpdateLogging = PropertyUtil.getSystemBoolean(Property.STORAGE_AUTO_INDEX_STATS_LOGGING);
// See if we should enable tracing of index stats activities.
indexStatsUpdateTracing = PropertyUtil.getSystemProperty(Property.STORAGE_AUTO_INDEX_STATS_TRACING, "off");
/*
* data dictionary contexts are only associated with connections.
* we have to look for the basic data dictionary, as there is
* no connection, and thus no context stack yet.
*/
/*
* Get the table descriptor cache.
*/
CacheFactory cf = (CacheFactory) startSystemModule(org.apache.derby.shared.common.reference.Module.CacheFactory);
OIDTdCache = cf.newCacheManager(this, "TableDescriptorOIDCache", tdCacheSize, tdCacheSize);
nameTdCache = cf.newCacheManager(this, "TableDescriptorNameCache", tdCacheSize, tdCacheSize);
if (stmtCacheSize > 0) {
spsNameCache = cf.newCacheManager(this, "SPSNameDescriptorCache", stmtCacheSize, stmtCacheSize);
spsIdHash = new Hashtable<UUID, SPSDescriptor>(stmtCacheSize);
// spsTextHash = new Hashtable(stmtCacheSize);
}
sequenceGeneratorCache = cf.newCacheManager(this, "SequenceGeneratorCache", seqgenCacheSize, seqgenCacheSize);
sequenceIDs = new HashMap<String, HashMap<String, String>>();
/* Get the object to coordinate cache transitions */
cacheCoordinator = new ShExLockable();
/* Get the AccessFactory so we can do transaction work */
af = (AccessFactory) findServiceModule(this, AccessFactory.MODULE);
/* Get the lock factory */
lockFactory = af.getLockFactory();
/*
* Now we need to set up a context stack for the database creation work.
* We assume the system boot process has created a context
* manager already, but not that the contexts we need are there.
*/
ContextService csf = getContextService();
ContextManager cm = csf.getCurrentContextManager();
if (SanityManager.DEBUG)
SanityManager.ASSERT((cm != null), "Failed to get current ContextManager");
// RESOLVE other non-StandardException errors.
bootingTC = null;
try {
// Get a transaction controller. This has the side effect of
// creating a transaction context if there isn't one already.
bootingTC = af.getTransaction(cm);
/*
We need an execution context so that we can generate rows
REMIND: maybe only for create case?
*/
exFactory.newExecutionContext(cm);
DataDescriptorGenerator ddg = getDataDescriptorGenerator();
// We should set the user schema collation type here because later on
// we are going to create the user schema APP. By the time any user
// schema gets created, we should have the correct collation type set
// for such schemas to use. For this reason, don't remove the following
// if-else statement and don't move it later in this method.
String userDefinedCollation;
if (create) {
// Get the collation attribute from the JDBC url. It can only
// have one of 2 possible values - UCS_BASIC or TERRITORY_BASED
// This attribute can only be specified at database create time.
// The attribute value has already been verified in DVF.boot and
// hence we can be assured that the attribute value if provided
// is either UCS_BASIC or TERRITORY_BASED. If none provided,
// then we will take it to be the default which is UCS_BASIC.
userDefinedCollation = startParams.getProperty(Attribute.COLLATION, Property.UCS_BASIC_COLLATION);
bootingTC.setProperty(Property.COLLATION, userDefinedCollation, true);
} else {
userDefinedCollation = startParams.getProperty(Property.COLLATION, Property.UCS_BASIC_COLLATION);
}
// Initialize the collation type of user schemas by looking at
// collation property/attribute.
collationTypeOfUserSchemas = DataTypeDescriptor.getCollationType(userDefinedCollation);
if (SanityManager.DEBUG)
SanityManager.ASSERT((collationTypeOfUserSchemas != -1), "Invalid collation type: " + userDefinedCollation);
// Now is also a good time to create schema descriptor for global
// temporary tables. Since this is a user schema, it should use the
// collation type associated with user schemas. Since we just
// finished setting up the collation type of user schema, it is
// safe to create user SchemaDescriptor(s) now.
declaredGlobalTemporaryTablesSchemaDesc = newDeclaredGlobalTemporaryTablesSchemaDesc(SchemaDescriptor.STD_DECLARED_GLOBAL_TEMPORARY_TABLES_SCHEMA_NAME);
boolean nativeAuthenticationEnabled = PropertyUtil.nativeAuthenticationEnabled(startParams);
if (create) {
String userName = IdUtil.getUserNameFromURLProps(startParams);
authorizationDatabaseOwner = IdUtil.getUserAuthorizationId(userName);
HashSet<String> newlyCreatedRoutines = new HashSet<String>();
// Log the current dictionary version. This statement was moved to the top
// because SYSCOLUMNSRowFactory queries the version info. See DERBY-6904.
dictionaryVersion = softwareVersion;
// create any required tables.
createDictionaryTables(startParams, bootingTC, ddg);
// create procedures for network server metadata
create_SYSIBM_procedures(bootingTC, newlyCreatedRoutines);
// create metadata sps statement required for network server
createSystemSps(bootingTC);
// create the SYSCS_UTIL system procedures
create_SYSCS_procedures(bootingTC, newlyCreatedRoutines);
// now grant execute permission on some of these routines
grantPublicAccessToSystemRoutines(newlyCreatedRoutines, bootingTC, authorizationDatabaseOwner);
/* Set properties for current and create time
* DataDictionary versions.
*/
bootingTC.setProperty(DataDictionary.CORE_DATA_DICTIONARY_VERSION, dictionaryVersion, true);
bootingTC.setProperty(DataDictionary.CREATE_DATA_DICTIONARY_VERSION, dictionaryVersion, true);
// If SQL authorization is enabled system-wide at create time, persist it as a database property.
if (PropertyUtil.getSystemBoolean(Property.SQL_AUTHORIZATION_PROPERTY)) {
bootingTC.setProperty(Property.SQL_AUTHORIZATION_PROPERTY, "true", true);
}
if (PropertyUtil.getSystemBoolean(Property.SQL_AUTHORIZATION_PROPERTY) || nativeAuthenticationEnabled) {
usesSqlAuthorization = true;
}
// Set default hash algorithm used to protect passwords stored
// in the database for BUILTIN and NATIVE authentication.
bootingTC.setProperty(Property.AUTHENTICATION_BUILTIN_ALGORITHM, findDefaultBuiltinAlgorithm(), false);
} else {
// Get the ids for non-core tables
loadDictionaryTables(bootingTC, startParams);
// See if index stats update is disabled by a database prop.
String dbIndexStatsUpdateAuto = PropertyUtil.getDatabaseProperty(bootingTC, Property.STORAGE_AUTO_INDEX_STATS);
if (dbIndexStatsUpdateAuto != null) {
indexStatsUpdateDisabled = !Boolean.valueOf(dbIndexStatsUpdateAuto).booleanValue();
}
String dbEnableIndexStatsLogging = PropertyUtil.getDatabaseProperty(bootingTC, Property.STORAGE_AUTO_INDEX_STATS_LOGGING);
if (dbEnableIndexStatsLogging != null) {
indexStatsUpdateLogging = Boolean.valueOf(dbEnableIndexStatsLogging).booleanValue();
}
String dbEnableIndexStatsTracing = PropertyUtil.getDatabaseProperty(bootingTC, Property.STORAGE_AUTO_INDEX_STATS_TRACING);
if (dbEnableIndexStatsTracing != null) {
if (!(dbEnableIndexStatsTracing.equalsIgnoreCase("off") || dbEnableIndexStatsTracing.equalsIgnoreCase("log") || dbEnableIndexStatsTracing.equalsIgnoreCase("stdout") || dbEnableIndexStatsTracing.equalsIgnoreCase("both"))) {
indexStatsUpdateTracing = "off";
} else {
indexStatsUpdateTracing = dbEnableIndexStatsTracing;
}
}
String sqlAuth = PropertyUtil.getDatabaseProperty(bootingTC, Property.SQL_AUTHORIZATION_PROPERTY);
// Feature compatibility check.
if (Boolean.valueOf(startParams.getProperty(Attribute.SOFT_UPGRADE_NO_FEATURE_CHECK)).booleanValue()) {
// Keep sqlAuthorization in effect for 10.2+ dictionaries so that the
// database owner check is not lost at a hard upgrade.
if (dictionaryVersion.majorVersionNumber >= DataDictionary.DD_VERSION_DERBY_10_2) {
usesSqlAuthorization = Boolean.valueOf(sqlAuth).booleanValue() || nativeAuthenticationEnabled;
}
} else {
if (Boolean.valueOf(sqlAuth).booleanValue() || nativeAuthenticationEnabled) {
// SQL authorization requires 10.2 or higher database
checkVersion(DataDictionary.DD_VERSION_DERBY_10_2, "sqlAuthorization");
usesSqlAuthorization = true;
}
}
}
if (SanityManager.DEBUG)
SanityManager.ASSERT((authorizationDatabaseOwner != null), "Failed to get Database Owner authorization");
/* Commit & destroy the create database */
bootingTC.commit();
// done with ctx
cm.getContext(ExecutionContext.CONTEXT_ID).popMe();
} finally {
if (bootingTC != null) {
// gets rid of the transaction context
bootingTC.destroy();
bootingTC = null;
}
}
setDependencyManager();
booting = false;
}
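The create branch above reads the collation attribute from the connection URL and persists it with bootingTC.setProperty, while later boots read the stored value back from the start parameters. A minimal client-side sketch of that attribute, assuming a hypothetical database name sampleDB and the embedded driver on the classpath:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class CreateWithCollation {
    public static void main(String[] args) throws SQLException {
        // The collation attribute is only honored at database creation time and
        // may be UCS_BASIC (the default) or TERRITORY_BASED; boot() stores the
        // chosen value as a database property so later (non-create) boots can
        // read it back via Property.COLLATION.
        String url = "jdbc:derby:sampleDB;create=true;territory=en_US;collation=TERRITORY_BASED";
        try (Connection conn = DriverManager.getConnection(url)) {
            System.out.println("created with territory-based collation");
        }
    }
}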
Use of org.apache.derby.iapi.sql.conn.LanguageConnectionFactory in project derby by apache.
The class GenericLanguageFactory, method boot.
/*
ModuleControl interface
*/
/**
* Start-up method for this instance of the language factory.
* This service is expected to be started and accessed relative
* to a database.
*
* @param startParams The start-up parameters (ignored in this case)
*
* @exception StandardException Thrown if module cannot be booted.
*/
public void boot(boolean create, Properties startParams) throws StandardException {
LanguageConnectionFactory lcf = (LanguageConnectionFactory) findServiceModule(this, LanguageConnectionFactory.MODULE);
PropertyFactory pf = lcf.getPropertyFactory();
if (pf != null)
pf.addPropertySetNotification(new LanguageDbPropertySetter());
emptySet = new GenericParameterValueSet(null, 0, false);
}
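Both boot methods above rely on the Monitor's module lookup: bootServiceModule starts a module as part of the current service, while findServiceModule locates one that is already running (those names in the snippets are in-class helpers; treating them as wrappers over org.apache.derby.iapi.services.monitor.Monitor is an assumption here). A minimal sketch of a module doing the same lookup; the class name is hypothetical and the StandardException package varies by release (org.apache.derby.iapi.error in older 10.x lines, org.apache.derby.shared.common.error in 10.15):

import java.util.Properties;

import org.apache.derby.iapi.services.monitor.ModuleControl;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.sql.conn.LanguageConnectionFactory;
import org.apache.derby.shared.common.error.StandardException;

// Hypothetical module that, like GenericLanguageFactory, resolves the
// LanguageConnectionFactory for its database service at boot time.
public class ExampleLanguageAwareModule implements ModuleControl {

    private LanguageConnectionFactory lcf;

    public void boot(boolean create, Properties startParams) throws StandardException {
        // Locate the LanguageConnectionFactory already booted for this service.
        lcf = (LanguageConnectionFactory) Monitor.findServiceModule(
                this, LanguageConnectionFactory.MODULE);
    }

    public void stop() {
        // The Monitor owns the factory's lifecycle; just drop the reference.
        lcf = null;
    }
}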
Use of org.apache.derby.iapi.sql.conn.LanguageConnectionFactory in project derby by apache.
The class SPSDescriptor, method compileStatement.
/**
* Compiles this SPS.
* <p>
* <em>Note:</em> This SPS may still be marked as invalid after this method
* has completed, because an invalidation request may have been received
* while compiling.
*
* @param lcc connection
* @param triggerTable subject table (may be {@code null})
* @param tc transaction controller to use (may be {@code null})
* @throws StandardException if something fails
*/
// @GuardedBy("this")
private void compileStatement(LanguageConnectionContext lcc, TableDescriptor triggerTable, TransactionController tc) throws StandardException {
ContextManager cm = lcc.getContextManager();
LanguageConnectionFactory lcf = lcc.getLanguageConnectionFactory();
DataDictionary dd = getDataDictionary();
/*
** If we are a trigger, then we have to go ahead
** and locate the trigger's table descriptor and
** push it on the lcc. This is expensive, but
** pretty atypical since trigger actions aren't
** likely to be invalidated too often. Also, when
** possible, we already have the triggerTable.
*/
if (type == SPS_TYPE_TRIGGER && triggerTable == null) {
// 49 because name consists of (see CreateTriggerConstantAction):
// TRIGGER<ACTN|WHEN>_<UUID:36>_<UUID:36>
String uuidStr = name.substring(49);
triggerTable = dd.getTableDescriptor(recreateUUID(uuidStr));
if (SanityManager.DEBUG) {
if (triggerTable == null) {
SanityManager.THROWASSERT("couldn't find trigger table for trigger sps " + name);
}
}
}
if (triggerTable != null) {
lcc.pushTriggerTable(triggerTable);
}
// Stored statements are always stored as unicode.
Statement stmt = lcf.getStatement(dd.getSchemaDescriptor(compSchemaId, null), text, true);
try {
preparedStatement = (ExecPreparedStatement) stmt.prepareStorable(lcc, preparedStatement, getParameterDefaults(), getSchemaDescriptor(), type == SPS_TYPE_TRIGGER);
} finally {
if (triggerTable != null) {
lcc.popTriggerTable(triggerTable);
}
}
// Statements that reference SESSION schema tables cannot be stored persistently, so reject them here.
if (preparedStatement.referencesSessionSchema())
throw StandardException.newException(SQLState.LANG_OPERATION_NOT_ALLOWED_ON_SESSION_SCHEMA_TABLES);
setCompileTime();
setParams(preparedStatement.getParameterTypes());
if (!dd.isReadOnlyUpgrade()) {
/*
** Indicate that we are going to write the data
** dictionary. We have probably already done this
** but it is ok to call startWriting more than once.
*/
dd.startWriting(lcc);
DependencyManager dm = dd.getDependencyManager();
/*
** Clear out all the dependencies that exist
** before we recreate them so we don't grow
** SYS.SYSDEPENDS forever.
*/
dm.clearDependencies(lcc, this, tc);
/*
** Copy over all the dependencies to me
*/
dm.copyDependencies(preparedStatement, // from
                    this,              // to
                    false,             // persistent only
                    cm, tc);
// If this SPS is for a trigger action, add a dependency between this SPS and the trigger table (DERBY-5120).
if (triggerTable != null)
dm.addDependency(this, triggerTable, lcc.getContextManager());
}
// mark it as valid
valid = true;
}
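For the name.substring(49) call above, the layout described in the comment works out as follows; the UUID values are made up purely to illustrate the offsets:

public class TriggerSpsNameLayout {
    public static void main(String[] args) {
        // TRIGGER<ACTN|WHEN>_<UUID:36>_<UUID:36>
        //   prefix "TRIGGERACTN_" / "TRIGGERWHEN_" : 12 characters (indexes 0..11)
        //   first UUID                             : 36 characters (indexes 12..47)
        //   separator "_"                          : 1 character   (index 48)
        //   second UUID (the trigger table's)      : starts at index 49
        String spsName = "TRIGGERACTN_"
                + "11111111-2222-3333-4444-555555555555"   // first UUID (made up)
                + "_"
                + "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee";  // table UUID (made up)

        String uuidStr = spsName.substring(49);
        // Prints aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee, the string that
        // compileStatement passes to dd.getTableDescriptor(recreateUUID(uuidStr)).
        System.out.println(uuidStr);
    }
}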