Use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
The class CreateTableNode, method bindStatement.
// We inherit the generate() method from DDLStatementNode.
/**
* Bind this CreateTableNode. This means doing any static error checking that can be
* done before actually creating the base table or declaring the global temporary table.
* For example, verifying that the TableElementList does not contain any duplicate column names.
*
* @exception StandardException Thrown on error
*/
@Override
public void bindStatement() throws StandardException {
DataDictionary dataDictionary = getDataDictionary();
int numPrimaryKeys;
int numCheckConstraints;
int numReferenceConstraints;
int numUniqueConstraints;
int numGenerationClauses;
SchemaDescriptor sd = getSchemaDescriptor(tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE, true);
if (queryExpression != null) {
FromList fromList = new FromList(getOptimizerFactory().doJoinOrderOptimization(), getContextManager());
CompilerContext cc = getCompilerContext();
ProviderList prevAPL = cc.getCurrentAuxiliaryProviderList();
ProviderList apl = new ProviderList();
try {
cc.setCurrentAuxiliaryProviderList(apl);
cc.pushCurrentPrivType(Authorizer.SELECT_PRIV);
/* Bind the tables in the queryExpression */
queryExpression = queryExpression.bindNonVTITables(dataDictionary, fromList);
queryExpression = queryExpression.bindVTITables(fromList);
/* Bind the expressions under the resultSet */
queryExpression.bindExpressions(fromList);
/* Bind the query expression */
queryExpression.bindResultColumns(fromList);
/* Reject any untyped nulls in the RCL */
/* e.g. CREATE TABLE t1 (x) AS VALUES NULL WITH NO DATA */
queryExpression.bindUntypedNullsToResultColumns(null);
} finally {
cc.popCurrentPrivType();
cc.setCurrentAuxiliaryProviderList(prevAPL);
}
/* If there is an RCL for the table definition then copy the
* names to the queryExpression's RCL after verifying that
* they both have the same size.
*/
ResultColumnList qeRCL = queryExpression.getResultColumns();
if (resultColumns != null) {
if (resultColumns.size() != qeRCL.visibleSize()) {
throw StandardException.newException(SQLState.LANG_TABLE_DEFINITION_R_C_L_MISMATCH, getFullName());
}
qeRCL.copyResultColumnNames(resultColumns);
}
int schemaCollationType = sd.getCollationType();
/* Create table element list from columns in query expression */
tableElementList = new TableElementList(getContextManager());
for (ResultColumn rc : qeRCL) {
if (rc.isGenerated()) {
continue;
}
/* Raise error if column name is system generated. */
if (rc.isNameGenerated()) {
throw StandardException.newException(SQLState.LANG_TABLE_REQUIRES_COLUMN_NAMES);
}
DataTypeDescriptor dtd = rc.getExpression().getTypeServices();
if ((dtd != null) && !dtd.isUserCreatableType()) {
throw StandardException.newException(SQLState.LANG_INVALID_COLUMN_TYPE_CREATE_TABLE, dtd.getFullSQLTypeName(), rc.getName());
}
// Reject a character column whose collation differs from the schema's collation, as can happen in a territory-based database.
if (dtd.getTypeId().isStringTypeId() && dtd.getCollationType() != schemaCollationType) {
throw StandardException.newException(SQLState.LANG_CAN_NOT_CREATE_TABLE, dtd.getCollationName(), DataTypeDescriptor.getCollationName(schemaCollationType));
}
ColumnDefinitionNode column = new ColumnDefinitionNode(rc.getName(), null, rc.getType(), null, getContextManager());
tableElementList.addTableElement(column);
}
} else {
// Set the collation type and collation derivation of all the
// character type columns. Their collation type will be same as the
// collation of the schema they belong to. Their collation
// derivation will be "implicit".
// Earlier we did this in makeConstantAction, but that is a little too
// late (DERBY-2955).
// e.g.
// CREATE TABLE STAFF9 (EMPNAME CHAR(20),
// CONSTRAINT STAFF9_EMPNAME CHECK (EMPNAME NOT LIKE 'T%'))
// For the statement above, when run in a territory-based database, the
// correct collation must already be set in the bind phase of CREATE TABLE
// so that when LIKE is handled in LikeEscapeOperatorNode, EMPNAME carries
// the correct collation; otherwise an exception is thrown because 'T%' has
// territory-based collation while EMPNAME has the default collation of
// UCS_BASIC.
tableElementList.setCollationTypesOnCharacterStringColumns(getSchemaDescriptor(tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE, true));
}
tableElementList.validate(this, dataDictionary, (TableDescriptor) null);
/* Only 1012 columns allowed per table */
if (tableElementList.countNumberOfColumns() > Limits.DB2_MAX_COLUMNS_IN_TABLE) {
throw StandardException.newException(SQLState.LANG_TOO_MANY_COLUMNS_IN_TABLE_OR_VIEW, String.valueOf(tableElementList.countNumberOfColumns()), getRelativeName(), String.valueOf(Limits.DB2_MAX_COLUMNS_IN_TABLE));
}
numPrimaryKeys = tableElementList.countConstraints(DataDictionary.PRIMARYKEY_CONSTRAINT);
/* Only 1 primary key allowed per table */
if (numPrimaryKeys > 1) {
throw StandardException.newException(SQLState.LANG_TOO_MANY_PRIMARY_KEY_CONSTRAINTS, getRelativeName());
}
/* Check the validity of all check constraints */
numCheckConstraints = tableElementList.countConstraints(DataDictionary.CHECK_CONSTRAINT);
numReferenceConstraints = tableElementList.countConstraints(DataDictionary.FOREIGNKEY_CONSTRAINT);
numUniqueConstraints = tableElementList.countConstraints(DataDictionary.UNIQUE_CONSTRAINT);
numGenerationClauses = tableElementList.countGenerationClauses();
// temp tables can't have primary key or check or foreign key or unique constraints defined on them
if ((tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) && (numPrimaryKeys > 0 || numCheckConstraints > 0 || numReferenceConstraints > 0 || numUniqueConstraints > 0))
throw StandardException.newException(SQLState.LANG_NOT_ALLOWED_FOR_DECLARED_GLOBAL_TEMP_TABLE);
// Primary key, unique, and foreign key constraints are each backed by an index, and a table cannot have more than 32767 indexes on it; hence this check.
if ((numPrimaryKeys + numReferenceConstraints + numUniqueConstraints) > Limits.DB2_MAX_INDEXES_ON_TABLE) {
throw StandardException.newException(SQLState.LANG_TOO_MANY_INDEXES_ON_TABLE, String.valueOf(numPrimaryKeys + numReferenceConstraints + numUniqueConstraints), getRelativeName(), String.valueOf(Limits.DB2_MAX_INDEXES_ON_TABLE));
}
if ((numCheckConstraints > 0) || (numGenerationClauses > 0) || (numReferenceConstraints > 0)) {
/* In order to check the validity of the check constraints and
* generation clauses
* we must goober up a FromList containing a single table,
* the table being created, with an RCL containing the
* new columns and their types. This will allow us to
* bind the constraint definition trees against that
* FromList. When doing this, we verify that there are
* no nodes which can return non-deterministic results.
*/
FromList fromList = makeFromList(null, tableElementList, true);
FormatableBitSet generatedColumns = new FormatableBitSet();
/* Now that we've finally goobered stuff up, bind and validate
* the check constraints and generation clauses.
*/
if (numGenerationClauses > 0) {
tableElementList.bindAndValidateGenerationClauses(sd, fromList, generatedColumns, null);
}
if (numCheckConstraints > 0) {
tableElementList.bindAndValidateCheckConstraints(fromList);
}
if (numReferenceConstraints > 0) {
tableElementList.validateForeignKeysOnGenerationClauses(fromList, generatedColumns);
}
}
if (numPrimaryKeys > 0) {
tableElementList.validatePrimaryKeyNullability();
}
}
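
The save-and-restore handling of the auxiliary ProviderList and privilege type seen above is a recurring CompilerContext idiom. Below is a minimal sketch of that idiom, not Derby code: the ProviderListScope class and its BindAction interface are hypothetical, only the CompilerContext, ProviderList, and Authorizer calls are taken from the snippet, and the StandardException import path follows older Derby releases (it moved packages later).

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.sql.conn.Authorizer;
import org.apache.derby.iapi.sql.depend.ProviderList;

// Hypothetical helper: bind a sub-tree while a fresh ProviderList collects
// its dependencies, restoring the previous list and privilege type even if
// binding fails.
final class ProviderListScope {

    // Stand-in for "bind the query expression" in CreateTableNode above.
    interface BindAction {
        void bind() throws StandardException;
    }

    static ProviderList bindWithFreshProviderList(CompilerContext cc, BindAction action)
            throws StandardException {
        ProviderList previous = cc.getCurrentAuxiliaryProviderList();
        ProviderList fresh = new ProviderList();
        try {
            cc.setCurrentAuxiliaryProviderList(fresh);
            cc.pushCurrentPrivType(Authorizer.SELECT_PRIV);
            action.bind();
        } finally {
            cc.popCurrentPrivType();
            cc.setCurrentAuxiliaryProviderList(previous);
        }
        // The caller can hand this list to the DependencyManager, as the
        // CreateTriggerNode example further down does.
        return fresh;
    }

    private ProviderListScope() { }
}
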
Use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
The class CreateTriggerNode, method bindStatement.
// accessors
// We inherit the generate() method from DDLStatementNode.
/**
* Bind this CreateTriggerNode. This means doing any static error
* checking that can be done before actually creating the trigger.
*
* @exception StandardException Thrown on error
*/
@Override
public void bindStatement() throws StandardException {
CompilerContext compilerContext = getCompilerContext();
DataDictionary dd = getDataDictionary();
/*
** Grab the current schema. We will use that for
** SPS (stored prepared statement) compilation.
*/
LanguageConnectionContext lcc = getLanguageConnectionContext();
compSchemaDescriptor = lcc.getDefaultSchema();
/*
** Get and check the schema descriptor for this
** trigger. This check will throw the proper exception
** if someone tries to create a trigger in the SYS
** schema.
*/
triggerSchemaDescriptor = getSchemaDescriptor();
/*
** Get the trigger table.
*/
triggerTableDescriptor = getTableDescriptor(tableName);
// Throw an exception if the user is attempting to create a trigger on a temporary table.
if (isSessionSchema(triggerTableDescriptor.getSchemaDescriptor())) {
throw StandardException.newException(SQLState.LANG_OPERATION_NOT_ALLOWED_ON_SESSION_SCHEMA_TABLES);
}
if (isPrivilegeCollectionRequired()) {
compilerContext.pushCurrentPrivType(Authorizer.TRIGGER_PRIV);
compilerContext.addRequiredTablePriv(triggerTableDescriptor);
compilerContext.popCurrentPrivType();
}
/*
** Regenerates the actionText and actionNode if necessary.
*/
boolean needInternalSQL = bindReferencesClause(dd);
// Get all the names of SQL objects referenced by the triggered
// SQL statement and the WHEN clause. Since some of the TableName
// nodes may be eliminated from the node tree during the bind phase,
// we collect the nodes before the nodes have been bound. The
// names will be used later when we normalize the trigger text
// that will be stored in the system tables.
SortedSet<TableName> actionNames = actionNode.getOffsetOrderedNodes(TableName.class);
SortedSet<TableName> whenNames = (whenClause != null) ? whenClause.getOffsetOrderedNodes(TableName.class) : null;
ProviderList prevAPL = compilerContext.getCurrentAuxiliaryProviderList();
ProviderList apl = new ProviderList();
lcc.pushTriggerTable(triggerTableDescriptor);
try {
compilerContext.setCurrentAuxiliaryProviderList(apl);
/*
** Bind the trigger action and the trigger
** when clause to make sure that they are
** ok. Note that we have already substituted
** in various replacements for OLD/NEW transition
** tables/variables and reparsed if necessary.
*/
if (needInternalSQL)
compilerContext.setReliability(CompilerContext.INTERNAL_SQL_LEGAL);
// A BEFORE trigger may not invoke a procedure that modifies SQL data; that restriction is enforced during the bind of the call statement node.
if (isBefore)
compilerContext.setReliability(CompilerContext.MODIFIES_SQL_DATA_PROCEDURE_ILLEGAL);
actionNode.bindStatement();
if (whenClause != null) {
ContextManager cm = getContextManager();
whenClause = whenClause.bindExpression(new FromList(cm), new SubqueryList(cm), new ArrayList<AggregateNode>(0));
// The WHEN clause must be a BOOLEAN expression.
whenClause.checkIsBoolean();
}
} finally {
lcc.popTriggerTable(triggerTableDescriptor);
compilerContext.setCurrentAuxiliaryProviderList(prevAPL);
}
// Qualify identifiers before storing them (DERBY-5901/DERBY-6370).
qualifyNames(actionNames, whenNames);
/*
** Statement is dependent on the TableDescriptor
*/
compilerContext.createDependency(triggerTableDescriptor);
/*
** If there is a list of columns, then no duplicate columns,
** and all columns must be found.
*/
if (triggerCols != null && triggerCols.size() != 0) {
HashSet<String> columnNames = new HashSet<String>();
for (ResultColumn rc : triggerCols) {
if (!columnNames.add(rc.getName())) {
throw StandardException.newException(SQLState.LANG_DUPLICATE_COLUMN_IN_TRIGGER_UPDATE, rc.getName(), triggerName);
}
ColumnDescriptor cd = triggerTableDescriptor.getColumnDescriptor(rc.getName());
if (cd == null) {
throw StandardException.newException(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE, rc.getName(), tableName);
}
}
}
// Throw an exception if the trigger action or WHEN clause statement references a table in the SESSION schema.
if (referencesSessionSchema()) {
throw StandardException.newException(SQLState.LANG_OPERATION_NOT_ALLOWED_ON_SESSION_SCHEMA_TABLES);
}
DependencyManager dm = dd.getDependencyManager();
providerInfo = dm.getPersistentProviderInfos(apl);
dm.clearColumnInfoInProviders(apl);
}
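
The short privilege-collection block near the top of the method (pushCurrentPrivType, addRequiredTablePriv, popCurrentPrivType) can be read as the general pattern for declaring the privileges a compiled statement needs. A hedged sketch follows; the helper class name is hypothetical, the CompilerContext calls are the ones used in the snippet, and the try/finally is a defensive addition not present in the original block.

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.sql.dictionary.TableDescriptor;

// Hypothetical helper: record that the statement being compiled requires a
// table-level privilege of the given type (e.g. Authorizer.TRIGGER_PRIV),
// bracketing the call with push/pop of the privilege type as the snippet does.
final class RequiredPrivileges {

    static void addTablePriv(CompilerContext cc, int privType, TableDescriptor table)
            throws StandardException {
        cc.pushCurrentPrivType(privType);
        try {
            cc.addRequiredTablePriv(table);
        } finally {
            cc.popCurrentPrivType();
        }
    }

    private RequiredPrivileges() { }
}
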
Use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
The class DMLModStatementNode, method bindConstraints.
/**
* Gets and binds all the constraints for an INSERT/UPDATE/DELETE.
* First finds the constraints that are relevant to this node.
* This is done by calling getAllRelevantConstraints(). If
* getAllRelevantConstraints() has already been called, then
* this list is used. Then it creates appropriate
* dependencies. Then binds check constraints. It also
* generates the array of FKInfo items that are used in
* code generation.
*
* Note: we have a new flag here to indicate whether defer processing is
* enabled or not; the only scenario in which it is disabled is when we
* reapply the reply message we get from the source.
*
* @param dataDictionary The DataDictionary
* @param targetTableDescriptor The TableDescriptor
* @param dependent Parent object that will depend on all the constraints
* that we look up. If this argument is null, then we
* use the default dependent (the statement being compiled).
* @param sourceRCL RCL of the table being changed
* @param changedColumnIds If null, all columns being changed, otherwise array
* of 1-based column ids for columns being changed
* @param readColsBitSet bit set for the read scan
* @param includeTriggers whether triggers are included in the processing
* @param hasDeferrableCheckConstraints
* OUT semantics: set element 0 to true if the
* target table has any deferrable CHECK constraints
*
* @return The bound, ANDed check constraints as a query tree.
*
* @exception StandardException Thrown on failure
*/
ValueNode bindConstraints(DataDictionary dataDictionary, OptimizerFactory optimizerFactory, TableDescriptor targetTableDescriptor, Dependent dependent, ResultColumnList sourceRCL, int[] changedColumnIds, FormatableBitSet readColsBitSet, boolean includeTriggers, boolean[] hasDeferrableCheckConstraints) throws StandardException {
bound = true;
/* Nothing to do if updatable VTI */
if (targetVTI != null) {
return null;
}
CompilerContext compilerContext = getCompilerContext();
// Do not need privileges to execute constraints
compilerContext.pushCurrentPrivType(Authorizer.NULL_PRIV);
try {
getAllRelevantConstraints(dataDictionary, targetTableDescriptor, changedColumnIds);
createConstraintDependencies(dataDictionary, relevantCdl, dependent);
generateFKInfo(relevantCdl, dataDictionary, targetTableDescriptor, readColsBitSet);
getAllRelevantTriggers(dataDictionary, targetTableDescriptor, changedColumnIds, includeTriggers);
createTriggerDependencies(relevantTriggers, dependent);
generateTriggerInfo(relevantTriggers);
checkConstraints = generateCheckTree(relevantCdl, targetTableDescriptor, hasDeferrableCheckConstraints);
if (checkConstraints != null) {
SchemaDescriptor originalCurrentSchema = targetTableDescriptor.getSchemaDescriptor();
compilerContext.pushCompilationSchema(originalCurrentSchema);
try {
bindRowScopedExpression(optimizerFactory, getContextManager(), targetTableDescriptor, sourceRCL, checkConstraints);
} finally {
compilerContext.popCompilationSchema();
}
}
} finally {
compilerContext.popCurrentPrivType();
}
return checkConstraints;
}
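
When the check-constraint tree is bound above, the target table's schema is pushed as the compilation schema, presumably so that names in the constraint text resolve against the table's schema rather than the current default. A minimal sketch of that idiom, with a hypothetical helper class and callback interface; only pushCompilationSchema and popCompilationSchema come from the snippet.

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;

// Hypothetical helper: run a binding action with a specific compilation
// schema in effect, restoring the previous schema afterwards, mirroring the
// pushCompilationSchema/popCompilationSchema pair in bindConstraints above.
final class CompilationSchemaScope {

    interface BindAction {
        void bind() throws StandardException;
    }

    static void bindUnderSchema(CompilerContext cc, SchemaDescriptor schema, BindAction action)
            throws StandardException {
        cc.pushCompilationSchema(schema);
        try {
            action.bind();
        } finally {
            cc.popCompilationSchema();
        }
    }

    private CompilationSchemaScope() { }
}
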
Use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
The class DMLModStatementNode, method parseCheckConstraint.
/**
* Parse a check constraint and turn it into a query tree.
*
* @param checkConstraintText Text of CHECK CONSTRAINT.
* @param td The TableDescriptor for the table the constraint is on.
*
* @return The parsed check constraint as a query tree.
*
* @exception StandardException Thrown on failure
*/
public ValueNode parseCheckConstraint(String checkConstraintText, TableDescriptor td) throws StandardException {
Parser p;
ValueNode checkTree;
LanguageConnectionContext lcc = getLanguageConnectionContext();
/* Get a Statement to pass to the parser */
/* We're all set up to parse. We have to build a compilable SQL statement
* before we can parse - we just have a WHERE clause right now.
* So, we goober up a SELECT * FROM table WHERE checkDefs.
*/
String select = "SELECT * FROM " + td.getQualifiedName() + " WHERE " + checkConstraintText;
/*
** Get a new compiler context, so the parsing of the select statement
** doesn't mess up anything in the current context (it could clobber
** the ParameterValueSet, for example).
*/
CompilerContext newCC = lcc.pushCompilerContext();
p = newCC.getParser();
/* Finally, we can call the parser */
// Since this is always nested inside another SQL statement, the topLevel flag
// should be false.
Visitable qt = p.parseStatement(select);
if (SanityManager.DEBUG) {
if (!(qt instanceof CursorNode)) {
SanityManager.THROWASSERT("qt expected to be instanceof CursorNode, not " + qt.getClass().getName());
}
CursorNode cn = (CursorNode) qt;
if (!(cn.getResultSetNode() instanceof SelectNode)) {
SanityManager.THROWASSERT("cn.getResultSetNode() expected to be instanceof SelectNode, not " + cn.getResultSetNode().getClass().getName());
}
}
checkTree = ((SelectNode) ((CursorNode) qt).getResultSetNode()).getWhereClause();
lcc.popCompilerContext(newCC);
return checkTree;
}
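
The same pattern of pushing a throwaway CompilerContext to parse some SQL text without disturbing the statement currently being compiled can be captured in a small helper. A sketch under the usual assumptions (hypothetical class name, import paths per older Derby releases); unlike the original, it pops the scratch context in a finally block.

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.sql.compile.Parser;
import org.apache.derby.iapi.sql.compile.Visitable;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;

// Hypothetical helper: parse SQL text inside a scratch CompilerContext so the
// outer compilation (its ParameterValueSet and so on) is left untouched.
final class ScratchParser {

    static Visitable parseInScratchContext(LanguageConnectionContext lcc, String sqlText)
            throws StandardException {
        CompilerContext scratch = lcc.pushCompilerContext();
        try {
            Parser p = scratch.getParser();
            return p.parseStatement(sqlText);
        } finally {
            lcc.popCompilerContext(scratch);
        }
    }

    private ScratchParser() { }
}

A caller could then walk the returned tree much as parseCheckConstraint does, casting to CursorNode and extracting the WHERE clause of the generated SELECT.
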
Use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
The class DMLModStatementNode, method markAffectedIndexes.
protected void markAffectedIndexes(List<ConglomerateDescriptor> affectedConglomerates) throws StandardException {
ConglomerateDescriptor cd;
int indexCount = affectedConglomerates.size();
CompilerContext cc = getCompilerContext();
indicesToMaintain = new IndexRowGenerator[indexCount];
indexConglomerateNumbers = new long[indexCount];
indexNames = new String[indexCount];
for (int ictr = 0; ictr < indexCount; ictr++) {
cd = affectedConglomerates.get(ictr);
indicesToMaintain[ictr] = cd.getIndexDescriptor();
indexConglomerateNumbers[ictr] = cd.getConglomerateNumber();
indexNames[ictr] = ((cd.isConstraint()) ? null : cd.getConglomerateName());
cc.createDependency(cd);
}
}
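
The cc.createDependency(cd) call at the end of the loop is what ties the compiled plan to each affected index, so that later changes to those conglomerates can invalidate the prepared statement. A minimal, hypothetical sketch of registering such dependencies in bulk; only the createDependency call itself is taken from the snippet.

import java.util.List;

import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor;

// Hypothetical helper: declare the statement being compiled dependent on a
// batch of conglomerate descriptors, using the same createDependency call.
final class PlanDependencies {

    static void dependOnAll(CompilerContext cc, List<ConglomerateDescriptor> descriptors)
            throws StandardException {
        for (ConglomerateDescriptor cd : descriptors) {
            cc.createDependency(cd);
        }
    }

    private PlanDependencies() { }
}
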