use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
the class AlterTableConstantAction method columnDroppedAndTriggerDependencies.
// For the trigger, get the trigger action sql provided by the user
// in the create trigger sql. This sql is saved in the system
// table. Since a column has been dropped from the trigger table,
// the trigger action sql may not be valid anymore. To establish
// that, we need to regenerate the internal representation of that
// sql and bind it again.
//
// This method is called both on the WHEN clause (if one exists) and the
// triggered SQL statement of the trigger action.
//
// Return true if the trigger was dropped by this method (if cascade is
// true and it turns out the trigger depends on the column being dropped),
// or false otherwise.
private boolean columnDroppedAndTriggerDependencies(TriggerDescriptor trd, UUID spsUUID, boolean isWhenClause, boolean cascade, String columnName) throws StandardException {
dd.dropTriggerDescriptor(trd, tc);
// Here we get the trigger action sql and use the parser to build
// the parse tree for it.
SchemaDescriptor compSchema = dd.getSchemaDescriptor(dd.getSPSDescriptor(spsUUID).getCompSchemaId(), null);
CompilerContext newCC = lcc.pushCompilerContext(compSchema);
Parser pa = newCC.getParser();
String originalSQL = isWhenClause ? trd.getWhenClauseText() : trd.getTriggerDefinition();
Visitable node = isWhenClause ? pa.parseSearchCondition(originalSQL) : pa.parseStatement(originalSQL);
lcc.popCompilerContext(newCC);
// Do not delete the following. We use it in the finally clause to
// determine if the CompilerContext needs to be popped.
newCC = null;
try {
// Regenerate the internal representation for the trigger action
// sql using the ColumnReference classes in the parse tree. It
// will catch a dropped column getting used in the trigger action
// sql through the REFERENCING clause (this can happen only for
// triggers created prior to 10.7. Triggers created with 10.7 and
// higher keep track of the trigger action columns used through
// the REFERENCING clause in a system table, and hence the use of
// a dropped column will be detected earlier in this method for
// such triggers).
//
// We might catch errors like the following during this step.
// Say the following pre-10.7 trigger exists in the system and
// the user is dropping column c11. During the regeneration of the
// internal trigger action sql format, we will catch that
// column oldt.c11 does not exist anymore:
// CREATE TRIGGER DERBY4998_SOFT_UPGRADE_RESTRICT_tr1
// AFTER UPDATE OF c12
// ON DERBY4998_SOFT_UPGRADE_RESTRICT REFERENCING OLD AS oldt
// FOR EACH ROW
// SELECT oldt.c11 from DERBY4998_SOFT_UPGRADE_RESTRICT
SPSDescriptor sps = isWhenClause ? trd.getWhenClauseSPS(lcc) : trd.getActionSPS(lcc);
int[] referencedColsInTriggerAction = new int[td.getNumberOfColumns()];
java.util.Arrays.fill(referencedColsInTriggerAction, -1);
String newText = dd.getTriggerActionString(node, trd.getOldReferencingName(), trd.getNewReferencingName(), originalSQL, trd.getReferencedCols(), referencedColsInTriggerAction, 0, trd.getTableDescriptor(), trd.getTriggerEventMask(), true, null, null);
if (isWhenClause) {
// The WHEN clause is not a full SQL statement, just a search
// condition, so we need to turn it into a statement in order
// to create an SPS.
newText = "VALUES " + newText;
}
sps.setText(newText);
// Now that we have the internal format of the trigger action sql,
// bind that sql to make sure that we are not using the column being
// dropped in the trigger action sql directly (ie not through the
// REFERENCING clause).
// eg
// create table atdc_12 (a integer, b integer);
// create trigger atdc_12_trigger_1 after update of a
// on atdc_12 for each row select a,b from atdc_12
// Drop one of the columns used in the trigger action
// alter table atdc_12 drop column b
// The following rebinding of the trigger action sql will catch the
// use of column b in trigger atdc_12_trigger_1
newCC = lcc.pushCompilerContext(compSchema);
newCC.setReliability(CompilerContext.INTERNAL_SQL_LEGAL);
pa = newCC.getParser();
StatementNode stmtnode = (StatementNode) pa.parseStatement(newText);
// need a current dependent for bind
newCC.setCurrentDependent(sps.getPreparedStatement());
stmtnode.bindStatement();
} catch (StandardException se) {
// The rebind above can fail with one of the following errors if the
// trigger action still depends on the dropped column, eg when
// drop column ATDC_13_TAB3.c12 is issued and a trigger action
// references that column
if (se.getMessageId().equals(SQLState.LANG_COLUMN_NOT_FOUND) ||
se.getMessageId().equals(SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE) ||
se.getMessageId().equals(SQLState.LANG_DB2_INVALID_COLS_SPECIFIED) ||
se.getMessageId().equals(SQLState.LANG_TABLE_NOT_FOUND)) {
if (cascade) {
trd.drop(lcc);
activation.addWarning(StandardException.newWarning(SQLState.LANG_TRIGGER_DROPPED, trd.getName(), td.getName()));
return true;
} else {
// we'd better give an error if we don't drop it
throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, dm.getActionString(DependencyManager.DROP_COLUMN), columnName, "TRIGGER", trd.getName());
}
} else
throw se;
} finally {
if (newCC != null)
lcc.popCompilerContext(newCC);
}
// If we are here, then it means that the column being dropped
// is not getting used in the trigger action.
//
// We have recreated the trigger action SPS and recollected the
// column positions for trigger columns and trigger action columns
// getting accessed through REFERENCING clause because
// drop column can affect the column positioning of existing
// columns in the table. We will save that in the system table.
dd.addDescriptor(trd, sd, DataDictionary.SYSTRIGGERS_CATALOG_NUM, false, tc);
return false;
}
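As a side note, the parse-and-rebind logic above follows a common pattern in Derby's compiler: push a CompilerContext, parse and bind, and pop the context in a finally block. A minimal sketch of that pattern, reusing the calls from the method above (sqlText is a placeholder variable for illustration):

CompilerContext cc = lcc.pushCompilerContext(compSchema);
try {
Parser p = cc.getParser();
StatementNode stmt = (StatementNode) p.parseStatement(sqlText);
stmt.bindStatement();
} finally {
lcc.popCompilerContext(cc);
}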
use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
the class QueryTreeNode method orReliability.
/**
* OR in more reliability bits and return the old reliability value.
*/
public int orReliability(int newBits) {
CompilerContext cc = getCompilerContext();
int previousReliability = cc.getReliability();
cc.setReliability(previousReliability | newBits);
return previousReliability;
}
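A typical caller (a hypothetical sketch from within a QueryTreeNode subclass, not code from this class) uses the returned value to restore the original reliability once the stricter compilation phase is over:

int oldReliability = orReliability(CompilerContext.INTERNAL_SQL_LEGAL);
try {
// bind or compile work that needs the extra reliability bits
} finally {
getCompilerContext().setReliability(oldReliability);
}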
use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
the class SubqueryNode method generateExpression.
/**
* Do code generation for this subquery.
*
* @param expressionBuilder The ExpressionClassBuilder for the class being built
* @param mbex The method the expression will go into
*
* @exception StandardException Thrown on error
*/
@Override
void generateExpression(ExpressionClassBuilder expressionBuilder, MethodBuilder mbex) throws StandardException {
CompilerContext cc = getCompilerContext();
String resultSetString;
if (SanityManager.DEBUG) {
SanityManager.ASSERT(expressionBuilder instanceof ActivationClassBuilder, "Expecting an ActivationClassBuilder");
}
ActivationClassBuilder acb = (ActivationClassBuilder) expressionBuilder;
/* Generate the appropriate (Any or Once) ResultSet */
if (subqueryType == EXPRESSION_SUBQUERY) {
resultSetString = "getOnceResultSet";
} else {
resultSetString = "getAnyResultSet";
}
// Get cost estimate for underlying subquery
CostEstimate costEstimate = resultSet.getFinalCostEstimate();
/* Generate a new method. It's only used within the other
* exprFuns, so it could be private, but since we don't
* generate the right bytecodes to invoke private methods,
* we just make it protected. This generated class won't
* have any subclasses, certainly! (nat 12/97)
*/
String subqueryTypeString = getTypeCompiler().interfaceName();
MethodBuilder mb = acb.newGeneratedFun(subqueryTypeString, Modifier.PROTECTED);
/* Declare the field to hold the subquery's ResultSet tree */
LocalField rsFieldLF = acb.newFieldDeclaration(Modifier.PRIVATE, ClassName.NoPutResultSet);
ResultSetNode subNode = null;
if (!isMaterializable()) {
MethodBuilder executeMB = acb.getExecuteMethod();
if (pushedNewPredicate && (!hasCorrelatedCRs())) {
/* We try to materialize the subquery if it can fit in memory. We
* evaluate the subquery first. If the result set fits in memory,
* we replace the resultset with in-memory unions of row result sets.
* We do this trick by replacing the child result with a new node --
* MaterializeSubqueryNode, which essentially generates the suitable
* code to materialize the subquery if possible. This can be a big
* performance improvement. See beetle 4373.
*/
if (SanityManager.DEBUG) {
SanityManager.ASSERT(resultSet instanceof ProjectRestrictNode, "resultSet expected to be a ProjectRestrictNode!");
}
subNode = ((ProjectRestrictNode) resultSet).getChildResult();
LocalField subRS = acb.newFieldDeclaration(Modifier.PRIVATE, ClassName.NoPutResultSet);
mb.getField(subRS);
mb.conditionalIfNull();
ResultSetNode materialSubNode = new MaterializeSubqueryNode(subRS, getContextManager());
// Propagate the resultSet's cost estimate to the new node.
materialSubNode.setCostEstimate(resultSet.getFinalCostEstimate());
((ProjectRestrictNode) resultSet).setChildResult(materialSubNode);
/* Evaluate subquery resultset here first. Next time when we come to
* this subquery it may be replaced by a bunch of unions of rows.
*/
subNode.generate(acb, mb);
mb.startElseCode();
mb.getField(subRS);
mb.completeConditional();
mb.setField(subRS);
executeMB.pushNull(ClassName.NoPutResultSet);
executeMB.setField(subRS);
}
executeMB.pushNull(ClassName.NoPutResultSet);
executeMB.setField(rsFieldLF);
// now we fill in the body of the conditional
mb.getField(rsFieldLF);
mb.conditionalIfNull();
}
acb.pushGetResultSetFactoryExpression(mb);
// start of args
int nargs;
/* Inside here is where the subquery could already have been materialized. 4373
*/
resultSet.generate(acb, mb);
/* Get the next ResultSet #, so that we can number the subquery's
* empty row ResultColumnList and Once/Any ResultSet.
*/
int subqResultSetNumber = cc.getNextResultSetNumber();
/* We will be reusing the RCL from the subquery's ResultSet for the
* empty row function. We need to reset the resultSetNumber in the
* RCL, before we generate that function. Now that we've called
* generate() on the subquery's ResultSet, we can reset that
* resultSetNumber.
*/
resultSet.getResultColumns().setResultSetNumber(subqResultSetNumber);
/* Generate code for empty row */
resultSet.getResultColumns().generateNulls(acb, mb);
/*
* arg1: subqueryExpression - Expression for the subquery's
* ResultSet
* arg2: Activation
* arg3: Method to generate a Row with null(s) if the subquery's
* ResultSet is empty
*/
if (subqueryType == EXPRESSION_SUBQUERY) {
int cardinalityCheck;
/* No need to sort if the subquery began life as a distinct expression subquery.
* (We simply check for a single unique value at execution time.)
* No need for cardinality check if we know that underlying
* ResultSet can contain at most 1 row.
* RESOLVE - Not necessary if we know we
* are getting a single row because of a unique index.
*/
if (distinctExpression) {
cardinalityCheck = OnceResultSet.UNIQUE_CARDINALITY_CHECK;
} else if (resultSet.returnsAtMostOneRow()) {
cardinalityCheck = OnceResultSet.NO_CARDINALITY_CHECK;
} else {
cardinalityCheck = OnceResultSet.DO_CARDINALITY_CHECK;
}
/* arg4: int - whether or not cardinality check is required
* DO_CARDINALITY_CHECK - required
* NO_CARDINALITY_CHECK - not required
* UNIQUE_CARDINALITY_CHECK - verify single
* unique value
*/
mb.push(cardinalityCheck);
nargs = 8;
} else {
nargs = 7;
}
mb.push(subqResultSetNumber);
mb.push(subqueryNumber);
mb.push(pointOfAttachment);
mb.push(costEstimate.rowCount());
mb.push(costEstimate.getEstimatedCost());
mb.callMethod(VMOpcode.INVOKEINTERFACE, (String) null, resultSetString, ClassName.NoPutResultSet, nargs);
if (!isMaterializable()) {
/* put it back
*/
if (pushedNewPredicate && (!hasCorrelatedCRs()))
((ProjectRestrictNode) resultSet).setChildResult(subNode);
// now we fill in the body of the conditional
mb.startElseCode();
mb.getField(rsFieldLF);
mb.completeConditional();
}
mb.setField(rsFieldLF);
/* rs.openCore() */
mb.getField(rsFieldLF);
mb.callMethod(VMOpcode.INVOKEINTERFACE, (String) null, "openCore", "void", 0);
/* r = rs.next() */
mb.getField(rsFieldLF);
mb.callMethod(VMOpcode.INVOKEINTERFACE, (String) null, "getNextRowCore", ClassName.ExecRow, 0);
// mb.putVariable(rVar);
// mb.endStatement();
/* col = (<Datatype interface>) r.getColumn(1) */
// mb.getVariable(rVar);
// both the Row interface and columnId are 1-based
mb.push(1);
mb.callMethod(VMOpcode.INVOKEINTERFACE, ClassName.Row, "getColumn", ClassName.DataValueDescriptor, 1);
mb.cast(subqueryTypeString);
/* Only generate the close() method for materialized
* subqueries. All others will be closed when the
* close() method is called on the top ResultSet.
*/
if (isMaterializable()) {
/* rs.close() */
mb.getField(rsFieldLF);
mb.callMethod(VMOpcode.INVOKEINTERFACE, ClassName.ResultSet, "close", "void", 0);
}
/* return col */
// mb.getVariable(colVar);
mb.methodReturn();
mb.complete();
/*
** If we have an expression subquery, then we
** can materialize it if it has no correlated
** column references and is invariant.
*/
if (isMaterializable()) {
LocalField lf = generateMaterialization(acb, mb, subqueryTypeString);
mbex.getField(lf);
} else {
/* Generate the call to the new method */
mbex.pushThis();
mbex.callMethod(VMOpcode.INVOKEVIRTUAL, (String) null, mb.getName(), subqueryTypeString, 0);
}
}
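For orientation, the exprFun generated above conceptually behaves like the hand-written sketch below. This is illustrative only: the real method is emitted as bytecode through MethodBuilder, the field and method names here are invented, and the actual return type is whatever interface subqueryTypeString names.

// protected DataValueDescriptor exprN() throws StandardException {
// if (rsField == null) {
// // getOnceResultSet for expression subqueries, getAnyResultSet otherwise
// rsField = getResultSetFactory().getOnceResultSet(...);
// }
// rsField.openCore();
// ExecRow row = rsField.getNextRowCore();
// // both the Row interface and columnId are 1-based
// return row.getColumn(1);
// }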
use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
the class SumAvgAggregateDefinition method getAggregator.
/**
* Determines the result datatype. Accepts NumberDataValues
* only.
* <P>
* <I>Note</I>: In the future you should be able to do
* a sum over user data types. One option would be to run
* sum on anything that implements plus(). In that
* case avg() would need divide().
*
* @param inputType the input type, either a user type or a java.lang object
* @param aggregatorClass buffer that receives the name of the aggregator class
*
* @return the output datatype (null if we cannot operate on a
* value expression of this type).
*/
public final DataTypeDescriptor getAggregator(DataTypeDescriptor inputType, StringBuffer aggregatorClass) {
try {
TypeId compType = inputType.getTypeId();
CompilerContext cc = (CompilerContext) QueryTreeNode.getContext(CompilerContext.CONTEXT_ID);
TypeCompilerFactory tcf = cc.getTypeCompilerFactory();
TypeCompiler tc = tcf.getTypeCompiler(compType);
/*
** If the class implements NumberDataValue, then we
** are in business. Return type is same as input
** type.
*/
if (compType.isNumericTypeId()) {
aggregatorClass.append(getAggregatorClassName());
DataTypeDescriptor outDts = tc.resolveArithmeticOperation(inputType, inputType, getOperator());
/*
** SUM and AVG may return null
*/
return outDts.getNullabilityType(true);
}
} catch (StandardException e) {
if (SanityManager.DEBUG) {
SanityManager.THROWASSERT("Unexpected exception", e);
}
}
return null;
}
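A hypothetical caller (the sumAvgDefinition and inputType names are invented for illustration) passes an empty StringBuffer and inspects both outputs, since the method reports the aggregator class name and the result type separately:

StringBuffer aggregatorClass = new StringBuffer();
DataTypeDescriptor resultType = sumAvgDefinition.getAggregator(inputType, aggregatorClass);
if (resultType == null) {
// SUM/AVG cannot operate on this input type
} else {
// aggregatorClass now holds the aggregator's class name and
// resultType is the (nullable) result datatype
}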
use of org.apache.derby.iapi.sql.compile.CompilerContext in project derby by apache.
the class TableElementList method bindAndValidateCheckConstraints.
/**
* Bind and validate all of the check constraints in this list against
* the specified FromList.
*
* @param fromList The FromList in question.
*
* @exception StandardException Thrown on error
*/
void bindAndValidateCheckConstraints(FromList fromList) throws StandardException {
FromBaseTable table = (FromBaseTable) fromList.elementAt(0);
CompilerContext cc = getCompilerContext();
ArrayList<AggregateNode> aggregates = new ArrayList<AggregateNode>();
for (TableElementNode element : this) {
ConstraintDefinitionNode cdn;
ValueNode checkTree;
if (!(element instanceof ConstraintDefinitionNode)) {
continue;
}
cdn = (ConstraintDefinitionNode) element;
if (cdn.getConstraintType() != DataDictionary.CHECK_CONSTRAINT) {
continue;
}
checkTree = cdn.getCheckCondition();
// bind the check condition
// verify that it evaluates to a boolean
final int previousReliability = cc.getReliability();
try {
/* Each check constraint can have its own set of dependencies.
* These dependencies need to be shared with the prepared
* statement as well. We create a new auxiliary provider list
* for the check constraint, "push" it on the compiler context
* by swapping it with the current auxiliary provider list
* and the "pop" it when we're done by restoring the old
* auxiliary provider list.
*/
ProviderList apl = new ProviderList();
ProviderList prevAPL = cc.getCurrentAuxiliaryProviderList();
cc.setCurrentAuxiliaryProviderList(apl);
// Tell the compiler context to only allow deterministic nodes
cc.setReliability(CompilerContext.CHECK_CONSTRAINT);
checkTree = checkTree.bindExpression(fromList, (SubqueryList) null, aggregates);
// no aggregates, please
if (!aggregates.isEmpty()) {
throw StandardException.newException(SQLState.LANG_INVALID_CHECK_CONSTRAINT, cdn.getConstraintText());
}
checkTree = checkTree.checkIsBoolean();
cdn.setCheckCondition(checkTree);
/* Save the APL off in the constraint node */
if (apl.size() > 0) {
cdn.setAuxiliaryProviderList(apl);
}
// Restore the previous AuxiliaryProviderList
cc.setCurrentAuxiliaryProviderList(prevAPL);
} finally {
cc.setReliability(previousReliability);
}
/* We have a valid check constraint.
* Now we build a list with only the referenced columns and
* copy it to the cdn. Thus we can build the array of
* column names for the referenced columns during generate().
*/
ResultColumnList rcl = table.getResultColumns();
int numReferenced = rcl.countReferencedColumns();
ResultColumnList refRCL = new ResultColumnList(getContextManager());
rcl.copyReferencedColumnsToNewList(refRCL);
/* A column check constraint can only refer to that column. If this is a
* column constraint, we should have an RCL with only that column
*/
if (cdn.getColumnList() != null) {
String colName = cdn.getColumnList().elementAt(0).getName();
if (numReferenced > 1 || !colName.equals(refRCL.elementAt(0).getName()))
throw StandardException.newException(SQLState.LANG_DB2_INVALID_CHECK_CONSTRAINT, colName);
}
cdn.setColumnList(refRCL);
/* Clear the column references in the RCL so each check constraint
* starts with a clean list.
*/
rcl.clearColumnReferences();
// Make sure all names are schema qualified (DERBY-6362)
cdn.qualifyNames();
}
}
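To make the validation rules concrete, here are illustrative examples (table names invented) of what this method accepts and rejects, following the checks above:

// Accepted - a table-level constraint may reference any column:
// create table t1 (a integer, b integer, check (a > b))
// Rejected with LANG_DB2_INVALID_CHECK_CONSTRAINT - a column-level
// constraint may only reference the column it is declared on:
// create table t2 (a integer, b integer check (a > 0))
// Rejected with LANG_INVALID_CHECK_CONSTRAINT - no aggregates allowed:
// create table t3 (a integer check (max(a) > 0))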