Search in sources:

Example 1 with HsqlArrayList

Use of org.hsqldb_voltpatches.lib.HsqlArrayList in project voltdb by VoltDB.

In class ParserDDL, the method compileCreateTrigger:

StatementSchema compileCreateTrigger() {
    Table table;
    boolean isForEachRow = false;
    boolean isNowait = false;
    boolean hasQueueSize = false;
    Integer queueSize = TriggerDef.defaultQueueSize;
    String beforeOrAfter;
    int beforeOrAfterType;
    String operation;
    int operationType;
    String className;
    TriggerDef td;
    HsqlName name;
    HsqlName otherName = null;
    OrderedHashSet columns = null;
    int[] updateColumnIndexes = null;
    read();
    name = readNewSchemaObjectName(SchemaObject.TRIGGER);
    switch(token.tokenType) {
        case Tokens.INSTEAD:
            beforeOrAfter = token.tokenString;
            beforeOrAfterType = token.tokenType;
            read();
            readThis(Tokens.OF);
            break;
        case Tokens.BEFORE:
        case Tokens.AFTER:
            beforeOrAfter = token.tokenString;
            beforeOrAfterType = token.tokenType;
            read();
            break;
        default:
            throw unexpectedToken();
    }
    switch(token.tokenType) {
        case Tokens.INSERT:
        case Tokens.DELETE:
            operation = token.tokenString;
            operationType = token.tokenType;
            read();
            break;
        case Tokens.UPDATE:
            operation = token.tokenString;
            operationType = token.tokenType;
            read();
            if (token.tokenType == Tokens.OF && beforeOrAfterType != Tokens.INSTEAD) {
                read();
                columns = readColumnNames(false);
            }
            break;
        default:
            throw unexpectedToken();
    }
    readThis(Tokens.ON);
    table = readTableName();
    if (token.tokenType == Tokens.BEFORE) {
        read();
        checkIsSimpleName();
        otherName = readNewSchemaObjectName(SchemaObject.TRIGGER);
    }
    name.setSchemaIfNull(table.getSchemaName());
    checkSchemaUpdateAuthorisation(name.schema);
    if (beforeOrAfterType == Tokens.INSTEAD) {
        if (!table.isView() || ((View) table).getCheckOption() == SchemaObject.ViewCheckModes.CHECK_CASCADE) {
            throw Error.error(ErrorCode.X_42538, name.schema.name);
        }
    } else {
        if (table.isView()) {
            throw Error.error(ErrorCode.X_42538, name.schema.name);
        }
    }
    if (name.schema != table.getSchemaName()) {
        throw Error.error(ErrorCode.X_42505, name.schema.name);
    }
    name.parent = table.getName();
    database.schemaManager.checkSchemaObjectNotExists(name);
    if (columns != null) {
        updateColumnIndexes = table.getColumnIndexes(columns);
        for (int i = 0; i < updateColumnIndexes.length; i++) {
            if (updateColumnIndexes[i] == -1) {
                throw Error.error(ErrorCode.X_42544, (String) columns.get(i));
            }
        }
    }
    Expression condition = null;
    String oldTableName = null;
    String newTableName = null;
    String oldRowName = null;
    String newRowName = null;
    Table[] transitions = new Table[4];
    RangeVariable[] rangeVars = new RangeVariable[4];
    HsqlArrayList compiledStatements = new HsqlArrayList();
    String conditionSQL = null;
    String procedureSQL = null;
    if (token.tokenType == Tokens.REFERENCING) {
        read();
        if (token.tokenType != Tokens.OLD && token.tokenType != Tokens.NEW) {
            throw unexpectedToken();
        }
        while (true) {
            if (token.tokenType == Tokens.OLD) {
                if (operationType == Tokens.INSERT) {
                    throw unexpectedToken();
                }
                read();
                if (token.tokenType == Tokens.TABLE) {
                    if (oldTableName != null || beforeOrAfterType == Tokens.BEFORE) {
                        throw unexpectedToken();
                    }
                    read();
                    readIfThis(Tokens.AS);
                    checkIsSimpleName();
                    oldTableName = token.tokenString;
                    String n = oldTableName;
                    if (n.equals(newTableName) || n.equals(oldRowName) || n.equals(newRowName)) {
                        throw unexpectedToken();
                    }
                    HsqlName hsqlName = database.nameManager.newHsqlName(table.getSchemaName(), n, isDelimitedIdentifier(), SchemaObject.TRANSITION);
                    Table transition = new Table(table, hsqlName);
                    RangeVariable range = new RangeVariable(transition, null, null, null, compileContext);
                    transitions[TriggerDef.OLD_TABLE] = transition;
                    rangeVars[TriggerDef.OLD_TABLE] = range;
                } else if (token.tokenType == Tokens.ROW) {
                    if (oldRowName != null) {
                        throw unexpectedToken();
                    }
                    read();
                    readIfThis(Tokens.AS);
                    checkIsSimpleName();
                    oldRowName = token.tokenString;
                    String n = oldRowName;
                    if (n.equals(newTableName) || n.equals(oldTableName) || n.equals(newRowName)) {
                        throw unexpectedToken();
                    }
                    isForEachRow = true;
                    HsqlName hsqlName = database.nameManager.newHsqlName(table.getSchemaName(), n, isDelimitedIdentifier(), SchemaObject.TRANSITION);
                    Table transition = new Table(table, hsqlName);
                    RangeVariable range = new RangeVariable(transition, null, null, null, compileContext);
                    transitions[TriggerDef.OLD_ROW] = transition;
                    rangeVars[TriggerDef.OLD_ROW] = range;
                } else {
                    throw unexpectedToken();
                }
            } else if (token.tokenType == Tokens.NEW) {
                if (operationType == Tokens.DELETE) {
                    throw unexpectedToken();
                }
                read();
                if (token.tokenType == Tokens.TABLE) {
                    if (newTableName != null || beforeOrAfterType == Tokens.BEFORE) {
                        throw unexpectedToken();
                    }
                    read();
                    readIfThis(Tokens.AS);
                    checkIsSimpleName();
                    newTableName = token.tokenString;
                    String n = newTableName;
                    if (n.equals(oldTableName) || n.equals(oldRowName) || n.equals(newRowName)) {
                        throw unexpectedToken();
                    }
                    HsqlName hsqlName = database.nameManager.newHsqlName(table.getSchemaName(), n, isDelimitedIdentifier(), SchemaObject.TRANSITION);
                    Table transition = new Table(table, hsqlName);
                    RangeVariable range = new RangeVariable(transition, null, null, null, compileContext);
                    transitions[TriggerDef.NEW_TABLE] = transition;
                    rangeVars[TriggerDef.NEW_TABLE] = range;
                } else if (token.tokenType == Tokens.ROW) {
                    if (newRowName != null) {
                        throw unexpectedToken();
                    }
                    read();
                    readIfThis(Tokens.AS);
                    checkIsSimpleName();
                    newRowName = token.tokenString;
                    isForEachRow = true;
                    String n = newRowName;
                    if (n.equals(oldTableName) || n.equals(newTableName) || n.equals(oldRowName)) {
                        throw unexpectedToken();
                    }
                    HsqlName hsqlName = database.nameManager.newHsqlName(table.getSchemaName(), n, isDelimitedIdentifier(), SchemaObject.TRANSITION);
                    Table transition = new Table(table, hsqlName);
                    RangeVariable range = new RangeVariable(transition, null, null, null, compileContext);
                    transitions[TriggerDef.NEW_ROW] = transition;
                    rangeVars[TriggerDef.NEW_ROW] = range;
                } else {
                    throw unexpectedToken();
                }
            } else {
                break;
            }
            read();
        }
    }
    if (isForEachRow && token.tokenType != Tokens.FOR) {
        throw unexpectedToken();
    }
    // "FOR EACH ROW" or "CALL"
    if (token.tokenType == Tokens.FOR) {
        read();
        readThis(Tokens.EACH);
        if (token.tokenType == Tokens.ROW) {
            isForEachRow = true;
        } else if (token.tokenType == Tokens.STATEMENT) {
            if (isForEachRow) {
                throw unexpectedToken();
            }
        } else {
            throw unexpectedToken();
        }
        read();
    }
    //
    if (rangeVars[TriggerDef.OLD_TABLE] != null) {
    }
    if (rangeVars[TriggerDef.NEW_TABLE] != null) {
    }
    //
    if (Tokens.T_NOWAIT.equals(token.tokenString)) {
        read();
        isNowait = true;
    } else if (Tokens.T_QUEUE.equals(token.tokenString)) {
        read();
        queueSize = readInteger();
        hasQueueSize = true;
    }
    if (token.tokenType == Tokens.WHEN && beforeOrAfterType != Tokens.INSTEAD) {
        read();
        readThis(Tokens.OPENBRACKET);
        int position = getPosition();
        isCheckOrTriggerCondition = true;
        condition = XreadBooleanValueExpression();
        conditionSQL = getLastPart(position);
        isCheckOrTriggerCondition = false;
        readThis(Tokens.CLOSEBRACKET);
        HsqlList unresolved = condition.resolveColumnReferences(rangeVars, null);
        ExpressionColumn.checkColumnsResolved(unresolved);
        condition.resolveTypes(session, null);
        if (condition.getDataType() != Type.SQL_BOOLEAN) {
            throw Error.error(ErrorCode.X_42568);
        }
    }
    if (token.tokenType == Tokens.CALL) {
        read();
        checkIsSimpleName();
        checkIsDelimitedIdentifier();
        className = token.tokenString;
        read();
        td = new TriggerDef(name, beforeOrAfter, operation, isForEachRow, table, transitions, rangeVars, condition, conditionSQL, updateColumnIndexes, className, isNowait, queueSize);
        String sql = getLastPart();
        Object[] args = new Object[] { td, otherName };
        return new StatementSchema(sql, StatementTypes.CREATE_TRIGGER, args, null, table.getName());
    }
    //
    if (isNowait) {
        throw unexpectedToken(Tokens.T_NOWAIT);
    }
    if (hasQueueSize) {
        throw unexpectedToken(Tokens.T_QUEUE);
    }
    // procedure
    boolean isBlock = false;
    if (readIfThis(Tokens.BEGIN)) {
        readThis(Tokens.ATOMIC);
        isBlock = true;
    }
    int position = getPosition();
    while (true) {
        StatementDMQL cs = null;
        switch(token.tokenType) {
            case Tokens.INSERT:
                if (beforeOrAfterType == Tokens.BEFORE) {
                    throw unexpectedToken();
                }
                cs = compileInsertStatement(rangeVars);
                compiledStatements.add(cs);
                if (isBlock) {
                    readThis(Tokens.SEMICOLON);
                }
                break;
            case Tokens.UPDATE:
                if (beforeOrAfterType == Tokens.BEFORE) {
                    throw unexpectedToken();
                }
                cs = compileUpdateStatement(rangeVars);
                compiledStatements.add(cs);
                if (isBlock) {
                    readThis(Tokens.SEMICOLON);
                }
                break;
            case Tokens.DELETE:
                if (beforeOrAfterType == Tokens.BEFORE) {
                    throw unexpectedToken();
                }
                cs = compileDeleteStatement(rangeVars);
                compiledStatements.add(cs);
                if (isBlock) {
                    readThis(Tokens.SEMICOLON);
                }
                break;
            case Tokens.MERGE:
                if (beforeOrAfterType == Tokens.BEFORE) {
                    throw unexpectedToken();
                }
                cs = compileMergeStatement(rangeVars);
                compiledStatements.add(cs);
                if (isBlock) {
                    readThis(Tokens.SEMICOLON);
                }
                break;
            case Tokens.SET:
                if (beforeOrAfterType != Tokens.BEFORE || operationType == Tokens.DELETE) {
                    throw unexpectedToken();
                }
                cs = compileTriggerSetStatement(table, rangeVars);
                compiledStatements.add(cs);
                if (isBlock) {
                    readThis(Tokens.SEMICOLON);
                }
                break;
            case Tokens.END:
                break;
            default:
                throw unexpectedToken();
        }
        if (!isBlock || token.tokenType == Tokens.END) {
            break;
        }
    }
    procedureSQL = getLastPart(position);
    if (isBlock) {
        readThis(Tokens.END);
    }
    StatementDMQL[] csArray = new StatementDMQL[compiledStatements.size()];
    compiledStatements.toArray(csArray);
    OrderedHashSet references = compileContext.getSchemaObjectNames();
    for (int i = 0; i < csArray.length; i++) {
        Table targetTable = csArray[i].targetTable;
        boolean[] check = csArray[i].getInsertOrUpdateColumnCheckList();
        if (check != null) {
            targetTable.getColumnNames(check, references);
        }
    }
    references.remove(table.getName());
    td = new TriggerDefSQL(name, beforeOrAfter, operation, isForEachRow, table, transitions, rangeVars, condition, conditionSQL, updateColumnIndexes, csArray, procedureSQL, references);
    String sql = getLastPart();
    Object[] args = new Object[] { td, otherName };
    return new StatementSchema(sql, StatementTypes.CREATE_TRIGGER, args, null, table.getName());
}
Also used: HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList), HsqlList (org.hsqldb_voltpatches.lib.HsqlList), OrderedHashSet (org.hsqldb_voltpatches.lib.OrderedHashSet), HsqlName (org.hsqldb_voltpatches.HsqlNameManager.HsqlName)
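
The HsqlArrayList usage in this example is the accumulate-then-copy idiom: compiledStatements collects the trigger body statements with add(), and is then drained into a caller-allocated StatementDMQL[] via size() and toArray(csArray). A minimal sketch of the same idiom, relying only on the add/size/toArray(target) calls shown above (the String payload and the helper class are illustrative, not part of the parser):

import org.hsqldb_voltpatches.lib.HsqlArrayList;

class AccumulateThenCopy {
    static String[] collect(String[] inputs) {
        // Collect an unknown number of elements first ...
        HsqlArrayList list = new HsqlArrayList();
        for (String s : inputs) {
            list.add(s);
        }
        // ... then copy into a caller-allocated array, as the parser does
        // with StatementDMQL[] csArray above.
        String[] result = new String[list.size()];
        list.toArray(result);
        return result;
    }
}

Note that the target array is sized by the caller before the copy, which is why the parser allocates csArray with compiledStatements.size() and only then calls toArray.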

Example 2 with HsqlArrayList

Use of org.hsqldb_voltpatches.lib.HsqlArrayList in project voltdb by VoltDB.

In class ParserDDL, the method readConstraint:

/**
     * Reads and adds a table constraint definition to the list
     *
     * @param schemaObject table or domain
     * @param constraintList list of constraints
     */
private void readConstraint(SchemaObject schemaObject, HsqlArrayList constraintList) {
    HsqlName constName = null;
    boolean isAutogeneratedName = true;
    if (token.tokenType == Tokens.CONSTRAINT) {
        read();
        constName = readNewDependentSchemaObjectName(schemaObject.getName(), SchemaObject.CONSTRAINT);
        isAutogeneratedName = false;
    }
    // A VoltDB extension to support indexed expressions and the assume unique attribute
    // For VoltDB
    boolean assumeUnique = false;
    // End of VoltDB extension
    switch(token.tokenType) {
        case Tokens.PRIMARY:
            {
                if (schemaObject.getName().type != SchemaObject.TABLE) {
                    throw this.unexpectedTokenRequire(Tokens.T_CHECK);
                }
                read();
                readThis(Tokens.KEY);
                Constraint mainConst;
                mainConst = (Constraint) constraintList.get(0);
                if (mainConst.constType == Constraint.PRIMARY_KEY) {
                    throw Error.error(ErrorCode.X_42532);
                }
                if (constName == null) {
                    constName = database.nameManager.newAutoName("PK", schemaObject.getSchemaName(), schemaObject.getName(), SchemaObject.CONSTRAINT);
                }
                OrderedHashSet set = readColumnNames(false);
                Constraint c = new Constraint(constName, isAutogeneratedName, set, Constraint.PRIMARY_KEY);
                constraintList.set(0, c);
                break;
            }
        // A VoltDB extension to support indexed expressions and the assume unique attribute
        case Tokens.ASSUMEUNIQUE:
            assumeUnique = true;
        // End of VoltDB extension
        case Tokens.UNIQUE:
            {
                if (schemaObject.getName().type != SchemaObject.TABLE) {
                    throw this.unexpectedTokenRequire(Tokens.T_CHECK);
                }
                read();
                // A VoltDB extension to "readColumnNames(false)" to support indexed expressions.
                java.util.List<Expression> indexExprs = XreadExpressions(null);
                OrderedHashSet set = getSimpleColumnNames(indexExprs);
                if (constName == null) {
                    constName = database.nameManager.newAutoName("CT", schemaObject.getSchemaName(), schemaObject.getName(), SchemaObject.CONSTRAINT);
                }
                // A VoltDB extension to support indexed expressions.
                boolean hasNonColumnExprs = false;
                if (set == null) {
                    hasNonColumnExprs = true;
                    set = getBaseColumnNames(indexExprs);
                }
                // End of VoltDB extension
                Constraint c = new Constraint(constName, isAutogeneratedName, set, Constraint.UNIQUE);
                // A VoltDB extension to support indexed expressions and assume unique attribute.
                c.setAssumeUnique(assumeUnique);
                if (hasNonColumnExprs) {
                    c = c.withExpressions(indexExprs.toArray(new Expression[indexExprs.size()]));
                }
                // End of VoltDB extension
                constraintList.add(c);
                break;
            }
        case Tokens.FOREIGN:
            {
                if (schemaObject.getName().type != SchemaObject.TABLE) {
                    throw this.unexpectedTokenRequire(Tokens.T_CHECK);
                }
                read();
                readThis(Tokens.KEY);
                OrderedHashSet set = readColumnNames(false);
                Constraint c = readFKReferences((Table) schemaObject, constName, set);
                constraintList.add(c);
                break;
            }
        case Tokens.CHECK:
            {
                read();
                if (constName == null) {
                    constName = database.nameManager.newAutoName("CT", schemaObject.getSchemaName(), schemaObject.getName(), SchemaObject.CONSTRAINT);
                }
                Constraint c = new Constraint(constName, isAutogeneratedName, null, Constraint.CHECK);
                readCheckConstraintCondition(c);
                constraintList.add(c);
                break;
            }
        // A VoltDB extension to support LIMIT PARTITION ROWS
        case Tokens.LIMIT:
            {
                read();
                for (int i = 0; i < constraintList.size(); i++) {
                    if (((Constraint) constraintList.get(i)).getConstraintType() == Constraint.LIMIT) {
                        throw Error.error(ErrorCode.X_42524, String.format("Multiple LIMIT PARTITION ROWS constraints on table %s are forbidden.", schemaObject.getName().name));
                    }
                }
                if (constName == null) {
                    constName = database.nameManager.newAutoName("LIMIT", schemaObject.getSchemaName(), schemaObject.getName(), SchemaObject.CONSTRAINT);
                }
                Constraint c = new Constraint(constName, isAutogeneratedName, null, Constraint.LIMIT);
                readLimitConstraintCondition(c);
                constraintList.add(c);
                break;
            }
        // End of VoltDB extension
        default:
            {
                if (constName != null) {
                    throw Error.error(ErrorCode.X_42581);
                }
            }
    }
}
Also used: OrderedHashSet (org.hsqldb_voltpatches.lib.OrderedHashSet), HsqlName (org.hsqldb_voltpatches.HsqlNameManager.HsqlName), HsqlList (org.hsqldb_voltpatches.lib.HsqlList), HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList)
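
Here the HsqlArrayList is used as a positional structure rather than a plain accumulator: slot 0 is reserved for the primary-key constraint (the caller seeds it with a Constraint.TEMP placeholder, as Example 4 below shows), the PRIMARY branch inspects it with get(0) and swaps it with set(0, c), and every other branch appends with add(c). A hedged sketch of that convention, with plain Strings standing in for HSQLDB's Constraint objects:

import org.hsqldb_voltpatches.lib.HsqlArrayList;

class ConstraintSlots {
    static void demo() {
        HsqlArrayList constraintList = new HsqlArrayList();
        // The caller seeds slot 0 with a placeholder (Constraint.TEMP in the real code).
        constraintList.add("TEMP placeholder");
        // The PRIMARY KEY branch inspects slot 0 and replaces it in place.
        Object mainConst = constraintList.get(0);
        if ("TEMP placeholder".equals(mainConst)) {
            constraintList.set(0, "PRIMARY KEY constraint");
        }
        // UNIQUE / FOREIGN KEY / CHECK / LIMIT constraints are simply appended.
        constraintList.add("UNIQUE constraint");
        constraintList.add("CHECK constraint");
    }
}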

Example 3 with HsqlArrayList

Use of org.hsqldb_voltpatches.lib.HsqlArrayList in project voltdb by VoltDB.

In class ParserDML, the method compileMergeStatement:

/**
     * Retrieves a MERGE Statement from this parse context.
     */
StatementDMQL compileMergeStatement(RangeVariable[] outerRanges) {
    boolean[] insertColumnCheckList;
    int[] insertColumnMap = null;
    int[] updateColumnMap = null;
    int[] baseUpdateColumnMap;
    Table table;
    RangeVariable targetRange;
    RangeVariable sourceRange;
    Expression mergeCondition;
    HsqlArrayList updateList = new HsqlArrayList();
    Expression[] updateExpressions = null;
    HsqlArrayList insertList = new HsqlArrayList();
    Expression insertExpression = null;
    read();
    readThis(Tokens.INTO);
    targetRange = readSimpleRangeVariable(StatementTypes.MERGE);
    table = targetRange.rangeTable;
    readThis(Tokens.USING);
    sourceRange = readTableOrSubquery();
    // parse ON search conditions
    readThis(Tokens.ON);
    mergeCondition = XreadBooleanValueExpression();
    if (mergeCondition.getDataType() != Type.SQL_BOOLEAN) {
        throw Error.error(ErrorCode.X_42568);
    }
    RangeVariable[] fullRangeVars = new RangeVariable[] { sourceRange, targetRange };
    RangeVariable[] sourceRangeVars = new RangeVariable[] { sourceRange };
    RangeVariable[] targetRangeVars = new RangeVariable[] { targetRange };
    // parse WHEN clause(s) and convert lists to arrays
    insertColumnMap = table.getColumnMap();
    insertColumnCheckList = table.getNewColumnCheckList();
    OrderedHashSet updateColNames = new OrderedHashSet();
    OrderedHashSet insertColNames = new OrderedHashSet();
    readMergeWhen(insertColNames, updateColNames, insertList, updateList, targetRangeVars, sourceRange);
    if (insertList.size() > 0) {
        int colCount = insertColNames.size();
        if (colCount != 0) {
            insertColumnMap = table.getColumnIndexes(insertColNames);
            insertColumnCheckList = table.getColumnCheckList(insertColumnMap);
        }
        insertExpression = (Expression) insertList.get(0);
        setParameterTypes(insertExpression, table, insertColumnMap);
    }
    if (updateList.size() > 0) {
        updateExpressions = new Expression[updateList.size()];
        updateList.toArray(updateExpressions);
        updateColumnMap = table.getColumnIndexes(updateColNames);
    }
    if (updateExpressions != null) {
        Table baseTable = table.getBaseTable();
        baseUpdateColumnMap = updateColumnMap;
        if (table != baseTable) {
            baseUpdateColumnMap = new int[updateColumnMap.length];
            ArrayUtil.projectRow(table.getBaseTableColumnMap(), updateColumnMap, baseUpdateColumnMap);
        }
        resolveUpdateExpressions(table, sourceRangeVars, updateColumnMap, updateExpressions, outerRanges);
    }
    HsqlList unresolved = null;
    unresolved = mergeCondition.resolveColumnReferences(fullRangeVars, null);
    ExpressionColumn.checkColumnsResolved(unresolved);
    mergeCondition.resolveTypes(session, null);
    if (mergeCondition.isParam()) {
        mergeCondition.dataType = Type.SQL_BOOLEAN;
    }
    if (mergeCondition.getDataType() != Type.SQL_BOOLEAN) {
        throw Error.error(ErrorCode.X_42568);
    }
    RangeVariableResolver resolver = new RangeVariableResolver(fullRangeVars, mergeCondition, compileContext);
    resolver.processConditions();
    fullRangeVars = resolver.rangeVariables;
    if (insertExpression != null) {
        unresolved = insertExpression.resolveColumnReferences(sourceRangeVars, unresolved);
        ExpressionColumn.checkColumnsResolved(unresolved);
        insertExpression.resolveTypes(session, null);
    }
    StatementDMQL cs = new StatementDML(session, fullRangeVars, insertColumnMap, updateColumnMap, insertColumnCheckList, mergeCondition, insertExpression, updateExpressions, compileContext);
    return cs;
}
Also used: HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList), HsqlList (org.hsqldb_voltpatches.lib.HsqlList), OrderedHashSet (org.hsqldb_voltpatches.lib.OrderedHashSet)

Example 4 with HsqlArrayList

Use of org.hsqldb_voltpatches.lib.HsqlArrayList in project voltdb by VoltDB.

In class ParserDDL, the method compileAlterTableAddColumn:

Statement compileAlterTableAddColumn(Table table) {
    int colIndex = table.getColumnCount();
    HsqlArrayList list = new HsqlArrayList();
    Constraint constraint = new Constraint(null, true, null, Constraint.TEMP);
    list.add(constraint);
    checkIsSchemaObjectName();
    HsqlName hsqlName = database.nameManager.newColumnHsqlName(table.getName(), token.tokenString, isDelimitedIdentifier());
    read();
    ColumnSchema column = readColumnDefinitionOrNull(table, hsqlName, list);
    if (column == null) {
        throw Error.error(ErrorCode.X_42000);
    }
    if (token.tokenType == Tokens.BEFORE) {
        read();
        colIndex = table.getColumnIndex(token.tokenString);
        read();
    }
    String sql = getLastPart();
    Object[] args = new Object[] { column, new Integer(colIndex), list };
    return new StatementSchema(sql, StatementTypes.ALTER_TABLE, args, null, table.getName());
}
Also used: HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList), HsqlName (org.hsqldb_voltpatches.HsqlNameManager.HsqlName)
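
This example uses the list as an out-parameter: it is seeded with a temporary constraint, handed to readColumnDefinitionOrNull so the column parser can append any inline constraints it encounters, and then shipped whole inside the statement's args array. A rough sketch of that flow, with a hypothetical parseColumn helper and String payloads in place of the real Constraint and ColumnSchema types:

import org.hsqldb_voltpatches.lib.HsqlArrayList;

class OutParameterList {
    // Hypothetical stand-in for readColumnDefinitionOrNull: it appends whatever
    // it discovers to the list the caller owns.
    static String parseColumn(HsqlArrayList constraints) {
        constraints.add("NOT NULL constraint discovered while parsing the column");
        return "new column";
    }

    static Object[] buildArgs() {
        HsqlArrayList list = new HsqlArrayList();
        list.add("TEMP placeholder constraint");   // seeded before parsing, as in the example
        String column = parseColumn(list);          // the parser appends inline constraints
        // The list itself is packed into the statement args, much as the example
        // packs { column, colIndex, list }.
        return new Object[] { column, Integer.valueOf(0), list };
    }
}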

Example 5 with HsqlArrayList

Use of org.hsqldb_voltpatches.lib.HsqlArrayList in project voltdb by VoltDB.

In class ParserDDL, the method compileTriggerSetStatement:

/**
     * Creates SET Statement for a trigger row from this parse context.
     */
StatementDMQL compileTriggerSetStatement(Table table, RangeVariable[] rangeVars) {
    read();
    Expression[] updateExpressions;
    int[] columnMap;
    OrderedHashSet colNames = new OrderedHashSet();
    HsqlArrayList exprList = new HsqlArrayList();
    RangeVariable[] targetRangeVars = new RangeVariable[] { rangeVars[TriggerDef.NEW_ROW] };
    readSetClauseList(targetRangeVars, colNames, exprList);
    columnMap = table.getColumnIndexes(colNames);
    updateExpressions = new Expression[exprList.size()];
    exprList.toArray(updateExpressions);
    resolveUpdateExpressions(table, rangeVars, columnMap, updateExpressions, RangeVariable.emptyArray);
    StatementDMQL cs = new StatementDML(session, table, rangeVars, columnMap, updateExpressions, compileContext);
    return cs;
}
Also used: HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList), OrderedHashSet (org.hsqldb_voltpatches.lib.OrderedHashSet)

Aggregations

HsqlArrayList (org.hsqldb_voltpatches.lib.HsqlArrayList): 69 usages
HsqlName (org.hsqldb_voltpatches.HsqlNameManager.HsqlName): 21 usages
Iterator (org.hsqldb_voltpatches.lib.Iterator): 14 usages
HsqlList (org.hsqldb_voltpatches.lib.HsqlList): 11 usages
OrderedHashSet (org.hsqldb_voltpatches.lib.OrderedHashSet): 10 usages
WrapperIterator (org.hsqldb_voltpatches.lib.WrapperIterator): 10 usages
Table (org.hsqldb_voltpatches.Table): 7 usages
Type (org.hsqldb_voltpatches.types.Type): 6 usages
SchemaObject (org.hsqldb_voltpatches.SchemaObject): 5 usages
Method (java.lang.reflect.Method): 4 usages
Constraint (org.hsqldb_voltpatches.Constraint): 3 usages
HashMappedList (org.hsqldb_voltpatches.lib.HashMappedList): 3 usages
Grantee (org.hsqldb_voltpatches.rights.Grantee): 3 usages
PersistentStore (org.hsqldb_voltpatches.persist.PersistentStore): 2 usages
Result (org.hsqldb_voltpatches.result.Result): 2 usages
BufferedOutputStream (java.io.BufferedOutputStream): 1 usage
IOException (java.io.IOException): 1 usage
OutputStream (java.io.OutputStream): 1 usage
HSQLParseException (org.hsqldb_voltpatches.HSQLInterface.HSQLParseException): 1 usage
NumberSequence (org.hsqldb_voltpatches.NumberSequence): 1 usage