Use of org.h2.dev.util.BinaryArithmeticStream.In in project h2database by h2database.
In class AlterTableAlterColumn, method cloneTableStructure:
private Table cloneTableStructure(Table table, Column[] columns, Database db, String tempName, ArrayList<Column> newColumns) {
    for (Column col : columns) {
        newColumns.add(col.getClone());
    }
    if (type == CommandInterface.ALTER_TABLE_DROP_COLUMN) {
        for (Column removeCol : columnsToRemove) {
            Column foundCol = null;
            for (Column newCol : newColumns) {
                if (newCol.getName().equals(removeCol.getName())) {
                    foundCol = newCol;
                    break;
                }
            }
            if (foundCol == null) {
                throw DbException.throwInternalError(removeCol.getCreateSQL());
            }
            newColumns.remove(foundCol);
        }
    } else if (type == CommandInterface.ALTER_TABLE_ADD_COLUMN) {
        int position;
        if (addFirst) {
            position = 0;
        } else if (addBefore != null) {
            position = table.getColumn(addBefore).getColumnId();
        } else if (addAfter != null) {
            position = table.getColumn(addAfter).getColumnId() + 1;
        } else {
            position = columns.length;
        }
        if (columnsToAdd != null) {
            for (Column column : columnsToAdd) {
                newColumns.add(position++, column);
            }
        }
    } else if (type == CommandInterface.ALTER_TABLE_ALTER_COLUMN_CHANGE_TYPE) {
        int position = oldColumn.getColumnId();
        newColumns.set(position, newColumn);
    }
    // create a table object in order to get the SQL statement
    // (we can't just use this table, because most column objects are
    // 'shared' with the old table)
    // a new id is still needed, because using id 0 would make the new
    // table try to use the rows of table 0 (the meta table)
    int id = db.allocateObjectId();
    CreateTableData data = new CreateTableData();
    data.tableName = tempName;
    data.id = id;
    data.columns = newColumns;
    data.temporary = table.isTemporary();
    data.persistData = table.isPersistData();
    data.persistIndexes = table.isPersistIndexes();
    data.isHidden = table.isHidden();
    data.create = true;
    data.session = session;
    Table newTable = getSchema().createTable(data);
    newTable.setComment(table.getComment());
    StringBuilder buff = new StringBuilder();
    buff.append(newTable.getCreateSQL());
    StringBuilder columnList = new StringBuilder();
    for (Column nc : newColumns) {
        if (columnList.length() > 0) {
            columnList.append(", ");
        }
        if (type == CommandInterface.ALTER_TABLE_ADD_COLUMN && columnsToAdd != null && columnsToAdd.contains(nc)) {
            Expression def = nc.getDefaultExpression();
            columnList.append(def == null ? "NULL" : def.getSQL());
        } else {
            columnList.append(nc.getSQL());
        }
    }
    buff.append(" AS SELECT ");
    if (columnList.length() == 0) {
        // special case: insert into test select * from
        buff.append('*');
    } else {
        buff.append(columnList);
    }
    buff.append(" FROM ").append(table.getSQL());
    String newTableSQL = buff.toString();
    String newTableName = newTable.getName();
    Schema newTableSchema = newTable.getSchema();
    newTable.removeChildrenAndResources(session);
    execute(newTableSQL, true);
    newTable = newTableSchema.getTableOrView(session, newTableName);
    ArrayList<String> triggers = New.arrayList();
    for (DbObject child : table.getChildren()) {
        if (child instanceof Sequence) {
            continue;
        } else if (child instanceof Index) {
            Index idx = (Index) child;
            if (idx.getIndexType().getBelongsToConstraint()) {
                continue;
            }
        }
        String createSQL = child.getCreateSQL();
        if (createSQL == null) {
            continue;
        }
        if (child instanceof TableView) {
            continue;
        } else if (child.getType() == DbObject.TABLE_OR_VIEW) {
            DbException.throwInternalError();
        }
        String quotedName = Parser.quoteIdentifier(tempName + "_" + child.getName());
        String sql = null;
        if (child instanceof ConstraintReferential) {
            ConstraintReferential r = (ConstraintReferential) child;
            if (r.getTable() != table) {
                sql = r.getCreateSQLForCopy(r.getTable(), newTable, quotedName, false);
            }
        }
        if (sql == null) {
            sql = child.getCreateSQLForCopy(newTable, quotedName);
        }
        if (sql != null) {
            if (child instanceof TriggerObject) {
                triggers.add(sql);
            } else {
                execute(sql, true);
            }
        }
    }
    table.setModified();
    // remove the sequences from the columns (except dropped columns),
    // otherwise the sequence is dropped if the table is dropped
    for (Column col : newColumns) {
        Sequence seq = col.getSequence();
        if (seq != null) {
            table.removeSequence(seq);
            col.setSequence(null);
        }
    }
    for (String sql : triggers) {
        execute(sql, true);
    }
    return newTable;
}
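For context, cloneTableStructure backs the ALTER TABLE variants that H2 cannot apply in place: a temporary copy of the table is created with the new column list and the rows are moved over with CREATE TABLE ... AS SELECT. The JDBC sketch below (database URL, table and column names are invented for illustration) shows statements that typically take this copy path:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class AlterTableCloneDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            // adding a column at a given position rebuilds the table (ALTER_TABLE_ADD_COLUMN)
            stat.execute("ALTER TABLE TEST ADD COLUMN CREATED TIMESTAMP BEFORE NAME");
            // changing a column's data type rebuilds it as well (ALTER_TABLE_ALTER_COLUMN_CHANGE_TYPE)
            stat.execute("ALTER TABLE TEST ALTER COLUMN NAME VARCHAR(100)");
            // dropping a column recreates the table without it (ALTER_TABLE_DROP_COLUMN)
            stat.execute("ALTER TABLE TEST DROP COLUMN CREATED");
        }
    }
}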
Use of org.h2.dev.util.BinaryArithmeticStream.In in project h2database by h2database.
In class Parser, method parseWith:
private Prepared parseWith() {
    List<TableView> viewsCreated = new ArrayList<>();
    readIf("RECURSIVE");
    // this WITH statement might not be a temporary view - an optional keyword
    // tells us that. This feature will not be documented - H2 internal use only.
    boolean isPersistent = readIf("PERSISTENT");
    // as in CREATE VIEW abc AS WITH my_cte - this auto-detects that condition
    if (session.isParsingCreateView()) {
        isPersistent = true;
    }
    do {
        viewsCreated.add(parseSingleCommonTableExpression(isPersistent));
    } while (readIf(","));
    Prepared p = null;
    // reverse the order of the constructed CTE views to get the destruction
    // order: a later view may depend on previously created views, and that
    // dependency order is preserved in the cleanup sequence used in
    // setCteCleanups
    Collections.reverse(viewsCreated);
    if (isToken("SELECT")) {
        Query query = parseSelectUnion();
        query.setPrepareAlways(true);
        query.setNeverLazy(true);
        p = query;
    } else if (readIf("INSERT")) {
        p = parseInsert();
        p.setPrepareAlways(true);
    } else if (readIf("UPDATE")) {
        p = parseUpdate();
        p.setPrepareAlways(true);
    } else if (readIf("MERGE")) {
        p = parseMerge();
        p.setPrepareAlways(true);
    } else if (readIf("DELETE")) {
        p = parseDelete();
        p.setPrepareAlways(true);
    } else if (readIf("CREATE")) {
        if (!isToken("TABLE")) {
            throw DbException.get(ErrorCode.SYNTAX_ERROR_1, WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS);
        }
        p = parseCreate();
        p.setPrepareAlways(true);
    } else {
        throw DbException.get(ErrorCode.SYNTAX_ERROR_1, WITH_STATEMENT_SUPPORTS_LIMITED_SUB_STATEMENTS);
    }
    // register the CTE views so they are cleaned up with the statement
    // (respecting their dependencies) - but only if they are not persistent
    if (!isPersistent) {
        p.setCteCleanups(viewsCreated);
    }
    return p;
}
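A minimal JDBC sketch of the sub-statements parseWith accepts most commonly, a SELECT (optionally RECURSIVE) and an INSERT; the database URL, table name and CTE name are invented for illustration:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ParseWithDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE NUMBERS(N INT)");
            // WITH followed by SELECT - the common case, here with recursion
            try (ResultSet rs = stat.executeQuery(
                    "WITH RECURSIVE CNT(N) AS (SELECT 1 UNION ALL SELECT N + 1 FROM CNT WHERE N < 5) "
                            + "SELECT N FROM CNT")) {
                while (rs.next()) {
                    System.out.println(rs.getInt(1));
                }
            }
            // WITH followed by INSERT - one of the other sub-statements the parser allows
            stat.execute("WITH CNT(N) AS (SELECT 1 UNION ALL SELECT 2) "
                    + "INSERT INTO NUMBERS SELECT N FROM CNT");
        }
    }
}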
Use of org.h2.dev.util.BinaryArithmeticStream.In in project h2database by h2database.
In class Insert, method prepareUpdateCondition:
private Expression prepareUpdateCondition(Index foundIndex) {
    // MVPrimaryIndex is playing fast and loose with its implementation of
    // the Index interface.
    // It returns all of the columns in the table when we call
    // getIndexColumns() or getColumns().
    // Don't have time right now to fix that, so just special-case it.
    final Column[] indexedColumns;
    if (foundIndex instanceof MVPrimaryIndex) {
        MVPrimaryIndex foundMV = (MVPrimaryIndex) foundIndex;
        indexedColumns = new Column[] { foundMV.getIndexColumns()[foundMV.getMainIndexColumn()].column };
    } else {
        indexedColumns = foundIndex.getColumns();
    }
    Expression[] row = list.get(getCurrentRowNumber() - 1);
    Expression condition = null;
    for (Column column : indexedColumns) {
        ExpressionColumn expr = new ExpressionColumn(session.getDatabase(), table.getSchema().getName(), table.getName(), column.getName());
        for (int i = 0; i < columns.length; i++) {
            if (expr.getColumnName().equals(columns[i].getName())) {
                if (condition == null) {
                    condition = new Comparison(session, Comparison.EQUAL, expr, row[i]);
                } else {
                    condition = new ConditionAndOr(ConditionAndOr.AND, condition, new Comparison(session, Comparison.EQUAL, expr, row[i]));
                }
                break;
            }
        }
    }
    return condition;
}
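This method is part of H2's handling of INSERT ... ON DUPLICATE KEY UPDATE: it builds the equality condition that locates the already-existing row through the violated index. A hedged JDBC sketch follows; the MySQL compatibility mode is enabled defensively (some H2 versions accept the syntax without it), and the table name is invented:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class OnDuplicateKeyDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo;MODE=MySQL");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            stat.execute("INSERT INTO TEST VALUES(1, 'first')");
            // the duplicate primary key turns this INSERT into an UPDATE of the existing row;
            // prepareUpdateCondition supplies the ID = 1 match on the primary key index
            stat.execute("INSERT INTO TEST VALUES(1, 'second') "
                    + "ON DUPLICATE KEY UPDATE NAME = 'second'");
        }
    }
}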
Use of org.h2.dev.util.BinaryArithmeticStream.In in project h2database by h2database.
In class MergeUsing, method prepare:
@Override
public void prepare() {
    onCondition.addFilterConditions(sourceTableFilter, true);
    onCondition.addFilterConditions(targetTableFilter, true);
    onCondition.mapColumns(sourceTableFilter, 2);
    onCondition.mapColumns(targetTableFilter, 1);
    if (keys == null) {
        HashSet<Column> targetColumns = buildColumnListFromOnCondition(targetTableFilter);
        keys = targetColumns.toArray(new Column[0]);
    }
    if (keys.length == 0) {
        throw DbException.get(ErrorCode.COLUMN_NOT_FOUND_1, "No references to target columns found in ON clause:" + targetTableFilter.toString());
    }
    if (sourceKeys == null) {
        HashSet<Column> sourceColumns = buildColumnListFromOnCondition(sourceTableFilter);
        sourceKeys = sourceColumns.toArray(new Column[0]);
    }
    if (sourceKeys.length == 0) {
        throw DbException.get(ErrorCode.COLUMN_NOT_FOUND_1, "No references to source columns found in ON clause:" + sourceTableFilter.toString());
    }
    // only do the optimize now - before this point the unoptimized column
    // data was still needed
    onCondition = onCondition.optimize(session);
    onCondition.createIndexConditions(session, sourceTableFilter);
    onCondition.createIndexConditions(session, targetTableFilter);
    if (columns == null) {
        if (!valuesExpressionList.isEmpty() && valuesExpressionList.get(0).length == 0) {
            // special case where table is used as a sequence
            columns = new Column[0];
        } else {
            columns = targetTable.getColumns();
        }
    }
    if (!valuesExpressionList.isEmpty()) {
        for (Expression[] expr : valuesExpressionList) {
            if (expr.length != columns.length) {
                throw DbException.get(ErrorCode.COLUMN_COUNT_DOES_NOT_MATCH);
            }
            for (int i = 0; i < expr.length; i++) {
                Expression e = expr[i];
                if (e != null) {
                    expr[i] = e.optimize(session);
                }
            }
        }
    } else {
        query.prepare();
    }
    int embeddedStatementsCount = 0;
    // prepare each of the embedded sub-commands (UPDATE / DELETE / INSERT)
    // so they can take part in the MERGE collaboration
    if (updateCommand != null) {
        updateCommand.setSourceTableFilter(sourceTableFilter);
        updateCommand.setCondition(appendOnCondition(updateCommand));
        updateCommand.prepare();
        embeddedStatementsCount++;
    }
    if (deleteCommand != null) {
        deleteCommand.setSourceTableFilter(sourceTableFilter);
        deleteCommand.setCondition(appendOnCondition(deleteCommand));
        deleteCommand.prepare();
        embeddedStatementsCount++;
    }
    if (insertCommand != null) {
        insertCommand.setSourceTableFilter(sourceTableFilter);
        insertCommand.prepare();
        embeddedStatementsCount++;
    }
    if (embeddedStatementsCount == 0) {
        throw DbException.get(ErrorCode.SYNTAX_ERROR_1, "At least UPDATE, DELETE or INSERT embedded statement must be supplied.");
    }
    // setup the targetMatchQuery - for detecting if the target row exists
    Expression targetMatchCondition = targetMatchQuery.getCondition();
    targetMatchCondition.addFilterConditions(sourceTableFilter, true);
    targetMatchCondition.mapColumns(sourceTableFilter, 2);
    targetMatchCondition = targetMatchCondition.optimize(session);
    targetMatchCondition.createIndexConditions(session, sourceTableFilter);
    targetMatchQuery.prepare();
}
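A minimal JDBC sketch of a MERGE ... USING statement: the ON clause is the onCondition mapped to both table filters above, and the WHEN clauses become the embedded UPDATE and INSERT sub-commands. The database URL, table and column names are invented for illustration:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class MergeUsingDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TARGET(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            stat.execute("CREATE TABLE SOURCE(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            stat.execute("INSERT INTO TARGET VALUES(1, 'old')");
            stat.execute("INSERT INTO SOURCE VALUES(1, 'changed'), (2, 'added')");
            // matched rows are updated, unmatched source rows are inserted
            stat.execute("MERGE INTO TARGET T USING SOURCE S ON (T.ID = S.ID) "
                    + "WHEN MATCHED THEN UPDATE SET T.NAME = S.NAME "
                    + "WHEN NOT MATCHED THEN INSERT (ID, NAME) VALUES (S.ID, S.NAME)");
        }
    }
}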
Use of org.h2.dev.util.BinaryArithmeticStream.In in project h2database by h2database.
In class ScriptBase, method openInput:
/**
 * Open the input stream.
 */
void openInput() {
    String file = getFileName();
    if (file == null) {
        return;
    }
    if (isEncrypted()) {
        initStore();
        in = new FileStoreInputStream(store, this, compressionAlgorithm != null, false);
    } else {
        InputStream inStream;
        try {
            inStream = FileUtils.newInputStream(file);
        } catch (IOException e) {
            throw DbException.convertIOException(e, file);
        }
        in = new BufferedInputStream(inStream, Constants.IO_BUFFER_SIZE);
        in = CompressTool.wrapInputStream(in, compressionAlgorithm, SCRIPT_SQL);
        if (in == null) {
            throw DbException.get(ErrorCode.FILE_NOT_FOUND_1, SCRIPT_SQL + " in " + file);
        }
    }
}
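openInput is used when a script file is read back, for example by RUNSCRIPT. A minimal JDBC sketch (the file name is a placeholder) that exercises the compressed, unencrypted branch; with a CIPHER option the FileStoreInputStream branch would be taken instead:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class RunScriptDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stat = conn.createStatement()) {
            stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY)");
            // write a ZIP-compressed script file
            stat.execute("SCRIPT TO 'backup.sql.zip' COMPRESSION ZIP");
            stat.execute("DROP TABLE TEST");
            // reading it back goes through openInput: the file is wrapped in a BufferedInputStream
            // and then in CompressTool.wrapInputStream to locate the script inside the ZIP
            stat.execute("RUNSCRIPT FROM 'backup.sql.zip' COMPRESSION ZIP");
        }
    }
}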