Search in sources :

Example 1 with Delete

use of org.h2.command.dml.Delete in project ignite by apache.

the class DmlStatementsProcessor method getPlanForStatement.

/**
 * Generate SELECT statements to retrieve the data to modify, and find fast UPDATE or DELETE arguments,
 * if available.
 *
 * @param schema Schema.
 * @param prepStmt JDBC statement.
 * @param errKeysPos Position of the erroneous-keys argument when the statement is re-run, or {@code null} on the first run.
 * @return Update plan.
 * @throws IgniteCheckedException If failed.
 */
@SuppressWarnings({ "unchecked", "ConstantConditions" })
private UpdatePlan getPlanForStatement(String schema, PreparedStatement prepStmt, @Nullable Integer errKeysPos) throws IgniteCheckedException {
    Prepared p = GridSqlQueryParser.prepared(prepStmt);
    H2DmlPlanKey planKey = new H2DmlPlanKey(schema, p.getSQL());
    UpdatePlan res = (errKeysPos == null ? planCache.get(planKey) : null);
    if (res != null)
        return res;
    res = UpdatePlanBuilder.planForStatement(p, errKeysPos);
    // Don't cache re-runs
    if (errKeysPos == null)
        return U.firstNotNull(planCache.putIfAbsent(planKey, res), res);
    else
        return res;
}
Also used : Prepared(org.h2.command.Prepared) UpdatePlan(org.apache.ignite.internal.processors.query.h2.dml.UpdatePlan)
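The caching pattern above is worth calling out: plans are keyed by schema plus SQL text, re-runs (non-null errKeysPos) bypass the cache entirely, and concurrent planners are reconciled via putIfAbsent. Below is a minimal, self-contained sketch of that pattern; all names are hypothetical and a String stands in for UpdatePlan.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class PlanCacheSketch {
    // Cache keyed by schema + SQL text, mirroring the role of H2DmlPlanKey.
    private final ConcurrentMap<String, String> planCache = new ConcurrentHashMap<>();

    String planFor(String schema, String sql, Integer errKeysPos) {
        String key = schema + "|" + sql;
        // Re-runs carry an error-keys position and are never served from (or stored in) the cache.
        String cached = (errKeysPos == null) ? planCache.get(key) : null;
        if (cached != null)
            return cached;
        String fresh = "PLAN[" + sql + "]"; // stands in for UpdatePlanBuilder.planForStatement(...)
        if (errKeysPos != null)
            return fresh;
        // If another thread cached a plan first, prefer that one (the U.firstNotNull idiom above).
        String prev = planCache.putIfAbsent(key, fresh);
        return prev != null ? prev : fresh;
    }
}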

Example 2 with Delete

use of org.h2.command.dml.Delete in project ignite by apache.

the class DmlStatementsProcessor method streamUpdateQuery.

/**
 * Perform given statement against given data streamer. Only row-based INSERT and MERGE are supported,
 * as well as key-bound UPDATE and DELETE (ones with filter {@code WHERE _key = ?}).
 *
 * @param streamer Streamer to feed data to.
 * @param stmt Statement.
 * @param args Statement arguments.
 * @return Number of rows in given statement for INSERT and MERGE, {@code 1} otherwise.
 * @throws IgniteCheckedException if failed.
 */
@SuppressWarnings({ "unchecked", "ConstantConditions" })
long streamUpdateQuery(IgniteDataStreamer streamer, PreparedStatement stmt, Object[] args) throws IgniteCheckedException {
    args = U.firstNotNull(args, X.EMPTY_OBJECT_ARRAY);
    Prepared p = GridSqlQueryParser.prepared(stmt);
    assert p != null;
    UpdatePlan plan = UpdatePlanBuilder.planForStatement(p, null);
    if (!F.eq(streamer.cacheName(), plan.tbl.rowDescriptor().context().name()))
        throw new IgniteSQLException("Cross cache streaming is not supported, please specify cache explicitly" + " in connection options", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
    if (plan.mode == UpdateMode.INSERT && plan.rowsNum > 0) {
        assert plan.isLocSubqry;
        final GridCacheContext cctx = plan.tbl.rowDescriptor().context();
        QueryCursorImpl<List<?>> cur;
        final ArrayList<List<?>> data = new ArrayList<>(plan.rowsNum);
        final GridQueryFieldsResult res = idx.queryLocalSqlFields(idx.schema(cctx.name()), plan.selectQry, F.asList(args), null, false, 0, null);
        QueryCursorImpl<List<?>> stepCur = new QueryCursorImpl<>(new Iterable<List<?>>() {

            @Override
            public Iterator<List<?>> iterator() {
                try {
                    return new GridQueryCacheObjectsIterator(res.iterator(), idx.objectContext(), cctx.keepBinary());
                } catch (IgniteCheckedException e) {
                    throw new IgniteException(e);
                }
            }
        }, null);
        data.addAll(stepCur.getAll());
        cur = new QueryCursorImpl<>(new Iterable<List<?>>() {

            @Override
            public Iterator<List<?>> iterator() {
                return data.iterator();
            }
        }, null);
        if (plan.rowsNum == 1) {
            IgniteBiTuple t = rowToKeyValue(cctx, cur.iterator().next(), plan);
            streamer.addData(t.getKey(), t.getValue());
            return 1;
        }
        Map<Object, Object> rows = new LinkedHashMap<>(plan.rowsNum);
        for (List<?> row : cur) {
            final IgniteBiTuple t = rowToKeyValue(cctx, row, plan);
            rows.put(t.getKey(), t.getValue());
        }
        streamer.addData(rows);
        return rows.size();
    } else
        throw new IgniteSQLException("Only tuple based INSERT statements are supported in streaming mode", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
}
Also used : GridCacheContext(org.apache.ignite.internal.processors.cache.GridCacheContext) IgniteBiTuple(org.apache.ignite.lang.IgniteBiTuple) Prepared(org.h2.command.Prepared) ArrayList(java.util.ArrayList) QueryCursorImpl(org.apache.ignite.internal.processors.cache.QueryCursorImpl) GridQueryCacheObjectsIterator(org.apache.ignite.internal.processors.query.GridQueryCacheObjectsIterator) GridQueryFieldsResult(org.apache.ignite.internal.processors.query.GridQueryFieldsResult) LinkedHashMap(java.util.LinkedHashMap) GridBoundedConcurrentLinkedHashMap(org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashMap) IgniteCheckedException(org.apache.ignite.IgniteCheckedException) IgniteException(org.apache.ignite.IgniteException) IgniteSQLException(org.apache.ignite.internal.processors.query.IgniteSQLException) GridQueryCacheObjectsIterator(org.apache.ignite.internal.processors.query.GridQueryCacheObjectsIterator) IgniteSingletonIterator(org.apache.ignite.internal.util.lang.IgniteSingletonIterator) Iterator(java.util.Iterator) List(java.util.List) ArrayList(java.util.ArrayList) BinaryObject(org.apache.ignite.binary.BinaryObject) UpdatePlan(org.apache.ignite.internal.processors.query.h2.dml.UpdatePlan)
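Two details of streamUpdateQuery are easy to miss: a single-row INSERT goes straight to streamer.addData(key, val), while a multi-row statement is buffered in an insertion-ordered LinkedHashMap and pushed with one bulk addData(map) call. The following stand-alone sketch shows just that branching; the names are hypothetical and plain callbacks stand in for IgniteDataStreamer.

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

public class StreamBatchSketch {
    // single mirrors streamer.addData(key, val); batch mirrors streamer.addData(map).
    static long feed(List<Map.Entry<Object, Object>> rows,
                     BiConsumer<Object, Object> single,
                     Consumer<Map<Object, Object>> batch) {
        if (rows.size() == 1) {
            Map.Entry<Object, Object> t = rows.get(0);
            single.accept(t.getKey(), t.getValue());
            return 1;
        }
        // LinkedHashMap keeps the statement's row order; a duplicate key overwrites the earlier value.
        Map<Object, Object> buf = new LinkedHashMap<>(rows.size());
        for (Map.Entry<Object, Object> t : rows)
            buf.put(t.getKey(), t.getValue());
        batch.accept(buf);
        return buf.size();
    }

    public static void main(String[] args) {
        List<Map.Entry<Object, Object>> rows = Arrays.asList(
            new SimpleEntry<Object, Object>(1, "a"),
            new SimpleEntry<Object, Object>(2, "b"));
        long n = feed(rows,
            (k, v) -> System.out.println("single " + k + " = " + v),
            m -> System.out.println("batch " + m));
        System.out.println(n); // prints 2
    }
}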

Example 3 with Delete

use of org.h2.command.dml.Delete in project ignite by apache.

the class DmlAstUtils method selectForDelete.

/**
 * Generate SQL SELECT based on DELETE's WHERE, LIMIT, etc.
 *
 * @param del Delete statement.
 * @param keysParamIdx Index of the parameter for the keys filter, or {@code null} if none.
 * @return SELECT statement.
 */
public static GridSqlSelect selectForDelete(GridSqlDelete del, @Nullable Integer keysParamIdx) {
    GridSqlSelect mapQry = new GridSqlSelect();
    mapQry.from(del.from());
    Set<GridSqlTable> tbls = new HashSet<>();
    collectAllGridTablesInTarget(del.from(), tbls);
    assert tbls.size() == 1 : "Failed to determine target table for DELETE";
    GridSqlTable tbl = tbls.iterator().next();
    GridH2Table gridTbl = tbl.dataTable();
    assert gridTbl != null : "Failed to determine target grid table for DELETE";
    Column h2KeyCol = gridTbl.getColumn(GridH2AbstractKeyValueRow.KEY_COL);
    Column h2ValCol = gridTbl.getColumn(GridH2AbstractKeyValueRow.VAL_COL);
    GridSqlColumn keyCol = new GridSqlColumn(h2KeyCol, tbl, h2KeyCol.getName());
    keyCol.resultType(GridSqlType.fromColumn(h2KeyCol));
    GridSqlColumn valCol = new GridSqlColumn(h2ValCol, tbl, h2ValCol.getName());
    valCol.resultType(GridSqlType.fromColumn(h2ValCol));
    mapQry.addColumn(keyCol, true);
    mapQry.addColumn(valCol, true);
    GridSqlElement where = del.where();
    if (keysParamIdx != null)
        where = injectKeysFilterParam(where, keyCol, keysParamIdx);
    mapQry.where(where);
    mapQry.limit(del.limit());
    return mapQry;
}
Also used : Column(org.h2.table.Column) GridH2Table(org.apache.ignite.internal.processors.query.h2.opt.GridH2Table) HashSet(java.util.HashSet)
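In effect, selectForDelete turns a DELETE into a SELECT over Ignite's hidden _KEY and _VAL columns, keeping the original WHERE and LIMIT so the caller can fetch exactly the rows that would be deleted. A rough sketch of the same rewrite over plain SQL text (the real code builds a GridSqlSelect AST, so this is only illustrative and the table/predicate below are invented):

public class DeleteRewriteSketch {
    // Build the SELECT that mirrors "DELETE FROM <table> [WHERE ...] [LIMIT n]".
    static String selectForDelete(String table, String where, Integer limit) {
        StringBuilder sb = new StringBuilder("SELECT _KEY, _VAL FROM ").append(table);
        if (where != null && !where.isEmpty())
            sb.append(" WHERE ").append(where);
        if (limit != null)
            sb.append(" LIMIT ").append(limit);
        return sb.toString();
    }

    public static void main(String[] args) {
        // DELETE FROM person WHERE age < 18 LIMIT 100
        //   -> SELECT _KEY, _VAL FROM person WHERE age < 18 LIMIT 100
        System.out.println(selectForDelete("person", "age < 18", 100));
    }
}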

Example 4 with Delete

use of org.h2.command.dml.Delete in project ignite by apache.

the class DmlAstUtils method selectForDelete.

/**
 * Generate SQL SELECT based on DELETE's WHERE, LIMIT, etc.
 *
 * @param del Delete statement.
 * @param keysParamIdx Index of the parameter for the keys filter, or {@code null} if none.
 * @return SELECT statement.
 */
public static GridSqlSelect selectForDelete(GridSqlDelete del, @Nullable Integer keysParamIdx) {
    GridSqlSelect mapQry = new GridSqlSelect();
    mapQry.from(del.from());
    Set<GridSqlTable> tbls = new HashSet<>();
    collectAllGridTablesInTarget(del.from(), tbls);
    assert tbls.size() == 1 : "Failed to determine target table for DELETE";
    GridSqlTable tbl = tbls.iterator().next();
    GridH2Table gridTbl = tbl.dataTable();
    assert gridTbl != null : "Failed to determine target grid table for DELETE";
    Column h2KeyCol = gridTbl.getColumn(GridH2KeyValueRowOnheap.KEY_COL);
    Column h2ValCol = gridTbl.getColumn(GridH2KeyValueRowOnheap.VAL_COL);
    GridSqlColumn keyCol = new GridSqlColumn(h2KeyCol, tbl, h2KeyCol.getName());
    keyCol.resultType(GridSqlType.fromColumn(h2KeyCol));
    GridSqlColumn valCol = new GridSqlColumn(h2ValCol, tbl, h2ValCol.getName());
    valCol.resultType(GridSqlType.fromColumn(h2ValCol));
    mapQry.addColumn(keyCol, true);
    mapQry.addColumn(valCol, true);
    GridSqlElement where = del.where();
    if (keysParamIdx != null)
        where = injectKeysFilterParam(where, keyCol, keysParamIdx);
    mapQry.where(where);
    mapQry.limit(del.limit());
    return mapQry;
}
Also used : Column(org.h2.table.Column) GridSqlColumn(org.apache.ignite.internal.processors.query.h2.sql.GridSqlColumn) GridSqlTable(org.apache.ignite.internal.processors.query.h2.sql.GridSqlTable) GridH2Table(org.apache.ignite.internal.processors.query.h2.opt.GridH2Table) GridSqlColumn(org.apache.ignite.internal.processors.query.h2.sql.GridSqlColumn) GridSqlElement(org.apache.ignite.internal.processors.query.h2.sql.GridSqlElement) GridSqlSelect(org.apache.ignite.internal.processors.query.h2.sql.GridSqlSelect) HashSet(java.util.HashSet)
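Example 4 is the same method against a newer Ignite revision; the only visible difference is that the key and value column constants now come from GridH2KeyValueRowOnheap. The one piece neither listing shows is injectKeysFilterParam. Judging only from how it is called, it appears to AND a key filter bound to the parameter at keysParamIdx onto the existing WHERE, for re-running the statement against a known set of keys. A purely hypothetical text-level sketch of that idea (the real method works on the GridSql AST and the exact filter shape may differ):

public class KeysFilterSketch {
    // Assumed behaviour: attach "keyCol IN (?N)" to whatever WHERE condition already exists.
    static String injectKeysFilter(String where, String keyCol, int keysParamIdx) {
        String keysFilter = keyCol + " IN (?" + keysParamIdx + ")"; // assumed form of the injected condition
        if (where == null || where.isEmpty())
            return keysFilter;
        return "(" + where + ") AND " + keysFilter;
    }

    public static void main(String[] args) {
        // (age < 18) AND _KEY IN (?2)
        System.out.println(injectKeysFilter("age < 18", "_KEY", 2));
    }
}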

Example 5 with Delete

use of org.h2.command.dml.Delete in project spf4j by zolyfarkas.

the class JdbcSemaphoreTest method testMultiProcess.

@Test
@SuppressFBWarnings("AFBR_ABNORMAL_FINALLY_BLOCK_RETURN")
public void testMultiProcess() throws SQLException, IOException, InterruptedException, ExecutionException, TimeoutException {
    Server server = Server.createTcpServer(new String[] { "-tcpPort", "9123", "-tcpAllowOthers" }).start();
    File tempDB = File.createTempFile("test", "h2db");
    String connStr = "jdbc:h2:tcp://localhost:9123/nio:" + tempDB.getAbsolutePath() + ";AUTO_SERVER=TRUE";
    try {
        JdbcDataSource ds = new JdbcDataSource();
        ds.setURL(connStr);
        ds.setUser("sa");
        ds.setPassword("sa");
        createSchemaObjects(ds);
        testReleaseAck(ds, "testSem", 2);
        JdbcSemaphore semaphore = new JdbcSemaphore(ds, "test_sem2", 3);
        org.spf4j.base.Runtime.jrun(BadSemaphoreHandler.class, 10000, connStr, "test_sem2");
        org.spf4j.base.Runtime.jrun(BadSemaphoreHandler.class, 10000, connStr, "test_sem2");
        Assert.assertTrue(semaphore.tryAcquire(1, TimeUnit.SECONDS));
        Assert.assertTrue(semaphore.tryAcquire(10, TimeUnit.SECONDS));
        JdbcHeartBeat.stopHeartBeats();
        server.stop();
    } finally {
        if (!tempDB.delete()) {
            throw new IOException("Cannot delete " + tempDB);
        }
    }
}
Also used : Server(org.h2.tools.Server) JdbcDataSource(org.h2.jdbcx.JdbcDataSource) IOException(java.io.IOException) File(java.io.File) Test(org.junit.Test) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings)
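The H2-specific plumbing in this test is the in-process TCP server plus a JdbcDataSource pointed at it. Below is a trimmed-down sketch of just that setup; the port, database path and table are invented for the example, and the real test adds the JDBC semaphore and the second process on top of this.

import java.io.File;
import java.sql.Connection;
import java.sql.Statement;
import org.h2.jdbcx.JdbcDataSource;
import org.h2.tools.Server;

public class H2TcpSketch {
    public static void main(String[] args) throws Exception {
        // Start an H2 TCP server in the current JVM, as the test does.
        Server server = Server.createTcpServer("-tcpPort", "9124", "-tcpAllowOthers").start();
        File tempDb = File.createTempFile("demo", "h2db");
        try {
            JdbcDataSource ds = new JdbcDataSource();
            ds.setURL("jdbc:h2:tcp://localhost:9124/nio:" + tempDb.getAbsolutePath());
            ds.setUser("sa");
            ds.setPassword("sa");
            try (Connection conn = ds.getConnection();
                 Statement st = conn.createStatement()) {
                st.execute("CREATE TABLE IF NOT EXISTS T(ID INT PRIMARY KEY)");
                st.execute("DELETE FROM T"); // the DELETE statement this page indexes
            }
        } finally {
            server.stop();
            if (!tempDb.delete())
                System.err.println("Cannot delete " + tempDb);
        }
    }
}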

Aggregations

Connection (java.sql.Connection): 40
PreparedStatement (java.sql.PreparedStatement): 39
Statement (java.sql.Statement): 38
ResultSet (java.sql.ResultSet): 36
JdbcConnection (org.h2.jdbc.JdbcConnection): 25
SQLException (java.sql.SQLException): 17
SimpleResultSet (org.h2.tools.SimpleResultSet): 14
Savepoint (java.sql.Savepoint): 13
StatementBuilder (org.h2.util.StatementBuilder): 9
DbException (org.h2.message.DbException): 8
Column (org.h2.table.Column): 8
ValueString (org.h2.value.ValueString): 7
Random (java.util.Random): 6
Expression (org.h2.expression.Expression): 5
ValueExpression (org.h2.expression.ValueExpression): 5
ByteArrayInputStream (java.io.ByteArrayInputStream): 4
ArrayList (java.util.ArrayList): 4
GridH2Table (org.apache.ignite.internal.processors.query.h2.opt.GridH2Table): 4
AlterTableAddConstraint (org.h2.command.ddl.AlterTableAddConstraint): 4
AlterTableDropConstraint (org.h2.command.ddl.AlterTableDropConstraint): 4