
Example 61 with Handle

use of org.skife.jdbi.v2.Handle in project druid by druid-io.

the class JDBCExtractionNamespaceTest method setup.

@Before
public void setup() throws Exception {
    lifecycle = new Lifecycle();
    updates = new AtomicLong(0L);
    updateLock = new ReentrantLock(true);
    closer = Closer.create();
    setupTeardownService = MoreExecutors.listeningDecorator(Execs.multiThreaded(2, "JDBCExtractionNamespaceTeardown--%s"));
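    // Create the lookup table on the setup/teardown executor and hand back the open Handle.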
    final ListenableFuture<Handle> setupFuture = setupTeardownService.submit(new Callable<Handle>() {

        @Override
        public Handle call() {
            final Handle handle = derbyConnectorRule.getConnector().getDBI().open();
            Assert.assertEquals(0, handle.createStatement(String.format("CREATE TABLE %s (%s TIMESTAMP, %s VARCHAR(64), %s VARCHAR(64))", tableName, tsColumn_, keyName, valName)).setQueryTimeout(1).execute());
            handle.createStatement(String.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
            handle.commit();
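            // Teardown: drop the table, then close the Handle asynchronously, cancelling after 10 seconds.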
            closer.register(new Closeable() {

                @Override
                public void close() throws IOException {
                    handle.createStatement("DROP TABLE " + tableName).setQueryTimeout(1).execute();
                    final ListenableFuture future = setupTeardownService.submit(new Runnable() {

                        @Override
                        public void run() {
                            handle.close();
                        }
                    });
                    try (Closeable closeable = new Closeable() {

                        @Override
                        public void close() throws IOException {
                            future.cancel(true);
                        }
                    }) {
                        future.get(10, TimeUnit.SECONDS);
                    } catch (InterruptedException | ExecutionException | TimeoutException e) {
                        throw new IOException("Error closing handle", e);
                    }
                }
            });
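            // Teardown: assert the scheduler has no active cache entries left.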
            closer.register(new Closeable() {

                @Override
                public void close() throws IOException {
                    if (scheduler == null) {
                        return;
                    }
                    Assert.assertEquals(0, scheduler.getActiveEntries());
                }
            });
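            // Seed the table with the test's rename mappings, all timestamped 2015-01-01.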
            for (Map.Entry<String, String> entry : renames.entrySet()) {
                try {
                    insertValues(handle, entry.getKey(), entry.getValue(), "2015-01-01 00:00:00");
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw Throwables.propagate(e);
                }
            }
            NoopServiceEmitter noopServiceEmitter = new NoopServiceEmitter();
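            // Wrap the JDBC cache factory so every cache population takes updateLock and bumps the updates counter.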
            scheduler = new CacheScheduler(noopServiceEmitter, ImmutableMap.<Class<? extends ExtractionNamespace>, ExtractionNamespaceCacheFactory<?>>of(JDBCExtractionNamespace.class, new ExtractionNamespaceCacheFactory<JDBCExtractionNamespace>() {

                private final JDBCExtractionNamespaceCacheFactory delegate = new JDBCExtractionNamespaceCacheFactory();

                @Override
                public CacheScheduler.VersionedCache populateCache(final JDBCExtractionNamespace namespace, final CacheScheduler.EntryImpl<JDBCExtractionNamespace> id, final String lastVersion, final CacheScheduler scheduler) throws InterruptedException {
                    updateLock.lockInterruptibly();
                    try {
                        log.debug("Running cache populator");
                        try {
                            return delegate.populateCache(namespace, id, lastVersion, scheduler);
                        } finally {
                            updates.incrementAndGet();
                        }
                    } finally {
                        updateLock.unlock();
                    }
                }
            }), new OnHeapNamespaceExtractionCacheManager(lifecycle, noopServiceEmitter));
            try {
                lifecycle.start();
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
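            // Teardown: stop the lifecycle on the executor, cancelling after 30 seconds.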
            closer.register(new Closeable() {

                @Override
                public void close() throws IOException {
                    final ListenableFuture future = setupTeardownService.submit(new Runnable() {

                        @Override
                        public void run() {
                            lifecycle.stop();
                        }
                    });
                    try (final Closeable closeable = new Closeable() {

                        @Override
                        public void close() throws IOException {
                            future.cancel(true);
                        }
                    }) {
                        future.get(30, TimeUnit.SECONDS);
                    } catch (InterruptedException | ExecutionException | TimeoutException e) {
                        throw new IOException("Error stopping lifecycle", e);
                    }
                }
            });
            return handle;
        }
    });
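    // Wait up to 10 seconds for setup to complete, cancelling the future if it does not finish.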
    try (final Closeable closeable = new Closeable() {

        @Override
        public void close() throws IOException {
            if (!setupFuture.isDone() && !setupFuture.cancel(true) && !setupFuture.isDone()) {
                throw new IOException("Unable to stop future");
            }
        }
    }) {
        handleRef = setupFuture.get(10, TimeUnit.SECONDS);
    }
    Assert.assertNotNull(handleRef);
}
Also used: Closeable(java.io.Closeable) ExtractionNamespaceCacheFactory(io.druid.query.lookup.namespace.ExtractionNamespaceCacheFactory) JDBCExtractionNamespaceCacheFactory(io.druid.server.lookup.namespace.JDBCExtractionNamespaceCacheFactory) ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException) ReentrantLock(java.util.concurrent.locks.ReentrantLock) Lifecycle(io.druid.java.util.common.lifecycle.Lifecycle) NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter) JDBCExtractionNamespace(io.druid.query.lookup.namespace.JDBCExtractionNamespace) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle) AtomicLong(java.util.concurrent.atomic.AtomicLong) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Before(org.junit.Before)
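
Distilled from the test above, a minimal sketch of the same open / create / teardown Handle lifecycle. The Derby test rule is swapped for an in-memory H2 database, and the table and column names are illustrative rather than the ones the test uses; it assumes the H2 driver and Guava are on the classpath.

import java.io.Closeable;
import java.io.IOException;
import java.sql.Timestamp;
import com.google.common.io.Closer;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

public class HandleSetupSketch {
    public static void main(String[] args) throws Exception {
        final Closer closer = Closer.create();
        // DB_CLOSE_DELAY=-1 keeps the in-memory database alive across connections.
        final DBI dbi = new DBI("jdbc:h2:mem:handle-sketch;DB_CLOSE_DELAY=-1");
        final Handle handle = dbi.open();
        // Closer closes registered resources in reverse order: drop the table first, close the Handle last.
        closer.register(new Closeable() {
            @Override
            public void close() throws IOException {
                handle.close();
            }
        });
        closer.register(new Closeable() {
            @Override
            public void close() throws IOException {
                handle.execute("DROP TABLE lookup");
            }
        });
        handle.execute("CREATE TABLE lookup (ts TIMESTAMP, k VARCHAR(64), v VARCHAR(64))");
        handle.insert("INSERT INTO lookup (ts, k, v) VALUES (?, ?, ?)",
                new Timestamp(System.currentTimeMillis()), "foo", "bar");
        // select returns a List<Map<String, Object>> of the matching rows.
        System.out.println(handle.select("SELECT v FROM lookup WHERE k = ?", "foo"));
        closer.close();
    }
}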

Example 62 with Handle

use of org.skife.jdbi.v2.Handle in project dropwizard by dropwizard.

the class OptionalInstantTest method setupTests.

@Before
public void setupTests() throws IOException {
    final DataSourceFactory dataSourceFactory = new DataSourceFactory();
    dataSourceFactory.setDriverClass("org.h2.Driver");
    dataSourceFactory.setUrl("jdbc:h2:mem:optional-instant-" + System.currentTimeMillis() + "?user=sa");
    dataSourceFactory.setInitialSize(1);
    final DBI dbi = new DBIFactory().build(env, dataSourceFactory, "test");
    try (Handle h = dbi.open()) {
        h.execute("CREATE TABLE IF NOT EXISTS tasks (" + "id INT PRIMARY KEY, " + "assignee VARCHAR(255) NOT NULL, " + "start_date TIMESTAMP, " + "end_date TIMESTAMP, " + "comments VARCHAR(1024) " + ")");
    }
    dao = dbi.onDemand(TaskDao.class);
}
Also used: DataSourceFactory(io.dropwizard.db.DataSourceFactory) DBI(org.skife.jdbi.v2.DBI) DBIFactory(io.dropwizard.jdbi.DBIFactory) Handle(org.skife.jdbi.v2.Handle) Before(org.junit.Before)
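
The dropwizard examples bind a DAO with dbi.onDemand(TaskDao.class) but never show the interface. Below is a hypothetical sketch of what such a SQL-object DAO could look like for the tasks table; the real TaskDao may differ, and the Optional and Instant bindings assume the argument factories and mappers that DBIFactory registers.

import java.time.Instant;
import java.util.Optional;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;

// Hypothetical SQL-object DAO for the tasks table created above.
public interface TaskDaoSketch {

    @SqlUpdate("INSERT INTO tasks (id, assignee, start_date, end_date, comments) "
            + "VALUES (:id, :assignee, :start, :end, :comments)")
    void insert(@Bind("id") int id,
                @Bind("assignee") String assignee,
                @Bind("start") Instant start,
                @Bind("end") Optional<Instant> end,
                @Bind("comments") Optional<String> comments);

    @SqlQuery("SELECT end_date FROM tasks WHERE id = :id")
    Optional<Instant> findEndDate(@Bind("id") int id);
}

With the DBI built in the test above, dbi.onDemand(TaskDaoSketch.class) would bind these methods against the tasks table.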

Example 63 with Handle

use of org.skife.jdbi.v2.Handle in project dropwizard by dropwizard.

the class OptionalOffsetDateTimeTest method setupTests.

@Before
public void setupTests() throws IOException {
    final DataSourceFactory dataSourceFactory = new DataSourceFactory();
    dataSourceFactory.setDriverClass("org.h2.Driver");
    dataSourceFactory.setUrl("jdbc:h2:mem:optional-offset-date-time-" + System.currentTimeMillis() + "?user=sa");
    dataSourceFactory.setInitialSize(1);
    final DBI dbi = new DBIFactory().build(env, dataSourceFactory, "test");
    try (Handle h = dbi.open()) {
        h.execute("CREATE TABLE IF NOT EXISTS tasks (" + "id INT PRIMARY KEY, " + "assignee VARCHAR(255) NOT NULL, " + "start_date TIMESTAMP, " + "end_date TIMESTAMP, " + "comments VARCHAR(1024) " + ")");
    }
    dao = dbi.onDemand(TaskDao.class);
}
Also used: DataSourceFactory(io.dropwizard.db.DataSourceFactory) DBI(org.skife.jdbi.v2.DBI) DBIFactory(io.dropwizard.jdbi.DBIFactory) Handle(org.skife.jdbi.v2.Handle) Before(org.junit.Before)

Example 64 with Handle

use of org.skife.jdbi.v2.Handle in project dropwizard by dropwizard.

the class OptionalZonedDateTimeTest method setupTests.

@Before
public void setupTests() throws IOException {
    final DataSourceFactory dataSourceFactory = new DataSourceFactory();
    dataSourceFactory.setDriverClass("org.h2.Driver");
    dataSourceFactory.setUrl("jdbc:h2:mem:optional-zoned-date-time-" + System.currentTimeMillis() + "?user=sa");
    dataSourceFactory.setInitialSize(1);
    final DBI dbi = new DBIFactory().build(env, dataSourceFactory, "test");
    try (Handle h = dbi.open()) {
        h.execute("CREATE TABLE IF NOT EXISTS tasks (" + "id INT PRIMARY KEY, " + "assignee VARCHAR(255) NOT NULL, " + "start_date TIMESTAMP, " + "end_date TIMESTAMP, " + "comments VARCHAR(1024) " + ")");
    }
    dao = dbi.onDemand(TaskDao.class);
}
Also used: DataSourceFactory(io.dropwizard.db.DataSourceFactory) DBI(org.skife.jdbi.v2.DBI) DBIFactory(io.dropwizard.jdbi.DBIFactory) Handle(org.skife.jdbi.v2.Handle) Before(org.junit.Before)

Example 65 with Handle

use of org.skife.jdbi.v2.Handle in project dropwizard by dropwizard.

the class GuavaOptionalDateTimeTest method setupTests.

@Before
public void setupTests() throws IOException {
    final DataSourceFactory dataSourceFactory = new DataSourceFactory();
    dataSourceFactory.setDriverClass("org.h2.Driver");
    dataSourceFactory.setUrl("jdbc:h2:mem:guava-date-time-" + System.currentTimeMillis() + "?user=sa");
    dataSourceFactory.setInitialSize(1);
    final DBI dbi = new DBIFactory().build(env, dataSourceFactory, "test");
    try (Handle h = dbi.open()) {
        h.execute("CREATE TABLE IF NOT EXISTS tasks (" + "id INT PRIMARY KEY, " + "assignee VARCHAR(255) NOT NULL, " + "start_date TIMESTAMP, " + "end_date TIMESTAMP, " + "comments VARCHAR(1024) " + ")");
    }
    dao = dbi.onDemand(TaskDao.class);
}
Also used: DataSourceFactory(io.dropwizard.db.DataSourceFactory) DBI(org.skife.jdbi.v2.DBI) DBIFactory(io.dropwizard.jdbi.DBIFactory) Handle(org.skife.jdbi.v2.Handle) Before(org.junit.Before)

Aggregations

Handle (org.skife.jdbi.v2.Handle): 103
DBI (org.skife.jdbi.v2.DBI): 28
Before (org.junit.Before): 21
IOException (java.io.IOException): 18
List (java.util.List): 17
DataSourceFactory (io.dropwizard.db.DataSourceFactory): 15
DBIFactory (io.dropwizard.jdbi.DBIFactory): 15
SQLException (java.sql.SQLException): 15
Map (java.util.Map): 14
Test (org.junit.Test): 14
Test (org.testng.annotations.Test): 14
DateTime (org.joda.time.DateTime): 13
ArrayList (java.util.ArrayList): 11
TransactionStatus (org.skife.jdbi.v2.TransactionStatus): 11
ResultSet (java.sql.ResultSet): 10
ImmutableList (com.google.common.collect.ImmutableList): 8
UUID (java.util.UUID): 8
CallbackFailedException (org.skife.jdbi.v2.exceptions.CallbackFailedException): 7
ImmutableSet (com.google.common.collect.ImmutableSet): 6
Set (java.util.Set): 6
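
Handle most often appears alongside DBI and TransactionStatus; below is a minimal sketch of that transactional pattern, with an assumed in-memory H2 URL and an illustrative table name.

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;

public class TransactionSketch {
    public static void main(String[] args) throws Exception {
        // DB_CLOSE_DELAY=-1 keeps the in-memory database alive across connections.
        final DBI dbi = new DBI("jdbc:h2:mem:txn-sketch;DB_CLOSE_DELAY=-1");
        try (Handle h = dbi.open()) {
            h.execute("CREATE TABLE names (id INT PRIMARY KEY, name VARCHAR(64))");
        }
        // inTransaction opens a Handle, begins a transaction, commits on success,
        // and rolls back (wrapping failures in CallbackFailedException) if the callback throws.
        final int rows = dbi.inTransaction(new TransactionCallback<Integer>() {
            @Override
            public Integer inTransaction(Handle handle, TransactionStatus status) {
                return handle.insert("INSERT INTO names (id, name) VALUES (?, ?)", 1, "alice");
            }
        });
        System.out.println("inserted rows: " + rows);
    }
}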