Example 1 with StatementContext

Use of org.skife.jdbi.v2.StatementContext in project druid by druid-io.

From class SQLMetadataSegmentManager, method enableDatasource:

@Override
public boolean enableDatasource(final String ds) {
    try {
        final IDBI dbi = connector.getDBI();
        VersionedIntervalTimeline<String, DataSegment> segmentTimeline = connector.inReadOnlyTransaction(new TransactionCallback<VersionedIntervalTimeline<String, DataSegment>>() {

            @Override
            public VersionedIntervalTimeline<String, DataSegment> inTransaction(Handle handle, TransactionStatus status) throws Exception {
                return handle.createQuery(
                        String.format("SELECT payload FROM %s WHERE dataSource = :dataSource", getSegmentsTable())
                ).setFetchSize(connector.getStreamingFetchSize()).bind("dataSource", ds).map(ByteArrayMapper.FIRST).fold(
                        new VersionedIntervalTimeline<String, DataSegment>(Ordering.natural()),
                        new Folder3<VersionedIntervalTimeline<String, DataSegment>, byte[]>() {

                            @Override
                            public VersionedIntervalTimeline<String, DataSegment> fold(VersionedIntervalTimeline<String, DataSegment> timeline, byte[] payload, FoldController foldController, StatementContext statementContext) throws SQLException {
                                try {
                                    final DataSegment segment = DATA_SEGMENT_INTERNER.intern(jsonMapper.readValue(payload, DataSegment.class));
                                    timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
                                    return timeline;
                                } catch (Exception e) {
                                    throw new SQLException(e.toString());
                                }
                            }
                        }
                );
            }
        });
        final List<DataSegment> segments = Lists.newArrayList();
        for (TimelineObjectHolder<String, DataSegment> objectHolder : segmentTimeline.lookup(new Interval("0000-01-01/3000-01-01"))) {
            for (PartitionChunk<DataSegment> partitionChunk : objectHolder.getObject()) {
                segments.add(partitionChunk.getObject());
            }
        }
        if (segments.isEmpty()) {
            log.warn("No segments found in the database!");
            return false;
        }
        dbi.withHandle(new HandleCallback<Void>() {

            @Override
            public Void withHandle(Handle handle) throws Exception {
                Batch batch = handle.createBatch();
                for (DataSegment segment : segments) {
                    batch.add(String.format("UPDATE %s SET used=true WHERE id = '%s'", getSegmentsTable(), segment.getIdentifier()));
                }
                batch.execute();
                return null;
            }
        });
    } catch (Exception e) {
        log.error(e, "Exception enabling datasource %s", ds);
        return false;
    }
    return true;
}
Also used: IDBI(org.skife.jdbi.v2.IDBI) SQLException(java.sql.SQLException) TransactionStatus(org.skife.jdbi.v2.TransactionStatus) DataSegment(io.druid.timeline.DataSegment) IOException(java.io.IOException) Handle(org.skife.jdbi.v2.Handle) StatementContext(org.skife.jdbi.v2.StatementContext) FoldController(org.skife.jdbi.v2.FoldController) Batch(org.skife.jdbi.v2.Batch) VersionedIntervalTimeline(io.druid.timeline.VersionedIntervalTimeline) Folder3(org.skife.jdbi.v2.Folder3) Interval(org.joda.time.Interval)
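
Note that the UPDATE above is built by formatting each segment identifier directly into the SQL string. JDBI v2 also offers a PreparedBatch with named parameter binding; the following is a minimal, hypothetical sketch of the same update loop using that API (the segmentsTable and segmentIds names are illustrative stand-ins, not from the Druid source; the table name still has to be formatted in, since JDBC cannot bind identifiers):

import java.util.List;

import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.PreparedBatch;

public final class EnableSegmentsSketch {

    // Sketch: mark segments as used via a parameterized batch instead of
    // formatting each id into its own UPDATE statement.
    public static void enable(Handle handle, String segmentsTable, List<String> segmentIds) {
        PreparedBatch batch = handle.prepareBatch(String.format("UPDATE %s SET used=true WHERE id = :id", segmentsTable));
        for (String id : segmentIds) {
            // add() opens a new batch part; bind the id to the :id placeholder
            batch.add().bind("id", id);
        }
        batch.execute();
    }
}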

Example 2 with StatementContext

Use of org.skife.jdbi.v2.StatementContext in project druid by druid-io.

From class SQLMetadataSegmentManager, method poll:

@Override
public void poll() {
    try {
        if (!started) {
            return;
        }
        ConcurrentHashMap<String, DruidDataSource> newDataSources = new ConcurrentHashMap<String, DruidDataSource>();
        log.debug("Starting polling of segment table");
        // some databases such as PostgreSQL require auto-commit turned off
        // to stream results back, enabling transactions disables auto-commit
        //
        // setting the connection to read-only allows some databases, such as MySQL,
        // to automatically use read-only transaction mode, further optimizing the query
        final List<DataSegment> segments = connector.inReadOnlyTransaction(new TransactionCallback<List<DataSegment>>() {

            @Override
            public List<DataSegment> inTransaction(Handle handle, TransactionStatus status) throws Exception {
                return handle.createQuery(
                        String.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable())
                ).setFetchSize(connector.getStreamingFetchSize()).map(new ResultSetMapper<DataSegment>() {

                    @Override
                    public DataSegment map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                        try {
                            return DATA_SEGMENT_INTERNER.intern(jsonMapper.readValue(r.getBytes("payload"), DataSegment.class));
                        } catch (IOException e) {
                            log.makeAlert(e, "Failed to read segment from db.");
                            return null;
                        }
                    }
                }).list();
            }
        });
        if (segments == null || segments.isEmpty()) {
            log.warn("No segments found in the database!");
            return;
        }
        final Collection<DataSegment> segmentsFinal = Collections2.filter(segments, Predicates.notNull());
        log.info("Polled and found %,d segments in the database", segments.size());
        for (final DataSegment segment : segmentsFinal) {
            String datasourceName = segment.getDataSource();
            DruidDataSource dataSource = newDataSources.get(datasourceName);
            if (dataSource == null) {
                dataSource = new DruidDataSource(datasourceName, ImmutableMap.of("created", new DateTime().toString()));
                Object shouldBeNull = newDataSources.put(datasourceName, dataSource);
                if (shouldBeNull != null) {
                    log.warn("Just put key[%s] into dataSources and what was there wasn't null!?  It was[%s]", datasourceName, shouldBeNull);
                }
            }
            if (!dataSource.getSegments().contains(segment)) {
                dataSource.addSegment(segment.getIdentifier(), segment);
            }
        }
        synchronized (lock) {
            if (started) {
                dataSources.set(newDataSources);
            }
        }
    } catch (Exception e) {
        log.makeAlert(e, "Problem polling DB.").emit();
    }
}
Also used: SQLException(java.sql.SQLException) TransactionStatus(org.skife.jdbi.v2.TransactionStatus) IOException(java.io.IOException) DruidDataSource(io.druid.client.DruidDataSource) DataSegment(io.druid.timeline.DataSegment) DateTime(org.joda.time.DateTime) Handle(org.skife.jdbi.v2.Handle) StatementContext(org.skife.jdbi.v2.StatementContext) ResultSet(java.sql.ResultSet) List(java.util.List) ArrayList(java.util.ArrayList) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap)
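
The comment in inTransaction explains the two JDBC-level knobs this poll relies on: turning auto-commit off (required by PostgreSQL to stream results back) and marking the connection read-only (which lets MySQL choose a read-only transaction mode). For reference, a plain-JDBC sketch of the same setup, assuming an already-opened Connection and a placeholder segments table name:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class StreamingQuerySketch {

    // Sketch of what connector.inReadOnlyTransaction(...) plus setFetchSize(...)
    // arrange under the hood when polling the segments table.
    public static void streamPayloads(Connection conn) throws SQLException {
        conn.setAutoCommit(false); // PostgreSQL streams results only inside a transaction
        conn.setReadOnly(true); // MySQL can switch to a read-only transaction mode
        try (PreparedStatement stmt = conn.prepareStatement("SELECT payload FROM segments WHERE used=true")) {
            stmt.setFetchSize(100); // fetch rows in chunks instead of buffering them all
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    byte[] payload = rs.getBytes("payload"); // deserialize each payload here
                }
            }
        } finally {
            conn.commit();
        }
    }
}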

Example 3 with StatementContext

Use of org.skife.jdbi.v2.StatementContext in project irontest by zheng-wang.

From class DBTeststepRunner, method run:

protected BasicTeststepRun run(Teststep teststep) throws Exception {
    BasicTeststepRun basicTeststepRun = new BasicTeststepRun();
    DBAPIResponse response = new DBAPIResponse();
    String request = (String) teststep.getRequest();
    Endpoint endpoint = teststep.getEndpoint();
    DBI jdbi = new DBI(endpoint.getUrl(), endpoint.getUsername(), getDecryptedEndpointPassword());
    // get SQL statements (trimmed and without comments) and JDBI script object
    List<String> statements = IronTestUtils.getStatements(request);
    sanityCheckTheStatements(statements);
    Handle handle = jdbi.open();
    if (SQLStatementType.isSelectStatement(statements.get(0))) {
        // the request is a select statement
        RetainingColumnOrderResultSetMapper resultSetMapper = new RetainingColumnOrderResultSetMapper();
        // use statements.get(0) instead of the raw request, as Oracle does not support a trailing semicolon in SELECT statements
        Query<Map<String, Object>> query = handle.createQuery(statements.get(0)).map(resultSetMapper);
        // capture columnNames via a statement customizer, in case the query returns no rows
        final List<String> columnNames = new ArrayList<String>();
        query.addStatementCustomizer(new BaseStatementCustomizer() {

            @Override
            public void afterExecution(PreparedStatement stmt, StatementContext ctx) throws SQLException {
                ResultSetMetaData metaData = stmt.getMetaData();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    columnNames.add(metaData.getColumnLabel(i).toLowerCase());
                }
            }
        });
        // limit the number of returned rows
        List<Map<String, Object>> rows = query.list(5000);
        response.setColumnNames(columnNames);
        response.setRowsJSON(jacksonObjectMapper.writeValueAsString(rows));
    } else {
        // the request is one or more non-select statements
        Script script = handle.createScript(request);
        int[] returnValues = script.execute();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < returnValues.length; i++) {
            String statementType = SQLStatementType.getByStatement(statements.get(i)).toString();
            sb.append(returnValues[i]).append(" row(s) ").append(statementType.toLowerCase()).append(statementType.endsWith("E") ? "d" : "ed").append("\n");
            response.setStatementExecutionResults(sb.toString());
        }
    }
    handle.close();
    basicTeststepRun.setResponse(response);
    return basicTeststepRun;
}
Also used: BaseStatementCustomizer(org.skife.jdbi.v2.tweak.BaseStatementCustomizer) SQLException(java.sql.SQLException) ArrayList(java.util.ArrayList) PreparedStatement(java.sql.PreparedStatement) Endpoint(io.irontest.models.endpoint.Endpoint) ResultSetMetaData(java.sql.ResultSetMetaData) Map(java.util.Map)
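
The interesting part here is the BaseStatementCustomizer: its afterExecution hook receives the executed PreparedStatement, whose ResultSetMetaData is available even when the SELECT returns zero rows, which is why the column names survive an empty result. A minimal standalone sketch of the same trick (the handle and sql arguments are placeholders):

import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.BaseStatementCustomizer;

public final class ColumnLabelSketch {

    // Sketch: run a SELECT and collect its column labels, even if it returns no rows.
    public static List<String> columnLabels(Handle handle, String sql) {
        final List<String> labels = new ArrayList<String>();
        handle.createQuery(sql).addStatementCustomizer(new BaseStatementCustomizer() {
            @Override
            public void afterExecution(PreparedStatement stmt, StatementContext ctx) throws SQLException {
                ResultSetMetaData metaData = stmt.getMetaData();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    labels.add(metaData.getColumnLabel(i));
                }
            }
        }).list(); // execute the query; only the metadata captured in the hook is kept
        return labels;
    }
}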

Example 4 with StatementContext

Use of org.skife.jdbi.v2.StatementContext in project SpinalTap by airbnb.

From class ColumnMapperTest, method testColumnMap:

@Test
public void testColumnMap() throws Exception {
    ResultSet resultSet = mock(ResultSet.class);
    StatementContext context = mock(StatementContext.class);
    when(resultSet.getString("COLUMN_NAME")).thenReturn("col1");
    when(resultSet.getString("COLUMN_KEY")).thenReturn("PRI");
    ColumnInfo column = MysqlSchemaUtil.COLUMN_MAPPER.map(0, resultSet, context);
    assertEquals("col1", column.getName());
    assertTrue(column.isPrimaryKey());
    when(resultSet.getString("COLUMN_NAME")).thenReturn("col2");
    when(resultSet.getString("COLUMN_KEY")).thenReturn("");
    column = MysqlSchemaUtil.COLUMN_MAPPER.map(0, resultSet, context);
    assertEquals("col2", column.getName());
    assertFalse(column.isPrimaryKey());
}
Also used: ResultSet(java.sql.ResultSet) StatementContext(org.skife.jdbi.v2.StatementContext) Test(org.junit.Test)
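
The snippet leaves its imports off; it reads as standard JUnit 4 assertions plus Mockito stubbing, so the imports are presumably along these lines (an assumption, since the page does not show them). Note that the second pair of when(...) calls simply re-stubs the same mock, which is why the second map(...) call sees col2 and an empty COLUMN_KEY:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;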

Example 5 with StatementContext

Use of org.skife.jdbi.v2.StatementContext in project Rosetta by HubSpot.

From class RosettaMapperFactory, method mapperFor:

@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public ResultSetMapper mapperFor(Class rawType, StatementContext ctx) {
    ObjectMapper objectMapper = RosettaObjectMapperOverride.resolve(ctx);
    final Type genericType;
    if (ctx.getSqlObjectMethod() == null) {
        genericType = rawType;
    } else {
        genericType = determineGenericReturnType(rawType, ctx.getSqlObjectMethod().getGenericReturnType());
    }
    String tableName = SqlTableNameExtractor.extractTableName(ctx.getRewrittenSql());
    final RosettaMapper mapper = new RosettaMapper(genericType, objectMapper, tableName);
    return new ResultSetMapper() {

        @Override
        public Object map(int index, ResultSet r, StatementContext ctx) throws SQLException {
            return mapper.mapRow(r);
        }
    };
}
Also used: ParameterizedType(java.lang.reflect.ParameterizedType) Type(java.lang.reflect.Type) ResultSetMapper(org.skife.jdbi.v2.tweak.ResultSetMapper) ResultSet(java.sql.ResultSet) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) RosettaMapper(com.hubspot.rosetta.RosettaMapper) StatementContext(org.skife.jdbi.v2.StatementContext)
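
A ResultSetMapperFactory like this only participates in mapping once it has been registered; in JDBI v2 that is typically done on the DBI (or an individual Handle). A hedged wiring sketch, with a placeholder JDBC URL:

import org.skife.jdbi.v2.DBI;

public final class RosettaWiringSketch {

    public static DBI wire() {
        DBI dbi = new DBI("jdbc:h2:mem:example"); // placeholder connection string
        // Fall back to Rosetta mapping for return types with no registered mapper.
        dbi.registerMapper(new RosettaMapperFactory());
        return dbi;
    }
}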

Aggregations

StatementContext (org.skife.jdbi.v2.StatementContext): 20
Test (org.junit.Test): 13
Timer (com.codahale.metrics.Timer): 11
StatementNameStrategy (com.codahale.metrics.jdbi.strategies.StatementNameStrategy): 11
SmartNameStrategy (com.codahale.metrics.jdbi.strategies.SmartNameStrategy): 9
ResultSet (java.sql.ResultSet): 8
SQLException (java.sql.SQLException): 5
Handle (org.skife.jdbi.v2.Handle): 5
IOException (java.io.IOException): 3
ArrayList (java.util.ArrayList): 3
List (java.util.List): 3
DataSegment (org.apache.druid.timeline.DataSegment): 3
Interval (org.joda.time.Interval): 3
ShortNameStrategy (com.codahale.metrics.jdbi.strategies.ShortNameStrategy): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
DataSegment (io.druid.timeline.DataSegment): 2
HashSet (java.util.HashSet): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
Nullable (javax.annotation.Nullable): 2
TransactionStatus (org.skife.jdbi.v2.TransactionStatus): 2