Search in sources :

Example 1 with SQLFirehoseDatabaseConnector

Use of org.apache.druid.metadata.SQLFirehoseDatabaseConnector in project druid by druid-io.

From the class SqlEntity, the method openCleanableFile:

/**
 * Executes a SQL query on the specified database and fetches the result into the given file.
 * The result file is deleted if the query execution or the file write fails.
 *
 * @param sql                          The SQL query to be executed
 * @param sqlFirehoseDatabaseConnector The database connector
 * @param objectMapper                 An object mapper, used for deserialization
 * @param foldCase                     A boolean flag used to enable or disable case folding (lower-casing) of database column names
 * @param tempFile                     The file into which the query results are written
 *
 * @return An {@link InputEntity.CleanableFile} object that wraps the file containing the SQL results
 */
public static CleanableFile openCleanableFile(String sql, SQLFirehoseDatabaseConnector sqlFirehoseDatabaseConnector, ObjectMapper objectMapper, boolean foldCase, File tempFile) throws IOException {
    try (FileOutputStream fos = new FileOutputStream(tempFile);
        final JsonGenerator jg = objectMapper.getFactory().createGenerator(fos)) {
        // Execute the SQL query and lazily retrieve the results into the file in JSON format.
        // foldCase is useful to handle differences in case sensitivity behavior across databases.
        sqlFirehoseDatabaseConnector.retryWithHandle((handle) -> {
            ResultIterator<Map<String, Object>> resultIterator = handle.createQuery(sql).map((index, r, ctx) -> {
                Map<String, Object> resultRow = foldCase ? new CaseFoldedMap() : new HashMap<>();
                ResultSetMetaData resultMetadata;
                try {
                    resultMetadata = r.getMetaData();
                } catch (SQLException e) {
                    throw new ResultSetException("Unable to obtain metadata from result set", e, ctx);
                }
                try {
                    for (int i = 1; i <= resultMetadata.getColumnCount(); i++) {
                        String key = resultMetadata.getColumnName(i);
                        String alias = resultMetadata.getColumnLabel(i);
                        Object value = r.getObject(i);
                        resultRow.put(alias != null ? alias : key, value);
                    }
                } catch (SQLException e) {
                    throw new ResultSetException("Unable to access specific metadata from " + "result set metadata", e, ctx);
                }
                return resultRow;
            }).iterator();
            jg.writeStartArray();
            while (resultIterator.hasNext()) {
                jg.writeObject(resultIterator.next());
            }
            jg.writeEndArray();
            jg.close();
            return null;
        }, (exception) -> sqlFirehoseDatabaseConnector.isTransientException(exception) && !(SQLMetadataStorageActionHandler.isStatementException(exception)));
        return new CleanableFile() {

            @Override
            public File file() {
                return tempFile;
            }

            @Override
            public void close() {
                if (!tempFile.delete()) {
                    LOG.warn("Failed to remove file[%s]", tempFile.getAbsolutePath());
                }
            }
        };
    } catch (Exception e) {
        if (!tempFile.delete()) {
            LOG.warn("Failed to remove file[%s]", tempFile.getAbsolutePath());
        }
        throw new IOException(e);
    }
}
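
A minimal usage sketch, not part of the Druid source above: it assumes an already-configured SQLFirehoseDatabaseConnector and Jackson ObjectMapper are supplied by the caller, and the query string is purely illustrative. TypeReference and List are additional imports beyond the ones listed below.

import com.fasterxml.jackson.core.type.TypeReference;
import java.util.List;

static List<Map<String, Object>> readRows(SQLFirehoseDatabaseConnector connector, ObjectMapper mapper) throws IOException {
    File tempFile = File.createTempFile("sql-results", ".json");
    // openCleanableFile writes the query results into tempFile as a JSON array of row objects.
    try (InputEntity.CleanableFile results = SqlEntity.openCleanableFile(
            "SELECT id, name FROM users", // hypothetical query
            connector,
            mapper,
            true, // foldCase: fold column names in the result rows
            tempFile)) {
        // Read the rows back with the same mapper; closing the CleanableFile deletes tempFile.
        return mapper.readValue(results.file(), new TypeReference<List<Map<String, Object>>>() {});
    }
}

The CaseFoldedMap referenced above is not shown in this snippet; a rough sketch of the idea (not necessarily Druid's exact implementation) is a HashMap that lower-cases keys on insertion, so column names are stored case-insensitively:

private static class CaseFoldedMap extends HashMap<String, Object> {
    @Override
    public Object put(String key, Object value) {
        // Lower-case the column name before storing, so lookups do not depend on
        // the case reported by the particular database's ResultSetMetaData.
        return super.put(StringUtils.toLowerCase(key), value);
    }
}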
Also used : Logger(org.apache.druid.java.util.common.logger.Logger) SQLFirehoseDatabaseConnector(org.apache.druid.metadata.SQLFirehoseDatabaseConnector) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) StringUtils(org.apache.druid.java.util.common.StringUtils) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) HashMap(java.util.HashMap) SQLMetadataStorageActionHandler(org.apache.druid.metadata.SQLMetadataStorageActionHandler) File(java.io.File) SQLException(java.sql.SQLException) Map(java.util.Map) ResultIterator(org.skife.jdbi.v2.ResultIterator) Preconditions(com.google.common.base.Preconditions) ResultSetException(org.skife.jdbi.v2.exceptions.ResultSetException) URI(java.net.URI) InputEntity(org.apache.druid.data.input.InputEntity) Nullable(javax.annotation.Nullable) ResultSetMetaData(java.sql.ResultSetMetaData) InputStream(java.io.InputStream)
