Use of com.torodb.core.exceptions.SystemException in project torodb by torodb.
From class OfficialPostgreSqlDriver, method getConfiguredDataSource:
/**
 * Builds a {@link PGSimpleDataSource} from the given backend configuration and verifies
 * connectivity by running a trivial {@code SELECT 1} before returning it.
 *
 * @param configuration source of host, port, database, and credential settings
 * @param poolName used to tag the connection's application name ("torodb-" + poolName)
 * @return a data source that has successfully served one test connection
 * @throws SystemException if the connectivity check fails
 */
@Override
public DataSource getConfiguredDataSource(PostgreSqlBackendConfiguration configuration, String poolName) {
  PGSimpleDataSource dataSource = new PGSimpleDataSource();
  dataSource.setUser(configuration.getUsername());
  dataSource.setPassword(configuration.getPassword());
  dataSource.setServerName(configuration.getDbHost());
  dataSource.setPortNumber(configuration.getDbPort());
  dataSource.setDatabaseName(configuration.getDbName());
  dataSource.setApplicationName("torodb-" + poolName);
  if (JDBC_LOGGER.isTraceEnabled()) {
    // Only route driver-level logging through our writer when trace is on
    dataSource.setLogLevel(Driver.DEBUG);
    dataSource.setLogWriter(LOGGER_WRITER);
  }
  // Fail fast: open one connection and run a no-op query so misconfiguration
  // surfaces here rather than on first real use of the pool.
  try (Connection conn = dataSource.getConnection();
      Statement stat = conn.createStatement();
      ResultSet rs = stat.executeQuery("SELECT 1")) {
    rs.next();
  } catch (SQLException ex) {
    // Wrap the SQLException itself instead of only its localized message so the
    // SQLState, error code, and stack trace are preserved in the cause chain.
    throw new SystemException(ex);
  }
  return dataSource;
}
Use of com.torodb.core.exceptions.SystemException in project torodb by torodb.
From class AbstractSchemaUpdater, method executeSql:
/**
 * Loads a SQL script from a classpath resource and executes it through the given helper.
 *
 * @param dsl the jOOQ context the statement is executed against
 * @param resourcePath classpath location of the SQL script, resolved against the subclass
 * @param sqlHelper executor used to run the loaded statement
 * @throws SystemException if the resource does not exist
 * @throws ToroRuntimeException if the resource cannot be read
 */
@SuppressFBWarnings(value = "UI_INHERITANCE_UNSAFE_GETRESOURCE", justification = "We want to read resources from the subclass")
protected void executeSql(DSLContext dsl, String resourcePath, SqlHelper sqlHelper) {
  try (InputStream resourceAsStream = getClass().getResourceAsStream(resourcePath)) {
    if (resourceAsStream == null) {
      throw new SystemException("Resource '" + resourcePath + "' does not exist");
    }
    // StandardCharsets.UTF_8 avoids the charset lookup (and impossible
    // UnsupportedCharsetException path) of Charset.forName("UTF-8"). The
    // BufferedReader wrapper was dropped: CharStreams.toString already reads
    // the whole reader in chunks, so extra buffering adds nothing.
    String statementAsString = CharStreams.toString(
        new InputStreamReader(resourceAsStream, java.nio.charset.StandardCharsets.UTF_8));
    sqlHelper.executeStatement(dsl, statementAsString, Context.UNKNOWN);
  } catch (IOException ex) {
    throw new ToroRuntimeException(ex);
  }
}
Use of com.torodb.core.exceptions.SystemException in project torodb by torodb.
From class PostgreSqlWriteInterface, method insertDocPartData:
/**
 * Inserts the rows of a doc-part using PostgreSQL's COPY protocol when the batch is big
 * enough and the underlying connection exposes {@link PGConnection}; otherwise falls back
 * to the standard (superclass) insert implementation.
 *
 * @param dsl jOOQ context whose connection provider supplies the JDBC connection
 * @param schemaName target schema for the insert
 * @param docPartData rows/columns to insert; a zero-row batch is a no-op
 * @throws UserException when the error handler classifies a failure as user-caused
 * @throws SystemException on I/O failures during COPY
 */
@Override
public void insertDocPartData(DSLContext dsl, String schemaName, DocPartData docPartData) throws UserException {
  metrics.insertRows.mark(docPartData.rowCount());
  metrics.insertFields.mark(docPartData.rowCount() * (docPartData.fieldColumnsCount() + docPartData.scalarColumnsCount()));
  if (docPartData.rowCount() == 0) {
    return;
  }
  try (Timer.Context ctx = metrics.insertDocPartDataTimer.time()) {
    int maxCappedSize = 10;
    int cappedSize = Math.min(docPartData.rowCount(), maxCappedSize);
    if (cappedSize < maxCappedSize) {
      //there are not enough elements on the insert => fallback
      if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("The insert window is not big enough to use copy (the limit is {}, the real " + "size is {}).", maxCappedSize, cappedSize);
      }
      metrics.insertDefault.mark();
      super.insertDocPartData(dsl, schemaName, docPartData);
    } else {
      Connection connection = dsl.configuration().connectionProvider().acquire();
      try {
        if (!connection.isWrapperFor(PGConnection.class)) {
          // e.g. a pooled/proxied connection that cannot unwrap to the driver class
          LOGGER.warn("It was impossible to use the PostgreSQL way to " + "insert documents. Inserting using the standard " + "implementation");
          metrics.insertDefault.mark();
          super.insertDocPartData(dsl, schemaName, docPartData);
        } else {
          try {
            metrics.insertCopy.mark();
            copyInsertDocPartData(connection.unwrap(PGConnection.class), schemaName, docPartData);
          } catch (DataAccessException ex) {
            throw errorHandler.handleUserException(Context.INSERT, ex);
          } catch (SQLException ex) {
            throw errorHandler.handleUserException(Context.INSERT, ex);
          } catch (IOException ex) {
            // The driver reports a broken COPY stream as a bare EOFException with a
            // null message; replace it with a descriptive one, but chain the original
            // as the cause so its stack trace is not lost.
            if (ex instanceof EOFException && ex.getMessage() == null) {
              LOGGER.debug(ex);
              EOFException described = new EOFException("End of file while COPYing data");
              described.initCause(ex);
              ex = described;
            }
            throw new SystemException(ex);
          }
        }
      } catch (SQLException ex) {
        // isWrapperFor/unwrap failures (not COPY failures, which are handled above)
        throw errorHandler.handleException(Context.INSERT, ex);
      } finally {
        dsl.configuration().connectionProvider().release(connection);
      }
    }
  }
}
Aggregations