Usage example of org.xbib.elasticsearch.common.util.SQLCommand in the project elasticsearch-jdbc by jprante:
the fetch method of the StandardSource class.
/**
 * Fetch, issue SQL statements.
 *
 * Each configured statement is executed once; if a recoverable SQL error
 * occurs, the statement is retried a single time after waiting for
 * {@code maxRetryWait}. Any other failure is recorded in the source metric
 * and rethrown wrapped in an {@link IOException}.
 *
 * @throws SQLException when SQL execution gives an error
 * @throws IOException when input/output error occurs, or any other
 *                     execution failure (wrapped)
 */
@Override
public void fetch() throws SQLException, IOException {
    logger.debug("fetching, {} SQL commands", getStatements().size());
    DateTime dateTime = new DateTime();
    try {
        for (SQLCommand command : getStatements()) {
            try {
                executeCommand(command);
                markSucceeded(dateTime);
            } catch (SQLRecoverableException e) {
                long millis = getMaxRetryWait().getMillis();
                logger.warn("retrying after " + millis / 1000 + " seconds, got exception ", e);
                try {
                    Thread.sleep(millis);
                } catch (InterruptedException ie) {
                    // restore the interrupt flag so callers can still observe
                    // the interruption, then abort the fetch
                    Thread.currentThread().interrupt();
                    throw new IOException(ie);
                }
                // single retry of the same command
                executeCommand(command);
                markSucceeded(dateTime);
            }
        }
    } catch (Exception e) {
        if (sourceMetric != null) {
            sourceMetric.getFailed().inc();
            sourceMetric.setLastExecutionStart(dateTime);
            sourceMetric.setLastExecutionEnd(new DateTime());
        }
        throw new IOException(e);
    } finally {
        if (sourceMetric != null) {
            sourceMetric.incCounter();
        }
    }
}

/**
 * Dispatch a single SQL command to the matching execution path:
 * callable statement, parameterized statement, or plain statement.
 *
 * @param command the SQL command to execute
 * @throws Exception if execution fails
 */
private void executeCommand(SQLCommand command) throws Exception {
    if (command.isCallable()) {
        logger.debug("{} executing callable SQL: {}", this, command);
        executeCallable(command);
    } else if (!command.getParameters().isEmpty()) {
        logger.debug("{} executing SQL with params: {}", this, command);
        executeWithParameter(command);
    } else {
        logger.debug("{} executing SQL without params: {}", this, command);
        execute(command);
    }
}

/**
 * Record a successful execution in the source metric, if a metric is present.
 *
 * @param start the start timestamp of this fetch run
 */
private void markSucceeded(DateTime start) {
    if (sourceMetric != null) {
        sourceMetric.getSucceeded().inc();
        sourceMetric.setLastExecutionStart(start);
        sourceMetric.setLastExecutionEnd(new DateTime());
    }
}
Usage example of org.xbib.elasticsearch.common.util.SQLCommand in the project elasticsearch-jdbc by jprante:
the fetch method of the ColumnSource class.
/**
 * Fetch: for every configured SQL command, obtain a connection for
 * reading, collect the pending operation infos, and run the fetch for
 * each of them against the timestamp of the previous run. Commands for
 * which no connection is available are skipped.
 *
 * @throws SQLException when SQL execution gives an error
 * @throws IOException when input/output error occurs
 */
@Override
public void fetch() throws SQLException, IOException {
    for (SQLCommand sqlCommand : getStatements()) {
        Connection readConnection = getConnectionForReading();
        if (readConnection == null) {
            // no connection available for this command, move on
            continue;
        }
        List<OpInfo> pendingOps = getOpInfos(readConnection);
        Timestamp lastRun = getLastRunTimestamp();
        logger.debug("lastRunTimestamp={}", lastRun);
        for (OpInfo op : pendingOps) {
            logger.debug("opinfo={}", op.toString());
            fetch(readConnection, sqlCommand, op, lastRun);
        }
    }
}
Usage example of org.xbib.elasticsearch.common.util.SQLCommand in the project elasticsearch-jdbc by jprante:
the prepareContext method of the StandardContext class.
/**
 * Prepare the context: read all source-related settings from the structured
 * settings map, parse the SQL commands, and configure the given source,
 * then register source and sink on this context.
 *
 * @param source the source to configure
 * @param sink the sink to register
 * @throws IOException when reading the settings fails
 */
@SuppressWarnings("unchecked")
protected void prepareContext(S source, Sink sink) throws IOException {
    Map<String, Object> params = settings.getAsStructuredMap();
    List<SQLCommand> sql = SQLCommand.parse(params);
    String rounding = XContentMapValues.nodeStringValue(params.get("rounding"), null);
    int scale = XContentMapValues.nodeIntegerValue(params.get("scale"), 2);
    boolean autocommit = XContentMapValues.nodeBooleanValue(params.get("autocommit"), false);
    int fetchsize = parseFetchSize(params);
    int maxrows = XContentMapValues.nodeIntegerValue(params.get("max_rows"), 0);
    int maxretries = XContentMapValues.nodeIntegerValue(params.get("max_retries"), 3);
    TimeValue maxretrywait = XContentMapValues.nodeTimeValue(params.get("max_retries_wait"), TimeValue.timeValueSeconds(30));
    String resultSetType = XContentMapValues.nodeStringValue(params.get("resultset_type"), "TYPE_FORWARD_ONLY");
    String resultSetConcurrency = XContentMapValues.nodeStringValue(params.get("resultset_concurrency"), "CONCUR_UPDATABLE");
    boolean shouldIgnoreNull = XContentMapValues.nodeBooleanValue(params.get("ignore_null_values"), false);
    boolean shouldDetectGeo = XContentMapValues.nodeBooleanValue(params.get("detect_geo"), true);
    boolean shouldDetectJson = XContentMapValues.nodeBooleanValue(params.get("detect_json"), true);
    boolean shouldPrepareDatabaseMetadata = XContentMapValues.nodeBooleanValue(params.get("prepare_database_metadata"), false);
    boolean shouldPrepareResultSetMetadata = XContentMapValues.nodeBooleanValue(params.get("prepare_resultset_metadata"), false);
    Map<String, Object> columnNameMap = (Map<String, Object>) params.get("column_name_map");
    int queryTimeout = XContentMapValues.nodeIntegerValue(params.get("query_timeout"), 1800);
    Map<String, Object> connectionProperties = (Map<String, Object>) params.get("connection_properties");
    boolean shouldTreatBinaryAsString = XContentMapValues.nodeBooleanValue(params.get("treat_binary_as_string"), false);
    source.setRounding(rounding)
            .setScale(scale)
            .setStatements(sql)
            .setAutoCommit(autocommit)
            .setMaxRows(maxrows)
            .setFetchSize(fetchsize)
            .setRetries(maxretries)
            .setMaxRetryWait(maxretrywait)
            .setResultSetType(resultSetType)
            .setResultSetConcurrency(resultSetConcurrency)
            .shouldIgnoreNull(shouldIgnoreNull)
            .shouldDetectGeo(shouldDetectGeo)
            .shouldDetectJson(shouldDetectJson)
            .shouldPrepareDatabaseMetadata(shouldPrepareDatabaseMetadata)
            .shouldPrepareResultSetMetadata(shouldPrepareResultSetMetadata)
            .setColumnNameMap(columnNameMap)
            .setQueryTimeout(queryTimeout)
            .setConnectionProperties(connectionProperties)
            .shouldTreatBinaryAsString(shouldTreatBinaryAsString);
    setSource(source);
    setSink(sink);
}

/**
 * Resolve the fetch size from the settings.
 *
 * The special value {@code "min"} selects {@code Integer.MIN_VALUE}
 * (MySQL streaming mode). An unparseable value falls back to the default
 * of 10. When no fetch size is configured and the JDBC URL is a MySQL
 * URL, streaming mode is enabled by default.
 *
 * @param params the structured settings map
 * @return the fetch size to use
 */
private int parseFetchSize(Map<String, Object> params) {
    int fetchsize = 10;
    String fetchSizeStr = XContentMapValues.nodeStringValue(params.get("fetchsize"), null);
    if ("min".equals(fetchSizeStr)) {
        // for MySQL streaming mode
        fetchsize = Integer.MIN_VALUE;
    } else if (fetchSizeStr != null) {
        try {
            fetchsize = Integer.parseInt(fetchSizeStr);
        } catch (NumberFormatException ignored) {
            // unparseable value: keep the default of 10
        }
    } else {
        // if MySQL, enable streaming mode hack by default
        String url = XContentMapValues.nodeStringValue(params.get("url"), null);
        if (url != null && url.startsWith("jdbc:mysql")) {
            // for MySQL streaming mode
            fetchsize = Integer.MIN_VALUE;
        }
    }
    return fetchsize;
}
Aggregations