Use of org.jkiss.dbeaver.model.impl.AbstractExecutionSource in project dbeaver by serge-rider.
The class DatabaseTransferProducer, method transferData.
@Override
public void transferData(DBRProgressMonitor monitor, IDataTransferConsumer consumer, DatabaseProducerSettings settings) throws DBException {
String contextTask = CoreMessages.data_transfer_wizard_job_task_export;
DBPDataSource dataSource = getSourceObject().getDataSource();
assert (dataSource != null);
boolean newConnection = settings.isOpenNewConnections();
DBCExecutionContext context = newConnection ? dataSource.openIsolatedContext(monitor, "Data transfer producer") : dataSource.getDefaultContext(false);
try (DBCSession session = context.openSession(monitor, DBCExecutionPurpose.UTIL, contextTask)) {
try {
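// The execution source bundles the source data container, the execution context the transfer runs in, and the consumer that initiated it; it is passed to countData/readData below.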
AbstractExecutionSource transferSource = new AbstractExecutionSource(dataContainer, context, consumer);
session.enableLogging(false);
if (newConnection) {
// Turn off auto-commit: some drivers can read LOBs and other complex structures only in transactional mode
try {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(context);
if (txnManager != null) {
txnManager.setAutoCommit(monitor, false);
}
} catch (DBCException e) {
log.warn("Can't change auto-commit", e);
}
}
long totalRows = 0;
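// Optionally pre-count the rows (when the container supports DATA_COUNT) so the export progress monitor can report total work.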
if (settings.isQueryRowCount() && (dataContainer.getSupportedFeatures() & DBSDataContainer.DATA_COUNT) != 0) {
monitor.beginTask(CoreMessages.data_transfer_wizard_job_task_retrieve, 1);
try {
totalRows = dataContainer.countData(transferSource, session, dataFilter);
} catch (Throwable e) {
log.warn("Can't retrieve row count from '" + dataContainer.getName() + "'", e);
try {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(session.getExecutionContext());
if (txnManager != null && !txnManager.isAutoCommit()) {
txnManager.rollback(session, null);
}
} catch (Throwable e1) {
log.warn("Error rolling back transaction", e1);
}
} finally {
monitor.done();
}
}
monitor.beginTask(CoreMessages.data_transfer_wizard_job_task_export_table_data, (int) totalRows);
try {
// Perform export
if (settings.getExtractType() == DatabaseProducerSettings.ExtractType.SINGLE_QUERY) {
// Just do it in a single query
dataContainer.readData(transferSource, session, consumer, dataFilter, -1, -1, DBSDataContainer.FLAG_NONE);
} else {
// Read all data by segments
long offset = 0;
int segmentSize = settings.getSegmentSize();
for (; ; ) {
DBCStatistics statistics = dataContainer.readData(transferSource, session, consumer, dataFilter, offset, segmentSize, DBSDataContainer.FLAG_NONE);
if (statistics == null || statistics.getRowsFetched() < segmentSize) {
// Done
break;
}
offset += statistics.getRowsFetched();
}
}
} finally {
monitor.done();
}
//dataContainer.readData(context, consumer, dataFilter, -1, -1);
} finally {
if (newConnection) {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(context);
if (txnManager != null) {
try {
txnManager.commit(session);
} catch (DBCException e) {
log.error("Can't finish transaction in data producer connection", e);
}
}
}
if (newConnection) {
context.close();
}
}
}
}
Use of org.jkiss.dbeaver.model.impl.AbstractExecutionSource in project dbeaver by serge-rider.
The class SQLQueryJob, method executeSingleQuery.
private boolean executeSingleQuery(@NotNull DBCSession session, @NotNull SQLQuery sqlQuery, final boolean fireEvents) {
lastError = null;
final DBCExecutionContext executionContext = getExecutionContext();
final DBPDataSource dataSource = executionContext.getDataSource();
final SQLQuery originalQuery = sqlQuery;
long startTime = System.currentTimeMillis();
boolean startQueryAlerted = false;
if (!prepareStatementParameters(sqlQuery)) {
return false;
}
// Modify query (filters + parameters)
if (dataFilter != null && dataFilter.hasFilters() && dataSource instanceof SQLDataSource) {
String filteredQueryText = ((SQLDataSource) dataSource).getSQLDialect().addFiltersToQuery(dataSource, originalQuery.getQuery(), dataFilter);
sqlQuery = new SQLQuery(executionContext.getDataSource(), filteredQueryText, sqlQuery);
}
final SQLQueryResult curResult = new SQLQueryResult(sqlQuery);
if (rsOffset > 0) {
curResult.setRowOffset(rsOffset);
}
try {
// Prepare statement
closeStatement();
// Check and invalidate connection
if (!connectionInvalidated && dataSource.getContainer().getPreferenceStore().getBoolean(DBeaverPreferences.STATEMENT_INVALIDATE_BEFORE_EXECUTE)) {
executionContext.invalidateContext(session.getProgressMonitor());
connectionInvalidated = true;
}
statistics.setQueryText(originalQuery.getQuery());
// Notify query start
if (fireEvents && listener != null) {
// Notify query start
try {
listener.onStartQuery(session, sqlQuery);
} catch (Exception e) {
log.error(e);
}
startQueryAlerted = true;
}
startTime = System.currentTimeMillis();
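// Here the execution source also carries the SQL query itself as the source descriptor, in addition to the container, the context, and the controlling workbench part.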
DBCExecutionSource source = new AbstractExecutionSource(dataContainer, executionContext, partSite.getPart(), sqlQuery);
final DBCStatement dbcStatement = DBUtils.makeStatement(source, session, DBCStatementType.SCRIPT, sqlQuery, rsOffset, rsMaxRows);
curStatement = dbcStatement;
int statementTimeout = getDataSourceContainer().getPreferenceStore().getInt(DBeaverPreferences.STATEMENT_TIMEOUT);
if (statementTimeout > 0) {
try {
dbcStatement.setStatementTimeout(statementTimeout);
} catch (Throwable e) {
log.debug("Can't set statement timeout:" + e.getMessage());
}
}
// Execute statement
try {
boolean hasResultSet = dbcStatement.executeStatement();
curResult.setHasResultSet(hasResultSet);
statistics.addExecuteTime(System.currentTimeMillis() - startTime);
statistics.addStatementsCount();
long updateCount = -1;
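// Loop over every result set and update count the script statement produces; for data sources without multiple-result support the loop exits after the first pass.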
while (hasResultSet || resultSetNumber == 0 || updateCount >= 0) {
// Fetch data only if we have to fetch all results or if this is the requested result set
if (fetchResultSetNumber < 0 || fetchResultSetNumber == resultSetNumber) {
if (hasResultSet && fetchResultSets) {
DBDDataReceiver dataReceiver = resultsConsumer.getDataReceiver(sqlQuery, resultSetNumber);
if (dataReceiver != null) {
hasResultSet = fetchQueryData(session, dbcStatement.openResultSet(), curResult, dataReceiver, true);
}
}
}
if (!hasResultSet) {
try {
updateCount = dbcStatement.getUpdateRowCount();
if (updateCount >= 0) {
curResult.setUpdateCount(updateCount);
statistics.addRowsUpdated(updateCount);
}
} catch (DBCException e) {
// In some cases we can't read update count
// This is bad but we can live with it
// Just print a warning
log.warn("Can't obtain update count", e);
}
}
if (hasResultSet && fetchResultSets) {
resultSetNumber++;
fetchResultSetNumber = resultSetNumber;
}
if (!hasResultSet && updateCount < 0) {
// Nothing else to fetch
break;
}
if (dataSource.getInfo().supportsMultipleResults()) {
hasResultSet = dbcStatement.nextResults();
updateCount = hasResultSet ? -1 : 0;
} else {
break;
}
}
try {
curResult.setWarnings(dbcStatement.getStatementWarnings());
} catch (Throwable e) {
log.warn("Can't read execution warnings", e);
}
} finally {
//monitor.subTask("Close query");
if (!keepStatementOpen()) {
closeStatement();
}
}
} catch (Throwable ex) {
if (!(ex instanceof DBException)) {
log.error("Unexpected error while processing SQL", ex);
}
curResult.setError(ex);
lastError = ex;
} finally {
curResult.setQueryTime(System.currentTimeMillis() - startTime);
if (fireEvents && listener != null && startQueryAlerted) {
// Notify query end
try {
listener.onEndQuery(session, curResult);
} catch (Exception e) {
log.error(e);
}
}
}
if (curResult.getError() != null && errorHandling != SQLScriptErrorHandling.IGNORE) {
return false;
}
// Success
lastGoodQuery = originalQuery;
return true;
}
Use of org.jkiss.dbeaver.model.impl.AbstractExecutionSource in project dbeaver by dbeaver.
The class MockDataExecuteWizard, method executeProcess.
@Override
public boolean executeProcess(DBRProgressMonitor monitor, DBSDataManipulator dataManipulator) throws IOException {
DBCExecutionContext context = dataManipulator.getDataSource().getDefaultContext(true);
try (DBCSession session = context.openSession(monitor, DBCExecutionPurpose.USER, MockDataMessages.tools_mockdata_generate_data_task)) {
DBCTransactionManager txnManager = DBUtils.getTransactionManager(session.getExecutionContext());
boolean autoCommit;
try {
autoCommit = txnManager == null || txnManager.isAutoCommit();
} catch (DBCException e) {
log.error(e);
autoCommit = true;
}
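// The same execution source instance is reused for both the delete batch (old-data cleanup) and the insert batches below.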
AbstractExecutionSource executionSource = new AbstractExecutionSource(dataManipulator, session.getExecutionContext(), this);
boolean success = true;
monitor.beginTask("Generate Mock Data", 3);
if (mockDataSettings.isRemoveOldData()) {
logPage.appendLog("Removing old data from the '" + dataManipulator.getName() + "'.\n");
monitor.subTask("Cleanup old data");
DBCStatistics deleteStats = new DBCStatistics();
try {
// TODO: truncate is much faster than delete
try (DBSDataManipulator.ExecuteBatch batch = dataManipulator.deleteData(session, new DBSAttributeBase[] {}, executionSource)) {
batch.add(new Object[] {});
deleteStats.accumulate(batch.execute(session));
}
if (txnManager != null && !autoCommit) {
txnManager.commit(session);
}
} catch (Exception e) {
success = false;
String message = " Error removing the data: " + e.getMessage();
log.error(message, e);
logPage.appendLog(message + "\n\n", true);
}
logPage.appendLog(" Rows updated: " + deleteStats.getRowsUpdated() + "\n");
logPage.appendLog(" Duration: " + deleteStats.getExecuteTime() + "ms\n\n");
} else {
logPage.appendLog("Old data isn't removed.\n\n");
}
if (!success) {
return true;
}
try {
monitor.subTask("Insert data");
logPage.appendLog("Inserting mock data into the '" + dataManipulator.getName() + "'.\n");
DBCStatistics insertStats = new DBCStatistics();
// build and init the generators
generators.clear();
DBSEntity dbsEntity = (DBSEntity) dataManipulator;
Collection<? extends DBSAttributeBase> attributes = DBUtils.getRealAttributes(dbsEntity.getAttributes(monitor));
for (DBSAttributeBase attribute : attributes) {
MockGeneratorDescriptor generatorDescriptor = mockDataSettings.getGeneratorDescriptor(mockDataSettings.getAttributeGeneratorProperties(attribute).getSelectedGeneratorId());
if (generatorDescriptor != null) {
MockValueGenerator generator = generatorDescriptor.createGenerator();
MockDataSettings.AttributeGeneratorProperties generatorPropertySource = this.mockDataSettings.getAttributeGeneratorProperties(attribute);
String selectedGenerator = generatorPropertySource.getSelectedGeneratorId();
Map<Object, Object> generatorProperties = generatorPropertySource.getGeneratorPropertySource(selectedGenerator).getPropertiesWithDefaults();
generator.init(dataManipulator, attribute, generatorProperties);
generators.put(attribute.getName(), generator);
}
}
monitor.done();
long rowsNumber = mockDataSettings.getRowsNumber();
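// Number of BATCH_SIZE-sized batches needed to cover rowsNumber, rounded up.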
long quotient = rowsNumber / BATCH_SIZE;
long modulo = rowsNumber % BATCH_SIZE;
if (modulo > 0) {
quotient++;
}
int counter = 0;
monitor.beginTask("Insert data", (int) rowsNumber);
// generate and insert the data
session.enableLogging(false);
DBSDataManipulator.ExecuteBatch batch = null;
for (int q = 0; q < quotient; q++) {
if (monitor.isCanceled()) {
break;
}
if (counter > 0) {
if (txnManager != null && !autoCommit) {
txnManager.commit(session);
}
monitor.subTask(String.valueOf(counter) + " rows inserted");
monitor.worked(BATCH_SIZE);
}
try {
for (int i = 0; (i < BATCH_SIZE && counter < rowsNumber); i++) {
if (monitor.isCanceled()) {
break;
}
List<DBDAttributeValue> attributeValues = new ArrayList<>();
try {
for (DBSAttributeBase attribute : attributes) {
MockValueGenerator generator = generators.get(attribute.getName());
if (generator != null) {
// ((AbstractMockValueGenerator) generator).checkUnique(monitor);
Object value = generator.generateValue(monitor);
attributeValues.add(new DBDAttributeValue(attribute, value));
}
}
} catch (DBException e) {
processGeneratorException(e);
return true;
}
if (batch == null) {
batch = dataManipulator.insertData(session, DBDAttributeValue.getAttributes(attributeValues), null, executionSource);
}
if (counter++ < rowsNumber) {
batch.add(DBDAttributeValue.getValues(attributeValues));
}
}
if (batch != null) {
insertStats.accumulate(batch.execute(session));
}
} catch (Exception e) {
processGeneratorException(e);
if (e instanceof DBException) {
throw e;
}
} finally {
if (batch != null) {
batch.close();
batch = null;
}
}
}
if (txnManager != null && !autoCommit) {
txnManager.commit(session);
}
logPage.appendLog(" Rows updated: " + insertStats.getRowsUpdated() + "\n");
logPage.appendLog(" Duration: " + insertStats.getExecuteTime() + "ms\n\n");
} catch (DBException e) {
String message = " Error inserting mock data: " + e.getMessage();
log.error(message, e);
logPage.appendLog(message + "\n\n", true);
}
} finally {
monitor.done();
}
return true;
}
Use of org.jkiss.dbeaver.model.impl.AbstractExecutionSource in project dbeaver by serge-rider.
The class SearchDataQuery, method findRows.
private DBCStatistics findRows(@NotNull DBCSession session, @NotNull DBSDataContainer dataContainer, @NotNull TestDataReceiver dataReceiver) throws DBCException {
DBSEntity entity;
if (dataContainer instanceof DBSEntity) {
entity = (DBSEntity) dataContainer;
} else {
log.warn("Data container " + dataContainer + " isn't entity");
return null;
}
try {
List<DBDAttributeConstraint> constraints = new ArrayList<>();
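// Build one constraint per searchable attribute; the operator and value are chosen from the attribute's data kind and the operators it supports.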
for (DBSEntityAttribute attribute : CommonUtils.safeCollection(entity.getAttributes(session.getProgressMonitor()))) {
if (params.fastSearch) {
if (DBUtils.findAttributeIndex(session.getProgressMonitor(), attribute) == null) {
continue;
}
}
if (DBUtils.isPseudoAttribute(attribute) || DBUtils.isHiddenObject(attribute)) {
continue;
}
DBCLogicalOperator[] supportedOperators = DBUtils.getAttributeOperators(attribute);
DBCLogicalOperator operator;
Object value;
switch(attribute.getDataKind()) {
case BOOLEAN:
continue;
case NUMERIC:
if (!params.searchNumbers) {
continue;
}
if (!ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) {
continue;
}
operator = DBCLogicalOperator.EQUALS;
try {
value = Integer.valueOf(params.searchString);
} catch (NumberFormatException e) {
try {
value = Long.valueOf(params.searchString);
} catch (NumberFormatException e1) {
try {
value = Double.valueOf(params.searchString);
} catch (NumberFormatException e2) {
try {
value = new BigDecimal(params.searchString);
} catch (Exception e3) {
// Not a number
continue;
}
}
}
}
break;
case CONTENT:
case BINARY:
if (!params.searchLOBs) {
continue;
}
case STRING:
if (!params.isCaseSensitive() && ArrayUtils.contains(supportedOperators, DBCLogicalOperator.ILIKE)) {
operator = DBCLogicalOperator.ILIKE;
value = "%" + params.searchString + "%";
} else if (ArrayUtils.contains(supportedOperators, DBCLogicalOperator.LIKE)) {
operator = DBCLogicalOperator.LIKE;
value = "%" + params.searchString + "%";
} else if (ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) {
operator = DBCLogicalOperator.EQUALS;
value = params.searchString;
} else {
continue;
}
break;
default:
{
// For other types, try an exact-match search when the value can be parsed (currently only UUID columns)
if (!ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) {
continue;
}
String typeName = attribute.getTypeName();
if (typeName.equals(DBConstants.TYPE_NAME_UUID) || typeName.equals(DBConstants.TYPE_NAME_UUID2)) {
try {
UUID uuid = UUID.fromString(params.searchString);
operator = DBCLogicalOperator.EQUALS;
value = uuid.toString();
} catch (Exception e) {
// Not a UUID
continue;
}
} else {
continue;
}
}
}
DBDAttributeConstraint constraint = new DBDAttributeConstraint(attribute, constraints.size());
constraint.setOperator(operator);
constraint.setValue(value);
constraint.setVisible(true);
constraints.add(constraint);
}
if (constraints.isEmpty()) {
return null;
}
dataReceiver.filter = new DBDDataFilter(constraints);
dataReceiver.filter.setAnyConstraint(true);
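// With anyConstraint=true the filter matches rows where any single constraint holds (OR semantics), which is what a whole-table search needs.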
DBCExecutionSource searchSource = new AbstractExecutionSource(dataContainer, session.getExecutionContext(), this);
return dataContainer.readData(searchSource, session, dataReceiver, dataReceiver.filter, -1, -1, 0, 0);
} catch (DBException e) {
throw new DBCException("Error finding rows", e);
}
}
Use of org.jkiss.dbeaver.model.impl.AbstractExecutionSource in project dbeaver by serge-rider.
The class DatabaseMappingContainer, method readAttributes.
private void readAttributes(DBRProgressMonitor monitor) throws DBException {
if (source instanceof DBSEntity && !(source instanceof DBSDocumentContainer)) {
for (DBSEntityAttribute attr : CommonUtils.safeCollection(((DBSEntity) source).getAttributes(monitor))) {
if (DBUtils.isHiddenObject(attr)) {
continue;
}
addAttributeMapping(monitor, attr);
}
} else {
// Seems to be a dynamic query. Execute it to get metadata
DBPDataSource dataSource = source.getDataSource();
assert (dataSource != null);
DBCExecutionContext context;
if (source instanceof DBPContextProvider) {
context = ((DBPContextProvider) source).getExecutionContext();
} else {
context = DBUtils.getDefaultContext(source, false);
}
if (context == null) {
throw new DBCException("No execution context");
}
DBExecUtils.tryExecuteRecover(monitor, context.getDataSource(), monitor1 -> {
try (DBCSession session = context.openSession(monitor1, DBCExecutionPurpose.META, "Read query meta data")) {
MetadataReceiver receiver = new MetadataReceiver();
try {
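// Read a single row (offset 0, max 1) purely to obtain result set metadata; the receiver records the attribute bindings.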
source.readData(new AbstractExecutionSource(source, session.getExecutionContext(), this), session, receiver, null, 0, 1, DBSDataContainer.FLAG_NONE, 1);
for (DBDAttributeBinding attr : receiver.attributes) {
if (DBUtils.isHiddenObject(attr)) {
continue;
}
addAttributeMapping(monitor1, attr);
}
} catch (Exception e) {
throw new InvocationTargetException(e);
}
}
});
}
}
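Each snippet above follows the same basic pattern: obtain an execution context, open a DBCSession on it, build an AbstractExecutionSource from the data container, the context, and the object driving the operation (optionally with a source descriptor such as the SQL query), and hand that source to countData/readData or to a manipulation batch. Below is a minimal sketch of that pattern; the method and variable names are hypothetical, imports are omitted as in the snippets above, and it uses the seven-argument readData form from the first snippet (other snippets here show signatures with an extra fetch-size argument).

// Minimal sketch, assuming the caller already has a container, a monitor, and a receiver (hypothetical names).
private void readAll(DBRProgressMonitor monitor, DBSDataContainer container, DBDDataReceiver receiver) throws DBException {
    // Resolve a default execution context for the container's data source.
    DBCExecutionContext context = DBUtils.getDefaultContext(container, false);
    try (DBCSession session = context.openSession(monitor, DBCExecutionPurpose.UTIL, "Read data")) {
        // The execution source ties together the source container, the context the work runs in,
        // and the object that initiated the operation (here, the calling instance).
        DBCExecutionSource source = new AbstractExecutionSource(container, context, this);
        // No filter, no offset/limit; FLAG_NONE requests plain row reading.
        container.readData(source, session, receiver, null, -1, -1, DBSDataContainer.FLAG_NONE);
    }
}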