Use of org.jkiss.dbeaver.model.DBPDataSource in project dbeaver by serge-rider.
Class ERDEditorStandalone, method getExecutionContext().
@Override
public DBCExecutionContext getExecutionContext() {
    for (Object part : getViewer().getSelectedEditParts()) {
        EditPart editPart = (EditPart) part;
        if (editPart.getModel() instanceof ERDObject) {
            final ERDObject model = (ERDObject) editPart.getModel();
            Object object = model.getObject();
            if (object instanceof DBSObject) {
                DBSObject dbObject = (DBSObject) object;
                DBPDataSource dataSource = dbObject.getDataSource();
                return dataSource.getDefaultContext(true);
            }
        }
    }
    return null;
}
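The method walks the diagram selection and, for the first edit part whose model wraps a DBSObject, returns the default context of that object's data source (or null when nothing suitable is selected). A caller therefore has to null-check the result before opening a session on it. A minimal sketch, assuming the standard DBCExecutionContext.openSession(monitor, purpose, taskTitle) API from the DBeaver model plugin; the method name runUtilityQuery and the task title are made up:

private void runUtilityQuery(DBCExecutionContext context, DBRProgressMonitor monitor) {
    // 'context' is the value returned by getExecutionContext() above
    if (context == null) {
        // Nothing is selected, or the selection does not wrap a DBSObject
        return;
    }
    try (DBCSession session = context.openSession(monitor, DBCExecutionPurpose.UTIL, "Read entity metadata")) {
        // ... run metadata queries against the selected object's data source
    }
}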
Use of org.jkiss.dbeaver.model.DBPDataSource in project dbeaver by serge-rider.
Class SQLQueryJob, method executeSingleQuery().
private boolean executeSingleQuery(@NotNull DBCSession session, @NotNull SQLQuery sqlQuery, final boolean fireEvents) {
    lastError = null;
    final DBCExecutionContext executionContext = getExecutionContext();
    final DBPDataSource dataSource = executionContext.getDataSource();
    final SQLQuery originalQuery = sqlQuery;
    long startTime = System.currentTimeMillis();
    boolean startQueryAlerted = false;
    if (!prepareStatementParameters(sqlQuery)) {
        return false;
    }
    // Modify query (filters + parameters)
    if (dataFilter != null && dataFilter.hasFilters() && dataSource instanceof SQLDataSource) {
        String filteredQueryText = ((SQLDataSource) dataSource).getSQLDialect().addFiltersToQuery(dataSource, originalQuery.getQuery(), dataFilter);
        sqlQuery = new SQLQuery(executionContext.getDataSource(), filteredQueryText, sqlQuery);
    }
    final SQLQueryResult curResult = new SQLQueryResult(sqlQuery);
    if (rsOffset > 0) {
        curResult.setRowOffset(rsOffset);
    }
    try {
        // Prepare statement
        closeStatement();
        // Check and invalidate connection
        if (!connectionInvalidated && dataSource.getContainer().getPreferenceStore().getBoolean(DBeaverPreferences.STATEMENT_INVALIDATE_BEFORE_EXECUTE)) {
            executionContext.invalidateContext(session.getProgressMonitor());
            connectionInvalidated = true;
        }
        statistics.setQueryText(originalQuery.getQuery());
        // Notify query start
        if (fireEvents && listener != null) {
            try {
                listener.onStartQuery(session, sqlQuery);
            } catch (Exception e) {
                log.error(e);
            }
            startQueryAlerted = true;
        }
        startTime = System.currentTimeMillis();
        DBCExecutionSource source = new AbstractExecutionSource(dataContainer, executionContext, partSite.getPart(), sqlQuery);
        final DBCStatement dbcStatement = DBUtils.makeStatement(source, session, DBCStatementType.SCRIPT, sqlQuery, rsOffset, rsMaxRows);
        curStatement = dbcStatement;
        int statementTimeout = getDataSourceContainer().getPreferenceStore().getInt(DBeaverPreferences.STATEMENT_TIMEOUT);
        if (statementTimeout > 0) {
            try {
                dbcStatement.setStatementTimeout(statementTimeout);
            } catch (Throwable e) {
                log.debug("Can't set statement timeout: " + e.getMessage());
            }
        }
        // Execute statement
        try {
            boolean hasResultSet = dbcStatement.executeStatement();
            curResult.setHasResultSet(hasResultSet);
            statistics.addExecuteTime(System.currentTimeMillis() - startTime);
            statistics.addStatementsCount();
            long updateCount = -1;
            while (hasResultSet || resultSetNumber == 0 || updateCount >= 0) {
                // Fetch data only if we have to fetch all results or if this is the requested result set
                if (fetchResultSetNumber < 0 || fetchResultSetNumber == resultSetNumber) {
                    if (hasResultSet && fetchResultSets) {
                        DBDDataReceiver dataReceiver = resultsConsumer.getDataReceiver(sqlQuery, resultSetNumber);
                        if (dataReceiver != null) {
                            hasResultSet = fetchQueryData(session, dbcStatement.openResultSet(), curResult, dataReceiver, true);
                        }
                    }
                }
                if (!hasResultSet) {
                    try {
                        updateCount = dbcStatement.getUpdateRowCount();
                        if (updateCount >= 0) {
                            curResult.setUpdateCount(updateCount);
                            statistics.addRowsUpdated(updateCount);
                        }
                    } catch (DBCException e) {
                        // In some cases we can't read update count
                        // This is bad but we can live with it
                        // Just print a warning
                        log.warn("Can't obtain update count", e);
                    }
                }
                if (hasResultSet && fetchResultSets) {
                    resultSetNumber++;
                    fetchResultSetNumber = resultSetNumber;
                }
                if (!hasResultSet && updateCount < 0) {
                    // Nothing else to fetch
                    break;
                }
                if (dataSource.getInfo().supportsMultipleResults()) {
                    hasResultSet = dbcStatement.nextResults();
                    updateCount = hasResultSet ? -1 : 0;
                } else {
                    break;
                }
            }
            try {
                curResult.setWarnings(dbcStatement.getStatementWarnings());
            } catch (Throwable e) {
                log.warn("Can't read execution warnings", e);
            }
        } finally {
            //monitor.subTask("Close query");
            if (!keepStatementOpen()) {
                closeStatement();
            }
        }
    } catch (Throwable ex) {
        if (!(ex instanceof DBException)) {
            log.error("Unexpected error while processing SQL", ex);
        }
        curResult.setError(ex);
        lastError = ex;
    } finally {
        curResult.setQueryTime(System.currentTimeMillis() - startTime);
        if (fireEvents && listener != null && startQueryAlerted) {
            // Notify query end
            try {
                listener.onEndQuery(session, curResult);
            } catch (Exception e) {
                log.error(e);
            }
        }
    }
    if (curResult.getError() != null && errorHandling != SQLScriptErrorHandling.IGNORE) {
        return false;
    }
    // Success
    lastGoodQuery = originalQuery;
    return true;
}
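The core of executeSingleQuery is the multi-result loop: a script statement may yield any mix of result sets and update counts, so the code alternates between openResultSet() and getUpdateRowCount() and advances with nextResults() until neither is available. A condensed sketch of that pattern, using only the DBCStatement and DBPDataSource calls that appear above; the comments mark where the real job plugs in its data receiver and statistics:

private void processAllResults(DBCStatement dbcStatement, DBPDataSource dataSource) throws DBCException {
    boolean hasResultSet = dbcStatement.executeStatement();
    while (true) {
        if (hasResultSet) {
            DBCResultSet resultSet = dbcStatement.openResultSet();
            try {
                // Hand the rows to a DBDDataReceiver (fetchQueryData() in the job above)
            } finally {
                resultSet.close();
            }
        } else {
            long updateCount = dbcStatement.getUpdateRowCount();
            if (updateCount < 0) {
                // Neither rows nor an update count - nothing left to fetch
                break;
            }
            // Record the update count (statistics.addRowsUpdated(updateCount) in the job above)
        }
        if (!dataSource.getInfo().supportsMultipleResults()) {
            // The driver only ever returns a single result
            break;
        }
        // Advance to the next result set / update count
        hasResultSet = dbcStatement.nextResults();
    }
}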
Use of org.jkiss.dbeaver.model.DBPDataSource in project dbeaver by serge-rider.
Class InvalidateJob, method run().
@Override
protected IStatus run(DBRProgressMonitor monitor) {
    // Re-validate the connections behind the job's execution context and keep the per-context results
    DBPDataSource dataSource = getExecutionContext().getDataSource();
    this.invalidateResults = invalidateDataSource(monitor, dataSource);
    return Status.OK_STATUS;
}
Use of org.jkiss.dbeaver.model.DBPDataSource in project dbeaver by serge-rider.
Class DriverEditDialog, method showBadConfigDialog().
public static void showBadConfigDialog(final Shell shell, final String message, final DBException error) {
    //log.debug(message);
    Runnable runnable = new Runnable() {
        @Override
        public void run() {
            DBPDataSource dataSource = error.getDataSource();
            String title = "Bad driver [" + dataSource.getContainer().getDriver().getName() + "] configuration";
            new BadDriverConfigDialog(shell, title, message == null ? title : message, error).open();
        }
    };
    DBeaverUI.syncExec(runnable);
}
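DBeaverUI.syncExec takes a plain Runnable and marshals the dialog onto the SWT UI thread; on a Java 8+ source level the same body can be passed as a lambda. An equivalent sketch of the call above:

DBeaverUI.syncExec(() -> {
    DBPDataSource dataSource = error.getDataSource();
    String title = "Bad driver [" + dataSource.getContainer().getDriver().getName() + "] configuration";
    new BadDriverConfigDialog(shell, title, message == null ? title : message, error).open();
});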
Use of org.jkiss.dbeaver.model.DBPDataSource in project dbeaver by dbeaver.
Class JDBCCompositeCache, method loadObjects().
protected synchronized void loadObjects(DBRProgressMonitor monitor, OWNER owner, PARENT forParent) throws DBException {
    synchronized (objectCache) {
        if ((forParent == null && isFullyCached()) ||
            (forParent != null && (!forParent.isPersisted() || objectCache.containsKey(forParent)))) {
            return;
        }
    }
    // Load tables and columns first
    if (forParent == null) {
        parentCache.loadObjects(monitor, owner);
        parentCache.loadChildren(monitor, owner, null);
    }
    Map<PARENT, Map<String, ObjectInfo>> parentObjectMap = new LinkedHashMap<>();
    // Load index columns
    DBPDataSource dataSource = owner.getDataSource();
    assert (dataSource != null);
    try (JDBCSession session = DBUtils.openMetaSession(monitor, dataSource, "Load composite objects")) {
        JDBCStatement dbStat = prepareObjectsStatement(session, owner, forParent);
        dbStat.setFetchSize(DBConstants.METADATA_FETCH_SIZE);
        try {
            dbStat.executeStatement();
            JDBCResultSet dbResult = dbStat.getResultSet();
            if (dbResult != null) {
                try {
                    while (dbResult.next()) {
                        if (monitor.isCanceled()) {
                            break;
                        }
                        String parentName = parentColumnName instanceof Number ?
                            JDBCUtils.safeGetString(dbResult, ((Number) parentColumnName).intValue()) :
                            JDBCUtils.safeGetString(dbResult, parentColumnName.toString());
                        String objectName = objectColumnName instanceof Number ?
                            JDBCUtils.safeGetString(dbResult, ((Number) objectColumnName).intValue()) :
                            JDBCUtils.safeGetString(dbResult, objectColumnName.toString());
                        if (CommonUtils.isEmpty(objectName)) {
                            // Use default name
                            objectName = getDefaultObjectName(dbResult, parentName);
                        }
                        if (forParent == null && CommonUtils.isEmpty(parentName)) {
                            // No parent - can't evaluate it
                            log.debug("Empty parent name in " + this);
                            continue;
                        }
                        PARENT parent = forParent;
                        if (parent == null) {
                            parent = parentCache.getObject(monitor, owner, parentName, parentType);
                            if (parent == null) {
                                log.debug("Object '" + objectName + "' owner '" + parentName + "' not found");
                                continue;
                            }
                        }
                        synchronized (objectCache) {
                            if (objectCache.containsKey(parent)) {
                                // Already cached
                                continue;
                            }
                        }
                        // Add to map
                        Map<String, ObjectInfo> objectMap = parentObjectMap.get(parent);
                        if (objectMap == null) {
                            objectMap = new TreeMap<>();
                            parentObjectMap.put(parent, objectMap);
                        }
                        ObjectInfo objectInfo = objectMap.get(objectName);
                        if (objectInfo == null) {
                            OBJECT object = fetchObject(session, owner, parent, objectName, dbResult);
                            if (object == null) {
                                // Can't fetch object
                                continue;
                            }
                            objectName = object.getName();
                            objectInfo = new ObjectInfo(object);
                            objectMap.put(objectName, objectInfo);
                        }
                        ROW_REF[] rowRef = fetchObjectRow(session, parent, objectInfo.object, dbResult);
                        if (rowRef == null || rowRef.length == 0) {
                            // At least one of rows is broken.
                            // So entire object is broken, let's just skip it.
                            objectInfo.broken = true;
                            //log.debug("Object '" + objectName + "' metadata corrupted - NULL child returned");
                            continue;
                        }
                        for (ROW_REF row : rowRef) {
                            if (row != null) {
                                objectInfo.rows.add(row);
                            }
                        }
                    }
                } finally {
                    dbResult.close();
                }
            }
        } finally {
            dbStat.close();
        }
    } catch (SQLException ex) {
        throw new DBException(ex, dataSource);
    }
    if (monitor.isCanceled()) {
        return;
    }
    // Fill global cache
    synchronized (this) {
        synchronized (objectCache) {
            if (forParent != null || !parentObjectMap.isEmpty()) {
                if (forParent == null) {
                    // Cache global object list
                    List<OBJECT> globalCache = new ArrayList<>();
                    for (Map<String, ObjectInfo> objMap : parentObjectMap.values()) {
                        if (objMap != null) {
                            for (ObjectInfo info : objMap.values()) {
                                if (!info.broken) {
                                    globalCache.add(info.object);
                                }
                            }
                        }
                    }
                    // Save precached objects in global cache
                    for (List<OBJECT> objects : objectCache.values()) {
                        globalCache.addAll(objects);
                    }
                    // Add precached objects to global cache too
                    super.setCache(globalCache);
                    this.invalidateObjects(monitor, owner, new CacheIterator());
                }
            }
            // All objects are read. Now assign them to parents
            for (Map.Entry<PARENT, Map<String, ObjectInfo>> colEntry : parentObjectMap.entrySet()) {
                if (colEntry.getValue() == null || objectCache.containsKey(colEntry.getKey())) {
                    // Do not overwrite this object's cache
                    continue;
                }
                Collection<ObjectInfo> objectInfos = colEntry.getValue().values();
                ArrayList<OBJECT> objects = new ArrayList<>(objectInfos.size());
                for (ObjectInfo objectInfo : objectInfos) {
                    objectInfo.needsCaching = true;
                    objects.add(objectInfo.object);
                }
                objectCache.put(colEntry.getKey(), objects);
            }
            // Now set empty object list for other parents
            if (forParent == null) {
                for (PARENT tmpParent : parentCache.getTypedObjects(monitor, owner, parentType)) {
                    if (!parentObjectMap.containsKey(tmpParent) && !objectCache.containsKey(tmpParent)) {
                        objectCache.put(tmpParent, new ArrayList<OBJECT>());
                    }
                }
            } else if (!parentObjectMap.containsKey(forParent) && !objectCache.containsKey(forParent)) {
                objectCache.put(forParent, new ArrayList<OBJECT>());
            }
        }
        // Cache children lists (we do it in the end because children caching may operate with other model objects)
        for (Map.Entry<PARENT, Map<String, ObjectInfo>> colEntry : parentObjectMap.entrySet()) {
            for (ObjectInfo objectInfo : colEntry.getValue().values()) {
                if (objectInfo.needsCaching) {
                    cacheChildren(monitor, objectInfo.object, objectInfo.rows);
                }
            }
        }
        for (Map.Entry<PARENT, Map<String, ObjectInfo>> colEntry : parentObjectMap.entrySet()) {
            for (ObjectInfo objectInfo : colEntry.getValue().values()) {
                if (objectInfo.needsCaching) {
                    cacheChildren2(monitor, objectInfo.object, objectInfo.rows);
                }
            }
        }
    }
}
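loadObjects is the shared plumbing; a concrete cache only supplies the abstract hooks it calls (prepareObjectsStatement, fetchObject, fetchObjectRow, cacheChildren). As a rough illustration, a prepareObjectsStatement override usually prepares one query that either lists all objects or filters by the parent. The catalog table and column names below (SYS_CONSTRAINTS, TABLE_NAME) and the Generic* model types are assumptions for the sketch, not taken from this class:

@Override
protected JDBCStatement prepareObjectsStatement(JDBCSession session, GenericStructContainer owner, GenericTable forParent) throws SQLException {
    // Hypothetical query: real subclasses read their own catalog views here
    JDBCPreparedStatement dbStat = session.prepareStatement(
        "SELECT * FROM SYS_CONSTRAINTS" + (forParent == null ? "" : " WHERE TABLE_NAME=?"));
    if (forParent != null) {
        dbStat.setString(1, forParent.getName());
    }
    return dbStat;
}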