Example usage of org.jkiss.dbeaver.model.struct.DBSDataManipulator in the dbeaver project (repository: dbeaver/dbeaver).
Source: class MockDataExecuteWizard, method executeProcess.
/**
 * Generates mock data for the given table.
 * Flow: (1) optionally delete the table's existing rows, (2) build one value
 * generator per real attribute from the configured generator descriptors,
 * (3) insert the requested number of rows in batches of BATCH_SIZE,
 * committing between batches when the session is not in auto-commit mode.
 *
 * @param monitor         progress monitor; cancellation is checked per batch and per row
 * @param dataManipulator the target table (also cast to DBSEntity to enumerate attributes)
 * @return always {@code true} in the visible code paths, including after a failed
 *         delete phase — NOTE(review): the return value appears to mean "process ran",
 *         not "process succeeded"; confirm against the interface contract
 */
@Override
public boolean executeProcess(DBRProgressMonitor monitor, DBSDataManipulator dataManipulator) throws IOException {
    DBCExecutionContext context = dataManipulator.getDataSource().getDefaultContext(true);
    try (DBCSession session = context.openSession(monitor, DBCExecutionPurpose.USER, MockDataMessages.tools_mockdata_generate_data_task)) {
        DBCTransactionManager txnManager = DBUtils.getTransactionManager(session.getExecutionContext());
        boolean autoCommit;
        try {
            // txnManager == null means the driver exposes no transaction control;
            // treat that the same as auto-commit so no explicit commits are issued.
            autoCommit = txnManager == null || txnManager.isAutoCommit();
        } catch (DBCException e) {
            // If the auto-commit state cannot be read, default to auto-commit
            // (safer: we skip explicit commits rather than commit blindly).
            log.error(e);
            autoCommit = true;
        }
        AbstractExecutionSource executionSource = new AbstractExecutionSource(dataManipulator, session.getExecutionContext(), this);
        boolean success = true;
        monitor.beginTask("Generate Mock Data", 3);
        if (mockDataSettings.isRemoveOldData()) {
            logPage.appendLog("Removing old data from the '" + dataManipulator.getName() + "'.\n");
            monitor.subTask("Cleanup old data");
            DBCStatistics deleteStats = new DBCStatistics();
            try {
                // TODO: truncate is much faster than delete
                // Empty attribute array + empty key values = unconditional DELETE of all rows.
                try (DBSDataManipulator.ExecuteBatch batch = dataManipulator.deleteData(session, new DBSAttributeBase[] {}, executionSource)) {
                    batch.add(new Object[] {});
                    deleteStats.accumulate(batch.execute(session));
                }
                if (txnManager != null && !autoCommit) {
                    txnManager.commit(session);
                }
            } catch (Exception e) {
                success = false;
                String message = " Error removing the data: " + e.getMessage();
                log.error(message, e);
                logPage.appendLog(message + "\n\n", true);
            }
            // NOTE(review): stats are logged even when the delete failed (they will
            // just show 0 rows / 0 ms in that case).
            logPage.appendLog("    Rows updated: " + deleteStats.getRowsUpdated() + "\n");
            logPage.appendLog("    Duration: " + deleteStats.getExecuteTime() + "ms\n\n");
        } else {
            logPage.appendLog("Old data isn't removed.\n\n");
        }
        if (!success) {
            // Delete phase failed: skip the insert phase entirely.
            // NOTE(review): returns true despite the failure — confirm this is intended.
            return true;
        }
        try {
            monitor.subTask("Insert data");
            logPage.appendLog("Inserting mock data into the '" + dataManipulator.getName() + "'.\n");
            DBCStatistics insertStats = new DBCStatistics();
            // build and init the generators
            generators.clear();
            DBSEntity dbsEntity = (DBSEntity) dataManipulator;
            Collection<? extends DBSAttributeBase> attributes = DBUtils.getRealAttributes(dbsEntity.getAttributes(monitor));
            for (DBSAttributeBase attribute : attributes) {
                MockGeneratorDescriptor generatorDescriptor = mockDataSettings.getGeneratorDescriptor(mockDataSettings.getAttributeGeneratorProperties(attribute).getSelectedGeneratorId());
                if (generatorDescriptor != null) {
                    // Attributes without a configured generator are silently skipped;
                    // they get no entry in 'generators' and thus no value on insert.
                    MockValueGenerator generator = generatorDescriptor.createGenerator();
                    MockDataSettings.AttributeGeneratorProperties generatorPropertySource = this.mockDataSettings.getAttributeGeneratorProperties(attribute);
                    String selectedGenerator = generatorPropertySource.getSelectedGeneratorId();
                    Map<Object, Object> generatorProperties = generatorPropertySource.getGeneratorPropertySource(selectedGenerator).getPropertiesWithDefaults();
                    generator.init(dataManipulator, attribute, generatorProperties);
                    generators.put(attribute.getName(), generator);
                }
            }
            monitor.done();
            long rowsNumber = mockDataSettings.getRowsNumber();
            // Number of full batches, rounded up to cover the remainder.
            long quotient = rowsNumber / BATCH_SIZE;
            long modulo = rowsNumber % BATCH_SIZE;
            if (modulo > 0) {
                quotient++;
            }
            int counter = 0;
            monitor.beginTask("Insert data", (int) rowsNumber);
            // generate and insert the data
            session.enableLogging(false);
            DBSDataManipulator.ExecuteBatch batch = null;
            for (int q = 0; q < quotient; q++) {
                if (monitor.isCanceled()) {
                    break;
                }
                // From the second chunk on: commit the previous chunk (manual-commit
                // mode only) and report progress before generating more rows.
                if (counter > 0) {
                    if (txnManager != null && !autoCommit) {
                        txnManager.commit(session);
                    }
                    monitor.subTask(String.valueOf(counter) + " rows inserted");
                    monitor.worked(BATCH_SIZE);
                }
                try {
                    for (int i = 0; (i < BATCH_SIZE && counter < rowsNumber); i++) {
                        if (monitor.isCanceled()) {
                            break;
                        }
                        // One generated value per attribute that has a generator.
                        List<DBDAttributeValue> attributeValues = new ArrayList<>();
                        try {
                            for (DBSAttributeBase attribute : attributes) {
                                MockValueGenerator generator = generators.get(attribute.getName());
                                if (generator != null) {
                                    // ((AbstractMockValueGenerator) generator).checkUnique(monitor);
                                    Object value = generator.generateValue(monitor);
                                    attributeValues.add(new DBDAttributeValue(attribute, value));
                                }
                            }
                        } catch (DBException e) {
                            // Generator failure aborts the whole process (the batch
                            // built so far is closed by the outer finally).
                            processGeneratorException(e);
                            return true;
                        }
                        // Lazily create the INSERT batch from the first row's attribute list;
                        // it is re-created per chunk because the finally block nulls it.
                        if (batch == null) {
                            batch = dataManipulator.insertData(session, DBDAttributeValue.getAttributes(attributeValues), null, executionSource);
                        }
                        if (counter++ < rowsNumber) {
                            batch.add(DBDAttributeValue.getValues(attributeValues));
                        }
                    }
                    if (batch != null) {
                        insertStats.accumulate(batch.execute(session));
                    }
                } catch (Exception e) {
                    // Log/show the error; DBExceptions are rethrown so the outer
                    // catch records them, other exceptions just end this chunk.
                    processGeneratorException(e);
                    if (e instanceof DBException) {
                        throw e;
                    }
                } finally {
                    if (batch != null) {
                        batch.close();
                        batch = null;
                    }
                }
            }
            // Final commit for the last (possibly partial) chunk.
            if (txnManager != null && !autoCommit) {
                txnManager.commit(session);
            }
            logPage.appendLog("    Rows updated: " + insertStats.getRowsUpdated() + "\n");
            logPage.appendLog("    Duration: " + insertStats.getExecuteTime() + "ms\n\n");
        } catch (DBException e) {
            String message = " Error inserting mock data: " + e.getMessage();
            log.error(message, e);
            logPage.appendLog(message + "\n\n", true);
        }
    } finally {
        monitor.done();
    }
    return true;
}
Example usage of org.jkiss.dbeaver.model.struct.DBSDataManipulator in the dbeaver project (repository: dbeaver/dbeaver).
Source: class MockDataHandler, method execute.
/**
 * Command handler entry point for the Mock Data generator.
 * When invoked from the result-set toolbar button, the target table is taken
 * from the active results viewer; otherwise it comes from the navigator
 * selection. Always returns {@code null}, as is conventional for handlers.
 */
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    final boolean fromResultSetButton = event.getCommand().getId().endsWith("button");
    final List<DBSObject> targets;
    if (!fromResultSetButton) {
        targets = NavigatorUtils.getSelectedObjects(HandlerUtil.getCurrentSelection(event));
    } else {
        // Resolve the table behind the active results viewer, validating each step.
        final IResultSetController viewer = ResultSetCommandHandler.getActiveResultSet(HandlerUtil.getActivePart(event));
        if (viewer == null) {
            DBeaverUI.getInstance().showError("Mock Data", "No active results viewer");
            return null;
        }
        final DBSDataContainer container = viewer.getDataContainer();
        if (container == null || container.getDataSource() == null) {
            DBeaverUI.getInstance().showError("Mock Data", "Not connected to a database");
            return null;
        }
        if (!(container instanceof DBSDataManipulator)) {
            // Read-only containers (e.g. query results) cannot receive generated rows.
            DBeaverUI.getInstance().showError("Mock Data", "Mock Data can be generated for a table only");
            return null;
        }
        targets = new ArrayList<>();
        targets.add(container);
    }
    final MockDataGenerateTool tool = new MockDataGenerateTool();
    try {
        tool.execute(HandlerUtil.getActiveWorkbenchWindow(event), null, targets);
    } catch (DBException e) {
        log.error("Error launching the Mock Data Generator", e);
    }
    return null;
}
Example usage of org.jkiss.dbeaver.model.struct.DBSDataManipulator in the dbeaver project (repository: serge-rider/dbeaver).
Source: class DatabaseProducerPageInputObjects, method autoAssignMappings.
/**
 * Auto-fills the missing side of each transfer pipe by matching object names
 * against the supplied container objects. Which side is filled depends on
 * whether the wizard's consumer is the optional end.
 */
private void autoAssignMappings(List<DBSObject> containerObjects) {
    final boolean chooseConsumer = getWizard().getSettings().isConsumerOptional();
    for (TableItem item : mappingTable.getItems()) {
        DataTransferPipe pipe = (DataTransferPipe) item.getData();
        // Only pipes whose optional side is still unassigned are candidates.
        boolean needsAssignment = chooseConsumer
            ? pipe.getConsumer() == null || pipe.getConsumer().getDatabaseObject() == null
            : pipe.getProducer() == null || pipe.getProducer().getDatabaseObject() == null;
        if (!needsAssignment) {
            continue;
        }
        // The already-assigned side supplies the name to match by.
        DBSObject sourceObject = chooseConsumer ? pipe.getProducer().getDatabaseObject() : pipe.getConsumer().getDatabaseObject();
        if (sourceObject == null) {
            continue;
        }
        DBSObject match = DBUtils.findObject(containerObjects, sourceObject.getName());
        if (match == null) {
            continue;
        }
        // Assign the matched object only if it supports the required role;
        // the item display is refreshed either way.
        if (chooseConsumer) {
            if (match instanceof DBSDataManipulator) {
                pipe.setConsumer(new DatabaseTransferConsumer((DBSDataManipulator) match));
            }
        } else if (match instanceof DBSDataContainer) {
            pipe.setProducer(new DatabaseTransferProducer((DBSDataContainer) match));
        }
        updateItemData(item, pipe);
    }
    updatePageCompletion();
}
Example usage of org.jkiss.dbeaver.model.struct.DBSDataManipulator in the dbeaver project (repository: serge-rider/dbeaver).
Source: class StreamProducerPageSettings, method updateSingleConsumer.
/**
 * Re-points the given transfer pipe at a new input file and rebuilds the
 * consumer-side mapping for it. If the previous producer was also a stream
 * producer with the same column layout, its existing mapping is carried over
 * instead of being rebuilt from scratch.
 */
private void updateSingleConsumer(DBRProgressMonitor monitor, DataTransferPipe pipe, File file) {
    final StreamProducerSettings producerSettings = getWizard().getPageSettings(this, StreamProducerSettings.class);
    // Remember the previous stream producer (if any) so its stale mapping can be removed below.
    final StreamTransferProducer oldProducer = pipe.getProducer() instanceof StreamTransferProducer ? (StreamTransferProducer) pipe.getProducer() : null;
    final StreamTransferProducer newProducer = new StreamTransferProducer(new StreamEntityMapping(file));
    pipe.setProducer(newProducer);
    producerSettings.updateProducerSettingsFromStream(monitor, newProducer, getWizard().getSettings());
    IDataTransferSettings consumerSettings = getWizard().getSettings().getNodeSettings(getWizard().getSettings().getConsumer());
    if (consumerSettings instanceof DatabaseConsumerSettings) {
        DatabaseConsumerSettings settings = (DatabaseConsumerSettings) consumerSettings;
        DatabaseMappingContainer mapping = new DatabaseMappingContainer(settings, newProducer.getDatabaseObject());
        if (pipe.getConsumer() != null && pipe.getConsumer().getDatabaseObject() instanceof DBSDataManipulator) {
            // A concrete target table is already chosen: keep it and sync the
            // container node to the table's parent (schema/catalog).
            DBSDataManipulator databaseObject = (DBSDataManipulator) pipe.getConsumer().getDatabaseObject();
            DBNDatabaseNode databaseNode = DBNUtils.getNodeByObject(monitor, databaseObject.getParentObject(), false);
            if (databaseNode != null) {
                settings.setContainerNode(databaseNode);
            }
            mapping.setTarget(databaseObject);
        } else {
            // No target yet: leave it unresolved and propose a table name
            // derived from the input file name.
            mapping.setTarget(null);
            mapping.setTargetName(generateTableName(newProducer.getInputFile()));
        }
        if (oldProducer != null) {
            // Remove old mapping because we're just replaced file
            DatabaseMappingContainer oldMappingContainer = settings.getDataMappings().remove(oldProducer.getDatabaseObject());
            if (oldMappingContainer != null && oldMappingContainer.getSource() instanceof StreamEntityMapping) {
                StreamEntityMapping oldEntityMapping = (StreamEntityMapping) oldMappingContainer.getSource();
                // Copy mappings from old producer if columns are the same
                if (oldEntityMapping.isSameColumns(newProducer.getEntityMapping())) {
                    // Reuse path: build a fresh producer/mapping pair that copies the
                    // old column mapping, replace the pipe's producer again, and
                    // return early — the freshly built 'mapping' above is discarded.
                    StreamEntityMapping entityMapping = new StreamEntityMapping(file);
                    settings.addDataMappings(getWizard().getRunnableContext(), entityMapping, new DatabaseMappingContainer(oldMappingContainer, entityMapping));
                    StreamTransferProducer producer = new StreamTransferProducer(entityMapping);
                    pipe.setProducer(producer);
                    producerSettings.updateProducerSettingsFromStream(monitor, producer, getWizard().getSettings());
                    return;
                }
            }
        }
        settings.addDataMappings(getWizard().getRunnableContext(), newProducer.getDatabaseObject(), mapping);
    }
}
Example usage of org.jkiss.dbeaver.model.struct.DBSDataManipulator in the dbeaver project (repository: dbeaver/dbeaver).
Source: class DatabaseConsumerSettings, method loadSettings.
/**
 * Restores consumer-side data transfer settings from the persisted settings map:
 * scalar options first, then the target container node, then per-container
 * column mappings (restored either into already-known mapping containers or
 * built fresh from the producers of the current pipes).
 */
@Override
public void loadSettings(DBRRunnableContext runnableContext, DataTransferSettings dataTransferSettings, Map<String, Object> settings) {
    this.dialogSettings = settings;
    // Each scalar falls back to the field's current value when the key is absent.
    containerNodePath = CommonUtils.toString(settings.get("container"), containerNodePath);
    openNewConnections = CommonUtils.getBoolean(settings.get("openNewConnections"), openNewConnections);
    useTransactions = CommonUtils.getBoolean(settings.get("useTransactions"), useTransactions);
    onDuplicateKeyInsertMethodId = CommonUtils.toString(settings.get("onDuplicateKeyMethod"), onDuplicateKeyInsertMethodId);
    commitAfterRows = CommonUtils.toInt(settings.get("commitAfterRows"), commitAfterRows);
    disableUsingBatches = CommonUtils.getBoolean(settings.get("disableUsingBatches"), disableUsingBatches);
    transferAutoGeneratedColumns = CommonUtils.getBoolean(settings.get("transferAutoGeneratedColumns"), transferAutoGeneratedColumns);
    truncateBeforeLoad = CommonUtils.getBoolean(settings.get("truncateBeforeLoad"), truncateBeforeLoad);
    openTableOnFinish = CommonUtils.getBoolean(settings.get("openTableOnFinish"), openTableOnFinish);
    List<DataTransferPipe> dataPipes = dataTransferSettings.getDataPipes();
    {
        // If the first pipe already has a database consumer with a resolved
        // target, derive the container node from that target's parent.
        if (!dataPipes.isEmpty()) {
            IDataTransferConsumer consumer = dataPipes.get(0).getConsumer();
            if (consumer instanceof DatabaseTransferConsumer) {
                final DBSDataManipulator targetObject = ((DatabaseTransferConsumer) consumer).getTargetObject();
                if (targetObject != null) {
                    containerNode = DBWorkbench.getPlatform().getNavigatorModel().findNode(targetObject.getParentObject());
                }
            }
        }
        checkContainerConnection(runnableContext);
    }
    loadNode(runnableContext, dataTransferSettings, null);
    // Load mapping for current objects
    // Unchecked cast: persisted settings are assumed to be nested string-keyed
    // maps — TODO confirm the serialization format guarantees this.
    Map<String, Object> mappings = (Map<String, Object>) settings.get("mappings");
    if (mappings != null) {
        if (!dataMappings.isEmpty()) {
            // Mapping containers already exist: look up each one's persisted
            // state by the source container's full object id.
            for (DatabaseMappingContainer dmc : dataMappings.values()) {
                DBSDataContainer sourceDatacontainer = dmc.getSource();
                if (sourceDatacontainer != null) {
                    Map<String, Object> dmcSettings = (Map<String, Object>) mappings.get(DBUtils.getObjectFullId(sourceDatacontainer));
                    if (dmcSettings != null) {
                        dmc.loadSettings(runnableContext, dmcSettings);
                    }
                }
            }
        } else if (!dataPipes.isEmpty()) {
            // No mapping containers yet: create one per pipe whose producer is
            // a data container with persisted settings.
            for (DataTransferPipe pipe : dataPipes) {
                IDataTransferProducer producer = pipe.getProducer();
                if (producer != null) {
                    DBSObject dbObject = producer.getDatabaseObject();
                    if (dbObject instanceof DBSDataContainer) {
                        DBSDataContainer sourceDC = (DBSDataContainer) dbObject;
                        Map<String, Object> dmcSettings = (Map<String, Object>) mappings.get(DBUtils.getObjectFullId(dbObject));
                        if (dmcSettings != null) {
                            DatabaseMappingContainer dmc = new DatabaseMappingContainer(this, sourceDC);
                            dmc.loadSettings(runnableContext, dmcSettings);
                            dataMappings.put(sourceDC, dmc);
                        }
                    }
                }
            }
        }
    }
}
Aggregations