use of org.eclipse.core.runtime.jobs.IJobChangeEvent in project hale by halestudio.
the class Transformation method transform.
/**
* Transform the given instances, according to the given alignment.
*
* @param sources the collection of source instances
* @param targetSink the target sink
* @param exportJob the export job
* @param validationJob the validation job, may be <code>null</code>
* @param alignment the alignment, may not be changed outside this method
* @param sourceSchema the source schema
* @param reportHandler the report handler
* @param serviceProvider the service provider in the transformation context
* @param processId the identifier for the transformation process, may be
* <code>null</code> if grouping the jobs to a job family is not
* necessary
* @return the future representing the successful completion of the
* transformation (note that a successful completion doesn't
necessarily mean there weren't any internal transformation errors)
*/
public static ListenableFuture<Boolean> transform(InstanceCollection sources,
        final TransformationSink targetSink, final ExportJob exportJob,
        final ValidationJob validationJob, final Alignment alignment, SchemaSpace sourceSchema,
        final ReportHandler reportHandler, final ServiceProvider serviceProvider,
        final Object processId) {
final SettableFuture<Boolean> result = SettableFuture.create();
final InstanceCollection sourceToUse;
// Check whether to create a temporary database or not.
// Currently no temporary DB is created if there are only
// Retypes/Creates.
boolean useTempDatabase = false;
final LocalOrientDB db;
for (Cell cell : alignment.getActiveTypeCells()) {
    if (!isStreamingTypeTransformation(cell.getTransformationIdentifier())) {
        useTempDatabase = true;
        break;
    }
}
// Create temporary database if necessary.
if (useTempDatabase) {
// create db
File tmpDir = Files.createTempDir();
db = new LocalOrientDB(tmpDir);
tmpDir.deleteOnExit();
// get instance collection
// sourceToUse = new BrowseOrientInstanceCollection(db, sourceSchema, DataSet.SOURCE);
// only yield instances that were actually inserted
// this is also done in OrientInstanceService
// TODO make configurable?
sourceToUse = FilteredInstanceCollection.applyFilter(
        new BrowseOrientInstanceCollection(db, sourceSchema, DataSet.SOURCE), new Filter() {
@Override
public boolean match(Instance instance) {
if (instance instanceof OInstance) {
return ((OInstance) instance).isInserted();
}
return true;
}
});
} else {
sourceToUse = new StatsCountInstanceCollection(sources, reportHandler);
db = null;
}
// create transformation job
final AbstractTransformationJob transformJob = new AbstractTransformationJob("Transformation") {
/**
* @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.IProgressMonitor)
*/
@Override
protected IStatus run(IProgressMonitor monitor) {
TransformationService transformationService = HalePlatform.getService(TransformationService.class);
TransformationReport report = transformationService.transform(alignment, sourceToUse,
        targetSink, serviceProvider, new ProgressMonitorIndicator(monitor));
try {
// publish report
reportHandler.publishReport(report);
if (report.isSuccess()) {
return Status.OK_STATUS;
} else {
return ERROR_STATUS;
}
} finally {
// canceling the monitor aborts the transformation prematurely
// and may lead to the transformation report being lost
if (monitor.isCanceled()) {
targetSink.done(true);
return Status.CANCEL_STATUS;
} else {
targetSink.done(false);
}
}
}
};
// set process IDs to group jobs in a job family
if (processId != null) {
transformJob.setProcessId(processId);
exportJob.setProcessId(processId);
if (validationJob != null) {
validationJob.setProcessId(processId);
}
}
exportJob.setUser(true);
// the jobs should cancel each other
transformJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
if (!event.getResult().isOK()) {
// log transformation job error (because it otherwise gets
// lost)
String msg = "Error during transformation";
if (event.getResult().getMessage() != null) {
msg = ": " + event.getResult().getMessage();
}
log.error(msg, event.getResult().getException());
// failing transformation is done by cancelling the export
exportJob.cancel();
}
if (db != null) {
db.delete();
}
}
});
// after export is done, validation should run
exportJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
if (!event.getResult().isOK()) {
transformJob.cancel();
// failure
failure(result, event);
} else {
if (validationJob == null) {
// success
result.set(true);
} else {
// schedule the validation job
validationJob.schedule();
}
}
}
});
// validation ends the process
if (validationJob != null) {
validationJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
if (!event.getResult().isOK()) {
// failure
failure(result, event);
} else {
// success
result.set(true);
}
}
});
}
if (useTempDatabase) {
// Initialize instance index with alignment
InstanceIndexService indexService = serviceProvider.getService(InstanceIndexService.class);
indexService.addPropertyMappings(alignment.getActiveTypeCells(), serviceProvider);
// run store instance job first...
Job storeJob = new StoreInstancesJob("Load source instances into temporary database", db,
        sources, serviceProvider, reportHandler, true) {
@Override
protected void onComplete() {
// onComplete is also called if monitor is cancelled...
}
@Override
public boolean belongsTo(Object family) {
if (processId == null) {
return super.belongsTo(family);
}
return AbstractTransformationJob.createFamily(processId).equals(family);
}
};
// and schedule jobs on successful completion
storeJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
if (event.getResult().isOK()) {
exportJob.schedule();
transformJob.schedule();
} else {
failure(result, event);
}
}
});
storeJob.schedule();
} else {
// otherwise feed InstanceProcessors directly from the
// InstanceCollection...
// TODO Implement differently, not w/ PseudoInstanceReference which
// will cause memory problems
// final InstanceProcessingExtension ext = new InstanceProcessingExtension(
// serviceProvider);
// final List<InstanceProcessor> processors = ext.getInstanceProcessors();
//
// ResourceIterator<Instance> it = sourceToUse.iterator();
// try {
// while (it.hasNext()) {
// Instance instance = it.next();
//
// ResolvableInstanceReference resolvableRef = new ResolvableInstanceReference(
// new PseudoInstanceReference(instance), sourceToUse);
// processors.forEach(p -> p.process(instance, resolvableRef));
//
// }
// } finally {
// it.close();
// }
// ...and schedule jobs
exportJob.schedule();
transformJob.schedule();
}
return result;
}
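A minimal sketch of the chaining pattern above, reduced to two generic jobs: each side watches the other through a JobChangeAdapter, a failure cancels the partner job, and the final done(IJobChangeEvent) resolves a Guava SettableFuture. The class and method names are illustrative, not part of hale.

import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;

import com.google.common.util.concurrent.SettableFuture;

public class JobChainSketch {

    /** Schedules both jobs and reports overall success once the consumer finishes. */
    public static SettableFuture<Boolean> runChained(final Job producer, final Job consumer) {
        final SettableFuture<Boolean> result = SettableFuture.create();
        // a failed producer cancels the consumer, mirroring transformJob/exportJob above
        producer.addJobChangeListener(new JobChangeAdapter() {
            @Override
            public void done(IJobChangeEvent event) {
                if (!event.getResult().isOK()) {
                    consumer.cancel();
                }
            }
        });
        // the consumer's completion resolves the future, like the validation job above
        consumer.addJobChangeListener(new JobChangeAdapter() {
            @Override
            public void done(IJobChangeEvent event) {
                result.set(event.getResult().isOK());
            }
        });
        producer.schedule();
        consumer.schedule();
        return result;
    }
}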
use of org.eclipse.core.runtime.jobs.IJobChangeEvent in project linuxtools by eclipse.
the class DockerImagesView method changeEvent.
@Override
public void changeEvent(final IDockerConnection connection, final int type) {
if (type == IDockerConnectionManagerListener.UPDATE_SETTINGS_EVENT) {
final Job refreshJob = new Job(DVMessages.getString("ImagesRefresh.msg")) {
@Override
protected IStatus run(IProgressMonitor monitor) {
connection.getImages(true);
return Status.OK_STATUS;
}
};
refreshJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
Display.getDefault().asyncExec(() -> refreshViewTitle());
}
});
refreshJob.schedule();
} else if (type == IDockerConnectionManagerListener.RENAME_EVENT) {
Display.getDefault().asyncExec(() -> refreshViewTitle());
}
}
use of org.eclipse.core.runtime.jobs.IJobChangeEvent in project linuxtools by eclipse.
the class SWTUtils method expandTreeItem.
private static SWTBotTreeItem expandTreeItem(final SWTBotTreeItem treeItem) {
final UIJob expandJob = new UIJob("expanding tree") {
@Override
public IStatus runInUIThread(IProgressMonitor monitor) {
treeItem.expand();
return Status.OK_STATUS;
}
};
expandJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
final int maxAttempts = 30;
int currentAttempt = 0;
while (currentAttempt < maxAttempts && treeItem.getItems().length == 1 && treeItem.getItems()[0].getText().isEmpty()) {
SWTUtils.wait(1, TimeUnit.SECONDS);
currentAttempt++;
}
}
});
expandJob.schedule();
SWTUtils.wait(1, TimeUnit.SECONDS);
return treeItem;
}
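Where a test can afford to block, the polling loop above can often be replaced by joining the job directly. A minimal sketch assuming the caller is not on the UI thread (joining from the UI thread can deadlock when the job itself needs the UI thread, as a UIJob does); the helper name is illustrative.

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

public class JoinSketch {

    /** Runs the given work as a Job and blocks until it has finished. */
    public static IStatus runAndWait(String name, final Runnable work) throws InterruptedException {
        Job job = new Job(name) {
            @Override
            protected IStatus run(IProgressMonitor monitor) {
                work.run();
                return Status.OK_STATUS;
            }
        };
        job.schedule();
        // blocks the calling thread until the job is done
        job.join();
        // after join() the final status is available without a change listener
        return job.getResult();
    }
}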
use of org.eclipse.core.runtime.jobs.IJobChangeEvent in project linuxtools by eclipse.
the class OpenInHierarchyViewCommandHandler method execute.
@Override
public Object execute(ExecutionEvent event) {
// retrieve the selected image
final IWorkbenchPart activePart = HandlerUtil.getActivePart(event);
final IDockerConnection2 currentConnection = (IDockerConnection2) CommandUtils.getCurrentConnection(activePart);
// run a job to retrieve the image hierarchy
final RetrieveImageHierarchyJob retrieveImageHierarchyJob = new RetrieveImageHierarchyJob(
        currentConnection, CommandUtils.getSelectedElement(activePart));
retrieveImageHierarchyJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
// warn if the job did not return an image hierarchy to use as the view
// input
if (retrieveImageHierarchyJob.getImageHierarchy() == null) {
Activator.logWarningMessage(CommandMessages
        .getString("command.showIn.imageHierarchyView.failure.missingHierarchy")); // $NON-NLS-1$
}
Display.getDefault().asyncExec(() -> {
try {
final DockerImageHierarchyView dockerImageHierarchyView = (DockerImageHierarchyView) PlatformUI
        .getWorkbench().getActiveWorkbenchWindow().getActivePage()
        .showView(DockerImageHierarchyView.VIEW_ID, null, IWorkbenchPage.VIEW_VISIBLE);
dockerImageHierarchyView.setConnection((IDockerConnection) currentConnection);
dockerImageHierarchyView.show(retrieveImageHierarchyJob.getImageHierarchy());
} catch (PartInitException e) {
Activator.logErrorMessage(
        CommandMessages.getString("command.showIn.imageHierarchyView.failure"), e); // $NON-NLS-1$
}
});
}
});
retrieveImageHierarchyJob.schedule();
return null;
}
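The RetrieveImageHierarchyJob pattern, generalized: the job stores its computed value in a field and a JobChangeAdapter hands it to a callback once done(IJobChangeEvent) fires. A hedged sketch only; ResultJob and its members are invented names, not linuxtools API. Note that done() is not called on the UI thread, so a callback touching SWT widgets still needs Display.asyncExec, as in the handler above.

import java.util.function.Consumer;
import java.util.function.Supplier;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;

/** Computes a value in the background and hands it to a callback when the job completes. */
public class ResultJob<T> extends Job {

    private final Supplier<T> computation;

    private volatile T value;

    public ResultJob(String name, Supplier<T> computation) {
        super(name);
        this.computation = computation;
    }

    @Override
    protected IStatus run(IProgressMonitor monitor) {
        value = computation.get();
        return Status.OK_STATUS;
    }

    /** Schedules the job and passes the value to the callback on successful completion. */
    public void scheduleWith(final Consumer<T> callback) {
        addJobChangeListener(new JobChangeAdapter() {
            @Override
            public void done(IJobChangeEvent event) {
                if (event.getResult().isOK()) {
                    callback.accept(value);
                }
            }
        });
        schedule();
    }
}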
use of org.eclipse.core.runtime.jobs.IJobChangeEvent in project knime-core by knime.
the class InstallMissingNodesJob method startInstallJob.
private void startInstallJob(final Set<IInstallableUnit> featuresToInstall) {
final ProvisioningUI provUI = ProvisioningUI.getDefaultUI();
Job.getJobManager().cancel(LoadMetadataRepositoryJob.LOAD_FAMILY);
final LoadMetadataRepositoryJob loadJob = new LoadMetadataRepositoryJob(provUI);
loadJob.setProperty(LoadMetadataRepositoryJob.ACCUMULATE_LOAD_ERRORS, Boolean.toString(true));
loadJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(final IJobChangeEvent event) {
if (PlatformUI.isWorkbenchRunning() && event.getResult().isOK()) {
Display.getDefault().asyncExec(() -> {
if (Display.getDefault().isDisposed()) {
NodeLogger.getLogger("Display disposed, aborting install action");
// fixes AP-8376, AP-8380, AP-7184
return;
}
provUI.getPolicy().setRepositoriesVisible(false);
provUI.openInstallWizard(featuresToInstall, new InstallOperation(provUI.getSession(), featuresToInstall), loadJob);
provUI.getPolicy().setRepositoriesVisible(true);
});
}
}
});
loadJob.setUser(true);
loadJob.schedule();
}
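Both the Job.getJobManager().cancel(LoadMetadataRepositoryJob.LOAD_FAMILY) call above and the belongsTo/processId overrides in the hale example rely on job families. A minimal, self-contained sketch of that grouping; FAMILY and the helper names are illustrative. The whole family can also be awaited with Job.getJobManager().join(FAMILY, monitor).

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

public class JobFamilySketch {

    /** Shared family token; every job answering true from belongsTo(FAMILY) is part of the group. */
    public static final Object FAMILY = new Object();

    /** Creates a job that reports itself as a member of FAMILY. */
    public static Job createMember(String name, final Runnable work) {
        return new Job(name) {
            @Override
            public boolean belongsTo(Object family) {
                return FAMILY.equals(family);
            }

            @Override
            protected IStatus run(IProgressMonitor monitor) {
                work.run();
                return Status.OK_STATUS;
            }
        };
    }

    /** Cancels all waiting and running members of the family in one call. */
    public static void cancelFamily() {
        Job.getJobManager().cancel(FAMILY);
    }
}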