Use of org.pentaho.di.job.JobExecutionConfiguration in project pentaho-kettle by pentaho.
From class DefaultRunConfigurationExecutorTest, the method testExecuteLocalJob:
@Test
public void testExecuteLocalJob() throws Exception {
// Arrange: a run configuration explicitly flagged for local execution.
DefaultRunConfiguration localRunConfig = new DefaultRunConfiguration();
localRunConfig.setName("Default Configuration");
localRunConfig.setLocal(true);
JobExecutionConfiguration executionConfig = new JobExecutionConfiguration();
// Act: let the executor apply the run configuration to the execution config.
defaultRunConfigurationExecutor.execute(localRunConfig, executionConfig, abstractMeta, variableSpace, null);
// Assert: the execution configuration must be marked as running locally.
assertTrue(executionConfig.isExecutingLocally());
}
Use of org.pentaho.di.job.JobExecutionConfiguration in project pentaho-kettle by pentaho.
From class Spoon, the method init:
/**
 * Initializes the Spoon main window: shell layout, file listeners,
 * execution configurations, the XUL-based UI, perspectives,
 * drag-and-drop file opening, and step-plugin change listeners.
 *
 * @param ti an optional transformation to open on startup; may be null
 */
public void init(TransMeta ti) {
FormLayout layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
shell.setLayout(layout);
// Listeners that know how to open/save transformation and job files.
addFileListener(new TransFileListener());
addFileListener(new JobFileListener());
// INIT Data structure
if (ti != null) {
delegates.trans.addTransformation(ti);
}
// Load settings in the props
loadSettings();
// Separate execution configurations for run, preview and debug so the
// user's choices for each mode are kept independently.
transExecutionConfiguration = new TransExecutionConfiguration();
transExecutionConfiguration.setGatheringMetrics(true);
transPreviewExecutionConfiguration = new TransExecutionConfiguration();
transPreviewExecutionConfiguration.setGatheringMetrics(true);
transDebugExecutionConfiguration = new TransExecutionConfiguration();
transDebugExecutionConfiguration.setGatheringMetrics(true);
jobExecutionConfiguration = new JobExecutionConfiguration();
// Clean out every time we start, auto-loading etc, is not a good idea
// If they are needed that often, set them in the kettle.properties file
//
variables = new RowMetaAndData(new RowMeta());
// props.setLook(shell);
shell.setImage(GUIResource.getInstance().getImageSpoon());
cursor_hourglass = new Cursor(display, SWT.CURSOR_WAIT);
cursor_hand = new Cursor(display, SWT.CURSOR_HAND);
Composite sashComposite = null;
MainSpoonPerspective mainPerspective = null;
try {
// Load the main window definition from XUL and register this object as
// its event handler.
KettleXulLoader xulLoader = new KettleXulLoader();
xulLoader.setIconsSize(16, 16);
xulLoader.setOuterContext(shell);
xulLoader.setSettingsManager(XulSpoonSettingsManager.getInstance());
ApplicationWindowLocal.setApplicationWindow(this);
mainSpoonContainer = xulLoader.loadXul(XUL_FILE_MAIN, new XulSpoonResourceBundle());
BindingFactory bf = new DefaultBindingFactory();
bf.setDocument(mainSpoonContainer.getDocumentRoot());
mainSpoonContainer.addEventHandler(this);
/* menuBar = (XulMenubar) */
mainSpoonContainer.getDocumentRoot().getElementById("spoon-menubar");
mainToolbar = (XulToolbar) mainSpoonContainer.getDocumentRoot().getElementById("main-toolbar");
props.setLook((Control) mainToolbar.getManagedObject(), Props.WIDGET_STYLE_TOOLBAR);
/* canvas = (XulVbox) */
mainSpoonContainer.getDocumentRoot().getElementById("trans-job-canvas");
deck = (SwtDeck) mainSpoonContainer.getDocumentRoot().getElementById("canvas-deck");
final Composite tempSashComposite = new Composite(shell, SWT.None);
sashComposite = tempSashComposite;
mainPerspective = new MainSpoonPerspective(tempSashComposite, tabfolder);
if (startupPerspective == null) {
startupPerspective = mainPerspective.getId();
}
SpoonPerspectiveManager.getInstance().setStartupPerspective(startupPerspective);
SpoonPerspectiveManager.getInstance().addPerspective(mainPerspective);
SpoonPluginManager.getInstance().applyPluginsForContainer("spoon", mainSpoonContainer);
SpoonPerspectiveManager.getInstance().setDeck(deck);
SpoonPerspectiveManager.getInstance().setXulDoc(mainSpoonContainer);
SpoonPerspectiveManager.getInstance().initialize();
} catch (Exception e) {
// NOTE(review): this block initializes the main UI, not a transformation;
// the log message below looks misleading — consider rewording.
LogChannel.GENERAL.logError("Error initializing transformation", e);
}
// addBar();
// Set the shell size, based upon previous time...
WindowProperty windowProperty = props.getScreen(APP_TITLE);
if (windowProperty != null) {
windowProperty.setShell(shell);
} else {
shell.pack();
// Default = maximized!
shell.setMaximized(true);
}
layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
GridData data = new GridData();
data.grabExcessHorizontalSpace = true;
data.grabExcessVerticalSpace = true;
data.verticalAlignment = SWT.FILL;
data.horizontalAlignment = SWT.FILL;
sashComposite.setLayoutData(data);
sashComposite.setLayout(layout);
sashform = new SashForm(sashComposite, SWT.HORIZONTAL);
FormData fdSash = new FormData();
fdSash.left = new FormAttachment(0, 0);
// fdSash.top = new FormAttachment((org.eclipse.swt.widgets.ToolBar)
// toolbar.getNativeObject(), 0);
fdSash.top = new FormAttachment(0, 0);
fdSash.bottom = new FormAttachment(100, 0);
fdSash.right = new FormAttachment(100, 0);
sashform.setLayoutData(fdSash);
createPopupMenus();
addTree();
addTabs();
mainPerspective.setTabset(this.tabfolder);
((Composite) deck.getManagedObject()).layout(true, true);
SpoonPluginManager.getInstance().notifyLifecycleListeners(SpoonLifeCycleEvent.STARTUP);
// Add a browser widget
if (props.showWelcomePageOnStartup()) {
showWelcomePage();
}
// Allow data to be copied or moved to the drop target
int operations = DND.DROP_COPY | DND.DROP_DEFAULT;
DropTarget target = new DropTarget(shell, operations);
// Receive data in File format
final FileTransfer fileTransfer = FileTransfer.getInstance();
Transfer[] types = new Transfer[] { fileTransfer };
target.setTransfer(types);
target.addDropListener(new DropTargetListener() {
@Override
public void dragEnter(DropTargetEvent event) {
// Promote the platform-default drop to a copy when the source allows it.
if (event.detail == DND.DROP_DEFAULT) {
if ((event.operations & DND.DROP_COPY) != 0) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
@Override
public void dragOver(DropTargetEvent event) {
event.feedback = DND.FEEDBACK_SELECT | DND.FEEDBACK_SCROLL;
}
@Override
public void dragOperationChanged(DropTargetEvent event) {
// Same promotion rule as dragEnter: default drop becomes a copy if possible.
if (event.detail == DND.DROP_DEFAULT) {
if ((event.operations & DND.DROP_COPY) != 0) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
@Override
public void dragLeave(DropTargetEvent event) {
}
@Override
public void dropAccept(DropTargetEvent event) {
}
@Override
public void drop(DropTargetEvent event) {
// Open every file that was dropped onto the shell.
if (fileTransfer.isSupportedType(event.currentDataType)) {
String[] files = (String[]) event.data;
for (String file : files) {
openFile(file, false);
}
}
}
});
// listen for steps being added or removed
PluginRegistry.getInstance().addPluginListener(StepPluginType.class, new PluginTypeListener() {
@Override
public void pluginAdded(Object serviceObject) {
// hack to get the tree to reload
previousShowTrans = false;
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
});
}
@Override
public void pluginRemoved(Object serviceObject) {
// hack to get the tree to reload
previousShowTrans = false;
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
});
}
@Override
public void pluginChanged(Object serviceObject) {
// Intentionally empty: an in-place plugin change needs no tree reload.
}
});
}
Use of org.pentaho.di.job.JobExecutionConfiguration in project pdi-platform-plugin by pentaho.
From class PdiAction, the method executeJob:
/**
 * Executes a PDI job synchronously and waits for it to finish.
 *
 * @param jobMeta the job metadata to execute; must not be null or the method throws
 * @param repository the repository the job was loaded from (used to build the Job)
 * @param logWriter the Kettle log writer (not used directly here; output is
 *        collected through pdiUserAppender)
 * @throws ActionExecutionException if the job metadata is missing, the job
 *         cannot be created, or the job fails to start
 */
protected void executeJob(final JobMeta jobMeta, final Repository repository, final LogWriter logWriter) throws ActionExecutionException {
localJob = null;
if (jobMeta != null) {
JobExecutionConfiguration jobExConfig = newJobExecutionConfiguration();
// Only override configuration values that were explicitly provided.
if (logLevel != null) {
jobExConfig.setLogLevel(LogLevel.getLogLevelForCode(logLevel));
}
if (clearLog != null) {
jobExConfig.setClearingLog(Boolean.valueOf(clearLog));
}
if (runSafeMode != null) {
jobExConfig.setSafeModeEnabled(Boolean.valueOf(runSafeMode));
}
if (expandingRemoteJob != null) {
jobExConfig.setExpandingRemoteJob(Boolean.valueOf(expandingRemoteJob));
}
if (startCopyName != null) {
jobExConfig.setStartCopyName(startCopyName);
}
try {
localJob = newJob(repository, jobMeta);
localJob.setArguments(arguments);
localJob.shareVariablesWith(jobMeta);
// Register the job with Carte under a fresh container id so it can be
// monitored/managed remotely.
String carteObjectId = UUID.randomUUID().toString();
localJob.setContainerObjectId(carteObjectId);
CarteSingleton.getInstance().getJobMap().addJob(getJobName(carteObjectId), carteObjectId, localJob, new JobConfiguration(localJob.getJobMeta(), jobExConfig));
} catch (Exception e) {
throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0021_BAD_JOB_METADATA"), // $NON-NLS-1$
e);
}
}
if (localJob == null) {
if (log.isDebugEnabled()) {
log.debug(pdiUserAppender.getBuffer().toString());
}
throw new ActionExecutionException(Messages.getInstance().getErrorString(// $NON-NLS-1$
"Kettle.ERROR_0021_BAD_JOB_METADATA"));
}
// localJob is guaranteed non-null here: the branch above throws otherwise,
// so the former redundant "if (localJob != null)" wrapper was removed.
try {
if (log.isDebugEnabled()) {
// $NON-NLS-1$
log.debug(Messages.getInstance().getString("Kettle.DEBUG_STARTING_JOB"));
}
if (startCopyName != null) {
JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry(startCopyName);
localJob.setStartJobEntryCopy(startJobEntryCopy);
}
// Fix: only override the job's log level when one was explicitly supplied,
// mirroring the guarded configuration above. Previously a null logLevel
// unconditionally reset the level to getLogLevelForCode's fallback.
if (logLevel != null) {
localJob.setLogLevel(LogLevel.getLogLevelForCode(logLevel));
}
localJob.start();
} catch (Throwable e) {
throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0022_JOB_START_FAILED"), // $NON-NLS-1$
e);
}
// It's running in a separate thread to allow monitoring, etc.
if (log.isDebugEnabled()) {
// $NON-NLS-1$
log.debug(Messages.getInstance().getString("Kettle.DEBUG_JOB_RUNNING"));
}
localJob.waitUntilFinished();
int jobErrors = localJob.getErrors();
long jobResultErrors = localJob.getResult().getNrErrors();
if ((jobErrors > 0) || (jobResultErrors > 0)) {
if (log.isDebugEnabled()) {
log.debug(pdiUserAppender.getBuffer().toString());
}
// don't throw exception, because the scheduler may try to run this job again
log.error(org.pentaho.platform.plugin.kettle.messages.Messages.getInstance().getErrorString(// $NON-NLS-1$
"PdiAction.ERROR_0008_JOB_HAD_ERRORS", Integer.toString(jobErrors), Long.toString(jobResultErrors)));
return;
}
// Dump the Kettle log...
if (log.isDebugEnabled()) {
log.debug(pdiUserAppender.getBuffer().toString());
}
}
Use of org.pentaho.di.job.JobExecutionConfiguration in project pdi-platform-plugin by pentaho.
From class PdiActionTest, the method testSetParamsIntoExecuteConfigInExecuteJob:
@Test
public void testSetParamsIntoExecuteConfigInExecuteJob() throws ActionExecutionException {
// Arrange: a spied action whose collaborators are all mocked out.
PdiAction pdiAction = spy(new PdiAction());
JobMeta mockedJobMeta = mock(JobMeta.class);
Repository mockedRepository = mock(Repository.class);
LogWriter mockedLogWriter = mock(LogWriter.class);
Job mockedJob = mock(Job.class);
Log mockedLog = mock(Log.class);
JobExecutionConfiguration executionConfig = mock(JobExecutionConfiguration.class);
Result mockedResult = mock(Result.class);
// Configure the action with the test parameters under verification.
pdiAction.setLogger(mockedLog);
pdiAction.setLogLevel(TEST_LOG_LEVEL_PARAM);
pdiAction.setClearLog(TEST_TRUE_BOOLEAN_PARAM);
pdiAction.setRunSafeMode(TEST_FALSE_BOOLEAN_PARAM);
pdiAction.setExpandingRemoteJob(TEST_FALSE_BOOLEAN_PARAM);
pdiAction.setStartCopyName(TEST_START_COPY_NAME_PARAM);
// Stub the factory/collaborator calls so no real job is created or run.
doReturn(mockedJob).when(pdiAction).newJob(mockedRepository, mockedJobMeta);
doReturn(false).when(mockedLog).isDebugEnabled();
doReturn(executionConfig).when(pdiAction).newJobExecutionConfiguration();
doReturn(mockedResult).when(mockedJob).getResult();
// Act.
pdiAction.executeJob(mockedJobMeta, mockedRepository, mockedLogWriter);
// Assert: every explicitly-set parameter reaches the execution configuration.
verify(executionConfig).setLogLevel(LogLevel.getLogLevelForCode(TEST_LOG_LEVEL_PARAM));
verify(executionConfig).setClearingLog(Boolean.valueOf(TEST_TRUE_BOOLEAN_PARAM));
verify(executionConfig).setSafeModeEnabled(Boolean.valueOf(TEST_FALSE_BOOLEAN_PARAM));
verify(executionConfig).setExpandingRemoteJob(Boolean.valueOf(TEST_FALSE_BOOLEAN_PARAM));
verify(executionConfig).setStartCopyName(TEST_START_COPY_NAME_PARAM);
}
Aggregations