Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho:
class JobExecutorMeta, method loadJobMeta.
/**
 * Loads the {@link JobMeta} referenced by an executor step. Depending on the executor's
 * specification method the job is resolved from a plain file name, from a repository
 * directory + job name, or from a repository object reference.
 *
 * @param executorMeta the executor step metadata describing where the job lives
 * @param rep          the repository to load from; may be {@code null}, in which case only
 *                     file-based loading is attempted
 * @param metaStore    the metastore handed to the loaded job
 * @param space        variable space used to substitute variables in file/directory names
 * @return the loaded job metadata, never {@code null}
 * @throws KettleException if the referenced job cannot be located or loaded
 */
public static final synchronized JobMeta loadJobMeta(JobExecutorMeta executorMeta, Repository rep, IMetaStore metaStore, VariableSpace space) throws KettleException {
    JobMeta mappingJobMeta = null;
    CurrentDirectoryResolver r = new CurrentDirectoryResolver();
    // Resolve ${Internal.Entry.Current.Directory} & co. relative to the parent step.
    VariableSpace tmpSpace = r.resolveCurrentDirectory(executorMeta.getSpecificationMethod(), space, rep, executorMeta.getParentStepMeta(), executorMeta.getFileName());
    switch (executorMeta.getSpecificationMethod()) {
        case FILENAME:
            String realFilename = tmpSpace.environmentSubstitute(executorMeta.getFileName());
            try {
                if (rep != null) {
                    realFilename = r.normalizeSlashes(realFilename);
                    // A repository is connected: prefer loading the job from it by
                    // splitting the file name into directory + job name.
                    try {
                        String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                        String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);
                        RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                        mappingJobMeta = rep.loadJob(tmpFilename, dir, null, null);
                    } catch (KettleException ke) {
                        // Not found under the full name: retry with the .kjb extension
                        // stripped (repository entries are stored without it).
                        if (realFilename.endsWith(Const.STRING_JOB_DEFAULT_EXT)) {
                            try {
                                String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1, realFilename.indexOf("." + Const.STRING_JOB_DEFAULT_EXT));
                                String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                                RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                                mappingJobMeta = rep.loadJob(tmpFilename, dir, null, null);
                            } catch (KettleException ke2) {
                                // Deliberate best-effort: fall through to the file-system
                                // load below (mappingJobMeta stays null).
                            }
                        }
                    }
                }
                if (mappingJobMeta == null) {
                    // Either no repository, or the repository lookups failed: load from XML.
                    mappingJobMeta = new JobMeta(null, realFilename, rep, metaStore, null);
                    LogChannel.GENERAL.logDetailed("Loading job from repository", "Job was loaded from XML file [" + realFilename + "]");
                }
            } catch (Exception e) {
                throw new KettleException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.UnableToLoadJob"), e);
            }
            break;
        case REPOSITORY_BY_NAME:
            String realJobname = tmpSpace.environmentSubstitute(executorMeta.getJobName());
            String realDirectory = tmpSpace.environmentSubstitute(executorMeta.getDirectoryPath());
            if (rep != null) {
                if (!Utils.isEmpty(realJobname) && !Utils.isEmpty(realDirectory)) {
                    realDirectory = r.normalizeSlashes(realDirectory);
                    RepositoryDirectoryInterface repdir = rep.findDirectory(realDirectory);
                    if (repdir != null) {
                        try {
                            // Reads the last revision in the repository.
                            // TODO: FIXME: should we also pass an external MetaStore into
                            // the repository?
                            mappingJobMeta = rep.loadJob(realJobname, repdir, null, null);
                            LogChannel.GENERAL.logDetailed("Loading job from repository", "Executor job [" + realJobname + "] was loaded from the repository");
                        } catch (Exception e) {
                            throw new KettleException("Unable to load job [" + realJobname + "]", e);
                        }
                    }
                }
            } else {
                // No repository connected: interpret directory + name as a file path.
                try {
                    mappingJobMeta = new JobMeta(null, realDirectory + "/" + realJobname, rep, metaStore, null);
                } catch (KettleException ke) {
                    try {
                        // Add the .kjb extension and try again.
                        mappingJobMeta = new JobMeta(null, realDirectory + "/" + realJobname + "." + Const.STRING_JOB_DEFAULT_EXT, rep, metaStore, null);
                    } catch (KettleException ke2) {
                        // Chain the cause so the real load failure is not lost.
                        throw new KettleException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.UnableToLoadJob", realJobname) + realDirectory, ke2);
                    }
                }
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            // Read the last revision by reference.
            // NOTE(review): rep is assumed non-null for this specification method — confirm
            // callers guarantee a connected repository here.
            mappingJobMeta = rep.loadJob(executorMeta.getJobObjectId(), null);
            break;
        default:
            break;
    }
    if (mappingJobMeta == null) {
        // No branch produced a job (e.g. repository directory not found): fail with a
        // clear message instead of a NullPointerException in the calls below.
        throw new KettleException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.UnableToLoadJob"));
    }
    // Pass important information to the mapping job metadata. When a child parameter
    // also exists in the parent, the parent's value overwrites the child's.
    StepWithMappingMeta.replaceVariableValues(mappingJobMeta, space);
    if (executorMeta.getParameters().isInheritingAllVariables()) {
        // Copy all remaining parent variables into the child (the 'Inherit all variables
        // from the transformation?' option is checked).
        StepWithMappingMeta.addMissingVariables(mappingJobMeta, space);
    }
    mappingJobMeta.setRepository(rep);
    mappingJobMeta.setMetaStore(metaStore);
    return mappingJobMeta;
}
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho:
class JobFileListener, method open.
/**
 * Opens a job in Spoon from an already-parsed XML node.
 *
 * Fires the JobBeforeOpen/JobAfterOpen extension points around the load, registers the
 * file in the last-used list, and adds a job graph tab for it.
 *
 * @param jobNode    parsed XML node of the job definition
 * @param fname      file name the job was read from (recorded on the JobMeta)
 * @param importfile true when importing into a repository (links get fixed up),
 *                   false for a plain open (change flags are cleared)
 * @return true on success — including the case where the user cancels out of the
 *         missing-plugins dialog; false only when loading threw a KettleException
 */
public boolean open(Node jobNode, String fname, boolean importfile) {
    Spoon spoon = Spoon.getInstance();
    try {
        // Call extension point(s) before the file has been opened
        ExtensionPointHandler.callExtensionPoint(spoon.getLog(), KettleExtensionPoint.JobBeforeOpen.id, fname);
        JobMeta jobMeta = new JobMeta();
        jobMeta.loadXML(jobNode, fname, spoon.getRepository(), spoon.getMetaStore(), false, spoon);
        if (jobMeta.hasMissingPlugins()) {
            // Some entries could not be resolved to installed plugins; let the user decide.
            MissingEntryDialog missingDialog = new MissingEntryDialog(spoon.getShell(), jobMeta.getMissingEntries());
            if (missingDialog.open() == null) {
                // User cancelled: abort the open but report success (nothing failed).
                return true;
            }
        }
        jobMeta.setRepositoryDirectory(spoon.getDefaultSaveLocation(jobMeta));
        jobMeta.setRepository(spoon.getRepository());
        jobMeta.setMetaStore(spoon.getMetaStore());
        spoon.setJobMetaVariables(jobMeta);
        spoon.getProperties().addLastFile(LastUsedFile.FILE_TYPE_JOB, fname, null, false, null);
        spoon.addMenuLast();
        // if any exist.
        if (importfile) {
            if (spoon.getRepository() != null) {
                // Importing into a repository: rewrite references so they point inside it.
                jobMeta = fixLinks(jobMeta);
            }
        } else {
            // Plain open: a freshly loaded job must not be flagged as modified.
            jobMeta.clearChanged();
        }
        jobMeta.setFilename(fname);
        spoon.delegates.jobs.addJobGraph(jobMeta);
        // Call extension point(s) now that the file has been opened
        ExtensionPointHandler.callExtensionPoint(spoon.getLog(), KettleExtensionPoint.JobAfterOpen.id, jobMeta);
        spoon.refreshTree();
        SpoonPerspectiveManager.getInstance().activatePerspective(MainSpoonPerspective.class);
        return true;
    } catch (KettleException e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorOpening.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorOpening.Message") + fname, e);
    }
    return false;
}
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho:
class JobFileListener, method save.
/**
 * Saves a job through Spoon, firing the JobBeforeSave/JobAfterSave extension points
 * around the actual write.
 *
 * @param meta   the job metadata to save
 * @param fname  target file name
 * @param export true to save a deep clone (export) so the open job in the UI is untouched
 * @return true when the save succeeded, false otherwise
 */
public boolean save(EngineMetaInterface meta, String fname, boolean export) {
    Spoon spoon = Spoon.getInstance();
    // Exporting works on a deep copy; a regular save writes the live metadata.
    EngineMetaInterface target = export ? (JobMeta) ((JobMeta) meta).realClone(false) : meta;
    try {
        ExtensionPointHandler.callExtensionPoint(spoon.getLog(), KettleExtensionPoint.JobBeforeSave.id, target);
    } catch (KettleException e) {
        // Extension point failures must not block the save — fail gracefully.
    }
    boolean saved = spoon.saveMeta(target, fname);
    if (!saved) {
        return false;
    }
    try {
        ExtensionPointHandler.callExtensionPoint(spoon.getLog(), KettleExtensionPoint.JobAfterSave.id, target);
    } catch (KettleException e) {
        // Fails gracefully — the file is already written.
    }
    return true;
}
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho:
class JobEntryJobDialog, method pickFileVFS.
/**
 * Lets the user pick a job file for this job entry via a VFS-aware file dialog.
 *
 * Before showing the dialog it tries to resolve the path currently typed into wPath:
 * if that file exists (with or without an added .kjb extension) it is used directly;
 * if it does not exist the user is offered to create a brand-new job at that location.
 * Otherwise the dialog opens, pre-seeded with the parent folder of the current job.
 *
 * Side effects: may update wPath, set specificationMethod to FILENAME, and (on
 * "create new") open and save a new job in Spoon.
 */
protected void pickFileVFS() {
    FileDialog dialog = new FileDialog(shell, SWT.OPEN);
    dialog.setFilterExtensions(Const.STRING_JOB_FILTER_EXT);
    dialog.setFilterNames(Const.getJobFilterNames());
    String prevName = jobMeta.environmentSubstitute(wPath.getText());
    String parentFolder = null;
    try {
        // Default the dialog to the folder containing the current job.
        parentFolder = KettleVFS.getFilename(KettleVFS.getFileObject(jobMeta.environmentSubstitute(jobMeta.getFilename())).getParent());
    } catch (Exception e) {
        // not that important
    }
    if (!Utils.isEmpty(prevName)) {
        try {
            if (KettleVFS.fileExists(prevName)) {
                // The typed path exists: start the dialog in its folder.
                dialog.setFilterPath(KettleVFS.getFilename(KettleVFS.getFileObject(prevName).getParent()));
            } else {
                // Typed path does not exist as-is: retry with the .kjb extension added.
                if (!prevName.endsWith(".kjb")) {
                    prevName = getEntryName(Const.trim(wPath.getText()) + ".kjb");
                }
                if (KettleVFS.fileExists(prevName)) {
                    // Found with the extension: accept it without opening the dialog.
                    wPath.setText(prevName);
                    specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
                    return;
                } else {
                    // File specified doesn't exist. Ask if we should create the file...
                    //
                    MessageBox mb = new MessageBox(shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION);
                    mb.setMessage(BaseMessages.getString(PKG, "JobJob.Dialog.CreateJobQuestion.Message"));
                    // Sorry!
                    mb.setText(BaseMessages.getString(PKG, "JobJob.Dialog.CreateJobQuestion.Title"));
                    int answer = mb.open();
                    if (answer == SWT.YES) {
                        // Create a new empty job in Spoon, point it at the typed path,
                        // and save it right away.
                        Spoon spoon = Spoon.getInstance();
                        spoon.newJobFile();
                        JobMeta newJobMeta = spoon.getActiveJob();
                        newJobMeta.initializeVariablesFrom(jobEntry);
                        newJobMeta.setFilename(jobMeta.environmentSubstitute(prevName));
                        wPath.setText(prevName);
                        specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
                        spoon.saveFile();
                        return;
                    }
                }
            }
        } catch (Exception e) {
            // Resolution failed: fall back to the parent folder of the current job.
            dialog.setFilterPath(parentFolder);
        }
    } else if (!Utils.isEmpty(parentFolder)) {
        dialog.setFilterPath(parentFolder);
    }
    String fname = dialog.open();
    if (fname != null) {
        File file = new File(fname);
        String name = file.getName();
        String parentFolderSelection = file.getParentFile().toString();
        // Selection inside the current job's folder: store just the file name so the
        // entry stays relocatable; otherwise store the full path.
        if (!Utils.isEmpty(parentFolder) && parentFolder.equals(parentFolderSelection)) {
            wPath.setText(getEntryName(name));
        } else {
            wPath.setText(fname);
        }
    }
}
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho:
class StreamToJobNodeConverter, method convert.
/**
 * Parses the given input stream as a job XML document and converts it into the
 * repository's node-based file data format.
 *
 * @param inputStream stream containing the job XML; its available() byte count is
 *                    recorded as the data size
 * @param charset     unused by this implementation
 * @param mimeType    unused by this implementation
 * @return the converted file data, or null when the XML cannot be parsed or any
 *         error occurs
 */
public IRepositoryFileData convert(final InputStream inputStream, final String charset, final String mimeType) {
    try {
        final long size = inputStream.available();
        final JobMeta jobMeta = new JobMeta();
        final Repository repository = connectToRepository();
        final Document document = PDIImportUtil.loadXMLFrom(inputStream);
        if (document == null) {
            return null;
        }
        jobMeta.loadXML(document.getDocumentElement(), repository, null);
        final JobDelegate delegate = new JobDelegate(repository, this.unifiedRepository);
        delegate.saveSharedObjects(jobMeta, null);
        return new NodeRepositoryFileData(delegate.elementToDataNode(jobMeta), size);
    } catch (Exception e) {
        // NOTE(review): every failure is swallowed and reported as null — consider
        // logging the exception so parse errors are diagnosable.
        return null;
    }
}
Aggregations