Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
Class JobMeta, method lookupRepositoryReferences.
/**
* Look up the references after import
*
* @param repository the repository to reference.
*/
public void lookupRepositoryReferences(Repository repository) throws KettleException {
  KettleException lastThrownException = null;
  Map<String, RepositoryObjectType> notFoundedReferences = new HashMap<>();
  for (JobEntryCopy copy : jobcopies) {
    if (copy.getEntry().hasRepositoryReferences()) {
      try {
        copy.getEntry().lookupRepositoryReferences(repository);
      } catch (IdNotFoundException e) {
        lastThrownException = e;
        String path = e.getPathToObject();
        String name = e.getObjectName();
        String key = StringUtils.isEmpty(path) || path.equals("null") ? name : path + "/" + name;
        notFoundedReferences.put(key, e.getObjectType());
      }
    }
  }
  if (lastThrownException != null && !notFoundedReferences.isEmpty()) {
    throw new LookupReferencesException(lastThrownException, notFoundedReferences);
  }
}
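A minimal usage sketch (illustrative caller, not from the Kettle sources; the class and method names below are invented for the example): after importing a job, resolve its repository references in one call and report an aggregated failure. The method throws a LookupReferencesException, which is a KettleException, so catching KettleException is enough here.

// Illustrative caller, not part of JobMeta: validate repository references
// after an import and report a single aggregated failure.
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;

public class ImportedJobValidator {

  /** Returns true when every repository reference in the job resolves. */
  public static boolean referencesResolve(JobMeta jobMeta, Repository repository) {
    try {
      // Throws a LookupReferencesException (a KettleException) carrying the
      // map of unresolved "path/name" keys built in the method shown above.
      jobMeta.lookupRepositoryReferences(repository);
      return true;
    } catch (KettleException e) {
      System.err.println("Unresolved references in job '" + jobMeta.getName() + "': " + e.getMessage());
      return false;
    }
  }
}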
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
Class JobMeta, method getSelectedLocations.
/**
* Gets the selected locations.
*
* @return the selected locations
*/
public Point[] getSelectedLocations() {
  List<JobEntryCopy> selectedEntries = getSelectedEntries();
  Point[] retval = new Point[selectedEntries.size()];
  for (int i = 0; i < retval.length; i++) {
    JobEntryCopy si = selectedEntries.get(i);
    Point p = si.getLocation();
    // explicit copy of location
    retval[i] = new Point(p.x, p.y);
  }
  return retval;
}
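Because each returned Point is an explicit copy, a caller can work with the coordinates without mutating entry locations. A small sketch (illustrative helper, not from the sources) that finds the top-left corner of the current selection:

// Illustrative caller: compute the top-left corner of the current selection,
// e.g. as the anchor for moving the selected entries on the job graph.
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.job.JobMeta;

public class SelectionBounds {

  /** Returns the minimum x/y over the selected entries, or null if nothing is selected. */
  public static Point topLeft(JobMeta jobMeta) {
    Point[] locations = jobMeta.getSelectedLocations();
    if (locations.length == 0) {
      return null;
    }
    int minX = Integer.MAX_VALUE;
    int minY = Integer.MAX_VALUE;
    for (Point p : locations) {
      minX = Math.min(minX, p.x);
      minY = Math.min(minY, p.y);
    }
    return new Point(minX, minY);
  }
}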
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
Class JobMeta, method unselectAll.
/**
* Unselect all.
*/
public void unselectAll() {
  int i;
  for (i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy ce = getJobEntry(i);
    ce.setSelected(false);
  }
  for (i = 0; i < nrNotes(); i++) {
    NotePadMeta ni = getNote(i);
    ni.setSelected(false);
  }
}
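As a usage sketch (illustrative helper, not part of JobMeta), unselectAll is typically called before building a fresh selection so that only the intended entries end up selected:

// Illustrative helper: clear any previous selection, then select only the
// job entry copies whose name matches (case-insensitive).
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;

public class SelectionHelper {

  public static void selectOnlyByName(JobMeta jobMeta, String name) {
    jobMeta.unselectAll();
    for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
      JobEntryCopy copy = jobMeta.getJobEntry(i);
      if (copy.getName().equalsIgnoreCase(name)) {
        copy.setSelected(true);
      }
    }
  }
}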
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
Class JobMeta, method exportResources.
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
  String resourceName = null;
  try {
    // Handle naming for both repository and XML based resources...
    //
    String baseName;
    String originalPath;
    String fullname;
    String extension = "kjb";
    if (Utils.isEmpty(getFilename())) {
      // Assume repository...
      //
      originalPath = directory.getPath();
      baseName = getName();
      fullname = directory.getPath()
          + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR)
          + getName() + "." + extension;
    } else {
      // Assume file
      //
      FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
      originalPath = fileObject.getParent().getName().getPath();
      baseName = fileObject.getName().getBaseName();
      fullname = fileObject.getName().getPath();
    }
    resourceName = namingInterface.nameResource(baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.JOB);
    ResourceDefinition definition = definitions.get(resourceName);
    if (definition == null) {
      // If we do this once, it will be plenty :-)
      //
      JobMeta jobMeta = (JobMeta) this.realClone(false);
      // All objects get re-located to the root folder, but when exporting we
      // need to see the current directory in order to make the
      // 'Internal.Entry.Current.Directory' variable work.
      jobMeta.setRepositoryDirectory(directory);
      for (JobEntryCopy jobEntry : jobMeta.jobcopies) {
        compatibleJobEntryExportResources(jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository);
        jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
      }
      // Set a number of parameters for all the data files referenced so far...
      //
      Map<String, String> directoryMap = namingInterface.getDirectoryMap();
      if (directoryMap != null) {
        for (String directory : directoryMap.keySet()) {
          String parameterName = directoryMap.get(directory);
          jobMeta.addParameterDefinition(parameterName, directory, "Data file path discovered during export");
        }
      }
      // At the end, add ourselves to the map...
      //
      String jobMetaContent = jobMeta.getXML();
      definition = new ResourceDefinition(resourceName, jobMetaContent);
      if (Utils.isEmpty(this.getFilename())) {
        // Repository
        definition.setOrigin(fullname);
      } else {
        definition.setOrigin(this.getFilename());
      }
      definitions.put(fullname, definition);
    }
  } catch (FileSystemException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
  } catch (KettleFileException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
  }
  return resourceName;
}
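A sketch of how the method might be driven (illustrative caller; the naming implementation is passed in rather than assumed, and repository/metaStore are left null on the assumption of a purely file-based job):

// Illustrative caller: gather every resource a file-based job references into
// one definitions map and return it, keyed as exportResources() stores them.
import java.util.HashMap;
import java.util.Map;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceNamingInterface;

public class JobResourceCollector {

  public static Map<String, ResourceDefinition> collect(JobMeta jobMeta,
      ResourceNamingInterface naming) throws KettleException {
    Map<String, ResourceDefinition> definitions = new HashMap<>();
    // JobMeta implements VariableSpace, so the job substitutes its own
    // variables; repository and metaStore are null here (file-based job assumed).
    String topLevelName = jobMeta.exportResources(jobMeta, definitions, naming, null, null);
    System.out.println("Top-level exported resource: " + topLevelName);
    return definitions;
  }
}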
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
Class JobMeta, method getAllJobGraphEntries.
/**
 * Gets all job graph entries that match the given name.
 *
 * @param name the name to match (case-insensitive)
 * @return the matching job graph entries
 */
public JobEntryCopy[] getAllJobGraphEntries(String name) {
  int count = 0;
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy je = getJobEntry(i);
    if (je.getName().equalsIgnoreCase(name)) {
      count++;
    }
  }
  JobEntryCopy[] retval = new JobEntryCopy[count];
  count = 0;
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy je = getJobEntry(i);
    if (je.getName().equalsIgnoreCase(name)) {
      retval[count] = je;
      count++;
    }
  }
  return retval;
}
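A usage sketch (illustrative, not from the sources): since the lookup is case-insensitive and returns every matching copy, a caller can list all copies that share a name, for example to warn about duplicates before adding another one.

// Illustrative caller: list every job entry copy that shares the given name
// and report how many there are.
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;

public class DuplicateEntryReport {

  public static int report(JobMeta jobMeta, String name) {
    JobEntryCopy[] copies = jobMeta.getAllJobGraphEntries(name);
    for (JobEntryCopy copy : copies) {
      Point location = copy.getLocation();
      System.out.println(copy.getName() + " at (" + location.x + ", " + location.y + ")");
    }
    return copies.length;
  }
}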