Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.
The class StreamToJobNodeConverter, method saveSharedObjects.
public void saveSharedObjects(final Repository repo, final RepositoryElementInterface element) throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  // First store the databases and other depending objects in the job.
  List<String> databaseNames = Arrays.asList(repo.getDatabaseNames(true));
  int dbIndex = 0;
  int indexToReplace = 0;
  boolean updateMeta = Boolean.FALSE;
  for (DatabaseMeta databaseMeta : jobMeta.getDatabases()) {
    if (!databaseNames.contains(databaseMeta.getName())) {
      if (databaseMeta.getObjectId() == null || !StringUtils.isEmpty(databaseMeta.getHostname())) {
        repo.save(databaseMeta, null, null);
      }
    } else if (databaseMeta.getObjectId() == null) {
      indexToReplace = dbIndex;
      updateMeta = Boolean.TRUE;
    }
    dbIndex++;
  }
  // If the database already exists in the repository, fetch its object id and
  // replace the stale entry in the jobMeta db collection.
  if (updateMeta) {
    DatabaseMeta dbMetaToReplace = jobMeta.getDatabase(indexToReplace);
    dbMetaToReplace.setObjectId(repo.getDatabaseID(dbMetaToReplace.getName()));
    jobMeta.removeDatabase(indexToReplace);
    jobMeta.addDatabase(dbMetaToReplace);
  }
  // Save any slave servers the repository does not know about yet.
  for (SlaveServer slaveServer : jobMeta.getSlaveServers()) {
    if (slaveServer.getObjectId() == null) {
      repo.save(slaveServer, null, null);
    }
  }
}
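The converter only pushes objects the repository has never seen, which it detects by a null ObjectId. Below is a minimal standalone sketch of that same pattern, assuming an already-connected Repository and a loaded JobMeta; the class and helper names are hypothetical and not part of pentaho-kettle.

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;

public class SharedObjectsSketch {
  // Hypothetical helper: save only the shared objects the repository does not know yet.
  public static void saveNewSharedObjects(Repository repo, JobMeta jobMeta) throws KettleException {
    for (DatabaseMeta databaseMeta : jobMeta.getDatabases()) {
      if (databaseMeta.getObjectId() == null) { // null id: never stored in this repository
        repo.save(databaseMeta, null, null); // no version comment, no progress monitor
      }
    }
    for (SlaveServer slaveServer : jobMeta.getSlaveServers()) {
      if (slaveServer.getObjectId() == null) {
        repo.save(slaveServer, null, null);
      }
    }
  }
}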
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.
The class JobDelegate, method loadSharedObjects.
// ~ Methods =========================================================================================================
@SuppressWarnings("unchecked")
public SharedObjects loadSharedObjects(final RepositoryElementInterface element,
    final Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType) throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  jobMeta.setSharedObjects(jobMeta.readSharedObjects());
  // Repository objects take priority, so let's overwrite them...
  readDatabases(jobMeta, true, (List<DatabaseMeta>) sharedObjectsByType.get(RepositoryObjectType.DATABASE));
  readSlaves(jobMeta, true, (List<SlaveServer>) sharedObjectsByType.get(RepositoryObjectType.SLAVE_SERVER));
  return jobMeta.getSharedObjects();
}
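A caller of loadSharedObjects has to supply the repository-side objects keyed by type. A sketch of how that map might be assembled, assuming the JobDelegate shown above is accessible and that the database and slave-server lists were already fetched from the repository; the helper name is illustrative.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.repository.pur.JobDelegate; // package assumed from the PurRepository context
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.shared.SharedObjects;

public class LoadSharedObjectsSketch {
  static SharedObjects loadWithRepositoryPriority(JobDelegate jobDelegate, JobMeta jobMeta,
      List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    Map<RepositoryObjectType, List<? extends SharedObjectInterface>> byType = new HashMap<>();
    byType.put(RepositoryObjectType.DATABASE, databases);
    byType.put(RepositoryObjectType.SLAVE_SERVER, slaveServers);
    // shared.xml objects are read first; repository entries then overwrite same-named duplicates.
    return jobDelegate.loadSharedObjects(jobMeta, byType);
  }
}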
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.
The class PurRepository, method loadJobs.
/**
 * Load all jobs referenced by {@code files}.
 *
 * @param monitor
 *          progress monitor; may be {@code null}
 * @param log
 *          channel to log progress and errors to
 * @param files
 *          job files to load
 * @param setInternalVariables
 *          should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return loaded jobs
 * @throws KettleException
 *          error loading data for jobs from repository
 */
protected List<JobMeta> loadJobs(final ProgressMonitorListener monitor, final LogChannelInterface log,
    final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
  List<JobMeta> jobs = new ArrayList<JobMeta>(files.size());
  List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
  List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
  Iterator<RepositoryFile> filesIter = files.iterator();
  Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
  Iterator<VersionSummary> versionsIter = versions.iterator();
  while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
    RepositoryFile file = filesIter.next();
    NodeRepositoryFileData fileData = filesDataIter.next();
    VersionSummary version = versionsIter.next();
    try {
      String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
      log.logDetailed("Loading/Exporting job [{0} : {1}] ({2})", dirPath, file.getTitle(), file.getPath()); //$NON-NLS-1$
      if (monitor != null) {
        monitor.subTask("Exporting job [" + file.getPath() + "]"); //$NON-NLS-1$ //$NON-NLS-2$
      }
      JobMeta jobMeta = buildJobMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
      ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
      jobs.add(jobMeta);
    } catch (Exception ex) {
      // Log and continue: a single unloadable job should not abort the whole batch.
      log.logError("Unable to load job [" + file.getPath() + "]", ex); //$NON-NLS-1$ //$NON-NLS-2$
    }
  }
  return jobs;
}
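The loop above walks three parallel result lists from the batch reads, checks the monitor for cancellation on every pass, and logs-and-continues on per-file failures. A generic sketch of that structure, stripped of the repository specifics; the BatchLoader type and its Loader callback are hypothetical, not part of pentaho-kettle.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.pentaho.di.core.ProgressMonitorListener;

public class BatchLoader {
  public interface Loader<A, B, C, R> {
    R load(A file, B data, C version) throws Exception;
  }

  public static <A, B, C, R> List<R> loadInBatch(ProgressMonitorListener monitor,
      List<A> files, List<B> data, List<C> versions, Loader<A, B, C, R> loader) {
    List<R> results = new ArrayList<R>(files.size());
    Iterator<A> fileIter = files.iterator();
    Iterator<B> dataIter = data.iterator();
    Iterator<C> versionIter = versions.iterator();
    // Check for cancellation before every item; a null monitor means "never cancelled".
    while ((monitor == null || !monitor.isCanceled()) && fileIter.hasNext()) {
      A file = fileIter.next();
      B datum = dataIter.next();
      C version = versionIter.next();
      try {
        results.add(loader.load(file, datum, version));
      } catch (Exception ex) {
        // Log-and-continue, as loadJobs does: one bad item must not abort the batch.
        System.err.println("Unable to load item: " + ex);
      }
    }
    return results;
  }
}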
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.
The class PurRepository, method loadJob.
@Override
public JobMeta loadJob(String jobname, RepositoryDirectoryInterface parentDir, ProgressMonitorListener monitor, String versionId) throws KettleException {
  String absPath = null;
  try {
    absPath = getPath(jobname, parentDir, RepositoryObjectType.JOB);
    if (absPath == null) {
      // Couldn't resolve path, throw an exception
      throw new KettleFileException(BaseMessages.getString(PKG, "PurRepository.ERROR_0003_JOB_NOT_FOUND", jobname));
    }
    RepositoryFile file = pur.getFile(absPath);
    if (versionId != null) {
      // need to go back to the server to get versioned info
      file = pur.getFileAtVersion(file.getId(), versionId);
    }
    NodeRepositoryFileData data = pur.getDataAtVersionForRead(file.getId(), versionId, NodeRepositoryFileData.class);
    ObjectRevision revision = getObjectRevision(new StringObjectId(file.getId().toString()), versionId);
    JobMeta jobMeta = buildJobMeta(file, parentDir, data, revision);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
    return jobMeta;
  } catch (Exception e) {
    throw new KettleException("Unable to load job from path [" + absPath + "]", e);
  }
}
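From client code, this method is typically reached through the Repository interface. A minimal sketch, assuming an already-connected repository; the directory path "/public/jobs" and the job name "my_job" are purely illustrative.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;

public class LoadJobSketch {
  static JobMeta loadLatest(Repository repository) throws KettleException {
    RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
    RepositoryDirectoryInterface dir = tree.findDirectory("/public/jobs"); // hypothetical path
    // A null versionId loads the latest revision; passing an id loads that specific version.
    return repository.loadJob("my_job", dir, null, null); // hypothetical job name
  }
}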
Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.
The class JobExecutorMetaTest, method testExportResources.
@Test
public void testExportResources() throws KettleException {
  JobExecutorMeta jobExecutorMeta = spy(new JobExecutorMeta());
  JobMeta jobMeta = mock(JobMeta.class);
  String testName = "test";
  doReturn(jobMeta).when(jobExecutorMeta).loadJobMetaProxy(any(JobExecutorMeta.class), any(Repository.class), any(VariableSpace.class));
  when(jobMeta.exportResources(any(JobMeta.class), any(Map.class), any(ResourceNamingInterface.class), any(Repository.class), any(IMetaStore.class))).thenReturn(testName);
  jobExecutorMeta.exportResources(null, null, null, null, null);
  verify(jobMeta).setFilename("${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + testName);
  verify(jobExecutorMeta).setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
}
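The test leans on Mockito's spy/doReturn idiom: the spied JobExecutorMeta runs its real exportResources logic while the collaborator-loading call is stubbed out. A minimal sketch of that idiom in isolation, assuming JUnit 4 and Mockito on the classpath; the list example is unrelated to Kettle.

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;

public class SpyIdiomTest {
  @Test
  public void spyRunsRealCodeExceptWhereStubbed() {
    List<String> list = spy(new ArrayList<String>());
    doReturn(100).when(list).size(); // doReturn/when stubs without invoking the real size()
    list.add("x");                   // unstubbed calls still hit the real ArrayList
    verify(list).add("x");
    Assert.assertEquals(100, list.size());
  }
}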