Use of org.pentaho.di.job.entry.JobEntryInterface in project pentaho-kettle by pentaho.
From the class RepositoryImporterTest, method testImportTrans_patchTransEntries_without_variables:
@Test
public void testImportTrans_patchTransEntries_without_variables() throws KettleException {
  JobEntryInterface jobEntryInterface = createJobEntry("");
  StepMetaInterface stepMeta = createStepMeta("/userName");
  RepositoryImporter importer = createRepositoryImporter(jobEntryInterface, stepMeta, true);
  importer.setBaseDirectory(baseDirectory);
  importer.importTransformation(entityNode, feedback);
  verify((HasRepositoryDirectories) stepMeta).setDirectories(new String[] { ROOT_PATH + USER_NAME_PATH });
}
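The createJobEntry, createStepMeta, and createRepositoryImporter calls are helper factories defined elsewhere in RepositoryImporterTest. A minimal sketch of what createJobEntry might look like, assuming Mockito and assuming HasRepositoryDirectories exposes a getDirectories() accessor; the body is an illustration, not the project's actual helper:

// Hypothetical sketch of the helper factory used above; the real test defines its own version.
private JobEntryInterface createJobEntry(String directory) {
  // Mock an entry that also implements HasRepositoryDirectories, so the importer can patch its path.
  JobEntryInterface entry = mock(JobEntryInterface.class, withSettings().extraInterfaces(HasRepositoryDirectories.class));
  when(((HasRepositoryDirectories) entry).getDirectories()).thenReturn(new String[] { directory });
  return entry;
}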
Use of org.pentaho.di.job.entry.JobEntryInterface in project pentaho-kettle by pentaho.
From the class JobExecutionConfigurationTest, method testGetUsedArguments:
@Test
public void testGetUsedArguments() throws KettleException {
  JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
  JobMeta jobMeta = new JobMeta();
  jobMeta.jobcopies = new ArrayList<>();
  String[] commandLineArguments = new String[0];
  IMetaStore metaStore = mock(IMetaStore.class);
  JobEntryCopy jobEntryCopy0 = new JobEntryCopy();
  TransMeta transMeta0 = mock(TransMeta.class);
  Map<String, String> map0 = new HashMap<>();
  map0.put("arg0", "argument0");
  when(transMeta0.getUsedArguments(commandLineArguments)).thenReturn(map0);
  JobEntryInterface jobEntryInterface0 = mock(JobEntryInterface.class);
  when(jobEntryInterface0.isTransformation()).thenReturn(false);
  jobEntryCopy0.setEntry(jobEntryInterface0);
  jobMeta.jobcopies.add(jobEntryCopy0);
  JobEntryCopy jobEntryCopy1 = new JobEntryCopy();
  TransMeta transMeta1 = mock(TransMeta.class);
  Map<String, String> map1 = new HashMap<>();
  map1.put("arg1", "argument1");
  when(transMeta1.getUsedArguments(commandLineArguments)).thenReturn(map1);
  JobEntryTrans jobEntryTrans1 = mock(JobEntryTrans.class);
  when(jobEntryTrans1.isTransformation()).thenReturn(true);
  when(jobEntryTrans1.getTransMeta(executionConfiguration.getRepository(), metaStore, jobMeta)).thenReturn(transMeta1);
  jobEntryCopy1.setEntry(jobEntryTrans1);
  jobMeta.jobcopies.add(jobEntryCopy1);
  JobEntryCopy jobEntryCopy2 = new JobEntryCopy();
  TransMeta transMeta2 = mock(TransMeta.class);
  Map<String, String> map2 = new HashMap<>();
  map2.put("arg1", "argument1");
  map2.put("arg2", "argument2");
  when(transMeta2.getUsedArguments(commandLineArguments)).thenReturn(map2);
  JobEntryTrans jobEntryTrans2 = mock(JobEntryTrans.class);
  when(jobEntryTrans2.isTransformation()).thenReturn(true);
  when(jobEntryTrans2.getTransMeta(executionConfiguration.getRepository(), metaStore, jobMeta)).thenReturn(transMeta2);
  jobEntryCopy2.setEntry(jobEntryTrans2);
  jobMeta.jobcopies.add(jobEntryCopy2);
  executionConfiguration.getUsedArguments(jobMeta, commandLineArguments, metaStore);
  assertEquals(2, executionConfiguration.getArguments().size());
}
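The first copy contributes nothing because isTransformation() returns false, so its TransMeta is never consulted. The expected size is 2 because map1 contributes arg1 while map2 contributes arg1 and arg2, and duplicate keys collapse when the per-transformation maps are merged. A conceptual sketch of that merge, not the method's actual implementation:

// Conceptual equivalent of the merge the assertion verifies:
Map<String, String> merged = new HashMap<>();
merged.putAll(map1); // adds arg1
merged.putAll(map2); // arg1 is already present, so only arg2 is new
// merged.size() == 2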
Use of org.pentaho.di.job.entry.JobEntryInterface in project pentaho-kettle by pentaho.
From the class JobDelegate, method readJobEntry:
protected JobEntryInterface readJobEntry(DataNode copyNode, JobMeta jobMeta, List<JobEntryInterface> jobentries) throws KettleException {
  try {
    String name = getString(copyNode, PROP_NAME);
    for (JobEntryInterface entry : jobentries) {
      if (entry.getName().equalsIgnoreCase(name)) {
        // already loaded!
        return entry;
      }
    }
    // load the entry from the node
    //
    String typeId = getString(copyNode, PROP_JOBENTRY_TYPE);
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, typeId);
    JobEntryInterface jobMetaInterface = null;
    boolean isMissing = jobPlugin == null;
    if (!isMissing) {
      jobMetaInterface = (JobEntryInterface) registry.loadClass(jobPlugin);
    } else {
      MissingEntry missingEntry = new MissingEntry(jobMeta.getName(), typeId);
      jobMeta.addMissingEntry(missingEntry);
      jobMetaInterface = missingEntry;
    }
    jobMetaInterface.setName(name);
    jobMetaInterface.setDescription(getString(copyNode, PROP_DESCRIPTION));
    jobMetaInterface.setObjectId(new StringObjectId(copyNode.getId().toString()));
    RepositoryProxy proxy = new RepositoryProxy(copyNode.getNode(NODE_CUSTOM));
    // make sure the metastore is passed
    jobMetaInterface.setMetaStore(jobMeta.getMetaStore());
    if (!isMissing) {
      compatibleJobEntryLoadRep(jobMetaInterface, proxy, null, jobMeta.getDatabases(), jobMeta.getSlaveServers());
      jobMetaInterface.loadRep(proxy, jobMeta.getMetaStore(), null, jobMeta.getDatabases(), jobMeta.getSlaveServers());
    }
    jobentries.add(jobMetaInterface);
    return jobMetaInterface;
  } catch (Exception e) {
    throw new KettleException("Unable to read job entry interface information from repository", e);
  }
}
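Because the method first scans jobentries by name, copies of the same entry share a single JobEntryInterface instance. A minimal illustration, where copyNode1 and copyNode2 are hypothetical nodes carrying the same PROP_NAME:

List<JobEntryInterface> jobentries = new ArrayList<>();
JobEntryInterface first = readJobEntry(copyNode1, jobMeta, jobentries);  // loaded via the plugin registry
JobEntryInterface second = readJobEntry(copyNode2, jobMeta, jobentries); // same name: returned from the list
// first == second: both copies are backed by one shared instance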
Use of org.pentaho.di.job.entry.JobEntryInterface in project pentaho-kettle by pentaho.
From the class JobDelegate, method dataNodeToElement:
public void dataNodeToElement(final DataNode rootNode, final RepositoryElementInterface element) throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  Set<String> privateDatabases = null;
  // read the private databases
  DataNode privateDbsNode = rootNode.getNode(NODE_JOB_PRIVATE_DATABASES);
  // BACKLOG-6635
  if (privateDbsNode != null) {
    privateDatabases = new HashSet<>();
    if (privateDbsNode.hasProperty(PROP_JOB_PRIVATE_DATABASE_NAMES)) {
      for (String privateDatabaseName : getString(privateDbsNode, PROP_JOB_PRIVATE_DATABASE_NAMES).split(JOB_PRIVATE_DATABASE_DELIMITER)) {
        if (!privateDatabaseName.isEmpty()) {
          privateDatabases.add(privateDatabaseName);
        }
      }
    } else {
      for (DataNode privateDatabase : privateDbsNode.getNodes()) {
        privateDatabases.add(privateDatabase.getName());
      }
    }
  }
  jobMeta.setPrivateDatabases(privateDatabases);
  jobMeta.setSharedObjectsFile(getString(rootNode, PROP_SHARED_FILE));
  // Keep a unique list of job entries to facilitate loading.
  //
  List<JobEntryInterface> jobentries = new ArrayList<>();
  // Read the job entry copies
  //
  DataNode entriesNode = rootNode.getNode(NODE_ENTRIES);
  int nrCopies = (int) entriesNode.getProperty(PROP_NR_JOB_ENTRY_COPIES).getLong();
  for (DataNode copyNode : entriesNode.getNodes()) {
    // Read the entry...
    //
    JobEntryInterface jobEntry = readJobEntry(copyNode, jobMeta, jobentries);
    JobEntryCopy copy = new JobEntryCopy(jobEntry);
    copy.setName(getString(copyNode, PROP_NAME));
    copy.setDescription(getString(copyNode, PROP_DESCRIPTION));
    copy.setObjectId(new StringObjectId(copyNode.getId().toString()));
    copy.setNr((int) copyNode.getProperty(PROP_NR).getLong());
    int x = (int) copyNode.getProperty(PROP_GUI_LOCATION_X).getLong();
    int y = (int) copyNode.getProperty(PROP_GUI_LOCATION_Y).getLong();
    copy.setLocation(x, y);
    copy.setDrawn(copyNode.getProperty(PROP_GUI_DRAW).getBoolean());
    copy.setLaunchingInParallel(copyNode.getProperty(PROP_PARALLEL).getBoolean());
    // Read the job entry group attributes map
    if (jobEntry instanceof JobEntryBase) {
      AttributesMapUtil.loadAttributesMap(copyNode, (JobEntryBase) jobEntry);
    }
    loadAttributesMap(copyNode, copy);
    jobMeta.addJobEntry(copy);
  }
  if (jobMeta.getJobCopies().size() != nrCopies) {
    throw new KettleException("The number of job entry copies read [" + jobMeta.getJobCopies().size() + "] was not the number we expected [" + nrCopies + "]");
  }
  // Read the notes...
  //
  DataNode notesNode = rootNode.getNode(NODE_NOTES);
  int nrNotes = (int) notesNode.getProperty(PROP_NR_NOTES).getLong();
  for (DataNode noteNode : notesNode.getNodes()) {
    String xml = getString(noteNode, PROP_XML);
    jobMeta.addNote(new NotePadMeta(XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), NotePadMeta.XML_TAG)));
  }
  if (jobMeta.nrNotes() != nrNotes) {
    throw new KettleException("The number of notes read [" + jobMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]");
  }
  // Read the hops...
  //
  DataNode hopsNode = rootNode.getNode(NODE_HOPS);
  int nrHops = (int) hopsNode.getProperty(PROP_NR_HOPS).getLong();
  for (DataNode hopNode : hopsNode.getNodes()) {
    String copyFromName = getString(hopNode, JOB_HOP_FROM);
    int copyFromNr = (int) hopNode.getProperty(JOB_HOP_FROM_NR).getLong();
    String copyToName = getString(hopNode, JOB_HOP_TO);
    int copyToNr = (int) hopNode.getProperty(JOB_HOP_TO_NR).getLong();
    boolean enabled = true;
    if (hopNode.hasProperty(JOB_HOP_ENABLED)) {
      enabled = hopNode.getProperty(JOB_HOP_ENABLED).getBoolean();
    }
    boolean evaluation = true;
    if (hopNode.hasProperty(JOB_HOP_EVALUATION)) {
      evaluation = hopNode.getProperty(JOB_HOP_EVALUATION).getBoolean();
    }
    boolean unconditional = true;
    if (hopNode.hasProperty(JOB_HOP_UNCONDITIONAL)) {
      unconditional = hopNode.getProperty(JOB_HOP_UNCONDITIONAL).getBoolean();
    }
    JobEntryCopy copyFrom = jobMeta.findJobEntry(copyFromName, copyFromNr, true);
    JobEntryCopy copyTo = jobMeta.findJobEntry(copyToName, copyToNr, true);
    JobHopMeta jobHopMeta = new JobHopMeta(copyFrom, copyTo);
    jobHopMeta.setEnabled(enabled);
    jobHopMeta.setEvaluation(evaluation);
    jobHopMeta.setUnconditional(unconditional);
    jobMeta.addJobHop(jobHopMeta);
  }
  if (jobMeta.nrJobHops() != nrHops) {
    throw new KettleException("The number of hops read [" + jobMeta.nrJobHops() + "] was not the number we expected [" + nrHops + "]");
  }
  // Load the details at the end, to make sure we reference the databases correctly, etc.
  //
  loadJobMetaDetails(rootNode, jobMeta);
  jobMeta.eraseParameters();
  DataNode paramsNode = rootNode.getNode(NODE_PARAMETERS);
  int count = (int) paramsNode.getProperty(PROP_NR_PARAMETERS).getLong();
  for (int idx = 0; idx < count; idx++) {
    DataNode paramNode = paramsNode.getNode(PARAM_PREFIX + idx);
    String key = getString(paramNode, PARAM_KEY);
    String def = getString(paramNode, PARAM_DEFAULT);
    String desc = getString(paramNode, PARAM_DESC);
    jobMeta.addParameterDefinition(key, def, desc);
  }
}
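A minimal usage sketch of this load path, assuming an already-constructed JobDelegate named delegate and the job's root DataNode as read from the repository (both names are assumptions):

// Hypothetical usage: rehydrate a JobMeta from its repository node.
JobMeta jobMeta = new JobMeta();
delegate.dataNodeToElement(rootNode, jobMeta); // fills entries, notes, hops, and parameters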
Use of org.pentaho.di.job.entry.JobEntryInterface in project pentaho-kettle by pentaho.
From the class JobDelegate, method elementToDataNode:
public DataNode elementToDataNode(final RepositoryElementInterface element) throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  DataNode rootNode = new DataNode(NODE_JOB);
  if (jobMeta.getPrivateDatabases() != null) {
    // save all private database names http://jira.pentaho.com/browse/PPP-3413
    String privateDatabaseNames = StringUtils.join(jobMeta.getPrivateDatabases(), JOB_PRIVATE_DATABASE_DELIMITER);
    DataNode privateDatabaseNode = rootNode.addNode(NODE_JOB_PRIVATE_DATABASES);
    privateDatabaseNode.setProperty(PROP_JOB_PRIVATE_DATABASE_NAMES, privateDatabaseNames);
  }
  // Save the notes
  //
  DataNode notesNode = rootNode.addNode(NODE_NOTES);
  notesNode.setProperty(PROP_NR_NOTES, jobMeta.nrNotes());
  for (int i = 0; i < jobMeta.nrNotes(); i++) {
    NotePadMeta note = jobMeta.getNote(i);
    DataNode noteNode = notesNode.addNode(NOTE_PREFIX + i);
    noteNode.setProperty(PROP_XML, note.getXML());
  }
  if (log.isDetailed()) {
    log.logDetailed(toString(), "Saving " + jobMeta.nrJobEntries() + " Job entry copies to repository..."); // $NON-NLS-1$ //$NON-NLS-2$
  }
  DataNode entriesNode = rootNode.addNode(NODE_ENTRIES);
  entriesNode.setProperty(PROP_NR_JOB_ENTRY_COPIES, jobMeta.nrJobEntries());
  for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
    JobEntryCopy copy = jobMeta.getJobEntry(i);
    JobEntryInterface entry = copy.getEntry();
    // Create a new node for each entry...
    //
    DataNode copyNode = entriesNode.addNode(sanitizeNodeName(copy.getName()) + "_" + (i + 1) + EXT_JOB_ENTRY_COPY); // $NON-NLS-1$
    copyNode.setProperty(PROP_NAME, copy.getName());
    copyNode.setProperty(PROP_DESCRIPTION, copy.getDescription());
    copyNode.setProperty(PROP_NR, copy.getNr());
    copyNode.setProperty(PROP_GUI_LOCATION_X, copy.getLocation().x);
    copyNode.setProperty(PROP_GUI_LOCATION_Y, copy.getLocation().y);
    copyNode.setProperty(PROP_GUI_DRAW, copy.isDrawn());
    copyNode.setProperty(PROP_PARALLEL, copy.isLaunchingInParallel());
    // Save the job entry group attributes map
    if (entry instanceof JobEntryBase) {
      AttributesMapUtil.saveAttributesMap(copyNode, (JobEntryBase) entry);
    }
    // And save the job entry copy group attributes map
    AttributesMapUtil.saveAttributesMap(copyNode, copy, PROP_ATTRIBUTES_JOB_ENTRY_COPY);
    // Save the entry information here as well, for completeness.
    // TODO: since this stores some duplicate information, figure out how to store this separately.
    //
    copyNode.setProperty(PROP_JOBENTRY_TYPE, entry.getPluginId());
    DataNode customNode = new DataNode(NODE_CUSTOM);
    RepositoryProxy proxy = new RepositoryProxy(customNode);
    entry.saveRep(proxy, proxy.getMetaStore(), null);
    compatibleEntrySaveRep(entry, proxy, null);
    copyNode.addNode(customNode);
  }
  // Finally, save the hops
  //
  DataNode hopsNode = rootNode.addNode(NODE_HOPS);
  hopsNode.setProperty(PROP_NR_HOPS, jobMeta.nrJobHops());
  for (int i = 0; i < jobMeta.nrJobHops(); i++) {
    JobHopMeta hop = jobMeta.getJobHop(i);
    DataNode hopNode = hopsNode.addNode(JOB_HOP_PREFIX + i);
    hopNode.setProperty(JOB_HOP_FROM, hop.getFromEntry().getName());
    hopNode.setProperty(JOB_HOP_FROM_NR, hop.getFromEntry().getNr());
    hopNode.setProperty(JOB_HOP_TO, hop.getToEntry().getName());
    hopNode.setProperty(JOB_HOP_TO_NR, hop.getToEntry().getNr());
    hopNode.setProperty(JOB_HOP_ENABLED, hop.isEnabled());
    hopNode.setProperty(JOB_HOP_EVALUATION, hop.getEvaluation());
    hopNode.setProperty(JOB_HOP_UNCONDITIONAL, hop.isUnconditional());
  }
  String[] paramKeys = jobMeta.listParameters();
  DataNode paramsNode = rootNode.addNode(NODE_PARAMETERS);
  paramsNode.setProperty(PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length);
  for (int idx = 0; idx < paramKeys.length; idx++) {
    DataNode paramNode = paramsNode.addNode(PARAM_PREFIX + idx);
    String key = paramKeys[idx];
    String description = jobMeta.getParameterDescription(paramKeys[idx]);
    String defaultValue = jobMeta.getParameterDefault(paramKeys[idx]);
    paramNode.setProperty(PARAM_KEY, key != null ? key : ""); // $NON-NLS-1$
    paramNode.setProperty(PARAM_DEFAULT, defaultValue != null ? defaultValue : ""); // $NON-NLS-1$
    paramNode.setProperty(PARAM_DESC, description != null ? description : ""); // $NON-NLS-1$
  }
  // Let's not forget to save the details of the job itself.
  // This includes logging information, parameters, etc.
  //
  saveJobDetails(rootNode, jobMeta);
  return rootNode;
}
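elementToDataNode and dataNodeToElement are intended to be symmetric, so a save followed by a load should reproduce the same structure. A round-trip sketch under the same assumptions as above (delegate is an already-constructed JobDelegate):

DataNode rootNode = delegate.elementToDataNode(jobMeta); // serialize the job
JobMeta restored = new JobMeta();
delegate.dataNodeToElement(rootNode, restored);          // deserialize into a fresh JobMeta
// restored.nrJobEntries() == jobMeta.nrJobEntries(), and likewise for notes and hops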