Use of org.pentaho.di.resource.ResourceDefinition in the pentaho-kettle project (by pentaho):
the exportResources method of the JobMeta class.
/**
 * Exports this job to a flat-file system, adding its XML content to the supplied map of
 * resource definitions under a name produced by the naming interface.
 *
 * @param space           the variable space used to substitute variables in the filename
 * @param definitions     map of already-exported resources; this job is added to it
 * @param namingInterface names exported resources without exposing implementation details
 * @param repository      repository to optionally load other resources from
 * @param metaStore       metastore in which non-kettle metadata could reside
 * @return the exported resource name produced by the naming interface
 * @throws KettleException if the job file cannot be read or resolved
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
    String resourceName = null;
    try {
        // Handle naming for both repository and XML (file) based resources...
        String baseName;
        String originalPath;
        String fullname;
        String extension = "kjb";
        if (Utils.isEmpty(getFilename())) {
            // No filename set: assume the job lives in a repository.
            originalPath = directory.getPath();
            baseName = getName();
            fullname = directory.getPath()
                + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR)
                + getName() + "." + extension;
        } else {
            // Filename present: assume a file-based (VFS) job.
            FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
            originalPath = fileObject.getParent().getName().getPath();
            baseName = fileObject.getName().getBaseName();
            fullname = fileObject.getName().getPath();
        }
        resourceName = namingInterface.nameResource(baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.JOB);
        ResourceDefinition definition = definitions.get(resourceName);
        // NOTE(review): the lookup above keys on resourceName but the put at the bottom keys on
        // fullname; whenever the two differ this null-check never hits and the job is re-exported.
        // Kept as-is because callers may read the map by fullname — confirm intended key.
        if (definition == null) {
            // If we do this once, it will be plenty :-)
            JobMeta jobMeta = (JobMeta) this.realClone(false);
            // All objects get re-located to the root folder, but, when exporting, we need to
            // keep the current directory so the 'Internal.Entry.Current.Directory' variable works.
            jobMeta.setRepositoryDirectory(directory);
            // Let every job entry export its own referenced resources.
            for (JobEntryCopy jobEntry : jobMeta.jobcopies) {
                compatibleJobEntryExportResources(jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository);
                jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
            }
            // Set a parameter for every data file directory referenced so far...
            Map<String, String> directoryMap = namingInterface.getDirectoryMap();
            if (directoryMap != null) {
                for (String directory : directoryMap.keySet()) {
                    String parameterName = directoryMap.get(directory);
                    jobMeta.addParameterDefinition(parameterName, directory, "Data file path discovered during export");
                }
            }
            // At the end, add ourselves to the map...
            String jobMetaContent = jobMeta.getXML();
            definition = new ResourceDefinition(resourceName, jobMetaContent);
            if (Utils.isEmpty(this.getFilename())) {
                // Repository-based job: origin is the computed repository path.
                definition.setOrigin(fullname);
            } else {
                definition.setOrigin(this.getFilename());
            }
            definitions.put(fullname, definition);
        }
    } catch (FileSystemException | KettleFileException e) {
        // Both failure modes mean the job file could not be read; wrap with the same message.
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
    }
    return resourceName;
}
Use of org.pentaho.di.resource.ResourceDefinition in the pentaho-kettle project (by pentaho):
the exportResources method of the TransMeta class.
/**
 * Exports the specified objects to a flat-file system, adding content with filename keys to a set of definitions. The
 * supplied resource naming interface allows the object to name appropriately without worrying about those parts of
 * the implementation specific details.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 *          map of already-exported resources; this transformation is added to it
 * @param resourceNamingInterface
 *          names exported resources without exposing implementation details
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 * @throws KettleException if the transformation file cannot be read or resolved
 */
@Override
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
    try {
        // Handle naming for both repository and XML (file) based resources...
        String baseName;
        String originalPath;
        String fullname;
        String extension = "ktr";
        if (Utils.isEmpty(getFilename())) {
            // No filename set: assume the transformation lives in a repository.
            originalPath = directory.getPath();
            baseName = getName();
            fullname = directory.getPath()
                + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR)
                + getName() + "." + extension;
        } else {
            // Filename present: assume a file-based (VFS) transformation.
            FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
            originalPath = fileObject.getParent().getURL().toString();
            baseName = fileObject.getName().getBaseName();
            fullname = fileObject.getURL().toString();
        }
        String exportFileName = resourceNamingInterface.nameResource(baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.TRANSFORMATION);
        ResourceDefinition definition = definitions.get(exportFileName);
        // NOTE(review): the lookup above keys on exportFileName but the put at the bottom keys on
        // fullname; whenever the two differ this null-check never hits and the transformation is
        // re-exported. Kept as-is because callers may read the map by fullname — confirm intended key.
        if (definition == null) {
            // If we do this once, it will be plenty :-)
            TransMeta transMeta = (TransMeta) this.realClone(false);
            // Let every step export its own referenced resources.
            for (StepMeta stepMeta : transMeta.getSteps()) {
                stepMeta.exportResources(space, definitions, resourceNamingInterface, repository, metaStore);
            }
            // Change the filename; calling this sets internal variables inside of the transformation.
            transMeta.setFilename(exportFileName);
            // All objects get re-located to the root folder.
            transMeta.setRepositoryDirectory(new RepositoryDirectory());
            // Set a parameter for every data file directory referenced so far...
            Map<String, String> directoryMap = resourceNamingInterface.getDirectoryMap();
            if (directoryMap != null) {
                for (String directory : directoryMap.keySet()) {
                    String parameterName = directoryMap.get(directory);
                    transMeta.addParameterDefinition(parameterName, directory, "Data file path discovered during export");
                }
            }
            // At the end, add ourselves to the map...
            String transMetaContent = transMeta.getXML();
            definition = new ResourceDefinition(exportFileName, transMetaContent);
            if (Utils.isEmpty(this.getFilename())) {
                // Repository-based transformation: origin is the computed repository path.
                definition.setOrigin(fullname);
            } else {
                definition.setOrigin(this.getFilename());
            }
            definitions.put(fullname, definition);
        }
        return exportFileName;
    } catch (FileSystemException | KettleFileException e) {
        // Both failure modes mean the file could not be read; wrap with the same message.
        throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.ErrorOpeningOrValidatingTheXMLFile", getFilename()), e);
    }
}
Use of org.pentaho.di.resource.ResourceDefinition in the pentaho-kettle project (by pentaho):
the exportResources method of the MetaInjectMetaTest class.
@Test
public void exportResources() throws KettleException {
// Verifies that MetaInjectMeta.exportResources delegates to the loaded TransMeta's
// exportResources and returns its result, and that the meta's file name is updated
// to the expected exported name.
VariableSpace variableSpace = mock(VariableSpace.class);
ResourceNamingInterface resourceNamingInterface = mock(ResourceNamingInterface.class);
Repository repository = mock(Repository.class);
IMetaStore metaStore = mock(IMetaStore.class);
MetaInjectMeta injectMetaSpy = spy(metaInjectMeta);
TransMeta transMeta = mock(TransMeta.class);
// Empty (immutable) map is safe here because transMeta is a mock and never writes to it.
Map<String, ResourceDefinition> definitions = Collections.<String, ResourceDefinition>emptyMap();
// Stub before invoking the spy: the mocked TransMeta reports TEST_FILE_NAME as its export result.
doReturn(TEST_FILE_NAME).when(transMeta).exportResources(transMeta, definitions, resourceNamingInterface, repository, metaStore);
// The spy loads the mocked transformation instead of hitting the repository.
doReturn(transMeta).when(injectMetaSpy).loadTransformationMeta(repository, variableSpace);
String actualExportedFileName = injectMetaSpy.exportResources(variableSpace, definitions, resourceNamingInterface, repository, metaStore);
assertEquals(TEST_FILE_NAME, actualExportedFileName);
// NOTE(review): EXPORTED_FILE_NAME is presumably derived from TEST_FILE_NAME by the
// class under test — its definition is outside this snippet; confirm in the test fixture.
assertEquals(EXPORTED_FILE_NAME, injectMetaSpy.getFileName());
verify(transMeta).exportResources(transMeta, definitions, resourceNamingInterface, repository, metaStore);
}
Aggregations