Usage of org.pentaho.di.repository.RepositoryDirectoryInterface in the pentaho-kettle project (by Pentaho).
Example: class TransMetaConverter, method convert.
/**
 * Converts a Kettle {@code TransMeta} into the engine model
 * {@code org.pentaho.di.engine.model.Transformation} (used by AEL).
 * Works on a deep clone so the caller's {@code transMeta} is not mutated,
 * mirrors steps and hops into the engine model, embeds the transformation
 * XML and name as config entries, and recursively converts any referenced
 * sub-transformations.
 *
 * @param transMeta the transformation metadata to convert (not mutated)
 * @return the populated engine-model transformation
 */
public static Transformation convert(TransMeta transMeta) {
final org.pentaho.di.engine.model.Transformation transformation = new org.pentaho.di.engine.model.Transformation(createTransformationId(transMeta));
try {
// Deep clone: the cleanup/normalization below must not touch the caller's object.
TransMeta copyTransMeta = (TransMeta) transMeta.realClone(false);
cleanupDisabledHops(copyTransMeta);
// Turn off lazy conversion for AEL for now
disableLazyConversion(copyTransMeta);
resolveStepMetaResources(copyTransMeta);
// Mirror every step and every hop of the cleaned copy into the engine model.
copyTransMeta.getSteps().forEach(createOperation(transformation));
findHops(copyTransMeta, hop -> true).forEach(createHop(transformation));
// Carry the full XML and the (possibly defaulted) name along as config entries.
transformation.setConfig(TRANS_META_CONF_KEY, copyTransMeta.getXML());
transformation.setConfig(TRANS_META_NAME_CONF_KEY, Optional.ofNullable(transMeta.getName()).orElse(TRANS_DEFAULT_NAME));
// Recursively convert referenced sub-transformations (ACTIONFILE resource entries),
// keyed by their resource path.
Map<String, Transformation> subTransformations = copyTransMeta.getResourceDependencies().stream().flatMap(resourceReference -> resourceReference.getEntries().stream()).filter(entry -> ResourceEntry.ResourceType.ACTIONFILE.equals(entry.getResourcetype())).collect(toMap(ResourceEntry::getResource, entry -> {
try {
Repository repository = copyTransMeta.getRepository();
if (repository != null) {
// Repository-attached: split the resource path into directory + file name,
// normalizing OS-specific separators to the repository's "/" convention.
Path path = Paths.get(entry.getResource());
RepositoryDirectoryInterface directory = repository.findDirectory(path.getParent().toString().replace(File.separator, "/"));
return convert(repository.loadTransformation(path.getFileName().toString(), directory, null, true, null));
}
// No repository: treat the resource as a transformation file on disk.
return convert(new TransMeta(entry.getResource(), copyTransMeta.getParentVariableSpace()));
} catch (KettleException e) {
// toMap's value function cannot throw checked exceptions; tunnel it out unchecked.
throw new RuntimeException(e);
}
}));
transformation.setConfig(SUB_TRANSFORMATIONS_KEY, (Serializable) subTransformations);
} catch (KettleException e) {
// Guava: rethrows e wrapped as an unchecked exception, so the return below is skipped.
Throwables.propagate(e);
}
return transformation;
}
Usage of org.pentaho.di.repository.RepositoryDirectoryInterface in the pentaho-kettle project (by Pentaho).
Example: class JobExecutorMeta, method loadJobMeta.
/**
 * Loads the JobMeta that a Job Executor step points at, resolved according to the step's
 * specification method:
 * <ul>
 *   <li>FILENAME — try the repository first (with and without the .kjb extension),
 *       then fall back to loading the file from disk;</li>
 *   <li>REPOSITORY_BY_NAME — look up directory + name in the repository, or, when no
 *       repository is connected, build a file path from directory/name and load from disk;</li>
 *   <li>REPOSITORY_BY_REFERENCE — load by object id.</li>
 * </ul>
 * Afterwards parent variables/parameters are pushed into the loaded job.
 *
 * @param executorMeta the step metadata describing which job to load
 * @param rep          repository to resolve against; may be null (file-system fallback)
 * @param metaStore    metastore handed to the loaded JobMeta
 * @param space        parent variable space used for substitution and variable inheritance
 * @return the loaded job metadata
 * @throws KettleException if the job cannot be loaded by any applicable strategy
 */
public static final synchronized JobMeta loadJobMeta(JobExecutorMeta executorMeta, Repository rep, IMetaStore metaStore, VariableSpace space) throws KettleException {
JobMeta mappingJobMeta = null;
// Resolve ${Internal.*} style current-directory variables relative to the parent step.
CurrentDirectoryResolver r = new CurrentDirectoryResolver();
VariableSpace tmpSpace = r.resolveCurrentDirectory(executorMeta.getSpecificationMethod(), space, rep, executorMeta.getParentStepMeta(), executorMeta.getFileName());
switch(executorMeta.getSpecificationMethod()) {
case FILENAME:
String realFilename = tmpSpace.environmentSubstitute(executorMeta.getFileName());
try {
//
if (rep != null) {
realFilename = r.normalizeSlashes(realFilename);
// need to try to load from the repository
try {
// Split "dir/file" on the last slash and load from the repository path.
String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);
RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
mappingJobMeta = rep.loadJob(tmpFilename, dir, null, null);
} catch (KettleException ke) {
// try without extension
if (realFilename.endsWith(Const.STRING_JOB_DEFAULT_EXT)) {
try {
// Repository entries are stored without the .kjb extension; strip it and retry.
String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1, realFilename.indexOf("." + Const.STRING_JOB_DEFAULT_EXT));
String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
mappingJobMeta = rep.loadJob(tmpFilename, dir, null, null);
} catch (KettleException ke2) {
// fall back to try loading from file system (mappingJobMeta is going to be null)
}
}
}
}
if (mappingJobMeta == null) {
// File-system fallback: parse the .kjb XML directly.
mappingJobMeta = new JobMeta(null, realFilename, rep, metaStore, null);
LogChannel.GENERAL.logDetailed("Loading job from repository", "Job was loaded from XML file [" + realFilename + "]");
}
} catch (Exception e) {
throw new KettleException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.UnableToLoadJob"), e);
}
break;
case REPOSITORY_BY_NAME:
String realJobname = tmpSpace.environmentSubstitute(executorMeta.getJobName());
String realDirectory = tmpSpace.environmentSubstitute(executorMeta.getDirectoryPath());
if (rep != null) {
if (!Utils.isEmpty(realJobname) && !Utils.isEmpty(realDirectory)) {
realDirectory = r.normalizeSlashes(realDirectory);
RepositoryDirectoryInterface repdir = rep.findDirectory(realDirectory);
if (repdir != null) {
try {
// reads the last revision in the repository...
//
// TODO: FIXME: should we also pass an
mappingJobMeta = rep.loadJob(realJobname, repdir, null, null);
// external MetaStore into the
// repository?
LogChannel.GENERAL.logDetailed("Loading job from repository", "Executor job [" + realJobname + "] was loaded from the repository");
} catch (Exception e) {
throw new KettleException("Unable to load job [" + realJobname + "]", e);
}
}
}
} else {
// rep is null, let's try loading by filename
try {
mappingJobMeta = new JobMeta(null, realDirectory + "/" + realJobname, rep, metaStore, null);
} catch (KettleException ke) {
try {
// add .kjb extension and try again
mappingJobMeta = new JobMeta(null, realDirectory + "/" + realJobname + "." + Const.STRING_JOB_DEFAULT_EXT, rep, metaStore, null);
} catch (KettleException ke2) {
throw new KettleException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.UnableToLoadJob", realJobname) + realDirectory);
}
}
}
break;
case REPOSITORY_BY_REFERENCE:
// Read the last revision by reference...
// NOTE(review): rep is dereferenced here without a null check — with no repository
// connected this surfaces as an NPE rather than a KettleException; confirm callers
// guarantee rep != null when the method is REPOSITORY_BY_REFERENCE.
mappingJobMeta = rep.loadJob(executorMeta.getJobObjectId(), null);
break;
default:
break;
}
// Pass some important information to the mapping transformation metadata:
// When the child parameter does exist in the parent parameters, overwrite the child parameter by the
// parent parameter.
// NOTE(review): mappingJobMeta can still be null here (e.g. REPOSITORY_BY_NAME with a
// connected repository but a missing directory) — verify the helpers below tolerate null.
StepWithMappingMeta.replaceVariableValues(mappingJobMeta, space);
if (executorMeta.getParameters().isInheritingAllVariables()) {
// All other parent parameters need to get copied into the child parameters (when the 'Inherit all
// variables from the transformation?' option is checked)
StepWithMappingMeta.addMissingVariables(mappingJobMeta, space);
}
mappingJobMeta.setRepository(rep);
mappingJobMeta.setMetaStore(metaStore);
// NOTE(review): setFilename(getFilename()) is a self-assignment and looks like a no-op —
// confirm whether a different source (e.g. realFilename) was intended here.
mappingJobMeta.setFilename(mappingJobMeta.getFilename());
return mappingJobMeta;
}
Usage of org.pentaho.di.repository.RepositoryDirectoryInterface in the pentaho-kettle project (by Pentaho).
Example: class JobExecutorMeta, method lookupRepositoryReferences.
@Override
public void lookupRepositoryReferences(Repository repository) throws KettleException {
    // The correct reference is stored in the job name and directory attributes...
    //
    RepositoryDirectoryInterface repositoryDirectoryInterface =
        RepositoryImportLocation.getRepositoryImportLocation().findDirectory(directoryPath);
    // FIX: this step executes a JOB, so the object id must be resolved with getJobId.
    // The original called getTransformationID(jobName, ...), which searches the
    // transformation namespace and yields a null/incorrect id for a job of that name.
    jobObjectId = repository.getJobId(jobName, repositoryDirectoryInterface);
}
Usage of org.pentaho.di.repository.RepositoryDirectoryInterface in the pentaho-kettle project (by Pentaho).
Example: class BrowseController, method init.
// end PDI-3326 hack
@SuppressWarnings("deprecation")
public void init(Repository repository) throws ControllerInitializationException {
    try {
        this.repository = repository;
        mainController = (MainController) this.getXulDomContainer().getEventHandler("mainController");
        // Load the directory tree and wrap it in its UI representation.
        RepositoryDirectoryInterface rootDir;
        try {
            if (repository instanceof RepositoryExtended) {
                // Extended repositories can fetch a filtered tree (jobs + transformations only)
                // in a single call, honoring the current user's admin visibility.
                RepositoryExtended extended = (RepositoryExtended) repository;
                rootDir = extended.loadRepositoryDirectoryTree("/", "*.ktr|*.kjb", -1, BooleanUtils.isTrue(repository.getUserInfo().isAdmin()), true, true);
            } else {
                rootDir = repository.loadRepositoryDirectoryTree();
            }
            this.repositoryDirectory = UIObjectRegistry.getInstance().constructUIRepositoryDirectory(rootDir, null, repository);
        } catch (UIObjectCreationException creationError) {
            // The registry could not build a custom UI wrapper; fall back to the stock one.
            this.repositoryDirectory = new UIRepositoryDirectory(repository.loadRepositoryDirectoryTree(), null, repository);
        }
        // Index directories by object id, then wire up the XUL bindings.
        dirMap = new HashMap<>();
        populateDirMap(repositoryDirectory);
        bf = new SwtBindingFactory();
        bf.setDocument(this.getXulDomContainer().getDocumentRoot());
        messageBox = (XulMessageBox) document.createElement("messagebox");
        createBindings();
    } catch (Exception e) {
        // Any failure during setup aborts controller initialization.
        throw new ControllerInitializationException(e);
    }
}
Usage of org.pentaho.di.repository.RepositoryDirectoryInterface in the pentaho-kettle project (by Pentaho).
Example: class UIRepositoryDirectory, method getRepositoryObjects.
// TODO: Abstract working model; should throw RepositoryException
// TODO: We will need a way to reset this cache when a directory or element changes
/**
 * Returns the UI objects contained in this repository directory — its sub-directories
 * followed by its jobs and transformations — building and caching the collection on
 * first use. Subsequent calls return the cached collection unchanged.
 *
 * @return the cached {@code UIRepositoryObjects} for this directory
 * @throws KettleException if listing jobs/transformations from the repository fails
 */
public UIRepositoryObjects getRepositoryObjects() throws KettleException {
    // Fast path: the collection was already materialized for this directory.
    if (kidElementCache != null) {
        return kidElementCache;
    }
    // FIX: the original wrapped this assignment in a second "kidElementCache == null"
    // check, which is always true after the early return above; the redundant (dead)
    // branch has been removed.
    kidElementCache = new UIRepositoryObjects() {
        private static final long serialVersionUID = 6901479331535375165L;

        // Keep the backing directory's element list in sync when a child is removed via the UI.
        public void onRemove(UIRepositoryObject child) {
            List<? extends RepositoryElementMetaInterface> dirRepoObjects = getDirectory().getRepositoryObjects();
            if (dirRepoObjects != null) {
                Iterator<? extends RepositoryElementMetaInterface> iter = dirRepoObjects.iterator();
                while (iter.hasNext()) {
                    RepositoryElementMetaInterface e = iter.next();
                    if (child.getObjectId().equals(e.getObjectId())) {
                        iter.remove();
                        return;
                    }
                }
            }
        }
    };
    // Sub-directories first...
    for (UIRepositoryObject child : getChildren()) {
        kidElementCache.add(child);
    }
    // ...then jobs and transformations, fetched from the repository if this directory
    // has not cached its element list yet.
    List<RepositoryElementMetaInterface> jobsAndTransformations = getDirectory().getRepositoryObjects();
    if (jobsAndTransformations == null) {
        RepositoryDirectoryInterface dir = getDirectory();
        jobsAndTransformations = rep.getJobAndTransformationObjects(dir.getObjectId(), false);
        dir.setRepositoryObjects(jobsAndTransformations);
    }
    for (RepositoryElementMetaInterface child : jobsAndTransformations) {
        if (child.getObjectType().equals(RepositoryObjectType.TRANSFORMATION)) {
            try {
                // Prefer the registry's (possibly plugin-provided) UI wrapper; fall back to the default.
                kidElementCache.add(UIObjectRegistry.getInstance().constructUITransformation(child, this, rep));
            } catch (UIObjectCreationException e) {
                kidElementCache.add(new UITransformation(child, this, rep));
            }
        } else if (child.getObjectType().equals(RepositoryObjectType.JOB)) {
            try {
                kidElementCache.add(UIObjectRegistry.getInstance().constructUIJob(child, this, rep));
            } catch (UIObjectCreationException e) {
                kidElementCache.add(new UIJob(child, this, rep));
            }
        }
    }
    return kidElementCache;
}
Aggregations