use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.
In the class PanCommandExecutor, the method executeRepositoryBasedCommand:
public Trans executeRepositoryBasedCommand(Repository repository, RepositoryMeta repositoryMeta, final String dirName, final String transName, final String listTrans, final String listDirs, final String exportRepo) throws Exception {
  try {
    if (repository != null && repositoryMeta != null) {
      // Define and connect to the repository...
      logDebug("Pan.Log.Allocate&ConnectRep");
      // Default is the root directory
      RepositoryDirectoryInterface directory = repository.loadRepositoryDirectoryTree();
      // Add the IMetaStore of the repository to our delegation
      if (repository.getMetaStore() != null && getMetaStore() != null) {
        getMetaStore().addMetaStore(repository.getMetaStore());
      }
      // Find the directory name if one is specified...
      if (!Utils.isEmpty(dirName)) {
        directory = directory.findDirectory(dirName);
      }
      if (directory != null) {
        // Check username, password
        logDebug("Pan.Log.CheckSuppliedUserPass");
        // If a transformation name was supplied, load that transformation from the repository
        if (!Utils.isEmpty(transName)) {
          logDebug("Pan.Log.LoadTransInfo");
          TransMeta transMeta = repository.loadTransformation(transName, directory, null, true, null);
          logDebug("Pan.Log.AllocateTrans");
          Trans trans = new Trans(transMeta);
          trans.setRepository(repository);
          trans.setMetaStore(getMetaStore());
          // return the transformation loaded from the repository
          return trans;
        } else if (YES.equalsIgnoreCase(listTrans)) {
          // List the transformations in the repository
          printRepositoryStoredTransformations(repository, directory);
        } else if (YES.equalsIgnoreCase(listDirs)) {
          // List the directories in the repository
          printRepositoryDirectories(repository, directory);
        } else if (!Utils.isEmpty(exportRepo)) {
          // Export the repository
          System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Log.ExportingObjectsRepToFile", "" + exportRepo));
          repository.getExporter().exportAllObjects(null, exportRepo, directory, "all");
          System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Log.FinishedExportObjectsRepToFile", "" + exportRepo));
        } else {
          System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.NoTransNameSupplied"));
        }
      } else {
        System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.CanNotFindSpecifiedDirectory", "" + dirName));
      }
    } else {
      System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.NoRepProvided"));
    }
  } catch (Exception e) {
    getLog().logError(e.getMessage());
  }
  return null;
}
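For context, a minimal usage sketch of the method above, assuming an already connected Repository and a PanCommandExecutor instance; the variable names and argument values here are illustrative and not taken from the Pan sources:

// Illustrative caller: executor, repository and repositoryMeta are assumed to be set up elsewhere.
Trans trans = executor.executeRepositoryBasedCommand(repository, repositoryMeta, "/home/etl", "load_customers", null, null, null);
if (trans != null) {
  trans.execute(null);        // start the transformation threads
  trans.waitUntilFinished();  // block until all steps have finished
  if (trans.getErrors() > 0) {
    // react to step errors here
  }
}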
use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.
In the class JobEntryJob, the method getJobMeta:
public JobMeta getJobMeta(Repository rep, IMetaStore metaStore, VariableSpace space) throws KettleException {
  JobMeta jobMeta = null;
  try {
    CurrentDirectoryResolver r = new CurrentDirectoryResolver();
    VariableSpace tmpSpace = r.resolveCurrentDirectory(specificationMethod, space, rep, parentJob, getFilename());
    switch (specificationMethod) {
      case FILENAME:
        String realFilename = tmpSpace.environmentSubstitute(getFilename());
        if (rep != null) {
          // need to try to load from the repository
          realFilename = r.normalizeSlashes(realFilename);
          try {
            String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
            String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);
            RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
            jobMeta = rep.loadJob(tmpFilename, dir, null, null);
          } catch (KettleException ke) {
            // try without extension
            if (realFilename.endsWith(Const.STRING_JOB_DEFAULT_EXT)) {
              try {
                String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1, realFilename.indexOf("." + Const.STRING_JOB_DEFAULT_EXT));
                String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                jobMeta = rep.loadJob(tmpFilename, dir, null, null);
              } catch (KettleException ke2) {
                // fall back to try loading from the file system (jobMeta is going to be null)
              }
            }
          }
        }
        if (jobMeta == null) {
          jobMeta = new JobMeta(tmpSpace, realFilename, rep, metaStore, null);
        }
        break;
      case REPOSITORY_BY_NAME:
        String realDirectory = tmpSpace.environmentSubstitute(getDirectory());
        String realJobName = tmpSpace.environmentSubstitute(getJobName());
        if (rep != null) {
          realDirectory = r.normalizeSlashes(realDirectory);
          RepositoryDirectoryInterface repositoryDirectory = rep.loadRepositoryDirectoryTree().findDirectory(realDirectory);
          if (repositoryDirectory == null) {
            throw new KettleException("Unable to find repository directory [" + Const.NVL(realDirectory, "") + "]");
          }
          // reads the last revision of the job from the repository
          jobMeta = rep.loadJob(realJobName, repositoryDirectory, null, null);
        } else {
          // rep is null, let's try loading by filename
          try {
            jobMeta = new JobMeta(tmpSpace, realDirectory + "/" + realJobName, rep, metaStore, null);
          } catch (KettleException ke) {
            try {
              // add .kjb extension and try again
              jobMeta = new JobMeta(tmpSpace, realDirectory + "/" + realJobName + "." + Const.STRING_JOB_DEFAULT_EXT, rep, metaStore, null);
            } catch (KettleException ke2) {
              ke2.printStackTrace();
              throw new KettleException("Could not execute job specified in a repository since we're not connected to one");
            }
          }
        }
        break;
      case REPOSITORY_BY_REFERENCE:
        if (rep != null) {
          // Load the last version...
          jobMeta = rep.loadJob(jobObjectId, null);
          break;
        } else {
          throw new KettleException("Could not execute job specified in a repository since we're not connected to one");
        }
      default:
        throw new KettleException("The specified object location specification method '" + specificationMethod + "' is not yet supported in this job entry.");
    }
    if (jobMeta != null) {
      jobMeta.setRepository(rep);
      jobMeta.setMetaStore(metaStore);
    }
    return jobMeta;
  } catch (Exception e) {
    throw new KettleException("Unexpected error during job metadata load", e);
  }
}
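A hedged sketch of how a caller might run the job described by the returned JobMeta; the JobEntryJob instance (entry) and the rep, metaStore and space arguments are assumed to be set up elsewhere, and the wrapper code is illustrative rather than taken from the Kettle sources:

// Illustrative only: entry, rep, metaStore and space come from the surrounding context.
JobMeta jobMeta = entry.getJobMeta(rep, metaStore, space);
Job job = new Job(rep, jobMeta);  // Kettle's runtime wrapper around a JobMeta
job.start();                      // Job extends Thread; this starts execution
job.waitUntilFinished();          // block until the job has completed
if (job.getErrors() > 0) {
  // handle failed job entries here
}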
use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.
In the class JobEntryTrans, the method getTransMeta:
public TransMeta getTransMeta(Repository rep, IMetaStore metaStore, VariableSpace space) throws KettleException {
  try {
    TransMeta transMeta = null;
    CurrentDirectoryResolver r = new CurrentDirectoryResolver();
    VariableSpace tmpSpace = r.resolveCurrentDirectory(specificationMethod, space, rep, parentJob, getFilename());
    switch (specificationMethod) {
      case FILENAME:
        String realFilename = tmpSpace.environmentSubstitute(getFilename());
        if (rep != null) {
          if (StringUtils.isBlank(realFilename)) {
            throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.MissingTransFileName"));
          }
          realFilename = r.normalizeSlashes(realFilename);
          // need to try to load from the repository
          try {
            String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
            String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);
            RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
            transMeta = rep.loadTransformation(tmpFilename, dir, null, true, null);
          } catch (KettleException ke) {
            // try without extension
            if (realFilename.endsWith(Const.STRING_TRANS_DEFAULT_EXT)) {
              try {
                String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1, realFilename.indexOf("." + Const.STRING_TRANS_DEFAULT_EXT));
                String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                transMeta = rep.loadTransformation(tmpFilename, dir, null, true, null);
              } catch (KettleException ke2) {
                // fall back to try loading from the file system (transMeta is going to be null)
              }
            }
          }
        }
        if (transMeta == null) {
          logBasic("Loading transformation from XML file [" + realFilename + "]");
          transMeta = new TransMeta(realFilename, metaStore, null, true, null, null);
        }
        break;
      case REPOSITORY_BY_NAME:
        String transname = tmpSpace.environmentSubstitute(getTransname());
        String realDirectory = tmpSpace.environmentSubstitute(getDirectory());
        logBasic(BaseMessages.getString(PKG, "JobTrans.Log.LoadingTransRepDirec", transname, realDirectory));
        if (rep != null) {
          //
          // It only makes sense to try to load from the repository when the
          // repository is also filled in.
          //
          // It reads the last revision from the repository.
          //
          realDirectory = r.normalizeSlashes(realDirectory);
          RepositoryDirectoryInterface repositoryDirectory = rep.findDirectory(realDirectory);
          transMeta = rep.loadTransformation(transname, repositoryDirectory, null, true, null);
        } else {
          // rep is null, let's try loading by filename
          try {
            transMeta = new TransMeta(realDirectory + "/" + transname, metaStore, null, true, this, null);
          } catch (KettleException ke) {
            try {
              // add .ktr extension and try again
              transMeta = new TransMeta(realDirectory + "/" + transname + "." + Const.STRING_TRANS_DEFAULT_EXT, metaStore, null, true, this, null);
            } catch (KettleException ke2) {
              throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.NoRepDefined"), ke2);
            }
          }
        }
        break;
      case REPOSITORY_BY_REFERENCE:
        if (transObjectId == null) {
          throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.ReferencedTransformationIdIsNull"));
        }
        if (rep != null) {
          // Load the last revision
          transMeta = rep.loadTransformation(transObjectId, null);
        }
        break;
      default:
        throw new KettleException("The specified object location specification method '" + specificationMethod + "' is not yet supported in this job entry.");
    }
    if (transMeta != null) {
      // set Internal.Entry.Current.Directory again because it was changed
      transMeta.setInternalKettleVariables();
      // When the child parameter does exist in the parent parameters, overwrite the child parameter by the
      // parent parameter.
      StepWithMappingMeta.replaceVariableValues(transMeta, space);
      if (isPassingAllParameters()) {
        // All other parent parameters need to get copied into the child parameters (when the 'Inherit all
        // variables from the transformation?' option is checked)
        StepWithMappingMeta.addMissingVariables(transMeta, space);
      }
      // Pass repository and metastore references
      transMeta.setRepository(rep);
      transMeta.setMetaStore(metaStore);
    }
    return transMeta;
  } catch (final KettleException ke) {
    // if we get a KettleException, simply re-throw it
    throw ke;
  } catch (Exception e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.MetaDataLoad"), e);
  }
}
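To make the FILENAME branch above concrete, here is how the path splitting behaves for an invented repository path (the value of realFilename is purely illustrative):

// Illustrative value; Const.STRING_TRANS_DEFAULT_EXT is "ktr"
String realFilename = "/public/etl/load_customers.ktr";
String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));        // "/public/etl"
String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);  // "load_customers.ktr"
// If loading "load_customers.ktr" from the repository fails, the fallback strips the
// ".ktr" extension and retries with "load_customers" before falling back to the file system.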
use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.
In the class KettleFileRepository, the method getObjectInformation:
@Override
public RepositoryObject getObjectInformation(ObjectId objectId, RepositoryObjectType objectType) throws KettleException {
  try {
    // In the file repository, the object id is the path relative to the base directory
    String filename = calcDirectoryName(null);
    if (objectId.getId().startsWith("/")) {
      filename += objectId.getId().substring(1);
    } else {
      filename += objectId.getId();
    }
    FileObject fileObject = KettleVFS.getFileObject(filename);
    if (!fileObject.exists()) {
      return null;
    }
    // Strip the file extension from the base name to get the object name
    FileName fname = fileObject.getName();
    String name = fname.getBaseName();
    if (!Utils.isEmpty(fname.getExtension()) && name.length() > fname.getExtension().length()) {
      name = name.substring(0, name.length() - fname.getExtension().length() - 1);
    }
    // Derive the repository directory path relative to the repository's base directory
    String filePath = fileObject.getParent().getName().getPath();
    final FileObject baseDirObject = KettleVFS.getFileObject(repositoryMeta.getBaseDirectory());
    final int baseDirObjectPathLength = baseDirObject.getName().getPath().length();
    final String dirPath = baseDirObjectPathLength <= filePath.length() ? filePath.substring(baseDirObjectPathLength) : "/";
    RepositoryDirectoryInterface directory = loadRepositoryDirectoryTree().findDirectory(dirPath);
    Date lastModified = new Date(fileObject.getContent().getLastModifiedTime());
    return new RepositoryObject(objectId, name, directory, "-", lastModified, objectType, "", false);
  } catch (Exception e) {
    throw new KettleException("Unable to get object information for object with id=" + objectId, e);
  }
}
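A hedged example of how this lookup might be called against a KettleFileRepository; the path inside the StringObjectId is invented for illustration, and the accessors used on the returned RepositoryObject are the standard getters from org.pentaho.di.repository:

// Illustrative: in the file repository, object ids are paths relative to the base directory.
ObjectId objectId = new StringObjectId("/home/etl/load_customers.ktr");
RepositoryObject info = repository.getObjectInformation(objectId, RepositoryObjectType.TRANSFORMATION);
if (info != null) {
  System.out.println(info.getName());                           // "load_customers" (extension stripped)
  System.out.println(info.getRepositoryDirectory().getPath());  // e.g. "/home/etl"
  System.out.println(info.getModifiedDate());                   // last modified time of the backing file
}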
use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.
In the class KettleFileRepository, the method getDirectoryNames:
@Override
public String[] getDirectoryNames(ObjectId id_directory) throws KettleException {
  RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface directory = tree.findDirectory(id_directory);
  String[] names = new String[directory.getNrSubdirectories()];
  for (int i = 0; i < names.length; i++) {
    names[i] = directory.getSubdirectory(i).getName();
  }
  return names;
}
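Finally, a short sketch of listing subdirectory names through this method; the repository variable and the "/home" path are illustrative:

// Illustrative: list the names of the subdirectories of a known directory.
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
RepositoryDirectoryInterface home = tree.findDirectory("/home");  // invented path
String[] subdirs = repository.getDirectoryNames(home.getObjectId());
for (String name : subdirs) {
  System.out.println(name);
}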