Search in sources:

Example 11 with RepositoryDirectoryInterface

use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.

From the class JobEntryExportRepository, the method execute:

/**
 * Executes the repository-export job entry: connects to the named repository and exports
 * its content (everything, jobs only, transformations only, a single folder, or one file
 * per folder) to the configured target.
 *
 * @param previousResult the result of the previous job entry execution; updated in place
 * @param nr the job entry number
 * @return the (same) result object, with the error count and success flag set
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    // Assume failure until the export completes; flipped at the bottom on success.
    result.setNrErrors(1);
    result.setResult(false);
    String realrepName = environmentSubstitute(repositoryname);
    String realusername = environmentSubstitute(username);
    String realpassword = Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(password));
    String realfoldername = environmentSubstitute(directoryPath);
    String realoutfilename = environmentSubstitute(targetfilename);
    // These four export types all write a single target file and share the same
    // filename / "file already exists" handling below.
    final boolean singleFileExport = export_type.equals(Export_All) || export_type.equals(Export_Jobs)
        || export_type.equals(Export_Trans) || export_type.equals(Export_One_Folder);
    if (singleFileExport) {
        realoutfilename = buildFilename(realoutfilename);
    }
    NrErrors = 0;
    successConditionBroken = false;
    limitErr = Const.toInt(environmentSubstitute(getNrLimit()), 10);
    try {
        file = KettleVFS.getFileObject(realoutfilename, this);
        if (file.exists()) {
            if (singleFileExport) {
                if (iffileexists.equals(If_FileExists_Fail)) {
                    logError(BaseMessages.getString(PKG, "JobExportRepository.Log.Failing", realoutfilename));
                    return result;
                } else if (iffileexists.equals(If_FileExists_Skip)) {
                    if (log.isDetailed()) {
                        logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.Exit", realoutfilename));
                    }
                    // Skipping an existing file counts as success.
                    result.setResult(true);
                    result.setNrErrors(0);
                    return result;
                } else if (iffileexists.equals(If_FileExists_Uniquename)) {
                    // Derive a fresh, unique filename in the same parent folder.
                    String parentFolder = KettleVFS.getFilename(file.getParent());
                    String shortFilename = file.getName().getBaseName();
                    shortFilename = buildUniqueFilename(shortFilename);
                    file = KettleVFS.getFileObject(parentFolder + Const.FILE_SEPARATOR + shortFilename, this);
                    if (log.isDetailed()) {
                        logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.NewFilename", file.toString()));
                    }
                }
            } else if (export_type.equals(Export_By_Folder)) {
                // The by-folder export requires the existing target to be a folder.
                if (file.getType() != FileType.FOLDER) {
                    logError(BaseMessages.getString(PKG, "JobExportRepository.Log.NotFolder", "" + file.getName()));
                    return result;
                }
            }
        } else {
            if (export_type.equals(Export_By_Folder)) {
                // Target folder does not exist: create it if configured to, otherwise bail out.
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.FolderNotExists", "" + file.getName()));
                }
                if (!createfolder) {
                    return result;
                }
                file.createFolder();
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.FolderCreated", file.toString()));
                }
            } else if (singleFileExport) {
                // Target file does not exist: make sure its parent folder does.
                if (!file.getParent().exists()) {
                    if (log.isDetailed()) {
                        logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.FolderNotExists", "" + file.getParent().toString()));
                    }
                    if (createfolder) {
                        file.getParent().createFolder();
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.FolderCreated", file.getParent().toString()));
                        }
                    } else {
                        return result;
                    }
                }
            }
        }
        realoutfilename = KettleVFS.getFilename(this.file);
        // connect to repository
        connectRep(log, realrepName, realusername, realpassword);
        IRepositoryExporter exporter = repository.getExporter();
        if (export_type.equals(Export_All)) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.StartingExportAllRep", realoutfilename));
            }
            exporter.exportAllObjects(null, realoutfilename, null, "all");
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.EndExportAllRep", realoutfilename));
            }
            if (add_result_filesname) {
                addFileToResultFilenames(realoutfilename, log, result, parentJob);
            }
        } else if (export_type.equals(Export_Jobs)) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.StartingExportJobsRep", realoutfilename));
            }
            exporter.exportAllObjects(null, realoutfilename, null, "jobs");
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.EndExportJobsRep", realoutfilename));
            }
            if (add_result_filesname) {
                addFileToResultFilenames(realoutfilename, log, result, parentJob);
            }
        } else if (export_type.equals(Export_Trans)) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.StartingExportTransRep", realoutfilename));
            }
            exporter.exportAllObjects(null, realoutfilename, null, "trans");
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.EndExportTransRep", realoutfilename));
            }
            if (add_result_filesname) {
                addFileToResultFilenames(realoutfilename, log, result, parentJob);
            }
        } else if (export_type.equals(Export_One_Folder)) {
            // Look the folder up directly; no need for a throwaway RepositoryDirectory instance.
            RepositoryDirectoryInterface directory = repository.findDirectory(realfoldername);
            if (directory != null) {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.ExpAllFolderRep", directoryPath, realoutfilename));
                }
                exporter.exportAllObjects(null, realoutfilename, directory, "all");
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.EndExpAllFolderRep", directoryPath, realoutfilename));
                }
                if (add_result_filesname) {
                    addFileToResultFilenames(realoutfilename, log, result, parentJob);
                }
            } else {
                logError(BaseMessages.getString(PKG, "JobExportRepository.Error.CanNotFindFolderInRep", realfoldername, realrepName));
                return result;
            }
        } else if (export_type.equals(Export_By_Folder)) {
            // User must give a destination folder..
            RepositoryDirectoryInterface directory = this.repository.loadRepositoryDirectoryTree().findRoot();
            // Loop over all the directory id's
            ObjectId[] dirids = directory.getDirectoryIDs();
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobExportRepository.Log.TotalFolders", "" + dirids.length));
            }
            for (int d = 0; d < dirids.length && !parentJob.isStopped(); d++) {
                // Success condition broken?
                if (successConditionBroken) {
                    logError(BaseMessages.getString(PKG, "JobExportRepository.Error.SuccessConditionbroken", "" + NrErrors));
                    throw new Exception(BaseMessages.getString(PKG, "JobExportRepository.Error.SuccessConditionbroken", "" + NrErrors));
                }
                RepositoryDirectoryInterface repdir = directory.findDirectory(dirids[d]);
                if (!processOneFolder(parentJob, result, log, repdir, realoutfilename, d, dirids.length)) {
                    // updateErrors
                    updateErrors();
                }
            }
        // end for
        }
    } catch (Exception e) {
        updateErrors();
        logError(BaseMessages.getString(PKG, "JobExportRepository.UnExpectedError", e.toString()));
        logError("Stack trace: " + Const.CR + Const.getStackTracker(e));
    } finally {
        // Always release the repository connection and VFS handle, even on failure.
        if (this.repository != null) {
            this.repository.disconnect();
            this.repository = null;
        }
        if (this.repositoryMeta != null) {
            this.repositoryMeta = null;
        }
        if (this.repsinfo != null) {
            this.repsinfo.clear();
            this.repsinfo = null;
        }
        if (this.file != null) {
            try {
                this.file.close();
                this.file = null;
            } catch (Exception e) {
            // Ignore close errors
            }
        }
    }
    // Success Condition
    result.setNrErrors(NrErrors);
    if (getSuccessStatus()) {
        result.setResult(true);
    }
    return result;
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) RepositoryDirectory(org.pentaho.di.repository.RepositoryDirectory) ObjectId(org.pentaho.di.repository.ObjectId) IRepositoryExporter(org.pentaho.di.repository.IRepositoryExporter) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) Result(org.pentaho.di.core.Result)

Example 12 with RepositoryDirectoryInterface

use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.

From the class JobEntryJob, the method lookupRepositoryReferences:

/**
 * Re-resolves this job entry's repository reference after an import, using the job
 * name and directory attributes stored on the entry.
 *
 * @param repository
 *          the repository to reference.
 */
@Override
public void lookupRepositoryReferences(Repository repository) throws KettleException {
    // Resolve the directory from the import location, then look the job up by name.
    jobObjectId = repository.getJobId(jobname, RepositoryImportLocation.getRepositoryImportLocation().findDirectory(directory));
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface)

Example 13 with RepositoryDirectoryInterface

use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryTransDelegate, the method getTransformationsWithIDList:

/**
 * Resolves a list of transformation object IDs to fully qualified
 * "directory path + transformation name" strings.
 *
 * @param ids the transformation object IDs to resolve (null entries are skipped)
 * @return an array of the same length as {@code ids}; an entry stays null when its
 *         id was null or the transformation row could not be found
 * @throws KettleException when the repository lookup fails
 */
public String[] getTransformationsWithIDList(ObjectId[] ids) throws KettleException {
    String[] transList = new String[ids.length];
    for (int i = 0; i < ids.length; i++) {
        ObjectId id_transformation = ids[i];
        if (id_transformation != null) {
            RowMetaAndData transRow = getTransformation(id_transformation);
            if (transRow != null) {
                String transName = transRow.getString(KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, "<name not found>");
                long id_directory = transRow.getInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, -1L);
                RepositoryDirectoryInterface dir = repository.loadRepositoryDirectoryTree().findDirectory(new LongObjectId(id_directory));
                // findDirectory can return null (e.g. stale directory id); fall back to the
                // bare transformation name instead of throwing an NPE.
                transList[i] = (dir != null) ? dir.getPathObjectCombination(transName) : transName;
            }
        }
    }
    return transList;
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) LongObjectId(org.pentaho.di.repository.LongObjectId)

Example 14 with RepositoryDirectoryInterface

use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.

From the class StepWithMappingMeta, the method loadMappingMeta:

/**
 * Loads the mapping (sub-)transformation referenced by the given step metadata. Resolution
 * depends on the step's specification method: by filename (trying the repository first when
 * one is connected, then the file system), by repository directory + name, or by repository
 * object reference.
 *
 * <p>NOTE(review): in the REPOSITORY_BY_REFERENCE branch {@code rep} is dereferenced
 * without a null check — confirm callers guarantee a repository in that mode.</p>
 *
 * @param executorMeta the step metadata holding the reference to the mapping transformation
 * @param rep the repository to load from; may be null, in which case file-based loading is used
 * @param metaStore the metastore handed to the loaded transformation
 * @param space the parent variable space used for variable substitution and propagation
 * @param share when true, parent variables missing in the child are copied into it
 * @param r resolver — see inline comment below on why parentVariables is passed as null
 * @return the loaded TransMeta, or null when nothing could be resolved
 * @throws KettleException when loading fails
 */
public static synchronized TransMeta loadMappingMeta(StepWithMappingMeta executorMeta, Repository rep, IMetaStore metaStore, VariableSpace space, boolean share) throws KettleException {
    TransMeta mappingTransMeta = null;
    CurrentDirectoryResolver r = new CurrentDirectoryResolver();
    // send parentVariables = null we don't need it here for resolving resolveCurrentDirectory.
    // Otherwise we destroy child variables and the option "Inherit all variables from the transformation" is enabled always.
    VariableSpace tmpSpace = r.resolveCurrentDirectory(executorMeta.getSpecificationMethod(), null, rep, executorMeta.getParentStepMeta(), executorMeta.getFileName());
    switch(executorMeta.getSpecificationMethod()) {
        case FILENAME:
            String realFilename = tmpSpace.environmentSubstitute(executorMeta.getFileName());
            try {
                // Don't set internal variables: they belong to the parent thread!
                if (rep != null) {
                    // need to try to load from the repository
                    realFilename = r.normalizeSlashes(realFilename);
                    try {
                        // Split into directory part and short filename.
                        // NOTE(review): assumes realFilename contains a "/" — substring with
                        // lastIndexOf == -1 would throw; presumably normalizeSlashes guarantees
                        // an absolute path, but confirm.
                        String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                        String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1);
                        RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                        mappingTransMeta = rep.loadTransformation(tmpFilename, dir, null, true, null);
                    } catch (KettleException ke) {
                        // try without extension
                        if (realFilename.endsWith(Const.STRING_TRANS_DEFAULT_EXT)) {
                            try {
                                // Strip the .ktr extension from the short name and retry.
                                String tmpFilename = realFilename.substring(realFilename.lastIndexOf("/") + 1, realFilename.indexOf("." + Const.STRING_TRANS_DEFAULT_EXT));
                                String dirStr = realFilename.substring(0, realFilename.lastIndexOf("/"));
                                RepositoryDirectoryInterface dir = rep.findDirectory(dirStr);
                                mappingTransMeta = rep.loadTransformation(tmpFilename, dir, null, true, null);
                            } catch (KettleException ke2) {
                            // fall back to try loading from file system (transMeta is going to be null)
                            }
                        }
                    }
                }
                // Repository lookup failed or no repository: load from the XML file itself.
                if (mappingTransMeta == null) {
                    mappingTransMeta = new TransMeta(realFilename, metaStore, rep, true, tmpSpace, null);
                    LogChannel.GENERAL.logDetailed("Loading transformation from repository", "Transformation was loaded from XML file [" + realFilename + "]");
                }
            } catch (Exception e) {
                throw new KettleException(BaseMessages.getString(PKG, "StepWithMappingMeta.Exception.UnableToLoadTrans"), e);
            }
            break;
        case REPOSITORY_BY_NAME:
            String realTransname = tmpSpace.environmentSubstitute(executorMeta.getTransName());
            String realDirectory = tmpSpace.environmentSubstitute(executorMeta.getDirectoryPath());
            if (rep != null) {
                if (!Utils.isEmpty(realTransname) && !Utils.isEmpty(realDirectory)) {
                    realDirectory = r.normalizeSlashes(realDirectory);
                    RepositoryDirectoryInterface repdir = rep.findDirectory(realDirectory);
                    if (repdir != null) {
                        try {
                            // reads the last revision in the repository...
                            mappingTransMeta = rep.loadTransformation(realTransname, repdir, null, true, null);
                            // TODO: FIXME: pass in metaStore to repository?
                            LogChannel.GENERAL.logDetailed("Loading transformation from repository", "Executor transformation [" + realTransname + "] was loaded from the repository");
                        } catch (Exception e) {
                            throw new KettleException("Unable to load transformation [" + realTransname + "]", e);
                        }
                    }
                }
            } else {
                // rep is null, let's try loading by filename
                try {
                    mappingTransMeta = new TransMeta(realDirectory + "/" + realTransname, metaStore, null, true, tmpSpace, null);
                } catch (KettleException ke) {
                    try {
                        // add .ktr extension and try again
                        mappingTransMeta = new TransMeta(realDirectory + "/" + realTransname + "." + Const.STRING_TRANS_DEFAULT_EXT, metaStore, null, true, tmpSpace, null);
                    } catch (KettleException ke2) {
                        throw new KettleException(BaseMessages.getString(PKG, "StepWithMappingMeta.Exception.UnableToLoadTrans", realTransname) + realDirectory);
                    }
                }
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            // Read the last revision by reference...
            mappingTransMeta = rep.loadTransformation(executorMeta.getTransObjectId(), null);
            break;
        default:
            break;
    }
    if (mappingTransMeta == null) {
        // skip warning
        return null;
    }
    // When the child parameter does exist in the parent parameters, overwrite the child parameter by the
    // parent parameter.
    replaceVariableValues(mappingTransMeta, space);
    if (share) {
        // All other parent parameters need to get copied into the child parameters  (when the 'Inherit all
        // variables from the transformation?' option is checked)
        addMissingVariables(mappingTransMeta, space);
    }
    mappingTransMeta.setRepository(rep);
    mappingTransMeta.setMetaStore(metaStore);
    // NOTE(review): setFilename(getFilename()) looks like a no-op — presumably kept for a
    // side effect inside setFilename (normalization or listeners); verify before removing.
    mappingTransMeta.setFilename(mappingTransMeta.getFilename());
    return mappingTransMeta;
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) KettleException(org.pentaho.di.core.exception.KettleException) VariableSpace(org.pentaho.di.core.variables.VariableSpace) CurrentDirectoryResolver(org.pentaho.di.core.util.CurrentDirectoryResolver) KettleException(org.pentaho.di.core.exception.KettleException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException)

Example 15 with RepositoryDirectoryInterface

use of org.pentaho.di.repository.RepositoryDirectoryInterface in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryDirectoryDelegate, the method createRepositoryDirectory:

/**
 * Create a new directory, possibly by creating several sub-directories of / at the same time.
 *
 * @param parentDirectory
 *          the parent directory
 * @param directoryPath
 *          The path to the new Repository Directory, to be created.
 * @return The created sub-directory
 * @throws KettleException
 *           In case something goes wrong
 */
public RepositoryDirectoryInterface createRepositoryDirectory(RepositoryDirectoryInterface parentDirectory, String directoryPath) throws KettleException {
    String[] path = Const.splitPath(directoryPath, RepositoryDirectory.DIRECTORY_SEPARATOR);
    RepositoryDirectoryInterface parent = parentDirectory;
    for (int level = 0; level < path.length; level++) {
        RepositoryDirectoryInterface rd = parent.findChild(path[level]);
        if (rd == null) {
            // This child directory doesn't exist yet: create it, persist it, and don't
            // forget to add it to the in-memory tree.
            rd = new RepositoryDirectory(parent, path[level]);
            saveRepositoryDirectory(rd);
            parent.addSubdirectory(rd);
        }
        // Descend one level, whether the child already existed or was just created.
        parent = rd;
    }
    return parent;
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) RepositoryDirectory(org.pentaho.di.repository.RepositoryDirectory) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString)

Aggregations

RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface)163 KettleException (org.pentaho.di.core.exception.KettleException)68 Test (org.junit.Test)32 TransMeta (org.pentaho.di.trans.TransMeta)30 ObjectId (org.pentaho.di.repository.ObjectId)27 JobMeta (org.pentaho.di.job.JobMeta)23 Repository (org.pentaho.di.repository.Repository)22 ArrayList (java.util.ArrayList)17 ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog)17 RepositoryDirectory (org.pentaho.di.repository.RepositoryDirectory)15 IOException (java.io.IOException)14 RepositoryElementMetaInterface (org.pentaho.di.repository.RepositoryElementMetaInterface)13 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)11 RepositoryObject (org.pentaho.di.repository.RepositoryObject)11 FileObject (org.apache.commons.vfs2.FileObject)10 List (java.util.List)8 TreeItem (org.eclipse.swt.widgets.TreeItem)8 KettleFileException (org.pentaho.di.core.exception.KettleFileException)8 Date (java.util.Date)7 FileSystemException (org.apache.commons.vfs2.FileSystemException)7