Usage example of org.pentaho.platform.api.repository2.unified.RepositoryFile in the pentaho-kettle project (by pentaho).
Example: class PurRepository, method loadRepositoryDirectoryTree.
@Deprecated
@Override
public RepositoryDirectoryInterface loadRepositoryDirectoryTree(boolean eager) throws KettleException {
// Forces a rebuild of the repository directory tree. The freshly built root is
// stored in rootRef (a SoftReference) so later getRootDir() calls can reuse it.
RepositoryDirectoryInterface freshRoot = eager
    // Eager: walk the whole tree up front starting at the repository root path.
    ? initRepositoryDirectoryTree(loadRepositoryFileTree(ClientRepositoryPaths.getRootFolderPath()))
    // Lazy: wrap only the root file; children are resolved on demand.
    : new LazyUnifiedRepositoryDirectory(pur.getFile("/"), null, pur, purRepositoryServiceRegistry);
rootRef.setRef(freshRoot);
return freshRoot;
}
Usage example of org.pentaho.platform.api.repository2.unified.RepositoryFile in the pentaho-kettle project (by pentaho).
Example: class PurRepository, method getClusterIDs.
/**
 * Returns the object IDs of all cluster schemas stored in the repository.
 *
 * @param includeDeleted
 *          whether IDs of deleted cluster schemas should be included as well
 * @return IDs of all cluster schema files found, in repository order
 * @throws KettleException
 *           if the underlying repository lookup fails for any reason
 */
@Override
public ObjectId[] getClusterIDs(boolean includeDeleted) throws KettleException {
try {
List<RepositoryFile> children = getAllFilesOfType(null, RepositoryObjectType.CLUSTER_SCHEMA, includeDeleted);
// Presize: the number of IDs is known up front.
List<ObjectId> ids = new ArrayList<ObjectId>(children.size());
for (RepositoryFile file : children) {
ids.add(new StringObjectId(file.getId().toString()));
}
return ids.toArray(new ObjectId[0]);
} catch (Exception e) {
// Boundary method: wrap anything thrown below into the repository's exception type.
throw new KettleException("Unable to get all cluster schema IDs", e);
}
}
Usage example of org.pentaho.platform.api.repository2.unified.RepositoryFile in the pentaho-kettle project (by pentaho).
Example: class PurRepository, method loadJobs.
/**
 * Load all jobs referenced by {@code files} using batched repository reads.
 * <p>
 * File data and version summaries are fetched in bulk ({@code getDataForReadInBatch},
 * {@code getVersionSummaryInBatch}) and the three lists are consumed in lockstep:
 * the results are positionally aligned with {@code files}, so the iterators must
 * advance together exactly once per loop pass.
 *
 * @param monitor
 *          progress monitor; may be {@code null}. When present, loading stops early if cancelled.
 * @param log
 * @param files
 *          Job files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded jobs (files that failed to load are logged and skipped, not returned)
 * @throws KettleException
 *           Error loading data for jobs from repository
 */
protected List<JobMeta> loadJobs(final ProgressMonitorListener monitor, final LogChannelInterface log, final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
List<JobMeta> jobs = new ArrayList<JobMeta>(files.size());
List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
// The three iterators below are index-aligned; advance them together only.
Iterator<RepositoryFile> filesIter = files.iterator();
Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
Iterator<VersionSummary> versionsIter = versions.iterator();
// Stop early if the (optional) monitor reports cancellation.
while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
RepositoryFile file = filesIter.next();
NodeRepositoryFileData fileData = filesDataIter.next();
VersionSummary version = versionsIter.next();
try {
// Parent directory path = everything before the last path separator.
// NOTE(review): a path with no separator would throw here; that case is
// swallowed by the catch below and the file is skipped.
String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
log.logDetailed("Loading/Exporting job [{0} : {1}] ({2})", dirPath, file.getTitle(), // $NON-NLS-1$
file.getPath());
if (monitor != null) {
// $NON-NLS-1$ //$NON-NLS-2$
monitor.subTask("Exporting job [" + file.getPath() + "]");
}
JobMeta jobMeta = buildJobMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
// Give extension points a chance to react before the job is returned.
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
jobs.add(jobMeta);
} catch (Exception ex) {
// Best-effort: a single broken job is logged and skipped so the rest still load.
// $NON-NLS-1$ //$NON-NLS-2$
log.logError("Unable to load job [" + file.getPath() + "]", ex);
}
}
return jobs;
}
Usage example of org.pentaho.platform.api.repository2.unified.RepositoryFile in the pentaho-kettle project (by pentaho).
Example: class PurRepository, method getFilesByType.
/**
 * Fetch {@link RepositoryFile}s grouped by {@code RepositoryObjectType}.
 *
 * @param allFiles
 *          output list that every fetched file is also appended to, in fetch order
 * @param types
 *          the object types to fetch files for
 * @return ordered map from object type to the files of that type
 * @throws KettleException
 *           if the repository lookup for any type fails
 */
private LinkedHashMap<RepositoryObjectType, List<RepositoryFile>> getFilesByType(List<RepositoryFile> allFiles, RepositoryObjectType... types) throws KettleException {
// LinkedHashMap on purpose: iteration order must follow the requested type order,
// because callers later pair these files positionally with batched data/version reads.
LinkedHashMap<RepositoryObjectType, List<RepositoryFile>> result = new LinkedHashMap<RepositoryObjectType, List<RepositoryFile>>();
// The repository does not preserve the object type on RepositoryFile,
// so files have to be queried one type at a time.
for (RepositoryObjectType objectType : types) {
try {
List<RepositoryFile> filesOfType = getAllFilesOfType(null, objectType, false);
result.put(objectType, filesOfType);
allFiles.addAll(filesOfType);
} catch (Exception ex) {
// $NON-NLS-1$
throw new KettleException(String.format("Unable to get all files of type [%s]", objectType), ex);
}
}
return result;
}
Usage example of org.pentaho.platform.api.repository2.unified.RepositoryFile in the pentaho-kettle project (by pentaho).
Example: class PurRepository, method loadTransformations.
/**
 * Load all transformations referenced by {@code files} using batched repository reads.
 * <p>
 * File data and version summaries are fetched in bulk and the three lists are
 * consumed in lockstep: the batch results are positionally aligned with
 * {@code files}, so the iterators must advance together exactly once per pass.
 *
 * @param monitor
 *          progress monitor; may be {@code null}. When present, loading stops early if cancelled.
 * @param log
 * @param files
 *          Transformation files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded transformations (files that failed to load are logged and skipped, not returned)
 * @throws KettleException
 *           Error loading data for transformations from repository
 */
protected List<TransMeta> loadTransformations(final ProgressMonitorListener monitor, final LogChannelInterface log, final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
List<TransMeta> transformations = new ArrayList<TransMeta>(files.size());
List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
// The three iterators below are index-aligned; advance them together only.
Iterator<RepositoryFile> filesIter = files.iterator();
Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
Iterator<VersionSummary> versionsIter = versions.iterator();
// Stop early if the (optional) monitor reports cancellation.
while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
RepositoryFile file = filesIter.next();
NodeRepositoryFileData fileData = filesDataIter.next();
VersionSummary version = versionsIter.next();
// Parent directory path = everything before the last path separator.
// NOTE(review): unlike loadJobs, this is computed OUTSIDE the try, so a path
// with no separator would abort the whole batch rather than skip one file —
// confirm whether that difference is intentional.
String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
try {
log.logDetailed("Loading/Exporting transformation [{0} : {1}] ({2})", dirPath, file.getTitle(), file.getPath());
if (monitor != null) {
// $NON-NLS-1$ //$NON-NLS-2$
monitor.subTask("Exporting transformation [" + file.getPath() + "]");
}
TransMeta transMeta = buildTransMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
// Give extension points a chance to react before the transformation is returned.
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationMetaLoaded.id, transMeta);
transformations.add(transMeta);
} catch (Exception ex) {
// Best-effort: a single broken transformation is logged and skipped so the rest load.
// NOTE(review): the full stack trace goes to DETAILED level while only the
// message reaches ERROR level — confirm this logging split is intended.
// $NON-NLS-1$ //$NON-NLS-2$
log.logDetailed("Unable to load transformation [" + file.getPath() + "]", ex);
log.logError("An error occurred reading transformation [" + file.getTitle() + "] from directory [" + dirPath + "] : " + // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
ex.getMessage());
log.logError("Transformation [" + file.getTitle() + "] from directory [" + dirPath + // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"] was not exported because of a loading error!");
}
}
return transformations;
}
Aggregations