Use of org.pentaho.platform.api.repository2.unified.VersionSummary in project pentaho-kettle by pentaho:
the getRevisions method of the UnifiedRepositoryRevisionService class.
/**
 * Returns the full revision history of the repository object identified by {@code fileId}.
 *
 * @param fileId id of the repository object whose version history is requested; its
 *               {@link ObjectId#getId()} value is passed straight to the unified repository
 * @return one {@link PurObjectRevision} per {@link VersionSummary}, in the order returned by
 *         {@code unifiedRepository.getVersionSummaries(..)}
 * @throws KettleException if the underlying repository call fails for any reason
 */
@Override
public List<ObjectRevision> getRevisions(ObjectId fileId) throws KettleException {
    try {
        List<ObjectRevision> versions = new ArrayList<ObjectRevision>();
        List<VersionSummary> versionSummaries = unifiedRepository.getVersionSummaries(fileId.getId());
        for (VersionSummary versionSummary : versionSummaries) {
            // Adapt each platform VersionSummary into Kettle's revision abstraction.
            versions.add(new PurObjectRevision(versionSummary.getId(), versionSummary.getAuthor(), versionSummary.getDate(), versionSummary.getMessage()));
        }
        return versions;
    } catch (Exception e) {
        // Bug fix: the previous message interpolated a local 'absPath' that was never
        // assigned, so it always printed "path [null]". Report the id we actually used.
        throw new KettleException("Could not retrieve version history of object with id [" + fileId + "]", e);
    }
}
Use of org.pentaho.platform.api.repository2.unified.VersionSummary in project pentaho-kettle by pentaho:
the readSharedObjects method of the PurRepository class.
/**
* Read shared objects of the types provided from the repository. Every {@link SharedObjectInterface} that is read
* will be fully loaded as if it has been loaded through {@link #loadDatabaseMeta(ObjectId, String)},
* {@link #loadClusterSchema(ObjectId, List, String)}, etc.
* <p>
* This method was introduced to reduce the number of server calls for loading shared objects to a constant number:
* {@code 2 + n, where n is the number of types requested}.
* </p>
*
* @param sharedObjectsByType
* Map of type to shared objects. Each map entry will contain a non-null {@link List} of
* {@link RepositoryObjectType}s for every type provided. Only entries for types provided will be altered.
* @param types
* Types of repository objects to read from the repository
* @throws KettleException
*/
protected void readSharedObjects(Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType, RepositoryObjectType... types) throws KettleException {
// Overview:
// 1) We will fetch RepositoryFile, NodeRepositoryFileData, and VersionSummary for all types provided.
// 2) We assume that unless an exception is thrown every RepositoryFile returned by getFilesByType(..) have a
// matching NodeRepositoryFileData and VersionSummary.
// 3) With all files, node data, and versions in hand we will iterate over them, merging them back into usable
// shared objects
List<RepositoryFile> allFiles = new ArrayList<RepositoryFile>();
// Since type is not preserved in the RepositoryFile we fetch files by type so we don't rely on parsing the name to
// determine type afterward
// Map must be ordered or we can't match up files with data and version summary
LinkedHashMap<RepositoryObjectType, List<RepositoryFile>> filesByType = getFilesByType(allFiles, types);
try {
// Two batch calls cover every file regardless of type; their result order must mirror allFiles.
List<NodeRepositoryFileData> data = pur.getDataForReadInBatch(allFiles, NodeRepositoryFileData.class);
List<VersionSummary> versions = pur.getVersionSummaryInBatch(allFiles);
// Only need one iterator for all data and versions. We will work through them as we process the files by type, in
// order.
// NOTE(review): correctness of the lockstep advance below depends on getDataForReadInBatch and
// getVersionSummaryInBatch returning exactly one element per input file, in input order — assumption (2) above.
Iterator<NodeRepositoryFileData> dataIter = data.iterator();
Iterator<VersionSummary> versionsIter = versions.iterator();
// Assemble into completely loaded SharedObjectInterfaces by type
for (Entry<RepositoryObjectType, List<RepositoryFile>> entry : filesByType.entrySet()) {
// Each repository object type has a dedicated assembler; an unknown type is a programming error.
SharedObjectAssembler<?> assembler = sharedObjectAssemblerMap.get(entry.getKey());
if (assembler == null) {
throw new UnsupportedOperationException(// $NON-NLS-1$
String.format("Cannot assemble shared object of type [%s]", entry.getKey()));
}
// For all files of this type, assemble them from the pieces of data pulled from the repository
Iterator<RepositoryFile> filesIter = entry.getValue().iterator();
List<SharedObjectInterface> sharedObjects = new ArrayList<SharedObjectInterface>(entry.getValue().size());
// (no need to check for next on all iterators)
while (filesIter.hasNext()) {
// Advance all three iterators together: file, its node data, and its version summary.
RepositoryFile file = filesIter.next();
NodeRepositoryFileData repoData = dataIter.next();
VersionSummary version = versionsIter.next();
// TODO: inexistent db types can cause exceptions assembling; prevent total failure
try {
sharedObjects.add(assembler.assemble(file, repoData, version));
} catch (Exception ex) {
// A single unassemblable object is logged and skipped so the remaining objects still load.
// TODO i18n
getLog().logError("Unable to load shared objects", ex);
}
}
// Only entries for the requested types are written; other keys in the map are untouched.
sharedObjectsByType.put(entry.getKey(), sharedObjects);
}
} catch (Exception ex) {
// $NON-NLS-1$
throw new KettleException("Unable to load shared objects", ex);
}
}
Use of org.pentaho.platform.api.repository2.unified.VersionSummary in project pentaho-kettle by pentaho:
the loadJobs method of the PurRepository class.
/**
* Load all jobs referenced by {@code files}.
*
* @param monitor
* @param log
* @param files
* Job files to load.
* @param setInternalVariables
* Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
* @return Loaded jobs
* @throws KettleException
* Error loading data for jobs from repository
*/
protected List<JobMeta> loadJobs(final ProgressMonitorListener monitor, final LogChannelInterface log, final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
List<JobMeta> jobs = new ArrayList<JobMeta>(files.size());
// Batch-fetch node data and version summaries for all files up front (2 server calls total).
// NOTE(review): the lockstep iteration below assumes both batch calls return one element per
// input file, in input order — confirm against the pur API contract.
List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
Iterator<RepositoryFile> filesIter = files.iterator();
Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
Iterator<VersionSummary> versionsIter = versions.iterator();
// Stop early if the caller's progress monitor reports cancellation (monitor may be null).
while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
RepositoryFile file = filesIter.next();
NodeRepositoryFileData fileData = filesDataIter.next();
VersionSummary version = versionsIter.next();
try {
// Parent directory path = everything before the last directory separator in the file path.
String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
log.logDetailed("Loading/Exporting job [{0} : {1}] ({2})", dirPath, file.getTitle(), // $NON-NLS-1$
file.getPath());
if (monitor != null) {
// $NON-NLS-1$ //$NON-NLS-2$
monitor.subTask("Exporting job [" + file.getPath() + "]");
}
// Build the fully-loaded JobMeta from the pieces fetched above, then notify extension points.
JobMeta jobMeta = buildJobMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
jobs.add(jobMeta);
} catch (Exception ex) {
// One failed job is logged and skipped; the remaining jobs still load.
// $NON-NLS-1$ //$NON-NLS-2$
log.logError("Unable to load job [" + file.getPath() + "]", ex);
}
}
return jobs;
}
Use of org.pentaho.platform.api.repository2.unified.VersionSummary in project pentaho-kettle by pentaho:
the loadTransformations method of the PurRepository class.
/**
* Load all transformations referenced by {@code files}.
*
* @param monitor
* @param log
* @param files
* Transformation files to load.
* @param setInternalVariables
* Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
* @return Loaded transformations
* @throws KettleException
* Error loading data for transformations from repository
*/
protected List<TransMeta> loadTransformations(final ProgressMonitorListener monitor, final LogChannelInterface log, final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
List<TransMeta> transformations = new ArrayList<TransMeta>(files.size());
// Batch-fetch node data and version summaries for all files up front (2 server calls total).
// NOTE(review): the lockstep iteration below assumes both batch calls return one element per
// input file, in input order — confirm against the pur API contract.
List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
Iterator<RepositoryFile> filesIter = files.iterator();
Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
Iterator<VersionSummary> versionsIter = versions.iterator();
// Stop early if the caller's progress monitor reports cancellation (monitor may be null).
while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
RepositoryFile file = filesIter.next();
NodeRepositoryFileData fileData = filesDataIter.next();
VersionSummary version = versionsIter.next();
// Parent directory path = everything before the last directory separator in the file path.
String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
try {
log.logDetailed("Loading/Exporting transformation [{0} : {1}] ({2})", dirPath, file.getTitle(), file.getPath());
if (monitor != null) {
// $NON-NLS-1$ //$NON-NLS-2$
monitor.subTask("Exporting transformation [" + file.getPath() + "]");
}
// Build the fully-loaded TransMeta from the pieces fetched above, then notify extension points.
TransMeta transMeta = buildTransMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationMetaLoaded.id, transMeta);
transformations.add(transMeta);
} catch (Exception ex) {
// One failed transformation is logged and skipped; the remaining ones still load.
// NOTE(review): the exception (with stack trace) is logged only at detailed level here,
// while the error-level lines carry just getMessage() — inconsistent with loadJobs, which
// uses logError(msg, ex); consider aligning.
// $NON-NLS-1$ //$NON-NLS-2$
log.logDetailed("Unable to load transformation [" + file.getPath() + "]", ex);
log.logError("An error occurred reading transformation [" + file.getTitle() + "] from directory [" + dirPath + "] : " + // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
ex.getMessage());
log.logError("Transformation [" + file.getTitle() + "] from directory [" + dirPath + // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
"] was not exported because of a loading error!");
}
}
return transformations;
}
Use of org.pentaho.platform.api.repository2.unified.VersionSummary in project pentaho-platform by pentaho:
the getVersionSummaries method of the MockUnifiedRepository class.
/**
 * Builds a VersionSummary for every frozen record the version manager holds for the file.
 *
 * @param fileId id of the versioned file
 * @return one VersionSummary per stored version, in the order the version manager returns them
 */
@Override
public List<VersionSummary> getVersionSummaries(final Serializable fileId) {
    final List<VersionSummary> summaries = new ArrayList<VersionSummary>();
    for (final FrozenFileRecord rec : versionManager.getVersions(fileId)) {
        // Mock versions are never ACL-only changes (third arg false) and carry no labels.
        summaries.add(new VersionSummary(
            rec.getVersionId(),
            rec.getFile().getId(),
            false,
            rec.getDate(),
            rec.getAuthor(),
            rec.getVersionMessage(),
            new ArrayList<String>(0)));
    }
    return summaries;
}
Aggregations