Use of org.pentaho.platform.api.repository2.unified.RepositoryFile in project pentaho-kettle by pentaho.
The class PurRepository, method readSharedObjects.
/**
 * Read shared objects of the types provided from the repository. Every {@link SharedObjectInterface} that is read
 * will be fully loaded as if it had been loaded through {@link #loadDatabaseMeta(ObjectId, String)},
 * {@link #loadClusterSchema(ObjectId, List, String)}, etc.
 * <p>
 * This method was introduced to reduce the number of server calls for loading shared objects to a constant number:
 * {@code 2 + n, where n is the number of types requested}.
 * </p>
 *
 * @param sharedObjectsByType
 *          Map of type to shared objects. Each map entry will contain a non-null {@link List} of
 *          {@link SharedObjectInterface}s for every type provided. Only entries for types provided will be altered.
 * @param types
 *          Types of repository objects to read from the repository
 * @throws KettleException
 */
protected void readSharedObjects( Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType,
    RepositoryObjectType... types ) throws KettleException {
  // Overview:
  // 1) We will fetch RepositoryFile, NodeRepositoryFileData, and VersionSummary for all types provided.
  // 2) We assume that unless an exception is thrown, every RepositoryFile returned by getFilesByType(..) has a
  //    matching NodeRepositoryFileData and VersionSummary.
  // 3) With all files, node data, and versions in hand we will iterate over them, merging them back into usable
  //    shared objects.
  List<RepositoryFile> allFiles = new ArrayList<RepositoryFile>();
  // Since type is not preserved in the RepositoryFile, we fetch files by type so we don't rely on parsing the name
  // to determine type afterward.
  // The map must be ordered or we can't match up files with data and version summaries.
  LinkedHashMap<RepositoryObjectType, List<RepositoryFile>> filesByType = getFilesByType( allFiles, types );
  try {
    List<NodeRepositoryFileData> data = pur.getDataForReadInBatch( allFiles, NodeRepositoryFileData.class );
    List<VersionSummary> versions = pur.getVersionSummaryInBatch( allFiles );
    // Only need one iterator for all data and versions. We will work through them as we process the files by type,
    // in order.
    Iterator<NodeRepositoryFileData> dataIter = data.iterator();
    Iterator<VersionSummary> versionsIter = versions.iterator();
    // Assemble into completely loaded SharedObjectInterfaces by type
    for ( Entry<RepositoryObjectType, List<RepositoryFile>> entry : filesByType.entrySet() ) {
      SharedObjectAssembler<?> assembler = sharedObjectAssemblerMap.get( entry.getKey() );
      if ( assembler == null ) {
        throw new UnsupportedOperationException(
            String.format( "Cannot assemble shared object of type [%s]", entry.getKey() ) ); //$NON-NLS-1$
      }
      // For all files of this type, assemble them from the pieces of data pulled from the repository
      Iterator<RepositoryFile> filesIter = entry.getValue().iterator();
      List<SharedObjectInterface> sharedObjects = new ArrayList<SharedObjectInterface>( entry.getValue().size() );
      // (no need to check hasNext() on the data and version iterators; they parallel the file list)
      while ( filesIter.hasNext() ) {
        RepositoryFile file = filesIter.next();
        NodeRepositoryFileData repoData = dataIter.next();
        VersionSummary version = versionsIter.next();
        // TODO: nonexistent db types can cause exceptions while assembling; prevent total failure
        try {
          sharedObjects.add( assembler.assemble( file, repoData, version ) );
        } catch ( Exception ex ) {
          // TODO i18n
          getLog().logError( "Unable to load shared objects", ex );
        }
      }
      sharedObjectsByType.put( entry.getKey(), sharedObjects );
    }
  } catch ( Exception ex ) {
    throw new KettleException( "Unable to load shared objects", ex ); //$NON-NLS-1$
  }
}
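For context, a minimal caller-side sketch of the batching contract described in the Javadoc above; the map variable and the choice of types are illustrative, not part of the project source:

// Hedged sketch: load two shared-object types with a constant number of server
// calls (2 + n, here n = 2) instead of one call per object.
Map<RepositoryObjectType, List<? extends SharedObjectInterface>> byType = new HashMap<>();
readSharedObjects( byType, RepositoryObjectType.DATABASE, RepositoryObjectType.SLAVE_SERVER );
// Only the requested types are present as keys afterward.
List<? extends SharedObjectInterface> databases = byType.get( RepositoryObjectType.DATABASE );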
Use of org.pentaho.platform.api.repository2.unified.RepositoryFile in project pentaho-kettle by pentaho.
The class PurRepository, method getAllFilesOfType.
protected List<RepositoryFile> getAllFilesOfType( final ObjectId dirId, final List<RepositoryObjectType> objectTypes )
    throws KettleException {
  Set<Serializable> parentFolderIds = new HashSet<>();
  List<String> filters = new ArrayList<>();
  for ( RepositoryObjectType objectType : objectTypes ) {
    switch ( objectType ) {
      case DATABASE: {
        parentFolderIds.add( getDatabaseMetaParentFolderId() );
        filters.add( "*" + RepositoryObjectType.DATABASE.getExtension() ); //$NON-NLS-1$
        break;
      }
      case TRANSFORMATION: {
        parentFolderIds.add( dirId.getId() );
        filters.add( "*" + RepositoryObjectType.TRANSFORMATION.getExtension() ); //$NON-NLS-1$
        break;
      }
      case PARTITION_SCHEMA: {
        parentFolderIds.add( getPartitionSchemaParentFolderId() );
        filters.add( "*" + RepositoryObjectType.PARTITION_SCHEMA.getExtension() ); //$NON-NLS-1$
        break;
      }
      case SLAVE_SERVER: {
        parentFolderIds.add( getSlaveServerParentFolderId() );
        filters.add( "*" + RepositoryObjectType.SLAVE_SERVER.getExtension() ); //$NON-NLS-1$
        break;
      }
      case CLUSTER_SCHEMA: {
        parentFolderIds.add( getClusterSchemaParentFolderId() );
        filters.add( "*" + RepositoryObjectType.CLUSTER_SCHEMA.getExtension() ); //$NON-NLS-1$
        break;
      }
      case JOB: {
        parentFolderIds.add( dirId.getId() );
        filters.add( "*" + RepositoryObjectType.JOB.getExtension() ); //$NON-NLS-1$
        break;
      }
      case TRANS_DATA_SERVICE: {
        parentFolderIds.add( dirId.getId() );
        filters.add( "*" + RepositoryObjectType.TRANS_DATA_SERVICE.getExtension() ); //$NON-NLS-1$
        break;
      }
      default: {
        throw new UnsupportedOperationException( "not implemented" );
      }
    }
  }
  // build filter
  StringBuilder mergedFilterBuf = new StringBuilder();
  int i = 0;
  for ( String filter : filters ) {
    if ( i++ > 0 ) {
      mergedFilterBuf.append( " | " ); //$NON-NLS-1$
    }
    mergedFilterBuf.append( filter );
  }
  List<RepositoryFile> allFiles = new ArrayList<>();
  for ( Serializable parentFolderId : parentFolderIds ) {
    allFiles.addAll( pur.getChildren( parentFolderId, mergedFilterBuf.toString() ) );
  }
  Collections.sort( allFiles );
  return allFiles;
}
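To make the filter construction concrete, here is a small illustration of what the merge loop produces; the extension values shown assume the standard Kettle file extensions (e.g. ".ktr" for transformations, ".kjb" for jobs) and are not taken from the snippet above:

// Hedged illustration: for objectTypes = [TRANSFORMATION, JOB] the loop builds
// filters = ["*.ktr", "*.kjb"] and merges them into one pattern string that
// pur.getChildren( parentFolderId, filter ) matches against child file names.
String mergedFilter = "*" + RepositoryObjectType.TRANSFORMATION.getExtension()
    + " | " + "*" + RepositoryObjectType.JOB.getExtension(); // e.g. "*.ktr | *.kjb"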
Use of org.pentaho.platform.api.repository2.unified.RepositoryFile in project pentaho-kettle by pentaho.
The class PurRepository, method getObjectInformation.
@Override
public RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType )
    throws KettleException {
  try {
    RepositoryFile repositoryFile;
    try {
      repositoryFile = pur.getFileById( objectId.getId() );
    } catch ( Exception e ) {
      // javax.jcr.Session throws an exception if a node with the specified ID does not exist;
      // see http://jira.pentaho.com/browse/BISERVER-12758
      log.logError( "Error when trying to obtain a file by id: " + objectId.getId(), e );
      return null;
    }
    if ( repositoryFile == null ) {
      return null;
    }
    RepositoryFileAcl repositoryFileAcl = pur.getAcl( repositoryFile.getId() );
    String parentPath = getParentPath( repositoryFile.getPath() );
    String name = repositoryFile.getTitle();
    String description = repositoryFile.getDescription();
    Date modifiedDate = repositoryFile.getLastModifiedDate();
    // String creatorId = repositoryFile.getCreatorId();
    String ownerName = repositoryFileAcl != null ? repositoryFileAcl.getOwner().getName() : "";
    boolean deleted = isDeleted( repositoryFile );
    RepositoryDirectoryInterface directory = findDirectory( parentPath );
    return new RepositoryObject( objectId, name, directory, ownerName, modifiedDate, objectType, description,
        deleted );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get object information for object with id=" + objectId, e );
  }
}
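A minimal usage sketch, assuming a connected Repository instance named repository and an ObjectId in hand (both hypothetical):

// Hedged sketch: a null return means the file could not be found or the id no longer exists.
RepositoryObject info = repository.getObjectInformation( objectId, RepositoryObjectType.TRANSFORMATION );
if ( info != null ) {
  System.out.println( info.getName() + " last modified on " + info.getModifiedDate() );
}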
Use of org.pentaho.platform.api.repository2.unified.RepositoryFile in project pentaho-kettle by pentaho.
The class PurRepository, method deleteDatabaseMeta.
@Override
public void deleteDatabaseMeta( final String databaseName ) throws KettleException {
  RepositoryFile fileToDelete = null;
  try {
    fileToDelete = pur.getFile( getPath( databaseName, null, RepositoryObjectType.DATABASE ) );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to delete database with name [" + databaseName + "]", e );
  }
  ObjectId idDatabase = new StringObjectId( fileToDelete.getId().toString() );
  permanentlyDeleteSharedObject( idDatabase );
  removeFromSharedObjectCache( RepositoryObjectType.DATABASE, idDatabase );
}
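Note that only the pur.getFile(..) lookup is guarded here; if it returns null rather than throwing, the subsequent fileToDelete.getId() call fails with a NullPointerException. A caller unsure whether the connection exists might check first; a hedged sketch using the Repository API's lookup by name:

// Hedged sketch: verify the database connection exists before deleting it.
ObjectId id = repository.getDatabaseID( databaseName ); // null if no such connection
if ( id != null ) {
  repository.deleteDatabaseMeta( databaseName );
}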
Use of org.pentaho.platform.api.repository2.unified.RepositoryFile in project pentaho-kettle by pentaho.
The class PurRepository, method deleteRepositoryDirectory.
@Override
public void deleteRepositoryDirectory( final RepositoryDirectoryInterface dir, final boolean deleteHomeDirectories )
    throws KettleException {
  try {
    // Fetch the folder to be deleted
    RepositoryFile folder = pur.getFileById( dir.getObjectId().getId() );
    // Fetch the user's home directory
    RepositoryFile homeFolder = pur.getFile( ClientRepositoryPaths.getUserHomeFolderPath( user.getLogin() ) );
    // Make sure the user is not trying to delete their own home directory
    if ( isSameOrAncestorFolder( folder, homeFolder ) ) {
      // Then throw an exception that the user cannot delete their own home directory
      throw new KettleException( "You are not allowed to delete your home folder." );
    }
    if ( !deleteHomeDirectories && isUserHomeDirectory( folder ) ) {
      throw new RepositoryObjectAccessException( "Cannot delete another user's home directory",
          RepositoryObjectAccessException.AccessExceptionType.USER_HOME_DIR );
    }
    pur.deleteFile( dir.getObjectId().getId(), null );
    rootRef.clearRef();
  } catch ( Exception e ) {
    throw new KettleException( "Unable to delete directory with path [" + getPath( null, dir, null ) + "]", e );
  }
}
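A minimal caller-side sketch, assuming a connected PurRepository instance named repository; the directory path is hypothetical:

// Hedged sketch: locate a directory in the tree and delete it, refusing to
// delete home directories (deleteHomeDirectories = false).
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
RepositoryDirectoryInterface dir = tree.findDirectory( "/public/old-reports" ); // hypothetical path
if ( dir != null ) {
  repository.deleteRepositoryDirectory( dir, false );
}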