
Example 31 with StorageAsset

use of org.apache.archiva.repository.storage.StorageAsset in project archiva by apache.

the class AbstractStorageUtilTest method testDelete.

@Test
void testDelete() {
    StorageAsset root = createTree();
    RepositoryStorage storage = createStorage(root);
    StorageUtil.deleteRecursively(root);
    int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
    testDeletionStatus(expected, storage);
}
Also used : RepositoryStorage(org.apache.archiva.repository.storage.RepositoryStorage) StorageAsset(org.apache.archiva.repository.storage.StorageAsset) Test(org.junit.jupiter.api.Test)
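
For orientation, a minimal sketch of driving StorageUtil.deleteRecursively outside the test harness, reusing only calls that appear in these examples (the FilesystemStorage constructor and getRoot() from Example 33 below); the temporary directory name is illustrative:

Path tempDir = Files.createTempDirectory("storage-example");
FilesystemStorage storage = new FilesystemStorage(tempDir, new DefaultFileLockManager());
StorageAsset root = storage.getRoot();
// ... populate the tree, then remove every asset below the root, and the root itself
StorageUtil.deleteRecursively(root);

The expected deletion count in the test is simply the node count of the generated tree: LEVEL1 first-level assets, LEVEL1 * LEVEL2 second-level assets, LEVEL1 * LEVEL2 * LEVEL3 leaves, plus 1 for the root.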

Example 32 with StorageAsset

use of org.apache.archiva.repository.storage.StorageAsset in project archiva by apache.

the class AbstractStorageUtilTest method testWalkFromRoot.

@Test
void testWalkFromRoot() {
    StorageAsset root = createTree();
    ConsumeVisitStatus status = new ConsumeVisitStatus();
    StorageUtil.walk(root, status);
    int expected = LEVEL1 * LEVEL2 * LEVEL3 + LEVEL1 * LEVEL2 + LEVEL1 + 1;
    Assertions.assertEquals(expected, status.size());
    StorageAsset first = root.list().get(0).list().get(0).list().get(0);
    Assertions.assertEquals(first, status.getFirst());
    Assertions.assertEquals(root, status.getLast());
}
Also used : StorageAsset(org.apache.archiva.repository.storage.StorageAsset) Test(org.junit.jupiter.api.Test)
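
The assertions imply a depth-first, post-order walk: the first asset reported is the deepest descendant of the first branch, the root comes last, and the visit count again equals the node count of the tree. A minimal sketch, assuming StorageUtil.walk accepts any java.util.function.Consumer<StorageAsset> (ConsumeVisitStatus appears to be such a consumer in the test support code) and using java.util.concurrent.atomic.AtomicInteger:

AtomicInteger visited = new AtomicInteger();
StorageUtil.walk(root, asset -> visited.incrementAndGet());
// visited.get() now equals the number of assets reachable from root, including root itself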

Example 33 with StorageAsset

use of org.apache.archiva.repository.storage.StorageAsset in project archiva by apache.

the class ArchivaDavResourceFactory method buildMergedIndexDirectory.

protected StorageAsset buildMergedIndexDirectory(String activePrincipal, DavServletRequest request, RepositoryGroup repositoryGroup) throws DavException {
    try {
        final List<ManagedRepository> repositories = repositoryGroup.getRepositories();
        HttpSession session = request.getSession();
        @SuppressWarnings("unchecked") Map<String, TemporaryGroupIndex> temporaryGroupIndexMap = (Map<String, TemporaryGroupIndex>) session.getAttribute(TemporaryGroupIndexSessionCleaner.TEMPORARY_INDEX_SESSION_KEY);
        if (temporaryGroupIndexMap == null) {
            temporaryGroupIndexMap = new HashMap<>();
        }
        final String id = repositoryGroup.getId();
        TemporaryGroupIndex tmp = temporaryGroupIndexMap.get(id);
        if (tmp != null && tmp.getDirectory() != null && tmp.getDirectory().exists()) {
            if (System.currentTimeMillis() - tmp.getCreationTime() > (repositoryGroup.getMergedIndexTTL() * 60 * 1000)) {
                log.debug(MarkerFactory.getMarker("group.merged.index"), "tmp group index '{}' is too old so delete it", id);
                indexMerger.cleanTemporaryGroupIndex(tmp);
            } else {
                log.debug(MarkerFactory.getMarker("group.merged.index"), "merged index for group '{}' found in cache", id);
                return tmp.getDirectory();
            }
        }
        Set<String> authzRepos = new HashSet<String>();
        String permission = WebdavMethodUtil.getMethodPermission(request.getMethod());
        for (ManagedRepository repository : repositories) {
            try {
                if (servletAuth.isAuthorized(activePrincipal, repository.getId(), permission)) {
                    authzRepos.add(repository.getId());
                    authzRepos.addAll(this.repositorySearch.getRemoteIndexingContextIds(repository.getId()));
                }
            } catch (UnauthorizedException e) {
                // TODO: review exception handling
                log.debug("Skipping repository '{}' for user '{}': {}", repository, activePrincipal, e.getMessage());
            }
        }
        log.info("generate temporary merged index for repository group '{}' for repositories '{}'", id, authzRepos);
        IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature(IndexCreationFeature.class);
        Path indexPath = indexCreationFeature.getLocalIndexPath().getFilePath();
        if (indexPath != null) {
            Path tempRepoFile = Files.createTempDirectory("temp");
            tempRepoFile.toFile().deleteOnExit();
            FilesystemStorage storage = new FilesystemStorage(tempRepoFile, new DefaultFileLockManager());
            StorageAsset tmpAsset = storage.getRoot();
            IndexMergerRequest indexMergerRequest = new IndexMergerRequest(authzRepos, true, id, indexPath.toString(), repositoryGroup.getMergedIndexTTL()).mergedIndexDirectory(tmpAsset).temporary(true);
            MergedRemoteIndexesTaskRequest taskRequest = new MergedRemoteIndexesTaskRequest(indexMergerRequest, indexMerger);
            MergedRemoteIndexesTask job = new MergedRemoteIndexesTask(taskRequest);
            ArchivaIndexingContext indexingContext = job.execute().getIndexingContext();
            StorageAsset mergedRepoDir = indexingContext.getPath();
            TemporaryGroupIndex temporaryGroupIndex = new TemporaryGroupIndex(mergedRepoDir, indexingContext.getId(), id,
                repositoryGroup.getMergedIndexTTL()).setCreationTime(new Date().getTime());
            temporaryGroupIndexMap.put(id, temporaryGroupIndex);
            session.setAttribute(TemporaryGroupIndexSessionCleaner.TEMPORARY_INDEX_SESSION_KEY, temporaryGroupIndexMap);
            return mergedRepoDir;
        } else {
            log.error("Local index path for repository group {} does not exist.", repositoryGroup.getId());
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    } catch (RepositorySearchException e) {
        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
    } catch (IndexMergerException e) {
        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
    } catch (IOException e) {
        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
    }
}
Also used : TemporaryGroupIndex(org.apache.archiva.indexer.merger.TemporaryGroupIndex) ManagedRepository(org.apache.archiva.repository.ManagedRepository) MergedRemoteIndexesTaskRequest(org.apache.archiva.indexer.merger.base.MergedRemoteIndexesTaskRequest) FilesystemStorage(org.apache.archiva.repository.storage.fs.FilesystemStorage) UnauthorizedException(org.apache.archiva.redback.authorization.UnauthorizedException) IndexMergerException(org.apache.archiva.indexer.merger.IndexMergerException) HashSet(java.util.HashSet) Path(java.nio.file.Path) MergedRemoteIndexesTask(org.apache.archiva.indexer.merger.base.MergedRemoteIndexesTask) DavException(org.apache.jackrabbit.webdav.DavException) HttpSession(javax.servlet.http.HttpSession) IndexMergerRequest(org.apache.archiva.indexer.merger.IndexMergerRequest) RepositorySearchException(org.apache.archiva.indexer.search.RepositorySearchException) IOException(java.io.IOException) ArchivaIndexingContext(org.apache.archiva.indexer.ArchivaIndexingContext) Date(java.util.Date) IndexCreationFeature(org.apache.archiva.repository.features.IndexCreationFeature) StorageAsset(org.apache.archiva.repository.storage.StorageAsset) DefaultFileLockManager(org.apache.archiva.common.filelock.DefaultFileLockManager) Map(java.util.Map) HashMap(java.util.HashMap)
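
The cache-freshness decision above compares the age of the temporary index in milliseconds against getMergedIndexTTL(); the multiplication by 60 * 1000 shows the TTL is interpreted as minutes. The same check, pulled out as a hypothetical helper for readability:

// Sketch only; isExpired is not part of the Archiva code base.
static boolean isExpired(TemporaryGroupIndex index, int ttlMinutes) {
    return System.currentTimeMillis() - index.getCreationTime() > ttlMinutes * 60L * 1000L;
}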

Example 34 with StorageAsset

use of org.apache.archiva.repository.storage.StorageAsset in project archiva by apache.

the class ArchivaDavResourceFactory method writeMergedMetadataToFile.

private StorageAsset writeMergedMetadataToFile(RepositoryGroup repoGroup, ArchivaRepositoryMetadata mergedMetadata, String outputFilename) throws RepositoryMetadataException, IOException {
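    // 'false' appears to request a plain file asset; the directory branch in Example 35 passes 'true'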
    StorageAsset asset = repoGroup.addAsset(outputFilename, false);
    OutputStream stream = asset.getWriteStream(true);
    OutputStreamWriter sw = new OutputStreamWriter(stream, "UTF-8");
    RepositoryMetadataWriter.write(mergedMetadata, sw);
    createChecksumFiles(repoGroup, outputFilename);
    return asset;
}
Also used : StorageAsset(org.apache.archiva.repository.storage.StorageAsset) OutputStream(java.io.OutputStream) OutputStreamWriter(java.io.OutputStreamWriter)
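
Note that the OutputStreamWriter above is never explicitly closed, so flushing before createChecksumFiles runs is left to the writer implementation or the garbage collector. A defensive variant, a sketch only that keeps the same calls but scopes the writer in try-with-resources (using java.nio.charset.StandardCharsets):

StorageAsset asset = repoGroup.addAsset(outputFilename, false);
try (OutputStreamWriter sw = new OutputStreamWriter(asset.getWriteStream(true), StandardCharsets.UTF_8)) {
    RepositoryMetadataWriter.write(mergedMetadata, sw);
}
createChecksumFiles(repoGroup, outputFilename);
return asset;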

Example 35 with StorageAsset

use of org.apache.archiva.repository.storage.StorageAsset in project archiva by apache.

the class ArchivaDavResource method addMember.

@Override
public void addMember(DavResource resource, InputContext inputContext) throws DavException {
    // Path localFile = localResource.resolve( resource.getDisplayName() );
    boolean exists = asset.exists();
    final String newPath = asset.getPath() + "/" + resource.getDisplayName();
    if (isCollection() && inputContext.hasStream()) { // New File
        Path tempFile = null;
        try {
            tempFile = Files.createTempFile("archiva_upload", "dat");
            try (OutputStream os = Files.newOutputStream(tempFile, StandardOpenOption.CREATE)) {
                IOUtils.copy(inputContext.getInputStream(), os);
            }
            long expectedContentLength = inputContext.getContentLength();
            long actualContentLength = 0;
            try {
                actualContentLength = Files.size(tempFile);
            } catch (IOException e) {
                log.error("Could not get length of file {}: {}", tempFile, e.getMessage(), e);
            }
            // length of -1 is given for a chunked request or unknown length, in which case we accept what was uploaded
            if (expectedContentLength >= 0 && expectedContentLength != actualContentLength) {
                String msg = "Content Header length was " + expectedContentLength + " but was " + actualContentLength;
                log.debug("Upload failed: {}", msg);
                throw new DavException(HttpServletResponse.SC_BAD_REQUEST, msg);
            }
            StorageAsset member = repositoryStorage.addAsset(newPath, false);
            member.create();
            member.replaceDataFromFile(tempFile);
        } catch (IOException e) {
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        } finally {
            if (tempFile != null) {
                try {
                    Files.deleteIfExists(tempFile);
                } catch (IOException e) {
                    log.error("Could not delete temporary file {}", tempFile);
                }
            }
        }
        // queueRepositoryTask( asset );
        log.debug("File '{}{}(current user '{}')", resource.getDisplayName(), (exists ? "' modified " : "' created "), this.principal);
    // triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
    } else if (!inputContext.hasStream() && isCollection()) { // New directory
        try {
            StorageAsset member = repositoryStorage.addAsset(newPath, true);
            member.create();
        } catch (IOException e) {
            log.error("Could not create directory {}: {}", newPath, e.getMessage(), e);
        }
        log.debug("Directory '{}' (current user '{}')", resource.getDisplayName(), this.principal);
        triggerAuditEvent(resource, AuditEvent.CREATE_DIR);
    } else {
        String msg = "Could not write member " + resource.getResourcePath() + " at " + getResourcePath() + " as this is not a DAV collection";
        log.debug(msg);
        throw new DavException(HttpServletResponse.SC_BAD_REQUEST, msg);
    }
}
Also used : Path(java.nio.file.Path) DavException(org.apache.jackrabbit.webdav.DavException) OutputStream(java.io.OutputStream) StorageAsset(org.apache.archiva.repository.storage.StorageAsset) IOException(java.io.IOException)
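
The upload branch above accepts a reported content length of -1 (chunked or unknown length) and rejects any other mismatch with 400 Bad Request. The same check, expressed as a hypothetical standalone helper:

// Sketch only; validateLength is not part of ArchivaDavResource.
static void validateLength(long expected, long actual) throws DavException {
    // -1 signals a chunked or unknown-length request and is accepted as uploaded
    if (expected >= 0 && expected != actual) {
        throw new DavException(HttpServletResponse.SC_BAD_REQUEST,
            "Content Header length was " + expected + " but was " + actual);
    }
}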

Aggregations

StorageAsset (org.apache.archiva.repository.storage.StorageAsset) 191
Path (java.nio.file.Path) 91
BaseRepositoryContentLayout (org.apache.archiva.repository.content.BaseRepositoryContentLayout) 61
IOException (java.io.IOException) 59
Test (org.junit.Test) 59
Artifact (org.apache.archiva.repository.content.Artifact) 54
ManagedRepository (org.apache.archiva.repository.ManagedRepository) 27
ArchivaIndexingContext (org.apache.archiva.indexer.ArchivaIndexingContext) 22
ArchivaRepositoryMetadata (org.apache.archiva.model.ArchivaRepositoryMetadata) 22
List (java.util.List) 20
Inject (javax.inject.Inject) 20
RepositoryMetadataException (org.apache.archiva.repository.metadata.RepositoryMetadataException) 20
Collectors (java.util.stream.Collectors) 19
RemoteRepository (org.apache.archiva.repository.RemoteRepository) 19
IndexingContext (org.apache.maven.index.context.IndexingContext) 19
FilesystemStorage (org.apache.archiva.repository.storage.fs.FilesystemStorage) 18
StringUtils (org.apache.commons.lang3.StringUtils) 18
Logger (org.slf4j.Logger) 18
LoggerFactory (org.slf4j.LoggerFactory) 18
Map (java.util.Map) 17