Example usage of org.apache.archiva.repository.content.ContentAccessException in the Apache Archiva project — class ManagedDefaultRepositoryContent, method addArtifact.
/**
 * Moves the file to the artifact destination, creating the destination asset
 * first if it does not exist yet.
 *
 * @param sourceFile  the local file whose content is pushed into the artifact's asset
 * @param destination the artifact whose backing storage asset receives the data
 * @throws IllegalArgumentException if the destination artifact is not valid
 * @throws ContentAccessException   if the underlying storage could not be written
 */
@Override
public void addArtifact(Path sourceFile, Artifact destination) throws IllegalArgumentException, ContentAccessException {
    try {
        StorageAsset asset = destination.getAsset();
        if (!asset.exists()) {
            asset.create();
        }
        asset.replaceDataFromFile(sourceFile);
    } catch (IOException e) {
        // Pass the exception itself as the final SLF4J argument so the full stack
        // trace is logged — previously only e.getMessage() was recorded.
        log.error("Could not push data to asset source={} destination={}", sourceFile, destination.getAsset().getFilePath(), e);
        throw new ContentAccessException(e.getMessage(), e);
    }
}
Example usage of org.apache.archiva.repository.content.ContentAccessException in the Apache Archiva project — class ManagedDefaultRepositoryContent, method newArtifactStream.
/**
 * Returns all related artifacts that match the given artifact. That means all artifacts that have
 * the same filename plus an additional extension, e.g. ${fileName}.sha2
 *
 * @param item the artifact
 * @return the stream of artifacts
 * @throws ContentAccessException if access to the underlying storage failed
 */
public Stream<? extends Artifact> newArtifactStream(Artifact item) throws ContentAccessException {
    final Version v = item.getVersion();
    final String fileName = item.getFileName();
    // Related artifacts share the base file name followed by a dot, e.g. "foo.jar.sha1".
    final Predicate<StorageAsset> filter = (StorageAsset a) -> a.getName().startsWith(fileName + ".");
    return v.getAsset().list().stream().filter(filter).map(a -> {
        try {
            return getArtifactFromPath(a);
        } catch (LayoutException e) {
            // Parameterized logging (no string concatenation); invalid paths are
            // mapped to null and removed by the nonNull filter below.
            log.error("Not a valid artifact path {}", a.getPath(), e);
            return null;
        }
    }).filter(Objects::nonNull);
}
Example usage of org.apache.archiva.repository.content.ContentAccessException in the Apache Archiva project — class ManagedDefaultRepositoryContent, method newItemStream.
/**
 * Returns a stream of content items that match the given selector, walking the
 * repository storage from the deepest directory that the selector pins down.
 *
 * @param selector criteria (namespace, project id, version, wildcards) to match
 * @param parallel whether the resulting asset stream may be processed in parallel
 * @return a stream of content items matching the selector
 * @throws ContentAccessException   if access to the underlying storage failed
 * @throws IllegalArgumentException if the selector is not valid for this layout
 */
@Override
public Stream<? extends ContentItem> newItemStream(ItemSelector selector, boolean parallel) throws ContentAccessException, IllegalArgumentException {
final Predicate<StorageAsset> filter = getItemFileFilterFromSelector(selector);
// Choose the start directory: a wildcard ('*') in a selector component forces the
// walk to begin one level above that component. Branch order matters here.
StorageAsset startDir;
if (selector.getNamespace().contains("*")) {
// Wildcard namespace: scan from the repository root.
startDir = getAsset("");
} else if (selector.hasProjectId() && selector.getProjectId().contains("*")) {
// Wildcard project id: scan from the namespace directory.
startDir = getAsset(selector.getNamespace());
} else if (selector.hasProjectId() && selector.hasVersion() && selector.getVersion().contains("*")) {
// Wildcard version: scan from the project directory.
startDir = getAsset(selector.getNamespace(), selector.getProjectId());
} else if (selector.hasProjectId() && selector.hasVersion()) {
startDir = getAsset(selector.getNamespace(), selector.getProjectId(), selector.getVersion());
} else if (selector.hasProjectId()) {
startDir = getAsset(selector.getNamespace(), selector.getProjectId());
} else {
startDir = getAsset(selector.getNamespace());
if (!selector.recurse()) {
// We descend into 2 subdirectories (project and version)
// NOTE(review): this shortcut does not apply `filter` — presumably intentional
// for non-recursive namespace listings; confirm against callers.
return startDir.list().stream().flatMap(a -> getChildrenDF(a, 1)).map(this::getItemFromPath);
}
}
return StorageUtil.newAssetStream(startDir, parallel).filter(filter).map(this::getItemFromPath);
}
Example usage of org.apache.archiva.repository.content.ContentAccessException in the Apache Archiva project — class ArchivaDavResourceFactory, method processRepository.
/**
 * Resolves a WebDAV resource for a request against a managed repository. Read
 * requests may trigger fetching the content from remote proxies first; PUT
 * requests are checked against the repository's redeployment rules and missing
 * parent directories are created before the upload.
 *
 * @param request                  the incoming WebDAV servlet request
 * @param archivaLocator           locator identifying the repository and resource path
 * @param activePrincipal          the authenticated user performing the request
 * @param managedRepositoryContent the content layer of the target repository
 * @param managedRepository        the managed repository being accessed
 * @return the resolved resource, or {@code null} if the request was not authorized
 * @throws DavException if the resource does not exist, the layout is invalid, or
 *                      repository rules (e.g. blocked redeployment) are violated
 */
private DavResource processRepository(final DavServletRequest request, ArchivaDavResourceLocator archivaLocator, String activePrincipal, ManagedRepositoryContent managedRepositoryContent, org.apache.archiva.repository.ManagedRepository managedRepository) throws DavException {
    DavResource resource = null;
    if (isAuthorized(request, managedRepositoryContent.getId())) {
        boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
        // Maven Centric part ask evaluation if -SNAPSHOT
        // MRM-1846 test if read method to prevent issue with maven 2.2.1 and uniqueVersion false
        String path = readMethod ? evaluatePathWithVersion(archivaLocator, managedRepositoryContent, request.getContextPath()) : getLogicalResource(archivaLocator, managedRepository, false);
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        LogicalResource logicalResource = new LogicalResource(path);
        StorageAsset repoAsset = managedRepository.getAsset(path);
        // Path resourceFile = Paths.get( managedRepositoryContent.getRepoRoot(), path );
        try {
            resource = new ArchivaDavResource(repoAsset, path, managedRepository, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler);
        } catch (LayoutException e) {
            log.error("Incompatible layout: {}", e.getMessage(), e);
            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
        }
        // Reuse the flag computed above instead of re-evaluating the request method.
        if (readMethod) {
            if (archivaLocator.getHref(false).endsWith("/") && !repoAsset.isContainer()) {
                // force a resource not found
                throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
            } else {
                if (!resource.isCollection()) {
                    boolean previouslyExisted = repoAsset.exists();
                    boolean fromProxy = fetchContentFromProxies(managedRepository, request, logicalResource);
                    StorageAsset resourceAsset = null;
                    // legacy layout format.
                    try {
                        // Perform an adjustment of the resource to the managed
                        // repository expected path.
                        // String localResourcePath = managedRepository.getRequestInfo().toNativePath( logicalResource.getPath() );
                        resourceAsset = managedRepository.getAsset(logicalResource.getPath());
                        resource = new ArchivaDavResource(resourceAsset, logicalResource.getPath(), managedRepository, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler);
                    } catch (LayoutException e) {
                        if (resourceAsset == null || !resourceAsset.exists()) {
                            throw new DavException(HttpServletResponse.SC_NOT_FOUND, e);
                        }
                    }
                    if (fromProxy) {
                        String action = (previouslyExisted ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE) + PROXIED_SUFFIX;
                        log.debug("Proxied artifact '{}' in repository '{}' (current user '{}')", resourceAsset.getName(), managedRepositoryContent.getId(), activePrincipal);
                        triggerAuditEvent(request.getRemoteAddr(), archivaLocator.getRepositoryId(), logicalResource.getPath(), action, activePrincipal);
                    }
                    if (!resourceAsset.exists()) {
                        throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Resource does not exist");
                    }
                }
            }
        }
        if (request.getMethod().equals(HTTP_PUT_METHOD)) {
            String resourcePath = logicalResource.getPath();
            RepositoryRequestInfo repositoryRequestInfo = managedRepository.getRequestInfo();
            // we suppose that release-artifacts can be deployed only to repos enabled for releases
            if (managedRepositoryContent.getRepository().getActiveReleaseSchemes().contains(ReleaseScheme.RELEASE) && !repositoryRequestInfo.isMetadata(resourcePath) && !repositoryRequestInfo.isSupportFile(resourcePath)) {
                // ArtifactReference artifact = null;
                Artifact artifact = null;
                try {
                    BaseRepositoryContentLayout layout = managedRepositoryContent.getLayout(BaseRepositoryContentLayout.class);
                    ContentItem artifactItem = managedRepositoryContent.toItem(resourcePath);
                    artifact = layout.adaptItem(Artifact.class, artifactItem);
                    if (!VersionUtil.isSnapshot(artifact.getVersion().getId())) {
                        // check if artifact already exists and if artifact re-deployment to the repository is allowed
                        if (artifactItem.exists() && managedRepositoryContent.getRepository().blocksRedeployments()) {
                            log.warn("Overwriting released artifacts in repository '{}' is not allowed.", managedRepositoryContent.getId());
                            throw new DavException(HttpServletResponse.SC_CONFLICT, "Overwriting released artifacts is not allowed.");
                        }
                    }
                } catch (LayoutException e) {
                    log.warn("Artifact path '{}' is invalid.", resourcePath);
                } catch (ContentAccessException e) {
                    // Log with stack trace instead of printStackTrace(); the deployment
                    // continues as before, only the diagnostics channel changed.
                    log.error("Could not access repository content for path '{}': {}", resourcePath, e.getMessage(), e);
                }
            }
            /*
             * Create parent directories that don't exist when writing a file This actually makes this
             * implementation not compliant to the WebDAV RFC - but we have enough knowledge about how the
             * collection is being used to do this reasonably and some versions of Maven's WebDAV don't correctly
             * create the collections themselves.
             */
            StorageAsset rootDirectory = managedRepositoryContent.getRepository().getRoot();
            StorageAsset destDir = rootDirectory.resolve(logicalResource.getPath()).getParent();
            if (!destDir.exists()) {
                try {
                    destDir.create();
                } catch (IOException e) {
                    log.error("Could not create directory {}: {}", destDir, e.getMessage(), e);
                    throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create directory " + destDir);
                }
                String relPath = PathUtil.getRelative(rootDirectory.getPath(), destDir.getPath());
                log.debug("Creating destination directory '{}' (current user '{}')", destDir.getName(), activePrincipal);
                triggerAuditEvent(request.getRemoteAddr(), managedRepositoryContent.getId(), relPath, AuditEvent.CREATE_DIR, activePrincipal);
            }
        }
    }
    return resource;
}
Example usage of org.apache.archiva.repository.content.ContentAccessException in the Apache Archiva project — class DaysOldRepositoryPurge, method process.
/**
 * Purges snapshot artifacts for the version group of the given path that are older
 * than the configured retention period, always keeping at least
 * {@code retentionCount} artifact versions. Paths that do not resolve to an
 * artifact are skipped silently.
 *
 * @param path the repository-relative path of the artifact that triggered the purge
 * @throws RepositoryPurgeException if the purge operation itself fails
 */
@Override
public void process(String path) throws RepositoryPurgeException {
    try {
        ContentItem item = repository.toItem(path);
        Artifact artifactItem = repository.getLayout(BaseRepositoryContentLayout.class).adaptItem(Artifact.class, item);
        if (!artifactItem.exists()) {
            return;
        }
        // ArtifactReference artifact = repository.toArtifactReference( path );
        // Cut-off date: anything modified before this instant is a purge candidate.
        Calendar olderThanThisDate = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        olderThanThisDate.add(Calendar.DATE, -retentionPeriod);
        ArchivaItemSelector selector = ArchivaItemSelector.builder().withNamespace(artifactItem.getVersion().getProject().getNamespace().getId()).withProjectId(artifactItem.getVersion().getProject().getId()).withVersion(artifactItem.getVersion().getId()).withClassifier("*").includeRelatedArtifacts().build();
        List<String> artifactVersions;
        // try-with-resources: the artifact stream must be closed to release storage handles.
        try (Stream<? extends Artifact> stream = repository.getLayout(BaseRepositoryContentLayout.class).newArtifactStream(selector)) {
            artifactVersions = stream.map(a -> a.getArtifactVersion()).filter(StringUtils::isNotEmpty).distinct().collect(Collectors.toList());
        }
        Collections.sort(artifactVersions, VersionComparator.getInstance());
        if (retentionCount > artifactVersions.size()) {
            // Done. nothing to do here. skip it.
            return;
        }
        int countToPurge = artifactVersions.size() - retentionCount;
        ArchivaItemSelector.Builder artifactSelectorBuilder = ArchivaItemSelector.builder().withNamespace(artifactItem.getVersion().getProject().getNamespace().getId()).withProjectId(artifactItem.getVersion().getProject().getId()).withVersion(artifactItem.getVersion().getId()).withArtifactId(artifactItem.getId()).withClassifier("*").includeRelatedArtifacts();
        Set<Artifact> artifactsToDelete = new HashSet<>();
        // Versions are sorted oldest-first; delete from the front until the quota is met.
        for (String version : artifactVersions) {
            if (countToPurge-- <= 0) {
                break;
            }
            ArchivaItemSelector artifactSelector = artifactSelectorBuilder.withArtifactVersion(version).build();
            try {
                // Is this a generic snapshot "1.0-SNAPSHOT" ?
                if (VersionUtil.isGenericSnapshot(version)) {
                    List<? extends Artifact> artifactList = repository.getLayout(BaseRepositoryContentLayout.class).getArtifacts(artifactSelector);
                    if (artifactList.size() > 0 && artifactList.get(0).getAsset().getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis()) {
                        artifactsToDelete.addAll(artifactList);
                    }
                } else // Is this a timestamp snapshot "1.0-20070822.123456-42" ?
                if (VersionUtil.isUniqueSnapshot(version)) {
                    Calendar timestampCal = uniqueSnapshotToCalendar(version);
                    if (timestampCal.getTimeInMillis() < olderThanThisDate.getTimeInMillis()) {
                        artifactsToDelete.addAll(repository.getLayout(BaseRepositoryContentLayout.class).getArtifacts(artifactSelector));
                    }
                }
            } catch (IllegalArgumentException e) {
                log.error("Bad selector for artifact: {}", e.getMessage(), e);
                // continue
            }
        }
        purge(artifactsToDelete);
    } catch (LayoutException e) {
        log.debug("Not processing file that is not an artifact: {}", e.getMessage());
    } catch (ContentAccessException e) {
        // Log with stack trace instead of printStackTrace(); keep swallowing so one
        // unreadable path does not abort the whole purge run (previous behavior).
        log.error("Could not access repository content for path '{}': {}", path, e.getMessage(), e);
    }
}
Aggregations