Use of org.apache.archiva.admin.model.beans.ManagedRepository in project Archiva by Apache.
From the class ManagedRepositoryAdminTest, method getById():
@Test
public void getById() throws Exception {
    mockAuditListener.clearEvents();
    ManagedRepository repo = managedRepositoryAdmin.getManagedRepository("internal");
    assertNotNull(repo);
    mockAuditListener.clearEvents();
}
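For reference, the same admin bean also exposes the listing call used in the tests further down. The fragment below is a minimal, hypothetical usage combining getManagedRepositories() with accessors exercised elsewhere on this page; it assumes a managedRepositoryAdmin instance wired the same way as in this test class.

// Sketch: enumerate the configured repositories and look one up by id.
// Assumes managedRepositoryAdmin is injected as in the test class above.
for (ManagedRepository managedRepository : managedRepositoryAdmin.getManagedRepositories()) {
    System.out.println(managedRepository.getId() + " -> " + managedRepository.getLocation());
}
ManagedRepository internal = managedRepositoryAdmin.getManagedRepository("internal");
if (internal != null) {
    System.out.println("internal repository display name: " + internal.getName());
}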
Use of org.apache.archiva.admin.model.beans.ManagedRepository in project Archiva by Apache.
From the class ManagedRepositoryAdminTest, method updateDeleteManagedRepo():
@Test
public void updateDeleteManagedRepo() throws Exception {
    Path repoDir = clearRepoLocation(repoLocation);
    mockAuditListener.clearEvents();
    List<ManagedRepository> repos = managedRepositoryAdmin.getManagedRepositories();
    assertNotNull(repos);
    int initialSize = repos.size();
    assertTrue(initialSize > 0);

    ManagedRepository repo = new ManagedRepository(Locale.getDefault());
    repo.setId(repoId);
    repo.setName("test repo");
    repo.setLocation(repoLocation);
    repo.setCronExpression("0 0 * * * ?");
    managedRepositoryAdmin.addManagedRepository(repo, false, getFakeAuditInformation());
    assertTemplateRoleExists(repoId);

    repos = managedRepositoryAdmin.getManagedRepositories();
    assertNotNull(repos);
    assertEquals(initialSize + 1, repos.size());

    String newName = "test repo update";
    repo.setName(newName);
    String description = "so great repository";
    repo.setDescription(description);
    repo.setLocation(repoLocation);
    repo.setCronExpression("0 0 * * * ?");
    repo.setSkipPackedIndexCreation(true);
    managedRepositoryAdmin.updateManagedRepository(repo, false, getFakeAuditInformation(), false);

    repo = managedRepositoryAdmin.getManagedRepository(repoId);
    assertNotNull(repo);
    assertEquals(newName, repo.getName());
    assertEquals(Paths.get(repoLocation).normalize(), Paths.get(repo.getLocation()).normalize());
    assertTrue(Files.exists(Paths.get(repoLocation)));
    assertEquals(description, repo.getDescription());
    assertTrue(repo.isSkipPackedIndexCreation());
    assertTemplateRoleExists(repoId);

    managedRepositoryAdmin.deleteManagedRepository(repo.getId(), getFakeAuditInformation(), false);
    // check deleteContents false
    assertTrue(Files.exists(repoDir));
    assertTemplateRoleNotExists(repoId);
    assertAuditListenerCallAndUpdateAddAndDelete(false);
    mockAuditListener.clearEvents();
}
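Read together, the test above exercises the full add/update/delete lifecycle of a managed repository. A condensed sketch of that lifecycle, using only calls that appear in the test, is shown below; the repository id and location are hypothetical, and the meaning attributed to the boolean flags is an inference from the assertions, not documented behaviour.

// Minimal lifecycle sketch, written as if inside the same test class (a method declared
// "throws Exception") so that managedRepositoryAdmin and getFakeAuditInformation() are available.
// The second argument to addManagedRepository (false here) appears to control creation of a
// staging companion, and the last argument to deleteManagedRepository appears to be
// deleteContents -- both are assumptions inferred from the assertions in the tests on this page.
ManagedRepository newRepo = new ManagedRepository(Locale.getDefault());
newRepo.setId("sketch-repo");                // hypothetical id
newRepo.setName("sketch repository");
newRepo.setLocation("/tmp/sketch-repo");     // hypothetical location
newRepo.setCronExpression("0 0 * * * ?");
managedRepositoryAdmin.addManagedRepository(newRepo, false, getFakeAuditInformation());

// ... later: remove it and, this time, also delete its contents on disk
managedRepositoryAdmin.deleteManagedRepository("sketch-repo", getFakeAuditInformation(), true);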
Use of org.apache.archiva.admin.model.beans.ManagedRepository in project Archiva by Apache.
From the class ManagedRepositoryAdminTest, method updateDeleteManagedRepoWithStagedRepo():
@Test
public void updateDeleteManagedRepoWithStagedRepo() throws Exception {
    String stageRepoLocation = Paths.get(APPSERVER_BASE_PATH, repoId).toString();
    Path repoDir = clearRepoLocation(repoLocation);
    clearRepoLocation(repoLocation + STAGE_REPO_ID_END);
    mockAuditListener.clearEvents();

    List<ManagedRepository> repos = managedRepositoryAdmin.getManagedRepositories();
    assertNotNull(repos);
    int initialSize = repos.size();
    assertTrue(initialSize > 0);

    ManagedRepository repo = getTestManagedRepository(repoId, repoLocation);
    managedRepositoryAdmin.addManagedRepository(repo, false, getFakeAuditInformation());
    assertTemplateRoleExists(repoId);
    assertFalse(Files.exists(Paths.get(repoLocation + STAGE_REPO_ID_END)));
    assertTemplateRoleNotExists(repoId + STAGE_REPO_ID_END);

    repos = managedRepositoryAdmin.getManagedRepositories();
    assertNotNull(repos);
    assertEquals(initialSize + 1, repos.size());

    repo = managedRepositoryAdmin.getManagedRepository(repoId);
    assertEquals(getTestManagedRepository(repoId, repoLocation).getIndexDirectory(), repo.getIndexDirectory());

    String newName = "test repo update";
    repo.setName(newName);
    repo.setLocation(repoLocation);
    managedRepositoryAdmin.updateManagedRepository(repo, true, getFakeAuditInformation(), false);

    repo = managedRepositoryAdmin.getManagedRepository(repoId);
    System.err.println("REPOSITORY " + repo.getLocation());
    assertNotNull(repo);
    assertEquals(newName, repo.getName());
    assertEquals(Paths.get(repoLocation).toAbsolutePath(), Paths.get(repo.getLocation()).toAbsolutePath());
    assertTrue(Files.exists(Paths.get(repoLocation)));
    assertEquals(getTestManagedRepository(repoId, repoLocation).getCronExpression(), repo.getCronExpression());
    assertEquals(getTestManagedRepository(repoId, repoLocation).getLayout(), repo.getLayout());
    assertEquals(getTestManagedRepository(repoId, repoLocation).getId(), repo.getId());
    assertEquals(getTestManagedRepository(repoId, repoLocation).getIndexDirectory(), repo.getIndexDirectory());
    assertEquals(getTestManagedRepository(repoId, repoLocation).getRetentionPeriod(), repo.getRetentionPeriod());
    assertEquals(getTestManagedRepository(repoId, repoLocation).getRetentionCount(), repo.getRetentionCount());
    assertEquals(getTestManagedRepository(repoId, repoLocation).isDeleteReleasedSnapshots(), repo.isDeleteReleasedSnapshots());
    assertTemplateRoleExists(repoId);
    assertTrue(Files.exists(Paths.get(stageRepoLocation + STAGE_REPO_ID_END)));
    assertTemplateRoleExists(repoId + STAGE_REPO_ID_END);

    managedRepositoryAdmin.deleteManagedRepository(repo.getId(), getFakeAuditInformation(), false);
    // check deleteContents false
    assertTrue(Files.exists(repoDir));
    assertTemplateRoleNotExists(repoId);
    assertTrue(Files.exists(Paths.get(stageRepoLocation + STAGE_REPO_ID_END)));
    assertTemplateRoleNotExists(repoId + STAGE_REPO_ID_END);
    assertAuditListenerCallAndUpdateAddAndDelete(true);
    mockAuditListener.clearEvents();

    FileUtils.deleteQuietly(Paths.get(repoLocation + STAGE_REPO_ID_END));
    assertFalse(Files.exists(Paths.get(repoLocation + STAGE_REPO_ID_END)));
}
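The staged-repository variant differs from the previous test only in the boolean passed to addManagedRepository/updateManagedRepository and in the extra assertions on the "-stage" companion. The fragment below condenses that reading; it reuses the test's own helpers (getTestManagedRepository, getFakeAuditInformation), the id and path are hypothetical, and the interpretation of the flag is inferred from the assertions rather than stated by the API.

// Sketch: requesting a staging companion when registering a repository.
// The test above passes false on add (no "-stage" directory or role is created) and true on
// update (the "-stage" directory and role then exist), so the second boolean is read here as
// "create a staging companion" -- an inference, not documented behaviour.
ManagedRepository stagedRepo = getTestManagedRepository("staged-sketch", "/tmp/staged-sketch"); // hypothetical values
managedRepositoryAdmin.addManagedRepository(stagedRepo, true, getFakeAuditInformation());
// expected effect, per the assertions above: a companion repository whose id ends with
// STAGE_REPO_ID_END exists alongside "staged-sketch", with its own template role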
Use of org.apache.archiva.admin.model.beans.ManagedRepository in project Archiva by Apache.
From the class DefaultRepositoriesService, method deleteArtifact():
@Override
public Boolean deleteArtifact(Artifact artifact) throws ArchivaRestServiceException {
    if (artifact == null) {
        throw new ArchivaRestServiceException("artifact cannot be null", 400, null);
    }
    // some requests carry the repository id in the context, others in repositoryId, so try both
    String repositoryId = artifact.getContext();
    if (StringUtils.isEmpty(repositoryId)) {
        repositoryId = artifact.getRepositoryId();
    }
    if (StringUtils.isEmpty(repositoryId)) {
        throw new ArchivaRestServiceException("repositoryId cannot be null", 400, null);
    }
    if (!isAuthorizedToDeleteArtifacts(repositoryId)) {
        throw new ArchivaRestServiceException("not authorized to delete artifacts", 403, null);
    }
    if (StringUtils.isEmpty(artifact.getGroupId())) {
        throw new ArchivaRestServiceException("artifact.groupId cannot be null", 400, null);
    }
    if (StringUtils.isEmpty(artifact.getArtifactId())) {
        throw new ArchivaRestServiceException("artifact.artifactId cannot be null", 400, null);
    }
    // TODO more control on artifact fields
    boolean snapshotVersion = VersionUtil.isSnapshot(artifact.getVersion()) || VersionUtil.isGenericSnapshot(artifact.getVersion());
    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try {
        Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
        TimeZone timezone = TimeZone.getTimeZone("UTC");
        DateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
        fmt.setTimeZone(timezone);
        ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository(repositoryId);

        VersionedReference ref = new VersionedReference();
        ref.setArtifactId(artifact.getArtifactId());
        ref.setGroupId(artifact.getGroupId());
        ref.setVersion(artifact.getVersion());

        ManagedRepositoryContent repository = getManagedRepositoryContent(repositoryId);

        ArtifactReference artifactReference = new ArtifactReference();
        artifactReference.setArtifactId(artifact.getArtifactId());
        artifactReference.setGroupId(artifact.getGroupId());
        artifactReference.setVersion(artifact.getVersion());
        artifactReference.setClassifier(artifact.getClassifier());
        artifactReference.setType(artifact.getPackaging());

        MetadataRepository metadataRepository = repositorySession.getRepository();

        String path = repository.toMetadataPath(ref);
        if (StringUtils.isNotBlank(artifact.getClassifier())) {
            if (StringUtils.isBlank(artifact.getPackaging())) {
                throw new ArchivaRestServiceException("You must configure a type/packaging when using classifier", 400, null);
            }
            repository.deleteArtifact(artifactReference);
        } else {
            int index = path.lastIndexOf('/');
            path = path.substring(0, index);
            Path targetPath = Paths.get(repoConfig.getLocation(), path);
            if (!Files.exists(targetPath)) {
                // throw new ContentNotFoundException(
                //     artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion() );
                log.warn("targetPath {} not found skip file deletion", targetPath);
            }
            // delete from file system
            if (!snapshotVersion) {
                repository.deleteVersion(ref);
            } else {
                Set<ArtifactReference> related = repository.getRelatedArtifacts(artifactReference);
                log.debug("related: {}", related);
                for (ArtifactReference artifactRef : related) {
                    repository.deleteArtifact(artifactRef);
                }
            }
            Path metadataFile = getMetadata(targetPath.toAbsolutePath().toString());
            ArchivaRepositoryMetadata metadata = getMetadata(metadataFile);
            updateMetadata(metadata, metadataFile, lastUpdatedTimestamp, artifact);
        }

        Collection<ArtifactMetadata> artifacts = Collections.emptyList();
        if (snapshotVersion) {
            String baseVersion = VersionUtil.getBaseVersion(artifact.getVersion());
            artifacts = metadataRepository.getArtifacts(repositoryId, artifact.getGroupId(), artifact.getArtifactId(), baseVersion);
        } else {
            artifacts = metadataRepository.getArtifacts(repositoryId, artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
        }
        log.debug("artifacts: {}", artifacts);

        if (artifacts.isEmpty()) {
            if (!snapshotVersion) {
                // verify the metadata repository no longer contains this version
                Collection<String> projectVersions = metadataRepository.getProjectVersions(repositoryId, artifact.getGroupId(), artifact.getArtifactId());
                if (projectVersions.contains(artifact.getVersion())) {
                    log.warn("artifact not found when deleted but version still here ! so force cleanup");
                    metadataRepository.removeProjectVersion(repositoryId, artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
                }
            }
        }

        for (ArtifactMetadata artifactMetadata : artifacts) {
            // TODO: mismatch between artifact (snapshot) version and project (base) version here
            if (artifactMetadata.getVersion().equals(artifact.getVersion())) {
                if (StringUtils.isNotBlank(artifact.getClassifier())) {
                    if (StringUtils.isBlank(artifact.getPackaging())) {
                        throw new ArchivaRestServiceException("You must configure a type/packaging when using classifier", 400, null);
                    }
                    // cleanup facet which contains classifier information
                    MavenArtifactFacet mavenArtifactFacet = (MavenArtifactFacet) artifactMetadata.getFacet(MavenArtifactFacet.FACET_ID);
                    if (StringUtils.equals(artifact.getClassifier(), mavenArtifactFacet.getClassifier())) {
                        artifactMetadata.removeFacet(MavenArtifactFacet.FACET_ID);
                        String groupId = artifact.getGroupId(), artifactId = artifact.getArtifactId(), version = artifact.getVersion();
                        MavenArtifactFacet mavenArtifactFacetToCompare = new MavenArtifactFacet();
                        mavenArtifactFacetToCompare.setClassifier(artifact.getClassifier());
                        metadataRepository.removeArtifact(repositoryId, groupId, artifactId, version, mavenArtifactFacetToCompare);
                        metadataRepository.save();
                    }
                } else {
                    if (snapshotVersion) {
                        metadataRepository.removeArtifact(artifactMetadata, VersionUtil.getBaseVersion(artifact.getVersion()));
                    } else {
                        metadataRepository.removeArtifact(artifactMetadata.getRepositoryId(), artifactMetadata.getNamespace(), artifactMetadata.getProject(), artifact.getVersion(), artifactMetadata.getId());
                    }
                }
                // notify the registered repository listeners of the deletion
                for (RepositoryListener listener : listeners) {
                    listener.deleteArtifact(metadataRepository, repository.getId(), artifactMetadata.getNamespace(), artifactMetadata.getProject(), artifactMetadata.getVersion(), artifactMetadata.getId());
                }
                triggerAuditEvent(repositoryId, path, AuditEvent.REMOVE_FILE);
            }
        }
    } catch (ContentNotFoundException e) {
        throw new ArchivaRestServiceException("Artifact does not exist: " + e.getMessage(), 400, e);
    } catch (RepositoryNotFoundException e) {
        throw new ArchivaRestServiceException("Target repository cannot be found: " + e.getMessage(), 400, e);
    } catch (RepositoryException e) {
        throw new ArchivaRestServiceException("Repository exception: " + e.getMessage(), 500, e);
    } catch (MetadataResolutionException e) {
        throw new ArchivaRestServiceException("Repository exception: " + e.getMessage(), 500, e);
    } catch (MetadataRepositoryException e) {
        throw new ArchivaRestServiceException("Repository exception: " + e.getMessage(), 500, e);
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(), 500, e);
    } finally {
        repositorySession.save();
        repositorySession.close();
    }
    return Boolean.TRUE;
}
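The validation at the top of deleteArtifact (resolve the repository id from either the request context or the explicit field, then require the coordinates) could be factored into a small helper. The sketch below is illustrative only: the helper name resolveRepositoryId is hypothetical, and it reuses only the Artifact accessors and the ArchivaRestServiceException constructor already visible in the method above.

// Hypothetical helper mirroring the checks performed at the start of deleteArtifact above.
private String resolveRepositoryId(Artifact artifact) throws ArchivaRestServiceException {
    if (artifact == null) {
        throw new ArchivaRestServiceException("artifact cannot be null", 400, null);
    }
    // the REST layer may carry the repository id in the context or in the dedicated field
    String repositoryId = artifact.getContext();
    if (StringUtils.isEmpty(repositoryId)) {
        repositoryId = artifact.getRepositoryId();
    }
    if (StringUtils.isEmpty(repositoryId)) {
        throw new ArchivaRestServiceException("repositoryId cannot be null", 400, null);
    }
    return repositoryId;
}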
Use of org.apache.archiva.admin.model.beans.ManagedRepository in project Archiva by Apache.
From the class DefaultRepositoriesService, method copyArtifact():
@Override
public Boolean copyArtifact(ArtifactTransferRequest artifactTransferRequest) throws ArchivaRestServiceException {
    // check parameters
    String userName = getAuditInformation().getUser().getUsername();
    if (StringUtils.isBlank(userName)) {
        throw new ArchivaRestServiceException("copyArtifact call: userName not found", null);
    }
    if (StringUtils.isBlank(artifactTransferRequest.getRepositoryId())) {
        throw new ArchivaRestServiceException("copyArtifact call: sourceRepositoryId cannot be null", null);
    }
    if (StringUtils.isBlank(artifactTransferRequest.getTargetRepositoryId())) {
        throw new ArchivaRestServiceException("copyArtifact call: targetRepositoryId cannot be null", null);
    }
    ManagedRepository source = null;
    try {
        source = managedRepositoryAdmin.getManagedRepository(artifactTransferRequest.getRepositoryId());
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
    if (source == null) {
        throw new ArchivaRestServiceException("cannot find repository with id " + artifactTransferRequest.getRepositoryId(), null);
    }
    ManagedRepository target = null;
    try {
        target = managedRepositoryAdmin.getManagedRepository(artifactTransferRequest.getTargetRepositoryId());
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
    if (target == null) {
        throw new ArchivaRestServiceException("cannot find repository with id " + artifactTransferRequest.getTargetRepositoryId(), null);
    }
    if (StringUtils.isBlank(artifactTransferRequest.getGroupId())) {
        throw new ArchivaRestServiceException("groupId is mandatory", null);
    }
    if (StringUtils.isBlank(artifactTransferRequest.getArtifactId())) {
        throw new ArchivaRestServiceException("artifactId is mandatory", null);
    }
    if (StringUtils.isBlank(artifactTransferRequest.getVersion())) {
        throw new ArchivaRestServiceException("version is mandatory", null);
    }
    if (VersionUtil.isSnapshot(artifactTransferRequest.getVersion())) {
        throw new ArchivaRestServiceException("copy of SNAPSHOT not supported", null);
    }
    // end check parameters

    User user = null;
    try {
        user = securitySystem.getUserManager().findUser(userName);
    } catch (UserNotFoundException e) {
        throw new ArchivaRestServiceException("user " + userName + " not found", e);
    } catch (UserManagerException e) {
        throw new ArchivaRestServiceException("ArchivaRestServiceException:" + e.getMessage(), e);
    }

    // check karma on source: read
    AuthenticationResult authn = new AuthenticationResult(true, userName, null);
    SecuritySession securitySession = new DefaultSecuritySession(authn, user);
    try {
        boolean authz = securitySystem.isAuthorized(securitySession, ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS, artifactTransferRequest.getRepositoryId());
        if (!authz) {
            throw new ArchivaRestServiceException("not authorized to access repo:" + artifactTransferRequest.getRepositoryId(), null);
        }
    } catch (AuthorizationException e) {
        log.error("error reading permission: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    // check karma on target: write
    try {
        boolean authz = securitySystem.isAuthorized(securitySession, ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD, artifactTransferRequest.getTargetRepositoryId());
        if (!authz) {
            throw new ArchivaRestServiceException("not authorized to write to repo:" + artifactTransferRequest.getTargetRepositoryId(), null);
        }
    } catch (AuthorizationException e) {
        log.error("error reading permission: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    // sounds good, we can continue!
    ArtifactReference artifactReference = new ArtifactReference();
    artifactReference.setArtifactId(artifactTransferRequest.getArtifactId());
    artifactReference.setGroupId(artifactTransferRequest.getGroupId());
    artifactReference.setVersion(artifactTransferRequest.getVersion());
    artifactReference.setClassifier(artifactTransferRequest.getClassifier());
    String packaging = StringUtils.trim(artifactTransferRequest.getPackaging());
    artifactReference.setType(StringUtils.isEmpty(packaging) ? "jar" : packaging);

    try {
        ManagedRepositoryContent sourceRepository = getManagedRepositoryContent(artifactTransferRequest.getRepositoryId());
        String artifactSourcePath = sourceRepository.toPath(artifactReference);
        if (StringUtils.isEmpty(artifactSourcePath)) {
            log.error("cannot find artifact {}", artifactTransferRequest);
            throw new ArchivaRestServiceException("cannot find artifact " + artifactTransferRequest.toString(), null);
        }
        Path artifactFile = Paths.get(source.getLocation(), artifactSourcePath);
        if (!Files.exists(artifactFile)) {
            log.error("cannot find artifact {}", artifactTransferRequest);
            throw new ArchivaRestServiceException("cannot find artifact " + artifactTransferRequest.toString(), null);
        }
        ManagedRepositoryContent targetRepository = getManagedRepositoryContent(artifactTransferRequest.getTargetRepositoryId());
        String artifactPath = targetRepository.toPath(artifactReference);
        int lastIndex = artifactPath.lastIndexOf('/');
        String path = artifactPath.substring(0, lastIndex);
        Path targetPath = Paths.get(target.getLocation(), path);

        Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
        int newBuildNumber = 1;
        String timestamp = null;

        Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
        getMetadata(versionMetadataFile); /* return value unused */

        if (!Files.exists(targetPath)) {
            Files.createDirectories(targetPath);
        }

        String filename = artifactPath.substring(lastIndex + 1);
        // only fix checksums here if the create-missing-checksums consumer will not do it
        boolean fixChecksums = !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
        Path targetFile = targetPath.resolve(filename);
        if (Files.exists(targetFile) && target.isBlockRedeployments()) {
            throw new ArchivaRestServiceException("artifact already exists in target repo: " + artifactTransferRequest.getTargetRepositoryId() + " and redeployment blocked", null);
        } else {
            copyFile(artifactFile, targetPath, filename, fixChecksums);
            queueRepositoryTask(target.getId(), targetFile);
        }

        // copy source pom to target repo
        String pomFilename = filename;
        if (StringUtils.isNotBlank(artifactTransferRequest.getClassifier())) {
            pomFilename = StringUtils.remove(pomFilename, "-" + artifactTransferRequest.getClassifier());
        }
        pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
        Path pomFile = Paths.get(source.getLocation(), artifactSourcePath.substring(0, artifactPath.lastIndexOf('/')), pomFilename);
        if (Files.exists(pomFile) && Files.size(pomFile) > 0) {
            copyFile(pomFile, targetPath, pomFilename, fixChecksums);
            queueRepositoryTask(target.getId(), targetPath.resolve(pomFilename));
        }

        // explicitly update only if the metadata-updater consumer is not enabled!
        if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
            updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber, fixChecksums, artifactTransferRequest);
        }

        String msg = "Artifact '" + artifactTransferRequest.getGroupId() + ":" + artifactTransferRequest.getArtifactId() + ":" + artifactTransferRequest.getVersion() + "' was successfully deployed to repository '" + artifactTransferRequest.getTargetRepositoryId() + "'";
        log.debug("copyArtifact {}", msg);
    } catch (RepositoryException e) {
        log.error("RepositoryException: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    } catch (RepositoryAdminException e) {
        log.error("RepositoryAdminException: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    } catch (IOException e) {
        log.error("IOException: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
    return true;
}
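The two permission checks in copyArtifact follow the same pattern: build a SecuritySession for the calling user and ask the security system whether a specific operation is allowed on a specific repository. A possible refactoring of that pattern is sketched below; the helper name checkRepositoryPermission is hypothetical, but every call and constant it relies on (DefaultSecuritySession, securitySystem.isAuthorized, ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS / OPERATION_REPOSITORY_UPLOAD) already appears in the method above.

// Hypothetical helper extracting the repeated authorization pattern from copyArtifact above.
private void checkRepositoryPermission(SecuritySession securitySession, String operation, String repositoryId)
    throws ArchivaRestServiceException {
    try {
        if (!securitySystem.isAuthorized(securitySession, operation, repositoryId)) {
            throw new ArchivaRestServiceException("not authorized for " + operation + " on repo:" + repositoryId, null);
        }
    } catch (AuthorizationException e) {
        log.error("error reading permission: {}", e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
}

// Usage, mirroring the source/target checks above:
// checkRepositoryPermission(securitySession, ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS, artifactTransferRequest.getRepositoryId());
// checkRepositoryPermission(securitySession, ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD, artifactTransferRequest.getTargetRepositoryId());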