use of org.apache.archiva.metadata.repository.MetadataRepositoryException in project archiva by apache.
the class DefaultManagedRepositoryAdmin method updateManagedRepository.
@Override
public Boolean updateManagedRepository(ManagedRepository managedRepository, boolean needStageRepo, AuditInformation auditInformation, boolean resetStats) throws RepositoryAdminException {
    log.debug("updateManagedConfiguration repo {} needStage {} resetStats {} ", managedRepository, needStageRepo, resetStats);
    // Ensure that the fields are valid.
    getRepositoryCommonValidator().basicValidation(managedRepository, true);
    getRepositoryCommonValidator().validateManagedRepository(managedRepository);
    Configuration configuration = getArchivaConfiguration().getConfiguration();
    ManagedRepositoryConfiguration updatedRepoConfig = getRepositoryConfiguration(managedRepository);
    updatedRepoConfig.setStageRepoNeeded(needStageRepo);
    org.apache.archiva.repository.ManagedRepository oldRepo = repositoryRegistry.getManagedRepository(managedRepository.getId());
    boolean stagingExists = false;
    if (oldRepo.supportsFeature(StagingRepositoryFeature.class)) {
        stagingExists = oldRepo.getFeature(StagingRepositoryFeature.class).get().getStagingRepository() != null;
    }
    boolean updateIndexContext = !StringUtils.equals(updatedRepoConfig.getIndexDir(), managedRepository.getIndexDirectory());
    org.apache.archiva.repository.ManagedRepository newRepo;
    // TODO remove content from old if path has changed !!!!!
    try {
        newRepo = repositoryRegistry.putRepository(updatedRepoConfig, configuration);
        if (newRepo.supportsFeature(StagingRepositoryFeature.class)) {
            org.apache.archiva.repository.ManagedRepository stagingRepo = newRepo.getFeature(StagingRepositoryFeature.class).get().getStagingRepository();
            if (stagingRepo != null && !stagingExists) {
                triggerAuditEvent(stagingRepo.getId(), null, AuditEvent.ADD_MANAGED_REPO, auditInformation);
                addRepositoryRoles(stagingRepo.getId());
            }
        }
    } catch (RepositoryException e) {
        log.error("Could not update repository {}: {}", managedRepository.getId(), e.getMessage(), e);
        throw new RepositoryAdminException("Could not update repository " + managedRepository.getId());
    } catch (RoleManagerException e) {
        log.error("Error during role update of stage repo {}", managedRepository.getId(), e);
        throw new RepositoryAdminException("Could not update repository " + managedRepository.getId());
    }
    triggerAuditEvent(managedRepository.getId(), null, AuditEvent.MODIFY_MANAGED_REPO, auditInformation);
    try {
        getArchivaConfiguration().save(configuration);
    } catch (RegistryException | IndeterminateConfigurationException e) {
        log.error("Could not save repository configuration: {}", e.getMessage(), e);
        throw new RepositoryAdminException("Could not save repository configuration: " + e.getMessage());
    }
    // Reset the statistics if requested.
    RepositorySession repositorySession = getRepositorySessionFactory().createSession();
    try {
        if (resetStats) {
            log.debug("call repositoryStatisticsManager.deleteStatistics");
            getRepositoryStatisticsManager().deleteStatistics(repositorySession.getRepository(), managedRepository.getId());
            repositorySession.save();
        }
    } catch (MetadataRepositoryException e) {
        throw new RepositoryAdminException(e.getMessage(), e);
    } finally {
        repositorySession.close();
    }
    if (updateIndexContext) {
        try {
            repositoryRegistry.resetIndexingContext(newRepo);
        } catch (IndexUpdateFailedException e) {
            log.error("Could not reset indexing context for repository {}: {}", newRepo.getId(), e.getMessage(), e);
        }
    }
    return true;
}
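The pattern worth noting here is the try/catch/finally around the RepositorySession: a MetadataRepositoryException from the statistics manager is translated into the admin-layer RepositoryAdminException, and the session is always closed. A minimal sketch of that pattern extracted into a standalone helper (the helper name is illustrative and not part of the Archiva source):

// Illustrative helper (not in the Archiva source): translate metadata persistence
// failures into the admin-layer exception and always close the session.
private void resetStatistics(String repoId) throws RepositoryAdminException {
    RepositorySession repositorySession = getRepositorySessionFactory().createSession();
    try {
        getRepositoryStatisticsManager().deleteStatistics(repositorySession.getRepository(), repoId);
        repositorySession.save();
    } catch (MetadataRepositoryException e) {
        throw new RepositoryAdminException(e.getMessage(), e);
    } finally {
        repositorySession.close();
    }
}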
use of org.apache.archiva.metadata.repository.MetadataRepositoryException in project archiva by apache.
the class ArchivaRepositoryScanningTaskExecutor method executeTask.
@SuppressWarnings("unchecked")
@Override
public void executeTask(RepositoryTask task) throws TaskExecutionException {
try {
// TODO: replace this whole class with the prescribed content scanning service/action
// - scan repository for artifacts that do not have corresponding metadata or have been updated and
// send events for each
// - scan metadata for artifacts that have been removed and send events for each
// - scan metadata for missing plugin data
// - store information so that it can restart upon failure (publish event on the server recovery
// queue, remove it on successful completion)
this.task = task;
String repoId = task.getRepositoryId();
if (StringUtils.isBlank(repoId)) {
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
}
ManagedRepository arepo = repositoryRegistry.getManagedRepository(repoId);
// execute consumers on resource file if set
if (task.getResourceFile() != null) {
log.debug("Executing task from queue with job name: {}", task);
consumers.executeConsumers(arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts());
} else {
log.info("Executing task from queue with job name: {}", task);
// otherwise, execute consumers on whole repository
if (arepo == null) {
throw new TaskExecutionException("Unable to execute RepositoryTask with invalid repository id: " + repoId);
}
long sinceWhen = RepositoryScanner.FRESH_SCAN;
long previousFileCount = 0;
RepositorySession repositorySession = repositorySessionFactory.createSession();
MetadataRepository metadataRepository = repositorySession.getRepository();
try {
if (!task.isScanAll()) {
RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(metadataRepository, repoId);
if (previousStats != null) {
sinceWhen = previousStats.getScanStartTime().getTime();
previousFileCount = previousStats.getTotalFileCount();
}
}
RepositoryScanStatistics stats;
try {
stats = repoScanner.scan(arepo, sinceWhen);
} catch (RepositoryScannerException e) {
throw new TaskExecutionException("Repository error when executing repository job.", e);
}
log.info("Finished first scan: {}", stats.toDump(arepo));
// further statistics will be populated by the following method
Date endTime = new Date(stats.getWhenGathered().getTime() + stats.getDuration());
log.info("Gathering repository statistics");
repositoryStatisticsManager.addStatisticsAfterScan(metadataRepository, repoId, stats.getWhenGathered(), endTime, stats.getTotalFileCount(), stats.getTotalFileCount() - previousFileCount);
repositorySession.save();
} catch (MetadataRepositoryException e) {
throw new TaskExecutionException("Unable to store updated statistics: " + e.getMessage(), e);
} finally {
repositorySession.close();
}
// log.info( "Scanning for removed repository content" );
// metadataRepository.findAllProjects();
// FIXME: do something
log.info("Finished repository task: {}", task);
this.task = null;
}
} catch (RepositoryAdminException e) {
log.error(e.getMessage(), e);
throw new TaskExecutionException(e.getMessage(), e);
}
}
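In this executor, MetadataRepositoryException marks a failed statistics write and is rethrown as a TaskExecutionException so the scan task fails visibly instead of completing with missing statistics. A minimal sketch of that step in isolation, with a hypothetical helper name (not part of the project):

// Illustrative sketch only: the statistics-update step above, extracted into a
// hypothetical helper that performs the same exception translation.
private void storeScanStatistics(MetadataRepository metadataRepository, String repoId, RepositoryScanStatistics stats, long previousFileCount, RepositorySession repositorySession) throws TaskExecutionException {
    try {
        Date endTime = new Date(stats.getWhenGathered().getTime() + stats.getDuration());
        repositoryStatisticsManager.addStatisticsAfterScan(metadataRepository, repoId, stats.getWhenGathered(), endTime, stats.getTotalFileCount(), stats.getTotalFileCount() - previousFileCount);
        repositorySession.save();
    } catch (MetadataRepositoryException e) {
        // a failed statistics write fails the task instead of being swallowed
        throw new TaskExecutionException("Unable to store updated statistics: " + e.getMessage(), e);
    }
}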
use of org.apache.archiva.metadata.repository.MetadataRepositoryException in project archiva by apache.
the class CassandraMetadataRepository method getOrCreateRepository.
/**
 * Returns the repository with the given id; if the repository doesn't exist it will be created.
 *
 * @param repositoryId the repository id
 * @return the existing or newly created repository
 */
public Repository getOrCreateRepository(String repositoryId) throws MetadataRepositoryException {
    String cf = cassandraArchivaManager.getRepositoryFamilyName();
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery(keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get())
            .setColumnFamily(cf)
            .setColumnNames(REPOSITORY_NAME.toString())
            .addEqualsExpression(REPOSITORY_NAME.toString(), repositoryId)
            .execute();
    if (result.get().getCount() < 1) {
        // we need to create the repository
        Repository repository = new Repository(repositoryId);
        try {
            MutationResult mutationResult =
                HFactory.createMutator(keyspace, StringSerializer.get())
                    .addInsertion(repositoryId, cf, CassandraUtils.column(REPOSITORY_NAME.toString(), repository.getName()))
                    .execute();
            logger.debug("time to insert repository: {}", mutationResult.getExecutionTimeMicro());
            return repository;
        } catch (HInvalidRequestException e) {
            logger.error(e.getMessage(), e);
            throw new MetadataRepositoryException(e.getMessage(), e);
        }
    }
    return new Repository(result.get().getList().get(0).getColumnSlice().getColumnByName(REPOSITORY_NAME.toString()).getValue());
}
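A minimal caller-side sketch, assuming a repositoryId variable in scope: a rejected Hector request surfaces to the caller as the checked MetadataRepositoryException.

// Hypothetical call site inside CassandraMetadataRepository (for illustration only).
try {
    Repository repository = getOrCreateRepository(repositoryId);
    logger.debug("using repository {}", repository.getName());
} catch (MetadataRepositoryException e) {
    logger.error("could not resolve repository {}: {}", repositoryId, e.getMessage(), e);
    throw e;
}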
use of org.apache.archiva.metadata.repository.MetadataRepositoryException in project archiva by apache.
the class CassandraMetadataRepository method getRepositories.
@Override
public Collection<String> getRepositories() throws MetadataRepositoryException {
    try {
        logger.debug("getRepositories");
        final QueryResult<OrderedRows<String, String, String>> cResult =
            HFactory.createRangeSlicesQuery(cassandraArchivaManager.getKeyspace(), ss, ss, ss)
                .setColumnFamily(cassandraArchivaManager.getRepositoryFamilyName())
                .setColumnNames(REPOSITORY_NAME.toString())
                .setRange(null, null, false, Integer.MAX_VALUE)
                .execute();
        List<String> repoIds = new ArrayList<>(cResult.get().getCount());
        for (Row<String, String, String> row : cResult.get()) {
            repoIds.add(getStringValue(row.getColumnSlice(), REPOSITORY_NAME.toString()));
        }
        return repoIds;
    } catch (PersistenceException e) {
        throw new MetadataRepositoryException(e.getMessage(), e);
    }
}
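A short usage sketch, illustrative rather than taken from the project: iterate the repository ids and let the wrapped PersistenceException surface as MetadataRepositoryException.

// Illustrative usage of getRepositories(); the error handling is a sketch, not project code.
try {
    for (String repoId : getRepositories()) {
        logger.info("known repository: {}", repoId);
    }
} catch (MetadataRepositoryException e) {
    logger.error("could not list repositories: {}", e.getMessage(), e);
}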
use of org.apache.archiva.metadata.repository.MetadataRepositoryException in project archiva by apache.
the class CassandraMetadataRepository method updateOrAddNamespace.
private Namespace updateOrAddNamespace(String repositoryId, String namespaceId) throws MetadataRepositoryException {
    try {
        Repository repository = getOrCreateRepository(repositoryId);
        String key = new Namespace.KeyBuilder().withNamespace(namespaceId).withRepositoryId(repositoryId).build();
        Namespace namespace = getNamespace(repositoryId, namespaceId);
        if (namespace == null) {
            String cf = cassandraArchivaManager.getNamespaceFamilyName();
            namespace = new Namespace(namespaceId, repository);
            HFactory.createMutator(keyspace, StringSerializer.get())
                .addInsertion(key, cf, CassandraUtils.column(NAME.toString(), namespace.getName()))
                .addInsertion(key, cf, CassandraUtils.column(REPOSITORY_NAME.toString(), repository.getName()))
                .execute();
        }
        return namespace;
    } catch (HInvalidRequestException e) {
        logger.error(e.getMessage(), e);
        throw new MetadataRepositoryException(e.getMessage(), e);
    }
}
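The method behaves like an idempotent upsert keyed by repository and namespace. A minimal sketch of that assumption, with illustrative ids and calls made from inside CassandraMetadataRepository since the method is private:

// Illustrative sketch, assuming an "internal" repository id and a sample namespace:
// repeated calls resolve the same row keyed by Namespace.KeyBuilder, so the second
// call reads the existing namespace instead of inserting a new one.
Namespace first = updateOrAddNamespace("internal", "org.apache.archiva");
Namespace second = updateOrAddNamespace("internal", "org.apache.archiva");
assert first.getName().equals(second.getName());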