Use of org.apache.archiva.indexer.IndexUpdateFailedException in project archiva by apache.
The class ArchivaIndexManagerMock, method update.
@Override
public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException {
    log.info("Start download of remote index for remote repository {}", context.getRepository().getId());
    URI remoteUpdateUri;
    if (!(context.getRepository() instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class))) {
        throw new IndexUpdateFailedException("The context is not associated to a remote repository with remote index " + context.getId());
    } else {
        RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
        remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
    }
    final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository();
    executeUpdateFunction(context, indexingContext -> {
        try {
            // Create a temp directory to download the index files into
            Path tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex");
            Path indexCacheDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".indexCache");
            Files.createDirectories(indexCacheDirectory);
            if (Files.exists(tempIndexDirectory)) {
                org.apache.archiva.common.utils.FileUtils.deleteDirectory(tempIndexDirectory);
            }
            Files.createDirectories(tempIndexDirectory);
            tempIndexDirectory.toFile().deleteOnExit();
            String baseIndexUrl = indexingContext.getIndexUpdateUrl();
            String wagonProtocol = remoteUpdateUri.toURL().getProtocol();
            NetworkProxy networkProxy = null;
            if (remoteRepository.supportsFeature(RemoteIndexFeature.class)) {
                RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get();
                if (StringUtils.isNotBlank(rif.getProxyId())) {
                    try {
                        networkProxy = networkProxyAdmin.getNetworkProxy(rif.getProxyId());
                    } catch (RepositoryAdminException e) {
                        log.error("Error occurred while retrieving proxy {}", e.getMessage());
                    }
                    if (networkProxy == null) {
                        log.warn("The remote repository is configured to download the remote index through a proxy, but the proxy with id {} cannot be found", rif.getProxyId());
                    }
                }
                final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(new WagonFactoryRequest(wagonProtocol, remoteRepository.getExtraHeaders()).networkProxy(networkProxy));
                int readTimeout = (int) rif.getDownloadTimeout().toMillis() * 1000;
                wagon.setReadTimeout(readTimeout);
                wagon.setTimeout((int) remoteRepository.getTimeout().toMillis() * 1000);
                if (wagon instanceof AbstractHttpClientWagon) {
                    HttpConfiguration httpConfiguration = new HttpConfiguration();
                    HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration();
                    httpMethodConfiguration.setUsePreemptive(true);
                    httpMethodConfiguration.setReadTimeout(readTimeout);
                    httpConfiguration.setGet(httpMethodConfiguration);
                    AbstractHttpClientWagon.class.cast(wagon).setHttpConfiguration(httpConfiguration);
                }
                wagon.addTransferListener(new DownloadListener());
                ProxyInfo proxyInfo = null;
                if (networkProxy != null) {
                    proxyInfo = new ProxyInfo();
                    proxyInfo.setType(networkProxy.getProtocol());
                    proxyInfo.setHost(networkProxy.getHost());
                    proxyInfo.setPort(networkProxy.getPort());
                    proxyInfo.setUserName(networkProxy.getUsername());
                    proxyInfo.setPassword(networkProxy.getPassword());
                }
                AuthenticationInfo authenticationInfo = null;
                if (remoteRepository.getLoginCredentials() != null && (remoteRepository.getLoginCredentials() instanceof PasswordCredentials)) {
                    PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials();
                    authenticationInfo = new AuthenticationInfo();
                    authenticationInfo.setUserName(creds.getUsername());
                    authenticationInfo.setPassword(new String(creds.getPassword()));
                }
                wagon.connect(new org.apache.maven.wagon.repository.Repository(remoteRepository.getId(), baseIndexUrl), authenticationInfo, proxyInfo);
                Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath();
                if (!Files.exists(indexDirectory)) {
                    Files.createDirectories(indexDirectory);
                }
                ResourceFetcher resourceFetcher = new WagonResourceFetcher(log, tempIndexDirectory, wagon, remoteRepository);
                IndexUpdateRequest request = new IndexUpdateRequest(indexingContext, resourceFetcher);
                request.setForceFullUpdate(fullUpdate);
                request.setLocalIndexCacheDir(indexCacheDirectory.toFile());
                // The mock skips the actual fetch:
                // indexUpdater.fetchAndUpdateIndex( request );
                indexingContext.updateTimestamp(true);
            }
        } catch (AuthenticationException e) {
            log.error("Could not login to the remote proxy for updating index of {}", remoteRepository.getId(), e);
            throw new IndexUpdateFailedException("Login to proxy failed while updating remote repository " + remoteRepository.getId(), e);
        } catch (ConnectionException e) {
            log.error("Connection error during index update for remote repository {}", remoteRepository.getId(), e);
            throw new IndexUpdateFailedException("Connection error during index update for remote repository " + remoteRepository.getId(), e);
        } catch (MalformedURLException e) {
            log.error("URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId(), remoteUpdateUri, e);
            throw new IndexUpdateFailedException("URL for remote index update of repository is not correct " + remoteUpdateUri, e);
        } catch (IOException e) {
            log.error("IOException during index update of remote repository {}: {}", remoteRepository.getId(), e.getMessage(), e);
            throw new IndexUpdateFailedException("IOException during index update of remote repository " + remoteRepository.getId() + (StringUtils.isNotEmpty(e.getMessage()) ? ": " + e.getMessage() : ""), e);
        } catch (WagonFactoryException e) {
            log.error("Wagon for remote index download of {} could not be created: {}", remoteRepository.getId(), e.getMessage(), e);
            throw new IndexUpdateFailedException("Error while updating the remote index of " + remoteRepository.getId(), e);
        }
    });
}
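For illustration, a caller would obtain the ArchivaIndexingContext for the remote repository and pass it to update. This is a minimal sketch only; the indexManager and remoteRepo variables and the getIndexingContext() accessor are assumptions, not taken from the snippet above.

    // Hypothetical caller: indexManager is the index manager shown above,
    // remoteRepo a RemoteRepository with a configured RemoteIndexFeature (assumed accessor).
    ArchivaIndexingContext ctx = remoteRepo.getIndexingContext();
    try {
        // fullUpdate = true forces a complete re-download instead of an incremental update
        indexManager.update(ctx, true);
    } catch (IndexUpdateFailedException e) {
        log.error("Remote index update failed for {}: {}", remoteRepo.getId(), e.getMessage(), e);
    }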
Use of org.apache.archiva.indexer.IndexUpdateFailedException in project archiva by apache.
The class ArchivaIndexManagerMock, method executeUpdateFunction.
/*
 * Wraps the update execution code and makes sure that no other
 * method is running on the same index at the same time.
 */
private void executeUpdateFunction(ArchivaIndexingContext context, IndexUpdateConsumer function) throws IndexUpdateFailedException {
    IndexingContext indexingContext = null;
    try {
        indexingContext = getMvnContext(context);
    } catch (UnsupportedBaseContextException e) {
        throw new IndexUpdateFailedException("Maven index is not supported by this context", e);
    }
    final Path ctxPath = getIndexPath(context);
    int loop = MAX_WAIT;
    boolean active = false;
    // Try to acquire the context, waiting up to MAX_WAIT iterations for other operations to release it
    while (loop-- > 0 && !active) {
        active = activeContexts.add(ctxPath);
        try {
            Thread.sleep(WAIT_TIME);
        } catch (InterruptedException e) {
            // Ignore this
        }
    }
    if (active) {
        try {
            function.accept(indexingContext);
        } finally {
            activeContexts.remove(ctxPath);
        }
    } else {
        throw new IndexUpdateFailedException("Timeout while waiting for index release on context " + context.getId());
    }
}
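The IndexUpdateConsumer type is not shown in this snippet. Judging from its use here (a lambda that receives the resolved Maven IndexingContext and may throw IndexUpdateFailedException), it is presumably a functional interface along these lines; this is a sketch, not the project's actual declaration.

    @FunctionalInterface
    interface IndexUpdateConsumer {
        // Receives the resolved Maven IndexingContext while the context path is locked
        void accept(IndexingContext indexingContext) throws IndexUpdateFailedException;
    }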
Use of org.apache.archiva.indexer.IndexUpdateFailedException in project archiva by apache.
The class DefaultManagedRepositoryAdmin, method updateManagedRepository.
@Override
public Boolean updateManagedRepository(ManagedRepository managedRepository, boolean needStageRepo, AuditInformation auditInformation, boolean resetStats) throws RepositoryAdminException {
    log.debug("updateManagedConfiguration repo {} needStage {} resetStats {}", managedRepository, needStageRepo, resetStats);
    // Ensure that the fields are valid.
    getRepositoryCommonValidator().basicValidation(managedRepository, true);
    getRepositoryCommonValidator().validateManagedRepository(managedRepository);
    Configuration configuration = getArchivaConfiguration().getConfiguration();
    ManagedRepositoryConfiguration updatedRepoConfig = getRepositoryConfiguration(managedRepository);
    updatedRepoConfig.setStageRepoNeeded(needStageRepo);
    org.apache.archiva.repository.ManagedRepository oldRepo = repositoryRegistry.getManagedRepository(managedRepository.getId());
    boolean stagingExists = false;
    if (oldRepo.supportsFeature(StagingRepositoryFeature.class)) {
        stagingExists = oldRepo.getFeature(StagingRepositoryFeature.class).get().getStagingRepository() != null;
    }
    boolean updateIndexContext = !StringUtils.equals(updatedRepoConfig.getIndexDir(), managedRepository.getIndexDirectory());
    org.apache.archiva.repository.ManagedRepository newRepo;
    // TODO remove content from old if path has changed !!!!!
    try {
        newRepo = repositoryRegistry.putRepository(updatedRepoConfig, configuration);
        if (newRepo.supportsFeature(StagingRepositoryFeature.class)) {
            org.apache.archiva.repository.ManagedRepository stagingRepo = newRepo.getFeature(StagingRepositoryFeature.class).get().getStagingRepository();
            if (stagingRepo != null && !stagingExists) {
                triggerAuditEvent(stagingRepo.getId(), null, AuditEvent.ADD_MANAGED_REPO, auditInformation);
                addRepositoryRoles(stagingRepo.getId());
            }
        }
    } catch (RepositoryException e) {
        log.error("Could not update repository {}: {}", managedRepository.getId(), e.getMessage(), e);
        throw new RepositoryAdminException("Could not update repository " + managedRepository.getId());
    } catch (RoleManagerException e) {
        log.error("Error during role update of stage repo {}", managedRepository.getId(), e);
        throw new RepositoryAdminException("Could not update repository " + managedRepository.getId());
    }
    triggerAuditEvent(managedRepository.getId(), null, AuditEvent.MODIFY_MANAGED_REPO, auditInformation);
    // Save the repository configuration.
    try {
        getArchivaConfiguration().save(configuration);
    } catch (RegistryException | IndeterminateConfigurationException e) {
        log.error("Could not save repository configuration: {}", e.getMessage(), e);
        throw new RepositoryAdminException("Could not save repository configuration: " + e.getMessage());
    }
    // Reset the statistics if requested.
    RepositorySession repositorySession = getRepositorySessionFactory().createSession();
    try {
        if (resetStats) {
            log.debug("call repositoryStatisticsManager.deleteStatistics");
            getRepositoryStatisticsManager().deleteStatistics(repositorySession.getRepository(), managedRepository.getId());
            repositorySession.save();
        }
    } catch (MetadataRepositoryException e) {
        throw new RepositoryAdminException(e.getMessage(), e);
    } finally {
        repositorySession.close();
    }
    if (updateIndexContext) {
        try {
            repositoryRegistry.resetIndexingContext(newRepo);
        } catch (IndexUpdateFailedException e) {
            log.error("Could not reset indexing context of repository {}: {}", managedRepository.getId(), e.getMessage(), e);
        }
    }
    return true;
}
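A caller of this admin method is responsible for handling the RepositoryAdminException that wraps the various lower-level failures. A hedged sketch of such a call; managedRepositoryAdmin, repoBean, and auditInfo are placeholders for objects the caller already has, not values taken from the snippet above.

    // repoBean is the updated ManagedRepository bean, auditInfo identifies the acting user (placeholders)
    try {
        Boolean updated = managedRepositoryAdmin.updateManagedRepository(repoBean, false, auditInfo, true);
        log.info("Repository {} updated: {}", repoBean.getId(), updated);
    } catch (RepositoryAdminException e) {
        // Validation errors, registry failures, and configuration save failures all surface here
        log.error("Update of repository {} failed: {}", repoBean.getId(), e.getMessage(), e);
    }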
Use of org.apache.archiva.indexer.IndexUpdateFailedException in project archiva by apache.
The class ArchivaIndexManagerMock, method pack.
@Override
public void pack(final ArchivaIndexingContext context) throws IndexUpdateFailedException {
    executeUpdateFunction(context, indexingContext -> {
        try {
            IndexPackingRequest request = new IndexPackingRequest(indexingContext, indexingContext.acquireIndexSearcher().getIndexReader(), indexingContext.getIndexDirectoryFile());
            indexPacker.packIndex(request);
            indexingContext.updateTimestamp(true);
        } catch (IOException e) {
            log.error("IOException while packing index of context " + context.getId() + (StringUtils.isNotEmpty(e.getMessage()) ? ": " + e.getMessage() : ""));
            throw new IndexUpdateFailedException("IOException during update of " + context.getId(), e);
        }
    });
}
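Packing is typically triggered after an index update, so that the packed index chunks offered for download stay in sync with the index itself. A brief sketch, reusing the hypothetical indexManager and ctx variables introduced above.

    try {
        indexManager.update(ctx, false);   // incremental update first
        indexManager.pack(ctx);            // then repack the index for download by clients
    } catch (IndexUpdateFailedException e) {
        log.error("Index update/pack failed for context {}: {}", ctx.getId(), e.getMessage(), e);
    }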
Use of org.apache.archiva.indexer.IndexUpdateFailedException in project archiva by apache.
The class ArchivaIndexManagerMock, method removeArtifactsFromIndex.
@Override
public void removeArtifactsFromIndex(ArchivaIndexingContext context, Collection<URI> artifactReference) throws IndexUpdateFailedException {
    final URI ctxUri = context.getPath();
    executeUpdateFunction(context, indexingContext -> {
        Collection<ArtifactContext> artifacts = artifactReference.stream()
            .map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile()))
            .collect(Collectors.toList());
        try {
            indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
        } catch (IOException e) {
            log.error("IOException while removing artifact {}", e.getMessage(), e);
            throw new IndexUpdateFailedException("Error occurred while removing artifact from index of " + context.getId() + (StringUtils.isNotEmpty(e.getMessage()) ? ": " + e.getMessage() : ""));
        }
    });
}
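The artifactReference URIs are resolved against the context path, so callers pass paths relative to the repository root. A hedged sketch, again using the hypothetical indexManager and ctx variables; the artifact path shown is purely illustrative.

    // Paths are relative to the repository root and resolved against context.getPath()
    List<URI> toRemove = Collections.singletonList(
        URI.create("org/example/demo/1.0/demo-1.0.jar"));  // illustrative coordinate only
    try {
        indexManager.removeArtifactsFromIndex(ctx, toRemove);
    } catch (IndexUpdateFailedException e) {
        log.error("Could not remove artifacts from index of {}: {}", ctx.getId(), e.getMessage(), e);
    }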