Use of org.apache.maven.index.context.IndexingContext in the Apache Archiva project.
Class ArchivaIndexingTaskExecutor, method executeTask.
/**
 * Executes the given indexing {@link Task} (must be an {@link ArtifactIndexingTask}).
 * If the action is {@link org.apache.archiva.scheduler.indexing.ArtifactIndexingTask.Action#FINISH}
 * and the task is flagged to run on the entire repository, the whole repository is scanned
 * and the task is finished. Otherwise a single artifact is added to, updated in, or removed
 * from the index, depending on the task's action.
 *
 * @param task the task to execute; cast to {@link ArtifactIndexingTask}
 * @throws TaskExecutionException if the repository type is unsupported, the indexing
 *         context is unavailable/closed, or an I/O error occurs while indexing
 */
@Override
public void executeTask(Task task) throws TaskExecutionException {
    ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;
    ManagedRepository repository = indexingTask.getRepository();
    ArchivaIndexingContext archivaContext = indexingTask.getContext();
    IndexingContext context = null;
    try {
        context = archivaContext.getBaseContext(IndexingContext.class);
    } catch (UnsupportedBaseContextException e) {
        throw new TaskExecutionException("Bad repository type.", e);
    }
    if (ArtifactIndexingTask.Action.FINISH.equals(indexingTask.getAction()) && indexingTask.isExecuteOnEntireRepo()) {
        // Full-repository scan requested: scan, then finish the task.
        long start = System.currentTimeMillis();
        try {
            context.updateTimestamp();
            DefaultScannerListener listener = new DefaultScannerListener(context, indexerEngine, true, null);
            ScanningRequest request = new ScanningRequest(context, listener);
            ScanningResult result = scanner.scan(request);
            if (result.hasExceptions()) {
                // FIX: parameterized logging and corrected "occured" typo.
                log.error("Exceptions occurred during index scan of {}", context.getId());
                // Log at most 5 distinct messages to avoid flooding the log.
                result.getExceptions().stream().map(e -> e.getMessage()).distinct().limit(5).forEach(s -> log.error("Message: {}", s));
            }
        } catch (IOException e) {
            // FIX: include the exception itself so the stack trace is not lost.
            log.error("Error during context scan {}: {}", context.getId(), context.getIndexDirectory(), e);
        }
        long end = System.currentTimeMillis();
        log.info("indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId(), indexingTask.isOnlyUpdate(), (end - start));
        log.debug("Finishing indexing task on repo: {}", repository.getId());
        finishIndexingTask(indexingTask, repository, context);
    } else {
        // create context if not a repo scan request
        if (!indexingTask.isExecuteOnEntireRepo()) {
            try {
                log.debug("Creating indexing context on resource: {}", (indexingTask.getResourceFile() == null ? "none" : indexingTask.getResourceFile()));
                archivaContext = repository.getIndexingContext();
                context = archivaContext.getBaseContext(IndexingContext.class);
            } catch (UnsupportedBaseContextException e) {
                log.error("Error occurred while creating context: {}", e.getMessage());
                throw new TaskExecutionException("Error occurred while creating context: " + e.getMessage(), e);
            }
        }
        if (context == null || context.getIndexDirectory() == null) {
            throw new TaskExecutionException("Trying to index an artifact but the context is already closed");
        }
        try {
            Path artifactFile = indexingTask.getResourceFile();
            if (artifactFile == null) {
                log.debug("no artifact pass in indexing task so skip it");
            } else {
                ArtifactContext ac = artifactContextProducer.getArtifactContext(context, artifactFile.toFile());
                if (ac != null) {
                    // TODO make that configurable?
                    if (artifactFile.getFileName().toString().endsWith(".pom")) {
                        ac.getArtifactInfo().setFileExtension("pom");
                        ac.getArtifactInfo().setPackaging("pom");
                        ac.getArtifactInfo().setClassifier("pom");
                    }
                    if (indexingTask.getAction().equals(ArtifactIndexingTask.Action.ADD)) {
                        // Look up the artifact by its full GAV (+ classifier/packaging when
                        // present) to decide between an index add and an index update.
                        BooleanQuery.Builder qb = new BooleanQuery.Builder();
                        qb.add(indexer.constructQuery(MAVEN.GROUP_ID, new SourcedSearchExpression(ac.getArtifactInfo().getGroupId())), BooleanClause.Occur.MUST);
                        qb.add(indexer.constructQuery(MAVEN.ARTIFACT_ID, new SourcedSearchExpression(ac.getArtifactInfo().getArtifactId())), BooleanClause.Occur.MUST);
                        qb.add(indexer.constructQuery(MAVEN.VERSION, new SourcedSearchExpression(ac.getArtifactInfo().getVersion())), BooleanClause.Occur.MUST);
                        if (ac.getArtifactInfo().getClassifier() != null) {
                            qb.add(indexer.constructQuery(MAVEN.CLASSIFIER, new SourcedSearchExpression(ac.getArtifactInfo().getClassifier())), BooleanClause.Occur.MUST);
                        }
                        if (ac.getArtifactInfo().getPackaging() != null) {
                            qb.add(indexer.constructQuery(MAVEN.PACKAGING, new SourcedSearchExpression(ac.getArtifactInfo().getPackaging())), BooleanClause.Occur.MUST);
                        }
                        FlatSearchRequest flatSearchRequest = new FlatSearchRequest(qb.build(), context);
                        FlatSearchResponse flatSearchResponse = indexer.searchFlat(flatSearchRequest);
                        if (flatSearchResponse.getResults().isEmpty()) {
                            log.debug("Adding artifact '{}' to index..", ac.getArtifactInfo());
                            indexerEngine.index(context, ac);
                        } else {
                            log.debug("Updating artifact '{}' in index..", ac.getArtifactInfo());
                            // TODO check if update exists !!
                            indexerEngine.update(context, ac);
                        }
                        context.updateTimestamp();
                        context.commit();
                    } else {
                        log.debug("Removing artifact '{}' from index..", ac.getArtifactInfo());
                        indexerEngine.remove(context, ac);
                    }
                }
            }
            // close the context if not a repo scan request
            if (!indexingTask.isExecuteOnEntireRepo()) {
                log.debug("Finishing indexing task on resource file : {}", indexingTask.getResourceFile() != null ? indexingTask.getResourceFile() : " none ");
                finishIndexingTask(indexingTask, repository, context);
            }
        } catch (IOException e) {
            log.error("Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(), e);
            throw new TaskExecutionException("Error occurred while executing indexing task '" + indexingTask + "'", e);
        }
    }
}
Use of org.apache.maven.index.context.IndexingContext in the Apache Archiva project.
Class DefaultDownloadRemoteIndexScheduler, method startup.
/**
 * Registers this scheduler as a configuration listener and, for every remote
 * repository that has remote-index download enabled and a scheduling definition,
 * schedules a remote index download. A full (rather than incremental) download is
 * requested when the local index directory is missing or empty.
 *
 * @throws DownloadRemoteIndexException if scheduling a download fails
 * @throws UnsupportedBaseContextException if a repository's indexing context is not Maven-based
 */
@PostConstruct
public void startup() throws DownloadRemoteIndexException, UnsupportedBaseContextException {
    archivaConfiguration.addListener(this);
    for (org.apache.archiva.repository.RemoteRepository remoteRepository : repositoryRegistry.getRemoteRepositories()) {
        IndexingContext context = remoteRepository.getIndexingContext().getBaseContext(IndexingContext.class);
        if (context == null) {
            continue;
        }
        // NOTE(review): unconditional Optional.get() — assumes every remote repository
        // carries a RemoteIndexFeature; confirm against the repository registry.
        RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get();
        // TODO record jobs from configuration
        if (rif.isDownloadRemoteIndex() && StringUtils.isNotEmpty(remoteRepository.getSchedulingDefinition())) {
            // FIX: File.list() returns null when the directory does not exist; treat a
            // missing or empty index directory as requiring a full download instead of NPE-ing.
            String[] indexFiles = context.getIndexDirectoryFile().list();
            boolean fullDownload = indexFiles == null || indexFiles.length == 0;
            scheduleDownloadRemote(remoteRepository.getId(), false, fullDownload);
        }
    }
}
Use of org.apache.maven.index.context.IndexingContext in the Apache Archiva project.
Class NexusIndexerConsumer, method beginScan.
/**
 * Prepares this consumer for a repository scan: records the repository, resolves its
 * filesystem path, and obtains the Maven indexing context. For non-Maven repositories
 * the indexing context is set to {@code null} (nothing will be indexed).
 *
 * @param repository the repository about to be scanned
 * @param whenGathered timestamp of the gathering run (currently unused here)
 * @throws ConsumerException if the repository's base context is not nexus-indexer compatible
 */
@Override
public void beginScan(ManagedRepository repository, Date whenGathered) throws ConsumerException {
    this.repository = repository;
    managedRepository = PathUtil.getPathFromUri(repository.getLocation());
    try {
        log.info("Creating indexing context for repo : {}", repository.getId());
        if (repository.getType() == RepositoryType.MAVEN) {
            indexingContext = repository.getIndexingContext().getBaseContext(IndexingContext.class);
        } else {
            indexingContext = null;
        }
    } catch (UnsupportedBaseContextException e) {
        // FIX: log the underlying exception instead of discarding it entirely.
        // NOTE(review): ideally chain e as the cause of ConsumerException —
        // confirm ConsumerException has a (String, Throwable) constructor.
        log.error("Bad repository type. Not nexus indexer compatible.", e);
        throw new ConsumerException("Bad repository type " + repository.getType());
    }
}
Use of org.apache.maven.index.context.IndexingContext in the Apache Archiva project.
Class ArchivaDavResourceFactory, method buildMergedIndexDirectory.
/**
 * Builds (or returns a cached) merged index directory for a repository group, restricted
 * to the repositories the active principal is authorized to read. The merged index is
 * cached in the HTTP session and reused until its configured TTL (minutes) expires.
 *
 * @param repositories repository ids belonging to the group
 * @param activePrincipal the authenticated user performing the request
 * @param request the current WebDAV request (provides the session cache)
 * @param repositoryGroupConfiguration configuration of the repository group
 * @return path to the merged index directory
 * @throws DavException wrapping search, merge, or I/O failures as HTTP 500
 */
protected Path buildMergedIndexDirectory(List<String> repositories, String activePrincipal, DavServletRequest request, RepositoryGroupConfiguration repositoryGroupConfiguration) throws DavException {
    try {
        HttpSession session = request.getSession();
        @SuppressWarnings("unchecked") Map<String, TemporaryGroupIndex> temporaryGroupIndexMap = (Map<String, TemporaryGroupIndex>) session.getAttribute(TemporaryGroupIndexSessionCleaner.TEMPORARY_INDEX_SESSION_KEY);
        if (temporaryGroupIndexMap == null) {
            temporaryGroupIndexMap = new HashMap<>();
        }
        TemporaryGroupIndex tmp = temporaryGroupIndexMap.get(repositoryGroupConfiguration.getId());
        if (tmp != null && tmp.getDirectory() != null && Files.exists(tmp.getDirectory())) {
            // Cached merged index exists: reuse it unless its TTL (minutes) has elapsed.
            if (System.currentTimeMillis() - tmp.getCreationTime() > (repositoryGroupConfiguration.getMergedIndexTtl() * 60 * 1000)) {
                log.debug(MarkerFactory.getMarker("group.merged.index"), "tmp group index '{}' is too old so delete it", repositoryGroupConfiguration.getId());
                indexMerger.cleanTemporaryGroupIndex(tmp);
            } else {
                log.debug(MarkerFactory.getMarker("group.merged.index"), "merged index for group '{}' found in cache", repositoryGroupConfiguration.getId());
                return tmp.getDirectory();
            }
        }
        // Collect only repositories (plus their remote index contexts) the user may read.
        Set<String> authzRepos = new HashSet<>();
        String permission = WebdavMethodUtil.getMethodPermission(request.getMethod());
        for (String repository : repositories) {
            try {
                if (servletAuth.isAuthorized(activePrincipal, repository, permission)) {
                    authzRepos.add(repository);
                    authzRepos.addAll(this.repositorySearch.getRemoteIndexingContextIds(repository));
                }
            } catch (UnauthorizedException e) {
                // TODO: review exception handling
                log.debug("Skipping repository '{}' for user '{}': {}", repository, activePrincipal, e.getMessage());
            }
        }
        log.info("generate temporary merged index for repository group '{}' for repositories '{}'", repositoryGroupConfiguration.getId(), authzRepos);
        Path tempRepoFile = Files.createTempDirectory("temp");
        tempRepoFile.toFile().deleteOnExit();
        IndexMergerRequest indexMergerRequest = new IndexMergerRequest(authzRepos, true, repositoryGroupConfiguration.getId(), repositoryGroupConfiguration.getMergedIndexPath(), repositoryGroupConfiguration.getMergedIndexTtl()).mergedIndexDirectory(tempRepoFile).temporary(true);
        MergedRemoteIndexesTaskRequest taskRequest = new MergedRemoteIndexesTaskRequest(indexMergerRequest, indexMerger);
        MergedRemoteIndexesTask job = new MergedRemoteIndexesTask(taskRequest);
        IndexingContext indexingContext = job.execute().getIndexingContext();
        Path mergedRepoDir = indexingContext.getIndexDirectoryFile().toPath();
        TemporaryGroupIndex temporaryGroupIndex = new TemporaryGroupIndex(mergedRepoDir, indexingContext.getId(), repositoryGroupConfiguration.getId(), repositoryGroupConfiguration.getMergedIndexTtl()).setCreationTime(new Date().getTime());
        temporaryGroupIndexMap.put(repositoryGroupConfiguration.getId(), temporaryGroupIndex);
        session.setAttribute(TemporaryGroupIndexSessionCleaner.TEMPORARY_INDEX_SESSION_KEY, temporaryGroupIndexMap);
        return mergedRepoDir;
    } catch (RepositorySearchException | IndexMergerException | IOException e) {
        // FIX: multi-catch replaces three identical catch blocks.
        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e);
    }
}
Use of org.apache.maven.index.context.IndexingContext in the Apache Archiva project.
Class ArchivaDavResourceFactory, method createResource.
/**
 * Creates a WebDAV resource for the requested locator. Resolution order:
 * repository group (read-only; browse or merged-artifact lookup), then remote
 * repository (serves index directory content), then managed repository.
 * For maven-metadata requests made through a repository group, the metadata of
 * all member repositories is merged on the fly (MRM-872 also covers checksums).
 *
 * @param locator identifies the repository and resource path
 * @param request the current WebDAV request
 * @param response the current WebDAV response (headers are set here)
 * @return the resolved DAV resource
 * @throws DavException on authorization failure, unknown repository, metadata
 *         merge errors, or (as {@link BrowserRedirectException}) to redirect
 *         collection URLs that lack a trailing slash
 */
@Override
public DavResource createResource(final DavResourceLocator locator, final DavServletRequest request, final DavServletResponse response) throws DavException {
    ArchivaDavResourceLocator archivaLocator = checkLocatorIsInstanceOfRepositoryLocator(locator);
    RepositoryGroupConfiguration repoGroupConfig = archivaConfiguration.getConfiguration().getRepositoryGroupsAsMap().get(archivaLocator.getRepositoryId());
    String activePrincipal = getActivePrincipal(request);
    List<String> resourcesInAbsolutePath = new ArrayList<>();
    boolean readMethod = WebdavMethodUtil.isReadMethod(request.getMethod());
    DavResource resource;
    if (repoGroupConfig != null) {
        if (!readMethod) {
            throw new DavException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "Write method not allowed for repository groups.");
        }
        // FIX: balanced the quotes in the log message (was "'{}" with no closing quote).
        log.debug("Repository group '{}' accessed by '{}'", repoGroupConfig.getId(), activePrincipal);
        // handle browse requests for virtual repos
        if (getLogicalResource(archivaLocator, null, true).endsWith("/")) {
            DavResource davResource = getResourceFromGroup(request, repoGroupConfig.getRepositories(), archivaLocator, repoGroupConfig);
            setHeaders(response, locator, davResource, true);
            return davResource;
        } else {
            // make a copy to avoid potential concurrent modifications (eg. by configuration)
            // TODO: ultimately, locking might be more efficient than copying in this fashion since updates are
            // infrequent
            List<String> repositories = new ArrayList<>(repoGroupConfig.getRepositories());
            resource = processRepositoryGroup(request, archivaLocator, repositories, activePrincipal, resourcesInAbsolutePath, repoGroupConfig);
        }
    } else {
        try {
            RemoteRepository remoteRepository = remoteRepositoryAdmin.getRemoteRepository(archivaLocator.getRepositoryId());
            if (remoteRepository != null) {
                // Remote repository: serve files from its index directory.
                String logicalResource = getLogicalResource(archivaLocator, null, false);
                IndexingContext indexingContext = remoteRepositoryAdmin.createIndexContext(remoteRepository);
                Path resourceFile = StringUtils.equals(logicalResource, "/") ? Paths.get(indexingContext.getIndexDirectoryFile().getParent()) : Paths.get(indexingContext.getIndexDirectoryFile().getParent(), logicalResource);
                resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(), locator.getResourcePath(), null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
                setHeaders(response, locator, resource, false);
                return resource;
            }
        } catch (RepositoryAdminException e) {
            log.debug("RepositoryException remote repository with d'{}' not found, msg: {}", archivaLocator.getRepositoryId(), e.getMessage());
        }
        ManagedRepository repo = repositoryRegistry.getManagedRepository(archivaLocator.getRepositoryId());
        if (repo == null) {
            throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        ManagedRepositoryContent managedRepositoryContent = repo.getContent();
        if (managedRepositoryContent == null) {
            log.error("Inconsistency detected. Repository content not found for '{}'", archivaLocator.getRepositoryId());
            throw new DavException(HttpServletResponse.SC_NOT_FOUND, "Invalid repository: " + archivaLocator.getRepositoryId());
        }
        log.debug("Managed repository '{}' accessed by '{}'", managedRepositoryContent.getId(), activePrincipal);
        resource = processRepository(request, archivaLocator, activePrincipal, managedRepositoryContent, repo);
        String logicalResource = getLogicalResource(archivaLocator, null, false);
        resourcesInAbsolutePath.add(Paths.get(managedRepositoryContent.getRepoRoot(), logicalResource).toAbsolutePath().toString());
    }
    String requestedResource = request.getRequestURI();
    // merge metadata only when requested via the repo group
    if ((repositoryRequest.isMetadata(requestedResource) || repositoryRequest.isMetadataSupportFile(requestedResource)) && repoGroupConfig != null) {
        // this should only be at the project level not version level!
        if (isProjectReference(requestedResource)) {
            ArchivaDavResource res = (ArchivaDavResource) resource;
            String filePath = StringUtils.substringBeforeLast(res.getLocalResource().toAbsolutePath().toString().replace('\\', '/'), "/");
            filePath = filePath + "/maven-metadata-" + repoGroupConfig.getId() + ".xml";
            // for MRM-872 handle checksums of the merged metadata files
            if (repositoryRequest.isSupportFile(requestedResource)) {
                Path metadataChecksum = Paths.get(filePath + "." + StringUtils.substringAfterLast(requestedResource, "."));
                if (Files.exists(metadataChecksum)) {
                    LogicalResource logicalResource = new LogicalResource(getLogicalResource(archivaLocator, null, false));
                    resource = new ArchivaDavResource(metadataChecksum.toAbsolutePath().toString(), logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
                }
            } else {
                // FIX: dropped the always-true null check — resourcesInAbsolutePath is
                // initialized above and never reassigned to null.
                if (resourcesInAbsolutePath.size() > 1) {
                    // merge the metadata of all repos under group
                    ArchivaRepositoryMetadata mergedMetadata = new ArchivaRepositoryMetadata();
                    for (String resourceAbsPath : resourcesInAbsolutePath) {
                        try {
                            Path metadataFile = Paths.get(resourceAbsPath);
                            ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read(metadataFile);
                            mergedMetadata = RepositoryMetadataMerge.merge(mergedMetadata, repoMetadata);
                        } catch (XMLException e) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while reading metadata file.");
                        } catch (RepositoryMetadataException r) {
                            throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while merging metadata file.");
                        }
                    }
                    try {
                        Path resourceFile = writeMergedMetadataToFile(mergedMetadata, filePath);
                        LogicalResource logicalResource = new LogicalResource(getLogicalResource(archivaLocator, null, false));
                        resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(), logicalResource.getPath(), null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
                    } catch (RepositoryMetadataException r) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while writing metadata file.");
                    } catch (IOException ie) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while generating checksum files.");
                    } catch (DigesterException de) {
                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error occurred while generating checksum files." + de.getMessage());
                    }
                }
            }
        }
    }
    setHeaders(response, locator, resource, false);
    // compatibility with MRM-440 to ensure browsing the repository works ok
    if (resource.isCollection() && !request.getRequestURI().endsWith("/")) {
        throw new BrowserRedirectException(resource.getHref());
    }
    resource.addLockManager(lockManager);
    return resource;
}
Aggregations