use of org.apache.archiva.repository.ManagedRepository in project archiva by apache.
the class ArchivaDavResourceFactory method processRepositoryGroup.
private DavResource processRepositoryGroup(final DavServletRequest request, ArchivaDavResourceLocator archivaLocator, List<String> repositories, String activePrincipal, List<String> resourcesInAbsolutePath, RepositoryGroupConfiguration repoGroupConfig) throws DavException {
DavResource resource = null;
List<DavException> storedExceptions = new ArrayList<>();
String pathInfo = StringUtils.removeEnd(request.getPathInfo(), "/");
String rootPath = StringUtils.substringBeforeLast(pathInfo, "/");
if (StringUtils.endsWith(rootPath, repoGroupConfig.getMergedIndexPath())) {
// we are in the case of index file request
String requestedFileName = StringUtils.substringAfterLast(pathInfo, "/");
Path temporaryIndexDirectory = buildMergedIndexDirectory(repositories, activePrincipal, request, repoGroupConfig);
Path resourceFile = temporaryIndexDirectory.resolve(requestedFileName);
resource = new ArchivaDavResource(resourceFile.toAbsolutePath().toString(), requestedFileName, null, request.getRemoteAddr(), activePrincipal, request.getDavSession(), archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager);
} else {
for (String repositoryId : repositories) {
ManagedRepositoryContent managedRepositoryContent;
ManagedRepository managedRepository = repositoryRegistry.getManagedRepository(repositoryId);
if (managedRepository == null) {
throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not find repository with id " + repositoryId);
}
managedRepositoryContent = managedRepository.getContent();
if (managedRepositoryContent == null) {
log.error("Inconsistency detected. Repository content not found for '{}'", repositoryId);
throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not find repository content with id " + repositoryId);
}
try {
DavResource updatedResource = processRepository(request, archivaLocator, activePrincipal, managedRepositoryContent, managedRepository);
if (resource == null) {
resource = updatedResource;
}
String logicalResource = getLogicalResource(archivaLocator, null, false);
if (logicalResource.endsWith("/")) {
logicalResource = logicalResource.substring(1);
}
resourcesInAbsolutePath.add(Paths.get(managedRepositoryContent.getRepoRoot(), logicalResource).toAbsolutePath().toString());
} catch (DavException e) {
storedExceptions.add(e);
}
}
}
if (resource == null) {
if (!storedExceptions.isEmpty()) {
// MRM-1232
for (DavException e : storedExceptions) {
if (401 == e.getErrorCode()) {
throw e;
}
}
throw new DavException(HttpServletResponse.SC_NOT_FOUND);
} else {
throw new DavException(HttpServletResponse.SC_NOT_FOUND);
}
}
return resource;
}
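For reference, here is a minimal, hypothetical sketch of the resolution strategy the method above applies: try each member of the repository group in order, keep the first resource that resolves, remember per-member failures, and prefer reporting a 401 over a generic 404 when nothing resolved (the MRM-1232 behaviour). The class, interface and method names (GroupResolutionSketch, MemberResolver, DavLikeException, resolveAcrossGroup) are invented for the illustration and are not Archiva API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical, simplified illustration of the resolution used by processRepositoryGroup.
public class GroupResolutionSketch {

    static final int SC_UNAUTHORIZED = 401;
    static final int SC_NOT_FOUND = 404;

    /** Minimal stand-in for org.apache.jackrabbit.webdav.DavException. */
    static class DavLikeException extends Exception {
        private final int errorCode;
        DavLikeException(int errorCode) { this.errorCode = errorCode; }
        int getErrorCode() { return errorCode; }
    }

    /** Hypothetical per-repository resolver; the real code calls processRepository(...). */
    interface MemberResolver<R> {
        R resolve(String repositoryId) throws DavLikeException;
    }

    static <R> R resolveAcrossGroup(List<String> memberIds, MemberResolver<R> resolver)
            throws DavLikeException {
        R result = null;
        List<DavLikeException> stored = new ArrayList<>();
        for (String id : memberIds) {
            try {
                R candidate = resolver.resolve(id);
                if (result == null) {
                    result = candidate;   // first successful member wins
                }
            } catch (DavLikeException e) {
                stored.add(e);            // keep trying the remaining members
            }
        }
        if (result == null) {
            for (DavLikeException e : stored) {
                if (e.getErrorCode() == SC_UNAUTHORIZED) {
                    throw e;              // prefer reporting an authentication failure
                }
            }
            throw new DavLikeException(SC_NOT_FOUND);
        }
        return result;
    }
}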
use of org.apache.archiva.repository.ManagedRepository in project archiva by apache.
the class ArchivaIndexingTaskExecutor method executeTask.
/**
* Executes the given {@link Task}. The behaviour depends on the task's action:
* if the action is {@link org.apache.archiva.scheduler.indexing.ArtifactIndexingTask.Action#FINISH}
* and isExecuteOnEntireRepo is set, the whole repository will be scanned.
*
* @param task the indexing task to execute
* @throws TaskExecutionException if the task cannot be executed
*/
@Override
public void executeTask(Task task) throws TaskExecutionException {
ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;
ManagedRepository repository = indexingTask.getRepository();
ArchivaIndexingContext archivaContext = indexingTask.getContext();
IndexingContext context = null;
try {
context = archivaContext.getBaseContext(IndexingContext.class);
} catch (UnsupportedBaseContextException e) {
throw new TaskExecutionException("Bad repository type.", e);
}
if (ArtifactIndexingTask.Action.FINISH.equals(indexingTask.getAction()) && indexingTask.isExecuteOnEntireRepo()) {
long start = System.currentTimeMillis();
try {
context.updateTimestamp();
DefaultScannerListener listener = new DefaultScannerListener(context, indexerEngine, true, null);
ScanningRequest request = new ScanningRequest(context, listener);
ScanningResult result = scanner.scan(request);
if (result.hasExceptions()) {
log.error("Exceptions occurred during index scan of " + context.getId());
result.getExceptions().stream().map(e -> e.getMessage()).distinct().limit(5).forEach(s -> log.error("Message: " + s));
}
} catch (IOException e) {
log.error("Error during context scan {}: {}", context.getId(), context.getIndexDirectory(), e);
}
long end = System.currentTimeMillis();
log.info("indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId(), indexingTask.isOnlyUpdate(), (end - start));
log.debug("Finishing indexing task on repo: {}", repository.getId());
finishIndexingTask(indexingTask, repository, context);
} else {
// create context if not a repo scan request
if (!indexingTask.isExecuteOnEntireRepo()) {
try {
log.debug("Creating indexing context on resource: {}", indexingTask.getResourceFile() == null ? "none" : indexingTask.getResourceFile());
archivaContext = repository.getIndexingContext();
context = archivaContext.getBaseContext(IndexingContext.class);
} catch (UnsupportedBaseContextException e) {
log.error("Error occurred while creating context: {}", e.getMessage());
throw new TaskExecutionException("Error occurred while creating context: " + e.getMessage(), e);
}
}
if (context == null || context.getIndexDirectory() == null) {
throw new TaskExecutionException("Trying to index an artifact but the context is already closed");
}
try {
Path artifactFile = indexingTask.getResourceFile();
if (artifactFile == null) {
log.debug("No artifact passed in indexing task, skipping it");
} else {
ArtifactContext ac = artifactContextProducer.getArtifactContext(context, artifactFile.toFile());
if (ac != null) {
// TODO make that configurable?
if (artifactFile.getFileName().toString().endsWith(".pom")) {
ac.getArtifactInfo().setFileExtension("pom");
ac.getArtifactInfo().setPackaging("pom");
ac.getArtifactInfo().setClassifier("pom");
}
if (indexingTask.getAction().equals(ArtifactIndexingTask.Action.ADD)) {
// IndexSearcher s = context.getIndexSearcher();
// String uinfo = ac.getArtifactInfo().getUinfo();
// TopDocs d = s.search( new TermQuery( new Term( ArtifactInfo.UINFO, uinfo ) ), 1 );
BooleanQuery.Builder qb = new BooleanQuery.Builder();
qb.add(indexer.constructQuery(MAVEN.GROUP_ID, new SourcedSearchExpression(ac.getArtifactInfo().getGroupId())), BooleanClause.Occur.MUST);
qb.add(indexer.constructQuery(MAVEN.ARTIFACT_ID, new SourcedSearchExpression(ac.getArtifactInfo().getArtifactId())), BooleanClause.Occur.MUST);
qb.add(indexer.constructQuery(MAVEN.VERSION, new SourcedSearchExpression(ac.getArtifactInfo().getVersion())), BooleanClause.Occur.MUST);
if (ac.getArtifactInfo().getClassifier() != null) {
qb.add(indexer.constructQuery(MAVEN.CLASSIFIER, new SourcedSearchExpression(ac.getArtifactInfo().getClassifier())), BooleanClause.Occur.MUST);
}
if (ac.getArtifactInfo().getPackaging() != null) {
qb.add(indexer.constructQuery(MAVEN.PACKAGING, new SourcedSearchExpression(ac.getArtifactInfo().getPackaging())), BooleanClause.Occur.MUST);
}
FlatSearchRequest flatSearchRequest = new FlatSearchRequest(qb.build(), context);
FlatSearchResponse flatSearchResponse = indexer.searchFlat(flatSearchRequest);
if (flatSearchResponse.getResults().isEmpty()) {
log.debug("Adding artifact '{}' to index..", ac.getArtifactInfo());
indexerEngine.index(context, ac);
} else {
log.debug("Updating artifact '{}' in index..", ac.getArtifactInfo());
// TODO check if update exists !!
indexerEngine.update(context, ac);
}
context.updateTimestamp();
context.commit();
} else {
log.debug("Removing artifact '{}' from index..", ac.getArtifactInfo());
indexerEngine.remove(context, ac);
}
}
}
// close the context if not a repo scan request
if (!indexingTask.isExecuteOnEntireRepo()) {
log.debug("Finishing indexing task on resource file : {}", indexingTask.getResourceFile() != null ? indexingTask.getResourceFile() : " none ");
finishIndexingTask(indexingTask, repository, context);
}
} catch (IOException e) {
log.error("Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(), e);
throw new TaskExecutionException("Error occurred while executing indexing task '" + indexingTask + "'", e);
}
}
}
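The ADD branch above first checks whether the artifact's coordinates are already present in the index before deciding between index() and update(). Below is a hedged sketch of that existence check, using only the Maven Indexer calls that appear in the method; the helper class and method names are made up for the illustration.

import java.io.IOException;

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.maven.index.FlatSearchRequest;
import org.apache.maven.index.FlatSearchResponse;
import org.apache.maven.index.Indexer;
import org.apache.maven.index.MAVEN;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.expr.SourcedSearchExpression;

// Hypothetical helper extracting the "is this GAV already indexed?" check used by executeTask.
public class IndexExistenceCheck {

    static boolean isAlreadyIndexed(Indexer indexer, IndexingContext context,
                                    String groupId, String artifactId, String version)
            throws IOException {
        BooleanQuery.Builder qb = new BooleanQuery.Builder();
        qb.add(indexer.constructQuery(MAVEN.GROUP_ID, new SourcedSearchExpression(groupId)),
               BooleanClause.Occur.MUST);
        qb.add(indexer.constructQuery(MAVEN.ARTIFACT_ID, new SourcedSearchExpression(artifactId)),
               BooleanClause.Occur.MUST);
        qb.add(indexer.constructQuery(MAVEN.VERSION, new SourcedSearchExpression(version)),
               BooleanClause.Occur.MUST);
        FlatSearchResponse response = indexer.searchFlat(new FlatSearchRequest(qb.build(), context));
        // A non-empty result set means the artifact should be updated rather than added.
        return !response.getResults().isEmpty();
    }
}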
use of org.apache.archiva.repository.ManagedRepository in project archiva by apache.
the class RepositoryScannerTest method testDefaultRepositoryMetadataScanner.
@Test
public void testDefaultRepositoryMetadataScanner() throws Exception {
List<String> actualMetadataPaths = new ArrayList<>();
actualMetadataPaths.add("org/apache/maven/some-ejb/1.0/maven-metadata.xml");
actualMetadataPaths.add("org/apache/maven/update/test-not-updated/maven-metadata.xml");
actualMetadataPaths.add("org/apache/maven/update/test-updated/maven-metadata.xml");
actualMetadataPaths.add("org/apache/maven/maven-metadata.xml");
actualMetadataPaths.add("org/apache/testgroup/discovery/1.0/maven-metadata.xml");
actualMetadataPaths.add("org/apache/testgroup/discovery/maven-metadata.xml");
actualMetadataPaths.add("javax/sql/jdbc/2.0/maven-metadata-repository.xml");
actualMetadataPaths.add("javax/sql/jdbc/maven-metadata-repository.xml");
actualMetadataPaths.add("javax/sql/maven-metadata-repository.xml");
actualMetadataPaths.add("javax/maven-metadata.xml");
ManagedRepository repository = createDefaultRepository();
List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>();
KnownScanConsumer knownConsumer = new KnownScanConsumer();
knownConsumer.setIncludes(new String[] { "**/maven-metadata*.xml" });
knownConsumers.add(knownConsumer);
List<InvalidRepositoryContentConsumer> invalidConsumers = new ArrayList<>();
InvalidScanConsumer badconsumer = new InvalidScanConsumer();
invalidConsumers.add(badconsumer);
RepositoryScanner scanner = lookupRepositoryScanner();
RepositoryScanStatistics stats = scanner.scan(repository, knownConsumers, invalidConsumers, getIgnoreList(), RepositoryScanner.FRESH_SCAN);
assertNotNull("Stats should not be null.", stats);
assertMinimumHits("Stats.totalFileCount", actualMetadataPaths.size(), stats.getTotalFileCount());
assertMinimumHits("Processed Count", actualMetadataPaths.size(), knownConsumer.getProcessCount());
}
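As an aside, the single include pattern "**/maven-metadata*.xml" given to KnownScanConsumer is what restricts the consumer to the metadata paths listed in the test. Archiva evaluates Ant-style patterns internally, but a plain JDK glob matcher behaves similarly enough to illustrate which paths are selected; the demo class below is not part of the test and is for illustration only.

import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.List;

// Shows which sample paths the "**/maven-metadata*.xml" include would select.
public class MetadataIncludeDemo {
    public static void main(String[] args) {
        PathMatcher matcher = FileSystems.getDefault()
                .getPathMatcher("glob:**/maven-metadata*.xml");
        List<Path> samples = List.of(
                Paths.get("org/apache/maven/maven-metadata.xml"),              // matches
                Paths.get("javax/sql/jdbc/2.0/maven-metadata-repository.xml"), // matches
                Paths.get("org/apache/maven/some-ejb/1.0/some-ejb-1.0.jar"));  // does not match
        for (Path p : samples) {
            System.out.println(p + " -> " + matcher.matches(p));
        }
    }
}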
use of org.apache.archiva.repository.ManagedRepository in project archiva by apache.
the class RepositoryScannerTest method testLegacyRepositoryArtifactScanner.
@Test
public void testLegacyRepositoryArtifactScanner() throws Exception {
List<String> actualArtifactPaths = new ArrayList<>();
actualArtifactPaths.add("invalid/jars/1.0/invalid-1.0.jar");
actualArtifactPaths.add("invalid/jars/invalid-1.0.rar");
actualArtifactPaths.add("invalid/jars/invalid.jar");
actualArtifactPaths.add("invalid/invalid-1.0.jar");
actualArtifactPaths.add("javax.sql/jars/jdbc-2.0.jar");
actualArtifactPaths.add("org.apache.maven/jars/some-ejb-1.0-client.jar");
actualArtifactPaths.add("org.apache.maven/jars/testing-1.0.jar");
actualArtifactPaths.add("org.apache.maven/jars/testing-1.0-sources.jar");
actualArtifactPaths.add("org.apache.maven/jars/testing-UNKNOWN.jar");
actualArtifactPaths.add("org.apache.maven/jars/testing-1.0.zip");
actualArtifactPaths.add("org.apache.maven/jars/testing-1.0-20050611.112233-1.jar");
actualArtifactPaths.add("org.apache.maven/jars/testing-1.0.tar.gz");
actualArtifactPaths.add("org.apache.maven.update/jars/test-not-updated-1.0.jar");
actualArtifactPaths.add("org.apache.maven.update/jars/test-updated-1.0.jar");
ManagedRepository repository = createLegacyRepository();
List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>();
KnownScanConsumer consumer = new KnownScanConsumer();
consumer.setIncludes(ARTIFACT_PATTERNS);
knownConsumers.add(consumer);
List<InvalidRepositoryContentConsumer> invalidConsumers = new ArrayList<>();
InvalidScanConsumer badconsumer = new InvalidScanConsumer();
invalidConsumers.add(badconsumer);
RepositoryScanner scanner = lookupRepositoryScanner();
RepositoryScanStatistics stats = scanner.scan(repository, knownConsumers, invalidConsumers, getIgnoreList(), RepositoryScanner.FRESH_SCAN);
assertNotNull("Stats should not be null.", stats);
assertMinimumHits("Stats.totalFileCount", actualArtifactPaths.size(), stats.getTotalFileCount());
assertMinimumHits("Processed Count", actualArtifactPaths.size(), consumer.getProcessCount());
}
use of org.apache.archiva.repository.ManagedRepository in project archiva by apache.
the class RepositoryScannerTest method testTimestampRepositoryScannerProcessUnmodified.
@Test
public void testTimestampRepositoryScannerProcessUnmodified() throws Exception {
ManagedRepository repository = createSimpleRepository();
List<KnownRepositoryContentConsumer> knownConsumers = new ArrayList<>();
KnownScanConsumer consumer = new KnownScanConsumer();
consumer.setProcessUnmodified(true);
consumer.setIncludes(ARTIFACT_PATTERNS);
knownConsumers.add(consumer);
List<InvalidRepositoryContentConsumer> invalidConsumers = new ArrayList<>();
InvalidScanConsumer badconsumer = new InvalidScanConsumer();
invalidConsumers.add(badconsumer);
RepositoryScanner scanner = lookupRepositoryScanner();
RepositoryScanStatistics stats = scanner.scan(repository, knownConsumers, invalidConsumers, getIgnoreList(), getTimestampAsMillis("20061101.000000"));
assertNotNull("Stats should not be null.", stats);
assertEquals("Stats.totalFileCount", 4, stats.getTotalFileCount());
assertEquals("Stats.newFileCount", 3, stats.getNewFileCount());
assertEquals("Processed Count", 3, consumer.getProcessCount());
assertEquals("Processed Count (of invalid items)", 1, badconsumer.getProcessCount());
}
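getTimestampAsMillis("20061101.000000") supplies the "changes since" cut-off for the scan. Its implementation is not shown here; the sketch below is one plausible reading, assuming a yyyyMMdd.HHmmss format and the system default time zone, neither of which is confirmed by the test code.

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

// A plausible sketch of a helper like getTimestampAsMillis; format and zone are assumptions.
public class TimestampSketch {
    static long getTimestampAsMillis(String timestamp) {
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMdd.HHmmss");
        LocalDateTime parsed = LocalDateTime.parse(timestamp, fmt);
        return parsed.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
    }

    public static void main(String[] args) {
        // Millis value corresponding to the cut-off used in the test above.
        System.out.println(getTimestampAsMillis("20061101.000000"));
    }
}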