Use of org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException in project archiva by apache.
The class ArchivaIndexingTaskExecutor, method executeTask. Depending on the task's action, the executor either scans the entire repository or adds, updates, or removes a single artifact in the index; failures are wrapped in TaskExecutionException.
/**
 * Executes the given {@link Task}, depending on its type.
 * If the action is {@link org.apache.archiva.scheduler.indexing.ArtifactIndexingTask.Action#FINISH}
 * and the task is set to execute on the entire repository, the repository will be scanned.
 *
 * @param task the indexing task to execute
 * @throws TaskExecutionException if the indexing context cannot be resolved or indexing fails
 */
@Override
public void executeTask(Task task) throws TaskExecutionException {
    ArtifactIndexingTask indexingTask = (ArtifactIndexingTask) task;
    ManagedRepository repository = indexingTask.getRepository();
    ArchivaIndexingContext archivaContext = indexingTask.getContext();
    IndexingContext context = null;
    try {
        context = archivaContext.getBaseContext(IndexingContext.class);
    } catch (UnsupportedBaseContextException e) {
        throw new TaskExecutionException("Bad repository type.", e);
    }
    if (ArtifactIndexingTask.Action.FINISH.equals(indexingTask.getAction()) && indexingTask.isExecuteOnEntireRepo()) {
        long start = System.currentTimeMillis();
        try {
            context.updateTimestamp();
            DefaultScannerListener listener = new DefaultScannerListener(context, indexerEngine, true, null);
            ScanningRequest request = new ScanningRequest(context, listener);
            ScanningResult result = scanner.scan(request);
            if (result.hasExceptions()) {
                log.error("Exceptions occurred during index scan of {}", context.getId());
                result.getExceptions().stream()
                    .map(e -> e.getMessage())
                    .distinct()
                    .limit(5)
                    .forEach(s -> log.error("Message: {}", s));
            }
        } catch (IOException e) {
            log.error("Error during context scan {}: {}", context.getId(), context.getIndexDirectory(), e);
        }
        long end = System.currentTimeMillis();
        log.info("Indexed maven repository: {}, onlyUpdate: {}, time {} ms", repository.getId(), indexingTask.isOnlyUpdate(), (end - start));
        log.debug("Finishing indexing task on repo: {}", repository.getId());
        finishIndexingTask(indexingTask, repository, context);
    } else {
        // fetch the context if this is not a whole-repository scan request
        if (!indexingTask.isExecuteOnEntireRepo()) {
            try {
                log.debug("Creating indexing context on resource: {}",
                    (indexingTask.getResourceFile() == null ? "none" : indexingTask.getResourceFile()));
                archivaContext = repository.getIndexingContext();
                context = archivaContext.getBaseContext(IndexingContext.class);
            } catch (UnsupportedBaseContextException e) {
                log.error("Error occurred while creating context: {}", e.getMessage());
                throw new TaskExecutionException("Error occurred while creating context: " + e.getMessage(), e);
            }
        }
        if (context == null || context.getIndexDirectory() == null) {
            throw new TaskExecutionException("Trying to index an artifact but the context is already closed");
        }
        try {
            Path artifactFile = indexingTask.getResourceFile();
            if (artifactFile == null) {
                log.debug("No artifact passed in the indexing task, skipping it");
            } else {
                ArtifactContext ac = artifactContextProducer.getArtifactContext(context, artifactFile.toFile());
                if (ac != null) {
                    // TODO make that configurable?
                    if (artifactFile.getFileName().toString().endsWith(".pom")) {
                        ac.getArtifactInfo().setFileExtension("pom");
                        ac.getArtifactInfo().setPackaging("pom");
                        ac.getArtifactInfo().setClassifier("pom");
                    }
                    if (indexingTask.getAction().equals(ArtifactIndexingTask.Action.ADD)) {
                        // query the index by GAV (plus classifier/packaging) to decide between add and update
                        BooleanQuery.Builder qb = new BooleanQuery.Builder();
                        qb.add(indexer.constructQuery(MAVEN.GROUP_ID, new SourcedSearchExpression(ac.getArtifactInfo().getGroupId())), BooleanClause.Occur.MUST);
                        qb.add(indexer.constructQuery(MAVEN.ARTIFACT_ID, new SourcedSearchExpression(ac.getArtifactInfo().getArtifactId())), BooleanClause.Occur.MUST);
                        qb.add(indexer.constructQuery(MAVEN.VERSION, new SourcedSearchExpression(ac.getArtifactInfo().getVersion())), BooleanClause.Occur.MUST);
                        if (ac.getArtifactInfo().getClassifier() != null) {
                            qb.add(indexer.constructQuery(MAVEN.CLASSIFIER, new SourcedSearchExpression(ac.getArtifactInfo().getClassifier())), BooleanClause.Occur.MUST);
                        }
                        if (ac.getArtifactInfo().getPackaging() != null) {
                            qb.add(indexer.constructQuery(MAVEN.PACKAGING, new SourcedSearchExpression(ac.getArtifactInfo().getPackaging())), BooleanClause.Occur.MUST);
                        }
                        FlatSearchRequest flatSearchRequest = new FlatSearchRequest(qb.build(), context);
                        FlatSearchResponse flatSearchResponse = indexer.searchFlat(flatSearchRequest);
                        if (flatSearchResponse.getResults().isEmpty()) {
                            log.debug("Adding artifact '{}' to index..", ac.getArtifactInfo());
                            indexerEngine.index(context, ac);
                        } else {
                            log.debug("Updating artifact '{}' in index..", ac.getArtifactInfo());
                            // TODO check if update exists !!
                            indexerEngine.update(context, ac);
                        }
                        context.updateTimestamp();
                        context.commit();
                    } else {
                        log.debug("Removing artifact '{}' from index..", ac.getArtifactInfo());
                        indexerEngine.remove(context, ac);
                    }
                }
            }
            // close the context if this is not a whole-repository scan request
            if (!indexingTask.isExecuteOnEntireRepo()) {
                log.debug("Finishing indexing task on resource file: {}", indexingTask.getResourceFile() != null ? indexingTask.getResourceFile() : "none");
                finishIndexingTask(indexingTask, repository, context);
            }
        } catch (IOException e) {
            log.error("Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(), e);
            throw new TaskExecutionException("Error occurred while executing indexing task '" + indexingTask + "'", e);
        }
    }
}
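For context, here is how a queue consumer might hand such a task to the executor and deal with the checked TaskExecutionException. This is a minimal sketch, not Archiva's actual wiring: the runner class, its field names, and the way the executor is obtained are illustrative assumptions, and the ArtifactIndexingTask constructor is elided because its signature varies across Archiva versions.

import org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException;
// The Archiva packages below are version-dependent; adjust to your source tree.
import org.apache.archiva.scheduler.indexing.ArchivaIndexingTaskExecutor;
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;

// Hypothetical caller, illustrating the checked-exception contract of executeTask().
public class IndexingTaskRunner {

    private final ArchivaIndexingTaskExecutor executor; // assumed injected (e.g. via Spring)

    public IndexingTaskRunner(ArchivaIndexingTaskExecutor executor) {
        this.executor = executor;
    }

    /**
     * Runs one indexing task, returning false instead of propagating the failure.
     * A real queue consumer would decide here whether to retry or drop the task.
     */
    public boolean runQuietly(ArtifactIndexingTask task) {
        try {
            executor.executeTask(task); // scans the repo or indexes a single artifact
            return true;
        } catch (TaskExecutionException e) {
            return false;
        }
    }
}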
Use of org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException in project archiva by apache.
The class ArchivaIndexingTaskExecutor, method finishIndexingTask. It optimizes the context and, unless the repository's IndexCreationFeature disables it, creates a packed index; IOExceptions are rethrown as TaskExecutionException.
private void finishIndexingTask(ArtifactIndexingTask indexingTask, ManagedRepository repository, IndexingContext context) throws TaskExecutionException {
    try {
        log.debug("Finishing indexing");
        context.optimize();
        if (repository.supportsFeature(IndexCreationFeature.class)) {
            IndexCreationFeature icf = repository.getFeature(IndexCreationFeature.class).get();
            if (!icf.isSkipPackedIndexCreation() && icf.getLocalPackedIndexPath() != null) {
                log.debug("Creating packed index from {} on {}", context.getIndexDirectoryFile(), icf.getLocalPackedIndexPath());
                IndexPackingRequest request = new IndexPackingRequest(context, context.acquireIndexSearcher().getIndexReader(), icf.getLocalPackedIndexPath().toFile());
                indexPacker.packIndex(request);
                context.updateTimestamp(true);
                log.debug("Index file packed at '{}'.", icf.getLocalPackedIndexPath());
            } else {
                log.debug("Skipping packed index creation");
            }
        } else {
            log.debug("Skipping packed index creation");
        }
    } catch (IOException e) {
        log.error("Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(), e);
        throw new TaskExecutionException("Error occurred while executing indexing task '" + indexingTask + "'", e);
    }
}
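One detail worth flagging in the packing step: the searcher obtained via context.acquireIndexSearcher() is never released. maven-indexer's IndexingContext pairs acquireIndexSearcher() with releaseIndexSearcher(IndexSearcher), so a version of the packing step with a deterministic release might look like the following sketch. The helper class and the packedIndexPath parameter are illustrative; the IndexPackingRequest constructor is the same one used above.

import java.io.IOException;
import java.nio.file.Path;
import org.apache.lucene.search.IndexSearcher;
import org.apache.maven.index.context.IndexingContext;
import org.apache.maven.index.packer.IndexPacker;
import org.apache.maven.index.packer.IndexPackingRequest;

// Illustrative helper: pack an index while balancing acquire/release of the searcher.
public class IndexPackingHelper {

    public static void packIndex(IndexingContext context, IndexPacker indexPacker, Path packedIndexPath) throws IOException {
        IndexSearcher searcher = context.acquireIndexSearcher();
        try {
            IndexPackingRequest request = new IndexPackingRequest(context, searcher.getIndexReader(), packedIndexPath.toFile());
            indexPacker.packIndex(request);
        } finally {
            // balances acquireIndexSearcher(); otherwise the reader reference leaks
            context.releaseIndexSearcher(searcher);
        }
    }
}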
Use of org.apache.archiva.redback.components.taskqueue.execution.TaskExecutionException in project archiva by apache.
The class ArchivaRepositoryScanningTaskExecutor, method executeTask. It either runs the consumers on a single resource file or scans the whole repository and stores updated statistics; invalid input and scanner or metadata errors are rethrown as TaskExecutionException.
@SuppressWarnings("unchecked")
@Override
public void executeTask(RepositoryTask task) throws TaskExecutionException {
    try {
        // TODO: replace this whole class with the prescribed content scanning service/action
        // - scan repository for artifacts that do not have corresponding metadata or have been updated and
        //   send events for each
        // - scan metadata for artifacts that have been removed and send events for each
        // - scan metadata for missing plugin data
        // - store information so that it can restart upon failure (publish event on the server recovery
        //   queue, remove it on successful completion)
        this.task = task;
        String repoId = task.getRepositoryId();
        if (StringUtils.isBlank(repoId)) {
            throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository id.");
        }
        ManagedRepository arepo = repositoryRegistry.getManagedRepository(repoId);
        // execute consumers on the resource file if set
        if (task.getResourceFile() != null) {
            log.debug("Executing task from queue with job name: {}", task);
            consumers.executeConsumers(arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts());
        } else {
            log.info("Executing task from queue with job name: {}", task);
            // otherwise, execute consumers on the whole repository
            if (arepo == null) {
                throw new TaskExecutionException("Unable to execute RepositoryTask with invalid repository id: " + repoId);
            }
            long sinceWhen = RepositoryScanner.FRESH_SCAN;
            long previousFileCount = 0;
            RepositorySession repositorySession = repositorySessionFactory.createSession();
            MetadataRepository metadataRepository = repositorySession.getRepository();
            try {
                if (!task.isScanAll()) {
                    RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(metadataRepository, repoId);
                    if (previousStats != null) {
                        sinceWhen = previousStats.getScanStartTime().getTime();
                        previousFileCount = previousStats.getTotalFileCount();
                    }
                }
                RepositoryScanStatistics stats;
                try {
                    stats = repoScanner.scan(arepo, sinceWhen);
                } catch (RepositoryScannerException e) {
                    throw new TaskExecutionException("Repository error when executing repository job.", e);
                }
                log.info("Finished first scan: {}", stats.toDump(arepo));
                // further statistics will be populated by the following method
                Date endTime = new Date(stats.getWhenGathered().getTime() + stats.getDuration());
                log.info("Gathering repository statistics");
                repositoryStatisticsManager.addStatisticsAfterScan(metadataRepository, repoId, stats.getWhenGathered(), endTime, stats.getTotalFileCount(), stats.getTotalFileCount() - previousFileCount);
                repositorySession.save();
            } catch (MetadataRepositoryException e) {
                throw new TaskExecutionException("Unable to store updated statistics: " + e.getMessage(), e);
            } finally {
                repositorySession.close();
            }
            // FIXME: scan for removed repository content as well
            log.info("Finished repository task: {}", task);
            this.task = null;
        }
    } catch (RepositoryAdminException e) {
        log.error(e.getMessage(), e);
        throw new TaskExecutionException(e.getMessage(), e);
    }
}
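To see the failure modes above from the caller's side, here is a sketch that builds a RepositoryTask for a full rescan and executes it. It assumes RepositoryTask is a plain bean whose setters mirror the getters used in the method (setRepositoryId, setScanAll); the executor and logger wiring is illustrative, not Archiva's actual configuration.

// Hypothetical usage sketch; setter names are assumed to mirror the getters above.
RepositoryTask task = new RepositoryTask();
task.setRepositoryId("internal"); // must name a registered managed repository
task.setScanAll(true);            // ignore previous statistics and rescan everything

try {
    scanningTaskExecutor.executeTask(task); // 'scanningTaskExecutor' assumed injected
} catch (TaskExecutionException e) {
    // a blank/unknown repository id or a RepositoryScannerException surfaces here
    log.error("Repository scan failed: {}", e.getMessage(), e);
}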