Use of org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics in the Apache Archiva project.
Example: class TestRepositoryStatisticsManager, method addStatisticsAfterScan.
/**
 * Records the outcome of a repository scan as an in-memory statistics entry.
 * Test implementation: builds a {@link DefaultRepositoryStatistics} from the
 * scan results and appends it to this manager's per-repository list.
 */
@Override
public void addStatisticsAfterScan(MetadataRepository metadataRepository, String repositoryId, Date startTime, Date endTime, long totalFiles, long newFiles) {
    DefaultRepositoryStatistics scanRecord = new DefaultRepositoryStatistics();
    scanRecord.setRepositoryId(repositoryId);
    scanRecord.setScanStartTime(startTime);
    scanRecord.setScanEndTime(endTime);
    scanRecord.setTotalFileCount(totalFiles);
    scanRecord.setNewFileCount(newFiles);
    getStatsList(repositoryId).add(scanRecord);
}
Use of org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics in the Apache Archiva project.
Example: class ArchivaRepositoryScanningTaskExecutor, method executeTask.
/**
 * Runs a queued repository scan task. If the task carries a single resource
 * file, the consumers are executed on that file only; otherwise the whole
 * managed repository is scanned and the resulting statistics are stored via
 * the statistics manager.
 *
 * @param task the queued scan task; its repository id must be non-blank
 * @throws TaskExecutionException on a blank or unknown repository id, on a
 *         scanner failure, or when the updated statistics cannot be stored
 */
@SuppressWarnings("unchecked")
@Override
public void executeTask(RepositoryTask task) throws TaskExecutionException {
try {
// TODO: replace this whole class with the prescribed content scanning service/action
// - scan repository for artifacts that do not have corresponding metadata or have been updated and
// send events for each
// - scan metadata for artifacts that have been removed and send events for each
// - scan metadata for missing plugin data
// - store information so that it can restart upon failure (publish event on the server recovery
// queue, remove it on successful completion)
this.task = task;
String repoId = task.getRepositoryId();
if (StringUtils.isBlank(repoId)) {
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
}
ManagedRepository arepo = repositoryRegistry.getManagedRepository(repoId);
// execute consumers on resource file if set
if (task.getResourceFile() != null) {
log.debug("Executing task from queue with job name: {}", task);
// NOTE(review): this.task is reset to null only at the end of the
// whole-repository branch below, not on this single-resource path —
// confirm whether leaving it set here is intentional.
consumers.executeConsumers(arepo, task.getResourceFile(), task.isUpdateRelatedArtifacts());
} else {
log.info("Executing task from queue with job name: {}", task);
// otherwise, execute consumers on whole repository
if (arepo == null) {
throw new TaskExecutionException("Unable to execute RepositoryTask with invalid repository id: " + repoId);
}
// FRESH_SCAN requests a full scan; when previous statistics exist (and
// scanAll is not forced) we scan incrementally from the last start time.
long sinceWhen = RepositoryScanner.FRESH_SCAN;
long previousFileCount = 0;
RepositorySession repositorySession = repositorySessionFactory.createSession();
MetadataRepository metadataRepository = repositorySession.getRepository();
try {
if (!task.isScanAll()) {
RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(metadataRepository, repoId);
if (previousStats != null) {
sinceWhen = previousStats.getScanStartTime().getTime();
previousFileCount = previousStats.getTotalFileCount();
}
}
RepositoryScanStatistics stats;
try {
stats = repoScanner.scan(arepo, sinceWhen);
} catch (RepositoryScannerException e) {
throw new TaskExecutionException("Repository error when executing repository job.", e);
}
log.info("Finished first scan: {}", stats.toDump(arepo));
// further statistics will be populated by the following method
Date endTime = new Date(stats.getWhenGathered().getTime() + stats.getDuration());
log.info("Gathering repository statistics");
// The new-file count is derived as the delta against the previous scan's total.
repositoryStatisticsManager.addStatisticsAfterScan(metadataRepository, repoId, stats.getWhenGathered(), endTime, stats.getTotalFileCount(), stats.getTotalFileCount() - previousFileCount);
repositorySession.save();
} catch (MetadataRepositoryException e) {
throw new TaskExecutionException("Unable to store updated statistics: " + e.getMessage(), e);
} finally {
// Always release the session, even when the scan or the save fails.
repositorySession.close();
}
// log.info( "Scanning for removed repository content" );
// metadataRepository.findAllProjects();
// FIXME: do something
log.info("Finished repository task: {}", task);
this.task = null;
}
} catch (RepositoryAdminException e) {
log.error(e.getMessage(), e);
throw new TaskExecutionException(e.getMessage(), e);
}
}
Use of org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics in the Apache Archiva project.
Example: class ArchivaRepositoryScanningTaskExecutorPhase2Test, method testExecutorScanOnlyNewArtifactsMidScan.
/**
 * Verifies that an incremental scan (scanAll = false) detects only artifacts
 * whose timestamps fall after the previously recorded scan window started,
 * and that the stored statistics reflect the incremental result.
 */
@Test
public void testExecutorScanOnlyNewArtifactsMidScan() throws Exception {
    RepositoryTask scanTask = new RepositoryTask();
    scanTask.setRepositoryId(TEST_REPO_ID);
    scanTask.setScanAll(false);

    createAndSaveTestStats();

    // Copy a new artifact group into the repository; it must not exist beforehand.
    Path groupDir = repoDir.resolve("org/apache/archiva");
    assertFalse("newArtifactGroup should not exist.", Files.exists(groupDir));
    FileUtils.copyDirectoryStructure(
        Paths.get("target/test-classes/test-repo/org/apache/archiva").toFile(), groupDir.toFile());

    // Backdate the copied files so their timestamps land in the middle of the last scan.
    Files.setLastModifiedTime(
        groupDir.resolve("archiva-index-methods-jar-test/1.0/pom.xml"),
        FileTime.fromMillis(Calendar.getInstance().getTimeInMillis() - 50000));
    Files.setLastModifiedTime(
        groupDir.resolve("archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar"),
        FileTime.fromMillis(Calendar.getInstance().getTimeInMillis() - 50000));
    assertTrue(Files.exists(groupDir));

    // Run the scan using the really long previous duration recorded above.
    taskExecutor.executeTask(scanTask);

    // Only the newly added artifact should have reached the consumer.
    Collection<ArtifactReference> consumed = testConsumer.getConsumed();
    assertNotNull(consumed);
    assertEquals("Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1, consumed.size());

    // Stored statistics should reflect the incremental scan.
    RepositoryStatistics latestStats = repositoryStatisticsManager.getLastStatistics(metadataRepository, TEST_REPO_ID);
    assertEquals(2, latestStats.getNewFileCount());
    assertEquals(33, latestStats.getTotalFileCount());
    // FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
    // assertEquals( 8, newStats.getTotalArtifactCount() );
    // assertEquals( 3, newStats.getTotalGroupCount() );
    // assertEquals( 5, newStats.getTotalProjectCount() );
    // assertEquals( 19301, newStats.getTotalArtifactFileSize() );
}
Use of org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics in the Apache Archiva project.
Example: class DefaultRepositoryStatisticsManager, method getStatisticsInRange.
/**
 * Returns all stored scan statistics for a repository whose scan timestamp
 * falls within the given (inclusive) range, newest first.
 *
 * @param metadataRepository the metadata repository to read facets from
 * @param repositoryId       the repository whose statistics are requested
 * @param startTime          inclusive lower bound, or null for unbounded
 * @param endTime            inclusive upper bound, or null for unbounded
 * @return matching statistics in reverse chronological order (never null)
 * @throws MetadataRepositoryException on repository access failure
 */
@Override
public List<RepositoryStatistics> getStatisticsInRange(MetadataRepository metadataRepository, String repositoryId, Date startTime, Date endTime) throws MetadataRepositoryException {
    List<RepositoryStatistics> results = new ArrayList<>();
    // Facet names encode the scan timestamp, so a reverse lexicographic sort
    // yields newest-first ordering.
    List<String> list = metadataRepository.getMetadataFacets(repositoryId, DefaultRepositoryStatistics.FACET_ID);
    Collections.sort(list, Collections.reverseOrder());
    for (String name : list) {
        try {
            Date date = createNameFormat().parse(name);
            if ((startTime == null || !date.before(startTime)) && (endTime == null || !date.after(endTime))) {
                RepositoryStatistics stats = (RepositoryStatistics) metadataRepository.getMetadataFacet(repositoryId, DefaultRepositoryStatistics.FACET_ID, name);
                // Guard against a facet removed between listing and lookup;
                // previously a null could be added to the result list.
                if (stats != null) {
                    results.add(stats);
                }
            }
        } catch (ParseException e) {
            log.error("Invalid scan result found in the metadata repository: {}", e.getMessage());
            // continue and ignore this one
        }
    }
    return results;
}
Use of org.apache.archiva.metadata.repository.stats.model.RepositoryStatistics in the Apache Archiva project.
Example: class DefaultRepositoryStatisticsManager, method getLastStatistics.
/**
 * Returns the most recent scan statistics stored for the given repository,
 * or null when no scans have been recorded.
 *
 * Fix: the facet list was fetched with {@code DefaultRepositoryStatistics.FACET_ID}
 * but the facet itself was looked up with {@code RepositoryStatistics.FACET_ID};
 * both lookups now use the same constant, consistent with getStatisticsInRange.
 *
 * @param metadataRepository the metadata repository to read facets from
 * @param repositoryId       the repository whose latest statistics are requested
 * @return the latest statistics, or null when none exist
 * @throws MetadataRepositoryException on repository access failure
 */
@Override
public RepositoryStatistics getLastStatistics(MetadataRepository metadataRepository, String repositoryId) throws MetadataRepositoryException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    // TODO: consider a more efficient implementation that directly gets the last one from the content repository
    List<String> scans = metadataRepository.getMetadataFacets(repositoryId, DefaultRepositoryStatistics.FACET_ID);
    if (scans == null || scans.isEmpty()) {
        return null;
    }
    // Facet names encode the scan timestamp, so after an ascending sort the
    // last entry is the most recent scan.
    Collections.sort(scans);
    String name = scans.get(scans.size() - 1);
    RepositoryStatistics repositoryStatistics = (RepositoryStatistics) metadataRepository.getMetadataFacet(repositoryId, DefaultRepositoryStatistics.FACET_ID, name);
    stopWatch.stop();
    log.debug("time to find last RepositoryStatistics: {} ms", stopWatch.getTime());
    return repositoryStatistics;
}
Aggregations