Use of org.apache.archiva.consumers.ConsumerException in project archiva by apache.
The class NexusIndexerConsumer, method beginScan:
@Override
public void beginScan(ManagedRepository repository, Date whenGathered) throws ConsumerException {
    this.repository = repository;
    managedRepository = PathUtil.getPathFromUri(repository.getLocation());
    try {
        log.info("Creating indexing context for repo: {}", repository.getId());
        if (repository.getType() == RepositoryType.MAVEN) {
            indexingContext = repository.getIndexingContext().getBaseContext(IndexingContext.class);
        } else {
            // Only Maven repositories are compatible with the nexus indexer.
            indexingContext = null;
        }
    } catch (UnsupportedBaseContextException e) {
        log.error("Bad repository type. Not nexus indexer compatible.", e);
        throw new ConsumerException("Bad repository type " + repository.getType(), e);
    }
}
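beginScan runs once per repository before any files are processed, which is why the indexing context is set up here rather than in processFile. A minimal sketch of the surrounding lifecycle (SimpleConsumer and ScanDriver are hypothetical stand-ins, not Archiva's actual RepositoryContentConsumer or scanner API):

import java.nio.file.*;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical stand-in for Archiva's consumer contract.
interface SimpleConsumer {
    void beginScan(Path repositoryDir, Date whenGathered) throws Exception;
    void processFile(String relativePath) throws Exception;
    void completeScan();
}

public class ScanDriver {
    // Walk a repository directory and feed each file to the consumer, mirroring
    // the beginScan -> processFile -> completeScan lifecycle used above.
    static void scan(Path repoDir, SimpleConsumer consumer) throws Exception {
        consumer.beginScan(repoDir, new Date());
        try (Stream<Path> walk = Files.walk(repoDir)) {
            List<Path> files = walk.filter(Files::isRegularFile).collect(Collectors.toList());
            for (Path file : files) {
                consumer.processFile(repoDir.relativize(file).toString());
            }
        } finally {
            consumer.completeScan();
        }
    }
}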
Use of org.apache.archiva.consumers.ConsumerException in project archiva by apache.
The class AutoRenameConsumer, method processFile:
@Override
public void processFile(String path) throws ConsumerException {
    Path file = this.repositoryDir.resolve(path);
    if (Files.exists(file)) {
        Iterator<String> itExtensions = this.extensionRenameMap.keySet().iterator();
        while (itExtensions.hasNext()) {
            String extension = itExtensions.next();
            if (path.endsWith(extension)) {
                String fixedExtension = this.extensionRenameMap.get(extension);
                String correctedPath = path.substring(0, path.length() - extension.length()) + fixedExtension;
                Path to = repositoryDir.resolve(correctedPath);
                try {
                    // Rename the file to its corrected extension.
                    FileUtils.moveFile(file.toFile(), to.toFile());
                } catch (IOException e) {
                    log.warn("Unable to rename {} to {}:", path, correctedPath, e);
                    triggerConsumerWarning(RENAME_FAILURE, "Unable to rename " + path + " to " + correctedPath + ": " + e.getMessage());
                }
            }
        }
        log.info("(Auto) Removing File: {}", file.toAbsolutePath());
        triggerConsumerInfo("(Auto) Removing File: " + file.toAbsolutePath());
        try {
            // A successful rename has already moved the original away, so use
            // deleteIfExists rather than delete to avoid NoSuchFileException.
            Files.deleteIfExists(file);
        } catch (IOException e) {
            log.error("Could not delete file {}: {}", file, e.getMessage(), e);
            throw new ConsumerException("File deletion failed " + file, e);
        }
    }
}
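The behavior is driven entirely by extensionRenameMap, which maps a wrong extension to its corrected form. A self-contained sketch of the same rename logic (the two mappings are illustrative assumptions, not necessarily Archiva's actual map):

import java.io.IOException;
import java.nio.file.*;
import java.util.Map;

public class ExtensionRenamer {
    // Illustrative mappings; Archiva's real extensionRenameMap may differ.
    static final Map<String, String> RENAME_MAP = Map.of(
        ".distribution-tgz", ".tar.gz",
        ".distribution-zip", ".zip");

    // If the path ends with a known bad extension, move the file to the
    // corrected name and report the new relative path.
    static String renameIfNeeded(Path repositoryDir, String path) throws IOException {
        for (Map.Entry<String, String> entry : RENAME_MAP.entrySet()) {
            if (path.endsWith(entry.getKey())) {
                String corrected = path.substring(0, path.length() - entry.getKey().length())
                    + entry.getValue();
                Files.move(repositoryDir.resolve(path), repositoryDir.resolve(corrected),
                    StandardCopyOption.REPLACE_EXISTING);
                return corrected;
            }
        }
        return path; // no rename needed
    }
}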
Use of org.apache.archiva.consumers.ConsumerException in project archiva by apache.
The class AutoRemoveConsumer, method processFile:
@Override
public void processFile(String path) throws ConsumerException {
    Path file = this.repositoryDir.resolve(path);
    if (Files.exists(file)) {
        log.info("(Auto) Removing File: {}", file.toAbsolutePath());
        triggerConsumerInfo("(Auto) Removing File: " + file.toAbsolutePath());
        try {
            Files.deleteIfExists(file);
        } catch (IOException e) {
            log.error("Could not delete file {}: {}", file, e.getMessage(), e);
            throw new ConsumerException("Could not delete file " + file, e);
        }
    }
}
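Note the use of Files.deleteIfExists rather than Files.delete: it returns false instead of throwing when the file has already disappeared, for example if another consumer removed it first. A quick runnable illustration:

import java.io.IOException;
import java.nio.file.*;

public class DeleteDemo {
    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("auto-remove-demo", ".tmp");
        System.out.println(Files.deleteIfExists(file)); // true: file was removed
        System.out.println(Files.deleteIfExists(file)); // false: already gone, no exception
        // Files.delete(file) here would throw NoSuchFileException instead.
    }
}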
Use of org.apache.archiva.consumers.ConsumerException in project archiva by apache.
The class MetadataUpdaterConsumer, method beginScan:
@Override
public void beginScan(ManagedRepository repoConfig, Date whenGathered) throws ConsumerException {
    try {
        ManagedRepository repo = repositoryRegistry.getManagedRepository(repoConfig.getId());
        if (repo == null) {
            throw new RepositoryNotFoundException("Repository not found: " + repoConfig.getId());
        }
        this.repository = repo.getContent();
        if (this.repository == null) {
            throw new RepositoryNotFoundException("Repository content not found: " + repoConfig.getId());
        }
        this.repositoryDir = Paths.get(repository.getRepoRoot());
        this.scanStartTimestamp = System.currentTimeMillis();
    } catch (RepositoryException e) {
        throw new ConsumerException(e.getMessage(), e);
    }
}
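A detail worth noting: the null checks throw RepositoryNotFoundException inside the try block, so a single catch clause translates every repository-layer failure into the ConsumerException the scanning API expects, with the cause preserved. A self-contained sketch of that pattern (all types below are hypothetical stand-ins for the Archiva classes):

// Hypothetical minimal types mirroring the exception translation above.
class RepositoryException extends Exception {
    RepositoryException(String message) { super(message); }
}

class RepositoryNotFoundException extends RepositoryException {
    RepositoryNotFoundException(String message) { super(message); }
}

class ConsumerException extends Exception {
    ConsumerException(String message, Throwable cause) { super(message, cause); }
}

public class BeginScanSketch {
    static Object getManagedRepository(String id) {
        return null; // simulate an unknown repository id
    }

    static void beginScan(String repoId) throws ConsumerException {
        try {
            Object repo = getManagedRepository(repoId);
            if (repo == null) {
                // Null lookups are promoted to exceptions inside the try block ...
                throw new RepositoryNotFoundException("Repository not found: " + repoId);
            }
        } catch (RepositoryException e) {
            // ... so one catch clause handles every repository-layer failure,
            // keeping the original exception as the cause.
            throw new ConsumerException(e.getMessage(), e);
        }
    }
}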
Use of org.apache.archiva.consumers.ConsumerException in project archiva by apache.
The class DuplicateArtifactsConsumer, method processFile:
@Override
public void processFile(String path) throws ConsumerException {
    Path artifactFile = this.repositoryDir.resolve(path);
    // TODO: would be quicker to somehow make sure it ran after the update database consumer, or as a part of that
    // perhaps could use an artifact context that is retained for all consumers? First in can set the SHA-1
    // alternatively this could come straight from the storage resolver, which could populate the artifact metadata
    // in the later parse call with the desired checksum and use that
    String checksumSha1;
    ChecksummedFile checksummedFile = new ChecksummedFile(artifactFile);
    try {
        checksumSha1 = checksummedFile.calculateChecksum(ChecksumAlgorithm.SHA1);
    } catch (IOException e) {
        throw new ConsumerException(e.getMessage(), e);
    }
    MetadataRepository metadataRepository = repositorySession.getRepository();
    Collection<ArtifactMetadata> results;
    try {
        results = metadataRepository.getArtifactsByChecksum(repoId, checksumSha1);
    } catch (MetadataRepositoryException e) {
        repositorySession.close();
        throw new ConsumerException(e.getMessage(), e);
    }
    if (CollectionUtils.isNotEmpty(results)) {
        ArtifactMetadata originalArtifact;
        try {
            originalArtifact = pathTranslator.getArtifactForPath(repoId, path);
        } catch (Exception e) {
            log.warn("Not reporting problem for invalid artifact in checksum check: {}", e.getMessage());
            return;
        }
        for (ArtifactMetadata dupArtifact : results) {
            String id = path.substring(path.lastIndexOf('/') + 1);
            if (dupArtifact.getId().equals(id)
                && dupArtifact.getNamespace().equals(originalArtifact.getNamespace())
                && dupArtifact.getProject().equals(originalArtifact.getProject())
                && dupArtifact.getVersion().equals(originalArtifact.getVersion())) {
                // Skip reference to itself.
                log.debug("Not counting duplicate for artifact {} for path {}", dupArtifact, path);
                continue;
            }
            RepositoryProblemFacet problem = new RepositoryProblemFacet();
            problem.setRepositoryId(repoId);
            problem.setNamespace(originalArtifact.getNamespace());
            problem.setProject(originalArtifact.getProject());
            problem.setVersion(originalArtifact.getVersion());
            problem.setId(id);
            // FIXME: need to get the right storage resolver for the repository the dupe artifact is in, it might be
            // a different type
            // FIXME: we need the project version here, not the artifact version
            problem.setMessage("Duplicate Artifact Detected: " + path + " <--> " + pathTranslator.toPath(
                dupArtifact.getNamespace(), dupArtifact.getProject(), dupArtifact.getVersion(), dupArtifact.getId()));
            problem.setProblem("duplicate-artifact");
            try {
                metadataRepository.addMetadataFacet(repoId, problem);
            } catch (MetadataRepositoryException e) {
                throw new ConsumerException(e.getMessage(), e);
            }
        }
    }
}
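The detection strategy reduces to: compute the file's SHA-1, look up every known artifact with the same digest, and report each hit that is not the file itself. A self-contained sketch of that idea, with a plain in-memory map standing in for MetadataRepository:

import java.io.IOException;
import java.nio.file.*;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;

public class DuplicateFinder {
    // Maps a hex SHA-1 digest to every relative path already seen with that digest.
    private final Map<String, List<String>> byChecksum = new HashMap<>();

    // Reads the whole file into memory; fine for a sketch, not for huge artifacts.
    static String sha1Hex(Path file) throws IOException, NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("SHA-1").digest(Files.readAllBytes(file));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    // Indexes the file and returns any other paths recorded with the same content.
    List<String> process(Path repositoryDir, String path) throws IOException, NoSuchAlgorithmException {
        String checksum = sha1Hex(repositoryDir.resolve(path));
        List<String> sameContent = byChecksum.computeIfAbsent(checksum, k -> new ArrayList<>());
        List<String> duplicates = new ArrayList<>(sameContent);
        duplicates.remove(path); // skip the reference to itself, as the consumer above does
        sameContent.add(path);
        return duplicates;
    }
}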