Use of org.apache.archiva.metadata.model.MetadataFacetFactory in project archiva by apache.
The class FileMetadataRepository, method getArtifacts.
@Override
public Collection<ArtifactMetadata> getArtifacts(String repoId, String namespace, String projectId, String projectVersion) throws MetadataResolutionException {
    try {
        Map<String, ArtifactMetadata> artifacts = new HashMap<>();
        Path directory = getDirectory(repoId).resolve(namespace + "/" + projectId + "/" + projectVersion);
        Properties properties = readOrCreateProperties(directory, PROJECT_VERSION_METADATA_KEY);
        for (Map.Entry<Object, Object> entry : properties.entrySet()) {
            String name = (String) entry.getKey();
            StringTokenizer tok = new StringTokenizer(name, ":");
            if (tok.hasMoreTokens() && "artifact".equals(tok.nextToken())) {
                String field = tok.nextToken();
                String id = tok.nextToken();
                ArtifactMetadata artifact = artifacts.get(id);
                if (artifact == null) {
                    artifact = new ArtifactMetadata();
                    artifact.setRepositoryId(repoId);
                    artifact.setNamespace(namespace);
                    artifact.setProject(projectId);
                    artifact.setProjectVersion(projectVersion);
                    artifact.setVersion(projectVersion);
                    artifact.setId(id);
                    artifacts.put(id, artifact);
                }
                String value = (String) entry.getValue();
                if ("updated".equals(field)) {
                    artifact.setFileLastModified(Long.parseLong(value));
                } else if ("size".equals(field)) {
                    artifact.setSize(Long.valueOf(value));
                } else if ("whenGathered".equals(field)) {
                    artifact.setWhenGathered(new Date(Long.parseLong(value)));
                } else if ("version".equals(field)) {
                    artifact.setVersion(value);
                } else if ("md5".equals(field)) {
                    artifact.setMd5(value);
                } else if ("sha1".equals(field)) {
                    artifact.setSha1(value);
                } else if ("facetIds".equals(field)) {
                    if (value.length() > 0) {
                        String propertyPrefix = "artifact:facet:" + id + ":";
                        for (String facetId : value.split(",")) {
                            MetadataFacetFactory factory = metadataFacetFactories.get(facetId);
                            if (factory == null) {
                                log.error("Attempted to load unknown artifact metadata facet: {}", facetId);
                            } else {
                                MetadataFacet facet = factory.createMetadataFacet();
                                String prefix = propertyPrefix + facet.getFacetId();
                                Map<String, String> map = new HashMap<>();
                                for (Object key : new ArrayList<>(properties.keySet())) {
                                    String property = (String) key;
                                    if (property.startsWith(prefix)) {
                                        map.put(property.substring(prefix.length() + 1), properties.getProperty(property));
                                    }
                                }
                                facet.fromProperties(map);
                                artifact.addFacet(facet);
                            }
                        }
                    }
                    updateArtifactFacets(artifact, properties);
                }
            }
        }
        return artifacts.values();
    } catch (IOException e) {
        throw new MetadataResolutionException(e.getMessage(), e);
    }
}
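The facet branch above only resolves facets whose id is registered in the metadataFacetFactories map. Below is a minimal sketch of a matching facet and factory pair. ChecksumReportFacet, its facet id, and its single "status" property are hypothetical; getName() and toProperties() are assumed counterparts of the getFacetId() and fromProperties() calls visible in the method.

import java.util.HashMap;
import java.util.Map;
import org.apache.archiva.metadata.model.MetadataFacet;
import org.apache.archiva.metadata.model.MetadataFacetFactory;

// Hypothetical facet: one "status" property stored by the file repository under
// "artifact:facet:<artifactId>:<facetId>:status".
public class ChecksumReportFacet implements MetadataFacet {

    public static final String FACET_ID = "org.example.checksum-report"; // made-up id

    private String status;

    @Override
    public String getFacetId() {
        return FACET_ID;
    }

    @Override
    public String getName() {
        return FACET_ID; // assumed method; a real facet usually returns a per-instance name
    }

    @Override
    public Map<String, String> toProperties() {
        // assumed inverse of fromProperties(); keys are relative to the facet prefix
        Map<String, String> props = new HashMap<>();
        if (status != null) {
            props.put("status", status);
        }
        return props;
    }

    @Override
    public void fromProperties(Map<String, String> properties) {
        // getArtifacts() strips "artifact:facet:<artifactId>:<facetId>:" before calling this
        this.status = properties.get("status");
    }
}

// Matching factory; getArtifacts() looks it up by facet id in metadataFacetFactories.
class ChecksumReportFacetFactory implements MetadataFacetFactory {

    @Override
    public MetadataFacet createMetadataFacet() {
        return new ChecksumReportFacet();
    }

    @Override
    public MetadataFacet createMetadataFacet(String repositoryId, String name) {
        return new ChecksumReportFacet();
    }
}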
Use of org.apache.archiva.metadata.model.MetadataFacetFactory in project archiva by apache.
The class FileMetadataRepository, method getMetadataFacet.
@Override
public MetadataFacet getMetadataFacet(String repositoryId, String facetId, String name) {
    Properties properties;
    try {
        properties = readProperties(getMetadataDirectory(repositoryId, facetId).resolve(name), METADATA_KEY);
    } catch (NoSuchFileException | FileNotFoundException e) {
        return null;
    } catch (IOException e) {
        log.error("Could not read properties from {}, {}: {}", repositoryId, facetId, e.getMessage(), e);
        return null;
    }
    MetadataFacet metadataFacet = null;
    MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get(facetId);
    if (metadataFacetFactory != null) {
        metadataFacet = metadataFacetFactory.createMetadataFacet(repositoryId, name);
        Map<String, String> map = new HashMap<>();
        for (Object key : new ArrayList<>(properties.keySet())) {
            String property = (String) key;
            map.put(property, properties.getProperty(property));
        }
        metadataFacet.fromProperties(map);
    }
    return metadataFacet;
}
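Note that the method returns null both when the backing facet file is missing and when no factory is registered for facetId, so callers cannot distinguish the two cases. A hypothetical lookup, reusing the ChecksumReportFacet sketched earlier and assuming repository is a FileMetadataRepository built with that factory registered:

// "internal" and "default" are illustrative values, not taken from the Archiva sources.
MetadataFacet facet = repository.getMetadataFacet("internal", ChecksumReportFacet.FACET_ID, "default");
if (facet != null) {
    ChecksumReportFacet report = (ChecksumReportFacet) facet;
    // fields were populated via fromProperties() from the stored key/value pairs
}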
Use of org.apache.archiva.metadata.model.MetadataFacetFactory in project archiva by apache.
The class FileMetadataRepositoryTest, method setUp.
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    Path directory = Paths.get("target/test-repositories");
    if (Files.exists(directory)) {
        org.apache.archiva.common.utils.FileUtils.deleteDirectory(directory);
    }
    ArchivaConfiguration config = createTestConfiguration(directory);
    Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
    this.repository = new FileMetadataRepository(factories, config);
}
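createTestMetadataFacetFactories() is presumably provided by the shared test base class; a factories map of the same shape can be assembled by hand, for example with the hypothetical factory from the first sketch:

// Sketch only: ChecksumReportFacetFactory is the hypothetical factory shown above,
// and config is the ArchivaConfiguration created by createTestConfiguration(directory).
Map<String, MetadataFacetFactory> factories = new HashMap<>();
factories.put(ChecksumReportFacet.FACET_ID, new ChecksumReportFacetFactory());
FileMetadataRepository repository = new FileMetadataRepository(factories, config);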
Use of org.apache.archiva.metadata.model.MetadataFacetFactory in project archiva by apache.
The class JcrMetadataRepositoryTest, method setUp.
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();
    // TODO: probably don't need to use Spring for this
    jcrMetadataRepository = new JcrMetadataRepository(factories, jcrRepository);
    try {
        Session session = jcrMetadataRepository.getJcrSession();
        // set up namespaces, etc.
        JcrMetadataRepository.initialize(session);
        // removing content is faster than deleting and re-copying the files from target/jcr
        session.getRootNode().getNode("repositories").remove();
        session.save();
    } catch (RepositoryException e) {
        // ignore
    }
    this.repository = jcrMetadataRepository;
}
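setUp() leaves the JCR session open for the duration of the test. A hedged sketch of a matching cleanup step, not taken from the Archiva test itself, would log out of that session afterwards:

// Sketch only: assumes getJcrSession() hands back the session opened during setUp().
// Session.logout() is plain JCR API and releases it.
@After
public void closeJcrSession() throws Exception {
    jcrMetadataRepository.getJcrSession().logout();
}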
Use of org.apache.archiva.metadata.model.MetadataFacetFactory in project archiva by apache.
The class CassandraMetadataRepository, method mapArtifactMetadataToArtifact.
/**
 * Attach metadata to each of the ArtifactMetadata objects.
 */
private List<ArtifactMetadata> mapArtifactMetadataToArtifact(QueryResult<OrderedRows<String, String, String>> result,
                                                             List<ArtifactMetadata> artifactMetadatas) {
    if (result.get() == null || result.get().getCount() < 1) {
        return artifactMetadatas;
    }
    final List<MetadataFacetModel> metadataFacetModels = new ArrayList<>(result.get().getCount());
    for (Row<String, String, String> row : result.get()) {
        ColumnSlice<String, String> columnSlice = row.getColumnSlice();
        MetadataFacetModel metadataFacetModel = new MetadataFacetModel();
        metadataFacetModel.setFacetId(getStringValue(columnSlice, FACET_ID.toString()));
        metadataFacetModel.setName(getStringValue(columnSlice, NAME.toString()));
        metadataFacetModel.setValue(getStringValue(columnSlice, VALUE.toString()));
        metadataFacetModel.setKey(getStringValue(columnSlice, KEY.toString()));
        metadataFacetModel.setProjectVersion(getStringValue(columnSlice, PROJECT_VERSION.toString()));
        metadataFacetModels.add(metadataFacetModel);
    }
    for (final ArtifactMetadata artifactMetadata : artifactMetadatas) {
        Iterable<MetadataFacetModel> metadataFacetModelIterable =
            Iterables.filter(metadataFacetModels, new Predicate<MetadataFacetModel>() {
                @Override
                public boolean apply(MetadataFacetModel metadataFacetModel) {
                    if (metadataFacetModel != null) {
                        return StringUtils.equals(artifactMetadata.getVersion(), metadataFacetModel.getProjectVersion());
                    }
                    return false;
                }
            });
        Iterator<MetadataFacetModel> iterator = metadataFacetModelIterable.iterator();
        // group the facet rows of this artifact by facet id
        Map<String, List<MetadataFacetModel>> metadataFacetValuesPerFacetId = new HashMap<>();
        while (iterator.hasNext()) {
            MetadataFacetModel metadataFacetModel = iterator.next();
            List<MetadataFacetModel> values = metadataFacetValuesPerFacetId.get(metadataFacetModel.getFacetId());
            if (values == null) {
                values = new ArrayList<>();
                metadataFacetValuesPerFacetId.put(metadataFacetModel.getFacetId(), values);
            }
            values.add(metadataFacetModel);
        }
        for (Map.Entry<String, List<MetadataFacetModel>> entry : metadataFacetValuesPerFacetId.entrySet()) {
            MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get(entry.getKey());
            if (metadataFacetFactory != null) {
                List<MetadataFacetModel> facetModels = entry.getValue();
                if (!facetModels.isEmpty()) {
                    MetadataFacet metadataFacet = metadataFacetFactory.createMetadataFacet();
                    Map<String, String> props = new HashMap<>(facetModels.size());
                    for (MetadataFacetModel metadataFacetModel : facetModels) {
                        props.put(metadataFacetModel.getKey(), metadataFacetModel.getValue());
                    }
                    metadataFacet.fromProperties(props);
                    artifactMetadata.addFacet(metadataFacet);
                }
            }
        }
    }
    return artifactMetadatas;
}
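The grouping step above combines Guava's Iterables.filter with a manual loop; on Java 8 the same per-artifact grouping can be written with streams. This is purely an illustrative alternative (requires java.util.stream.Collectors), not code from the project:

// Equivalent grouping of the facet rows for one artifactMetadata, keyed by facet id.
Map<String, List<MetadataFacetModel>> metadataFacetValuesPerFacetId = metadataFacetModels.stream()
    .filter(m -> m != null && StringUtils.equals(artifactMetadata.getVersion(), m.getProjectVersion()))
    .collect(Collectors.groupingBy(MetadataFacetModel::getFacetId));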