Usage example of ca.nrc.cadc.caom2.Provenance in project caom2db (opencadc): class ObservationDAO, method deleteChildren.
/**
 * Delete all child planes (and their own children) of the given observation skeleton.
 *
 * @param s    skeleton of the observation whose children are removed; must be an
 *             {@link ObservationSkeleton}
 * @param jdbc template used to execute the generated delete statements
 */
@Override
protected void deleteChildren(Skeleton s, JdbcTemplate jdbc) {
    ObservationSkeleton o = (ObservationSkeleton) s;
    if (!o.planes.isEmpty()) {
        for (PlaneSkeleton p : o.planes) {
            // depth-first: remove the plane's children before the plane itself
            planeDAO.deleteChildren(p, jdbc);
            // delete planes by PK so we also clean up provenance join table
            EntityDelete op = gen.getEntityDelete(Plane.class, true);
            op.setID(p.id);
            op.execute(jdbc);
        }
    } else {
        log.debug("no children: " + o.id);
    }
}
Usage example of ca.nrc.cadc.caom2.Provenance in project caom2db (opencadc): class AbstractObservationDAOTest, method getTestPlane.
/**
 * Build a Plane test fixture.
 *
 * @param full      when true, populate every optional field of the plane
 * @param productID product identifier for the new plane
 * @param depth     when greater than 2, two child artifacts are attached
 * @param poly      when true, use a polygon for the positional bounds; otherwise a circle
 * @return the populated test plane
 * @throws Exception if any URI construction fails
 */
protected Plane getTestPlane(boolean full, String productID, int depth, boolean poly) throws Exception {
    Plane plane = new Plane(productID);
    if (full) {
        plane.metaProducer = URI.create("test:plane/roundrip-1.0");
        plane.creatorID = URI.create("ivo://example.com/TEST?" + productID);
        plane.calibrationLevel = CalibrationLevel.CALIBRATED;
        plane.dataProductType = DataProductType.IMAGE;
        plane.metaRelease = TEST_DATE;
        plane.dataRelease = TEST_DATE;
        // provenance with keywords and two inputs
        plane.provenance = new Provenance("doit");
        plane.provenance.lastExecuted = TEST_DATE;
        plane.provenance.producer = "MyProducer";
        plane.provenance.project = "MyProject";
        plane.provenance.reference = new URI("http://www.example.com/MyProject/doit");
        plane.provenance.runID = "RUNID123";
        plane.provenance.version = "0.1alpha4";
        plane.provenance.getKeywords().addAll(TEST_KEYWORDS);
        plane.provenance.getInputs().add(new PlaneURI(new ObservationURI("FOO", "bar"), "in1"));
        plane.provenance.getInputs().add(new PlaneURI(new ObservationURI("FOO", "bar"), "in2"));
        plane.metrics = new Metrics();
        plane.metrics.sourceNumberDensity = 100.0;
        plane.metrics.background = 2.7;
        plane.metrics.backgroundStddev = 0.3;
        plane.metrics.fluxDensityLimit = 1.0e-5;
        plane.metrics.magLimit = 28.5;
        plane.metrics.sampleSNR = 11.0;
        plane.quality = new DataQuality(Quality.JUNK);
        // previously was computed metadata
        plane.energy = new Energy();
        plane.energy.bandpassName = "V";
        plane.energy.bounds = new SampledInterval(400e-6, 900e-6);
        plane.energy.bounds.getSamples().add(new Interval(400e-6, 500e-6));
        plane.energy.bounds.getSamples().add(new Interval(800e-6, 900e-6));
        plane.energy.dimension = 2L;
        plane.energy.getEnergyBands().add(EnergyBand.OPTICAL);
        plane.energy.resolvingPower = 2.0;
        plane.energy.resolvingPowerBounds = new Interval(1.8, 2.2);
        plane.energy.restwav = 600e-9;
        plane.energy.sampleSize = 100e-6;
        plane.energy.transition = new EnergyTransition("H", "alpha");
        plane.polarization = new Polarization();
        plane.polarization.dimension = 3L;
        plane.polarization.states = new TreeSet<>();
        plane.polarization.states.add(PolarizationState.I);
        plane.polarization.states.add(PolarizationState.Q);
        plane.polarization.states.add(PolarizationState.U);
        plane.position = new Position();
        if (poly) {
            MultiPolygon outlineSegments = new MultiPolygon();
            outlineSegments.getVertices().add(new Vertex(2.0, 2.0, SegmentType.MOVE));
            outlineSegments.getVertices().add(new Vertex(1.0, 4.0, SegmentType.LINE));
            outlineSegments.getVertices().add(new Vertex(3.0, 3.0, SegmentType.LINE));
            outlineSegments.getVertices().add(new Vertex(0.0, 0.0, SegmentType.CLOSE));
            // collect the outline points, skipping the CLOSE vertex
            List<Point> outline = new ArrayList<Point>();
            for (Vertex vtx : outlineSegments.getVertices()) {
                if (SegmentType.CLOSE.equals(vtx.getType())) {
                    continue;
                }
                outline.add(new Point(vtx.cval1, vtx.cval2));
            }
            plane.position.bounds = new Polygon(outline, outlineSegments);
        } else {
            plane.position.bounds = new Circle(new Point(0.0, 89.0), 2.0);
        }
        plane.position.dimension = new Dimension2D(1024, 2048);
        plane.position.resolution = 0.05;
        plane.position.resolutionBounds = new Interval(0.04, 0.06);
        plane.position.sampleSize = 0.025;
        plane.position.timeDependent = false;
        plane.time = new Time();
        plane.time.bounds = new SampledInterval(50000.25, 50000.75);
        plane.time.bounds.getSamples().add(new Interval(50000.25, 50000.40));
        plane.time.bounds.getSamples().add(new Interval(50000.50, 50000.75));
        plane.time.dimension = 2L;
        plane.time.exposure = 666.0;
        plane.time.resolution = 0.5;
        plane.time.resolutionBounds = new Interval(0.22, 0.88);
        plane.time.sampleSize = 0.15;
        plane.custom = new CustomAxis("FDEP");
        plane.custom.bounds = new SampledInterval(100.0, 200.0);
        plane.custom.bounds.getSamples().add(new Interval(100.0, 140.0));
        plane.custom.bounds.getSamples().add(new Interval(160.0, 200.0));
        plane.custom.bounds.validate();
        plane.custom.dimension = 1024L;
        plane.observable = new Observable("phot.flux");
        plane.getMetaReadGroups().add(URI.create("ivo://example.net/gms?GroupA"));
        plane.getMetaReadGroups().add(URI.create("ivo://example.net/gms?GroupB"));
        plane.getDataReadGroups().add(URI.create("ivo://example.net/gms?GroupC"));
        plane.getDataReadGroups().add(URI.create("ivo://example.net/gms?GroupD"));
    }
    if (depth <= 2) {
        return plane;
    }
    plane.getArtifacts().add(getTestArtifact(full, new URI("http://www.example.com/stuff/" + productID + "a"), depth));
    plane.getArtifacts().add(getTestArtifact(full, new URI("http://www.example.com/stuff/" + productID + "b"), depth));
    Assert.assertEquals(2, plane.getArtifacts().size());
    return plane;
}
Usage example of ca.nrc.cadc.caom2.Provenance in project geonetwork-microservices (geonetwork): class DcatConverter, method convert.
/**
 * Convert an index document into a DCAT catalog record.
 *
 * <p>The document's {@code source} field is deserialized into an
 * {@link IndexRecord} and mapped onto a {@code CatalogRecord} whose primary
 * topic is the corresponding DCAT {@code Dataset}.
 *
 * @param doc index document containing the record under {@code IndexRecordFieldNames.source}
 * @return the catalog record, or {@code null} when JSON deserialization fails
 */
public CatalogRecord convert(JsonNode doc) {
    CatalogRecord catalogRecord = null;
    Dataset dcatDataset = null;
    try {
        IndexRecord record = new ObjectMapper()
                .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
                .readValue(doc.get(IndexRecordFieldNames.source).toString(), IndexRecord.class);
        Optional<ResourceIdentifier> resourceIdentifier =
                record.getResourceIdentifier().stream().filter(Objects::nonNull).findFirst();
        // TODO: Define strategy to build IRI
        final String resourceIdentifierUri = resourceIdentifier.isPresent()
                ? "local:" + resourceIdentifier.get().getCode()
                : null;
        // Fall back to the catalogue default when the record declares no main language.
        // languageUpperCase is reused for the catalog-record language below, so a
        // missing main language can no longer cause an NPE there.
        String language = record.getMainLanguage() == null ? defaultLanguage : record.getMainLanguage();
        // NOTE(review): toUpperCase() is locale-sensitive; consider toUpperCase(Locale.ROOT)
        String languageUpperCase = language.toUpperCase();
        // TODO: Add multilingual support
        // TODO .resource("https://creativecommons.org/publicdomain/zero/1.0/deed")
        DatasetBuilder datasetBuilder = Dataset.builder()
                .identifier(record.getResourceIdentifier().stream()
                        .map(c -> c.getCode())
                        .collect(Collectors.toList()))
                .title(listOfNullable(record.getResourceTitle().get(defaultText)))
                .description(listOfNullable(record.getResourceAbstract().get(defaultText)))
                .landingPage(listOfNullable(DcatDocument.builder()
                        .foafDocument(FoafDocument.builder()
                                .about(formatterConfiguration.buildLandingPageLink(record.getMetadataIdentifier()))
                                .title(record.getResourceTitle().get(defaultText))
                                .build())
                        .build()))
                .provenance(record.getResourceLineage().stream()
                        .map(l -> Provenance.builder()
                                .provenanceStatement(ProvenanceStatement.builder()
                                        .label(l.get(defaultText))
                                        .build())
                                .build())
                        .collect(Collectors.toList()))
                .type(record.getResourceType().stream()
                        .map(t -> new RdfResource(null, "dcat:" + RESSOURCE_TYPE_MAPPING.get(t), null))
                        .collect(Collectors.toList()))
                .modified(toDate(record.getChangeDate()))
                // NOTE(review): getCodelists().get(topic) may return null and NPE here —
                // confirm the index always carries the topic codelist
                .theme(record.getCodelists().get(topic).stream()
                        .map(t -> Subject.builder()
                                .skosConcept(SkosConcept.builder()
                                        .prefLabel(t.getProperties().get(defaultText))
                                        .build())
                                .build())
                        .collect(Collectors.toList()))
                // NOTE(review): second .theme(...) call — verify the builder accumulates
                // values rather than replacing the previous list
                .theme(record.getTag().stream()
                        .map(t -> Subject.builder()
                                .skosConcept(SkosConcept.builder()
                                        .prefLabel(t.get(defaultText))
                                        .build())
                                .build())
                        .collect(Collectors.toList()));
        // Map typed resource dates onto the matching DCAT terms.
        record.getResourceDate().stream()
                .filter(d -> "creation".equals(d.getType()))
                .forEach(d -> datasetBuilder.created(toDate(d.getDate())));
        record.getResourceDate().stream()
                .filter(d -> "publication".equals(d.getType()))
                .forEach(d -> datasetBuilder.issued(toDate(d.getDate())));
        record.getResourceDate().stream()
                .filter(d -> "revision".equals(d.getType()))
                .forEach(d -> datasetBuilder.modified(toDate(d.getDate())));
        // TODO: Convert to meter ?
        datasetBuilder.spatialResolutionInMeters(record.getResolutionScaleDenominator().stream()
                .map(BigDecimal::new)
                .collect(Collectors.toList()));
        // INSPIRE
        if (record.getSpecificationConformance().size() > 0) {
            datasetBuilder.wasUsedBy(record.getSpecificationConformance().stream()
                    .map(c -> DcatActivity.builder()
                            // https://github.com/SEMICeu/iso-19139-to-dcat-ap/blob/master/iso-19139-to-dcat-ap.xsl#L837-L840
                            .activity(ProvActivity.builder()
                                    .used(new RdfResource(null, resourceIdentifierUri, null))
                                    .qualifiedAssociation(ProvQualifiedAssociation.builder()
                                            .hadPlan(ProvHadPlan.builder()
                                                    .wasDerivedFrom(new RdfResource("Resource", null, null, c.getTitle(), null))
                                                    .build())
                                            .build())
                                    .generated(ProvGenerated.builder()
                                            .type(new RdfResource("http://inspire.ec.europa.eu/metadata-codelist/DegreeOfConformity/" + INSPIRE_DEGREE_OF_CONFORMITY.get(c.getPass()), null))
                                            .build())
                                    .build())
                            .build())
                    .collect(Collectors.toList()));
        }
        if (record.getResourceLanguage() != null) {
            // TODO: Where to put resource language ?
            datasetBuilder.language(record.getResourceLanguage().stream()
                    .map(l -> new RdfResource(null, "http://publications.europa.eu/resource/authority/language/" + l.toUpperCase(), null))
                    .collect(Collectors.toList()));
        }
        ArrayList<Codelist> updateFrequencyList = record.getCodelists().get(Codelists.maintenanceAndUpdateFrequency);
        if (updateFrequencyList != null && updateFrequencyList.size() > 0) {
            datasetBuilder.accrualPeriodicity(new RdfResource(null,
                    ACCRUAL_PERIODICITY_URI_PREFIX
                            + ACCRUAL_PERIODICITY_TO_ISO.get(updateFrequencyList.get(0).getProperties().get(CommonField.key)),
                    null));
        }
        // <dct:spatial rdf:parseType="Resource">
        datasetBuilder.spatial(record.getGeometries().stream()
                .map(g -> DctSpatial.builder().location(DctLocation.builder().geometry(g).build()).build())
                .collect(Collectors.toList()));
        datasetBuilder.temporal(record.getResourceTemporalExtentDateRange().stream().map(range -> {
            DctPeriodOfTimeBuilder periodOfTime = DctPeriodOfTime.builder();
            if (StringUtils.isNotEmpty(range.getGte())) {
                periodOfTime.startDate(toDate(range.getGte()));
            }
            if (StringUtils.isNotEmpty(range.getLte())) {
                periodOfTime.endDate(toDate(range.getLte()));
            }
            return DctTemporal.builder().periodOfTime(periodOfTime.build()).build();
        }).collect(Collectors.toList()));
        record.getLinks().stream().forEach(link -> {
            DcatDistributionBuilder dcatDistributionBuilder = DcatDistribution.builder()
                    .title(listOfNullable(link.getName()))
                    .description(listOfNullable(link.getDescription()))
                    .representationTechnique(Subject.builder()
                            .skosConcept(SkosConcept.builder().prefLabel(link.getProtocol()).build())
                            .build());
            // TODO: depending on function/protocol build page/accessUrl/downloadUrl
            dcatDistributionBuilder.accessUrl(link.getUrl());
            datasetBuilder.distribution(listOfNullable(
                    DcatDistributionContainer.builder().distribution(dcatDistributionBuilder.build()).build()));
        });
        datasetBuilder.contactPoint(record.getContactForResource().stream()
                .map(contact -> DcatContactPoint.builder()
                        .contact(VcardContact.builder()
                                .title(contact.getOrganisation())
                                .role(contact.getRole())
                                .hasEmail(contact.getEmail())
                                .build())
                        .build())
                .collect(Collectors.toList()));
        dcatDataset = datasetBuilder.build();
        catalogRecord = CatalogRecord.builder()
                .identifier(listOfNullable(record.getMetadataIdentifier()))
                .created(toDate(record.getCreateDate()))
                .modified(toDate(record.getChangeDate()))
                // BUG FIX: use the null-safe languageUpperCase computed above instead of
                // record.getMainLanguage().toUpperCase(), which threw an NPE when the
                // record declared no main language.
                .language(listOfNullable(new RdfResource(null,
                        "http://publications.europa.eu/resource/authority/language/" + languageUpperCase)))
                .primaryTopic(listOfNullable(new ResourceContainer(dcatDataset, null)))
                .build();
    } catch (JsonProcessingException e) {
        // JsonMappingException is a subclass of JsonProcessingException; one catch
        // covers both former branches with identical handling.
        // NOTE(review): consider proper logging instead of printStackTrace.
        e.printStackTrace();
    }
    return catalogRecord;
}
Aggregations