use of com.github.zhenwei.core.asn1.x509.Attribute in project geonetwork by georchestra.
the class Harvester method createDIFMetadata.
// ---------------------------------------------------------------------------
/**
 * Process one dataset by extracting its metadata, writing to DIF
 * and using xslt to transform to the required ISO format.
 *
 * @param ds the dataset to be processed
 */
private void createDIFMetadata(InvDataset ds) {
    try {
        // add coordinate systems if not DIF relaxed
        boolean addCoordSys = false;
        // --- TODO: Thredds has a metadata converter interface and some other
        // --- methods of handling metadata (including XML of different
        // --- namespaces) in the catalog - this is a place holder for getting
        // --- this info in future
        List<InvMetadata> mds = ds.getMetadata();
        log.info("Dataset has " + mds.size() + " metadata elements");
        for (InvMetadata md : mds) {
            log.info("Found metadata " + md.toString());
        }
        // --- check and see whether this dataset is DIF writeable
        DIFWriter difWriter = new DIFWriter();
        StringBuffer sBuff = new StringBuffer();
        Element dif = null;
        if (difWriter.isDatasetUseable(ds, sBuff)) {
            log.info("Yay! Dataset has DIF compatible metadata " + sBuff.toString());
            dif = difWriter.writeOneEntry(ds, sBuff);
        } else {
            log.info("Dataset does not have DIF compatible metadata so we will write a relaxed DIF entry\n" + sBuff.toString());
            dif = difWriter.writeOneRelaxedEntry(ds, sBuff);
            addCoordSys = true;
        }
        // --- get the UUID assigned to the DIF record
        String uuid = dif.getChild("Entry_ID", difNS).getText();
        boolean isCollection = ds.hasNestedDatasets();
        log.info("Dataset is a collection dataset? " + isCollection);
        // --- now convert DIF entry into an ISO entry using the appropriate
        // --- difToIso converter (only schemas with a DIF converter are
        // --- supplied to the user for choice)
        Element md = null;
        if (isCollection) {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnCollectionsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming collection dataset to " + params.outputSchemaOnCollectionsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        } else {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnAtomicsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming atomic dataset to " + params.outputSchemaOnAtomicsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        }
        // --- create a netcdfInfo for addition to the ISO record
        if (addCoordSys) {
            boolean globalAttributes = false;
            if (!isCollection) {
                // open up atomic dataset for info
                log.info("Opening dataset to get global attributes");
                // --- open and check global attributes for metadata conventions
                try {
                    NetcdfDataset ncD = NetcdfDataset.openDataset("thredds:" + ds.getCatalogUrl());
                    Attribute mdCon = ncD.findGlobalAttributeIgnoreCase("metadata_conventions");
                    if (mdCon != null) {
                        List<Attribute> ga = ncD.getGlobalAttributes();
                        for (Attribute att : ga) {
                            if (log.isDebugEnabled())
                                log.debug("Attribute found " + att.toString());
                            // --- TODO: Attach the attributes to the metadata node
                            // --- for conversion into the ISO record by an xslt
                        }
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("No global attribute with metadata conventions found");
                    }
                    ncD.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDataset ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- if no metadata conventions then find the coordinate systems
            // --- and add these to the appropriate place in whatever ISO or ISO
            // --- profile we are using - MCP: mcp:dataParameters & gmd:keywords,
            // --- ISO: gmd:keywords
            boolean foundNetcdfInfo = false;
            if (!globalAttributes && !isCollection) {
                log.info("No global attributes describing metadata so opening dataset to get coordinate systems");
                try {
                    NetcdfDatasetInfo ncDI = new NetcdfDatasetInfo("thredds:" + ds.getCatalogUrl());
                    log.info("Coordinate systems builder is " + ncDI.getConventionUsed());
                    if (!ncDI.getConventionUsed().equals("None")) {
                        Document doc = ncDI.makeDocument();
                        Element coords = doc.detachRootElement();
                        log.info("Coordinate systems of dataset are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                        foundNetcdfInfo = true;
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("Coordinate system convention is not recognized");
                    }
                    ncDI.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDatasetInfo ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- otherwise (a collection dataset, or an atomic dataset with no
            // --- recognized coordinate systems)
            if (!globalAttributes && !foundNetcdfInfo) {
                // --- get ThreddsMetadata.Variables and create a netcdfDatasetInfo
                // --- document if possible
                List<ThreddsMetadata.Variables> vsL = ds.getVariables();
                if (vsL != null && vsL.size() > 0) {
                    for (ThreddsMetadata.Variables vs : vsL) {
                        String vHref = vs.getVocabHref();
                        URI vUri = vs.getVocabUri();
                        String vocab = vs.getVocabulary();
                        Element coords = new Element("netcdfDatasetInfo");
                        for (ThreddsMetadata.Variable v : vs.getVariableList()) {
                            Element varX = new Element("variable");
                            varX.setAttribute("name", v.getName());
                            varX.setAttribute("decl", v.getDescription());
                            varX.setAttribute("units", v.getUnits());
                            // - these three attributes are new but then there is no
                            // - xsd for this so we can add as we want!
                            varX.setAttribute("vocab", vocab);
                            varX.setAttribute("vocaburi", vUri.toString());
                            varX.setAttribute("vocabhref", vHref);
                            coords.addContent(varX);
                        }
                        log.info("Coordinate systems from ThreddsMetadata are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                    }
                }
            }
        }
        // --- write metadata
        saveMetadata(md, uuid, getUri(ds));
        // --- update totals
        if (isCollection) {
            result.collectionDatasetRecords++;
        } else {
            result.atomicDatasetRecords++;
        }
    } catch (Exception e) {
        log.error("Thrown Exception " + e + " during dataset processing");
        e.printStackTrace();
    }
}
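For reference, a minimal standalone sketch of the global-attribute check performed above, using only the ucar.nc2 calls that appear in the snippet (openDataset, findGlobalAttributeIgnoreCase, getGlobalAttributes, close). The dataset location is a hypothetical placeholder, not a real catalog URL:

import java.util.List;

import ucar.nc2.Attribute;
import ucar.nc2.dataset.NetcdfDataset;

public class GlobalAttributeCheck {
    public static void main(String[] args) throws Exception {
        // hypothetical location - replace with a real THREDDS dataset URL
        String location = "thredds:http://example.org/thredds/catalog.xml";
        NetcdfDataset ncD = NetcdfDataset.openDataset(location);
        try {
            // same convention check as in createDIFMetadata
            Attribute mdCon = ncD.findGlobalAttributeIgnoreCase("metadata_conventions");
            if (mdCon != null) {
                List<Attribute> ga = ncD.getGlobalAttributes();
                for (Attribute att : ga) {
                    System.out.println("Attribute found " + att.toString());
                }
            } else {
                System.out.println("No global attribute with metadata conventions found");
            }
        } finally {
            ncD.close();
        }
    }
}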
use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.
the class FilterParser method parseFiltersProperty.
/**
 * Return a list containing any and all of the following:
 * - A filter reference in an attribute of the given name
 * - Inline filters inside child tags of the given name
 */
public List<Filter> parseFiltersProperty(Element el, String name) throws InvalidXMLException {
    List<Filter> filters = new ArrayList<>();
    Attribute attr = el.getAttribute(name);
    if (attr != null) {
        filters.add(this.parseReference(new Node(attr)));
    }
    for (Element elFilter : el.getChildren(name)) {
        filters.addAll(this.parseChildren(elFilter));
    }
    return filters;
}
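A rough usage sketch, assuming a FilterParser instance named parser is already available from the map factory (its construction is omitted) and the same JDOM and PGM types as above are in scope; the element name and filter id are hypothetical:

// hypothetical element: <apply filter="only-blue"> ... </apply>
Element el = new Element("apply");
el.setAttribute("filter", "only-blue");   // filter reference by id

// picks up the reference from the attribute; any <filter> child elements
// of "el" would be parsed inline as well
List<Filter> filters = parser.parseFiltersProperty(el, "filter");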
use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.
the class RegionParser method parseTranslate.
@MethodParser("translate")
public TranslatedRegion parseTranslate(Element el) throws InvalidXMLException {
    Attribute offsetAttribute = el.getAttribute("offset");
    if (offsetAttribute == null) {
        throw new InvalidXMLException("Translate region must have an offset", el);
    }
    Vector offset = XMLUtils.parseVector(offsetAttribute);
    return new TranslatedRegion(this.parseChildren(el), offset);
}
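A short sketch of the element shape this handler consumes; regionParser, the child region reference and the offset value are hypothetical placeholders:

// hypothetical map XML equivalent: <translate offset="0,0,10"><region id="spawn-area"/></translate>
Element el = new Element("translate");
el.setAttribute("offset", "0,0,10");                                    // read via XMLUtils.parseVector
el.addContent(new Element("region").setAttribute("id", "spawn-area")); // child region to be translated

// throws InvalidXMLException("Translate region must have an offset", el) if the attribute is missing
TranslatedRegion translated = regionParser.parseTranslate(el);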
use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.
the class RegionParser method parseRegionProperty.
@Nullable
public Region parseRegionProperty(Element rootElement, @Nullable FeatureValidation<RegionDefinition> validation, Region def, String... names) throws InvalidXMLException {
    Attribute propertyAttribute = null;
    Element propertyElement = null;
    for (String name : names) {
        if (rootElement.getAttribute(name) != null && rootElement.getChild(name) != null) {
            throw new InvalidXMLException("Multiple defined region properties for " + name, rootElement);
        }
        if ((rootElement.getAttribute(name) != null || rootElement.getChild(name) != null) && (propertyAttribute != null || propertyElement != null)) {
            throw new InvalidXMLException("Multiple defined region properties for " + Arrays.toString(names), rootElement);
        }
        if (rootElement.getAttribute(name) != null) {
            propertyAttribute = rootElement.getAttribute(name);
        } else if (rootElement.getChild(name) != null) {
            propertyElement = rootElement.getChild(name);
        }
    }
    Region region = def;
    Node node = null;
    if (propertyAttribute != null) {
        region = this.parseReference(propertyAttribute);
        node = new Node(propertyAttribute);
    } else if (propertyElement != null) {
        region = this.parseChildren(propertyElement);
        node = new Node(propertyElement);
    }
    if (region != null && validation != null) {
        validate(region, validation, node);
    }
    return region;
}
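For orientation, a sketch of the two property shapes this method accepts, using a hypothetical element name and region id; regionParser is assumed to be a RegionParser instance, and no validation or default region is supplied:

// shape 1: region reference in an attribute, e.g. <damage region="lobby"/>
Element byAttribute = new Element("damage").setAttribute("region", "lobby");

// shape 2: inline region in a child element, e.g. <damage><region>...</region></damage>
Element byChild = new Element("damage").addContent(new Element("region"));

// supplying both shapes for the same name on one element raises the
// "Multiple defined region properties" InvalidXMLException seen above
Region region = regionParser.parseRegionProperty(byAttribute, null, null, "region");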
use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.
the class RegionFilterApplicationParser method parse.
public void parse(Element el) throws InvalidXMLException {
    Region region = parseRegion(el);
    Component message = XMLUtils.parseFormattedText(el, "message");
    boolean earlyWarning = XMLUtils.parseBoolean(el.getAttribute("early-warning"), false);
    Filter effectFilter = filterParser.parseFilterProperty(el, "filter");
    Kit kit = factory.getKits().parseKitProperty(el, "kit");
    if (kit != null) {
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, kit, false));
    }
    kit = factory.getKits().parseKitProperty(el, "lend-kit");
    if (kit != null) {
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, kit, true));
    }
    Attribute attrVelocity = el.getAttribute("velocity");
    if (attrVelocity != null) {
        // Legacy support
        String velocityText = attrVelocity.getValue();
        if (velocityText.charAt(0) == '@')
            velocityText = velocityText.substring(1);
        Vector velocity = XMLUtils.parseVector(attrVelocity, velocityText);
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, velocity));
    }
    for (String tag : RFAScope.byTag.keySet()) {
        Filter filter;
        if (useId()) {
            filter = filterParser.parseFilterProperty(el, tag);
        } else {
            // Legacy syntax allows a list of filter names in the attribute
            Node node = Node.fromAttr(el, tag);
            if (node == null) {
                filter = null;
            } else {
                List<Filter> filters = new ArrayList<>();
                for (String name : Splitter.on(" ").split(node.getValue())) {
                    filters.add(filterParser.parseReference(node, name));
                }
                switch (filters.size()) {
                    case 0:
                        filter = null;
                        break;
                    case 1:
                        filter = filters.get(0);
                        break;
                    default:
                        filter = new FilterNode(filters, Collections.<Filter>emptyList(), Collections.<Filter>emptyList());
                }
            }
        }
        if (filter != null) {
            for (RFAScope scope : RFAScope.byTag.get(tag)) {
                add(el, new RegionFilterApplication(scope, region, filter, message, earlyWarning));
            }
        }
    }
}
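To make the legacy velocity handling above concrete, a small sketch showing the optional '@' prefix being tolerated and stripped before the vector text is parsed; the element name and value are hypothetical:

// legacy map XML allowed velocity="@0,2,0"; the '@' is dropped before parsing
Element el = new Element("apply");
el.setAttribute("velocity", "@0,2,0");

Attribute attrVelocity = el.getAttribute("velocity");
String velocityText = attrVelocity.getValue();
if (velocityText.charAt(0) == '@') {
    velocityText = velocityText.substring(1);
}
// velocityText is now "0,2,0" and is handed to XMLUtils.parseVector as in parse(...)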