Search in sources :

Example 76 with Attribute

use of com.github.zhenwei.core.asn1.x509.Attribute in project geonetwork by georchestra.

The class Harvester, method createDIFMetadata.

// ---------------------------------------------------------------------------
/**
 * Process one THREDDS dataset: extract its metadata into a DIF record,
 * transform that DIF record to the configured ISO schema via XSLT, and save
 * the resulting metadata. Collection and atomic datasets may be transformed
 * with different output schemas. For "relaxed" DIF entries (datasets lacking
 * DIF-compatible metadata) coordinate-system information is additionally
 * harvested from the netCDF dataset or from ThreddsMetadata variables.
 *
 * @param ds     the dataset to be processed
 */
private void createDIFMetadata(InvDataset ds) {
    try {
        // add coordinate systems if not DIF relaxed
        boolean addCoordSys = false;
        // --- TODO: Thredds has a metadata converter interface and some other
        // --- methods of handling metadata (including XML of different
        // --- namespaces) in the catalog - this is a place holder for getting
        // --- this info in future
        List<InvMetadata> mds = ds.getMetadata();
        log.info("Dataset has " + mds.size() + " metadata elements");
        for (InvMetadata md : mds) {
            log.info("Found metadata " + md.toString());
        }
        // --- check and see whether this dataset is DIF writeable
        DIFWriter difWriter = new DIFWriter();
        StringBuffer sBuff = new StringBuffer();
        Element dif = null;
        if (difWriter.isDatasetUseable(ds, sBuff)) {
            log.info("Yay! Dataset has DIF compatible metadata " + sBuff.toString());
            dif = difWriter.writeOneEntry(ds, sBuff);
        } else {
            log.info("Dataset does not have DIF compatible metadata so we will write a relaxed DIF entry\n" + sBuff.toString());
            dif = difWriter.writeOneRelaxedEntry(ds, sBuff);
            // a relaxed entry carries no coordinate info, so collect it below
            addCoordSys = true;
        }
        // --- get the UUID assigned to the DIF record
        // NOTE(review): getChild("Entry_ID", difNS) will NPE if the writer
        // emitted no Entry_ID element - confirm both DIF writers always emit it
        String uuid = dif.getChild("Entry_ID", difNS).getText();
        boolean isCollection = ds.hasNestedDatasets();
        log.info("Dataset is a collection dataset? " + isCollection);
        // --- now convert DIF entry into an ISO entry using the appropriate
        // --- difToIso converter (only schemas with a DIF converter are
        // --- supplied to the user for choice)
        Element md = null;
        if (isCollection) {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnCollectionsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming collection dataset to " + params.outputSchemaOnCollectionsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        } else {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnAtomicsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming atomic dataset to " + params.outputSchemaOnAtomicsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        }
        // --- create a netcdfInfo for addition to the ISO record
        if (addCoordSys) {
            // NOTE(review): globalAttributes is never set to true anywhere in
            // this method, so both "!globalAttributes" guards below always
            // pass - confirm whether it should be set true when a
            // metadata_conventions attribute is found on the dataset
            boolean globalAttributes = false;
            if (!isCollection) {
                // open up atomic dataset for info
                log.info("Opening dataset to get global attributes");
                // --- open and check global attributes for metadata conventions
                try {
                    NetcdfDataset ncD = NetcdfDataset.openDataset("thredds:" + ds.getCatalogUrl());
                    Attribute mdCon = ncD.findGlobalAttributeIgnoreCase("metadata_conventions");
                    if (mdCon != null) {
                        List<Attribute> ga = ncD.getGlobalAttributes();
                        for (Attribute att : ga) {
                            if (log.isDebugEnabled())
                                log.debug("Attribute found " + att.toString());
                        // --- TODO: Attach the attributes to the metadata node
                        // --- for conversion into the ISO record by an xslt
                        }
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("No global attribute with metadata conventions found");
                    }
                    ncD.close();
                } catch (Exception e) {
                    // best-effort: failure to open the dataset is logged and
                    // processing continues with the coordinate-system fallback
                    log.info("Exception raised in netcdfDataset ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- if no metadata conventions then find the coordinate systems
            // --- and add these to the appropriate place in whatever ISO or ISO
            // --- profile we are using - MCP: mcp:dataParameters & gmd:keywords,
            // --- ISO: gmd:keywords
            boolean foundNetcdfInfo = false;
            if (!globalAttributes && !isCollection) {
                log.info("No global attributes describing metadata so opening dataset to get coordinate systems");
                try {
                    NetcdfDatasetInfo ncDI = new NetcdfDatasetInfo("thredds:" + ds.getCatalogUrl());
                    log.info("Coordinate systems builder is " + ncDI.getConventionUsed());
                    if (!ncDI.getConventionUsed().equals("None")) {
                        Document doc = ncDI.makeDocument();
                        Element coords = doc.detachRootElement();
                        log.info("Coordinate systems of dataset are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                        foundNetcdfInfo = true;
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("Coordinate system convention is not recognized");
                    }
                    ncDI.close();
                } catch (Exception e) {
                    // best-effort: fall through to the ThreddsMetadata branch
                    log.info("Exception raised in netcdfDatasetInfo ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- or atomic
            if (!globalAttributes && !foundNetcdfInfo) {
                // --- get ThreddsMetadata.Variables and create a netcdfDatasetInfo
                // --- document if possible
                List<ThreddsMetadata.Variables> vsL = ds.getVariables();
                if (vsL != null && vsL.size() > 0) {
                    for (ThreddsMetadata.Variables vs : vsL) {
                        String vHref = vs.getVocabHref();
                        URI vUri = vs.getVocabUri();
                        String vocab = vs.getVocabulary();
                        Element coords = new Element("netcdfDatasetInfo");
                        for (ThreddsMetadata.Variable v : vs.getVariableList()) {
                            Element varX = new Element("variable");
                            varX.setAttribute("name", v.getName());
                            varX.setAttribute("decl", v.getDescription());
                            varX.setAttribute("units", v.getUnits());
                            // - these three attributes are new but then there is no
                            // - xsd for this so we can add as we want!
                            varX.setAttribute("vocab", vocab);
                            // NOTE(review): vUri may be null when the catalog
                            // declares no vocabulary URI - toString() would
                            // then NPE; verify against expected catalog inputs
                            varX.setAttribute("vocaburi", vUri.toString());
                            varX.setAttribute("vocabhref", vHref);
                            coords.addContent(varX);
                        }
                        log.info("Coordinate systems from ThreddsMetadata are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                    }
                }
            }
        }
        // --- write metadata
        saveMetadata(md, uuid, getUri(ds));
        // --- update totals
        if (isCollection) {
            result.collectionDatasetRecords++;
        } else {
            result.atomicDatasetRecords++;
        }
    } catch (Exception e) {
        log.error("Thrown Exception " + e + " during dataset processing");
        e.printStackTrace();
    }
}
Also used : Attribute(ucar.nc2.Attribute) Element(org.jdom.Element) ThreddsMetadata(thredds.catalog.ThreddsMetadata) NetcdfDataset(ucar.nc2.dataset.NetcdfDataset) Document(org.jdom.Document) URI(java.net.URI) SSLHandshakeException(javax.net.ssl.SSLHandshakeException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) DIFWriter(thredds.catalog.dl.DIFWriter) NetcdfDatasetInfo(ucar.nc2.dataset.NetcdfDatasetInfo) InvMetadata(thredds.catalog.InvMetadata)

Example 77 with Attribute

use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.

The class FilterParser, method parseFiltersProperty.

/**
 * Collect every filter supplied under the given property name: a single
 * filter reference held in an attribute of that name, plus any inline
 * filters declared inside child elements of that name.
 *
 * @param el   the element whose attribute/children are inspected
 * @param name the property name to look up
 * @return the (possibly empty) list of parsed filters
 * @throws InvalidXMLException if a reference or inline filter is malformed
 */
public List<Filter> parseFiltersProperty(Element el, String name) throws InvalidXMLException {
    List<Filter> result = new ArrayList<>();
    // Attribute form: a reference to a filter defined elsewhere.
    Attribute reference = el.getAttribute(name);
    if (reference != null) {
        result.add(parseReference(new Node(reference)));
    }
    // Element form: inline filter definitions nested under child tags.
    for (Element child : el.getChildren(name)) {
        result.addAll(parseChildren(child));
    }
    return result;
}
Also used : Filter(tc.oc.pgm.api.filter.Filter) Attribute(org.jdom2.Attribute) Node(tc.oc.pgm.util.xml.Node) Element(org.jdom2.Element) ArrayList(java.util.ArrayList)

Example 78 with Attribute

use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.

The class RegionParser, method parseTranslate.

/**
 * Parses a {@code translate} region: the element's child regions shifted by
 * the mandatory {@code offset} vector attribute.
 *
 * @throws InvalidXMLException if the {@code offset} attribute is missing or invalid
 */
@MethodParser("translate")
public TranslatedRegion parseTranslate(Element el) throws InvalidXMLException {
    Attribute offsetAttr = el.getAttribute("offset");
    if (offsetAttr == null) {
        throw new InvalidXMLException("Translate region must have an offset", el);
    }
    // Parse the offset before the children, matching validation order.
    Vector offset = XMLUtils.parseVector(offsetAttr);
    return new TranslatedRegion(this.parseChildren(el), offset);
}
Also used : Attribute(org.jdom2.Attribute) InvalidXMLException(tc.oc.pgm.util.xml.InvalidXMLException) Vector(org.bukkit.util.Vector) MethodParser(tc.oc.pgm.util.MethodParser)

Example 79 with Attribute

use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.

The class RegionParser, method parseRegionProperty.

/**
 * Resolve a region property that may be given under any of several names,
 * either as an attribute (a region reference) or as a child element (an
 * inline region definition). At most one such property may be present
 * across all names and both forms.
 *
 * @param rootElement the element to inspect
 * @param validation  optional validation applied to the resolved region
 * @param def         default region returned when no property is present
 * @param names       accepted property names
 * @return the parsed region, or {@code def} if none was declared
 * @throws InvalidXMLException if a name is defined both as attribute and
 *         element, if more than one name is defined, or if parsing fails
 */
@Nullable
public Region parseRegionProperty(Element rootElement, @Nullable FeatureValidation<RegionDefinition> validation, Region def, String... names) throws InvalidXMLException {
    Attribute propertyAttribute = null;
    Element propertyElement = null;
    for (String name : names) {
        // Look each form up once per name instead of re-querying the
        // element in every condition below.
        Attribute attr = rootElement.getAttribute(name);
        Element child = rootElement.getChild(name);
        if (attr != null && child != null) {
            throw new InvalidXMLException("Multiple defined region properties for " + name, rootElement);
        }
        if ((attr != null || child != null) && (propertyAttribute != null || propertyElement != null)) {
            // A property was already found under an earlier name.
            throw new InvalidXMLException("Multiple defined region properties for " + Arrays.toString(names), rootElement);
        }
        if (attr != null) {
            propertyAttribute = attr;
        } else if (child != null) {
            propertyElement = child;
        }
    }
    Region region = def;
    Node node = null;
    if (propertyAttribute != null) {
        // Attribute form: a reference to a region defined elsewhere.
        region = this.parseReference(propertyAttribute);
        node = new Node(propertyAttribute);
    } else if (propertyElement != null) {
        // Element form: an inline region definition.
        region = this.parseChildren(propertyElement);
        node = new Node(propertyElement);
    }
    if (region != null && validation != null) {
        validate(region, validation, node);
    }
    return region;
}
Also used : Attribute(org.jdom2.Attribute) Element(org.jdom2.Element) Node(tc.oc.pgm.util.xml.Node) InvalidXMLException(tc.oc.pgm.util.xml.InvalidXMLException) Region(tc.oc.pgm.api.region.Region) Nullable(javax.annotation.Nullable)

Example 80 with Attribute

use of com.github.zhenwei.core.asn1.x509.Attribute in project PGM by PGMDev.

The class RegionFilterApplicationParser, method parse.

/**
 * Parse one region filter application element: registers (via add)
 * RegionFilterApplication entries for the "kit" and "lend-kit" properties,
 * for the legacy "velocity" attribute, and for every RFAScope tag present
 * on the element.
 *
 * @param el the XML element describing the region filter application
 * @throws InvalidXMLException if the region, filters, kits or vectors are malformed
 */
public void parse(Element el) throws InvalidXMLException {
    Region region = parseRegion(el);
    Component message = XMLUtils.parseFormattedText(el, "message");
    // early-warning defaults to false when the attribute is absent
    boolean earlyWarning = XMLUtils.parseBoolean(el.getAttribute("early-warning"), false);
    // the same effect filter gates the kit, lend-kit and velocity applications
    Filter effectFilter = filterParser.parseFilterProperty(el, "filter");
    Kit kit = factory.getKits().parseKitProperty(el, "kit");
    if (kit != null) {
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, kit, false));
    }
    kit = factory.getKits().parseKitProperty(el, "lend-kit");
    if (kit != null) {
        // final arg true marks the kit as lent (removed on region exit - TODO confirm)
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, kit, true));
    }
    Attribute attrVelocity = el.getAttribute("velocity");
    if (attrVelocity != null) {
        // Legacy support
        String velocityText = attrVelocity.getValue();
        // strip an optional leading '@' from the legacy syntax
        // NOTE(review): an empty velocity attribute would make charAt(0) throw
        // StringIndexOutOfBoundsException - confirm upstream validation
        if (velocityText.charAt(0) == '@')
            velocityText = velocityText.substring(1);
        Vector velocity = XMLUtils.parseVector(attrVelocity, velocityText);
        add(el, new RegionFilterApplication(RFAScope.EFFECT, region, effectFilter, velocity));
    }
    // one application per scope tag declared on the element
    for (String tag : RFAScope.byTag.keySet()) {
        Filter filter;
        if (useId()) {
            filter = filterParser.parseFilterProperty(el, tag);
        } else {
            // Legacy syntax allows a list of filter names in the attribute
            Node node = Node.fromAttr(el, tag);
            if (node == null) {
                filter = null;
            } else {
                List<Filter> filters = new ArrayList<>();
                for (String name : Splitter.on(" ").split(node.getValue())) {
                    filters.add(filterParser.parseReference(node, name));
                }
                // 0 names -> no filter; 1 name -> that filter;
                // several names -> AND-combined via a FilterNode
                switch(filters.size()) {
                    case 0:
                        filter = null;
                        break;
                    case 1:
                        filter = filters.get(0);
                        break;
                    default:
                        filter = new FilterNode(filters, Collections.<Filter>emptyList(), Collections.<Filter>emptyList());
                }
            }
        }
        if (filter != null) {
            // a tag may map to several scopes; register one application each
            for (RFAScope scope : RFAScope.byTag.get(tag)) {
                add(el, new RegionFilterApplication(scope, region, filter, message, earlyWarning));
            }
        }
    }
}
Also used : Attribute(org.jdom2.Attribute) Node(tc.oc.pgm.util.xml.Node) FilterNode(tc.oc.pgm.filters.FilterNode) FilterNode(tc.oc.pgm.filters.FilterNode) ArrayList(java.util.ArrayList) TeamFilter(tc.oc.pgm.filters.TeamFilter) DenyFilter(tc.oc.pgm.filters.DenyFilter) Filter(tc.oc.pgm.api.filter.Filter) StaticFilter(tc.oc.pgm.filters.StaticFilter) Kit(tc.oc.pgm.kits.Kit) Region(tc.oc.pgm.api.region.Region) Component(net.kyori.adventure.text.Component) Vector(org.bukkit.util.Vector)

Aggregations

Attribute (org.jdom2.Attribute)316 Element (org.jdom2.Element)210 IOException (java.io.IOException)98 ArrayList (java.util.ArrayList)75 Attribute (ucar.nc2.Attribute)65 List (java.util.List)46 Document (org.jdom2.Document)43 Variable (ucar.nc2.Variable)39 HashMap (java.util.HashMap)26 Extensions (org.bouncycastle.asn1.x509.Extensions)26 X509Certificate (java.security.cert.X509Certificate)24 Namespace (org.jdom2.Namespace)24 File (java.io.File)23 Attribute (org.bouncycastle.asn1.pkcs.Attribute)21 GeneralName (org.bouncycastle.asn1.x509.GeneralName)21 Array (ucar.ma2.Array)21 Test (org.junit.Test)20 GeneralNames (org.bouncycastle.asn1.x509.GeneralNames)19 Dimension (ucar.nc2.Dimension)19 Map (java.util.Map)17