Use of thredds.catalog.ThreddsMetadata in project geonetwork by georchestra.
From the class Harvester, method createDIFMetadata.
// ---------------------------------------------------------------------------
/**
 * Process one dataset by extracting its metadata, writing to DIF
 * and using xslt to transform to the required ISO format.
 *
 * @param ds the dataset to be processed
 */
private void createDIFMetadata(InvDataset ds) {
    try {
        // add coordinate systems if not DIF relaxed
        boolean addCoordSys = false;
        // --- TODO: Thredds has a metadata converter interface and some other
        // --- methods of handling metadata (including XML of different
        // --- namespaces) in the catalog - this is a place holder for getting
        // --- this info in future
        List<InvMetadata> mds = ds.getMetadata();
        log.info("Dataset has " + mds.size() + " metadata elements");
        for (InvMetadata md : mds) {
            log.info("Found metadata " + md.toString());
        }
        // --- check and see whether this dataset is DIF writeable
        DIFWriter difWriter = new DIFWriter();
        StringBuffer sBuff = new StringBuffer();
        Element dif = null;
        if (difWriter.isDatasetUseable(ds, sBuff)) {
            log.info("Yay! Dataset has DIF compatible metadata " + sBuff.toString());
            dif = difWriter.writeOneEntry(ds, sBuff);
        } else {
            log.info("Dataset does not have DIF compatible metadata so we will write a relaxed DIF entry\n" + sBuff.toString());
            dif = difWriter.writeOneRelaxedEntry(ds, sBuff);
            addCoordSys = true;
        }
        // --- get the UUID assigned to the DIF record
        String uuid = dif.getChild("Entry_ID", difNS).getText();
        boolean isCollection = ds.hasNestedDatasets();
        log.info("Dataset is a collection dataset? " + isCollection);
        // --- now convert DIF entry into an ISO entry using the appropriate
        // --- difToIso converter (only schemas with a DIF converter are
        // --- supplied to the user for choice)
        Element md = null;
        if (isCollection) {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnCollectionsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming collection dataset to " + params.outputSchemaOnCollectionsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        } else {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnAtomicsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming atomic dataset to " + params.outputSchemaOnAtomicsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        }
        // --- create a netcdfInfo for addition to the ISO record
        if (addCoordSys) {
            boolean globalAttributes = false;
            if (!isCollection) {
                // open up atomic dataset for info
                log.info("Opening dataset to get global attributes");
                // --- open and check global attributes for metadata conventions
                try {
                    NetcdfDataset ncD = NetcdfDataset.openDataset("thredds:" + ds.getCatalogUrl());
                    Attribute mdCon = ncD.findGlobalAttributeIgnoreCase("metadata_conventions");
                    if (mdCon != null) {
                        List<Attribute> ga = ncD.getGlobalAttributes();
                        for (Attribute att : ga) {
                            if (log.isDebugEnabled())
                                log.debug("Attribute found " + att.toString());
                            // --- TODO: Attach the attributes to the metadata node
                            // --- for conversion into the ISO record by an xslt
                        }
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("No global attribute with metadata conventions found");
                    }
                    ncD.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDataset ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- if no metadata conventions then find the coordinate systems
            // --- and add these to the appropriate place in whatever ISO or ISO
            // --- profile we are using - MCP: mcp:dataParameters & gmd:keywords,
            // --- ISO: gmd:keywords
            boolean foundNetcdfInfo = false;
            if (!globalAttributes && !isCollection) {
                log.info("No global attributes describing metadata so opening dataset to get coordinate systems");
                try {
                    NetcdfDatasetInfo ncDI = new NetcdfDatasetInfo("thredds:" + ds.getCatalogUrl());
                    log.info("Coordinate systems builder is " + ncDI.getConventionUsed());
                    if (!ncDI.getConventionUsed().equals("None")) {
                        Document doc = ncDI.makeDocument();
                        Element coords = doc.detachRootElement();
                        log.info("Coordinate systems of dataset are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                        foundNetcdfInfo = true;
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("Coordinate system convention is not recognized");
                    }
                    ncDI.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDatasetInfo ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- or atomic
            if (!globalAttributes && !foundNetcdfInfo) {
                // --- get ThreddsMetadata.Variables and create a netcdfDatasetInfo
                // --- document if possible
                List<ThreddsMetadata.Variables> vsL = ds.getVariables();
                if (vsL != null && vsL.size() > 0) {
                    for (ThreddsMetadata.Variables vs : vsL) {
                        String vHref = vs.getVocabHref();
                        URI vUri = vs.getVocabUri();
                        String vocab = vs.getVocabulary();
                        Element coords = new Element("netcdfDatasetInfo");
                        for (ThreddsMetadata.Variable v : vs.getVariableList()) {
                            Element varX = new Element("variable");
                            varX.setAttribute("name", v.getName());
                            varX.setAttribute("decl", v.getDescription());
                            varX.setAttribute("units", v.getUnits());
                            // - these three attributes are new but then there is no
                            // - xsd for this so we can add as we want!
                            varX.setAttribute("vocab", vocab);
                            varX.setAttribute("vocaburi", vUri.toString());
                            varX.setAttribute("vocabhref", vHref);
                            coords.addContent(varX);
                        }
                        log.info("Coordinate systems from ThreddsMetadata are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                    }
                }
            }
        }
        // --- write metadata
        saveMetadata(md, uuid, getUri(ds));
        // --- update totals
        if (isCollection) {
            result.collectionDatasetRecords++;
        } else {
            result.atomicDatasetRecords++;
        }
    } catch (Exception e) {
        log.error("Thrown Exception " + e + " during dataset processing");
        e.printStackTrace();
    }
}
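For context, createDIFMetadata is invoked once per dataset while crawling a THREDDS catalog. The sketch below shows one plausible way such a crawl could be driven with the same thredds.catalog API; the catalog URL and the crawl helper are illustrative and not part of the harvester above.

import thredds.catalog.InvCatalogFactory;
import thredds.catalog.InvCatalogImpl;
import thredds.catalog.InvDataset;

public class CatalogCrawlSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical catalog URL - replace with a real THREDDS catalog endpoint.
        String catalogUrl = "https://example.org/thredds/catalog.xml";
        InvCatalogFactory factory = InvCatalogFactory.getDefaultFactory(true);
        InvCatalogImpl catalog = factory.readXML(catalogUrl);

        StringBuilder buff = new StringBuilder();
        if (!catalog.check(buff)) {
            throw new Exception("Invalid catalog: " + buff);
        }
        for (InvDataset ds : catalog.getDatasets()) {
            crawl(ds);
        }
    }

    // Visit a dataset and all of its nested datasets; a harvester would call
    // something like createDIFMetadata(ds) here instead of printing.
    private static void crawl(InvDataset ds) {
        System.out.println("Would harvest: " + ds.getName()
                + " (collection=" + ds.hasNestedDatasets() + ")");
        for (InvDataset child : ds.getDatasets()) {
            crawl(child);
        }
    }
}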
Use of thredds.catalog.ThreddsMetadata in project core-geonetwork by geonetwork.
From the class Harvester, method extractThreddsMetadata.
/**
 * Extract thredds metadata and use it to extend extents and record variables.
 *
 * @param ds the dataset being processed
 */
private void extractThreddsMetadata(InvDataset ds) {
    log.info("Trying to find ThreddsMetadata for dataset " + ds.getName());
    ThreddsMetadata.GeospatialCoverage gsC = ds.getGeospatialCoverage();
    if (gsC != null) {
        log.info("Found ThreddsMetadata geospatialcoverage");
        addLatLonBox(gsC.getBoundingBox());
    }
    DateRange dr = ds.getTimeCoverage();
    if (dr != null) {
        log.info("Found ThreddsMetadata daterange");
        addTimeSpan(dr);
    }
    List<ThreddsMetadata.Variables> variablesList = ds.getVariables();
    if (variablesList != null && variablesList.size() > 0) {
        log.info("Found ThreddsMetadata variables");
        for (ThreddsMetadata.Variables variables : variablesList) {
            List<ThreddsMetadata.Variable> variableList = variables.getVariableList();
            for (ThreddsMetadata.Variable variable : variableList) {
                gridVariables.put(variable.getName(), variable);
            }
        }
    }
    if (gsC != null && dr != null && gridVariables.size() > 0)
        metadataObtained = true;
    // record any documentation so that it can be passed to metadata records
    docs = ds.getDocumentation();
    if (docs != null && docs.size() > 0) {
        log.info("Found ThreddsMetadata documentation");
    }
}
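The helpers addLatLonBox and addTimeSpan are private methods whose bodies are not shown in this snippet. As a rough illustration of what they consume, the sketch below records the bounding box and date range from the types returned by ThreddsMetadata.GeospatialCoverage.getBoundingBox() and InvDataset.getTimeCoverage(); the class and field names are hypothetical and the real implementations may differ.

import ucar.nc2.units.DateRange;
import ucar.unidata.geoloc.LatLonRect;

class ExtentSketch {
    // Illustrative fields; a real harvester would feed these values into its metadata records.
    private double minLat, maxLat, minLon, maxLon;
    private String beginDate, endDate;

    // Record the geographic extent reported by ThreddsMetadata.GeospatialCoverage.
    void addLatLonBox(LatLonRect bbox) {
        minLat = bbox.getLatMin();
        maxLat = bbox.getLatMax();
        minLon = bbox.getLonMin();
        maxLon = bbox.getLonMax();
    }

    // Record the temporal extent reported by the dataset's time coverage.
    void addTimeSpan(DateRange dr) {
        beginDate = dr.getStart().toDateTimeStringISO();
        endDate = dr.getEnd().toDateTimeStringISO();
    }
}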
Use of thredds.catalog.ThreddsMetadata in project core-geonetwork by geonetwork.
From the class Harvester, method hasThreddsMetadata.
/**
 * Check to see whether the dataset has thredds metadata.
 *
 * @param ds the dataset being processed
 */
private boolean hasThreddsMetadata(InvDataset ds) {
    ThreddsMetadata.GeospatialCoverage gsC = ds.getGeospatialCoverage();
    DateRange dr = ds.getTimeCoverage();
    List<ThreddsMetadata.Variables> vars = ds.getVariables();
    List<InvDocumentation> docs = ds.getDocumentation();
    log.debug("ThreddsMetadata: " + gsC + " : " + dr + " : " + vars.size() + " : " + docs);
    return (gsC != null || dr != null || vars.size() > 0 || docs != null);
}
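A plausible way to combine hasThreddsMetadata with extractThreddsMetadata is to walk from a dataset up through its parents until inherited metadata is found. The walk below is illustrative; only the two method calls and the metadataObtained flag come from the snippets above.

// Hypothetical driver: look for THREDDS metadata on the dataset itself, then on
// its ancestors, stopping once enough metadata has been obtained.
private void findAndExtractThreddsMetadata(InvDataset ds) {
    InvDataset current = ds;
    while (current != null && !metadataObtained) {
        if (hasThreddsMetadata(current)) {
            extractThreddsMetadata(current);
        }
        // ThreddsMetadata is often declared on a parent dataset and inherited by children.
        current = current.getParent();
    }
}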
Use of thredds.catalog.ThreddsMetadata in project geonetwork-eea by eea.
From the class Harvester, method createDIFMetadata.
// ---------------------------------------------------------------------------
/**
 * Process one dataset by extracting its metadata, writing to DIF
 * and using xslt to transform to the required ISO format.
 *
 * @param ds the dataset to be processed
 */
private void createDIFMetadata(InvDataset ds) {
    try {
        // add coordinate systems if not DIF relaxed
        boolean addCoordSys = false;
        // --- TODO: Thredds has a metadata converter interface and some other
        // --- methods of handling metadata (including XML of different
        // --- namespaces) in the catalog - this is a place holder for getting
        // --- this info in future
        List<InvMetadata> mds = ds.getMetadata();
        log.info("Dataset has " + mds.size() + " metadata elements");
        for (InvMetadata md : mds) {
            log.info("Found metadata " + md.toString());
        }
        // --- check and see whether this dataset is DIF writeable
        DIFWriter difWriter = new DIFWriter();
        StringBuffer sBuff = new StringBuffer();
        Element dif = null;
        if (difWriter.isDatasetUseable(ds, sBuff)) {
            log.info("Yay! Dataset has DIF compatible metadata " + sBuff.toString());
            dif = difWriter.writeOneEntry(ds, sBuff);
        } else {
            log.info("Dataset does not have DIF compatible metadata so we will write a relaxed DIF entry\n" + sBuff.toString());
            dif = difWriter.writeOneRelaxedEntry(ds, sBuff);
            addCoordSys = true;
        }
        // --- get the UUID assigned to the DIF record
        String uuid = dif.getChild("Entry_ID", difNS).getText();
        boolean isCollection = ds.hasNestedDatasets();
        log.info("Dataset is a collection dataset? " + isCollection);
        // --- now convert DIF entry into an ISO entry using the appropriate
        // --- difToIso converter (only schemas with a DIF converter are
        // --- supplied to the user for choice)
        Element md = null;
        if (isCollection) {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnCollectionsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming collection dataset to " + params.outputSchemaOnCollectionsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        } else {
            String difToIsoStyleSheet = schemaMan.getSchemaDir(params.outputSchemaOnAtomicsDIF) + Geonet.Path.DIF_STYLESHEETS + "/DIFToISO.xsl";
            log.info("Transforming atomic dataset to " + params.outputSchemaOnAtomicsDIF);
            md = Xml.transform(dif, difToIsoStyleSheet);
        }
        // --- create a netcdfInfo for addition to the ISO record
        if (addCoordSys) {
            boolean globalAttributes = false;
            if (!isCollection) {
                // open up atomic dataset for info
                log.info("Opening dataset to get global attributes");
                // --- open and check global attributes for metadata conventions
                try {
                    NetcdfDataset ncD = NetcdfDataset.openDataset("thredds:" + ds.getCatalogUrl());
                    Attribute mdCon = ncD.findGlobalAttributeIgnoreCase("metadata_conventions");
                    if (mdCon != null) {
                        List<Attribute> ga = ncD.getGlobalAttributes();
                        for (Attribute att : ga) {
                            if (log.isDebugEnabled())
                                log.debug("Attribute found " + att.toString());
                            // --- TODO: Attach the attributes to the metadata node
                            // --- for conversion into the ISO record by an xslt
                        }
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("No global attribute with metadata conventions found");
                    }
                    ncD.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDataset ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- if no metadata conventions then find the coordinate systems
            // --- and add these to the appropriate place in whatever ISO or ISO
            // --- profile we are using - MCP: mcp:dataParameters & gmd:keywords,
            // --- ISO: gmd:keywords
            boolean foundNetcdfInfo = false;
            if (!globalAttributes && !isCollection) {
                log.info("No global attributes describing metadata so opening dataset to get coordinate systems");
                try {
                    NetcdfDatasetInfo ncDI = new NetcdfDatasetInfo("thredds:" + ds.getCatalogUrl());
                    log.info("Coordinate systems builder is " + ncDI.getConventionUsed());
                    if (!ncDI.getConventionUsed().equals("None")) {
                        Document doc = ncDI.makeDocument();
                        Element coords = doc.detachRootElement();
                        log.info("Coordinate systems of dataset are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                        foundNetcdfInfo = true;
                    } else {
                        if (log.isDebugEnabled())
                            log.debug("Coordinate system convention is not recognized");
                    }
                    ncDI.close();
                } catch (Exception e) {
                    log.info("Exception raised in netcdfDatasetInfo ops: " + e);
                    e.printStackTrace();
                }
            }
            // --- or atomic
            if (!globalAttributes && !foundNetcdfInfo) {
                // --- get ThreddsMetadata.Variables and create a netcdfDatasetInfo
                // --- document if possible
                List<ThreddsMetadata.Variables> vsL = ds.getVariables();
                if (vsL != null && vsL.size() > 0) {
                    for (ThreddsMetadata.Variables vs : vsL) {
                        String vHref = vs.getVocabHref();
                        URI vUri = vs.getVocabUri();
                        String vocab = vs.getVocabulary();
                        Element coords = new Element("netcdfDatasetInfo");
                        for (ThreddsMetadata.Variable v : vs.getVariableList()) {
                            Element varX = new Element("variable");
                            varX.setAttribute("name", v.getName());
                            varX.setAttribute("decl", v.getDescription());
                            varX.setAttribute("units", v.getUnits());
                            // - these three attributes are new but then there is no
                            // - xsd for this so we can add as we want!
                            varX.setAttribute("vocab", vocab);
                            varX.setAttribute("vocaburi", vUri.toString());
                            varX.setAttribute("vocabhref", vHref);
                            coords.addContent(varX);
                        }
                        log.info("Coordinate systems from ThreddsMetadata are: \n" + Xml.getString(coords));
                        setCoordsStyleSheet(isCollection);
                        addKeywordsAndDataParams(coords, md);
                    }
                }
            }
        }
        // --- write metadata
        saveMetadata(md, uuid, getUri(ds));
        // --- update totals
        if (isCollection) {
            result.collectionDatasetRecords++;
        } else {
            result.atomicDatasetRecords++;
        }
    } catch (Exception e) {
        log.error("Thrown Exception " + e + " during dataset processing");
        e.printStackTrace();
    }
}