Use of ucar.ma2.Array in project gridfour by gwlucastrig.
The class PackageData, method process.
void process(PrintStream ps, TestOptions options, String[] args) throws IOException {
// The packaging of data in a Gvrs file can be thought of in terms of
// the steps shown below.
//
// 0. Obtain descriptive parameters about source data. In this
// case, the application is packing data from a NetCDF source
// and most of the descriptive parameters follow the pattern
// established in the earlier ExtractData.java demonstration
//
// 1. Define the fixed metadata about the file (its dimensions,
// data type, tile organization, etc.) using a GvrsFileSpecification
// object.
//
// 2. Open a new GvrsFile object using the settings created in step 1.
// Adjust any run-time parameters (such as the tile-cache size)
// according to the needs of the application.
//
// 3. Extract the data from its source and store in the Gvrs file.
//
ps.format("%nGvrs Packaging Application for NetCDF-format Global DEM files%n");
Locale locale = Locale.getDefault();
Date date = new Date();
SimpleDateFormat sdFormat = new SimpleDateFormat("dd MMM yyyy HH:mm z", locale);
ps.format("Date of Execution: %s%n", sdFormat.format(date));
String inputPath = options.getInputFile().getPath();
File outputFile = options.getOutputFile();
if (outputFile == null) {
ps.format("Missing specification for output file%n");
ps.format("Packaging application terminated%n");
return;
}
ps.format("Input file: %s%n", inputPath);
ps.format("Output file: %s%n", outputFile.getPath());
boolean[] matched = new boolean[args.length];
boolean useLsop = options.scanBooleanOption(args, "-lsop", matched, false);
// Open the NetCDF file -----------------------------------
ps.println("Opening NetCDF input file");
NetcdfFile ncfile = NetcdfFile.open(inputPath);
// Identify which Variable instances carry information about the
// geographic (latitude/longitude) coordinate system and also which
// carry information for elevation and bathymetry.
// the Variable that carries row-latitude information
Variable lat;
// the Variable that carries column-longitude information
Variable lon;
// the variable that carries elevation and bathymetry
Variable z;
lat = ncfile.findVariable("lat");
lon = ncfile.findVariable("lon");
z = ncfile.findVariable("elevation");
int[] tileSize;
// Use the input file name to format a product label
File inputFile = new File(inputPath);
String productLabel = inputFile.getName();
if (productLabel.toLowerCase().endsWith(".nc")) {
productLabel = productLabel.substring(0, productLabel.length() - 3);
}
if (lat == null) {
// ETOPO1 specification
tileSize = options.getTileSize(90, 120);
lat = ncfile.findVariable("y");
lon = ncfile.findVariable("x");
z = ncfile.findVariable("z");
} else {
tileSize = options.getTileSize(90, 120);
}
if (lat == null || lon == null || z == null) {
throw new IllegalArgumentException("Input does not contain valid lat, lon, and elevation Variables");
}
// using the variables from above, extract coordinate system
// information for the product and print it to the output.
ExtractionCoordinates extractionCoords = new ExtractionCoordinates(lat, lon);
extractionCoords.summarizeCoordinates(ps);
// Get the dimensions of the raster (grid) elevation/bathymetry data.
// The rank should be 2 for a two-dimensional grid.
int rank = z.getRank();
int[] shape = z.getShape();
int nRows = shape[0];
int nCols = shape[1];
ps.format("Rows: %8d%n", nRows);
ps.format("Columns: %8d%n", nCols);
int nRowsInTile = tileSize[0];
int nColsInTile = tileSize[1];
// Define the specification used to initialize the Gvrs file ----------
GvrsFileSpecification spec = new GvrsFileSpecification(nRows, nCols, nRowsInTile, nColsInTile);
spec.setLabel(productLabel);
// Initialize the data type. If a zScale option was specified,
// use integer-coded floats. Otherwise, pick the data type
// based on whether the NetCDF file gives integral or floating point
// data.
boolean isZScaleSpecified = options.isZScaleSpecified();
float zScale = (float) options.getZScale();
float zOffset = (float) options.getZOffset();
// data type from NetCDF file
DataType sourceDataType = z.getDataType();
GvrsElementSpecification elementSpec = null;
GvrsElementType gvrsDataType;
if (isZScaleSpecified) {
// the options define our data type
int encodedLimitDepth = (int) ((LIMIT_DEPTH - zOffset) * zScale);
int encodedLimitElev = (int) ((LIMIT_ELEVATION - zOffset) * zScale);
elementSpec = new GvrsElementSpecificationIntCodedFloat("z", zScale, zOffset, encodedLimitDepth, encodedLimitElev, Integer.MIN_VALUE, true);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.INT_CODED_FLOAT;
} else if (sourceDataType.isIntegral()) {
elementSpec = new GvrsElementSpecificationShort("z", LIMIT_DEPTH, LIMIT_ELEVATION, FILL_VALUE);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.SHORT;
} else {
elementSpec = new GvrsElementSpecificationFloat("z", LIMIT_DEPTH, LIMIT_ELEVATION, Float.NaN);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.FLOAT;
}
elementSpec.setDescription("Elevation (positive values) or depth (negative), in meters");
elementSpec.setUnitOfMeasure("m");
// Example with special character
elementSpec.setLabel("die H\u00f6henlage");
ps.println("Source date type " + sourceDataType + ", stored as " + gvrsDataType);
ps.println("");
// Determine whether data compression is used -------------------
boolean compressionEnabled = options.isCompressionEnabled();
spec.setDataCompressionEnabled(compressionEnabled);
boolean checksumsEnabled = options.isChecksumComputationEnabled();
spec.setChecksumEnabled(checksumsEnabled);
boolean bigAddressSpaceEnabled = options.isBigAddressSpaceEnabled();
spec.setExtendedFileSizeEnabled(bigAddressSpaceEnabled);
double[] geoCoords = extractionCoords.getGeographicCoordinateBounds();
spec.setGeographicCoordinates(geoCoords[0], geoCoords[1], geoCoords[2], geoCoords[3]);
// Check to verify that the geographic coordinates and grid coordinates
// are correctly implemented. This test is not truly part of the packaging
// process (since it should always work), but is included here as a
// diagnostic.
extractionCoords.checkSpecificationTransform(ps, spec);
// Add the optional LSOP codec to the specification. It is applied only when compression
// is enabled and the data type is integral.
if (useLsop) {
LsCodecUtility.addLsopToSpecification(spec, false);
}
// Create the output file and store the content from the input file.
if (outputFile.exists()) {
ps.println("Output file exists. Removing old file");
boolean status = outputFile.delete();
if (!status) {
ps.println("Removal attempt failed");
return;
}
}
ps.println("Begin processing");
double zMin = Double.POSITIVE_INFINITY;
double zMax = Double.NEGATIVE_INFINITY;
double zSum = 0;
long nSum = 0;
try (GvrsFile gvrs = new GvrsFile(outputFile, spec)) {
gvrs.writeMetadata(GvrsMnc.Copyright, "This data is in the public domain and may be used free of charge");
gvrs.writeMetadata(GvrsMnc.TermsOfUse, "This data should not be used for navigation");
GvrsElement zElement = gvrs.getElement("z");
gvrs.setTileCacheSize(GvrsCacheSize.Large);
storeGeoreferencingInformation(gvrs);
// Initialize data-statistics collection ---------------------------
// We know a priori that values in the global DEM range from about
// -11000 to 8650. This allows us to tabulate counts
// of which values we find in the data source. We can use this information
// to estimate the entropy of the source data and make a realistic
// assessment of how many bytes would be needed to store them.
InputDataStatCollector stats = new InputDataStatCollector(-11000, 8650, zScale);
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
// -----------------------------------------------------------------
// Package the data
long time0 = System.currentTimeMillis();
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 1000 == 999) {
long time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
// Read one row of data from the NetCDF source. An InvalidRangeException should not
// happen in this application unless the input file is corrupt.
try {
Array array = z.read(readOrigin, readShape);
// Transcribe the row just read to the Gvrs file, branching on the element data type.
switch(gvrsDataType) {
case INTEGER:
case SHORT:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
zElement.writeValueInt(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
break;
case INT_CODED_FLOAT:
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
zElement.writeValue(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
gvrs.flush();
long time1 = System.currentTimeMillis();
double timeToProcess = (time1 - time0) / 1000.0;
ps.format("Finished processing file in %4.1f seconds%n", timeToProcess);
ps.format("Entropy for input data %4.1f bits/sample%n", stats.getEntropy());
long outputSize = outputFile.length();
long nCells = (long) nRows * (long) nCols;
double bitsPerSymbol = 8.0 * (double) outputSize / (double) nCells;
ps.format("Storage used (including overhead) %6.4f bits/sample%n", bitsPerSymbol);
ps.format("%nSummary of file content and packaging actions------------%n");
gvrs.summarize(ps, true);
ps.format("Range of z values:%n");
ps.format(" Min z: %8.3f%n", zMin);
ps.format(" Max z: %8.3f%n", zMax);
ps.format(" Avg z: %8.3f%n", zSum / (nSum > 0 ? nSum : 1));
}
// Optionally verify the newly packaged file by comparing its values
// to those of the source data.
if (options.isVerificationEnabled()) {
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
ps.println("\nTesting product for data consistency with source");
ps.println("Opening gvrs file for reading");
long time0 = System.currentTimeMillis();
try (GvrsFile gvrs = new GvrsFile(outputFile, "r")) {
long time1 = System.currentTimeMillis();
ps.println("Opening complete in " + (time1 - time0) + " ms");
GvrsFileSpecification testSpec = gvrs.getSpecification();
String testLabel = testSpec.getLabel();
ps.println("Label: " + testLabel);
GvrsMetadata m = gvrs.readMetadata("Copyright", 0);
if (m != null) {
ps.println("Copyright: " + m.getString());
}
GvrsElement zElement = gvrs.getElement("z");
ps.println("Element: " + zElement.getName() + ", " + zElement.getDescription());
gvrs.setTileCacheSize(GvrsCacheSize.Large);
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 10000 == 9999) {
time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
try {
Array array = z.read(readOrigin, readShape);
switch(gvrsDataType) {
case INTEGER:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
int test = zElement.readValueInt(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol); // repeat the read as a convenient breakpoint for debugging
System.exit(-1);
}
}
break;
case INT_CODED_FLOAT:
for (int iCol = 0; iCol < nCols; iCol++) {
double sample = array.getDouble(iCol);
int iSample = (int) ((sample - zOffset) * zScale + 0.5);
float fSample = iSample / zScale + zOffset;
float test = zElement.readValue(iRow, iCol);
double delta = Math.abs(fSample - test);
if (delta > 1.01 / zScale) {
ps.println("Failure at " + iRow + ", " + iCol);
System.exit(-1);
}
}
break;
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
float test = zElement.readValue(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol); // repeat the read as a convenient breakpoint for debugging
System.exit(-1);
}
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
time1 = System.currentTimeMillis();
ps.println("Exhaustive cross check complete in " + (time1 - time0) + " ms");
gvrs.summarize(ps, false);
}
}
ncfile.close();
}
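For orientation, the four numbered steps at the top of process() reduce to a small pattern once the source grid is in memory. The sketch below is an illustration, not part of the demo: it uses only GVRS calls that appear in the listing above, the -11000/8650 range comes from the statistics comment in the listing, and the short[][] source grid and the FILL_VALUE constant are hypothetical.

import java.io.File;
import java.io.IOException;
import org.gridfour.gvrs.*; // GVRS classes referenced in the listing

// plausible limits taken from the listing; the fill value is hypothetical
static final short LIMIT_DEPTH = -11000;
static final short LIMIT_ELEVATION = 8650;
static final short FILL_VALUE = -32768;

static void packGrid(File outputFile, short[][] source) throws IOException {
    int nRows = source.length;
    int nCols = source[0].length;
    // Step 1: define the fixed metadata, including a 90x120 tiling as in the demo
    GvrsFileSpecification spec = new GvrsFileSpecification(nRows, nCols, 90, 120);
    spec.addElementSpecification(
        new GvrsElementSpecificationShort("z", LIMIT_DEPTH, LIMIT_ELEVATION, FILL_VALUE));
    // Step 2: open the file and adjust run-time parameters such as the tile cache
    try (GvrsFile gvrs = new GvrsFile(outputFile, spec)) {
        gvrs.setTileCacheSize(GvrsCacheSize.Large);
        GvrsElement zElement = gvrs.getElement("z");
        // Step 3: transfer the source values into the Gvrs file
        for (int iRow = 0; iRow < nRows; iRow++) {
            for (int iCol = 0; iCol < nCols; iCol++) {
                zElement.writeValueInt(iRow, iCol, source[iRow][iCol]);
            }
        }
    } // closing the file flushes any unwritten tiles
}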
Use of ucar.ma2.Array in project geotoolkit by Geomatys.
The class NetCDFExtractor, method parseDataBlockXY.
private static ExtractionResult parseDataBlockXY(final NCFieldAnalyze analyze, final String procedureID, final List<String> acceptedSensorID, Set<org.opengis.observation.Phenomenon> phenomenons) throws NetCDFParsingException {
final ExtractionResult results = new ExtractionResult();
if (analyze.mainField == null) {
LOGGER.warning("No main field found");
return results;
}
LOGGER.info("parsing datablock XY");
try {
final List<String> separators = parseSeparatorValues(analyze);
final boolean single = separators.isEmpty();
Array latArray = null;
Array lonArray = null;
if (analyze.hasSpatial()) {
latArray = analyze.getArrayFromField(analyze.latField);
lonArray = analyze.getArrayFromField(analyze.lonField);
}
Array timeArray = null;
String timeUnits = null;
if (analyze.hasTime()) {
timeUnits = analyze.timeField.uom;
timeArray = analyze.getArrayFromField(analyze.timeField);
}
final Variable zVar = analyze.vars.get(analyze.mainField.name);
final Array zArray = analyze.file.readArrays(Arrays.asList(zVar)).get(0);
final boolean constantZ = analyze.mainField.dimension == 1;
final boolean Zfirst = analyze.mainField.mainVariableFirst;
final Map<String, Array> phenArrays = analyze.getPhenomenonArrayMap();
results.fields.addAll(phenArrays.keySet());
final AbstractDataRecord datarecord = OMUtils.getDataRecordProfile("2.0.0", analyze.phenfields);
final Phenomenon phenomenon = OMUtils.getPhenomenon("2.0.0", analyze.phenfields, phenomenons);
results.phenomenons.add(phenomenon);
if (single) {
if (acceptedSensorID == null || acceptedSensorID.contains(procedureID)) {
final Process proc = (Process) OMUtils.buildProcess(procedureID);
final ProcedureTree compo = new ProcedureTree(proc.getHref(), proc.getName(), proc.getDescription(), "profile", "Component");
results.procedures.add(compo);
final MeasureStringBuilder sb = new MeasureStringBuilder();
final int count = zVar.getDimension(0).getLength();
final GeoSpatialBound gb = new GeoSpatialBound();
final String identifier = UUID.randomUUID().toString();
// read geometry (assume point)
SamplingFeature sp = null;
if (analyze.hasSpatial()) {
final double latitude = getDoubleValue(latArray, 0, analyze.latField.fillValue);
final double longitude = Longitude.normalize(getDoubleValue(lonArray, 0, analyze.lonField.fillValue));
if (!Double.isNaN(latitude) && !Double.isNaN(longitude)) {
sp = OMUtils.buildSamplingPoint(identifier, latitude, longitude);
results.addFeatureOfInterest(sp);
gb.addXYCoordinate(longitude, latitude);
gb.addGeometry((AbstractGeometry) sp.getGeometry());
}
}
if (analyze.hasTime()) {
final long millis = getTimeValue(timeUnits, timeArray, 0);
if (millis != 0 && millis != LIMIT) {
gb.addDate(millis);
}
}
for (int zIndex = 0; zIndex < zVar.getDimension(0).getLength(); zIndex++) {
double zLevel = getDoubleValue(zArray, zIndex, analyze.mainField.fillValue);
if (zLevel == 0 || zLevel == FILL_VALUE) {
continue;
}
sb.appendValue(zLevel);
for (NCField field : analyze.phenfields) {
final Array phenArray = phenArrays.get(field.name);
final double value = getDoubleValue(phenArray, zIndex, field.fillValue);
sb.appendValue(value);
}
sb.closeBlock();
}
results.observations.add(OMUtils.buildObservation(
identifier, // id
sp, // foi
phenomenon, // phenomenon
proc, // procedure
count, // result: count
datarecord, // result: record
sb, // result: values
gb.getTimeObject("2.0.0"))); // time
results.spatialBound.merge(gb);
compo.spatialBound.merge(gb);
}
} else {
final Process proc = (Process) OMUtils.buildProcess(procedureID);
final ProcedureTree system = new ProcedureTree(proc.getHref(), proc.getName(), proc.getDescription(), "profile", "System");
results.procedures.add(system);
for (int profileIndex = 0; profileIndex < separators.size(); profileIndex++) {
final String identifier = separators.get(profileIndex);
final int count = zVar.getDimension(0).getLength();
final GeoSpatialBound gb = new GeoSpatialBound();
final String currentProcID = procedureID + '-' + identifier;
final Process currentProc = (Process) OMUtils.buildProcess(currentProcID);
final ProcedureTree compo = new ProcedureTree(currentProc.getHref(), currentProc.getName(), currentProc.getDescription(), "profile", "Component");
if (acceptedSensorID == null || acceptedSensorID.contains(currentProcID)) {
// read geometry (assume point)
SamplingFeature sp = null;
if (analyze.hasSpatial()) {
final double latitude = getDoubleValue(latArray, 0, analyze.latField.fillValue);
final double longitude = Longitude.normalize(getDoubleValue(lonArray, 0, analyze.lonField.fillValue));
if (!Double.isNaN(latitude) && !Double.isNaN(longitude)) {
sp = OMUtils.buildSamplingPoint(identifier, latitude, longitude);
results.addFeatureOfInterest(sp);
gb.addXYCoordinate(longitude, latitude);
gb.addGeometry((AbstractGeometry) sp.getGeometry());
}
}
if (analyze.hasTime()) {
final long millis = getTimeValue(timeUnits, timeArray, 0);
if (millis != 0 && millis != LIMIT) {
gb.addDate(millis);
}
}
final MeasureStringBuilder sb = new MeasureStringBuilder();
for (int zIndex = 0; zIndex < zVar.getDimension(0).getLength(); zIndex++) {
double zLevel = getZValue(Zfirst, constantZ, zArray, zIndex, profileIndex, analyze.mainField.fillValue);
if (zLevel == 0 || zLevel == FILL_VALUE) {
continue;
}
sb.appendValue(zLevel);
for (NCField field : analyze.phenfields) {
final Array phenArray = phenArrays.get(field.name);
final boolean mainFirst = field.mainVariableFirst;
final double value = getDoubleValue(mainFirst, phenArray, zIndex, profileIndex, field.fillValue);
sb.appendValue(value);
}
sb.closeBlock();
}
compo.spatialBound.merge(gb);
system.children.add(compo);
final String obsid = UUID.randomUUID().toString();
results.observations.add(OMUtils.buildObservation(
obsid, // id
sp, // foi
phenomenon, // phenomenon
currentProc, // procedure
count, // result: count
datarecord, // result: record
sb, // result: values
gb.getTimeObject("2.0.0"))); // time
results.spatialBound.merge(gb);
}
}
}
} catch (IOException | IllegalArgumentException ex) {
throw new NetCDFParsingException("error while parsing netcdf profile", ex);
}
LOGGER.info("datablock parsed");
return results;
}
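The value reads above funnel through a getDoubleValue(...) helper that is not part of this excerpt. A minimal sketch of what it plausibly does (hypothetical; the real helper lives elsewhere in NetCDFExtractor) is to read one element of a ucar.ma2.Array by flat index and map the field's fill value to NaN, which is what the Double.isNaN(...) checks above rely on:

import ucar.ma2.Array;

// hypothetical signature modeled on the calls above
private static double getDoubleValue(Array array, int index, Number fillValue) {
    double v = array.getDouble(index); // flat (1-D) element access
    if (fillValue != null && v == fillValue.doubleValue()) {
        return Double.NaN; // callers treat NaN as "no data"
    }
    return v;
}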
Use of ucar.ma2.Array in project geotoolkit by Geomatys.
The class NetCDFExtractor, method getProcedureTS.
private static List<ProcedureTree> getProcedureTS(final NCFieldAnalyze analyze, final String procedureID, final List<String> acceptedSensorID) throws NetCDFParsingException {
final List<ProcedureTree> results = new ArrayList<>();
if (analyze.mainField == null) {
LOGGER.warning("No main field found");
return results;
}
LOGGER.info("parsing netCDF TS");
try {
final List<String> separators = parseSeparatorValues(analyze);
final boolean single = separators.isEmpty();
Array latArray = null;
Array lonArray = null;
if (analyze.hasSpatial()) {
latArray = analyze.getArrayFromField(analyze.latField);
lonArray = analyze.getArrayFromField(analyze.lonField);
}
final Variable timeVar = analyze.vars.get(analyze.mainField.name);
final String timeUnits = analyze.mainField.uom;
final Array timeArray = analyze.file.readArrays(Arrays.asList(timeVar)).get(0);
final boolean constantT = analyze.mainField.dimension == 1;
final boolean timeFirst = analyze.mainField.mainVariableFirst;
final Set<String> fields = analyze.getPhenomenonArrayMap().keySet();
if (single) {
if (acceptedSensorID == null || acceptedSensorID.contains(procedureID)) {
final Process proc = (Process) OMUtils.buildProcess(procedureID);
final ProcedureTree compo = new ProcedureTree(proc.getHref(), proc.getName(), proc.getDescription(), "Component", "timeseries", fields);
results.add(compo);
final int count = timeVar.getDimension(0).getLength();
final GeoSpatialBound gb = new GeoSpatialBound();
if (analyze.hasSpatial()) {
final double latitude = getDoubleValue(latArray, analyze.latField.fillValue);
final double longitude = Longitude.normalize(getDoubleValue(lonArray, analyze.lonField.fillValue));
if (!Double.isNaN(latitude) && !Double.isNaN(longitude)) {
gb.addXYCoordinate(longitude, latitude);
}
}
// iterating over time
for (int i = 0; i < count; i++) {
final long millis = getTimeValue(timeUnits, timeArray, i);
if (millis == 0 || millis == LIMIT) {
continue;
}
gb.addDate(millis);
}
compo.spatialBound.merge(gb);
}
} else {
final Process proc = (Process) OMUtils.buildProcess(procedureID);
final ProcedureTree system = new ProcedureTree(proc.getHref(), proc.getName(), proc.getDescription(), "System", "timeseries", fields);
results.add(system);
for (int j = 0; j < separators.size(); j++) {
final String identifier = separators.get(j);
final int count = getGoodTimeDimension(timeVar, analyze.dimensionSeparator).getLength();
final GeoSpatialBound gb = new GeoSpatialBound();
final String currentProcID = procedureID + '-' + identifier;
final Process currentProc = (Process) OMUtils.buildProcess(currentProcID);
final ProcedureTree compo = new ProcedureTree(currentProc.getHref(), currentProc.getName(), currentProc.getDescription(), "Component", "timeseries", fields);
if (acceptedSensorID == null || acceptedSensorID.contains(currentProcID)) {
if (analyze.hasSpatial()) {
final double latitude = getDoubleValue(latArray, j, analyze.latField.fillValue);
final double longitude = Longitude.normalize(getDoubleValue(lonArray, j, analyze.lonField.fillValue));
if (!Double.isNaN(latitude) && !Double.isNaN(longitude)) {
gb.addXYCoordinate(longitude, latitude);
}
}
for (int i = 0; i < count; i++) {
final long millis = getTimeValue(timeUnits, timeFirst, constantT, timeArray, i, j);
if (millis == 0 || millis == LIMIT) {
continue;
}
gb.addDate(millis);
}
compo.spatialBound.merge(gb);
system.children.add(compo);
}
}
}
} catch (IOException | IllegalArgumentException ex) {
throw new NetCDFParsingException("error while parsing netcdf timeserie", ex);
}
LOGGER.info("datablock parsed");
return results;
}
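Likewise, getTimeValue(...) is not shown in these excerpts. A hedged sketch follows, assuming the helper decodes a numeric time against a CF-style unit string with NetCDF-Java's DateUnit and returns epoch milliseconds, with zero signalling an unusable value as the callers above assume; the real implementation may differ:

import java.util.Date;
import ucar.ma2.Array;
import ucar.nc2.units.DateUnit;

// hypothetical helper modeled on the calls above
private static long getTimeValue(String timeUnits, Array timeArray, int index) throws NetCDFParsingException {
    try {
        double raw = timeArray.getDouble(index);
        DateUnit unit = new DateUnit(timeUnits); // e.g. "days since 1970-01-01T00:00:00Z"
        Date date = unit.makeDate(raw);
        return date == null ? 0 : date.getTime();
    } catch (Exception ex) {
        throw new NetCDFParsingException("unable to decode time value", ex);
    }
}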
Use of ucar.ma2.Array in project SOS by 52North.
The class AbstractNetcdfEncoder, method getLongitudeArray.
protected Array getLongitudeArray(AbstractSensorDataset sensorDataset) throws EncodingException {
if (sensorDataset instanceof StaticLocationDataset) {
StaticLocationDataset locationDataset = (StaticLocationDataset) sensorDataset;
// getLng() is assumed to mirror the getLat() accessor used in getLatitudeArray
if (locationDataset.getLng() != null) {
Array array = getArray();
initArrayWithFillValue(array, getNetcdfHelper().getFillValue());
Index index = array.getIndex();
index.set(0);
array.setDouble(index, locationDataset.getLng());
return array;
}
return null;
}
// TODO support varying longitude
throw new EncodingException("Varying longitudes are not yet supported.");
}
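Both this method and getLatitudeArray(...) depend on initArrayWithFillValue(...). For reference, here is a rank-independent sketch using ucar.ma2's IndexIterator; this is an assumption about the helper's behavior, not the encoder's actual code:

import ucar.ma2.Array;
import ucar.ma2.IndexIterator;

protected static void initArrayWithFillValue(Array array, double fillValue) {
    IndexIterator it = array.getIndexIterator();
    while (it.hasNext()) {
        it.getDoubleNext();             // advance to the next element
        it.setDoubleCurrent(fillValue); // overwrite it with the fill value
    }
}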
Use of ucar.ma2.Array in project SOS by 52North.
The class AbstractNetcdfEncoder, method encodeSensorDataToNetcdf.
protected void encodeSensorDataToNetcdf(File netcdfFile, AbstractSensorDataset sensorDataset, Version version) throws EncodingException, IOException {
String sensor = sensorDataset.getSensorIdentifier();
sensorDataset.getSensor().setSensorDescription(getProcedureDescription(sensor, sensorDataset.getProcedureDescription()));
NetcdfFileWriter writer = getNetcdfFileWriter(netcdfFile, version);
// set fill on, doesn't seem to have any effect though
writer.setFill(true);
Map<Variable, Array> variableArrayMap = Maps.newHashMap();
int numTimes = sensorDataset.getTimes().size();
// FIXME shouldn't assume that all subsensors are heights (or rename
// subsensors if they are)
int numHeightDepth = sensorDataset.getSubSensors().size() > 0 ? sensorDataset.getSubSensors().size() : 1;
// global attributes
addGlobaleAttributes(writer, sensorDataset);
// add appropriate dims for feature type
List<Dimension> timeDims = Lists.newArrayList();
List<Dimension> latLngDims = Lists.newArrayList();
List<Dimension> latDims = Lists.newArrayList();
List<Dimension> lngDims = Lists.newArrayList();
List<Dimension> zDims = Lists.newArrayList();
List<Dimension> obsPropDims = Lists.newArrayList();
// REQUIRED for NetCDF-3
// add Dimensions
// dTime = writer.addDimension(null, CFStandardNames.TIME.getName(),
// numTimes);
Dimension dTime = writer.addUnlimitedDimension(getVariableDimensionCaseName(CFStandardNames.TIME.getName()));
dTime.setLength(numTimes);
timeDims.add(dTime);
if (!(sensorDataset instanceof StaticLocationDataset)) {
zDims.add(dTime);
}
obsPropDims.add(dTime);
// set up lat/lng dimensions
// FIXME do not set time dimension for static location dataset
// if ((sensorDataset instanceof StaticLocationDataset)) {
// latLngDims.add(dTime);
// }
// set up z dimensions
String dimensionName;
if (useHeight()) {
dimensionName = getVariableDimensionCaseName(CFStandardNames.HEIGHT.getName());
} else {
dimensionName = getVariableDimensionCaseName(CFStandardNames.DEPTH.getName());
}
// profile/timeSeriesProfile
Dimension dZ = writer.addDimension(null, dimensionName, numHeightDepth);
if (!(sensorDataset instanceof StaticLocationDataset)) {
// trajectory
zDims.add(dTime);
}
zDims.add(dZ);
obsPropDims.add(dZ);
variableArrayMap.putAll(getNetcdfProfileSpecificVariablesArrays(writer, sensorDataset));
// time var
Variable vTime = addVariableTime(writer, timeDims);
if (numTimes > 1 && writer.getVersion().isNetdf4format()) {
vTime.addAttribute(new Attribute(CDM.CHUNK_SIZES, getNetcdfHelper().getChunkSizeTime()));
}
ArrayDouble timeArray = new ArrayDouble(getDimShapes(timeDims));
initArrayWithFillValue(timeArray, getNetcdfHelper().getFillValue());
Array latArray = getLatitudeArray(sensorDataset);
Array lonArray = getLongitudeArray(sensorDataset);
// add lat/long dimensions
long latSize = 1;
if (latArray != null) {
latSize = latArray.getSize();
}
Dimension dLat = writer.addDimension(null, getVariableDimensionCaseName(CFStandardNames.LATITUDE.getName()), (int) latSize);
latDims.add(dLat);
long lonSize = 1;
if (lonArray != null) {
lonSize = lonArray.getSize();
}
Dimension dLon = writer.addDimension(null, getVariableDimensionCaseName(CFStandardNames.LONGITUDE.getName()), (int) lonSize);
lngDims.add(dLon);
// lat/lon var
Variable vLat;
Variable vLon;
if (latLngDims.size() > 0) {
vLat = addVariableLatitude(writer, latLngDims);
vLon = addVariableLongitude(writer, latLngDims);
} else {
vLat = addVariableLatitude(writer, latDims);
vLon = addVariableLongitude(writer, lngDims);
}
// height/depth var
Variable vHeightDepth;
if (useHeight()) {
vHeightDepth = addVariableHeight(writer, zDims);
} else {
vHeightDepth = addVariableDepth(writer, zDims);
}
String coordinateString = Joiner.on(' ').join(Lists.newArrayList(vTime.getFullName(), vLat.getFullName(), vLon.getFullName(), vHeightDepth.getFullName()));
Map<OmObservableProperty, Variable> obsPropVarMap = Maps.newHashMap();
Map<Variable, Array> varDataArrayMap = Maps.newHashMap();
for (OmObservableProperty obsProp : sensorDataset.getPhenomena()) {
// obs prop var
Variable vObsProp = addVariableForObservedProperty(writer, obsProp, obsPropDims, coordinateString);
obsPropVarMap.put(obsProp, vObsProp);
// init obs prop data array
Array obsPropArray = getArray(obsPropDims);
initArrayWithFillValue(obsPropArray, getNetcdfHelper().getFillValue());
varDataArrayMap.put(vObsProp, obsPropArray);
}
// populate the height/depth array for profiles
Array heightDepthArray = null;
if (zDims.size() == 1 && hasDimension(zDims, dZ) && !sensorDataset.getSubSensors().isEmpty()) {
heightDepthArray = initHeightDephtArray(zDims);
Double consistentBinHeight = populateHeightDepthArray(sensorDataset, heightDepthArray, vHeightDepth);
String verticalResolution = null;
if (consistentBinHeight == null) {
verticalResolution = ACDDConstants.POINT;
} else if (consistentBinHeight != getNetcdfHelper().getFillValue()) {
verticalResolution = consistentBinHeight + " " + CFConstants.UNITS_METERS + " " + ACDDConstants.BINNED;
}
if (verticalResolution != null) {
writer.addGroupAttribute(null, new Attribute(ACDDConstants.GEOSPATIAL_VERTICAL_RESOLUTION, verticalResolution));
}
}
// iterate through sensorDataset, set values
int timeCounter = 0;
for (Time time : sensorDataset.getTimes()) {
// set time value
Index timeIndex = timeArray.getIndex();
int timeIndexCounter = 0;
if (hasDimension(timeDims, dTime)) {
timeIndex.setDim(timeIndexCounter++, timeCounter++);
}
timeArray.set(timeIndex, getTimeValue(time));
// data values
Map<OmObservableProperty, Map<SubSensor, Value<?>>> obsPropMap = sensorDataset.getDataValues().get(time);
for (Entry<OmObservableProperty, Map<SubSensor, Value<?>>> entry : obsPropMap.entrySet()) {
OmObservableProperty obsProp = entry.getKey();
Variable variable = obsPropVarMap.get(obsProp);
Array array = varDataArrayMap.get(variable);
for (Entry<SubSensor, Value<?>> subSensorEntry : obsPropMap.get(obsProp).entrySet()) {
SubSensor subSensor = subSensorEntry.getKey();
Value<?> value = subSensorEntry.getValue();
Object valObj = value.getValue();
if (!(valObj instanceof Number)) {
throw new EncodingException("Value class %s not supported", valObj.getClass().getCanonicalName());
}
Index index = array.getIndex();
int obsPropDimCounter = 0;
for (Dimension dim : obsPropDims) {
if (dim.equals(dTime)) {
// time index dim
index.setDim(obsPropDimCounter++, timeCounter - 1);
} else if (dim.equals(dZ) && dim.getLength() > 1) {
// height/depth index dim
index.setDim(obsPropDimCounter++, sensorDataset.getSubSensors().indexOf(subSensor));
}
}
if (array instanceof ArrayFloat) {
((ArrayFloat) array).set(index, ((Number) valObj).floatValue());
} else {
((ArrayDouble) array).set(index, ((Number) valObj).doubleValue());
}
}
}
}
// gather each variable's data array for the final write
variableArrayMap.put(vTime, timeArray);
if (latArray != null) {
variableArrayMap.put(vLat, latArray);
}
if (lonArray != null) {
variableArrayMap.put(vLon, lonArray);
}
if (heightDepthArray != null) {
variableArrayMap.put(vHeightDepth, heightDepthArray);
}
variableArrayMap.putAll(varDataArrayMap);
// create the empty netCDF with dims/vars/attributes defined
writeToFile(writer, variableArrayMap);
writer.close();
}