Use of ucar.ma2.Array in project s1tbx by senbox-org.
The class CosmoSkymedReader, method readBandRasterDataImpl.
/**
* {@inheritDoc}
*/
@Override
protected void readBandRasterDataImpl(int sourceOffsetX, int sourceOffsetY, int sourceWidth, int sourceHeight, int sourceStepX, int sourceStepY, Band destBand, int destOffsetX, int destOffsetY, int destWidth, int destHeight, ProductData destBuffer, ProgressMonitor pm) throws IOException {
Guardian.assertTrue("sourceStepX == 1 && sourceStepY == 1", sourceStepX == 1 && sourceStepY == 1);
Guardian.assertTrue("sourceWidth == destWidth", sourceWidth == destWidth);
Guardian.assertTrue("sourceHeight == destHeight", sourceHeight == destHeight);
final int sceneHeight = product.getSceneRasterHeight();
final int sceneWidth = product.getSceneRasterWidth();
destHeight = Math.min(destHeight, sceneHeight - sourceOffsetY);
destWidth = Math.min(destWidth, sceneWidth - destOffsetX);
final int y0 = yFlipped ? (sceneHeight - 1) - sourceOffsetY : sourceOffsetY;
final Variable variable = bandMap.get(destBand);
final int rank = variable.getRank();
final int[] origin = new int[rank];
final int[] shape = new int[rank];
for (int i = 0; i < rank; i++) {
shape[i] = 1;
origin[i] = 0;
}
shape[0] = 1;
shape[1] = destWidth;
origin[1] = sourceOffsetX;
if (isComplex && destBand.getUnit().equals(Unit.IMAGINARY)) {
origin[2] = 1;
}
pm.beginTask("Reading data from band " + destBand.getName(), destHeight);
try {
for (int y = 0; y < destHeight; y++) {
origin[0] = yFlipped ? y0 - y : y0 + y;
final Array array;
synchronized (netcdfFile) {
array = variable.read(origin, shape);
}
System.arraycopy(array.getStorage(), 0, destBuffer.getElems(), y * destWidth, destWidth);
pm.worked(1);
}
} catch (InvalidRangeException e) {
final IOException ioException = new IOException(e.getMessage());
ioException.initCause(e);
throw ioException;
} finally {
pm.done();
}
}
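The pattern above reduces to ucar.ma2's hyperslab addressing: Variable.read(origin, shape) takes one start index and one extent per dimension and returns the selected sub-array. Below is a minimal sketch of that call in isolation; the file name "data.nc" and variable name "z" are placeholders, and the variable is assumed to be two-dimensional.
import ucar.ma2.Array;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;

public class HyperslabReadDemo {
    public static void main(String[] args) throws Exception {
        // placeholders: adjust the path and variable name to a real file
        NetcdfFile ncfile = NetcdfFile.open("data.nc");
        try {
            Variable v = ncfile.findVariable("z");
            int[] origin = { 42, 0 };               // start at row 42, column 0
            int[] shape = { 1, v.getShape()[1] };   // read exactly one full row
            Array row = v.read(origin, shape);      // may throw InvalidRangeException
            // getStorage() would expose the backing primitive array, as used above
            System.out.println("read " + row.getSize() + " samples");
        } finally {
            ncfile.close();
        }
    }
}
As in readBandRasterDataImpl, reads against a shared NetcdfFile should be synchronized when several bands are read concurrently.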
Use of ucar.ma2.Array in project gridfour by gwlucastrig.
The class PackageData, method process.
void process(PrintStream ps, TestOptions options, String[] args) throws IOException {
// The packaging of data in a Gvrs file can be thought of in terms of
// the steps shown below.
//
// 0. Obtain descriptive parameters about source data. In this
// case, the application is packing data from a NetCDF source
// and most of the descriptive parameters follow the pattern
// established in the earlier ExtractData.java demonstration
//
// 1. Define the fixed metadata about the file (its dimensions,
// data type, tile organization, etc.) using a GvrsFileSpecification
// object.
//
// 2. Open a new GvrsFile object using the settings created in step 1.
// Adjust any run-time parameters (such as the tile-cache size)
// according to the needs of the application.
//
// 3. Extract the data from its source and store it in the Gvrs file.
//
ps.format("%nGvrs Packaging Application for NetCDF-format Global DEM files%n");
Locale locale = Locale.getDefault();
Date date = new Date();
SimpleDateFormat sdFormat = new SimpleDateFormat("dd MMM yyyy HH:mm z", locale);
ps.format("Date of Execution: %s%n", sdFormat.format(date));
String inputPath = options.getInputFile().getPath();
File outputFile = options.getOutputFile();
if (outputFile == null) {
ps.format("Missing specification for output file%n");
ps.format("Packaging application terminated%n");
return;
}
ps.format("Input file: %s%n", inputPath);
ps.format("Output file: %s%n", outputFile.getPath());
boolean[] matched = new boolean[args.length];
boolean useLsop = options.scanBooleanOption(args, "-lsop", matched, false);
// Open the NetCDF file -----------------------------------
ps.println("Opening NetCDF input file");
NetcdfFile ncfile = NetcdfFile.open(inputPath);
// Identify which Variable instances carry information about the
// geographic (latitude/longitude) coordinate system and also which
// carry information for elevation and bathymetry.
// the Variable that carries row-latitude information
Variable lat;
// the Variable that carries column-longitude information
Variable lon;
// the variable that carries elevation and bathymetry
Variable z;
lat = ncfile.findVariable("lat");
lon = ncfile.findVariable("lon");
z = ncfile.findVariable("elevation");
int[] tileSize;
// Use the input file name to format a product label
File inputFile = new File(inputPath);
String productLabel = inputFile.getName();
if (productLabel.toLowerCase().endsWith(".nc")) {
productLabel = productLabel.substring(0, productLabel.length() - 3);
}
if (lat == null) {
// ETOPO1 specification
tileSize = options.getTileSize(90, 120);
lat = ncfile.findVariable("y");
lon = ncfile.findVariable("x");
z = ncfile.findVariable("z");
} else {
tileSize = options.getTileSize(90, 120);
}
if (lat == null || lon == null || z == null) {
throw new IllegalArgumentException("Input does not contain valid lat, lon, and elevation Variables");
}
// using the variables from above, extract coordinate system
// information for the product and print it to the output.
ExtractionCoordinates extractionCoords = new ExtractionCoordinates(lat, lon);
extractionCoords.summarizeCoordinates(ps);
// Get the dimensions of the raster (grid) elevation/bathymetry data;
// the rank should be 2 for a two-dimensional grid.
int rank = z.getRank();
int[] shape = z.getShape();
int nRows = shape[0];
int nCols = shape[1];
ps.format("Rows: %8d%n", nRows);
ps.format("Columns: %8d%n", nCols);
int nRowsInTile = tileSize[0];
int nColsInTile = tileSize[1];
// Initialize the specification used to initialize the Gvrs file -------
GvrsFileSpecification spec = new GvrsFileSpecification(nRows, nCols, nRowsInTile, nColsInTile);
spec.setLabel(productLabel);
// Initialize the data type. If a zScale option was specified,
// use integer-coded floats. Otherwise, pick the data type
// based on whether the NetCDF file gives integral or floating point
// data.
boolean isZScaleSpecified = options.isZScaleSpecified();
float zScale = (float) options.getZScale();
float zOffset = (float) options.getZOffset();
// data type from NetCDF file
DataType sourceDataType = z.getDataType();
GvrsElementSpecification elementSpec = null;
GvrsElementType gvrsDataType;
if (isZScaleSpecified) {
// the options define our data type
int encodedLimitDepth = (int) ((LIMIT_DEPTH - zOffset) * zScale);
int encodedLimitElev = (int) ((LIMIT_ELEVATION - zOffset) * zScale);
elementSpec = new GvrsElementSpecificationIntCodedFloat("z", zScale, zOffset, encodedLimitDepth, encodedLimitElev, Integer.MIN_VALUE, true);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.INT_CODED_FLOAT;
} else if (sourceDataType.isIntegral()) {
elementSpec = new GvrsElementSpecificationShort("z", LIMIT_DEPTH, LIMIT_ELEVATION, FILL_VALUE);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.SHORT;
} else {
elementSpec = new GvrsElementSpecificationFloat("z", LIMIT_DEPTH, LIMIT_ELEVATION, Float.NaN);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.FLOAT;
}
elementSpec.setDescription("Elevation (positive values) or depth (negative), in meters");
elementSpec.setUnitOfMeasure("m");
// Example with special character
elementSpec.setLabel("die H\u00f6henlage");
ps.println("Source date type " + sourceDataType + ", stored as " + gvrsDataType);
ps.println("");
// Determine whether data compression is used -------------------
boolean compressionEnabled = options.isCompressionEnabled();
spec.setDataCompressionEnabled(compressionEnabled);
boolean checksumsEnabled = options.isChecksumComputationEnabled();
spec.setChecksumEnabled(checksumsEnabled);
boolean bigAddressSpaceEnabled = options.isBigAddressSpaceEnabled();
spec.setExtendedFileSizeEnabled(bigAddressSpaceEnabled);
double[] geoCoords = extractionCoords.getGeographicCoordinateBounds();
spec.setGeographicCoordinates(geoCoords[0], geoCoords[1], geoCoords[2], geoCoords[3]);
// Check that the geographic coordinates and grid coordinates
// are correctly implemented. This test is not truly part of the packaging
// process (since it should always work), but is included here as a
// diagnostic.
extractionCoords.checkSpecificationTransform(ps, spec);
// Optionally add the LSOP lossless compressor; it is used only when compression
// is enabled and the data type is integral.
if (useLsop) {
LsCodecUtility.addLsopToSpecification(spec, false);
}
// Create the output file and store the content from the input file.
if (outputFile.exists()) {
ps.println("Output file exists. Removing old file");
boolean status = outputFile.delete();
if (!status) {
ps.println("Removal attempt failed");
return;
}
}
ps.println("Begin processing");
double zMin = Double.POSITIVE_INFINITY;
double zMax = Double.NEGATIVE_INFINITY;
double zSum = 0;
long nSum = 0;
try (GvrsFile gvrs = new GvrsFile(outputFile, spec)) {
gvrs.writeMetadata(GvrsMnc.Copyright, "This data is in the public domain and may be used free of charge");
gvrs.writeMetadata(GvrsMnc.TermsOfUse, "This data should not be used for navigation");
GvrsElement zElement = gvrs.getElement("z");
gvrs.setTileCacheSize(GvrsCacheSize.Large);
storeGeoreferencingInformation(gvrs);
// Initialize data-statistics collection ---------------------------
// We happen to know the range of values for the global DEM a priori:
// it ranges from about -11000 to 8650. This allows us to tabulate counts
// of which values we find in the data source. We can use this information
// to estimate the entropy of the source data and make a realistic
// assessment of how many bytes would be needed to store them.
InputDataStatCollector stats = new InputDataStatCollector(-11000, 8650, zScale);
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
// -----------------------------------------------------------------
// Package the data
long time0 = System.currentTimeMillis();
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 1000 == 999) {
long time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
// The read can throw an InvalidRangeException, which should never
// happen in this application unless the input file is corrupt.
try {
Array array = z.read(readOrigin, readShape);
// transcribe the row of samples and store it in the Gvrs file.
switch(gvrsDataType) {
case INTEGER:
case SHORT:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
zElement.writeValueInt(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
break;
case INT_CODED_FLOAT:
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
zElement.writeValue(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
gvrs.flush();
long time1 = System.currentTimeMillis();
double timeToProcess = (time1 - time0) / 1000.0;
ps.format("Finished processing file in %4.1f seconds%n", timeToProcess);
ps.format("Entropy for input data %4.1f bits/sample%n", stats.getEntropy());
long outputSize = outputFile.length();
long nCells = (long) nRows * (long) nCols;
double bitsPerSymbol = 8.0 * (double) outputSize / (double) nCells;
ps.format("Storage used (including overhead) %6.4f bits/sample%n", bitsPerSymbol);
ps.format("%nSummary of file content and packaging actions------------%n");
gvrs.summarize(ps, true);
ps.format("Range of z values:%n");
ps.format(" Min z: %8.3f%n", zMin);
ps.format(" Max z: %8.3f%n", zMax);
ps.format(" Avg z: %8.3f%n", zSum / (nSum > 0 ? nSum : 1));
}
// If verification is enabled, compare the values stored in the Gvrs file
// to those of the source data.
if (options.isVerificationEnabled()) {
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
ps.println("\nTesting product for data consistency with source");
ps.println("Opening gvrs file for reading");
long time0 = System.currentTimeMillis();
try (GvrsFile gvrs = new GvrsFile(outputFile, "r")) {
long time1 = System.currentTimeMillis();
ps.println("Opening complete in " + (time1 - time0) + " ms");
GvrsFileSpecification testSpec = gvrs.getSpecification();
String testLabel = testSpec.getLabel();
ps.println("Label: " + testLabel);
GvrsMetadata m = gvrs.readMetadata("Copyright", 0);
if (m != null) {
ps.println("Copyright: " + m.getString());
}
GvrsElement zElement = gvrs.getElement("z");
ps.println("Element: " + zElement.getName() + ", " + zElement.getDescription());
gvrs.setTileCacheSize(GvrsCacheSize.Large);
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 10000 == 9999) {
time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
try {
Array array = z.read(readOrigin, readShape);
switch(gvrsDataType) {
case INTEGER:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
int test = zElement.readValueInt(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol);
System.exit(-1);
}
}
break;
case INT_CODED_FLOAT:
for (int iCol = 0; iCol < nCols; iCol++) {
double sample = array.getDouble(iCol);
int iSample = (int) ((sample - zOffset) * zScale + 0.5);
float fSample = iSample / zScale + zOffset;
float test = zElement.readValue(iRow, iCol);
double delta = Math.abs(fSample - test);
if (delta > 1.01 / zScale) {
ps.println("Failure at " + iRow + ", " + iCol);
System.exit(-1);
}
}
break;
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
float test = zElement.readValue(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol);
System.exit(-1);
}
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
time1 = System.currentTimeMillis();
ps.println("Exhaustive cross check complete in " + (time1 - time0) + " ms");
gvrs.summarize(ps, false);
}
}
ncfile.close();
}
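The numbered steps at the top of process() distill to a short workflow: build a GvrsFileSpecification, add an element specification, open a GvrsFile, and write samples through a GvrsElement. The sketch below is a minimal rendering of those steps using only calls that appear in the listing; the org.gridfour.gvrs package names, the short-valued constructor arguments, and the grid dimensions are assumptions, not values from the project.
import java.io.File;
import java.io.IOException;
import org.gridfour.gvrs.GvrsElement;
import org.gridfour.gvrs.GvrsElementSpecificationShort;
import org.gridfour.gvrs.GvrsFile;
import org.gridfour.gvrs.GvrsFileSpecification;

public class MinimalPackaging {
    public static void main(String[] args) throws IOException {
        int nRows = 180, nCols = 360;  // placeholder grid dimensions
        // Step 1: fixed metadata -- grid dimensions and tile organization
        GvrsFileSpecification spec = new GvrsFileSpecification(nRows, nCols, 90, 120);
        spec.addElementSpecification(
            new GvrsElementSpecificationShort("z", (short) -11000, (short) 8650, (short) -32768));
        // Step 2: open the file; Step 3: extract and store the data
        try (GvrsFile gvrs = new GvrsFile(new File("demo.gvrs"), spec)) {
            GvrsElement z = gvrs.getElement("z");
            for (int iRow = 0; iRow < nRows; iRow++) {
                for (int iCol = 0; iCol < nCols; iCol++) {
                    z.writeValueInt(iRow, iCol, 0);  // placeholder sample value
                }
            }
            gvrs.flush();
        }
    }
}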
Use of ucar.ma2.Array in project SOS by 52North.
The class AbstractNetcdfEncoder, method encodeSensorDataToNetcdf.
protected void encodeSensorDataToNetcdf(File netcdfFile, AbstractSensorDataset sensorDataset, Version version) throws EncodingException, IOException {
String sensor = sensorDataset.getSensorIdentifier();
sensorDataset.getSensor().setSensorDescription(getProcedureDescription(sensor, sensorDataset.getProcedureDescription()));
NetcdfFileWriter writer = getNetcdfFileWriter(netcdfFile, version);
// set fill on, doesn't seem to have any effect though
writer.setFill(true);
Map<Variable, Array> variableArrayMap = Maps.newHashMap();
int numTimes = sensorDataset.getTimes().size();
// FIXME shouldn't assume that all subsensors are heights (or rename
// subsensors if they are)
int numHeightDepth = sensorDataset.getSubSensors().size() > 0 ? sensorDataset.getSubSensors().size() : 1;
// global attributes
addGlobaleAttributes(writer, sensorDataset);
// add appropriate dims for feature type
List<Dimension> timeDims = Lists.newArrayList();
List<Dimension> latLngDims = Lists.newArrayList();
List<Dimension> latDims = Lists.newArrayList();
List<Dimension> lngDims = Lists.newArrayList();
List<Dimension> zDims = Lists.newArrayList();
List<Dimension> obsPropDims = Lists.newArrayList();
// REQUIRED for NetCDF-3
// add Dimensions
// dTime = writer.addDimension(null, CFStandardNames.TIME.getName(),
// numTimes);
Dimension dTime = writer.addUnlimitedDimension(getVariableDimensionCaseName(CFStandardNames.TIME.getName()));
dTime.setLength(numTimes);
timeDims.add(dTime);
if (!(sensorDataset instanceof StaticLocationDataset)) {
zDims.add(dTime);
}
obsPropDims.add(dTime);
// set up lat/lng dimensions
// FIXME do not set time dimension for static location dataset
// if ((sensorDataset instanceof StaticLocationDataset)) {
// latLngDims.add(dTime);
// }
// set up z dimensions
String dimensionName;
if (useHeight()) {
dimensionName = getVariableDimensionCaseName(CFStandardNames.HEIGHT.getName());
} else {
dimensionName = getVariableDimensionCaseName(CFStandardNames.DEPTH.getName());
}
// profile/timeSeriesProfile
Dimension dZ = writer.addDimension(null, dimensionName, numHeightDepth);
if (!(sensorDataset instanceof StaticLocationDataset)) {
// trajectory
zDims.add(dTime);
}
zDims.add(dZ);
obsPropDims.add(dZ);
variableArrayMap.putAll(getNetcdfProfileSpecificVariablesArrays(writer, sensorDataset));
// time var
Variable vTime = addVariableTime(writer, timeDims);
if (numTimes > 1 && writer.getVersion().isNetdf4format()) {
vTime.addAttribute(new Attribute(CDM.CHUNK_SIZES, getNetcdfHelper().getChunkSizeTime()));
}
ArrayDouble timeArray = new ArrayDouble(getDimShapes(timeDims));
initArrayWithFillValue(timeArray, getNetcdfHelper().getFillValue());
Array latArray = getLatitudeArray(sensorDataset);
Array lonArray = getLongitudeArray(sensorDataset);
// add lat/long dimensions
long latSize = 1;
if (latArray != null) {
latSize = latArray.getSize();
}
Dimension dLat = writer.addDimension(null, getVariableDimensionCaseName(CFStandardNames.LATITUDE.getName()), (int) latSize);
latDims.add(dLat);
long lonSize = 1;
if (lonArray != null) {
lonSize = lonArray.getSize();
}
Dimension dLon = writer.addDimension(null, getVariableDimensionCaseName(CFStandardNames.LONGITUDE.getName()), (int) lonSize);
lngDims.add(dLon);
// lat/lon var
Variable vLat;
Variable vLon;
if (latLngDims.size() > 0) {
vLat = addVariableLatitude(writer, latLngDims);
vLon = addVariableLongitude(writer, latLngDims);
} else {
vLat = addVariableLatitude(writer, latDims);
vLon = addVariableLongitude(writer, lngDims);
}
// height/depth var
Variable vHeightDepth;
if (useHeight()) {
vHeightDepth = addVariableHeight(writer, zDims);
} else {
vHeightDepth = addVariableDepth(writer, zDims);
}
String coordinateString = Joiner.on(' ').join(Lists.newArrayList(vTime.getFullName(), vLat.getFullName(), vLon.getFullName(), vHeightDepth.getFullName()));
Map<OmObservableProperty, Variable> obsPropVarMap = Maps.newHashMap();
Map<Variable, Array> varDataArrayMap = Maps.newHashMap();
for (OmObservableProperty obsProp : sensorDataset.getPhenomena()) {
// obs prop var
Variable vObsProp = addVariableForObservedProperty(writer, obsProp, obsPropDims, coordinateString);
obsPropVarMap.put(obsProp, vObsProp);
// init obs prop data array
Array obsPropArray = getArray(obsPropDims);
initArrayWithFillValue(obsPropArray, getNetcdfHelper().getFillValue());
varDataArrayMap.put(vObsProp, obsPropArray);
}
// populate heights array for profile
Array heightDepthArray = null;
if (zDims.size() == 1 && hasDimension(zDims, dZ) && !sensorDataset.getSubSensors().isEmpty()) {
heightDepthArray = initHeightDephtArray(zDims);
Double consistentBinHeight = populateHeightDepthArray(sensorDataset, heightDepthArray, vHeightDepth);
String verticalResolution = null;
if (consistentBinHeight == null) {
verticalResolution = ACDDConstants.POINT;
} else if (consistentBinHeight != getNetcdfHelper().getFillValue()) {
verticalResolution = consistentBinHeight + " " + CFConstants.UNITS_METERS + " " + ACDDConstants.BINNED;
}
if (verticalResolution != null) {
writer.addGroupAttribute(null, new Attribute(ACDDConstants.GEOSPATIAL_VERTICAL_RESOLUTION, verticalResolution));
}
}
// iterate through sensorDataset, set values
int timeCounter = 0;
for (Time time : sensorDataset.getTimes()) {
// set time value
Index timeIndex = timeArray.getIndex();
int timeIndexCounter = 0;
if (hasDimension(timeDims, dTime)) {
timeIndex.setDim(timeIndexCounter++, timeCounter++);
}
timeArray.set(timeIndex, getTimeValue(time));
// data values
Map<OmObservableProperty, Map<SubSensor, Value<?>>> obsPropMap = sensorDataset.getDataValues().get(time);
for (Entry<OmObservableProperty, Map<SubSensor, Value<?>>> entry : obsPropMap.entrySet()) {
OmObservableProperty obsProp = entry.getKey();
Variable variable = obsPropVarMap.get(obsProp);
Array array = varDataArrayMap.get(variable);
for (Entry<SubSensor, Value<?>> subSensorEntry : obsPropMap.get(obsProp).entrySet()) {
SubSensor subSensor = subSensorEntry.getKey();
Value<?> value = subSensorEntry.getValue();
Object valObj = value.getValue();
if (!(valObj instanceof Number)) {
throw new EncodingException("Value class %s not supported", valObj.getClass().getCanonicalName());
}
Index index = array.getIndex();
int obsPropDimCounter = 0;
for (Dimension dim : obsPropDims) {
if (dim.equals(dTime)) {
// time index dim
index.setDim(obsPropDimCounter++, timeCounter - 1);
} else if (dim.equals(dZ) && dim.getLength() > 1) {
// height/depth index dim
index.setDim(obsPropDimCounter++, sensorDataset.getSubSensors().indexOf(subSensor));
}
}
if (array instanceof ArrayFloat) {
((ArrayFloat) array).set(index, ((Number) valObj).floatValue());
} else {
((ArrayDouble) array).set(index, ((Number) valObj).doubleValue());
}
}
}
}
// collect the per-variable data arrays to be written
variableArrayMap.put(vTime, timeArray);
if (latArray != null) {
variableArrayMap.put(vLat, latArray);
}
if (lonArray != null) {
variableArrayMap.put(vLon, lonArray);
}
if (heightDepthArray != null) {
variableArrayMap.put(vHeightDepth, heightDepthArray);
}
variableArrayMap.putAll(varDataArrayMap);
// create the empty netCDF with dims/vars/attributes defined
writeToFile(writer, variableArrayMap);
writer.close();
}
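The method above follows NetCDF-Java's define-then-write discipline: dimensions, variables, and attributes are declared on the NetcdfFileWriter first, create() materializes the header, and the data arrays are written afterwards (here via writeToFile). A minimal sketch of that life cycle, assuming the NetCDF-Java 4.x NetcdfFileWriter API used in this encoder; the output path, dimension length, and units string are placeholders.
import ucar.ma2.ArrayDouble;
import ucar.ma2.DataType;
import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFileWriter;
import ucar.nc2.Variable;

public class DefineThenWriteDemo {
    public static void main(String[] args) throws Exception {
        NetcdfFileWriter writer =
            NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, "out.nc");
        // define mode: declare dimensions, variables, and attributes
        Dimension dTime = writer.addDimension(null, "time", 10);
        Variable vTime = writer.addVariable(null, "time", DataType.DOUBLE, "time");
        writer.addVariableAttribute(vTime, new Attribute("units", "seconds since 1970-01-01"));
        writer.create();  // leave define mode; the file header is written
        // fill an in-memory array, then write it in one call
        ArrayDouble.D1 data = new ArrayDouble.D1(dTime.getLength());
        for (int i = 0; i < data.getSize(); i++) {
            data.set(i, i * 60.0);
        }
        writer.write(vTime, data);  // may throw InvalidRangeException
        writer.close();
    }
}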
Use of ucar.ma2.Array in project SOS by 52North.
The class AbstractNetcdfEncoder, method getLongitudeArray.
protected Array getLongitudeArray(AbstractSensorDataset sensorDataset) throws EncodingException {
if (sensorDataset instanceof StaticLocationDataset) {
StaticLocationDataset locationDataset = (StaticLocationDataset) sensorDataset;
if (locationDataset.getLon() != null) {
// getLon() is assumed to mirror getLat() on StaticLocationDataset
Array array = getArray();
initArrayWithFillValue(array, getNetcdfHelper().getFillValue());
Index index = array.getIndex();
index.set(0);
array.setDouble(index, locationDataset.getLon());
// return the populated longitude array instead of falling through to null
return array;
}
} else {
// TODO support varying lon
throw new EncodingException("Varying longitudes are not yet supported.");
}
return null;
}
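Element access in ucar.ma2 goes through an Index cursor, exactly as in the method above: getIndex() returns a cursor shaped for the array, set(...) positions it, and the typed setters and getters operate at that position. A self-contained illustration using only the classes already seen here; the sample value is arbitrary.
import ucar.ma2.Array;
import ucar.ma2.ArrayDouble;
import ucar.ma2.Index;

public class IndexDemo {
    public static void main(String[] args) {
        Array array = new ArrayDouble.D1(3);   // one-dimensional, three elements
        Index index = array.getIndex();
        index.set(1);                          // position the cursor on element 1
        array.setDouble(index, -122.4);
        System.out.println(array.getDouble(index));  // prints -122.4
    }
}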
Use of ucar.ma2.Array in project ncWMS by Unidata.
The class DataChunk, method readDataChunk.
/**
* Creates a DataChunk by reading from the given variable
*/
public static DataChunk readDataChunk(VariableDS var, RangesList ranges) throws IOException {
final Array arr;
Variable origVar = var.getOriginalVariable();
if (origVar == null) {
// We read from the enhanced variable
arr = readVariable(var, ranges);
} else {
// We read from the original variable to avoid enhancing data
// values that we won't use
arr = readVariable(origVar, ranges);
}
// Decide whether or not we need to enhance any data values we
// read from this array
final boolean needsEnhance;
Set<Enhance> enhanceMode = var.getEnhanceMode();
if (enhanceMode.contains(Enhance.ScaleMissingDefer)) {
// Values read from the array are not enhanced, but need to be
needsEnhance = true;
} else if (enhanceMode.contains(Enhance.ScaleMissing)) {
// We only need to enhance if we read data from the plain Variable
needsEnhance = origVar != null;
} else {
// Values read from the array will not be enhanced
needsEnhance = false;
}
return new DataChunk(var, arr, needsEnhance);
}
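Whether enhancement is needed thus depends on two inputs: the dataset's enhance mode and whether an original (packed) variable is present. A short probe of both, offered as a sketch against the NetCDF-Java 4.x classes used above; "data.nc" and the variable name "temp" are placeholders.
import java.util.Set;
import ucar.nc2.Variable;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.dataset.NetcdfDataset.Enhance;
import ucar.nc2.dataset.VariableDS;

public class EnhanceDemo {
    public static void main(String[] args) throws Exception {
        NetcdfDataset ds = NetcdfDataset.openDataset("data.nc");
        try {
            VariableDS var = (VariableDS) ds.findVariable("temp");
            Set<Enhance> mode = var.getEnhanceMode();   // e.g. ScaleMissing
            Variable orig = var.getOriginalVariable();  // null if none
            // same decision logic as readDataChunk above
            boolean needsEnhance = mode.contains(Enhance.ScaleMissingDefer)
                    || (mode.contains(Enhance.ScaleMissing) && orig != null);
            System.out.println("needs enhance: " + needsEnhance);
        } finally {
            ds.close();
        }
    }
}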