use of org.gridfour.interpolation.InterpolationResult in project gridfour by gwlucastrig.
the class ExtractData method process.
void process(PrintStream ps, String product, String inputPath) throws IOException, InvalidRangeException {
// Open the NetCDF file -----------------------------------
ps.println("Reading data from " + inputPath);
NetcdfFile ncfile = NetcdfFile.open(inputPath);
// Inspect the content of the file ----------------------------
// Start with the high-level metadata elements that describe the
// entire file
ps.println("NetCDF File Type: " + ncfile.getFileTypeDescription());
ps.println("");
ps.println("Global attributes attached to file---------------------------");
List<Attribute> attributes = ncfile.getGlobalAttributes();
for (Attribute a : attributes) {
ps.println(a.toString());
}
// The content of NetCDF files is accessed through the use of the
// NetCDF class named Variable. Get all Variables defined in the
// current file and print a summary of their metadata to the text output.
// The Java NetCDF team implemented good "toString()" methods
// for the Variable class, so printing their metadata is quite easy.
ps.println("");
ps.println("Variables found in file--------------------------------------");
List<Variable> variables = ncfile.getVariables();
for (Variable v : variables) {
ps.println("\n" + v.toString());
}
// Identify which Variable instances carry information about the
// geographic (latitude/longitude) coordinate system and also which
// carry information for elevation and bathymetry.
// In NetCDF, Variable instances are associated
// with arbitrary "name" attributes assigned to the objects
// when the data product is created. We can pull these variables
// out of the NetCDF file using their names. However,
// ETOPO1 and GEBCO_2019 use different Variable names to identify
// their content.
// the Variable that carries row-latitude information
Variable lat;
// the Variable that carries column-longitude information
Variable lon;
// the variable that carries elevation and bathymetry
Variable z;
// will be set to either "ETOPO1" or "GEBCO"
String label;
float[][] palette;
int reportingCount;
if (product.startsWith("ETOP")) {
label = "ETOPO1";
lat = ncfile.findVariable("y");
lon = ncfile.findVariable("x");
z = ncfile.findVariable("z");
palette = ExamplePalettes.paletteETOPO1;
reportingCount = 1000;
} else {
// the product is GEBCO
label = "GEBCO";
lat = ncfile.findVariable("lat");
lon = ncfile.findVariable("lon");
z = ncfile.findVariable("elevation");
palette = ExamplePalettes.paletteGEBCO;
reportingCount = 10000;
}
// using the variables from above, extract coordinate system
// information for the product and print it to the output.
// we will use this data below for computing the volume of the
// world's oceans.
ExtractionCoordinates coords = new ExtractionCoordinates(lat, lon);
coords.summarizeCoordinates(ps);
// Get the dimensions of the raster (grid) elevation/bathymetry data.
//
// NetCDF uses a concept named "rank" to describe the dimensionality of a
// Variable. A variable of rank 1 is essentially a vector (or one
// dimensional array). A variable of rank 2 has rows and columns,
// and is essentially a matrix. Higher rank variables are not uncommon.
//
// In ETOPO1 and GEBCO_2019, the elevation/bathymetry data is given
// as a raster (grid), so the rank will be two (for rows and columns).
// NetCDF data is always given in row-major order.
//
// NetCDF Variables also use the concept of "shape". In this case,
// the "shape" element tells you the number of rows and columns
// in the elevation variable.
// The getShape() method returns an array of integers dimensioned to
// the rank. Since the "z" variable is a raster, the return from
// getShape() will be an array of two values where shape[0] is
// the number of rows in the product and shape[1] is the number of
// columns.
// ETOPO1: 10800 rows and 21600 columns.
// GEBCO 2019: 43200 rows and 86400 columns
// In both these products, the rows are arranged from south-to-north,
// starting near the South Pole (-90 degrees) and ending near
// the North Pole (90 degrees).
// ETOPO1 has a uniform point spacing of 1 minute of arc.
// GEBCO has a uniform point spacing of 15 seconds of arc.
// Thus there are 4 times as many rows and 4 times as many columns
// in GEBCO versus ETOPO1.
int rank = z.getRank();
int[] shape = z.getShape();
int nRows = shape[0];
int nCols = shape[1];
ps.format("Rows: %8d%n", nRows);
ps.format("Columns: %8d%n", nCols);
// The output for this application is an image that is
// 720 pixels wide and 360 pixels high. Since the resolution of
// the two products is much higher than that, we need to compute a
// pixel scale value for down-sampling the source data.
// As we read the data, we will combine blocks of elevation/bathymetry
// values into an average value.
int imageHeight = 360;
int imageWidth = 720;
int pixelScale = nCols / imageWidth;
double[] sampleSum = new double[imageHeight * imageWidth];
ps.format("Down scaling data %d to 1 for image (%d values per pixel)%n", pixelScale, pixelScale * pixelScale);
// we also wish to collect the minimum and maximum values of the data.
double zMin = Double.POSITIVE_INFINITY;
double zMax = Double.NEGATIVE_INFINITY;
// naturally, the source data products contain far too many data values
// for us to read into memory all at once. We could read them one-at-a-time,
// but that would entail a lot of overhead and would slow processing
// considerably. So we read the data one-row-at-a-time.
// That pattern is not always the best for some products, but it works
// quite well for both ETOPO1 and GEBCO 2019.
// The readOrigin variable allows the application to specify where
// it wants to read the data from. The readShape variable tells
// NetCDF how many rows and columns to read.
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
// Finally, we are going to use the input data to estimate both the
// surface area and volume of the world's oceans. This calculation
// is not authoritative, and is performed here only to illustrate
// a potential use of the data. We perform the calculation by
// obtaining the surface area for each "cell" in the input raster
// (surface area per cell will vary by latitude). Then, if the sample
// is less than zero, we treat it as an ocean depth and add the
// computed area and volume to a running sum.
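// (Aside, not part of the original comment: the per-cell area value comes
// from ExtractionCoordinates.getAreaOfEachCellInRow(), whose implementation
// is not shown in this listing. A common way to estimate the area of a
// geographic cell on a sphere, offered only as a sketch and not necessarily
// the formula that class uses, is
//      area = R^2 * dLon * (sin(latNorth) - sin(latSouth))
// where R is the Earth's radius, dLon is the cell width in radians, and
// latNorth/latSouth are the latitudes bounding the cell.)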
double areaSum = 0;
double volumeSum = 0;
double depthSum = 0;
long nDepth = 0;
long time0 = System.nanoTime();
for (int iRow = 0; iRow < nRows; iRow++) {
int imageRow = imageHeight - 1 - iRow / pixelScale;
double areaOfCellsInRow = coords.getAreaOfEachCellInRow(iRow);
if (iRow % reportingCount == 0) {
System.out.println("Processing row " + iRow);
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
Array array = z.read(readOrigin, readShape);
for (int iCol = 0; iCol < nCols; iCol++) {
int imageCol = iCol / pixelScale;
int index = imageRow * imageWidth + imageCol;
double sample = array.getDouble(iCol);
if (sample <= -32767) {
// neither ETOPO1 nor GEBCO contains "no-data" points. However,
// there are some similar products that do. Treat these
// as not-a-number
sample = Float.NaN;
}
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
sampleSum[index] += sample;
if (sample < 0) {
// it's water
areaSum += areaOfCellsInRow;
volumeSum -= areaOfCellsInRow * sample / 1000.0;
depthSum -= sample;
nDepth++;
}
}
}
ncfile.close();
long time1 = System.nanoTime();
double deltaT = (time1 - time0) / 1.0e+9;
ps.format("Time to read input file %7.3f second%n", deltaT);
ps.format("Min Value: %7.3f meters%n", zMin);
ps.format("Max Value: %7.3f meters%n", zMax);
ps.format("Surface area of oceans %20.1f km^2%n", areaSum);
ps.format("Volume of oceans %20.1f km^3%n", volumeSum);
ps.format("Mean depth of oceans %20.1f m%n", depthSum / nDepth);
ps.flush();
int[] argb = new int[imageHeight * imageWidth];
float[] zPixel = new float[sampleSum.length];
double nCellsPerPixel = pixelScale * pixelScale;
for (int i = 0; i < sampleSum.length; i++) {
zPixel[i] = (float) (sampleSum[i] / nCellsPerPixel);
argb[i] = getRgb(palette, zPixel[i]);
}
String outputPath = "Test" + label + ".png";
File outputImage = new File(outputPath);
BufferedImage bImage = new BufferedImage(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
bImage.setRGB(0, 0, imageWidth, imageHeight, argb, 0, imageWidth);
Graphics graphics = bImage.getGraphics();
graphics.setColor(Color.darkGray);
graphics.drawRect(0, 0, imageWidth - 1, imageHeight - 1);
ImageIO.write(bImage, "PNG", outputImage);
// Shaded Relief demo ------------------------------------------------
// Create a shaded-relief demo by using a simple illumination model
// and applying Gridfour's B-Spline interpolator to compute the
// surface normal. The x and y scales that are needed by the interpolator
// are derived using the meters-per-pixel value which is computed from
// the radius of the earth and the number of pixels in the image.
// Because the distance across one degree of longitude decreases
// as the magnitude of the latitude increases, an adjustment is
// made to the xScale factor. Finally, a steepening factor is applied to
// make the shading a bit more pronounced in the image.
//
// Specify the parameters for the illumination source (the "sun")
double ambient = 0.2;
double steepen = 50.0;
double sunAzimuth = Math.toRadians(145);
double sunElevation = Math.toRadians(60);
// create a unit vector pointing at illumination source
double cosA = Math.cos(sunAzimuth);
double sinA = Math.sin(sunAzimuth);
double cosE = Math.cos(sunElevation);
double sinE = Math.sin(sunElevation);
double xSun = cosA * cosE;
double ySun = sinA * cosE;
double zSun = sinE;
double yScale = Math.PI * earthRadiusM / imageHeight;
InterpolatorBSpline bSpline = new InterpolatorBSpline();
InterpolationResult result = new InterpolationResult();
for (int iRow = 0; iRow < imageHeight; iRow++) {
double yRow = iRow + 0.5;
double rowLatitude = 90 - 180.0 * yRow / imageHeight;
double xScale = yScale * Math.cos(Math.toRadians(rowLatitude));
for (int iCol = 0; iCol < imageWidth; iCol++) {
double xCol = iCol + 0.5;
bSpline.interpolate(yRow, xCol, imageHeight, imageWidth, zPixel, yScale, xScale, InterpolationTarget.FirstDerivatives, result);
// double z = result.z; not used, included for documentation
double nx = -result.zx * steepen;
double ny = result.zy * steepen;
double s = Math.sqrt(nx * nx + ny * ny + 1);
nx /= s;
ny /= s;
double nz = 1 / s;
double dot = nx * xSun + ny * ySun + nz * zSun;
double shade = ambient;
if (dot > 0) {
shade = dot * (1 - ambient) + ambient;
}
int index = iRow * imageWidth + iCol;
argb[index] = getRgb(palette, zPixel[index], shade);
}
}
outputPath = "ShadedRelief_" + label + ".png";
outputImage = new File(outputPath);
bImage = new BufferedImage(imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB);
bImage.setRGB(0, 0, imageWidth, imageHeight, argb, 0, imageWidth);
graphics = bImage.getGraphics();
graphics.setColor(Color.darkGray);
graphics.drawRect(0, 0, imageWidth - 1, imageHeight - 1);
ImageIO.write(bImage, "PNG", outputImage);
}
use of org.gridfour.interpolation.InterpolationResult in project gridfour by gwlucastrig.
the class GvrsInterpolatorBSpline method zNormal.
/**
* Interpolates a value at the indicated position and also computes the unit
* surface normal vector. Intended for rendering purposes.
* <p>
* The result is returned as an InterpolationResult giving the interpolated z
* value and the derivatives from which the unit surface normal can be obtained.
*
* @param x the x coordinate for interpolation
* @param y the y coordinate for interpolation
* @return a valid InterpolationResult; if successful, populated with
* floating-point values; otherwise, populated with NaN.
* @throws IOException in the event of an IO error
*/
public InterpolationResult zNormal(double x, double y) throws IOException {
GridPoint g;
double dx = du;
double dy = dv;
if (geoCoordinates) {
g = gvrs.mapGeographicToGridPoint(y, x);
double s = Math.cos(Math.toRadians(y));
dx = s * du;
if (dx < 1) {
dx = 1;
}
} else {
g = gvrs.mapModelToGridPoint(x, y);
}
double row = g.getRow();
double col = g.getColumn();
float[] z = loadSamples(row, col);
if (z == null) {
InterpolationResult result = new InterpolationResult();
result.nullify();
return result;
}
return bSpline.interpolate(1.0 + v, 1.0 + u, 4, 4, z, dy, dx, InterpolationTarget.FirstDerivatives, null);
}
use of org.gridfour.interpolation.InterpolationResult in project gridfour by gwlucastrig.
the class DemoCOG method process.
/**
* Process the specified elevation GeoTIFF.
*
* @param args a set of command-line options for processing
* @param ps a valid instance for printing status and metadata elements.
* @throws IOException in the event of an unrecoverable I/O error
* @throws ImageReadException in the event of an internal format error in
* reading a GeoTIFF image.
*/
void process(String[] args, PrintStream ps) throws IOException, ImageReadException {
TestOptions options = new TestOptions();
options.argumentScan(args);
File target = options.getInputFile();
File outputFile;
String name = target.getName();
int iExt = name.lastIndexOf('.');
String rootName = name.substring(0, iExt);
String outputName = rootName + "_FullRez.jpg";
File parent = target.getParentFile();
outputFile = new File(parent, outputName);
ps.format("Input: %s%n", target.getPath());
ps.format("Output: %s%n", outputFile.getPath());
boolean[] matched = new boolean[args.length];
boolean ocean = options.scanBooleanOption(args, "-ocean", matched, false);
int reductionFactor = options.scanIntOption(args, "-reduction", matched, 12);
boolean scaleBar = options.scanBooleanOption(args, "-scaleBar", matched, true);
int waterARGB = 0xff85b0cd;
Color waterColor = new Color(waterARGB);
ByteSourceFile byteSource = new ByteSourceFile(target);
// Establish a TiffReader. This is just a simple constructor that
// does not actually access the file. So the application cannot
// obtain the byteOrder, or other details, until the contents have
// been read. Then read the directories associated with the
// file by passing in the byte source and options.
TiffReader tiffReader = new TiffReader(true);
// Read the directories in the TIFF file. Directories are the
// main data element of a TIFF file. They usually include an image
// element, but sometimes just carry metadata. This example
// reads all the directories in the file. Typically reading
// the directories is not a time-consuming operation.
TiffContents contents = tiffReader.readDirectories(byteSource,
true, // indicates that the application should read image data, if present
FormatCompliance.getDefault());
// Read the first Image File Directory (IFD) in the file. A practical
// implementation could use any of the directories in the file.
// This demo uses the first one just for simplicity.
TiffDirectory directory = contents.directories.get(0);
// Verify that the directory contains floating-point raster data.
if (!directory.hasTiffFloatingPointRasterData()) {
ps.println("Specified directory does not contain floating-point data");
System.exit(-1);
}
// Extract and print the mandatory GeoKeyDirectoryTag. If this tag is
// not present, the image is not a GeoTIFF. The "true" flag on
// the access routine indicates that the tag is mandatory and
// will cause the API to throw an ImageReadException if it is not there.
ps.println("");
ps.println("Extracting the GeoKeyDiretoryTag");
short[] geoKeyDirectory = directory.getFieldValue(GeoTiffTagConstants.EXIF_TAG_GEO_KEY_DIRECTORY_TAG, true);
// All GeoKey elements are unsigned shorts (2 bytes), some of which exceed
// the value 32767 (the maximum value of a signed short).
// Since Java does not support an unsigned short type, we mask off the
// low-order 2 bytes and store each value in a 4-byte integer.
int[] geoKey = new int[geoKeyDirectory.length];
for (int i = 0; i < geoKeyDirectory.length; i++) {
geoKey[i] = geoKeyDirectory[i] & 0xffff;
}
// print the table for reference.
ps.println(" key ref len value/pos");
int k = 0;
int n = geoKeyDirectory.length / 4;
for (int i = 0; i < n; i++) {
int ref = geoKey[k + 1];
String label = "";
if (ref == GeoTiffTagConstants.EXIF_TAG_GEO_ASCII_PARAMS_TAG.tag) {
label = "ASCII";
} else if (ref == GeoTiffTagConstants.EXIF_TAG_GEO_DOUBLE_PARAMS_TAG.tag) {
label = "Double";
}
for (int j = 0; j < 4; j++) {
ps.format("%8d", geoKey[k++]);
}
if (label.isEmpty()) {
ps.format("%n");
} else {
ps.format(" %s%n", label);
}
}
// find the raster geometry type specification
// which is indicated by a GeoKey of 1025
// and the GeogAngularUnitsKey, which is 2054
String rasterInterpretation = "Unspecified";
String unitOfMeasure = "Unspecified";
for (int i = 0; i < geoKey.length; i += 4) {
if (geoKey[i] == 1025) {
int rasterInterpretationCode = geoKey[i + 3];
if (rasterInterpretationCode == 1) {
rasterInterpretation = "PixelIsArea";
} else {
rasterInterpretation = "PixelIsPoint";
}
} else if (geoKey[i] == 2054) {
int unitOfMeasureCode = geoKey[i + 3];
if (unitOfMeasureCode == 9102) {
unitOfMeasure = "Angular Degrees";
} else {
unitOfMeasure = "Unsupported code " + unitOfMeasureCode;
}
}
}
ps.println("Elements from GeoKeyDirectoryTag");
ps.println(" Raster Interpretation(1025): " + rasterInterpretation);
ps.println(" Unit of Measure(2054): " + unitOfMeasure);
TiffField modelTiepoint = directory.findField(GeoTiffTagConstants.EXIF_TAG_MODEL_TIEPOINT_TAG);
double[] tiePoints = modelTiepoint.getDoubleArrayValue();
ps.println("");
ps.println("ModelTiepointTag");
for (int i = 0; i < tiePoints.length; i++) {
ps.format(" %2d. %19.9f%n", i, tiePoints[i]);
}
// Note: The y coordinate of the model pixel scale is backwards.
// Cloud Optimized GeoTIFF images are stored
// from upper-left corner downward (following the convention
// of virtually all graphics standards), so the rows in the
// raster run from north to south. Thus, one might expect that
// the delta-Y between rows is a negative number. But by the
// GeoTIFF standard, the vertical spacing is given as a positive
// number.
TiffField pixelScale = directory.findField(GeoTiffTagConstants.EXIF_TAG_MODEL_PIXEL_SCALE_TAG);
double[] pixScale = pixelScale.getDoubleArrayValue();
ps.println("ModelPixelScaleTag");
for (int i = 0; i < pixScale.length; i++) {
ps.format(" %2d. %15.10e%n", i, pixScale[i]);
}
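// (Aside: extractTransforms(), called below, is not shown in this listing.
// The standard GeoTIFF relationship between raster and model coordinates,
// given a single tie point and the pixel scale, is roughly
//      lon = tiePoints[3] + (pixelX - tiePoints[0]) * pixScale[0]
//      lat = tiePoints[4] - (pixelY - tiePoints[1]) * pixScale[1]
// Note the minus sign on the latitude term, which accounts for the positive
// vertical spacing described above. This is offered as a sketch of the usual
// convention, not necessarily the exact code in extractTransforms().)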
TiffField imageWidth = directory.findField(TiffTagConstants.TIFF_TAG_IMAGE_WIDTH);
TiffField imageLength = directory.findField(TiffTagConstants.TIFF_TAG_IMAGE_LENGTH);
int w = imageWidth.getIntValue();
// by "length", they mean height
int h = imageLength.getIntValue();
AffineTransform[] a = extractTransforms(directory);
AffineTransform pix2Geo = a[0];
AffineTransform geo2Pix = a[1];
double[] c = new double[12];
// adjust corner coordinates for 6 row/column overlap
c[0] = 6;
c[1] = 6;
c[2] = w - 6;
c[3] = h - 6;
pix2Geo.transform(c, 0, c, 4, 2);
// Northwest corner
double lat0 = c[5];
double lon0 = c[4];
double lat1 = c[7];
double lon1 = c[6];
ps.println("Geographic bounds (excluding overlap)");
ps.format(" Northwest: %12.6f %12.6f%n", lat0, lon0);
ps.format(" Southeast: %12.6f %12.6f%n", lat1, lon1);
// Now get the true bounds using the full raster extent
// (no adjustment for the row/column overlap this time).
c[0] = 0;
c[1] = 0;
c[2] = w;
c[3] = h;
pix2Geo.transform(c, 0, c, 4, 2);
// Northwest corner
lat0 = c[5];
lon0 = c[4];
lat1 = c[7];
lon1 = c[6];
Rectangle2D geoBounds = new Rectangle2D.Double(lon0, lat1, lon1 - lon0, lat0 - lat1);
// The Natural Earth Land Cover data set is divided into two files:
// one for the west half of the conterminous US and one for the east.
// The two files overlap slightly, and the division is at 96 degrees West.
double midLon = (lon0 + lon1) / 2.0;
String lcFileName;
if (midLon < -96) {
lcFileName = "W_CONUS_100m_NE_LC.tif";
} else {
lcFileName = "E_CONUS_100m_NE_LC.tif";
}
LandCoverTints landCoverTints = new LandCoverTints(new File(".\\NaturalEarth\\CONUS_100m_NE_LC\\" + lcFileName), lon0, lon1, lat0, lat1);
landCoverTints.extendNearShoreColors(100);
ps.println("Reading data from file");
long time0 = System.nanoTime();
HashMap<String, Object> params = new HashMap<>();
TiffRasterData rasterData = directory.getFloatingPointRasterData(params);
long time1 = System.nanoTime();
ps.println("Data read in " + ((time1 - time0) / 1.0e+6) + " ms");
// Compute the scale in meters using the Earth's mean radius.
// The scales give the Earth-surface distances between rows and
// columns of raster cells. They will be used in the interpolation
// logic below to get an accurate estimate of the slope (and
// thus, the surface normal) at each pixel location in the output
// image.
// The xScale is adjusted for the convergence of the meridians
// at the center latitude of the raster data set.
double cenLat = (lat1 + lat0) / 2.0;
double cosCenLat = Math.cos(Math.toRadians(cenLat));
double yScale = pixScale[1] * (Math.PI / 180) * EARTH_MEAN_RADIUS;
double xScale = pixScale[0] * (Math.PI / 180) * EARTH_MEAN_RADIUS * cosCenLat;
// Compute the adjusted width for the output image
int adjustedWidth = (int) (w * cosCenLat + 0.5);
int adjustedHeight = h;
// compute a transform that will convert geographic coordinates
// to the pixel coordinates in the adjusted (narrow) image size
// This transform will be used for rendering the optional shapefiles.
AffineTransform geoToAdjusted;
geoToAdjusted = AffineTransform.getScaleInstance(cosCenLat, 1);
geoToAdjusted.concatenate(geo2Pix);
// Specify the parameters for the illumination source (the "sun")
double ambient = 0.2;
double steepen = 5.0;
double sunAzimuth = Math.toRadians(145);
double sunElevation = Math.toRadians(60);
// create a unit vector pointing at illumination source
double cosA = Math.cos(sunAzimuth);
double sinA = Math.sin(sunAzimuth);
double cosE = Math.cos(sunElevation);
double sinE = Math.sin(sunElevation);
double xSun = cosA * cosE;
double ySun = sinA * cosE;
double zSun = sinE;
// GeoTIFF goes from north to south, so I had to negate the
// usual -result.zy component in order to get the b-spline
// harmonized with the pixel coordinates.
InterpolatorBSpline bSpline = new InterpolatorBSpline();
float[] f = rasterData.getData();
int[] argb = new int[adjustedHeight * adjustedWidth];
InterpolationResult result = new InterpolationResult();
int index = 0;
// loop on the rows and columns of the adjusted-size image.
// for each row and column, compute xCol and yRow, which are the
// corresponding coordinates in the original elevation raster.
// To obtain an elevation (z) value for the adjusted-size image,
// we need to perform an interpolation into the elevations from the
// source data. As a bonus, the interpolation routine also
// provides derivatives (slopes) in the direction of the x and y
// axis.
ps.println("Begin rendering");
time0 = System.nanoTime();
for (int iRow = 0; iRow < adjustedHeight; iRow++) {
double yRow = iRow + 0.5;
for (int iCol = 0; iCol < adjustedWidth; iCol++) {
double xCol = iCol / cosCenLat + 0.5;
bSpline.interpolate(yRow, xCol, h, w, f, yScale, xScale, InterpolationTarget.FirstDerivatives, result);
// double z = result.z; not used, included for documentation
double nx = -result.zx * steepen;
double ny = result.zy * steepen;
double s = Math.sqrt(nx * nx + ny * ny + 1);
nx /= s;
ny /= s;
double nz = 1 / s;
double dot = nx * xSun + ny * ySun + nz * zSun;
double shade = ambient;
if (dot > 0) {
shade = dot * (1 - ambient) + ambient;
}
if (ocean && result.z <= 0) {
argb[index] = waterARGB;
} else {
// Perform a lookup into the land-cover tints image.
// Convert the source image coordinates to geographic
// coordinates and call the associated land-cover tints.
c[0] = xCol;
c[1] = yRow;
pix2Geo.transform(c, 0, c, 2, 1);
argb[index] = landCoverTints.getPixelValueUsingCieLab(c[2], c[3], shade);
// the following was used for monochrome images
// argb[index] = getRgb(shade);
}
index++;
}
}
time1 = System.nanoTime();
ps.println("Shaded relief rendering completed in " + ((time1 - time0) / 1.0e+6) + " ms");
BufferedImage bImage = new BufferedImage(adjustedWidth, adjustedHeight, BufferedImage.TYPE_INT_RGB);
bImage.setRGB(0, 0, adjustedWidth, adjustedHeight, argb, 0, adjustedWidth);
Graphics2D graphics = bImage.createGraphics();
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
BasicStroke thinStroke = new BasicStroke(1.0f);
BasicStroke thickStroke = new BasicStroke(4.0f);
BasicStroke veryThickStroke = new BasicStroke(18.0f);
File shapefile = new File(".\\NationalAtlas\\water_bodies\\wtrbdyp010.shp");
drawShape(ps, shapefile, graphics, waterColor, thinStroke, geoBounds, geoToAdjusted, true);
shapefile = new File(".\\NationalAtlas\\streams\\streaml010.shp");
drawShape(ps, shapefile, graphics, waterColor, thickStroke, geoBounds, geoToAdjusted, true);
shapefile = new File(".\\NaturalEarth\\ne_10m_admin_1_states_provinces_lines.shp");
drawShape(ps, shapefile, graphics, Color.black, veryThickStroke, geoBounds, geoToAdjusted, false);
// Get the updated argb (pixel) values based on whatever changes
// may have been introduced by the shapefile rendering.
bImage.getRGB(0, 0, adjustedWidth, adjustedHeight, argb, 0, adjustedWidth);
graphics.setStroke(new BasicStroke(2.0f));
graphics.setColor(Color.darkGray);
graphics.drawRect(0, 0, adjustedWidth - 2, adjustedHeight - 2);
ps.println("Writing image to " + outputFile);
ImageIO.write(bImage, "JPEG", outputFile);
double mPerPix = reductionFactor * yScale;
BufferedImage sImage = makeReducedImage(adjustedWidth, adjustedHeight, argb, reductionFactor);
if (scaleBar) {
BufferedImage rImage = new BufferedImage(sImage.getWidth(), sImage.getHeight() + 80, BufferedImage.TYPE_INT_RGB);
graphics = rImage.createGraphics();
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
graphics.setColor(Color.white);
graphics.fillRect(0, 0, rImage.getWidth() + 1, rImage.getHeight() + 1);
graphics.drawImage(sImage, 0, 0, null);
int yOffset = rImage.getHeight() - 55;
drawScale(graphics, yOffset, rImage.getWidth(), 420, mPerPix);
labelSource(graphics, yOffset, name);
graphics.setStroke(new BasicStroke(2.0f));
graphics.setColor(Color.darkGray);
graphics.drawRect(0, 0, sImage.getWidth() - 2, sImage.getHeight() - 2);
graphics.drawRect(0, 0, rImage.getWidth() - 2, rImage.getHeight() - 2);
sImage = rImage;
} else {
graphics = sImage.createGraphics();
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
graphics.setStroke(new BasicStroke(2.0f));
graphics.setColor(Color.darkGray);
graphics.drawRect(0, 0, sImage.getWidth() - 2, sImage.getHeight() - 2);
}
outputName = rootName + "_Reduced.jpg";
parent = target.getParentFile();
outputFile = new File(parent, outputName);
ps.println("Writing image to " + outputFile);
ImageIO.write(sImage, "JPEG", outputFile);
}
use of org.gridfour.interpolation.InterpolationResult in project gridfour by gwlucastrig.
the class InterpolationBSplineTest method testInterpolationAll.
/**
* Test of interpolationAll method.
*/
@Test
public void testInterpolationAll() {
InterpolatorBSpline interp = new InterpolatorBSpline();
InterpolationResult result = new InterpolationResult();
assertTrue(interp.isInterpolationTargetSupported(InterpolationTarget.SecondDerivatives), "Implementation indicates that second derivatives are not supported");
float[] z = new float[121];
int k = 0;
for (int i = 0; i <= 10; i++) {
for (int j = 0; j <= 10; j++) {
double x = j / 10.0;
double y = i / 10.0;
z[k++] = (float) f(x, y);
}
}
// The tolerances used in the assertions below are a bit larger than absolutely needed.
for (double row = 0; row <= 10; row += 0.25) {
for (double col = 0; col <= 10; col += 0.25) {
double x = col / 10.0;
double y = row / 10.0;
result = interp.interpolate(row, col, 11, 11, z, 0.1, 0.1, InterpolationTarget.SecondDerivatives, result);
assertEquals(result.row, row, 0, "Row not transcribed to result");
assertEquals(result.column, col, 0, "Column not transcribed to result");
assertTrue(result.firstDerivativesSet, "First derivative flag is not set to true");
assertTrue(result.secondDerivativesSet, "Second derivative flag is not set to true");
assertEquals(result.z, f(x, y), 3.0e-2, "Interpolated z out of bounds");
assertEquals(result.zx, fx(x, y), 2.0e-2, "Interpolated dz/dx out of bounds");
assertEquals(result.zy, fy(x, y), 2.0e-2, "Interpolated dz/dy out of bounds");
assertEquals(result.zxx, fxx(x, y), 1.0e-4, "Interpolated d(dz/dx)/dx out of bounds");
assertEquals(result.zyy, fyy(x, y), 1.0e-4, "Interpolated d(dz/dy)/dy out of bounds");
assertEquals(result.zxy, fxy(x, y), 1.0e-4, "Interpolated d(dz/dx)/dy out of bounds");
double[] n = result.getUnitNormal();
assertNotNull(n, "Computed normal is null");
double m = n[0] * n[0] + n[1] * n[1] + n[2] * n[2];
assertEquals(m, 1.0, 1.0e-6, "Computed normal is not a unit vector");
}
}
}
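Taken together, the examples above follow one pattern for using InterpolationResult: construct an InterpolatorBSpline, call interpolate() with InterpolationTarget.FirstDerivatives, and use the partial derivatives zx and zy to build a surface normal for shading. The fragment below distills that pattern into a minimal, self-contained sketch. It is not taken from the gridfour demos: the grid contents, grid size, spacing values, and lighting angles are placeholders, and the import paths for InterpolatorBSpline and InterpolationTarget are assumed to match the package shown for InterpolationResult.

import org.gridfour.interpolation.InterpolationResult;
import org.gridfour.interpolation.InterpolationTarget;
import org.gridfour.interpolation.InterpolatorBSpline;

public class InterpolationResultSketch {
    public static void main(String[] args) {
        int nRows = 4;                            // placeholder grid dimensions
        int nCols = 4;
        float[] grid = new float[nRows * nCols];  // placeholder samples, row-major order
        for (int i = 0; i < grid.length; i++) {
            grid[i] = i;                          // arbitrary values for demonstration
        }
        // unit vector toward the illumination source (azimuth 145, elevation 60 degrees)
        double sunAzimuth = Math.toRadians(145);
        double sunElevation = Math.toRadians(60);
        double xSun = Math.cos(sunAzimuth) * Math.cos(sunElevation);
        double ySun = Math.sin(sunAzimuth) * Math.cos(sunElevation);
        double zSun = Math.sin(sunElevation);

        InterpolatorBSpline bSpline = new InterpolatorBSpline();
        InterpolationResult result = new InterpolationResult();
        // interpolate a value and its first derivatives at a point interior to the grid;
        // the argument order (row, column, rows, columns, data, yScale, xScale, target, result)
        // follows the calls shown in the examples above
        bSpline.interpolate(1.5, 1.5, nRows, nCols, grid, 1.0, 1.0,
            InterpolationTarget.FirstDerivatives, result);

        // build a unit surface normal from the partial derivatives, as in the demos above
        double nx = -result.zx;
        double ny = result.zy;
        double s = Math.sqrt(nx * nx + ny * ny + 1);
        double dot = (nx / s) * xSun + (ny / s) * ySun + (1.0 / s) * zSun;
        double ambient = 0.2;
        double shade = dot > 0 ? dot * (1 - ambient) + ambient : ambient;
        System.out.format("z=%f  shade=%f%n", result.z, shade);
    }
}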