use of javax.media.jai.iterator.WritableRandomIter in project hortonmachine by TheHortonMachine.
the class OmsShalstab method qcrit.
/**
* Calculates the critical rainfall (qcrit) and the stability class in every pixel of the map.
*/
private void qcrit(RenderedImage slope, RenderedImage ab, RandomIter trasmissivityRI, RandomIter frictionRI, RandomIter cohesionRI, RandomIter hsIter, RandomIter effectiveRI, RandomIter densityRI) {
HashMap<String, Double> regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inSlope);
int cols = regionMap.get(CoverageUtilities.COLS).intValue();
int rows = regionMap.get(CoverageUtilities.ROWS).intValue();
RandomIter slopeRI = RandomIterFactory.create(slope, null);
RandomIter abRI = RandomIterFactory.create(ab, null);
WritableRaster qcritWR = CoverageUtilities.createWritableRaster(cols, rows, null, null, null);
WritableRandomIter qcritIter = RandomIterFactory.createWritable(qcritWR, null);
WritableRaster classiWR = CoverageUtilities.createWritableRaster(cols, rows, null, null, null);
WritableRandomIter classiIter = RandomIterFactory.createWritable(classiWR, null);
pm.beginTask("Creating qcrit map...", rows);
for (int j = 0; j < rows; j++) {
pm.worked(1);
for (int i = 0; i < cols; i++) {
double slopeValue = slopeRI.getSampleDouble(i, j, 0);
double tanPhiValue = frictionRI.getSampleDouble(i, j, 0);
double cohValue = cohesionRI.getSampleDouble(i, j, 0);
double rhoValue = densityRI.getSampleDouble(i, j, 0);
double hsValue = hsIter.getSampleDouble(i, j, 0);
if (!isNovalue(slopeValue) && !isNovalue(tanPhiValue) && !isNovalue(cohValue) && !isNovalue(rhoValue) && !isNovalue(hsValue)) {
if (hsValue <= EPS || slopeValue > pRock) {
qcritIter.setSample(i, j, 0, ROCK);
} else {
double checkUnstable = tanPhiValue + cohValue / (9810.0 * rhoValue * hsValue) * (1 + pow(slopeValue, 2));
if (slopeValue >= checkUnstable) {
/*
* uncond unstable
*/
qcritIter.setSample(i, j, 0, 5);
} else {
double checkStable = tanPhiValue * (1 - 1 / rhoValue) + cohValue / (9810 * rhoValue * hsValue) * (1 + pow(slopeValue, 2));
if (slopeValue < checkStable) {
/*
* uncond. stable
*/
qcritIter.setSample(i, j, 0, 0);
} else {
double qCrit = trasmissivityRI.getSampleDouble(i, j, 0) * sin(atan(slopeValue)) / abRI.getSampleDouble(i, j, 0) * rhoValue * (1 - slopeValue / tanPhiValue + cohValue / (9810 * rhoValue * hsValue * tanPhiValue) * (1 + pow(slopeValue, 2))) * 1000;
qcritIter.setSample(i, j, 0, qCrit);
/*
* see the Qcrit (critical effective
* precipitation) that leads the slope to
* instability (Montgomery et al.,
* Hydrological Processes, 12, 943-955, 1998)
*/
double value = qcritIter.getSampleDouble(i, j, 0);
if (value > 0 && value < 50)
qcritIter.setSample(i, j, 0, 1);
if (value >= 50 && value < 100)
qcritIter.setSample(i, j, 0, 2);
if (value >= 100 && value < 200)
qcritIter.setSample(i, j, 0, 3);
if (value >= 200)
qcritIter.setSample(i, j, 0, 4);
}
}
}
} else {
qcritIter.setSample(i, j, 0, doubleNovalue);
}
}
}
pm.done();
/*
* build the class matrix: 1=unconditionally unstable, 2=unconditionally stable,
* 3=stable, 4=unstable, ROCK=presence of rock
*/
pm.beginTask("Creating stability map...", rows);
double Tq = 0;
for (int j = 0; j < rows; j++) {
pm.worked(1);
for (int i = 0; i < cols; i++) {
Tq = trasmissivityRI.getSampleDouble(i, j, 0) / (effectiveRI.getSampleDouble(i, j, 0) / 1000.0);
double slopeValue = slopeRI.getSampleDouble(i, j, 0);
double abValue = abRI.getSampleDouble(i, j, 0);
double tangPhiValue = frictionRI.getSampleDouble(i, j, 0);
double cohValue = cohesionRI.getSampleDouble(i, j, 0);
double rhoValue = densityRI.getSampleDouble(i, j, 0);
double hsValue = hsIter.getSampleDouble(i, j, 0);
if (!isNovalue(slopeValue) && !isNovalue(abValue) && !isNovalue(tangPhiValue) && !isNovalue(cohValue) && !isNovalue(rhoValue) && !isNovalue(hsValue)) {
if (hsValue <= EPS || slopeValue > pRock) {
classiIter.setSample(i, j, 0, ROCK);
} else {
double checkUncondUnstable = tangPhiValue + cohValue / (9810 * rhoValue * hsValue) * (1 + pow(slopeValue, 2));
double checkUncondStable = tangPhiValue * (1 - 1 / rhoValue) + cohValue / (9810 * rhoValue * hsValue) * (1 + pow(slopeValue, 2));
double checkStable = Tq * sin(atan(slopeValue)) * rhoValue * (1 - slopeValue / tangPhiValue + cohValue / (9810 * rhoValue * hsValue * tangPhiValue) * (1 + pow(slopeValue, 2)));
if (slopeValue >= checkUncondUnstable) {
classiIter.setSample(i, j, 0, 1);
} else if (slopeValue < checkUncondStable) {
classiIter.setSample(i, j, 0, 2);
} else if (abValue < checkStable && classiIter.getSampleDouble(i, j, 0) != 1 && classiIter.getSampleDouble(i, j, 0) != 2) {
classiIter.setSample(i, j, 0, 3);
} else {
classiIter.setSample(i, j, 0, 4);
}
}
} else {
classiIter.setSample(i, j, 0, doubleNovalue);
}
}
}
pm.done();
outQcrit = CoverageUtilities.buildCoverage("qcrit", qcritWR, regionMap, inSlope.getCoordinateReferenceSystem());
outShalstab = CoverageUtilities.buildCoverage("classi", classiWR, regionMap, inSlope.getCoordinateReferenceSystem());
}
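For reference, the per-pixel decision logic of the loops above can be condensed into a small standalone sketch. This is only an illustrative reading of the code, not part of the project: the class and parameter names are hypothetical, slope stands for tan(beta), tanPhi for the tangent of the friction angle, coh for the effective cohesion, rhoRatio for the soil/water density ratio, hs for the soil thickness in metres, trasmissivity for the soil transmissivity and ab for the contributing area per unit contour length, mirroring the values read from the iterators.

public class ShalstabPixelSketch {

    static final double UNCOND_UNSTABLE = 5.0; // same marker value written into the qcrit map above
    static final double UNCOND_STABLE = 0.0;

    /**
     * Returns the critical rainfall for one pixel, or one of the marker values when the
     * pixel is unconditionally unstable/stable. 9810 is roughly g * rho_water in N/m3.
     */
    static double qcrit(double slope, double tanPhi, double coh, double rhoRatio,
            double hs, double trasmissivity, double ab) {
        double cohesionTerm = coh / (9810.0 * rhoRatio * hs) * (1 + slope * slope);
        if (slope >= tanPhi + cohesionTerm) {
            return UNCOND_UNSTABLE; // fails even without any rainfall
        }
        if (slope < tanPhi * (1 - 1 / rhoRatio) + cohesionTerm) {
            return UNCOND_STABLE; // stable even when fully saturated
        }
        // critical effective rainfall, the same expression used in the loop above
        return trasmissivity * Math.sin(Math.atan(slope)) / ab * rhoRatio
                * (1 - slope / tanPhi + cohesionTerm / tanPhi) * 1000;
    }
}

In the method itself the continuous qcrit value is afterwards binned into the classes 1 to 4 using the 0, 50, 100 and 200 thresholds shown above.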
use of javax.media.jai.iterator.WritableRandomIter in project hortonmachine by TheHortonMachine.
the class CoverageUtilities method coverageValuesMapper.
/**
* Maps the values of a map (valuesMap) onto the valid pixels of a second map (maskMap).
*
* @param valuesMap the map holding the values that are needed in the resulting map.
* @param maskMap the map to use as mask for the values.
* @return the map containing the values of the valuesMap, but only in the places in which the maskMap is valid.
*/
public static GridCoverage2D coverageValuesMapper(GridCoverage2D valuesMap, GridCoverage2D maskMap) {
RegionMap valuesRegionMap = getRegionParamsFromGridCoverage(valuesMap);
int cs = valuesRegionMap.getCols();
int rs = valuesRegionMap.getRows();
RegionMap maskRegionMap = getRegionParamsFromGridCoverage(maskMap);
int tmpcs = maskRegionMap.getCols();
int tmprs = maskRegionMap.getRows();
if (cs != tmpcs || rs != tmprs) {
throw new IllegalArgumentException("The raster maps have to be of equal size to be mapped.");
}
RandomIter valuesIter = RandomIterFactory.create(valuesMap.getRenderedImage(), null);
RandomIter maskIter = RandomIterFactory.create(maskMap.getRenderedImage(), null);
WritableRaster writableRaster = createWritableRaster(cs, rs, null, null, HMConstants.doubleNovalue);
WritableRandomIter outIter = RandomIterFactory.createWritable(writableRaster, null);
for (int r = 0; r < rs; r++) {
for (int c = 0; c < cs; c++) {
if (!isNovalue(maskIter.getSampleDouble(c, r, 0))) {
// if not nv, put the value from the valueMap in the new map
double value = valuesIter.getSampleDouble(c, r, 0);
if (!isNovalue(value))
outIter.setSample(c, r, 0, value);
}
}
}
GridCoverage2D outCoverage = buildCoverage("mapped", writableRaster, maskRegionMap, valuesMap.getCoordinateReferenceSystem()); // $NON-NLS-1$
return outCoverage;
}
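A minimal usage sketch, assuming two GridCoverage2D maps of identical size are already loaded (the variable names are placeholders):

// keep the values of valuesCoverage only where maskCoverage holds valid data;
// everywhere else the output stays at doubleNovalue
GridCoverage2D masked = CoverageUtilities.coverageValuesMapper(valuesCoverage, maskCoverage);

Note that the output grid is built from the mask map's region parameters while the coordinate reference system is taken from the values map, as the buildCoverage call above shows.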
use of javax.media.jai.iterator.WritableRandomIter in project hortonmachine by TheHortonMachine.
the class CoverageUtilities method invert.
public static GridCoverage2D invert(GridCoverage2D raster, double max) {
RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(raster);
int nCols = regionMap.getCols();
int nRows = regionMap.getRows();
RandomIter rasterIter = CoverageUtilities.getRandomIterator(raster);
WritableRaster outWR = CoverageUtilities.createWritableRaster(nCols, nRows, null, null, doubleNovalue);
WritableRandomIter outIter = RandomIterFactory.createWritable(outWR, null);
for (int r = 0; r < nRows; r++) {
for (int c = 0; c < nCols; c++) {
double value = rasterIter.getSampleDouble(c, r, 0);
if (!isNovalue(value)) {
outIter.setSample(c, r, 0, max - value);
}
}
}
GridCoverage2D invertedRaster = CoverageUtilities.buildCoverage("inverted", outWR, regionMap, raster.getCoordinateReferenceSystem());
return invertedRaster;
}
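A minimal usage sketch, assuming the maximum of the raster is already known (the variable names and the value 1000.0 are placeholders):

double knownMax = 1000.0; // assumption: the maximum value found in distanceRaster
GridCoverage2D inverted = CoverageUtilities.invert(distanceRaster, knownMax);
// cells holding knownMax become 0, cells holding 0 become knownMax, novalue cells stay novalue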
use of javax.media.jai.iterator.WritableRandomIter in project hortonmachine by TheHortonMachine.
the class OmsBasinShape method process.
@Execute
public void process() throws Exception {
if (!concatOr(outBasins == null, doReset)) {
return;
}
checkNull(inBasins);
HashMap<String, Double> regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inBasins);
nCols = regionMap.get(CoverageUtilities.COLS).intValue();
nRows = regionMap.get(CoverageUtilities.ROWS).intValue();
// double xRes = regionMap.get(CoverageUtilities.XRES);
// double yRes = regionMap.get(CoverageUtilities.YRES);
RenderedImage basinsRI = inBasins.getRenderedImage();
RenderedImage pitRI = null;
if (inElev != null)
pitRI = inElev.getRenderedImage();
int[] nstream = new int[1];
// nstream[0] = 1508;
WritableRaster basinsWR = CoverageUtilities.renderedImage2WritableRaster(basinsRI, true);
RandomIter basinsRandomIter = RandomIterFactory.create(basinsWR, null);
for (int j = 0; j < nRows; j++) {
for (int i = 0; i < nCols; i++) {
if (!isNovalue(basinsRandomIter.getSampleDouble(i, j, 0)) && basinsRandomIter.getSampleDouble(i, j, 0) > (double) nstream[0]) {
nstream[0] = (int) basinsRandomIter.getSampleDouble(i, j, 0);
}
}
}
WritableRaster subbasinsWR = CoverageUtilities.createWritableRaster(basinsRI.getWidth(), basinsRI.getHeight(), null, basinsRI.getSampleModel(), doubleNovalue);
// create the feature type
SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
// set the name
b.setName("basinshape"); // $NON-NLS-1$
// add a geometry property
String defaultGeometryName = "the_geom"; // $NON-NLS-1$
b.setCRS(inBasins.getCoordinateReferenceSystem());
b.add(defaultGeometryName, MultiPolygon.class);
// add some properties
b.add("area", Float.class); // $NON-NLS-1$
b.add("perimeter", Float.class); // $NON-NLS-1$
b.add(NetworkChannel.NETNUMNAME, Integer.class); // $NON-NLS-1$
b.add("maxelev", Float.class); // $NON-NLS-1$
b.add("minelev", Float.class); // $NON-NLS-1$
b.add("avgelev", Float.class); // $NON-NLS-1$
b.add(NetworkChannel.BARICENTERELEVNAME, Float.class); // $NON-NLS-1$
// build the type
SimpleFeatureType type = b.buildFeatureType();
SimpleFeatureBuilder builder = new SimpleFeatureBuilder(type);
outBasins = new DefaultFeatureCollection();
// for each stream correct problems with basins and create geometries
pm.beginTask("Extracting basins...", nstream[0]);
for (int num = 1; num <= nstream[0]; num++) {
Object[] values = new Object[8];
int nordRow = -1;
int southRow = 0;
int eastCol = -1;
int westCol = nCols;
int numPixel = 0;
double minZ = Double.MAX_VALUE;
double maxZ = Double.MIN_VALUE;
double averageZ = 0.0;
if (pitRI != null)
pitRandomIter = RandomIterFactory.create(pitRI, null);
WritableRandomIter subbasinIter = RandomIterFactory.createWritable(subbasinsWR, null);
for (int j = 0; j < nRows; j++) {
for (int i = 0; i < nCols; i++) {
double basinId = basinsRandomIter.getSampleDouble(i, j, 0);
if (isNovalue(basinId)) {
continue;
}
int basinNum = (int) basinId;
if (basinNum == num) {
if (nordRow == -1) {
nordRow = i;
}
if (i > nordRow) {
southRow = i;
}
if (westCol > j) {
westCol = j;
}
if (eastCol < j) {
eastCol = j;
}
subbasinIter.setSample(i, j, 0, basinNum);
if (pitRI != null) {
double elevation = pitRandomIter.getSampleDouble(i, j, 0);
if (!isNovalue(elevation)) {
minZ = elevation < minZ ? elevation : minZ;
maxZ = elevation > maxZ ? elevation : maxZ;
averageZ = averageZ + elevation;
} else {
minZ = -1;
maxZ = -1;
averageZ = 0;
}
}
numPixel++;
}
}
}
if (numPixel != 0) {
// min, max and average
values[3] = num;
values[4] = maxZ;
values[5] = minZ;
values[6] = averageZ / numPixel;
numPixel = 0;
for (int i = nordRow; i < southRow + 1; i++) {
for (int j = westCol; j < eastCol + 1; j++) {
if (isNovalue(subbasinIter.getSampleDouble(i, j, 0))) {
for (int k = 1; k <= 8; k++) {
// index.setFlow(k);
int indexI = i + ModelsSupporter.DIR[k][1]; // index.getParameters()[0];
int indexJ = j + ModelsSupporter.DIR[k][0]; // index.getParameters()[1];
if (!isNovalue(subbasinIter.getSampleDouble(indexI, indexJ, 0))) {
numPixel++;
}
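// note: k is incremented again on the next line, so only every other of the 8 directions is checked, which matches the numPixel == 4 test below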
k++;
}
if (numPixel == 4) {
subbasinIter.setSample(i, j, 0, num);
}
}
numPixel = 0;
}
}
// extract the feature polygon of that basin number
OmsVectorizer vectorizer = new OmsVectorizer();
try {
vectorizer.inRaster = inBasins;
vectorizer.pm = pm;
vectorizer.doReset = true;
vectorizer.pValue = (double) num;
vectorizer.process();
} catch (Exception e) {
pm.errorMessage(e.getLocalizedMessage());
continue;
}
SimpleFeatureCollection outGeodata = vectorizer.outVector;
FeatureIterator<SimpleFeature> outGeodataIterator = outGeodata.features();
List<Polygon> polygons = new ArrayList<Polygon>();
while (outGeodataIterator.hasNext()) {
SimpleFeature feature = outGeodataIterator.next();
polygons.add((Polygon) feature.getDefaultGeometry());
}
outGeodataIterator.close();
MultiPolygon geometry = GeometryUtilities.gf().createMultiPolygon((Polygon[]) polygons.toArray(new Polygon[polygons.size()]));
values[0] = geometry;
values[1] = geometry.getArea();
values[2] = geometry.getLength();
Point centroid = geometry.getCentroid();
if (centroid == null || centroid.isEmpty()) {
pm.errorMessage("Unable to extract basin: " + num);
continue;
}
Coordinate centroidCoords = centroid.getCoordinate();
GridGeometry2D gridGeometry = inBasins.getGridGeometry();
GridCoordinates2D worldToGrid = gridGeometry.worldToGrid(new DirectPosition2D(centroidCoords.x, centroidCoords.y));
int[] rowColPoint = new int[] { worldToGrid.y, worldToGrid.x };
double centroidElevation = -1;
if (pitRI != null) {
double elev = pitRandomIter.getSampleDouble(rowColPoint[1], rowColPoint[0], 0);
if (!isNovalue(elev)) {
centroidElevation = elev;
}
}
values[7] = centroidElevation;
subbasinIter.done();
subbasinsWR = CoverageUtilities.createWritableRaster(nCols, nRows, null, null, doubleNovalue);
// add the values
builder.addAll(values);
// build the feature with provided ID
SimpleFeature feature = builder.buildFeature(type.getTypeName() + "." + num);
((DefaultFeatureCollection) outBasins).add(feature);
}
pm.worked(1);
}
pm.done();
basinsRandomIter.done();
basinsWR = null;
}
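A minimal usage sketch for the module, assuming the basin and (optional) elevation coverages are already loaded and that outBasins is declared as a SimpleFeatureCollection; the variable names are placeholders and only the fields visible in process() are set:

OmsBasinShape basinShape = new OmsBasinShape();
basinShape.inBasins = basinsCoverage; // raster carrying one integer id per basin
basinShape.inElev = elevationCoverage; // optional, used for the elevation attributes
basinShape.process();
SimpleFeatureCollection basinPolygons = basinShape.outBasins;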
use of javax.media.jai.iterator.WritableRandomIter in project hortonmachine by TheHortonMachine.
the class OmsRescaledDistance method process.
@Execute
public void process() throws Exception {
if (!concatOr(outRescaled == null, doReset)) {
return;
}
checkNull(inFlow, inNet);
RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inFlow);
int cols = regionMap.getCols();
int rows = regionMap.getRows();
xRes = regionMap.getXres();
yRes = regionMap.getYres();
RenderedImage flowRI = inFlow.getRenderedImage();
RandomIter flowIter = RandomIterFactory.create(flowRI, null);
int novalue = HMConstants.getIntNovalue(inFlow);
RenderedImage netRI = inNet.getRenderedImage();
RandomIter netIter = RandomIterFactory.create(netRI, null);
if (inElev != null) {
RenderedImage elevRI = inElev.getRenderedImage();
elevIter = RandomIterFactory.create(elevRI, null);
}
WritableRaster rescaledWR = CoverageUtilities.createWritableRaster(cols, rows, Float.class, null, floatNovalue);
WritableRandomIter rescaledIter = RandomIterFactory.createWritable(rescaledWR, null);
try {
pm.beginTask("Find outlets...", rows * cols); // $NON-NLS-1$
ConcurrentLinkedQueue<FlowNode> exitsList = new ConcurrentLinkedQueue<>();
processGrid(cols, rows, (c, r) -> {
if (pm.isCanceled())
return;
int netValue = netIter.getSample(c, r, 0);
if (isNovalue(netValue)) {
// we make sure that we pick only outlets that are on the net
return;
}
FlowNode flowNode = new FlowNode(flowIter, cols, rows, c, r, novalue);
if (flowNode.isHeadingOutside()) {
exitsList.add(flowNode);
}
pm.worked(1);
});
pm.done();
if (exitsList.size() == 0) {
throw new ModelsIllegalargumentException("No exits found in the map of flowdirections.", this);
}
pm.beginTask("Calculate rescaled distance...", exitsList.size());
exitsList.parallelStream().forEach(exitNode -> {
if (pm.isCanceled())
return;
calculateRescaledDistance(exitNode, (float) xRes, rescaledIter, elevIter, netIter);
pm.worked(1);
});
pm.done();
} finally {
rescaledIter.done();
netIter.done();
if (elevIter != null)
elevIter.done();
}
outRescaled = CoverageUtilities.buildCoverage("OmsRescaledDistance", rescaledWR, regionMap, inFlow.getCoordinateReferenceSystem());
}
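A minimal usage sketch for the module, assuming flow direction, network and (optionally) elevation coverages are available; the variable names are placeholders and only the fields visible in process() are set:

OmsRescaledDistance rescaledDistance = new OmsRescaledDistance();
rescaledDistance.inFlow = flowCoverage; // map of flow directions
rescaledDistance.inNet = netCoverage; // network raster, used to select the outlets
rescaledDistance.inElev = elevCoverage; // optional, enables the elevation-aware distance
rescaledDistance.process();
GridCoverage2D rescaled = rescaledDistance.outRescaled;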
Aggregations