Use of org.hortonmachine.hmachine.modules.basin.rescaleddistance.OmsRescaledDistance in project hortonmachine by TheHortonMachine.
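OmsRescaledDistance computes, for every cell of a basin, the distance to the outlet measured along the drainage directions, with the hillslope portion of the path weighted by pRatio (roughly, the ratio between channel and hillslope flow velocities). All the examples below follow the same wiring: set inFlow and inNet (and optionally inElev for the elevation-aware variant), set pRatio, call process() and read the result from outRescaled.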
From the class TestRescaledDistance, method testRescaledDistance:
public void testRescaledDistance() throws Exception {
HashMap<String, Double> envelopeParams = HMTestMaps.getEnvelopeparams();
CoordinateReferenceSystem crs = HMTestMaps.getCrs();
double[][] flowData = HMTestMaps.flowData;
GridCoverage2D flowCoverage = CoverageUtilities.buildCoverage("flow", flowData, envelopeParams, crs, true);
double[][] netData = HMTestMaps.extractNet0Data;
GridCoverage2D netCoverage = CoverageUtilities.buildCoverage("net", netData, envelopeParams, crs, true);
OmsRescaledDistance rescaledDistance = new OmsRescaledDistance();
rescaledDistance.inFlow = flowCoverage;
rescaledDistance.inNet = netCoverage;
rescaledDistance.pRatio = 0.3f;
rescaledDistance.pm = pm;
rescaledDistance.process();
GridCoverage2D rescaledDistanceCoverage = rescaledDistance.outRescaled;
checkMatrixEqual(rescaledDistanceCoverage.getRenderedImage(), HMTestMaps.rescaledDistanceData, 0.1);
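// For ad-hoc inspection, the computed raster could also be dumped to disk with the raster writer
// used in the other examples on this page (the output path below is just a placeholder):
// OmsRasterWriter.writeRaster("/tmp/rescaledDistance.tiff", rescaledDistanceCoverage);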
}
Use of org.hortonmachine.hmachine.modules.basin.rescaleddistance.OmsRescaledDistance in project hortonmachine by TheHortonMachine.
From the class GeoframeInputsBuilder, method process:
@Execute
public void process() throws Exception {
checkNull(inPitfiller, inDrain, inTca, inNet, inSkyview, inBasins, outFolder);
GridCoverage2D subBasins = getRaster(inBasins);
CoordinateReferenceSystem crs = subBasins.getCoordinateReferenceSystem();
List<Polygon> cells = CoverageUtilities.gridcoverageToCellPolygons(subBasins, null, true, pm);
GridCoverage2D pit = getRaster(inPitfiller);
GridCoverage2D sky = getRaster(inSkyview);
GridCoverage2D drain = getRaster(inDrain);
GridCoverage2D net = getRaster(inNet);
GridCoverage2D tca = getRaster(inTca);
List<Geometry> lakesList = new ArrayList<>();
if (inLakes != null) {
SimpleFeatureCollection lakesFC = getVector(inLakes);
List<SimpleFeature> lakesFList = FeatureUtilities.featureCollectionToList(lakesFC);
// remove those that don't even cover the raster envelope
Polygon rasterBounds = FeatureUtilities.envelopeToPolygon(pit.getEnvelope2D());
for (SimpleFeature lakeF : lakesFList) {
Geometry lakeGeom = (Geometry) lakeF.getDefaultGeometry();
Envelope env = lakeGeom.getEnvelopeInternal();
if (rasterBounds.getEnvelopeInternal().intersects(env)) {
// TODO would be good to check with basin geom
lakesList.add(lakeGeom);
}
}
}
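// extract the network features, with hack/pfafstetter ordering attributes, from the pit, drain, tca and net rasters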
OmsNetworkAttributesBuilder netAttributesBuilder = new OmsNetworkAttributesBuilder();
netAttributesBuilder.pm = pm;
netAttributesBuilder.inDem = pit;
netAttributesBuilder.inFlow = drain;
netAttributesBuilder.inTca = tca;
netAttributesBuilder.inNet = net;
netAttributesBuilder.doHack = true;
netAttributesBuilder.onlyDoSimpleGeoms = false;
netAttributesBuilder.process();
SimpleFeatureCollection outNet = netAttributesBuilder.outNet;
// dumpVector(outNet, "/Users/hydrologis/Dropbox/hydrologis/lavori/2020_projects/15_uniTN_basins/brenta/brenta_063basins/network_attributes_full.shp");
String userDataField = useHack ? NetworkChannel.HACKNAME : NetworkChannel.PFAFNAME;
List<Geometry> netGeometries = FeatureUtilities.featureCollectionToGeometriesList(outNet, true, userDataField);
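// group the raster cell polygons by the basin id stored in each polygon's user data, skipping novalue cells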
Map<Integer, List<Geometry>> collected = cells.parallelStream().filter(poly -> ((Number) poly.getUserData()).doubleValue() != HMConstants.doubleNovalue).collect(Collectors.groupingBy(poly -> ((Number) poly.getUserData()).intValue()));
SimpleFeatureBuilder basinsBuilder = getBasinsBuilder(pit.getCoordinateReferenceSystem());
SimpleFeatureBuilder basinCentroidsBuilder = getBasinCentroidsBuilder(pit.getCoordinateReferenceSystem());
SimpleFeatureBuilder singleNetBuilder = getSingleNetBuilder(pit.getCoordinateReferenceSystem());
DefaultFeatureCollection allBasinsFC = new DefaultFeatureCollection();
DefaultFeatureCollection allNetworksFC = new DefaultFeatureCollection();
StringBuilder csvText = new StringBuilder();
csvText.append("#id;x;y;elev_m;avgelev_m;area_km2;netlength;centroid_skyview\n");
// aggregate basins and check for lakes
Map<Integer, Geometry> basinId2geomMap = new HashMap<>();
pm.beginTask("Join basin cells...", collected.size());
int maxBasinNum = 0;
for (Entry<Integer, List<Geometry>> entry : collected.entrySet()) {
int basinNum = entry.getKey();
maxBasinNum = Math.max(maxBasinNum, basinNum);
List<Geometry> polygons = entry.getValue();
Geometry basin = CascadedPolygonUnion.union(polygons);
// extract largest basin
double maxArea = Double.NEGATIVE_INFINITY;
Geometry maxPolygon = basin;
int numGeometries = basin.getNumGeometries();
if (numGeometries > 1) {
for (int i = 0; i < numGeometries; i++) {
Geometry geometryN = basin.getGeometryN(i);
double area = geometryN.getArea();
if (area > maxArea) {
maxArea = area;
maxPolygon = geometryN;
}
}
}
// a lake can't be completely contained in a basin
for (Geometry lakeGeom : lakesList) {
if (maxPolygon.contains(lakeGeom)) {
throw new ModelsIllegalargumentException("A basin can't completely contain a lake. Check your data.", this);
}
}
basinId2geomMap.put(basinNum, maxPolygon);
pm.worked(1);
}
pm.done();
// if lakes are available, they have to contain at least one confluence
for (Geometry lakeGeom : lakesList) {
PreparedGeometry preparedLake = PreparedGeometryFactory.prepare(lakeGeom);
boolean hasPoint = false;
for (Geometry netGeom : netGeometries) {
LineString line = (LineString) netGeom;
Point startPoint = line.getStartPoint();
Point endPoint = line.getEndPoint();
if (preparedLake.contains(startPoint) || preparedLake.contains(endPoint)) {
hasPoint = true;
break;
}
}
if (!hasPoint) {
throw new ModelsIllegalargumentException("A lake has to contain at least one confluence. Check your data.", this);
}
}
// if lakes are available, we need to find the basins that intersect and cut the basins on them
List<Integer> lakesIdList = new ArrayList<>();
if (!lakesList.isEmpty()) {
List<Basin> allBasins = new ArrayList<>();
int nextBasinNum = maxBasinNum + 1;
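// build the tree of up-/downstream basin relations; the lakes found above will be inserted into this tree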
Basin rootBasin = getRootBasin(basinId2geomMap, allBasins);
if (rootBasin != null) {
pm.beginTask("Handle lake-basin intersections...", lakesList.size());
for (Geometry lakeGeom : lakesList) {
Basin outBasin = findFirstIntersecting(rootBasin, lakeGeom);
if (outBasin != null) {
// found the out basin. All others intersecting will drain into this
List<Basin> intersectingBasins = allBasins.parallelStream().filter(tmpBasin -> {
boolean isNotOutBasin = tmpBasin.id != outBasin.id;
boolean intersectsLake = tmpBasin.basinGeometry.intersects(lakeGeom);
return isNotOutBasin && intersectsLake;
}).collect(Collectors.toList());
// of the intersecting basins, find those that are completely contained in the lake
List<Basin> completelyContainedBasins = new ArrayList<>();
List<Basin> justIntersectingBasins = new ArrayList<>();
intersectingBasins.forEach(interBasin -> {
if (lakeGeom.contains(interBasin.basinGeometry)) {
completelyContainedBasins.add(interBasin);
} else {
justIntersectingBasins.add(interBasin);
}
});
intersectingBasins = justIntersectingBasins;
// create a new basin based on the lake
Basin lakeBasin = new Basin();
lakeBasin.basinGeometry = lakeGeom;
lakeBasin.id = nextBasinNum++;
lakesIdList.add(lakeBasin.id);
lakeBasin.downStreamBasin = outBasin;
lakeBasin.downStreamBasinId = outBasin.id;
lakeBasin.upStreamBasins.addAll(intersectingBasins);
basinId2geomMap.put(lakeBasin.id, lakeBasin.basinGeometry);
// completely contained basins are removed and their connections rerouted to the lake
for (Basin containedBasin : completelyContainedBasins) {
for (Basin tmpBasin : allBasins) {
if (tmpBasin.id != containedBasin.id) {
// basins that drained into the completely contained one now drain into the lake
if (tmpBasin.downStreamBasinId == containedBasin.id) {
tmpBasin.downStreamBasinId = lakeBasin.id;
tmpBasin.downStreamBasin = lakeBasin;
}
// find the basins into which the completely contained one drains and relink them with the lake
Basin removeBasin = null;
for (Basin tmpUpBasin : tmpBasin.upStreamBasins) {
if (tmpUpBasin.id == containedBasin.id) {
removeBasin = tmpUpBasin;
}
}
if (removeBasin != null) {
tmpBasin.upStreamBasins.remove(removeBasin);
tmpBasin.upStreamBasins.add(lakeBasin);
}
}
}
// at this point the containedBasin should be an orphan
containedBasin.downStreamBasin = null;
containedBasin.upStreamBasins = null;
basinId2geomMap.remove(containedBasin.id);
}
// handle interaction with downstream basin
outBasin.upStreamBasins.removeAll(lakeBasin.upStreamBasins);
outBasin.upStreamBasins.add(lakeBasin);
Geometry geomToCut = basinId2geomMap.get(outBasin.id);
Geometry newGeom;
try {
newGeom = geomToCut.difference(lakeGeom);
} catch (Exception e) {
File folder = new File(outFolder);
File errorFile = new File(folder, "errors." + HMConstants.GPKG + "#error_basin_" + outBasin.id);
String message = "An error occurred during intersection between basin and lake geometries. IGNORING LAKE.\nGeometries written to: " + errorFile;
pm.errorMessage(message);
geomToCut.setUserData("basin_" + outBasin.id);
lakeGeom.setUserData("lake");
SimpleFeatureCollection fc = FeatureUtilities.featureCollectionFromGeometry(crs, geomToCut, lakeGeom);
OmsVectorWriter.writeVector(errorFile.getAbsolutePath(), fc);
continue;
}
outBasin.basinGeometry = newGeom;
basinId2geomMap.put(outBasin.id, newGeom);
// handle interaction with upstream basins
intersectingBasins.forEach(iBasin -> {
iBasin.downStreamBasin = lakeBasin;
iBasin.downStreamBasinId = lakeBasin.id;
Geometry basinGeomToCut = basinId2geomMap.get(iBasin.id);
Geometry newBasinGeom = basinGeomToCut.difference(lakeGeom);
basinId2geomMap.put(iBasin.id, newBasinGeom);
});
// we also need to cut the streams on the lakes
List<Geometry> cutNetGeometries = new ArrayList<>();
for (Geometry netGeom : netGeometries) {
if (netGeom.intersects(lakeGeom)) {
Geometry difference = netGeom.difference(lakeGeom);
if (!difference.isEmpty()) {
if (difference.getNumGeometries() > 1) {
// choose longest
Geometry longest = null;
double maxLength = -1;
for (int i = 0; i < difference.getNumGeometries(); i++) {
Geometry geometryN = difference.getGeometryN(i);
double l = geometryN.getLength();
if (l > maxLength) {
maxLength = l;
longest = geometryN;
}
}
difference = longest;
}
cutNetGeometries.add(difference);
difference.setUserData(netGeom.getUserData());
}
} else {
cutNetGeometries.add(netGeom);
}
}
netGeometries = cutNetGeometries;
}
}
// finally rewrite the topology file.
File topoFile = new File(inGeoframeTopology);
String topoFilename = FileUtilities.getNameWithoutExtention(topoFile);
File newTopoFile = new File(topoFile.getParentFile(), topoFilename + "_lakes.txt");
List<String> topology = new ArrayList<>();
writeTopology(topology, rootBasin);
StringBuilder sb = new StringBuilder();
topology.forEach(record -> sb.append(record).append("\n"));
FileUtilities.writeFile(sb.toString(), newTopoFile);
} else {
pm.errorMessage("Unable to find the basin topology.");
}
}
pm.beginTask("Extract vector basins...", basinId2geomMap.size());
for (Entry<Integer, Geometry> entry : basinId2geomMap.entrySet()) {
int basinNum = entry.getKey();
pm.message("Processing basin " + basinNum + "...");
Geometry basinPolygon = entry.getValue();
// for lakes the main net length remains 0
double mainNetLength = 0;
List<LineString> netPieces = new ArrayList<>();
List<Integer> checkValueList = new ArrayList<>();
boolean isLake = lakesIdList.contains(basinNum);
if (!isLake) {
// get network pieces inside basin
int minCheckValue = Integer.MAX_VALUE;
HashMap<Integer, List<LineString>> checkValueList4Lines = new HashMap<>();
for (Geometry netGeom : netGeometries) {
if (netGeom.intersects(basinPolygon)) {
LengthIndexedLine lil = new LengthIndexedLine(netGeom);
Coordinate centerCoord = lil.extractPoint(0.5);
double value = CoverageUtilities.getValue(subBasins, centerCoord);
if ((int) value == basinNum) {
Object userData = netGeom.getUserData();
int checkValue;
if (useHack) {
checkValue = Integer.parseInt(userData.toString());
} else {
String pfaf = userData.toString();
PfafstetterNumber p = new PfafstetterNumber(pfaf);
checkValue = p.getOrder();
}
minCheckValue = Math.min(minCheckValue, checkValue);
checkValueList.add(checkValue);
List<LineString> list = checkValueList4Lines.get(checkValue);
if (list == null) {
list = new ArrayList<>();
}
list.add((LineString) netGeom);
checkValueList4Lines.put(checkValue, list);
netPieces.add((LineString) netGeom);
}
}
}
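// the main channel length is taken from the network lines with the lowest hack/pfafstetter order found in the basin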
if (minCheckValue != Integer.MAX_VALUE) {
List<LineString> minCheckValueLines = checkValueList4Lines.get(minCheckValue);
for (LineString minCheckValueLine : minCheckValueLines) {
mainNetLength += minCheckValueLine.getLength();
}
}
}
Envelope basinEnvelope = basinPolygon.getEnvelopeInternal();
Point basinCentroid = basinPolygon.getCentroid();
double areaM2 = basinPolygon.getArea();
double areaKm2 = areaM2 / 1000000.0;
Coordinate point = basinCentroid.getCoordinate();
double elev = CoverageUtilities.getValue(pit, point);
double skyview = CoverageUtilities.getValue(sky, point);
// Extracting raster data for each basin
ReferencedEnvelope basinRefEnvelope = new ReferencedEnvelope(basinEnvelope, crs);
GridCoverage2D clipped = CoverageUtilities.clipCoverage(subBasins, basinRefEnvelope);
WritableRaster clippedWR = CoverageUtilities.renderedImage2IntWritableRaster(clipped.getRenderedImage(), false);
// lakes and lake cuts need to be taken into account, so the basin polygon is used to mask the cells
PreparedGeometry preparedBasinPolygon = PreparedGeometryFactory.prepare(basinPolygon);
RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(clipped);
GridGeometry2D clippedGG = clipped.getGridGeometry();
int cols = regionMap.getCols();
int rows = regionMap.getRows();
for (int r = 0; r < rows; r++) {
for (int c = 0; c < cols; c++) {
Coordinate coord = CoverageUtilities.coordinateFromColRow(c, r, clippedGG);
if (preparedBasinPolygon.intersects(GeometryUtilities.gf().createPoint(coord))) {
clippedWR.setSample(c, r, 0, basinNum);
// TODO check
// int value = clippedWR.getSample(c, r, 0);
// if (value != basinNum) {
// clippedWR.setSample(c, r, 0, HMConstants.intNovalue);
// }
} else {
clippedWR.setSample(c, r, 0, HMConstants.intNovalue);
}
}
}
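// clippedWR now acts as a mask: cells inside the basin polygon hold basinNum, all other cells hold novalue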
File basinFolder = makeBasinFolder(basinNum);
GridCoverage2D maskCoverage = CoverageUtilities.buildCoverage("basin" + basinNum, clippedWR, regionMap, crs);
GridCoverage2D clippedPit = CoverageUtilities.clipCoverage(pit, basinRefEnvelope);
GridCoverage2D cutPit = CoverageUtilities.coverageValuesMapper(clippedPit, maskCoverage);
File pitFile = new File(basinFolder, "dtm_" + basinNum + ".asc");
if (!pitFile.exists() || doOverWrite) {
dumpRaster(cutPit, pitFile.getAbsolutePath());
}
double[] minMaxAvgSum = OmsRasterSummary.getMinMaxAvgSum(cutPit);
double avgElev = minMaxAvgSum[2];
GridCoverage2D clippedSky = CoverageUtilities.clipCoverage(sky, basinRefEnvelope);
GridCoverage2D cutSky = CoverageUtilities.coverageValuesMapper(clippedSky, maskCoverage);
File skyFile = new File(basinFolder, "sky_" + basinNum + ".asc");
if (!skyFile.exists() || doOverWrite) {
dumpRaster(cutSky, skyFile.getAbsolutePath());
}
GridCoverage2D clippedDrain = CoverageUtilities.clipCoverage(drain, basinRefEnvelope);
GridCoverage2D cutDrain = CoverageUtilities.coverageValuesMapper(clippedDrain, maskCoverage);
File drainFile = new File(basinFolder, "drain_" + basinNum + ".asc");
if (!drainFile.exists() || doOverWrite) {
dumpRaster(cutDrain, drainFile.getAbsolutePath());
}
GridCoverage2D clippedNet = CoverageUtilities.clipCoverage(net, basinRefEnvelope);
GridCoverage2D cutNet = CoverageUtilities.coverageValuesMapper(clippedNet, maskCoverage);
File netFile = new File(basinFolder, "net_" + basinNum + ".asc");
if (!netFile.exists() || doOverWrite) {
dumpRaster(cutNet, netFile.getAbsolutePath());
}
// OmsRescaledDistance rescaledDistance1 = new OmsRescaledDistance();
// rescaledDistance1.pm = pm;
// rescaledDistance1.inElev = cutPit;
// rescaledDistance1.inFlow = cutDrain;
// rescaledDistance1.inNet = cutNet;
// rescaledDistance1.pRatio = 1;
// rescaledDistance1.process();
// File rescaled1File = new File(basinFolder, "rescaleddistance_1_" + basinNum + ".asc");
// if (!rescaled1File.exists() || doOverWrite) {
// dumpRaster(rescaledDistance1.outRescaled, rescaled1File.getAbsolutePath());
// }
//
// OmsRescaledDistance rescaledDistance = new OmsRescaledDistance();
// rescaledDistance.pm = pm;
// rescaledDistance.inElev = cutPit;
// rescaledDistance.inFlow = cutDrain;
// rescaledDistance.inNet = cutNet;
// rescaledDistance.pRatio = pRatio;
// rescaledDistance.process();
// File rescaledRatioFile = new File(basinFolder, "rescaleddistance_" + pRatio + "_" + basinNum + ".asc");
// if (!rescaledRatioFile.exists() || doOverWrite) {
// dumpRaster(rescaledDistance.outRescaled, rescaledRatioFile.getAbsolutePath());
// }
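// Note: the per-basin rescaled distance computation above is commented out in the source; re-enabling
// the second block would write a rescaleddistance_<pRatio>_<basinNum>.asc raster alongside the other
// per-basin outputs, using the cut pit, drain and net rasters as inputs.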
// finalize feature writing
// BASINS
Geometry dumpBasin = basinPolygon;
if (basinPolygon instanceof Polygon) {
dumpBasin = GeometryUtilities.gf().createMultiPolygon(new Polygon[] { (Polygon) basinPolygon });
}
Object[] basinValues = new Object[] { dumpBasin, basinNum, point.x, point.y, elev, avgElev, areaKm2, mainNetLength, skyview, isLake ? 1 : 0 };
basinsBuilder.addAll(basinValues);
SimpleFeature basinFeature = basinsBuilder.buildFeature(null);
allBasinsFC.add(basinFeature);
// dump single subbasin
DefaultFeatureCollection singleBasin = new DefaultFeatureCollection();
singleBasin.add(basinFeature);
File basinShpFile = new File(basinFolder, "subbasins_complete_ID_" + basinNum + ".shp");
if (!basinShpFile.exists() || doOverWrite) {
dumpVector(singleBasin, basinShpFile.getAbsolutePath());
}
Object[] centroidValues = new Object[] { basinCentroid, basinNum, point.x, point.y, elev, avgElev, areaKm2, mainNetLength, skyview };
basinCentroidsBuilder.addAll(centroidValues);
SimpleFeature basinCentroidFeature = basinCentroidsBuilder.buildFeature(null);
// dump single centroid
DefaultFeatureCollection singleCentroid = new DefaultFeatureCollection();
singleCentroid.add(basinCentroidFeature);
File centroidShpFile = new File(basinFolder, "centroid_ID_" + basinNum + ".shp");
if (!centroidShpFile.exists() || doOverWrite) {
dumpVector(singleCentroid, centroidShpFile.getAbsolutePath());
}
// CHANNELS
if (!netPieces.isEmpty()) {
DefaultFeatureCollection singleNet = new DefaultFeatureCollection();
for (int i = 0; i < netPieces.size(); i++) {
LineString netLine = netPieces.get(i);
Integer checkValue = checkValueList.get(i);
Object[] netValues = new Object[] { netLine, basinNum, netLine.getLength(), checkValue };
singleNetBuilder.addAll(netValues);
SimpleFeature singleNetFeature = singleNetBuilder.buildFeature(null);
allNetworksFC.add(singleNetFeature);
singleNet.add(singleNetFeature);
}
File netShpFile = new File(basinFolder, "network_complete_ID_" + basinNum + ".shp");
if (!netShpFile.exists() || doOverWrite) {
dumpVector(singleNet, netShpFile.getAbsolutePath());
}
}
csvText.append(basinNum).append(";");
csvText.append(point.x).append(";");
csvText.append(point.y).append(";");
csvText.append(elev).append(";");
csvText.append(avgElev).append(";");
csvText.append(areaKm2).append(";");
csvText.append(mainNetLength).append(";");
csvText.append(skyview).append("\n");
pm.worked(1);
}
pm.done();
File folder = new File(outFolder);
File basinShpFile = new File(folder, "subbasins_complete.shp");
if (!basinShpFile.exists() || doOverWrite) {
dumpVector(allBasinsFC, basinShpFile.getAbsolutePath());
}
File netShpFile = new File(folder, "network_complete.shp");
if (!netShpFile.exists() || doOverWrite) {
dumpVector(allNetworksFC, netShpFile.getAbsolutePath());
}
File csvFile = new File(folder, "subbasins.csv");
if (!csvFile.exists() || doOverWrite) {
FileUtilities.writeFile(csvText.toString(), csvFile);
}
}
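In short, for every sub-basin the builder writes the masked dtm_<id>.asc, sky_<id>.asc, drain_<id>.asc and net_<id>.asc rasters plus the per-basin subbasin, centroid and (where present) network shapefiles into a dedicated folder, and collects the aggregated subbasins_complete.shp, network_complete.shp and subbasins.csv in outFolder; when lakes are supplied, the geoframe topology is also rewritten to a *_lakes.txt file.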
Use of org.hortonmachine.hmachine.modules.basin.rescaleddistance.OmsRescaledDistance in project hortonmachine by TheHortonMachine.
From the class RescaledDistance, method process:
@Execute
public void process() throws Exception {
OmsRescaledDistance rescaleddistance = new OmsRescaledDistance();
rescaleddistance.inFlow = getRaster(inFlow);
rescaleddistance.inNet = getRaster(inNet);
rescaleddistance.inElev = getRaster(inElev);
rescaleddistance.pRatio = pRatio;
rescaleddistance.pm = pm;
rescaleddistance.doProcess = doProcess;
rescaleddistance.doReset = doReset;
rescaleddistance.process();
dumpRaster(rescaleddistance.outRescaled, outRescaled);
}
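For completeness, a hypothetical invocation of this wrapper, assuming the usual HortonMachine convention of String path parameters and a double pRatio (all paths are placeholders):
RescaledDistance rescaled = new RescaledDistance();
rescaled.inFlow = "/data/drain.tiff";
rescaled.inNet = "/data/net.tiff";
rescaled.inElev = "/data/pit.tiff";
rescaled.pRatio = 0.3;
rescaled.outRescaled = "/data/rescaled.tiff";
rescaled.process();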
Use of org.hortonmachine.hmachine.modules.basin.rescaleddistance.OmsRescaledDistance in project hortonmachine by TheHortonMachine.
From the class RunMaximumDischarge, method main:
public static void main(String[] args) throws Exception {
String dtm = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/dtm.tiff";
String aspect = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/aspect.tiff";
String pit = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/pit.tiff";
String flow = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/flow.tiff";
String drain = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/drain.tiff";
String tca = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/tca.tiff";
String net = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/net300.tiff";
String basin = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/basin.tiff";
String basinShp = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/basin_vect.shp";
String basinOutletShp = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/basin_outlet.shp";
String basinPit = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/basin_cutout.tiff";
String slope = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/slope.tiff";
String topindex = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/topindex.tiff";
String curvaturesPlan = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/curvatures_plan.tiff";
String curvaturesProf = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/curvatures_prof.tiff";
String curvaturesTan = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/curvatures_tan.tiff";
String alung = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/alung.tiff";
String alungB = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/alungB.tiff";
String rescaled = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/flanginec/rescaled.tiff";
// String dtm = "/media/hydrologis/Samsung_T3/MAZONE/DTM/dtm_toblino/dtm_toblino.tiff";
// String pit = "/media/hydrologis/Samsung_T3/MAZONE/DTM/dtm_toblino/pit.tiff";
// String flow = "/media/hydrologis/Samsung_T3/MAZONE/DTM/dtm_toblino/flow.tiff";
// String drain = "/media/hydrologis/Samsung_T3/MAZONE/DTM/dtm_toblino/drain.tiff";
// String tca = "/media/hydrologis/Samsung_T3/MAZONE/DTM/dtm_toblino/tca.tiff";
// String dtm =
// "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/DTM_calvello/dtm_all_float.tiff";
// String pit = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/DTM_calvello/pit.tiff";
// String flow = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/DTM_calvello/flow.tiff";
// String drain = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/DTM_calvello/drain.tiff";
// String tca = "/media/hydrologis/Samsung_T3/MAZONE/PITFILLE/DTM_calvello/tca.tiff";
// OmsAspect aspectMod = new OmsAspect();
// aspectMod.inElev = OmsRasterReader.readRaster(dtm);
// // aspectMod.doRadiants = doRadiants;
// aspectMod.doRound = true;
// aspectMod.process();
// OmsRasterWriter.writeRaster(aspect, aspectMod.outAspect);
OmsDePitter pitfiller = new OmsDePitter();
pitfiller.inElev = OmsRasterReader.readRaster("/home/hydrologis/Dropbox/hydrologis/lavori/2017_06_mapzone/test/dtm_test2.tiff");
pitfiller.process();
OmsRasterWriter.writeRaster(pit, pitfiller.outPit);
OmsRasterWriter.writeRaster(flow, pitfiller.outFlow);
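// NOTE: this hard-coded early exit stops the run right after the depitting step; the remaining
// steps below are only executed if it is removed.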
if (true) {
System.exit(0);
}
OmsDrainDir draindir = new OmsDrainDir();
draindir.inPit = OmsRasterReader.readRaster(pit);
draindir.inFlow = OmsRasterReader.readRaster(flow);
// draindir.inFlownet = OmsRasterReader.readRaster(inFlownet);
draindir.pLambda = 1f;
draindir.process();
OmsRasterWriter.writeRaster(drain, draindir.outFlow);
OmsRasterWriter.writeRaster(tca, draindir.outTca);
OmsExtractNetwork extractnetwork = new OmsExtractNetwork();
extractnetwork.inTca = OmsRasterReader.readRaster(tca);
extractnetwork.inFlow = OmsRasterReader.readRaster(drain);
// extractnetwork.inSlope = OmsRasterReader.readRaster(inSlope);
// extractnetwork.inTc3 = OmsRasterReader.readRaster(inTc3);
extractnetwork.pThres = 300;
extractnetwork.pMode = Variables.TCA;
// extractnetwork.pExp = pExp;
// extractnetwork.pm = pm;
extractnetwork.process();
OmsRasterWriter.writeRaster(net, extractnetwork.outNet);
OmsExtractBasin extractbasin = new OmsExtractBasin();
extractbasin.pNorth = 5112705.635839384;
extractbasin.pEast = 1638565.1058415675;
extractbasin.inFlow = OmsRasterReader.readRaster(drain);
// extractbasin.inNetwork = getVector(inNetwork);
// extractbasin.pSnapbuffer = pSnapbuffer;
extractbasin.doVector = true;
// extractbasin.doSmoothing = doSmoothing;
extractbasin.process();
OmsRasterWriter.writeRaster(basin, extractbasin.outBasin);
OmsVectorWriter.writeVector(basinOutletShp, extractbasin.outOutlet);
OmsVectorWriter.writeVector(basinShp, extractbasin.outVectorBasin);
System.out.println("Basin Area = " + extractbasin.outArea);
OmsCutOut cutPit = new OmsCutOut();
cutPit.inRaster = OmsRasterReader.readRaster(pit);
cutPit.inMask = OmsRasterReader.readRaster(basin);
// c.pMax = pMax;
// c.pMin = pMin;
// c.doInverse = doInverse;
cutPit.process();
OmsRasterWriter.writeRaster(basinPit, cutPit.outRaster);
OmsGradient gradient = new OmsGradient();
gradient.inElev = OmsRasterReader.readRaster(basinPit);
gradient.pMode = Variables.FINITE_DIFFERENCES;
gradient.doDegrees = true;
gradient.process();
OmsRasterWriter.writeRaster(slope, gradient.outSlope);
OmsTopIndex topindexMod = new OmsTopIndex();
topindexMod.inTca = OmsRasterReader.readRaster(tca);
topindexMod.inSlope = OmsRasterReader.readRaster(slope);
topindexMod.process();
OmsRasterWriter.writeRaster(topindex, topindexMod.outTopindex);
OmsCurvatures curv = new OmsCurvatures();
curv.inElev = OmsRasterReader.readRaster(basinPit);
curv.process();
OmsRasterWriter.writeRaster(curvaturesProf, curv.outProf);
OmsRasterWriter.writeRaster(curvaturesPlan, curv.outPlan);
OmsRasterWriter.writeRaster(curvaturesTan, curv.outTang);
OmsAb ab = new OmsAb();
ab.inTca = OmsRasterReader.readRaster(tca);
ab.inPlan = OmsRasterReader.readRaster(curvaturesPlan);
ab.process();
OmsRasterWriter.writeRaster(alung, ab.outAb);
OmsRasterWriter.writeRaster(alungB, ab.outB);
GridCoverage2D cutDrain = OmsCutOut.cut(OmsRasterReader.readRaster(drain), OmsRasterReader.readRaster(basin));
GridCoverage2D cutNet = OmsCutOut.cut(OmsRasterReader.readRaster(net), OmsRasterReader.readRaster(basin));
OmsRescaledDistance rescaleddistance = new OmsRescaledDistance();
rescaleddistance.inFlow = cutDrain;
rescaleddistance.inNet = cutNet;
rescaleddistance.inElev = OmsRasterReader.readRaster(basinPit);
rescaleddistance.pRatio = 0.3;
rescaleddistance.process();
OmsRasterWriter.writeRaster(rescaled, rescaleddistance.outRescaled);
}
Use of org.hortonmachine.hmachine.modules.basin.rescaleddistance.OmsRescaledDistance in project hortonmachine by TheHortonMachine.
From the class TestRescaledDistance, method testRescaledDistance3D:
public void testRescaledDistance3D() throws Exception {
HashMap<String, Double> envelopeParams = HMTestMaps.getEnvelopeparams();
CoordinateReferenceSystem crs = HMTestMaps.getCrs();
double[][] flowData = HMTestMaps.flowData;
GridCoverage2D flowCoverage = CoverageUtilities.buildCoverage("flow", flowData, envelopeParams, crs, true);
double[][] netData = HMTestMaps.extractNet0Data;
GridCoverage2D netCoverage = CoverageUtilities.buildCoverage("net", netData, envelopeParams, crs, true);
double[][] elevData = HMTestMaps.mapData;
GridCoverage2D elevCoverage = CoverageUtilities.buildCoverage("elev", elevData, envelopeParams, crs, true);
OmsRescaledDistance rescaledDistance = new OmsRescaledDistance();
rescaledDistance.inFlow = flowCoverage;
rescaledDistance.inNet = netCoverage;
rescaledDistance.inElev = elevCoverage;
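// setting inElev is what makes this the elevation-aware (3D) variant of the rescaled distance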
rescaledDistance.pRatio = 0.3f;
rescaledDistance.pm = pm;
rescaledDistance.process();
// GridCoverage2D rescaledDistanceCoverage = rescaledDistance.outRescaled;
// PrintUtilities.printCoverageData(rescaledDistanceCoverage);
// checkMatrixEqual(rescaledDistanceCoverage.getRenderedImage(),
// HMTestMaps.rescaledDistanceData, 0.1);
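// Since the check above is disabled, a lighter sanity check could be used instead (a sketch; it only
// verifies that the output contains positive values, using the summary helper seen in other examples):
double[] minMaxAvgSum = OmsRasterSummary.getMinMaxAvgSum(rescaledDistance.outRescaled);
assertTrue(minMaxAvgSum[1] > 0);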
}