Use of org.hortonmachine.gears.libs.exceptions.ModelsRuntimeException in project hortonmachine by TheHortonMachine.
The class OmsRasterVectorIntersector, method process.
@Execute
public void process() throws Exception {
checkNull(inRaster, inVector);
SimpleFeatureType schema = inVector.getSchema();
if (!EGeometryType.isPolygon(schema.getGeometryDescriptor())) {
throw new ModelsRuntimeException("The module works only with polygon vectors.", this);
}
RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inRaster);
OmsScanLineRasterizer raster = new OmsScanLineRasterizer();
raster.inVector = inVector;
raster.pCols = regionMap.getCols();
raster.pRows = regionMap.getRows();
raster.pNorth = regionMap.getNorth();
raster.pSouth = regionMap.getSouth();
raster.pEast = regionMap.getEast();
raster.pWest = regionMap.getWest();
raster.pValue = 1.0;
raster.process();
GridCoverage2D rasterizedVector = raster.outRaster;
OmsCutOut cutout = new OmsCutOut();
cutout.pm = pm;
cutout.inRaster = inRaster;
cutout.inMask = rasterizedVector;
cutout.doInverse = doInverse;
cutout.process();
outRaster = cutout.outRaster;
}
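Since the method above only wires OmsScanLineRasterizer and OmsCutOut together, a minimal usage sketch may help. The field names (inRaster, inVector, doInverse, outRaster) come from the code shown here; the input variables, their types and how they are loaded are assumptions.
// Hypothetical usage sketch: clip a raster with a polygon layer.
GridCoverage2D inElevation = null; // placeholder: raster loaded elsewhere
SimpleFeatureCollection inBasins = null; // placeholder: polygon layer loaded elsewhere
OmsRasterVectorIntersector intersector = new OmsRasterVectorIntersector();
intersector.inRaster = inElevation;
intersector.inVector = inBasins; // must contain polygons, otherwise ModelsRuntimeException is thrown
intersector.doInverse = false; // forwarded to OmsCutOut.doInverse (mask inversion)
intersector.process();
GridCoverage2D clipped = intersector.outRaster;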
Use of org.hortonmachine.gears.libs.exceptions.ModelsRuntimeException in project hortonmachine by TheHortonMachine.
The class OmsScanLineRasterizer, method process.
@Execute
public void process() throws Exception {
checkNull(inVector);
if (pValue == null && fCat == null) {
throw new ModelsIllegalargumentException("One of pValue or the fCat have to be defined.", this, pm);
}
if (pNorth == null || pSouth == null || pWest == null || pEast == null || pRows == null || pCols == null) {
if (inRaster == null) {
throw new ModelsIllegalargumentException("It is necessary to supply all the information about the processing region. Did you set the boundaries and rows/cols?", this, pm);
}
}
if (inRaster != null) {
RegionMap regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inRaster);
pNorth = regionMap.getNorth();
pSouth = regionMap.getSouth();
pWest = regionMap.getWest();
pEast = regionMap.getEast();
pRows = regionMap.getRows();
pCols = regionMap.getCols();
inIter = CoverageUtilities.getRandomIterator(inRaster);
}
SimpleFeatureType schema = inVector.getSchema();
CoordinateReferenceSystem crs = schema.getCoordinateReferenceSystem();
GridGeometry2D pGrid;
if (inRaster != null) {
pGrid = inRaster.getGridGeometry();
} else {
pGrid = gridGeometryFromRegionValues(pNorth, pSouth, pEast, pWest, pCols, pRows, crs);
}
if (outWR == null) {
paramsMap = gridGeometry2RegionParamsMap(pGrid);
height = paramsMap.getRows();
width = paramsMap.getCols();
xRes = paramsMap.getXres();
outWR = CoverageUtilities.createWritableRaster(width, height, null, null, doubleNovalue);
}
GeometryDescriptor geometryDescriptor = schema.getGeometryDescriptor();
if (EGeometryType.isPoint(geometryDescriptor)) {
throw new ModelsRuntimeException("Not implemented yet for points", this.getClass().getSimpleName());
} else if (EGeometryType.isLine(geometryDescriptor)) {
throw new ModelsRuntimeException("Not implemented yet for lines", this.getClass().getSimpleName());
} else if (EGeometryType.isPolygon(geometryDescriptor)) {
if (pUsePointInPolygon) {
if (inRaster == null) {
throw new ModelsIllegalargumentException("The point in polygon mode needs an input raster to work on.", this);
}
pm.beginTask("Prepare input data...", IHMProgressMonitor.UNKNOWN);
List<Geometry> allGeoms = FeatureUtilities.featureCollectionToGeometriesList(inVector, false, null);
Geometry allGeomsUnion = CascadedPolygonUnion.union(allGeoms);
PreparedGeometry preparedGeometry = PreparedGeometryFactory.prepare(allGeomsUnion);
pm.done();
double value = pValue;
pm.beginTask("Rasterizing...", height);
WritableRandomIter wIter = CoverageUtilities.getWritableRandomIterator(outWR);
for (int row = 0; row < height; row++) {
for (int col = 0; col < width; col++) {
Coordinate coord = CoverageUtilities.coordinateFromColRow(col, row, pGrid);
if (preparedGeometry.intersects(gf.createPoint(coord))) {
wIter.setSample(col, row, 0, value);
}
}
pm.worked(1);
}
pm.done();
wIter.done();
} else {
rasterizepolygon(pGrid);
}
} else {
throw new ModelsIllegalargumentException("Couldn't recognize the geometry type of the file.", this.getClass().getSimpleName(), pm);
}
outRaster = CoverageUtilities.buildCoverage("rasterized", outWR, paramsMap, inVector.getSchema().getCoordinateReferenceSystem());
}
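For completeness, here is a hedged sketch of driving OmsScanLineRasterizer directly with an explicit region, mirroring the wiring used by OmsRasterVectorIntersector above; the bounds, rows and columns are placeholders and the input layer is assumed to be loaded elsewhere.
// Hypothetical usage sketch: burn a polygon layer into a new raster.
SimpleFeatureCollection polygons = null; // placeholder: polygon layer loaded elsewhere
OmsScanLineRasterizer rasterizer = new OmsScanLineRasterizer();
rasterizer.inVector = polygons; // only polygons are handled; points and lines throw ModelsRuntimeException
rasterizer.pValue = 1.0; // constant burn-in value (alternatively set fCat to rasterize an attribute)
rasterizer.pNorth = 5010000.0; // placeholder region bounds
rasterizer.pSouth = 5000000.0;
rasterizer.pWest = 680000.0;
rasterizer.pEast = 690000.0;
rasterizer.pRows = 1000; // placeholder grid size
rasterizer.pCols = 1000;
rasterizer.process();
GridCoverage2D rasterized = rasterizer.outRaster;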
Use of org.hortonmachine.gears.libs.exceptions.ModelsRuntimeException in project hortonmachine by TheHortonMachine.
The class OmsKrigingRasterMode, method executeKriging.
/**
* Executing ordinary kriging.
* <p>
* <li>Verify that the parameters are correct.
* <li>Calculate the covariance matrix (a).
* <li>For each point to be interpolated, evaluate the known-term vector (b)
* and solve the system (a x) = b, where x is the vector of weights.
* </p>
*
* @throws Exception the exception
*/
@Execute
public void executeKriging() throws Exception {
inInterpolationGrid = inGridCoverage2D.getGridGeometry();
verifyInput();
demWR = mapsTransform(inGridCoverage2D);
LinkedHashMap<Integer, Coordinate> pointsToInterpolateId2Coordinates = null;
pointsToInterpolateId2Coordinates = getCoordinate(inInterpolationGrid);
Set<Integer> pointsToInterpolateIdSet = pointsToInterpolateId2Coordinates.keySet();
Iterator<Integer> idIterator = pointsToInterpolateIdSet.iterator();
int j = 0;
double[] result = new double[pointsToInterpolateId2Coordinates.size()];
int[] idArray = new int[pointsToInterpolateId2Coordinates.size()];
final DirectPosition gridPoint = new DirectPosition2D();
MathTransform transf = inInterpolationGrid.getCRSToGrid2D();
while (idIterator.hasNext()) {
double sum = 0.;
id = idIterator.next();
idArray[j] = id;
Coordinate coordinate = (Coordinate) pointsToInterpolateId2Coordinates.get(id);
DirectPosition point = new DirectPosition2D(inInterpolationGrid.getCoordinateReferenceSystem(), coordinate.x, coordinate.y);
transf.transform(point, gridPoint);
double[] gridCoord = gridPoint.getCoordinate();
int x = (int) gridCoord[0];
int y = (int) gridCoord[1];
/**
* StationsSelection is an external class that allows the
* selection of the stations involved in the study.
* It is possible to define whether to include stations with zero values,
* stations in a defined neighborhood, or stations within a maximum
* distance from the considered point.
*/
StationsSelection stations = new StationsSelection();
stations.idx = coordinate.x;
stations.idy = coordinate.y;
stations.inStations = inStations;
stations.inData = inData;
stations.doIncludezero = doIncludezero;
stations.maxdist = maxdist;
stations.inNumCloserStations = inNumCloserStations;
stations.fStationsid = fStationsid;
stations.fStationsZ = fStationsZ;
stations.execute();
double[] xStations = stations.xStationInitialSet;
double[] yStations = stations.yStationInitialSet;
double[] zStations = stations.zStationInitialSet;
double[] hStations = stations.hStationInitialSet;
boolean areAllEquals = stations.areAllEquals;
int n1 = xStations.length - 1;
xStations[n1] = coordinate.x;
yStations[n1] = coordinate.y;
zStations[n1] = demWR.getSample(x, y, 0);
double[] hresiduals = hStations;
if (doDetrended) {
doDetrended = zStations[n1] >= 0;
}
if (doDetrended) {
RegressionLine t = new PolyTrendLine(regressionOrder);
t.setValues(zStations, hStations);
double[] regressionParameters = t.getRegressionParameters();
trend_intercept = regressionParameters[0];
trend_coefficient = regressionParameters[1];
hresiduals = t.getResiduals();
// Regression r = new Regression();
//
// r = new Regression(zStations, hStations);
// r.polynomial(regressionOrder);
//
// /*If there is a trend for meteorological
// * variables and elevation and it is statistically significant
// * then the residuals from this linear trend
// * are computed for each meteorological stations.
// */
// // if (Math.abs(r.getXYcorrCoeff()) > thresholdCorrelation) {
//
// trend_intercept = r.getBestEstimates()[0];
// trend_coefficient = r.getBestEstimates()[1];
// hresiduals = r.getResiduals();
//
// // } else {
// // System.out.println("The trend is not significant");
// // doDetrended=false;
// // hresiduals=hStations;
//
// // }
}
if (n1 != 0) {
if (!areAllEquals && n1 > 1) {
pm.beginTask(msg.message("kriging.working"), pointsToInterpolateId2Coordinates.size());
double h0 = 0.0;
/*
* calculating the covariance matrix.
*/
double[][] covarianceMatrix = covMatrixCalculating(xStations, yStations, zStations, n1);
double[] knownTerm = knownTermsCalculation(xStations, yStations, zStations, n1);
/*
* solve the linear system, where the result is the weight (moltiplicativeFactor).
*/
ColumnVector solution = SimpleLinearSystemSolverFactory.solve(knownTerm, covarianceMatrix, linearSystemSolverType);
double[] moltiplicativeFactor = solution.copyValues1D();
for (int k = 0; k < n1; k++) {
h0 = h0 + moltiplicativeFactor[k] * hresiduals[k];
// sum is computed to check that
// the sum of all the weights is 1
sum = sum + moltiplicativeFactor[k];
}
double trend = (doDetrended) ? zStations[n1] * trend_coefficient + trend_intercept : 0;
h0 = h0 + trend;
if (zStations[n1] < 0) {
result[j] = HMConstants.doubleNovalue;
} else {
result[j] = h0;
}
j++;
if (Math.abs(sum - 1) >= TOLL) {
throw new ModelsRuntimeException("Error in the coffeicients calculation", this.getClass().getSimpleName());
}
pm.worked(1);
} else if (n1 == 1 || areAllEquals) {
double tmp = hresiduals[0];
pm.message(msg.message("kriging.setequalsvalue"));
pm.beginTask(msg.message("kriging.working"), pointsToInterpolateId2Coordinates.size());
if (zStations[n1] < 0) {
result[j] = HMConstants.doubleNovalue;
} else {
result[j] = tmp;
}
j++;
n1 = 0;
pm.worked(1);
}
pm.done();
} else {
pm.errorMessage("No value for this time step");
double[] value = inData.values().iterator().next();
if (zStations[n1] < 0) {
result[j] = HMConstants.doubleNovalue;
} else {
result[j] = value[0];
}
j++;
}
}
storeResult(result, pointsToInterpolateId2Coordinates);
}
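To make the javadoc above concrete, the relations the loop implements can be written out. This is only a sketch of the assumed structure: the exact entries built by covMatrixCalculating and knownTermsCalculation are not visible in this excerpt.
% Assumed structure of the ordinary-kriging step coded above: A is the
% covariance matrix, b the known-term vector, lambda the weights returned in
% moltiplicativeFactor, h_k the (possibly detrended) station values in
% hresiduals, and t(z) the elevation trend added back when doDetrended is set.
\[
  A\,\lambda = b, \qquad
  \hat{h}(x_0) = \sum_{k=1}^{n} \lambda_k\, h_k + t(z_0), \qquad
  \sum_{k=1}^{n} \lambda_k = 1
\]
\[
  t(z_0) =
  \begin{cases}
    \mathrm{trend\_coefficient} \cdot z_0 + \mathrm{trend\_intercept} & \text{if doDetrended} \\
    0 & \text{otherwise}
  \end{cases}
\]
Here n corresponds to n1 in the code, and the last relation is what the |sum - 1| < TOLL check verifies after the system is solved.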
Use of org.hortonmachine.gears.libs.exceptions.ModelsRuntimeException in project hortonmachine by TheHortonMachine.
The class OmsKrigingVectorMode, method executeKriging.
/**
* Executing ordinary kriging.
* <p>
* <li>Verify that the parameters are correct.
* <li>Calculate the covariance matrix (a).
* <li>For each point to be interpolated, evaluate the known-term vector (b)
* and solve the system (a x) = b, where x is the vector of weights.
* </p>
*
* @throws Exception the exception
*/
@Execute
public void executeKriging() throws Exception {
verifyInput();
LinkedHashMap<Integer, Coordinate> pointsToInterpolateId2Coordinates = null;
pointsToInterpolateId2Coordinates = getCoordinate(0, inInterpolate, fInterpolateid);
Set<Integer> pointsToInterpolateIdSet = pointsToInterpolateId2Coordinates.keySet();
Iterator<Integer> idIterator = pointsToInterpolateIdSet.iterator();
int j = 0;
double[] result = new double[pointsToInterpolateId2Coordinates.size()];
int[] idArray = new int[pointsToInterpolateId2Coordinates.size()];
while (idIterator.hasNext()) {
double sum = 0.;
id = idIterator.next();
idArray[j] = id;
Coordinate coordinate = (Coordinate) pointsToInterpolateId2Coordinates.get(id);
/**
* StationsSelection is an external class that allows the
* selection of the stations involved in the study.
* It is possible to define whether to include stations with zero values,
* stations in a defined neighborhood, or stations within a maximum
* distance from the considered point.
*/
StationsSelection stations = new StationsSelection();
stations.idx = coordinate.x;
stations.idy = coordinate.y;
stations.inStations = inStations;
stations.inData = inData;
stations.doIncludezero = doIncludezero;
stations.maxdist = maxdist;
stations.inNumCloserStations = inNumCloserStations;
stations.fStationsid = fStationsid;
stations.fStationsZ = fStationsZ;
stations.execute();
double[] xStations = stations.xStationInitialSet;
double[] yStations = stations.yStationInitialSet;
double[] zStations = stations.zStationInitialSet;
double[] hStations = stations.hStationInitialSet;
boolean areAllEquals = stations.areAllEquals;
int n1 = xStations.length - 1;
xStations[n1] = coordinate.x;
yStations[n1] = coordinate.y;
zStations[n1] = coordinate.getZ();
double[] hresiduals = hStations;
if (doDetrended) {
RegressionLine t = new PolyTrendLine(regressionOrder);
t.setValues(zStations, hStations);
double[] regressionParameters = t.getRegressionParameters();
trend_intercept = regressionParameters[0];
trend_coefficient = regressionParameters[1];
hresiduals = t.getResiduals();
// Regression r = new Regression();
//
// r = new Regression(zStations, hStations);
// r.polynomial(regressionOrder);
//
// /*If there is a trend for meteorological
// * variables and elevation and it is statistically significant
// * then the residuals from this linear trend
// * are computed for each meteorological stations.
// */
//
// trend_intercept = r.getBestEstimates()[0];
// trend_coefficient = r.getBestEstimates()[1];
// hresiduals = r.getResiduals();
}
if (n1 != 0) {
if (!areAllEquals && n1 > 1) {
pm.beginTask(msg.message("kriging.working"), pointsToInterpolateId2Coordinates.size());
double h0 = 0.0;
/*
* calculating the covariance matrix.
*/
double[][] covarianceMatrix = covMatrixCalculating(xStations, yStations, zStations, n1);
double[] knownTerm = knownTermsCalculation(xStations, yStations, zStations, n1);
/*
* solve the linear system, where the result is the weight (moltiplicativeFactor).
*/
ColumnVector solution = SimpleLinearSystemSolverFactory.solve(knownTerm, covarianceMatrix, linearSystemSolverType);
double[] moltiplicativeFactor = solution.copyValues1D();
for (int k = 0; k < n1; k++) {
h0 = h0 + moltiplicativeFactor[k] * hresiduals[k];
// sum is computed to check that
// the sum of all the weights is 1
sum = sum + moltiplicativeFactor[k];
}
double trend = (doDetrended) ? coordinate.getZ() * trend_coefficient + trend_intercept : 0;
h0 = h0 + trend;
result[j] = h0;
j++;
if (Math.abs(sum - 1) >= TOLL) {
throw new ModelsRuntimeException("Error in the coffeicients calculation", this.getClass().getSimpleName());
}
pm.worked(1);
} else if (n1 == 1 || areAllEquals) {
double tmp = hresiduals[0];
pm.message(msg.message("kriging.setequalsvalue"));
pm.beginTask(msg.message("kriging.working"), pointsToInterpolateId2Coordinates.size());
result[j] = tmp;
j++;
n1 = 0;
pm.worked(1);
}
pm.done();
} else {
pm.errorMessage("No value for this time step");
double[] value = inData.values().iterator().next();
result[j] = value[0];
j++;
}
}
storeResult(result, idArray);
}
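A hypothetical wiring sketch for the vector mode follows. The field names are taken from executeKriging() above; the types, attribute names and values are assumptions, and output retrieval is not part of this excerpt (results leave the method through storeResult(result, idArray)).
// Hypothetical usage sketch of OmsKrigingVectorMode (types and values assumed).
SimpleFeatureCollection stationsFC = null; // placeholder: station layer
SimpleFeatureCollection pointsToInterpolateFC = null; // placeholder: target points
HashMap<Integer, double[]> measuredData = null; // placeholder (assumed type): station id -> measured value(s) for the time step
OmsKrigingVectorMode kriging = new OmsKrigingVectorMode();
kriging.inStations = stationsFC;
kriging.fStationsid = "ID"; // placeholder attribute names
kriging.fStationsZ = "ELEVATION";
kriging.inData = measuredData;
kriging.inInterpolate = pointsToInterpolateFC;
kriging.fInterpolateid = "ID";
kriging.doDetrended = true; // remove an elevation trend of order regressionOrder before kriging
kriging.regressionOrder = 1;
kriging.doIncludezero = false; // station-selection options forwarded to StationsSelection
kriging.maxdist = 10000.0;
kriging.inNumCloserStations = 10;
kriging.executeKriging();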
Use of org.hortonmachine.gears.libs.exceptions.ModelsRuntimeException in project hortonmachine by TheHortonMachine.
The class OmsGeopaparazzi3Converter, method process.
@Execute
public void process() throws IOException {
checkNull(inGeopaparazzi);
if (!hasDriver) {
throw new ModelsIllegalargumentException("Can't find any sqlite driver. Check your settings.", this, pm);
}
File geopapFolderFile = new File(inGeopaparazzi);
File geopapDatabaseFile = new File(geopapFolderFile, "geopaparazzi.db");
if (!geopapDatabaseFile.exists()) {
geopapDatabaseFile = new File(geopapFolderFile, "geopaparazzi3.db");
if (!geopapDatabaseFile.exists()) {
throw new ModelsIllegalargumentException("The geopaparazzi database file (geopaparazzi.db) is missing. Check the inserted path.", this, pm);
}
}
File outputFolderFile = new File(outData);
try (Connection connection = DriverManager.getConnection("jdbc:sqlite:" + geopapDatabaseFile.getAbsolutePath())) {
if (geopapDatabaseFile.exists()) {
/*
* import notes as shapefile
*/
if (doNotes) {
simpleNotesToShapefile(connection, outputFolderFile, pm);
complexNotesToShapefile(connection, outputFolderFile, pm);
}
/*
* import gps logs as shapefiles, once as lines and once as points
*/
gpsLogToShapefiles(connection, outputFolderFile, pm);
}
/*
* import media as point shapefile, containing the path
*/
mediaToShapeFile(geopapFolderFile, outputFolderFile, pm);
} catch (Exception e) {
throw new ModelsRuntimeException("An error occurred while importing from geopaparazzi: " + e.getLocalizedMessage(), this);
}
}
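Finally, a short hypothetical usage sketch of the converter; the field names (inGeopaparazzi, outData, doNotes) appear in the method above, while the paths are placeholders.
// Hypothetical usage sketch: export a Geopaparazzi 3 project to shapefiles.
OmsGeopaparazzi3Converter converter = new OmsGeopaparazzi3Converter();
converter.inGeopaparazzi = "/path/to/geopaparazzi/projectfolder"; // placeholder; must contain geopaparazzi.db or geopaparazzi3.db
converter.outData = "/path/to/output/folder"; // placeholder; notes, gps logs and media are written here as shapefiles
converter.doNotes = true; // also export simple and complex notes
converter.process();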