use of org.geotoolkit.process.ProcessException in project geotoolkit by Geomatys.
the class TiffImageReader method readLayer.
/**
* Reads the layer at the given index.
*
* @param layerIndex The index of the image to read.
* @param param Parameters used to control the reading process, or {@code null}.
* @return The image.
* @throws IOException If an error occurred while reading the image.
*/
private BufferedImage readLayer(final int layerIndex, final ImageReadParam param) throws IOException {
    selectLayer(layerIndex);
    // Copy the read param, excluding source and destination bands (not supported yet):
    // band selection is applied after reading.
    ImageReadParam sparam = param;
    if (param != null && (param.getSourceBands() != null || param.getDestinationBands() != null)) {
        sparam = new ImageReadParam();
        sparam.setController(param.getController());
        sparam.setDestinationOffset(param.getDestinationOffset());
        sparam.setDestinationType(param.getDestinationType());
        sparam.setSourceRegion(param.getSourceRegion());
        Dimension rs = param.getSourceRenderSize();
        if (rs != null)
            sparam.setSourceRenderSize(rs);
        sparam.setSourceSubsampling(param.getSourceXSubsampling(), param.getSourceYSubsampling(), param.getSubsamplingXOffset(), param.getSubsamplingYOffset());
        if (param.getDestination() != null) {
            throw new IOException("Destination image with source and destination band selection not supported");
        }
    }
    BufferedImage image = getDestination(sparam, getImageTypes(layerIndex), imageWidth, imageHeight);
    final WritableRaster raster = image.getRaster();
    if (raster.getDataBuffer().getDataType() != sourceDataBufferType) {
        throw new IllegalArgumentException("The destination image datatype doesn't match the source image datatype. " + "Expected datatype : " + sourceDataBufferType + ", found : " + image.getRaster().getDataBuffer().getDataType());
    }
    /*
     * Compute regions : adjust the source and destination rectangles according to the subsampling coefficients of the ImageReadParam.
     */
    final Rectangle srcRegion = new Rectangle();
    final Rectangle dstRegion = new Rectangle();
    // Compute the region of the image that must be read from the file.
    computeRegions(sparam, imageWidth, imageHeight, image, srcRegion, dstRegion);
    if (compression == 32773) {
        assert stripOffsets != null : "with compression 32773 (packbits) : image should be written using strip offsets.";
        readFromStrip32773(raster, sparam, srcRegion, dstRegion);
    } else if (compression == 5) {
        if (stripOffsets != null) {
            readFromStripLZW(raster, sparam, srcRegion, dstRegion);
        } else {
            assert tileOffsets != null;
            readFromTilesLZW(raster, sparam, srcRegion, dstRegion);
        }
    } else if (compression == 8) {
        if (stripOffsets != null) {
            readFromStripDeflate(raster, sparam, srcRegion, dstRegion);
        } else {
            assert tileOffsets != null;
            readFromTilesDeflate(raster, sparam, srcRegion, dstRegion);
        }
    } else {
        // -- by strips
        if (stripOffsets != null) {
            readFromStrip(raster, sparam, srcRegion, dstRegion);
        } else {
            // -- by tiles
            readFromTiles(raster, sparam, srcRegion, dstRegion);
        }
    }
    // Apply source/destination band selection.
    if (param != null) {
        // {@link TiffImageReader} does not support read params with source and destination bands set;
        // the band selection is applied afterward.
        int[] sourceBands = param.getSourceBands();
        int[] destinationBands = param.getDestinationBands();
        if (sourceBands != null || destinationBands != null) {
            final int nbBand = raster.getNumBands();
            // Set defaults if one parameter is null.
            if (sourceBands == null) {
                sourceBands = new int[nbBand];
                for (int i = 0; i < sourceBands.length; i++) sourceBands[i] = i;
            }
            if (destinationBands == null) {
                destinationBands = new int[sourceBands.length];
                for (int i = 0; i < destinationBands.length; i++) destinationBands[i] = i;
            }
            // Check whether the given source and destination bands actually differ from
            // the original image model.
            boolean changed = nbBand != samplesPerPixel || sourceBands.length != samplesPerPixel || destinationBands.length != samplesPerPixel;
            for (int i = 0; i < sourceBands.length; i++) {
                if (sourceBands[i] != i) {
                    changed = true;
                }
            }
            for (int i = 0; i < destinationBands.length; i++) {
                if (destinationBands[i] != i) {
                    changed = true;
                }
            }
            if (changed) {
                // Prepare the image band select process parameters.
                // Find the output size : the maximum destination band index.
                int outSize = 0;
                for (int i : destinationBands) outSize = Math.max(outSize, i);
                final int[] indexes = new int[outSize + 1];
                Arrays.fill(indexes, -1);
                for (int i = 0; i < destinationBands.length; i++) {
                    indexes[destinationBands[i]] = sourceBands[i];
                }
                try {
                    final ProcessDescriptor descriptor = ProcessFinder.getProcessDescriptor("geotoolkit", "image:bandselect");
                    final ParameterValueGroup inputs = descriptor.getInputDescriptor().createValue();
                    inputs.parameter("image").setValue(image);
                    inputs.parameter("bands").setValue(indexes);
                    ParameterValueGroup result = descriptor.createProcess(inputs).call();
                    image = (BufferedImage) result.parameter("result").getValue();
                } catch (NoSuchIdentifierException | ProcessException ex) {
                    throw new IOException(ex.getMessage(), ex);
                }
            }
        }
    }
    return image;
}
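The band-selection fallback above is an instance of the generic Geotk process-invocation pattern: look up a ProcessDescriptor by registry and process name, fill the input ParameterValueGroup it declares, run the process with call(), read the output parameter, and translate NoSuchIdentifierException or ProcessException into the caller's exception type. The sketch below isolates that pattern; it reuses the "geotoolkit"/"image:bandselect" names and the "image", "bands" and "result" parameter codes seen above, while the helper class, its method name and the assumed import locations are illustrative only and not part of the original reader.

import java.awt.image.BufferedImage;
import java.io.IOException;
import org.geotoolkit.process.ProcessDescriptor;
import org.geotoolkit.process.ProcessException;
import org.geotoolkit.process.ProcessFinder;
import org.opengis.parameter.ParameterValueGroup;
import org.opengis.util.NoSuchIdentifierException;

/** Hypothetical helper mirroring the band-selection step of readLayer. */
final class BandSelectExample {

    /**
     * Keeps only the given source bands of an image by delegating to the
     * "image:bandselect" process, as readLayer does after reading.
     */
    static BufferedImage selectBands(final BufferedImage source, final int[] bands) throws IOException {
        try {
            // Look up the process descriptor by registry name and process name.
            final ProcessDescriptor descriptor = ProcessFinder.getProcessDescriptor("geotoolkit", "image:bandselect");
            // Fill the input parameters declared by the descriptor.
            final ParameterValueGroup inputs = descriptor.getInputDescriptor().createValue();
            inputs.parameter("image").setValue(source);
            inputs.parameter("bands").setValue(bands);
            // Run the process and extract its "result" output parameter.
            final ParameterValueGroup result = descriptor.createProcess(inputs).call();
            return (BufferedImage) result.parameter("result").getValue();
        } catch (NoSuchIdentifierException | ProcessException ex) {
            // Unknown process name or process failure: surface it as an I/O error, like readLayer.
            throw new IOException(ex.getMessage(), ex);
        }
    }
}

A caller keeping the first three bands would then write, for example, BufferedImage rgb = BandSelectExample.selectBands(image, new int[] {0, 1, 2});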
use of org.geotoolkit.process.ProcessException in project geotoolkit by Geomatys.
the class NearestProcess method execute.
/**
* {@inheritDoc }
*/
@Override
protected void execute() throws ProcessException {
    try {
        final FeatureSet inputFeatureList = inputParameters.getValue(VectorDescriptor.FEATURESET_IN);
        final Geometry interGeom = inputParameters.getValue(NearestDescriptor.GEOMETRY_IN);
        final FeatureSet resultFeatureList = inputFeatureList.subset(nearestQuery(inputFeatureList, interGeom));
        outputParameters.getOrCreate(VectorDescriptor.FEATURESET_OUT).setValue(resultFeatureList);
    } catch (FactoryException | DataStoreException | TransformException ex) {
        throw new ProcessException(ex.getMessage(), this, ex);
    }
}
use of org.geotoolkit.process.ProcessException in project geotoolkit by Geomatys.
the class Predictor method initContext.
private PredictionContext initContext(final DirectPosition2D origin2d) throws ProcessException {
    // TODO : create point bucket / add origin point into bucket.
    final long timestep = inputParameters.getMandatoryValue(TIMESTEP);
    final int maxPts = inputParameters.getMandatoryValue(MAX_POINTS);
    try {
        final PredictionContext ctx = new PredictionContext(initGrid(origin2d), initWeights(), Duration.ofSeconds(timestep), maxPts);
        ctx.points.add(origin2d.getCoordinate(), 1);
        return ctx;
    } catch (NoninvertibleTransformException ex) {
        throw new ProcessException("Cannot initialize output grid", this, ex);
    }
}
use of org.geotoolkit.process.ProcessException in project geotoolkit by Geomatys.
the class Predictor method compute.
private List<Output> compute(final Instant startTime, final Instant endTime, final PredictionContext ctx, final MeteoDataset.TimeSet meteo) throws ProcessException {
    final double totalSeconds = startTime.until(endTime, ChronoUnit.SECONDS);
    boolean newDay = false;
    Instant nextDay = startTime.plus(1, ChronoUnit.DAYS);
    Instant stepTime = startTime;
    // TODO : We should think about a better way of managing output grid, because here we waste a lot of space.
    final double[] globalProba = new double[ctx.grid.width * ctx.grid.height];
    final double[] dayProba = new double[globalProba.length];
    final List<Output> outputs = new ArrayList<>();
    // When computing point drift, we'll add a point for each weight available. As we don't want this amount to grow
    // past a configured maximum, we have to purge available points before each computing pass.
    final int maxAllowedPoints = ctx.points.maxPts / (ctx.weights.length + 1);
    do {
        final long timePassed = startTime.until(stepTime, ChronoUnit.SECONDS);
        fireProgressing("Drifting: " + stepTime, (float) (timePassed / totalSeconds) * 100f, false);
        MeteoDataset.Snapshot snapshot = meteo.setTime(stepTime).map(calibration -> calibration.setHorizontalComponent(ctx.grid.model.getEnvelope())).orElse(null);
        if (snapshot == null)
            break;
        ctx.points.removeLeastProbable(maxAllowedPoints);
        final double[] stepProba = advance(ctx, snapshot);
        if (stepProba == null)
            break;
        // TODO : add abstraction here : we could reduce loop by iterating only over a rectangle where probabilities
        // have really been updated.
        IntStream.range(0, dayProba.length).parallel().forEach(i -> {
            dayProba[i] += stepProba[i];
            globalProba[i] += stepProba[i];
        });
        newDay = stepTime.isAfter(nextDay);
        if (newDay) {
            nextDay = nextDay.plus(1, ChronoUnit.DAYS);
            outputs.add(new Output(dayProba, ctx.grid.width, ctx.grid.height));
            Arrays.fill(dayProba, 0);
        }
    } while ((stepTime = ctx.step(stepTime)).isBefore(endTime));
    if (stepTime.equals(startTime)) {
        throw new ProcessException("No data available for time: " + stepTime, this);
    }
    if (!newDay) {
        outputs.add(new Output(dayProba, ctx.grid.width, ctx.grid.height));
    }
    outputs.add(new Output(globalProba, ctx.grid.width, ctx.grid.height));
    outputParameters.getOrCreate(ACTUAL_END_TIMESTAMP).setValue(stepTime.toEpochMilli());
    return outputs;
}
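To make the maxAllowedPoints purge above concrete: the comment states that each computing pass adds a point for every available weight, so the points kept before a pass are capped at maxPts / (weights.length + 1). With illustrative values of ctx.points.maxPts = 1000 and ctx.weights.length = 4 (numbers chosen here for the example, not taken from the source), removeLeastProbable keeps at most 1000 / (4 + 1) = 200 points; even if every kept point then spawns one new point per weight during advance, the bucket ends the pass with at most 200 * (4 + 1) = 1000 points, so it never exceeds the configured maximum.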
use of org.geotoolkit.process.ProcessException in project geotoolkit by Geomatys.
the class ClusterHullProcess method computeClusterHull.
/**
 * Computes the cluster hull of a feature set according to the tolerance measure.
 *
 * @param inputFeatureSet input feature set
 * @param tolerance minimum distance between two clusters, also used as buffer width
 * @param epsilon tolerance used to simplify/smooth geometries
 * @param unit unit of tolerance and epsilon
 * @return the cluster hull
 */
private FeatureSet computeClusterHull(final FeatureSet inputFeatureSet) throws ProcessException {
    try {
        // Extract the geometry set from the feature set and smooth the geometries with the Douglas-Peucker algorithm.
        extractAndFormat(inputFeatureSet);
        // Compute all distances between WorkingGeometry instances, keeping those under the tolerance distance.
        initRoadmap();
        // Apply the process.
        ApplyClusterHull(initCluster());
        // Build the resulting feature set.
        final FeatureType type = inputFeatureSet.getType();
        CoordinateReferenceSystem crs = FeatureExt.getCRS(type);
        return toFeatureSet(this.clusters, crs);
        // OutOfMemoryError is caught here because the input and the processing can use a lot of memory.
    } catch (DataStoreException | TransformException | OutOfMemoryError e) {
        throw new ProcessException(e.getMessage(), this, e);
    }
}