Use of org.opengis.coverage.grid.GridCoverage in project georchestra by georchestra.
The class WcsCoverageReader, method convertToGeotiff.
private void convertToGeotiff(File tmpFile, final File file) throws IOException {
    CoverageTransformation<Object> transformation = new CoverageTransformation<Object>() {

        @Override
        public Object transform(GridCoverage coverage) throws IOException {
            GeoTiffWriter writer = new GeoTiffWriter(file);
            GeneralParameterValue[] params = new GeneralParameterValue[0];
            writer.write(coverage, params);
            return null;
        }
    };
    CoverageTransformation.perform(tmpFile, transformation);
}
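For reference, here is a minimal standalone sketch of the same GeoTIFF write, assuming GeoTools' gt-geotiff module is on the classpath. The file paths are hypothetical, and unlike the snippet above it disposes the reader and writer, which releases the underlying file handles.

import java.io.File;
import java.io.IOException;

import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.gce.geotiff.GeoTiffReader;
import org.geotools.gce.geotiff.GeoTiffWriter;

public class GeoTiffRoundTrip {

    public static void main(String[] args) throws IOException {
        // Hypothetical paths; substitute real coverage files.
        File source = new File("/tmp/input.tif");
        File dest = new File("/tmp/output.tif");
        GeoTiffReader reader = new GeoTiffReader(source);
        GridCoverage2D coverage = reader.read(null);
        GeoTiffWriter writer = new GeoTiffWriter(dest);
        try {
            // Same no-parameter write as convertToGeotiff above.
            writer.write(coverage, null);
        } finally {
            // The snippet above never disposes its writer; doing so here
            // frees file handles and flushes any buffered output.
            writer.dispose();
            reader.dispose();
        }
    }
}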
Use of org.opengis.coverage.grid.GridCoverage in project georchestra by georchestra.
The class WcsCoverageReader, method geotoolsTranformation.
private void geotoolsTranformation(final File sourceFile, final File file, final WcsReaderRequest request,
        final CoordinateReferenceSystem original) throws IOException {
    LOG.info("using Geotools libraries to transform the coverage");
    CoverageTransformation<Object> transformation = new CoverageTransformation<Object>() {

        @Override
        public Object transform(final GridCoverage coverage) throws IOException, FactoryException {
            boolean writeToTmp = sourceFile.equals(file);
            Hints hints = new Hints(GeoTools.getDefaultHints());
            hints.put(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE);
            GeoTools.init(hints);
            Coverage transformed = Operations.DEFAULT.resample(coverage, original);
            AbstractGridFormat format = Formats.getFormat(request.format);
            if (writeToTmp) {
                File tmpDir = FileUtils.createTempDirectory();
                try {
                    File tmpFile = new File(tmpDir, file.getName());
                    // write must be to tmpFile because Geotools does not always
                    // load coverages into memory but reads off disk
                    GridCoverageWriter writer = format.getWriter(tmpFile);
                    file.delete();
                    ParameterValue<String> formatParam = FORMAT.createValue();
                    formatParam.setValue(request.format);
                    writer.write((GridCoverage) transformed, new GeneralParameterValue[] { formatParam });
                    // so move all files in the tmpDir
                    for (File f : tmpDir.listFiles()) {
                        File dest = new File(file.getParentFile(), f.getName());
                        FileUtils.moveFile(f, dest);
                    }
                } finally {
                    FileUtils.delete(tmpDir);
                }
            } else {
                GridCoverageWriter writer = format.getWriter(file);
                writer.write((GridCoverage) transformed, null);
            }
            LOG.debug("Finished reprojecting output");
            return null;
        }
    };
    CoverageTransformation.perform(sourceFile, transformation);
}
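The heart of this method is the Operations.DEFAULT.resample call, which reprojects the coverage to the target CRS. A minimal sketch of that call in isolation, assuming gt-geotiff plus an EPSG factory such as gt-epsg-hsql on the classpath; the input path and target CRS code are hypothetical:

import java.io.File;
import java.io.IOException;

import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.processing.Operations;
import org.geotools.gce.geotiff.GeoTiffReader;
import org.geotools.referencing.CRS;
import org.opengis.coverage.Coverage;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

public class ResampleSketch {

    public static void main(String[] args) throws IOException, FactoryException {
        // Hypothetical input file and target CRS.
        GeoTiffReader reader = new GeoTiffReader(new File("/tmp/input.tif"));
        GridCoverage2D coverage = reader.read(null);
        CoordinateReferenceSystem targetCrs = CRS.decode("EPSG:3857");
        // Same call transform() above uses to reproject the coverage.
        Coverage reprojected = Operations.DEFAULT.resample(coverage, targetCrs);
        System.out.println("Reprojected envelope: " + reprojected.getEnvelope());
        reader.dispose();
    }
}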
Use of org.opengis.coverage.grid.GridCoverage in project geowave by locationtech.
The class RasterTileResizeSparkRunner, method run.
public void run() throws IOException {
    initContext();
    // Validate inputs
    if (inputStoreOptions == null) {
        LOGGER.error("You must supply an input datastore!");
        throw new IOException("You must supply an input datastore!");
    }
    final InternalAdapterStore internalAdapterStore = inputStoreOptions.createInternalAdapterStore();
    final short internalAdapterId = internalAdapterStore.getAdapterId(rasterResizeOptions.getInputCoverageName());
    final DataTypeAdapter adapter = inputStoreOptions.createAdapterStore().getAdapter(internalAdapterId).getAdapter();
    if (adapter == null) {
        throw new IllegalArgumentException("Adapter for coverage '" + rasterResizeOptions.getInputCoverageName()
                + "' does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'");
    }
    Index index = null;
    final IndexStore indexStore = inputStoreOptions.createIndexStore();
    if (rasterResizeOptions.getIndexName() != null) {
        index = indexStore.getIndex(rasterResizeOptions.getIndexName());
    }
    if (index == null) {
        try (CloseableIterator<Index> indices = indexStore.getIndices()) {
            index = indices.next();
        }
        if (index == null) {
            throw new IllegalArgumentException(
                    "Index does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'");
        }
    }
    final RasterDataAdapter newAdapter = new RasterDataAdapter((RasterDataAdapter) adapter,
            rasterResizeOptions.getOutputCoverageName(), rasterResizeOptions.getOutputTileSize());
    final DataStore store = outputStoreOptions.createDataStore();
    store.addType(newAdapter, index);
    final short newInternalAdapterId =
            outputStoreOptions.createInternalAdapterStore().addTypeName(newAdapter.getTypeName());
    final RDDOptions options = new RDDOptions();
    if (rasterResizeOptions.getMinSplits() != null) {
        options.setMinSplits(rasterResizeOptions.getMinSplits());
    }
    if (rasterResizeOptions.getMaxSplits() != null) {
        options.setMaxSplits(rasterResizeOptions.getMaxSplits());
    }
    final JavaPairRDD<GeoWaveInputKey, GridCoverage> inputRDD = GeoWaveRDDLoader.loadRawRasterRDD(jsc.sc(),
            inputStoreOptions, index.getName(), rasterResizeOptions.getMinSplits(),
            rasterResizeOptions.getMaxSplits());
    LOGGER.debug("Writing results to output store...");
    RDDUtils.writeRasterToGeoWave(jsc.sc(), index, outputStoreOptions, newAdapter,
            inputRDD.flatMapToPair(
                    new RasterResizeMappingFunction(internalAdapterId, newInternalAdapterId, newAdapter, index))
                    .groupByKey()
                    .map(new MergeRasterFunction(internalAdapterId, newInternalAdapterId, newAdapter, index)));
    LOGGER.debug("Results successfully written!");
}
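One detail worth noting: the RDDOptions instance built in run() is never actually passed to loadRawRasterRDD here; the split bounds go in directly as arguments. The null-guarded setter pattern itself is reusable, and a small hypothetical helper packaging it might look like the following, assuming only the RDDOptions setters shown in this section (the import path is my assumption and varies across GeoWave versions):

import org.locationtech.geowave.analytic.spark.RDDOptions;

public class SplitOptions {

    // Hypothetical helper mirroring the null-guarded setters in run() above:
    // only apply a bound when the caller actually supplied one.
    public static RDDOptions withSplits(Integer minSplits, Integer maxSplits) {
        final RDDOptions options = new RDDOptions();
        if (minSplits != null) {
            options.setMinSplits(minSplits);
        }
        if (maxSplits != null) {
            options.setMaxSplits(maxSplits);
        }
        return options;
    }
}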
Use of org.opengis.coverage.grid.GridCoverage in project geowave by locationtech.
The class GeoWaveRDDLoader, method loadRawRasterRDD.
public static JavaPairRDD<GeoWaveInputKey, GridCoverage> loadRawRasterRDD(final SparkContext sc,
        final DataStorePluginOptions storeOptions, final String indexName, final Integer minSplits,
        final Integer maxSplits) throws IOException {
    if (sc == null) {
        LOGGER.error("Must supply a valid Spark Context. Please set SparkContext and try again.");
        return null;
    }
    if (storeOptions == null) {
        LOGGER.error("Must supply input store to load. Please set storeOptions and try again.");
        return null;
    }
    final Configuration conf = new Configuration(sc.hadoopConfiguration());
    GeoWaveInputFormat.setStoreOptions(conf, storeOptions);
    if (indexName != null) {
        GeoWaveInputFormat.setQuery(conf, QueryBuilder.newBuilder().indexName(indexName).build(),
                storeOptions.createAdapterStore(), storeOptions.createInternalAdapterStore(),
                storeOptions.createIndexStore());
    }
    if (((minSplits != null) && (minSplits > -1)) || ((maxSplits != null) && (maxSplits > -1))) {
        GeoWaveInputFormat.setMinimumSplitCount(conf, minSplits);
        GeoWaveInputFormat.setMaximumSplitCount(conf, maxSplits);
    } else {
        final int defaultSplitsSpark = sc.getConf().getInt("spark.default.parallelism", -1);
        // Otherwise just fallback to default according to index strategy
        if (defaultSplitsSpark != -1) {
            GeoWaveInputFormat.setMinimumSplitCount(conf, defaultSplitsSpark);
            GeoWaveInputFormat.setMaximumSplitCount(conf, defaultSplitsSpark);
        }
    }
    final RDD<Tuple2<GeoWaveInputKey, GridCoverage>> rdd =
            sc.newAPIHadoopRDD(conf, GeoWaveInputFormat.class, GeoWaveInputKey.class, GridCoverage.class);
    final JavaPairRDD<GeoWaveInputKey, GridCoverage> javaRdd = JavaPairRDD.fromJavaRDD(rdd.toJavaRDD());
    return javaRdd;
}
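A hedged usage sketch for loadRawRasterRDD, counting the raster tiles in a store. The local-mode Spark configuration and the helper method are my own framing, and the GeoWave import paths vary across versions; storeOptions must point at a populated GeoWave store and indexName at one of its raster indices:

import java.io.IOException;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;
// Import path differs between GeoWave versions; adjust to your release.
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.opengis.coverage.grid.GridCoverage;

public class RasterTileCount {

    // Hypothetical driver; passing null for both split bounds takes the
    // spark.default.parallelism fallback branch of loadRawRasterRDD.
    static long countTiles(DataStorePluginOptions storeOptions, String indexName) throws IOException {
        SparkConf conf = new SparkConf().setAppName("raster-count").setMaster("local[*]");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            JavaPairRDD<GeoWaveInputKey, GridCoverage> rasters =
                    GeoWaveRDDLoader.loadRawRasterRDD(jsc.sc(), storeOptions, indexName, null, null);
            return rasters.count();
        }
    }
}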
Use of org.opengis.coverage.grid.GridCoverage in project geowave by locationtech.
The class KDERunner, method run.
public void run() throws IOException {
    initContext();
    // Validate inputs
    if (inputDataStore == null) {
        LOGGER.error("You must supply an input datastore!");
        throw new IOException("You must supply an input datastore!");
    }
    // Retrieve the feature adapters
    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    List<String> featureTypeNames;
    // If provided, just use the one
    if (typeName != null) {
        featureTypeNames = new ArrayList<>();
        featureTypeNames.add(typeName);
    } else {
        // otherwise, grab all the feature adapters
        featureTypeNames = FeatureDataUtils.getFeatureTypeNames(inputDataStore);
    }
    bldr.setTypeNames(featureTypeNames.toArray(new String[0]));
    if (indexName != null) {
        bldr.indexName(indexName);
    }
    Index inputPrimaryIndex = null;
    final Index[] idxArray = inputDataStore.createDataStore().getIndices();
    for (final Index idx : idxArray) {
        if ((idx != null) && ((indexName == null) || indexName.equals(idx.getName()))) {
            inputPrimaryIndex = idx;
            break;
        }
    }
    final CoordinateReferenceSystem inputIndexCrs = GeometryUtils.getIndexCrs(inputPrimaryIndex);
    final String inputCrsCode = GeometryUtils.getCrsCode(inputIndexCrs);
    Index outputPrimaryIndex = outputIndex;
    CoordinateReferenceSystem outputIndexCrs = null;
    final String outputCrsCode;
    if (outputPrimaryIndex != null) {
        outputIndexCrs = GeometryUtils.getIndexCrs(outputPrimaryIndex);
        outputCrsCode = GeometryUtils.getCrsCode(outputIndexCrs);
    } else {
        final SpatialDimensionalityTypeProvider sdp = new SpatialDimensionalityTypeProvider();
        final SpatialOptions so = sdp.createOptions();
        so.setCrs(inputCrsCode);
        outputPrimaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so);
        outputIndexCrs = inputIndexCrs;
        outputCrsCode = inputCrsCode;
    }
    final CoordinateSystem cs = outputIndexCrs.getCoordinateSystem();
    final CoordinateSystemAxis csx = cs.getAxis(0);
    final CoordinateSystemAxis csy = cs.getAxis(1);
    final double xMax = csx.getMaximumValue();
    final double xMin = csx.getMinimumValue();
    final double yMax = csy.getMaximumValue();
    final double yMin = csy.getMinimumValue();
    if ((xMax == Double.POSITIVE_INFINITY) || (xMin == Double.NEGATIVE_INFINITY)
            || (yMax == Double.POSITIVE_INFINITY) || (yMin == Double.NEGATIVE_INFINITY)) {
        LOGGER.error("Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported");
        throw new RuntimeException("Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported");
    }
    if (cqlFilter != null) {
        bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlFilter));
    }
    // Load RDD from datastore
    final RDDOptions kdeOpts = new RDDOptions();
    kdeOpts.setMinSplits(minSplits);
    kdeOpts.setMaxSplits(maxSplits);
    kdeOpts.setQuery(bldr.build());
    final Function<Double, Double> identity = x -> x;
    final Function2<Double, Double, Double> sum = (final Double x, final Double y) -> {
        return x + y;
    };
    final RasterDataAdapter adapter = RasterUtils.createDataAdapterTypeDouble(coverageName, KDEReducer.NUM_BANDS,
            tileSize, MINS_PER_BAND, MAXES_PER_BAND, NAME_PER_BAND, new NoDataMergeStrategy());
    outputDataStore.createDataStore().addType(adapter, outputPrimaryIndex);
    // The following "inner" variables are created to give access to member
    // variables within lambda expressions
    // tileSize;
    final int innerTileSize = 1;
    final String innerCoverageName = coverageName;
    for (int level = minLevel; level <= maxLevel; level++) {
        final int numXTiles = (int) Math.pow(2, level + 1);
        final int numYTiles = (int) Math.pow(2, level);
        // * tileSize;
        final int numXPosts = numXTiles;
        // * tileSize;
        final int numYPosts = numYTiles;
        final GeoWaveRDD kdeRDD = GeoWaveRDDLoader.loadRDD(session.sparkContext(), inputDataStore, kdeOpts);
        JavaPairRDD<Double, Long> cells = kdeRDD.getRawRDD()
                .flatMapToPair(new GeoWaveCellMapper(numXPosts, numYPosts, xMin, xMax, yMin, yMax, inputCrsCode,
                        outputCrsCode))
                .combineByKey(identity, sum, sum)
                .mapToPair(item -> item.swap());
        cells = cells.partitionBy(new RangePartitioner(cells.getNumPartitions(), cells.rdd(), true,
                scala.math.Ordering.Double$.MODULE$, scala.reflect.ClassTag$.MODULE$.apply(Double.class)))
                .sortByKey(false)
                .cache();
        final long count = cells.count();
        if (count == 0) {
            LOGGER.warn("No cells produced by KDE");
            continue;
        }
        final double max = cells.first()._1;
        JavaRDD<GridCoverage> rdd = cells.zipWithIndex().map(t -> {
            final TileInfo tileInfo = fromCellIndexToTileInfo(t._1._2, numXPosts, numYPosts, numXTiles, numYTiles,
                    xMin, xMax, yMin, yMax, innerTileSize);
            final WritableRaster raster = RasterUtils.createRasterTypeDouble(NUM_BANDS, innerTileSize);
            final double normalizedValue = t._1._1 / max;
            // because we are using a Double as the key, the ordering
            // isn't always completely reproducible as Double equals does not
            // take into account an epsilon
            final double percentile = (count - t._2) / ((double) count);
            raster.setSample(tileInfo.x, tileInfo.y, 0, t._1._1);
            raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);
            raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);
            return RasterUtils.createCoverageTypeDouble(innerCoverageName, tileInfo.tileWestLon,
                    tileInfo.tileEastLon, tileInfo.tileSouthLat, tileInfo.tileNorthLat, MINS_PER_BAND,
                    MAXES_PER_BAND, NAME_PER_BAND, raster, GeometryUtils.DEFAULT_CRS_STR);
        });
        LOGGER.debug("Writing results to output store...");
        if (tileSize > 1) {
            // byte[] adapterBytes = PersistenceUtils.toBinary(adapter);
            // byte[] indexBytes = PersistenceUtils.toBinary(outputPrimaryIndex);
            rdd = rdd.flatMapToPair(new TransformTileSize(adapter, outputPrimaryIndex)).groupByKey()
                    .map(new MergeOverlappingTiles(adapter, outputPrimaryIndex));
        }
        RDDUtils.writeRasterToGeoWave(jsc.sc(), outputPrimaryIndex, outputDataStore, adapter, rdd);
        LOGGER.debug("Results successfully written!");
    }
}
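The per-level grid arithmetic above (numXTiles = 2^(level+1), numYTiles = 2^level, giving the 2:1 aspect of a global WGS84 tiling) is easy to sanity-check in isolation; a dependency-free sketch:

public class TileGridCheck {

    public static void main(String[] args) {
        for (int level = 0; level <= 4; level++) {
            // Same arithmetic as the KDE loop: twice as many columns as rows.
            final int numXTiles = (int) Math.pow(2, level + 1);
            final int numYTiles = (int) Math.pow(2, level);
            // level 0 -> 2x1, level 1 -> 4x2, level 2 -> 8x4, ...
            System.out.printf("level %d: %d x %d tiles (%d total)%n",
                    level, numXTiles, numYTiles, numXTiles * numYTiles);
        }
    }
}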