Usage example of org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand in the GeoWave project (locationtech): the GeoWaveGrpcAnalyticSparkService class, method kDESparkCommand.
@Override
public void kDESparkCommand(KDESparkCommandParametersProtos request, StreamObserver<VoidResponseProtos> responseObserver) {
  // Map the protobuf request fields onto a fresh KDESparkCommand instance.
  final KDESparkCommand cmd = new KDESparkCommand();
  final Map<FieldDescriptor, Object> requestFields = request.getAllFields();
  GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(requestFields, cmd);

  // Point the command at the server's GeoWave configuration file.
  final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;
  final OperationParams params = new ManualOperationParams();
  params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);

  LOGGER.info("Executing KDESparkCommand...");
  try {
    // prepare() is inside the try block so that a failure during command
    // preparation is reported to the client via onError() instead of
    // escaping the handler and leaving the RPC without a terminal signal.
    cmd.prepare(params);
    cmd.computeResults(params);
    final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();
    responseObserver.onNext(resp);
    responseObserver.onCompleted();
  } catch (final Exception e) {
    LOGGER.error("Exception encountered executing command", e);
    responseObserver.onError(e);
  }
}
Usage example of org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand in the GeoWave project (locationtech): the CustomCRSKDERasterResizeIT class, method testKDEAndRasterResize.
@Test
// Integration test: ingests vector data in a custom CRS (EPSG:4901), runs KDE via both
// MapReduce and Spark across a range of tile sizes, resizes the resulting rasters via
// both MapReduce and Spark, and verifies all variants produce matching sample values.
public void testKDEAndRasterResize() throws Exception {
// Start from a clean input store, then ingest the KDE shapefile with a non-standard CRS.
TestUtils.deleteAll(inputDataStorePluginOptions);
TestUtils.testLocalIngest(inputDataStorePluginOptions, DimensionalityType.SPATIAL, "EPSG:4901", KDE_SHAPEFILE_FILE, "geotools-vector", 1);
// Register the input and output stores under named configs in a temp properties file
// so the CLI-style commands below can resolve them by name.
final File configFile = File.createTempFile("test_export", null);
final ManualOperationParams params = new ManualOperationParams();
params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
final AddStoreCommand addStore = new AddStoreCommand();
addStore.setParameters("test-in");
addStore.setPluginOptions(inputDataStorePluginOptions);
addStore.execute(params);
addStore.setParameters("raster-spatial");
addStore.setPluginOptions(outputDataStorePluginOptions);
addStore.execute(params);
// Create the output raster index in yet another CRS (EPSG:4240) to exercise reprojection.
final String outputIndexName = "raster-spatial-idx";
final IndexPluginOptions outputIndexOptions = new IndexPluginOptions();
outputIndexOptions.selectPlugin("spatial");
outputIndexOptions.setName(outputIndexName);
((SpatialOptions) outputIndexOptions.getDimensionalityOptions()).setCrs("EPSG:4240");
final DataStore outputDataStore = outputDataStorePluginOptions.createDataStore();
final Index outputIndex = outputIndexOptions.createIndex(outputDataStore);
outputDataStore.addIndex(outputIndex);
// use the min level to define the request boundary because it is the
// most coarse grain
final double decimalDegreesPerCellMinLevel = 180.0 / Math.pow(2, BASE_MIN_LEVEL);
// Snap the query origin/size to whole cells at the min level so the envelope aligns
// with tile boundaries across every level tested.
final double cellOriginXMinLevel = Math.round(TARGET_MIN_LON / decimalDegreesPerCellMinLevel);
final double cellOriginYMinLevel = Math.round(TARGET_MIN_LAT / decimalDegreesPerCellMinLevel);
final double numCellsMinLevel = Math.round(TARGET_DECIMAL_DEGREES_SIZE / decimalDegreesPerCellMinLevel);
final GeneralEnvelope queryEnvelope = new GeneralEnvelope(new double[] { // scaling on the tile composition/rendering
decimalDegreesPerCellMinLevel * cellOriginXMinLevel, decimalDegreesPerCellMinLevel * cellOriginYMinLevel }, new double[] { // scaling
decimalDegreesPerCellMinLevel * (cellOriginXMinLevel + numCellsMinLevel), decimalDegreesPerCellMinLevel * (cellOriginYMinLevel + numCellsMinLevel) });
final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();
// Build a CQL BBOX filter in the ingest CRS (EPSG:4901) by transforming a small
// WGS84 envelope; the filter restricts which features feed the KDE.
final String geomField = ((FeatureDataAdapter) inputDataStorePluginOptions.createDataStore().getTypes()[0]).getFeatureType().getGeometryDescriptor().getLocalName();
final Envelope cqlEnv = JTS.transform(new Envelope(155.12, 155.17, 16.07, 16.12), CRS.findMathTransform(CRS.decode("EPSG:4326"), CRS.decode("EPSG:4901"), true));
final String cqlStr = String.format("BBOX(%s, %f, %f, %f, %f)", geomField, cqlEnv.getMinX(), cqlEnv.getMinY(), cqlEnv.getMaxX(), cqlEnv.getMaxY());
// Phase 1: MapReduce KDE, one coverage per tile size (powers of 2 from MIN to MAX).
for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {
LOGGER.warn("running mapreduce kde: " + i);
final String tileSizeCoverageName = TEST_COVERAGE_NAME_MR_PREFIX + i;
final KdeCommand command = new KdeCommand();
command.setParameters("test-in", "raster-spatial");
command.getKdeOptions().setCqlFilter(cqlStr);
command.getKdeOptions().setOutputIndex(outputIndexName);
command.getKdeOptions().setFeatureType(KDE_FEATURE_TYPE_NAME);
command.getKdeOptions().setMinLevel(BASE_MIN_LEVEL);
command.getKdeOptions().setMaxLevel(BASE_MAX_LEVEL);
command.getKdeOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);
command.getKdeOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);
command.getKdeOptions().setCoverageName(tileSizeCoverageName);
command.getKdeOptions().setHdfsHostPort(env.getHdfs());
command.getKdeOptions().setJobTrackerOrResourceManHostPort(env.getJobtracker());
command.getKdeOptions().setTileSize((int) Math.pow(2, i));
ToolRunner.run(command.createRunner(params), new String[] {});
}
final int numLevels = (BASE_MAX_LEVEL - BASE_MIN_LEVEL) + 1;
// Baseline samples from the MR KDEs, per level; later phases are compared against these.
final double[][][][] initialSampleValuesPerRequestSize = new double[numLevels][][][];
LOGGER.warn("testing mapreduce kdes");
for (int l = 0; l < numLevels; l++) {
initialSampleValuesPerRequestSize[l] = testSamplesMatch(TEST_COVERAGE_NAME_MR_PREFIX, ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1, queryEnvelope, new Rectangle((int) (numCellsMinLevel * Math.pow(2, l)), (int) (numCellsMinLevel * Math.pow(2, l))), null);
}
// Phase 2: Spark KDE with identical parameters; results must match the MR baseline.
for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {
LOGGER.warn("running spark kde: " + i);
final String tileSizeCoverageName = TEST_COVERAGE_NAME_SPARK_PREFIX + i;
final KDESparkCommand command = new KDESparkCommand();
// We're going to override these anyway.
command.setParameters("test-in", "raster-spatial");
command.getKDESparkOptions().setOutputIndex(outputIndexName);
command.getKDESparkOptions().setCqlFilter(cqlStr);
command.getKDESparkOptions().setTypeName(KDE_FEATURE_TYPE_NAME);
command.getKDESparkOptions().setMinLevel(BASE_MIN_LEVEL);
command.getKDESparkOptions().setMaxLevel(BASE_MAX_LEVEL);
command.getKDESparkOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);
command.getKDESparkOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);
command.getKDESparkOptions().setCoverageName(tileSizeCoverageName);
command.getKDESparkOptions().setMaster("local[*]");
command.getKDESparkOptions().setTileSize((int) Math.pow(2, i));
command.execute(params);
}
LOGGER.warn("testing spark kdes");
for (int l = 0; l < numLevels; l++) {
testSamplesMatch(TEST_COVERAGE_NAME_SPARK_PREFIX, ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1, queryEnvelope, new Rectangle((int) (numCellsMinLevel * Math.pow(2, l)), (int) (numCellsMinLevel * Math.pow(2, l))), initialSampleValuesPerRequestSize[l]);
}
// go from the original mr KDEs to a resized version using the MR command
for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {
LOGGER.warn("running mapreduce resize: " + i);
final String originalTileSizeCoverageName = TEST_COVERAGE_NAME_MR_PREFIX + i;
final String resizeTileSizeCoverageName = TEST_RESIZE_COVERAGE_NAME_MR_PREFIX + i;
final ResizeMRCommand command = new ResizeMRCommand();
// We're going to override these anyway.
command.setParameters("raster-spatial", "raster-spatial");
command.getOptions().setInputCoverageName(originalTileSizeCoverageName);
command.getOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);
command.getOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);
command.setHdfsHostPort(env.getHdfs());
command.setJobTrackerOrResourceManHostPort(env.getJobtracker());
command.getOptions().setOutputCoverageName(resizeTileSizeCoverageName);
command.getOptions().setIndexName(TestUtils.createWebMercatorSpatialIndex().getName());
// due to time considerations when running the test, downsample to
// at most 2 powers of 2 lower
int targetRes = (MAX_TILE_SIZE_POWER_OF_2 - i);
if ((i - targetRes) > 2) {
targetRes = i - 2;
}
command.getOptions().setOutputTileSize((int) Math.pow(2, targetRes));
ToolRunner.run(command.createRunner(params), new String[] {});
}
// Resized MR rasters must still match the original MR baseline samples.
LOGGER.warn("testing mapreduce resize");
for (int l = 0; l < numLevels; l++) {
testSamplesMatch(TEST_RESIZE_COVERAGE_NAME_MR_PREFIX, ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1, queryEnvelope, new Rectangle((int) (numCellsMinLevel * Math.pow(2, l)), (int) (numCellsMinLevel * Math.pow(2, l))), initialSampleValuesPerRequestSize[l]);
}
// Spark command
// Phase 4: same resize, but via the Spark implementation, against the Spark KDE coverages.
for (int i = MIN_TILE_SIZE_POWER_OF_2; i <= MAX_TILE_SIZE_POWER_OF_2; i += INCREMENT) {
LOGGER.warn("running spark resize: " + i);
final String originalTileSizeCoverageName = TEST_COVERAGE_NAME_SPARK_PREFIX + i;
final String resizeTileSizeCoverageName = TEST_RESIZE_COVERAGE_NAME_SPARK_PREFIX + i;
final ResizeSparkCommand command = new ResizeSparkCommand();
// We're going to override these anyway.
command.setParameters("raster-spatial", "raster-spatial");
command.getOptions().setInputCoverageName(originalTileSizeCoverageName);
command.getOptions().setMinSplits(MapReduceTestUtils.MIN_INPUT_SPLITS);
command.getOptions().setMaxSplits(MapReduceTestUtils.MAX_INPUT_SPLITS);
command.getOptions().setOutputCoverageName(resizeTileSizeCoverageName);
command.getOptions().setIndexName(TestUtils.createWebMercatorSpatialIndex().getName());
command.setMaster("local[*]");
// due to time considerations when running the test, downsample to
// at most 2 powers of 2 lower
int targetRes = (MAX_TILE_SIZE_POWER_OF_2 - i);
if ((i - targetRes) > 2) {
targetRes = i - 2;
}
command.getOptions().setOutputTileSize((int) Math.pow(2, targetRes));
command.execute(params);
}
LOGGER.warn("testing spark resize");
for (int l = 0; l < numLevels; l++) {
testSamplesMatch(TEST_RESIZE_COVERAGE_NAME_SPARK_PREFIX, ((MAX_TILE_SIZE_POWER_OF_2 - MIN_TILE_SIZE_POWER_OF_2) / INCREMENT) + 1, queryEnvelope, new Rectangle((int) (numCellsMinLevel * Math.pow(2, l)), (int) (numCellsMinLevel * Math.pow(2, l))), initialSampleValuesPerRequestSize[l]);
}
}
Aggregations