Use of com.graphhopper.GraphHopperConfig in project graphhopper (by graphhopper).
Example: class CHMeasurement, method testPerformanceAutomaticNodeOrdering.
/**
 * Parses a given OSM file, contracts the graph and runs random routing queries on it. This is useful to test
 * the node contraction heuristics with regards to the performance of the automatic graph contraction (the node
 * contraction order determines how many and which shortcuts will be introduced) and the resulting query speed.
 * The queries are compared with a normal AStar search for comparison and to ensure correctness.
 *
 * @param args key=value pairs parsed via {@link PMap#read(String[])}; see the example below for supported keys
 */
private static void testPerformanceAutomaticNodeOrdering(String[] args) {
    // example args:
    // map=berlin.pbf stats_file=stats.dat period_updates=0 lazy_updates=100 neighbor_updates=50 max_neighbor_updates=3 contract_nodes=100 log_messages=20 edge_quotient_weight=100.0 orig_edge_quotient_weight=100.0 hierarchy_depth_weight=20.0 landmarks=0 cleanup=true turncosts=true threshold=0.1 seed=456 comp_iterations=10 perf_iterations=100 quick=false
    long start = nanoTime();
    PMap map = PMap.read(args);
    GraphHopperConfig ghConfig = new GraphHopperConfig(map);
    LOGGER.info("Running analysis with parameters {}", ghConfig);
    String osmFile = ghConfig.getString("map", "map-matching/files/leipzig_germany.osm.pbf");
    ghConfig.putObject("datareader.file", osmFile);
    // knobs controlling the contraction heuristic and the benchmark itself
    final String statsFile = ghConfig.getString("stats_file", null);
    final int periodicUpdates = ghConfig.getInt("period_updates", 0);
    final int lazyUpdates = ghConfig.getInt("lazy_updates", 100);
    final int neighborUpdates = ghConfig.getInt("neighbor_updates", 50);
    final int maxNeighborUpdates = ghConfig.getInt("max_neighbor_updates", 3);
    final int contractedNodes = ghConfig.getInt("contract_nodes", 100);
    final int logMessages = ghConfig.getInt("log_messages", 20);
    final float edgeQuotientWeight = ghConfig.getFloat("edge_quotient_weight", 100.0f);
    final float origEdgeQuotientWeight = ghConfig.getFloat("orig_edge_quotient_weight", 100.0f);
    final float hierarchyDepthWeight = ghConfig.getFloat("hierarchy_depth_weight", 20.0f);
    final int pollFactorHeuristic = ghConfig.getInt("poll_factor_heur", 5);
    final int pollFactorContraction = ghConfig.getInt("poll_factor_contr", 200);
    final int landmarks = ghConfig.getInt("landmarks", 0);
    final boolean cleanup = ghConfig.getBool("cleanup", true);
    final boolean withTurnCosts = ghConfig.getBool("turncosts", true);
    final int uTurnCosts = ghConfig.getInt(Parameters.Routing.U_TURN_COSTS, 80);
    final double errorThreshold = ghConfig.getDouble("threshold", 0.1);
    final long seed = ghConfig.getLong("seed", 456);
    final int compIterations = ghConfig.getInt("comp_iterations", 100);
    final int perfIterations = ghConfig.getInt("perf_iterations", 1000);
    final boolean quick = ghConfig.getBool("quick", false);
    final GraphHopper graphHopper = new GraphHopper();
    String profile = "car_profile";
    if (withTurnCosts) {
        ghConfig.putObject("graph.flag_encoders", "car|turn_costs=true");
        ghConfig.setProfiles(Collections.singletonList(new Profile(profile).setVehicle("car").setWeighting("fastest").setTurnCosts(true).putHint(Parameters.Routing.U_TURN_COSTS, uTurnCosts)));
        ghConfig.setCHProfiles(Collections.singletonList(new CHProfile(profile)));
        if (landmarks > 0) {
            ghConfig.setLMProfiles(Collections.singletonList(new LMProfile(profile)));
            ghConfig.putObject("prepare.lm.landmarks", landmarks);
        }
    } else {
        ghConfig.putObject("graph.flag_encoders", "car");
        ghConfig.setProfiles(Collections.singletonList(new Profile(profile).setVehicle("car").setWeighting("fastest").setTurnCosts(false)));
    }
    ghConfig.putObject(PERIODIC_UPDATES, periodicUpdates);
    ghConfig.putObject(LAST_LAZY_NODES_UPDATES, lazyUpdates);
    ghConfig.putObject(NEIGHBOR_UPDATES, neighborUpdates);
    ghConfig.putObject(NEIGHBOR_UPDATES_MAX, maxNeighborUpdates);
    ghConfig.putObject(CONTRACTED_NODES, contractedNodes);
    ghConfig.putObject(LOG_MESSAGES, logMessages);
    // edge-based contraction (turn costs) uses different priority weights than node-based
    if (withTurnCosts) {
        ghConfig.putObject(EDGE_QUOTIENT_WEIGHT, edgeQuotientWeight);
        ghConfig.putObject(ORIGINAL_EDGE_QUOTIENT_WEIGHT, origEdgeQuotientWeight);
        ghConfig.putObject(HIERARCHY_DEPTH_WEIGHT, hierarchyDepthWeight);
        ghConfig.putObject(MAX_POLL_FACTOR_HEURISTIC_EDGE, pollFactorHeuristic);
        ghConfig.putObject(MAX_POLL_FACTOR_CONTRACTION_EDGE, pollFactorContraction);
    } else {
        ghConfig.putObject(MAX_POLL_FACTOR_HEURISTIC_NODE, pollFactorHeuristic);
        ghConfig.putObject(MAX_POLL_FACTOR_CONTRACTION_NODE, pollFactorContraction);
    }
    LOGGER.info("Initializing graph hopper with args: {}", ghConfig);
    graphHopper.init(ghConfig);
    if (cleanup) {
        graphHopper.clean();
    }
    PMap results = new PMap(ghConfig.asPMap());
    StopWatch sw = new StopWatch();
    sw.start();
    graphHopper.importOrLoad();
    sw.stop();
    results.putObject("_prepare_time", sw.getSeconds());
    // use getSeconds() here as well (getMillis() / 1000 is an integer division that truncates sub-second precision)
    LOGGER.info("Import and preparation took {}s", sw.getSeconds());
    // correctness checks compare CH results against plain (non-CH) algorithms
    if (!quick) {
        runCompareTest(DIJKSTRA_BI, graphHopper, withTurnCosts, uTurnCosts, seed, compIterations, errorThreshold, results);
        runCompareTest(ASTAR_BI, graphHopper, withTurnCosts, uTurnCosts, seed, compIterations, errorThreshold, results);
    }
    if (!quick) {
        runPerformanceTest(DIJKSTRA_BI, graphHopper, withTurnCosts, seed, perfIterations, results);
    }
    runPerformanceTest(ASTAR_BI, graphHopper, withTurnCosts, seed, perfIterations, results);
    if (!quick && landmarks > 0) {
        runPerformanceTest("lm", graphHopper, withTurnCosts, seed, perfIterations, results);
    }
    graphHopper.close();
    Map<String, Object> resultMap = results.toMap();
    TreeSet<String> sortedKeys = new TreeSet<>(resultMap.keySet());
    for (String key : sortedKeys) {
        LOGGER.info(key + "=" + resultMap.get(key));
    }
    // optionally append one CSV-style line (plus a header for a new file) to the stats file
    if (statsFile != null) {
        File f = new File(statsFile);
        boolean writeHeader = !f.exists();
        try (OutputStream os = new FileOutputStream(f, true);
             Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8)) {
            if (writeHeader)
                writer.write(getHeader(sortedKeys));
            writer.write(getStatLine(sortedKeys, resultMap));
        } catch (IOException e) {
            LOGGER.error("Could not write summary to file '{}'", statsFile, e);
        }
    }
    // output to be used by external caller
    StringBuilder sb = new StringBuilder();
    for (String key : sortedKeys) {
        sb.append(key).append(":").append(resultMap.get(key)).append(";");
    }
    // guard: deleteCharAt(-1) would throw if there are no results at all
    if (sb.length() > 0) {
        sb.deleteCharAt(sb.lastIndexOf(";"));
    }
    System.out.println(sb);
    LOGGER.info("Total time: {}s", fmt((nanoTime() - start) * 1.e-9));
}
Use of com.graphhopper.GraphHopperConfig in project graphhopper (by graphhopper).
Example: class CHImportTest, method main.
/**
 * Imports an OSM file and prepares a CH graph using contraction parameters that can be
 * overridden via key=value command-line arguments, then runs the query benchmark.
 */
public static void main(String[] args) {
    System.out.println("running for args: " + Arrays.toString(args));
    PMap params = PMap.read(args);
    String vehicle = params.getString("vehicle", "car");

    // basic import configuration: input file, graph folder, routing profile
    GraphHopperConfig ghConfig = new GraphHopperConfig(params);
    ghConfig.putObject("datareader.file", params.getString("pbf", "map-matching/files/leipzig_germany.osm.pbf"));
    ghConfig.putObject("graph.location", params.getString("gh", "ch-import-test-gh"));
    ghConfig.setProfiles(Collections.singletonList(new Profile(vehicle).setVehicle(vehicle).setWeighting("fastest")));
    ghConfig.setCHProfiles(Collections.singletonList(new CHProfile(vehicle)));

    // contraction-hierarchy tuning parameters, each overridable from the command line
    ghConfig.putObject(CHParameters.PERIODIC_UPDATES, params.getInt("periodic", 0));
    ghConfig.putObject(CHParameters.LAST_LAZY_NODES_UPDATES, params.getInt("lazy", 100));
    ghConfig.putObject(CHParameters.NEIGHBOR_UPDATES, params.getInt("neighbor", 100));
    ghConfig.putObject(CHParameters.NEIGHBOR_UPDATES_MAX, params.getInt("neighbor_max", 2));
    ghConfig.putObject(CHParameters.CONTRACTED_NODES, params.getInt("contracted", 100));
    ghConfig.putObject(CHParameters.LOG_MESSAGES, params.getInt("logs", 20));
    ghConfig.putObject(CHParameters.EDGE_DIFFERENCE_WEIGHT, params.getDouble("edge_diff", 10));
    ghConfig.putObject(CHParameters.ORIGINAL_EDGE_COUNT_WEIGHT, params.getDouble("orig_edge", 1));
    ghConfig.putObject(CHParameters.MAX_POLL_FACTOR_HEURISTIC_NODE, params.getDouble("mpf_heur", 5));
    ghConfig.putObject(CHParameters.MAX_POLL_FACTOR_CONTRACTION_NODE, params.getDouble("mpf_contr", 200));

    GraphHopper graphHopper = new GraphHopper();
    graphHopper.init(ghConfig);
    if (params.getBool("use_country_rules", false)) {
        // note that using this requires a new import of the base graph!
        graphHopper.setCountryRuleFactory(new CountryRuleFactory());
    }
    graphHopper.importOrLoad();
    runQueries(graphHopper, vehicle);
}
Use of com.graphhopper.GraphHopperConfig in project graphhopper (by graphhopper).
Example: class MatchCommand, method run.
/**
 * Map-matches every GPX file given on the command line against the configured GraphHopper
 * graph and writes each successful result as a new GPX file next to the input
 * (input path + ".res.gpx"). Files that fail are reported on stderr and skipped.
 */
@Override
protected void run(Bootstrap<GraphHopperServerConfiguration> bootstrap, Namespace args, GraphHopperServerConfiguration configuration) {
GraphHopperConfig graphHopperConfiguration = configuration.getGraphHopperConfiguration();
GraphHopper hopper = new GraphHopper().init(graphHopperConfiguration);
hopper.importOrLoad();
// map-matching parameters come from the CLI namespace, not the GraphHopper config
PMap hints = new PMap();
hints.putObject("profile", args.get("profile"));
MapMatching mapMatching = new MapMatching(hopper, hints);
mapMatching.setTransitionProbabilityBeta(args.getDouble("transition_probability_beta"));
mapMatching.setMeasurementErrorSigma(args.getInt("gps_accuracy"));
// separate stopwatches: GPX parsing time vs. actual matching time, reported at the end
StopWatch importSW = new StopWatch();
StopWatch matchSW = new StopWatch();
Translation tr = new TranslationMap().doImport().getWithFallBack(Helper.getLocale(args.getString("instructions")));
// an empty "instructions" argument means: skip turn instructions in the output GPX
final boolean withRoute = !args.getString("instructions").isEmpty();
XmlMapper xmlMapper = new XmlMapper();
for (File gpxFile : args.<File>getList("gpx")) {
try {
importSW.start();
Gpx gpx = xmlMapper.readValue(gpxFile, Gpx.class);
// only <trk> elements are supported — not <wpt> or <rte>
if (gpx.trk == null) {
throw new IllegalArgumentException("No tracks found in GPX document. Are you using waypoints or routes instead?");
}
if (gpx.trk.size() > 1) {
throw new IllegalArgumentException("GPX documents with multiple tracks not supported yet.");
}
List<Observation> measurements = GpxConversions.getEntries(gpx.trk.get(0));
importSW.stop();
matchSW.start();
MatchResult mr = mapMatching.match(measurements);
matchSW.stop();
System.out.println(gpxFile);
System.out.println("\tmatches:\t" + mr.getEdgeMatches().size() + ", gps entries:" + measurements.size());
System.out.println("\tgpx length:\t" + (float) mr.getGpxEntriesLength() + " vs " + (float) mr.getMatchLength());
String outFile = gpxFile.getAbsolutePath() + ".res.gpx";
System.out.println("\texport results to:" + outFile);
ResponsePath responsePath = new PathMerger(mr.getGraph(), mr.getWeighting()).doWork(PointList.EMPTY, Collections.singletonList(mr.getMergedPath()), hopper.getEncodingManager(), tr);
if (responsePath.hasErrors()) {
System.err.println("Problem with file " + gpxFile + ", " + responsePath.getErrors());
continue;
}
// NOTE(review): FileWriter uses the platform default charset; GPX is XML and normally
// UTF-8 — consider an OutputStreamWriter with StandardCharsets.UTF_8. TODO confirm.
try (BufferedWriter writer = new BufferedWriter(new FileWriter(outFile))) {
// fall back to "now" when the track carries no start time
long time = gpx.trk.get(0).getStartTime().map(Date::getTime).orElse(System.currentTimeMillis());
writer.append(GpxConversions.createGPX(responsePath.getInstructions(), gpx.trk.get(0).name != null ? gpx.trk.get(0).name : "", time, hopper.hasElevation(), withRoute, true, false, Constants.VERSION, tr));
}
} catch (Exception ex) {
// stop both watches so the summary below stays consistent, then continue with the next file
importSW.stop();
matchSW.stop();
System.err.println("Problem with file " + gpxFile);
ex.printStackTrace(System.err);
}
}
System.out.println("gps import took:" + importSW.getSeconds() + "s, match took: " + matchSW.getSeconds());
}
Use of com.graphhopper.GraphHopperConfig in project graphhopper (by graphhopper).
Example: class GraphHopperConfigModuleTest, method testDeserializeConfig.
/**
 * Checks that a GraphHopperConfig can be deserialized from YAML and that dotted keys
 * such as "index.max_region_search" are treated as flat strings, not as a YAML hierarchy.
 */
@Test
public void testDeserializeConfig() throws IOException {
    ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory());
    objectMapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
    GraphHopperConfig graphHopperConfig = objectMapper.readValue(getClass().getResourceAsStream("config.yml"), GraphHopperConfig.class);
    // The dot in the key is no special symbol in YAML. It's just part of the string.
    // (assertEquals takes (expected, actual) — expected value first, so failure messages read correctly)
    Assertions.assertEquals(100, graphHopperConfig.getInt("index.max_region_search", 0));
    // So when I think this refers to a YAML hierarchy, I'll be disappointed!
    // sadly
    Assertions.assertEquals(0, graphHopperConfig.getInt("index.pups", 0));
    // Note: This also doesn't work in DropWizard's config. It's just not a feature. It does work in Spring,
    // but because Spring does it, not because YAML does it.
}
Use of com.graphhopper.GraphHopperConfig in project graphhopper (by graphhopper).
Example: class LMPreparationHandlerTest, method testPrepareWeightingNo.
/**
 * The LM preparation handler is enabled exactly when the config carries at least one LM profile.
 */
@Test
public void testPrepareWeightingNo() {
    GraphHopperConfig config = new GraphHopperConfig();
    config.setProfiles(Collections.singletonList(new Profile("profile")));
    config.setLMProfiles(Collections.singletonList(new LMProfile("profile")));

    // with an LM profile configured the handler must report itself enabled
    LMPreparationHandler enabledHandler = new LMPreparationHandler();
    enabledHandler.init(config);
    assertTrue(enabledHandler.isEnabled());

    // clearing the LM profiles disables a freshly initialized handler — see #1076
    config.setLMProfiles(Collections.emptyList());
    LMPreparationHandler disabledHandler = new LMPreparationHandler();
    disabledHandler.init(config);
    assertFalse(disabledHandler.isEnabled());
}
Aggregations