Use of org.json.simple.parser.JSONParser in project spatial-portal by AtlasOfLivingAustralia.
Example: class CommonData, method initJournalmap.
/**
 * Initialises the JournalMap article and location caches.
 *
 * Articles are loaded from the disk cache at /data/webportal/journalmapArticles.json when it
 * exists; otherwise they are fetched from the JournalMap API (publications first, then each
 * publication's articles) and written back to the disk cache. Finally a flat list of article
 * locations is built for spatial lookups. No-op when the article list is already populated.
 */
private static void initJournalmap() {
if (journalMapArticles != null && journalMapArticles.size() > 0) {
return;
}
journalMapArticles = new ArrayList<JSONObject>();
journalMapLocations = new ArrayList<JournalMapLocation>();
try {
String journalmapUrl = CommonData.getSettings().getProperty("journalmap.url", null);
String journalmapKey = CommonData.getSettings().getProperty("journalmap.api_key", null);
//try disk cache
File jaFile = new File("/data/webportal/journalmapArticles.json");
if (jaFile.exists()) {
JSONParser jp = new JSONParser();
JSONArray ja = (JSONArray) jp.parse(FileUtils.readFileToString(jaFile));
for (int i = 0; i < ja.size(); i++) {
journalMapArticles.add((JSONObject) ja.get(i));
}
} else if (journalmapKey != null && !journalmapKey.isEmpty()) {
int page = 1;
int maxpage = 0;
List<String> publicationsIds = new ArrayList<String>();
//page through the publications listing; the X-Pages header carries the total page count
while (page == 1 || page <= maxpage) {
HttpClient client = new HttpClient();
String url = journalmapUrl + "api/publications.json?version=1.0&key=" + journalmapKey + "&page=" + page;
page = page + 1;
LOGGER.debug("journalmap url: " + url);
GetMethod get = new GetMethod(url);
client.executeMethod(get);
//update maxpage; default to 0 (stop paging) when the header is absent
maxpage = get.getResponseHeader("X-Pages") == null ? 0 : Integer.parseInt(get.getResponseHeader("X-Pages").getValue());
JSONParser jp = new JSONParser();
JSONArray jcollection = (JSONArray) jp.parse(get.getResponseBodyAsString());
for (int i = 0; i < jcollection.size(); i++) {
JSONObject publication = (JSONObject) jcollection.get(i);
if (publication.containsKey("id")) {
publicationsIds.add(publication.get("id").toString());
LOGGER.debug("found publication: " + publication.get("id").toString() + ", article_count: " + publication.get("articles_count").toString());
}
}
}
for (String publicationsId : publicationsIds) {
//allow for collection failure
try {
page = 1;
maxpage = 0;
while (page == 1 || page <= maxpage) {
HttpClient client = new HttpClient();
String url = journalmapUrl + "api/articles.json?version=1.0&key=" + journalmapKey + "&page=" + page + "&publication_id=" + publicationsId;
page = page + 1;
LOGGER.debug("journalmap url: " + url);
GetMethod get = new GetMethod(url);
client.executeMethod(get);
//update maxpage; default to 0 (stop paging) when the header is absent
maxpage = get.getResponseHeader("X-Pages") == null ? 0 : Integer.parseInt(get.getResponseHeader("X-Pages").getValue());
JSONParser jp = new JSONParser();
JSONArray jarticles = (JSONArray) jp.parse(get.getResponseBodyAsString());
//keep only articles that have mappable locations
for (int j = 0; j < jarticles.size(); j++) {
JSONObject o = (JSONObject) jarticles.get(j);
if (o.containsKey("locations")) {
journalMapArticles.add(o);
}
}
}
} catch (Exception e) {
//log with cause so transient API failures are diagnosable; continue with remaining publications
LOGGER.error("journalmap - failure to get articles from publicationsId: " + publicationsId, e);
}
}
//save to disk cache; close the writer even if serialisation fails
FileWriter fw = null;
try {
fw = new FileWriter(jaFile);
JSONValue.writeJSONString(journalMapArticles, fw);
fw.flush();
} finally {
if (fw != null) {
fw.close();
}
}
}
} catch (Exception e) {
LOGGER.error("error initialising journalmap data", e);
}
//construct locations list
for (int i = 0; i < journalMapArticles.size(); i++) {
JSONArray locations = (JSONArray) journalMapArticles.get(i).get("locations");
//articles loaded from the disk cache are not filtered on load, so guard against a missing
//"locations" entry here (this loop runs outside the try/catch above)
if (locations == null) {
continue;
}
for (int j = 0; j < locations.size(); j++) {
JSONObject l = (JSONObject) locations.get(j);
double longitude = Double.parseDouble(l.get("longitude").toString());
double latitude = Double.parseDouble(l.get("latitude").toString());
journalMapLocations.add(new JournalMapLocation(longitude, latitude, i));
}
}
}
Use of org.json.simple.parser.JSONParser in project spatial-portal by AtlasOfLivingAustralia.
Example: class BiocacheQuery, method getScientificNameRank.
/**
 * Looks up the scientific name and taxonomic rank for a taxon LSID via the BIE service.
 *
 * When the rank is one of COMMON_TAXON_RANKS it is collapsed to the generic
 * StringConstants.TAXON label.
 *
 * @param lsid the taxon concept LSID to resolve
 * @return "scientificName,rank" on success, or StringConstants.OCCURRENCES when the
 *         lookup fails (errors are logged, not thrown)
 */
public static String getScientificNameRank(String lsid) {
boolean useNewBie = "true".equalsIgnoreCase(CommonData.getSettings().getProperty("new.bie"));
String snUrl = useNewBie
? CommonData.getBieServer() + BIE_SPECIES_WS + lsid + ".json"
: CommonData.getBieServer() + BIE_SPECIES + lsid + ".json";
LOGGER.debug(snUrl);
try {
HttpClient httpClient = new HttpClient();
GetMethod method = new GetMethod(snUrl);
method.addRequestHeader(StringConstants.CONTENT_TYPE, StringConstants.APPLICATION_JSON);
httpClient.executeMethod(method);
String body = method.getResponseBodyAsString();
JSONObject response = (JSONObject) new JSONParser().parse(body);
JSONObject taxonConcept = (JSONObject) response.get("taxonConcept");
String scientificName = taxonConcept.get("nameString").toString();
String r = taxonConcept.get("rankString").toString();
LOGGER.debug("Arrays.binarySearch(COMMON_TAXON_RANKS, rank): " + Arrays.binarySearch(COMMON_TAXON_RANKS, r));
//NOTE(review): binarySearch assumes COMMON_TAXON_RANKS is sorted — confirm at its declaration
if (Arrays.binarySearch(COMMON_TAXON_RANKS, r) > -1) {
r = StringConstants.TAXON;
}
return scientificName + "," + r;
} catch (Exception e) {
LOGGER.error("error getting scientific name:" + snUrl, e);
}
return StringConstants.OCCURRENCES;
}
Use of org.json.simple.parser.JSONParser in project spatial-portal by AtlasOfLivingAustralia.
Example: class BiocacheQuery, method retrieveCustomFields.
/**
 * Fetches the dynamic facet names available for this query from the biocache server.
 *
 * Facet names ending in "_RNG" are excluded from the result.
 *
 * @return list of custom facet names; empty on lookup failure (errors are logged)
 */
private List<String> retrieveCustomFields() {
List<String> customFields = new ArrayList<String>();
//look up facets
final String jsonUri = biocacheServer + "/upload/dynamicFacets?q=" + getFullQ(true) + "&qc=" + getQc();
try {
HttpClient httpClient = new HttpClient();
GetMethod method = new GetMethod(jsonUri);
method.addRequestHeader(StringConstants.CONTENT_TYPE, StringConstants.APPLICATION_JSON);
httpClient.executeMethod(method);
String body = method.getResponseBodyAsString();
JSONArray facets = (JSONArray) new JSONParser().parse(body);
for (int i = 0; i < facets.size(); i++) {
JSONObject facet = (JSONObject) facets.get(i);
String facetName = facet.get(StringConstants.NAME).toString();
//skip range facets (suffix "_RNG")
if (!facetName.endsWith("_RNG")) {
customFields.add(facetName);
}
}
} catch (Exception e) {
LOGGER.error("error loading custom facets for: " + jsonUri, e);
}
return customFields;
}
Use of org.json.simple.parser.JSONParser in project spatial-portal by AtlasOfLivingAustralia.
Example: class BiocacheQuery, method endemicSpeciesList.
/**
 * Builds (and caches) the CSV of species endemic to this query's area.
 *
 * Two strategies, selected by the "endemic.sp.method" setting:
 * - "true": compare this query's per-species counts (from speciesList()) against the
 *   portal-wide counts from CommonData; a species is endemic when its global count does
 *   not exceed its count within this query.
 * - otherwise: ask the biocache server's endemic-species service for this query's
 *   paramId and copy the matching rows out of speciesList().
 *
 * @return CSV text (header row plus one row per endemic species); on an error mid-way
 *         the partial result built so far is returned (errors are logged)
 */
public String endemicSpeciesList() {
//return the cached result when already computed
if (endemicSpeciesList != null) {
return endemicSpeciesList;
}
if (CommonData.getSettings().containsKey("endemic.sp.method") && CommonData.getSettings().getProperty("endemic.sp.method").equals("true")) {
String speciesList = speciesList();
//can get species list counts as "kosher:true" or "kosher:*" only
Map speciesCounts;
if (getGeospatialKosher()[1]) {
//[1] is 'include kosher:false'
speciesCounts = CommonData.getSpeciesListCounts(false);
} else {
speciesCounts = CommonData.getSpeciesListCountsKosher(false);
}
StringBuilder sb = new StringBuilder();
//column 0 holds the species key used to look up speciesCounts
int speciesCol = 0;
//NOTE(review): assumes column 11 of the speciesList() CSV is this query's occurrence count — confirm against speciesList() output format
int countCol = 11;
try {
CSVReader csv = new CSVReader(new StringReader(speciesList));
String[] row;
//currentPos/nextPos track raw character offsets into speciesList in parallel with
//the CSV reader, so matching lines can be copied into sb verbatim
int currentPos = 0;
int nextPos = speciesList.indexOf('\n', currentPos + 1);
//header
sb.append(speciesList.substring(currentPos, nextPos));
//header
csv.readNext();
while ((row = csv.readNext()) != null) {
//add if species is not present elsewhere
Long c = (Long) speciesCounts.get(row[speciesCol]);
if (c != null && c <= Long.parseLong(row[countCol])) {
//global count <= count here, so every occurrence is inside this query: endemic
if (nextPos > speciesList.length()) {
nextPos = speciesList.length();
}
sb.append(speciesList.substring(currentPos, nextPos));
} else if (c == null) {
LOGGER.error("failed to find species_guid: " + row[speciesCol] + " in CommonData.getSpeciesListCounts()");
}
currentPos = nextPos;
nextPos = speciesList.indexOf('\n', currentPos + 1);
}
} catch (Exception e) {
LOGGER.error("failed generating endemic species list", e);
}
endemicSpeciesList = sb.toString();
} else {
//server-side method: requires a registered paramId for this query
forMapping = true;
if (paramId == null)
makeParamId();
HttpClient client = new HttpClient();
String url = biocacheServer + ENDEMIC_LIST + paramId + "?facets=names_and_lsid";
LOGGER.debug(url);
GetMethod get = new GetMethod(url);
try {
client.executeMethod(get);
JSONParser jp = new JSONParser();
JSONArray ja = (JSONArray) jp.parse(get.getResponseBodyAsString());
//extract endemic matches from the species list
String speciesList = speciesList();
StringBuilder sb = new StringBuilder();
//copy the header row first
int idx = speciesList.indexOf('\n');
if (idx > 0) {
sb.append(speciesList.substring(0, idx));
}
for (int j = 0; j < ja.size(); j++) {
JSONObject jo = (JSONObject) ja.get(j);
if (jo.containsKey("label")) {
//find the CSV row whose first field matches this endemic label; the leading
//'\n' anchors the match to a line start (and keeps it with the copied row)
idx = speciesList.indexOf("\n" + jo.get("label") + ",");
if (idx > 0) {
int lineEnd = speciesList.indexOf('\n', idx + 1);
if (lineEnd < 0)
lineEnd = speciesList.length();
sb.append(speciesList.substring(idx, lineEnd));
}
}
}
endemicSpeciesList = sb.toString();
} catch (Exception e) {
LOGGER.error("error getting endemic species result", e);
}
}
return endemicSpeciesList;
}
Use of org.json.simple.parser.JSONParser in project spatial-portal by AtlasOfLivingAustralia.
Example: class RemoteLogger, method getLogCSV.
/**
 * Fetches the tool-usage log summary for the signed-in user from the logger service.
 *
 * @return the parsed JSON response, or null when the user is not logged in or the
 *         request fails (errors are logged)
 */
public JSONObject getLogCSV() {
init();
try {
if (Util.isLoggedIn()) {
String url = loggerService + "/app/types/tool.json?" + "email=" + URLEncoder.encode(Util.getUserEmail(), StringConstants.UTF_8) + "&appid=" + URLEncoder.encode(appid, StringConstants.UTF_8) + "&api_key=" + URLEncoder.encode(CommonData.getSettings().getProperty("api_key"), StringConstants.UTF_8);
HttpClient client = new HttpClient();
GetMethod get = new GetMethod(url);
get.addRequestHeader(StringConstants.ACCEPT, StringConstants.APPLICATION_JSON);
client.executeMethod(get);
//read the body exactly once: HttpClient 3.x streams the response, so a second
//getResponseBodyAsString() call is not guaranteed to return the content again
String body = get.getResponseBodyAsString();
LOGGER.debug("get: " + url + ", response: " + body);
JSONParser jp = new JSONParser();
return (JSONObject) jp.parse(body);
}
} catch (Exception e) {
LOGGER.error("Error getting logging information from server:", e);
}
return null;
}
Aggregations