Example usage of org.json.simple.parser.JSONParser in the spatial-portal project by AtlasOfLivingAustralia:
class CommonData, method getDataTypes.
/**
 * Extracts the biocache data types from the webservice so that they can be used to
 * dynamically load the facets.
 *
 * @return map of field name to the {@link QueryField.FieldType} for that field;
 *         unrecognised or missing dataType values default to STRING.
 * @throws Exception if the webservice call or JSON parsing fails.
 */
private static Map<String, QueryField.FieldType> getDataTypes() throws Exception {
    Map<String, QueryField.FieldType> map = new HashMap<String, QueryField.FieldType>();
    //get the JSON from the WS
    JSONParser jp = new JSONParser();
    JSONArray values = (JSONArray) jp.parse(Util.readUrl(CommonData.getBiocacheServer() + "/index/fields"));
    for (Object mvalues : values) {
        // cast once per entry instead of repeating it for every field access
        JSONObject field = (JSONObject) mvalues;
        String name = field.get(StringConstants.NAME).toString();
        // a field without an explicit dataType is treated as a string
        String dtype = "string";
        if (field.containsKey("dataType")) {
            dtype = field.get("dataType").toString();
        }
        if ("string".equals(dtype) || "textgen".equals(dtype)) {
            map.put(name, QueryField.FieldType.STRING);
        } else if ("int".equals(dtype) || "tint".equals(dtype) || "tdate".equals(dtype)) {
            map.put(name, QueryField.FieldType.INT);
        } else if ("double".equals(dtype) || "tdouble".equals(dtype)) {
            map.put(name, QueryField.FieldType.DOUBLE);
        } else {
            // any other dataType falls back to STRING
            map.put(name, QueryField.FieldType.STRING);
        }
    }
    return map;
}
Example usage of org.json.simple.parser.JSONParser in the spatial-portal project by AtlasOfLivingAustralia:
class Sampling, method sampling.
/**
 * Performs a batch intersect (sampling) request against the layers server for the
 * given facet ids at the given points, then polls until the result is available.
 *
 * @param facetIds field ids to sample.
 * @param points   coordinate pairs; each entry is written to the service as
 *                 points[i][1],points[i][0] — presumably longitude is stored first
 *                 and the service expects latitude,longitude (TODO confirm against
 *                 the intersect/batch API).
 * @return the sampled data, or an empty list on any failure.
 */
public static List<String[]> sampling(List<String> facetIds, double[][] points) {
    try {
        URL url = new URL(CommonData.getLayersServer() + "/intersect/batch");
        URLConnection c = url.openConnection();
        c.setDoOutput(true);
        // Build the form body: fids=<csv of field ids>&points=<csv of coordinates>&pw=<password>.
        OutputStreamWriter out = new OutputStreamWriter(c.getOutputStream());
        try {
            out.write("fids=");
            for (int i = 0; i < facetIds.size(); i++) {
                if (i > 0) {
                    out.write(",");
                }
                out.write(facetIds.get(i));
            }
            out.write("&points=");
            for (int i = 0; i < points.length; i++) {
                if (i > 0) {
                    out.write(",");
                }
                out.write(String.valueOf(points[i][1]));
                out.write(",");
                out.write(String.valueOf(points[i][0]));
            }
            out.write("&pw=");
            out.write(CommonData.getSettings().getProperty("batch.sampling.pw"));
        } finally {
            // close in finally so the stream is not leaked when a write throws
            out.close();
        }
        JSONParser jp = new JSONParser();
        JSONObject jo = (JSONObject) jp.parse(IOUtils.toString(c.getInputStream()));
        String statusUrl = jo.get("statusUrl").toString();
        // Poll the status url until a non-empty download url appears. Up to retryMax
        // consecutive failed status lookups (null) are tolerated before giving up;
        // an empty url means "still processing" and resets the failure count.
        // (The original code busy-polled without sleeping while processing, and
        // carried a null-check that was unreachable after the dereference.)
        String downloadUrl = null;
        int retryMax = 10;
        int retry = 0;
        while (retry < retryMax) {
            Thread.sleep(2000);
            downloadUrl = getDownloadUrl(statusUrl);
            if (downloadUrl == null) {
                // status lookup failed; count towards the consecutive-failure limit
                retry++;
            } else if (!downloadUrl.isEmpty()) {
                // download is ready
                break;
            } else {
                // still processing; reset the consecutive-failure count and keep polling
                retry = 0;
            }
        }
        if (downloadUrl != null) {
            return getDownloadData(downloadUrl, points.length);
        }
    } catch (Exception e) {
        LOGGER.error("error with sampling", e);
    }
    // callers receive an empty list, never null
    return new ArrayList<String[]>();
}
Example usage of org.json.simple.parser.JSONParser in the spatial-portal project by AtlasOfLivingAustralia:
class CommonData, method initDownloadReasons.
/**
 * Refreshes the cached download reasons from the logger service.
 * The cache (copyDownloadReasons) is cleared first and left null when the
 * request does not return HTTP 200 or any error occurs.
 */
public static void initDownloadReasons() {
    copyDownloadReasons = null;
    LOGGER.debug("CommonData::initDownloadReasons()");
    String reasonsUrl = CommonData.getSettings().getProperty("logger.url") + "/service/logger/reasons";
    try {
        LOGGER.debug(reasonsUrl);
        GetMethod get = new GetMethod(reasonsUrl);
        HttpClient client = new HttpClient();
        int status = client.executeMethod(get);
        if (status == 200) {
            // successful response: parse the reasons list into the cache
            copyDownloadReasons = (JSONArray) new JSONParser().parse(get.getResponseBodyAsString());
        }
    } catch (Exception e) {
        copyDownloadReasons = null;
        LOGGER.error("error getting reasons: " + reasonsUrl, e);
    }
}
Example usage of org.json.simple.parser.JSONParser in the spatial-portal project by AtlasOfLivingAustralia:
class CommonData, method initJournalmap.
/**
 * Initialises the JournalMap article and location caches
 * (journalMapArticles and journalMapLocations).
 *
 * Articles are loaded from the disk cache at /data/webportal/journalmapArticles.json
 * when it exists; otherwise they are fetched from the JournalMap API (all
 * publications first, then the articles of each publication) and the result is
 * written back to the disk cache. Finally, journalMapLocations is built from each
 * article's "locations" entries. No-op if articles are already loaded.
 */
private static void initJournalmap() {
    // Already populated: the cache lives for the lifetime of the process.
    if (journalMapArticles != null && journalMapArticles.size() > 0) {
        return;
    }
    journalMapArticles = new ArrayList<JSONObject>();
    journalMapLocations = new ArrayList<JournalMapLocation>();
    try {
        String journalmapUrl = CommonData.getSettings().getProperty("journalmap.url", null);
        String journalmapKey = CommonData.getSettings().getProperty("journalmap.api_key", null);
        //try disk cache
        File jaFile = new File("/data/webportal/journalmapArticles.json");
        if (jaFile.exists()) {
            // Disk cache hit: load every cached article as-is.
            // NOTE(review): unlike the API path below, this path does not check for a
            // "locations" key; the location loop at the bottom assumes it is present —
            // confirm cached articles always carry locations.
            JSONParser jp = new JSONParser();
            JSONArray ja = (JSONArray) jp.parse(FileUtils.readFileToString(jaFile));
            for (int i = 0; i < ja.size(); i++) {
                journalMapArticles.add((JSONObject) ja.get(i));
            }
        } else if (journalmapKey != null && !journalmapKey.isEmpty()) {
            // No cache: page through the publications endpoint collecting publication ids.
            // The loop condition relies on maxpage being updated from the X-Pages header
            // on the first iteration.
            int page = 1;
            int maxpage = 0;
            List<String> publicationsIds = new ArrayList<String>();
            while (page == 1 || page <= maxpage) {
                HttpClient client = new HttpClient();
                String url = journalmapUrl + "api/publications.json?version=1.0&key=" + journalmapKey + "&page=" + page;
                page = page + 1;
                LOGGER.debug("journalmap url: " + url);
                GetMethod get = new GetMethod(url);
                // NOTE(review): the HTTP status code is not checked before reading the body.
                int result = client.executeMethod(get);
                //update maxpage
                maxpage = Integer.parseInt(get.getResponseHeader("X-Pages").getValue());
                //cache
                JSONParser jp = new JSONParser();
                JSONArray jcollection = (JSONArray) jp.parse(get.getResponseBodyAsString());
                for (int i = 0; i < jcollection.size(); i++) {
                    if (((JSONObject) jcollection.get(i)).containsKey("id")) {
                        publicationsIds.add(((JSONObject) jcollection.get(i)).get("id").toString());
                        LOGGER.debug("found publication: " + ((JSONObject) jcollection.get(i)).get("id").toString() + ", article_count: " + ((JSONObject) jcollection.get(i)).get("articles_count").toString());
                    }
                }
            }
            // Fetch the articles of each publication, again paging via X-Pages.
            for (String publicationsId : publicationsIds) {
                //allow for collection failure
                try {
                    page = 1;
                    maxpage = 0;
                    while (page == 1 || page <= maxpage) {
                        HttpClient client = new HttpClient();
                        String url = journalmapUrl + "api/articles.json?version=1.0&key=" + journalmapKey + "&page=" + page + "&publication_id=" + publicationsId;
                        page = page + 1;
                        LOGGER.debug("journalmap url: " + url);
                        GetMethod get = new GetMethod(url);
                        int result = client.executeMethod(get);
                        //update maxpage
                        maxpage = Integer.parseInt(get.getResponseHeader("X-Pages").getValue());
                        //cache
                        JSONParser jp = new JSONParser();
                        JSONArray jarticles = (JSONArray) jp.parse(get.getResponseBodyAsString());
                        for (int j = 0; j < jarticles.size(); j++) {
                            JSONObject o = (JSONObject) jarticles.get(j);
                            // only keep articles that have mappable locations
                            if (o.containsKey("locations")) {
                                journalMapArticles.add(o);
                            }
                        }
                    }
                } catch (Exception e) {
                    // One failed publication does not abort the whole load.
                    LOGGER.error("journalmap - failure to get articles from publicationsId: " + publicationsId);
                }
            }
            //save to disk cache
            FileWriter fw = new FileWriter(jaFile);
            JSONValue.writeJSONString(journalMapArticles, fw);
            fw.flush();
            fw.close();
        }
    } catch (Exception e) {
        LOGGER.error("error initialising journalmap data", e);
    }
    //construct locations list
    // Each location is indexed back to its article by position (i) in journalMapArticles.
    for (int i = 0; i < journalMapArticles.size(); i++) {
        JSONArray locations = (JSONArray) journalMapArticles.get(i).get("locations");
        for (int j = 0; j < locations.size(); j++) {
            JSONObject l = (JSONObject) locations.get(j);
            double longitude = Double.parseDouble(l.get("longitude").toString());
            double latitude = Double.parseDouble(l.get("latitude").toString());
            journalMapLocations.add(new JournalMapLocation(longitude, latitude, i));
        }
    }
}
Example usage of org.json.simple.parser.JSONParser in the spatial-portal project by AtlasOfLivingAustralia:
class BiocacheQuery, method getScientificNameRank.
/**
 * Looks up the scientific name and rank for an LSID from the BIE webservice.
 *
 * @param lsid the taxon LSID to resolve.
 * @return "scientificName,rank" on success (rank is collapsed to
 *         StringConstants.TAXON when it appears in COMMON_TAXON_RANKS),
 *         or StringConstants.OCCURRENCES on any failure.
 */
public static String getScientificNameRank(String lsid) {
    boolean newBie = "true".equalsIgnoreCase(CommonData.getSettings().getProperty("new.bie"));
    String snUrl = newBie
            ? CommonData.getBieServer() + BIE_SPECIES_WS + lsid + ".json"
            : CommonData.getBieServer() + BIE_SPECIES + lsid + ".json";
    LOGGER.debug(snUrl);
    try {
        GetMethod get = new GetMethod(snUrl);
        get.addRequestHeader(StringConstants.CONTENT_TYPE, StringConstants.APPLICATION_JSON);
        new HttpClient().executeMethod(get);
        JSONObject response = (JSONObject) new JSONParser().parse(get.getResponseBodyAsString());
        JSONObject taxonConcept = (JSONObject) response.get("taxonConcept");
        String scientificName = taxonConcept.get("nameString").toString();
        String r = taxonConcept.get("rankString").toString();
        // NOTE(review): binarySearch assumes COMMON_TAXON_RANKS is sorted — confirm.
        LOGGER.debug("Arrays.binarySearch(COMMON_TAXON_RANKS, rank): " + Arrays.binarySearch(COMMON_TAXON_RANKS, r));
        if (Arrays.binarySearch(COMMON_TAXON_RANKS, r) > -1) {
            // common ranks are reported under the generic "taxon" label
            r = StringConstants.TAXON;
        }
        return scientificName + "," + r;
    } catch (Exception e) {
        LOGGER.error("error getting scientific name:" + snUrl, e);
    }
    return StringConstants.OCCURRENCES;
}
Aggregations