Use of org.apache.commons.httpclient.HttpClient in the project spatial-portal by AtlasOfLivingAustralia: class SandboxPasteController, method upload.
/**
 * Uploads the given bytes to the sandbox service as a multipart file POST.
 * On a 302 redirect the upload id and filename are parsed out of the
 * {@code Location} header and stored in {@code uploadId} / {@code uploadFn}.
 *
 * @param bytes       the file content to upload
 * @param filename    the original filename, used for the multipart part and temp file suffix
 * @param contentType the MIME type sent with the multipart part
 * @return true if the service responded with a 302 redirect (success), false otherwise
 * @throws Exception if writing the temp file or executing the request fails
 */
private boolean upload(byte[] bytes, String filename, String contentType) throws Exception {
    // Stage the payload in a temp file so it can be streamed as a multipart part.
    File tmp = File.createTempFile("pointsUpload", "_" + filename);
    PostMethod filePost = null;
    try {
        FileUtils.writeByteArrayToFile(tmp, bytes);
        String url = CommonData.getSettings().getProperty("sandbox.url") + "upload/uploadFile";
        HttpClient httpClient = new HttpClient();
        filePost = new PostMethod(url);
        Part[] parts = {new FilePart("myFile", filename, tmp, contentType, null)};
        filePost.setRequestEntity(new MultipartRequestEntity(parts, filePost.getParams()));
        int status = httpClient.executeMethod(filePost);
        // The sandbox signals success with a redirect to ".../preview/<id>?fn=<name>".
        if (status == 302) {
            String location = filePost.getResponseHeader("Location").getValue();
            uploadId = location.substring(location.indexOf("preview/") + "preview/".length(), location.lastIndexOf('?'));
            // Skip past "?fn=" (4 chars) to extract the filename query value.
            uploadFn = location.substring(location.indexOf('?') + 4);
            return true;
        }
        return false;
    } finally {
        // Always return the connection to the pool and remove the staged temp file.
        if (filePost != null) {
            filePost.releaseConnection();
        }
        FileUtils.deleteQuietly(tmp);
    }
}
Use of org.apache.commons.httpclient.HttpClient in the project spatial-portal by AtlasOfLivingAustralia: class ProgressController, method onClick$btnCancel.
/**
 * Cancel-button handler: asks the spatial service to cancel the running job
 * identified by {@code pid}, then detaches this progress dialog.
 * Failures are logged but never surfaced — the dialog closes regardless.
 *
 * @param e the ZK click event (unused)
 */
public void onClick$btnCancel(Event e) {
    GetMethod get = null;
    try {
        HttpClient client = new HttpClient();
        get = new GetMethod((CommonData.getSatServer() + "/ws/jobs/cancel?pid=") + pid);
        get.addRequestHeader(StringConstants.ACCEPT, StringConstants.TEXT_PLAIN);
        // Bound the socket read to the polling interval so cancel can't block the UI.
        client.getHttpConnectionManager().getParams().setSoTimeout(timer.getDelay());
        client.executeMethod(get);
    } catch (Exception ex) {
        LOGGER.error("error getting updated job info pid=" + pid, ex);
    } finally {
        // Return the connection to the manager even when the request fails.
        if (get != null) {
            get.releaseConnection();
        }
    }
    this.detach();
}
Use of org.apache.commons.httpclient.HttpClient in the project spatial-portal by AtlasOfLivingAustralia: class ProgressController, method get.
/**
 * Polls the spatial service for the current status of job {@code pid}.
 *
 * @return the parsed JSON job status on HTTP 200, or null on timeout,
 *         non-200 response, or any other error
 */
JSONObject get() {
    GetMethod get = null;
    try {
        StringBuilder sbProcessUrl = new StringBuilder();
        sbProcessUrl.append(CommonData.getSatServer()).append("/ws/job?pid=").append(pid);
        LOGGER.debug("checking status every '" + timer.getDelay() + "' sec: " + sbProcessUrl.toString());
        HttpClient client = new HttpClient();
        get = new GetMethod(sbProcessUrl.toString());
        get.addRequestHeader(StringConstants.ACCEPT, StringConstants.APPLICATION_JSON);
        // Bound the read to the polling interval; a slow server just means we retry next tick.
        client.getHttpConnectionManager().getParams().setSoTimeout(timer.getDelay());
        int result = client.executeMethod(get);
        if (result == 200) {
            JSONParser jp = new JSONParser();
            return (JSONObject) jp.parse(get.getResponseBodyAsString());
        }
    } catch (SocketTimeoutException e) {
        // Expected under load — the timer will poll again.
        LOGGER.debug("progress timeout exception, will be trying again.");
    } catch (Exception e) {
        LOGGER.error("error getting updated job info pid=" + pid, e);
    } finally {
        // Return the connection to the manager on every exit path.
        if (get != null) {
            get.releaseConnection();
        }
    }
    return null;
}
Use of org.apache.commons.httpclient.HttpClient in the project spatial-portal by AtlasOfLivingAustralia: class AreaReportController, method getGazPoints.
/**
 * Fetches the gazetteer points that fall inside the given area from the layers service.
 *
 * @param wkt the area as WKT, or null to omit the spatial filter
 * @return the parsed JSON array of gazetteer objects, or null on a non-200
 *         response or any error
 */
public static JSONArray getGazPoints(String wkt) {
    PostMethod post = null;
    try {
        // limit=Integer.MAX_VALUE effectively disables server-side paging.
        int limit = Integer.MAX_VALUE;
        String url = CommonData.getLayersServer() + "/objects/inarea/" + CommonData.getSettings().get("area_report_gaz_field") + "?limit=" + limit;
        HttpClient client = new HttpClient();
        post = new PostMethod(url);
        LOGGER.debug(url);
        if (wkt != null) {
            post.addParameter(StringConstants.WKT, wkt);
        }
        post.addRequestHeader(StringConstants.ACCEPT, StringConstants.JSON_JAVASCRIPT_ALL);
        int result = client.executeMethod(post);
        // Read the body once — commons-httpclient streams may not be re-readable.
        String txt = post.getResponseBodyAsString();
        if (result == 200) {
            JSONParser jp = new JSONParser();
            return (JSONArray) jp.parse(txt);
        } else {
            LOGGER.debug(result + ", " + txt);
        }
    } catch (Exception e) {
        LOGGER.error("error getting number of gaz points in an area: " + wkt, e);
    } finally {
        // Return the connection to the manager on every exit path.
        if (post != null) {
            post.releaseConnection();
        }
    }
    return null;
}
Use of org.apache.commons.httpclient.HttpClient in the project spatial-portal by AtlasOfLivingAustralia: class MapComposer, method loadDistributionMap.
/**
 * Maps expert distribution layers for the given species LSIDs, optionally
 * clipped to an area. Without a WKT filter (or with the world WKT), every
 * known distribution WMS layer for the LSIDs is added directly; with a
 * filter, the layers service is queried for the matching distribution WMS
 * URLs first. Finally the matching checklist species are opened.
 *
 * @param lsids species LSIDs (service-side multi-value format)
 * @param wkt   area filter as WKT; {@code CommonData.WORLD_WKT} is treated as "no filter"
 */
private void loadDistributionMap(String lsids, String wkt) {
    String newWkt = wkt;
    if (CommonData.WORLD_WKT.equals(newWkt)) {
        // The whole-world polygon is equivalent to no spatial filter.
        newWkt = null;
    }
    //test for a valid lsid match
    String[] wmsNames = CommonData.getSpeciesDistributionWMS(lsids);
    String[] spcode = CommonData.getSpeciesDistributionSpcode(lsids);
    MapLayer ml;
    JSONParser jp = new JSONParser();
    if (wmsNames.length > 0 && (newWkt == null || newWkt.equals(CommonData.WORLD_WKT))) {
        //add all distribution layers not already on the map
        for (int i = 0; i < wmsNames.length; i++) {
            if (getMapLayerWMS(wmsNames[i]) == null) {
                //map this layer with its recorded scientific name
                try {
                    JSONObject jo = ((JSONObject) jp.parse(Util.readUrl(CommonData.getLayersServer() + "/distribution/" + spcode[i] + "?nowkt=true")));
                    String scientific = jo.get(StringConstants.SCIENTIFIC).toString();
                    String distributionAreaName = jo.get("area_name").toString();
                    String layerName = getNextAreaLayerName(scientific);
                    String html = Util.getMetadataHtmlForDistributionOrChecklist(spcode[i], null, layerName);
                    ml = addWMSLayer(layerName, getNextAreaLayerName(distributionAreaName), wmsNames[i], 0.35f, html, null, LayerUtilitiesImpl.WKT, null, null);
                    ml.setSPCode(spcode[i]);
                    setupMapLayerAsDistributionArea(ml);
                } catch (Exception e) {
                    // Include the cause so parse/network failures are diagnosable.
                    LOGGER.error("failed to parse for distribution: " + spcode[i], e);
                }
            }
        }
    } else if (wmsNames.length > 0 && newWkt != null && !newWkt.equals(CommonData.WORLD_WKT)) {
        String url = CommonData.getLayersServer() + "/distributions";
        PostMethod post = null;
        try {
            HttpClient client = new HttpClient();
            post = new PostMethod(url);
            post.addParameter(StringConstants.WKT, newWkt);
            post.addParameter(StringConstants.LSIDS, lsids);
            post.addRequestHeader(StringConstants.ACCEPT, StringConstants.JSON_JAVASCRIPT_ALL);
            int result = client.executeMethod(post);
            if (result == 200) {
                String txt = post.getResponseBodyAsString();
                JSONArray ja = (JSONArray) jp.parse(txt);
                List<String> found = new ArrayList<String>();
                for (int i = 0; i < ja.size(); i++) {
                    JSONObject jo = (JSONObject) ja.get(i);
                    if (jo.containsKey(StringConstants.WMSURL)) {
                        found.add(jo.get(StringConstants.WMSURL).toString());
                    }
                }
                // NOTE(review): this assumes the service returns WMS URLs aligned
                // 1:1 with wmsNames/spcode ordering — verify against the service.
                // The size guard prevents an IndexOutOfBoundsException when fewer
                // distributions intersect the area than there are known layers.
                for (int i = 0; i < wmsNames.length && i < found.size(); i++) {
                    if (getMapLayerWMS(wmsNames[i]) == null) {
                        String scientific = ((JSONObject) jp.parse(Util.readUrl(CommonData.getLayersServer() + "/distribution/" + spcode[i] + "?nowkt=true"))).get(StringConstants.SCIENTIFIC).toString();
                        String layerName = getNextAreaLayerName(scientific + " area " + (i + 1));
                        String html = Util.getMetadataHtmlForDistributionOrChecklist(spcode[i], null, layerName);
                        ml = addWMSLayer(layerName, getNextAreaLayerName("Expert distribution: " + scientific), found.get(i), 0.35f, html, null, LayerUtilitiesImpl.WKT, null, null);
                        ml.setSPCode(spcode[i]);
                        setupMapLayerAsDistributionArea(ml);
                    }
                }
            }
        } catch (Exception e) {
            // Include the cause so failures are diagnosable.
            LOGGER.error("error posting distributions: " + url, e);
        } finally {
            // Return the connection to the manager on every exit path.
            if (post != null) {
                post.releaseConnection();
            }
        }
    }
    openChecklistSpecies(lsids, newWkt, true);
}
Aggregations