
Example 21 with CSVReader

Use of au.com.bytecode.opencsv.CSVReader in the spatial-portal project by AtlasOfLivingAustralia.

From class CommonData, method getSpeciesListCountsKosher:

public static Map getSpeciesListCountsKosher(boolean refresh) {
    if (speciesListCountsKosher == null || refresh) {
        Map m = new HashMap();
        HttpClient client = new HttpClient();
        String url = biocacheServer + "/occurrences/facets/download?facets=species_guid&count=true" + "&q=geospatial_kosher:true";
        LOGGER.debug(url);
        GetMethod get = new GetMethod(url);
        try {
            client.executeMethod(get);
            CSVReader csv = new CSVReader(new BufferedReader(new InputStreamReader(get.getResponseBodyAsStream())));
            String[] row;
            //skip header
            csv.readNext();
            while ((row = csv.readNext()) != null) {
                try {
                    m.put(row[0], Long.parseLong(row[1]));
                } catch (Exception e) {
                    LOGGER.error("error getting species_guid,count (kosher): " + url, e);
                }
            }
        } catch (Exception e) {
            LOGGER.error("error getting species list from: " + url);
        }
        speciesListCountsUpdatedKosher = System.currentTimeMillis();
        speciesListCountsKosher = m;
    }
    return speciesListCountsKosher;
}
Also used: CSVReader (au.com.bytecode.opencsv.CSVReader), HttpClient (org.apache.commons.httpclient.HttpClient), GetMethod (org.apache.commons.httpclient.methods.GetMethod)
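
The method above parses the facet CSV straight off the HTTP response but never closes the CSVReader or releases the connection. Below is a minimal sketch of the same species_guid/count parse with explicit cleanup, assuming only the opencsv 2.x and commons-httpclient 3.x APIs already shown in the example; the readFacetCounts helper name is hypothetical.

import au.com.bytecode.opencsv.CSVReader;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: fetch a facets/download CSV and build a guid -> count map.
static Map<String, Long> readFacetCounts(String url) throws Exception {
    HttpClient client = new HttpClient();
    GetMethod get = new GetMethod(url);
    Map<String, Long> counts = new HashMap<String, Long>();
    try {
        client.executeMethod(get);
        CSVReader csv = new CSVReader(
                new BufferedReader(new InputStreamReader(get.getResponseBodyAsStream())));
        try {
            // skip the header row (species_guid,count)
            csv.readNext();
            String[] row;
            while ((row = csv.readNext()) != null) {
                if (row.length >= 2) {
                    // a malformed count will propagate as NumberFormatException
                    counts.put(row[0], Long.parseLong(row[1]));
                }
            }
        } finally {
            csv.close();
        }
    } finally {
        // return the pooled connection whether or not the parse succeeded
        get.releaseConnection();
    }
    return counts;
}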

Example 22 with CSVReader

Use of au.com.bytecode.opencsv.CSVReader in the spatial-portal project by AtlasOfLivingAustralia.

From class CommonData, method getSpeciesListCounts:

public static Map getSpeciesListCounts(boolean refresh) {
    if (speciesListCounts == null || refresh) {
        Map m = new HashMap();
        HttpClient client = new HttpClient();
        String url = biocacheServer + "/occurrences/facets/download?facets=species_guid&count=true" + "&q=geospatial_kosher:*";
        LOGGER.debug(url);
        GetMethod get = new GetMethod(url);
        try {
            client.executeMethod(get);
            CSVReader csv = new CSVReader(new BufferedReader(new InputStreamReader(get.getResponseBodyAsStream())));
            String[] row;
            //skip header
            csv.readNext();
            while ((row = csv.readNext()) != null) {
                try {
                    m.put(row[0], Long.parseLong(row[1]));
                } catch (Exception e) {
                    LOGGER.error("error getting species_guid,count: " + url, e);
                }
            }
        } catch (Exception e) {
            LOGGER.error("error getting species list from: " + url);
        }
        speciesListCountsUpdated = System.currentTimeMillis();
        speciesListCounts = m;
    }
    return speciesListCounts;
}
Also used: CSVReader (au.com.bytecode.opencsv.CSVReader), HttpClient (org.apache.commons.httpclient.HttpClient), GetMethod (org.apache.commons.httpclient.methods.GetMethod)
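
Examples 21 and 22 differ only in the geospatial_kosher filter on the query string, so the duplicated download-and-parse code could plausibly be collapsed into one loader. The fragment below is a sketch of that refactor, written as if it sat inside CommonData: it reuses the hypothetical readFacetCounts helper from Example 21 and the existing biocacheServer and LOGGER members.

// Hypothetical refactor inside CommonData: one parameterized loader for both caches.
private static Map<String, Long> loadSpeciesListCounts(boolean kosherOnly) {
    String url = biocacheServer
            + "/occurrences/facets/download?facets=species_guid&count=true"
            + "&q=geospatial_kosher:" + (kosherOnly ? "true" : "*");
    try {
        return readFacetCounts(url);  // helper sketched after Example 21
    } catch (Exception e) {
        LOGGER.error("error getting species list from: " + url, e);
        return new HashMap<String, Long>();
    }
}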

Example 23 with CSVReader

Use of au.com.bytecode.opencsv.CSVReader in the spatial-portal project by AtlasOfLivingAustralia.

From class BiocacheQuery, method speciesList:

/**
 * Get species list for this query.
 *
 * @return species list as String containing CSV.
 */
@Override
public String speciesList() {
    if (speciesList != null) {
        return speciesList;
    }
    HttpClient client = new HttpClient();
    String url = biocacheServer + SPECIES_LIST_SERVICE_CSV + "&q=" + getQ() + getQc();
    LOGGER.debug(url);
    GetMethod get = new GetMethod(url);
    try {
        client.executeMethod(get);
        speciesList = get.getResponseBodyAsString();
        //add 'Other' correction and add additional columns
        List<String> header = CommonData.getSpeciesListAdditionalColumnsHeader();
        StringBuilder newlist = new StringBuilder();
        int total = getOccurrenceCount();
        CSVReader csv = new CSVReader(new StringReader(speciesList));
        String[] line;
        int count = 0;
        int lastpos = 0;
        while ((line = csv.readNext()) != null) {
            int nextpos = speciesList.indexOf('\n', lastpos + 1);
            if (nextpos < 0)
                nextpos = speciesList.length();
            newlist.append(speciesList.substring(lastpos, nextpos));
            List<String> list = header;
            if (lastpos != 0) {
                list = CommonData.getSpeciesListAdditionalColumns(header, line[0]);
            }
            for (int i = 0; i < list.size(); i++) {
                newlist.append(",\"").append(list.get(i).replace("\"", "\"\"").replace("\\", "\\\\")).append("\"");
            }
            lastpos = nextpos;
            try {
                count += Integer.parseInt(line[line.length - 1]);
            } catch (Exception e) {
            }
        }
        if (total - count > 0) {
            String correction = "\n,,,,,,,,,,Other (not species rank)," + (total - count);
            newlist.append(correction);
        }
        speciesList = newlist.toString();
    } catch (Exception e) {
        LOGGER.error("error getting species list from: " + url);
    }
    return speciesList;
}
Also used: CSVReader (au.com.bytecode.opencsv.CSVReader), HttpClient (org.apache.commons.httpclient.HttpClient), GetMethod (org.apache.commons.httpclient.methods.GetMethod)
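
speciesList() appends the extra columns by tracking raw '\n' offsets in the response text in parallel with the parsed rows, which breaks if a field ever contains an embedded newline. Below is a sketch of the same idea done purely on parsed rows and re-serialized with opencsv's CSVWriter; the appendColumns name and the ColumnProvider interface stand in for CommonData.getSpeciesListAdditionalColumns and are assumptions, not the project's API.

import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;

// Hypothetical: append extra columns to every row of a CSV string by
// parsing and re-writing rows instead of splicing the raw text.
static String appendColumns(String csvText, List<String> headerExtras,
                            ColumnProvider provider) throws Exception {
    StringWriter out = new StringWriter();
    CSVReader reader = new CSVReader(new StringReader(csvText));
    CSVWriter writer = new CSVWriter(out);
    try {
        String[] row;
        boolean header = true;
        while ((row = reader.readNext()) != null) {
            // header row gets the extra column names, data rows get per-taxon values
            List<String> extras = header ? headerExtras : provider.columnsFor(row[0]);
            header = false;
            String[] widened = Arrays.copyOf(row, row.length + extras.size());
            for (int i = 0; i < extras.size(); i++) {
                widened[row.length + i] = extras.get(i);
            }
            writer.writeNext(widened);
        }
    } finally {
        reader.close();
        writer.close();
    }
    return out.toString();
}

// Stand-in for CommonData.getSpeciesListAdditionalColumns(header, lsid).
interface ColumnProvider {
    List<String> columnsFor(String lsid);
}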

Example 24 with CSVReader

Use of au.com.bytecode.opencsv.CSVReader in the spatial-portal project by AtlasOfLivingAustralia.

From class SamplingDownloadUtil, method downloadSecond:

public static byte[] downloadSecond(MapComposer mc, Query downloadSecondQuery, String[] downloadSecondLayers, String[] downloadSecondLayersDN) {
    LOGGER.debug("attempting to sample biocache records with analysis layers: " + downloadSecondQuery);
    if (downloadSecondQuery != null) {
        try {
            List<QueryField> fields = new ArrayList<QueryField>();
            fields.add(new QueryField(downloadSecondQuery.getRecordIdFieldName()));
            fields.add(new QueryField(downloadSecondQuery.getRecordLongitudeFieldName()));
            fields.add(new QueryField(downloadSecondQuery.getRecordLatitudeFieldName()));
            String results = downloadSecondQuery.sample(fields);
            if (results != null) {
                CSVReader csvreader = new CSVReader(new StringReader(results));
                List<String[]> csv = csvreader.readAll();
                csvreader.close();
                int longitudeColumn = Util.findInArray(downloadSecondQuery.getRecordLongitudeFieldDisplayName(), csv.get(0));
                int latitudeColumn = Util.findInArray(downloadSecondQuery.getRecordLatitudeFieldDisplayName(), csv.get(0));
                int idColumn = Util.findInArray(downloadSecondQuery.getRecordIdFieldDisplayName(), csv.get(0));
                double[] points = new double[(csv.size() - 1) * 2];
                String[] ids = new String[csv.size() - 1];
                int pos = 0;
                for (int i = 1; i < csv.size(); i++) {
                    try {
                        points[pos] = Double.parseDouble(csv.get(i)[longitudeColumn]);
                        points[pos + 1] = Double.parseDouble(csv.get(i)[latitudeColumn]);
                    } catch (Exception e) {
                        points[pos] = Double.NaN;
                        points[pos + 1] = Double.NaN;
                    }
                    ids[pos / 2] = csv.get(i)[idColumn];
                    pos += 2;
                }
                double[][] p = new double[points.length / 2][2];
                for (int i = 0; i < points.length; i += 2) {
                    p[i / 2][0] = points[i];
                    p[i / 2][1] = points[i + 1];
                }
                List<String> layers = new ArrayList<String>();
                StringBuilder sb = new StringBuilder();
                sb.append("id,longitude,latitude");
                for (int i = 0; i < downloadSecondLayers.length; i++) {
                    String layer = downloadSecondLayers[i];
                    sb.append(",");
                    String name = downloadSecondLayersDN[i];
                    sb.append(name);
                    layers.add(CommonData.getLayerFacetName(layer));
                }
                List<String[]> sample = Sampling.sampling(layers, p);
                if (!sample.isEmpty()) {
                    for (int j = 0; j < sample.get(0).length; j++) {
                        sb.append("\n");
                        sb.append(ids[j]).append(",").append(p[j][0]).append(",").append(p[j][1]);
                        for (int i = 0; i < sample.size(); i++) {
                            sb.append(",").append(sample.get(i)[j]);
                        }
                    }
                }
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ZipOutputStream zos = new ZipOutputStream(baos);
                ZipEntry anEntry = new ZipEntry("analysis_output_intersect.csv");
                zos.putNextEntry(anEntry);
                zos.write(sb.toString().getBytes());
                zos.close();
                return baos.toByteArray();
            }
        } catch (Exception e) {
            LOGGER.error("error downloading samping records", e);
        }
    }
    return null;
}
Also used: CSVReader (au.com.bytecode.opencsv.CSVReader), ZipEntry (java.util.zip.ZipEntry), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), QueryField (au.org.ala.legend.QueryField), ZipOutputStream (java.util.zip.ZipOutputStream), StringReader (java.io.StringReader)
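
The last step of downloadSecond, packaging the assembled CSV as a single-entry zip, is a small reusable piece. Here it is sketched in isolation using only java.util.zip; the entry name is taken from the example, while the zipSingleCsv helper name is a placeholder.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Hypothetical helper: return a zip archive containing one CSV entry.
static byte[] zipSingleCsv(String entryName, String csvText) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    try {
        zos.putNextEntry(new ZipEntry(entryName));
        zos.write(csvText.getBytes(StandardCharsets.UTF_8));
        zos.closeEntry();
    } finally {
        // closing the stream finishes the zip central directory
        zos.close();
    }
    return baos.toByteArray();
}

// e.g. return zipSingleCsv("analysis_output_intersect.csv", sb.toString());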

Example 25 with CSVReader

Use of au.com.bytecode.opencsv.CSVReader in the spatial-portal project by AtlasOfLivingAustralia.

From class MapComposer, method openChecklistSpecies:

void openChecklistSpecies(String lsids, String wkt, boolean mapIfOnlyOne) {
    try {
        //species checklists
        String[] finallist = Util.getDistributionsOrChecklists(StringConstants.CHECKLISTS, wkt, lsids, null);
        //open for optional mapping of areas
        if (finallist.length > 1) {
            if (mapIfOnlyOne && finallist.length == 2) {
                try {
                    String[] row;
                    CSVReader csv = new CSVReader(new StringReader(finallist[1]));
                    row = csv.readNext();
                    csv.close();
                    if (getMapLayerWMS(CommonData.getSpeciesChecklistWMSFromSpcode(row[0])[1]) == null) {
                        //map it
                        String[] mapping = CommonData.getSpeciesChecklistWMSFromSpcode(row[0]);
                        String displayName = mapping[0] + " area";
                        if (row[11] != null && row[11].length() > 0) {
                            displayName = row[11];
                        }
                        String layerName = getNextAreaLayerName(row[0] + " area");
                        String html = Util.getMetadataHtmlForDistributionOrChecklist(row[0], row, layerName);
                        MapLayer ml = getMapComposer().addWMSLayer(layerName, displayName, mapping[1], 0.6f, html, null, LayerUtilitiesImpl.WKT, null, null);
                        ml.setSPCode(row[0]);
                        setupMapLayerAsDistributionArea(ml);
                    }
                } catch (Exception e) {
                    LOGGER.error("error opening checklist species", e);
                }
            } else {
                if (hasFellow(StringConstants.DISTRIBUTION_RESULTS)) {
                    getFellowIfAny(StringConstants.DISTRIBUTION_RESULTS).detach();
                }
                Map params = new HashMap();
                params.put(StringConstants.TITLE, "Checklist species");
                params.put(StringConstants.SIZE, String.valueOf(finallist.length - 1));
                params.put(StringConstants.TABLE, finallist);
                Window window = (Window) Executions.createComponents("WEB-INF/zul/results/AnalysisDistributionResults.zul", this, params);
                try {
                    window.setParent(this);
                    window.doModal();
                } catch (Exception e) {
                    LOGGER.error("error opening checklist species dialog", e);
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("error opening distribution area dialog", e);
    }
}
Also used: Window (org.zkoss.zul.Window), CSVReader (au.com.bytecode.opencsv.CSVReader), HasMapLayer (au.org.emii.portal.menu.HasMapLayer), MapLayer (au.org.emii.portal.menu.MapLayer), RemoteMap (au.org.emii.portal.util.RemoteMap), ParseException (org.json.simple.parser.ParseException)
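
In openChecklistSpecies, CSVReader is used only to split one pre-formatted record (finallist[1]) into its fields. A minimal sketch of that single-record parse with the reader closed on every path; the parseCsvRecord helper name is hypothetical.

import au.com.bytecode.opencsv.CSVReader;
import java.io.IOException;
import java.io.StringReader;

// Hypothetical helper: parse one CSV record into its fields.
static String[] parseCsvRecord(String record) throws IOException {
    CSVReader csv = new CSVReader(new StringReader(record));
    try {
        return csv.readNext();  // may be null for an empty string
    } finally {
        csv.close();
    }
}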

Aggregations

CSVReader (au.com.bytecode.opencsv.CSVReader): 32
StringReader (java.io.StringReader): 12
IOException (java.io.IOException): 8
ArrayList (java.util.ArrayList): 6
HttpClient (org.apache.commons.httpclient.HttpClient): 5
GetMethod (org.apache.commons.httpclient.methods.GetMethod): 5
InputStreamReader (java.io.InputStreamReader): 4
HashMap (java.util.HashMap): 4
JSONObject (org.json.simple.JSONObject): 4
JSONParser (org.json.simple.parser.JSONParser): 4
Query (au.org.ala.spatial.util.Query): 3
FileReader (java.io.FileReader): 3
ZipEntry (java.util.zip.ZipEntry): 3
ZipInputStream (java.util.zip.ZipInputStream): 3
JSONArray (org.json.simple.JSONArray): 3
ParseException (org.json.simple.parser.ParseException): 3
Event (org.zkoss.zk.ui.event.Event): 3
Facet (au.org.ala.legend.Facet): 2
LegendObject (au.org.ala.legend.LegendObject): 2
SelectedArea (au.org.emii.portal.menu.SelectedArea): 2