Use of au.org.ala.legend.QueryField in project spatial-portal by AtlasOfLivingAustralia.
Class ScatterplotLayerLegendComposer, method setupCBColour.
private void setupCBColour(Query q) {
    for (int i = cbColour.getItemCount() - 1; i >= 0; i--) {
        if (cbColour.getItemAtIndex(i) != ciColourUser) {
            cbColour.removeItemAt(i);
        }
    }
    if (q != null) {
        List<QueryField> fields = q.getFacetFieldList();
        Collections.sort(fields, new QueryField.QueryFieldComparator());
        String lastGroup = null;
        for (QueryField field : fields) {
            if (field.getFieldType() == QueryField.FieldType.STRING
                    && (q instanceof UserDataQuery
                    || !(StringConstants.OCCURRENCE_YEAR.equalsIgnoreCase(field.getName())
                    || StringConstants.COORDINATE_UNCERTAINTY.equalsIgnoreCase(field.getName())
                    || StringConstants.MONTH.equalsIgnoreCase(field.getName())))) {
                String newGroup = field.getGroup().getName();
                if (!newGroup.equals(lastGroup)) {
                    Comboitem sep = new Comboitem(StringConstants.SEPERATOR);
                    sep.setLabel(StringUtils.center(newGroup, 19));
                    sep.setParent(cbColour);
                    sep.setDisabled(true);
                    lastGroup = newGroup;
                }
                Comboitem ci = new Comboitem(field.getDisplayName());
                ci.setValue(field.getName());
                ci.setParent(cbColour);
            }
        }
    }
}
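The method above sorts the query's facet fields with QueryField.QueryFieldComparator and inserts a disabled separator item whenever the group name changes. Below is a minimal sketch of the same sort-and-group pass, printing to the console instead of populating a ZK Combobox; the class name and the facet field values are invented for illustration.
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import au.org.ala.legend.QueryField;

public class FacetGroupingSketch {
    public static void main(String[] args) {
        //hypothetical facet fields; real values come from Query.getFacetFieldList()
        List<QueryField> fields = new ArrayList<QueryField>();
        fields.add(new QueryField("family", "Family", QueryField.GroupType.CUSTOM, QueryField.FieldType.STRING));
        fields.add(new QueryField("state", "State", QueryField.GroupType.CUSTOM, QueryField.FieldType.STRING));

        //same ordering the composer applies before grouping
        Collections.sort(fields, new QueryField.QueryFieldComparator());

        String lastGroup = null;
        for (QueryField field : fields) {
            String newGroup = field.getGroup().getName();
            if (!newGroup.equals(lastGroup)) {
                //where the composer would add a disabled separator Comboitem
                System.out.println("--- " + newGroup + " ---");
                lastGroup = newGroup;
            }
            System.out.println(field.getDisplayName() + " (" + field.getName() + ")");
        }
    }
}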
Use of au.org.ala.legend.QueryField in project spatial-portal by AtlasOfLivingAustralia.
Class LayerLegendGeneralComposer, method setupCBColour.
private void setupCBColour(MapLayer m) {
    for (int i = cbColour.getItemCount() - 1; i >= 0; i--) {
        if (cbColour.getItemAtIndex(i) != ciColourUser) {
            cbColour.removeItemAt(i);
        }
    }
    Query q = m.getSpeciesQuery();
    if (q != null) {
        List<QueryField> fields = q.getFacetFieldList();
        for (int i = fields.size() - 1; i >= 0; i--) {
            if (fields.get(i) == null) {
                fields.remove(i);
            }
        }
        Collections.sort(fields, new QueryField.QueryFieldComparator());
        String lastGroup = null;
        for (QueryField field : fields) {
            String newGroup = field.getGroup().getName();
            if (!newGroup.equals(lastGroup)) {
                Comboitem sep = new Comboitem(StringConstants.SEPERATOR);
                sep.setLabel("---------------" + StringUtils.center(newGroup, 19) + "---------------");
                sep.setParent(cbColour);
                sep.setDisabled(true);
                lastGroup = newGroup;
            }
            Comboitem ci = new Comboitem(field.getDisplayName());
            ci.setValue(field.getName());
            ci.setParent(cbColour);
        }
    }
}
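The separator label in this variant is built with StringUtils.center, which pads the group name with spaces to a width of 19 characters before the dashes are concatenated on either side. A small stand-alone illustration follows; it assumes Apache Commons Lang's StringUtils, and the class and group names are placeholders.
import org.apache.commons.lang3.StringUtils;

public class SeparatorLabelSketch {
    public static void main(String[] args) {
        //example group name; real values come from QueryField.getGroup().getName()
        String newGroup = "Taxon";
        //same formatting as the disabled separator Comboitem label above;
        //spatial-portal may import the older org.apache.commons.lang.StringUtils,
        //which provides the same center(String, int) method
        String label = "---------------" + StringUtils.center(newGroup, 19) + "---------------";
        //prints the group name centred in a 19-character field, wrapped in dashes
        System.out.println(label);
    }
}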
Use of au.org.ala.legend.QueryField in project spatial-portal by AtlasOfLivingAustralia.
Class CommonData, method getDefaultUploadSamplingFields.
public static List<QueryField> getDefaultUploadSamplingFields() {
    String[] fl = defaultFieldString.split(",");
    List<QueryField> fields = new ArrayList<QueryField>();
    for (int i = 0; i < fl.length; i++) {
        fields.add(new QueryField(fl[i], getFacetLayerDisplayName(fl[i]), QueryField.FieldType.AUTO));
    }
    return fields;
}
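getDefaultUploadSamplingFields splits a comma-separated field list and wraps each entry in a QueryField with FieldType.AUTO. Here is a hedged sketch of the same construction against a hard-coded string; the field codes and class name are invented, and the display-name lookup via getFacetLayerDisplayName is replaced by the raw field name.
import java.util.ArrayList;
import java.util.List;

import au.org.ala.legend.QueryField;

public class DefaultFieldsSketch {
    public static void main(String[] args) {
        //hypothetical stand-in for CommonData.defaultFieldString
        String defaultFieldString = "cl22,cl20,el874";

        List<QueryField> fields = new ArrayList<QueryField>();
        for (String name : defaultFieldString.split(",")) {
            //CommonData resolves a display name via getFacetLayerDisplayName(name);
            //the raw field name is used here instead
            fields.add(new QueryField(name, name, QueryField.FieldType.AUTO));
        }

        for (QueryField f : fields) {
            System.out.println(f.getName() + " -> " + f.getDisplayName());
        }
    }
}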
Use of au.org.ala.legend.QueryField in project spatial-portal by AtlasOfLivingAustralia.
Class CommonData, method init18n.
private static void init18n() {
    try {
        Map<String, String[][]> tmpMap = new LinkedHashMap<String, String[][]>();
        List<QueryField> tmpList = new ArrayList<QueryField>();
        //get the JSON from the WS
        JSONParser jp = new JSONParser();
        JSONArray values = (JSONArray) jp.parse(Util.readUrl(CommonData.getBiocacheServer() + FACET_SUFFIX));
        LOGGER.debug(values);
        Map<String, QueryField.FieldType> dataTypes = getDataTypes();
        for (Object v : values) {
            JSONObject value = (JSONObject) v;
            //extract the group
            String title = value.get(StringConstants.TITLE).toString();
            //now get the facets themselves
            List<Map<String, String>> facets = (List<Map<String, String>>) value.get("facets");
            String[][] facetValues = new String[facets.size()][2];
            int i = 0;
            for (Map<String, String> facet : facets) {
                String field = facet.get("field");
                //only add it if it is not included in the ignore list
                if (!CommonData.ignoredFacets.contains(field)) {
                    String i18n = i18nProperites.getProperty("facet." + field, field);
                    //TODO: update biocache i18n instead of doing this
                    if ("data_provider".equals(field)) {
                        i18n = "Data Provider";
                    }
                    //use current layer names for facets
                    try {
                        String layername = CommonData.getFacetLayerName(field);
                        if (i18n == null || layername != null) {
                            i18n = CommonData.getLayerDisplayName(layername);
                        }
                    } catch (Exception e) {
                        //not a facet layer; keep the existing label
                    }
                    facetValues[i][0] = field;
                    facetValues[i][1] = i18n;
                    QueryField.FieldType ftype = dataTypes.containsKey(field) ? dataTypes.get(field) : QueryField.FieldType.STRING;
                    QueryField qf = new QueryField(field, i18n, QueryField.GroupType.getGroupType(title), ftype);
                    tmpList.add(qf);
                    i++;
                }
            }
            tmpMap.put(title, facetValues);
        }
        //add a bunch of configured extra fields from the default values
        for (String f : CommonData.customFacets) {
            String i18n = i18nProperites.getProperty("facet." + f, f);
            tmpList.add(new QueryField(f, i18n, QueryField.GroupType.CUSTOM, QueryField.FieldType.STRING));
        }
        facetQueryFieldList = tmpList;
        LOGGER.debug("Grouped Facets: " + tmpMap);
        LOGGER.debug("facet query list : " + facetQueryFieldList);
    } catch (Exception e) {
        LOGGER.error("failed to init i18n", e);
    }
}
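init18n expects the biocache facets web service to return a JSON array of groups, each with a title and a facets list whose entries carry a field name; each facet becomes a QueryField grouped by that title. A simplified sketch of the parsing with json-simple against a hand-written JSON string follows; the title, field names and class name are made up, and the i18n and data-type lookups are omitted.
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

import au.org.ala.legend.QueryField;

public class FacetJsonSketch {
    public static void main(String[] args) throws Exception {
        //hand-written JSON mirroring the structure init18n reads from
        //getBiocacheServer() + FACET_SUFFIX; the title and field names are invented
        String json = "[{\"title\":\"Taxon\",\"facets\":[{\"field\":\"family\"},{\"field\":\"genus\"}]}]";

        JSONParser jp = new JSONParser();
        JSONArray values = (JSONArray) jp.parse(json);

        for (Object v : values) {
            JSONObject value = (JSONObject) v;
            String title = value.get("title").toString();
            JSONArray facets = (JSONArray) value.get("facets");
            for (Object f : facets) {
                String field = ((JSONObject) f).get("field").toString();
                //init18n additionally applies i18n labels, layer display names and
                //data types; this sketch keeps the raw field name and STRING
                QueryField qf = new QueryField(field, field,
                        QueryField.GroupType.getGroupType(title), QueryField.FieldType.STRING);
                System.out.println(title + ": " + qf.getName());
            }
        }
    }
}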
Use of au.org.ala.legend.QueryField in project spatial-portal by AtlasOfLivingAustralia.
Class SamplingDownloadUtil, method downloadSecond.
public static byte[] downloadSecond(MapComposer mc, Query downloadSecondQuery, String[] downloadSecondLayers, String[] downloadSecondLayersDN) {
    LOGGER.debug("attempting to sample biocache records with analysis layers: " + downloadSecondQuery);
    if (downloadSecondQuery != null) {
        try {
            List<QueryField> fields = new ArrayList<QueryField>();
            fields.add(new QueryField(downloadSecondQuery.getRecordIdFieldName()));
            fields.add(new QueryField(downloadSecondQuery.getRecordLongitudeFieldName()));
            fields.add(new QueryField(downloadSecondQuery.getRecordLatitudeFieldName()));
            String results = downloadSecondQuery.sample(fields);
            if (results != null) {
                CSVReader csvreader = new CSVReader(new StringReader(results));
                List<String[]> csv = csvreader.readAll();
                csvreader.close();
                int longitudeColumn = Util.findInArray(downloadSecondQuery.getRecordLongitudeFieldDisplayName(), csv.get(0));
                int latitudeColumn = Util.findInArray(downloadSecondQuery.getRecordLatitudeFieldDisplayName(), csv.get(0));
                int idColumn = Util.findInArray(downloadSecondQuery.getRecordIdFieldDisplayName(), csv.get(0));
                //parse the sampled id, longitude and latitude columns
                double[] points = new double[(csv.size() - 1) * 2];
                String[] ids = new String[csv.size() - 1];
                int pos = 0;
                for (int i = 1; i < csv.size(); i++) {
                    try {
                        points[pos] = Double.parseDouble(csv.get(i)[longitudeColumn]);
                        points[pos + 1] = Double.parseDouble(csv.get(i)[latitudeColumn]);
                    } catch (Exception e) {
                        points[pos] = Double.NaN;
                        points[pos + 1] = Double.NaN;
                    }
                    ids[pos / 2] = csv.get(i)[idColumn];
                    pos += 2;
                }
                //reshape the flat lon/lat array into one [longitude, latitude] pair per record
                double[][] p = new double[points.length / 2][2];
                for (int i = 0; i < points.length; i += 2) {
                    p[i / 2][0] = points[i];
                    p[i / 2][1] = points[i + 1];
                }
                List<String> layers = new ArrayList<String>();
                StringBuilder sb = new StringBuilder();
                sb.append("id,longitude,latitude");
                for (int i = 0; i < downloadSecondLayers.length; i++) {
                    String layer = downloadSecondLayers[i];
                    sb.append(",");
                    String name = downloadSecondLayersDN[i];
                    sb.append(name);
                    layers.add(CommonData.getLayerFacetName(layer));
                }
                //intersect the points with the analysis layers and build the CSV rows
                List<String[]> sample = Sampling.sampling(layers, p);
                if (!sample.isEmpty()) {
                    for (int j = 0; j < sample.get(0).length; j++) {
                        sb.append("\n");
                        sb.append(ids[j]).append(",").append(p[j][0]).append(",").append(p[j][1]);
                        for (int i = 0; i < sample.size(); i++) {
                            sb.append(",").append(sample.get(i)[j]);
                        }
                    }
                }
                //return the CSV inside a zip
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ZipOutputStream zos = new ZipOutputStream(baos);
                ZipEntry anEntry = new ZipEntry("analysis_output_intersect.csv");
                zos.putNextEntry(anEntry);
                zos.write(sb.toString().getBytes());
                zos.close();
                return baos.toByteArray();
            }
        } catch (Exception e) {
            LOGGER.error("error downloading sampling records", e);
        }
    }
    return null;
}
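The final step above wraps the assembled CSV in a single-entry zip and returns it as a byte array. Below is a self-contained sketch of just that packaging step using java.util.zip; the helper and class names are invented.
import java.io.ByteArrayOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class ZipCsvSketch {
    //same packaging pattern downloadSecond uses for the sampled CSV
    public static byte[] zipCsv(String csv, String entryName) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ZipOutputStream zos = new ZipOutputStream(baos);
        zos.putNextEntry(new ZipEntry(entryName));
        zos.write(csv.getBytes());
        zos.closeEntry();
        zos.close();
        return baos.toByteArray();
    }

    public static void main(String[] args) throws Exception {
        byte[] zip = zipCsv("id,longitude,latitude\n1,143.5,-35.2", "analysis_output_intersect.csv");
        System.out.println("zip size: " + zip.length + " bytes");
    }
}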