use of org.geotoolkit.storage.feature.FeatureStoreRuntimeException in project geotoolkit by Geomatys.
the class JDBCFeatureWriterUpdate method write.
@Override
public void write() throws FeatureStoreRuntimeException {
    if (last == null) {
        throw new FeatureStoreRuntimeException("Cursor is not on a record.");
    }
    try {
        // figure out what the fid is
        final PrimaryKey key = store.getDatabaseModel().getPrimaryKey(type.getName().toString());
        final String fid = key.encodeFID(rs);
        final FilterFactory ff = store.getFilterFactory();
        final ResourceId filter = ff.resourceId(fid);
        // figure out which attributes changed
        final Map<String, Object> changes = new HashMap<>();
        for (final PropertyType att : type.getProperties(true)) {
            if (att instanceof FeatureAssociationRole || att instanceof Operation || AttributeConvention.contains(att.getName())) {
                // not a writable property
                continue;
            }
            changes.put(att.getName().tip().toString(), last.getPropertyValue(att.getName().toString()));
        }
        // do the write
        store.updateSingle(type, changes, filter, st.getConnection());
    } catch (Exception e) {
        throw new FeatureStoreRuntimeException(e);
    }
}
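For context, write() is driven by a caller that positions the writer on a record first. A minimal caller sketch: the getFeatureWriter call and the property name are assumptions for illustration; only the iterate/mutate/write() protocol and the unchecked FeatureStoreRuntimeException come from the snippets on this page.

    // Hypothetical caller sketch -- writer acquisition and the property name
    // are assumptions; the hasNext()/next()/write() protocol is from above.
    try (FeatureWriter writer = store.getFeatureWriter(new Query(typeName))) {
        while (writer.hasNext()) {
            final Feature feature = writer.next();      // positions the cursor ("last")
            feature.setPropertyValue("status", "updated");
            writer.write();                             // persists the diff computed above
        }
    } catch (FeatureStoreRuntimeException ex) {
        // unchecked: surfaces JDBC failures wrapped by the writer
        throw new DataStoreException(ex.getMessage(), ex);
    }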
use of org.geotoolkit.storage.feature.FeatureStoreRuntimeException in project geotoolkit by Geomatys.
the class FeatureSetToReferenceConverter method convert.
/**
 * {@inheritDoc}
 */
@Override
public Reference convert(final FeatureSet source, final Map<String, Object> params) throws UnconvertibleObjectException {
    if (params.get(TMP_DIR_PATH) == null) {
        throw new UnconvertibleObjectException("The output directory should be defined.");
    }
    if (params.get(TMP_DIR_URL) == null) {
        throw new UnconvertibleObjectException("The output directory URL should be defined.");
    }
    if (source == null) {
        throw new UnconvertibleObjectException("The output data should be defined.");
    }
    // TODO : useless test, null test above is all we need, fix this and other converters
    if (!(source instanceof FeatureSet)) {
        throw new UnconvertibleObjectException("The requested output data is not an instance of FeatureSet.");
    }
    Reference reference = new Reference();
    reference.setMimeType((String) params.get(MIME));
    reference.setEncoding((String) params.get(ENCODING));
    final FeatureType ft;
    try {
        ft = source.getType();
    } catch (DataStoreException ex) {
        throw new UnconvertibleObjectException("Can't access FeatureSet type.", ex);
    }
    final String namespace = NamesExt.getNamespace(ft.getName());
    final Map<String, String> schemaLocation = new HashMap<>();
    final String randomFileName = UUID.randomUUID().toString();
    if (WPSMimeType.APP_GEOJSON.val().equalsIgnoreCase(reference.getMimeType())) {
        // create file
        final Path dataFile = buildPath(params, randomFileName + ".json");
        try (OutputStream fos = Files.newOutputStream(dataFile, CREATE, TRUNCATE_EXISTING, WRITE);
                GeoJSONStreamWriter writer = new GeoJSONStreamWriter(fos, ft, WPSConvertersUtils.FRACTION_DIGITS);
                Stream<Feature> st = source.features(false)) {
            Iterator<Feature> iterator = st.iterator();
            while (iterator.hasNext()) {
                Feature next = iterator.next();
                Feature neww = writer.next();
                FeatureExt.copy(next, neww, false);
                writer.write();
            }
        } catch (DataStoreException e) {
            throw new UnconvertibleObjectException("Can't write Feature into GeoJSON output stream.", e);
        } catch (IOException e) {
            throw new UnconvertibleObjectException(e);
        }
        final String relLoc = getRelativeLocation(dataFile, params);
        reference.setHref(params.get(TMP_DIR_URL) + "/" + relLoc);
        reference.setSchema(null);
    } else if (WPSMimeType.APP_GML.val().equalsIgnoreCase(reference.getMimeType())
            || WPSMimeType.TEXT_XML.val().equalsIgnoreCase(reference.getMimeType())
            || WPSMimeType.TEXT_GML.val().equalsIgnoreCase(reference.getMimeType())) {
        try {
            reference.setSchema(WPSConvertersUtils.writeSchema(ft, params));
            schemaLocation.put(namespace, reference.getSchema());
        } catch (JAXBException ex) {
            throw new UnconvertibleObjectException("Can't write FeatureType into xsd schema.", ex);
        } catch (IOException ex) {
            throw new UnconvertibleObjectException("Can't create xsd schema file.", ex);
        }
        // Write Feature
        final JAXPStreamFeatureWriter featureWriter = new JAXPStreamFeatureWriter(schemaLocation);
        // create file
        final Path dataFile = buildPath(params, randomFileName + ".xml");
        try (final OutputStream dataStream = Files.newOutputStream(dataFile);
                final AutoCloseable xmlCloser = () -> featureWriter.dispose()) {
            // Write feature in file
            featureWriter.write(source, dataStream);
            final String relLoc = getRelativeLocation(dataFile, params);
            reference.setHref(params.get(TMP_DIR_URL) + "/" + relLoc);
        } catch (XMLStreamException ex) {
            throw new UnconvertibleObjectException("Stax exception while writing the feature collection", ex);
        } catch (DataStoreException ex) {
            throw new UnconvertibleObjectException("FeatureStore exception while writing the feature collection", ex);
        } catch (FeatureStoreRuntimeException ex) {
            throw new UnconvertibleObjectException("FeatureStoreRuntimeException while writing the feature collection", ex);
        } catch (Exception ex) {
            throw new UnconvertibleObjectException(ex);
        }
    } else {
        throw new UnconvertibleObjectException("Unsupported mime-type for " + this.getClass().getName() + " : " + reference.getMimeType());
    }
    return reference;
}
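A hedged invocation sketch for context: the constant keys (TMP_DIR_PATH, TMP_DIR_URL, MIME, ENCODING) come from the method above, while the concrete values and the way the converter instance is obtained are placeholders.

    // Hypothetical set-up; keys are from convert() above, values are placeholders.
    final Map<String, Object> params = new HashMap<>();
    params.put(TMP_DIR_PATH, "/tmp/wps");                 // directory where the .json/.xml file is written
    params.put(TMP_DIR_URL, "http://localhost/wps/tmp");  // public URL prefix used in the href
    params.put(MIME, WPSMimeType.APP_GEOJSON.val());      // selects the GeoJSON branch
    params.put(ENCODING, "UTF-8");
    final Reference reference = converter.convert(featureSet, params);  // converter obtained elsewhere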
use of org.geotoolkit.storage.feature.FeatureStoreRuntimeException in project geotoolkit by Geomatys.
the class GenericFeatureWriter method write.
/**
 * {@inheritDoc }
 */
@Override
public void write() throws FeatureStoreRuntimeException {
    if (currentFeature != null) {
        final Filter filter = FeatureExt.getId(currentFeature);
        if (remove) {
            // it's a remove operation
            try {
                store.removeFeatures(typeName, filter);
            } catch (DataStoreException ex) {
                throw new FeatureStoreRuntimeException(ex);
            }
        } else {
            // it's a modify operation
            final Map<String, Object> values = new HashMap<>();
            for (PropertyType desc : type.getProperties(true)) {
                if (desc instanceof AttributeType) {
                    final String propName = desc.getName().toString();
                    final Object original = currentFeature.getPropertyValue(propName);
                    final Object mod = modified.getProperty(propName).getValue();
                    // check if the value was modified
                    if (!safeEqual(original, mod)) {
                        // value has changed
                        values.put(propName, mod);
                    }
                }
            }
            if (!values.isEmpty()) {
                try {
                    store.updateFeatures(typeName.toString(), filter, values);
                } catch (DataStoreException ex) {
                    throw new FeatureStoreRuntimeException(ex);
                }
            }
        }
    } else {
        if (modified != null) {
            // it's an add operation
            try {
                final List<ResourceId> res = store.addFeatures(typeName.toString(), Collections.singleton(modified));
                if (!res.isEmpty()) {
                    modified.setPropertyValue(AttributeConvention.IDENTIFIER, res.get(0).getIdentifier());
                }
            } catch (DataStoreException ex) {
                throw new FeatureStoreRuntimeException(ex);
            }
            modified = null;
        }
    }
    remove = false;
}
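The safeEqual helper is not shown here; a null-safe comparison in the spirit of java.util.Objects.equals is all the loop strictly requires, though the real helper may additionally deep-compare arrays or geometries. A minimal stand-in:

    // Minimal null-safe stand-in for the safeEqual helper referenced above.
    private static boolean safeEqual(final Object a, final Object b) {
        return java.util.Objects.equals(a, b);
    }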
use of org.geotoolkit.storage.feature.FeatureStoreRuntimeException in project geotoolkit by Geomatys.
the class RenderingRoutines method getIterator.
public static GraphicIterator getIterator(final FeatureSet features, final RenderingContext2D renderingContext) throws DataStoreException {
    final FeatureIterator iterator;
    final Stream<Feature> stream = features.features(false);
    final Iterator<Feature> i = stream.iterator();
    iterator = new FeatureIterator() {
        @Override
        public Feature next() throws FeatureStoreRuntimeException {
            return i.next();
        }

        @Override
        public boolean hasNext() throws FeatureStoreRuntimeException {
            return i.hasNext();
        }

        @Override
        public void close() {
            stream.close();
        }
    };
    final ProjectedFeature projectedFeature = new ProjectedFeature(renderingContext);
    return new GraphicIterator(iterator, projectedFeature);
}
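Because close() forwards to Stream.close(), callers should treat the adapter as a resource. A consumption sketch, assuming FeatureIterator is AutoCloseable (its close() method above suggests it is):

    // Consumption sketch: releases the underlying Stream even if
    // hasNext()/next() throws the unchecked FeatureStoreRuntimeException.
    static void drain(final FeatureIterator iterator) {
        try (FeatureIterator it = iterator) {
            while (it.hasNext()) {
                final Feature feature = it.next();
                // ... render or inspect the feature
            }
        }
    }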
use of org.geotoolkit.storage.feature.FeatureStoreRuntimeException in project geotoolkit by Geomatys.
the class IntervalStyleBuilder method genericAnalyze.
private void genericAnalyze() {
    if (genericAnalyze)
        return;
    genericAnalyze = true;
    properties.clear();
    minimum = Double.POSITIVE_INFINITY;
    maximum = Double.NEGATIVE_INFINITY;
    count = 0;
    sum = 0;
    median = 0;
    mean = 0;
    // search the different numeric attributes
    if (!(layer.getData() instanceof FeatureSet)) {
        throw new IllegalArgumentException("MapLayer resource must be a FeatureSet");
    }
    FeatureSet data = (FeatureSet) layer.getData();
    final FeatureType schema;
    try {
        schema = data.getType();
    } catch (DataStoreException ex) {
        throw new FeatureStoreRuntimeException(ex.getMessage(), ex);
    }
    for (PropertyType desc : schema.getProperties(true)) {
        if (desc instanceof AttributeType) {
            Class<?> type = ((AttributeType) desc).getValueClass();
            if (Number.class.isAssignableFrom(type) || type == byte.class || type == short.class || type == int.class || type == long.class || type == float.class || type == double.class) {
                properties.add(ff.property(desc.getName().tip().toString()));
            }
        }
    }
    // find the geometry class for the template
    Class<?> geoClass = null;
    try {
        PropertyType geo = FeatureExt.getDefaultGeometry(schema);
        geoClass = ((AttributeType) ((Operation) geo).getResult()).getValueClass();
    } catch (PropertyNotFoundException | IllegalStateException ex) {
        // no default geometry: leave geoClass null and return below
    }
    if (geoClass == null) {
        return;
    }
    if (template == null) {
        if (Polygon.class.isAssignableFrom(geoClass) || MultiPolygon.class.isAssignableFrom(geoClass)) {
            template = createPolygonTemplate();
        } else if (LineString.class.isAssignableFrom(geoClass) || MultiLineString.class.isAssignableFrom(geoClass)) {
            template = createLineTemplate();
        } else {
            template = createPointTemplate();
        }
    }
    // search the extreme values
    final Query query = new Query(schema.getName().toString());
    if (classification == null || layer == null)
        return;
    if (!properties.contains(classification))
        return;
    final Set<String> qp = new HashSet<>();
    qp.add(classification.getXPath());
    if (normalize != null && !normalize.equals(noValue)) {
        qp.add(normalize.getXPath());
    }
    query.setProperties(qp.toArray(new String[0]));
    Iterator<Feature> features = null;
    try (Stream<Feature> stream = data.subset(query).features(false)) {
        features = stream.iterator();
        List<Double> values = new ArrayList<>();
        while (features.hasNext()) {
            Feature sf = features.next();
            count++;
            Number classifValue = (Number) classification.apply(sf);
            double value;
            if (classifValue == null) {
                // skip null values in the analysis
                continue;
            }
            if (normalize == null || normalize.equals(noValue)) {
                value = classifValue.doubleValue();
            } else {
                Number normalizeValue = (Number) normalize.apply(sf);
                value = classifValue.doubleValue() / normalizeValue.doubleValue();
            }
            values.add(value);
            sum += value;
            if (value < minimum) {
                minimum = value;
            }
            if (value > maximum) {
                maximum = value;
            }
        }
        mean = (minimum + maximum) / 2;
        // find the median
        allValues = values.toArray(new Double[values.size()]);
        Arrays.sort(allValues);
        if (values.isEmpty()) {
            median = 0;
        } else if (values.size() % 2 == 0) {
            median = (allValues[(allValues.length / 2) - 1] + allValues[allValues.length / 2]) / 2.0;
        } else {
            median = allValues[allValues.length / 2];
        }
    } catch (DataStoreException ex) {
        ex.printStackTrace();
    }
}
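Note that mean is assigned the midrange (minimum + maximum) / 2 of the extremes, not sum / count. The median branch follows the usual even/odd rule; a worked instance on two tiny sorted inputs:

    // Worked instance of the median rule above (arrays already sorted):
    Double[] even = {1.0, 3.0, 5.0, 7.0};  // length 4 -> (allValues[1] + allValues[2]) / 2.0 = 4.0
    Double[] odd  = {1.0, 3.0, 5.0};       // length 3 -> allValues[1] = 3.0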