Use of com.google.common.base.Optional in project GeoGig by boundlessgeo.
The class OSMHookTest, method testOSMHook.
@Test
public void testOSMHook() throws Exception {
    // set the hook that will trigger an unmapping when something is imported to the busstops
    // tree
    CharSequence commitPreHookCode = "var diffs = geogig.getFeaturesToCommit(\"busstops\", false);\n"
            + "if (diffs.length > 0){\n"
            + "\tvar params = {\"path\" : \"busstops\"};\n"
            + "\tgeogig.run(\"org.locationtech.geogig.osm.internal.OSMUnmapOp\", params)\n}";
    File hooksFolder = new File(geogig.getPlatform().pwd(), ".geogig/hooks");
    File commitPreHookFile = new File(hooksFolder, "pre_commit.js");
    Files.write(commitPreHookCode, commitPreHookFile, Charsets.UTF_8);
    // Import
    String filename = OSMImportOp.class.getResource("nodes.xml").getFile();
    File file = new File(filename);
    geogig.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call();
    // Map
    Map<String, AttributeDefinition> fields = Maps.newHashMap();
    Map<String, List<String>> mappings = Maps.newHashMap();
    mappings.put("highway", Lists.newArrayList("bus_stop"));
    fields.put("geom", new AttributeDefinition("geom", FieldType.POINT));
    fields.put("name", new AttributeDefinition("name", FieldType.STRING));
    MappingRule mappingRule = new MappingRule("busstops", mappings, null, fields, null);
    List<MappingRule> mappingRules = Lists.newArrayList();
    mappingRules.add(mappingRule);
    Mapping mapping = new Mapping(mappingRules);
    geogig.command(AddOp.class).call();
    geogig.command(CommitOp.class).setMessage("msg").call();
    geogig.command(OSMMapOp.class).setMapping(mapping).call();
    Optional<RevFeature> revFeature = geogig.command(RevObjectParse.class)
            .setRefSpec("HEAD:busstops/507464799").call(RevFeature.class);
    assertTrue(revFeature.isPresent());
    Optional<RevFeatureType> featureType = geogig.command(ResolveFeatureType.class)
            .setRefSpec("HEAD:busstops/507464799").call();
    assertTrue(featureType.isPresent());
    ImmutableList<Optional<Object>> values = revFeature.get().getValues();
    assertEquals(3, values.size());
    String wkt = "POINT (7.1959361 50.739397)";
    assertEquals(wkt, values.get(2).get().toString());
    assertEquals(507464799L, values.get(0).get());
    // Modify a node
    GeometryFactory gf = new GeometryFactory();
    SimpleFeatureBuilder fb = new SimpleFeatureBuilder((SimpleFeatureType) featureType.get().type());
    fb.set("geom", gf.createPoint(new Coordinate(0, 1)));
    fb.set("name", "newname");
    fb.set("id", 507464799L);
    SimpleFeature newFeature = fb.buildFeature("507464799");
    geogig.getRepository().workingTree().insert("busstops", newFeature);
    geogig.command(AddOp.class).call();
    // this should trigger the hook
    geogig.command(CommitOp.class).setMessage("msg").call();
    // check that the unmapping has been triggered and the unmapped node has the changes we
    // introduced
    Optional<RevFeature> unmapped = geogig.command(RevObjectParse.class)
            .setRefSpec("WORK_HEAD:node/507464799").call(RevFeature.class);
    assertTrue(unmapped.isPresent());
    values = unmapped.get().getValues();
    assertEquals("POINT (0 1)", values.get(6).get().toString());
    assertEquals("bus:yes|public_transport:platform|highway:bus_stop|VRS:ortsteil:Hoholz|name:newname|VRS:ref:68566|VRS:gemeinde:BONN",
            values.get(3).get().toString());
    // check that unchanged nodes keep their attributes
    Optional<RevFeature> unchanged = geogig.command(RevObjectParse.class)
            .setRefSpec("WORK_HEAD:node/1633594723").call(RevFeature.class);
    values = unchanged.get().getValues();
    assertEquals("14220478", values.get(4).get().toString());
    assertEquals("1355097351000", values.get(2).get().toString());
    assertEquals("2", values.get(1).get().toString());
}
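All of the lookups above return Guava's com.google.common.base.Optional (not java.util.Optional), which the test unwraps with isPresent() followed by get(). A minimal, self-contained sketch of that idiom; the lookup method and names are illustrative, not part of GeoGig:

import com.google.common.base.Optional;

public class GuavaOptionalSketch {

    // Illustrative lookup (hypothetical, not GeoGig API): present for a known id, absent otherwise
    static Optional<String> findName(long id) {
        return id == 507464799L ? Optional.of("busstop") : Optional.<String>absent();
    }

    public static void main(String[] args) {
        Optional<String> name = findName(507464799L);
        if (name.isPresent()) {                          // same guard the test applies before get()
            System.out.println(name.get());              // get() throws IllegalStateException when absent
        }
        System.out.println(findName(1L).or("unknown"));  // or() supplies a fallback for absent values
    }
}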
Use of com.google.common.base.Optional in project GeoGig by boundlessgeo.
The class OSMImportOpTest, method testImportWithMapping.
@Test
public void testImportWithMapping() throws Exception {
    String filename = getClass().getResource("ways.xml").getFile();
    File file = new File(filename);
    // Define a mapping
    Map<String, AttributeDefinition> fields = Maps.newHashMap();
    Map<String, List<String>> mappings = Maps.newHashMap();
    mappings.put("oneway", Lists.newArrayList("yes"));
    fields.put("geom", new AttributeDefinition("geom", FieldType.LINESTRING));
    fields.put("lit", new AttributeDefinition("lit", FieldType.STRING));
    Map<String, List<String>> filterExclude = Maps.newHashMap();
    MappingRule mappingRule = new MappingRule("onewaystreets", mappings, filterExclude, fields, null);
    List<MappingRule> mappingRules = Lists.newArrayList();
    mappingRules.add(mappingRule);
    Mapping mapping = new Mapping(mappingRules);
    // import with the mapping and check that the import went ok
    geogig.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).setMapping(mapping).call();
    Optional<RevTree> tree = geogig.command(RevObjectParse.class).setRefSpec("HEAD:node").call(RevTree.class);
    assertTrue(tree.isPresent());
    assertTrue(tree.get().size() > 0);
    tree = geogig.command(RevObjectParse.class).setRefSpec("HEAD:way").call(RevTree.class);
    assertTrue(tree.isPresent());
    assertTrue(tree.get().size() > 0);
    // check that the tree with the mapping exists and is not empty
    tree = geogig.command(RevObjectParse.class).setRefSpec("HEAD:onewaystreets").call(RevTree.class);
    assertTrue(tree.isPresent());
    assertTrue(tree.get().size() > 0);
    // check that the mapping was correctly performed
    Optional<Node> feature = geogig.getRepository().workingTree().findUnstaged("onewaystreets/31045880");
    assertTrue(feature.isPresent());
    Optional<RevFeature> revFeature = geogig.command(RevObjectParse.class)
            .setObjectId(feature.get().getObjectId()).call(RevFeature.class);
    assertTrue(revFeature.isPresent());
    ImmutableList<Optional<Object>> values = revFeature.get().getValues();
    String wkt = "LINESTRING (7.1923367 50.7395887, 7.1923127 50.7396946, 7.1923444 50.7397419, 7.1924199 50.7397781)";
    assertEquals(wkt, values.get(2).get().toString());
    assertEquals("31045880", values.get(0).get().toString());
    assertEquals("yes", values.get(1).get());
}
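Tests like this repeat the assertTrue(x.isPresent()) / x.get() pair for every command result. A small helper could fold the two steps together; the method below is hypothetical (not part of GeoGig or Guava) and assumes the JUnit 4 assertions already used in these tests:

import static org.junit.Assert.assertTrue;

import com.google.common.base.Optional;

final class Optionals {

    private Optionals() {
    }

    // Hypothetical test helper: fail with a readable message instead of the
    // IllegalStateException that get() would throw on an absent value.
    static <T> T require(Optional<T> candidate, String description) {
        assertTrue("expected " + description + " to be present", candidate.isPresent());
        return candidate.get();
    }
}

With such a helper, the assert-then-get pairs above collapse to a single call, e.g. RevFeature mapped = Optionals.require(revFeature, "mapped onewaystreets feature");.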
Use of com.google.common.base.Optional in project GeoGig by boundlessgeo.
The class ShpExport, method runInternal.
/**
 * Executes the export command using the provided options.
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    if (args.isEmpty()) {
        printUsage(cli);
        throw new CommandFailedException();
    }
    String path = args.get(0);
    String shapefile = args.get(1);
    ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();
    File targetShapefile = new File(shapefile);
    if (!targetShapefile.isAbsolute()) {
        File pwd = cli.getGeogig().getPlatform().pwd();
        String relativePath = targetShapefile.getPath();
        targetShapefile = new File(pwd, relativePath);
    }
    if (targetShapefile.exists() && !overwrite) {
        throw new CommandFailedException("The selected shapefile already exists. Use -o to overwrite");
    }
    Map<String, Serializable> params = new HashMap<String, Serializable>();
    URL targetShapefileAsUrl = targetShapefile.toURI().toURL();
    params.put(ShapefileDataStoreFactory.URLP.key, targetShapefileAsUrl);
    params.put(ShapefileDataStoreFactory.CREATE_SPATIAL_INDEX.key, Boolean.FALSE);
    params.put(ShapefileDataStoreFactory.ENABLE_SPATIAL_INDEX.key, Boolean.FALSE);
    ShapefileDataStore dataStore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);
    SimpleFeatureType outputFeatureType;
    ObjectId featureTypeId;
    if (sFeatureTypeId != null) {
        // Check the feature type id string is a correct id
        Optional<ObjectId> id = cli.getGeogig().command(RevParse.class).setRefSpec(sFeatureTypeId).call();
        checkParameter(id.isPresent(), "Invalid feature type reference", sFeatureTypeId);
        TYPE type = cli.getGeogig().command(ResolveObjectType.class).setObjectId(id.get()).call();
        checkParameter(type.equals(TYPE.FEATURETYPE),
                "Provided reference does not resolve to a feature type: ", sFeatureTypeId);
        outputFeatureType = (SimpleFeatureType) cli.getGeogig().command(RevObjectParse.class)
                .setObjectId(id.get()).call(RevFeatureType.class).get().type();
        featureTypeId = id.get();
    } else {
        try {
            outputFeatureType = getFeatureType(path, cli);
            featureTypeId = null;
        } catch (GeoToolsOpException e) {
            cli.getConsole().println("No features to export.");
            return;
        }
    }
    dataStore.createSchema(outputFeatureType);
    final String typeName = dataStore.getTypeNames()[0];
    final SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
    if (!(featureSource instanceof SimpleFeatureStore)) {
        throw new CommandFailedException("Could not create feature store.");
    }
    Function<Feature, Optional<Feature>> function = getTransformingFunction(dataStore.getSchema());
    final SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
    ExportOp op = cli.getGeogig().command(ExportOp.class).setFeatureStore(featureStore).setPath(path)
            .setFilterFeatureTypeId(featureTypeId).setAlter(alter)
            .setFeatureTypeConversionFunction(function);
    // shapefile transactions are memory bound, so avoid them
    op.setTransactional(false);
    if (defaultType) {
        op.exportDefaultFeatureType();
    }
    try {
        op.setProgressListener(cli.getProgressListener()).call();
    } catch (IllegalArgumentException iae) {
        throw new org.locationtech.geogig.cli.InvalidParameterException(iae.getMessage(), iae);
    } catch (GeoToolsOpException e) {
        targetShapefile.delete();
        switch (e.statusCode) {
        case MIXED_FEATURE_TYPES:
            throw new CommandFailedException(
                    "Error: The selected tree contains mixed feature types. Use --defaulttype or --featuretype <feature_type_ref> to export.", e);
        default:
            throw new CommandFailedException("Could not export. Error:" + e.statusCode.name(), e);
        }
    }
    cli.getConsole().println(path + " exported successfully to " + shapefile);
}
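The conversion function handed to setFeatureTypeConversionFunction combines Guava's Function and Optional, so each exported feature can be replaced or passed through unchanged. A minimal sketch of such a function, assuming (this is not shown above) that ExportOp skips features for which the function returns absent:

import org.opengis.feature.Feature;

import com.google.common.base.Function;
import com.google.common.base.Optional;

final class DropMissingGeometries implements Function<Feature, Optional<Feature>> {

    // Pass each feature through unchanged, but map features without a geometry to absent.
    @Override
    public Optional<Feature> apply(Feature feature) {
        Object geometry = feature.getDefaultGeometryProperty() == null ? null
                : feature.getDefaultGeometryProperty().getValue();
        return geometry == null ? Optional.<Feature>absent() : Optional.of(feature);
    }
}

It would be wired in where getTransformingFunction(dataStore.getSchema()) is used above, e.g. op.setFeatureTypeConversionFunction(new DropMissingGeometries()).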
Use of com.google.common.base.Optional in project GeoGig by boundlessgeo.
The class EndTransaction, method run.
/**
 * Runs the command and builds the appropriate response.
 *
 * @param context - the context to use for this command
 *
 * @throws CommandSpecException
 */
@Override
public void run(CommandContext context) {
    if (this.getTransactionId() == null) {
        throw new CommandSpecException("There isn't a transaction to end.");
    }
    final Context transaction = this.getCommandLocator(context);
    TransactionEnd endTransaction = context.getGeoGIG().command(TransactionEnd.class);
    try {
        final boolean closed = endTransaction.setCancel(cancel)
                .setTransaction((GeogigTransaction) transaction).call();
        context.setResponseContent(new CommandResponse() {

            @Override
            public void write(ResponseWriter out) throws Exception {
                out.start();
                if (closed) {
                    out.writeTransactionId(null);
                } else {
                    out.writeTransactionId(getTransactionId());
                }
                out.finish();
            }
        });
    } catch (MergeConflictsException m) {
        final RevCommit ours = context.getGeoGIG().getRepository().getCommit(m.getOurs());
        final RevCommit theirs = context.getGeoGIG().getRepository().getCommit(m.getTheirs());
        final Optional<ObjectId> ancestor = transaction.command(FindCommonAncestor.class)
                .setLeft(ours).setRight(theirs).call();
        context.setResponseContent(new CommandResponse() {

            final MergeScenarioReport report = transaction.command(ReportMergeScenarioOp.class)
                    .setMergeIntoCommit(ours).setToMergeCommit(theirs).call();

            @Override
            public void write(ResponseWriter out) throws Exception {
                out.start();
                Optional<RevCommit> mergeCommit = Optional.absent();
                out.writeMergeResponse(mergeCommit, report, transaction, ours.getId(), theirs.getId(),
                        ancestor.get());
                out.finish();
            }
        });
    } catch (RebaseConflictsException r) {
        // TODO: Handle rebase exception
    }
}
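In the merge-conflict branch, ancestor.get() throws a bare IllegalStateException if FindCommonAncestor finds no common ancestor. A sketch of a fail-fast guard for that case; the helper is hypothetical, not part of GeoGig, and only uses Guava's Preconditions:

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

final class MergeResponseSupport {

    // Hypothetical guard: surface a clear message instead of the bare exception from get().
    static <T> T presentOrFail(Optional<T> value, String description) {
        Preconditions.checkState(value.isPresent(), "expected %s to be present", description);
        return value.get();
    }
}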
Use of com.google.common.base.Optional in project GeoGig by boundlessgeo.
The class Log, method writeCSV.
private void writeCSV(GeoGIG geogig, Writer out, Iterator<RevCommit> log) throws Exception {
    String response = "ChangeType,FeatureId,CommitId,Parent CommitIds,Author Name,Author Email,"
            + "Author Commit Time,Committer Name,Committer Email,Committer Commit Time,Commit Message";
    out.write(response);
    response = "";
    String path = paths.get(0);
    // This is the feature type object
    Optional<NodeRef> ref = geogig.command(FindTreeChild.class).setChildPath(path)
            .setParent(geogig.getRepository().workingTree().getTree()).call();
    Optional<RevObject> type = Optional.absent();
    if (ref.isPresent()) {
        type = geogig.command(RevObjectParse.class).setRefSpec(ref.get().getMetadataId().toString()).call();
    } else {
        throw new CommandSpecException("Couldn't resolve the given path.");
    }
    if (type.isPresent() && type.get() instanceof RevFeatureType) {
        RevFeatureType featureType = (RevFeatureType) type.get();
        Collection<PropertyDescriptor> attribs = featureType.type().getDescriptors();
        int attributeLength = attribs.size();
        for (PropertyDescriptor attrib : attribs) {
            response += "," + escapeCsv(attrib.getName().toString());
        }
        response += '\n';
        out.write(response);
        response = "";
        RevCommit commit = null;
        while (log.hasNext()) {
            commit = log.next();
            String parentId = commit.getParentIds().size() >= 1
                    ? commit.getParentIds().get(0).toString()
                    : ObjectId.NULL.toString();
            Iterator<DiffEntry> diff = geogig.command(DiffOp.class).setOldVersion(parentId)
                    .setNewVersion(commit.getId().toString()).setFilter(path).call();
            while (diff.hasNext()) {
                DiffEntry entry = diff.next();
                response += entry.changeType().toString() + ",";
                String fid = "";
                if (entry.newPath() != null) {
                    if (entry.oldPath() != null) {
                        fid = entry.oldPath() + " -> " + entry.newPath();
                    } else {
                        fid = entry.newPath();
                    }
                } else if (entry.oldPath() != null) {
                    fid = entry.oldPath();
                }
                response += fid + ",";
                response += commit.getId().toString() + ",";
                response += parentId;
                if (commit.getParentIds().size() > 1) {
                    for (int index = 1; index < commit.getParentIds().size(); index++) {
                        response += " " + commit.getParentIds().get(index).toString();
                    }
                }
                response += ",";
                if (commit.getAuthor().getName().isPresent()) {
                    response += escapeCsv(commit.getAuthor().getName().get());
                }
                response += ",";
                if (commit.getAuthor().getEmail().isPresent()) {
                    response += escapeCsv(commit.getAuthor().getEmail().get());
                }
                response += "," + new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z")
                        .format(new Date(commit.getAuthor().getTimestamp())) + ",";
                if (commit.getCommitter().getName().isPresent()) {
                    response += escapeCsv(commit.getCommitter().getName().get());
                }
                response += ",";
                if (commit.getCommitter().getEmail().isPresent()) {
                    response += escapeCsv(commit.getCommitter().getEmail().get());
                }
                response += "," + new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z")
                        .format(new Date(commit.getCommitter().getTimestamp())) + ",";
                String message = escapeCsv(commit.getMessage());
                response += message;
                if (entry.newObjectId() == ObjectId.NULL) {
                    // Feature was removed so we need to fill out blank attribute values
                    for (int index = 0; index < attributeLength; index++) {
                        response += ",";
                    }
                } else {
                    // Feature was added or modified so we need to write out the attribute values
                    // from the feature
                    Optional<RevObject> feature = geogig.command(RevObjectParse.class)
                            .setObjectId(entry.newObjectId()).call();
                    RevFeature revFeature = (RevFeature) feature.get();
                    List<Optional<Object>> values = revFeature.getValues();
                    for (int index = 0; index < values.size(); index++) {
                        Optional<Object> value = values.get(index);
                        PropertyDescriptor attrib = (PropertyDescriptor) attribs.toArray()[index];
                        String stringValue = "";
                        if (value.isPresent()) {
                            FieldType attributeType = FieldType.forBinding(attrib.getType().getBinding());
                            switch (attributeType) {
                            case DATE:
                                stringValue = new SimpleDateFormat("MM/dd/yyyy z")
                                        .format((java.sql.Date) value.get());
                                break;
                            case DATETIME:
                                stringValue = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z")
                                        .format((Date) value.get());
                                break;
                            case TIME:
                                stringValue = new SimpleDateFormat("HH:mm:ss z").format((Time) value.get());
                                break;
                            case TIMESTAMP:
                                stringValue = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z")
                                        .format((Timestamp) value.get());
                                break;
                            default:
                                stringValue = escapeCsv(value.get().toString());
                            }
                            response += "," + stringValue;
                        } else {
                            response += ",";
                        }
                    }
                }
                response += '\n';
                out.write(response);
                response = "";
            }
        }
    } else {
        // Couldn't resolve FeatureType
        throw new CommandSpecException("Couldn't resolve the given path to a feature type.");
    }
}
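The per-attribute branch above (isPresent() check, format the value, otherwise write an empty cell) can also be expressed with Guava's Optional.transform, which applies a function only to present values. A minimal sketch; the escaping and FieldType-based date formatting from the real code are left out and would live inside the function:

import com.google.common.base.Function;
import com.google.common.base.Optional;

final class CsvCellSketch {

    // Sketch only: render a possibly absent attribute value as one CSV cell.
    // transform() applies the function only when a value is present; or("") yields the
    // empty cell that the loop above writes for absent values.
    static String cell(Optional<Object> value) {
        return value.transform(new Function<Object, String>() {

            @Override
            public String apply(Object attributeValue) {
                return attributeValue.toString();
            }
        }).or("");
    }
}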