Use of org.locationtech.geogig.api.RevObject in project GeoGig by boundlessgeo.
From class SparseCloneTest, method testSparseCloneOnlyFirstMatch:
@Test
public void testSparseCloneOnlyFirstMatch() throws Exception {
    Map<String, String> filter = new HashMap<String, String>();
    filter.put("default", "BBOX(pp,9, -80, 15, -70,'EPSG:4326')");
    createFilterFile(filter);
    // Commit several features to the remote
    List<Feature> features = Arrays.asList(city1, city2, city3, road1, road2, road3);
    LinkedList<RevCommit> expected = new LinkedList<RevCommit>();
    Map<Feature, ObjectId> oids = new HashMap<Feature, ObjectId>();
    for (Feature f : features) {
        ObjectId oId = insertAndAdd(remoteGeogig.geogig, f);
        oids.put(f, oId);
        final RevCommit commit = remoteGeogig.geogig.command(CommitOp.class)
                .setMessage(f.getIdentifier().toString()).call();
        expected.addFirst(commit);
        Optional<RevObject> childObject = remoteGeogig.geogig.command(RevObjectParse.class)
                .setObjectId(oId).call();
        assertTrue(childObject.isPresent());
    }
    // Make sure the remote has all of the commits
    Iterator<RevCommit> logs = remoteGeogig.geogig.command(LogOp.class).call();
    List<RevCommit> logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(expected, logged);
    // Make sure the local repository has no commits prior to the clone
    logs = localGeogig.geogig.command(LogOp.class).call();
    assertNotNull(logs);
    assertFalse(logs.hasNext());
    // Clone from the remote
    CloneOp clone = clone();
    clone.setDepth(0);
    clone.setRepositoryURL(remoteGeogig.envHome.getCanonicalPath()).setBranch("master").call();
    // Because only the first feature (Cities.1) matches the filter, its commit is preserved
    // with the same id. There is also a commit that adds the "Roads" tree but no features,
    // and finally an "Empty Placeholder Commit".
    // Make sure the local repository got the correct commits
    logs = localGeogig.geogig.command(LogOp.class).call();
    logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(3, logged.size());
    assertEquals(AbstractMappedRemoteRepo.PLACEHOLDER_COMMIT_MESSAGE, logged.get(0).getMessage());
    assertFalse(expected.get(0).getId().equals(logged.get(0).getId()));
    assertEquals("Roads.1", logged.get(1).getMessage());
    assertFalse(expected.get(2).getId().equals(logged.get(1).getId()));
    assertEquals("Cities.1", logged.get(2).getMessage());
    assertTrue(expected.get(5).getId().equals(logged.get(2).getId()));
    assertExists(localGeogig, oids.get(city1));
    assertNotExists(localGeogig, oids.get(city2), oids.get(city3), oids.get(road1),
            oids.get(road2), oids.get(road3));
}
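createFilterFile is a helper from the test fixture and is not shown in this snippet; it writes the map above into the repository's sparse-clone filter file. A minimal sketch of a plausible implementation, assuming an INI-style layout with one section per feature path and "type"/"filter" keys (the layout, key names, and the helper name are assumptions, not confirmed by this snippet):

// Hedged sketch of a createFilterFile-style helper. The INI-style layout, the
// "type"/"filter" keys, and the target file are assumptions.
private void writeFilterFile(Map<String, String> filters, File filterFile) throws IOException {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> entry : filters.entrySet()) {
        sb.append('[').append(entry.getKey()).append("]\n"); // e.g. [default] or [Cities]
        sb.append("type = CQL\n");
        sb.append("filter = ").append(entry.getValue()).append('\n');
    }
    // com.google.common.io.Files and com.google.common.base.Charsets (Guava)
    Files.write(sb.toString(), filterFile, Charsets.UTF_8);
}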
Use of org.locationtech.geogig.api.RevObject in project GeoGig by boundlessgeo.
From class SparseCloneTest, method testFeatureMovingIntoAOI:
@Test
public void testFeatureMovingIntoAOI() throws Exception {
    Map<String, String> filter = new HashMap<String, String>();
    filter.put("Cities", "BBOX(pp,30, -125, 40, -110,'EPSG:4326')");
    createFilterFile(filter);
    // Commit several features to the remote
    List<Feature> features = Arrays.asList(city2, city1, city3, city1_modified);
    LinkedList<RevCommit> expected = new LinkedList<RevCommit>();
    Map<Feature, ObjectId> oids = new HashMap<Feature, ObjectId>();
    for (Feature f : features) {
        ObjectId oId = insertAndAdd(remoteGeogig.geogig, f);
        oids.put(f, oId);
        final RevCommit commit = remoteGeogig.geogig.command(CommitOp.class)
                .setMessage(f.getIdentifier().toString()).call();
        expected.addFirst(commit);
        Optional<RevObject> childObject = remoteGeogig.geogig.command(RevObjectParse.class)
                .setObjectId(oId).call();
        assertTrue(childObject.isPresent());
    }
    // Make sure the remote has all of the commits
    Iterator<RevCommit> logs = remoteGeogig.geogig.command(LogOp.class).call();
    List<RevCommit> logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(expected, logged);
    // Make sure the local repository has no commits prior to the clone
    logs = localGeogig.geogig.command(LogOp.class).call();
    assertNotNull(logs);
    assertFalse(logs.hasNext());
    // Clone from the remote
    CloneOp clone = clone();
    clone.setDepth(0);
    clone.setRepositoryURL(remoteGeogig.envHome.getCanonicalPath()).setBranch("master").call();
    // Cities.1 initially lies outside the filter, so the commit that adds it is not part of
    // the sparse clone. The feature is later moved into the AOI, so it is added at that point.
    // Make sure the local repository got the correct commits
    logs = localGeogig.geogig.command(LogOp.class).call();
    logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(3, logged.size());
    assertEquals("Cities.1", logged.get(0).getMessage());
    assertFalse(expected.get(0).getId().equals(logged.get(0).getId()));
    assertEquals("Cities.3", logged.get(1).getMessage());
    assertFalse(expected.get(1).getId().equals(logged.get(1).getId()));
    assertEquals("Cities.2", logged.get(2).getMessage());
    assertTrue(expected.get(3).getId().equals(logged.get(2).getId()));
    assertExists(localGeogig, oids.get(city2), oids.get(city3), oids.get(city1_modified));
    assertNotExists(localGeogig, oids.get(city1));
}
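The AOI predicate in the filter is plain CQL, so whether a feature such as city1 versus city1_modified falls inside the bounding box can be checked independently with GeoTools. A hedged sketch of that check (the helper name and the feature argument are assumptions; the "pp" attribute and the box come from the filter string above):

// Hedged sketch (not part of the test): evaluate the same BBOX predicate the
// sparse filter uses. A feature whose "pp" point lies inside the box (e.g.
// city1_modified) evaluates to true; one outside it (e.g. the original city1)
// evaluates to false.
static boolean insideAoi(org.opengis.feature.Feature f)
        throws org.geotools.filter.text.cql2.CQLException {
    org.opengis.filter.Filter aoi = org.geotools.filter.text.ecql.ECQL
            .toFilter("BBOX(pp, 30, -125, 40, -110, 'EPSG:4326')");
    return aoi.evaluate(f);
}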
Use of org.locationtech.geogig.api.RevObject in project GeoGig by boundlessgeo.
From class SparseCloneTest, method testSparseClone:
@Test
public void testSparseClone() throws Exception {
    Map<String, String> filter = new HashMap<String, String>();
    filter.put("default", "BBOX(pp,30, -125, 40, -110,'EPSG:4326')");
    filter.put("Cities", "BBOX(pp,33, -125, 40, -110,'EPSG:4326')");
    createFilterFile(filter);
    // Commit several features to the remote
    List<Feature> features = Arrays.asList(city1, city2, city3, road1, road2, road3);
    LinkedList<RevCommit> expected = new LinkedList<RevCommit>();
    Map<Feature, ObjectId> oids = new HashMap<Feature, ObjectId>();
    for (Feature f : features) {
        ObjectId oId = insertAndAdd(remoteGeogig.geogig, f);
        oids.put(f, oId);
        final RevCommit commit = remoteGeogig.geogig.command(CommitOp.class)
                .setMessage(f.getIdentifier().toString()).call();
        expected.addFirst(commit);
        Optional<RevObject> childObject = remoteGeogig.geogig.command(RevObjectParse.class)
                .setObjectId(oId).call();
        assertTrue(childObject.isPresent());
    }
    // Make sure the remote has all of the commits
    Iterator<RevCommit> logs = remoteGeogig.geogig.command(LogOp.class).call();
    List<RevCommit> logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(expected, logged);
    // Make sure the local repository has no commits prior to the clone
    logs = localGeogig.geogig.command(LogOp.class).call();
    assertNotNull(logs);
    assertFalse(logs.hasNext());
    // Clone from the remote
    CloneOp clone = clone();
    clone.setDepth(0);
    clone.setRepositoryURL(remoteGeogig.envHome.getCanonicalPath()).setBranch("master").call();
    // The features that match the filter are "Cities.3", "Roads.1", "Roads.2" and "Roads.3".
    // The "Cities.1" commit should be present because it added the "Cities" tree, but the
    // "Cities.1" feature itself should not be present in the tree.
    // Make sure the local repository got the correct commits
    logs = localGeogig.geogig.command(LogOp.class).call();
    logged = new ArrayList<RevCommit>();
    while (logs.hasNext()) {
        logged.add(logs.next());
    }
    assertEquals(5, logged.size());
    assertEquals("Roads.3", logged.get(0).getMessage());
    assertFalse(expected.get(0).getId().equals(logged.get(0).getId()));
    assertEquals("Roads.2", logged.get(1).getMessage());
    assertFalse(expected.get(1).getId().equals(logged.get(1).getId()));
    assertEquals("Roads.1", logged.get(2).getMessage());
    assertFalse(expected.get(2).getId().equals(logged.get(2).getId()));
    assertEquals("Cities.3", logged.get(3).getMessage());
    assertFalse(expected.get(3).getId().equals(logged.get(3).getId()));
    assertEquals("Cities.1", logged.get(4).getMessage());
    assertFalse(expected.get(5).getId().equals(logged.get(4).getId()));
    assertExists(localGeogig, oids.get(city3), oids.get(road1), oids.get(road2), oids.get(road3));
    assertNotExists(localGeogig, oids.get(city1), oids.get(city2));
}
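assertExists and assertNotExists come from the test's superclass and are not shown in these snippets. A plausible sketch of what they do, based on the RevObjectParse lookups used earlier in these tests (the container parameter type and the implementation details are assumptions):

// Hedged sketch: resolve each ObjectId in the repository via RevObjectParse
// and assert on whether the lookup succeeds. The real helpers may differ.
private void assertExists(GeogigContainer repo, ObjectId... ids) {
    for (ObjectId id : ids) {
        Optional<RevObject> obj = repo.geogig.command(RevObjectParse.class).setObjectId(id).call();
        assertTrue("expected " + id + " to exist", obj.isPresent());
    }
}

private void assertNotExists(GeogigContainer repo, ObjectId... ids) {
    for (ObjectId id : ids) {
        Optional<RevObject> obj = repo.geogig.command(RevObjectParse.class).setObjectId(id).call();
        assertFalse("expected " + id + " to not exist", obj.isPresent());
    }
}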
Use of org.locationtech.geogig.api.RevObject in project GeoGig by boundlessgeo.
From class HttpMappedRemoteRepo, method pushSparseCommit:
/**
 * Pushes a sparse commit to a remote repository and updates all mappings.
 *
 * @param commitId the commit to push
 */
@Override
protected void pushSparseCommit(ObjectId commitId) {
    Repository from = localRepository;
    Optional<RevObject> object = from.command(RevObjectParse.class).setObjectId(commitId).call();
    if (object.isPresent() && object.get().getType().equals(TYPE.COMMIT)) {
        RevCommit commit = (RevCommit) object.get();
        ObjectId parent = ObjectId.NULL;
        List<ObjectId> newParents = new LinkedList<ObjectId>();
        for (int i = 0; i < commit.getParentIds().size(); i++) {
            ObjectId parentId = commit.getParentIds().get(i);
            if (i != 0) {
                Optional<ObjectId> commonAncestor = from.command(FindCommonAncestor.class)
                        .setLeftId(commit.getParentIds().get(0)).setRightId(parentId).call();
                if (commonAncestor.isPresent()) {
                    if (from.command(CheckSparsePath.class).setStart(parentId)
                            .setEnd(commonAncestor.get()).call()) {
                        // This should be the base commit to preserve changes that were
                        // filtered out.
                        newParents.add(0, from.graphDatabase().getMapping(parentId));
                        continue;
                    }
                }
            }
            newParents.add(from.graphDatabase().getMapping(parentId));
        }
        if (newParents.size() > 0) {
            parent = from.graphDatabase().getMapping(newParents.get(0));
        }
        Iterator<DiffEntry> diffIter = from.command(DiffOp.class).setNewVersion(commitId)
                .setOldVersion(parent).setReportTrees(true).call();
        // Connect and send the packed changes
        final URL resourceURL;
        try {
            resourceURL = new URL(repositoryURL.toString() + "/repo/applychanges");
        } catch (MalformedURLException e) {
            throw Throwables.propagate(e);
        }
        final HttpURLConnection connection;
        final OutputStream out;
        try {
            connection = (HttpURLConnection) resourceURL.openConnection();
            connection.setDoOutput(true);
            connection.setDoInput(true);
            out = connection.getOutputStream();
            // Pack the commit object
            final ObjectSerializingFactory factory = DataStreamSerializationFactoryV1.INSTANCE;
            final ObjectWriter<RevCommit> commitWriter = factory.createObjectWriter(TYPE.COMMIT);
            commitWriter.write(commit, out);
            // Write the new parents
            out.write(newParents.size());
            for (ObjectId parentId : newParents) {
                out.write(parentId.getRawValue());
            }
            // Pack the changes
            BinaryPackedChanges changes = new BinaryPackedChanges(from);
            changes.write(out, diffIter);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
        final InputStream in;
        try {
            in = connection.getInputStream();
            BufferedReader rd = new BufferedReader(new InputStreamReader(in));
            String line = rd.readLine();
            if (line != null) {
                ObjectId remoteCommitId = ObjectId.valueOf(line);
                from.graphDatabase().map(commit.getId(), remoteCommitId);
                from.graphDatabase().map(remoteCommitId, commit.getId());
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}
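The two map() calls at the end record the translation in both directions, so later operations can convert between the local sparse commit id and the id the remote assigned to the pushed commit. A small hedged illustration of that lookup (the helper name and variables are assumptions; getMapping is the same call used above for the parents):

// Hedged illustration (not GeoGig code): once pushSparseCommit has recorded the
// bidirectional mapping, getMapping() resolves either id to its counterpart.
static void illustrateMapping(Repository from, RevCommit commit) {
    ObjectId remoteId = from.graphDatabase().getMapping(commit.getId()); // local sparse id -> remote id
    ObjectId localId = from.graphDatabase().getMapping(remoteId);        // remote id -> local sparse id
    assert localId.equals(commit.getId());
}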
Use of org.locationtech.geogig.api.RevObject in project GeoGig by boundlessgeo.
From class HttpRemoteRepo, method fetchMoreData:
/**
 * Retrieves objects from the remote repository and updates the have/want lists accordingly.
 * Specifically, any retrieved commits are removed from the want list and added to the have
 * list, and any parents of those commits are removed from the have list (it only represents
 * the most recent common commits). Retrieved objects are added to the local repository, and
 * the want/have lists are updated in place.
 *
 * @param want a list of ObjectIds that need to be fetched
 * @param have a list of ObjectIds that are in common with the remote repository
 * @param progress the progress listener
 */
private void fetchMoreData(final List<ObjectId> want, final Set<ObjectId> have,
        final ProgressListener progress) {
    final JsonObject message = createFetchMessage(want, have);
    final URL resourceURL;
    try {
        resourceURL = new URL(repositoryURL.toString() + "/repo/batchobjects");
    } catch (MalformedURLException e) {
        throw Throwables.propagate(e);
    }
    final HttpURLConnection connection;
    try {
        final Gson gson = new Gson();
        OutputStream out;
        final Writer writer;
        connection = (HttpURLConnection) resourceURL.openConnection();
        connection.setDoOutput(true);
        connection.setDoInput(true);
        connection.addRequestProperty("Accept-Encoding", "gzip");
        out = connection.getOutputStream();
        writer = new OutputStreamWriter(out);
        gson.toJson(message, writer);
        writer.flush();
        out.flush();
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    final HttpUtils.ReportingInputStream in = HttpUtils.getResponseStream(connection);
    BinaryPackedObjects unpacker = new BinaryPackedObjects(localRepository.objectDatabase());
    BinaryPackedObjects.Callback callback = new BinaryPackedObjects.Callback() {

        @Override
        public void callback(Supplier<RevObject> supplier) {
            RevObject object = supplier.get();
            progress.setProgress(progress.getProgress() + 1);
            if (object instanceof RevCommit) {
                RevCommit commit = (RevCommit) object;
                want.remove(commit.getId());
                have.removeAll(commit.getParentIds());
                have.add(commit.getId());
            } else if (object instanceof RevTag) {
                RevTag tag = (RevTag) object;
                want.remove(tag.getId());
                have.remove(tag.getCommitId());
                have.add(tag.getId());
            }
        }
    };
    Stopwatch sw = Stopwatch.createStarted();
    IngestResults ingestResults = unpacker.ingest(in, callback);
    sw.stop();
    String msg = String.format(
            "Processed %,d objects. Inserted: %,d. Existing: %,d. Time: %s. "
                    + "Compressed size: %,d bytes. Uncompressed size: %,d bytes.",
            ingestResults.total(), ingestResults.getInserted(), ingestResults.getExisting(), sw,
            in.compressedSize(), in.unCompressedSize());
    LOGGER.info(msg);
    progress.setDescription(msg);
}
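createFetchMessage is not shown in this snippet. Given that the message is posted to the batchobjects endpoint as JSON via Gson above, a plausible sketch is a JSON object carrying the want and have ids as arrays of hex strings; the property names and the use of id.toString() are assumptions, not taken from this code:

// Hedged sketch of a createFetchMessage-style helper. The "want"/"have"
// property names and the hex-string encoding of the ids are assumptions.
private JsonObject createFetchMessage(List<ObjectId> want, Set<ObjectId> have) {
    JsonObject message = new JsonObject();
    JsonArray wantArray = new JsonArray();
    for (ObjectId id : want) {
        wantArray.add(new JsonPrimitive(id.toString()));
    }
    JsonArray haveArray = new JsonArray();
    for (ObjectId id : have) {
        haveArray.add(new JsonPrimitive(id.toString()));
    }
    message.add("want", wantArray);
    message.add("have", haveArray);
    return message;
}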