Use of org.locationtech.geogig.repository.Repository in project GeoGig by boundlessgeo.
The class GeoGigDataStoreFactory, method createNewDataStore.
/**
 * @see org.geotools.data.DataStoreFactorySpi#createNewDataStore(java.util.Map)
 */
@Override
public GeoGigDataStore createNewDataStore(Map<String, Serializable> params) throws IOException {
    String defaultNamespace = (String) DEFAULT_NAMESPACE.lookUp(params);
    File repositoryRoot = new File((String) REPOSITORY.lookUp(params));
    if (!repositoryRoot.isDirectory()) {
        if (repositoryRoot.exists()) {
            throw new IOException(repositoryRoot.getAbsolutePath() + " is not a directory");
        }
        repositoryRoot.mkdirs();
    }
    GeoGIG geogig = new GeoGIG(repositoryRoot);
    try {
        Repository repository = geogig.getOrCreateRepository();
        Preconditions.checkState(repository != null);
    } catch (RuntimeException e) {
        throw new IOException(e);
    }
    GeoGigDataStore store = new GeoGigDataStore(geogig);
    if (defaultNamespace != null) {
        store.setNamespaceURI(defaultNamespace);
    }
    return store;
}
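A minimal usage sketch follows (not part of the GeoGig source) of how this factory method might be invoked. The repository path and namespace URI are placeholders, and the parameter keys are read from the factory's own Param constants rather than hard-coded strings, assuming those constants are publicly accessible.

// Sketch with hypothetical values: create a brand new repository-backed DataStore.
Map<String, Serializable> params = new HashMap<>();
// hypothetical repository root; createNewDataStore will create the directory and repository
params.put(GeoGigDataStoreFactory.REPOSITORY.key, "/tmp/example-repo");
// hypothetical default namespace URI applied to the store's feature types
params.put(GeoGigDataStoreFactory.DEFAULT_NAMESPACE.key, "http://example.com/geogig");
GeoGigDataStore store = new GeoGigDataStoreFactory().createNewDataStore(params);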
Use of org.locationtech.geogig.repository.Repository in project GeoGig by boundlessgeo.
The class GeoGigDataStoreFactory, method createDataStore.
@Override
public GeoGigDataStore createDataStore(Map<String, Serializable> params) throws IOException {
    @Nullable final String lookUpClass = (String) RESOLVER_CLASS_NAME.lookUp(params);
    RepositoryLookup resolver = resolver(lookUpClass);
    final String repositoryLocation = (String) REPOSITORY.lookUp(params);
    @Nullable final String defaultNamespace = (String) DEFAULT_NAMESPACE.lookUp(params);
    @Nullable final String branch = (String) BRANCH.lookUp(params);
    @Nullable final String head = (String) HEAD.lookUp(params);
    @Nullable final String effectiveHead = (head == null) ? branch : head;
    @Nullable final Boolean create = (Boolean) CREATE.lookUp(params);
    final File repositoryDirectory = resolver.resolve(repositoryLocation);
    if (create != null && create.booleanValue()) {
        if (!repositoryDirectory.exists()) {
            return createNewDataStore(params);
        }
    }
    GeoGIG geogig;
    try {
        geogig = new GeoGIG(repositoryDirectory);
    } catch (RuntimeException e) {
        throw new IOException(e.getMessage(), e);
    }
    Repository repository = geogig.getRepository();
    if (null == repository) {
        if (create != null && create.booleanValue()) {
            return createNewDataStore(params);
        }
        throw new IOException(String.format("Directory is not a geogig repository: '%s'",
                repositoryDirectory.getAbsolutePath()));
    }
    GeoGigDataStore store = new GeoGigDataStore(geogig);
    if (defaultNamespace != null) {
        store.setNamespaceURI(defaultNamespace);
    }
    if (effectiveHead != null) {
        store.setHead(effectiveHead);
    }
    return store;
}
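For comparison, a sketch (again not from the source) of opening an existing repository through createDataStore and pinning the store to a branch. All values are placeholders; BRANCH and CREATE are the same Param constants looked up above.

// Sketch with hypothetical values: open an existing repository and read from one branch.
Map<String, Serializable> params = new HashMap<>();
params.put(GeoGigDataStoreFactory.REPOSITORY.key, "/data/repos/roads"); // hypothetical location
params.put(GeoGigDataStoreFactory.BRANCH.key, "master");                // resolve features from this branch
params.put(GeoGigDataStoreFactory.CREATE.key, Boolean.FALSE);           // do not create if missing
GeoGigDataStore store = new GeoGigDataStoreFactory().createDataStore(params);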
Use of org.locationtech.geogig.repository.Repository in project GeoGig by boundlessgeo.
The class Clean, method runInternal.
@Override
public void runInternal(GeogigCLI cli) throws IOException {
    final ConsoleReader console = cli.getConsole();
    final GeoGIG geogig = cli.getGeogig();
    String pathFilter = null;
    if (!path.isEmpty()) {
        pathFilter = path.get(0);
    }
    if (dryRun) {
        if (pathFilter != null) {
            // check that it is a valid path
            Repository repository = cli.getGeogig().getRepository();
            NodeRef.checkValidPath(pathFilter);
            Optional<NodeRef> ref = repository.command(FindTreeChild.class).setIndex(true)
                    .setParent(repository.workingTree().getTree()).setChildPath(pathFilter).call();
            checkParameter(ref.isPresent(), "pathspec '%s' did not match any tree", pathFilter);
            checkParameter(ref.get().getType() == TYPE.TREE,
                    "pathspec '%s' did not resolve to a tree", pathFilter);
        }
        Iterator<DiffEntry> unstaged = geogig.command(DiffWorkTree.class).setFilter(pathFilter).call();
        while (unstaged.hasNext()) {
            DiffEntry entry = unstaged.next();
            if (entry.changeType() == ChangeType.ADDED) {
                console.println("Would remove " + entry.newPath());
            }
        }
    } else {
        geogig.command(CleanOp.class).setPath(pathFilter).call();
        console.println("Clean operation completed successfully.");
    }
}
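The non-dry-run branch reduces to a single CleanOp invocation. A minimal sketch of the equivalent direct call, assuming a GeoGIG handle obtained elsewhere; the tree path is hypothetical, and the CLI above passes null when no path filter was given.

// "roads" is a hypothetical tree path limiting the clean to that tree
geogig.command(CleanOp.class).setPath("roads").call();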
Use of org.locationtech.geogig.repository.Repository in project GeoGig by boundlessgeo.
The class Remove, method runInternal.
@Override
public void runInternal(GeogigCLI cli) throws IOException {
    ConsoleReader console = cli.getConsole();
    // check that there is something to remove
    if (pathsToRemove.isEmpty()) {
        printUsage(cli);
        throw new CommandFailedException();
    }
    /*
     * Separate trees and features, and check that, if there are trees to remove, the -r
     * modifier is used
     */
    ArrayList<String> trees = new ArrayList<String>();
    Repository repository = cli.getGeogig().getRepository();
    for (String pathToRemove : pathsToRemove) {
        NodeRef.checkValidPath(pathToRemove);
        Optional<NodeRef> node = repository.command(FindTreeChild.class)
                .setParent(repository.workingTree().getTree()).setIndex(true)
                .setChildPath(pathToRemove).call();
        checkParameter(node.isPresent(), "pathspec '%s' did not match any feature or tree",
                pathToRemove);
        NodeRef nodeRef = node.get();
        if (nodeRef.getType() == TYPE.TREE) {
            checkParameter(recursive, "Cannot remove tree %s if -r is not specified",
                    nodeRef.path());
            trees.add(pathToRemove);
        }
    }
    int featuresCount = pathsToRemove.size() - trees.size();
    /* Perform the remove operation */
    RemoveOp op = cli.getGeogig().command(RemoveOp.class);
    for (String pathToRemove : pathsToRemove) {
        op.addPathToRemove(pathToRemove);
    }
    op.setProgressListener(cli.getProgressListener()).call();
    /* And inform about it */
    if (featuresCount > 0) {
        console.print(String.format("Deleted %d feature(s)", featuresCount));
    }
    for (String tree : trees) {
        console.print(String.format("Deleted %s tree", tree));
    }
}
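A small sketch (not from the source) of driving RemoveOp directly, mirroring the loop above; the paths are hypothetical, and the -r requirement is only the CLI-level validation shown, with the op receiving the paths unchanged.

RemoveOp op = geogig.command(RemoveOp.class); // geogig is an assumed GeoGIG handle
op.addPathToRemove("roads/road.1"); // hypothetical feature path
op.addPathToRemove("buildings");    // hypothetical tree path (would require -r at the CLI)
op.call();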
Use of org.locationtech.geogig.repository.Repository in project GeoGig by boundlessgeo.
The class HttpMappedRemoteRepo, method pushSparseCommit.
/**
 * Pushes a sparse commit to a remote repository and updates all mappings.
 *
 * @param commitId the commit to push
 */
@Override
protected void pushSparseCommit(ObjectId commitId) {
    Repository from = localRepository;
    Optional<RevObject> object = from.command(RevObjectParse.class).setObjectId(commitId).call();
    if (object.isPresent() && object.get().getType().equals(TYPE.COMMIT)) {
        RevCommit commit = (RevCommit) object.get();
        ObjectId parent = ObjectId.NULL;
        List<ObjectId> newParents = new LinkedList<ObjectId>();
        for (int i = 0; i < commit.getParentIds().size(); i++) {
            ObjectId parentId = commit.getParentIds().get(i);
            if (i != 0) {
                Optional<ObjectId> commonAncestor = from.command(FindCommonAncestor.class)
                        .setLeftId(commit.getParentIds().get(0)).setRightId(parentId).call();
                if (commonAncestor.isPresent()) {
                    if (from.command(CheckSparsePath.class).setStart(parentId)
                            .setEnd(commonAncestor.get()).call()) {
                        // This should be the base commit to preserve changes that were filtered
                        // out.
                        newParents.add(0, from.graphDatabase().getMapping(parentId));
                        continue;
                    }
                }
            }
            newParents.add(from.graphDatabase().getMapping(parentId));
        }
        if (newParents.size() > 0) {
            parent = from.graphDatabase().getMapping(newParents.get(0));
        }
        Iterator<DiffEntry> diffIter = from.command(DiffOp.class).setNewVersion(commitId)
                .setOldVersion(parent).setReportTrees(true).call();
        // connect and send packed changes
        final URL resourceURL;
        try {
            resourceURL = new URL(repositoryURL.toString() + "/repo/applychanges");
        } catch (MalformedURLException e) {
            throw Throwables.propagate(e);
        }
        final HttpURLConnection connection;
        final OutputStream out;
        try {
            connection = (HttpURLConnection) resourceURL.openConnection();
            connection.setDoOutput(true);
            connection.setDoInput(true);
            out = connection.getOutputStream();
            // pack the commit object
            final ObjectSerializingFactory factory = DataStreamSerializationFactoryV1.INSTANCE;
            final ObjectWriter<RevCommit> commitWriter = factory.createObjectWriter(TYPE.COMMIT);
            commitWriter.write(commit, out);
            // write the new parents
            out.write(newParents.size());
            for (ObjectId parentId : newParents) {
                out.write(parentId.getRawValue());
            }
            // pack the changes
            BinaryPackedChanges changes = new BinaryPackedChanges(from);
            changes.write(out, diffIter);
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
        final InputStream in;
        try {
            in = connection.getInputStream();
            BufferedReader rd = new BufferedReader(new InputStreamReader(in));
            String line = rd.readLine();
            if (line != null) {
                ObjectId remoteCommitId = ObjectId.valueOf(line);
                from.graphDatabase().map(commit.getId(), remoteCommitId);
                from.graphDatabase().map(remoteCommitId, commit.getId());
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}
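The two map calls at the end record the bookkeeping that makes later sparse pushes possible. A small sketch (assumed usage, consistent only with the calls appearing above) of what the graph database mapping holds; the commit ids are hypothetical.

GraphDatabase graph = localRepository.graphDatabase();
// localCommitId / remoteCommitId are hypothetical ids of the same logical commit
graph.map(localCommitId, remoteCommitId);           // sparse (local) id -> full (remote) id
graph.map(remoteCommitId, localCommitId);           // and the reverse direction
ObjectId mapped = graph.getMapping(localCommitId);  // yields remoteCommitId, as used for parents above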