Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class FetchOp, method _call().
/**
 * Executes the fetch operation.
 *
 * @return a {@link TransferSummary} describing the changes fetched from each remote
 * @see org.locationtech.geogig.api.AbstractGeoGigOp#call()
 */
@Override
protected TransferSummary _call() {
    if (all) {
        // Add all remotes to list.
        ImmutableList<Remote> localRemotes = command(RemoteListOp.class).call();
        for (Remote remote : localRemotes) {
            if (!remotes.contains(remote)) {
                remotes.add(remote);
            }
        }
    } else if (remotes.size() == 0) {
        // If no remotes are specified, default to the origin remote
        addRemote("origin");
    }
    final ProgressListener progressListener = getProgressListener();
    progressListener.started();
    Optional<Integer> repoDepth = repository().getDepth();
    if (repoDepth.isPresent()) {
        if (fullDepth) {
            depth = Optional.of(Integer.MAX_VALUE);
        }
        if (depth.isPresent()) {
            if (depth.get() > repoDepth.get()) {
                command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                        .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY)
                        .setValue(depth.get().toString()).call();
                repoDepth = depth;
            }
        }
    } else if (depth.isPresent() || fullDepth) {
        // Ignore depth, this is a full repository
        depth = Optional.absent();
        fullDepth = false;
    }
    TransferSummary result = new TransferSummary();
    for (Remote remote : remotes) {
        final ImmutableSet<Ref> remoteRemoteRefs = command(LsRemote.class)
                .setRemote(Suppliers.ofInstance(Optional.of(remote)))
                .retrieveTags(!remote.getMapped() && (!repoDepth.isPresent() || fullDepth))
                .call();
        final ImmutableSet<Ref> localRemoteRefs = command(LsRemote.class)
                .retrieveLocalRefs(true)
                .setRemote(Suppliers.ofInstance(Optional.of(remote))).call();
        // If we have specified a depth to pull, we may have more history to pull
        // from existing refs.
        List<ChangedRef> needUpdate = findOutdatedRefs(remote, remoteRemoteRefs,
                localRemoteRefs, depth);
        if (prune) {
            // Delete local refs that aren't in the remote. Tags were not retrieved
            // above, so it makes sense not to prune them anyway.
            List<Ref> locals = new ArrayList<Ref>();
            for (Ref remoteRef : remoteRemoteRefs) {
                Optional<Ref> localRef = findLocal(remoteRef, localRemoteRefs);
                if (localRef.isPresent()) {
                    locals.add(localRef.get());
                }
            }
            for (Ref localRef : localRemoteRefs) {
                if (!locals.contains(localRef)) {
                    // Delete the ref
                    ChangedRef changedRef = new ChangedRef(localRef, null,
                            ChangeTypes.REMOVED_REF);
                    needUpdate.add(changedRef);
                    command(UpdateRef.class).setDelete(true).setName(localRef.getName())
                            .call();
                }
            }
        }
        Optional<IRemoteRepo> remoteRepo = getRemoteRepo(remote,
                repository().deduplicationService());
        Preconditions.checkState(remoteRepo.isPresent(), "Failed to connect to the remote.");
        IRemoteRepo remoteRepoInstance = remoteRepo.get();
        try {
            remoteRepoInstance.open();
        } catch (IOException e) {
            Throwables.propagate(e);
        }
        try {
            int refCount = 0;
            for (ChangedRef ref : needUpdate) {
                if (ref.getType() != ChangeTypes.REMOVED_REF) {
                    refCount++;
                    Optional<Integer> newFetchLimit = depth;
                    // If no depth was specified but this is a shallow repository, set the
                    // fetch limit to the current repository depth.
                    if (!newFetchLimit.isPresent() && repoDepth.isPresent()
                            && ref.getType() == ChangeTypes.ADDED_REF) {
                        newFetchLimit = repoDepth;
                    }
                    // Fetch updated data from this ref
                    Ref newRef = ref.getNewRef();
                    remoteRepoInstance.fetchNewData(newRef, newFetchLimit, progressListener);
                    if (repoDepth.isPresent() && !fullDepth) {
                        // Update the repository depth if it is deeper than before.
                        int newDepth;
                        try {
                            newDepth = repository().graphDatabase()
                                    .getDepth(newRef.getObjectId());
                        } catch (IllegalStateException e) {
                            throw new RuntimeException(ref.toString(), e);
                        }
                        if (newDepth > repoDepth.get()) {
                            command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET)
                                    .setScope(ConfigScope.LOCAL)
                                    .setName(Repository.DEPTH_CONFIG_KEY)
                                    .setValue(Integer.toString(newDepth)).call();
                            repoDepth = Optional.of(newDepth);
                        }
                    }
                    // Update the ref
                    Ref updatedRef = updateLocalRef(newRef, remote, localRemoteRefs);
                    ref.setNewRef(updatedRef);
                }
            }
            if (needUpdate.size() > 0) {
                result.addAll(remote.getFetchURL(), needUpdate);
            }
            // Update HEAD ref
            if (!remote.getMapped()) {
                Ref remoteHead = remoteRepoInstance.headRef();
                if (remoteHead != null) {
                    updateLocalRef(remoteHead, remote, localRemoteRefs);
                }
            }
        } finally {
            try {
                remoteRepoInstance.close();
            } catch (IOException e) {
                Throwables.propagate(e);
            }
        }
    }
    if (fullDepth) {
        // The full history was fetched, this is no longer a shallow clone
        command(ConfigOp.class).setAction(ConfigAction.CONFIG_UNSET)
                .setScope(ConfigScope.LOCAL).setName(Repository.DEPTH_CONFIG_KEY).call();
    }
    progressListener.complete();
    return result;
}
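All of the progress reporting in _call() flows through the listener returned by getProgressListener(), which a caller can replace via setProgressListener before invoking the operation. What follows is a minimal sketch, not taken from the GeoGig sources, of wiring a custom listener into FetchOp; it assumes DefaultProgressListener from the same org.locationtech.geogig.api package is available to subclass, and that geogig is an already-constructed GeoGIG facade.

// Hedged sketch: echo fetch progress to stderr. DefaultProgressListener and its
// overridable setDescription/setProgress methods are assumptions, not confirmed
// by this listing; setProgressListener itself is confirmed by the snippets below.
ProgressListener listener = new DefaultProgressListener() {
    @Override
    public void setDescription(String description) {
        super.setDescription(description);
        System.err.println(description);
    }

    @Override
    public void setProgress(float progress) {
        super.setProgress(progress);
        System.err.println("progress: " + progress); // units are whatever the op reports
    }
};
FetchOp fetch = geogig.command(FetchOp.class);
fetch.addRemote("origin"); // same default the operation itself falls back to
fetch.setProgressListener(listener);
TransferSummary summary = fetch.call();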
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class ShpImport, method runInternal().
/**
 * Executes the import command using the provided options.
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    checkParameter(shapeFile != null && !shapeFile.isEmpty(), "No shapefile specified");
    for (String shp : shapeFile) {
        DataStore dataStore = null;
        try {
            dataStore = getDataStore(shp);
        } catch (InvalidParameterException e) {
            cli.getConsole()
                    .println("The shapefile '" + shp + "' could not be found, skipping...");
            continue;
        }
        if (fidAttribute != null) {
            AttributeDescriptor attrib = dataStore.getSchema(dataStore.getNames().get(0))
                    .getDescriptor(fidAttribute);
            if (attrib == null) {
                throw new InvalidParameterException(
                        "The specified attribute does not exist in the selected shapefile");
            }
        }
        try {
            cli.getConsole().println("Importing from shapefile " + shp);
            ProgressListener progressListener = cli.getProgressListener();
            ImportOp command = cli.getGeogig().command(ImportOp.class).setAll(true)
                    .setTable(null).setAlter(alter).setOverwrite(!add)
                    .setDestinationPath(destTable).setDataStore(dataStore)
                    .setFidAttribute(fidAttribute)
                    .setAdaptToDefaultFeatureType(!forceFeatureType);
            // force the import not to use paging due to a bug in the shapefile datastore
            command.setUsePaging(false);
            command.setProgressListener(progressListener).call();
            cli.getConsole().println(shp + " imported successfully.");
        } catch (GeoToolsOpException e) {
            switch (e.statusCode) {
                case NO_FEATURES_FOUND:
                    throw new CommandFailedException("No features were found in the shapefile.", e);
                case UNABLE_TO_GET_NAMES:
                    throw new CommandFailedException("Unable to get feature types from the shapefile.", e);
                case UNABLE_TO_GET_FEATURES:
                    throw new CommandFailedException("Unable to get features from the shapefile.", e);
                case UNABLE_TO_INSERT:
                    throw new CommandFailedException("Unable to insert features into the working tree.", e);
                case INCOMPATIBLE_FEATURE_TYPE:
                    throw new CommandFailedException(
                            "The feature type of the data to import does not match the feature type of the destination tree and cannot be imported\n"
                                    + "Use the --force-featuretype switch to import using the original feature type and create a mixed type tree",
                            e);
                default:
                    throw new CommandFailedException(
                            "Import failed with exception: " + e.statusCode.name(), e);
            }
        } finally {
            dataStore.dispose();
            cli.getConsole().flush();
        }
    }
}
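The same ImportOp that ShpImport configures here can be driven outside the CLI; only the DataStore and the listener change. A hedged sketch follows, assuming a GeoTools shapefile store opened through FileDataStoreFinder inside a method that declares throws IOException, and an existing geogig handle; every ImportOp setter used below is one that appears in the snippet above.

// Hedged sketch: programmatic shapefile import with a default progress listener.
// "roads.shp" is a hypothetical path; DefaultProgressListener is an assumption.
DataStore dataStore = FileDataStoreFinder.getDataStore(new File("roads.shp"));
try {
    ImportOp op = geogig.command(ImportOp.class).setAll(true).setDataStore(dataStore);
    op.setUsePaging(false); // same shapefile paging workaround as in ShpImport
    op.setProgressListener(new DefaultProgressListener()).call();
} finally {
    dataStore.dispose();
}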
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class ExportDiffOp, method _call().
/**
 * Executes the export operation using the parameters that have been specified.
 *
 * @return the {@link SimpleFeatureStore} the diff features were exported to
 */
@Override
protected SimpleFeatureStore _call() {
    final SimpleFeatureStore targetStore = getTargetStore();
    final String refspec = old ? oldRef : newRef;
    final RevTree rootTree = resolveRootTree(refspec);
    final NodeRef typeTreeRef = resolTypeTreeRef(refspec, path, rootTree);
    final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();
    final ProgressListener progressListener = getProgressListener();
    progressListener.started();
    progressListener.setDescription("Exporting diffs for path '" + path + "'... ");
    FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection = new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {

        @Override
        public FeatureIterator<SimpleFeature> features() {
            Iterator<DiffEntry> diffs = command(DiffOp.class).setOldVersion(oldRef)
                    .setNewVersion(newRef).setFilter(path).call();
            final Iterator<SimpleFeature> plainFeatures = getFeatures(diffs, old,
                    stagingDatabase(), defaultMetadataId, progressListener);
            Iterator<Optional<Feature>> transformed = Iterators.transform(plainFeatures,
                    ExportDiffOp.this.function);
            Iterator<SimpleFeature> filtered = Iterators.filter(
                    Iterators.transform(transformed,
                            new Function<Optional<Feature>, SimpleFeature>() {
                                @Override
                                public SimpleFeature apply(Optional<Feature> input) {
                                    return (SimpleFeature) (input.isPresent() ? input.get()
                                            : null);
                                }
                            }),
                    Predicates.notNull());
            return new DelegateFeatureIterator<SimpleFeature>(filtered);
        }
    };
    // add the feature collection to the feature store
    final Transaction transaction;
    if (transactional) {
        transaction = new DefaultTransaction("create");
    } else {
        transaction = Transaction.AUTO_COMMIT;
    }
    try {
        targetStore.setTransaction(transaction);
        try {
            targetStore.addFeatures(asFeatureCollection);
            transaction.commit();
        } catch (final Exception e) {
            if (transactional) {
                transaction.rollback();
            }
            Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
        } finally {
            transaction.close();
        }
    } catch (IOException e) {
        throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
    }
    progressListener.complete();
    return targetStore;
}
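Unlike the import commands, ExportDiffOp pushes a human-readable description into the listener via setDescription before streaming features. A sketch of invoking it follows; the setter names (setOldRef, setNewRef, setPath, setFeatureStore) are assumed to mirror the fields read in _call() above and are not confirmed by this listing.

// Hedged sketch: export the features changed between two commits. All setter
// names here are assumptions; "HEAD~1", "HEAD" and "roads" are hypothetical.
SimpleFeatureStore exported = geogig.command(ExportDiffOp.class)
        .setOldRef("HEAD~1")
        .setNewRef("HEAD")
        .setPath("roads")
        .setFeatureStore(targetStore) // an assumed, pre-configured SimpleFeatureStore
        .setProgressListener(new DefaultProgressListener())
        .call();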
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class GeoJsonImport, method runInternal().
@Override
protected void runInternal(GeogigCLI cli)
        throws InvalidParameterException, CommandFailedException, IOException {
    checkParameter(geoJSONList != null && !geoJSONList.isEmpty(), "No GeoJSON specified");
    checkParameter(geomName == null || !geomNameAuto,
            "Cannot use --geom-name and --geom-name-auto at the same time");
    for (String geoJSON : geoJSONList) {
        DataStore dataStore = null;
        try {
            dataStore = getDataStore(geoJSON);
        } catch (InvalidParameterException e) {
            cli.getConsole().println(
                    "The GeoJSON file '" + geoJSON + "' could not be found, skipping...");
            continue;
        }
        if (fidAttribute != null) {
            AttributeDescriptor attrib = dataStore.getSchema(dataStore.getNames().get(0))
                    .getDescriptor(fidAttribute);
            if (attrib == null) {
                throw new InvalidParameterException(
                        "The specified attribute does not exist in the selected GeoJSON file");
            }
        }
        if (geomNameAuto) {
            String destPath = destTable;
            if (destPath == null) {
                destPath = dataStore.getSchema(dataStore.getNames().get(0)).getTypeName();
            }
            Optional<RevFeatureType> ft = cli.getGeogig().command(RevObjectParse.class)
                    .setRefSpec("WORK_HEAD:" + destPath).call(RevFeatureType.class);
            // if the destination tree already exists, use the name of its default
            // geometry attribute
            if (ft.isPresent()) {
                GeometryDescriptor geomDescriptor = ft.get().type().getGeometryDescriptor();
                if (geomDescriptor != null) {
                    geomName = geomDescriptor.getLocalName();
                }
            }
        }
        try {
            cli.getConsole().println("Importing from GeoJSON " + geoJSON);
            ProgressListener progressListener = cli.getProgressListener();
            cli.getGeogig().command(ImportOp.class).setAll(true).setTable(null)
                    .setAlter(alter).setOverwrite(!add).setDestinationPath(destTable)
                    .setDataStore(dataStore).setFidAttribute(fidAttribute)
                    .setGeometryNameOverride(geomName)
                    .setAdaptToDefaultFeatureType(!forceFeatureType)
                    .setProgressListener(progressListener).call();
            cli.getConsole().println(geoJSON + " imported successfully.");
        } catch (GeoToolsOpException e) {
            switch (e.statusCode) {
                case NO_FEATURES_FOUND:
                    throw new CommandFailedException("No features were found in the GeoJSON file.", e);
                case UNABLE_TO_GET_NAMES:
                    throw new CommandFailedException("Unable to get feature types from the GeoJSON file.", e);
                case UNABLE_TO_GET_FEATURES:
                    throw new CommandFailedException("Unable to get features from the GeoJSON file.", e);
                case UNABLE_TO_INSERT:
                    throw new CommandFailedException("Unable to insert features into the working tree.", e);
                case INCOMPATIBLE_FEATURE_TYPE:
                    throw new CommandFailedException(
                            "The feature type of the data to import does not match the feature type of the destination tree and cannot be imported\n"
                                    + "Use the --force-featuretype switch to import using the original feature type and create a mixed type tree",
                            e);
                default:
                    throw new CommandFailedException(
                            "Import failed with exception: " + e.statusCode.name(), e);
            }
        }
    }
}
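The --geom-name-auto branch above resolves the destination tree's geometry attribute name before delegating to ImportOp.setGeometryNameOverride, which the snippet confirms. Passing the name directly is equivalent; a minimal sketch, with an assumed GeoJSON-backed DataStore and geogig handle:

// Hedged sketch: import GeoJSON features into the tree "points", forcing the
// geometry attribute name to "geom". Both names are hypothetical; the chained
// setters are the ones shown in the snippet above.
geogig.command(ImportOp.class)
        .setAll(true)
        .setDataStore(dataStore)
        .setDestinationPath("points")
        .setGeometryNameOverride("geom")
        .setProgressListener(new DefaultProgressListener())
        .call();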
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class OracleImport, method runInternal().
/**
 * Executes the import command using the provided options.
 *
 * @param cli the CLI instance providing the console and progress listener
 * @see org.locationtech.geogig.cli.AbstractOracleCommand#runInternal(org.locationtech.geogig.cli.GeogigCLI)
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    DataStore dataStore = getDataStore();
    try {
        cli.getConsole().println("Importing from database " + commonArgs.database);
        ProgressListener progressListener = cli.getProgressListener();
        cli.getGeogig().command(ImportOp.class).setAll(all).setTable(table).setAlter(alter)
                .setDestinationPath(destTable).setOverwrite(!add).setDataStore(dataStore)
                .setAdaptToDefaultFeatureType(!forceFeatureType)
                .setProgressListener(progressListener).call();
        cli.getConsole().println("Import successful.");
    } catch (GeoToolsOpException e) {
        switch (e.statusCode) {
            case TABLE_NOT_DEFINED:
                cli.getConsole().println("No tables specified for import. Specify --all or --table <table>.");
                throw new CommandFailedException();
            case ALL_AND_TABLE_DEFINED:
                cli.getConsole().println("Specify --all or --table <table>, both cannot be set.");
                throw new CommandFailedException();
            case NO_FEATURES_FOUND:
                cli.getConsole().println("No features were found in the database.");
                break;
            case TABLE_NOT_FOUND:
                cli.getConsole().println("Could not find the specified table.");
                throw new CommandFailedException();
            case UNABLE_TO_GET_NAMES:
                cli.getConsole().println("Unable to get feature types from the database.");
                throw new CommandFailedException();
            case UNABLE_TO_GET_FEATURES:
                cli.getConsole().println("Unable to get features from the database.");
                break;
            case UNABLE_TO_INSERT:
                cli.getConsole().println("Unable to insert features into the working tree.");
                throw new CommandFailedException();
            case ALTER_AND_ALL_DEFINED:
                cli.getConsole().println("Alter cannot be used with --all option and more than one table.");
                throw new CommandFailedException();
            case INCOMPATIBLE_FEATURE_TYPE:
                throw new CommandFailedException(
                        "The feature type of the data to import does not match the feature type of the destination tree and cannot be imported\n"
                                + "Use the --force-featuretype switch to import using the original feature type and create a mixed type tree",
                        e);
            default:
                cli.getConsole().println("Import failed with exception: " + e.statusCode.name());
                throw new CommandFailedException();
        }
    } finally {
        dataStore.dispose();
        cli.getConsole().flush();
    }
}
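Taken together, the five usages follow one lifecycle: obtain the listener (getProgressListener() inside an operation, cli.getProgressListener() in the CLI), call started(), optionally setDescription(), then complete() when the work is done. A minimal sketch of that contract in a hypothetical AbstractGeoGigOp subclass:

// Hedged sketch of the listener lifecycle shared by the snippets above.
// CountTreesOp is hypothetical; only the ProgressListener calls are the point.
public class CountTreesOp extends AbstractGeoGigOp<Integer> {

    @Override
    protected Integer _call() {
        ProgressListener listener = getProgressListener();
        listener.started();
        listener.setDescription("Counting feature trees...");
        int count = 0;
        // ... do the real work here, checking the listener periodically for cancellation ...
        listener.complete();
        return count;
    }
}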