Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class GeogigFeatureStore, method modifyFeatures.
@Override
public void modifyFeatures(Name[] names, Object[] values, Filter filter) throws IOException {
    Preconditions.checkState(getDataStore().isAllowTransactions(),
            "Transactions not supported; head is not a local branch");
    final WorkingTree workingTree = delegate.getWorkingTree();
    final String path = delegate.getTypeTreePath();
    Iterator<SimpleFeature> features = modifyingFeatureIterator(names, values, filter);
    /*
     * Make sure to transform the incoming features to the native schema to avoid situations
     * where geogig would change the metadataId of the RevFeature nodes due to small differences
     * in the default and incoming schema, such as namespace or missing properties.
     */
    final SimpleFeatureType nativeSchema = delegate.getNativeType();
    features = Iterators.transform(features, new SchemaInforcer(nativeSchema));
    try {
        ProgressListener listener = new DefaultProgressListener();
        // neither an inserted-nodes target list nor a collection size is provided
        Integer count = (Integer) null;
        List<Node> target = (List<Node>) null;
        workingTree.insert(path, features, listener, target, count);
    } catch (Exception e) {
        throw new IOException(e);
    }
}
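The DefaultProgressListener passed to workingTree.insert above simply swallows progress updates. A minimal sketch of a listener that logs them instead, assuming DefaultProgressListener leaves setDescription and setProgress overridable (an assumption, not something shown in the snippet):

ProgressListener listener = new DefaultProgressListener() {
    @Override
    public void setDescription(String description) {
        super.setDescription(description);
        System.err.println(description);           // log the current task description
    }

    @Override
    public void setProgress(float progress) {
        super.setProgress(progress);
        System.err.printf("progress: %.1f%%%n", progress);  // progress is reported on a 0..100 scale here
    }
};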
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class SQLServerImport, method runInternal.
/**
 * Executes the import command using the provided options.
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    DataStore dataStore = getDataStore();
    try {
        cli.getConsole().println("Importing from database " + commonArgs.database);
        ProgressListener progressListener = cli.getProgressListener();
        cli.getGeogig().command(ImportOp.class)
                .setAll(all)
                .setTable(table)
                .setAlter(alter)
                .setDestinationPath(destTable)
                .setOverwrite(!add)
                .setDataStore(dataStore)
                .setAdaptToDefaultFeatureType(!forceFeatureType)
                .setProgressListener(progressListener)
                .call();
        cli.getConsole().println("Import successful.");
    } catch (GeoToolsOpException e) {
        switch (e.statusCode) {
        case TABLE_NOT_DEFINED:
            throw new CommandFailedException(
                    "No tables specified for import. Specify --all or --table <table>.", e);
        case ALL_AND_TABLE_DEFINED:
            throw new CommandFailedException(
                    "Specify --all or --table <table>; both cannot be set.", e);
        case NO_FEATURES_FOUND:
            throw new CommandFailedException("No features were found in the database.", e);
        case TABLE_NOT_FOUND:
            throw new CommandFailedException("Could not find the specified table.", e);
        case UNABLE_TO_GET_NAMES:
            throw new CommandFailedException("Unable to get feature types from the database.", e);
        case UNABLE_TO_GET_FEATURES:
            throw new CommandFailedException("Unable to get features from the database.", e);
        case UNABLE_TO_INSERT:
            throw new CommandFailedException("Unable to insert features into the working tree.", e);
        case ALTER_AND_ALL_DEFINED:
            throw new CommandFailedException(
                    "Alter cannot be used with the --all option and more than one table.", e);
        case INCOMPATIBLE_FEATURE_TYPE:
            throw new CommandFailedException(
                    "The feature type of the data to import does not match the feature type "
                            + "of the destination tree and cannot be imported.\n"
                            + "Use the --force-featuretype switch to import using the original "
                            + "feature type and create a mixed-type tree.", e);
        default:
            throw new CommandFailedException("Import failed with exception: " + e.statusCode.name(), e);
        }
    } finally {
        dataStore.dispose();
        cli.getConsole().flush();
    }
}
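Outside the CLI, the same command can be driven directly against a repository handle. A hedged sketch under stated assumptions: the helper name is illustrative, the GeoGIG facade type is inferred from cli.getGeogig(), and only the ImportOp setters already shown above are used.

static RevTree importAllTables(GeoGIG geogig, DataStore dataStore,
        ProgressListener progressListener) {
    // same wiring as runInternal above, minus the CLI-specific error mapping
    ImportOp op = geogig.command(ImportOp.class)
            .setAll(true)
            .setDataStore(dataStore);
    op.setProgressListener(progressListener);
    return op.call();
}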
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class PGImport, method runInternal.
/**
 * Executes the import command using the provided options.
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    DataStore dataStore = getDataStore();
    try {
        cli.getConsole().println("Importing from database " + commonArgs.database);
        ProgressListener progressListener = cli.getProgressListener();
        cli.getGeogig().command(ImportOp.class)
                .setAll(all)
                .setTable(table)
                .setAlter(alter)
                .setDestinationPath(destTable)
                .setOverwrite(!add)
                .setDataStore(dataStore)
                .setAdaptToDefaultFeatureType(!forceFeatureType)
                .setFidAttribute(fidAttribute)
                .setProgressListener(progressListener)
                .call();
        cli.getConsole().println("Import successful.");
    } catch (GeoToolsOpException e) {
        switch (e.statusCode) {
        case TABLE_NOT_DEFINED:
            throw new CommandFailedException(
                    "No tables specified for import. Specify --all or --table <table>.", e);
        case ALL_AND_TABLE_DEFINED:
            throw new CommandFailedException(
                    "Specify --all or --table <table>; both cannot be set.", e);
        case NO_FEATURES_FOUND:
            throw new CommandFailedException("No features were found in the database.", e);
        case TABLE_NOT_FOUND:
            throw new CommandFailedException("Could not find the specified table.", e);
        case UNABLE_TO_GET_NAMES:
            throw new CommandFailedException("Unable to get feature types from the database.", e);
        case UNABLE_TO_GET_FEATURES:
            throw new CommandFailedException("Unable to get features from the database.", e);
        case UNABLE_TO_INSERT:
            throw new CommandFailedException("Unable to insert features into the working tree.", e);
        case ALTER_AND_ALL_DEFINED:
            throw new CommandFailedException(
                    "Alter cannot be used with the --all option and more than one table.", e);
        case INCOMPATIBLE_FEATURE_TYPE:
            throw new CommandFailedException(
                    "The feature type of the data to import does not match the feature type "
                            + "of the destination tree and cannot be imported.\n"
                            + "Use the --force-featuretype switch to import using the original "
                            + "feature type and create a mixed-type tree.", e);
        default:
            throw new CommandFailedException("Import failed with exception: " + e.statusCode.name(), e);
        }
    } finally {
        dataStore.dispose();
        cli.getConsole().flush();
    }
}
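Relative to the SQL Server command above, the only addition here is setFidAttribute. Extending the earlier importAllTables sketch, a single-table variant might look like the following; the helper name and the example parameter values are assumptions, and only setters already shown in this section are used.

static RevTree importTable(GeoGIG geogig, DataStore dataStore, String table,
        String fidAttribute, ProgressListener progressListener) {
    ImportOp op = geogig.command(ImportOp.class)
            .setTable(table)
            .setFidAttribute(fidAttribute)   // e.g. a primary-key column such as "gid"
            .setDataStore(dataStore);
    op.setProgressListener(progressListener);
    return op.call();
}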
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class ExportOp, method _call.
/**
 * Executes the export operation using the parameters that have been specified.
 *
 * @return the target feature store, populated with the exported features
 */
@Override
protected SimpleFeatureStore _call() {
    final StagingDatabase database = stagingDatabase();
    if (filterFeatureTypeId != null) {
        RevObject filterType = database.getIfPresent(filterFeatureTypeId);
        checkArgument(filterType instanceof RevFeatureType,
                "Provided filter feature type does not exist");
    }
    final SimpleFeatureStore targetStore = getTargetStore();
    final String refspec = resolveRefSpec();
    final String treePath = refspec.substring(refspec.indexOf(':') + 1);
    final RevTree rootTree = resolveRootTree(refspec);
    final NodeRef typeTreeRef = resolTypeTreeRef(refspec, treePath, rootTree);
    final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();
    final RevTree typeTree = database.getTree(typeTreeRef.objectId());
    final ProgressListener progressListener = getProgressListener();
    progressListener.started();
    progressListener.setDescription(
            "Exporting from " + path + " to " + targetStore.getName().getLocalPart() + "... ");
    FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection = new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {

        @Override
        public FeatureIterator<SimpleFeature> features() {
            final Iterator<SimpleFeature> plainFeatures = getFeatures(typeTree, database,
                    defaultMetadataId, progressListener);
            Iterator<SimpleFeature> adaptedFeatures = adaptToArguments(plainFeatures, defaultMetadataId);
            Iterator<Optional<Feature>> transformed = Iterators.transform(adaptedFeatures,
                    ExportOp.this.function);
            Iterator<SimpleFeature> filtered = Iterators.filter(
                    Iterators.transform(transformed, new Function<Optional<Feature>, SimpleFeature>() {

                        @Override
                        public SimpleFeature apply(Optional<Feature> input) {
                            return (SimpleFeature) (input.isPresent() ? input.get() : null);
                        }
                    }), Predicates.notNull());
            return new DelegateFeatureIterator<SimpleFeature>(filtered);
        }
    };
    // add the feature collection to the feature store
    final Transaction transaction;
    if (transactional) {
        transaction = new DefaultTransaction("create");
    } else {
        transaction = Transaction.AUTO_COMMIT;
    }
    try {
        targetStore.setTransaction(transaction);
        try {
            targetStore.addFeatures(asFeatureCollection);
            transaction.commit();
        } catch (final Exception e) {
            if (transactional) {
                transaction.rollback();
            }
            Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
        } finally {
            transaction.close();
        }
    } catch (IOException e) {
        throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
    }
    progressListener.complete();
    return targetStore;
}
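The progress callbacks issued above (started, setDescription, complete) only surface if the caller supplies a listener. A minimal sketch, assuming ExportOp inherits a setProgressListener hook and a call() method from the same base class used by the ImportOp commands above, and that DefaultProgressListener leaves setDescription overridable; the helper name is illustrative only.

static SimpleFeatureStore exportWithConsoleProgress(ExportOp export) {
    // echoes the "Exporting from ... to ... " description set by _call()
    ProgressListener listener = new DefaultProgressListener() {
        @Override
        public void setDescription(String description) {
            super.setDescription(description);
            System.err.println(description);
        }
    };
    export.setProgressListener(listener);
    return export.call();
}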
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
The class ImportOp, method _call.
/**
 * Executes the import operation using the parameters that have been specified. Features will be
 * added to the working tree, and a new working tree will be constructed. Either {@code all} or
 * {@code table}, but not both, must be set prior to the import process.
 *
 * @return the new working tree
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
protected RevTree _call() {
    // check preconditions and get the actual list of type names to import
    final String[] typeNames = checkPreconditions();
    for (int i = 0; i < typeNames.length; i++) {
        try {
            typeNames[i] = URLDecoder.decode(typeNames[i], Charsets.UTF_8.displayName());
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always supported; shouldn't reach here
        }
    }
    ProgressListener progressListener = getProgressListener();
    progressListener.started();
    // use a local variable so as not to alter the command's state
    boolean overwrite = this.overwrite;
    if (alter) {
        overwrite = false;
    }
    final WorkingTree workTree = workingTree();
    RevFeatureType destPathFeatureType = null;
    final boolean destPathProvided = destPath != null;
    if (destPathProvided) {
        destPathFeatureType = this.command(ResolveFeatureType.class).setRefSpec(destPath).call().orNull();
        // delete the destination tree only once; otherwise only the last table imported would remain
        if (overwrite) {
            try {
                workTree.delete(destPath);
            } catch (Exception e) {
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
            }
            overwrite = false;
        }
    }
    int tableCount = 0;
    for (String typeName : typeNames) {
        {
            tableCount++;
            String tableName = String.format("%-16s", typeName);
            if (typeName.length() > 16) {
                tableName = tableName.substring(0, 13) + "...";
            }
            progressListener.setDescription(
                    "Importing " + tableName + " (" + tableCount + "/" + typeNames.length + ")... ");
        }
        FeatureSource featureSource = getFeatureSource(typeName);
        SimpleFeatureType featureType = (SimpleFeatureType) featureSource.getSchema();
        final String fidPrefix = featureType.getTypeName() + ".";
        String path;
        if (destPath == null) {
            path = featureType.getTypeName();
        } else {
            NodeRef.checkValidPath(destPath);
            path = destPath;
            featureType = forceFeatureTypeName(featureType, path);
        }
        featureType = overrideGeometryName(featureType);
        featureSource = new ForceTypeAndFidFeatureSource<FeatureType, Feature>(featureSource,
                featureType, fidPrefix);
        boolean hasPrimaryKey = hasPrimaryKey(typeName);
        boolean forbidSorting = !usePaging || !hasPrimaryKey;
        ((ForceTypeAndFidFeatureSource) featureSource).setForbidSorting(forbidSorting);
        if (destPathFeatureType != null && adaptToDefaultFeatureType && !alter) {
            featureSource = new FeatureTypeAdapterFeatureSource<FeatureType, Feature>(featureSource,
                    destPathFeatureType.type());
        }
        ProgressListener taskProgress = subProgress(100.f / typeNames.length);
        if (overwrite) {
            try {
                workTree.delete(path);
                workTree.createTypeTree(path, featureType);
            } catch (Exception e) {
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
            }
        }
        if (alter) {
            // first we modify the feature type and the existing features, if needed
            workTree.updateTypeTree(path, featureType);
            Iterator<Feature> transformedIterator = transformFeatures(featureType, path);
            try {
                final Integer collectionSize = collectionSize(featureSource);
                workTree.insert(path, transformedIterator, taskProgress, null, collectionSize);
            } catch (Exception e) {
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
            }
        }
        try {
            insert(workTree, path, featureSource, taskProgress);
        } catch (GeoToolsOpException e) {
            throw e;
        } catch (Exception e) {
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
        }
    }
    progressListener.setProgress(100.f);
    progressListener.complete();
    return workTree.getTree();
}
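Each table gets an equal slice of the overall progress via subProgress(100.f / typeNames.length), so per-table progress maps into a fixed share of the 0..100 scale reported at the end. A small illustrative calculation (the table count is an assumption):

int tables = 4;                              // e.g. four type names to import
float slice = 100.f / tables;                // 25.0 -> each table's sub-task reports within a 25% slice
float overallAfterSecondTable = 2 * slice;   // 50.0 -> half of the whole import done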