Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
Class OSMApplyDiffOp, method parseDiffFileAndInsert:
public OSMReport parseDiffFileAndInsert() {
    final WorkingTree workTree = workingTree();
    final int queueCapacity = 100 * 1000;
    final int timeout = 1;
    final TimeUnit timeoutUnit = TimeUnit.SECONDS;
    // With this iterator and the osm parsing happening on a separate thread, we follow a
    // producer/consumer approach so that the osm parse thread produces features into the
    // iterator's queue, and WorkingTree.insert consumes them on this thread
    QueueIterator<Feature> target = new QueueIterator<Feature>(queueCapacity, timeout, timeoutUnit);
    XmlChangeReader reader = new XmlChangeReader(file, true, resolveCompressionMethod(file));
    ProgressListener progressListener = getProgressListener();
    ConvertAndImportSink sink = new ConvertAndImportSink(target, context, workTree, platform(),
            new SubProgressListener(progressListener, 100));
    reader.setChangeSink(sink);
    Thread readerThread = new Thread(reader, "osm-diff-reader-thread");
    readerThread.start();
    // used to set the task status name, but report no progress so it does not interfere
    // with the progress reported by the reader thread
    SubProgressListener noProgressReportingListener = new SubProgressListener(progressListener, 0) {
        @Override
        public void setProgress(float progress) {
            // no-op
        }
    };
    Function<Feature, String> parentTreePathResolver = new Function<Feature, String>() {
        @Override
        public String apply(Feature input) {
            return input.getType().getName().getLocalPart();
        }
    };
    workTree.insert(parentTreePathResolver, target, noProgressReportingListener, null, null);
    OSMReport report = new OSMReport(sink.getCount(), sink.getNodeCount(), sink.getWayCount(),
            sink.getUnprocessedCount(), sink.getLatestChangeset(), sink.getLatestTimestamp());
    return report;
}
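The hand-off above relies on GeoGig's QueueIterator, but the underlying producer/consumer pattern can be illustrated with the JDK alone: a reader thread pushes parsed elements into a bounded BlockingQueue while the calling thread drains it. The following is a minimal, self-contained sketch of that pattern; the element type, the poison-pill sentinel, and the parse loop are hypothetical stand-ins, not GeoGig API.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

// Minimal producer/consumer sketch: a reader thread fills a bounded queue
// while the current thread consumes it, mirroring the hand-off between the
// OSM parser thread and WorkingTree.insert above. All names are illustrative.
public class QueueHandOffSketch {

    private static final String POISON_PILL = "__END__"; // marks end of stream

    public static void main(String[] args) throws InterruptedException {
        final BlockingQueue<String> queue = new ArrayBlockingQueue<>(100 * 1000);

        Thread readerThread = new Thread(() -> {
            try {
                for (int i = 0; i < 10; i++) {
                    // stand-in for "parse one OSM change and convert it to a feature"
                    queue.offer("feature-" + i, 1, TimeUnit.SECONDS);
                }
                queue.offer(POISON_PILL, 1, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "osm-diff-reader-thread");
        readerThread.start();

        // consumer side: equivalent to iterating the QueueIterator on this thread
        String element;
        while (!(element = queue.take()).equals(POISON_PILL)) {
            System.out.println("inserting " + element);
        }
        readerThread.join();
    }
}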
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
Class OSMHistoryImport, method insertChanges:
/**
 * @param cli the command line interface providing access to the repository
 * @param changes the iterator of OSM changes to insert into the working tree
 * @param featureFilter optional envelope used to discard features outside its bounds, or
 *        {@code null} to import everything
 * @return the number of changes applied (inserts plus deletes)
 * @throws IOException
 */
private long insertChanges(GeogigCLI cli, final Iterator<Change> changes,
        @Nullable Envelope featureFilter) throws IOException {
    final GeoGIG geogig = cli.getGeogig();
    final Repository repository = geogig.getRepository();
    final WorkingTree workTree = repository.workingTree();
    Map<Long, Coordinate> thisChangePointCache = new LinkedHashMap<Long, Coordinate>() {
        /** serialVersionUID */
        private static final long serialVersionUID = 1277795218777240552L;

        @Override
        protected boolean removeEldestEntry(Map.Entry<Long, Coordinate> eldest) {
            return size() == 10000;
        }
    };
    long cnt = 0;
    Set<String> deletes = Sets.newHashSet();
    Multimap<String, SimpleFeature> insertsByParent = HashMultimap.create();
    while (changes.hasNext()) {
        Change change = changes.next();
        final String featurePath = featurePath(change);
        if (featurePath == null) {
            // ignores relations
            continue;
        }
        final String parentPath = NodeRef.parentPath(featurePath);
        if (Change.Type.delete.equals(change.getType())) {
            cnt++;
            deletes.add(featurePath);
        } else {
            final Primitive primitive = change.getNode().isPresent() ? change.getNode().get()
                    : change.getWay().get();
            final Geometry geom = parseGeometry(geogig, primitive, thisChangePointCache);
            if (geom instanceof Point) {
                thisChangePointCache.put(Long.valueOf(primitive.getId()), ((Point) geom).getCoordinate());
            }
            SimpleFeature feature = toFeature(primitive, geom);
            if (featureFilter == null || featureFilter.intersects((Envelope) feature.getBounds())) {
                insertsByParent.put(parentPath, feature);
                cnt++;
            }
        }
    }
    for (String parentPath : insertsByParent.keySet()) {
        Collection<SimpleFeature> features = insertsByParent.get(parentPath);
        if (features.isEmpty()) {
            continue;
        }
        Iterator<? extends Feature> iterator = features.iterator();
        ProgressListener listener = new DefaultProgressListener();
        List<org.locationtech.geogig.api.Node> insertedTarget = null;
        Integer collectionSize = Integer.valueOf(features.size());
        workTree.insert(parentPath, iterator, listener, insertedTarget, collectionSize);
    }
    if (!deletes.isEmpty()) {
        workTree.delete(deletes.iterator());
    }
    return cnt;
}
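The per-change point cache above bounds itself at 10,000 entries by overriding LinkedHashMap.removeEldestEntry, so old node coordinates are silently evicted as new ones arrive. A standalone sketch of that bounding idiom, using only JDK types and the more common size() > maxEntries check rather than the exact comparison in the snippet; the key and value types here are illustrative:

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the bounded-cache idiom used for thisChangePointCache: once the
// map exceeds the cap, LinkedHashMap drops its eldest entry on each new put.
public class BoundedCacheSketch {

    static <K, V> Map<K, V> boundedCache(final int maxEntries) {
        return new LinkedHashMap<K, V>() {
            @Override
            protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
                return size() > maxEntries;
            }
        };
    }

    public static void main(String[] args) {
        Map<Long, double[]> cache = boundedCache(3);
        for (long id = 1; id <= 5; id++) {
            cache.put(id, new double[] { id, -id });
        }
        // only the three most recently added ids remain
        System.out.println(cache.keySet()); // [3, 4, 5]
    }
}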
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
Class FeatureMapFlusher, method flush:
/** Inserts all features buffered under the given path into the working tree. */
private void flush(String path) {
    Set<SimpleFeature> features = map.get(path);
    if (!features.isEmpty()) {
        Iterator<? extends Feature> iterator = features.iterator();
        ProgressListener listener = new DefaultProgressListener();
        List<org.locationtech.geogig.api.Node> insertedTarget = null;
        Integer collectionSize = Integer.valueOf(features.size());
        workTree.insert(path, iterator, listener, insertedTarget, collectionSize);
    }
}
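FeatureMapFlusher buffers features keyed by their parent tree path and flushes each bucket in one insert call. The sketch below shows that buffer-then-flush shape with plain JDK collections; the String "features", the PathBufferSketch class, and the printed flush target are hypothetical stand-ins for the real delegation to WorkingTree.insert.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Illustrative per-path buffer: elements accumulate under their parent tree
// path and are flushed path by path, mirroring how FeatureMapFlusher batches
// inserts into the working tree.
public class PathBufferSketch {

    private final Map<String, Set<String>> buffer = new HashMap<>();

    void put(String path, String feature) {
        buffer.computeIfAbsent(path, p -> new HashSet<>()).add(feature);
    }

    void flushAll() {
        for (Map.Entry<String, Set<String>> entry : buffer.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                // stand-in for workTree.insert(path, iterator, listener, null, size)
                System.out.printf("flushing %d features to %s%n",
                        entry.getValue().size(), entry.getKey());
            }
        }
        buffer.clear();
    }

    public static void main(String[] args) {
        PathBufferSketch sketch = new PathBufferSketch();
        sketch.put("node", "n1");
        sketch.put("way", "w1");
        sketch.put("way", "w2");
        sketch.flushAll();
    }
}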
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
Class SLImport, method runInternal:
/**
* Executes the import command using the provided options.
*/
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    DataStore dataStore = getDataStore();
    try {
        cli.getConsole().println("Importing from database " + commonArgs.database);
        ProgressListener progressListener = cli.getProgressListener();
        cli.getGeogig().command(ImportOp.class).setAll(all).setTable(table).setAlter(alter)
                .setOverwrite(!add).setDataStore(dataStore)
                .setAdaptToDefaultFeatureType(!forceFeatureType)
                .setProgressListener(progressListener).call();
        cli.getConsole().println("Import successful.");
    } catch (GeoToolsOpException e) {
        switch (e.statusCode) {
        case TABLE_NOT_DEFINED:
            throw new CommandFailedException("No tables specified for import. Specify --all or --table <table>.", e);
        case ALL_AND_TABLE_DEFINED:
            throw new CommandFailedException("Specify --all or --table <table>, both cannot be set.", e);
        case NO_FEATURES_FOUND:
            throw new CommandFailedException("No features were found in the database.", e);
        case TABLE_NOT_FOUND:
            throw new CommandFailedException("Could not find the specified table.", e);
        case UNABLE_TO_GET_NAMES:
            throw new CommandFailedException("Unable to get feature types from the database.", e);
        case UNABLE_TO_GET_FEATURES:
            throw new CommandFailedException("Unable to get features from the database.", e);
        case UNABLE_TO_INSERT:
            throw new CommandFailedException("Unable to insert features into the working tree.", e);
        case INCOMPATIBLE_FEATURE_TYPE:
            throw new CommandFailedException("The feature type of the data to import does not match the feature type of the destination tree and cannot be imported.\n"
                    + "Use the --force-featuretype switch to import using the original feature type and create a mixed-type tree.", e);
        case ALTER_AND_ALL_DEFINED:
            throw new CommandFailedException("Alter cannot be used with --all option and more than one table.", e);
        default:
            throw new CommandFailedException("Import failed with exception: " + e.statusCode.name(), e);
        }
    } finally {
        dataStore.dispose();
        cli.getConsole().flush();
    }
}
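The catch block above translates each GeoToolsOpException status code into a user-facing CommandFailedException message. The same mapping can be expressed as an EnumMap lookup instead of a switch, as in this sketch; the StatusCode enum, its constants, and the ImportErrorMessages class are illustrative placeholders, not the GeoTools or GeoGig types.

import java.util.EnumMap;
import java.util.Map;

// Sketch of a status-code-to-message table: one lookup replaces the switch,
// with a fallback for codes that have no dedicated message.
public class ImportErrorMessages {

    enum StatusCode {
        TABLE_NOT_DEFINED, NO_FEATURES_FOUND, UNABLE_TO_INSERT
    }

    private static final Map<StatusCode, String> MESSAGES = new EnumMap<>(StatusCode.class);
    static {
        MESSAGES.put(StatusCode.TABLE_NOT_DEFINED,
                "No tables specified for import. Specify --all or --table <table>.");
        MESSAGES.put(StatusCode.NO_FEATURES_FOUND, "No features were found in the database.");
        MESSAGES.put(StatusCode.UNABLE_TO_INSERT, "Unable to insert features into the working tree.");
    }

    static String messageFor(StatusCode code) {
        return MESSAGES.getOrDefault(code, "Import failed with exception: " + code.name());
    }

    public static void main(String[] args) {
        System.out.println(messageFor(StatusCode.NO_FEATURES_FOUND));
    }
}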
Use of org.locationtech.geogig.api.ProgressListener in project GeoGig by boundlessgeo.
Class GeogigFeatureStore, method addFeatures:
@Override
public final List<FeatureId> addFeatures(FeatureCollection<SimpleFeatureType, SimpleFeature> featureCollection)
        throws IOException {
    if (Transaction.AUTO_COMMIT.equals(getTransaction())) {
        throw new UnsupportedOperationException("GeoGIG does not support AUTO_COMMIT");
    }
    Preconditions.checkState(getDataStore().isAllowTransactions(),
            "Transactions not supported; head is not a local branch");
    final WorkingTree workingTree = delegate.getWorkingTree();
    final String path = delegate.getTypeTreePath();
    ProgressListener listener = new DefaultProgressListener();
    final List<FeatureId> insertedFids = Lists.newArrayList();
    List<Node> deferringTarget = new AbstractList<Node>() {
        @Override
        public boolean add(Node node) {
            String fid = node.getName();
            String version = node.getObjectId().toString();
            insertedFids.add(new FeatureIdVersionedImpl(fid, version));
            return true;
        }

        @Override
        public Node get(int index) {
            throw new UnsupportedOperationException();
        }

        @Override
        public int size() {
            return 0;
        }
    };
    Integer count = (Integer) null;
    FeatureIterator<SimpleFeature> featureIterator = featureCollection.features();
    try {
        Iterator<SimpleFeature> features;
        features = new FeatureIteratorIterator<SimpleFeature>(featureIterator);
        /*
         * Make sure to transform the incoming features to the native schema to avoid situations
         * where geogig would change the metadataId of the RevFeature nodes due to small
         * differences in the default and incoming schema such as namespace or missing
         * properties
         */
        final SimpleFeatureType nativeSchema = delegate.getNativeType();
        features = Iterators.transform(features, new SchemaInforcer(nativeSchema));
        workingTree.insert(path, features, listener, deferringTarget, count);
    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        featureIterator.close();
    }
    return insertedFids;
}
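The deferringTarget above is a write-only List: its add() converts each inserted Node into a versioned FeatureId and retains nothing else, so the insert call can report results without materializing a second collection. A small sketch of that trick using only JDK types; FakeNode, DeferringTargetSketch, and the string ids are simplified placeholders for the GeoGig Node and GeoTools FeatureId types.

import java.util.AbstractList;
import java.util.ArrayList;
import java.util.List;

// Sketch of the "deferring target" idiom: a List whose add() captures the
// identifier we care about and discards the element itself.
public class DeferringTargetSketch {

    static final class FakeNode {
        final String name;
        final String objectId;

        FakeNode(String name, String objectId) {
            this.name = name;
            this.objectId = objectId;
        }
    }

    public static void main(String[] args) {
        final List<String> insertedFids = new ArrayList<>();

        List<FakeNode> deferringTarget = new AbstractList<FakeNode>() {
            @Override
            public boolean add(FakeNode node) {
                insertedFids.add(node.name + "@" + node.objectId); // capture fid + version
                return true;
            }

            @Override
            public FakeNode get(int index) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int size() {
                return 0; // nothing is actually stored
            }
        };

        // a caller that only ever calls add() behaves as if it filled a real list
        deferringTarget.add(new FakeNode("fid-1", "abc123"));
        deferringTarget.add(new FakeNode("fid-2", "def456"));
        System.out.println(insertedFids);
    }
}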