Use of org.openlca.app.db.Database in the project olca-app by GreenDelta:
the run method of the class Replacer.
@Override
public void run() {
    if (conf == null || conf.models.isEmpty()) {
        log.info("no configuration; nothing to replace");
        return;
    }

    // collect the IDs of processes and LCIA categories
    // where flows should be replaced
    processes.clear();
    impacts.clear();
    for (var model : conf.models) {
        if (model.type == ModelType.PROCESS) {
            processes.add(model.id);
        } else if (model.type == ModelType.IMPACT_METHOD) {
            ImpactMethodDao dao = new ImpactMethodDao(db);
            dao.getCategoryDescriptors(model.id)
                .forEach(d -> impacts.add(d.id));
        }
    }

    buildIndices();
    if (entries.isEmpty()) {
        log.info("found no flows that can be mapped");
        return;
    }
    log.info("found {} flows that can be mapped", entries.size());

    try {
        // start and wait for the cursors to finish
        log.info("start updatable cursors");
        List<UpdatableCursor> cursors = createCursors();
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (UpdatableCursor c : cursors) {
            pool.execute(c);
        }
        pool.shutdown();
        int i = 0;
        while (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
            i++;
            log.info("waiting for cursors to finish; {} seconds", i * 10);
        }
        log.info("cursors finished");
        db.getEntityFactory().getCache().evictAll();

        // TODO: when products were replaced we also need to check
        // whether these products are used in the quant. ref. of
        // product systems and project variants and convert the
        // amounts there.
        // TODO: also, we need to replace such flows in allocation
        // factors; the application of the conversion factor is
        // not required there.

        // collect and log statistics
        Stats stats = new Stats();
        for (UpdatableCursor c : cursors) {
            stats.add(c.stats);
            c.stats.log(c.getClass().getName(), flows);
        }

        // TODO: update the version and last-update fields
        // of the changed models; also call the indexer
        // when the database is a connected repository

        boolean deleteMapped = false;
        Set<Long> usedFlows = null;
        if (conf.deleteMapped) {
            if (stats.failures > 0) {
                log.warn("Will not delete mapped flows because there were"
                        + " {} failures in replacement process", stats.failures);
            } else {
                deleteMapped = true;
                usedFlows = new FlowDao(db).getUsed();
            }
        }

        // update the mapping entries
        for (Long flowID : entries.keySet()) {
            FlowMapEntry e = entries.get(flowID);
            if (flowID == null || e == null)
                continue;
            if (stats.hadFailures(flowID)) {
                e.sourceFlow().status = MappingStatus.error("Replacement error");
                continue;
            }
            if (deleteMapped && !usedFlows.contains(flowID)) {
                FlowDao dao = new FlowDao(db);
                Flow flow = dao.getForId(flowID);
                dao.delete(flow);
                log.info("removed mapped flow {} uuid={}",
                        Labels.name(flow), flow.refId);
                e.sourceFlow().status = MappingStatus.ok("Applied and removed");
            } else {
                e.sourceFlow().status = MappingStatus.ok("Applied (not removed)");
            }
        }
    } catch (Exception e) {
        log.error("Flow replacement failed", e);
    }
}
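
The waiting step in the middle of run() uses a standard java.util.concurrent idiom: submit all tasks to a fixed pool, call shutdown() so no further tasks are accepted, then poll awaitTermination in a loop to report progress until every task has finished. Below is a minimal, self-contained sketch of that pattern only; it assumes nothing from olca-app. The CursorPoolExample class, the plain Runnable tasks and the printed messages are illustrative stand-ins for UpdatableCursor and the logger, while the 4-thread pool, shutdown() and the 10-second awaitTermination polling loop mirror the method above.

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CursorPoolExample {

    public static void main(String[] args) throws InterruptedException {
        // hypothetical stand-ins for the UpdatableCursor tasks from createCursors()
        List<Runnable> cursors = List.of(
                () -> System.out.println("cursor 1 done"),
                () -> System.out.println("cursor 2 done"));

        // same pool size as in Replacer.run()
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (Runnable c : cursors) {
            pool.execute(c);
        }
        // reject new tasks; already submitted tasks keep running
        pool.shutdown();

        // poll every 10 seconds and report how long we have been waiting
        int i = 0;
        while (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
            i++;
            System.out.println("waiting for cursors to finish; " + (i * 10) + " seconds");
        }
        System.out.println("cursors finished");
    }
}

The polling loop is preferable to a single awaitTermination call with a very long timeout because it gives a natural place to log progress while long-running database cursors are still working.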