Use of mb.nabl2.scopegraph.terms.Scope in project spoofax by metaborg:
the class ConstraintMultiFileAnalyzer, method analyzeSemiIncremental.
/**
 * Performs a semi-incremental multi-file NaBL2 analysis.
 *
 * Pipeline: (1) drop units for removed resources; (2) collect or reuse the project-wide
 * initial constraints and the initial (global) solution; (3) for each changed file,
 * collect its constraints and solve them intra-file against the global interface
 * (interface variables and interface scopes); (4) combine all cached partial solutions
 * in one inter-file solving step; (5) run the final analysis action and distribute the
 * resulting messages back over per-file analyze results/updates.
 *
 * @param changed parse units to (re)analyze, keyed by source resource name
 * @param removed resource names whose cached units must be dropped from the context
 * @param context multi-file scope graph context caching results between runs
 * @param runtime Stratego runtime used to execute the analysis actions
 * @param strategy name of the Stratego strategy implementing the analysis actions
 * @param progress progress reporter; work = one per changed file + weighted inter-file solve + 1
 * @param cancel cancellation token polled by the solver
 * @return analyze results for changed files plus message-only updates for unchanged files
 * @throws AnalysisException when an analysis action yields no/invalid results or the solver fails
 */
private ISpoofaxAnalyzeResults analyzeSemiIncremental(Map<String, ISpoofaxParseUnit> changed, java.util.Set<String> removed, IMultiFileScopeGraphContext context, HybridInterpreter runtime, String strategy, IProgress progress, ICancel cancel) throws AnalysisException {
final NaBL2DebugConfig debugConfig = context.config().debug();
// Timers feeding the ConstraintDebugData reported at the end.
final Timer totalTimer = new Timer(true);
final AggregateTimer collectionTimer = new AggregateTimer();
final AggregateTimer solverTimer = new AggregateTimer();
final AggregateTimer finalizeTimer = new AggregateTimer();
// The project-wide pseudo-resource (""): its unit carries the global fresh-name counters.
final String globalSource = "";
final Function1<String, String> globalFresh = base -> context.unit(globalSource).fresh().fresh(base);
// Removed files no longer contribute partial solutions to the inter-file solve below.
for (String input : removed) {
context.removeUnit(input);
}
// n = changed files (one progress unit each); w = weight of the inter-file solve,
// taken as half the number of known units; +1 for the global intra solve.
final int n = changed.size();
final int w = context.units().size() / 2;
progress.setWorkRemaining(n + w + 1);
if (debugConfig.analysis() || debugConfig.files()) {
logger.info("Analyzing {} files in {}.", n, context.location());
}
final Collection<ISpoofaxAnalyzeUnit> results = Lists.newArrayList();
final Collection<ISpoofaxAnalyzeUnitUpdate> updateResults = Lists.newArrayList();
try {
// initial: collect the project-wide initial constraints, or reuse the cached result.
InitialResult initialResult;
final Optional<ITerm> customInitial;
{
if (debugConfig.collection()) {
logger.info("Collecting initial constraints.");
}
if (context.initialResult().isPresent()) {
// Cached from a previous run; also recover its custom (language-specific) result.
initialResult = context.initialResult().get();
customInitial = context.initialResult().flatMap(r -> r.getCustomResult());
} else {
collectionTimer.start();
try {
final ITerm globalAST = Actions.sourceTerm(globalSource, B.EMPTY_TUPLE);
ITerm initialResultTerm = doAction(strategy, Actions.analyzeInitial(globalSource, globalAST), context, runtime).orElseThrow(() -> new AnalysisException(context, "No initial result."));
initialResult = InitialResult.matcher().match(initialResultTerm).orElseThrow(() -> new AnalysisException(context, "Invalid initial results."));
customInitial = doCustomAction(strategy, Actions.customInitial(globalSource, globalAST), context, runtime);
initialResult = initialResult.withCustomResult(customInitial);
context.setInitialResult(initialResult);
} finally {
collectionTimer.stop();
}
}
if (debugConfig.collection()) {
logger.info("Initial constraints collected.");
}
}
// Global parameters that form the interface for a single unit: all variables occurring
// in the initial args' params and (optional) type are shared between units.
final java.util.Set<ITermVar> intfVars = Sets.newHashSet();
{
initialResult.getArgs().getParams().stream().forEach(param -> intfVars.addAll(param.getVars()));
initialResult.getArgs().getType().ifPresent(type -> intfVars.addAll(type.getVars()));
}
final SemiIncrementalMultiFileSolver solver = new SemiIncrementalMultiFileSolver(context.config().debug(), callExternal(runtime));
// global: solve the initial constraints intra-file (no interface scopes yet), or reuse the cache.
ISolution initialSolution;
{
if (context.initialSolution().isPresent()) {
initialSolution = context.initialSolution().get();
} else {
try {
solverTimer.start();
final IProgress subprogress = progress.subProgress(1);
GraphSolution preSolution = solver.solveGraph(ImmutableBaseSolution.of(initialResult.getConfig(), initialResult.getConstraints(), PersistentUnifier.Immutable.of()), globalFresh, cancel, subprogress);
preSolution = solver.reportUnsolvedGraphConstraints(preSolution);
initialSolution = solver.solveIntra(preSolution, intfVars, null, globalFresh, cancel, subprogress);
if (debugConfig.resolution()) {
logger.info("Reduced file constraints to {}.", initialSolution.constraints().size());
}
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
context.setInitialSolution(initialSolution);
}
}
// Interface scopes: params that unify to scopes under the initial solution's unifier.
final java.util.Set<Scope> intfScopes = Sets.newHashSet();
{
initialResult.getArgs().getParams().stream().forEach(param -> Scope.matcher().match(param, initialSolution.unifier()).ifPresent(intfScopes::add));
}
// units: per changed file, collect constraints and compute a partial (intra-file) solution.
final Map<String, IStrategoTerm> astsByFile = Maps.newHashMap();
final Map<String, IMessage> failures = Maps.newHashMap();
final Multimap<String, IMessage> ambiguitiesByFile = HashMultimap.create();
for (Map.Entry<String, ISpoofaxParseUnit> input : changed.entrySet()) {
final String source = input.getKey();
final ISpoofaxParseUnit parseUnit = input.getValue();
final ITerm ast = strategoTerms.fromStratego(parseUnit.ast());
if (debugConfig.files()) {
logger.info("Analyzing {}.", source);
}
// Reset the unit so stale results never leak into this run.
final IMultiFileScopeGraphUnit unit = context.unit(source);
unit.clear();
try {
UnitResult unitResult;
final Optional<ITerm> customUnit;
{
if (debugConfig.collection()) {
logger.info("Collecting constraints of {}.", source);
}
try {
collectionTimer.start();
final ITerm unitResultTerm = doAction(strategy, Actions.analyzeUnit(source, ast, initialResult.getArgs()), context, runtime).orElseThrow(() -> new AnalysisException(context, "No unit result."));
unitResult = UnitResult.matcher().match(unitResultTerm).orElseThrow(() -> new MetaborgException("Invalid unit results."));
// The analysis action may desugar the AST; the desugared form is what callers get back.
final ITerm desugaredAST = unitResult.getAST();
customUnit = doCustomAction(strategy, Actions.customUnit(source, desugaredAST, customInitial.orElse(B.EMPTY_TUPLE)), context, runtime);
unitResult = unitResult.withCustomResult(customUnit);
final IStrategoTerm analyzedAST = strategoTerms.toStratego(desugaredAST);
astsByFile.put(source, analyzedAST);
// Parse-time ambiguity messages are re-attached to this file's analyze result below.
ambiguitiesByFile.putAll(source, analysisCommon.ambiguityMessages(parseUnit.source(), parseUnit.ast()));
unit.setUnitResult(unitResult);
} finally {
collectionTimer.stop();
}
if (debugConfig.collection()) {
logger.info("Collected {} constraints of {}.", unitResult.getConstraints().size(), source);
}
}
{
final ISolution unitSolution;
if (debugConfig.resolution()) {
logger.info("Reducing {} constraints of {}.", unitResult.getConstraints().size(), source);
}
try {
solverTimer.start();
// Fresh names are drawn from this unit's own counter, not the global one.
final Function1<String, String> fresh = base -> context.unit(source).fresh().fresh(base);
final IProgress subprogress = progress.subProgress(1);
// Start from the initial solution's unifier so global bindings are visible intra-file.
GraphSolution preSolution = solver.solveGraph(ImmutableBaseSolution.of(initialResult.getConfig(), unitResult.getConstraints(), initialSolution.unifier()), fresh, cancel, subprogress);
preSolution = solver.reportUnsolvedGraphConstraints(preSolution);
unitSolution = solver.solveIntra(preSolution, intfVars, intfScopes, fresh, cancel, subprogress);
if (debugConfig.resolution()) {
logger.info("Reduced file constraints to {}.", unitSolution.constraints().size());
}
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
// Cached in the context so unchanged files contribute it to later runs as well.
unit.setPartialSolution(unitSolution);
if (debugConfig.files() || debugConfig.resolution()) {
logger.info("Analyzed {}: {} errors, {} warnings, {} notes, {} unsolved constraints.", source, unitSolution.messages().getErrors().size(), unitSolution.messages().getWarnings().size(), unitSolution.messages().getNotes().size(), unitSolution.constraints().size());
}
}
} catch (MetaborgException e) {
// A single failing file does not abort the run; it is reported as a failed unit below.
logger.warn("Analysis of " + source + " failed.", e);
failures.put(source, MessageFactory.newAnalysisErrorAtTop(parseUnit.source(), "File analysis failed.", e));
}
}
// solve: combine the partial solutions of ALL units (changed and unchanged) inter-file.
final ISolution solution;
final List<Optional<ITerm>> customUnits = Lists.newArrayList();
{
final List<ISolution> partialSolutions = Lists.newArrayList();
for (IMultiFileScopeGraphUnit unit : context.units()) {
unit.partialSolution().ifPresent(partialSolutions::add);
unit.unitResult().map(UnitResult::getCustomResult).ifPresent(customUnits::add);
}
if (debugConfig.resolution()) {
logger.info("Solving {} partial solutions.", partialSolutions.size());
}
ISolution sol;
try {
solverTimer.start();
Function1<String, String> fresh = base -> context.unit(globalSource).fresh().fresh(base);
// Default message template for constraints that fail without their own message info.
IMessageInfo message = ImmutableMessageInfo.of(MessageKind.ERROR, MessageContent.of(), Actions.sourceTerm(globalSource));
sol = solver.solveInter(initialSolution, partialSolutions, message, fresh, cancel, progress.subProgress(w));
sol = solver.reportUnsolvedConstraints(sol);
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
// FlowSpec data-flow analysis only runs when the solution produced a control-flow graph.
if (!sol.flowSpecSolution().controlFlowGraph().isEmpty()) {
logger.debug("CFG is not empty: calling FlowSpec dataflow solver");
sol = new FixedPoint().entryPoint(sol, getFlowSpecTransferFunctions(context.language()));
}
solution = sol;
context.setSolution(solution);
if (debugConfig.resolution()) {
logger.info("Project constraints solved.");
}
}
// final: run the project-level finalization action and extract the custom solution.
FinalResult finalResult;
final Optional<ITerm> customFinal;
final Optional<CustomSolution> customSolution;
{
if (debugConfig.analysis()) {
logger.info("Finalizing project analysis.");
}
finalizeTimer.start();
try {
ITerm finalResultTerm = doAction(strategy, Actions.analyzeFinal(globalSource), context, runtime).orElseThrow(() -> new AnalysisException(context, "No final result."));
finalResult = FinalResult.matcher().match(finalResultTerm, solution.unifier()).orElseThrow(() -> new AnalysisException(context, "Invalid final results."));
// Optionals.filter drops the absent custom unit results before passing them on.
customFinal = doCustomAction(strategy, Actions.customFinal(globalSource, customInitial.orElse(B.EMPTY_TUPLE), Optionals.filter(customUnits)), context, runtime);
finalResult = finalResult.withCustomResult(customFinal);
context.setFinalResult(finalResult);
customSolution = customFinal.flatMap(cs -> CustomSolution.matcher().match(cs, solution.unifier()));
customSolution.ifPresent(cs -> context.setCustomSolution(cs));
} finally {
finalizeTimer.stop();
}
if (debugConfig.analysis()) {
logger.info("Project analysis finalized.");
}
}
// errors: merge solver, unsolved-constraint, and custom messages, then group them per file.
{
if (debugConfig.analysis()) {
logger.info("Processing project messages.");
}
Messages.Transient messageBuilder = Messages.Transient.of();
messageBuilder.addAll(Messages.unsolvedErrors(solution.constraints()));
messageBuilder.addAll(solution.messages().getAll());
customSolution.map(CustomSolution::getMessages).map(IMessages::getAll).ifPresent(messageBuilder::addAll);
IMessages messages = messageBuilder.freeze();
IRelation3.Transient<String, MessageSeverity, IMessage> messagesByFile = HashTrieRelation3.Transient.of();
messagesByFile(failures.values(), messagesByFile, context);
messagesByFile(messages(messages.getAll(), solution.unifier(), context, context.location()), messagesByFile, context);
// precondition: the messagesByFile should not contain any files that do not have corresponding units
for (IMultiFileScopeGraphUnit unit : context.units()) {
final String source = unit.resource();
final java.util.Set<IMessage> fileMessages = messagesByFile.get(source).stream().map(Map.Entry::getValue).collect(Collectors2.toHashSet());
if (changed.containsKey(source)) {
// Changed file: full analyze result, including its AST and ambiguity messages.
fileMessages.addAll(ambiguitiesByFile.get(source));
final boolean valid = !failures.containsKey(source);
final boolean success = valid && messagesByFile.get(source, MessageSeverity.ERROR).isEmpty();
final IStrategoTerm analyzedAST = astsByFile.get(source);
results.add(unitService.analyzeUnit(changed.get(source), new AnalyzeContrib(valid, success, analyzedAST != null, analyzedAST, fileMessages, -1), context));
} else {
// Unchanged file: only its messages are refreshed, via an update result.
try {
final FileObject file = context.location().resolveFile(source);
updateResults.add(unitService.analyzeUnitUpdate(file, new AnalyzeUpdateData(fileMessages), context));
} catch (IOException ex) {
logger.error("Could not resolve {} to update messages", source);
}
}
messagesByFile.remove(source);
}
// Anything left over violates the precondition above and is only logged, not reported.
if (!messagesByFile.keySet().isEmpty()) {
logger.error("Found messages for unanalyzed files {}", messagesByFile.keySet());
}
if (debugConfig.analysis() || debugConfig.files() || debugConfig.resolution()) {
logger.info("Analyzed {} files: {} errors, {} warnings, {} notes.", n, messages.getErrors().size(), messages.getWarnings().size(), messages.getNotes().size());
}
}
} catch (InterruptedException e) {
// NOTE(review): the interrupt status is not restored (Thread.currentThread().interrupt());
// presumably intentional for cooperative cancellation — confirm.
logger.debug("Analysis was interrupted.");
} finally {
totalTimer.stop();
}
// NOTE(review): totalTimer.stop() was already called in the finally above; confirm that a
// second stop() here is safe and returns the intended total elapsed time.
final ConstraintDebugData debugData = new ConstraintDebugData(totalTimer.stop(), collectionTimer.total(), solverTimer.total(), finalizeTimer.total());
if (debugConfig.analysis()) {
logger.info("{}", debugData);
}
return new SpoofaxAnalyzeResults(results, updateResults, context, debugData);
}
Use of mb.nabl2.scopegraph.terms.Scope in project nabl by metaborg:
the class InterpreterTerms, method scopeEntries.
/**
 * Converts all scopes of the given scope graph to a map term, where each scope is
 * associated with an {@code SE(decls, refs, directEdges, importEdges)} entry.
 */
private static ITerm scopeEntries(IScopeGraph<Scope, Label, Occurrence> scopeGraph) {
    final Map<ITerm, ITerm> entryPerScope = Maps.newHashMap();
    for (Scope s : scopeGraph.getAllScopes()) {
        // Declarations and references owned by this scope (inverse lookups).
        final IListTerm declarations = B.newList(scopeGraph.getDecls().inverse().get(s));
        final IListTerm references = B.newList(scopeGraph.getRefs().inverse().get(s));
        // Labeled outgoing edges, encoded as multimap terms.
        final IListTerm directEdges = multimap(scopeGraph.getDirectEdges().get(s));
        final IListTerm importEdges = multimap(scopeGraph.getImportEdges().get(s));
        entryPerScope.put(s, B.newAppl("SE", declarations, references, directEdges, importEdges));
    }
    return map(entryPerScope.entrySet());
}
Use of mb.nabl2.scopegraph.terms.Scope in project nabl by metaborg:
the class InterpreterTerms, method refEntries.
/**
 * Converts all references of the given scope graph to a map term, where each reference
 * is associated with an {@code RE(scopes)} entry holding its (at most one) scope.
 */
private static ITerm refEntries(IScopeGraph<Scope, Label, Occurrence> scopeGraph) {
    final Map<ITerm, ITerm> entryPerRef = Maps.newHashMap();
    for (Occurrence reference : scopeGraph.getAllRefs()) {
        // A reference belongs to at most one scope: singleton list, or nil when absent.
        final ITerm scopes = scopeGraph.getRefs().get(reference).map(s -> B.newList(s)).orElse(B.newNil());
        entryPerRef.put(reference, B.newAppl("RE", scopes));
    }
    return map(entryPerRef.entrySet());
}
Use of mb.nabl2.scopegraph.terms.Scope in project nabl by metaborg:
the class InterpreterTerms, method nameresolution.
/**
 * Converts the name resolution to a map term from references to
 * {@code (declaration, path)} tuples. Only uniquely resolved references are
 * included; ambiguous (or unresolved) references are skipped with a warning.
 */
private static ITerm nameresolution(IEsopNameResolution<Scope, Label, Occurrence> nameResolution) {
    // Force resolution so every reference's entry is available below.
    nameResolution.resolveAll();
    final Map<ITerm, ITerm> resolutionPerRef = Maps.newHashMap();
    for (Map.Entry<Occurrence, Set<IResolutionPath<Scope, Label, Occurrence>>> e : nameResolution.resolutionEntries()) {
        final Occurrence reference = e.getKey();
        final Set<IResolutionPath<Scope, Label, Occurrence>> paths = e.getValue();
        if (paths.size() != 1) {
            // The term encoding can only represent a single resolution path.
            logger.warn("Can only convert a single path, but {} has {}.", reference, paths.size());
            continue;
        }
        final IResolutionPath<Scope, Label, Occurrence> path = Iterables.getOnlyElement(paths);
        resolutionPerRef.put(reference, B.newTuple(path.getDeclaration(), Paths.toTerm(path)));
    }
    return map(resolutionPerRef.entrySet());
}
Use of mb.nabl2.scopegraph.terms.Scope in project nabl by metaborg:
the class InterpreterTerms, method declEntries.
/**
 * Converts all declarations of the given scope graph to a map term, where each
 * declaration is associated with a {@code DE(scopes, assocs)} entry holding its
 * (at most one) scope and its associated (export) edges.
 */
private static ITerm declEntries(IScopeGraph<Scope, Label, Occurrence> scopeGraph) {
    final Map<ITerm, ITerm> entryPerDecl = Maps.newHashMap();
    for (Occurrence declaration : scopeGraph.getAllDecls()) {
        // A declaration belongs to at most one scope: singleton list, or nil when absent.
        final ITerm scopes = scopeGraph.getDecls().get(declaration).map(s -> B.newList(s)).orElse(B.newNil());
        final ITerm associations = multimap(scopeGraph.getExportEdges().get(declaration));
        entryPerDecl.put(declaration, B.newAppl("DE", scopes, associations));
    }
    return map(entryPerDecl.entrySet());
}
Aggregations