Usage of org.metaborg.core.messages.IMessage in the spoofax project by metaborg: the toConfig method of the SpoofaxLanguageSpecConfigService class.
/**
 * Converts a parsed hierarchical configuration into a validated Spoofax language
 * specification configuration request.
 *
 * @param config hierarchical configuration backing both config views.
 * @param configFile file the configuration was read from; used as message source.
 * @return request wrapping the language spec config and its validation messages.
 */
@Override
protected ConfigRequest<ISpoofaxLanguageSpecConfig> toConfig(HierarchicalConfiguration<ImmutableNode> config, FileObject configFile) {
    // Both views are backed by the same underlying configuration object.
    final SpoofaxProjectConfig project = new SpoofaxProjectConfig(config);
    final SpoofaxLanguageSpecConfig spec = new SpoofaxLanguageSpecConfig(config, project);
    // Validation problems are reported as internal errors attributed to the config file.
    final MessageBuilder builder = MessageBuilder.create().asError().asInternal().withSource(configFile);
    final Collection<IMessage> validationMessages = spec.validate(builder);
    return new ConfigRequest<ISpoofaxLanguageSpecConfig>(spec, validationMessages);
}
Usage of org.metaborg.core.messages.IMessage in the spoofax project by metaborg: the printMessages method of the Builder class.
/**
 * Prints the given messages through the build input's message printer (when one is
 * configured) and determines whether the build phase succeeded.
 *
 * @param messages messages produced by the phase.
 * @param phase human-readable phase name, used in the failure exception message.
 * @param input build input; supplies the printer and the throw-on-errors setting.
 * @param pardoned when true, errors are printed but never fail the phase.
 * @return true when the phase succeeded (no non-pardoned errors).
 * @throws MetaborgRuntimeException when errors occurred and {@code input.throwOnErrors} is set.
 */
private boolean printMessages(Iterable<IMessage> messages, String phase, BuildInput input, boolean pardoned) {
    final IMessagePrinter printer = input.messagePrinter;
    if (printer != null) {
        for (IMessage msg : messages) {
            printer.print(msg, pardoned);
        }
    }
    // Errors only count against the build when they are not pardoned.
    final boolean hasErrors = !pardoned && MessageUtils.containsSeverity(messages, MessageSeverity.ERROR);
    if (hasErrors && input.throwOnErrors) {
        throw new MetaborgRuntimeException(phase + " produced errors");
    }
    return !hasErrors;
}
Usage of org.metaborg.core.messages.IMessage in the spoofax project by metaborg: the updateLanguageResources method of the Builder class.
/**
 * Builds the resources of a single language implementation: parses changed sources and
 * includes, optionally analyzes and transforms them, and records all results in
 * {@code output}.
 *
 * @param input build input holding the project, build settings, and the message printer.
 * @param language language implementation whose resources are built.
 * @param diff changed source and include resources for this language.
 * @param output collector for parse/analyze/transform results; mutated by this method.
 * @param pardoned whether errors for this language are pardoned (printed but non-fatal).
 * @param progress progress reporter; work split between parse, analyze, transform phases.
 * @param cancel cancellation token, checked before each phase.
 * @return the resources newly written by transformation.
 * @throws InterruptedException when cancellation is requested.
 */
private Collection<FileObject> updateLanguageResources(BuildInput input, ILanguageImpl language, LanguageBuildDiff diff, IBuildOutputInternal<P, A, AU, T> output, boolean pardoned, IProgress progress, ICancel cancel) throws InterruptedException {
    cancel.throwIfCancelled();
    final boolean analyze = input.analyze && analysisService.available(language);
    final boolean transform = input.transform;
    // Progress budget: 10 for parsing; 45 each for analysis and transformation when enabled.
    progress.setWorkRemaining(10 + (analyze ? 45 : 0) + (transform ? 45 : 0));
    final Iterable<IdentifiedResourceChange> sourceChanges = diff.sourceChanges;
    final Iterable<IdentifiedResourceChange> includeChanges = diff.includeChanges;
    final Set<FileName> includes = Sets.newHashSet();
    for (IdentifiedResourceChange includeChange : includeChanges) {
        includes.add(includeChange.change.resource.getName());
    }
    final FileObject location = input.project.location();
    final Collection<FileObject> changedSources = Sets.newHashSet();
    final Set<FileName> removedResources = Sets.newHashSet();
    final Collection<IMessage> extraMessages = Lists.newLinkedList();
    // Flipped to false by parse/analyze/transform when a non-pardoned error occurs.
    final RefBool success = new RefBool(true);
    logger.info("Building {} sources, {} includes of {}", Iterables.size(sourceChanges), Iterables.size(includeChanges), language);

    // Parse
    cancel.throwIfCancelled();
    final Collection<P> sourceParseUnits = parse(input, language, sourceChanges, pardoned, changedSources, removedResources, extraMessages, success, progress.subProgress(5), cancel);
    // GTODO: when a new context is created, all include files need to be parsed and analyzed in that context, this
    // approach does not do that!
    final Collection<P> includeParseUnits = parse(input, language, includeChanges, pardoned, changedSources, removedResources, extraMessages, success, progress.subProgress(5), cancel);
    final Iterable<P> allParseResults = Iterables.concat(sourceParseUnits, includeParseUnits);

    // Analyze
    cancel.throwIfCancelled();
    final Multimap<IContext, A> allAnalyzeUnits;
    final Collection<AU> allAnalyzeUpdates = Lists.newArrayList();
    if (analyze) {
        // Segregate source parse units by their analysis context.
        final Multimap<IContext, P> parseUnitsPerContext = ArrayListMultimap.create();
        for (P parseResult : sourceParseUnits) {
            cancel.throwIfCancelled();
            final FileObject resource = parseResult.source();
            final ILanguageImpl langImpl = parseResult.input().langImpl();
            try {
                if (contextService.available(langImpl)) {
                    final IContext context = contextService.get(resource, input.project, langImpl);
                    parseUnitsPerContext.put(context, parseResult);
                }
            } catch (ContextException e) {
                // A context failure for one unit is reported but does not abort the others.
                final String message = String.format("Failed to retrieve context for parse result of %s", resource);
                printMessage(resource, message, e, input, pardoned);
                extraMessages.add(MessageFactory.newAnalysisErrorAtTop(resource, "Failed to retrieve context", e));
            }
        }
        // Run analysis
        cancel.throwIfCancelled();
        allAnalyzeUnits = analyze(input, language, location, parseUnitsPerContext, includeParseUnits, pardoned, allAnalyzeUpdates, removedResources, extraMessages, success, progress.subProgress(45), cancel);
    } else {
        allAnalyzeUnits = ArrayListMultimap.create();
    }

    // Transform
    cancel.throwIfCancelled();
    final Collection<T> allTransformUnits;
    if (transform) {
        allTransformUnits = transform(input, language, location, allAnalyzeUnits, includes, pardoned, removedResources, extraMessages, success, progress.subProgress(45), cancel);
    } else {
        allTransformUnits = Lists.newLinkedList();
    }

    // Fix: the phase label was the placeholder "Something", which produced the nonsense
    // failure message "Something produced errors"; use a meaningful phase name instead.
    printMessages(extraMessages, "Building", input, pardoned);
    output.add(success.get(), removedResources, includes, changedSources, allParseResults, allAnalyzeUnits.values(), allAnalyzeUpdates, allTransformUnits, extraMessages);

    // Collect all files actually written by the transformation phase.
    final Collection<FileObject> newResources = Lists.newArrayList();
    for (T transformUnit : allTransformUnits) {
        for (ITransformOutput transformOutput : transformUnit.outputs()) {
            final FileObject outputFile = transformOutput.output();
            if (outputFile != null) {
                newResources.add(outputFile);
            }
        }
    }
    return newResources;
}
Usage of org.metaborg.core.messages.IMessage in the spoofax project by metaborg: the toConfig method of the ProjectConfigService class.
/**
 * Converts a parsed hierarchical configuration into a validated project configuration
 * request.
 *
 * @param config hierarchical configuration backing the project config.
 * @param configFile file the configuration was read from; used as message source.
 * @return request wrapping the project config and its validation messages.
 */
@Override
protected ConfigRequest<IProjectConfig> toConfig(HierarchicalConfiguration<ImmutableNode> config, FileObject configFile) {
    final ProjectConfig parsed = new ProjectConfig(config);
    // Validation problems are reported as internal errors attributed to the config file.
    final MessageBuilder messageBuilder = MessageBuilder.create().asError().asInternal().withSource(configFile);
    final Collection<IMessage> validationMessages = parsed.validate(messageBuilder);
    return new ConfigRequest<IProjectConfig>(parsed, validationMessages);
}
Usage of org.metaborg.core.messages.IMessage in the spoofax project by metaborg: the analyzeSemiIncremental method of the ConstraintMultiFileAnalyzer class.
/**
 * Performs semi-incremental constraint analysis: constraints are collected and partially
 * solved per changed file in isolation, then all partial solutions of the project's units
 * are combined in a final inter-unit solving pass, after which messages are distributed
 * back to their files.
 *
 * @param changed map from source name to its (re)parsed unit; these files are (re)analyzed.
 * @param removed source names whose units are dropped from the context before solving.
 * @param context multi-file scope graph context holding per-unit state and cached results.
 * @param runtime Stratego interpreter used to run the analysis strategy.
 * @param strategy name of the Stratego strategy providing the analysis actions.
 * @param progress progress reporter; sized as one tick per changed file plus final solving.
 * @param cancel cancellation token; interruption returns the partial results collected so far.
 * @return analysis results for changed files plus message updates for unchanged files.
 * @throws AnalysisException when an analysis action or the constraint solver fails.
 */
private ISpoofaxAnalyzeResults analyzeSemiIncremental(Map<String, ISpoofaxParseUnit> changed, java.util.Set<String> removed, IMultiFileScopeGraphContext context, HybridInterpreter runtime, String strategy, IProgress progress, ICancel cancel) throws AnalysisException {
final NaBL2DebugConfig debugConfig = context.config().debug();
// Timers feeding the ConstraintDebugData reported at the end.
final Timer totalTimer = new Timer(true);
final AggregateTimer collectionTimer = new AggregateTimer();
final AggregateTimer solverTimer = new AggregateTimer();
final AggregateTimer finalizeTimer = new AggregateTimer();
// The empty string names the project-global (non-file) analysis unit.
final String globalSource = "";
final Function1<String, String> globalFresh = base -> context.unit(globalSource).fresh().fresh(base);
// Drop units of removed files so they no longer take part in solving.
for (String input : removed) {
context.removeUnit(input);
}
// Progress budget: one tick per changed file (n), w for the final solve, one for the initial solution.
final int n = changed.size();
final int w = context.units().size() / 2;
progress.setWorkRemaining(n + w + 1);
if (debugConfig.analysis() || debugConfig.files()) {
logger.info("Analyzing {} files in {}.", n, context.location());
}
final Collection<ISpoofaxAnalyzeUnit> results = Lists.newArrayList();
final Collection<ISpoofaxAnalyzeUnitUpdate> updateResults = Lists.newArrayList();
try {
// initial
InitialResult initialResult;
final Optional<ITerm> customInitial;
{
if (debugConfig.collection()) {
logger.info("Collecting initial constraints.");
}
// Reuse the cached initial result when present; otherwise run the initial analysis action.
if (context.initialResult().isPresent()) {
initialResult = context.initialResult().get();
customInitial = context.initialResult().flatMap(r -> r.getCustomResult());
} else {
collectionTimer.start();
try {
final ITerm globalAST = Actions.sourceTerm(globalSource, B.EMPTY_TUPLE);
ITerm initialResultTerm = doAction(strategy, Actions.analyzeInitial(globalSource, globalAST), context, runtime).orElseThrow(() -> new AnalysisException(context, "No initial result."));
initialResult = InitialResult.matcher().match(initialResultTerm).orElseThrow(() -> new AnalysisException(context, "Invalid initial results."));
customInitial = doCustomAction(strategy, Actions.customInitial(globalSource, globalAST), context, runtime);
initialResult = initialResult.withCustomResult(customInitial);
context.setInitialResult(initialResult);
} finally {
collectionTimer.stop();
}
}
if (debugConfig.collection()) {
logger.info("Initial constraints collected.");
}
}
// global parameters, that form the interface for a single unit
final java.util.Set<ITermVar> intfVars = Sets.newHashSet();
{
initialResult.getArgs().getParams().stream().forEach(param -> intfVars.addAll(param.getVars()));
initialResult.getArgs().getType().ifPresent(type -> intfVars.addAll(type.getVars()));
}
final SemiIncrementalMultiFileSolver solver = new SemiIncrementalMultiFileSolver(context.config().debug(), callExternal(runtime));
// global
ISolution initialSolution;
{
// Reuse the cached initial solution when present; otherwise solve the initial constraints.
if (context.initialSolution().isPresent()) {
initialSolution = context.initialSolution().get();
} else {
try {
solverTimer.start();
final IProgress subprogress = progress.subProgress(1);
GraphSolution preSolution = solver.solveGraph(ImmutableBaseSolution.of(initialResult.getConfig(), initialResult.getConstraints(), PersistentUnifier.Immutable.of()), globalFresh, cancel, subprogress);
preSolution = solver.reportUnsolvedGraphConstraints(preSolution);
initialSolution = solver.solveIntra(preSolution, intfVars, null, globalFresh, cancel, subprogress);
if (debugConfig.resolution()) {
logger.info("Reduced file constraints to {}.", initialSolution.constraints().size());
}
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
context.setInitialSolution(initialSolution);
}
}
// Interface scopes: global parameters that resolve to scopes in the initial solution.
final java.util.Set<Scope> intfScopes = Sets.newHashSet();
{
initialResult.getArgs().getParams().stream().forEach(param -> Scope.matcher().match(param, initialSolution.unifier()).ifPresent(intfScopes::add));
}
// units
// Per-file artifacts gathered while collecting and partially solving each changed unit.
final Map<String, IStrategoTerm> astsByFile = Maps.newHashMap();
final Map<String, IMessage> failures = Maps.newHashMap();
final Multimap<String, IMessage> ambiguitiesByFile = HashMultimap.create();
for (Map.Entry<String, ISpoofaxParseUnit> input : changed.entrySet()) {
final String source = input.getKey();
final ISpoofaxParseUnit parseUnit = input.getValue();
final ITerm ast = strategoTerms.fromStratego(parseUnit.ast());
if (debugConfig.files()) {
logger.info("Analyzing {}.", source);
}
// Reset the unit's previous state before re-collecting its constraints.
final IMultiFileScopeGraphUnit unit = context.unit(source);
unit.clear();
try {
UnitResult unitResult;
final Optional<ITerm> customUnit;
{
if (debugConfig.collection()) {
logger.info("Collecting constraints of {}.", source);
}
try {
collectionTimer.start();
final ITerm unitResultTerm = doAction(strategy, Actions.analyzeUnit(source, ast, initialResult.getArgs()), context, runtime).orElseThrow(() -> new AnalysisException(context, "No unit result."));
unitResult = UnitResult.matcher().match(unitResultTerm).orElseThrow(() -> new MetaborgException("Invalid unit results."));
final ITerm desugaredAST = unitResult.getAST();
customUnit = doCustomAction(strategy, Actions.customUnit(source, desugaredAST, customInitial.orElse(B.EMPTY_TUPLE)), context, runtime);
unitResult = unitResult.withCustomResult(customUnit);
final IStrategoTerm analyzedAST = strategoTerms.toStratego(desugaredAST);
astsByFile.put(source, analyzedAST);
ambiguitiesByFile.putAll(source, analysisCommon.ambiguityMessages(parseUnit.source(), parseUnit.ast()));
unit.setUnitResult(unitResult);
} finally {
collectionTimer.stop();
}
if (debugConfig.collection()) {
logger.info("Collected {} constraints of {}.", unitResult.getConstraints().size(), source);
}
}
{
// Partially solve this unit's constraints against the initial solution's unifier.
final ISolution unitSolution;
if (debugConfig.resolution()) {
logger.info("Reducing {} constraints of {}.", unitResult.getConstraints().size(), source);
}
try {
solverTimer.start();
final Function1<String, String> fresh = base -> context.unit(source).fresh().fresh(base);
final IProgress subprogress = progress.subProgress(1);
GraphSolution preSolution = solver.solveGraph(ImmutableBaseSolution.of(initialResult.getConfig(), unitResult.getConstraints(), initialSolution.unifier()), fresh, cancel, subprogress);
preSolution = solver.reportUnsolvedGraphConstraints(preSolution);
unitSolution = solver.solveIntra(preSolution, intfVars, intfScopes, fresh, cancel, subprogress);
if (debugConfig.resolution()) {
logger.info("Reduced file constraints to {}.", unitSolution.constraints().size());
}
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
unit.setPartialSolution(unitSolution);
if (debugConfig.files() || debugConfig.resolution()) {
logger.info("Analyzed {}: {} errors, {} warnings, {} notes, {} unsolved constraints.", source, unitSolution.messages().getErrors().size(), unitSolution.messages().getWarnings().size(), unitSolution.messages().getNotes().size(), unitSolution.constraints().size());
}
}
} catch (MetaborgException e) {
// A per-file failure is recorded and analysis continues with the remaining files.
logger.warn("Analysis of " + source + " failed.", e);
failures.put(source, MessageFactory.newAnalysisErrorAtTop(parseUnit.source(), "File analysis failed.", e));
}
}
// solve
// Combine the partial solutions of all units (changed and unchanged) in one inter-unit pass.
final ISolution solution;
final List<Optional<ITerm>> customUnits = Lists.newArrayList();
{
final List<ISolution> partialSolutions = Lists.newArrayList();
for (IMultiFileScopeGraphUnit unit : context.units()) {
unit.partialSolution().ifPresent(partialSolutions::add);
unit.unitResult().map(UnitResult::getCustomResult).ifPresent(customUnits::add);
}
if (debugConfig.resolution()) {
logger.info("Solving {} partial solutions.", partialSolutions.size());
}
ISolution sol;
try {
solverTimer.start();
Function1<String, String> fresh = base -> context.unit(globalSource).fresh().fresh(base);
IMessageInfo message = ImmutableMessageInfo.of(MessageKind.ERROR, MessageContent.of(), Actions.sourceTerm(globalSource));
sol = solver.solveInter(initialSolution, partialSolutions, message, fresh, cancel, progress.subProgress(w));
sol = solver.reportUnsolvedConstraints(sol);
} catch (SolverException e) {
throw new AnalysisException(context, e);
} finally {
solverTimer.stop();
}
// Run FlowSpec data-flow solving only when the solution has a non-empty control-flow graph.
if (!sol.flowSpecSolution().controlFlowGraph().isEmpty()) {
logger.debug("CFG is not empty: calling FlowSpec dataflow solver");
sol = new FixedPoint().entryPoint(sol, getFlowSpecTransferFunctions(context.language()));
}
solution = sol;
context.setSolution(solution);
if (debugConfig.resolution()) {
logger.info("Project constraints solved.");
}
}
// final
FinalResult finalResult;
final Optional<ITerm> customFinal;
final Optional<CustomSolution> customSolution;
{
if (debugConfig.analysis()) {
logger.info("Finalizing project analysis.");
}
finalizeTimer.start();
try {
ITerm finalResultTerm = doAction(strategy, Actions.analyzeFinal(globalSource), context, runtime).orElseThrow(() -> new AnalysisException(context, "No final result."));
finalResult = FinalResult.matcher().match(finalResultTerm, solution.unifier()).orElseThrow(() -> new AnalysisException(context, "Invalid final results."));
customFinal = doCustomAction(strategy, Actions.customFinal(globalSource, customInitial.orElse(B.EMPTY_TUPLE), Optionals.filter(customUnits)), context, runtime);
finalResult = finalResult.withCustomResult(customFinal);
context.setFinalResult(finalResult);
customSolution = customFinal.flatMap(cs -> CustomSolution.matcher().match(cs, solution.unifier()));
customSolution.ifPresent(cs -> context.setCustomSolution(cs));
} finally {
finalizeTimer.stop();
}
if (debugConfig.analysis()) {
logger.info("Project analysis finalized.");
}
}
// errors
// Merge solver, custom, and failure messages, then distribute them per file.
{
if (debugConfig.analysis()) {
logger.info("Processing project messages.");
}
Messages.Transient messageBuilder = Messages.Transient.of();
messageBuilder.addAll(Messages.unsolvedErrors(solution.constraints()));
messageBuilder.addAll(solution.messages().getAll());
customSolution.map(CustomSolution::getMessages).map(IMessages::getAll).ifPresent(messageBuilder::addAll);
IMessages messages = messageBuilder.freeze();
IRelation3.Transient<String, MessageSeverity, IMessage> messagesByFile = HashTrieRelation3.Transient.of();
messagesByFile(failures.values(), messagesByFile, context);
messagesByFile(messages(messages.getAll(), solution.unifier(), context, context.location()), messagesByFile, context);
// precondition: the messagesByFile should not contain any files that do not have corresponding units
for (IMultiFileScopeGraphUnit unit : context.units()) {
final String source = unit.resource();
final java.util.Set<IMessage> fileMessages = messagesByFile.get(source).stream().map(Map.Entry::getValue).collect(Collectors2.toHashSet());
// Changed files get a full analyze-unit result; unchanged files only get a message update.
if (changed.containsKey(source)) {
fileMessages.addAll(ambiguitiesByFile.get(source));
final boolean valid = !failures.containsKey(source);
final boolean success = valid && messagesByFile.get(source, MessageSeverity.ERROR).isEmpty();
final IStrategoTerm analyzedAST = astsByFile.get(source);
results.add(unitService.analyzeUnit(changed.get(source), new AnalyzeContrib(valid, success, analyzedAST != null, analyzedAST, fileMessages, -1), context));
} else {
try {
final FileObject file = context.location().resolveFile(source);
updateResults.add(unitService.analyzeUnitUpdate(file, new AnalyzeUpdateData(fileMessages), context));
} catch (IOException ex) {
logger.error("Could not resolve {} to update messages", source);
}
}
messagesByFile.remove(source);
}
if (!messagesByFile.keySet().isEmpty()) {
logger.error("Found messages for unanalyzed files {}", messagesByFile.keySet());
}
if (debugConfig.analysis() || debugConfig.files() || debugConfig.resolution()) {
logger.info("Analyzed {} files: {} errors, {} warnings, {} notes.", n, messages.getErrors().size(), messages.getWarnings().size(), messages.getNotes().size());
}
}
} catch (InterruptedException e) {
// Interruption stops analysis early; partial results gathered so far are still returned.
logger.debug("Analysis was interrupted.");
} finally {
totalTimer.stop();
}
// NOTE(review): totalTimer.stop() is called again here after the finally block already
// stopped it — presumably Timer.stop() is idempotent or returns the total elapsed time;
// confirm against the Timer implementation.
final ConstraintDebugData debugData = new ConstraintDebugData(totalTimer.stop(), collectionTimer.total(), solverTimer.total(), finalizeTimer.total());
if (debugConfig.analysis()) {
logger.info("{}", debugData);
}
return new SpoofaxAnalyzeResults(results, updateResults, context, debugData);
}
Aggregations