Use of org.eclipse.titan.designer.AST.brokenpartsanalyzers.SelectionMethodBase in project titan.EclipsePlug-ins by eclipse.
The example below is the analyzeMultipleProjectsSemantically method of the ProjectSourceSemanticAnalyzer class.
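The SelectionMethodBase usage in this example boils down to the following pattern, shown here as a minimal sketch distilled from the full method below (allModules, semanticallyChecked, compilationCounter and progress are locals of that method): a BrokenPartsViaReferences instance is driven through its SelectionMethodBase base type, and its result determines both what the BrokenPartsChecker analyzes and which modules keep their previously computed markers.

// Minimal sketch of the SelectionMethodBase usage, distilled from the method below.
// allModules, semanticallyChecked, compilationCounter and progress come from the surrounding method.
final BrokenPartsViaReferences selectionMethod = new BrokenPartsViaReferences(compilationCounter);
final SelectionMethodBase selectionMethodBase = (SelectionMethodBase) selectionMethod;
selectionMethodBase.setModules(allModules, semanticallyChecked);
selectionMethod.execute();

// only the modules selected above are analyzed by the checker
final BrokenPartsChecker brokenPartsChecker = new BrokenPartsChecker(progress.newChild(1), compilationCounter, selectionMethodBase);
brokenPartsChecker.doChecking();

// modules that could be skipped get their previously computed markers back
for (final Module module : selectionMethodBase.getModulesToSkip()) {
    MarkerHandler.reEnableAllMarkers((IFile) module.getLocation().getFile());
}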
/**
 * Internal function.
 * <p>
 * Does the semantic checking of the modules located in multiple projects.
 * It is important to call this function after the
 * {@link #internalDoAnalyzeSyntactically(IProgressMonitor, CompilationTimeStamp)}
 * function was executed on all involved projects, as the out-dated markers will be cleared here.
 *
 * @param tobeSemanticallyAnalyzed
 *                the list of projects to be analyzed.
 * @param monitor
 *                the progress monitor to provide feedback to the user about the progress.
 * @param compilationCounter
 *                the timestamp of the actual build cycle.
 *
 * @return the status of the operation when it finished.
 */
static IStatus analyzeMultipleProjectsSemantically(final List<IProject> tobeSemanticallyAnalyzed, final IProgressMonitor monitor, final CompilationTimeStamp compilationCounter) {
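    // Pre-check: give up immediately if any of the projects is not accessible or does not carry the TITAN nature.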
    for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
        if (!tobeSemanticallyAnalyzed.get(i).isAccessible() || !TITANNature.hasTITANNature(tobeSemanticallyAnalyzed.get(i))) {
            return Status.CANCEL_STATUS;
        }
    }
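    // Record the start time, then drop the per-project results of the previous semantic check.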
    final long semanticCheckStart = System.nanoTime();
    for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
        ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
        synchronized (semanticAnalyzer.outdatedModuleMap) {
            semanticAnalyzer.outdatedModuleMap.clear();
        }
        semanticAnalyzer.moduleMap.clear();
    }

    // Semantic checking starts here
    SubMonitor progress = SubMonitor.convert(monitor, 1);
    progress.setTaskName("On-the-fly semantic checking of everything ");
    progress.subTask("Checking the importations of the modules");

    try {
        // clean the instantiated parameterized assignments,
        // from their instances
        Ass_pard.resetAllInstanceCounters();

        // check for duplicated module names
        HashMap<String, Module> uniqueModules = new HashMap<String, Module>();
        Set<String> duplicatedModules = new HashSet<String>();

        // collect all modules and semantically checked modules to work on.
        final List<Module> allModules = new ArrayList<Module>();
        final List<String> semanticallyChecked = new ArrayList<String>();

        // remove module name duplication markers. It shall be done before starting the next for-loop!
        for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
            final ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
            for (Module module : semanticAnalyzer.fileModuleMap.values()) {
                if (module instanceof TTCN3Module) {
                    MarkerHandler.markAllSemanticMarkersForRemoval(module.getIdentifier());
                }
            }
        }
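        // Register every module under its name; duplicated module names are reported on both locations and taken out of the semantically up-to-date set.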
        for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
            final ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
            for (Module module : semanticAnalyzer.fileModuleMap.values()) {
                final String name = module.getIdentifier().getName();
                allModules.add(module);
                // ASN1 modules are not analyzed incrementally, therefore their markers can be removed in one step:
                if (module instanceof ASN1Module) {
                    MarkerHandler.markAllSemanticMarkersForRemoval(module.getLocation().getFile());
                }
                if (uniqueModules.containsKey(name)) {
                    final Location location = uniqueModules.get(name).getIdentifier().getLocation();
                    final Location location2 = module.getIdentifier().getLocation();
                    location.reportSemanticError(MessageFormat.format(DUPLICATEMODULE, module.getIdentifier().getDisplayName()));
                    location2.reportSemanticError(MessageFormat.format(DUPLICATEMODULE, module.getIdentifier().getDisplayName()));
                    duplicatedModules.add(name);
                    semanticAnalyzer.semanticallyUptodateModules.remove(name);
                } else {
                    uniqueModules.put(name, module);
                    semanticAnalyzer.moduleMap.put(name, module);
                    if (semanticAnalyzer.semanticallyUptodateModules.contains(name)) {
                        semanticallyChecked.add(name);
                    }
                }
            }
        }

        int nofModulesTobeChecked = 0;
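        // The expensive selection and checking is only needed if at least one module is not semantically up-to-date yet.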
        if (allModules.size() > semanticallyChecked.size()) {
            // check and build the import hierarchy of the modules
            ModuleImportationChain referenceChain = new ModuleImportationChain(CIRCULARIMPORTCHAIN, false);

            // remove markers from import lines
            for (Module module : allModules) {
                if (module instanceof TTCN3Module) {
                    List<ImportModule> imports = ((TTCN3Module) module).getImports();
                    for (ImportModule imp : imports) {
                        MarkerHandler.markAllSemanticMarkersForRemoval(imp.getLocation());
                    }
                }
                // markers are removed in one step in ASN1 modules
            }
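            // Check the imports of every module; the reference chain is used to detect circular import chains.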
            for (Module module : allModules) {
                module.checkImports(compilationCounter, referenceChain, new ArrayList<Module>());
                referenceChain.clear();
            }

            progress.subTask("Calculating the list of modules to be checked");
            BrokenPartsViaReferences selectionMethod = new BrokenPartsViaReferences(compilationCounter);
            SelectionMethodBase selectionMethodBase = (SelectionMethodBase) selectionMethod;
            selectionMethodBase.setModules(allModules, semanticallyChecked);
            selectionMethod.execute();

            if (OutOfMemoryCheck.isOutOfMemory()) {
                OutOfMemoryCheck.outOfMemoryEvent();
                return Status.CANCEL_STATUS;
            }
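            // The broken parts checker performs the actual semantic analysis on the selected modules.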
            BrokenPartsChecker brokenPartsChecker = new BrokenPartsChecker(progress.newChild(1), compilationCounter, selectionMethodBase);
            brokenPartsChecker.doChecking();

            // re-enable the markers on the skipped modules.
            for (Module module2 : selectionMethodBase.getModulesToSkip()) {
                MarkerHandler.reEnableAllMarkers((IFile) module2.getLocation().getFile());
            }
            nofModulesTobeChecked = selectionMethodBase.getModulesToCheck().size();
        } else {
            // re-enable all markers
            for (Module module2 : allModules) {
                MarkerHandler.reEnableAllMarkers((IFile) module2.getLocation().getFile());
            }
        }

        // Unsupported construct markers are handled here, at the end of checking. Otherwise they would be deleted.
        final IPreferencesService preferenceService = Platform.getPreferencesService();
        final String option = preferenceService.getString(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.REPORTUNSUPPORTEDCONSTRUCTS, GeneralConstants.WARNING, null);
        for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
            // report the unsupported constructs in the project
            ProjectSourceSyntacticAnalyzer syntacticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSyntacticAnalyzer();
            for (IFile file : syntacticAnalyzer.unsupportedConstructMap.keySet()) {
                List<TITANMarker> markers = syntacticAnalyzer.unsupportedConstructMap.get(file);
                if (markers != null && file.isAccessible()) {
                    for (TITANMarker marker : markers) {
                        Location location = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
                        location.reportConfigurableSemanticProblem(option, marker.getMessage());
                    }
                }
            }
        }
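        // Optionally report how much work was done and how long it took on the TITAN debug console.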
        if (preferenceService.getBoolean(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.DISPLAYDEBUGINFORMATION, true, null)) {
            MessageConsoleStream stream = TITANDebugConsole.getConsole().newMessageStream();
            TITANDebugConsole.println(" ** Had to start checking at " + nofModulesTobeChecked + " modules. ", stream);
            TITANDebugConsole.println(" **On-the-fly semantic checking of projects (" + allModules.size() + " modules) took " + (System.nanoTime() - semanticCheckStart) * (1e-9) + " seconds", stream);
        }

        progress.subTask("Cleanup operations");
        for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
            ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
            synchronized (semanticAnalyzer.semanticallyUptodateModules) {
                semanticAnalyzer.semanticallyUptodateModules.clear();
                semanticAnalyzer.semanticallyUptodateModules.addAll(semanticAnalyzer.moduleMap.keySet());
                for (String name : duplicatedModules) {
                    semanticAnalyzer.semanticallyUptodateModules.remove(name);
                }
            }
        }
    } catch (Exception e) {
        // This catch is extremely important, as it is supposed
        // to protect the project parser from whatever might go
        // wrong inside the analysis.
        ErrorReporter.logExceptionStackTrace(e);
    }
    progress.done();
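    // Final per-project bookkeeping: store the timestamp of this check, feed the project structure tracker, and remove the markers that are still marked for removal.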
    for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
        GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).setLastTimeChecked(compilationCounter);

        ProjectStructureDataCollector collector = GlobalProjectStructureTracker.getDataCollector(tobeSemanticallyAnalyzed.get(i));
        for (Module module : GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer().moduleMap.values()) {
            collector.addKnownModule(module.getIdentifier());
            module.extractStructuralInformation(collector);
        }
        MarkerHandler.removeAllOnTheFlyMarkedMarkers(tobeSemanticallyAnalyzed.get(i));
    }

    return Status.OK_STATUS;
}