Use of org.eclipse.titan.designer.AST.Module in the project titan.EclipsePlug-ins by eclipse.
The class FMInEnvy, method measure.
@Override
public Number measure(final MetricData data, final Def_Function function) {
	// Counts internal feature-envy occurrences while visiting the function body.
	final Counter envyCounter = new Counter(0);
	final Module enclosingModule = function.getMyScope().getModuleScope();
	final InternalFeatureEnvyDetector detector = new InternalFeatureEnvyDetector(enclosingModule, envyCounter);
	function.accept(detector);
	return envyCounter.val();
}
Use of org.eclipse.titan.designer.AST.Module in the project titan.EclipsePlug-ins by eclipse.
The class ProjectSourceSemanticAnalyzer, method analyzeMultipleProjectsSemantically.
/**
 * Internal function.
 * <p>
 * Does the semantic checking of the modules located in multiple projects.
 * It is important to call this function after the
 * {@link #internalDoAnalyzeSyntactically(IProgressMonitor, CompilationTimeStamp)}
 * function was executed on all involved projects, as the out-dated markers will be cleared here.
 *
 * @param tobeSemanticallyAnalyzed the list of projects to be analyzed.
 * @param monitor
 *                the progress monitor to provide feedback to the user
 *                about the progress.
 * @param compilationCounter
 *                the timestamp of the actual build cycle.
 *
 * @return the status of the operation when it finished.
 */
static IStatus analyzeMultipleProjectsSemantically(final List<IProject> tobeSemanticallyAnalyzed, final IProgressMonitor monitor, final CompilationTimeStamp compilationCounter) {
	// Abort early if any involved project is closed/inaccessible or is not a TITAN project.
	for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
		if (!tobeSemanticallyAnalyzed.get(i).isAccessible() || !TITANNature.hasTITANNature(tobeSemanticallyAnalyzed.get(i))) {
			return Status.CANCEL_STATUS;
		}
	}

	final long semanticCheckStart = System.nanoTime();

	// Reset the per-project module bookkeeping before re-registering the modules below.
	for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
		ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
		synchronized (semanticAnalyzer.outdatedModuleMap) {
			semanticAnalyzer.outdatedModuleMap.clear();
		}
		semanticAnalyzer.moduleMap.clear();
	}

	// Semantic checking starts here
	SubMonitor progress = SubMonitor.convert(monitor, 1);
	progress.setTaskName("On-the-fly semantic checking of everything ");
	progress.subTask("Checking the importations of the modules");
	try {
		// clean the instantiated parameterized assignments,
		// from their instances
		Ass_pard.resetAllInstanceCounters();

		// check for duplicated module names
		HashMap<String, Module> uniqueModules = new HashMap<String, Module>();
		Set<String> duplicatedModules = new HashSet<String>();

		// collect all modules and semantically checked modules to work on.
		final List<Module> allModules = new ArrayList<Module>();
		final List<String> semanticallyChecked = new ArrayList<String>();

		// remove module name duplication markers. It shall be done before starting the next for-loop!
		for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
			final ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
			for (Module module : semanticAnalyzer.fileModuleMap.values()) {
				if (module instanceof TTCN3Module) {
					MarkerHandler.markAllSemanticMarkersForRemoval(module.getIdentifier());
				}
			}
		}

		// Register every module across all projects; duplicated module names are
		// reported on both occurrences and excluded from the up-to-date set.
		for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
			final ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
			for (Module module : semanticAnalyzer.fileModuleMap.values()) {
				final String name = module.getIdentifier().getName();
				allModules.add(module);
				// ASN.1 modules are not analyzed incrementally, therefore their markers can be removed in one step:
				if (module instanceof ASN1Module) {
					MarkerHandler.markAllSemanticMarkersForRemoval(module.getLocation().getFile());
				}
				if (uniqueModules.containsKey(name)) {
					// Duplicated module name: flag both the first occurrence and this one.
					final Location location = uniqueModules.get(name).getIdentifier().getLocation();
					final Location location2 = module.getIdentifier().getLocation();
					location.reportSemanticError(MessageFormat.format(DUPLICATEMODULE, module.getIdentifier().getDisplayName()));
					location2.reportSemanticError(MessageFormat.format(DUPLICATEMODULE, module.getIdentifier().getDisplayName()));
					duplicatedModules.add(name);
					semanticAnalyzer.semanticallyUptodateModules.remove(name);
				} else {
					uniqueModules.put(name, module);
					semanticAnalyzer.moduleMap.put(name, module);
					if (semanticAnalyzer.semanticallyUptodateModules.contains(name)) {
						semanticallyChecked.add(name);
					}
				}
			}
		}

		int nofModulesTobeChecked = 0;
		if (allModules.size() > semanticallyChecked.size()) {
			// check and build the import hierarchy of the modules
			ModuleImportationChain referenceChain = new ModuleImportationChain(CIRCULARIMPORTCHAIN, false);

			// remove markers from import lines
			for (Module module : allModules) {
				if (module instanceof TTCN3Module) {
					List<ImportModule> imports = ((TTCN3Module) module).getImports();
					for (ImportModule imp : imports) {
						MarkerHandler.markAllSemanticMarkersForRemoval(imp.getLocation());
					}
				}
				// markers are removed in one step in ASN.1 modules
			}

			for (Module module : allModules) {
				module.checkImports(compilationCounter, referenceChain, new ArrayList<Module>());
				referenceChain.clear();
			}

			progress.subTask("Calculating the list of modules to be checked");

			// Select only the modules whose semantic information was broken by the changes.
			BrokenPartsViaReferences selectionMethod = new BrokenPartsViaReferences(compilationCounter);
			SelectionMethodBase selectionMethodBase = (SelectionMethodBase) selectionMethod;
			selectionMethodBase.setModules(allModules, semanticallyChecked);
			selectionMethod.execute();

			if (OutOfMemoryCheck.isOutOfMemory()) {
				OutOfMemoryCheck.outOfMemoryEvent();
				return Status.CANCEL_STATUS;
			}

			BrokenPartsChecker brokenPartsChecker = new BrokenPartsChecker(progress.newChild(1), compilationCounter, selectionMethodBase);
			brokenPartsChecker.doChecking();

			// re-enable the markers on the skipped modules.
			for (Module module2 : selectionMethodBase.getModulesToSkip()) {
				MarkerHandler.reEnableAllMarkers((IFile) module2.getLocation().getFile());
			}

			nofModulesTobeChecked = selectionMethodBase.getModulesToCheck().size();
		} else {
			// re-enable all markers
			for (Module module2 : allModules) {
				MarkerHandler.reEnableAllMarkers((IFile) module2.getLocation().getFile());
			}
		}

		// Not supported markers are handled here, at the end of checking. Otherwise they would be deleted
		final IPreferencesService preferenceService = Platform.getPreferencesService();
		final String option = preferenceService.getString(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.REPORTUNSUPPORTEDCONSTRUCTS, GeneralConstants.WARNING, null);
		for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
			// report the unsupported constructs in the project
			ProjectSourceSyntacticAnalyzer syntacticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSyntacticAnalyzer();
			for (IFile file : syntacticAnalyzer.unsupportedConstructMap.keySet()) {
				List<TITANMarker> markers = syntacticAnalyzer.unsupportedConstructMap.get(file);
				if (markers != null && file.isAccessible()) {
					for (TITANMarker marker : markers) {
						Location location = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
						location.reportConfigurableSemanticProblem(option, marker.getMessage());
					}
				}
			}
		}

		// Print timing statistics to the debug console if the user enabled it.
		if (preferenceService.getBoolean(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.DISPLAYDEBUGINFORMATION, true, null)) {
			MessageConsoleStream stream = TITANDebugConsole.getConsole().newMessageStream();
			TITANDebugConsole.println(" ** Had to start checking at " + nofModulesTobeChecked + " modules. ", stream);
			TITANDebugConsole.println(" **On-the-fly semantic checking of projects (" + allModules.size() + " modules) took " + (System.nanoTime() - semanticCheckStart) * (1e-9) + " seconds", stream);
		}

		progress.subTask("Cleanup operations");

		// After checking, exactly the registered non-duplicated modules count as semantically up-to-date.
		for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
			ProjectSourceSemanticAnalyzer semanticAnalyzer = GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer();
			synchronized (semanticAnalyzer.semanticallyUptodateModules) {
				semanticAnalyzer.semanticallyUptodateModules.clear();
				semanticAnalyzer.semanticallyUptodateModules.addAll(semanticAnalyzer.moduleMap.keySet());
				for (String name : duplicatedModules) {
					semanticAnalyzer.semanticallyUptodateModules.remove(name);
				}
			}
		}
	} catch (Exception e) {
		// This catch is extremely important, as it is supposed
		// to protect the project parser, from whatever might go
		// wrong inside the analysis.
		ErrorReporter.logExceptionStackTrace(e);
	}
	progress.done();

	// Record the check timestamp and extract structural information from every registered module.
	for (int i = 0; i < tobeSemanticallyAnalyzed.size(); i++) {
		GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).setLastTimeChecked(compilationCounter);

		ProjectStructureDataCollector collector = GlobalProjectStructureTracker.getDataCollector(tobeSemanticallyAnalyzed.get(i));
		for (Module module : GlobalParser.getProjectSourceParser(tobeSemanticallyAnalyzed.get(i)).getSemanticAnalyzer().moduleMap.values()) {
			collector.addKnownModule(module.getIdentifier());
			module.extractStructuralInformation(collector);
		}
		MarkerHandler.removeAllOnTheFlyMarkedMarkers(tobeSemanticallyAnalyzed.get(i));
	}

	return Status.OK_STATUS;
}
Use of org.eclipse.titan.designer.AST.Module in the project titan.EclipsePlug-ins by eclipse.
The class ProjectSourceSyntacticAnalyzer, method postFileBasedGeneralAnalysis.
/**
 * Uses the parsed data structure to decide if the module found can be
 * inserted into the list of known modules, and inserts it if possible.
 * A file whose parse yielded no usable module (missing module, identifier
 * or location) is recorded as highly syntax-erroneous instead.
 *
 * @param parsedData
 *                the parsed data to insert into the semantic database.
 */
private void postFileBasedGeneralAnalysis(final TemporalParseData parsedData) {
	final Module module = parsedData.getModule();

	// Guard clause: without identifier or location the module cannot be registered.
	if (module == null || module.getIdentifier() == null || module.getLocation() == null) {
		syntacticallyOutdated = true;
		highlySyntaxErroneousFiles.add(parsedData.getFile());
		return;
	}

	sourceParser.getSemanticAnalyzer().addModule(module);

	final IFile file = parsedData.getFile();
	final String moduleName = module.getName();
	fileMap.put(file, moduleName);
	uptodateFiles.put(file, moduleName);

	final List<TITANMarker> unsupportedConstructs = parsedData.getUnsupportedConstructs();
	if (unsupportedConstructs != null && !unsupportedConstructs.isEmpty()) {
		unsupportedConstructMap.put(file, unsupportedConstructs);
	}

	// On parse errors the module's end offset may be unset; patch it from the
	// open document's length, or from the file size on disk if no document exists.
	if (module.getLocation().getEndOffset() == -1 && parsedData.hadParseErrors()) {
		if (parsedData.getDocument() != null) {
			module.getLocation().setEndOffset(parsedData.getDocument().getLength());
		} else {
			module.getLocation().setEndOffset((int) new File(file.getLocationURI()).length());
		}
	}
}
Use of org.eclipse.titan.designer.AST.Module in the project titan.EclipsePlug-ins by eclipse.
The class ProjectSourceSyntacticAnalyzer, method updateSyntax.
/**
 * The entry point of incremental parsing.
 * <p>
 * Handles the data storages, calls the module level incremental parser
 * on the file, and if everything fails does a full parsing to correct
 * possibly invalid states.
 *
 * @param file
 *                the edited file
 * @param reparser
 *                the parser doing the incremental parsing.
 */
public void updateSyntax(final IFile file, final TTCN3ReparseUpdater reparser) {
	if (uptodateFiles.containsKey(file)) {
		Module module = sourceParser.getSemanticAnalyzer().getModulebyFile(file);
		// Any edit invalidates the semantic information of the module.
		sourceParser.getSemanticAnalyzer().reportSemanticOutdating(file);

		// Only TTCN-3 modules support module-level incremental re-parsing.
		if (module != null && module_type.TTCN3_MODULE.equals(module.getModuletype())) {
			try {
				reparser.setUnsupportedConstructs(unsupportedConstructMap);
				try {
					((TTCN3Module) module).updateSyntax(reparser, sourceParser);
					reparser.updateLocation(((TTCN3Module) module).getLocation());
				} catch (ReParseException e) {
					// Incremental parsing failed: drop all knowledge about the file
					// and fall back to a full re-parse of it.
					syntacticallyOutdated = true;
					uptodateFiles.remove(file);
					sourceParser.getSemanticAnalyzer().reportSemanticOutdating(file);
					String oldModuleName = fileMap.get(file);
					if (oldModuleName != null) {
						sourceParser.getSemanticAnalyzer().removeModule(oldModuleName);
						fileMap.remove(file);
					}
					unsupportedConstructMap.remove(file);

					reparser.maxDamage();

					ITtcn3FileReparser r = new Ttcn3FileReparser(reparser, file, sourceParser, fileMap, uptodateFiles, highlySyntaxErroneousFiles);
					syntacticallyOutdated = r.parse();
				}

				MarkerHandler.removeAllOnTheFlySyntacticMarkedMarkers(file);
				// update the position of the markers located after the damaged region
				MarkerHandler.updateMarkers(file, reparser.getFirstLine(), reparser.getLineShift(), reparser.getDamageEnd(), reparser.getShift());
			} catch (Exception e) {
				// This catch is extremely important, as
				// it is supposed to protect the project
				// parser, from whatever might go wrong
				// inside the analysis.
				ErrorReporter.logExceptionStackTrace(e);
			}
		} else {
			// Non-TTCN-3 (or unknown) module: mark the whole file as outdated.
			reportOutdating(file);
		}
	} else if (highlySyntaxErroneousFiles.contains(file)) {
		reportOutdating(file);
	} else {
		// The file has no up-to-date parse result: analyze it from scratch.
		MarkerHandler.markAllMarkersForRemoval(file);
		TemporalParseData temp = fileBasedTTCN3Analysis(file);
		postFileBasedGeneralAnalysis(temp);
	}

	reparser.reportSyntaxErrors();
}
Use of org.eclipse.titan.designer.AST.Module in the project titan.EclipsePlug-ins by eclipse.
The class ReferenceSearch, method runAction.
/**
 * Helper function used by the FindReferences classes of the TTCN-3, ASN.1
 * and TTCNPP editors: locates the assignment referenced at the caret (or
 * at the end of the current text selection) and launches a background
 * reference search query for it. Errors are reported on the editor's
 * status line.
 */
public static void runAction(final IEditorPart targetEditor, final ISelection selection) {
	// Clear any previous error from the status line.
	targetEditor.getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(null);

	final IFile file = (IFile) targetEditor.getEditorInput().getAdapter(IFile.class);
	if (file == null) {
		targetEditor.getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(FILENOTIDENTIFIABLE);
		return;
	}
	if (!TITANNature.hasTITANNature(file.getProject())) {
		targetEditor.getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(TITANNature.NO_TITAN_FILE_NATURE_FOUND);
		return;
	}

	final IPreferencesService preferencesService = Platform.getPreferencesService();
	final boolean reportDebugInformation = preferencesService.getBoolean(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.DISPLAYDEBUGINFORMATION, true, null);

	// The search offset: the end of the text selection if there is one,
	// otherwise the caret position.
	final int offset;
	if (selection instanceof TextSelection && !selection.isEmpty() && !"".equals(((TextSelection) selection).getText())) {
		final TextSelection textSelection = (TextSelection) selection;
		if (reportDebugInformation) {
			TITANDebugConsole.println("text selected: " + textSelection.getText());
		}
		offset = textSelection.getOffset() + textSelection.getLength();
	} else {
		offset = ((IEditorWithCarretOffset) targetEditor).getCarretOffset();
	}

	// find the module
	final ProjectSourceParser projectSourceParser = GlobalParser.getProjectSourceParser(file.getProject());
	if (ResourceExclusionHelper.isExcluded(file)) {
		targetEditor.getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(MessageFormat.format(EXCLUDEDFROMBUILD, file.getFullPath()));
		return;
	}
	final Module module = projectSourceParser.containedModule(file);
	if (module == null) {
		targetEditor.getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(MessageFormat.format(NOTFOUNDMODULE, file.getName()));
		return;
	}

	// Identify the assignment referenced at the computed offset.
	final ReferenceFinder referenceFinder = new ReferenceFinder();
	if (!referenceFinder.detectAssignmentDataByOffset(module, offset, targetEditor, true, reportDebugInformation)) {
		return;
	}

	// Cancel any already-running search before starting the new query in the background.
	final ReferenceSearchQuery query = new ReferenceSearchQuery(referenceFinder, module, file.getProject());
	for (final ISearchQuery runningQuery : NewSearchUI.getQueries()) {
		NewSearchUI.cancelQuery(runningQuery);
	}
	NewSearchUI.runQueryInBackground(query);
}
Aggregations