Usage of org.antlr.v4.Tool in the antlr4 project (by antlr):
the method Tool.handleArgs.
/**
 * Parse the command-line arguments held in {@code args}.
 * <ul>
 *   <li>{@code -D<option>=<value>} strings are delegated to
 *       {@code handleOptionSetArg};</li>
 *   <li>bare (non-dash) arguments are treated as grammar file names and
 *       collected (without duplicates) into {@code grammarFiles};</li>
 *   <li>every other {@code -flag} is matched against {@code optionDefs} and
 *       applied to the corresponding {@code Tool} field via reflection:
 *       boolean flags are set true ({@code -no-...} sets false), STRING
 *       options consume the following argument as their value.</li>
 * </ul>
 * Afterwards {@code outputDirectory} and {@code libDirectory} are normalized:
 * trailing {@code /} or {@code \} stripped, and defaulted to {@code "."}.
 */
protected void handleArgs() {
    int i = 0;
    while (args != null && i < args.length) {
        String arg = args[i];
        i++;
        if (arg.startsWith("-D")) {
            // -Dlanguage=Java syntax
            handleOptionSetArg(arg);
            continue;
        }
        if (arg.charAt(0) != '-') {
            // file name; skip duplicates so a grammar is processed once
            if (!grammarFiles.contains(arg))
                grammarFiles.add(arg);
            continue;
        }
        boolean found = false;
        for (Option o : optionDefs) {
            if (arg.equals(o.name)) {
                found = true;
                String argValue = null;
                if (o.argType == OptionArgType.STRING) {
                    // BUG FIX: guard against an option whose value is missing
                    // (e.g. "-o" as the last argument); previously this threw
                    // ArrayIndexOutOfBoundsException instead of reporting an error.
                    if (i >= args.length) {
                        errMgr.toolError(ErrorType.INVALID_CMDLINE_ARG, arg);
                        break;
                    }
                    argValue = args[i];
                    i++;
                }
                // use reflection to set the corresponding public field
                Class<? extends Tool> c = this.getClass();
                try {
                    Field f = c.getField(o.fieldName);
                    if (argValue == null) {
                        // boolean flag: "-no-xyz" clears, plain "-xyz" sets
                        f.setBoolean(this, !arg.startsWith("-no-"));
                    }
                    else {
                        f.set(this, argValue);
                    }
                }
                catch (Exception e) {
                    errMgr.toolError(ErrorType.INTERNAL_ERROR, "can't access field " + o.fieldName);
                }
                break; // option names are unique; stop scanning the defs
            }
        }
        if (!found) {
            errMgr.toolError(ErrorType.INVALID_CMDLINE_ARG, arg);
        }
    }
    if (outputDirectory != null) {
        // strip one trailing path separator so later path joins are clean
        if (outputDirectory.endsWith("/") || outputDirectory.endsWith("\\")) {
            outputDirectory = outputDirectory.substring(0, outputDirectory.length() - 1);
        }
        File outDir = new File(outputDirectory);
        haveOutputDir = true;
        if (outDir.exists() && !outDir.isDirectory()) {
            errMgr.toolError(ErrorType.OUTPUT_DIR_IS_FILE, outputDirectory);
            // BUG FIX: fall back for the OUTPUT directory; the original reset
            // libDirectory here, which looks like a copy/paste slip from the
            // libDirectory handling below.
            outputDirectory = ".";
        }
    }
    else {
        outputDirectory = ".";
    }
    if (libDirectory != null) {
        if (libDirectory.endsWith("/") || libDirectory.endsWith("\\")) {
            libDirectory = libDirectory.substring(0, libDirectory.length() - 1);
        }
        File outDir = new File(libDirectory);
        if (!outDir.exists()) {
            errMgr.toolError(ErrorType.DIR_NOT_FOUND, libDirectory);
            libDirectory = ".";
        }
    }
    else {
        libDirectory = ".";
    }
    if (launch_ST_inspector) {
        // StringTemplate inspector needs creation events; keep JVM alive
        STGroup.trackCreationEvents = true;
        return_dont_exit = true;
    }
}
Usage of org.antlr.v4.Tool in the antlr4 project (by antlr):
the method TokenVocabParser.load.
/** Load a vocab file {@code <vocabName>.tokens} and return a mapping from
 *  token name to token type.  Each line has the form {@code name=type};
 *  blank lines are ignored and malformed lines are reported through the
 *  tool's error manager (the map still contains every valid entry).
 */
public Map<String, Integer> load() {
    Map<String, Integer> tokens = new LinkedHashMap<String, Integer>();
    int maxTokenType = -1;
    File fullFile = getImportedVocabFile();
    FileInputStream fis = null;
    BufferedReader br = null;
    Tool tool = g.tool;
    String vocabName = g.getOptionString("tokenVocab");
    try {
        // group 1 = token name (non-greedy up to '='), group 2 = numeric type
        Pattern tokenDefPattern = Pattern.compile("([^\n]+?)[ \\t]*?=[ \\t]*?([0-9]+)");
        fis = new FileInputStream(fullFile);
        InputStreamReader isr;
        if (tool.grammarEncoding != null) {
            isr = new InputStreamReader(fis, tool.grammarEncoding);
        }
        else {
            isr = new InputStreamReader(fis);
        }
        br = new BufferedReader(isr);
        String tokenDef = br.readLine();
        int lineNum = 1;
        while (tokenDef != null) {
            Matcher matcher = tokenDefPattern.matcher(tokenDef);
            if (matcher.find()) {
                String tokenID = matcher.group(1);
                String tokenTypeS = matcher.group(2);
                int tokenType;
                try {
                    tokenType = Integer.valueOf(tokenTypeS);
                }
                catch (NumberFormatException nfe) {
                    tool.errMgr.toolError(ErrorType.TOKENS_FILE_SYNTAX_ERROR,
                                          vocabName + CodeGenerator.VOCAB_FILE_EXTENSION,
                                          " bad token type: " + tokenTypeS,
                                          lineNum);
                    tokenType = Token.INVALID_TOKEN_TYPE;
                }
                tool.log("grammar", "import " + tokenID + "=" + tokenType);
                tokens.put(tokenID, tokenType);
                maxTokenType = Math.max(maxTokenType, tokenType);
            }
            else {
                // ignore blank lines; any other non-matching line is an error
                if (tokenDef.length() > 0) {
                    tool.errMgr.toolError(ErrorType.TOKENS_FILE_SYNTAX_ERROR,
                                          vocabName + CodeGenerator.VOCAB_FILE_EXTENSION,
                                          " bad token def: " + tokenDef,
                                          lineNum);
                }
            }
            // BUG FIX: advance the counter on EVERY line (the original only
            // incremented it for matching lines, so error messages after a
            // bad or blank line reported the wrong line number).
            lineNum++;
            tokenDef = br.readLine();
        }
    }
    catch (FileNotFoundException fnfe) {
        GrammarAST inTree = g.ast.getOptionAST("tokenVocab");
        String inTreeValue = inTree.getToken().getText();
        if (vocabName.equals(inTreeValue)) {
            tool.errMgr.grammarError(ErrorType.CANNOT_FIND_TOKENS_FILE_REFD_IN_GRAMMAR,
                                     g.fileName, inTree.getToken(), fullFile);
        }
        else {
            // must be from -D option on cmd-line, not a token in the tree
            tool.errMgr.toolError(ErrorType.CANNOT_FIND_TOKENS_FILE_GIVEN_ON_CMDLINE,
                                  fullFile, g.name);
        }
    }
    catch (Exception e) {
        tool.errMgr.toolError(ErrorType.ERROR_READING_TOKENS_FILE, e, fullFile, e.getMessage());
    }
    finally {
        try {
            if (br != null) {
                br.close(); // also closes the wrapped reader and stream
            }
            else if (fis != null) {
                // LEAK FIX: reader construction can fail after the stream was
                // opened (e.g. unsupported grammarEncoding); close it directly.
                fis.close();
            }
        }
        catch (IOException ioe) {
            tool.errMgr.toolError(ErrorType.ERROR_READING_TOKENS_FILE, ioe, fullFile, ioe.getMessage());
        }
    }
    return tokens;
}
End of aggregated usage examples.