Use of java.io.StreamTokenizer in project jop by jop-devel.
Class Jopa, method pass1.
/**
 * Parse the assembler file and build the symbol table (first pass).
 * During this pass the assembler code, the symbol table, and the
 * variable table are built.
 */
public void pass1() {
    StreamTokenizer in = getSt();
    int pc = 0;
    try {
        while (in.nextToken() != StreamTokenizer.TT_EOF) {
            in.pushBack();
            Line l = getLine(in);
            if (l.jinstr == -1) {
                if (l.label != null) {
                    if (symMap.containsKey(l.label)) {
                        error(in, "symbol " + l.label + " already defined");
                    } else {
                        symMap.put(l.label, new Integer(pc));
                    }
                }
                if (l.special == '=') {
                    if (l.symVal == null) {
                        error(in, "missing symbol for '='");
                    } else {
                        if (symMap.containsKey(l.symVal)) {
                            error(in, "symbol " + l.symVal + " already defined");
                        } else {
                            symMap.put(l.symVal, new Integer(l.intVal));
                        }
                    }
                } else if (l.special == '?') {
                    if (symMap.containsKey(l.symVal)) {
                        error(in, "symbol " + l.symVal + " already defined");
                    } else {
                        symMap.put(l.symVal, new Integer(memcnt++));
                        varList.add(l.symVal);
                    }
                }
            } else {
                jinstrMap.put(l.jinstr, pc);
            }
            if (l.instr != null) {
                ++pc;
                instructions.add(l);
            }
        }
    } catch (IOException e) {
        System.out.println(e.getMessage());
        System.exit(-1);
    }
    //System.out.println(symMap);
}
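The loop above peeks at the next token only to detect end of file and then hands the unconsumed token back to getLine via pushBack. Here is a minimal, self-contained sketch of that peek-then-pushBack idiom using only plain JDK classes; the class name and the input string are made up and are not JOP project code:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class PushBackSketch {
    public static void main(String[] args) throws IOException {
        StreamTokenizer in = new StreamTokenizer(new StringReader("loop: add nxt"));
        while (in.nextToken() != StreamTokenizer.TT_EOF) {
            in.pushBack();    // undo the look-ahead done in the loop condition
            in.nextToken();   // the "line handler" re-reads the very same token
            if (in.ttype == StreamTokenizer.TT_WORD) {
                System.out.println("word: " + in.sval);
            } else {
                System.out.println("char: " + (char) in.ttype);
            }
        }
    }
}

With the default syntax table, this prints the words "loop", "add", "nxt" and the ordinary character ':'.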
Use of java.io.StreamTokenizer in project jop by jop-devel.
Class Jopa, method getSt.
private StreamTokenizer getSt() {
    try {
        FileReader fileIn = new FileReader(srcDir + fname);
        StreamTokenizer in = new StreamTokenizer(fileIn);
        in.wordChars('_', '_');
        in.wordChars(':', ':');
        in.eolIsSignificant(true);
        in.slashStarComments(true);
        in.slashSlashComments(true);
        in.lowerCaseMode(true);
        return in;
    } catch (IOException e) {
        System.out.println(e.getMessage());
        System.exit(-1);
        return null;
    }
}
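The effect of this configuration is easiest to see on a concrete input. Below is a hedged sketch that applies the same settings to a StringReader and prints the token stream; the class name and the sample text are made up, not taken from the JOP source tree:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class JopaTokenizerSketch {
    public static void main(String[] args) throws IOException {
        StreamTokenizer in = new StreamTokenizer(
                new StringReader("loop:\tADD // increment\nvar_cnt ?\n"));
        in.wordChars('_', '_');        // '_' stays inside identifiers, e.g. "var_cnt"
        in.wordChars(':', ':');        // labels such as "loop:" come back as one word
        in.eolIsSignificant(true);     // line ends are reported as TT_EOL
        in.slashStarComments(true);    // /* ... */ comments are skipped
        in.slashSlashComments(true);   // // comments are skipped
        in.lowerCaseMode(true);        // "ADD" is reported as "add"
        while (in.nextToken() != StreamTokenizer.TT_EOF) {
            if (in.ttype == StreamTokenizer.TT_EOL) {
                System.out.println("<EOL>");
            } else if (in.ttype == StreamTokenizer.TT_WORD) {
                System.out.println(in.sval);           // e.g. loop:, add, var_cnt
            } else {
                System.out.println((char) in.ttype);   // e.g. ?
            }
        }
    }
}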
Use of java.io.StreamTokenizer in project openhab1-addons by openhab.
Class ExecuteCommandJob, method parseCommand.
/**
 * Parses a <code>command</code>. Utilizes the {@link StreamTokenizer} which
 * takes care of quoted Strings as well.
 *
 * @param command the command to parse
 * @return the tokenized command which can be processed by the
 *         <code>ConsoleInterpreter</code>
 *
 * @see org.openhab.io.console.ConsoleInterpreter
 */
protected String[] parseCommand(String command) {
    logger.trace("going to parse command '{}'", command);
    // further handling here ...
    if (command.startsWith(">")) {
        return new String[] { ">", command.substring(1).trim() };
    }
    StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(command));
    // treat all characters as ordinary, including digits, so we never
    // have to deal with doubles
    tokenizer.resetSyntax();
    tokenizer.wordChars(0x23, 0xFF);
    tokenizer.whitespaceChars(0x00, 0x20);
    tokenizer.quoteChar('"');
    List<String> tokens = new ArrayList<String>();
    try {
        int tokenType = 0;
        while (tokenType != StreamTokenizer.TT_EOF && tokenType != StreamTokenizer.TT_EOL) {
            tokenType = tokenizer.nextToken();
            String token = "";
            switch (tokenType) {
                case StreamTokenizer.TT_WORD:
                case 34:
                    /* quoted String */
                    token = tokenizer.sval;
                    break;
            }
            tokens.add(token);
            logger.trace("read value {} from the given command", token);
        }
    } catch (IOException ioe) {
    }
    return tokens.toArray(new String[0]);
}
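For illustration, the same tokenizer setup can be run standalone to see how quoteChar('"') keeps a quoted argument together as one token. This is a minimal sketch, not openHAB code; the class name and the command string (item name and message) are hypothetical:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

public class CommandSplitSketch {
    public static void main(String[] args) throws IOException {
        String command = "send Message_Board \"Hello openHAB\"";
        StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(command));
        tokenizer.resetSyntax();                  // every character becomes ordinary: no numbers, no comments
        tokenizer.wordChars(0x23, 0xFF);          // '#' (0x23) and above are word characters
        tokenizer.whitespaceChars(0x00, 0x20);    // control characters and space separate tokens
        tokenizer.quoteChar('"');                 // a double-quoted phrase is returned as a single token
        List<String> tokens = new ArrayList<String>();
        int type;
        while ((type = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) {
            if (type == StreamTokenizer.TT_WORD || type == '"') {
                tokens.add(tokenizer.sval);
            }
        }
        System.out.println(tokens);   // [send, Message_Board, Hello openHAB]
    }
}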
Use of java.io.StreamTokenizer in project robovm by robovm.
Class OldStreamTokenizerTest, method test_harmonyRegressionTest.
public void test_harmonyRegressionTest() {
    byte[] data = new byte[] { (byte) '-' };
    StreamTokenizer tokenizer = new StreamTokenizer(new ByteArrayInputStream(data));
    try {
        tokenizer.nextToken();
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
    String result = tokenizer.toString();
    Assert.assertEquals("Token['-'], line 1", result);
}
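The assertion relies on StreamTokenizer.toString(), which reports the current token and its line number. A quick sketch of that format for word and number tokens; the exact strings are an assumption based on the common JDK implementation, so treat the comments as a rough guide:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class ToStringSketch {
    public static void main(String[] args) throws IOException {
        StreamTokenizer t = new StreamTokenizer(new StringReader("abc 42"));
        t.nextToken();
        System.out.println(t);   // roughly: Token[abc], line 1
        t.nextToken();
        System.out.println(t);   // roughly: Token[n=42.0], line 1 (parseNumbers is on by default)
    }
}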
Use of java.io.StreamTokenizer in project robovm by robovm.
Class OldStreamTokenizerTest, method test_nextToken.
public void test_nextToken() throws IOException {
    st = new StreamTokenizer(new Support_StringReader("\n \r\n#"));
    // make \n ordinary
    st.ordinaryChar('\n');
    st.eolIsSignificant(true);
    assertTrue("Wrong token 2,1", st.nextToken() == '\n');
    assertTrue("Wrong token 2,2", st.nextToken() == '\n');
    assertEquals("Wrong token 2,3", '#', st.nextToken());
    Support_ASimpleInputStream sis = new Support_ASimpleInputStream();
    sis.throwExceptionOnNextUse = true;
    st = new StreamTokenizer(sis);
    try {
        st.nextToken();
        fail("IOException expected.");
    } catch (IOException e) {
        // Expected.
    }
}
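A detail worth spelling out: both line-end assertions compare against '\n' because the constant TT_EOL is itself defined as '\n'. After ordinaryChar('\n'), the first newline is returned as an ordinary character, while the "\r\n" sequence is returned as TT_EOL. The small sketch below shows this with only JDK classes; the test-support reader is not needed for this part, and the class name is made up:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class EolSketch {
    public static void main(String[] args) throws IOException {
        StreamTokenizer st = new StreamTokenizer(new StringReader("\n \r\n#"));
        st.ordinaryChar('\n');       // first '\n' is reported as the ordinary char '\n'
        st.eolIsSignificant(true);   // "\r\n" is reported as TT_EOL, which equals '\n'
        System.out.println(st.nextToken() == '\n');                    // true
        System.out.println(st.nextToken() == StreamTokenizer.TT_EOL);  // true
        System.out.println((char) st.nextToken());                     // '#'
    }
}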