Use of org.eclipse.xtext.parser.antlr.Lexer in project xtext-xtend by eclipse.
Class AbstractSmokeTest, method testSkipTokensInBetweenWithoutResourceSet:
@Test
public void testSkipTokensInBetweenWithoutResourceSet() throws Exception {
	for (String string : smokeTestModels) {
		List<CommonToken> tokenList = Lists.newArrayList();
		{
			Lexer lexer = lexerProvider.get();
			lexer.setCharStream(new ANTLRStringStream(string));
			Token token = lexer.nextToken();
			while (token != Token.EOF_TOKEN) {
				tokenList.add((CommonToken) token);
				token = lexer.nextToken();
			}
		}
		for (CommonToken token : tokenList) {
			int start = token.getStartIndex();
			int length = token.getText().length();
			logProgress(token);
			doParseAndCheckForSmokeWithoutResourceSet(string.substring(0, start) + string.substring(start + length));
		}
	}
}
Use of org.eclipse.xtext.parser.antlr.Lexer in project xtext-xtend by eclipse.
Class SmokeTest, method testResourceUpdateSkipTokensInBetween:
@Test
public void testResourceUpdateSkipTokensInBetween() throws Exception {
	for (String string : smokeTestModels) {
		List<CommonToken> tokenList = Lists.newArrayList();
		{
			Lexer lexer = lexerProvider.get();
			lexer.setCharStream(new ANTLRStringStream(string));
			Token token = lexer.nextToken();
			while (token != Token.EOF_TOKEN) {
				tokenList.add((CommonToken) token);
				token = lexer.nextToken();
			}
		}
		LazyLinkingResource resource = createResource(string);
		CommonToken prev = null;
		for (CommonToken token : tokenList) {
			logProgress(token);
			if (prev == null) {
				compareWithNewResource(resource, 0, token.getText().length(), "");
			} else {
				int offset = prev.getStartIndex();
				int length = token.getText().length();
				if (offset + length < string.length())
					compareWithNewResource(resource, offset, length, prev.getText());
			}
			prev = token;
		}
	}
}
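Both tests above share the same token-collection idiom: drive the injected ANTLR lexer over the model text and buffer every CommonToken until EOF_TOKEN is returned. A minimal sketch of that idiom factored into a helper (the helper name lexAll is hypothetical; Lexer, ANTLRStringStream, Token, CommonToken and Lists are the same types used in the tests above):

// Sketch: collect all tokens the ANTLR lexer produces for the given text.
// The lexer instance would normally come from the injected lexerProvider.
List<CommonToken> lexAll(Lexer lexer, String text) {
	List<CommonToken> result = Lists.newArrayList();
	lexer.setCharStream(new ANTLRStringStream(text));
	for (Token token = lexer.nextToken(); token != Token.EOF_TOKEN; token = lexer.nextToken()) {
		result.add((CommonToken) token);
	}
	return result;
}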
Use of org.eclipse.xtext.parser.antlr.Lexer in project xtext-xtend by eclipse.
Class XtendRuntimeModule, method configureRuntimeLexer:
@Override
public void configureRuntimeLexer(final Binder binder) {
	binder.<Lexer>bind(Lexer.class).annotatedWith(Names.named(LexerBindings.RUNTIME)).to(DisabledAntlrLexer.class);
	final Provider<DisabledAntlrLexer> _function = () -> {
		return new DisabledAntlrLexer(null);
	};
	binder.<DisabledAntlrLexer>bind(DisabledAntlrLexer.class).toProvider(_function);
}
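The binding above registers DisabledAntlrLexer under the RUNTIME name, so a client that needs the runtime lexer requests it through the same binding annotation. A hypothetical injection site, assuming standard Guice field injection (the client class is not taken from the project):

// Sketch: consuming the runtime lexer binding configured above.
import com.google.inject.Inject;
import com.google.inject.name.Named;
import org.eclipse.xtext.parser.antlr.Lexer;
import org.eclipse.xtext.parser.antlr.LexerBindings;

class RuntimeLexerClient {
	// Guice resolves this field to DisabledAntlrLexer because of the @Named binding above.
	@Inject
	@Named(LexerBindings.RUNTIME)
	private Lexer lexer;
}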
Use of org.eclipse.xtext.parser.antlr.Lexer in project xtext-xtend by eclipse.
Class LexingTest, method assertLexing:
protected void assertLexing(String input, Pair<String, String>... expectedTokens) {
	Lexer lexer = new InternalXtendLexer(null);
	CharStream stream = new ANTLRStringStream(input);
	lexer.setCharStream(stream);
	XtextTokenStream tokenStream = new XtextTokenStream(lexer, tokenDefProvider);
	List<?> tokens = tokenStream.getTokens();
	assertEquals(input + " / " + tokens, expectedTokens.length, tokens.size());
	for (int i = 0; i < tokens.size(); i++) {
		Token token = (Token) tokens.get(i);
		assertEquals(token.toString(), expectedTokens[i].getFirst(), token.getText());
		final String expected = expectedTokens[i].getSecond();
		String actual = tokenDefProvider.getTokenDefMap().get(token.getType());
		assertEquals("expected " + expected + " but was " + actual, expected, actual);
	}
}
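A test method calls assertLexing with the input followed by one (token text, token definition) pair per expected token. A hypothetical call, assuming the pairs are created with org.eclipse.xtext.util.Tuples.create (which matches the getFirst()/getSecond() accessors used above) and the usual token definition names of an Xtext-generated lexer (quoted literals for keywords, RULE_WS for whitespace, RULE_ID for identifiers); the concrete values are illustrative, not taken from the original test class:

// Sketch: illustrative assertLexing invocation.
@Test
public void testSimpleDeclaration() {
	assertLexing("val x",
			Tuples.create("val", "'val'"),   // keyword literal
			Tuples.create(" ", "RULE_WS"),   // hidden whitespace token
			Tuples.create("x", "RULE_ID"));  // identifier
}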
Use of org.eclipse.xtext.parser.antlr.Lexer in project xtext-xtend by eclipse.
Class DocumentRewriterTest, method createDocument:
protected XtextDocument createDocument(final String content) throws Exception {
	XtextDocument _xblockexpression = null;
	{
		final IFile file = this._workbenchTestHelper.createFile("Foo", content);
		final Resource resource = this._workbenchTestHelper.getResourceSet().getResource(URI.createPlatformResourceURI(file.getFullPath().toString(), true), true);
		XtendDocumentTokenSource _xtendDocumentTokenSource = new XtendDocumentTokenSource();
		final Procedure1<XtendDocumentTokenSource> _function = (XtendDocumentTokenSource it) -> {
			AntlrTokenDefProvider _antlrTokenDefProvider = new AntlrTokenDefProvider();
			final Procedure1<AntlrTokenDefProvider> _function_1 = (AntlrTokenDefProvider it_1) -> {
				XtendAntlrTokenFileProvider _xtendAntlrTokenFileProvider = new XtendAntlrTokenFileProvider();
				it_1.setAntlrTokenFileProvider(_xtendAntlrTokenFileProvider);
			};
			AntlrTokenDefProvider _doubleArrow = ObjectExtensions.<AntlrTokenDefProvider>operator_doubleArrow(_antlrTokenDefProvider, _function_1);
			it.setTokenDefProvider(_doubleArrow);
			final Provider<Lexer> _function_2 = () -> {
				InternalXtendLexer _internalXtendLexer = new InternalXtendLexer();
				return ((Lexer) _internalXtendLexer);
			};
			it.setLexer(_function_2);
			it.setFlexerFactory(this.flexerFactory);
		};
		final XtendDocumentTokenSource source = ObjectExtensions.<XtendDocumentTokenSource>operator_doubleArrow(_xtendDocumentTokenSource, _function);
		final XtextDocument document = new XtextDocument(source, null, this.outdatedStateManager, this.operationCanceledManager);
		document.set(content);
		document.setInput(((XtextResource) resource));
		_xblockexpression = document;
	}
	return _xblockexpression;
}
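Once created, the document can be used like any other XtextDocument, for example to run a read-only unit of work against the parsed resource. A minimal sketch of such a caller; the readOnly/IUnitOfWork API is standard XtextDocument usage, and the query shown is illustrative rather than part of the test above:

// Sketch: using the document returned by createDocument(...) for a read-only query.
// Assumes it runs inside a test method declared with "throws Exception".
final XtextDocument document = createDocument("class Foo {}");
final String rootClassName = document.readOnly((XtextResource it) -> {
	// IUnitOfWork<String, XtextResource>: report the EClass name of the root element.
	return it.getContents().isEmpty() ? null : it.getContents().get(0).eClass().getName();
});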