Use of com.lingtuan.firefly.custom.contact.HanziToPinyin3.Token in project bayou by capergroup.
Class EvidenceLLexerTests, method testLexColon.
/**
 * Lexing a single colon should yield exactly one token whose lexeme is ":"
 * and whose type is {@code TokenTypeColon}.
 */
@Test
public void testLexColon() {
    Iterator<Token> produced = makeLexer().lex(":").iterator();
    Assert.assertTrue(produced.hasNext());
    Token colonToken = produced.next();
    Assert.assertEquals(":", colonToken.getLexeme());
    Assert.assertTrue(colonToken.getType() instanceof TokenTypeColon);
}
Use of com.lingtuan.firefly.custom.contact.HanziToPinyin3.Token in project bayou by capergroup.
Class EvidenceLLexerTests, method testLexIdents.
/**
 * Lexing two whitespace-separated identifiers should yield exactly two
 * identifier tokens with the expected lexemes, in order.
 */
@Test
public void testLexIdents() {
    EvidenceLLexer lexer = makeLexer();
    Iterator<Token> tokens = lexer.lex("ident1 ident2").iterator();

    // First token: "ident1".
    Assert.assertTrue(tokens.hasNext());
    Token token = tokens.next();
    Assert.assertEquals("ident1", token.getLexeme());
    Assert.assertTrue(token.getType() instanceof TokenTypeIdentifier);

    // Second token: "ident2". Assert hasNext() first so a short token stream
    // fails with an assertion error rather than a NoSuchElementException.
    Assert.assertTrue(tokens.hasNext());
    token = tokens.next();
    Assert.assertEquals("ident2", token.getLexeme());
    Assert.assertTrue(token.getType() instanceof TokenTypeIdentifier);

    // The lexer must not produce anything beyond the two identifiers.
    Assert.assertFalse(tokens.hasNext());
}
Use of com.lingtuan.firefly.custom.contact.HanziToPinyin3.Token in project bayou by capergroup.
Class EvidenceLLexerTests, method testLexIdent.
/**
 * Lexing a lone identifier should produce a single identifier token whose
 * lexeme is the input string.
 */
@Test
public void testLexIdent() {
    Iterator<Token> produced = makeLexer().lex("ident").iterator();
    Assert.assertTrue(produced.hasNext());
    Token identToken = produced.next();
    Assert.assertEquals("ident", identToken.getLexeme());
    Assert.assertTrue(identToken.getType() instanceof TokenTypeIdentifier);
}
Use of com.lingtuan.firefly.custom.contact.HanziToPinyin3.Token in project bayou by capergroup.
Class EvidenceLLexerTests, method testLexEmpty.
/**
 * Lexing the empty string should produce no tokens at all.
 */
@Test
public void testLexEmpty() {
    Iterator<Token> produced = makeLexer().lex("").iterator();
    Assert.assertFalse(produced.hasNext());
}
Use of com.lingtuan.firefly.custom.contact.HanziToPinyin3.Token in project java-docs-samples by GoogleCloudPlatform.
Class Analyze, method analyzeSyntaxText.
/**
 * Runs Cloud Natural Language syntax analysis on {@code text}, prints each
 * token's text, part of speech, and dependency edge, and returns the tokens.
 *
 * @param text the plain-text document to analyze
 * @return the list of tokens produced by the syntax analysis
 * @throws Exception if the service client cannot be created or the API call fails
 */
public static List<Token> analyzeSyntaxText(String text) throws Exception {
    // Instantiate the Language client com.google.cloud.language.v1.LanguageServiceClient
    try (LanguageServiceClient languageClient = LanguageServiceClient.create()) {
        Document document =
            Document.newBuilder().setContent(text).setType(Type.PLAIN_TEXT).build();
        AnalyzeSyntaxRequest syntaxRequest =
            AnalyzeSyntaxRequest.newBuilder()
                .setDocument(document)
                .setEncodingType(EncodingType.UTF16)
                .build();
        // analyze the syntax in the given text
        AnalyzeSyntaxResponse syntaxResponse = languageClient.analyzeSyntax(syntaxRequest);
        // print the response
        for (Token syntaxToken : syntaxResponse.getTokensList()) {
            System.out.printf("\tText: %s\n", syntaxToken.getText().getContent());
            System.out.printf("\tBeginOffset: %d\n", syntaxToken.getText().getBeginOffset());
            System.out.printf("Lemma: %s\n", syntaxToken.getLemma());
            System.out.printf("PartOfSpeechTag: %s\n", syntaxToken.getPartOfSpeech().getTag());
            System.out.printf("\tAspect: %s\n", syntaxToken.getPartOfSpeech().getAspect());
            System.out.printf("\tCase: %s\n", syntaxToken.getPartOfSpeech().getCase());
            System.out.printf("\tForm: %s\n", syntaxToken.getPartOfSpeech().getForm());
            System.out.printf("\tGender: %s\n", syntaxToken.getPartOfSpeech().getGender());
            System.out.printf("\tMood: %s\n", syntaxToken.getPartOfSpeech().getMood());
            System.out.printf("\tNumber: %s\n", syntaxToken.getPartOfSpeech().getNumber());
            System.out.printf("\tPerson: %s\n", syntaxToken.getPartOfSpeech().getPerson());
            System.out.printf("\tProper: %s\n", syntaxToken.getPartOfSpeech().getProper());
            System.out.printf("\tReciprocity: %s\n", syntaxToken.getPartOfSpeech().getReciprocity());
            System.out.printf("\tTense: %s\n", syntaxToken.getPartOfSpeech().getTense());
            System.out.printf("\tVoice: %s\n", syntaxToken.getPartOfSpeech().getVoice());
            System.out.println("DependencyEdge");
            System.out.printf("\tHeadTokenIndex: %d\n", syntaxToken.getDependencyEdge().getHeadTokenIndex());
            System.out.printf("\tLabel: %s\n\n", syntaxToken.getDependencyEdge().getLabel());
        }
        return syntaxResponse.getTokensList();
    }
    // [END analyze_syntax_text]
}
Aggregations