Use of net.morimekta.providence.serializer.pretty.Token in the providence project by morimekta.
From the class ThriftProgramParser, the method parseTypedef:
private void parseTypedef(ThriftTokenizer tokenizer, String comment, List<Declaration> declarations, Set<String> includedPrograms) throws IOException {
    String type = parseType(tokenizer, tokenizer.expect("typename"), includedPrograms);
    Token id = tokenizer.expectIdentifier("typedef identifier");
    String name = id.asString();
    if (!allowedNameIdentifier(name)) {
        throw tokenizer.failure(id, "Typedef with reserved name: " + name);
    }
    TypedefType typedef = TypedefType.builder()
                                     .setDocumentation(comment)
                                     .setType(type)
                                     .setName(name)
                                     .build();
    declarations.add(Declaration.withDeclTypedef(typedef));
}
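
For orientation, a minimal sketch of the mapping this method performs. The Thrift input below is hypothetical, invented for illustration; the builder calls are the same ones used in the method itself:

    // Hypothetical input:  typedef i64 Timestamp
    // parseTypedef builds the equivalent of:
    TypedefType typedef = TypedefType.builder()
                                     .setType("i64")        // resolved by parseType(...)
                                     .setName("Timestamp")  // the identifier token
                                     .build();
    declarations.add(Declaration.withDeclTypedef(typedef));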
Use of net.morimekta.providence.serializer.pretty.Token in the providence project by morimekta.
From the class ThriftProgramParser, the method parseInternal:
private ProgramType parseInternal(InputStream in, File file, Collection<File> includeDirs) throws IOException {
    ProgramType._Builder program = ProgramType.builder();
    String programName = ReflectionUtils.programNameFromPath(file.getName());
    if (!VALID_PROGRAM_NAME.matcher(programName).matches()) {
        throw new ParseException("Program name \"%s\" derived from filename \"%s\" is not valid.",
                                 Strings.escape(programName),
                                 Strings.escape(file.getName()));
    }
    program.setProgramName(programName);
    List<String> include_files = new ArrayList<>();
    Set<String> includedPrograms = new HashSet<>();
    Map<String, String> namespaces = new LinkedHashMap<>();
    List<Declaration> declarations = new ArrayList<>();
    ThriftTokenizer tokenizer = new ThriftTokenizer(in);
    boolean has_header = false;
    boolean hasDeclaration = false;
    String doc_string = null;
    Token token;
    while ((token = tokenizer.next()) != null) {
        if (token.strEquals(kLineCommentStart)) {
            doc_string = parseDocLine(tokenizer, doc_string);
            continue;
        } else if (token.strEquals(kBlockCommentStart)) {
            doc_string = tokenizer.parseDocBlock();
            continue;
        }
        String keyword = token.asString();
        if (!Model_Constants.kThriftKeywords.contains(keyword)) {
            throw tokenizer.failure(token, "Unexpected token '%s'", token.asString());
        }
        switch (keyword) {
            case kNamespace:
                if (hasDeclaration) {
                    throw tokenizer.failure(token, "Unexpected token 'namespace', expected type declaration");
                }
                if (doc_string != null && !has_header) {
                    program.setDocumentation(doc_string);
                }
                doc_string = null;
                has_header = true;
                parseNamespace(tokenizer, namespaces);
                break;
            case kInclude:
                if (hasDeclaration) {
                    throw tokenizer.failure(token, "Unexpected token 'include', expected type declaration");
                }
                if (doc_string != null && !has_header) {
                    program.setDocumentation(doc_string);
                }
                doc_string = null;
                has_header = true;
                parseIncludes(tokenizer, include_files, file, includedPrograms, includeDirs);
                break;
            case kTypedef:
                has_header = true;
                hasDeclaration = true;
                parseTypedef(tokenizer, doc_string, declarations, includedPrograms);
                doc_string = null;
                break;
            case kEnum:
                has_header = true;
                hasDeclaration = true;
                EnumType et = parseEnum(tokenizer, doc_string);
                declarations.add(Declaration.withDeclEnum(et));
                doc_string = null;
                break;
            case kStruct:
            case kUnion:
            case kException:
                has_header = true;
                hasDeclaration = true;
                MessageType st = parseMessage(tokenizer, token.asString(), doc_string, includedPrograms);
                declarations.add(Declaration.withDeclStruct(st));
                doc_string = null;
                break;
            case kService:
                has_header = true;
                hasDeclaration = true;
                ServiceType srv = parseService(tokenizer, doc_string, includedPrograms);
                declarations.add(Declaration.withDeclService(srv));
                doc_string = null;
                break;
            case kConst:
                has_header = true;
                hasDeclaration = true;
                ConstType cnst = parseConst(tokenizer, doc_string, includedPrograms);
                declarations.add(Declaration.withDeclConst(cnst));
                doc_string = null;
                break;
            default:
                throw tokenizer.failure(token, "Unexpected token '%s'", Strings.escape(token.asString()));
        }
    }
    if (namespaces.size() > 0) {
        program.setNamespaces(namespaces);
    }
    if (include_files.size() > 0) {
        program.setIncludes(include_files);
    }
    if (declarations.size() > 0) {
        program.setDecl(declarations);
    }
    return program.build();
}
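
The ordering rule in the loop above deserves a concrete example: namespace and include are header statements, and once any declaration has been parsed (hasDeclaration is true), a further header token fails. A hypothetical file, invented for illustration:

    // calculator.thrift (hypothetical):
    //     namespace java com.example.calc    // header, OK before declarations
    //     include "shared.thrift"            // header, OK before declarations
    //     typedef i32 Operand                // first declaration; hasDeclaration = true
    //     namespace py example.calc          // would now throw tokenizer.failure(
    //                                        //   "Unexpected token 'namespace', expected type declaration")

Note also that a leading doc comment becomes the program's own documentation only when it appears before the first header or declaration (the doc_string != null && !has_header check).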
Use of net.morimekta.providence.serializer.pretty.Token in the providence project by morimekta.
From the class ThriftProgramParser, the method parseAnnotations:
private Map<String, String> parseAnnotations(ThriftTokenizer tokenizer, String annotationsOn) throws IOException {
    Map<String, String> annotations = new TreeMap<>();
    char sep = Token.kParamsStart;
    while (sep != Token.kParamsEnd) {
        Token token = tokenizer.expect(annotationsOn + " annotation name", Token::isReferenceIdentifier);
        String name = token.asString();
        sep = tokenizer.expectSymbol(annotationsOn + " annotation KV, sep or end",
                                     Token.kFieldValueSep, Token.kParamsEnd, Token.kLineSep1, Token.kLineSep2);
        if (sep == Token.kFieldValueSep) {
            Token value = tokenizer.expectLiteral(annotationsOn + " annotation value");
            annotations.put(name, value.decodeLiteral(true));
            sep = tokenizer.expectSymbol(annotationsOn + " annotation sep or end",
                                         Token.kParamsEnd, Token.kLineSep1, Token.kLineSep2);
        } else {
            annotations.put(name, "");
        }
    }
    return annotations;
}
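
As a concrete illustration (the annotation block and the "struct" context string below are invented), the method is entered after the opening '(' has already been consumed, and it loops until it consumes the matching ')':

    // Hypothetical annotation block:  (java.immutable = "true", deprecated)
    Map<String, String> annotations = parseAnnotations(tokenizer, "struct");
    // Yields the equivalent of:  { "deprecated" : "", "java.immutable" : "true" }
    // A name with no '=' maps to the empty string, and keys come back
    // sorted because the method collects into a TreeMap.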
Use of net.morimekta.providence.serializer.pretty.Token in the providence project by morimekta.
From the class ThriftProgramParser, the method parseConst:
private ConstType parseConst(ThriftTokenizer tokenizer, String comment, Set<String> includedPrograms) throws IOException {
    Token token = tokenizer.expect("const typename", t -> t.isIdentifier() || t.isQualifiedIdentifier());
    String type = parseType(tokenizer, token, includedPrograms);
    Token id = tokenizer.expectIdentifier("const identifier");
    tokenizer.expectSymbol("const value separator", Token.kFieldValueSep);
    Token value = tokenizer.parseValue();
    if (tokenizer.hasNext()) {
        Token sep = tokenizer.peek("");
        if (sep.isSymbol(Token.kLineSep1) || sep.isSymbol(Token.kLineSep2)) {
            tokenizer.next();
        }
    }
    return ConstType.builder()
                    .setDocumentation(comment)
                    .setName(id.asString())
                    .setType(type)
                    .setValue(value.asString())
                    .setStartLineNo(value.getLineNo())
                    .setStartLinePos(value.getLinePos())
                    .build();
}
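
A minimal sketch of the resulting model object, again with invented input; the setters mirror the method body above:

    // Hypothetical input:  const i32 MAX_RETRIES = 5;
    // parseConst builds the equivalent of:
    ConstType cnst = ConstType.builder()
                              .setName("MAX_RETRIES")
                              .setType("i32")
                              .setValue("5")  // the raw value token; start line/pos come from the same token
                              .build();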
Use of net.morimekta.providence.serializer.pretty.Token in the providence project by morimekta.
From the class ThriftProgramParser, the method parseService:
private ServiceType parseService(ThriftTokenizer tokenizer, String doc_string, Set<String> includedPrograms) throws IOException {
    ServiceType._Builder service = ServiceType.builder();
    if (doc_string != null) {
        service.setDocumentation(doc_string);
        doc_string = null;
    }
    Token identifier = tokenizer.expectIdentifier("service name");
    if (!allowedNameIdentifier(identifier.asString())) {
        throw tokenizer.failure(identifier, "Service with reserved name: " + identifier.asString());
    }
    service.setName(identifier.asString());
    if (tokenizer.peek("service start or extends").strEquals(kExtends)) {
        tokenizer.next();
        service.setExtend(tokenizer.expect("service extending identifier",
                                           t -> t.isIdentifier() || t.isQualifiedIdentifier()).asString());
    }
    tokenizer.expectSymbol("reading service start", Token.kMessageStart);
    Set<String> methodNames = new TreeSet<>();
    while (true) {
        Token token = tokenizer.expect("service method initializer");
        if (token.isSymbol(Token.kMessageEnd)) {
            break;
        } else if (token.strEquals(kLineCommentStart)) {
            doc_string = parseDocLine(tokenizer, doc_string);
            continue;
        } else if (token.strEquals(kBlockCommentStart)) {
            doc_string = tokenizer.parseDocBlock();
            continue;
        }
        FunctionType._Builder method = FunctionType.builder();
        if (doc_string != null) {
            method.setDocumentation(doc_string);
            doc_string = null;
        }
        if (token.strEquals(kOneway)) {
            method.setOneWay(true);
            token = tokenizer.expect("service method type");
        }
        if (!token.strEquals(kVoid)) {
            if (method.isSetOneWay()) {
                throw tokenizer.failure(token, "Oneway methods must have void return type, found '%s'",
                                        Strings.escape(token.asString()));
            }
            method.setReturnType(parseType(tokenizer, token, includedPrograms));
        }
        token = tokenizer.expectIdentifier("method name");
        String name = token.asString();
        if (!allowedNameIdentifier(name)) {
            throw tokenizer.failure(token, "Method with reserved name: " + name);
        }
        String normalized = Strings.camelCase("", name);
        if (methodNames.contains(normalized)) {
            throw tokenizer.failure(token, "Service method " + name + " has normalized name conflict");
        }
        methodNames.add(normalized);
        method.setName(name);
        tokenizer.expectSymbol("method params begin", Token.kParamsStart);
        int nextAutoParamKey = -1;
        while (true) {
            token = tokenizer.expect("method params");
            if (token.isSymbol(Token.kParamsEnd)) {
                break;
            } else if (token.strEquals(kLineCommentStart)) {
                doc_string = parseDocLine(tokenizer, doc_string);
                continue;
            } else if (token.strEquals(kBlockCommentStart)) {
                doc_string = tokenizer.parseDocBlock();
                continue;
            }
            FieldType._Builder field = FieldType.builder();
            if (doc_string != null) {
                field.setDocumentation(doc_string);
                doc_string = null;
            }
            if (token.isInteger()) {
                field.setId((int) token.parseInteger());
                tokenizer.expectSymbol("params kv sep", Token.kKeyValueSep);
                token = tokenizer.expect("param type");
            } else {
                if (requireFieldId) {
                    throw tokenizer.failure(token, "Missing param ID in strict declaration");
                }
                field.setId(nextAutoParamKey--);
            }
            if (PRequirement.OPTIONAL.label.equals(token.asString())) {
                field.setRequirement(FieldRequirement.OPTIONAL);
                token = tokenizer.expect("param type");
            } else if (PRequirement.REQUIRED.label.equals(token.asString())) {
                field.setRequirement(FieldRequirement.REQUIRED);
                token = tokenizer.expect("param type");
            }
            field.setType(parseType(tokenizer, token, includedPrograms));
            token = tokenizer.expectIdentifier("param name");
            name = token.asString();
            if (!allowedNameIdentifier(name)) {
                throw tokenizer.failure(token, "Param with reserved name: " + name);
            }
            field.setName(name);
            // Param annotations.
            if (tokenizer.peek("method param annotation").isSymbol(Token.kParamsStart)) {
                tokenizer.next();
                field.setAnnotations(parseAnnotations(tokenizer, "params"));
            }
            token = tokenizer.peek("method params");
            if (token.isSymbol(Token.kLineSep1) || token.isSymbol(Token.kLineSep2)) {
                tokenizer.next();
            }
            method.addToParams(field.build());
        }  // for each param
        doc_string = null;
        if (tokenizer.peek("possible throws statement").strEquals(kThrows)) {
            tokenizer.next();
            tokenizer.expectSymbol("throws group start", Token.kParamsStart);
            int nextAutoExceptionKey = -1;
            while (true) {
                token = tokenizer.expect("exception key, type or end throws");
                if (token.isSymbol(Token.kParamsEnd)) {
                    break;
                } else if (token.strEquals(kLineCommentStart)) {
                    doc_string = parseDocLine(tokenizer, doc_string);
                    continue;
                } else if (token.strEquals(kBlockCommentStart)) {
                    doc_string = tokenizer.parseDocBlock();
                    continue;
                }
                FieldType._Builder field = FieldType.builder();
                if (doc_string != null) {
                    field.setDocumentation(doc_string);
                    doc_string = null;
                }
                if (token.isInteger()) {
                    field.setId((int) token.parseInteger());
                    tokenizer.expectSymbol("exception KV sep", Token.kKeyValueSep);
                    token = tokenizer.expect("exception type");
                } else {
                    if (requireFieldId) {
                        throw tokenizer.failure(token, "Missing exception ID in strict declaration");
                    }
                    field.setId(nextAutoExceptionKey--);
                }
                field.setType(parseType(tokenizer, token, includedPrograms));
                token = tokenizer.expectIdentifier("exception name");
                name = token.asString();
                if (!allowedNameIdentifier(name)) {
                    throw tokenizer.failure(token, "Thrown field with reserved name: " + name);
                }
                field.setName(name);
                // Exception annotations.
                if (tokenizer.peek("exception annotation start").isSymbol(Token.kParamsStart)) {
                    tokenizer.next();
                    field.setAnnotations(parseAnnotations(tokenizer, "exception"));
                }
                method.addToExceptions(field.build());
                token = tokenizer.peek("method exceptions");
                if (token.isSymbol(Token.kLineSep1) || token.isSymbol(Token.kLineSep2)) {
                    tokenizer.next();
                }
            }
        }
        token = tokenizer.peek("");
        // Method annotations.
        if (token.isSymbol(Token.kParamsStart)) {
            tokenizer.next();
            method.setAnnotations(parseAnnotations(tokenizer, "method"));
            token = tokenizer.peek("method or service end");
        }
        service.addToMethods(method.build());
        if (token.isSymbol(Token.kLineSep1) || token.isSymbol(Token.kLineSep2)) {
            tokenizer.next();
        }
    }
    if (tokenizer.hasNext()) {
        Token token = tokenizer.peek("optional annotations");
        if (token.isSymbol(Token.kParamsStart)) {
            // Service annotations.
            tokenizer.next();
            service.setAnnotations(parseAnnotations(tokenizer, "service"));
        }
    }
    return service.build();
}
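
To tie the pieces together, a hypothetical service declaration and the builder calls the loops above effectively make for it (all identifiers invented for illustration):

    // Hypothetical input:
    //     service Calculator extends shared.BaseService {
    //         oneway void ping();
    //         i32 add(1: i32 a, 2: i32 b) throws (1: CalcError e);
    //     }
    //
    // For 'add', the method loop performs the equivalent of:
    //     method.setName("add");
    //     method.setReturnType("i32");  // via parseType(...)
    //     method.addToParams(FieldType.builder().setId(1).setType("i32").setName("a").build());
    //     method.addToParams(FieldType.builder().setId(2).setType("i32").setName("b").build());
    //     method.addToExceptions(FieldType.builder().setId(1).setType("CalcError").setName("e").build());
    //     service.addToMethods(method.build());
    // 'ping' additionally sets method.setOneWay(true) and leaves the return type unset.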