Search in sources:

Example 1 with ThriftTokenizer

Use of net.morimekta.providence.reflect.parser.internal.ThriftTokenizer in the project providence by morimekta.

The class ThriftProgramParser, method parseMessage.

/**
 * Parses the body of a struct, union or exception declaration into a
 * {@link MessageType} model.
 *
 * @param tokenizer        Tokenizer positioned just after the variant keyword.
 * @param variant          The declaration keyword: "struct", "union" or "exception".
 * @param comment          Documentation comment collected before the declaration, or null.
 * @param includedPrograms Program names visible via includes, passed through to type resolution.
 * @return The built message type model.
 * @throws IOException On read errors, or a tokenizer failure for invalid input.
 */
private MessageType parseMessage(ThriftTokenizer tokenizer, String variant, String comment, Set<String> includedPrograms) throws IOException {
    MessageType._Builder struct = MessageType.builder();
    if (comment != null) {
        struct.setDocumentation(comment);
        // The same variable is reused below to accumulate per-field doc comments.
        comment = null;
    }
    boolean union = variant.equals("union");
    // "struct" is the default variant; only non-struct variants are stored explicitly.
    if (!variant.equals("struct")) {
        struct.setVariant(MessageVariant.valueForName(variant.toUpperCase(Locale.US)));
    }
    Token nameToken = tokenizer.expectIdentifier("message name identifier");
    String name = nameToken.asString();
    if (!allowedNameIdentifier(name)) {
        throw tokenizer.failure(nameToken, "Message with reserved name: " + name);
    }
    struct.setName(name);
    // Fields declared without an explicit id get auto-assigned negative ids: -1, -2, ...
    int nextAutoFieldKey = -1;
    tokenizer.expectSymbol("message start", Token.kMessageStart);
    // Track names and ids seen so far to reject duplicates within this message.
    Set<String> fieldNames = new HashSet<>();
    // camelCase("get", name) variants of each field name, used to catch two
    // distinct thrift names that would map to the same camel-cased accessor
    // (presumably the generated getter name -- verify against the generator).
    Set<String> fieldNameVariants = new HashSet<>();
    Set<Integer> fieldIds = new HashSet<>();
    while (true) {
        Token token = tokenizer.expect("field def or message end");
        if (token.isSymbol(Token.kMessageEnd)) {
            break;
        } else if (token.strEquals(kLineCommentStart)) {
            // Line comment: accumulate as documentation for the next field.
            comment = parseDocLine(tokenizer, comment);
            continue;
        } else if (token.strEquals(kBlockCommentStart)) {
            // Block comment replaces any previously collected doc string.
            comment = tokenizer.parseDocBlock();
            continue;
        }
        FieldType._Builder field = FieldType.builder();
        field.setDocumentation(comment);
        comment = null;
        if (token.isInteger()) {
            // Explicit field id form, e.g. "1: ...".
            int fId = (int) token.parseInteger();
            if (fId < 1) {
                throw tokenizer.failure(token, "Negative or 0 field id " + fId + " not allowed.");
            }
            if (fieldIds.contains(fId)) {
                throw tokenizer.failure(token, "Field id " + fId + " already exists in " + struct.build().getName());
            }
            fieldIds.add(fId);
            field.setId(fId);
            tokenizer.expectSymbol("field id sep", Token.kKeyValueSep);
            token = tokenizer.expect("field requirement or type", t -> t.isIdentifier() || t.isQualifiedIdentifier());
        } else {
            if (requireFieldId) {
                throw tokenizer.failure(token, "Missing field ID in strict declaration");
            }
            // No explicit id: auto-assign the next negative id.
            field.setId(nextAutoFieldKey--);
        }
        if (token.strEquals(kRequired)) {
            if (union) {
                // Union fields are mutually exclusive; "required" is rejected.
                throw tokenizer.failure(token, "Found required field in union");
            }
            field.setRequirement(FieldRequirement.REQUIRED);
            token = tokenizer.expect("field type", t -> t.isIdentifier() || t.isQualifiedIdentifier());
        } else if (token.strEquals(kOptional)) {
            if (!union) {
                // All union fields are optional regardless.
                field.setRequirement(FieldRequirement.OPTIONAL);
            }
            token = tokenizer.expect("field type", t -> t.isIdentifier() || t.isQualifiedIdentifier());
        }
        // Get type.... This is mandatory.
        field.setType(parseType(tokenizer, token, includedPrograms));
        nameToken = tokenizer.expectIdentifier("field name");
        String fName = nameToken.asString();
        if (!allowedNameIdentifier(fName)) {
            throw tokenizer.failure(nameToken, "Field with reserved name: " + fName);
        }
        if (fieldNames.contains(fName)) {
            throw tokenizer.failure(nameToken, "Field %s already exists in %s", fName, struct.build().getName());
        }
        if (fieldNameVariants.contains(Strings.camelCase("get", fName))) {
            // Different thrift names can collapse to the same camel-cased variant.
            throw tokenizer.failure(nameToken, "Field %s has field with conflicting name in %s", fName, struct.build().getName());
        }
        fieldNames.add(fName);
        fieldNameVariants.add(Strings.camelCase("get", fName));
        field.setName(fName);
        token = tokenizer.peek("default sep, annotation, field def or message end");
        // Default value: "= <value>" after the field name.
        if (token.isSymbol(Token.kFieldValueSep)) {
            tokenizer.next();
            Token defaultValue = tokenizer.parseValue();
            field.setDefaultValue(defaultValue.asString());
            // Record the position of the default value, presumably for later
            // error reporting when the value is validated -- verify at call sites.
            field.setStartLineNo(defaultValue.getLineNo());
            field.setStartLinePos(defaultValue.getLinePos());
            token = tokenizer.peek("field annotation, def or message end");
        }
        // Annotation: "(...)" after the field declaration.
        if (token.isSymbol(Token.kParamsStart)) {
            tokenizer.next();
            field.setAnnotations(parseAnnotations(tokenizer, "field"));
            token = tokenizer.peek("field def or message end");
        }
        struct.addToFields(field.build());
        // Optional field separator token is consumed; its absence is also fine.
        if (token.isSymbol(Token.kLineSep1) || token.isSymbol(Token.kLineSep2)) {
            tokenizer.next();
        }
    }
    // Optional message-level annotations after the closing brace.
    if (tokenizer.hasNext()) {
        Token token = tokenizer.peek("optional annotations");
        if (token.isSymbol(Token.kParamsStart)) {
            tokenizer.next();
            struct.setAnnotations(parseAnnotations(tokenizer, "message"));
        }
    }
    return struct.build();
}
Also used : TypedefType(net.morimekta.providence.model.TypedefType) ThriftTokenizer.kNamespace(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kNamespace) FieldRequirement(net.morimekta.providence.model.FieldRequirement) EnumValue(net.morimekta.providence.model.EnumValue) Locale(java.util.Locale) Map(java.util.Map) Strings(net.morimekta.util.Strings) Token(net.morimekta.providence.serializer.pretty.Token) EnumType(net.morimekta.providence.model.EnumType) Collection(java.util.Collection) Declaration(net.morimekta.providence.model.Declaration) Set(java.util.Set) ThriftTokenizer.kInclude(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kInclude) ThriftTokenizer(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer) ThriftTokenizer.kService(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kService) List(java.util.List) ThriftTokenizer.kOneway(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kOneway) ThriftTokenizer.kConst(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kConst) TokenizerException(net.morimekta.providence.serializer.pretty.TokenizerException) Pattern(java.util.regex.Pattern) ServiceType(net.morimekta.providence.model.ServiceType) FieldType(net.morimekta.providence.model.FieldType) ProgramType(net.morimekta.providence.model.ProgramType) PEnumDescriptor(net.morimekta.providence.descriptor.PEnumDescriptor) Model_Constants(net.morimekta.providence.model.Model_Constants) ThriftTokenizer.kEnum(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kEnum) ThriftTokenizer.kException(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kException) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) IOUtils(net.morimekta.util.io.IOUtils) ThriftTokenizer.kLineCommentStart(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kLineCommentStart) 
ThriftTokenizer.kVoid(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kVoid) ThriftTokenizer.kUnion(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kUnion) ThriftTokenizer.kOptional(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kOptional) ThriftTokenizer.kRequired(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kRequired) IOException(java.io.IOException) ConstType(net.morimekta.providence.model.ConstType) ThriftTokenizer.kTypedef(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kTypedef) File(java.io.File) ReflectionUtils(net.morimekta.providence.reflect.util.ReflectionUtils) MessageVariant(net.morimekta.providence.model.MessageVariant) MessageType(net.morimekta.providence.model.MessageType) TreeMap(java.util.TreeMap) ThriftTokenizer.kExtends(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kExtends) ThriftTokenizer.kThrows(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kThrows) FunctionType(net.morimekta.providence.model.FunctionType) PRequirement(net.morimekta.providence.descriptor.PRequirement) ThriftTokenizer.kBlockCommentStart(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kBlockCommentStart) ThriftTokenizer.kStruct(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer.kStruct) InputStream(java.io.InputStream) Token(net.morimekta.providence.serializer.pretty.Token) FieldType(net.morimekta.providence.model.FieldType) MessageType(net.morimekta.providence.model.MessageType) HashSet(java.util.HashSet)

Example 2 with ThriftTokenizer

Use of net.morimekta.providence.reflect.parser.internal.ThriftTokenizer in the project providence by morimekta.

The class ThriftProgramParser, method parseInternal.

/**
 * Parses a complete thrift program (one .thrift file) into a {@link ProgramType}
 * model: header section (namespaces, includes) followed by type declarations.
 *
 * @param in          Stream with the thrift file content.
 * @param file        The file being parsed; its name determines the program name.
 * @param includeDirs Directories searched when resolving include statements.
 * @return The built program model.
 * @throws IOException On read errors, or a tokenizer/parse failure for invalid input.
 */
private ProgramType parseInternal(InputStream in, File file, Collection<File> includeDirs) throws IOException {
    ProgramType._Builder program = ProgramType.builder();
    String programName = ReflectionUtils.programNameFromPath(file.getName());
    if (!VALID_PROGRAM_NAME.matcher(programName).matches()) {
        throw new ParseException("Program name \"%s\" derived from filename \"%s\" is not valid.", Strings.escape(programName), Strings.escape(file.getName()));
    }
    program.setProgramName(programName);
    List<String> include_files = new ArrayList<>();
    Set<String> includedPrograms = new HashSet<>();
    // LinkedHashMap: namespaces are kept in declaration order.
    Map<String, String> namespaces = new LinkedHashMap<>();
    List<Declaration> declarations = new ArrayList<>();
    ThriftTokenizer tokenizer = new ThriftTokenizer(in);
    // has_header: set once any element (header or declaration) has been seen;
    // after that, a doc comment can no longer become the program documentation.
    boolean has_header = false;
    // hasDeclaration: set once the first type declaration is seen; namespace and
    // include statements are rejected after this point (headers must come first).
    boolean hasDeclaration = false;
    String doc_string = null;
    Token token;
    while ((token = tokenizer.next()) != null) {
        if (token.strEquals(kLineCommentStart)) {
            // Accumulate line comments as documentation for the next element.
            doc_string = parseDocLine(tokenizer, doc_string);
            continue;
        } else if (token.strEquals(kBlockCommentStart)) {
            // A block comment replaces any previously collected doc string.
            doc_string = tokenizer.parseDocBlock();
            continue;
        }
        String keyword = token.asString();
        if (!Model_Constants.kThriftKeywords.contains(keyword)) {
            throw tokenizer.failure(token, "Unexpected token \'%s\'", token.asString());
        }
        switch(keyword) {
            case kNamespace:
                if (hasDeclaration) {
                    throw tokenizer.failure(token, "Unexpected token 'namespace', expected type declaration");
                }
                // A doc comment before the first header documents the program itself.
                if (doc_string != null && !has_header) {
                    program.setDocumentation(doc_string);
                }
                doc_string = null;
                has_header = true;
                parseNamespace(tokenizer, namespaces);
                break;
            case kInclude:
                if (hasDeclaration) {
                    throw tokenizer.failure(token, "Unexpected token 'include', expected type declaration");
                }
                if (doc_string != null && !has_header) {
                    program.setDocumentation(doc_string);
                }
                doc_string = null;
                has_header = true;
                parseIncludes(tokenizer, include_files, file, includedPrograms, includeDirs);
                break;
            case kTypedef:
                has_header = true;
                hasDeclaration = true;
                parseTypedef(tokenizer, doc_string, declarations, includedPrograms);
                doc_string = null;
                break;
            case kEnum:
                has_header = true;
                hasDeclaration = true;
                EnumType et = parseEnum(tokenizer, doc_string);
                declarations.add(Declaration.withDeclEnum(et));
                doc_string = null;
                break;
            case kStruct:
            case kUnion:
            case kException:
                // All three message variants share one parse path; the keyword
                // itself is passed along as the variant.
                has_header = true;
                hasDeclaration = true;
                MessageType st = parseMessage(tokenizer, token.asString(), doc_string, includedPrograms);
                declarations.add(Declaration.withDeclStruct(st));
                doc_string = null;
                break;
            case kService:
                has_header = true;
                hasDeclaration = true;
                ServiceType srv = parseService(tokenizer, doc_string, includedPrograms);
                declarations.add(Declaration.withDeclService(srv));
                doc_string = null;
                break;
            case kConst:
                has_header = true;
                hasDeclaration = true;
                ConstType cnst = parseConst(tokenizer, doc_string, includedPrograms);
                declarations.add(Declaration.withDeclConst(cnst));
                doc_string = null;
                break;
            default:
                // Keyword is in kThriftKeywords but not handled above.
                throw tokenizer.failure(token, "Unexpected token \'%s\'", Strings.escape(token.asString()));
        }
    }
    // Only set the optional collections that are non-empty.
    if (namespaces.size() > 0) {
        program.setNamespaces(namespaces);
    }
    if (include_files.size() > 0) {
        program.setIncludes(include_files);
    }
    if (declarations.size() > 0) {
        program.setDecl(declarations);
    }
    return program.build();
}
Also used : ThriftTokenizer(net.morimekta.providence.reflect.parser.internal.ThriftTokenizer) ArrayList(java.util.ArrayList) Token(net.morimekta.providence.serializer.pretty.Token) LinkedHashMap(java.util.LinkedHashMap) EnumType(net.morimekta.providence.model.EnumType) ServiceType(net.morimekta.providence.model.ServiceType) ProgramType(net.morimekta.providence.model.ProgramType) Declaration(net.morimekta.providence.model.Declaration) ConstType(net.morimekta.providence.model.ConstType) MessageType(net.morimekta.providence.model.MessageType) HashSet(java.util.HashSet)

Aggregations

ArrayList (java.util.ArrayList)2 HashSet (java.util.HashSet)2 LinkedHashMap (java.util.LinkedHashMap)2 ConstType (net.morimekta.providence.model.ConstType)2 Declaration (net.morimekta.providence.model.Declaration)2 EnumType (net.morimekta.providence.model.EnumType)2 MessageType (net.morimekta.providence.model.MessageType)2 ProgramType (net.morimekta.providence.model.ProgramType)2 ServiceType (net.morimekta.providence.model.ServiceType)2 ThriftTokenizer (net.morimekta.providence.reflect.parser.internal.ThriftTokenizer)2 Token (net.morimekta.providence.serializer.pretty.Token)2 File (java.io.File)1 IOException (java.io.IOException)1 InputStream (java.io.InputStream)1 Collection (java.util.Collection)1 List (java.util.List)1 Locale (java.util.Locale)1 Map (java.util.Map)1 Set (java.util.Set)1 TreeMap (java.util.TreeMap)1