Use of net.morimekta.providence.serializer.pretty.Token in project providence by morimekta.
Class ProvidenceConfigParser, method parseMapValue.
/**
 * Parse the entries of a map value into the given mutable map, and build the
 * resulting immutable map from the descriptor's builder.
 *
 * @param tokenizer The tokenizer, positioned just after the opening '{'.
 * @param context The config parsing context holding named references.
 * @param descriptor The map type descriptor (key and item types).
 * @param builder Mutable map of already-known entries (e.g. inherited values).
 * @return The built map value.
 * @throws IOException On tokenizer or parse failure.
 */
@SuppressWarnings("unchecked")
Map parseMapValue(Tokenizer tokenizer, ProvidenceConfigContext context, PMap descriptor, Map builder) throws IOException {
    // Consume entries until the closing '}' of the map literal.
    Token token = tokenizer.expect("map key or end");
    while (!token.isSymbol(Token.kMessageEnd)) {
        // Keys are always parsed strictly.
        Object key = parseFieldValue(token, tokenizer, context, descriptor.keyDescriptor(), true);
        tokenizer.expectSymbol("map key value sep", Token.kKeyValueSep);
        token = tokenizer.expect("map value");
        if (UNDEFINED.equals(token.asString())) {
            // An UNDEFINED value removes the entry, e.g. when overriding an
            // inherited map.
            builder.remove(key);
        } else {
            // A value may be a named reference, otherwise parse it in place.
            Object value = context.containsReference(token.asString())
                           ? context.getReference(token.asString(), token, tokenizer)
                           : parseFieldValue(token, tokenizer, context, descriptor.itemDescriptor(), strict);
            if (value != null) {
                builder.put(key, value);
            }
        }
        // Maps do *not* require a separator, but allow a ',' separator
        // between entries and after the last entry.
        token = tokenizer.expect("map key, end or sep");
        if (token.isSymbol(Token.kLineSep1)) {
            token = tokenizer.expect("map key or end");
        }
    }
    return descriptor.builder().putAll(builder).build();
}
Use of net.morimekta.providence.serializer.pretty.Token in project providence by morimekta.
Class ProvidenceConfigParser, method parseDefinitionValue.
/**
 * Parse the value of a 'def' statement. The value may be a number, a string
 * literal, a boolean, binary data (b64/hex), an enum value reference, or a
 * message literal.
 *
 * @param context The config parsing context.
 * @param tokenizer The tokenizer, positioned at the start of the value.
 * @return The parsed value, or null for an unknown (non-strict) reference
 *         whose value was consumed and skipped.
 * @throws IOException On tokenizer or parse failure.
 */
@SuppressWarnings("unchecked")
Object parseDefinitionValue(ProvidenceConfigContext context, Tokenizer tokenizer) throws IOException {
    Token token = tokenizer.expect("Start of def value");
    if (token.isReal()) {
        return Double.parseDouble(token.asString());
    }
    if (token.isInteger()) {
        return Long.parseLong(token.asString());
    }
    if (token.isStringLiteral()) {
        return token.decodeLiteral(strict);
    }
    if (TRUE.equalsIgnoreCase(token.asString())) {
        return Boolean.TRUE;
    }
    if (FALSE.equalsIgnoreCase(token.asString())) {
        return Boolean.FALSE;
    }
    if (Token.B64.equals(token.asString())) {
        tokenizer.expectSymbol("binary data enclosing start", Token.kParamsStart);
        return Binary.fromBase64(tokenizer.readBinary(Token.kParamsEnd));
    }
    if (Token.HEX.equals(token.asString())) {
        tokenizer.expectSymbol("binary data enclosing start", Token.kParamsStart);
        return Binary.fromHexString(tokenizer.readBinary(Token.kParamsEnd));
    }
    if (token.isDoubleQualifiedIdentifier()) {
        // This may be an enum reference, which must be on the form:
        // - package.EnumType.IDENTIFIER
        String id = token.asString();
        int sep = id.lastIndexOf(Token.kIdentifierSep);
        String typeName = id.substring(0, sep);
        String valueName = id.substring(sep + 1);
        try {
            PEnumDescriptor ed = registry.getEnumType(typeName);
            PEnumValue val = ed.findByName(valueName);
            if (val == null && strict) {
                throw new TokenizerException(token, "Unknown %s value: %s", typeName, valueName).setLine(tokenizer.getLine());
            }
            // Note that unknown enum value results in null. Therefore we
            // don't catch null values here.
            return val;
        } catch (IllegalArgumentException e) {
            // No such declared type.
            if (strict) {
                throw new TokenizerException(token, "Unknown enum identifier: %s", typeName).setLine(tokenizer.getLine());
            }
            // Non-strict: skip over the unknown value and define nothing.
            consumeValue(context, tokenizer, token);
            return null;
        } catch (ClassCastException e) {
            // The declared type exists, but is not an enum.
            throw new TokenizerException(token, "Identifier " + id + " does not reference an enum, from " + token.asString()).setLine(tokenizer.getLine());
        }
    }
    if (token.isQualifiedIdentifier()) {
        // Message type.
        PMessageDescriptor descriptor;
        try {
            descriptor = registry.getMessageType(token.asString());
        } catch (IllegalArgumentException e) {
            // - strict mode: all types must be known.
            if (strict) {
                throw new TokenizerException(token, "Unknown declared type: %s", token.asString()).setLine(tokenizer.getLine());
            }
            consumeValue(context, tokenizer, token);
            return null;
        }
        PMessageBuilder builder = descriptor.builder();
        // A ':' introduces a reference to inherit (merge) from before the
        // message body itself.
        if (tokenizer.expectSymbol("message start or inherits", '{', ':') == ':') {
            token = tokenizer.expect("inherits reference");
            PMessage inheritsFrom = resolve(context, token, tokenizer, descriptor);
            if (inheritsFrom == null) {
                throw new TokenizerException(token, "Inheriting from null reference: %s", token.asString()).setLine(tokenizer.getLine());
            }
            builder.merge(inheritsFrom);
            tokenizer.expectSymbol("message start", '{');
        }
        return parseMessage(tokenizer, context, builder);
    }
    throw new TokenizerException(token, "Invalid define value " + token.asString()).setLine(tokenizer.getLine());
}
Use of net.morimekta.providence.serializer.pretty.Token in project providence by morimekta.
Class ThriftProgramParser, method parseEnum.
/**
 * Parse an enum type declaration, starting just after the 'enum' keyword:
 * {@code Name { VALUE [= id] [,|;] ... } [(annotations)]}.
 *
 * @param tokenizer The thrift tokenizer.
 * @param doc_string Documentation collected before the declaration, or null.
 * @return The parsed enum type.
 * @throws IOException On tokenizer or parse failure.
 */
private EnumType parseEnum(ThriftTokenizer tokenizer, String doc_string) throws IOException {
    Token id = tokenizer.expectIdentifier("enum name");
    String enum_name = id.asString();
    if (!allowedNameIdentifier(enum_name)) {
        throw tokenizer.failure(id, "Enum with reserved name: " + enum_name);
    }
    EnumType._Builder enum_type = EnumType.builder();
    if (doc_string != null) {
        enum_type.setDocumentation(doc_string);
        doc_string = null;
    }
    enum_type.setName(enum_name);
    // Values without an explicit '= id' get sequential IDs from here.
    int nextValueID = PEnumDescriptor.DEFAULT_FIRST_VALUE;
    tokenizer.expectSymbol("enum start", Token.kMessageStart);
    // The loop handles the empty-enum case itself: the first expected token
    // may be the closing '}', which is consumed before breaking out. (The
    // previous peek-guard skipped the loop for empty enums and left the '}'
    // token unconsumed.)
    while (true) {
        Token token = tokenizer.expect("enum value or end");
        if (token.isSymbol(Token.kMessageEnd)) {
            break;
        } else if (token.strEquals(kLineCommentStart)) {
            // Accumulate '//' doc comments for the next value.
            doc_string = parseDocLine(tokenizer, doc_string);
        } else if (token.strEquals(kBlockCommentStart)) {
            doc_string = tokenizer.parseDocBlock();
        } else if (token.isIdentifier()) {
            String value_name = token.asString();
            if (!allowedNameIdentifier(value_name)) {
                // Report the offending value name (was mistakenly reporting
                // the enum name).
                throw tokenizer.failure(token, "Enum value with reserved name: " + value_name);
            }
            EnumValue._Builder enum_value = EnumValue.builder();
            // TODO: Validate enum value name. This probably needs a different logic than
            // type names, field names and methods.
            enum_value.setName(value_name);
            if (doc_string != null) {
                enum_value.setDocumentation(doc_string);
                doc_string = null;
            }
            int value_id = nextValueID++;
            if (tokenizer.peek("enum value ID").isSymbol(Token.kFieldValueSep)) {
                tokenizer.next();
                Token v = tokenizer.expectInteger("enum value");
                value_id = (int) v.parseInteger();
                nextValueID = value_id + 1;
            } else if (requireEnumValue) {
                // So the token points at the token that *should* have been '='.
                if (tokenizer.hasNext()) {
                    token = tokenizer.next();
                }
                throw tokenizer.failure(token, "Missing enum value in strict declaration");
            }
            enum_value.setId(value_id);
            // Enum value annotations.
            if (tokenizer.peek("enum value annotation").isSymbol(Token.kParamsStart)) {
                tokenizer.next();
                enum_value.setAnnotations(parseAnnotations(tokenizer, "enum value"));
            }
            enum_type.addToValues(enum_value.build());
            // Optional separator: ',' or ';' between values.
            token = tokenizer.peek("enum value or end");
            if (token.isSymbol(Token.kLineSep1) || token.isSymbol(Token.kLineSep2)) {
                tokenizer.next();
            }
        } else {
            throw tokenizer.failure(token, "Unexpected token: %s", token.asString());
        }
    }
    // Optional type annotations after the closing '}'.
    if (tokenizer.hasNext()) {
        Token token = tokenizer.peek("optional annotations");
        if (token.isSymbol(Token.kParamsStart)) {
            tokenizer.next();
            enum_type.setAnnotations(parseAnnotations(tokenizer, "enum type"));
        }
    }
    return enum_type.build();
}
Use of net.morimekta.providence.serializer.pretty.Token in project providence by morimekta.
Class ThriftProgramParser, method parseNamespace.
/**
 * Parse a namespace declaration, starting just after the 'namespace' keyword:
 * {@code namespace <language> <namespace>}.
 *
 * @param tokenizer The thrift tokenizer.
 * @param namespaces Map of language to namespace to add the result to.
 * @throws IOException On duplicate language or invalid namespace token.
 */
private void parseNamespace(ThriftTokenizer tokenizer, Map<String, String> namespaces) throws IOException {
    Token language = tokenizer.expect("namespace language", Token::isReferenceIdentifier);
    String languageKey = language.asString();
    // Each language may declare its namespace at most once per program.
    if (namespaces.containsKey(languageKey)) {
        throw tokenizer.failure(language, "Namespace for %s already defined.", languageKey);
    }
    Token namespace = tokenizer.expect(
            "namespace",
            t -> VALID_NAMESPACE.matcher(t.asString()).matches() ||
                 VALID_SDI_NAMESPACE.matcher(t.asString()).matches());
    namespaces.put(languageKey, namespace.asString());
}
Use of net.morimekta.providence.serializer.pretty.Token in project providence by morimekta.
Class ThriftProgramParser, method parseIncludes.
/**
 * Parse an include statement, starting just after the 'include' keyword:
 * {@code include "<file>.thrift"}.
 *
 * @param tokenizer The thrift tokenizer.
 * @param includeFiles List that the included file path is appended to.
 * @param currentFile File currently being parsed, for relative lookup.
 * @param includePrograms Set that the included program name is added to.
 * @param includeDirs Directories searched for the included file.
 * @throws IOException If the include is not a thrift file or cannot be found.
 */
private void parseIncludes(ThriftTokenizer tokenizer, List<String> includeFiles, File currentFile, Set<String> includePrograms, Collection<File> includeDirs) throws IOException {
    Token include = tokenizer.expectLiteral("include file");
    // Includes are string literals; decode strictly.
    String path = include.decodeLiteral(true);
    if (!ReflectionUtils.isThriftFile(path)) {
        throw tokenizer.failure(include, "Include not valid for thrift files " + path);
    }
    if (!includeExists(currentFile, path, includeDirs)) {
        throw tokenizer.failure(include, "Included file not found " + path);
    }
    includeFiles.add(path);
    includePrograms.add(ReflectionUtils.programNameFromPath(path));
}
Aggregations