use of net.morimekta.providence.serializer.pretty.TokenizerException in project providence by morimekta.
the class SerializerTest method testSerializer.
/**
* Test that the serializer can serialize and deserialize a test-set of
* random data. This is not testing backward compatibility of the
* serializer.
*
* @param serializer The serializer to test.
*/
private void testSerializer(Serializer serializer) throws IOException {
    try {
        // Just a sanity check.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ByteArrayInputStream bais;
        int size;
        // simple message.
        {
            baos.reset();
            size = serializer.serialize(baos, operation);
            assertEquals(baos.size(), size);
            bais = new ByteArrayInputStream(baos.toByteArray());
            Operation actual = serializer.deserialize(bais, Operation.kDescriptor);
            assertEquals(actual, operation);
        }
        // complex message, one at a time.
        for (Containers expected : containers) {
            baos.reset();
            size = serializer.serialize(baos, expected);
            assertEquals(baos.size(), size);
            bais = new ByteArrayInputStream(baos.toByteArray());
            Containers actual;
            try {
                actual = serializer.deserialize(bais, Containers.kDescriptor);
            } catch (TokenizerException e) {
                System.err.println(new String(baos.toByteArray(), UTF_8));
                System.err.println(e.asString());
                e.printStackTrace();
                fail("oops");
                return;
            }
            assertThat(actual, new EqualToMessage<>(expected));
        }
        // complex message in stream.
        {
            baos.reset();
            boolean first = true;
            size = 0;
            for (Containers c : containers) {
                if (first) {
                    first = false;
                } else {
                    baos.write('\n');
                    size += 1;
                }
                size += serializer.serialize(baos, c);
            }
            assertEquals(baos.size(), size);
            bais = new ByteArrayInputStream(baos.toByteArray());
            first = true;
            for (Containers expected : containers) {
                if (first) {
                    first = false;
                } else {
                    assertThat(bais.read(), is((int) '\n'));
                }
                Containers actual = serializer.deserialize(bais, Containers.kDescriptor);
                assertThat(actual, new EqualToMessage<>(expected));
            }
            assertEquals(0, bais.available());
        }
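        // Re-read the same stream as ConsumeAll, a message type with no fields:
        // every field in the input is unknown and should be skipped cleanly.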
        try {
            if (serializer instanceof PrettySerializer) {
                String tmp = new String(baos.toByteArray(), UTF_8);
                bais = new ByteArrayInputStream(tmp.replaceFirst("providence[.]Containers", "providence.ConsumeAll")
                                                   .getBytes(UTF_8));
            } else {
                bais = new ByteArrayInputStream(baos.toByteArray());
            }
            boolean first = true;
            for (Containers ignore : containers) {
                if (first) {
                    first = false;
                } else {
                    assertThat(bais.read(), is((int) '\n'));
                }
                ConsumeAll actual = serializer.deserialize(bais, ConsumeAll.kDescriptor);
                assertThat(actual, new EqualToMessage<>(ConsumeAll.builder().build()));
            }
        } catch (TokenizerException e) {
            System.err.println(e.asString());
            throw e;
        }
        // service
        for (PServiceCall<?, ?> call : serviceCalls) {
            baos.reset();
            int i = serializer.serialize(baos, call);
            assertThat(i, is(baos.size()));
            bais = new ByteArrayInputStream(baos.toByteArray());
            PServiceCall<?, ?> re = serializer.deserialize(bais, ContainerService.kDescriptor);
            assertThat(re, is(call));
        }
    } catch (SerializerException e) {
        System.err.println(e.asString());
        throw e;
    }
}
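The test above boils down to one reusable pattern: serialize into a byte stream, read it back with the matching descriptor, and render TokenizerException.asString() (which includes the offending line) when text parsing fails. A minimal sketch of that round trip, generic over any generated providence message type; the helper name roundTrip is ours, not part of the project:

private static <M extends PMessage<M, F>, F extends PField>
M roundTrip(Serializer serializer, M message, PMessageDescriptor<M, F> descriptor) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // serialize() returns the number of bytes written to the stream.
    int size = serializer.serialize(out, message);
    assertEquals(out.size(), size);
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    try {
        return serializer.deserialize(in, descriptor);
    } catch (TokenizerException e) {
        // asString() renders the error with line context and a position
        // marker, which reads far better than the bare exception message.
        System.err.println(e.asString());
        throw e;
    }
}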
use of net.morimekta.providence.serializer.pretty.TokenizerException in project providence by morimekta.
the class BinarySerializerTest method testNonPrecompiled_lenient.
@Test
public void testNonPrecompiled_lenient() throws IOException {
    Serializer serializer = new BinarySerializer();
    // Just a sanity check.
    assertTrue(containers.size() == 10);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ByteArrayInputStream bais;
    int size;
    // complex message, one at a time.
    for (int i = 0; i < 10; ++i) {
        baos.reset();
        Containers expected = containers.get(i);
        size = serializer.serialize(baos, expected);
        assertEquals(baos.size(), size);
        bais = new ByteArrayInputStream(baos.toByteArray());
        Containers actual;
        try {
            actual = serializer.deserialize(bais, Containers.kDescriptor);
        } catch (TokenizerException e) {
            System.err.println(new String(baos.toByteArray(), UTF_8));
            System.err.println(e.asString());
            fail("oops");
            return;
        }
        assertEquals(actual, expected);
    }
    // complex message in stream.
    {
        baos.reset();
        size = 0;
        for (int i = 0; i < 10; ++i) {
            size += serializer.serialize(baos, containers.get(i));
        }
        assertEquals(baos.size(), size);
        bais = new ByteArrayInputStream(baos.toByteArray());
        for (int i = 0; i < 10; ++i) {
            Containers expected = containers.get(i);
            Containers actual = serializer.deserialize(bais, Containers.kDescriptor);
            assertEquals(actual, expected);
        }
        assertEquals(0, bais.available());
    }
}
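Unlike the pretty format, the binary encoding is self-delimiting, so the second loop reads ten messages back-to-back with no separator bytes. A small sketch of draining such a stream until it is exhausted; the helper is hypothetical, and available() is only a reliable end-of-data signal on in-memory streams like ByteArrayInputStream:

static List<Containers> readAll(Serializer serializer, ByteArrayInputStream in) throws IOException {
    List<Containers> result = new ArrayList<>();
    while (in.available() > 0) {
        // Each deserialize() call consumes exactly one message from the stream.
        result.add(serializer.deserialize(in, Containers.kDescriptor));
    }
    return result;
}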
use of net.morimekta.providence.serializer.pretty.TokenizerException in project providence by morimekta.
the class ThriftProgramParserTest method assertBadThrift.
private void assertBadThrift(String message, String fileName) {
    try {
        ThriftProgramParser parser = new ThriftProgramParser();
        File file = new File(tmp.getRoot(), fileName);
        parser.parse(new FileInputStream(file), file, new TreeSet<>());
        fail("No exception on bad thrift: " + fileName);
    } catch (TokenizerException e) {
        assertThat(e.asString().replaceAll("\\r", ""), is(message));
    } catch (IOException e) {
        assertThat(e.getMessage(), is(message));
    }
}
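A caller would first write the malformed IDL into the temporary folder (assuming, as tmp.getRoot() suggests, that tmp is a JUnit TemporaryFolder rule) and then assert on the rendered error. The expected string below is purely illustrative; it must match exactly what TokenizerException.asString() renders for the file, including the quoted source line and position marker:

File file = tmp.newFile("bad.thrift");
Files.write(file.toPath(), "struct Foo {\n  1: i32\n}\n".getBytes(UTF_8));
// The expected message text here is hypothetical.
assertBadThrift("Error on line 2, pos 9: Expected field name\n" +
                "  1: i32\n" +
                "--------^", "bad.thrift");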
use of net.morimekta.providence.serializer.pretty.TokenizerException in project providence by morimekta.
the class ProvidenceConfigParser method parseConfigRecursively.
@SuppressWarnings("unchecked")
<M extends PMessage<M, F>, F extends PField> Pair<M, Set<String>> parseConfigRecursively(@Nonnull Path file, M parent, String[] stack) throws IOException {
    Tokenizer tokenizer;
    try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file.toFile()))) {
        // Non-enclosed content, meaning we should read the whole file immediately.
        tokenizer = new Tokenizer(new Utf8StreamReader(in), Tokenizer.DEFAULT_BUFFER_SIZE, true);
    }
    ProvidenceConfigContext context = new ProvidenceConfigContext();
    Set<String> includedFilePaths = new TreeSet<>();
    includedFilePaths.add(canonicalFileLocation(file).toString());
    Stage lastStage = Stage.INCLUDES;
    M result = null;
    Token token = tokenizer.peek();
    while (token != null) {
        tokenizer.next();
        if (lastStage == Stage.MESSAGE) {
            throw new TokenizerException(token, "Unexpected token '" + token.asString() + "', expected end of file.").setLine(tokenizer.getLine());
        } else if (INCLUDE.equals(token.asString())) {
            // if include && stage == INCLUDES --> INCLUDES
            if (lastStage != Stage.INCLUDES) {
                throw new TokenizerException(token, "Include added after defines or message. Only one def block allowed.").setLine(tokenizer.getLine());
            }
            token = tokenizer.expectLiteral("file to be included");
            String includedFilePath = token.decodeLiteral(strict);
            PMessage included;
            Path includedFile;
            try {
                includedFile = resolveFile(file, includedFilePath);
                Pair<PMessage, Set<String>> tmp = checkAndParseInternal(includedFile, null, stack);
                if (tmp != null) {
                    includedFilePaths.add(includedFile.toString());
                    includedFilePaths.addAll(tmp.second);
                    included = tmp.first;
                } else {
                    included = null;
                }
            } catch (FileNotFoundException e) {
                throw new TokenizerException(token, "Included file \"%s\" not found.", includedFilePath).setLine(tokenizer.getLine());
            }
            token = tokenizer.expectIdentifier("the token 'as'");
            if (!AS.equals(token.asString())) {
                throw new TokenizerException(token, "Expected token 'as' after included file \"%s\".", includedFilePath).setLine(tokenizer.getLine());
            }
            token = tokenizer.expectIdentifier("Include alias");
            String alias = token.asString();
            if (RESERVED_WORDS.contains(alias)) {
                throw new TokenizerException(token, "Alias \"%s\" is a reserved word.", alias).setLine(tokenizer.getLine());
            }
            if (context.containsReference(alias)) {
                throw new TokenizerException(token, "Alias \"%s\" is already used.", alias).setLine(tokenizer.getLine());
            }
            context.setInclude(alias, included);
        } else if (DEF.equals(token.asString())) {
            // if params && stage == DEF --> DEF
            lastStage = Stage.DEFINES;
            parseDefinitions(context, tokenizer);
        } else if (token.isQualifiedIdentifier()) {
            // if a.b (type identifier) --> MESSAGE
            lastStage = Stage.MESSAGE;
            PMessageDescriptor<M, F> descriptor;
            try {
                descriptor = (PMessageDescriptor) registry.getDeclaredType(token.asString());
            } catch (IllegalArgumentException e) {
                // The root config file (stack.length == 1) must have a known type,
                // even in non-strict mode.
                if (strict || stack.length == 1) {
                    throw new TokenizerException(token, "Unknown declared type: %s", token.asString()).setLine(tokenizer.getLine());
                }
                return null;
            }
            result = parseConfigMessage(tokenizer, context, descriptor.builder(), parent, file);
        } else {
            throw new TokenizerException(token, "Unexpected token '" + token.asString() + "'. Expected include, defines or message type").setLine(tokenizer.getLine());
        }
        token = tokenizer.peek();
    }
    if (result == null) {
        throw new TokenizerException("No message in config: " + file.getFileName().toString());
    }
    return Pair.create(result, includedFilePaths);
}
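The parse loop above implies a three-part file layout: optional include "..." as alias lines first, then at most one def block of named values, then exactly one message body named by its qualified type. A sketch of a file this parser would accept; the type and field names are made up for illustration:

include "common.cfg" as common

def {
    http_port = 8080
}

config.Service {
    name = "my_service"
    port = http_port
}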
use of net.morimekta.providence.serializer.pretty.TokenizerException in project providence by morimekta.
the class ProvidenceConfigParser method resolveAny.
private static Object resolveAny(ProvidenceConfigContext context, Token token, Tokenizer tokenizer) throws TokenizerException {
    String key = token.asString();
    String name = key;
    String subKey = null;
    if (key.contains(IDENTIFIER_SEP)) {
        int idx = key.indexOf(IDENTIFIER_SEP);
        name = key.substring(0, idx);
        subKey = key.substring(idx + 1);
    }
    Object value = context.getReference(name, token, tokenizer);
    if (subKey != null) {
        if (!(value instanceof PMessage)) {
            throw new TokenizerException(token, "Reference name " + key + " not declared");
        }
        try {
            return ProvidenceConfigUtil.getInMessage((PMessage) value, subKey, null);
        } catch (ProvidenceConfigException e) {
            throw new TokenizerException(token, e.getMessage()).setLine(tokenizer.getLine()).initCause(e);
        }
    }
    return value;
}
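In other words, a dotted reference splits at the first separator: the head must name a known reference (such as an include alias), and the remainder is looked up inside that message via ProvidenceConfigUtil.getInMessage. A sketch of what that looks like in a config file; the names are illustrative:

include "common.cfg" as common

config.Service {
    # Resolved by resolveAny(): name = "common", subKey = "http.port".
    port = common.http.port
}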