
Example 1 with ModelConverter

Use of us.parr.bookish.translate.ModelConverter in project bookish by parrt: class Tool, method legacy_translate.

// legacy single-doc translation
public Pair<Document, String> legacy_translate(Translator trans, String inputDir, String inputFilename) throws IOException {
    Pair<BookishParser.DocumentContext, BookishParser> results = parseChapter(inputDir, inputFilename, 0);
    trans.entities = results.b.entities;
    // get single chapter
    Document doc = (Document) trans.visit(results.a);
    doc.chapter.connectContainerTree();
    // convert the output model hierarchy into a StringTemplate via ModelConverter
    ModelConverter converter = new ModelConverter(trans.templates);
    ST outputST = converter.walk(doc);
    return new Pair<>(doc, outputST.render());
}
Also used: ST (org.stringtemplate.v4.ST), Document (us.parr.bookish.model.Document), ModelConverter (us.parr.bookish.translate.ModelConverter), BookishParser (us.parr.bookish.parse.BookishParser), Pair (org.antlr.v4.runtime.misc.Pair)
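
A minimal sketch of how legacy_translate might be driven end to end, mirroring the legacy branch of process() in Example 2. Tool is assumed to have an accessible no-argument constructor, the input directory and file name are hypothetical, and the Target enum must be imported from wherever the project defines it (none of this is shown in the excerpts):

import java.util.HashMap;
import org.antlr.v4.runtime.misc.Pair;
import us.parr.bookish.model.Book;
import us.parr.bookish.model.Document;
import us.parr.bookish.translate.Translator;

public class LegacyTranslateDemo {
    // Also import Tool and Target from their packages in the bookish project;
    // their exact locations are not shown in the excerpts above.
    public static void main(String[] args) throws Exception {
        Tool tool = new Tool();                          // assumed no-arg constructor
        Book book = new Book(tool, "", "");              // empty title/author, as in process()
        book.entities = new HashMap<>();
        Translator trans = new Translator(book, book.entities, Target.HTML, "/tmp/out");
        // parse, build the output model, and render a single chapter
        Pair<Document, String> result =
                tool.legacy_translate(trans, "/path/to/book", "intro.md");  // hypothetical paths
        System.out.println(result.b);                    // result.b is the rendered page text
    }
}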

Example 2 with ModelConverter

Use of us.parr.bookish.translate.ModelConverter in project bookish by parrt: class Tool, method process.

public void process(String[] args) throws Exception {
    options = handleArgs(args);
    String metadataFilename = option("metadataFilename");
    inputDir = new File(metadataFilename).getParent();
    outputDir = option("o");
    String outFilename;
    Translator trans;
    Target target = (Target) optionO("target");
    ParrtIO.mkdir(outputDir + "/images");
    String snippetsDir = getBuildDir(metadataFilename) + "/snippets";
    ParrtIO.mkdir(snippetsDir);
    if (metadataFilename.endsWith(".md")) {
        // just one file (legacy stuff)
        String inputFilename = metadataFilename;
        Book book = new Book(this, "", "");
        book.entities = new HashMap<>();
        trans = new Translator(book, book.entities, target, outputDir);
        if (target == Target.HTML) {
            outFilename = "index.html";
        } else {
            outFilename = stripFileExtension(basename(inputFilename)) + ".tex";
        }
        Pair<Document, String> results = legacy_translate(trans, inputDir, basename(inputFilename));
        String output = results.b;
        ParrtIO.save(outputDir + "/" + outFilename, output);
        // System.out.println("Wrote "+outputDir+"/"+outFilename);
        copyImages(book, inputDir, outputDir);
        return;
    }
    // otherwise, read and use metadata
    JsonReader jsonReader = Json.createReader(new FileReader(metadataFilename));
    JsonObject metadata = jsonReader.readObject();
    // System.out.println(metadata);
    String title = metadata.getString("title");
    Book book = new Book(this, title, null);
    String author = metadata.getString("author");
    dataDir = metadata.getString("data");
    // the paragraph rule requires a leading blank line
    author = "\n\n" + author;
    trans = new Translator(book, null, target, outputDir);
    book.author = translateString(trans, author, "paragraph");
    String mainOutFilename;
    if (target == Target.HTML) {
        mainOutFilename = "index.html";
    } else {
        mainOutFilename = "book.tex";
    }
    // parse all documents first to get entity defs
    List<BookishParser.DocumentContext> trees = new ArrayList<>();
    List<Map<String, EntityDef>> entities = new ArrayList<>();
    List<List<ExecutableCodeDef>> codeBlocks = new ArrayList<>();
    JsonArray markdownFilenames = metadata.getJsonArray("chapters");
    for (JsonValue f : markdownFilenames) {
        String fname = stripQuotes(f.toString());
        book.filenames.add(fname);
        Pair<BookishParser.DocumentContext, BookishParser> results = parseChapter(inputDir, fname, book.chapCounter);
        book.chapCounter++;
        trees.add(results.a);
        entities.add(results.b.entities);
        codeBlocks.add(results.b.codeBlocks);
    }
    executeCodeSnippets(book, getBuildDir(metadataFilename), codeBlocks);
    // now walk all trees and translate
    List<Document> documents = new ArrayList<>();
    for (int i = 0; i < book.filenames.size(); i++) {
        String fname = book.filenames.get(i);
        BookishParser.DocumentContext tree = trees.get(i);
        Map<String, EntityDef> thisDocsEntities = entities.get(i);
        trans = new Translator(book, thisDocsEntities, target, outputDir);
        // get doc for single chapter
        Document doc = (Document) trans.visit(tree);
        book.addChapterDocument(doc);
        doc.chapter.connectContainerTree();
        ModelConverter converter = new ModelConverter(trans.templates);
        ST outputST = converter.walk(doc);
        // walk all OutputModelObjects created as labeled entities to convert those entities
        // unlabeled entities are done in-line
        ArrayList<String> labels = new ArrayList<>(thisDocsEntities.keySet());
        for (String label : labels) {
            EntityDef def = thisDocsEntities.get(label);
            def.template = converter.walk(def.model);
            if (def.isGloballyVisible()) {
                // move to global space
                book.entities.put(label, def);
                thisDocsEntities.remove(label);
            }
        }
        String output = outputST.render();
        doc.markdownFilename = fname;
        documents.add(doc);
        if (target == Target.HTML) {
            outFilename = stripFileExtension(fname) + ".html";
        } else {
            outFilename = stripFileExtension(fname) + ".tex";
        }
        ParrtIO.save(outputDir + "/" + outFilename, output);
        doc.generatedFilename = outFilename;
    // System.out.println("Wrote "+outputDir+"/"+outFilename);
    }
    ST bookTemplate = trans.templates.getInstanceOf("Book");
    bookTemplate.add("model", book);
    ParrtIO.save(outputDir + "/" + mainOutFilename, bookTemplate.render());
    // System.out.println("Wrote "+outputDir+"/"+mainOutFilename);
    copyImages(book, inputDir, outputDir);
    execCommandLine(String.format("cp -r %s/css %s", inputDir, outputDir));
// copyImages(BUILD_DIR, outputDir);
}
Also used: ArrayList (java.util.ArrayList), JsonObject (javax.json.JsonObject), Document (us.parr.bookish.model.Document), EntityDef (us.parr.bookish.model.entity.EntityDef), Translator (us.parr.bookish.translate.Translator), Book (us.parr.bookish.model.Book), JsonReader (javax.json.JsonReader), FileReader (java.io.FileReader), List (java.util.List), ST (org.stringtemplate.v4.ST), JsonValue (javax.json.JsonValue), ModelConverter (us.parr.bookish.translate.ModelConverter), BookishParser (us.parr.bookish.parse.BookishParser), JsonArray (javax.json.JsonArray), File (java.io.File), STGroupFile (org.stringtemplate.v4.STGroupFile), HashMap (java.util.HashMap), Map (java.util.Map), MultiMap (org.antlr.v4.runtime.misc.MultiMap)
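
The non-legacy branch of process() reads four fields from the metadata JSON: title, author, data, and the chapters array of markdown filenames. A minimal sketch of producing such a file with the same javax.json API, assuming these are the only required fields; the file name, field values, and the commented-out command-line flags are illustrative, not taken from the project:

import java.io.FileWriter;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonWriter;

public class MetadataDemo {
    public static void main(String[] args) throws Exception {
        // Build an object with the keys process() reads: getString("title"),
        // getString("author"), getString("data"), getJsonArray("chapters").
        JsonObject metadata = Json.createObjectBuilder()
                .add("title", "My Book")                 // illustrative values
                .add("author", "Jane Doe")
                .add("data", "data")
                .add("chapters", Json.createArrayBuilder()
                        .add("01-intro.md")
                        .add("02-basics.md"))
                .build();
        try (JsonWriter w = Json.createWriter(new FileWriter("book.json"))) {  // hypothetical name
            w.writeObject(metadata);
        }
        // Hypothetical invocation; the flag names accepted by handleArgs() are not
        // shown in the excerpt, so check the project's own documentation:
        // new Tool().process(new String[]{"-target", "HTML", "-o", "/tmp/out", "book.json"});
    }
}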

Example 3 with ModelConverter

Use of us.parr.bookish.translate.ModelConverter in project bookish by parrt: class Tool, method translateString.

public String translateString(Translator trans, String markdown, String startRule) throws Exception {
    CharStream input = CharStreams.fromString(markdown);
    BookishLexer lexer = new BookishLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    BookishParser parser = new BookishParser(tokens, null, 0);
    Method startMethod = BookishParser.class.getMethod(startRule, (Class[]) null);
    ParseTree doctree = (ParseTree) startMethod.invoke(parser, (Object[]) null);
    // translate the parse tree for the requested start rule into an output model object
    OutputModelObject omo = trans.visit(doctree);
    ModelConverter converter = new ModelConverter(trans.templates);
    ST outputST = converter.walk(omo);
    return outputST.render();
}
Also used: BookishLexer (us.parr.bookish.parse.BookishLexer), CommonTokenStream (org.antlr.v4.runtime.CommonTokenStream), ST (org.stringtemplate.v4.ST), OutputModelObject (us.parr.bookish.model.OutputModelObject), Method (java.lang.reflect.Method), CharStream (org.antlr.v4.runtime.CharStream), ParseTree (org.antlr.v4.runtime.tree.ParseTree), ModelConverter (us.parr.bookish.translate.ModelConverter), BookishParser (us.parr.bookish.parse.BookishParser)
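
translateString() runs the same visit/walk/render pipeline as the other two examples, but starts from an arbitrary parser rule on an in-memory string; process() uses it above to render the author field with the paragraph rule. A minimal sketch of that call, under the same assumptions as the Example 1 sketch (Tool constructed with no arguments, Target imported from its actual package):

import us.parr.bookish.model.Book;
import us.parr.bookish.translate.Translator;

public class TranslateStringDemo {
    public static void main(String[] args) throws Exception {
        Tool tool = new Tool();
        Book book = new Book(tool, "Demo", null);
        // translateString() builds its own lexer/parser, so no per-chapter entities are needed
        Translator trans = new Translator(book, null, Target.HTML, "/tmp/out");
        // the paragraph rule expects a leading blank line (see process() above)
        String output = tool.translateString(trans, "\n\nHello, world.", "paragraph");
        System.out.println(output);
    }
}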

Aggregations

Classes referenced across the examples above, with the number of examples that use each:

ST (org.stringtemplate.v4.ST): 3
BookishParser (us.parr.bookish.parse.BookishParser): 3
ModelConverter (us.parr.bookish.translate.ModelConverter): 3
Document (us.parr.bookish.model.Document): 2
File (java.io.File): 1
FileReader (java.io.FileReader): 1
Method (java.lang.reflect.Method): 1
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
List (java.util.List): 1
Map (java.util.Map): 1
JsonArray (javax.json.JsonArray): 1
JsonObject (javax.json.JsonObject): 1
JsonReader (javax.json.JsonReader): 1
JsonValue (javax.json.JsonValue): 1
CharStream (org.antlr.v4.runtime.CharStream): 1
CommonTokenStream (org.antlr.v4.runtime.CommonTokenStream): 1
MultiMap (org.antlr.v4.runtime.misc.MultiMap): 1
Pair (org.antlr.v4.runtime.misc.Pair): 1
ParseTree (org.antlr.v4.runtime.tree.ParseTree): 1