Use of org.hl7.fhir.definitions.model.WorkGroup in project kindling by HL7.
In the class Publisher, method generateCodeSystemPart2:
private void generateCodeSystemPart2(CodeSystem cs) throws Exception {
  String n = cs.getUserString("filename");
  if (n == null)
    n = "codesystem-" + cs.getId();
  ImplementationGuideDefn ig = (ImplementationGuideDefn) cs.getUserData(ToolResourceUtilities.NAME_RES_IG);
  if (ig != null)
    n = ig.getCode() + File.separator + n;
  if (cs.getText().getDiv().allChildrenAreText() && (Utilities.noString(cs.getText().getDiv().allText()) || !cs.getText().getDiv().allText().matches(".*\\w.*"))) {
    RenderingContext lrc = page.getRc().copy().setLocalPrefix(ig != null ? "../" : "").setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY);
    RendererFactory.factory(cs, lrc).render(cs);
  }
  page.getVsValidator().validate(page.getValidationErrors(), n, cs, true, false);
  if (isGenerate) {
    // page.log(" ... "+n, LogMessageType.Process);
    addToResourceFeed(cs, valueSetsFeed, null);
    if (cs.getUserData("path") == null)
      cs.setUserData("path", n + ".html");
    page.setId(cs.getId());
    String sf;
    WorkGroup wg = wg(cs, "vocab");
    try {
      sf = page.processPageIncludes(n + ".html", TextFile.fileToString(page.getFolders().templateDir + "template-cs.html"), "codeSystem", null, n + ".html", cs, null, "Value Set", ig, null, wg);
    } catch (Exception e) {
      throw new Exception("Error processing " + n + ".html: " + e.getMessage(), e);
    }
    sf = addSectionNumbers(n + ".html", "template-codesystem", sf, csCounter(), ig == null ? 0 : 1, null, ig);
    TextFile.stringToFile(sf, page.getFolders().dstDir + n + ".html");
    try {
      String src = page.processPageIncludesForBook(n + ".html", TextFile.fileToString(page.getFolders().templateDir + "template-cs-book.html"), "codeSystem", cs, ig, null);
      cachePage(n + ".html", src, "Code System " + n, false);
      page.setId(null);
    } catch (Exception e) {
      throw new Exception("Error processing " + n + ".html: " + e.getMessage(), e);
    }
    IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY);
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + n + ".json");
    json.compose(s, cs);
    s.close();
    json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.json");
    json.compose(s, cs);
    s.close();
    IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".xml");
    xml.compose(s, cs);
    s.close();
    xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.xml");
    xml.compose(s, cs);
    s.close();
    // System.out.println(vs.getUrl());
    cloneToXhtml(n, "Definition for Code System " + cs.getName(), false, "codesystem-instance", "Code System", null, wg);
    jsonToXhtml(n, "Definition for Code System " + cs.getName(), resource2Json(cs), "codesystem-instance", "Code System", null, wg);
    ttlToXhtml(n, "Definition for Code System " + cs.getName(), resource2Ttl(cs), "codesystem-instance", "Code System", null, wg);
  }
}
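
The wg(cs, "vocab") call above resolves the owning WorkGroup for the code system, falling back to the vocabulary work group. A minimal sketch of what such a helper could look like, assuming the work-group registry iterated in processWarnings below is a map keyed by the work-group code; the "committee" user-data key and the Map-style lookup are assumptions, not the actual kindling implementation:

private WorkGroup wg(CodeSystem cs, String defaultCode) {
  String code = cs.getUserString("committee");             // assumed location of the wg code on the resource
  if (Utilities.noString(code))
    code = defaultCode;                                    // e.g. "vocab" for terminology content
  return page.getDefinitions().getWorkgroups().get(code);  // assumes a Map<String, WorkGroup> keyed by code
}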
Use of org.hl7.fhir.definitions.model.WorkGroup in project kindling by HL7.
In the class Publisher, method processWarnings:
private void processWarnings(boolean showOnlyErrors) throws Exception {
  String xslt = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "OwnerResources.xslt");
  OutputStreamWriter s = new OutputStreamWriter(new FileOutputStream(page.getFolders().dstDir + "warnings.xml"), "UTF-8");
  s.write("<warnings>");
  for (WorkGroup wg : page.getDefinitions().getWorkgroups().values()) {
    s.write("<wg code=\"" + wg.getCode() + "\" name=\"" + wg.getName() + "\" url=\"" + wg.getUrl() + "\"/>\r\n");
  }
  for (PageInformation pn : page.getDefinitions().getPageInfo().values()) {
    s.write("<page name=\"" + pn.getName() + "\" wg=\"" + pn.getWgCode() + "\" fmm=\"" + pn.getFmm() + "\"/>\r\n");
  }
  try {
    s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-resources.xml", xslt)));
    s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-types.xml", xslt)));
    s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-others.xml", xslt)));
  } catch (Exception e) {
    for (ValidationMessage err : page.getValidationErrors()) {
      if (!page.getSuppressedMessages().contains(err.getDisplay()))
        System.out.println(err.summary());
    }
    System.out.println("WARNING: Unable to create warnings file - one or more profiles-* files unavailable or invalid");
    System.out.println("To determine the cause of the build failure, look in the log prior to the warning and information messages immediately above");
  }
  for (ValidationMessage e : page.getValidationErrors()) {
    if (!page.getSuppressedMessages().contains(e.getDisplay()))
      s.write(e.toXML());
  }
  s.write("</warnings>");
  s.flush();
  s.close();
  String xslt2 = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "CategorizeWarnings.xslt");
  FileOutputStream s2 = new FileOutputStream(page.getFolders().dstDir + "work-group-warnings.xml");
  try {
    s2.write(XsltUtilities.saxonTransform(page.getFolders().dstDir + "warnings.xml", xslt2).getBytes("UTF8"));
  } catch (Exception e) {
    // nothing - do not want to know.
  }
  s2.flush();
  s2.close();
  String xslt3 = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "RenderWarnings.xslt");
  try {
    String hw = XsltUtilities.saxonTransform(page.getFolders().dstDir + "work-group-warnings.xml", xslt3);
    if (!showOnlyErrors)
      page.log(hw, LogMessageType.Process);
  } catch (Exception e) {
    // nothing - do not want to know.
  }
  int i = 0;
  int w = 0;
  int ee = 0;
  for (ValidationMessage e : page.getValidationErrors()) {
    if (e.getLevel() == IssueSeverity.ERROR || e.getLevel() == IssueSeverity.FATAL) {
      ee++;
      page.log(e.summary(), LogMessageType.Hint);
    } else if (e.getLevel() == IssueSeverity.WARNING) {
      w++;
    } else if (e.getLevel() == IssueSeverity.INFORMATION) {
      i++;
    }
  }
  page.getQa().setCounts(ee, w, i);
}
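
The <wg> and <page> elements above are written with raw string concatenation, so the attribute values are emitted unescaped. A minimal variant of the work-group loop that escapes them with Utilities.escapeXml (the same helper processResourceIncludes uses below) would be:

for (WorkGroup wg : page.getDefinitions().getWorkgroups().values()) {
  // escape each attribute in case a name or URL ever contains '&', '<' or '"'
  s.write("<wg code=\"" + Utilities.escapeXml(wg.getCode())
      + "\" name=\"" + Utilities.escapeXml(wg.getName())
      + "\" url=\"" + Utilities.escapeXml(wg.getUrl()) + "\"/>\r\n");
}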
Use of org.hl7.fhir.definitions.model.WorkGroup in project kindling by HL7.
In the class PageProcessor, method processResourceIncludes:
String processResourceIncludes(String name, ResourceDefn resource, String xml, String json, String ttl, String tx, String dict, String src, String mappings, String mappingsList, String type, String pagePath, ImplementationGuideDefn ig, Map<String, String> otherValues, WorkGroup wg, Map<String, String> examples) throws Exception {
  String workingTitle = Utilities.escapeXml(resource.getName());
  List<String> tabs = new ArrayList<String>();
  int level = (ig == null || ig.isCore()) ? 0 : 1;
  while (src.contains("<%") || src.contains("[%")) {
    int i1 = src.indexOf("<%");
    int i2 = src.indexOf("%>");
    if (i1 == -1) {
      i1 = src.indexOf("[%");
      i2 = src.indexOf("%]");
    }
    String s1 = src.substring(0, i1);
    String s2 = src.substring(i1 + 2, i2).trim();
    String s3 = src.substring(i2 + 2);
    String[] com = s2.split(" ");
    String searchAdditions = "";
    if (com[0].equals("resheader"))
      src = s1 + resHeader(name, resource.getName(), com.length > 1 ? com[1] : null) + s3;
    else if (com[0].equals("aresheader"))
      src = s1 + abstractResHeader(name, resource.getName(), com.length > 1 ? com[1] : null) + s3;
    else if (com[0].equals("lmheader"))
      src = s1 + lmHeader(name, resource.getName(), com.length > 1 ? com[1] : null, false) + s3;
    else if (com[0].equals("file")) {
      if (new File(folders.templateDir + com[1] + ".html").exists()) {
        src = s1 + TextFile.fileToString(folders.templateDir + com[1] + ".html") + s3;
      } else {
        src = s1 + TextFile.fileToString(folders.srcDir + com[1] + ".html") + s3;
      }
    } else if (com[0].equals("settitle")) {
      workingTitle = s2.substring(9).replace("{", "<%").replace("}", "%>");
      src = s1 + s3;
    } else if (com[0].equals("complinks"))
      src = s1 + getCompLinks(resource, com.length > 1 ? com[1] : null) + s3;
    else if (com[0].equals("othertabs"))
      src = s1 + genOtherTabs(com[1], tabs) + s3;
    else if (com[0].equals("svg"))
      src = s1 + new SvgGenerator(this, genlevel(level), resource.getLayout(), true, false, version).generate(resource, com[1]) + s3;
    else if (com[0].equals("normative")) {
      String np = null;
      if (com[2].equals("%check") || com[2].equals("%check-op")) {
        StandardsStatus st = resource.getStatus();
        boolean mixed = false;
        if (com[2].equals("%check-op") && st == StandardsStatus.NORMATIVE) {
          for (Operation op : resource.getOperations()) {
            if (op.getStandardsStatus() != null)
              mixed = true;
          }
        }
        if (st != null && (resource.getNormativePackage() != null || resource.getNormativeVersion() != null)) {
          if (mixed)
            np = getMixedNormativeNote(genlevel(level), resource.getNormativePackage(), com[1], workingTitle, name + ".html") + s3;
          else
            np = getNormativeNote(genlevel(level), resource.getNormativePackage(), com[1], workingTitle, name + ".html") + s3;
        }
      } else
        np = getNormativeNote(genlevel(level), resource.getNormativePackage(), com[1], workingTitle, name + ".html");
      if (np == null)
        src = s1 + s3;
      else
        src = s1 + np + s3;
    } else if (com.length != 1)
      throw new Exception("Instruction <%" + s2 + "%> not understood parsing resource " + name);
    else if (com[0].equals("pageheader"))
      src = s1 + pageHeader(resource.getName()) + s3;
    else if (com[0].equals("maponthispage"))
      src = s1 + mapOnThisPage(mappingsList) + s3;
    else if (com[0].equals("newheader"))
      src = s1 + TextFile.fileToString(folders.srcDir + "newheader.html") + s3;
    else if (com[0].equals("newheader1"))
      src = s1 + TextFile.fileToString(folders.srcDir + "newheader1.html") + s3;
    else if (com[0].equals("footer"))
      src = s1 + TextFile.fileToString(folders.srcDir + "footer.html") + s3;
    else if (com[0].equals("newfooter"))
      src = s1 + TextFile.fileToString(folders.srcDir + "newfooter.html") + s3;
    else if (com[0].equals("footer1"))
      src = s1 + TextFile.fileToString(folders.srcDir + "footer1.html") + s3;
    else if (com[0].equals("footer2"))
      src = s1 + TextFile.fileToString(folders.srcDir + "footer2.html") + s3;
    else if (com[0].equals("footer3"))
      src = s1 + TextFile.fileToString(folders.srcDir + "footer3.html") + s3;
    else if (com[0].equals("title"))
      src = s1 + workingTitle + s3;
    else if (com[0].equals("xtitle"))
      src = s1 + Utilities.escapeXml(resource.getName()) + s3;
    else if (com[0].equals("status"))
      src = s1 + resource.getStatus() + s3;
    else if (com[0].equals("draft-note"))
      src = s1 + getDraftNote(resource) + s3;
    else if (com[0].equals("introduction"))
      src = s1 + loadXmlNotes(name, "introduction", true, resource.getRoot().getDefinition(), resource, tabs, null, wg) + s3;
    else if (com[0].equals("notes"))
      src = s1 + loadXmlNotes(name, "notes", false, null, resource, tabs, null, wg) + s3;
    else if (com[0].equals("examples"))
      src = s1 + produceExamples(resource) + s3;
    else if (com[0].equals("profilelist"))
      src = s1 + produceProfiles(resource) + s3;
    else if (com[0].equals("extensionlist"))
      src = s1 + produceExtensions(resource) + s3;
    else if (com[0].equals("extensionreflist"))
      src = s1 + produceRefExtensions(resource) + s3;
    else if (com[0].equals("searchextensionlist"))
      src = s1 + produceSearchExtensions(resource) + s3;
    else if (com[0].equals("wg"))
      src = s1 + (resource.getWg() == null ? "null" : resource.getWg().getUrl()) + s3;
    else if (com[0].equals("wgt"))
      src = s1 + (resource.getWg() == null ? "null" : resource.getWg().getName()) + s3;
    else if (com[0].equals("fmm"))
      if (resource.getNormativeVersion() != null)
        src = s1 + "<a href=\"versions.html#maturity\">Maturity Level</a>: <a href=\"versions.html#std-process\">N</a>" + s3;
      else
        src = s1 + "<a href=\"versions.html#maturity\">Maturity Level</a>: " + resource.getFmmLevel() + "" + s3;
    else if (com[0].equals("sec-cat"))
      src = s1 + (resource.getSecurityCategorization() == null ? "" : "<a href=\"security.html#SecPrivConsiderations\">Security Category</a>: " + resource.getSecurityCategorization().toDisplay()) + s3;
    else if (com[0].equals("sstatus"))
      src = s1 + getStandardsStatus(resource.getName()) + s3;
    else if (com[0].equals("example-list"))
      src = s1 + produceExampleList(resource) + s3;
    else if (com[0].equals("name"))
      src = s1 + name + s3;
    else if (com[0].equals("cname"))
      src = s1 + resource.getName() + s3;
    else if (com[0].equals("search-additions")) {
      searchAdditions = s2.substring(16).trim();
      src = s1 + s3;
    } else if (com[0].equals("search"))
      src = s1 + getSearch(resource, searchAdditions) + s3;
    else if (com[0].equals("asearch"))
      src = s1 + getAbstractSearch(resource, searchAdditions) + s3;
    else if (com[0].equals("version"))
      src = s1 + ini.getStringProperty("FHIR", "version") + s3;
    else if (com[0].equals("gendate"))
      src = s1 + Config.DATE_FORMAT().format(new Date()) + s3;
    else if (com[0].equals("definition"))
      src = s1 + processMarkdown("resource.definition", resource.getRoot().getDefinition(), "", true) + s3;
    else if (com[0].equals("xml"))
      src = s1 + xml + s3;
    else if (com[0].equals("json"))
      src = s1 + json + s3;
    else if (com[0].equals("ttl"))
      src = s1 + ttl + s3;
    else if (com[0].equals("tx"))
      src = s1 + tx + s3;
    else if (com[0].equals("inv"))
      src = s1 + genResourceConstraints(resource, genlevel(level)) + s3;
    else if (com[0].equals("resource-table"))
      src = s1 + genResourceTable(resource, genlevel(level)) + s3;
    else if (com[0].equals("plural"))
      src = s1 + Utilities.pluralizeMe(name) + s3;
    else if (com[0].equals("dictionary"))
      src = s1 + dict + s3;
    else if (com[0].equals("mappings"))
      src = s1 + mappings + s3;
    else if (com[0].equals("mappingslist"))
      src = s1 + mappingsList + s3;
    else if (com[0].equals("breadcrumb"))
      src = s1 + breadCrumbManager.make(name) + s3;
    else if (com[0].equals("ext-link"))
      src = s1 + getExtensionsLink(resource) + s3;
    else if (com[0].equals("navlist"))
      src = s1 + breadCrumbManager.navlist(name, genlevel(level)) + s3;
    else if (com[0].equals("breadcrumblist"))
      src = s1 + ((ig == null || ig.isCore()) ? breadCrumbManager.makelist(name, type, genlevel(level), workingTitle) : ig.makeList(name, type, genlevel(level), workingTitle)) + s3;
    else if (com[0].equals("year"))
      src = s1 + new SimpleDateFormat("yyyy").format(new Date()) + s3;
    else if (com[0].equals("buildId"))
      src = s1 + buildId + s3;
    else if (com[0].equals("level"))
      src = s1 + genlevel(level) + s3;
    else if (com[0].equals("atitle"))
      src = s1 + abstractResourceTitle(resource) + s3;
    else if (com[0].equals("pub-type"))
      src = s1 + publicationType + s3;
    else if (com[0].equals("example-header"))
      src = s1 + loadXmlNotesFromFile(Utilities.path(folders.srcDir, name.toLowerCase(), name + "-examples-header.xml"), false, null, resource, tabs, null, wg) + s3;
    else if (com[0].equals("pub-notice"))
      src = s1 + publicationNotice + s3;
    else if (com[0].equals("resref"))
      src = s1 + getReferences(resource.getName()) + s3;
    else if (com[0].equals("pagepath"))
      src = s1 + pagePath + s3;
    else if (com[0].equals("rellink")) {
      if (!pagePath.contains(".html"))
        throw new Error("Invalid link: " + pagePath + " at " + workingTitle);
      src = s1 + Utilities.URLEncode(pagePath) + s3;
    } else if (com[0].equals("baseURL"))
      src = s1 + Utilities.URLEncode(baseURL) + s3;
    else if (com[0].equals("baseURLn"))
      src = s1 + Utilities.appendForwardSlash(baseURL) + s3;
    else if (com[0].equals("operations")) {
      List<Operation> oplist = resource.getOperations();
      String n = resource.getName();
      String id = resource.getName().toLowerCase();
      boolean mixed = false;
      if (resource.getStatus() == StandardsStatus.NORMATIVE) {
        for (Operation op : resource.getOperations()) {
          if (op.getStandardsStatus() != null)
            mixed = true;
        }
      }
      src = s1 + genOperations(oplist, n, id, mixed, resource.getStatus(), "", resource.getNormativePackage()) + s3;
    } else if (com[0].equals("operations-summary"))
      src = s1 + genOperationsSummary(resource.getOperations(), resource) + s3;
    else if (com[0].equals("opcount"))
      src = s1 + genOpCount(resource.getOperations()) + s3;
    else if (com[0].startsWith("!"))
      src = s1 + s3;
    else if (com[0].equals("search-footer"))
      src = s1 + searchFooter(level) + s3;
    else if (com[0].equals("pattern-title"))
      src = s1 + resource.getName() + s3;
    else if (com[0].equals("search-header"))
      src = s1 + searchHeader(level) + s3;
    else if (com[0].equals("diff-analysis"))
      src = s1 + diffEngine.getDiffAsHtml(this, resource.getProfile()) + s3;
    else if (com[0].equals("r3r4transforms"))
      src = s1 + getR3r4transformNote(resource.getName()) + s3;
    else if (com[0].equals("fmm-style"))
      src = s1 + fmmBarColorStyle(resource) + s3;
    else if (otherValues.containsKey(com[0]))
      src = s1 + otherValues.get(com[0]) + s3;
    else if (com[0].equals("lmimplementations"))
      src = s1 + genImplementationList(resource) + s3;
    else if (com[0].equals("json-schema"))
      src = s1 + jsonSchema(resource.getName()) + s3;
    else if (com[0].equals("dependency-graph"))
      src = s1 + genDependencyGraph(resource, genlevel(level)) + s3;
    else if (com[0].equals("logical-mappings"))
      src = s1 + genLogicalMappings(resource, genlevel(level)) + s3;
    else if (com[0].equals("no-extensions-base-warning"))
      src = s1 + genNoExtensionsWarning(resource) + s3;
    else if (com[0].equals("res-ext-link"))
      src = s1 + genResExtLink(resource) + s3;
    else if (com[0].equals("pattern-analysis"))
      src = s1 + genLogicalAnalysis(resource, genlevel(level)) + s3;
    else if (com[0].equals("resurl")) {
      if (isAggregationEndpoint(resource.getName()))
        src = s1 + s3;
      else
        src = s1 + "<p>The resource name as it appears in a RESTful URL is <a href=\"http.html#root\">[root]</a>/" + name + "/</p>" + s3;
    } else if (com[0].equals("res-type-count")) {
      src = s1 + definitions.getResources().size() + s3;
    } else if (macros.containsKey(com[0])) {
      src = s1 + macros.get(com[0]) + s3;
    } else
      throw new Exception("Instruction <%" + s2 + "%> not understood parsing resource " + name);
  }
  return src;
}
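
For orientation, the directive-processing loop above reduces to the following stripped-down sketch (assuming java.util.Map is imported). The expandDirectives name and the values map are illustrative only; the real method dispatches on the directive name with the long if/else chain shown above, and some expansions (for example settitle) reinsert <% %> markers, so the loop keeps running until none remain.

static String expandDirectives(String src, Map<String, String> values) {
  // repeatedly find the first <%...%> (or [%...%]) directive, split the page into
  // prefix / directive / suffix, and splice the expansion back into the page
  while (src.contains("<%") || src.contains("[%")) {
    int i1 = src.indexOf("<%");
    int i2 = src.indexOf("%>");
    if (i1 == -1) {
      i1 = src.indexOf("[%");
      i2 = src.indexOf("%]");
    }
    String before = src.substring(0, i1);
    String directive = src.substring(i1 + 2, i2).trim();
    String after = src.substring(i2 + 2);
    String expansion = values.getOrDefault(directive.split(" ")[0], "");
    src = before + expansion + after;
  }
  return src;
}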
Use of org.hl7.fhir.definitions.model.WorkGroup in project kindling by HL7.
In the class Publisher, method cloneToXhtml:
private void cloneToXhtml(String n, String description, boolean adorn, String pageType, String crumbTitle, ImplementationGuideDefn igd, ResourceDefn rd, WorkGroup wg) throws Exception {
  DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  factory.setNamespaceAware(true);
  DocumentBuilder builder = factory.newDocumentBuilder();
  Document xdoc = builder.parse(new CSFileInputStream(new CSFile(page.getFolders().dstDir + n + ".xml")));
  XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(Utilities.charCount(n, File.separatorChar))));
  ByteArrayOutputStream b = new ByteArrayOutputStream();
  xhtml.generate(xdoc, b, n.toUpperCase().substring(0, 1) + n.substring(1), description, 0, adorn, n + ".xml.html");
  String html = TextFile.fileToString(page.getFolders().templateDir + "template-example-xml.html").replace("<%example%>", b.toString());
  html = page.processPageIncludes(n + ".xml.html", html, pageType, null, n + ".xml.html", null, null, crumbTitle, (adorn && hasNarrative(xdoc)) ? new Boolean(true) : null, igd, rd, wg);
  TextFile.stringToFile(html, page.getFolders().dstDir + n + ".xml.html");
  // page.getEpub().registerFile(n + ".xml.html", description, EPubManager.XHTML_TYPE);
  page.getHTMLChecker().registerExternal(n + ".xml.html");
}
Use of org.hl7.fhir.definitions.model.WorkGroup in project kindling by HL7.
In the class PageProcessor, method cloneToXhtml:
private void cloneToXhtml(String src, String dst, String name, String description, int level, boolean adorn, String pageType, String crumbTitle, ImplementationGuideDefn ig, ResourceDefn rd, WorkGroup wg) throws Exception {
  DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  factory.setNamespaceAware(true);
  DocumentBuilder builder = factory.newDocumentBuilder();
  Document xdoc = builder.parse(new CSFileInputStream(new CSFile(src)));
  // XhtmlGenerator xhtml = new XhtmlGenerator(null);
  // xhtml.generate(xdoc, new CSFile(dst), name, description, level, adorn);
  String n = new File(dst).getName();
  n = n.substring(0, n.length() - 9);
  XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(definitions, genlevel(level)));
  ByteArrayOutputStream b = new ByteArrayOutputStream();
  xhtml.generate(xdoc, b, name, description, level, adorn, n + ".xml.html");
  String html = ("<%setlevel " + Integer.toString(level) + "%>" + TextFile.fileToString(folders.srcDir + "template-example-xml.html")).replace("<%example%>", b.toString());
  html = processPageIncludes(n + ".xml.html", html, pageType, null, n + ".xml.html", null, null, crumbTitle, (adorn && hasNarrative(xdoc)) ? new Boolean(true) : null, ig, rd, wg);
  TextFile.stringToFile(html, dst);
  htmlchecker.registerExternal(dst);
}
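
A hypothetical invocation of this overload (all literal values below are illustrative, not taken from kindling): it renders an example instance into an adorned .xml.html page next to it, letting the method recover n by stripping the trailing ".xml.html" from the destination file name.

cloneToXhtml(
    folders.dstDir + "observation-example.xml",       // src: the XML instance to render
    folders.dstDir + "observation-example.xml.html",  // dst: must end in ".xml.html" (the 9 characters stripped above)
    "Observation Example",                            // name shown in the generated page
    "Example of an Observation resource",             // description
    0,                                                // level: depth below the site root
    true,                                             // adorn: produce the hyperlinked rendering
    "resource-instance",                              // pageType passed through to processPageIncludes
    "Observation",                                    // crumbTitle
    null, rd, wg);                                    // ig, ResourceDefn and WorkGroup as resolved by the caller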