Use of org.hl7.fhir.definitions.model.ImplementationGuideDefn in project kindling by HL7: class Publisher, method produceIgPage.
private void produceIgPage(ImplementationGuideDefn ig, ImplementationGuideDefinitionPageComponent p) throws Exception {
  String actualName = Utilities.path(page.getFolders().rootDir, Utilities.getDirectoryForFile(ig.getSource()), p.getNameUrlType().getValue());
  String logicalName = Utilities.fileTitle(actualName);
  String src;
  if (IgParser.getKind(p) == GuidePageKind.TOC)
    src = TextFile.fileToString(Utilities.path(page.getFolders().templateDir, "template-ig-toc.html"));
  else
    throw new Exception("Unsupported special page kind " + IgParser.getKind(p).toCode());
  String file = ig.getCode() + File.separator + logicalName + ".html";
  src = page.processPageIncludes(file, src, "page", null, null, null, logicalName, ig, null, null);
  // before we save this page out, we're going to figure out what its index
  // is, and number the headers if we can
  src = addSectionNumbers(file, logicalName, src, null, 1, null, ig);
  TextFile.stringToFile(src, Utilities.path(page.getFolders().dstDir, file));
  src = TextFile.fileToString(Utilities.path(page.getFolders().dstDir, file)).replace("<body>", "<body style=\"margin: 10px\">");
  src = page.processPageIncludesForBook(file, src, "page", null, ig, null);
  cachePage(file, src, logicalName, true);
}
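The last three lines re-read the page that was just written, add an inline margin to the body tag, and cache the result for the book rendition. A minimal standalone sketch of that re-read-and-restyle step, assuming a hypothetical helper class that is not part of kindling and using only java.nio:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

class PagePostProcessor {
  // Re-reads a generated page and injects the inline margin used for the book
  // rendition, mirroring the replace("<body>", ...) step in produceIgPage above.
  static String reloadWithBookMargin(Path htmlFile) throws IOException {
    String src = Files.readString(htmlFile, StandardCharsets.UTF_8);
    return src.replace("<body>", "<body style=\"margin: 10px\">");
  }
}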
Use of org.hl7.fhir.definitions.model.ImplementationGuideDefn in project kindling by HL7: class Publisher, method produceSpecMap.
private void produceSpecMap() throws IOException {
  SpecMapManager spm = new SpecMapManager("hl7.fhir.core", page.getVersion().toCode(), page.getVersion().toCode(), page.getBuildId(), page.getGenDate(), CANONICAL_BASE);
  for (StructureDefinition sd : page.getWorkerContext().allStructures()) {
    if (sd.hasUserData("path")) {
      spm.path(sd.getUrl(), sd.getUserString("path").replace("\\", "/"));
      spm.target(sd.getUserString("path").replace("\\", "/"));
    }
  }
  for (StructureDefinition sd : page.getWorkerContext().getExtensionDefinitions()) {
    if (sd.hasUserData("path")) {
      spm.path(sd.getUrl(), sd.getUserString("path").replace("\\", "/"));
      spm.target(sd.getUserString("path").replace("\\", "/"));
    }
  }
  for (String s : page.getCodeSystems().keys()) {
    CodeSystem cs = page.getCodeSystems().get(s);
    if (cs == null && !Utilities.existsInList(s, "http://unitsofmeasure.org", "http://loinc.org", "http://fdasis.nlm.nih.gov", "http://www.nlm.nih.gov/research/umls/rxnorm", "urn:oid:1.2.36.1.2001.1005.17") && !SIDUtilities.isknownCodeSystem(s))
      System.out.println("No code system resource found for " + s);
  }
  for (CodeSystem cs : page.getCodeSystems().getList()) {
    if (cs != null && cs.hasUserData("path")) {
      spm.path(cs.getUrl(), cs.getUserString("path").replace("\\", "/"));
      spm.target(cs.getUserString("path").replace("\\", "/"));
    }
  }
  for (ValueSet vs : page.getValueSets().getList()) {
    if (vs.hasUserData("path")) {
      spm.path(vs.getUrl(), vs.getUserString("path").replace("\\", "/"));
      spm.target(vs.getUserString("path").replace("\\", "/"));
    }
  }
  for (ConceptMap cm : page.getConceptMaps().getList()) {
    if (cm.hasUserData("path")) {
      spm.path(cm.getUrl(), cm.getUserString("path").replace("\\", "/"));
      spm.target(cm.getUserString("path").replace("\\", "/"));
    }
  }
  for (String s : page.getDefinitions().getPageTitles().keySet()) {
    spm.page(s, page.getDefinitions().getPageTitles().get(s));
  }
  for (String n : page.getIni().getPropertyNames("pages")) {
    spm.target(n);
  }
  for (ResourceDefn rd : page.getDefinitions().getResources().values()) {
    spm.target(rd.getName().toLowerCase() + ".html");
    spm.target(rd.getName().toLowerCase() + "-definitions.html");
    spm.target(rd.getName().toLowerCase() + "-mappings.html");
    spm.target(rd.getName().toLowerCase() + "-examples.html");
    spm.target(rd.getName().toLowerCase() + "-profiles.html");
    if (!rd.getOperations().isEmpty())
      spm.target(rd.getName().toLowerCase() + "-operations.html");
    for (Example ex : rd.getExamples()) {
      ImplementationGuideDefn ig = ex.getIg() == null ? null : page.getDefinitions().getIgs().get(ex.getIg());
      String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode() + "/";
      spm.target(prefix + ex.getTitle() + ".html");
    }
  }
  for (Profile p : page.getDefinitions().getPackList()) {
    spm.target(p.getId() + ".html");
  }
  // for (String url : page.getDefinitions().getMapTypes().keySet()) {
  //   spm.map(url, page.getDefinitions().getMapTypes().get(url).getPreamble());
  // }
  scanForImages(spm, page.getFolders().dstDir, page.getFolders().dstDir);
  scanForPages(spm, page.getFolders().dstDir, page.getFolders().dstDir);
  for (String url : page.getDefinitions().getRedirectList().keySet()) {
    // http://hl7.org/fhir/ = 20 chars
    spm.target(url.substring(20));
  }
  spm.save(page.getFolders().dstDir + "spec.internals");
}
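Every registration above follows the same shape: check the resource's "path" user data, normalise backslashes to forward slashes, then record it as both a path and a target. A hedged sketch of that repeated pattern factored into a single helper (hypothetical, not present in Publisher); it relies only on the methods already visible above, plus the fact that StructureDefinition, CodeSystem, ValueSet and ConceptMap all derive from CanonicalResource:

// Hypothetical helper factoring out the repeated registration pattern in produceSpecMap.
private void registerCanonical(SpecMapManager spm, CanonicalResource cr) {
  if (cr.hasUserData("path")) {
    String path = cr.getUserString("path").replace("\\", "/");
    spm.path(cr.getUrl(), path);  // canonical URL -> rendered page
    spm.target(path);             // the page itself is also a valid link target
  }
}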
Use of org.hl7.fhir.definitions.model.ImplementationGuideDefn in project kindling by HL7: class Publisher, method produceProfile.
private void produceProfile(ResourceDefn resource, Profile pack, ConstraintStructure profile, SectionTracker st, String intro, String notes, String prefix, ImplementationGuideDefn ig) throws Exception {
  File tmp = Utilities.createTempFile("tmp", ".tmp");
  String title = profile.getId();
  int level = (ig == null || ig.isCore()) ? 0 : 1;
  // you have to validate a profile, because it has to be merged with its
  // base resource to fill out all the missing bits
  // validateProfile(profile);
  ByteArrayOutputStream bs = new ByteArrayOutputStream();
  XmlSpecGenerator gen = new XmlSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../");
  gen.generate(profile.getResource());
  gen.close();
  String xml = new String(bs.toByteArray());
  bs = new ByteArrayOutputStream();
  JsonSpecGenerator genJ = new JsonSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../", page.getVersion().toCode());
  genJ.generate(profile.getResource());
  genJ.close();
  String json = new String(bs.toByteArray());
  XmlParser comp = new XmlParser();
  FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.xml");
  comp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
  s.close();
  Utilities.copyFile(new CSFile(page.getFolders().dstDir + prefix + title + ".profile.xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + title + ".profile.xml"));
  JsonParser jcomp = new JsonParser();
  s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.json");
  jcomp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
  s.close();
  // String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, profile.getResource());
  // TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + prefix + title + ".profile.shex", ".shex"));
  // shexToXhtml(prefix + title + ".profile", "ShEx statement for " + prefix + title, shex, "profile-instance:type:" + title, "Type");
  TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page);
  tgen.generate(level == 0 ? "" : "../", profile);
  tgen.close();
  String tx = TextFile.fileToString(tmp.getAbsolutePath());
  String src = TextFile.fileToString(page.getFolders().templateDir + "template-profile.html");
  src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
  if (st != null)
    src = insertSectionNumbers(src, st, title + ".html", level, null);
  else if (ig != null && !ig.isCore()) {
    src = addSectionNumbers(title + ".html", title, src, null, 1, null, ig);
    st = page.getSectionTrackerCache().get(ig.getCode() + "::" + title);
  }
  page.getHTMLChecker().registerFile(prefix + title + ".html", "StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, false);
  TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + ".html");
  new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSchematrons(new FileOutputStream(page.getFolders().dstDir + prefix + title + ".sch"), profile.getResource());
  if (pack.getExamples().size() > 0) {
    src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-examples.html");
    src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
    page.getHTMLChecker().registerFile(prefix + title + "-examples.html", "Examples for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
    TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-examples.html");
  }
  src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-definitions.html");
  src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
  if (st != null)
    src = insertSectionNumbers(src, st, title + "-definitions.html", level, null);
  page.getHTMLChecker().registerFile(prefix + title + "-definitions.html", "Definitions for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
  TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-definitions.html");
  src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-mappings.html");
  src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
  if (st != null)
    src = insertSectionNumbers(src, st, title + "-mappings.html", level, null);
  page.getHTMLChecker().registerFile(prefix + title + "-mappings.html", "Mappings for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
  TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-mappings.html");
  try {
    processQuestionnaire(resource, profile.getResource(), st, false, prefix, ig);
  } catch (Exception e) {
    e.printStackTrace();
    page.log("Questionnaire Generation Failed: " + e.getMessage(), LogMessageType.Error);
  }
  new ReviewSpreadsheetGenerator().generate(page.getFolders().dstDir + prefix + Utilities.changeFileExt((String) profile.getResource().getUserData("filename"), "-review.xls"), "Health Level Seven International", page.getGenDate(), profile.getResource(), page);
  // xml to xhtml of xml
  // first pass is to strip the xsi: stuff. seems to need double
  // processing in order to delete namespace crap
  DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  factory.setNamespaceAware(true);
  DocumentBuilder builder = factory.newDocumentBuilder();
  Document xdoc = builder.parse(new CSFileInputStream(page.getFolders().dstDir + prefix + title + ".profile.xml"));
  XmlGenerator xmlgen = new XmlGenerator();
  xmlgen.generate(xdoc.getDocumentElement(), tmp, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName());
  // reload it now
  builder = factory.newDocumentBuilder();
  xdoc = builder.parse(new CSFileInputStream(tmp.getAbsolutePath()));
  XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(level)));
  ByteArrayOutputStream b = new ByteArrayOutputStream();
  xhtml.generate(xdoc, b, "StructureDefinition", profile.getTitle(), 0, true, title + ".profile.xml.html");
  String html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-xml.html").replace("<%example%>", b.toString());
  html = page.processProfileIncludes(title + ".profile.xml.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, hasNarrative(xdoc));
  TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.xml.html");
  page.getHTMLChecker().registerFile(prefix + title + ".profile.xml.html", "StructureDefinition", HTMLLinkChecker.XHTML_TYPE, false);
  String n = prefix + title + ".profile";
  json = resource2Json(profile.getResource());
  json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml("StructureDefinition for " + profile.getResource().getDescription()) + "</p>\r\n<p><a href=\"" + title + ".profile.json\">Raw JSON</a></p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) + "\r\n</pre>\r\n</div>\r\n";
  html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-json.html").replace("<%example%>", json);
  html = page.processProfileIncludes(title + ".profile.json.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
  TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.json.html");
  // page.getEpub().registerFile(n + ".json.html", description, EPubManager.XHTML_TYPE);
  page.getHTMLChecker().registerExternal(n + ".json.html");
  tmp.delete();
}
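Early in the method the profile is serialized twice via the FHIR parsers, once as pretty-printed XML and once as pretty-printed JSON. The same step written with try-with-resources, as an illustrative sketch only (the method name and baseName parameter are hypothetical):

// Sketch of the .profile.xml / .profile.json serialization step, with the
// streams closed by try-with-resources instead of explicit close() calls.
private void writeProfileRenditions(StructureDefinition sd, String baseName) throws Exception {
  try (FileOutputStream xs = new FileOutputStream(baseName + ".profile.xml")) {
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(xs, sd);
  }
  try (FileOutputStream js = new FileOutputStream(baseName + ".profile.json")) {
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(js, sd);
  }
}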
Use of org.hl7.fhir.definitions.model.ImplementationGuideDefn in project kindling by HL7: class Publisher, method generateCodeSystemPart2.
private void generateCodeSystemPart2(CodeSystem cs) throws Exception {
  String n = cs.getUserString("filename");
  if (n == null)
    n = "codesystem-" + cs.getId();
  ImplementationGuideDefn ig = (ImplementationGuideDefn) cs.getUserData(ToolResourceUtilities.NAME_RES_IG);
  if (ig != null)
    n = ig.getCode() + File.separator + n;
  if (cs.getText().getDiv().allChildrenAreText() && (Utilities.noString(cs.getText().getDiv().allText()) || !cs.getText().getDiv().allText().matches(".*\\w.*"))) {
    RenderingContext lrc = page.getRc().copy().setLocalPrefix(ig != null ? "../" : "").setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY);
    RendererFactory.factory(cs, lrc).render(cs);
  }
  page.getVsValidator().validate(page.getValidationErrors(), n, cs, true, false);
  if (isGenerate) {
    // page.log(" ... "+n, LogMessageType.Process);
    addToResourceFeed(cs, valueSetsFeed, null);
    if (cs.getUserData("path") == null)
      cs.setUserData("path", n + ".html");
    page.setId(cs.getId());
    String sf;
    WorkGroup wg = wg(cs, "vocab");
    try {
      sf = page.processPageIncludes(n + ".html", TextFile.fileToString(page.getFolders().templateDir + "template-cs.html"), "codeSystem", null, n + ".html", cs, null, "Value Set", ig, null, wg);
    } catch (Exception e) {
      throw new Exception("Error processing " + n + ".html: " + e.getMessage(), e);
    }
    sf = addSectionNumbers(n + ".html", "template-codesystem", sf, csCounter(), ig == null ? 0 : 1, null, ig);
    TextFile.stringToFile(sf, page.getFolders().dstDir + n + ".html");
    try {
      String src = page.processPageIncludesForBook(n + ".html", TextFile.fileToString(page.getFolders().templateDir + "template-cs-book.html"), "codeSystem", cs, ig, null);
      cachePage(n + ".html", src, "Code System " + n, false);
      page.setId(null);
    } catch (Exception e) {
      throw new Exception("Error processing " + n + ".html: " + e.getMessage(), e);
    }
    IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY);
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + n + ".json");
    json.compose(s, cs);
    s.close();
    json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.json");
    json.compose(s, cs);
    s.close();
    IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".xml");
    xml.compose(s, cs);
    s.close();
    xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL);
    s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.xml");
    xml.compose(s, cs);
    s.close();
    // System.out.println(vs.getUrl());
    cloneToXhtml(n, "Definition for Code System " + cs.getName(), false, "codesystem-instance", "Code System", null, wg);
    jsonToXhtml(n, "Definition for Code System " + cs.getName(), resource2Json(cs), "codesystem-instance", "Code System", null, wg);
    ttlToXhtml(n, "Definition for Code System " + cs.getName(), resource2Ttl(cs), "codesystem-instance", "Code System", null, wg);
  }
}
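The narrative is only regenerated when the existing div is text-only and contains no word characters. That condition reads more clearly when pulled out into a predicate; a hedged sketch follows (the method name is hypothetical, the calls are the ones used above on XhtmlNode and Utilities):

// Hypothetical predicate mirroring the narrative check in generateCodeSystemPart2:
// regenerate only if the existing <div> is text-only and effectively empty.
private boolean narrativeIsEffectivelyEmpty(CodeSystem cs) {
  XhtmlNode div = cs.getText().getDiv();
  return div.allChildrenAreText()
      && (Utilities.noString(div.allText()) || !div.allText().matches(".*\\w.*"));
}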
Use of org.hl7.fhir.definitions.model.ImplementationGuideDefn in project kindling by HL7: class Publisher, method loadValueSets1.
private void loadValueSets1() throws Exception {
  page.log(" ...vocab #1", LogMessageType.Process);
  generateCodeSystemsPart1();
  generateValueSetsPart1();
  for (BindingSpecification cd : page.getDefinitions().getUnresolvedBindings()) {
    String ref = cd.getReference();
    if (ref.startsWith("http://hl7.org/fhir")) {
      // we expect to be able to resolve this
      ValueSet vs = page.getDefinitions().getValuesets().get(ref);
      if (vs == null)
        vs = page.getDefinitions().getExtraValuesets().get(ref);
      if (vs == null)
        vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
      if (vs == null) {
        if (page.getDefinitions().getBoundValueSets().containsKey(ref))
          throw new Exception("Unable to resolve the value set reference " + ref + " but found it in load list");
        throw new Exception("Unable to resolve the value set reference " + ref);
      }
      cd.setValueSet(vs);
    } else {
      ValueSet vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
      if (vs != null)
        cd.setValueSet(vs);
      else if (!ref.startsWith("http://loinc.org/vs/LL"))
        System.out.println("Unresolved value set reference: " + ref);
    }
  }
  for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) {
    for (BindingSpecification cd : ig.getUnresolvedBindings()) {
      String ref = cd.getReference();
      if (ref.contains("|"))
        ref = ref.substring(0, ref.indexOf("|"));
      ValueSet vs = page.getDefinitions().getValuesets().get(ref);
      if (vs == null)
        vs = ig.getValueSet(ref);
      if (vs == null)
        vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
      if (vs == null)
        throw new Exception("unable to resolve value set " + ref);
      cd.setValueSet(vs);
    }
  }
}
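For IG-level bindings the reference is resolved in a fixed order: strip any version suffix, then try the build's own value sets, the IG's local value sets, and finally the worker context. A hedged sketch of that fallback chain as a helper (the method name is hypothetical; the calls are exactly those used above):

// Hypothetical helper capturing the resolution order for IG bindings in loadValueSets1.
private ValueSet resolveIgValueSet(ImplementationGuideDefn ig, String ref) {
  if (ref.contains("|"))
    ref = ref.substring(0, ref.indexOf("|"));  // drop the version suffix
  ValueSet vs = page.getDefinitions().getValuesets().get(ref);
  if (vs == null)
    vs = ig.getValueSet(ref);
  if (vs == null)
    vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
  return vs;  // caller decides whether a null result is an error
}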