Usage of org.hl7.fhir.definitions.model.Definitions in the HL7 kindling project — class Publisher, method produceProfile.
/**
 * Generates the full set of publication artifacts for one profile
 * (StructureDefinition) in a conformance package: the canonical XML/JSON
 * renditions, the main / examples / definitions / mappings HTML pages, a
 * questionnaire, a review spreadsheet, and pretty-printed XML/JSON source
 * views.
 *
 * @param resource the resource the profile constrains (may be null; the
 *          profile resource's own type is then used when building page links)
 * @param pack the conformance package (Profile) this profile belongs to
 * @param profile the profile to render
 * @param st section tracker used for heading numbers (may be null)
 * @param intro introduction fragment passed through to the page templates
 * @param notes notes fragment passed through to the page templates
 * @param prefix output-directory prefix under dstDir ("" for the core spec)
 * @param ig the implementation guide context; core means the main specification
 * @throws Exception on any generation or I/O failure
 */
private void produceProfile(ResourceDefn resource, Profile pack, ConstraintStructure profile, SectionTracker st, String intro, String notes, String prefix, ImplementationGuideDefn ig) throws Exception {
File tmp = Utilities.createTempFile("tmp", ".tmp");
String title = profile.getId();
// non-core IG content is nested one directory/heading level deeper
int level = (ig == null || ig.isCore()) ? 0 : 1;
// you have to validate a profile, because it has to be merged with its
// base resource to fill out all the missing bits
// validateProfile(profile);
// render the structure as an XML spec fragment for embedding in the page.
// NOTE(review): ig is dereferenced here and below, although the level
// computation above guards against ig == null - confirm ig is never null
ByteArrayOutputStream bs = new ByteArrayOutputStream();
XmlSpecGenerator gen = new XmlSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../");
gen.generate(profile.getResource());
gen.close();
String xml = new String(bs.toByteArray());
// render the same structure as a JSON spec fragment
bs = new ByteArrayOutputStream();
JsonSpecGenerator genJ = new JsonSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../", page.getVersion().toCode());
genJ.generate(profile.getResource());
genJ.close();
String json = new String(bs.toByteArray());
// write the canonical XML rendition, and mirror it into examples/
XmlParser comp = new XmlParser();
FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.xml");
comp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
s.close();
Utilities.copyFile(new CSFile(page.getFolders().dstDir + prefix + title + ".profile.xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + title + ".profile.xml"));
// write the canonical JSON rendition
JsonParser jcomp = new JsonParser();
s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.json");
jcomp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
s.close();
// String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, profile.getResource());
// TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + prefix +title + ".profile.shex", ".shex"));
// shexToXhtml(prefix +title + ".profile", "ShEx statement for " + prefix +title, shex, "profile-instance:type:" + title, "Type");
// generate the terminology-binding notes into the temp file, then read back
TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page);
tgen.generate(level == 0 ? "" : "../", profile);
tgen.close();
String tx = TextFile.fileToString(tmp.getAbsolutePath());
// main profile page
String src = TextFile.fileToString(page.getFolders().templateDir + "template-profile.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
// insert section numbers from the tracker when one exists; otherwise, for
// non-core IG content, number afresh and pick up the tracker created for it
if (st != null)
src = insertSectionNumbers(src, st, title + ".html", level, null);
else if (ig != null && !ig.isCore()) {
src = addSectionNumbers(title + ".html", title, src, null, 1, null, ig);
st = page.getSectionTrackerCache().get(ig.getCode() + "::" + title);
}
page.getHTMLChecker().registerFile(prefix + title + ".html", "StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, false);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + ".html");
// schematron constraints derived from the profile's invariants
new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSchematrons(new FileOutputStream(page.getFolders().dstDir + prefix + title + ".sch"), profile.getResource());
// examples page - only produced when the package actually has examples
if (pack.getExamples().size() > 0) {
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-examples.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
page.getHTMLChecker().registerFile(prefix + title + "-examples.html", "Examples for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-examples.html");
}
// detailed definitions page
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-definitions.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
if (st != null)
src = insertSectionNumbers(src, st, title + "-definitions.html", level, null);
page.getHTMLChecker().registerFile(prefix + title + "-definitions.html", "Definitions for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-definitions.html");
// mappings page
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-mappings.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
if (st != null)
src = insertSectionNumbers(src, st, title + "-mappings.html", level, null);
page.getHTMLChecker().registerFile(prefix + title + "-mappings.html", "Mappings for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-mappings.html");
// questionnaire generation is best-effort: failures are logged, not fatal
try {
processQuestionnaire(resource, profile.getResource(), st, false, prefix, ig);
} catch (Exception e) {
e.printStackTrace();
page.log("Questionnaire Generation Failed: " + e.getMessage(), LogMessageType.Error);
}
// spreadsheet used for ballot/committee review of the profile
new ReviewSpreadsheetGenerator().generate(page.getFolders().dstDir + prefix + Utilities.changeFileExt((String) profile.getResource().getUserData("filename"), "-review.xls"), "Health Level Seven International", page.getGenDate(), profile.getResource(), page);
// xml to xhtml of xml
// first pass is to strip the xsi: stuff. seems to need double
// processing in order to delete namespace crap
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
Document xdoc = builder.parse(new CSFileInputStream(page.getFolders().dstDir + prefix + title + ".profile.xml"));
XmlGenerator xmlgen = new XmlGenerator();
xmlgen.generate(xdoc.getDocumentElement(), tmp, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName());
// reload it now
builder = factory.newDocumentBuilder();
xdoc = builder.parse(new CSFileInputStream(tmp.getAbsolutePath()));
// pretty, syntax-highlighted XML source page
XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(level)));
ByteArrayOutputStream b = new ByteArrayOutputStream();
xhtml.generate(xdoc, b, "StructureDefinition", profile.getTitle(), 0, true, title + ".profile.xml.html");
String html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-xml.html").replace("<%example%>", b.toString());
html = page.processProfileIncludes(title + ".profile.xml.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, hasNarrative(xdoc));
TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.xml.html");
page.getHTMLChecker().registerFile(prefix + title + ".profile.xml.html", "StructureDefinition", HTMLLinkChecker.XHTML_TYPE, false);
// pretty JSON source page
String n = prefix + title + ".profile";
json = resource2Json(profile.getResource());
json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml("StructureDefinition for " + profile.getResource().getDescription()) + "</p>\r\n<p><a href=\"" + title + ".profile.json\">Raw JSON</a></p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) + "\r\n</pre>\r\n</div>\r\n";
html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-json.html").replace("<%example%>", json);
html = page.processProfileIncludes(title + ".profile.json.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.json.html");
// page.getEpub().registerFile(n + ".json.html", description, EPubManager.XHTML_TYPE);
page.getHTMLChecker().registerExternal(n + ".json.html");
tmp.delete();
}
Usage of org.hl7.fhir.definitions.model.Definitions in the HL7 kindling project — class Publisher, method checkBundleURLs.
/**
 * Verifies that every entry in a conformance bundle produced by the spec
 * carries a fullUrl of the form http://hl7.org/fhir/[type]/[id]. This is not
 * true of bundles in general, but it is true of all the bundles this
 * publisher emits. Violations are recorded as validation messages.
 *
 * @param bnd - the bundle to check
 */
private void checkBundleURLs(Bundle bnd) {
  int entryNo = 0;
  for (BundleEntryComponent entry : bnd.getEntry()) {
    entryNo++;
    // entries flagged as external are exempt from the fullUrl convention
    if (entry.getResource().hasUserData("external.url"))
      continue;
    var res = entry.getResource();
    if (!entry.hasFullUrl()) {
      page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle " + bnd.getId(), "no Full URL on entry " + Integer.toString(entryNo), IssueSeverity.ERROR));
    } else if (!entry.getFullUrl().endsWith("/" + res.getResourceType().toString() + "/" + res.getId()) && res.getResourceType() != ResourceType.CodeSystem) {
      page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle " + bnd.getId(), "URL doesn't match resource and id on entry " + Integer.toString(entryNo) + " : " + entry.getFullUrl() + " should end with /" + res.getResourceType().toString() + "/" + res.getId(), IssueSeverity.ERROR));
    } else if (!entry.getFullUrl().equals("http://hl7.org/fhir/" + res.getResourceType().toString() + "/" + res.getId()) && res.getResourceType() != ResourceType.CodeSystem) {
      page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle " + bnd.getId(), "URL is non-FHIR " + Integer.toString(entryNo) + " : " + entry.getFullUrl() + " should start with http://hl7.org/fhir/ for HL7-defined artifacts", IssueSeverity.WARNING));
    }
    // canonical resources in FHIR space must also carry the build's version
    if (res instanceof CanonicalResource) {
      CanonicalResource canonical = (CanonicalResource) res;
      String url = canonical.getUrl();
      if (url != null && url.startsWith("http://hl7.org/fhir") && !SIDUtilities.isKnownSID(url)) {
        if (!page.getVersion().toCode().equals(canonical.getVersion()))
          page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle " + bnd.getId(), "definitions in FHIR space should have the correct version (url = " + url + ", version = " + canonical.getVersion() + ")", IssueSeverity.ERROR));
      }
    }
  }
}
Usage of org.hl7.fhir.definitions.model.Definitions in the HL7 kindling project — class PageProcessor, method cdHeader.
/**
 * Builds the navigation-tab header for the ContactDetail datatype pages.
 * A tab is rendered as active when no mode is selected or when its mode
 * matches the requested one.
 *
 * @param mode the currently selected tab mode, or null for the default
 * @return the HTML fragment for the tab bar
 */
private String cdHeader(String mode) {
  boolean unset = mode == null;
  StringBuilder html = new StringBuilder("<ul class=\"nav nav-tabs\">");
  html.append(makeHeaderTab("Contact Detail", "contactdetail.html", unset || "base".equals(mode)));
  html.append(makeHeaderTab("Examples", "contactdetail-examples.html", unset || "examples".equals(mode)));
  html.append(makeHeaderTab("Detailed Descriptions", "contactdetail-definitions.html", unset || "definitions".equals(mode)));
  html.append(makeHeaderTab("Mappings", "contactdetail-mappings.html", unset || "mappings".equals(mode)));
  html.append("</ul>\r\n");
  return html.toString();
}
Usage of org.hl7.fhir.definitions.model.Definitions in the HL7 kindling project — class PageProcessor, method genStatusCodes.
/**
 * Builds the HTML for the status-codes analysis page: a table that
 * cross-references, for each element path registered in the status-code map,
 * which columns of the canonical resource-status grid it uses, followed by a
 * per-code section listing the definitions of all matching codes.
 *
 * The map definitions.getStatusCodes() holds one row (ArrayList of cells) per
 * element path, plus special rows keyed "@code", "@codes" and "@issues" that
 * describe the grid itself. Rows may be shorter than the column count, so
 * every cell access is bounds-checked (previously the "@code" header row and
 * the per-code sections read row.get(i) unguarded and could throw
 * IndexOutOfBoundsException on a short row).
 *
 * @return the generated HTML fragment
 * @throws Exception if an element-definition or code-system lookup fails
 */
private String genStatusCodes() throws Exception {
  StringBuilder b = new StringBuilder();
  b.append("<table border=\"1\">\r\n");
  // colcount = highest column index holding a non-empty cell in any row.
  // NOTE(review): the loops below iterate i < colcount, so the cell at index
  // colcount itself is never rendered - confirm this off-by-one is intended.
  int colcount = 0;
  for (ArrayList<String> row : definitions.getStatusCodes().values()) {
    int rc = 0;
    for (int i = 0; i < row.size(); i++)
      if (!Utilities.noString(row.get(i)))
        rc = i;
    if (rc > colcount)
      colcount = rc;
  }
  List<String> names = new ArrayList<String>(definitions.getStatusCodes().keySet());
  Collections.sort(names);
  // header row: the canonical codes, linked into the resource-status code system
  ArrayList<String> row = definitions.getStatusCodes().get("@code");
  b.append("<tr>");
  b.append("<td><b>code</b></td>");
  for (int i = 0; i < colcount; i++) {
    // bounds guard, consistent with the other row renderings below
    String code = i < row.size() ? row.get(i) : "";
    b.append("<td><b><a href=\"codesystem-resource-status.html#resource-status-" + code + "\">").append(code).append("</a></b></td>");
  }
  b.append("</tr>\r\n");
  row = definitions.getStatusCodes().get("@codes");
  b.append("<tr>");
  b.append("<td><b>stated codes</b></td>");
  for (int i = 0; i < colcount; i++)
    b.append("<td>").append(i < row.size() ? row.get(i) : "").append("</td>");
  b.append("</tr>\r\n");
  // "actual codes" row: union of codes each element path uses in column i
  b.append("<tr>");
  b.append("<td>actual codes</td>");
  for (int i = 0; i < colcount; i++) {
    Set<String> codeset = new HashSet<String>();
    for (String n : names) {
      if (!n.startsWith("@")) {
        row = definitions.getStatusCodes().get(n);
        String c = i < row.size() ? row.get(i) : "";
        if (!Utilities.noString(c)) {
          codeset.add(c);
        }
      }
    }
    b.append("<td>").append(separated(codeset, ", ")).append("</td>");
  }
  b.append("</tr>\r\n");
  // issues row: columns with an open issue are highlighted in red
  row = definitions.getStatusCodes().get("@issues");
  b.append("<tr>");
  b.append("<td><b>Issues?</b></td>");
  for (int i = 0; i < colcount; i++) {
    String s = i < row.size() ? row.get(i) : "";
    b.append("<td").append(Utilities.noString(s) ? "" : " style=\"background-color: #ffcccc\"").append(">").append(s).append("</td>");
  }
  b.append("</tr>\r\n");
  // one row per element path; modifier elements are rendered in bold
  for (String n : names) {
    if (!n.startsWith("@")) {
      b.append("<tr>");
      ElementDefn ed = getElementDefn(n);
      if (ed == null || !ed.isModifier())
        b.append("<td>").append(linkToPath(n)).append("</td>");
      else
        b.append("<td><b>").append(linkToPath(n)).append("</b></td>");
      row = definitions.getStatusCodes().get(n);
      for (int i = 0; i < colcount; i++)
        b.append("<td>").append(i < row.size() ? row.get(i) : "").append("</td>");
      b.append("</tr>\r\n");
    }
  }
  b.append("</table>\r\n");
  // per-code sections: the code system's definition of each canonical code,
  // plus the definitions of all element-specific codes that map onto it
  CodeSystem cs = getCodeSystems().get("http://hl7.org/fhir/resource-status");
  row = definitions.getStatusCodes().get("@code");
  for (int i = 0; i < colcount; i++) {
    String code = i < row.size() ? row.get(i) : "";
    String definition = CodeSystemUtilities.getCodeDefinition(cs, code);
    Set<String> dset = new HashSet<String>();
    for (String n : names) {
      if (!n.startsWith("@")) {
        ArrayList<String> rowN = definitions.getStatusCodes().get(n);
        String c = i < rowN.size() ? rowN.get(i) : "";
        String d = getDefinition(n, c);
        if (!Utilities.noString(d))
          dset.add(d);
      }
    }
    b.append("<hr/>\r\n");
    b.append("<h4>").append(code).append("</h4>\r\n");
    b.append("<p>").append(Utilities.escapeXml(definition)).append("</p>\r\n");
    b.append("<p>Definitions for matching codes:</p>\r\n");
    b.append("<ul>\r\n");
    for (String s : sorted(dset))
      b.append("<li>").append(Utilities.escapeXml(s)).append("</li>\r\n");
    b.append("</ul>\r\n");
  }
  return b.toString();
}
Usage of org.hl7.fhir.definitions.model.Definitions in the HL7 kindling project — class PageProcessor, method mappingsExtension.
/**
 * Renders the mapping tables for the given extension's StructureDefinition.
 *
 * @param ed the structure definition to render mappings for
 * @return the generated mappings HTML
 * @throws IOException if the generator fails to write its output
 */
private String mappingsExtension(StructureDefinition ed) throws IOException {
  final MappingsGenerator generator = new MappingsGenerator(definitions);
  generator.generate(ed);
  return generator.getMappings();
}
Aggregations