Use of org.hl7.fhir.r4b.model.StructureDefinition in project kindling by HL7: class Publisher, method produceProfile.
private void produceProfile(ResourceDefn resource, Profile pack, ConstraintStructure profile, SectionTracker st, String intro, String notes, String prefix, ImplementationGuideDefn ig) throws Exception {
File tmp = Utilities.createTempFile("tmp", ".tmp");
String title = profile.getId();
int level = (ig == null || ig.isCore()) ? 0 : 1;
// you have to validate a profile, because it has to be merged with its
// base resource to fill out all the missing bits
// validateProfile(profile);
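// render the XML and JSON structure views that are embedded in the profile pages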
ByteArrayOutputStream bs = new ByteArrayOutputStream();
XmlSpecGenerator gen = new XmlSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../");
gen.generate(profile.getResource());
gen.close();
String xml = new String(bs.toByteArray());
bs = new ByteArrayOutputStream();
JsonSpecGenerator genJ = new JsonSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../", page.getVersion().toCode());
genJ.generate(profile.getResource());
genJ.close();
String json = new String(bs.toByteArray());
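// write the profile itself as pretty-printed XML and JSON, and copy the XML into the examples folder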
XmlParser comp = new XmlParser();
FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.xml");
comp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
s.close();
Utilities.copyFile(new CSFile(page.getFolders().dstDir + prefix + title + ".profile.xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + title + ".profile.xml"));
JsonParser jcomp = new JsonParser();
s = new FileOutputStream(page.getFolders().dstDir + prefix + title + ".profile.json");
jcomp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource());
s.close();
// String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, profile.getResource());
// TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + prefix +title + ".profile.shex", ".shex"));
// shexToXhtml(prefix +title + ".profile", "ShEx statement for " + prefix +title, shex, "profile-instance:type:" + title, "Type");
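// generate the terminology binding notes that appear on the profile page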
TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page);
tgen.generate(level == 0 ? "" : "../", profile);
tgen.close();
String tx = TextFile.fileToString(tmp.getAbsolutePath());
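// build the main profile page from its template, number its sections, and register it with the HTML link checker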
String src = TextFile.fileToString(page.getFolders().templateDir + "template-profile.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
if (st != null)
src = insertSectionNumbers(src, st, title + ".html", level, null);
else if (ig != null && !ig.isCore()) {
src = addSectionNumbers(title + ".html", title, src, null, 1, null, ig);
st = page.getSectionTrackerCache().get(ig.getCode() + "::" + title);
}
page.getHTMLChecker().registerFile(prefix + title + ".html", "StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, false);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + ".html");
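// generate the schematron (.sch) constraints file for the profile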
new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSchematrons(new FileOutputStream(page.getFolders().dstDir + prefix + title + ".sch"), profile.getResource());
if (pack.getExamples().size() > 0) {
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-examples.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
page.getHTMLChecker().registerFile(prefix + title + "-examples.html", "Examples for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-examples.html");
}
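// generate the definitions and mappings pages for the profile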
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-definitions.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
if (st != null)
src = insertSectionNumbers(src, st, title + "-definitions.html", level, null);
page.getHTMLChecker().registerFile(prefix + title + "-definitions.html", "Definitions for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-definitions.html");
src = TextFile.fileToString(page.getFolders().templateDir + "template-profile-mappings.html");
src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
if (st != null)
src = insertSectionNumbers(src, st, title + "-mappings.html", level, null);
page.getHTMLChecker().registerFile(prefix + title + "-mappings.html", "Mappings for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true);
TextFile.stringToFile(src, page.getFolders().dstDir + prefix + title + "-mappings.html");
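// generate the derived questionnaire (failures are logged but do not stop the build) and the review spreadsheet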
try {
processQuestionnaire(resource, profile.getResource(), st, false, prefix, ig);
} catch (Exception e) {
e.printStackTrace();
page.log("Questionnaire Generation Failed: " + e.getMessage(), LogMessageType.Error);
}
new ReviewSpreadsheetGenerator().generate(page.getFolders().dstDir + prefix + Utilities.changeFileExt((String) profile.getResource().getUserData("filename"), "-review.xls"), "Health Level Seven International", page.getGenDate(), profile.getResource(), page);
// convert the xml to an xhtml rendering
// the first pass strips the xsi: attributes; double processing seems to be
// needed to remove the stray namespace declarations
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
Document xdoc = builder.parse(new CSFileInputStream(page.getFolders().dstDir + prefix + title + ".profile.xml"));
XmlGenerator xmlgen = new XmlGenerator();
xmlgen.generate(xdoc.getDocumentElement(), tmp, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName());
// reload it now
builder = factory.newDocumentBuilder();
xdoc = builder.parse(new CSFileInputStream(tmp.getAbsolutePath()));
XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(level)));
ByteArrayOutputStream b = new ByteArrayOutputStream();
xhtml.generate(xdoc, b, "StructureDefinition", profile.getTitle(), 0, true, title + ".profile.xml.html");
String html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-xml.html").replace("<%example%>", b.toString());
html = page.processProfileIncludes(title + ".profile.xml.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, hasNarrative(xdoc));
TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.xml.html");
page.getHTMLChecker().registerFile(prefix + title + ".profile.xml.html", "StructureDefinition", HTMLLinkChecker.XHTML_TYPE, false);
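// build the JSON example page: the profile is rendered as escaped JSON inside the example template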
String n = prefix + title + ".profile";
json = resource2Json(profile.getResource());
json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml("StructureDefinition for " + profile.getResource().getDescription()) + "</p>\r\n<p><a href=\"" + title + ".profile.json\">Raw JSON</a></p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) + "\r\n</pre>\r\n</div>\r\n";
html = TextFile.fileToString(page.getFolders().templateDir + "template-profile-example-json.html").replace("<%example%>", json);
html = page.processProfileIncludes(title + ".profile.json.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName()) + "/" + pack.getId() + "/" + profile.getId(), intro, notes, ig, false, false);
TextFile.stringToFile(html, page.getFolders().dstDir + prefix + title + ".profile.json.html");
// page.getEpub().registerFile(n + ".json.html", description, EPubManager.XHTML_TYPE);
page.getHTMLChecker().registerExternal(n + ".json.html");
tmp.delete();
}
Use of org.hl7.fhir.r4b.model.StructureDefinition in project kindling by HL7: class Publisher, method genProfiledTypeProfile.
private void genProfiledTypeProfile(ProfiledType pt) throws Exception {
StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generate(pt, page.getValidationErrors());
if (page.getProfiles().has(profile.getUrl()))
throw new Exception("Duplicate Profile URL " + profile.getUrl());
page.getProfiles().see(profile, page.packageInfo());
pt.setProfile(profile);
// todo: what to do in the narrative?
}
Use of org.hl7.fhir.r4b.model.StructureDefinition in project kindling by HL7: class Publisher, method checkElement.
private void checkElement(StructureDefinition sd, ElementDefinition ed, boolean inDiff) {
check(ed.hasPath(), sd, "Element has no path");
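// check the types declared on the element: no duplicate type codes, each type code must be known, and any profile / target profile references must resolve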
Set<String> codes = new HashSet<String>();
for (TypeRefComponent tr : ed.getType()) {
String tc = tr.getWorkingCode();
if (codes.contains(tc))
check(false, sd, ed.getPath() + ": type '" + tc + "' is duplicated");
if ((!inDiff || tr.hasCode()) && tc != null)
  if (ed.getPath().contains("."))
    check(page.getDefinitions().hasBaseType(tc) || tc.equals("Resource"), sd, ed.getPath() + ": type '" + tc + "' is not valid (a)");
  else if (sd.hasBaseDefinition()) {
    if (sd.getDerivation() == TypeDerivationRule.CONSTRAINT)
      check(page.getDefinitions().hasConcreteResource(tc) || page.getDefinitions().hasBaseType(tc), sd, ed.getPath() + ": type '" + tc + "' is not valid (b)");
    else
      check(page.getDefinitions().hasAbstractResource(tc) || tc.equals("Element"), sd, ed.getPath() + ": type '" + tc + "' is not valid (c)");
  }
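// a profile reference of the form 'url#fragment' must resolve to a StructureDefinition whose snapshot contains an element with a fixed uri equal to the fragment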
if (tr.hasProfile()) {
check(tr.getProfile().size() == 1, sd, ed.getPath() + ": multiple profiles found: " + tr.getProfile());
String pt = tr.getProfile().get(0).getValue();
if (pt.contains("#")) {
String[] parts = pt.split("\\#");
StructureDefinition exd = page.getWorkerContext().fetchResource(StructureDefinition.class, parts[0]);
if (exd == null)
check(false, sd, ed.getPath() + ": profile '" + pt + "' is not valid (definition not found)");
else {
ElementDefinition ex = null;
for (ElementDefinition et : exd.getSnapshot().getElement())
  if (et.hasFixed() && et.getFixed() instanceof UriType && ((UriType) et.getFixed()).asStringValue().equals(parts[1]))
    ex = et;
check(ex != null, sd, ed.getPath() + ": profile '" + pt + "' is not valid (inner path not found)");
}
} else
check((page.getWorkerContext().hasResource(StructureDefinition.class, pt)) || isStringPattern(tail(pt)), sd, ed.getPath() + ": profile '" + pt + "' is not valid (d)");
}
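// target profile references are checked the same way as profile references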
if (tr.hasTargetProfile()) {
String pt = tr.getTargetProfile().get(0).getValue();
if (pt.contains("#")) {
String[] parts = pt.split("\\#");
StructureDefinition exd = page.getWorkerContext().fetchResource(StructureDefinition.class, parts[0]);
if (exd == null)
check(false, sd, ed.getPath() + ": target profile '" + pt + "' is not valid (definition not found)");
else {
ElementDefinition ex = null;
for (ElementDefinition et : exd.getSnapshot().getElement())
  if (et.hasFixed() && et.getFixed() instanceof UriType && ((UriType) et.getFixed()).asStringValue().equals(parts[1]))
    ex = et;
check(ex != null, sd, ed.getPath() + ": target profile '" + pt + "' is not valid (inner path not found)");
}
} else
check((page.getWorkerContext().hasResource(StructureDefinition.class, pt)) || isStringPattern(tail(pt)), sd, ed.getPath() + ": target profile '" + pt + "' is not valid (d)");
}
}
}
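The check(boolean, StructureDefinition, String) helper used above is not part of this excerpt. A minimal sketch of what it might look like, assuming it records failed checks as ValidationMessage entries on page.getValidationErrors() (an assumption, not the kindling implementation):

private void check(boolean pass, StructureDefinition sd, String msg) {
  // hypothetical sketch: record a validation error against the profile's URL when the condition fails
  if (!pass)
    page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, sd.getUrl(), msg, IssueSeverity.ERROR));
}

The actual helper may log the message or aggregate it differently; only the signature is inferred from the calls above.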
Use of org.hl7.fhir.r4b.model.StructureDefinition in project kindling by HL7: class Publisher, method genPrimitiveTypeProfile.
private void genPrimitiveTypeProfile(PrimitiveType t) throws Exception {
StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generate(t);
if (page.getProfiles().has(profile.getUrl()))
throw new Exception("Duplicate Profile URL " + profile.getUrl());
page.getProfiles().see(profile, page.packageInfo());
t.setProfile(profile);
// DataTypeTableGenerator dtg = new DataTypeTableGenerator(page.getFolders().dstDir, page, t.getCode(), true);
// t.setProfile(profile);
// t.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div"));
// t.getProfile().getText().getDiv().getChildNodes().add(dtg.generate(t));
}
Use of org.hl7.fhir.r4b.model.StructureDefinition in project kindling by HL7: class Publisher, method processProfiles.
@SuppressWarnings("unchecked")
private void processProfiles() throws Exception {
page.log(" ...process profiles (base)", LogMessageType.Process);
// first, for each type and resource, we build its master profile
for (DefinedCode t : page.getDefinitions().getPrimitives().values()) {
if (t instanceof PrimitiveType)
genPrimitiveTypeProfile((PrimitiveType) t);
else
genPrimitiveTypeProfile((DefinedStringPattern) t);
}
genXhtmlProfile();
for (TypeDefn t : page.getDefinitions().getTypes().values()) genTypeProfile(t);
for (TypeDefn t : page.getDefinitions().getInfrastructure().values()) genTypeProfile(t);
page.log(" ...process profiles (resources)", LogMessageType.Process);
for (ResourceDefn r : page.getDefinitions().getBaseResources().values()) {
r.setConformancePack(makeConformancePack(r));
r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generate(r.getConformancePack(), r, "core", false));
if (page.getProfiles().has(r.getProfile().getUrl()))
throw new Exception("Duplicate Profile URL " + r.getProfile().getUrl());
page.getProfiles().see(r.getProfile(), page.packageInfo());
ResourceTableGenerator rtg = new ResourceTableGenerator(page.getFolders().dstDir, page, null, true, page.getVersion());
r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div"));
r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "", false));
}
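// then the concrete resources, in sorted order, get the same treatment: master profile, duplicate-URL check, and a generated summary table in the narrative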
for (String rn : page.getDefinitions().sortedResourceNames()) {
ResourceDefn r = page.getDefinitions().getResourceByName(rn);
r.setConformancePack(makeConformancePack(r));
r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generate(r.getConformancePack(), r, "core", false));
if (page.getProfiles().has(r.getProfile().getUrl()))
throw new Exception("Duplicate Profile URL " + r.getProfile().getUrl());
page.getProfiles().see(r.getProfile(), page.packageInfo());
ResourceTableGenerator rtg = new ResourceTableGenerator(page.getFolders().dstDir, page, null, true, page.getVersion());
r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div"));
r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "", false));
}
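// resource templates follow the same pattern (note the different boolean passed to generate and to the table generator)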
for (ResourceDefn r : page.getDefinitions().getResourceTemplates().values()) {
r.setConformancePack(makeConformancePack(r));
r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generate(r.getConformancePack(), r, "core", true));
ResourceTableGenerator rtg = new ResourceTableGenerator(page.getFolders().dstDir, page, null, true, page.getVersion());
r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div"));
r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "", true));
if (page.getProfiles().has(r.getProfile().getUrl()))
throw new Exception("Duplicate Profile URL " + r.getProfile().getUrl());
page.getProfiles().see(r.getProfile(), page.packageInfo());
}
for (ProfiledType pt : page.getDefinitions().getConstraints().values()) {
genProfiledTypeProfile(pt);
}
page.log(" ...process profiles (extensions)", LogMessageType.Process);
for (StructureDefinition ex : page.getWorkerContext().getExtensionDefinitions()) processExtension(ex);
for (ResourceDefn r : page.getDefinitions().getResources().values()) {
// boolean logged = false;
for (Profile ap : r.getConformancePackages()) {
// logged = true;
for (ConstraintStructure p : ap.getProfiles()) processProfile(ap, p, ap.getId(), r);
}
}
page.log(" ...process profiles (packs)", LogMessageType.Process);
// we have profiles scoped by resources, and stand alone profiles
for (Profile ap : page.getDefinitions().getPackList()) {
// page.log(" ... pack "+ap.getId(), LogMessageType.Process);
for (ConstraintStructure p : ap.getProfiles()) processProfile(ap, p, ap.getId(), null);
}
page.log(" ...process logical models", LogMessageType.Process);
for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) {
for (LogicalModel lm : ig.getLogicalModels()) {
page.log(" ...process logical model " + lm.getId(), LogMessageType.Process);
if (lm.getDefinition() == null)
lm.setDefinition(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir, page.getUml(), page.getRc()).generateLogicalModel(ig, lm.getResource()));
}
}
// now, validate the profiles
for (Profile ap : page.getDefinitions().getPackList()) for (ConstraintStructure p : ap.getProfiles()) validateProfile(p);
for (ResourceDefn r : page.getDefinitions().getResources().values()) for (Profile ap : r.getConformancePackages()) for (ConstraintStructure p : ap.getProfiles()) validateProfile(p);
page.log(" ...Check FHIR Path Expressions", LogMessageType.Process);
StringBuilder b = new StringBuilder();
FHIRPathEngine fp = new FHIRPathEngine(page.getWorkerContext());
fp.setHostServices(page.getExpressionResolver());
for (FHIRPathUsage p : fpUsages) {
checkExpression(b, fp, p);
}
TextFile.stringToFile(b.toString(), Utilities.path(page.getFolders().dstDir, "fhirpaths.txt"));
checkAllOk();
}