Usage of org.hl7.fhir.utilities.graphql.Package in project redmatch (by aehrc): class RedmatchGrammarCodeSystemGenerator, method main.
/**
 * Command-line entry point. Generates a validation code system for the given FHIR
 * package and writes it as pretty-printed JSON to the output file.
 *
 * <p>Arguments: FHIR package name, FHIR package version, output file path.
 * Exits with a non-zero status on invalid usage or on any generation/write failure.
 */
public static void main(String[] args) {
  if (args.length != 3) {
    System.out.println("Three arguments are required: FHIR package name, FHIR package version, output file.");
    // Invalid usage is an error: signal it to the caller with a non-zero exit code.
    System.exit(1);
  }
  FhirContext ctx = FhirContext.forR4();
  RedmatchGrammarCodeSystemGenerator generator = new RedmatchGrammarCodeSystemGenerator(new Gson(), ctx);
  CodeSystem cs;
  try {
    cs = generator.createCodeSystem(new VersionedFhirPackage(args[0], args[1]));
  } catch (IOException e) {
    // Generation failed: report and abort rather than attempting to serialise a null resource.
    e.printStackTrace();
    System.exit(1);
    return; // unreachable; makes definite assignment of cs explicit to the compiler
  }
  try (FileWriter fw = new FileWriter(args[2])) {
    ctx.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(cs, fw);
  } catch (IOException e) {
    e.printStackTrace();
    System.exit(1);
  }
}
Usage of org.hl7.fhir.utilities.graphql.Package in project redmatch (by aehrc): class RedmatchGrammarCodeSystemGenerator, method createCodeSystem.
/**
 * Creates a validation code system for a version of FHIR or an implementation guide.
 *
 * @param fhirPackage The NPM package of a version of FHIR or implementation guide, e.g., hl7.fhir.r4.core.
 * @param progressReporter An object to report progress. Can be null.
 * @return A code system that can be used for validation.
 * @throws IOException If there are issues reading the files.
 */
public CodeSystem createCodeSystem(VersionedFhirPackage fhirPackage, ProgressReporter progressReporter) throws IOException {
if (progressReporter != null) {
progressReporter.reportProgress(Progress.reportStart("Creating code system for FHIR package " + fhirPackage));
}
// Create a set with all the FHIR packages required
Set<VersionedFhirPackage> packages = new HashSet<>();
packages.add(fhirPackage);
packages.addAll(getDependencies(fhirPackage));
// Index every structure definition in the package set, both by code (the resource id
// minus the "StructureDefinition/" prefix) and by canonical URL.
// NOTE(review): these maps are instance fields, so invoking this method twice on the
// same generator accumulates entries — presumably the generator is single-use; confirm
// before reusing an instance.
for (VersionedFhirPackage pack : packages) {
FhirUtils.getStructureDefinitions(ctx, pack).forEach(e -> {
structureDefinitionsMapByCode.put(e.getId().replace("StructureDefinition/", ""), e);
structureDefinitionsMapByUrl.put(e.getUrl(), e);
});
}
// Partition the structure definitions into three disjoint groups:
// complex types = specializations of kind COMPLEXTYPE,
// resource profiles = constraints of kind RESOURCE,
// resources = specializations of kind RESOURCE.
Set<StructureDefinition> complexTypes = structureDefinitionsMapByCode.values().stream().filter(e -> e.hasDerivation() && e.getDerivation().equals(StructureDefinition.TypeDerivationRule.SPECIALIZATION) && e.hasKind() && e.getKind().equals(StructureDefinition.StructureDefinitionKind.COMPLEXTYPE)).collect(Collectors.toSet());
log.info("Found " + complexTypes.size() + " complex types");
Set<StructureDefinition> resourceProfiles = structureDefinitionsMapByCode.values().stream().filter(e -> e.hasDerivation() && e.getDerivation().equals(StructureDefinition.TypeDerivationRule.CONSTRAINT) && e.hasKind() && e.getKind().equals(StructureDefinition.StructureDefinitionKind.RESOURCE)).collect(Collectors.toSet());
log.info("Found " + resourceProfiles.size() + " resource profiles");
Set<StructureDefinition> resources = structureDefinitionsMapByCode.values().stream().filter(e -> e.hasDerivation() && e.getDerivation().equals(StructureDefinition.TypeDerivationRule.SPECIALIZATION) && e.hasKind() && e.getKind().equals(StructureDefinition.StructureDefinitionKind.RESOURCE)).collect(Collectors.toSet());
log.info("Found " + resources.size() + " resources");
// div converts an absolute item count into a percentage: i / div == i * 100 / total.
int total = complexTypes.size() + resourceProfiles.size() + resources.size();
double div = total / 100.0;
// Initialise profile children map
resourceProfiles.forEach(sd -> profileChildrenMap.put(sd.getUrl(), new HashSet<>()));
resources.forEach(sd -> profileChildrenMap.put(sd.getUrl(), new HashSet<>()));
// Create temporary profile parents map
Map<String, Set<String>> profileParentsMap = new HashMap<>();
resourceProfiles.forEach(sd -> {
Set<String> parentUrls = new HashSet<>();
profileParentsMap.put(sd.getUrl(), parentUrls);
// Walk up the baseDefinition chain, collecting every CONSTRAINT ancestor.
StructureDefinition baseDefinition = structureDefinitionsMapByUrl.get(sd.getBaseDefinition());
while (baseDefinition != null && baseDefinition.getDerivation() == StructureDefinition.TypeDerivationRule.CONSTRAINT) {
parentUrls.add(baseDefinition.getUrl());
baseDefinition = structureDefinitionsMapByUrl.get(baseDefinition.getBaseDefinition());
}
// The parent resource will be a specialization
if (baseDefinition != null) {
parentUrls.add(baseDefinition.getUrl());
}
});
// Use the temporary map to populate the children map
// NOTE(review): this assumes every collected parent URL was registered in
// profileChildrenMap above (i.e. is itself a resource or resource profile of kind
// RESOURCE); a parent outside those sets would cause an NPE here — TODO confirm.
profileParentsMap.keySet().forEach(key -> {
Set<String> parents = profileParentsMap.get(key);
if (parents != null) {
parents.forEach(val -> profileChildrenMap.get(val).add(key));
}
});
CodeSystem codeSystem = createBaseCodeSystem(fhirPackage);
// Process all three groups, reporting cumulative progress as a percentage.
int i = 0;
for (StructureDefinition structureDefinition : complexTypes) {
processStructureDefinition(codeSystem, structureDefinition, false, "");
i++;
if (progressReporter != null) {
progressReporter.reportProgress(Progress.reportProgress((int) Math.floor(i / div)));
}
}
for (StructureDefinition structureDefinition : resourceProfiles) {
processStructureDefinition(codeSystem, structureDefinition, false, "");
i++;
if (progressReporter != null) {
progressReporter.reportProgress(Progress.reportProgress((int) Math.floor(i / div)));
}
}
for (StructureDefinition structureDefinition : resources) {
processStructureDefinition(codeSystem, structureDefinition, false, "");
i++;
if (progressReporter != null) {
progressReporter.reportProgress(Progress.reportProgress((int) Math.floor(i / div)));
}
}
if (progressReporter != null) {
progressReporter.reportProgress(Progress.reportEnd());
}
return codeSystem;
}
Usage of org.hl7.fhir.utilities.graphql.Package in project org.hl7.fhir.core (by hapifhir): class NpmPackageVersionConverter, method execute.
/**
 * Converts the NPM package read from {@code source} and writes the converted package
 * to {@code dest}.
 *
 * <p>Processing happens fully in memory: the source tarball is un-gzipped and
 * un-tarred, {@code package/package.json} and every JSON entry carrying a
 * {@code resourceType} are converted, per-folder {@code .index.json} files are
 * generated, and a new gzipped tarball is written out.
 *
 * @throws IOException if the source cannot be read or the destination cannot be written.
 */
public void execute() throws IOException {
  Map<String, byte[]> content = readSourcePackage();

  // Convert every entry. package.json is handled by convertPackage; any other file
  // that parses as JSON and has a resourceType goes through convertResource.
  Map<String, byte[]> output = new HashMap<>();
  output.put("package/package.json", convertPackage(content.get("package/package.json")));
  for (Entry<String, byte[]> e : content.entrySet()) {
    if (!e.getKey().equals("package/package.json")) {
      byte[] cnv = e.getValue();
      try {
        JsonObject json = JsonTrackingParser.parseJson(e.getValue());
        if (json.has("resourceType")) {
          cnv = convertResource(e.getKey(), e.getValue());
        }
      } catch (Exception ignored) {
        // Deliberate best-effort: entries that are not JSON (images, text files, ...)
        // are copied through unchanged.
      }
      if (cnv != null && cnv.length > 0) {
        output.put(e.getKey(), cnv);
      }
    }
  }

  // Re-package: stream all converted entries plus generated .index.json files into a
  // new gzipped tarball held in memory, then persist it. Closing the tar stream
  // cascades through the gzip and buffered streams, and try-with-resources guarantees
  // closure even if writing an entry fails.
  ByteArrayOutputStream packageBytes = new ByteArrayOutputStream();
  try (TarArchiveOutputStream tar = new TarArchiveOutputStream(
      new GzipCompressorOutputStream(new BufferedOutputStream(packageBytes)))) {
    Map<String, NpmPackageIndexBuilder> indexers = new HashMap<>();
    for (Entry<String, byte[]> e : output.entrySet()) {
      String folder = e.getKey().substring(0, e.getKey().lastIndexOf("/"));
      String fileName = e.getKey().substring(folder.length() + 1);
      byte[] bytes = e.getValue();
      NpmPackageIndexBuilder indexer = indexers.get(folder);
      if (indexer == null) {
        indexer = new NpmPackageIndexBuilder();
        indexer.start();
        indexers.put(folder, indexer);
      }
      indexer.seeFile(fileName, bytes);
      // .index.json files are regenerated below and package.json is written last, so
      // both are excluded from this pass.
      if (!fileName.equals(".index.json") && !fileName.equals("package.json")) {
        writeTarEntry(tar, e.getKey(), bytes);
      }
    }
    for (Entry<String, NpmPackageIndexBuilder> e : indexers.entrySet()) {
      writeTarEntry(tar, e.getKey() + "/.index.json", e.getValue().build().getBytes(StandardCharsets.UTF_8));
    }
    // NOTE(review): assumes convertPackage never returns null — TODO confirm.
    writeTarEntry(tar, "package/package.json", output.get("package/package.json"));
    tar.finish();
  }
  TextFile.bytesToFile(packageBytes.toByteArray(), dest);
}

/**
 * Un-gzips and un-tars the package at {@code source} into an in-memory map of entry
 * name to file bytes. Directory entries are skipped.
 *
 * @throws IOException if the source file cannot be opened or read.
 */
private Map<String, byte[]> readSourcePackage() throws IOException {
  GzipCompressorInputStream gzipIn;
  try {
    gzipIn = new GzipCompressorInputStream(new FileInputStream(source));
  } catch (Exception e) {
    // Preserve the cause so callers can see the underlying failure.
    throw new IOException("Error reading " + source + ": " + e.getMessage(), e);
  }
  Map<String, byte[]> content = new HashMap<>();
  // Closing the tar stream also closes the wrapped gzip stream.
  try (TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn)) {
    TarArchiveEntry entry;
    while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {
      if (!entry.isDirectory()) {
        ByteArrayOutputStream fileBytes = new ByteArrayOutputStream();
        byte[] buffer = new byte[BUFFER_SIZE];
        int count;
        while ((count = tarIn.read(buffer, 0, BUFFER_SIZE)) != -1) {
          fileBytes.write(buffer, 0, count);
        }
        content.put(entry.getName(), fileBytes.toByteArray());
      }
    }
  }
  return content;
}

/** Writes a single named entry with the given bytes to the tar stream. */
private static void writeTarEntry(TarArchiveOutputStream tar, String name, byte[] bytes) throws IOException {
  TarArchiveEntry entry = new TarArchiveEntry(name);
  entry.setSize(bytes.length);
  tar.putArchiveEntry(entry);
  tar.write(bytes);
  tar.closeArchiveEntry();
}
Usage of org.hl7.fhir.utilities.graphql.Package in project org.hl7.fhir.core (by hapifhir): class RdfParser, method composeImplementationGuideImplementationGuidePageComponent.
/**
 * Serialises an ImplementationGuide page component — and, recursively, its child
 * pages — as RDF attached to the given parent node.
 *
 * @param parent the RDF node to attach the element to.
 * @param parentType the FHIR type name of the parent, or empty/null at the root.
 * @param name the property name linking the parent to this element.
 * @param element the page component to serialise; the method is a no-op when null.
 * @param index the position of this element within a repeating property, or -1.
 */
protected void composeImplementationGuideImplementationGuidePageComponent(Complex parent, String parentType, String name, ImplementationGuide.ImplementationGuidePageComponent element, int index) {
  if (element == null) {
    return;
  }
  // Root elements attach directly to the parent node; nested ones hang off a
  // "fhir:<parentType>.<name>" predicate.
  Complex node = Utilities.noString(parentType) ? parent : parent.predicate("fhir:" + parentType + '.' + name);
  composeBackboneElement(node, "page", name, element, index);
  if (element.hasSourceElement()) {
    composeUri(node, "ImplementationGuide", "source", element.getSourceElement(), -1);
  }
  if (element.hasNameElement()) {
    composeString(node, "ImplementationGuide", "name", element.getNameElement(), -1);
  }
  if (element.hasKindElement()) {
    composeEnum(node, "ImplementationGuide", "kind", element.getKindElement(), -1);
  }
  for (int i = 0; i < element.getType().size(); i++) {
    composeCode(node, "ImplementationGuide", "type", element.getType().get(i), i);
  }
  for (int i = 0; i < element.getPackage().size(); i++) {
    composeString(node, "ImplementationGuide", "package", element.getPackage().get(i), i);
  }
  if (element.hasFormatElement()) {
    composeCode(node, "ImplementationGuide", "format", element.getFormatElement(), -1);
  }
  // Child pages recurse through this same method.
  for (int i = 0; i < element.getPage().size(); i++) {
    composeImplementationGuideImplementationGuidePageComponent(node, "ImplementationGuide", "page", element.getPage().get(i), i);
  }
}
Usage of org.hl7.fhir.utilities.graphql.Package in project org.hl7.fhir.core (by hapifhir): class RdfParser, method composeMedicationMedicationPackageComponent.
/**
 * Serialises a Medication package component as RDF attached to the given parent node.
 *
 * @param parent the RDF node to attach the element to.
 * @param parentType the FHIR type name of the parent, or empty/null at the root.
 * @param name the property name linking the parent to this element.
 * @param element the package component to serialise; the method is a no-op when null.
 * @param index the position of this element within a repeating property, or -1.
 */
protected void composeMedicationMedicationPackageComponent(Complex parent, String parentType, String name, Medication.MedicationPackageComponent element, int index) {
  if (element == null) {
    return;
  }
  // Root elements attach directly to the parent node; nested ones hang off a
  // "fhir:<parentType>.<name>" predicate.
  Complex node = Utilities.noString(parentType) ? parent : parent.predicate("fhir:" + parentType + '.' + name);
  composeBackboneElement(node, "package", name, element, index);
  if (element.hasContainer()) {
    composeCodeableConcept(node, "Medication", "container", element.getContainer(), -1);
  }
  for (int i = 0; i < element.getContent().size(); i++) {
    composeMedicationMedicationPackageContentComponent(node, "Medication", "content", element.getContent().get(i), i);
  }
  for (int i = 0; i < element.getBatch().size(); i++) {
    composeMedicationMedicationPackageBatchComponent(node, "Medication", "batch", element.getBatch().get(i), i);
  }
}
Aggregations