Use of com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider in project quality-measure-and-cohort-service by Alvearie.
Example from the class SparkSchemaCreatorTest, method setup:
@Before
public void setup() {
    cqlTranslator = new CqlToElmTranslator();
    cqlTranslator.registerModelInfo(new File("src/test/resources/output-validation/modelinfo/simple-all-types-model-info.xml"));

    CqlLibraryProvider backingProvider = new ClasspathCqlLibraryProvider("output-validation.cql");
    cqlLibraryProvider = new TranslatingCqlLibraryProvider(backingProvider, cqlTranslator);

    outputColumnNameFactory = new DefaultSparkOutputColumnEncoder(".");
}
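The provider wired up in this setup serves translated libraries on demand. As a rough illustration of how a test might consume it, the sketch below requests the ELM form of one of the classpath CQL libraries. The library id and version are hypothetical, and it assumes the project's library API exposes CqlLibraryProvider.getLibrary(CqlLibraryDescriptor), descriptor setters for library id, version, and format, and an ELM constant on the Format enum.

// Hedged sketch: ask the translating provider for the ELM form of a classpath
// CQL library. "SampleLibrary"/"1.0.0" are hypothetical; getLibrary(...), the
// CqlLibraryDescriptor setters, and Format.ELM are assumed from the library API.
CqlLibraryDescriptor descriptor = new CqlLibraryDescriptor();
descriptor.setLibraryId("SampleLibrary"); // hypothetical library id
descriptor.setVersion("1.0.0");           // hypothetical version
descriptor.setFormat(Format.ELM);         // request translated (ELM) content

CqlLibrary translated = cqlLibraryProvider.getLibrary(descriptor);
assertNotNull(translated); // translation happens lazily inside the provider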
Use of com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider in project quality-measure-and-cohort-service by Alvearie.
Example from the class ColumnRuleCreatorTest, method testGetFiltersForContextOnlyJoinColumns:
@Test
public void testGetFiltersForContextOnlyJoinColumns() throws Exception {
    CqlToElmTranslator cqlTranslator = new CqlToElmTranslator();
    cqlTranslator.registerModelInfo(new File("src/test/resources/alltypes/modelinfo/alltypes-modelinfo-1.0.0.xml"));

    ObjectMapper mapper = new ObjectMapper();
    CqlEvaluationRequests requests = mapper.readValue(
            new File("src/test/resources/alltypes/metadata/join-only.json"),
            CqlEvaluationRequests.class);

    CqlLibraryProvider backingProvider = new ClasspathCqlLibraryProvider("alltypes.cql");
    TranslatingCqlLibraryProvider cqlLibraryProvider = new TranslatingCqlLibraryProvider(backingProvider, cqlTranslator);

    ColumnRuleCreator columnRuleCreator = new ColumnRuleCreator(requests.getEvaluations(), cqlTranslator, cqlLibraryProvider);

    ContextDefinitions definitions = mapper.readValue(
            new File("src/test/resources/alltypes/metadata/context-definitions-related-column.json"),
            ContextDefinitions.class);
    ContextDefinition context = definitions.getContextDefinitionByName("Patient");

    Map<String, Set<StringMatcher>> actual = columnRuleCreator.getDataRequirementsForContext(context);

    Map<String, Set<StringMatcher>> expected = new HashMap<>();
    expected.put("A", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("id_col"),
            new EqualsStringMatcher("pat_id"))));
    expected.put("B", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("string"),
            new EqualsStringMatcher(ContextRetriever.JOIN_CONTEXT_VALUE_IDX))));
    expected.put("C", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("pat_id"),
            new EqualsStringMatcher(ContextRetriever.JOIN_CONTEXT_VALUE_IDX))));

    assertEquals(expected, actual);
}
Use of com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider in project quality-measure-and-cohort-service by Alvearie.
Example from the class CohortCLI, method runWithArgs:
/**
 * Simulate main method behavior in a non-static context for use in testing
 * tools. This method is intended to be called only once. Multiple calls for the
 * same library path will attempt duplicate library loading.
 *
 * @param args parameter values
 * @param out  location where contents that would normally go to stdout should
 *             be written
 * @return the configured CqlEvaluator
 * @throws IOException if connection configuration, library, or parameter resources cannot be read
 */
public CqlEvaluator runWithArgs(String[] args, PrintStream out) throws IOException {
    Arguments arguments = new Arguments();
    Console console = new DefaultConsole(out);
    JCommander jc = JCommander.newBuilder().programName("cql-engine").console(console).addObject(arguments).build();
    jc.parse(args);

    CqlEvaluator wrapper = null;
    if (arguments.isDisplayHelp) {
        jc.usage();
    } else {
        FhirClientBuilderFactory factory = FhirClientBuilderFactory.newInstance();
        FhirClientBuilder fhirClientBuilder = factory.newFhirClientBuilder();

        readConnectionConfiguration(arguments);

        MapCqlLibraryProviderFactory libraryProviderFactory = new MapCqlLibraryProviderFactory();

        String[] filters = null;
        if (arguments.filters != null) {
            filters = arguments.filters.toArray(new String[arguments.filters.size()]);
        }

        CqlLibraryProvider backingLibraryProvider;
        Path libraryFolder = Paths.get(arguments.libraryPath);
        if (libraryFolder.toFile().isDirectory()) {
            out.println(String.format("Loading libraries from folder '%s'", libraryFolder.toString()));
            backingLibraryProvider = libraryProviderFactory.fromDirectory(libraryFolder, filters);
        } else if (FileHelpers.isZip(libraryFolder.toFile())) {
            out.println(String.format("Loading libraries from ZIP '%s'", libraryFolder.toString()));
            backingLibraryProvider = libraryProviderFactory.fromZipFile(libraryFolder, filters);
        } else {
            out.println(String.format("Loading libraries from FHIR Library '%s'", libraryFolder.toString()));
            IGenericClient measureClient = fhirClientBuilder.createFhirClient(measureServerConfig);
            FhirResourceResolver<Library> libraryResolver = R4FhirServerResourceResolverFactory.createLibraryResolver(measureClient);
            R4LibraryDependencyGatherer dependencyGatherer = new R4LibraryDependencyGatherer(libraryResolver);
            List<Library> cqlLibraries = dependencyGatherer.gatherForLibraryId(arguments.libraryPath);

            Map<CqlLibraryDescriptor, CqlLibrary> cqlLibraryMap = toCqlLibraryMap(cqlLibraries);
            backingLibraryProvider = new MapCqlLibraryProvider(cqlLibraryMap);
        }

        CqlLibraryProvider fhirClasspathProvider = new ClasspathCqlLibraryProvider();
        backingLibraryProvider = new PriorityCqlLibraryProvider(backingLibraryProvider, fhirClasspathProvider);

        CqlToElmTranslator translator = new CqlToElmTranslator();
        if (arguments.modelInfoFile != null && arguments.modelInfoFile.exists()) {
            translator.registerModelInfo(arguments.modelInfoFile);
        }

        boolean isForceTranslation = arguments.sourceFormat == Format.CQL;
        CqlLibraryProvider libraryProvider = new TranslatingCqlLibraryProvider(backingLibraryProvider, translator, isForceTranslation);

        IGenericClient dataClient = fhirClientBuilder.createFhirClient(dataServerConfig);
        IGenericClient termClient = fhirClientBuilder.createFhirClient(terminologyServerConfig);
        CqlTerminologyProvider termProvider = new R4RestFhirTerminologyProvider(termClient);

        Map<String, com.ibm.cohort.cql.evaluation.parameters.Parameter> parameters = null;
        if (arguments.parameters != null) {
            parameters = parseParameterArguments(arguments.parameters);
        }

        CqlVersionedIdentifier libraryIdentifier = new CqlVersionedIdentifier(arguments.libraryName, arguments.libraryVersion);

        List<Pair<String, String>> contexts;
        if (arguments.contextIds == null || arguments.contextIds.isEmpty()) {
            // If no context ids are provided, perform one run using a null context
            contexts = Collections.singletonList(null);
        } else {
            contexts = arguments.contextIds.stream()
                    .map(x -> new ImmutablePair<>(arguments.contextName, x))
                    .collect(Collectors.toList());
        }

        try (RetrieveCacheContext cacheContext = new DefaultRetrieveCacheContext()) {
            CqlDataProvider dataProvider = R4DataProviderFactory.createDataProvider(
                    dataClient,
                    termProvider,
                    cacheContext,
                    R4FhirModelResolverFactory.createCachingResolver(),
                    !arguments.enableTerminologyOptimization,
                    arguments.searchPageSize);

            wrapper = new CqlEvaluator()
                    .setLibraryProvider(libraryProvider)
                    .setDataProvider(dataProvider)
                    .setTerminologyProvider(termProvider);

            ZonedDateTime evaluationDateTime = ZonedDateTime.now();
            for (Pair<String, String> context : contexts) {
                String contextLabel = context == null ? "null" : context.getRight();
                out.println("Context: " + contextLabel);
                CqlEvaluationResult result = wrapper.evaluate(
                        libraryIdentifier,
                        parameters,
                        context,
                        arguments.expressions,
                        arguments.loggingLevel,
                        evaluationDateTime);
                out.print(prettyPrintResult(result));
                out.println("---");
            }
        }
    }
    return wrapper;
}
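For context, a minimal sketch of how a test might drive runWithArgs is shown below. The connection configuration path, library folder, library coordinates, and context id are all illustrative, and the flag names are assumptions about the Arguments class rather than confirmed options.

// Hedged usage sketch; all paths, names, and flag spellings below are assumptions.
CohortCLI cli = new CohortCLI();
ByteArrayOutputStream captured = new ByteArrayOutputStream();
try (PrintStream out = new PrintStream(captured)) {
    cli.runWithArgs(new String[] {
            "-d", "config/local-fhir-config.json", // data server connection config (hypothetical path)
            "-f", "src/test/resources/cql",        // folder of CQL libraries (hypothetical path)
            "-l", "SampleLibrary",                 // library name (hypothetical)
            "-v", "1.0.0",                         // library version (hypothetical)
            "-c", "patient-123"                    // context id (hypothetical)
    }, out);
}
System.out.println(captured.toString());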
Use of com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider in project quality-measure-and-cohort-service by Alvearie.
Example from the class SparkCqlEvaluator, method createLibraryProvider:
/**
* Initialize a library provider that will load resources from the configured path
* in local storage or from the well-known classpath locations. The library provider
* comes configured with CQL translation enabled and will use custom modelinfo
* definitions if provided in the configuration.
*
* @return configured library provider
* @throws IOException when model info cannot be read
* @throws FileNotFoundException when a specified model info file cannot be found
*/
protected CqlLibraryProvider createLibraryProvider() throws IOException, FileNotFoundException {
    CqlLibraryProvider hadoopBasedLp = new HadoopBasedCqlLibraryProvider(new Path(args.cqlPath), this.hadoopConfiguration.value());

    // We are excluding the pre-compiled FHIRHelpers libraries because they were not compiled
    // with the EnableResultTypes option that is required for some of the features of this program.
    ClasspathCqlLibraryProvider cpBasedLp = new ClasspathCqlLibraryProvider();
    cpBasedLp.setSupportedFormats(Format.CQL);

    CqlLibraryProvider priorityLp = new PriorityCqlLibraryProvider(hadoopBasedLp, cpBasedLp);
    return new TranslatingCqlLibraryProvider(priorityLp, getCqlTranslator());
}
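The same layering can be reproduced outside Spark by swapping the Hadoop-backed provider for a directory-backed one. The sketch below reuses only constructors and factory methods that appear in the examples above, while the method name, folder path, and model info path are hypothetical.

// Hedged, non-Spark variant of the same provider stack. Paths are hypothetical;
// the classes and calls mirror those used in CohortCLI and SparkCqlEvaluator above.
protected CqlLibraryProvider createLocalLibraryProvider() throws IOException {
    CqlToElmTranslator translator = new CqlToElmTranslator();
    translator.registerModelInfo(new File("src/test/resources/modelinfo/custom-modelinfo.xml")); // hypothetical model info

    // Primary source: CQL files in a local folder (no search-path filters).
    MapCqlLibraryProviderFactory libraryProviderFactory = new MapCqlLibraryProviderFactory();
    CqlLibraryProvider folderLp = libraryProviderFactory.fromDirectory(Paths.get("src/test/resources/cql"), new String[0]); // hypothetical folder

    // Fallback: classpath libraries, restricted to CQL source so everything is
    // translated with the options registered on this translator.
    ClasspathCqlLibraryProvider cpLp = new ClasspathCqlLibraryProvider();
    cpLp.setSupportedFormats(Format.CQL);

    CqlLibraryProvider priorityLp = new PriorityCqlLibraryProvider(folderLp, cpLp);
    return new TranslatingCqlLibraryProvider(priorityLp, translator);
}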
Use of com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider in project quality-measure-and-cohort-service by Alvearie.
Example from the class ColumnRuleCreatorTest, method testGetFiltersForContext:
@Test
public void testGetFiltersForContext() throws Exception {
    CqlToElmTranslator cqlTranslator = new CqlToElmTranslator();
    cqlTranslator.registerModelInfo(new File("src/test/resources/alltypes/modelinfo/alltypes-modelinfo-1.0.0.xml"));

    ObjectMapper mapper = new ObjectMapper();
    CqlEvaluationRequests requests = mapper.readValue(
            new File("src/test/resources/alltypes/metadata/parent-child-jobs.json"),
            CqlEvaluationRequests.class);

    CqlLibraryProvider backingProvider = new ClasspathCqlLibraryProvider("alltypes.cql");
    TranslatingCqlLibraryProvider cqlLibraryProvider = new TranslatingCqlLibraryProvider(backingProvider, cqlTranslator);

    ColumnRuleCreator columnRuleCreator = new ColumnRuleCreator(requests.getEvaluations(), cqlTranslator, cqlLibraryProvider);

    ContextDefinitions definitions = mapper.readValue(
            new File("src/test/resources/alltypes/metadata/context-definitions.json"),
            ContextDefinitions.class);
    ContextDefinition context = definitions.getContextDefinitionByName("Patient");

    Map<String, Set<StringMatcher>> actual = columnRuleCreator.getDataRequirementsForContext(context);

    Map<String, Set<StringMatcher>> expected = new HashMap<>();
    expected.put("A", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("pat_id"),
            new EqualsStringMatcher("code_col"),
            new EqualsStringMatcher("boolean_col"))));

    assertEquals(expected, actual);
}