Search in sources :

Example 11 with CqlToElmTranslator

use of com.ibm.cohort.cql.translation.CqlToElmTranslator in project quality-measure-and-cohort-service by Alvearie.

The following example is from the class CohortCLI, method runWithArgs.

/**
 * Simulate main method behavior in a non-static context for use in testing
 * tools. This method is intended to be called only once. Multiple calls for the
 * same library path will attempt duplicate library loading.
 *
 * @param args parameter values
 * @param out  location where contents that would normally go to stdout should
 *             be written
 * @return the CqlEvaluator used to run the evaluations, or null when only
 *         usage help was displayed
 * @throws IOException IOException
 */
public CqlEvaluator runWithArgs(String[] args, PrintStream out) throws IOException {
    Arguments arguments = new Arguments();
    // Route all JCommander output (usage text, messages) to the caller-provided stream.
    Console console = new DefaultConsole(out);
    JCommander jc = JCommander.newBuilder().programName("cql-engine").console(console).addObject(arguments).build();
    jc.parse(args);
    CqlEvaluator wrapper = null;
    if (arguments.isDisplayHelp) {
        jc.usage();
    } else {
        FhirClientBuilderFactory factory = FhirClientBuilderFactory.newInstance();
        FhirClientBuilder fhirClientBuilder = factory.newFhirClientBuilder();
        // NOTE(review): presumably populates the *ServerConfig fields read below
        // (dataServerConfig, terminologyServerConfig, measureServerConfig) — confirm.
        readConnectionConfiguration(arguments);
        MapCqlLibraryProviderFactory libraryProviderFactory = new MapCqlLibraryProviderFactory();
        String[] filters = null;
        if (arguments.filters != null) {
            filters = arguments.filters.toArray(new String[arguments.filters.size()]);
        }
        // Resolve CQL libraries from one of three sources: a local directory, a ZIP
        // archive, or a FHIR Library resource (plus its dependencies) on the measure server.
        CqlLibraryProvider backingLibraryProvider;
        Path libraryFolder = Paths.get(arguments.libraryPath);
        if (libraryFolder.toFile().isDirectory()) {
            out.println(String.format("Loading libraries from folder '%s'", libraryFolder.toString()));
            backingLibraryProvider = libraryProviderFactory.fromDirectory(libraryFolder, filters);
        } else if (FileHelpers.isZip(libraryFolder.toFile())) {
            out.println(String.format("Loading libraries from ZIP '%s'", libraryFolder.toString()));
            backingLibraryProvider = libraryProviderFactory.fromZipFile(libraryFolder, filters);
        } else {
            out.println(String.format("Loading libraries from FHIR Library '%s'", libraryFolder.toString()));
            IGenericClient measureClient = fhirClientBuilder.createFhirClient(measureServerConfig);
            FhirResourceResolver<Library> libraryResolver = R4FhirServerResourceResolverFactory.createLibraryResolver(measureClient);
            R4LibraryDependencyGatherer dependencyGatherer = new R4LibraryDependencyGatherer(libraryResolver);
            List<Library> cqlLibraries = dependencyGatherer.gatherForLibraryId(arguments.libraryPath);
            Map<CqlLibraryDescriptor, CqlLibrary> cqlLibraryMap = toCqlLibraryMap(cqlLibraries);
            backingLibraryProvider = new MapCqlLibraryProvider(cqlLibraryMap);
        }
        // Libraries bundled on the classpath act as a fallback behind the
        // user-specified source (PriorityCqlLibraryProvider checks in order).
        CqlLibraryProvider fhirClasspathProvider = new ClasspathCqlLibraryProvider();
        backingLibraryProvider = new PriorityCqlLibraryProvider(backingLibraryProvider, fhirClasspathProvider);
        CqlToElmTranslator translator = new CqlToElmTranslator();
        if (arguments.modelInfoFile != null && arguments.modelInfoFile.exists()) {
            translator.registerModelInfo(arguments.modelInfoFile);
        }
        // Force CQL-to-ELM translation when the configured source format is raw CQL.
        boolean isForceTranslation = arguments.sourceFormat == Format.CQL;
        CqlLibraryProvider libraryProvider = new TranslatingCqlLibraryProvider(backingLibraryProvider, translator, isForceTranslation);
        IGenericClient dataClient = fhirClientBuilder.createFhirClient(dataServerConfig);
        IGenericClient termClient = fhirClientBuilder.createFhirClient(terminologyServerConfig);
        CqlTerminologyProvider termProvider = new R4RestFhirTerminologyProvider(termClient);
        Map<String, com.ibm.cohort.cql.evaluation.parameters.Parameter> parameters = null;
        if (arguments.parameters != null) {
            parameters = parseParameterArguments(arguments.parameters);
        }
        CqlVersionedIdentifier libraryIdentifier = new CqlVersionedIdentifier(arguments.libraryName, arguments.libraryVersion);
        List<Pair<String, String>> contexts;
        if (arguments.contextIds == null || arguments.contextIds.isEmpty()) {
            // If no context ids are provided, perform one run using a null context
            contexts = Collections.singletonList(null);
        } else {
            // Pair each requested context id with the (single) configured context name.
            contexts = arguments.contextIds.stream().map(x -> new ImmutablePair<>(arguments.contextName, x)).collect(Collectors.toList());
        }
        try (RetrieveCacheContext cacheContext = new DefaultRetrieveCacheContext()) {
            CqlDataProvider dataProvider = R4DataProviderFactory.createDataProvider(dataClient, termProvider, cacheContext, R4FhirModelResolverFactory.createCachingResolver(), !arguments.enableTerminologyOptimization, arguments.searchPageSize);
            wrapper = new CqlEvaluator().setLibraryProvider(libraryProvider).setDataProvider(dataProvider).setTerminologyProvider(termProvider);
            // Capture a single timestamp so every context is evaluated against the same "now".
            ZonedDateTime evaluationDateTime = ZonedDateTime.now();
            for (Pair<String, String> context : contexts) {
                String contextLabel = context == null ? "null" : context.getRight();
                out.println("Context: " + contextLabel);
                CqlEvaluationResult result = wrapper.evaluate(libraryIdentifier, parameters, context, arguments.expressions, arguments.loggingLevel, evaluationDateTime);
                out.print(prettyPrintResult(result));
                out.println("---");
            }
        }
    }
    return wrapper;
}
Also used : FhirResourceResolver(com.ibm.cohort.cql.fhir.resolver.FhirResourceResolver) DefaultConsole(com.beust.jcommander.internal.DefaultConsole) TranslatingCqlLibraryProvider(com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider) IGenericClient(ca.uhn.fhir.rest.client.api.IGenericClient) R4RestFhirTerminologyProvider(com.ibm.cohort.engine.terminology.R4RestFhirTerminologyProvider) DefaultRetrieveCacheContext(com.ibm.cohort.engine.measure.cache.DefaultRetrieveCacheContext) RetrieveCacheContext(com.ibm.cohort.engine.measure.cache.RetrieveCacheContext) CqlToElmTranslator(com.ibm.cohort.cql.translation.CqlToElmTranslator) MapCqlLibraryProvider(com.ibm.cohort.cql.library.MapCqlLibraryProvider) CqlLibraryProvider(com.ibm.cohort.cql.library.CqlLibraryProvider) PriorityCqlLibraryProvider(com.ibm.cohort.cql.library.PriorityCqlLibraryProvider) TranslatingCqlLibraryProvider(com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider) ClasspathCqlLibraryProvider(com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider) ZonedDateTime(java.time.ZonedDateTime) JCommander(com.beust.jcommander.JCommander) Console(com.beust.jcommander.internal.Console) DefaultConsole(com.beust.jcommander.internal.DefaultConsole) R4LibraryDependencyGatherer(com.ibm.cohort.cql.hapi.R4LibraryDependencyGatherer) List(java.util.List) MapCqlLibraryProviderFactory(com.ibm.cohort.cql.library.MapCqlLibraryProviderFactory) CqlTerminologyProvider(com.ibm.cohort.cql.terminology.CqlTerminologyProvider) CqlDataProvider(com.ibm.cohort.cql.data.CqlDataProvider) Pair(org.apache.commons.lang3.tuple.Pair) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) Path(java.nio.file.Path) FhirClientBuilder(com.ibm.cohort.fhir.client.config.FhirClientBuilder) ParameterHelper.parseParameterArguments(com.ibm.cohort.cli.ParameterHelper.parseParameterArguments) MapCqlLibraryProvider(com.ibm.cohort.cql.library.MapCqlLibraryProvider) CqlEvaluationResult(com.ibm.cohort.cql.evaluation.CqlEvaluationResult) 
DefaultRetrieveCacheContext(com.ibm.cohort.engine.measure.cache.DefaultRetrieveCacheContext) PriorityCqlLibraryProvider(com.ibm.cohort.cql.library.PriorityCqlLibraryProvider) FhirClientBuilderFactory(com.ibm.cohort.fhir.client.config.FhirClientBuilderFactory) Parameter(com.beust.jcommander.Parameter) ClasspathCqlLibraryProvider(com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider) Map(java.util.Map) HashMap(java.util.HashMap) CqlEvaluator(com.ibm.cohort.cql.evaluation.CqlEvaluator) CqlVersionedIdentifier(com.ibm.cohort.cql.library.CqlVersionedIdentifier)

Example 12 with CqlToElmTranslator

use of com.ibm.cohort.cql.translation.CqlToElmTranslator in project quality-measure-and-cohort-service by Alvearie.

The following example is from the class MeasureEvaluationSeeder, method create.

/**
 * Create an evaluation seed for the given FHIR Measure. Loads the measure's
 * primary CQL library and all of its dependencies, builds a CQL execution
 * context for the library's single data model, applies default parameter
 * values declared on the measure followed by caller-supplied overrides, and
 * finally sets the measurement period.
 *
 * @param measure     the FHIR Measure to evaluate
 * @param periodStart start of the measurement period
 * @param periodEnd   end of the measurement period
 * @param productLine product line value passed through to context creation
 * @param parameters  caller-supplied CQL parameter overrides; may be null
 * @return a seed bundling the measure, execution context, measurement period,
 *         and data provider
 * @throws IllegalArgumentException if no libraries can be loaded for the
 *         measure, or if the primary library uses more than one data model
 */
public IMeasureEvaluationSeed create(Measure measure, String periodStart, String periodEnd, String productLine, Map<String, Parameter> parameters) {
    // Gather the primary library and all of its dependencies
    List<Library> fhirLibraries = libraryDependencyGatherer.gatherForMeasure(measure);
    if (CollectionUtils.isEmpty(fhirLibraries)) {
        throw new IllegalArgumentException(String.format("No libraries were able to be loaded for %s", measure.getId()));
    }
    // the "primary" library is always the first library loaded for the measure
    Library primaryFhirLibrary = fhirLibraries.get(0);
    VersionedIdentifier libraryIdentifier = new VersionedIdentifier().withId(primaryFhirLibrary.getName()).withVersion(primaryFhirLibrary.getVersion());
    LibraryLoader libraryLoader = new R4TranslatingLibraryLoader(libraryResolver, new CqlToElmTranslator());
    org.cqframework.cql.elm.execution.Library primaryLibrary = libraryLoader.load(libraryIdentifier);
    // Each "using" definition names a data model the library depends on.
    List<Triple<String, String, String>> usingDefs = UsingHelper.getUsingUrlAndVersion(primaryLibrary.getUsings());
    if (usingDefs.size() > 1) {
        throw new IllegalArgumentException("Evaluation of Measure using multiple Models is not supported at this time.");
    }
    // Per the above condition, we should only have one model per measure
    String lastModelUri = usingDefs.get(usingDefs.size() - 1).getRight();
    DataProvider dataProvider = dataProviders.get(lastModelUri);
    Context context = createContext(primaryLibrary, lastModelUri, dataProvider, productLine, libraryLoader);
    // fhir path: Measure.extension[measureParameter][].valueParameterDefinition.extension[defaultValue]
    measure.getExtension().stream().filter(MeasureEvaluationSeeder::isMeasureParameter).map(parameter -> dataProvider.resolvePath(parameter, "valueParameterDefinition")).map(ParameterDefinition.class::cast).forEach(parameterDefinition -> setDefaultValue(context, parameterDefinition));
    // Caller-supplied parameters are applied after defaults so they take precedence.
    if (parameters != null) {
        parameters.entrySet().stream().forEach(e -> context.setParameter(null, e.getKey(), e.getValue().toCqlType()));
    }
    // Set measurement period last to make sure we respect periodStart
    // and periodEnd date boundaries for an execution.
    Interval measurementPeriod = createMeasurePeriod(periodStart, periodEnd);
    context.setParameter(null, MEASUREMENT_PERIOD, measurementPeriod);
    return new CustomMeasureEvaluationSeed(measure, context, measurementPeriod, dataProvider);
}
Also used : CDMContext(com.ibm.cohort.engine.cqfruler.CDMContext) Context(org.opencds.cqf.cql.engine.execution.Context) CqlToElmTranslator(com.ibm.cohort.cql.translation.CqlToElmTranslator) R4TranslatingLibraryLoader(com.ibm.cohort.cql.hapi.R4TranslatingLibraryLoader) LibraryLoader(org.opencds.cqf.cql.engine.execution.LibraryLoader) Triple(org.apache.commons.lang3.tuple.Triple) CqlDataProvider(com.ibm.cohort.cql.data.CqlDataProvider) DataProvider(org.opencds.cqf.cql.engine.data.DataProvider) VersionedIdentifier(org.cqframework.cql.elm.execution.VersionedIdentifier) Library(org.hl7.fhir.r4.model.Library) R4TranslatingLibraryLoader(com.ibm.cohort.cql.hapi.R4TranslatingLibraryLoader) Interval(org.opencds.cqf.cql.engine.runtime.Interval)

Example 13 with CqlToElmTranslator

use of com.ibm.cohort.cql.translation.CqlToElmTranslator in project quality-measure-and-cohort-service by Alvearie.

The following example is from the class SparkCqlEvaluator, method run.

/**
 * Run the Spark-based CQL evaluation job: read context definitions, compute
 * output schemas, evaluate each configured context's data, write per-context
 * results, and record an evaluation summary (including any fatal error) to
 * the metadata output path when one is configured.
 *
 * @param out stream for content that would normally go to stdout
 * @throws Exception any failure during setup or evaluation; the failure is
 *         first captured in the evaluation summary, then rethrown
 */
public void run(PrintStream out) throws Exception {
    EvaluationSummary evaluationSummary = new EvaluationSummary();
    long startTimeMillis = System.currentTimeMillis();
    evaluationSummary.setStartTimeMillis(startTimeMillis);
    // Assume failure until the job completes; flipped to SUCCESS at the end.
    evaluationSummary.setJobStatus(JobStatus.FAIL);
    SparkSession.Builder sparkBuilder = SparkSession.builder();
    try (SparkSession spark = sparkBuilder.getOrCreate()) {
        final LongAccumulator contextAccum = spark.sparkContext().longAccumulator("Context");
        final CollectionAccumulator<EvaluationError> errorAccumulator = spark.sparkContext().collectionAccumulator("EvaluationErrors");
        try {
            // Propagate the correlation id to executors via a local property for log correlation.
            spark.sparkContext().setLocalProperty("mdc." + CORRELATION_ID, MDC.get(CORRELATION_ID));
            evaluationSummary.setCorrelationId(MDC.get(CORRELATION_ID));
            boolean useJava8API = Boolean.parseBoolean(spark.conf().get("spark.sql.datetime.java8API.enabled"));
            this.typeConverter = new SparkTypeConverter(useJava8API);
            this.hadoopConfiguration = new SerializableConfiguration(spark.sparkContext().hadoopConfiguration());
            evaluationSummary.setApplicationId(spark.sparkContext().applicationId());
            CqlToElmTranslator cqlTranslator = getCqlTranslator();
            SparkOutputColumnEncoder columnEncoder = getSparkOutputColumnEncoder();
            ContextDefinitions contexts = readContextDefinitions(args.contextDefinitionPath);
            // Optionally restrict the run to an explicit subset of aggregation contexts.
            List<ContextDefinition> filteredContexts = contexts.getContextDefinitions();
            if (args.aggregationContexts != null && !args.aggregationContexts.isEmpty()) {
                filteredContexts = filteredContexts.stream().filter(def -> args.aggregationContexts.contains(def.getName())).collect(Collectors.toList());
            }
            if (filteredContexts.isEmpty()) {
                throw new IllegalArgumentException("At least one context definition is required (after filtering if enabled).");
            }
            Map<String, StructType> resultSchemas = calculateSparkSchema(filteredContexts.stream().map(ContextDefinition::getName).collect(Collectors.toList()), contexts, columnEncoder, cqlTranslator);
            // Single timestamp shared by every context evaluation in this batch.
            ZonedDateTime batchRunTime = ZonedDateTime.now();
            final LongAccumulator perContextAccum = spark.sparkContext().longAccumulator("PerContext");
            CustomMetricSparkPlugin.contextAccumGauge.setAccumulator(contextAccum);
            CustomMetricSparkPlugin.perContextAccumGauge.setAccumulator(perContextAccum);
            CustomMetricSparkPlugin.totalContextsToProcessCounter.inc(filteredContexts.size());
            CustomMetricSparkPlugin.currentlyEvaluatingContextGauge.setValue(0);
            ColumnRuleCreator columnRuleCreator = new ColumnRuleCreator(getFilteredJobSpecificationWithIds().getEvaluations(), getCqlTranslator(), createLibraryProvider());
            Map<String, String> dataTypeAliases = createDataTypeAliases(filteredContexts, cqlTranslator);
            for (ContextDefinition context : filteredContexts) {
                final String contextName = context.getName();
                ContextRetriever contextRetriever = new ContextRetriever(args.inputPaths, new DefaultDatasetRetriever(spark, args.inputFormat), args.disableColumnFiltering ? null : columnRuleCreator.getDataRequirementsForContext(context));
                StructType resultsSchema = resultSchemas.get(contextName);
                if (resultsSchema == null || resultsSchema.fields().length == 0) {
                    LOG.warn("Context " + contextName + " has no defines configured. Skipping.");
                } else {
                    LOG.info("Evaluating context " + contextName);
                    long contextStartMillis = System.currentTimeMillis();
                    final String outputPath = MapUtils.getRequiredKey(args.outputPaths, context.getName(), "outputPath");
                    JavaPairRDD<Object, List<Row>> rowsByContextId = contextRetriever.retrieveContext(context);
                    CustomMetricSparkPlugin.currentlyEvaluatingContextGauge.setValue(CustomMetricSparkPlugin.currentlyEvaluatingContextGauge.getValue() + 1);
                    JavaPairRDD<Object, Row> resultsByContext = rowsByContextId.flatMapToPair(x -> evaluate(contextName, resultsSchema, x, dataTypeAliases, perContextAccum, errorAccumulator, batchRunTime));
                    writeResults(spark, resultsSchema, resultsByContext, outputPath);
                    long contextEndMillis = System.currentTimeMillis();
                    LOG.info(String.format("Wrote results for context %s to %s", contextName, outputPath));
                    evaluationSummary.addContextCount(contextName, perContextAccum.value());
                    evaluationSummary.addContextRuntime(contextName, contextEndMillis - contextStartMillis);
                    contextAccum.add(1);
                    perContextAccum.reset();
                }
            }
            CustomMetricSparkPlugin.currentlyEvaluatingContextGauge.setValue(0);
            try {
                boolean metricsEnabled = Boolean.parseBoolean(spark.conf().get("spark.ui.prometheus.enabled"));
                if (metricsEnabled) {
                    LOG.info("Prometheus metrics enabled, sleeping for 7 seconds to finish gathering metrics");
                    // sleep for over 5 seconds because Prometheus only polls
                    // every 5 seconds. If spark finishes and goes away immediately after completing,
                    // Prometheus will never be able to poll for the final set of metrics for the spark-submit
                    // The default Prometheus config map was changed from 2 minute scrape interval to 5 seconds for spark pods
                    Thread.sleep(7000);
                } else {
                    LOG.info("Prometheus metrics not enabled");
                }
            } catch (NoSuchElementException e) {
                LOG.info("spark.ui.prometheus.enabled is not set");
            }
            evaluationSummary.setJobStatus(JobStatus.SUCCESS);
        } catch (Exception e) {
            // If we experience an error that would make the program halt, capture the error
            // and report it in the batch summary file
            ByteArrayOutputStream errorDetailStream = new ByteArrayOutputStream();
            try (PrintStream printStream = new PrintStream(errorDetailStream)) {
                // getMessage() can be null (e.g. NullPointerException); guard so the
                // error reporter itself cannot throw and mask the original failure.
                printStream.print(messageOrClassName(e));
                printStream.write('\n');
                if (e.getCause() != null) {
                    printStream.print(messageOrClassName(e.getCause()));
                    printStream.write('\n');
                }
                e.printStackTrace(printStream);
                evaluationSummary.setErrorList(Collections.singletonList(new EvaluationError(null, null, null, errorDetailStream.toString())));
            }
            throw e;
        } finally {
            // Always record timing/summary metadata, even on failure.
            long endTimeMillis = System.currentTimeMillis();
            evaluationSummary.setEndTimeMillis(endTimeMillis);
            evaluationSummary.setRuntimeMillis(endTimeMillis - startTimeMillis);
            if (args.metadataOutputPath != null) {
                if (evaluationSummary.getErrorList() == null) {
                    evaluationSummary.setErrorList(errorAccumulator.value());
                }
                if (CollectionUtils.isNotEmpty(evaluationSummary.getErrorList())) {
                    evaluationSummary.setJobStatus(JobStatus.FAIL);
                }
                evaluationSummary.setTotalContexts(contextAccum.value());
                OutputMetadataWriter writer = getOutputMetadataWriter();
                writer.writeMetadata(evaluationSummary);
            }
        }
    }
}

/**
 * Returns the throwable's message, falling back to its class name when the
 * message is null, so error reporting never raises a secondary exception.
 */
private static String messageOrClassName(Throwable t) {
    return t.getMessage() != null ? t.getMessage() : t.getClass().getName();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) StructType(org.apache.spark.sql.types.StructType) ContextRetriever(com.ibm.cohort.cql.spark.aggregation.ContextRetriever) CqlToElmTranslator(com.ibm.cohort.cql.translation.CqlToElmTranslator) ContextDefinitions(com.ibm.cohort.cql.spark.aggregation.ContextDefinitions) ZonedDateTime(java.time.ZonedDateTime) List(java.util.List) ArrayList(java.util.ArrayList) DefaultDatasetRetriever(com.ibm.cohort.cql.spark.data.DefaultDatasetRetriever) PrintStream(java.io.PrintStream) SerializableConfiguration(org.apache.spark.util.SerializableConfiguration) SparkTypeConverter(com.ibm.cohort.cql.spark.data.SparkTypeConverter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ContextDefinition(com.ibm.cohort.cql.spark.aggregation.ContextDefinition) FileNotFoundException(java.io.FileNotFoundException) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException) LongAccumulator(org.apache.spark.util.LongAccumulator) EvaluationSummary(com.ibm.cohort.cql.spark.metadata.EvaluationSummary) HadoopPathOutputMetadataWriter(com.ibm.cohort.cql.spark.metadata.HadoopPathOutputMetadataWriter) OutputMetadataWriter(com.ibm.cohort.cql.spark.metadata.OutputMetadataWriter) ColumnRuleCreator(com.ibm.cohort.cql.spark.aggregation.ColumnRuleCreator) SparkOutputColumnEncoder(com.ibm.cohort.cql.spark.data.SparkOutputColumnEncoder) EvaluationError(com.ibm.cohort.cql.spark.errors.EvaluationError) SparkDataRow(com.ibm.cohort.cql.spark.data.SparkDataRow) DataRow(com.ibm.cohort.datarow.model.DataRow) Row(org.apache.spark.sql.Row) NoSuchElementException(java.util.NoSuchElementException)

Example 14 with CqlToElmTranslator

use of com.ibm.cohort.cql.translation.CqlToElmTranslator in project quality-measure-and-cohort-service by Alvearie.

The following example is from the class ColumnRuleCreatorTest, method testGetFiltersForContext.

/**
 * Verifies that column rules derived for the "Patient" context contain exactly
 * the columns referenced by the parent-child job definitions.
 */
@Test
public void testGetFiltersForContext() throws Exception {
    CqlToElmTranslator translator = new CqlToElmTranslator();
    translator.registerModelInfo(new File("src/test/resources/alltypes/modelinfo/alltypes-modelinfo-1.0.0.xml"));

    ObjectMapper jsonMapper = new ObjectMapper();
    CqlEvaluationRequests evaluationRequests = jsonMapper.readValue(
            new File("src/test/resources/alltypes/metadata/parent-child-jobs.json"), CqlEvaluationRequests.class);

    TranslatingCqlLibraryProvider libraryProvider =
            new TranslatingCqlLibraryProvider(new ClasspathCqlLibraryProvider("alltypes.cql"), translator);
    ColumnRuleCreator ruleCreator =
            new ColumnRuleCreator(evaluationRequests.getEvaluations(), translator, libraryProvider);

    ContextDefinitions contextDefinitions = jsonMapper.readValue(
            new File("src/test/resources/alltypes/metadata/context-definitions.json"), ContextDefinitions.class);
    ContextDefinition patientContext = contextDefinitions.getContextDefinitionByName("Patient");

    Map<String, Set<StringMatcher>> actual = ruleCreator.getDataRequirementsForContext(patientContext);

    Set<StringMatcher> tableAColumns = new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("pat_id"),
            new EqualsStringMatcher("code_col"),
            new EqualsStringMatcher("boolean_col")));
    Map<String, Set<StringMatcher>> expected = new HashMap<>();
    expected.put("A", tableAColumns);

    assertEquals(expected, actual);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) TranslatingCqlLibraryProvider(com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider) HashMap(java.util.HashMap) CqlToElmTranslator(com.ibm.cohort.cql.translation.CqlToElmTranslator) ClasspathCqlLibraryProvider(com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider) CqlLibraryProvider(com.ibm.cohort.cql.library.CqlLibraryProvider) TranslatingCqlLibraryProvider(com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider) EqualsStringMatcher(com.ibm.cohort.cql.util.EqualsStringMatcher) CqlEvaluationRequests(com.ibm.cohort.cql.evaluation.CqlEvaluationRequests) ClasspathCqlLibraryProvider(com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider) File(java.io.File) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)

Example 15 with CqlToElmTranslator

use of com.ibm.cohort.cql.translation.CqlToElmTranslator in project quality-measure-and-cohort-service by Alvearie.

The following example is from the class ColumnRuleCreatorTest, method testGetFiltersForContextWithMultiJoinColumns.

/**
 * Verifies that column rules derived for the "person" context include, per
 * table, the referenced columns plus the join-context index column for the
 * joined tables.
 */
@Test
public void testGetFiltersForContextWithMultiJoinColumns() throws Exception {
    CqlToElmTranslator translator = new CqlToElmTranslator();
    translator.registerModelInfo(new File("src/test/resources/multiple-joins/modelinfo/omop-modelinfo-5.2.2.xml"));

    ObjectMapper jsonMapper = new ObjectMapper();
    CqlEvaluationRequests evaluationRequests = jsonMapper.readValue(
            new File("src/test/resources/multiple-joins/metadata/cql-jobs.json"), CqlEvaluationRequests.class);

    TranslatingCqlLibraryProvider libraryProvider = new TranslatingCqlLibraryProvider(
            new ClasspathCqlLibraryProvider("multiple-joins.cql"), translator);
    ColumnRuleCreator ruleCreator =
            new ColumnRuleCreator(evaluationRequests.getEvaluations(), translator, libraryProvider);

    ContextDefinitions contextDefinitions = jsonMapper.readValue(
            new File("src/test/resources/multiple-joins/metadata/context-definitions.json"), ContextDefinitions.class);
    ContextDefinition personContext = contextDefinitions.getContextDefinitionByName("person");

    Map<String, Set<StringMatcher>> actual = ruleCreator.getDataRequirementsForContext(personContext);

    Map<String, Set<StringMatcher>> expected = new HashMap<>();
    expected.put("person", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("person_id"))));
    expected.put("vocabulary", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("vocabulary_id"),
            new EqualsStringMatcher("vocabulary_version"),
            new EqualsStringMatcher(ContextRetriever.JOIN_CONTEXT_VALUE_IDX))));
    expected.put("concept", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("concept_id"),
            new EqualsStringMatcher("concept_code"),
            new EqualsStringMatcher("concept_name"),
            new EqualsStringMatcher("vocabulary_id"),
            new EqualsStringMatcher(ContextRetriever.JOIN_CONTEXT_VALUE_IDX))));
    expected.put("observation", new HashSet<>(Arrays.asList(
            new EqualsStringMatcher(ContextRetriever.SOURCE_FACT_IDX),
            new EqualsStringMatcher("observation_concept_id"),
            new EqualsStringMatcher("person_id"),
            new EqualsStringMatcher(ContextRetriever.JOIN_CONTEXT_VALUE_IDX))));

    assertEquals(expected, actual);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) TranslatingCqlLibraryProvider(com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider) HashMap(java.util.HashMap) CqlToElmTranslator(com.ibm.cohort.cql.translation.CqlToElmTranslator) EqualsStringMatcher(com.ibm.cohort.cql.util.EqualsStringMatcher) CqlEvaluationRequests(com.ibm.cohort.cql.evaluation.CqlEvaluationRequests) ClasspathCqlLibraryProvider(com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider) File(java.io.File) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)

Aggregations

CqlToElmTranslator (com.ibm.cohort.cql.translation.CqlToElmTranslator)22 TranslatingCqlLibraryProvider (com.ibm.cohort.cql.translation.TranslatingCqlLibraryProvider)18 CqlLibraryProvider (com.ibm.cohort.cql.library.CqlLibraryProvider)14 ClasspathCqlLibraryProvider (com.ibm.cohort.cql.library.ClasspathCqlLibraryProvider)13 HashMap (java.util.HashMap)11 Test (org.junit.Test)11 CqlDataProvider (com.ibm.cohort.cql.data.CqlDataProvider)9 CqlTerminologyProvider (com.ibm.cohort.cql.terminology.CqlTerminologyProvider)9 PriorityCqlLibraryProvider (com.ibm.cohort.cql.library.PriorityCqlLibraryProvider)7 CqlLibraryDescriptor (com.ibm.cohort.cql.library.CqlLibraryDescriptor)6 Set (java.util.Set)6 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)5 File (java.io.File)5 CqlEvaluationRequests (com.ibm.cohort.cql.evaluation.CqlEvaluationRequests)4 CqlEvaluator (com.ibm.cohort.cql.evaluation.CqlEvaluator)4 Parameter (com.ibm.cohort.cql.evaluation.parameters.Parameter)4 CqlLibrary (com.ibm.cohort.cql.library.CqlLibrary)4 VersionedIdentifier (org.cqframework.cql.elm.execution.VersionedIdentifier)4 LibraryLoader (org.opencds.cqf.cql.engine.execution.LibraryLoader)4 IGenericClient (ca.uhn.fhir.rest.client.api.IGenericClient)3