Use of com.ibm.cohort.cql.evaluation.parameters.IntegerParameter in project quality-measure-and-cohort-service by Alvearie.
From the class MeasureEvaluatorTest, method measure_report_generated___java_overrides_overwrite_measure_params.
@Test
public void measure_report_generated___java_overrides_overwrite_measure_params() throws Exception {
    CapabilityStatement metadata = getCapabilityStatement();
    mockFhirResourceRetrieval("/metadata?_format=json", metadata);

    Patient patient = getPatient("123", AdministrativeGender.MALE, "1970-10-10");
    mockFhirResourceRetrieval(patient);

    Library library = mockLibraryRetrieval("TestDummyPopulations", DEFAULT_VERSION,
            "cql/fhir-measure/test-dummy-populations.xml", ELM_MIME_TYPE);
    Measure measure = getCohortMeasure("CohortMeasureName", library, INITIAL_POPULATION);

    String duplicateParamName = "duplicateParam";
    int fhirMeasureIntValue = 10;
    int javaParameterIntValue = 99;

    measure.addExtension(createParameterExtension(duplicateParamName, new IntegerType(fhirMeasureIntValue)));
    mockFhirResourceRetrieval(measure);

    Map<String, Parameter> parameterMap = new HashMap<>();
    parameterMap.put(duplicateParamName, new IntegerParameter(javaParameterIntValue));

    MeasureReport report = evaluator.evaluatePatientMeasure(measure.getId(), patient.getId(), parameterMap);
    assertNotNull(report);

    // Make sure report only contained one entry for the duplicate parameter
    List<Type> filteredReportParams = report.getExtension().stream()
            .filter(x -> x.getUrl().equals(MEASURE_PARAMETER_VALUE_URL))
            .map(x -> (ParameterDefinition) x.getValue())
            .filter(x -> x.getName().equals(duplicateParamName))
            .map(x -> x.getExtensionByUrl(PARAMETER_VALUE_URL).getValue())
            .collect(Collectors.toList());
    assertEquals(1, filteredReportParams.size());

    // Sanity check input parameter values were different before checking for correct value
    assertNotEquals(fhirMeasureIntValue, javaParameterIntValue);
    assertEquals(javaParameterIntValue, ((IntegerType) filteredReportParams.get(0)).getValue().intValue());
}
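The stream in the assertion unwinds a nested extension structure on the MeasureReport: an extension with URL MEASURE_PARAMETER_VALUE_URL whose value is a ParameterDefinition, which in turn carries an extension with URL PARAMETER_VALUE_URL holding the actual parameter value. As a rough sketch of that shape using the HAPI FHIR R4 model types already referenced above (illustrative only, not the library's actual report-building code; the URL constants are the ones used in the test):

    // Illustrative only: builds one report-parameter entry of the shape the
    // test's stream unwinds. The URL constants are the same ones the test uses.
    static void addReportParameter(MeasureReport report, String name, int value) {
        Extension valueExtension = new Extension();
        valueExtension.setUrl(PARAMETER_VALUE_URL);
        valueExtension.setValue(new IntegerType(value));

        ParameterDefinition definition = new ParameterDefinition();
        definition.setName(name);
        definition.addExtension(valueExtension);

        Extension reportExtension = new Extension();
        reportExtension.setUrl(MEASURE_PARAMETER_VALUE_URL);
        reportExtension.setValue(definition);
        report.addExtension(reportExtension);
    }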
Use of com.ibm.cohort.cql.evaluation.parameters.IntegerParameter in project quality-measure-and-cohort-service by Alvearie.
From the class SparkCqlEvaluatorTest, method testReadCqlJobsSuccess.
@Test
public void testReadCqlJobsSuccess() throws Exception {
    IntervalParameter measurementPeriod = new IntervalParameter();
    measurementPeriod.setStart(new DateParameter("2020-01-01")).setEnd(new DateParameter("2021-01-01"));

    IntegerParameter minimumAge = new IntegerParameter(17);

    evaluator.hadoopConfiguration = new SerializableConfiguration(SparkHadoopUtil.get().conf());

    CqlEvaluationRequests requests = evaluator.readJobSpecification("src/test/resources/simple-job/cql-jobs.json");
    assertNotNull(requests);
    assertEquals(measurementPeriod, requests.getGlobalParameters().get("Measurement Period"));
    assertEquals(1, requests.getEvaluations().size());
    assertEquals(minimumAge, requests.getEvaluations().get(0).getParameters().get("MinimumAge"));
}
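The asserted values come from the JSON job specification at src/test/resources/simple-job/cql-jobs.json. A minimal sketch of building an equivalent specification programmatically and serializing it with Jackson, reusing the setters and write pattern shown by the parameter-matrix tests later in this class (the library, expression, and context names are borrowed from those tests and are illustrative; the real resource file may contain additional settings):

    // Sketch only: a job specification with the parameter values asserted above,
    // written out with Jackson the same way the parameter-matrix tests below do.
    IntervalParameter measurementPeriod = new IntervalParameter();
    measurementPeriod.setStart(new DateParameter("2020-01-01")).setEnd(new DateParameter("2021-01-01"));

    Map<String, Parameter> globalParameters = new HashMap<>();
    globalParameters.put("Measurement Period", measurementPeriod);

    Map<String, Parameter> evaluationParameters = new HashMap<>();
    evaluationParameters.put("MinimumAge", new IntegerParameter(17));

    CqlEvaluationRequest evaluation = new CqlEvaluationRequest();
    evaluation.setDescriptor(new CqlLibraryDescriptor().setLibraryId("SampleLibrary").setVersion("1.0.0"));
    evaluation.setExpressionsByNames(Collections.singleton("IsFemale"));
    evaluation.setContextKey("Patient");
    evaluation.setContextValue("NA");
    evaluation.setParameters(evaluationParameters);

    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setGlobalParameters(globalParameters);
    requests.setEvaluations(Collections.singletonList(evaluation));

    FileUtils.write(new File("target/sketch/cql-jobs.json"),
            new ObjectMapper().writeValueAsString(requests), StandardCharsets.UTF_8);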
Use of com.ibm.cohort.cql.evaluation.parameters.IntegerParameter in project quality-measure-and-cohort-service by Alvearie.
From the class SparkCqlEvaluatorTest, method testGetFilteredRequestsGlobalParametersApplied.
@Test
public void testGetFilteredRequestsGlobalParametersApplied() {
    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setGlobalParameters(new HashMap<String, Parameter>() {
        {
            put("param1", new IntegerParameter(10));
            put("param2", new StringParameter("10"));
            put("globalParam", new DecimalParameter("10.0"));
        }
    });

    CqlEvaluationRequest request = makeEvaluationRequest("context", "lib1", "1.0.0");
    request.setExpressionsByNames(new HashSet<>(Collections.singletonList("cohortOrig")));
    request.setParameters(new HashMap<String, Parameter>() {
        {
            put("param1", new IntegerParameter(1));
            put("param2", new StringParameter("1"));
            put("param3", new DecimalParameter("1.0"));
        }
    });

    CqlEvaluationRequest request2 = makeEvaluationRequest("context", "lib2", "1.0.0");
    request2.setExpressionsByNames(new HashSet<>(Collections.singletonList("cohortOrig")));

    List<CqlEvaluationRequest> evaluations = Arrays.asList(request, request2);
    requests.setEvaluations(evaluations);

    CqlEvaluationRequests actual = evaluator.getFilteredRequests(requests, null, null);

    for (CqlEvaluationRequest evaluation : actual.getEvaluations()) {
        if (evaluation.getDescriptor().getLibraryId().equals("lib1")) {
            assertEquals(4, evaluation.getParameters().size());
            assertEquals(new IntegerParameter(1), evaluation.getParameters().get("param1"));
            assertEquals(new StringParameter("1"), evaluation.getParameters().get("param2"));
            assertEquals(new DecimalParameter("1.0"), evaluation.getParameters().get("param3"));
            assertEquals(new DecimalParameter("10.0"), evaluation.getParameters().get("globalParam"));
        } else if (evaluation.getDescriptor().getLibraryId().equals("lib2")) {
            assertEquals(3, evaluation.getParameters().size());
            assertEquals(new IntegerParameter(10), evaluation.getParameters().get("param1"));
            assertEquals(new StringParameter("10"), evaluation.getParameters().get("param2"));
            assertEquals(new DecimalParameter("10.0"), evaluation.getParameters().get("globalParam"));
        } else {
            fail("Unexpected library encountered. Expected only lib1 and lib2.");
        }
    }
}
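The assertions imply a simple merge rule: global parameters are applied to every evaluation as defaults, a request-level parameter with the same name wins (param1 and param2 for lib1), and globals with no request-level counterpart pass through unchanged (globalParam, and everything for lib2). A minimal sketch of that rule, not the project's actual getFilteredRequests implementation:

    // Sketch of the merge semantics the assertions check: start from the global
    // parameters, then let request-level parameters override same-named entries.
    static Map<String, Parameter> mergeParameters(Map<String, Parameter> globalParameters,
            Map<String, Parameter> requestParameters) {
        Map<String, Parameter> merged = new HashMap<>();
        if (globalParameters != null) {
            merged.putAll(globalParameters);
        }
        if (requestParameters != null) {
            merged.putAll(requestParameters);
        }
        return merged;
    }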
Use of com.ibm.cohort.cql.evaluation.parameters.IntegerParameter in project quality-measure-and-cohort-service by Alvearie.
From the class SparkCqlEvaluatorTest, method testParameterMatrixOutputWithKeyParametersSpecifiedSuccess.
@Test
public void testParameterMatrixOutputWithKeyParametersSpecifiedSuccess() throws Exception {
    String outputLocation = "target/output/param-matrix-key-params/patient_cohort";

    CqlEvaluationRequest template = new CqlEvaluationRequest();
    template.setDescriptor(new CqlLibraryDescriptor().setLibraryId("SampleLibrary").setVersion("1.0.0"));
    template.setExpressionsByNames(Collections.singleton("IsFemale"));
    template.setContextKey("Patient");
    template.setContextValue("NA");

    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setEvaluations(new ArrayList<>());

    List<Integer> ages = Arrays.asList(15, 17, 18);
    for (Integer age : ages) {
        Map<String, Parameter> parameters = new HashMap<>();
        parameters.put("MinimumAge", new IntegerParameter(age));

        CqlEvaluationRequest request = new CqlEvaluationRequest(template);
        request.setParameters(parameters);
        requests.getEvaluations().add(request);

        CqlExpressionConfiguration renamed = new CqlExpressionConfiguration();
        renamed.setName("IsFemale");
        renamed.setOutputColumn("Renamed");

        Map<String, Parameter> parametersWithExtraneous = new HashMap<>(parameters);
        parametersWithExtraneous.put("Extraneous", new IntegerParameter(0));

        request = new CqlEvaluationRequest(template);
        request.setExpressions(Collections.singleton(renamed));
        request.setParameters(parametersWithExtraneous);
        requests.getEvaluations().add(request);
    }

    ObjectMapper om = new ObjectMapper();
    File jobsFile = new File("target/param-matrix/cql-jobs.json");
    if (!jobsFile.exists()) {
        jobsFile.getParentFile().mkdirs();
    }
    FileUtils.write(jobsFile, om.writeValueAsString(requests), StandardCharsets.UTF_8);

    try {
        String[] args = new String[] {
                "-d", "src/test/resources/simple-job/context-definitions.json",
                "-j", jobsFile.getPath(),
                "-m", "src/test/resources/simple-job/modelinfo/simple-modelinfo-1.0.0.xml",
                "-c", "src/test/resources/simple-job/cql",
                "-i", "Patient=" + new File("src/test/resources/simple-job/testdata/patient").toURI().toString(),
                "-o", "Patient=" + new File(outputLocation).toURI().toString(),
                "--output-format", "delta",
                "--overwrite-output-for-contexts",
                "--metadata-output-path", outputLocation,
                "--key-parameters", "MinimumAge"
        };
        SparkCqlEvaluator.main(args);

        validateOutputCountsAndColumns(outputLocation,
                new HashSet<>(Arrays.asList("id", "parameters", "SampleLibrary|IsFemale", "Renamed")),
                10 * ages.size(), "delta");
    } finally {
        jobsFile.delete();
    }
}
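For orientation, the loop builds a small parameter matrix: for each MinimumAge value it adds two requests against the same library and context, one writing to the default SampleLibrary|IsFemale column and one writing to the Renamed column with an extra Extraneous parameter. A rough accounting of the expectations, assuming the simple-job test data contains 10 patients (which is what the hard-coded factor of 10 in the assertion suggests):

    // Rough accounting (assumption: 10 patients in the simple-job test data).
    int evaluationsInJobFile = 2 * ages.size();   // two requests per MinimumAge value = 6
    int expectedRows = 10 * ages.size();          // 30 rows: with --key-parameters MinimumAge,
                                                  // the two requests for an age appear to share
                                                  // the same rows, each filling its own column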
Use of com.ibm.cohort.cql.evaluation.parameters.IntegerParameter in project quality-measure-and-cohort-service by Alvearie.
From the class SparkCqlEvaluatorTest, method testParameterMatrixOutputNonOverlappingParamsSuccess.
@Test
public void testParameterMatrixOutputNonOverlappingParamsSuccess() throws Exception {
    String outputLocation = "target/output/param-matrix-non-overlap/patient_cohort";

    CqlEvaluationRequest template = new CqlEvaluationRequest();
    template.setDescriptor(new CqlLibraryDescriptor().setLibraryId("SampleLibrary").setVersion("1.0.0"));
    template.setExpressionsByNames(Collections.singleton("IsFemale"));
    template.setContextKey("Patient");
    template.setContextValue("NA");

    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setEvaluations(new ArrayList<>());

    List<Integer> ages = Arrays.asList(15, 17, 18);
    for (Integer age : ages) {
        Map<String, Parameter> parameters = new HashMap<>();
        parameters.put("MinimumAge", new IntegerParameter(age));

        CqlEvaluationRequest request = new CqlEvaluationRequest(template);
        request.setParameters(parameters);
        requests.getEvaluations().add(request);

        CqlExpressionConfiguration renamed = new CqlExpressionConfiguration();
        renamed.setName("IsFemale");
        renamed.setOutputColumn("Renamed");

        Map<String, Parameter> parametersWithExtraneous = new HashMap<>(parameters);
        parametersWithExtraneous.put("Extraneous", new IntegerParameter(0));

        request = new CqlEvaluationRequest(template);
        request.setExpressions(Collections.singleton(renamed));
        request.setParameters(parametersWithExtraneous);
        requests.getEvaluations().add(request);
    }

    ObjectMapper om = new ObjectMapper();
    File jobsFile = new File("target/output/param-matrix/cql-jobs.json");
    if (!jobsFile.exists()) {
        jobsFile.getParentFile().mkdirs();
    }
    FileUtils.write(jobsFile, om.writeValueAsString(requests), StandardCharsets.UTF_8);

    try {
        String[] args = new String[] {
                "-d", "src/test/resources/simple-job/context-definitions.json",
                "-j", jobsFile.getPath(),
                "-m", "src/test/resources/simple-job/modelinfo/simple-modelinfo-1.0.0.xml",
                "-c", "src/test/resources/simple-job/cql",
                "-i", "Patient=" + new File("src/test/resources/simple-job/testdata/patient").toURI().toString(),
                "-o", "Patient=" + new File(outputLocation).toURI().toString(),
                "--output-format", "delta",
                "--overwrite-output-for-contexts",
                "--metadata-output-path", outputLocation
        };
        SparkCqlEvaluator.main(args);

        // Because we've got a mismatch in the parameters in the first and second columns, each context
        // has a set of rows for the first parameter set where one column is populated and the other is null
        // and another set of rows where the first column is null and the second is populated.
        validateOutputCountsAndColumns(outputLocation,
                new HashSet<>(Arrays.asList("id", "parameters", "SampleLibrary|IsFemale", "Renamed")),
                10 * ages.size() * /* outputColumns= */ 2, "delta");
    } finally {
        jobsFile.delete();
    }
}
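This test is the counterpart of the previous one, run without --key-parameters. As the comment above explains, the Extraneous parameter makes the two requests for each age distinct parameter sets, so each populates its own rows with one output column filled and the other null. With the same assumed 10 patients, the expected row count doubles relative to the key-parameters test:

    // Same accounting as before (assumption: 10 patients in the simple-job test data),
    // but without --key-parameters the two requests per age no longer share rows.
    int expectedRowsWithKeyParameters = 10 * ages.size();        // 30 rows in the previous test
    int expectedRowsWithoutKeyParameters = 10 * ages.size() * 2; // 60 rows here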