Use of com.ibm.cohort.cql.evaluation.CqlEvaluationRequests in project quality-measure-and-cohort-service by Alvearie.
From class SparkCqlEvaluatorTest, method testGetFilteredRequestsGlobalParametersApplied.
@Test
public void testGetFilteredRequestsGlobalParametersApplied() {
    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setGlobalParameters(new HashMap<String, Parameter>() {
        {
            put("param1", new IntegerParameter(10));
            put("param2", new StringParameter("10"));
            put("globalParam", new DecimalParameter("10.0"));
        }
    });

    CqlEvaluationRequest request = makeEvaluationRequest("context", "lib1", "1.0.0");
    request.setExpressionsByNames(new HashSet<>(Collections.singletonList("cohortOrig")));
    request.setParameters(new HashMap<String, Parameter>() {
        {
            put("param1", new IntegerParameter(1));
            put("param2", new StringParameter("1"));
            put("param3", new DecimalParameter("1.0"));
        }
    });

    CqlEvaluationRequest request2 = makeEvaluationRequest("context", "lib2", "1.0.0");
    request2.setExpressionsByNames(new HashSet<>(Collections.singletonList("cohortOrig")));

    List<CqlEvaluationRequest> evaluations = Arrays.asList(request, request2);
    requests.setEvaluations(evaluations);

    CqlEvaluationRequests actual = evaluator.getFilteredRequests(requests, null, null);

    for (CqlEvaluationRequest evaluation : actual.getEvaluations()) {
        if (evaluation.getDescriptor().getLibraryId().equals("lib1")) {
            assertEquals(4, evaluation.getParameters().size());
            assertEquals(new IntegerParameter(1), evaluation.getParameters().get("param1"));
            assertEquals(new StringParameter("1"), evaluation.getParameters().get("param2"));
            assertEquals(new DecimalParameter("1.0"), evaluation.getParameters().get("param3"));
            assertEquals(new DecimalParameter("10.0"), evaluation.getParameters().get("globalParam"));
        } else if (evaluation.getDescriptor().getLibraryId().equals("lib2")) {
            assertEquals(3, evaluation.getParameters().size());
            assertEquals(new IntegerParameter(10), evaluation.getParameters().get("param1"));
            assertEquals(new StringParameter("10"), evaluation.getParameters().get("param2"));
            assertEquals(new DecimalParameter("10.0"), evaluation.getParameters().get("globalParam"));
        } else {
            fail("Unexpected library encountered. Expected only lib1 and lib2.");
        }
    }
}
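The makeEvaluationRequest(String, String, String) helper used above is not reproduced on this page. Below is a minimal sketch of what it plausibly does, built only from the CqlEvaluationRequest and CqlLibraryDescriptor setters shown elsewhere in these snippets; the actual helper in SparkCqlEvaluatorTest may differ, and the "NA" context value is an assumption.

private CqlEvaluationRequest makeEvaluationRequest(String contextName, String libraryId, String libraryVersion) {
    // Hypothetical reconstruction: wires up the descriptor and context the same way the tests above do.
    CqlEvaluationRequest request = new CqlEvaluationRequest();
    request.setDescriptor(new CqlLibraryDescriptor().setLibraryId(libraryId).setVersion(libraryVersion));
    request.setContextKey(contextName);
    request.setContextValue("NA");
    return request;
}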
Use of com.ibm.cohort.cql.evaluation.CqlEvaluationRequests in project quality-measure-and-cohort-service by Alvearie.
From class SparkCqlEvaluatorTest, method testParameterMatrixOutputWithKeyParametersSpecifiedSuccess.
@Test
public void testParameterMatrixOutputWithKeyParametersSpecifiedSuccess() throws Exception {
    String outputLocation = "target/output/param-matrix-key-params/patient_cohort";

    CqlEvaluationRequest template = new CqlEvaluationRequest();
    template.setDescriptor(new CqlLibraryDescriptor().setLibraryId("SampleLibrary").setVersion("1.0.0"));
    template.setExpressionsByNames(Collections.singleton("IsFemale"));
    template.setContextKey("Patient");
    template.setContextValue("NA");

    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setEvaluations(new ArrayList<>());

    List<Integer> ages = Arrays.asList(15, 17, 18);
    for (Integer age : ages) {
        Map<String, Parameter> parameters = new HashMap<>();
        parameters.put("MinimumAge", new IntegerParameter(age));

        CqlEvaluationRequest request = new CqlEvaluationRequest(template);
        request.setParameters(parameters);
        requests.getEvaluations().add(request);

        CqlExpressionConfiguration renamed = new CqlExpressionConfiguration();
        renamed.setName("IsFemale");
        renamed.setOutputColumn("Renamed");

        Map<String, Parameter> parametersWithExtraneous = new HashMap<>(parameters);
        parametersWithExtraneous.put("Extraneous", new IntegerParameter(0));

        request = new CqlEvaluationRequest(template);
        request.setExpressions(Collections.singleton(renamed));
        request.setParameters(parametersWithExtraneous);
        requests.getEvaluations().add(request);
    }

    ObjectMapper om = new ObjectMapper();
    File jobsFile = new File("target/param-matrix/cql-jobs.json");
    if (!jobsFile.exists()) {
        jobsFile.getParentFile().mkdirs();
    }
    FileUtils.write(jobsFile, om.writeValueAsString(requests), StandardCharsets.UTF_8);

    try {
        String[] args = new String[] {
            "-d", "src/test/resources/simple-job/context-definitions.json",
            "-j", jobsFile.getPath(),
            "-m", "src/test/resources/simple-job/modelinfo/simple-modelinfo-1.0.0.xml",
            "-c", "src/test/resources/simple-job/cql",
            "-i", "Patient=" + new File("src/test/resources/simple-job/testdata/patient").toURI().toString(),
            "-o", "Patient=" + new File(outputLocation).toURI().toString(),
            "--output-format", "delta",
            "--overwrite-output-for-contexts",
            "--metadata-output-path", outputLocation,
            "--key-parameters", "MinimumAge"
        };
        SparkCqlEvaluator.main(args);

        validateOutputCountsAndColumns(outputLocation,
                new HashSet<>(Arrays.asList("id", "parameters", "SampleLibrary|IsFemale", "Renamed")),
                10 * ages.size(), "delta");
    } finally {
        jobsFile.delete();
    }
}
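validateOutputCountsAndColumns is another test helper that does not appear on this page. The following is only a sketch of the kind of check it performs, assuming a SparkSession field named spark, the Delta Lake connector on the test classpath, and imports of org.apache.spark.sql.Dataset and org.apache.spark.sql.Row; the real helper may verify more than this.

private void validateOutputCountsAndColumns(String path, Set<String> expectedColumns, long expectedRowCount, String format) {
    // Read the dataset back in the same format it was written, then compare column names and row count.
    Dataset<Row> output = spark.read().format(format).load(path);
    assertEquals(expectedColumns, new HashSet<>(Arrays.asList(output.columns())));
    assertEquals(expectedRowCount, output.count());
}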
Use of com.ibm.cohort.cql.evaluation.CqlEvaluationRequests in project quality-measure-and-cohort-service by Alvearie.
From class SparkCqlEvaluatorTest, method testParameterMatrixOutputNonOverlappingParamsSuccess.
@Test
public void testParameterMatrixOutputNonOverlappingParamsSuccess() throws Exception {
    String outputLocation = "target/output/param-matrix-non-overlap/patient_cohort";

    CqlEvaluationRequest template = new CqlEvaluationRequest();
    template.setDescriptor(new CqlLibraryDescriptor().setLibraryId("SampleLibrary").setVersion("1.0.0"));
    template.setExpressionsByNames(Collections.singleton("IsFemale"));
    template.setContextKey("Patient");
    template.setContextValue("NA");

    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setEvaluations(new ArrayList<>());

    List<Integer> ages = Arrays.asList(15, 17, 18);
    for (Integer age : ages) {
        Map<String, Parameter> parameters = new HashMap<>();
        parameters.put("MinimumAge", new IntegerParameter(age));

        CqlEvaluationRequest request = new CqlEvaluationRequest(template);
        request.setParameters(parameters);
        requests.getEvaluations().add(request);

        CqlExpressionConfiguration renamed = new CqlExpressionConfiguration();
        renamed.setName("IsFemale");
        renamed.setOutputColumn("Renamed");

        Map<String, Parameter> parametersWithExtraneous = new HashMap<>(parameters);
        parametersWithExtraneous.put("Extraneous", new IntegerParameter(0));

        request = new CqlEvaluationRequest(template);
        request.setExpressions(Collections.singleton(renamed));
        request.setParameters(parametersWithExtraneous);
        requests.getEvaluations().add(request);
    }

    ObjectMapper om = new ObjectMapper();
    File jobsFile = new File("target/output/param-matrix/cql-jobs.json");
    if (!jobsFile.exists()) {
        jobsFile.getParentFile().mkdirs();
    }
    FileUtils.write(jobsFile, om.writeValueAsString(requests), StandardCharsets.UTF_8);

    try {
        String[] args = new String[] {
            "-d", "src/test/resources/simple-job/context-definitions.json",
            "-j", jobsFile.getPath(),
            "-m", "src/test/resources/simple-job/modelinfo/simple-modelinfo-1.0.0.xml",
            "-c", "src/test/resources/simple-job/cql",
            "-i", "Patient=" + new File("src/test/resources/simple-job/testdata/patient").toURI().toString(),
            "-o", "Patient=" + new File(outputLocation).toURI().toString(),
            "--output-format", "delta",
            "--overwrite-output-for-contexts",
            "--metadata-output-path", outputLocation
        };
        SparkCqlEvaluator.main(args);

        // Because we've got a mismatch in the parameters in the first and second columns, each context
        // has a set of rows for the first parameter set where one column is populated and the other is null
        // and another set of rows where the first column is null and the second is populated.
        validateOutputCountsAndColumns(outputLocation,
                new HashSet<>(Arrays.asList("id", "parameters", "SampleLibrary|IsFemale", "Renamed")),
                10 * ages.size() * /*outputColumns=*/ 2, "delta");
    } finally {
        jobsFile.delete();
    }
}
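To make the row-count arithmetic in the comment above concrete: three ages times two parameter maps yield six evaluation requests, and because the maps differ and no --key-parameters value is supplied, their results are not merged into shared rows. A hedged way to confirm this against the written output, a fragment only, under the same SparkSession and Delta assumptions as the earlier sketch and assuming the "parameters" column encodes the full parameter map for each request:

Dataset<Row> rows = spark.read().format("delta").load(outputLocation);
// One distinct "parameters" value per request: {MinimumAge=x} and {MinimumAge=x, Extraneous=0} for each age,
// so 2 * ages.size() distinct parameter sets are expected (an assumption about the output encoding).
long distinctParameterSets = rows.select("parameters").distinct().count();
// Total rows should be 10 * ages.size() * 2, matching the assertion in the test above.
long totalRows = rows.count();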
Use of com.ibm.cohort.cql.evaluation.CqlEvaluationRequests in project quality-measure-and-cohort-service by Alvearie.
From class SparkSchemaCreatorTest, method testMultipleContexts.
@Test
public void testMultipleContexts() throws Exception {
    ContextDefinitions contextDefinitions = makeContextDefinitions(Arrays.asList(
            makeContextDefinition("Context1Id", "Type1", "id"),
            makeContextDefinition("Context2Id", "Type2", "id")));

    CqlEvaluationRequests cqlEvaluationRequests = makeEvaluationRequests(Arrays.asList(
            makeEvaluationRequest(new CqlLibraryDescriptor().setLibraryId("Context1Id").setVersion("1.0.0"),
                    new HashSet<>(Collections.singletonList("define_integer")), "Context1Id"),
            makeEvaluationRequest(new CqlLibraryDescriptor().setLibraryId("Context2Id").setVersion("1.0.0"),
                    new HashSet<>(Arrays.asList("define_date", "define_datetime")), "Context2Id")));

    SparkSchemaCreator schemaCreator = new SparkSchemaCreator(cqlLibraryProvider, cqlEvaluationRequests,
            contextDefinitions, outputColumnNameFactory, cqlTranslator);

    Map<String, StructType> actualSchemas = schemaCreator.calculateSchemasForContexts(Arrays.asList("Context1Id", "Context2Id"));

    Map<String, Tuple2<String, StructType>> expectedSchemas = new HashMap<String, Tuple2<String, StructType>>() {
        {
            put("Context1Id", new Tuple2<>("id", new StructType()
                    .add("id", DataTypes.IntegerType, false)
                    .add("parameters", DataTypes.StringType, false)
                    .add("Context1Id.define_integer", DataTypes.IntegerType, true)));
            put("Context2Id", new Tuple2<>("id", new StructType()
                    .add("id", DataTypes.IntegerType, false)
                    .add("parameters", DataTypes.StringType, false)
                    .add("Context2Id.define_date", DataTypes.DateType, true)
                    .add("Context2Id.define_datetime", DataTypes.TimestampType, true)));
        }
    };

    for (Map.Entry<String, Tuple2<String, StructType>> entry : expectedSchemas.entrySet()) {
        String contextName = entry.getKey();
        validateSchemas(expectedSchemas.get(contextName)._2(), actualSchemas.get(contextName), expectedSchemas.get(contextName)._1());
    }
}
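The makeContextDefinition and makeContextDefinitions helpers are also omitted from this page. A rough sketch of their likely shape follows, assuming ContextDefinition and ContextDefinitions expose setters along the lines of setName, setPrimaryDataType, setPrimaryKeyColumn, and setContextDefinitions; the actual classes in the project may name these differently.

private ContextDefinition makeContextDefinition(String name, String primaryDataType, String primaryKeyColumn) {
    // Hypothetical reconstruction of the helper; the setter names are assumptions.
    ContextDefinition context = new ContextDefinition();
    context.setName(name);
    context.setPrimaryDataType(primaryDataType);
    context.setPrimaryKeyColumn(primaryKeyColumn);
    return context;
}

private ContextDefinitions makeContextDefinitions(List<ContextDefinition> definitions) {
    ContextDefinitions contextDefinitions = new ContextDefinitions();
    contextDefinitions.setContextDefinitions(definitions);
    return contextDefinitions;
}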
Use of com.ibm.cohort.cql.evaluation.CqlEvaluationRequests in project quality-measure-and-cohort-service by Alvearie.
From class SparkSchemaCreatorTest, method makeEvaluationRequests.
private CqlEvaluationRequests makeEvaluationRequests(List<CqlEvaluationRequest> evaluationRequestList) {
    CqlEvaluationRequests requests = new CqlEvaluationRequests();
    requests.setEvaluations(evaluationRequestList);
    return requests;
}