Use of au.csiro.pathling.fhirpath.literal.BooleanLiteralPath in the project pathling by aehrc.
The class TranslateFunction, method validateInput.
/**
 * Validates the input to the translate function: a terminology service must be configured, the
 * input must be a Coding or CodeableConcept path, and between one and three arguments of the
 * expected literal types must be supplied.
 *
 * @param input the {@link NamedFunctionInput} to validate
 */
private void validateInput(@Nonnull final NamedFunctionInput input) {
  final ParserContext context = input.getContext();
  // Terminology functions cannot be evaluated without a configured terminology service.
  checkUserInput(context.getTerminologyServiceFactory().isPresent(),
      "Attempt to call terminology function " + NAME
          + " when terminology service has not been configured");

  final FhirPath inputPath = input.getInput();
  // Only Coding and CodeableConcept inputs can be translated.
  checkUserInput(TerminologyUtils.isCodingOrCodeableConcept(inputPath),
      String.format("Input to %s function is of unsupported type: %s", NAME,
          inputPath.getExpression()));

  final List<FhirPath> arguments = input.getArguments();
  final int argumentCount = arguments.size();
  checkUserInput(argumentCount >= 1 && argumentCount <= 3,
      NAME + " function accepts one required and two optional arguments");
  // Argument 1 (required): the concept map URL, as a String literal.
  checkUserInput(arguments.get(0) instanceof StringLiteralPath,
      String.format("Function `%s` expects `%s` as argument %s", NAME, "String literal", 1));
  // Argument 2 (optional): the reverse flag, as a Boolean literal.
  checkUserInput(argumentCount <= 1 || arguments.get(1) instanceof BooleanLiteralPath,
      String.format("Function `%s` expects `%s` as argument %s", NAME, "Boolean literal", 2));
  // Argument 3 (optional): the equivalences, as a String literal.
  checkUserInput(argumentCount <= 2 || arguments.get(2) instanceof StringLiteralPath,
      String.format("Function `%s` expects `%s` as argument %s", NAME, "String literal", 3));
}
Use of au.csiro.pathling.fhirpath.literal.BooleanLiteralPath in the project pathling by aehrc.
The class SearchExecutor, method initializeDataset.
/**
 * Builds the dataset backing this search, applying the configured filter expressions (if any)
 * to the subject resource dataset via a left semi join on resource ID.
 *
 * @return the (possibly filtered, possibly cached) dataset of subject resources
 */
@Nonnull
private Dataset<Row> initializeDataset() {
final ResourcePath resourcePath = ResourcePath.build(getFhirContext(), getDatabase(), subjectResource, subjectResource.toCode(), true, true);
final Dataset<Row> subjectDataset = resourcePath.getDataset();
final Column subjectIdColumn = resourcePath.getIdColumn();
final Dataset<Row> dataset;
if (filters.isEmpty() || filters.get().getValuesAsQueryTokens().isEmpty()) {
// If there are no filters, return all resources.
dataset = subjectDataset;
} else {
final Collection<FhirPath> fhirPaths = new ArrayList<>();
@Nullable Column filterIdColumn = null;
@Nullable Column filterColumn = null;
// A fresh context is built here (rather than reusing resourcePath) so that filter parsing
// starts from an unfiltered view of the subject resource.
ResourcePath currentContext = ResourcePath.build(getFhirContext(), getDatabase(), subjectResource, subjectResource.toCode(), true);
// Parameters within an OR group are combined with OR; groups are combined with AND, per
// https://hl7.org/fhir/R4/search.html#combining.
for (final StringOrListParam orParam : filters.get().getValuesAsQueryTokens()) {
@Nullable Column orColumn = null;
for (final StringParam param : orParam.getValuesAsQueryTokens()) {
// Each expression is parsed in a context keyed to the current context's ID column.
final ParserContext parserContext = buildParserContext(currentContext, Collections.singletonList(currentContext.getIdColumn()));
final Parser parser = new Parser(parserContext);
final String expression = param.getValue();
checkUserInput(!expression.isBlank(), "Filter expression cannot be blank");
final FhirPath fhirPath = parser.parse(expression);
// Filters must evaluate to a Boolean-typed path (or Boolean literal).
checkUserInput(fhirPath instanceof BooleanPath || fhirPath instanceof BooleanLiteralPath, "Filter expression must be of Boolean type: " + fhirPath.getExpression());
final Column filterValue = fhirPath.getValueColumn();
// Add each expression to a list that will later be joined.
fhirPaths.add(fhirPath);
// Combine all the OR columns with OR logic.
orColumn = orColumn == null ? filterValue : orColumn.or(filterValue);
// Capture the ID column of the first parsed expression, used later to join the
// subject resource dataset with the joined filter datasets.
if (filterIdColumn == null) {
filterIdColumn = fhirPath.getIdColumn();
}
// Update the context to build the next expression from the same dataset.
currentContext = currentContext.copy(currentContext.getExpression(), fhirPath.getDataset(), fhirPath.getIdColumn(), currentContext.getEidColumn(), fhirPath.getValueColumn(), currentContext.isSingular(), currentContext.getThisColumn());
}
// Combine all the columns at this level with AND logic.
filterColumn = filterColumn == null ? orColumn : filterColumn.and(orColumn);
}
// The filters collection was non-empty, so at least one expression must have been parsed.
checkNotNull(filterIdColumn);
checkNotNull(filterColumn);
check(!fhirPaths.isEmpty());
// Get the full resources which are present in the filtered dataset.
final String filterIdAlias = randomAlias();
final Dataset<Row> filteredIds = currentContext.getDataset().select(filterIdColumn.alias(filterIdAlias)).filter(filterColumn);
// left_semi keeps only subject rows whose ID appears in the filtered ID set, without
// duplicating rows or adding columns.
dataset = subjectDataset.join(filteredIds, subjectIdColumn.equalTo(col(filterIdAlias)), "left_semi");
}
if (getConfiguration().getSpark().getCacheDatasets()) {
// We cache the dataset because we know it will be accessed for both the total and the record
// retrieval.
log.debug("Caching search dataset");
dataset.cache();
}
return dataset;
}
Use of au.csiro.pathling.fhirpath.literal.BooleanLiteralPath in the project pathling by aehrc.
The class QueryExecutor, method getFilteredIds.
/**
 * Evaluates each filter expression against the input context and combines their Boolean value
 * columns using the supplied operator, returning a dataset of matching resource IDs (aliased,
 * ready for joining) along with the aliased ID column.
 *
 * @param filters the FHIRPath filter expressions to evaluate
 * @param inputContext the resource context the expressions are evaluated against
 * @param operator the operator used to combine successive filter columns (e.g. AND, OR)
 * @return a {@link DatasetWithColumn} holding the filtered ID dataset and its ID column
 */
@Nonnull
private DatasetWithColumn getFilteredIds(@Nonnull final Iterable<String> filters,
    @Nonnull final ResourcePath inputContext,
    @Nonnull final BinaryOperator<Column> operator) {
  ResourcePath evaluationContext = inputContext;
  @Nullable Column combinedFilter = null;
  for (final String filter : filters) {
    // Parse the filter expression within a context keyed to the current ID column.
    final ParserContext parserContext = buildParserContext(evaluationContext,
        Collections.singletonList(evaluationContext.getIdColumn()));
    final FhirPath fhirPath = new Parser(parserContext).parse(filter);
    // A filter must evaluate to a Boolean-typed path (or Boolean literal).
    checkUserInput(fhirPath instanceof BooleanPath || fhirPath instanceof BooleanLiteralPath,
        "Filter expression must be of Boolean type: " + fhirPath.getExpression());
    // Fold this filter's value column into the combined filter using the supplied operator.
    final Column filterValue = fhirPath.getValueColumn();
    combinedFilter = combinedFilter == null
                     ? filterValue
                     : operator.apply(combinedFilter, filterValue);
    // Carry the parsed dataset forward so the next expression is built from the same dataset.
    evaluationContext = evaluationContext.copy(evaluationContext.getExpression(),
        fhirPath.getDataset(), evaluationContext.getIdColumn(),
        evaluationContext.getEidColumn(), evaluationContext.getValueColumn(),
        evaluationContext.isSingular(), evaluationContext.getThisColumn());
  }
  checkNotNull(combinedFilter);
  // Return a dataset of filtered IDs with an aliased ID column, ready for joining.
  final String filterIdAlias = randomAlias();
  final Dataset<Row> dataset = evaluationContext.getDataset()
      .select(evaluationContext.getIdColumn().alias(filterIdAlias));
  return new DatasetWithColumn(dataset.filter(combinedFilter), col(filterIdAlias));
}
Use of au.csiro.pathling.fhirpath.literal.BooleanLiteralPath in the project pathling by aehrc.
The class WhereFunctionTest, method throwsErrorIfArgumentIsLiteral.
/**
 * Verifies that the where function rejects a literal argument with an informative error message.
 */
@Test
void throwsErrorIfArgumentIsLiteral() {
  final ResourcePath inputPath = new ResourcePathBuilder(spark).build();
  final BooleanLiteralPath literalArgument = BooleanLiteralPath.fromString("true", inputPath);
  final ParserContext context = new ParserContextBuilder(spark, fhirContext).build();
  final NamedFunctionInput functionInput = new NamedFunctionInput(context, inputPath,
      Collections.singletonList(literalArgument));
  // Invoking where with a literal argument should raise an InvalidUserInputError.
  final InvalidUserInputError error = assertThrows(InvalidUserInputError.class,
      () -> NamedFunction.getInstance("where").invoke(functionInput));
  assertEquals("Argument to where function cannot be a literal: true", error.getMessage());
}
Use of au.csiro.pathling.fhirpath.literal.BooleanLiteralPath in the project pathling by aehrc.
The class TranslateFunctionTest, method translateCodeableConceptWithNonDefaultArguments.
/**
 * Exercises translate on a collection of CodeableConcepts with all three arguments supplied
 * (concept map URL, reverse = true, equivalences = "narrower,equivalent"), checking both the
 * resulting dataset and the arguments passed to the terminology service.
 */
@Test
void translateCodeableConceptWithNonDefaultArguments() {
final Optional<ElementDefinition> optionalDefinition = FhirHelpers.getChildOfResource(fhirContext, "Encounter", "type");
assertTrue(optionalDefinition.isPresent());
final ElementDefinition definition = optionalDefinition.get();
// The translations are
// {
// coding1 -> [translated1],
// coding2 -> [translated1, translated2]
// coding4 -> [translated2]
// }
// Use cases:
// 1. [ {C2,C3,C1}, {C3}, {C4} ] -> [ [T1, T2],[], [T2]]
// 2. [ {C3, C5}, {C3} ] -> [ [], [] ]
// 3. [ {C2} ] -> [[T1, T2]]
// 4. [ {C3}] -> [[]]
// 5. [ ]-> []
// 6. null -> null
// Build the input dataset: one row per CodeableConcept element, keyed by resource ID and
// element ID (eid), covering the use cases enumerated above.
final Dataset<Row> inputDataset = new DatasetBuilder(spark).withIdColumn().withEidColumn().withStructTypeColumns(codeableConceptStructType()).withRow("encounter-1", makeEid(0), rowFromCodeableConcept(new CodeableConcept(CODING_2).addCoding(CODING_3).addCoding(CODING_1))).withRow("encounter-1", makeEid(1), rowFromCodeableConcept(new CodeableConcept(CODING_3).addCoding(CODING_5))).withRow("encounter-1", makeEid(2), rowFromCodeableConcept(new CodeableConcept(CODING_4))).withRow("encounter-2", makeEid(0), rowFromCodeableConcept(new CodeableConcept(CODING_3).addCoding(CODING_5))).withRow("encounter-2", makeEid(1), rowFromCodeableConcept(new CodeableConcept(CODING_3))).withRow("encounter-3", makeEid(0), rowFromCodeableConcept(new CodeableConcept(CODING_2))).withRow("encounter-4", makeEid(0), rowFromCodeableConcept(new CodeableConcept(CODING_3))).withRow("encounter-5", makeEid(0), null).withRow("encounter-6", null, null).buildWithStructValue();
final ElementPath inputExpression = new ElementPathBuilder(spark).dataset(inputDataset).idAndEidAndValueColumns().expression("Encounter.type").singular(false).definition(definition).buildDefined();
// Stub translator mapping: C1 -> [T1], C2 -> [T1, T2], C4 -> [T2]; other codings translate
// to nothing.
final ConceptTranslator returnedConceptTranslator = ConceptTranslatorBuilder.toSystem(DEST_SYSTEM_URI).put(new SimpleCoding(CODING_1), TRANSLATED_1).put(new SimpleCoding(CODING_2), TRANSLATED_1, TRANSLATED_2).put(new SimpleCoding(CODING_4), TRANSLATED_2).build();
// Create a mock terminology client.
when(terminologyService.translate(any(), any(), anyBoolean(), any())).thenReturn(returnedConceptTranslator);
// Prepare the inputs to the function.
final ParserContext parserContext = new ParserContextBuilder(spark, fhirContext).idColumn(inputExpression.getIdColumn()).terminologyClientFactory(terminologyServiceFactory).build();
final StringLiteralPath conceptMapUrlArgument = StringLiteralPath.fromString("'" + CONCEPT_MAP2_URI + "'", inputExpression);
// reverse = true: translate against the reverse direction of the concept map.
final BooleanLiteralPath reverseArgument = BooleanLiteralPath.fromString("true", inputExpression);
final StringLiteralPath equivalenceArgument = StringLiteralPath.fromString("narrower,equivalent", inputExpression);
final NamedFunctionInput translateInput = new NamedFunctionInput(parserContext, inputExpression, Arrays.asList(conceptMapUrlArgument, reverseArgument, equivalenceArgument));
// Invoke the function.
final FhirPath result = new TranslateFunction().invoke(translateInput);
// Expected rows: one Coding per translation, with a nested eid; a single null row where a
// concept had no translations (empty result) or the input itself was null.
final Dataset<Row> expectedResult = new DatasetBuilder(spark).withIdColumn().withEidColumn().withStructTypeColumns(codingStructType()).withRow("encounter-1", makeEid(0, 0), rowFromCoding(TRANSLATED_1)).withRow("encounter-1", makeEid(0, 1), rowFromCoding(TRANSLATED_2)).withRow("encounter-1", makeEid(1, 0), null).withRow("encounter-1", makeEid(2, 0), rowFromCoding(TRANSLATED_2)).withRow("encounter-2", makeEid(0, 0), null).withRow("encounter-2", makeEid(1, 0), null).withRow("encounter-3", makeEid(0, 0), rowFromCoding(TRANSLATED_1)).withRow("encounter-3", makeEid(0, 1), rowFromCoding(TRANSLATED_2)).withRow("encounter-4", makeEid(0, 0), null).withRow("encounter-5", makeEid(0, 0), null).withRow("encounter-6", null, null).buildWithStructValue();
// Check the result.
assertThat(result).hasExpression("Encounter.type.translate('" + CONCEPT_MAP2_URI + "', true, 'narrower,equivalent')").isElementPath(CodingPath.class).hasFhirType(FHIRDefinedType.CODING).isNotSingular().selectOrderedResultWithEid().hasRows(expectedResult);
// Verify mocks
// All distinct source codings (C1..C5) should be submitted in a single translate call, with
// reverse = true and the parsed equivalences in argument order.
final Set<SimpleCoding> expectedSourceCodings = ImmutableSet.of(new SimpleCoding(CODING_1), new SimpleCoding(CODING_2), new SimpleCoding(CODING_3), new SimpleCoding(CODING_4), new SimpleCoding(CODING_5));
final List<ConceptMapEquivalence> expectedEquivalences = Arrays.asList(ConceptMapEquivalence.NARROWER, ConceptMapEquivalence.EQUIVALENT);
verify(terminologyService).translate(eq(expectedSourceCodings), eq(CONCEPT_MAP2_URI), eq(true), eq(expectedEquivalences));
verifyNoMoreInteractions(terminologyService);
}
Aggregations