use of au.csiro.pathling.fhirpath.FhirPath in project pathling by aehrc.
The invoke method of the EmptyFunction class.
@Nonnull
@Override
public FhirPath invoke(@Nonnull final NamedFunctionInput input) {
  // The empty() function takes no arguments.
  checkNoArguments(NAME, input);
  final NonLiteralPath contextPath = input.getInput();
  final String fhirPathExpression = expressionFromInput(input, NAME);
  // Delegate to count(): the input collection is empty exactly when its count is zero.
  final FhirPath counted = new CountFunction().invoke(input);
  final Column isEmpty = counted.getValueColumn().equalTo(0);
  // The result is a singular boolean element with no definition of its own.
  return ElementPath.build(fhirPathExpression, counted.getDataset(),
      contextPath.getIdColumn(), Optional.empty(), isEmpty, true, Optional.empty(),
      contextPath.getThisColumn(), FHIRDefinedType.BOOLEAN);
}
use of au.csiro.pathling.fhirpath.FhirPath in project pathling by aehrc.
The invoke method of the ExtensionFunction class.
@Nonnull
@Override
public FhirPath invoke(@Nonnull final NamedFunctionInput input) {
  final String expression = NamedFunction.expressionFromInput(input, NAME);
  // extension() requires exactly one argument: a string literal holding the extension URL.
  checkUserInput(input.getArguments().size() == 1,
      "extension function must have one argument: " + expression);
  final FhirPath urlArgument = input.getArguments().get(0);
  checkUserInput(urlArgument instanceof StringLiteralPath,
      "extension function must have argument of type String literal: " + expression);
  final NonLiteralPath inputPath = input.getInput();
  // Traverse from the input to its `extension` elements.
  final ElementPath extensions = new PathTraversalOperator().invoke(
      new PathTraversalInput(input.getContext(), inputPath,
          ExtensionSupport.EXTENSION_ELEMENT_NAME()));
  // Now we need to create a correct argument context for the `where` call.
  final ParserContext whereContext = input.getContext();
  final FhirPath extensionUrlPath = new PathTraversalOperator().invoke(
      new PathTraversalInput(whereContext, extensions.toThisPath(), "url"));
  // Keep only those extensions whose `url` equals the supplied literal.
  final FhirPath urlCondition = new ComparisonOperator(ComparisonOperation.EQUALS).invoke(
      new OperatorInput(whereContext, extensionUrlPath, urlArgument));
  // Delegate the filtering to where(), overriding the expression in the function input.
  return new WhereFunction().invoke(new NamedFunctionInput(input.getContext(), extensions,
      Collections.singletonList(urlCondition), expression));
}
use of au.csiro.pathling.fhirpath.FhirPath in project pathling by aehrc.
The doesNotCountNullElements method of the CountFunctionTest class.
@Test
void doesNotCountNullElements() {
  // One of the three patients has a null gender; count() must skip it.
  final Dataset<Row> source = new DatasetBuilder(spark)
      .withIdColumn()
      .withColumn("gender", DataTypes.StringType)
      .withRow("patient-1", "female")
      .withRow("patient-2", null)
      .withRow("patient-3", "male")
      .build();
  final ElementPath genderPath = new ElementPathBuilder(spark)
      .expression("gender")
      .fhirType(FHIRDefinedType.CODE)
      .dataset(source)
      .idAndValueColumns()
      .build();
  final ParserContext context = new ParserContextBuilder(spark, fhirContext)
      .idColumn(genderPath.getIdColumn())
      .groupingColumns(Collections.emptyList())
      .build();
  final NamedFunctionInput functionInput =
      new NamedFunctionInput(context, genderPath, Collections.emptyList());
  final FhirPath result = NamedFunction.getInstance("count").invoke(functionInput);
  // Only the two non-null gender values contribute to the count.
  final Dataset<Row> expected = new DatasetBuilder(spark)
      .withIdColumn()
      .withColumn(DataTypes.LongType)
      .withRow("patient-1", 2L)
      .build();
  assertThat(result)
      .hasExpression("gender.count()")
      .isSingular()
      .isElementPath(IntegerPath.class)
      .hasFhirType(FHIRDefinedType.UNSIGNEDINT)
      .selectOrderedResult()
      .hasRows(expected);
}
use of au.csiro.pathling.fhirpath.FhirPath in project pathling by aehrc.
The countsByResourceIdentity method of the CountFunctionTest class.
@Test
void countsByResourceIdentity() {
  // Three distinct patient resources, each of which should yield a count of one.
  final Dataset<Row> patients = new ResourceDatasetBuilder(spark)
      .withIdColumn()
      .withColumn("gender", DataTypes.StringType)
      .withColumn("active", DataTypes.BooleanType)
      .withRow("patient-1", "female", true)
      .withRow("patient-2", "female", false)
      .withRow("patient-3", "male", true)
      .build();
  when(database.read(ResourceType.PATIENT)).thenReturn(patients);
  final ResourcePath patientPath =
      ResourcePath.build(fhirContext, database, ResourceType.PATIENT, "Patient", false);
  // Group by resource identity so each patient gets its own count.
  final ParserContext context = new ParserContextBuilder(spark, fhirContext)
      .idColumn(patientPath.getIdColumn())
      .groupingColumns(Collections.singletonList(patientPath.getIdColumn()))
      .inputExpression("Patient")
      .build();
  final NamedFunctionInput functionInput =
      new NamedFunctionInput(context, patientPath, Collections.emptyList());
  final FhirPath result = NamedFunction.getInstance("count").invoke(functionInput);
  final Dataset<Row> expected = new DatasetBuilder(spark)
      .withIdColumn()
      .withColumn(DataTypes.LongType)
      .withRow("patient-1", 1L)
      .withRow("patient-2", 1L)
      .withRow("patient-3", 1L)
      .build();
  assertThat(result)
      .hasExpression("count()")
      .isSingular()
      .isElementPath(IntegerPath.class)
      .hasFhirType(FHIRDefinedType.UNSIGNEDINT)
      .selectOrderedResult()
      .hasRows(expected);
}
use of au.csiro.pathling.fhirpath.FhirPath in project pathling by aehrc.
The countsByGrouping method of the CountFunctionTest class.
@Test
void countsByGrouping() {
  // Two female rows and one male row; counts should be grouped by gender.
  final Dataset<Row> patients = new ResourceDatasetBuilder(spark)
      .withIdColumn()
      .withColumn("gender", DataTypes.StringType)
      .withColumn("active", DataTypes.BooleanType)
      .withRow("patient-1", "female", true)
      .withRow("patient-2", "female", false)
      .withRow("patient-2", "male", true)
      .build();
  when(database.read(ResourceType.PATIENT)).thenReturn(patients);
  final ResourcePath patientPath = new ResourcePathBuilder(spark)
      .database(database)
      .resourceType(ResourceType.PATIENT)
      .expression("Patient")
      .build();
  // Group by the gender element rather than by resource identity.
  final Column genderColumn = patientPath.getElementColumn("gender");
  final ParserContext context = new ParserContextBuilder(spark, fhirContext)
      .groupingColumns(Collections.singletonList(genderColumn))
      .inputExpression("Patient")
      .build();
  final NamedFunctionInput functionInput =
      new NamedFunctionInput(context, patientPath, Collections.emptyList());
  final FhirPath result = NamedFunction.getInstance("count").invoke(functionInput);
  final Dataset<Row> expected = new DatasetBuilder(spark)
      .withColumn(DataTypes.StringType)
      .withColumn(DataTypes.LongType)
      .withRow("female", 2L)
      .withRow("male", 1L)
      .build();
  assertThat(result)
      .hasExpression("count()")
      .isSingular()
      .isElementPath(IntegerPath.class)
      .hasFhirType(FHIRDefinedType.UNSIGNEDINT)
      .selectGroupingResult(Collections.singletonList(genderColumn))
      .hasRows(expected);
}
Aggregations