Use of com.amazonaws.services.dynamodbv2.model.Condition in project camel by apache.
The class QueryCommandTest, method execute.
@Test
public void execute() {
    Map<String, AttributeValue> startKey = new HashMap<String, AttributeValue>();
    startKey.put("1", new AttributeValue("startKey"));
    List<String> attributeNames = Arrays.asList("attrNameOne", "attrNameTwo");

    // Configure the query through exchange headers.
    exchange.getIn().setHeader(DdbConstants.ATTRIBUTE_NAMES, attributeNames);
    exchange.getIn().setHeader(DdbConstants.CONSISTENT_READ, true);
    exchange.getIn().setHeader(DdbConstants.START_KEY, startKey);
    exchange.getIn().setHeader(DdbConstants.LIMIT, 10);
    exchange.getIn().setHeader(DdbConstants.SCAN_INDEX_FORWARD, true);

    Map<String, Condition> keyConditions = new HashMap<String, Condition>();
    Condition condition = new Condition()
            .withComparisonOperator(ComparisonOperator.GT.toString())
            .withAttributeValueList(new AttributeValue().withN("1985"));
    keyConditions.put("1", condition);
    exchange.getIn().setHeader(DdbConstants.KEY_CONDITIONS, keyConditions);

    command.execute();

    // The mock client returns a canned result; verify it was copied into headers.
    Map<String, AttributeValue> mapAssert = new HashMap<String, AttributeValue>();
    mapAssert.put("1", new AttributeValue("LAST_KEY"));
    ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(DdbConstants.CONSUMED_CAPACITY);
    assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(DdbConstants.COUNT, Integer.class));
    assertEquals(Double.valueOf(1.0), consumed.getCapacityUnits());
    assertEquals(mapAssert, exchange.getIn().getHeader(DdbConstants.LAST_EVALUATED_KEY, Map.class));
    assertEquals(keyConditions, exchange.getIn().getHeader(DdbConstants.KEY_CONDITIONS, Map.class));
    Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(DdbConstants.ITEMS, List.class).get(0);
    assertEquals(new AttributeValue("attrValue"), items.get("attrName"));
}
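The fixtures exchange, command, and ddbClient are created outside this snippet. A plausible setup, following the usual pattern in these Camel DynamoDB component tests (the mock class name AmazonDDBClientMock and the DdbConfiguration wiring are assumptions, not shown in the source), might look like:

private AmazonDDBClientMock ddbClient; // assumed mock that captures requests and returns canned results
private DdbConfiguration configuration;
private Exchange exchange;
private QueryCommand command;

@Before
public void setUp() {
    ddbClient = new AmazonDDBClientMock();
    configuration = new DdbConfiguration();
    configuration.setTableName("DOMAIN1"); // hypothetical table name
    exchange = new DefaultExchange(new DefaultCamelContext());
    command = new QueryCommand(ddbClient, configuration, exchange);
}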
Use of com.amazonaws.services.dynamodbv2.model.Condition in project camel by apache.
The class ScanCommandTest, method execute.
@Test
public void execute() {
    Map<String, Condition> scanFilter = new HashMap<String, Condition>();
    Condition condition = new Condition()
            .withComparisonOperator(ComparisonOperator.GT.toString())
            .withAttributeValueList(new AttributeValue().withN("1985"));
    scanFilter.put("year", condition);
    exchange.getIn().setHeader(DdbConstants.SCAN_FILTER, scanFilter);

    command.execute();

    // The mock client records the request and returns a canned result.
    Map<String, AttributeValue> mapAssert = new HashMap<String, AttributeValue>();
    mapAssert.put("1", new AttributeValue("LAST_KEY"));
    ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(DdbConstants.CONSUMED_CAPACITY);
    assertEquals(scanFilter, ddbClient.scanRequest.getScanFilter());
    assertEquals(Integer.valueOf(10), exchange.getIn().getHeader(DdbConstants.SCANNED_COUNT, Integer.class));
    assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(DdbConstants.COUNT, Integer.class));
    assertEquals(Double.valueOf(1.0), consumed.getCapacityUnits());
    assertEquals(mapAssert, exchange.getIn().getHeader(DdbConstants.LAST_EVALUATED_KEY, Map.class));
    Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(DdbConstants.ITEMS, List.class).get(0);
    assertEquals(new AttributeValue("attrValue"), items.get("attrName"));
}
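The assertion on ddbClient.scanRequest shows that the mock client records the request it receives and answers with canned values matching the expectations above. A minimal sketch of such a mock (the class name and captured field are assumptions inferred from the test), extending the SDK's AbstractAmazonDynamoDB, could be:

class AmazonDDBClientMock extends AbstractAmazonDynamoDB {
    ScanRequest scanRequest;

    @Override
    public ScanResult scan(ScanRequest request) {
        this.scanRequest = request; // captured so the test can assert on the filter
        Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
        item.put("attrName", new AttributeValue("attrValue"));
        Map<String, AttributeValue> lastKey = new HashMap<String, AttributeValue>();
        lastKey.put("1", new AttributeValue("LAST_KEY"));
        return new ScanResult()
                .withItems(item)
                .withCount(1)
                .withScannedCount(10)
                .withLastEvaluatedKey(lastKey)
                .withConsumedCapacity(new ConsumedCapacity().withCapacityUnits(1.0));
    }
}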
Use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.
The class FhirEncodersTest, method testFromParquet.
@Test
public void testFromParquet() throws IOException {
    Path dirPath = Files.createTempDirectory("encoder_test");
    String path = dirPath.resolve("out.parquet").toString();

    // Round-trip the dataset through Parquet and decode it back into FHIR objects.
    conditionsDataset.write().save(path);
    Dataset<Condition> ds = spark.read().parquet(path).as(encoders.of(Condition.class));
    Condition readCondition = ds.head();
    Assert.assertEquals(condition.getId(), readCondition.getId());
}
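The spark, encoders, condition, and conditionsDataset fixtures come from the surrounding test class. A minimal sketch of that setup, assuming Bunsen's FhirEncoders.forStu3().getOrCreate() entry point and a hypothetical resource id, would be:

private static SparkSession spark;
private static final FhirEncoders encoders = FhirEncoders.forStu3().getOrCreate();
private static final Condition condition = new Condition();
private static Dataset<Condition> conditionsDataset;

@BeforeClass
public static void setUp() {
    spark = SparkSession.builder()
            .master("local[2]")
            .appName("FhirEncodersTest")
            .getOrCreate();
    condition.setId("example"); // id later checked by testFromParquet
    conditionsDataset = spark.createDataset(
            ImmutableList.of(condition), encoders.of(Condition.class));
}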
Use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.
The class ValueSetUdfsTest, method setUp.
/**
 * Sets up Spark and loads test value sets.
 */
@BeforeClass
public static void setUp() throws IOException {
    // Create a local spark session using an in-memory metastore.
    // We must also use Hive and set the partition mode to non-strict to
    // support dynamic partitions.
    spark = SparkSession.builder()
            .master("local[2]")
            .appName("UdfsTest")
            .enableHiveSupport()
            .config("javax.jdo.option.ConnectionURL", "jdbc:derby:memory:metastore_db;create=true")
            .config("hive.exec.dynamic.partition.mode", "nonstrict")
            .config("spark.sql.warehouse.dir", Files.createTempDirectory("spark_warehouse").toString())
            .getOrCreate();
    spark.sql("create database " + ConceptMaps.MAPPING_DATABASE);

    Hierarchies withLoinc = Loinc.withLoincHierarchy(spark, Hierarchies.getEmpty(spark),
            "src/test/resources/LOINC_HIERARCHY_SAMPLE.CSV", "2.56");
    Hierarchies withLoincAndSnomed = Snomed.withRelationships(spark, withLoinc,
            "src/test/resources/SNOMED_RELATIONSHIP_SAMPLE.TXT", "20160901");
    ValueSets withGender = ValueSets.getEmpty(spark)
            .withValueSetsFromDirectory("src/test/resources/xml/valuesets");

    BroadcastableValueSets valueSets = BroadcastableValueSets.newBuilder()
            .addCode("bp", Loinc.LOINC_CODE_SYSTEM_URI, "8462-4")
            .addCode("albumin", Loinc.LOINC_CODE_SYSTEM_URI, "14959-1")
            .addReference("married", "urn:cerner:bunsen:valueset:married_maritalstatus")
            .addDescendantsOf("leukocytes", Loinc.LOINC_CODE_SYSTEM_URI, "LP14419-3", Loinc.LOINC_HIERARCHY_URI)
            .addDescendantsOf("diabetes", Snomed.SNOMED_CODE_SYSTEM_URI, "73211009", Snomed.SNOMED_HIERARCHY_URI)
            .addDescendantsOf("blood_disorder", Snomed.SNOMED_CODE_SYSTEM_URI, "266992002", Snomed.SNOMED_HIERARCHY_URI)
            .addDescendantsOf("disorder_history", Snomed.SNOMED_CODE_SYSTEM_URI, "312850006", Snomed.SNOMED_HIERARCHY_URI)
            .build(spark, withGender, withLoincAndSnomed);
    ValueSetUdfs.pushUdf(spark, valueSets);

    Dataset<Observation> loincObservations = spark.createDataset(ImmutableList.of(
            observation("leukocytes", "5821-4"), // "is a" LP14419-3
            observation("bp", "8462-4")),        // blood pressure
            encoders.of(Observation.class));
    loincObservations.createOrReplaceTempView("test_loinc_obs");

    // Conditions include history of anemia, which includes a cycling ancestor
    // in our test data. This ensures that such data can be loaded correctly.
    Dataset<Condition> conditions = spark.createDataset(ImmutableList.of(
            condition("diabetes", "44054006"),            // "is a" 73211009 (diabetes)
            condition("history_of_anemia", "275538002")), // "is a" 312850006 (history of disorder)
            encoders.of(Condition.class));
    conditions.createOrReplaceTempView("test_snomed_cond");

    Dataset<Patient> patients = spark.createDataset(
            ImmutableList.of(patient("married", "M"), patient("unmarried", "U")),
            encoders.of(Patient.class));
    patients.createOrReplaceTempView("test_valueset_patient");
}
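With the UDF pushed and the temp views registered, value-set membership can be queried directly from SQL. The sketch below assumes in_valueset is the function name registered by ValueSetUdfs.pushUdf; if the registered name differs, adjust accordingly:

// Select rows whose CodeableConcept falls in the broadcast "diabetes" value set.
Dataset<Row> diabetics = spark.sql(
        "SELECT id FROM test_snomed_cond WHERE in_valueset(code, 'diabetes')");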
Use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.
The class ValueSetUdfsTest, method condition.
private static Condition condition(String id, String code) {
    // Condition based on the example from FHIR:
    // https://www.hl7.org/fhir/condition-example.json.html
    Condition condition = new Condition();
    condition.setId(id);
    condition.setCode(codeable(Snomed.SNOMED_CODE_SYSTEM_URI, code));
    return condition;
}
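The codeable helper called above is not part of the snippet; a minimal sketch using the DSTU3 model types would be:

private static CodeableConcept codeable(String system, String value) {
    CodeableConcept concept = new CodeableConcept();
    concept.addCoding(new Coding().setSystem(system).setCode(value));
    return concept;
}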