Search in sources :

Example 11 with Condition

use of com.amazonaws.services.dynamodbv2.model.Condition in project camel by apache.

the class QueryCommandTest method execute.

@Test
public void execute() {
    // Exclusive start key the query should resume from.
    Map<String, AttributeValue> startKey = new HashMap<>();
    startKey.put("1", new AttributeValue("startKey"));
    // Projection: only these attributes should be requested.
    List<String> attributeNames = Arrays.asList("attrNameOne", "attrNameTwo");
    exchange.getIn().setHeader(DdbConstants.ATTRIBUTE_NAMES, attributeNames);
    exchange.getIn().setHeader(DdbConstants.CONSISTENT_READ, true);
    exchange.getIn().setHeader(DdbConstants.START_KEY, startKey);
    exchange.getIn().setHeader(DdbConstants.LIMIT, 10);
    exchange.getIn().setHeader(DdbConstants.SCAN_INDEX_FORWARD, true);
    // Key condition: attribute "1" must be greater than 1985.
    Map<String, Condition> keyConditions = new HashMap<>();
    Condition condition = new Condition()
            .withComparisonOperator(ComparisonOperator.GT.toString())
            .withAttributeValueList(new AttributeValue().withN("1985"));
    keyConditions.put("1", condition);
    exchange.getIn().setHeader(DdbConstants.KEY_CONDITIONS, keyConditions);
    command.execute();
    // Expected LAST_EVALUATED_KEY — presumably produced by the stubbed
    // DynamoDB client; confirm against the test fixture.
    Map<String, AttributeValue> mapAssert = new HashMap<>();
    mapAssert.put("1", new AttributeValue("LAST_KEY"));
    ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(DdbConstants.CONSUMED_CAPACITY);
    // Verify the result headers populated by the command.
    assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(DdbConstants.COUNT, Integer.class));
    assertEquals(Double.valueOf(1.0), consumed.getCapacityUnits());
    assertEquals(mapAssert, exchange.getIn().getHeader(DdbConstants.LAST_EVALUATED_KEY, Map.class));
    assertEquals(keyConditions, exchange.getIn().getHeader(DdbConstants.KEY_CONDITIONS, Map.class));
    // ITEMS header holds a List of item maps; check the first item.
    Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(DdbConstants.ITEMS, List.class).get(0);
    assertEquals(new AttributeValue("attrValue"), items.get("attrName"));
}
Also used : Condition(com.amazonaws.services.dynamodbv2.model.Condition) AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) HashMap(java.util.HashMap) Map(java.util.Map) ConsumedCapacity(com.amazonaws.services.dynamodbv2.model.ConsumedCapacity) Test(org.junit.Test)

Example 12 with Condition

use of com.amazonaws.services.dynamodbv2.model.Condition in project camel by apache.

the class ScanCommandTest method execute.

@Test
public void execute() {
    // Scan filter: keep only items whose "year" attribute is > 1985.
    Map<String, Condition> scanFilter = new HashMap<>();
    Condition condition = new Condition()
            .withComparisonOperator(ComparisonOperator.GT.toString())
            .withAttributeValueList(new AttributeValue().withN("1985"));
    scanFilter.put("year", condition);
    exchange.getIn().setHeader(DdbConstants.SCAN_FILTER, scanFilter);
    command.execute();
    // Expected LAST_EVALUATED_KEY — presumably produced by the stubbed
    // DynamoDB client; confirm against the test fixture.
    Map<String, AttributeValue> mapAssert = new HashMap<>();
    mapAssert.put("1", new AttributeValue("LAST_KEY"));
    ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(DdbConstants.CONSUMED_CAPACITY);
    // The filter must have been forwarded unchanged to the scan request.
    assertEquals(scanFilter, ddbClient.scanRequest.getScanFilter());
    // Verify the result headers populated by the command.
    assertEquals(Integer.valueOf(10), exchange.getIn().getHeader(DdbConstants.SCANNED_COUNT, Integer.class));
    assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(DdbConstants.COUNT, Integer.class));
    assertEquals(Double.valueOf(1.0), consumed.getCapacityUnits());
    assertEquals(mapAssert, exchange.getIn().getHeader(DdbConstants.LAST_EVALUATED_KEY, Map.class));
    // ITEMS header holds a List of item maps; check the first item.
    Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(DdbConstants.ITEMS, List.class).get(0);
    assertEquals(new AttributeValue("attrValue"), items.get("attrName"));
}
Also used : Condition(com.amazonaws.services.dynamodbv2.model.Condition) AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) HashMap(java.util.HashMap) Map(java.util.Map) ConsumedCapacity(com.amazonaws.services.dynamodbv2.model.ConsumedCapacity) Test(org.junit.Test)

Example 13 with Condition

use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.

the class FhirEncodersTest method testFromParquet.

@Test
public void testFromParquet() throws IOException {
    // Round-trip the conditions dataset through a Parquet file on disk.
    Path tempDir = Files.createTempDirectory("encoder_test");
    String parquetPath = tempDir.resolve("out.parquet").toString();
    conditionsDataset.write().save(parquetPath);
    // Read the file back, decoding rows into FHIR Condition resources.
    Dataset<Condition> decoded = spark.read()
            .parquet(parquetPath)
            .as(encoders.of(Condition.class));
    Condition roundTripped = decoded.head();
    // The decoded resource should keep the original resource id.
    Assert.assertEquals(condition.getId(), roundTripped.getId());
}
Also used : Path(java.nio.file.Path) Condition(org.hl7.fhir.dstu3.model.Condition) Test(org.junit.Test)

Example 14 with Condition

use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.

the class ValueSetUdfsTest method setUp.

/**
 * Sets up Spark and loads test value sets.
 */
@BeforeClass
public static void setUp() throws IOException {
    // Create a local spark session using an in-memory metastore.
    // We must also use Hive and set the partition mode to non-strict to
    // support dynamic partitions.
    spark = SparkSession.builder().master("local[2]").appName("UdfsTest").enableHiveSupport().config("javax.jdo.option.ConnectionURL", "jdbc:derby:memory:metastore_db;create=true").config("hive.exec.dynamic.partition.mode", "nonstrict").config("spark.sql.warehouse.dir", Files.createTempDirectory("spark_warehouse").toString()).getOrCreate();
    // Database expected by the ConceptMaps component for its mapping tables.
    spark.sql("create database " + ConceptMaps.MAPPING_DATABASE);
    // Build a code hierarchy containing LOINC entries, then layer SNOMED
    // relationships on top of it.
    Hierarchies withLoinc = Loinc.withLoincHierarchy(spark, Hierarchies.getEmpty(spark), "src/test/resources/LOINC_HIERARCHY_SAMPLE.CSV", "2.56");
    Hierarchies withLoincAndSnomed = Snomed.withRelationships(spark, withLoinc, "src/test/resources/SNOMED_RELATIONSHIP_SAMPLE.TXT", "20160901");
    // Value sets loaded from XML fixtures (includes the marital-status set
    // referenced below).
    ValueSets withGender = ValueSets.getEmpty(spark).withValueSetsFromDirectory("src/test/resources/xml/valuesets");
    // Broadcastable value sets: direct codes, a value-set reference, and
    // hierarchy descendants for LOINC and SNOMED concepts.
    BroadcastableValueSets valueSets = BroadcastableValueSets.newBuilder().addCode("bp", Loinc.LOINC_CODE_SYSTEM_URI, "8462-4").addCode("albumin", Loinc.LOINC_CODE_SYSTEM_URI, "14959-1").addReference("married", "urn:cerner:bunsen:valueset:married_maritalstatus").addDescendantsOf("leukocytes", Loinc.LOINC_CODE_SYSTEM_URI, "LP14419-3", Loinc.LOINC_HIERARCHY_URI).addDescendantsOf("diabetes", Snomed.SNOMED_CODE_SYSTEM_URI, "73211009", Snomed.SNOMED_HIERARCHY_URI).addDescendantsOf("blood_disorder", Snomed.SNOMED_CODE_SYSTEM_URI, "266992002", Snomed.SNOMED_HIERARCHY_URI).addDescendantsOf("disorder_history", Snomed.SNOMED_CODE_SYSTEM_URI, "312850006", Snomed.SNOMED_HIERARCHY_URI).build(spark, withGender, withLoincAndSnomed);
    // Register the UDFs backed by the broadcast value sets.
    ValueSetUdfs.pushUdf(spark, valueSets);
    // Observations exposed as a temp view for SQL-based tests.
    Dataset<Observation> loincObservations = spark.createDataset(ImmutableList.of(// "is a" LP14419-3
    observation("leukocytes", "5821-4"), // Blood pressure
    observation("bp", "8462-4")), encoders.of(Observation.class));
    loincObservations.createOrReplaceTempView("test_loinc_obs");
    // Conditions include history of anemia, which includes a cycling ancestor
    // in our test data. This ensures that can be loaded correctly.
    Dataset<Condition> conditions = spark.createDataset(ImmutableList.of(// "is a" 73211009 (diabetes)
    condition("diabetes", "44054006"), // 312850006 (history of disorder)
    condition("history_of_anemia", "275538002")), encoders.of(Condition.class));
    conditions.createOrReplaceTempView("test_snomed_cond");
    // Patients exposed as a temp view for the value-set-reference test.
    Dataset<Patient> patients = spark.createDataset(ImmutableList.of(patient("married", "M"), patient("unmarried", "U")), encoders.of(Patient.class));
    patients.createOrReplaceTempView("test_valueset_patient");
}
Also used : Condition(org.hl7.fhir.dstu3.model.Condition) Hierarchies(com.cerner.bunsen.codes.Hierarchies) Observation(org.hl7.fhir.dstu3.model.Observation) Patient(org.hl7.fhir.dstu3.model.Patient) BroadcastableValueSets(com.cerner.bunsen.codes.broadcast.BroadcastableValueSets) ValueSets(com.cerner.bunsen.codes.ValueSets) BeforeClass(org.junit.BeforeClass)

Example 15 with Condition

use of org.hl7.fhir.dstu3.model.Condition in project bunsen by cerner.

the class ValueSetUdfsTest method condition.

private static Condition condition(String id, String code) {
    // Minimal SNOMED-coded condition, modeled on the FHIR example:
    // https://www.hl7.org/fhir/condition-example.json.html
    Condition result = new Condition();
    result.setId(id);
    result.setCode(codeable(Snomed.SNOMED_CODE_SYSTEM_URI, code));
    return result;
}
Also used : Condition(org.hl7.fhir.dstu3.model.Condition)

Aggregations

Condition (com.amazonaws.services.dynamodbv2.model.Condition)16 AttributeValue (com.amazonaws.services.dynamodbv2.model.AttributeValue)14 HashMap (java.util.HashMap)9 Condition (org.hl7.fhir.dstu3.model.Condition)7 Test (org.junit.Test)7 QueryRequest (com.amazonaws.services.dynamodbv2.model.QueryRequest)6 QueryResult (com.amazonaws.services.dynamodbv2.model.QueryResult)6 Map (java.util.Map)5 DynamoDBScanExpression (com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBScanExpression)2 ComparisonOperator (com.amazonaws.services.dynamodbv2.model.ComparisonOperator)2 ConsumedCapacity (com.amazonaws.services.dynamodbv2.model.ConsumedCapacity)2 SimpleDateFormat (java.text.SimpleDateFormat)2 Date (java.util.Date)2 JavaSparkContext (org.apache.spark.api.java.JavaSparkContext)2 IdDt (ca.uhn.fhir.model.primitive.IdDt)1 MethodOutcome (ca.uhn.fhir.rest.api.MethodOutcome)1 AmazonClientException (com.amazonaws.AmazonClientException)1 AmazonServiceException (com.amazonaws.AmazonServiceException)1 ScanRequest (com.amazonaws.services.dynamodbv2.model.ScanRequest)1 ScanResult (com.amazonaws.services.dynamodbv2.model.ScanResult)1