Example usage of com.ibm.cohort.cql.spark.aggregation.ContextDefinitions, taken from the quality-measure-and-cohort-service project by Alvearie.
From class SparkCqlEvaluatorTest, method testReadContextDefinitions:
@Test
public void testReadContextDefinitions() throws Exception {
    // The reader resolves paths through Hadoop, so wire up a default Hadoop configuration first.
    evaluator.hadoopConfiguration = new SerializableConfiguration(SparkHadoopUtil.get().conf());

    String definitionsPath = "src/test/resources/alltypes/metadata/context-definitions.json";
    ContextDefinitions contextDefinitions = evaluator.readContextDefinitions(definitionsPath);

    // The test fixture declares five contexts; the first one carries three relationships.
    assertNotNull(contextDefinitions);
    assertEquals(5, contextDefinitions.getContextDefinitions().size());
    assertEquals(3, contextDefinitions.getContextDefinitions().get(0).getRelationships().size());
}
Example usage of com.ibm.cohort.cql.spark.aggregation.ContextDefinitions, taken from the quality-measure-and-cohort-service project by Alvearie.
From class SparkSchemaCreatorTest, method makeContextDefinitions:
/**
 * Wraps the given list of context definitions in a {@link ContextDefinitions} container.
 *
 * @param definitionList the individual context definitions to wrap
 * @return a populated ContextDefinitions instance
 */
private ContextDefinitions makeContextDefinitions(List<ContextDefinition> definitionList) {
    ContextDefinitions result = new ContextDefinitions();
    result.setContextDefinitions(definitionList);
    return result;
}
Example usage of com.ibm.cohort.cql.spark.aggregation.ContextDefinitions, taken from the quality-measure-and-cohort-service project by Alvearie.
From class SparkSchemaCreatorTest, method testInvalidKeyColumn:
// A context whose key column ("other") does not exist on the model type must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testInvalidKeyColumn() throws Exception {
    ContextDefinitions contextDefinitions = makeContextDefinitions(
            Arrays.asList(makeContextDefinition("Context1Id", "Type1", "other")));

    CqlLibraryDescriptor descriptor = new CqlLibraryDescriptor()
            .setLibraryId("Context1Id")
            .setVersion("1.0.0");
    CqlEvaluationRequests cqlEvaluationRequests = makeEvaluationRequests(
            Arrays.asList(makeEvaluationRequest(
                    descriptor,
                    new HashSet<>(Collections.singletonList("define_boolean")),
                    "Context1Id")));

    SparkSchemaCreator schemaCreator = new SparkSchemaCreator(
            cqlLibraryProvider, cqlEvaluationRequests, contextDefinitions, outputColumnNameFactory, cqlTranslator);

    // Expected to throw IllegalArgumentException because of the bad key column.
    schemaCreator.calculateSchemasForContexts(Arrays.asList("Context1Id"));
}
Example usage of com.ibm.cohort.cql.spark.aggregation.ContextDefinitions, taken from the quality-measure-and-cohort-service project by Alvearie.
From class SparkSchemaCreatorTest, method singleContextSupportedDefineTypes:
// Verifies the generated schema for a single context covering every supported CQL define type.
@Test
public void singleContextSupportedDefineTypes() throws Exception {
    ContextDefinitions contextDefinitions = makeContextDefinitions(
            Collections.singletonList(makeContextDefinition("A", "Type1", "id")));

    // Two libraries evaluated under the same context "A", together exercising
    // integer, boolean, string, decimal, date, and datetime defines.
    CqlEvaluationRequests cqlEvaluationRequests = makeEvaluationRequests(Arrays.asList(
            makeEvaluationRequest(
                    new CqlLibraryDescriptor().setLibraryId("Context1Id").setVersion("1.0.0"),
                    new HashSet<>(Arrays.asList("define_integer", "define_boolean", "define_string", "define_decimal")),
                    "A"),
            makeEvaluationRequest(
                    new CqlLibraryDescriptor().setLibraryId("Context2Id").setVersion("1.0.0"),
                    new HashSet<>(Arrays.asList("define_date", "define_datetime")),
                    "A")));

    SparkSchemaCreator schemaCreator = new SparkSchemaCreator(
            cqlLibraryProvider, cqlEvaluationRequests, contextDefinitions, outputColumnNameFactory, cqlTranslator);
    StructType actualSchema = schemaCreator.calculateSchemasForContexts(Arrays.asList("A")).get("A");

    // Key and parameters columns come first; each define maps to a nullable column
    // named "<libraryId>.<defineName>" with the corresponding Spark type.
    StructType expectedSchema = new StructType()
            .add("id", DataTypes.IntegerType, false)
            .add("parameters", DataTypes.StringType, false)
            .add("Context1Id.define_integer", DataTypes.IntegerType, true)
            .add("Context1Id.define_boolean", DataTypes.BooleanType, true)
            .add("Context1Id.define_string", DataTypes.StringType, true)
            .add("Context1Id.define_decimal", DataTypes.createDecimalType(28, 8), true)
            .add("Context2Id.define_date", DataTypes.DateType, true)
            .add("Context2Id.define_datetime", DataTypes.TimestampType, true);

    validateSchemas(expectedSchema, actualSchema, "id");
}
Example usage of com.ibm.cohort.cql.spark.aggregation.ContextDefinitions, taken from the quality-measure-and-cohort-service project by Alvearie.
From class SparkSchemaCreatorTest, method testMultipleContextDefinitionsForContext:
// Two context definitions registered under the same name must be rejected as ambiguous.
@Test(expected = IllegalArgumentException.class)
public void testMultipleContextDefinitionsForContext() throws Exception {
    ContextDefinition first = makeContextDefinition("Context1Id", "Type1", "id");
    ContextDefinition second = makeContextDefinition("Context1Id", "Type1", "id");
    ContextDefinitions contextDefinitions = makeContextDefinitions(Arrays.asList(first, second));

    CqlEvaluationRequests cqlEvaluationRequests = makeEvaluationRequests(
            Arrays.asList(makeEvaluationRequest(
                    new CqlLibraryDescriptor().setLibraryId("Context1Id").setVersion("1.0.0"),
                    new HashSet<>(Collections.singletonList("define_boolean")),
                    "Context1Id")));

    SparkSchemaCreator schemaCreator = new SparkSchemaCreator(
            cqlLibraryProvider, cqlEvaluationRequests, contextDefinitions, outputColumnNameFactory, cqlTranslator);

    // Expected to throw IllegalArgumentException due to the duplicate context name.
    schemaCreator.calculateSchemasForContexts(Arrays.asList("Context1Id"));
}
Aggregations (index of collected usage examples)