Use of com.sequenceiq.cloudbreak.template.TemplatePreparationObject in project cloudbreak by hortonworks.
The class SchemaRegistryJarStorageConfigProviderTest, method testLocalStorageIsChosenWhenSingleSchemaRegistryInstance.
@Test
void testLocalStorageIsChosenWhenSingleSchemaRegistryInstance() {
    cdhMainVersionIs("7.1.0");
    TemplatePreparationObject tpo = getTemplatePreparationObject(1);
    HostgroupView hostGroup = tpo.getHostGroupsWithComponent(SCHEMA_REGISTRY_SERVER).findFirst().get();
    assertEquals(
            List.of(config(CONFIG_JAR_STORAGE_DIRECTORY_PATH, "/hadoopfs/root1/schema_registry")),
            subject.getRoleConfigs(SCHEMA_REGISTRY_SERVER, hostGroup, tpo));
}
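The expected values in the assertion above are built with the shared config(name, value) helper used across these tests. As a point of reference, a minimal sketch of such a helper is shown below; the class name ConfigHelperSketch is illustrative, and the implementation assumes the swagger-generated ApiClusterTemplateConfig model with plain setName/setValue setters, so the actual cloudbreak utility may differ in detail.

import com.cloudera.api.swagger.model.ApiClusterTemplateConfig;

final class ConfigHelperSketch {

    private ConfigHelperSketch() {
    }

    // Builds the name/value pair objects that the expected config lists in these tests are made of.
    static ApiClusterTemplateConfig config(String name, String value) {
        ApiClusterTemplateConfig config = new ApiClusterTemplateConfig();
        config.setName(name);
        config.setValue(value);
        return config;
    }
}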
Use of com.sequenceiq.cloudbreak.template.TemplatePreparationObject in project cloudbreak by hortonworks.
The class SchemaRegistryJarStorageConfigProviderTest, method getTemplatePreparationObject.
private TemplatePreparationObject getTemplatePreparationObject(HostgroupView hostGroup) {
    String inputJson = FileReaderUtils.readFileFromClasspathQuietly("input/kafka.bp");
    return TemplatePreparationObject.Builder.builder()
            .withHostgroupViews(Set.of(hostGroup))
            .withBlueprintView(new BlueprintView(inputJson, "CDP", "1.0", new CmTemplateProcessor(inputJson)))
            .build();
}
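The same Builder chain generalizes beyond a single host group. The sketch below is illustrative only (the helper name tpoForHostGroups is not from the repository); it reuses exactly the calls visible in the helper above.

// Illustrative variant of the helper above: accepts any set of host groups instead of a single one.
private TemplatePreparationObject tpoForHostGroups(Set<HostgroupView> hostGroups) {
    String inputJson = FileReaderUtils.readFileFromClasspathQuietly("input/kafka.bp");
    return TemplatePreparationObject.Builder.builder()
            .withHostgroupViews(hostGroups)
            .withBlueprintView(new BlueprintView(inputJson, "CDP", "1.0", new CmTemplateProcessor(inputJson)))
            .build();
}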
Use of com.sequenceiq.cloudbreak.template.TemplatePreparationObject in project cloudbreak by hortonworks.
The class SchemaRegistryServiceConfigProviderTest, method testGetSchemaRegistryRoleConfigs710.
@Test
public void testGetSchemaRegistryRoleConfigs710() {
    String inputJson = loadBlueprint("7.1.0");
    CmTemplateProcessor cmTemplateProcessor = new CmTemplateProcessor(inputJson);
    TemplatePreparationObject preparationObject = getTemplatePreparationObject(cmTemplateProcessor);
    List<ApiClusterTemplateConfig> roleConfigs = underTest.getRoleConfigs(SchemaRegistryRoles.SCHEMA_REGISTRY_SERVER, preparationObject);
    assertThat(roleConfigs).hasSameElementsAs(List.of(
            config("schema.registry.storage.connector.connectURI", "jdbc:postgresql://testhost:5432/schema_registry"),
            config("schema.registry.storage.connector.user", "schema_registry_server"),
            config("schema.registry.storage.connector.password", "schema_registry_server_password")));
}
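loadBlueprint("7.1.0") is not shown on this page. A plausible shape for it, following the version-placeholder substitution pattern used in the StreamsMessagingManager test below and the classpath reader used in the Schema Registry helper above, is sketched here; the resource name input/schemaregistry.bp is an assumption, not taken from the repository.

// Hedged sketch of a loadBlueprint helper: read a classpath blueprint and substitute the CDH version.
// The resource name below is illustrative.
private String loadBlueprint(String cdhVersion) {
    return FileReaderUtils.readFileFromClasspathQuietly("input/schemaregistry.bp")
            .replace("__CDH_VERSION__", cdhVersion);
}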
Use of com.sequenceiq.cloudbreak.template.TemplatePreparationObject in project cloudbreak by hortonworks.
The class StreamsMessagingManagerServiceConfigProviderTest, method testGetStreamsMessagingManagerServerConfigs.
@Test
public void testGetStreamsMessagingManagerServerConfigs() {
    String inputJson = getBlueprintText("input/cdp-streaming.bp").replace("__CDH_VERSION__", "7.2.0");
    CmTemplateProcessor cmTemplateProcessor = new CmTemplateProcessor(inputJson);
    TemplatePreparationObject preparationObject = getTemplatePreparationObject(null, false, cmTemplateProcessor);
    List<ApiClusterTemplateConfig> serviceConfigs = underTest.getServiceConfigs(cmTemplateProcessor, preparationObject);
    assertThat(serviceConfigs).hasSameElementsAs(List.of(
            config(DATABASE_TYPE, "postgresql"),
            config(DATABASE_NAME, "smm"),
            config(DATABASE_HOST, "testhost"),
            config(DATABASE_PORT, "5432"),
            config(DATABASE_USER, "smm_server"),
            config(DATABASE_PASSWORD, "smm_server_db_password")));
}
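When an expected config list grows this long, collecting the returned ApiClusterTemplateConfig objects into a name-to-value map can make assertion failures easier to read. The helper below is a test-side convenience sketch, not part of the provider API; it relies only on the model's standard getName()/getValue() getters.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.cloudera.api.swagger.model.ApiClusterTemplateConfig;

// Collects provider output into a name -> value map so a failing assertion names the offending key.
static Map<String, String> configsByName(List<ApiClusterTemplateConfig> configs) {
    return configs.stream()
            .collect(Collectors.toMap(ApiClusterTemplateConfig::getName, ApiClusterTemplateConfig::getValue));
}

A test could then assert, for example, assertThat(configsByName(serviceConfigs)).containsEntry(DATABASE_HOST, "testhost").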
Use of com.sequenceiq.cloudbreak.template.TemplatePreparationObject in project cloudbreak by hortonworks.
The class Spark3OnYarnRoleConfigProviderTest, method validateClientConfig.
protected void validateClientConfig(String hmsExternalDirLocation, String clientConfigDirLocation) {
    TemplatePreparationObject preparationObject = getTemplatePreparationObject(hmsExternalDirLocation);
    String inputJson = getBlueprintText("input/clouderamanager-ds.bp");
    CmTemplateProcessor cmTemplateProcessor = new CmTemplateProcessor(inputJson);
    Map<String, List<ApiClusterTemplateConfig>> roleConfigs = underTest.getRoleConfigs(cmTemplateProcessor, preparationObject);
    List<ApiClusterTemplateConfig> sparkOnYarnConfigs = roleConfigs.get("spark3_on_yarn-GATEWAY-BASE");
    assertEquals(1, sparkOnYarnConfigs.size());
    assertEquals("spark3-conf/spark-defaults.conf_client_config_safety_valve", sparkOnYarnConfigs.get(0).getName());
    assertEquals("spark.kerberos.access.hadoopFileSystems=" + clientConfigDirLocation, sparkOnYarnConfigs.get(0).getValue());
}
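A caller of this helper passes the HMS external directory location and the filesystem location expected in the gateway safety valve. The example below is a hypothetical invocation; the S3 paths are illustrative placeholders, not values from the repository's tests.

// Hypothetical invocation of the helper above; the paths are placeholders.
@Test
public void testSpark3OnYarnClientConfigsS3() {
    validateClientConfig("s3a://example-bucket/warehouse/tablespace/external/hive", "s3a://example-bucket");
}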