Use of gov.cms.bfd.pipeline.sharedutils.DatabaseOptions in the project beneficiary-fhir-data by CMSgov.
From the class RdaPipelineTestUtils, method runTestWithTemporaryDb:
/**
 * Creates a temporary in-memory HSQLDB that is destroyed when the test ends plus a
 * PipelineApplicationState and EntityManager using that db, passes them to the provided lambda
 * function, then closes them and destroys the database.
 *
 * @param testClass used to create a db name
 * @param clock used for the app state
 * @param test lambda to receive the appState and perform some testing
 */
public static void runTestWithTemporaryDb(Class<?> testClass, Clock clock, DatabaseConsumer test) throws Exception {
  final String connectionString = "jdbc:hsqldb:mem:" + testClass.getSimpleName();
  // Holding this connection open keeps the in-memory database alive; the ";shutdown=true"
  // property destroys the database as soon as this connection closes at the end of the try block.
  try (Connection databaseAnchor =
      DriverManager.getConnection(connectionString + ";shutdown=true", "", "")) {
    final MetricRegistry metrics = new MetricRegistry();
    final DatabaseOptions connectionOptions = new DatabaseOptions(connectionString, "", "", 10);
    final HikariDataSource pool =
        PipelineApplicationState.createPooledDataSource(connectionOptions, metrics);
    DatabaseSchemaManager.createOrUpdateSchema(pool);
    try (PipelineApplicationState appState =
        new PipelineApplicationState(metrics, pool, RDA_PERSISTENCE_UNIT_NAME, clock)) {
      final EntityManager entityManager = appState.getEntityManagerFactory().createEntityManager();
      try {
        test.accept(appState, entityManager);
      } finally {
        entityManager.close();
      }
    }
  }
}
Use of gov.cms.bfd.pipeline.sharedutils.DatabaseOptions in the project beneficiary-fhir-data by CMSgov.
From the class DirectRdaLoadApp, method main:
/**
 * Command line entry point: reads settings from the given configuration file, applies any
 * pending database schema migrations, then runs a pipeline job that loads RDA claims of the
 * requested type directly into the database.
 *
 * @param args expects exactly two arguments: a configuration file path and a claim type
 *     ({@code fiss} or {@code mcs})
 * @throws Exception any failure during configuration, migration, or job execution
 */
public static void main(String[] args) throws Exception {
  if (args.length != 2) {
    System.err.printf("usage: %s configfile claimType%n", DirectRdaLoadApp.class.getSimpleName());
    System.exit(1);
  }
  final ConfigLoader options =
      ConfigLoader.builder().addPropertiesFile(new File(args[0])).addSystemProperties().build();
  final String claimType = Strings.nullToEmpty(args[1]);
  final MetricRegistry metrics = new MetricRegistry();
  final Slf4jReporter reporter =
      Slf4jReporter.forRegistry(metrics)
          .outputTo(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME))
          .convertRatesTo(TimeUnit.SECONDS)
          .convertDurationsTo(TimeUnit.MILLISECONDS)
          .build();
  reporter.start(5, TimeUnit.SECONDS);
  final RdaLoadOptions jobConfig = readRdaLoadOptionsFromProperties(options);
  final DatabaseOptions databaseConfig =
      readDatabaseOptions(options, jobConfig.getJobConfig().getWriteThreads());
  HikariDataSource pooledDataSource =
      PipelineApplicationState.createPooledDataSource(databaseConfig, metrics);
  System.out.printf("thread count is %d%n", jobConfig.getJobConfig().getWriteThreads());
  System.out.printf("database pool size %d%n", pooledDataSource.getMaximumPoolSize());
  DatabaseSchemaManager.createOrUpdateSchema(pooledDataSource);
  try (PipelineApplicationState appState =
      new PipelineApplicationState(
          metrics,
          pooledDataSource,
          PipelineApplicationState.RDA_PERSISTENCE_UNIT_NAME,
          Clock.systemUTC())) {
    final Optional<PipelineJob<?>> job = createPipelineJob(jobConfig, appState, claimType);
    if (!job.isPresent()) {
      System.err.printf("error: invalid claim type: '%s' expected 'fiss' or 'mcs'%n", claimType);
      System.exit(1);
    }
    try {
      job.get().call();
    } finally {
      // Flush the final metrics and stop the reporter's background thread so the JVM can
      // exit cleanly; mirrors the cleanup performed by LoadRdaJsonApp.main.
      reporter.report();
      reporter.close();
    }
  }
}
Use of gov.cms.bfd.pipeline.sharedutils.DatabaseOptions in the project beneficiary-fhir-data by CMSgov.
From the class LoadRdaJsonApp, method main:
/**
 * Command line entry point: starts an in-process RDA API server that serves claims from the
 * configured sources, then runs pipeline jobs that load those claims into a database.
 *
 * @param args optional single argument naming a properties file to load settings from
 * @throws Exception any failure while serving claims or running the load jobs
 */
public static void main(String[] args) throws Exception {
  final ConfigLoader.Builder configBuilder = ConfigLoader.builder();
  if (args.length == 1) {
    configBuilder.addPropertiesFile(new File(args[0]));
  } else if (System.getProperty("config.properties", "").length() > 0) {
    configBuilder.addPropertiesFile(new File(System.getProperty("config.properties")));
  }
  configBuilder.addSystemProperties();
  final Config config = new Config(configBuilder.build());
  final MetricRegistry appMetrics = new MetricRegistry();
  final Slf4jReporter metricsReporter =
      Slf4jReporter.forRegistry(appMetrics)
          .outputTo(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME))
          .convertRatesTo(TimeUnit.SECONDS)
          .convertDurationsTo(TimeUnit.MILLISECONDS)
          .build();
  metricsReporter.start(5, TimeUnit.SECONDS);
  try {
    logger.info("starting RDA API local server");
    // The server picks its own port; the lambda receives it so the load jobs can connect back.
    RdaServer.LocalConfig.builder()
        .fissSourceFactory(config::createFissClaimsSource)
        .mcsSourceFactory(config::createMcsClaimsSource)
        .build()
        .runWithPortParam(
            port -> {
              final RdaLoadOptions jobConfig = config.createRdaLoadOptions(port);
              final DatabaseOptions databaseConfig = config.createDatabaseOptions();
              final HikariDataSource pooledDataSource =
                  PipelineApplicationState.createPooledDataSource(databaseConfig, appMetrics);
              if (config.runSchemaMigration) {
                logger.info("running database migration");
                DatabaseSchemaManager.createOrUpdateSchema(pooledDataSource);
              }
              try (PipelineApplicationState appState =
                  new PipelineApplicationState(
                      appMetrics,
                      pooledDataSource,
                      PipelineApplicationState.RDA_PERSISTENCE_UNIT_NAME,
                      Clock.systemUTC())) {
                final List<PipelineJob<?>> jobs = config.createPipelineJobs(jobConfig, appState);
                for (PipelineJob<?> job : jobs) {
                  logger.info("starting job {}", job.getClass().getSimpleName());
                  job.call();
                }
              }
            });
  } finally {
    // Log the final metrics and stop the reporter's background thread.
    metricsReporter.report();
    metricsReporter.close();
  }
}
Use of gov.cms.bfd.pipeline.sharedutils.DatabaseOptions in the project beneficiary-fhir-data by CMSgov.
From the class AppConfiguration, method readConfigFromEnvironmentVariables:
/**
 * Per <code>/dev/design-decisions-readme.md</code>, this application accepts its configuration
 * via environment variables. Read those in, and build an {@link AppConfiguration} instance from
 * them.
 *
 * <p>As a convenience, this method will also verify that AWS credentials were provided, such that
 * {@link DefaultAWSCredentialsProviderChain} can load them. If not, an {@link
 * AppConfigurationException} will be thrown.
 *
 * @return the {@link AppConfiguration} instance represented by the configuration provided to this
 *     application via the environment variables
 * @throws AppConfigurationException An {@link AppConfigurationException} will be thrown if the
 *     configuration passed to the application are incomplete or incorrect.
 */
static AppConfiguration readConfigFromEnvironmentVariables() {
  int hicnHashIterations = readEnvIntPositiveRequired(ENV_VAR_KEY_HICN_HASH_ITERATIONS);
  byte[] hicnHashPepper = readEnvBytesRequired(ENV_VAR_KEY_HICN_HASH_PEPPER);
  int hicnHashCacheSize =
      readEnvIntOptional(ENV_VAR_KEY_HICN_HASH_CACHE_SIZE).orElse(DEFAULT_HICN_HASH_CACHE_SIZE);
  String databaseUrl = readEnvStringRequired(ENV_VAR_KEY_DATABASE_URL);
  String databaseUsername = readEnvStringRequired(ENV_VAR_KEY_DATABASE_USERNAME);
  String databasePassword = readEnvStringRequired(ENV_VAR_KEY_DATABASE_PASSWORD);
  int loaderThreads = readEnvIntPositiveRequired(ENV_VAR_KEY_LOADER_THREADS);
  boolean idempotencyRequired = readEnvBooleanRequired(ENV_VAR_KEY_IDEMPOTENCY_REQUIRED);
  boolean filteringNonNullAndNon2022Benes =
      readEnvBooleanOptional(ENV_VAR_KEY_RIF_FILTERING_NON_NULL_AND_NON_2022_BENES)
          .orElse(DEFAULT_RIF_FILTERING_NON_NULL_AND_NON_2022_BENES);
  Optional<String> newRelicMetricKey = readEnvStringOptional(ENV_VAR_NEW_RELIC_METRIC_KEY);
  Optional<String> newRelicAppName = readEnvStringOptional(ENV_VAR_NEW_RELIC_APP_NAME);
  Optional<String> newRelicMetricHost = readEnvStringOptional(ENV_VAR_NEW_RELIC_METRIC_HOST);
  Optional<String> newRelicMetricPath = readEnvStringOptional(ENV_VAR_NEW_RELIC_METRIC_PATH);
  Optional<Integer> newRelicMetricPeriod = readEnvIntOptional(ENV_VAR_NEW_RELIC_METRIC_PERIOD);
  /*
   * Note: For CcwRifLoadJob, databaseMaxPoolSize needs to be double the number of loader threads
   * when idempotent loads are being used. Apparently, the queries need a separate Connection?
   */
  Optional<Integer> databaseMaxPoolSize = readEnvIntOptional(ENV_VAR_KEY_DATABASE_MAX_POOL_SIZE);
  if (databaseMaxPoolSize.isPresent() && databaseMaxPoolSize.get() < 1) {
    // Format the unwrapped value, not the Optional itself, so the message shows e.g. "0"
    // rather than "Optional[0]".
    throw new AppConfigurationException(
        String.format(
            "Invalid value for configuration environment variable '%s': '%s'",
            ENV_VAR_KEY_DATABASE_MAX_POOL_SIZE, databaseMaxPoolSize.get()));
  }
  if (!databaseMaxPoolSize.isPresent()) {
    databaseMaxPoolSize = Optional.of(loaderThreads * 2);
  }
  Optional<String> hostname;
  try {
    hostname = Optional.of(InetAddress.getLocalHost().getHostName());
  } catch (UnknownHostException e) {
    hostname = Optional.empty();
  }
  MetricOptions metricOptions =
      new MetricOptions(
          newRelicMetricKey,
          newRelicAppName,
          newRelicMetricHost,
          newRelicMetricPath,
          newRelicMetricPeriod,
          hostname);
  DatabaseOptions databaseOptions =
      new DatabaseOptions(databaseUrl, databaseUsername, databasePassword, databaseMaxPoolSize.get());
  LoadAppOptions loadOptions =
      new LoadAppOptions(
          IdHasher.Config.builder()
              .hashIterations(hicnHashIterations)
              .hashPepper(hicnHashPepper)
              .cacheSize(hicnHashCacheSize)
              .build(),
          loaderThreads,
          idempotencyRequired,
          filteringNonNullAndNon2022Benes);
  CcwRifLoadOptions ccwRifLoadOptions = readCcwRifLoadOptionsFromEnvironmentVariables(loadOptions);
  RdaLoadOptions rdaLoadOptions =
      readRdaLoadOptionsFromEnvironmentVariables(loadOptions.getIdHasherConfig());
  return new AppConfiguration(metricOptions, databaseOptions, ccwRifLoadOptions, rdaLoadOptions);
}
Aggregations