use of gov.cms.bfd.model.rif.SkippedRifRecord in project beneficiary-fhir-data by CMSgov.
the class BeneficiaryTransformerV2Test method setup.
@BeforeEach
public void setup() {
  List<Object> parsedRecords =
      ServerTestUtils.parseData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));

  // Pull out the base Beneficiary record and fix its HICN and MBI-HASH fields.
  beneficiary =
      parsedRecords.stream()
          .filter(r -> r instanceof Beneficiary)
          .map(r -> (Beneficiary) r)
          .findFirst()
          .get();
  beneficiary.getSkippedRifRecords().add(new SkippedRifRecord());
  beneficiary.setLastUpdated(Instant.now());
  beneficiary.setMbiHash(Optional.of("someMBIhash"));
  // Add the history records to the Beneficiary, but null out the HICN fields.
  Set<BeneficiaryHistory> beneficiaryHistories =
      parsedRecords.stream()
          .filter(r -> r instanceof BeneficiaryHistory)
          .map(r -> (BeneficiaryHistory) r)
          .filter(r -> beneficiary.getBeneficiaryId().equals(r.getBeneficiaryId()))
          .collect(Collectors.toSet());
  beneficiary.getBeneficiaryHistories().addAll(beneficiaryHistories);

  // Add the MBI history records to the Beneficiary.
  Set<MedicareBeneficiaryIdHistory> beneficiaryMbis =
      parsedRecords.stream()
          .filter(r -> r instanceof MedicareBeneficiaryIdHistory)
          .map(r -> (MedicareBeneficiaryIdHistory) r)
          .filter(r -> beneficiary.getBeneficiaryId().equals(r.getBeneficiaryId().orElse(null)))
          .collect(Collectors.toSet());
  beneficiary.getMedicareBeneficiaryIdHistories().addAll(beneficiaryMbis);

  assertThat(beneficiary, is(notNullValue()));

  createPatient(RequestHeaders.getHeaderWrapper());
}
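
The three stream pipelines in this setup all repeat the same filter-by-type pattern. A minimal sketch of a generic helper that could express that pattern once (hypothetical; not part of beneficiary-fhir-data):

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical helper illustrating the filter-by-type pattern used three times above.
final class RecordFilters {
  private RecordFilters() {}

  static <T> Set<T> ofType(List<Object> records, Class<T> type) {
    return records.stream()
        .filter(type::isInstance)
        .map(type::cast)
        .collect(Collectors.toSet());
  }
}

With such a helper, the history lookup above would reduce to RecordFilters.ofType(parsedRecords, BeneficiaryHistory.class), followed by the existing beneficiary-ID filter.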
use of gov.cms.bfd.model.rif.SkippedRifRecord in project beneficiary-fhir-data by CMSgov.
the class RifLoader method process.
/**
 * @param recordsBatch the {@link RifRecordEvent}s to process
 * @param loadedFileId the ID of the {@link LoadedFile} associated with this batch
 * @param postgresBatch the {@link PostgreSqlCopyInserter} for the current set of {@link
 *     RifFilesEvent}s being processed
 * @return the {@link RifRecordLoadResult}s that model the results of the operation
 */
private List<RifRecordLoadResult> process(
    List<RifRecordEvent<?>> recordsBatch, long loadedFileId, PostgreSqlCopyInserter postgresBatch) {
  RifFileEvent fileEvent = recordsBatch.get(0).getFileEvent();
  MetricRegistry fileEventMetrics = fileEvent.getEventMetrics();
  RifFileType rifFileType = fileEvent.getFile().getFileType();

  if (rifFileType == RifFileType.BENEFICIARY_HISTORY) {
    for (RifRecordEvent<?> rifRecordEvent : recordsBatch) {
      hashBeneficiaryHistoryHicn(rifRecordEvent);
      hashBeneficiaryHistoryMbi(rifRecordEvent);
    }
  }

  // Only one of each failure/success Timer.Contexts will be applied.
  Timer.Context timerBatchSuccess =
      appState.getMetrics().timer(MetricRegistry.name(getClass().getSimpleName(), "recordBatches")).time();
  Timer.Context timerBatchTypeSuccess =
      fileEventMetrics.timer(MetricRegistry.name(getClass().getSimpleName(), "recordBatches", rifFileType.name())).time();
  Timer.Context timerBundleFailure =
      appState.getMetrics().timer(MetricRegistry.name(getClass().getSimpleName(), "recordBatches", "failed")).time();

  EntityManager entityManager = null;
  EntityTransaction txn = null;

  // TODO: refactor the following to be less of an indented mess
  try {
    entityManager = appState.getEntityManagerFactory().createEntityManager();
    txn = entityManager.getTransaction();
    txn.begin();
    List<RifRecordLoadResult> loadResults = new ArrayList<>(recordsBatch.size());

    /*
     * Dev Note: All timestamps of records in the batch and the LoadedBatch must be the same for data consistency.
     * The timestamp from the LoadedBatchBuilder is used.
     */
    LoadedBatchBuilder loadedBatchBuilder = new LoadedBatchBuilder(loadedFileId, recordsBatch.size());
    for (RifRecordEvent<?> rifRecordEvent : recordsBatch) {
      RecordAction recordAction = rifRecordEvent.getRecordAction();
      RifRecordBase record = rifRecordEvent.getRecord();
      LOGGER.trace("Loading '{}' record.", rifFileType);

      // Set lastUpdated to the same value for the whole batch
      record.setLastUpdated(Optional.of(loadedBatchBuilder.getTimestamp()));

      // Associate the beneficiary with this file loaded
      loadedBatchBuilder.associateBeneficiary(rifRecordEvent.getBeneficiaryId());

      LoadStrategy strategy = selectStrategy(recordAction);
      LoadAction loadAction;
      if (strategy == LoadStrategy.INSERT_IDEMPOTENT) {
        // Check to see if record already exists.
        Timer.Context timerIdempotencyQuery =
            fileEventMetrics.timer(MetricRegistry.name(getClass().getSimpleName(), "idempotencyQueries")).time();
        Object recordId =
            appState.getEntityManagerFactory().getPersistenceUnitUtil().getIdentifier(record);
        Objects.requireNonNull(recordId);
        Object recordInDb = entityManager.find(record.getClass(), recordId);
        timerIdempotencyQuery.close();

        // Log if we have a non-2022 enrollment year INSERT
        if (isBackdatedBene(rifRecordEvent)) {
          Beneficiary bene = (Beneficiary) rifRecordEvent.getRecord();
          LOGGER.info(
              "Inserted beneficiary with non-2022 enrollment year (beneficiaryId={})",
              bene.getBeneficiaryId());
        }

        if (recordInDb == null) {
          loadAction = LoadAction.INSERTED;
          tweakIfBeneficiary(entityManager, loadedBatchBuilder, rifRecordEvent);
          entityManager.persist(record);
          // FIXME Object recordInDbAfterUpdate = entityManager.find(record.getClass(), recordId);
        } else {
          loadAction = LoadAction.DID_NOTHING;
        }
      } else if (strategy == LoadStrategy.INSERT_UPDATE_NON_IDEMPOTENT) {
        if (rifRecordEvent.getRecordAction().equals(RecordAction.INSERT)) {
          loadAction = LoadAction.INSERTED;

          // Log if we have a non-2022 enrollment year INSERT
          if (isBackdatedBene(rifRecordEvent)) {
            Beneficiary bene = (Beneficiary) rifRecordEvent.getRecord();
            LOGGER.info(
                "Inserted beneficiary with non-2022 enrollment year (beneficiaryId={})",
                bene.getBeneficiaryId());
          }

          tweakIfBeneficiary(entityManager, loadedBatchBuilder, rifRecordEvent);
          entityManager.persist(record);
        } else if (rifRecordEvent.getRecordAction().equals(RecordAction.UPDATE)) {
          loadAction = LoadAction.UPDATED;
          // Skip this record if the year is not 2022 and it's an update.
          if (isBackdatedBene(rifRecordEvent)) {
            /*
             * Serialize the record's CSV data back to actual RIF/CSV, as that's how we'll store
             * it in the DB.
             */
            StringBuffer rifData = new StringBuffer();
            try (CSVPrinter csvPrinter = new CSVPrinter(rifData, RifParsingUtils.CSV_FORMAT)) {
              for (CSVRecord csvRow : rifRecordEvent.getRawCsvRecords()) {
                csvPrinter.printRecord(csvRow);
              }
            }

            // Save the skipped record to the DB.
            SkippedRifRecord skippedRifRecord =
                new SkippedRifRecord(
                    rifRecordEvent.getFileEvent().getParentFilesEvent().getTimestamp(),
                    SkipReasonCode.DELAYED_BACKDATED_ENROLLMENT_BFD_1566,
                    rifRecordEvent.getFileEvent().getFile().getFileType().name(),
                    rifRecordEvent.getRecordAction(),
                    ((Beneficiary) record).getBeneficiaryId(),
                    rifData.toString());
            entityManager.persist(skippedRifRecord);
            LOGGER.info("Skipped RIF record, due to '{}'.", skippedRifRecord.getSkipReason());
          } else {
            tweakIfBeneficiary(entityManager, loadedBatchBuilder, rifRecordEvent);
            entityManager.merge(record);
          }
        } else {
          throw new BadCodeMonkeyException(
              String.format("Unhandled %s: '%s'.", RecordAction.class, rifRecordEvent.getRecordAction()));
        }
      } else {
        throw new BadCodeMonkeyException();
      }

      LOGGER.trace("Loaded '{}' record.", rifFileType);
      fileEventMetrics
          .meter(MetricRegistry.name(getClass().getSimpleName(), "records", loadAction.name()))
          .mark(1);
      loadResults.add(new RifRecordLoadResult(rifRecordEvent, loadAction));
    }
    LoadedBatch loadedBatch = loadedBatchBuilder.build();
    entityManager.persist(loadedBatch);
    txn.commit();

    // Update the metrics now that things have been pushed.
    timerBatchSuccess.stop();
    timerBatchTypeSuccess.stop();

    return loadResults;
  } catch (Throwable t) {
    timerBundleFailure.stop();
    fileEventMetrics.meter(MetricRegistry.name(getClass().getSimpleName(), "recordBatches", "failed")).mark(1);
    LOGGER.warn("Failed to load '{}' record.", rifFileType, t);
    throw new RifLoadFailure(recordsBatch, t);
  } finally {
    /*
     * Some errors (e.g. HSQL constraint violations) seem to cause the
     * rollback to fail. Extra error handling is needed here, too, to
     * ensure that the failing data is captured.
     */
    try {
      if (txn != null && txn.isActive()) {
        txn.rollback();
      }
    } catch (Throwable t) {
      timerBundleFailure.stop();
      fileEventMetrics.meter(MetricRegistry.name(getClass().getSimpleName(), "recordBatches", "failed")).mark(1);
      LOGGER.warn("Failed to load '{}' record.", rifFileType, t);
      throw new RifLoadFailure(recordsBatch, t);
    }
    if (entityManager != null) {
      entityManager.close();
    }
  }
}
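
The isBackdatedBene(...) helper is not shown in this snippet; per the surrounding comments it gates on whether the record is a Beneficiary whose enrollment reference year is something other than 2022. A minimal sketch of that check, assuming a hypothetical getEnrollmentReferenceYear() accessor returning Optional<BigDecimal> (the real accessor name and type may differ):

// Sketch only: mirrors the "non-2022 enrollment year" comments in process() above.
// getEnrollmentReferenceYear() is an assumed accessor name, not the project's actual API.
private static boolean isBackdatedBene(RifRecordEvent<?> rifRecordEvent) {
  if (!(rifRecordEvent.getRecord() instanceof Beneficiary)) {
    return false;
  }
  Beneficiary bene = (Beneficiary) rifRecordEvent.getRecord();
  return bene.getEnrollmentReferenceYear()
      .map(year -> year.intValue() != 2022)
      .orElse(false);
}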
use of gov.cms.bfd.model.rif.SkippedRifRecord in project beneficiary-fhir-data by CMSgov.
the class BeneficiaryTransformerTest method transformSampleARecordWithSkippedRecord.
/**
* Verifies that {@link
* gov.cms.bfd.server.war.stu3.providers.BeneficiaryTransformer#transform(Beneficiary)} works as
* expected when run against the {@link StaticRifResource#SAMPLE_A_BENES} {@link Beneficiary},
* when there is a matching {@link SkippedRifRecord} for the {@link Beneficiary}.
*/
@Test
public void transformSampleARecordWithSkippedRecord() {
  Beneficiary beneficiary = loadSampleABeneficiary();
  beneficiary.getSkippedRifRecords().add(new SkippedRifRecord());

  RequestHeaders requestHeader = getRHwithIncldIdntityHdr("false");
  Patient patient = BeneficiaryTransformer.transform(new MetricRegistry(), beneficiary, requestHeader);

  assertEquals(1, patient.getMeta().getTag().size());
  TransformerTestUtils.assertCodingEquals(
      TransformerConstants.CODING_SYSTEM_BFD_TAGS,
      TransformerConstants.CODING_BFD_TAGS_DELAYED_BACKDATED_ENROLLMENT,
      patient.getMeta().getTag().get(0));
}
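
The assertions above imply that the transformer adds a meta tag whenever the source Beneficiary carries skipped RIF records. A minimal sketch of that tagging step (illustrative only; the actual BeneficiaryTransformer implementation may differ), using the TransformerConstants values referenced by the test:

import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.Patient;

// Sketch only: tag the Patient when the source Beneficiary has skipped RIF records.
// The display text passed to Coding is left null here; the real transformer may set one.
static void tagIfEnrollmentWasSkipped(Patient patient, Beneficiary beneficiary) {
  if (!beneficiary.getSkippedRifRecords().isEmpty()) {
    patient.getMeta().addTag(
        new Coding(
            TransformerConstants.CODING_SYSTEM_BFD_TAGS,
            TransformerConstants.CODING_BFD_TAGS_DELAYED_BACKDATED_ENROLLMENT,
            null));
  }
}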