Use of cz.mzk.recordmanager.server.model.HarvestedRecord in project RecordManager2 by moravianlibrary.
The class ImportRecordsJobTest, method testSimpleImportMarcXML:
@Test
public void testSimpleImportMarcXML() throws Exception {
    Job job = jobRegistry.getJob(Constants.JOB_ID_IMPORT);
    // import configuration 300 reads the MARC XML test file
    Map<String, JobParameter> params = new HashMap<>();
    params.put(Constants.JOB_PARAM_CONF_ID, new JobParameter(300L));
    params.put(Constants.JOB_PARAM_IN_FILE, new JobParameter(testFileXML1));
    params.put(Constants.JOB_PARAM_FORMAT, new JobParameter("xml"));
    JobParameters jobParams = new JobParameters(params);
    jobLauncher.run(job, jobParams);
    // after the run, the imported record must be retrievable under its id and configuration
    HarvestedRecord insertedRecord = harvestedRecordDao.findByIdAndHarvestConfiguration("19790455", 300L);
    Assert.assertNotNull(insertedRecord);
}
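The assertion relies on findByIdAndHarvestConfiguration, which resolves a record by its source identifier within one import configuration. A minimal JDBC sketch of an equivalent existence check, using the harvested_record table and column names visible in the reader configurations further down (the query is illustrative, not the DAO's actual implementation):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public static boolean recordExists(Connection conn, String recordId, long importConfId) throws SQLException {
    // same lookup key the DAO uses: source record id within one import configuration
    String sql = "SELECT 1 FROM harvested_record WHERE record_id = ? AND import_conf_id = ?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        ps.setString(1, recordId);
        ps.setLong(2, importConfId);
        try (ResultSet rs = ps.executeQuery()) {
            return rs.next();
        }
    }
}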
Use of cz.mzk.recordmanager.server.model.HarvestedRecord in project RecordManager2 by moravianlibrary.
The class ZakonyProLidiFulltextWriter, method write:
@Override
public void write(List<? extends HarvestedRecordUniqueId> items) throws Exception {
    for (HarvestedRecordUniqueId uniqueId : items) {
        HarvestedRecord hr = harvestedRecordDao.get(uniqueId);
        // skip records that already have a full text attached
        if (!hr.getFulltextKramerius().isEmpty())
            continue;
        getNextFulltext(uniqueId.getRecordId());
        FulltextKramerius fk = new FulltextKramerius();
        String fulltext = reader.next();
        if (fulltext.isEmpty()) {
            logger.warn("Fulltext from " + FULLTEXT_URL + uniqueId.getRecordId() + " is empty.");
        } else {
            // store the downloaded text as the single page of this record
            fk.setFulltext(fulltext.getBytes());
            fk.setUuidPage(uniqueId.getRecordId());
            fk.setPage("1");
            fk.setOrder(1L);
            hr.setFulltextKramerius(Collections.singletonList(fk));
            hr.setUpdated(new Date());
            harvestedRecordDao.persist(hr);
        }
        client.close();
    }
    // flush and clear the Hibernate session so the next chunk starts clean
    sessionFactory.getCurrentSession().flush();
    sessionFactory.getCurrentSession().clear();
}
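A writer like this is normally plugged into a chunk-oriented Spring Batch step. A minimal sketch of such wiring, assuming a reader bean that emits HarvestedRecordUniqueId items; the bean names, step name, and chunk size are hypothetical, and the pre-5.0 StepBuilderFactory API is assumed to match the era of this code:

import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.context.annotation.Bean;

@Bean
public Step zakonyProLidiFulltextStep(StepBuilderFactory steps,
        ItemReader<HarvestedRecordUniqueId> uniqueIdReader, // hypothetical reader bean
        ZakonyProLidiFulltextWriter fulltextWriter) {
    return steps.get("zakonyProLidiFulltextStep") // hypothetical step name
            .<HarvestedRecordUniqueId, HarvestedRecordUniqueId>chunk(20)
            .reader(uniqueIdReader)
            .writer(fulltextWriter)
            .build();
}

With chunk-oriented processing, the session flush and clear at the end of write then line up with the chunk boundary.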
Use of cz.mzk.recordmanager.server.model.HarvestedRecord in project RecordManager2 by moravianlibrary.
The class InspirationImportWriter, method write:
@Override
public void write(List<? extends Map<String, List<String>>> items) throws Exception {
    for (Map<String, List<String>> map : items) {
        for (Entry<String, List<String>> entry : map.entrySet()) {
            String inspiration_name = entry.getKey();
            logger.info(String.format(TEXT_INFO, inspiration_name, entry.getValue().size()));
            // records currently carrying this inspiration in the database
            hrWithInspiration = inspirationDao.fingHrByInspiraion(inspiration_name);
            int added_ins = 0;      // inspirations newly added
            int exists_ins = 0;     // inspirations already present in the database
            int not_exists_rec = 0; // ids with no matching harvested record
            for (String id : entry.getValue()) {
                Matcher matcher = PATTERN_ID.matcher(id);
                if (matcher.matches()) {
                    String id_prefix = matcher.group(1);
                    String record_id = matcher.group(2);
                    List<ImportConfiguration> confs = confDao.findByIdPrefix(id_prefix);
                    int counter = 0;
                    for (ImportConfiguration conf : confs) {
                        if (conf == null)
                            continue;
                        HarvestedRecord hr = hrDao.findByIdAndHarvestConfiguration(record_id, conf);
                        if (hr == null) {
                            // count the id as missing only when no configuration matched
                            if (++counter == confs.size())
                                ++not_exists_rec;
                            continue;
                        }
                        if (hrWithInspiration.contains(hr)) {
                            // inspiration is already in the database; keep it
                            hrWithInspiration.remove(hr);
                            ++exists_ins;
                        } else {
                            // attach the inspiration to the harvested record
                            List<Inspiration> result = hr.getInspiration();
                            Inspiration newInspiration = new Inspiration(entry.getKey());
                            newInspiration.setHarvestedRecordId(hr.getId());
                            result.add(newInspiration);
                            hr.setInspiration(result);
                            hr.setUpdated(new Date());
                            hrDao.persist(hr);
                            ++added_ins;
                        }
                    }
                }
            }
            // records left over were not in the import list: remove their inspiration
            for (HarvestedRecord hr : hrWithInspiration) {
                Inspiration delete = inspirationDao.findByHrIdAndName(hr.getId(), inspiration_name);
                List<Inspiration> inspirations = hr.getInspiration();
                inspirations.remove(delete);
                hr.setInspiration(inspirations);
                hr.setUpdated(new Date());
                hrDao.persist(hr);
                inspirationDao.delete(delete);
            }
            logger.info(String.format(TEXT_EXISTS, exists_ins));
            logger.info(String.format(TEXT_ADD, added_ins));
            logger.info(String.format(TEXT_RECORD_NOT_EXIST, not_exists_rec));
            logger.info(String.format(TEXT_DELETE, hrWithInspiration.size()));
            logger.info(String.format(TEXT_COMPLETE, inspiration_name));
        }
    }
}
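PATTERN_ID itself is not shown on this page; from its use it splits an incoming identifier into a source prefix (matched against import configurations via confDao.findByIdPrefix) and a local record id. A minimal sketch with an assumed prefix.recordId shape; the project's actual regular expression may differ:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class IdSplitDemo {
    // assumed identifier shape "<prefix>.<record id>"; the real PATTERN_ID may differ
    static final Pattern PATTERN_ID = Pattern.compile("([^.]+)\\.(.+)");

    public static void main(String[] args) {
        Matcher matcher = PATTERN_ID.matcher("mzk.000000123");
        if (matcher.matches()) {
            System.out.println(matcher.group(1)); // "mzk" -> id_prefix
            System.out.println(matcher.group(2)); // "000000123" -> record_id
        }
    }
}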
Use of cz.mzk.recordmanager.server.model.HarvestedRecord in project RecordManager2 by moravianlibrary.
The class IndexHarvestedRecordsToSolrJobConfig, method deletedHarvestedRecordsReader:
@Bean(name = "indexHarvestedRecordsToSolrJob:deletedHarvestedRecordsReader")
@StepScope
public JdbcPagingItemReader<HarvestedRecord> deletedHarvestedRecordsReader(
        @Value("#{jobParameters[" + Constants.JOB_PARAM_FROM_DATE + "]}") Date from,
        @Value("#{jobParameters[" + Constants.JOB_PARAM_UNTIL_DATE + "]}") Date to,
        @Value("#{jobParameters[" + Constants.JOB_PARAM_CONF_ID + "]}") Long importConfId) throws Exception {
    JdbcPagingItemReader<HarvestedRecord> reader = new JdbcPagingItemReader<>();
    SqlPagingQueryProviderFactoryBean pqpf = new SqlPagingQueryProviderFactoryBean();
    pqpf.setDataSource(dataSource);
    pqpf.setSelectClause("SELECT id, import_conf_id, record_id, updated, deleted, raw_record, format");
    pqpf.setFromClause("FROM harvested_record");
    init(reader, pqpf, from, to, importConfId, true);
    pqpf.setSortKeys(ImmutableMap.of("import_conf_id", Order.ASCENDING, "record_id", Order.ASCENDING));
    reader.setRowMapper(harvestedRecordRowMapper());
    reader.setPageSize(PAGE_SIZE);
    reader.setQueryProvider(pqpf.getObject());
    reader.setDataSource(dataSource);
    reader.setSaveState(true);
    reader.afterPropertiesSet();
    return reader;
}
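The init helper shared by both readers is not included on this page. Judging from the two call sites, it presumably appends the WHERE clause (deleted vs. live records, optional date window, optional configuration id) and binds the query parameters. A sketch of what such a helper could look like; everything beyond the call signature is an assumption:

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

private void init(JdbcPagingItemReader<HarvestedRecord> reader,
        SqlPagingQueryProviderFactoryBean pqpf, Date from, Date to,
        Long importConfId, boolean deleted) {
    List<String> conditions = new ArrayList<>();
    Map<String, Object> parameters = new HashMap<>();
    // the boolean flag presumably distinguishes deleted from live records
    conditions.add(deleted ? "deleted IS NOT NULL" : "deleted IS NULL");
    if (from != null) {
        conditions.add("updated >= :from");
        parameters.put("from", from);
    }
    if (to != null) {
        conditions.add("updated < :to");
        parameters.put("to", to);
    }
    if (importConfId != null) {
        conditions.add("import_conf_id = :confId");
        parameters.put("confId", importConfId);
    }
    pqpf.setWhereClause("WHERE " + String.join(" AND ", conditions));
    reader.setParameterValues(parameters);
}

With a WHERE clause of this shape in place, the sort keys set by the caller (import_conf_id, record_id) give the paging provider a deterministic order across pages.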
Use of cz.mzk.recordmanager.server.model.HarvestedRecord in project RecordManager2 by moravianlibrary.
The class IndexHarvestedRecordsToSolrJobConfig, method updatedRecordsReader:
@Bean(name = "indexHarvestedRecordsToSolrJob:updatedRecordsReader")
@StepScope
public JdbcPagingItemReader<HarvestedRecord> updatedRecordsReader(
        @Value("#{jobParameters[" + Constants.JOB_PARAM_FROM_DATE + "]}") Date from,
        @Value("#{jobParameters[" + Constants.JOB_PARAM_UNTIL_DATE + "]}") Date to,
        @Value("#{jobParameters[" + Constants.JOB_PARAM_CONF_ID + "]}") Long importConfId) throws Exception {
    JdbcPagingItemReader<HarvestedRecord> reader = new JdbcPagingItemReader<>();
    SqlPagingQueryProviderFactoryBean pqpf = new SqlPagingQueryProviderFactoryBean();
    pqpf.setDataSource(dataSource);
    pqpf.setSelectClause("SELECT id, import_conf_id, record_id, updated, deleted, raw_record, format");
    pqpf.setFromClause("FROM harvested_record");
    init(reader, pqpf, from, to, importConfId, false);
    if (from != null && to != null) {
        pqpf.setSortKeys(ImmutableMap.of("updated", Order.ASCENDING, "id", Order.ASCENDING));
    } else {
        pqpf.setSortKeys(ImmutableMap.of("id", Order.ASCENDING));
    }
    reader.setRowMapper(harvestedRecordRowMapper());
    reader.setPageSize(PAGE_SIZE);
    reader.setQueryProvider(pqpf.getObject());
    reader.setDataSource(dataSource);
    reader.setSaveState(true);
    reader.afterPropertiesSet();
    return reader;
}
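Both readers delegate row mapping to harvestedRecordRowMapper(), which is also not shown here. A sketch of a mapper for the selected columns; the no-arg constructor and all setters except setUpdated (which appears in the writers above) are assumptions about the HarvestedRecord model, so the project's actual mapper may build the entity differently:

import java.sql.ResultSet;
import org.springframework.jdbc.core.RowMapper;

public RowMapper<HarvestedRecord> harvestedRecordRowMapperSketch() {
    return (ResultSet rs, int rowNum) -> {
        HarvestedRecord record = new HarvestedRecord();  // assumed no-arg constructor
        record.setId(rs.getLong("id"));                  // assumed setter
        record.setUpdated(rs.getTimestamp("updated"));   // setUpdated appears in the writers above
        record.setDeleted(rs.getTimestamp("deleted"));   // assumed setter
        record.setFormat(rs.getString("format"));        // assumed setter
        record.setRawRecord(rs.getBytes("raw_record"));  // assumed setter
        return record;
    };
}

Note the sort-key branch in updatedRecordsReader: JdbcPagingItemReader pages by the sort key, so the key set must be unique, which is why id is always part of it.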