Use of cz.mzk.recordmanager.server.model.HarvestedRecord.HarvestedRecordUniqueId in project RecordManager2 by moravianlibrary.
The class AdresarRecordsWriter, method writeInner.
/**
 * Persists a chunk of harvested Adresar records.
 * <p>
 * Each non-null {@link Record} is serialized to MARC21-XML and stored as a
 * {@link HarvestedRecord}; an existing record (matched by the "SYS" control
 * field and the current harvest configuration) is updated in place, otherwise
 * a new one is created.
 *
 * @param items chunk of record lists delivered by the batch reader
 * @throws Exception rethrown after logging so the batch framework can
 *                   mark the chunk as failed
 */
protected void writeInner(List<? extends List<Record>> items) throws Exception {
    for (List<Record> records : items) {
        for (Record currentRecord : records) {
            if (currentRecord == null) {
                continue; // tolerate gaps in the incoming chunk
            }
            String recordId = null;
            try {
                MarcRecord marc = new MarcRecordImpl(currentRecord);
                recordId = marc.getControlField("SYS");
                HarvestedRecord hr = harvestedRecordDAO.findByIdAndHarvestConfiguration(recordId, configurationId);
                if (hr == null) {
                    // first occurrence of this record for this configuration
                    hr = new HarvestedRecord(new HarvestedRecordUniqueId(configurationId, recordId));
                    hr.setFormat("marc21-xml");
                }
                hr.setUpdated(new Date());
                ByteArrayOutputStream outStream = new ByteArrayOutputStream();
                MarcWriter marcWriter = new MarcXmlWriter(outStream, true);
                try {
                    marcWriter.setConverter(ISOCharConvertor.INSTANCE);
                    marcWriter.write(currentRecord);
                } finally {
                    // close the writer even when write() throws, so the
                    // underlying stream is always flushed and released
                    marcWriter.close();
                }
                hr.setRawRecord(outStream.toByteArray());
                harvestedRecordDAO.persist(hr);
            } catch (Exception e) {
                // include the record id (may be null if extraction failed)
                // and the exception itself instead of a bare message
                logger.warn("Error occurred in processing record " + recordId, e);
                throw e;
            }
        }
    }
}
Use of cz.mzk.recordmanager.server.model.HarvestedRecord.HarvestedRecordUniqueId in project RecordManager2 by moravianlibrary.
The class OAIItemProcessor, method createHarvestedRecord.
/**
 * Converts one OAI-PMH record into a new or updated {@link HarvestedRecord}.
 * <p>
 * Deleted records (OAI header flag, or metadata equal to the error marker
 * element) are stored with an empty payload and a deletion timestamp.
 * Unchanged records are touched but flagged to skip further processing.
 *
 * @param record harvested OAI record (header + metadata)
 * @return the managed {@link HarvestedRecord}, never {@code null}
 * @throws TransformerException when the metadata element cannot be serialized
 */
protected HarvestedRecord createHarvestedRecord(OAIRecord record) throws TransformerException {
    String recordId = idExtractor.extract(record.getHeader().getIdentifier());
    HarvestedRecord rec = recordDao.findByIdAndHarvestConfiguration(recordId, configuration);
    // deleted when the header says so, or the metadata payload is the error marker
    boolean deleted = record.getHeader().isDeleted() || record.getMetadata().getElement().getTagName().equals(METADATA_ERROR);
    byte[] recordContent = (deleted) ? null : asByteArray(record.getMetadata().getElement());
    if (recordContent != null && configuration.isInterceptionEnabled()) {
        MarcRecordInterceptor interceptor = marcInterceptorFactory.getInterceptor(configuration, recordId, recordContent);
        if (interceptor != null) {
            // in case of invalid MARC the error is processed later
            recordContent = interceptor.intercept();
        }
    }
    if (rec == null) {
        // create new record
        HarvestedRecordUniqueId id = new HarvestedRecordUniqueId(configuration, recordId);
        rec = new HarvestedRecord(id);
        rec.setHarvestedFrom(configuration);
        rec.setFormat(format);
    } else if ((deleted && rec.getDeleted() != null && (rec.getRawRecord() == null || rec.getRawRecord().length == 0)) || Arrays.equals(recordContent, rec.getRawRecord())) {
        // no change in record content: touch the timestamp, skip reprocessing
        rec.setUpdated(new Date());
        rec.setShouldBeProcessed(false);
        return rec;
    }
    rec.setShouldBeProcessed(true);
    rec.setUpdated(new Date());
    // single guard for the datestamp (was duplicated in two identical ifs)
    if (record.getHeader().getDatestamp() != null) {
        rec.setHarvested(record.getHeader().getDatestamp());
        // remember the previous OAI timestamp before overwriting it
        rec.setTemporalOldOaiTimestamp(rec.getOaiTimestamp());
        rec.setOaiTimestamp(record.getHeader().getDatestamp());
    }
    if (deleted) {
        rec.setDeleted(new Date());
        rec.setRawRecord(new byte[0]);
        return rec;
    } else {
        rec.setDeleted(null);
        rec.setRawRecord(recordContent);
    }
    return rec;
}
Use of cz.mzk.recordmanager.server.model.HarvestedRecord.HarvestedRecordUniqueId in project RecordManager2 by moravianlibrary.
The class CosmotronHarvestJobConfig, method upate996Reader.
/**
 * Step-scoped reader that pages over cosmotron_periodicals_last_update,
 * yielding the ids of records whose 996 fields must be re-synchronized.
 * When a "from" date is supplied, only rows updated after it are read.
 *
 * @param configId import configuration id (job parameter)
 * @param from     optional lower bound on last_update
 * @return configured paging reader of {@link HarvestedRecordUniqueId}
 * @throws Exception if the query provider or reader fails to initialize
 */
@Bean(name = Constants.JOB_ID_HARVEST_COSMOTRON + ":update996reader")
@StepScope
public ItemReader<HarvestedRecordUniqueId> upate996Reader(@Value("#{jobParameters[" + Constants.JOB_PARAM_CONF_ID + "]}") Long configId, @Value("#{stepExecutionContext[" + Constants.JOB_PARAM_FROM_DATE + "] " + "?:jobParameters[ " + Constants.JOB_PARAM_FROM_DATE + "]}") Date from) throws Exception {
    // Bind query parameters first, extending the WHERE clause only when
    // an incremental "from" bound was provided.
    Map<String, Object> queryParams = new HashMap<>();
    queryParams.put("conf_id", configId);
    StringBuilder whereClause = new StringBuilder("WHERE import_conf_id = :conf_id");
    if (from != null) {
        whereClause.append(" AND last_update > :from");
        queryParams.put("from", from);
    }
    SqlPagingQueryProviderFactoryBean queryFactory = new SqlPagingQueryProviderFactoryBean();
    queryFactory.setDataSource(dataSource);
    queryFactory.setSelectClause("SELECT harvested_record_id, import_conf_id, record_id");
    queryFactory.setFromClause("FROM cosmotron_periodicals_last_update");
    queryFactory.setWhereClause(whereClause.toString());
    queryFactory.setSortKey("harvested_record_id");
    JdbcPagingItemReader<HarvestedRecordUniqueId> pagingReader = new JdbcPagingItemReader<>();
    pagingReader.setDataSource(dataSource);
    pagingReader.setQueryProvider(queryFactory.getObject());
    pagingReader.setParameterValues(queryParams);
    pagingReader.setRowMapper(new HarvestedRecordIdRowMapper());
    pagingReader.setPageSize(PAGE_SIZE);
    pagingReader.setSaveState(true);
    pagingReader.afterPropertiesSet();
    return pagingReader;
}
Use of cz.mzk.recordmanager.server.model.HarvestedRecord.HarvestedRecordUniqueId in project RecordManager2 by moravianlibrary.
The class CosmotronUpdate996Writer, method write.
/**
 * Moves the 996 fields of each listed record onto its parent record,
 * then flushes and clears the Hibernate session so the persistence
 * context does not grow across chunks.
 *
 * @param uniqueIds chunk of record ids delivered by the batch reader
 * @throws Exception propagated from per-record processing or the flush
 */
@Override
public void write(List<? extends HarvestedRecordUniqueId> uniqueIds) throws Exception {
    for (HarvestedRecordUniqueId recordUniqueId : uniqueIds) {
        move996ToParentRecord(recordUniqueId);
    }
    // push pending updates to the database, then detach all entities
    sessionFactory.getCurrentSession().flush();
    sessionFactory.getCurrentSession().clear();
}
Use of cz.mzk.recordmanager.server.model.HarvestedRecord.HarvestedRecordUniqueId in project RecordManager2 by moravianlibrary.
The class ExportRecordsJobConfig, method exportRecordsReader.
/**
 * Job-scoped reader that pages over harvested_record ids for one import
 * configuration, sorted by record_id.
 * <p>
 * When the "deleted" job parameter is absent, deleted records (non-null
 * deleted column) are excluded; when it is present, all records are read.
 *
 * @param configId import configuration id (job parameter)
 * @param deleted  flag parameter; non-null means include deleted records
 * @return configured paging reader of {@link HarvestedRecordUniqueId}
 * @throws Exception if the query provider or reader fails to initialize
 */
@Bean(name = "exportRecordsJob:exportRecordsReader")
@StepScope
public ItemReader<HarvestedRecordUniqueId> exportRecordsReader(@Value("#{jobParameters[" + Constants.JOB_PARAM_CONF_ID + "]}") Long configId, @Value("#{jobParameters[" + Constants.JOB_PARAM_DELETED + "]}") Long deleted) throws Exception {
    // diamond operator for consistency with the project's other readers
    JdbcPagingItemReader<HarvestedRecordUniqueId> reader = new JdbcPagingItemReader<>();
    SqlPagingQueryProviderFactoryBean pqpf = new SqlPagingQueryProviderFactoryBean();
    pqpf.setDataSource(dataSource);
    pqpf.setSelectClause("SELECT import_conf_id, record_id");
    pqpf.setFromClause("FROM harvested_record");
    pqpf.setWhereClause("WHERE import_conf_id = :conf_id" + (deleted == null ? " AND deleted IS NULL" : ""));
    pqpf.setSortKey("record_id");
    Map<String, Object> parameterValues = new HashMap<>();
    parameterValues.put("conf_id", configId);
    reader.setParameterValues(parameterValues);
    reader.setRowMapper(new HarvestedRecordIdRowMapper());
    reader.setPageSize(20);
    reader.setQueryProvider(pqpf.getObject());
    reader.setDataSource(dataSource);
    reader.afterPropertiesSet();
    return reader;
}
Aggregations