Example use of dk.dbc.pgqueue.consumer.JobMetaData in the project solr-document-store by DBCDK:
the makeWorker method of the Worker class.
/**
 * Creates the queue consumer that indexes one job at a time:
 * fetch the source document, build the Solr document, delete any stale
 * documents for the bibliographic shard, then deploy the new document.
 *
 * @return a {@link JobConsumer} processing {@link QueueJob}s
 */
public JobConsumer<QueueJob> makeWorker() {
    // JobConsumer is a functional interface, so a lambda replaces the anonymous class.
    return (Connection connection, QueueJob job, JobMetaData metaData) -> {
        log.info("job = {}, metadata = {}", job, metaData);
        try {
            JsonNode sourceDoc = docProducer.fetchSourceDoc(job);
            SolrInputDocument solrDocument = docProducer.createSolrDocument(sourceDoc);
            String bibliographicShardId = docProducer.bibliographicShardId(sourceDoc);
            docProducer.deleteSolrDocuments(bibliographicShardId, job.getCommitwithin());
            docProducer.deploy(solrDocument, job.getCommitwithin());
        } catch (IOException ex) {
            // I/O trouble is treated as transient: the job may be retried.
            throw new NonFatalQueueError(ex);
        } catch (SolrServerException ex) {
            // Solr-side failure is treated as fatal for this job.
            throw new FatalQueueError(ex);
        }
    };
}
Example use of dk.dbc.pgqueue.consumer.JobMetaData in the project solr-document-store by DBCDK:
the testStoreRetrieve method of the QueueJobIT class.
/**
 * Round-trip test: enqueue two jobs, start a worker that collects the
 * dequeued jobs, and verify the retrieved jobs match the enqueued ones.
 */
@Test(timeout = 5000L)
public void testStoreRetrieve() throws Exception {
    System.out.println("store-retrieve");
    // One job with default commitWithin, one with an explicit value (1000).
    QueueJob job1 = new QueueJob(888888, "clazzifier", "12345678");
    QueueJob job2 = new QueueJob(888888, "clazzifier", "87654321", 1000);
    try (Connection connection = dataSource.getConnection()) {
        PreparedQueueSupplier<QueueJob> supplier = QUEUE_SUPPLIER.preparedSupplier(connection);
        supplier.enqueue(QUEUE, job1);
        supplier.enqueue(QUEUE, job2);
        // The consumer hands dequeued jobs to the test thread via a blocking deque.
        BlockingDeque<QueueJob> list = new LinkedBlockingDeque<>();
        QueueWorker worker = QueueWorker.builder()
                .consume(QUEUE)
                .dataSource(dataSource)
                .build(QueueJob.STORAGE_ABSTRACTION,
                       (JobConsumer<QueueJob>) (Connection connection1, QueueJob job, JobMetaData metaData) -> {
                           list.add(job);
                       });
        worker.start();
        try {
            QueueJob actual1 = list.pollFirst(5, TimeUnit.SECONDS);
            QueueJob actual2 = list.pollFirst(5, TimeUnit.SECONDS);
            // String.valueOf is null-safe: a poll timeout yields a clean assertion
            // failure ("null" vs expected) instead of a NullPointerException.
            assertEquals(job1.toString(), String.valueOf(actual1));
            assertEquals(job2.toString(), String.valueOf(actual2));
        } finally {
            // Always stop the worker, even when a poll or assertion fails,
            // so the test does not leak a running consumer thread.
            worker.stop();
        }
    }
}
Aggregations