Usage of org.candlepin.model.dto.PoolIdAndQuantity in the candlepin project: class EntitlerJobTest, method serializeJobDataMapForPool.
/**
 * This may look like a trivial exercise of plain Quartz behavior, but the
 * real purpose is to verify that every object placed into the JobDataMap
 * can be serialized to the database for Quartz clustering. When this test
 * breaks, the usual culprit is a newly added map entry whose type does not
 * implement the Serializable interface.
 *
 * @throws IOException if serializing the job data map fails
 */
@Test
public void serializeJobDataMapForPool() throws IOException {
    PoolIdAndQuantity[] poolQuantities = { new PoolIdAndQuantity("pool10", 1) };
    JobDetail jobDetail = EntitlerJob.bindByPoolAndQuantities(consumer, owner.getKey(), poolQuantities);
    serialize(jobDetail.getJobDataMap());
}
Usage of org.candlepin.model.dto.PoolIdAndQuantity in the candlepin project: class EntitlerJobTest, method recoveryIsFalse.
@Test
public void recoveryIsFalse() {
    // A bind job must not ask Quartz to re-fire it after a hard failure,
    // but it must be durable so the job definition survives until it runs.
    PoolIdAndQuantity[] poolQuantities = { new PoolIdAndQuantity("pool10", 1) };
    JobDetail jobDetail = EntitlerJob.bindByPoolAndQuantities(consumer, owner.getKey(), poolQuantities);
    assertFalse(jobDetail.requestsRecovery());
    assertTrue(jobDetail.isDurable());
}
Usage of org.candlepin.model.dto.PoolIdAndQuantity in the candlepin project: class EntitlerJobTest, method respondWithValidationErrors.
@Test
public void respondWithValidationErrors() throws JobExecutionException, EntitlementRefusedException {
    // Build the job detail for a single pool/quantity pair and expose its
    // data map through a mocked execution context.
    PoolIdAndQuantity[] poolQuantities = { new PoolIdAndQuantity("pool10", 1) };
    JobDetail jobDetail = EntitlerJob.bindByPoolAndQuantities(consumer, owner.getKey(), poolQuantities);
    JobExecutionContext context = mock(JobExecutionContext.class);
    when(context.getMergedJobDataMap()).thenReturn(jobDetail.getJobDataMap());

    // Make the entitler refuse the bind with a single rule failure keyed
    // by pool id "hello".
    ValidationResult failure = new ValidationResult();
    failure.addError("rulefailed.no.entitlements.available");
    HashMap<String, ValidationResult> refusals = new HashMap<>();
    refusals.put("hello", failure);
    when(e.bindByPoolQuantities(eq(consumerUuid), anyMapOf(String.class, Integer.class)))
        .thenThrow(new EntitlementRefusedException(refusals));

    EntitlerJob job = new EntitlerJob(e, null, pC, i18n);
    injector.injectMembers(job);

    // The curator must resolve the failing pool id back to a Pool so the
    // job can translate the rule error into a user-facing message.
    Pool failingPool = new Pool();
    failingPool.setId("hello");
    CandlepinQuery cqmock = mock(CandlepinQuery.class);
    when(cqmock.iterator()).thenReturn(Arrays.asList(failingPool).iterator());
    when(pC.listAllByIds(anyListOf(String.class))).thenReturn(cqmock);

    job.execute(context);

    // The refusal must surface as the job result (translated messages),
    // not as a thrown exception.
    ArgumentCaptor<Object> resultCaptor = ArgumentCaptor.forClass(Object.class);
    verify(context).setResult(resultCaptor.capture());
    List<PoolIdAndErrors> resultErrors = (List<PoolIdAndErrors>) resultCaptor.getValue();
    assertEquals(1, resultErrors.size());
    assertEquals("hello", resultErrors.get(0).getPoolId());
    assertEquals(1, resultErrors.get(0).getErrors().size());
    assertEquals("No subscriptions are available from the pool with ID \"hello\".",
        resultErrors.get(0).getErrors().get(0));
}
Usage of org.candlepin.model.dto.PoolIdAndQuantity in the candlepin project: class EntitlerJobTest, method handleException.
@Test(expected = JobExecutionException.class)
public void handleException() throws JobExecutionException, EntitlementRefusedException {
    PoolIdAndQuantity[] poolQuantities = { new PoolIdAndQuantity("pool10", 1) };
    JobDetail jobDetail = EntitlerJob.bindByPoolAndQuantities(consumer, owner.getKey(), poolQuantities);
    JobExecutionContext context = mock(JobExecutionContext.class);
    when(context.getMergedJobDataMap()).thenReturn(jobDetail.getJobDataMap());

    // Unchecked double-cast produces a class token carrying the
    // HashMap<String, Integer> generic type for the captor.
    Class<HashMap<String, Integer>> mapClass = (Class<HashMap<String, Integer>>) (Class) Map.class;
    ArgumentCaptor<HashMap<String, Integer>> quantityCaptor = ArgumentCaptor.forClass(mapClass);

    // Any unexpected runtime failure from the entitler must surface as a
    // JobExecutionException (asserted via the expected attribute above).
    when(e.bindByPoolQuantities(eq(consumerUuid), quantityCaptor.capture()))
        .thenThrow(new ForbiddenException("job should fail"));

    EntitlerJob job = new EntitlerJob(e, null, null, null);
    injector.injectMembers(job);
    job.execute(context);
}
Usage of org.candlepin.model.dto.PoolIdAndQuantity in the candlepin project: class EntitlerJob, method toExecute.
/**
 * Executes the bind: reads the target consumer UUID and the requested
 * pool/quantity pairs from the merged job data map, asks the entitler to
 * bind them, and publishes the consumed pool ids and quantities as the
 * job result.
 *
 * If the rules engine refuses the bind, the refusal is translated into
 * per-pool, user-readable messages and set as the job result instead of
 * failing the job. Any other failure is wrapped in a JobExecutionException
 * so the job is properly cleaned up.
 *
 * @param ctx the Quartz execution context carrying the merged job data
 * @throws JobExecutionException on any unexpected error during the bind
 */
@Override
public void toExecute(JobExecutionContext ctx) throws JobExecutionException {
    try {
        JobDataMap map = ctx.getMergedJobDataMap();
        String uuid = (String) map.get(JobStatus.TARGET_ID);
        PoolIdAndQuantity[] poolQuantities = (PoolIdAndQuantity[]) map.get("pool_and_quantities");
        Map<String, Integer> poolMap = new HashMap<>();
        for (PoolIdAndQuantity poolIdAndQuantity : poolQuantities) {
            // Sum quantities rather than put(): duplicate pool ids in the
            // request would otherwise silently drop earlier quantities.
            poolMap.merge(poolIdAndQuantity.getPoolId(), poolIdAndQuantity.getQuantity(), Integer::sum);
        }
        List<Entitlement> ents = entitler.bindByPoolQuantities(uuid, poolMap);
        entitler.sendEvents(ents);
        // Report what was actually consumed (pool id + granted quantity)
        // as the job result.
        PoolIdAndQuantity[] consumed = new PoolIdAndQuantity[ents.size()];
        for (int i = 0; i < ents.size(); i++) {
            consumed[i] = new PoolIdAndQuantity(ents.get(i).getPool().getId(), ents.get(i).getQuantity());
        }
        ctx.setResult(Arrays.asList(consumed));
        // Clear the curator's persistence context so entities loaded during
        // the bind do not linger after the job completes.
        poolCurator.clear();
    }
    catch (EntitlementRefusedException e) {
        // A rules refusal is an expected outcome: translate each pool's
        // validation errors into user-facing messages and report them as
        // the job result instead of failing the job.
        log.error("EntitlerJob encountered a problem, translating errors", e);
        Map<String, ValidationResult> validationResults = e.getResults();
        EntitlementRulesTranslator translator = new EntitlementRulesTranslator(i18n);
        List<PoolIdAndErrors> poolErrors = new ArrayList<>();
        for (Pool pool : poolCurator.listAllByIds(validationResults.keySet())) {
            List<String> errorMessages = new ArrayList<>();
            for (ValidationError error : validationResults.get(pool.getId()).getErrors()) {
                errorMessages.add(translator.poolErrorToMessage(pool, error));
            }
            poolErrors.add(new PoolIdAndErrors(pool.getId(), errorMessages));
        }
        ctx.setResult(poolErrors);
    }
    // Wrap anything unexpected so the job is properly cleaned up on failure.
    catch (Exception e) {
        log.error("EntitlerJob encountered a problem.", e);
        throw new JobExecutionException(e.getMessage(), e, false);
    }
}
Aggregations