Use of org.candlepin.model.ExporterMetadata in the candlepin project.
Example: the newerImport method of the ImporterTest class.
@Test
public void newerImport() throws Exception {
    // Regression test for bz #790751: a manifest created after the most
    // recent recorded import must be accepted, and the stored export date
    // must be advanced to the manifest's creation date.
    Date manifestDate = getDateBeforeDays(10);

    // The manifest (meta.json) under test.
    File manifestFile = createFile("meta.json", "0.0.3", manifestDate, "test_user", "prefix");

    // The last recorded import in Candlepin is 30 days old -- older than
    // the manifest, so validation should succeed.
    ExporterMetadata lastRun = new ExporterMetadata();
    lastRun.setExported(getDateBeforeDays(30));
    lastRun.setId("42");
    lastRun.setType(ExporterMetadata.TYPE_SYSTEM);

    ExporterMetadataCurator curator = mock(ExporterMetadataCurator.class);
    when(curator.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(lastRun);

    Importer importer = new Importer(null, null, null, null, null, null, null, null, null,
        curator, null, null, i18n, null, null, su, null, this.mockSubReconciler, this.ec,
        this.translator);

    importer.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, manifestFile,
        new ConflictOverrides());

    // The stored export date was advanced to the manifest's date.
    assertEquals(manifestDate, lastRun.getExported());
}
Use of org.candlepin.model.ExporterMetadata in the candlepin project.
Example: the oldImport method of the ImporterTest class.
@Test
public void oldImport() throws Exception {
    // The manifest (meta.json) under test, created 10 days ago.
    File manifestFile = createFile("meta.json", "0.0.3", getDateBeforeDays(10), "test_user", "prefix");

    // The last recorded import is only 3 days old, i.e. NEWER than the
    // manifest, so validation must reject the import as stale.
    // (Note: `lastRun` is the stored metadata record; the curator below is
    // the mock that returns it.)
    ExporterMetadata lastRun = new ExporterMetadata();
    lastRun.setExported(getDateBeforeDays(3));
    lastRun.setId("42");
    lastRun.setType(ExporterMetadata.TYPE_SYSTEM);

    ExporterMetadataCurator curator = mock(ExporterMetadataCurator.class);
    when(curator.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(lastRun);

    Importer importer = new Importer(null, null, null, null, null, null, null, null, null,
        curator, null, null, i18n, null, null, su, null, this.mockSubReconciler, this.ec,
        this.translator);

    try {
        importer.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, manifestFile,
            new ConflictOverrides());
        fail();
    }
    catch (ImportConflictException e) {
        // Exactly one conflict is reported: the manifest is too old.
        assertFalse(e.message().getConflicts().isEmpty());
        assertEquals(1, e.message().getConflicts().size());
        assertTrue(e.message().getConflicts().contains(Importer.Conflict.MANIFEST_OLD));
    }
}
Use of org.candlepin.model.ExporterMetadata in the candlepin project.
Example: the validateMetadata method of the Importer class.
/**
 * Check to make sure the metadata in the uploaded manifest is newer than
 * the data recorded from the previous import (if any), creating the
 * first-import record when none exists.
 * @param type ExporterMetadata.TYPE_PER_USER or TYPE_SYSTEM
 * @param owner Owner in the case of PER_USER
 * @param meta meta.json file
 * @param forcedConflicts Conflicts we will override if encountered
 * @throws IOException thrown if there's a problem reading the file
 * @throws ImporterException thrown if the metadata is invalid.
 */
protected void validateMetadata(String type, Owner owner, File meta, ConflictOverrides forcedConflicts) throws IOException, ImporterException {
// Parse the manifest's meta.json. Note this happens before the type
// check, so an unreadable file surfaces as IOException even when type
// is null.
Meta m = mapper.readValue(meta, Meta.class);
if (type == null) {
throw new ImporterException(i18n.tr("Wrong metadata type"));
}
// Look up the record of the most recent import in this scope.
ExporterMetadata lastrun = null;
if (ExporterMetadata.TYPE_SYSTEM.equals(type)) {
lastrun = expMetaCurator.lookupByType(type);
} else if (ExporterMetadata.TYPE_PER_USER.equals(type)) {
// PER_USER imports are scoped to an owner, which is therefore required.
if (owner == null) {
throw new ImporterException(i18n.tr("Invalid owner"));
}
lastrun = expMetaCurator.lookupByTypeAndOwner(type, owner);
}
if (lastrun == null) {
// this is our first import, let's create a new entry
lastrun = new ExporterMetadata(type, m.getCreated(), owner);
lastrun = expMetaCurator.create(lastrun);
} else {
// Reject a manifest strictly older than the last import, unless the
// caller explicitly forced the MANIFEST_OLD conflict override.
if (lastrun.getExported().after(m.getCreated())) {
if (!forcedConflicts.isForced(Importer.Conflict.MANIFEST_OLD)) {
throw new ImportConflictException(i18n.tr("Import is older than existing data"), Importer.Conflict.MANIFEST_OLD);
} else {
log.warn("Manifest older than existing data.");
}
} else {
/*
 * Prior to 5.6.4, MySQL did not store fractions of a second in
 * temporal values. Consequently, the manifest metadata can end up
 * with a created date that is a fraction of a second ahead of
 * the created date in the cp_export_metadata table. So we throw away
 * the fractions of a second.
 */
long exported = lastrun.getExported().getTime() / 1000;
long created = m.getCreated().getTime() / 1000;
// Identical to whole-second precision means the same manifest is
// being re-imported; allow it only when explicitly forced.
if (exported == created) {
if (!forcedConflicts.isForced(Importer.Conflict.MANIFEST_SAME)) {
throw new ImportConflictException(i18n.tr("Import is the same as existing data"), Importer.Conflict.MANIFEST_SAME);
} else {
log.warn("Manifest same as existing data.");
}
}
}
// Record this manifest's creation date as the new "last exported" date.
// Note this also runs for a forced MANIFEST_OLD import, deliberately
// moving the stored date BACKWARDS to the older manifest's date.
lastrun.setExported(m.getCreated());
expMetaCurator.merge(lastrun);
}
}
Use of org.candlepin.model.ExporterMetadata in the candlepin project.
Example: the testUndoImport method of the UndoImportsJobTest class.
@Test
public void testUndoImport() throws JobExecutionException, IOException, ImporterException {
    // We need proper (non-mock) curators for this test.
    this.poolManager = this.poolManagerBase;
    this.ownerCurator = super.ownerCurator;
    this.exportCurator = this.exportCuratorBase;
    this.undoImportsJob = new UndoImportsJob(this.i18n, this.ownerCurator, this.poolManager,
        this.subAdapter, this.exportCurator, this.importRecordCurator);

    // Create two owners, each with its own upstream consumer.
    Owner owner1 = TestUtil.createOwner();
    Owner owner2 = TestUtil.createOwner();
    ConsumerType type = this.createConsumerType();
    UpstreamConsumer uc1 = new UpstreamConsumer("uc1", null, type, "uc1");
    UpstreamConsumer uc2 = new UpstreamConsumer("uc2", null, type, "uc2");
    this.ownerCurator.create(owner1);
    this.ownerCurator.create(owner2);
    owner1.setUpstreamConsumer(uc1);
    // BUGFIX: uc2 belongs to owner2. The original code called
    // owner1.setUpstreamConsumer(uc2), overwriting uc1 and leaving owner2
    // (merged below) with no upstream consumer at all.
    owner2.setUpstreamConsumer(uc2);
    this.ownerCurator.merge(owner1);
    this.ownerCurator.merge(owner2);

    // Create import metadata for both owners.
    ExporterMetadata metadata1 =
        new ExporterMetadata(ExporterMetadata.TYPE_PER_USER, new Date(), owner1);
    ExporterMetadata metadata2 =
        new ExporterMetadata(ExporterMetadata.TYPE_PER_USER, new Date(), owner2);
    this.exportCurator.create(metadata1);
    this.exportCurator.create(metadata2);

    // Create pools with and without upstream pool IDs.
    Pool pool1 = this.createPool("pool1", owner1, true, PoolType.NORMAL);
    Pool pool2 = this.createPool("pool2", owner1, true, PoolType.BONUS);
    Pool pool3 = this.createPool("pool3", owner1, false, PoolType.NORMAL);
    Pool pool4 = this.createPool("pool4", owner1, false, PoolType.BONUS);
    Pool pool5 = this.createPool("pool5", owner1, true, PoolType.ENTITLEMENT_DERIVED);
    Pool pool6 = this.createPool("pool6", owner1, false, PoolType.ENTITLEMENT_DERIVED);
    Pool pool7 = this.createPool("pool7", owner2, true, PoolType.NORMAL);
    Pool pool8 = this.createPool("pool8", owner2, true, PoolType.BONUS);
    Pool pool9 = this.createPool("pool9", owner2, true, PoolType.ENTITLEMENT_DERIVED);

    // Create an ueber certificate for the owner.
    UeberCertificate uebercert =
        ueberCertGenerator.generate(owner1.getKey(), this.setupAdminPrincipal("test_admin"));
    assertNotNull(uebercert);

    // Verify initial state.
    assertEquals(Arrays.asList(pool1, pool2, pool3, pool4, pool5, pool6),
        this.poolManager.listPoolsByOwner(owner1).list());
    assertEquals(Arrays.asList(pool7, pool8, pool9),
        this.poolManager.listPoolsByOwner(owner2).list());
    assertEquals(metadata1,
        exportCurator.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, owner1));
    assertEquals(metadata2,
        exportCurator.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, owner2));
    assertEquals(0, this.importRecordCurator.findRecords(owner1).list().size());
    assertEquals(0, this.importRecordCurator.findRecords(owner2).list().size());

    // Execute the job, targeting owner1 only.
    Principal principal = new UserPrincipal("JarJarBinks", null, true);
    this.jobDataMap.put(JobStatus.TARGET_TYPE, JobStatus.TargetType.OWNER);
    this.jobDataMap.put(JobStatus.TARGET_ID, owner1.getId());
    this.jobDataMap.put(UndoImportsJob.OWNER_KEY, owner1.getKey());
    this.jobDataMap.put(PinsetterJobListener.PRINCIPAL_KEY, principal);

    // The job locks the owner row, so an explicit transaction is required.
    beginTransaction();
    this.undoImportsJob.toExecute(this.jobContext);
    commitTransaction();

    // Verify deletions -- Ueber pools should not get deleted, and owner2's
    // data must be untouched.
    assertEquals(Arrays.asList(pool3, pool4, pool5, pool6),
        this.poolManager.listPoolsByOwner(owner1).list());
    assertEquals(Arrays.asList(pool7, pool8, pool9),
        this.poolManager.listPoolsByOwner(owner2).list());
    assertNull(exportCurator.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, owner1));
    assertEquals(metadata2,
        exportCurator.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, owner2));
    assertNull(owner1.getUpstreamConsumer());

    // Exactly one deletion record was created, for owner1 only.
    List<ImportRecord> records = this.importRecordCurator.findRecords(owner1).list();
    assertEquals(1, records.size());
    assertEquals(ImportRecord.Status.DELETE, records.get(0).getStatus());
    assertEquals(0, this.importRecordCurator.findRecords(owner2).list().size());
}
Use of org.candlepin.model.ExporterMetadata in the candlepin project.
Example: the toExecute method of the UndoImportsJob class.
/**
 * {@inheritDoc}
 *
 * Undoes a manifest import for the targeted owner as a pinsetter job:
 * deletes every managed pool, removes the per-user import metadata and
 * the owner's upstream consumer reference, and records the deletion.
 *
 * @param context the job's execution context
 */
@Transactional
public void toExecute(JobExecutionContext context) throws JobExecutionException {
try {
JobDataMap map = context.getMergedJobDataMap();
String ownerId = map.getString(JobStatus.TARGET_ID);
String ownerKey = map.getString(OWNER_KEY);
// Lock the owner row so a concurrent job cannot modify it mid-undo.
Owner owner = this.ownerCurator.lockAndLoadById(ownerId);
// NOTE(review): 'lazy' is read from the job data but never used in this
// method -- confirm whether LAZY_REGEN handling was dropped intentionally.
Boolean lazy = map.getBoolean(LAZY_REGEN);
Principal principal = (Principal) map.get(PinsetterJobListener.PRINCIPAL_KEY);
// Owner may have been deleted between scheduling and execution.
if (owner == null) {
log.debug("Owner no longer exists: {}", ownerKey);
context.setResult("Nothing to do; owner no longer exists: " + ownerKey);
return;
}
String displayName = owner.getDisplayName();
// Remove imports; no metadata record means nothing was ever imported.
ExporterMetadata metadata = this.exportCurator.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, owner);
if (metadata == null) {
log.debug("No imports exist for owner {}", displayName);
context.setResult("Nothing to do; imports no longer exist for owner: " + displayName);
return;
}
log.info("Deleting all pools originating from manifests for owner/org: {}", displayName);
List<Pool> pools = this.poolManager.listPoolsByOwner(owner).list();
for (Pool pool : pools) {
// Only manifest-managed pools are deleted; locally-created pools stay.
if (this.poolManager.isManaged(pool)) {
this.poolManager.deletePool(pool);
}
}
// Clear out upstream ID so owner can import from other distributors:
UpstreamConsumer uc = owner.getUpstreamConsumer();
owner.setUpstreamConsumer(null);
this.exportCurator.delete(metadata);
this.recordManifestDeletion(owner, principal.getUsername(), uc);
context.setResult("Imported pools removed for owner " + displayName);
} catch (PersistenceException e) {
// Persistence failures are treated as transient: ask pinsetter to retry.
throw new RetryJobException("UndoImportsJob encountered a problem.", e);
} catch (RuntimeException e) {
// Walk the cause chain: a runtime exception wrapping an SQLException is
// also transient and retried; anything else falls through to the
// generic failure handling below.
Throwable cause = e.getCause();
while (cause != null) {
if (SQLException.class.isAssignableFrom(cause.getClass())) {
log.warn("Caught a runtime exception wrapping an SQLException.");
throw new RetryJobException("UndoImportsJob encountered a problem.", e);
}
cause = cause.getCause();
}
// Otherwise throw as we would normally for any generic Exception:
log.error("UndoImportsJob encountered a problem.", e);
context.setResult(e.getMessage());
throw new JobExecutionException(e.getMessage(), e, false);
}
// Any other checked exception is fatal for this run: log it, record the
// message as the job result, and rethrow without re-firing (false).
catch (Exception e) {
log.error("UndoImportsJob encountered a problem.", e);
context.setResult(e.getMessage());
throw new JobExecutionException(e.getMessage(), e, false);
}
}
Aggregations