Use of org.candlepin.util.Traceable in project candlepin by candlepin.
From the class CandlepinPoolManager, the method refreshPoolsWithRegeneration.
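For context, @Traceable and @TraceableParam are marker annotations used by Candlepin's tracing support to flag methods whose execution should be traced and parameters whose values should be recorded. The sketch below shows what such marker annotations could look like; the retention and target details, and the String value element, are assumptions inferred from the usage in the methods that follow, not copied from the Candlepin source.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Sketch only: marks a method whose execution should be traced.
// (Each annotation would live in its own file in practice.)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Traceable {
}

// Sketch only: marks a parameter to record in the trace under the given name.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.PARAMETER)
public @interface TraceableParam {
    String value();
}

Note how refreshPoolsWithRegeneration below passes @TraceableParam("owner") so the owner argument is captured in the trace.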
/*
* We need to update/regen entitlements in the same transaction we update pools
* so we don't miss anything
*/
@Transactional
@SuppressWarnings("checkstyle:methodlength")
@Traceable
void refreshPoolsWithRegeneration(SubscriptionServiceAdapter subAdapter, @TraceableParam("owner") Owner owner, boolean lazy) {
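// Capture the refresh start time; it is reused below both to detect expired
// subscriptions and to log the total elapsed time at the end.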
Date now = new Date();
owner = this.resolveOwner(owner);
log.info("Refreshing pools for owner: {}", owner);
Map<String, Subscription> subscriptionMap = new HashMap<>();
Map<String, ProductData> productMap = new HashMap<>();
Map<String, ContentData> contentMap = new HashMap<>();
// Resolve all our subscriptions, products and content to ensure we don't have bad or
// duplicate inbound data
log.debug("Fetching subscriptions from adapter...");
List<Subscription> subscriptions = subAdapter.getSubscriptions(owner);
log.debug("Done. Processing subscriptions...");
for (Subscription subscription : subscriptions) {
if (subscription == null) {
continue;
}
if (subscription.getId() == null) {
log.error("subscription does not contain a mappable ID: {}", subscription);
throw new IllegalStateException("subscription does not contain a mappable ID: " + subscription);
}
Subscription existingSub = subscriptionMap.get(subscription.getId());
if (existingSub != null && !existingSub.equals(subscription)) {
log.warn("Multiple versions of the same subscription received during refresh; " + "discarding duplicate: {} => {}, {}", subscription.getId(), existingSub, subscription);
continue;
}
subscriptionMap.put(subscription.getId(), subscription);
List<ProductData> products = new LinkedList<>();
products.add(subscription.getProduct());
products.add(subscription.getDerivedProduct());
products.addAll(subscription.getProvidedProducts());
products.addAll(subscription.getDerivedProvidedProducts());
for (ProductData product : products) {
if (product == null) {
// Silently ignore null products and move forward.
continue;
}
if (product.getId() == null) {
log.error("product does not contain a mappable Red Hat ID: {}", product);
throw new IllegalStateException("product does not contain a mappable Red Hat ID: " + product);
}
// Product is coming from an upstream source; lock it so only upstream can make
// further changes to it.
product.setLocked(true);
ProductData existingProduct = productMap.get(product.getId());
if (existingProduct != null && !existingProduct.equals(product)) {
log.warn("Multiple versions of the same product received during refresh; " + "discarding duplicate: {} => {}, {}", product.getId(), existingProduct, product);
} else {
productMap.put(product.getId(), product);
Collection<ProductContentData> pcdCollection = product.getProductContent();
if (pcdCollection != null) {
for (ProductContentData pcd : pcdCollection) {
if (pcd == null) {
log.error("product contains a null product-content mapping: {}", product);
throw new IllegalStateException("product contains a null product-content mapping: " + product);
}
ContentData content = pcd.getContent();
// Check that the content reference is minimally populated; the import below
// handles the rest of the population validation for us.
if (content == null || content.getId() == null) {
log.error("product contains a null or incomplete product-content mapping: {}", product);
throw new IllegalStateException("product contains a null or incomplete " + "product-content mapping: " + product);
}
// We need to lock the incoming content here, but doing so will affect
// the equality comparison for products. We'll correct them later.
ContentData existingContent = contentMap.get(content.getId());
if (existingContent != null && !existingContent.equals(content)) {
log.warn("Multiple versions of the same content received during refresh; " + "discarding duplicate: {} => {}, {}", content.getId(), existingContent, content);
} else {
contentMap.put(content.getId(), content);
}
}
}
}
}
}
// Persist content changes
log.debug("Importing {} content...", contentMap.size());
// TODO: Find a more efficient way of doing this, preferably within this method
for (ContentData cdata : contentMap.values()) {
cdata.setLocked(true);
}
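// Import the locked content; the result maps each Red Hat content ID to its
// managed Content entity (see ContentManager.importContent below).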
Map<String, Content> importedContent = this.contentManager.importContent(owner, contentMap, productMap.keySet()).getImportedEntities();
log.debug("Importing {} product(s)...", productMap.size());
ImportResult<Product> importResult = this.productManager.importProducts(owner, productMap, importedContent);
Map<String, Product> importedProducts = importResult.getImportedEntities();
Map<String, Product> updatedProducts = importResult.getUpdatedEntities();
log.debug("Refreshing {} pool(s)...", subscriptionMap.size());
Iterator<Map.Entry<String, Subscription>> subsIterator = subscriptionMap.entrySet().iterator();
while (subsIterator.hasNext()) {
Map.Entry<String, Subscription> entry = subsIterator.next();
Subscription sub = entry.getValue();
if (now.after(sub.getEndDate())) {
log.info("Skipping expired subscription: {}", sub);
subsIterator.remove();
continue;
}
log.debug("Processing subscription: {}", sub);
Pool pool = this.convertToMasterPoolImpl(sub, owner, importedProducts);
this.refreshPoolsForMasterPool(pool, false, lazy, updatedProducts);
}
// delete pools whose subscription disappeared:
log.debug("Deleting pools for absent subscriptions...");
List<Pool> poolsToDelete = new ArrayList<>();
for (Pool pool : poolCurator.getPoolsFromBadSubs(owner, subscriptionMap.keySet())) {
if (this.isManaged(pool)) {
poolsToDelete.add(pool);
}
}
deletePools(poolsToDelete);
// TODO: break this call into smaller pieces. There may be lots of floating pools
log.debug("Updating floating pools...");
List<Pool> floatingPools = poolCurator.getOwnersFloatingPools(owner);
updateFloatingPools(floatingPools, lazy, updatedProducts);
log.info("Refresh pools for owner: {} completed in: {}ms", owner.getKey(), System.currentTimeMillis() - now.getTime());
}
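As a usage illustration, a minimal caller might look like the sketch below. RefreshPoolsExample and its wiring are hypothetical, not Candlepin's actual entry point, and since refreshPoolsWithRegeneration is package-private the caller would need to live in the same package. The lazy flag is assumed to defer entitlement certificate regeneration to a later step rather than regenerating inline.

// Hypothetical caller; imports of Candlepin types are omitted, mirroring the
// listing above.
public class RefreshPoolsExample {
    private final CandlepinPoolManager poolManager;
    private final SubscriptionServiceAdapter subAdapter;

    public RefreshPoolsExample(CandlepinPoolManager poolManager, SubscriptionServiceAdapter subAdapter) {
        this.poolManager = poolManager;
        this.subAdapter = subAdapter;
    }

    public void refresh(Owner owner) {
        // lazy = true is assumed to defer certificate regeneration rather than
        // regenerating inline during the refresh.
        poolManager.refreshPoolsWithRegeneration(subAdapter, owner, true);
    }
}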
Use of org.candlepin.util.Traceable in project candlepin by candlepin.
From the class CandlepinPoolManager, the method revokeEntitlements.
/**
* Revokes the given set of entitlements.
*
* @param entsToRevoke entitlements to revoke
* @param alreadyDeletedPools pools to skip deletion as they have already been deleted
* @param regenCertsAndStatuses if this revocation should also trigger regeneration of certificates
* and recomputation of statuses. For performance reasons some callers might
* choose to set this to false.
*/
@Transactional
@Traceable
public void revokeEntitlements(List<Entitlement> entsToRevoke, Set<String> alreadyDeletedPools, boolean regenCertsAndStatuses) {
if (CollectionUtils.isEmpty(entsToRevoke)) {
return;
}
log.debug("Starting batch revoke of {} entitlements", entsToRevoke.size());
if (log.isTraceEnabled()) {
log.trace("Entitlements IDs: {}", getEntIds(entsToRevoke));
}
Set<Pool> poolsToDelete = this.poolCurator.listBySourceEntitlements(entsToRevoke);
log.debug("Found {} additional pools to delete from source entitlements", poolsToDelete.size());
if (log.isTraceEnabled()) {
log.trace("Additional pool IDs: {}", getPoolIds(poolsToDelete));
}
List<Pool> poolsToLock = new ArrayList<>();
poolsToLock.addAll(poolsToDelete);
for (Entitlement ent : entsToRevoke) {
poolsToLock.add(ent.getPool());
// If the entitlement came from a development pool, we need to delete the
// associated pool as well.
if (ent.getPool() != null && ent.getPool().isDevelopmentPool()) {
poolsToDelete.add(ent.getPool());
}
}
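// Lock all affected pools up front so quantities can be adjusted and pools
// deleted without concurrent modification.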
poolCurator.lockAndLoad(poolsToLock);
log.info("Batch revoking {} entitlements", entsToRevoke.size());
entsToRevoke = new ArrayList<>(entsToRevoke);
for (Pool pool : poolsToDelete) {
for (Entitlement ent : pool.getEntitlements()) {
ent.setDeletedFromPool(true);
entsToRevoke.add(ent);
}
}
log.debug("Adjusting consumed quantities on pools");
List<Pool> poolsToSave = new ArrayList<>();
Set<String> entIdsToRevoke = new HashSet<>();
for (Entitlement ent : entsToRevoke) {
// TODO: Should we error out on null or ID-less entitlements here,
// or just continue silently ignoring them?
if (ent == null || ent.getId() == null) {
continue;
}
// Collect the entitlement IDs to revoke seeing as we are iterating over them anyway.
entIdsToRevoke.add(ent.getId());
// We need to trigger lazy load of provided products
// to have access to those products later in this method.
Pool pool = ent.getPool();
int entQuantity = ent.getQuantity() != null ? ent.getQuantity() : 0;
pool.setConsumed(pool.getConsumed() - entQuantity);
Consumer consumer = ent.getConsumer();
ConsumerType ctype = this.consumerTypeCurator.getConsumerType(consumer);
if (ctype != null) {
if (ctype.isManifest()) {
pool.setExported(pool.getExported() - entQuantity);
} else if (ctype.isType(ConsumerTypeEnum.SHARE)) {
pool.setShared(pool.getShared() - entQuantity);
}
}
consumer.setEntitlementCount(consumer.getEntitlementCount() - entQuantity);
consumerCurator.update(consumer);
poolsToSave.add(pool);
}
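// Bulk-update the adjusted pools; the boolean flags presumably control flush
// and eviction behavior, matching the saveAll calls elsewhere in this class.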
poolCurator.updateAll(poolsToSave, false, false);
/*
* Before deleting the entitlements, we need to find out if there are any
* modifier entitlements that need to have their certificates regenerated
*/
if (regenCertsAndStatuses) {
log.debug("Marking dependent entitlements as dirty...");
int update = this.entitlementCurator.markDependentEntitlementsDirty(entIdsToRevoke);
log.debug("{} dependent entitlements marked dirty.", update);
}
log.info("Starting batch delete of pools");
poolCurator.batchDelete(poolsToDelete, alreadyDeletedPools);
log.info("Starting batch delete of entitlements");
entitlementCurator.batchDelete(entsToRevoke);
log.info("Starting delete flush");
entitlementCurator.flush();
log.info("All deletes flushed successfully");
Map<Consumer, List<Entitlement>> consumerSortedEntitlements = entitlementCurator.getDistinctConsumers(entsToRevoke);
filterAndUpdateStackingEntitlements(consumerSortedEntitlements, alreadyDeletedPools);
// post unbind actions
for (Entitlement ent : entsToRevoke) {
enforcer.postUnbind(ent.getConsumer(), this, ent);
}
if (!regenCertsAndStatuses) {
log.info("Regeneration and status computation was not requested finishing batch revoke");
sendDeletedEvents(entsToRevoke);
return;
}
log.info("Recomputing status for {} consumers.", consumerSortedEntitlements.size());
int i = 1;
for (Consumer consumer : consumerSortedEntitlements.keySet()) {
if (i++ % 1000 == 0) {
consumerCurator.flush();
}
complianceRules.getStatus(consumer);
}
consumerCurator.flush();
log.info("All statuses recomputed.");
sendDeletedEvents(entsToRevoke);
}
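A minimal sketch of a batch revoke call, matching the documented signature; RevokeExample and the variable names are illustrative.

import java.util.HashSet;
import java.util.List;

// Hypothetical caller; Candlepin type imports omitted as in the listing above.
public class RevokeExample {
    public void revokeAll(CandlepinPoolManager poolManager, List<Entitlement> toRevoke) {
        // An empty set signals that the caller has not deleted any pools yet;
        // true requests certificate regeneration and status recomputation.
        poolManager.revokeEntitlements(toRevoke, new HashSet<String>(), true);
    }
}

Passing false for regenCertsAndStatuses skips the status recomputation loop above, which the javadoc notes is a deliberate performance trade-off for some callers.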
Use of org.candlepin.util.Traceable in project candlepin by candlepin.
From the class CandlepinPoolManager, the method deletePools.
@Override
@Transactional
@Traceable
@SuppressWarnings("checkstyle:methodlength")
public void deletePools(Collection<Pool> pools, Collection<String> alreadyDeletedPoolIds) {
if (pools == null || pools.isEmpty()) {
return;
}
log.info("Attempting to delete {} pools...", pools.size());
// TODO: Remove the need for this; the recursive calls into this method make
// these deletions more complicated than they need to be and result in running
// slow calculations multiple times.
if (alreadyDeletedPoolIds == null) {
alreadyDeletedPoolIds = new HashSet<>();
}
Set<String> poolIds = new HashSet<>();
Set<String> entitlementIds = new HashSet<>();
Owner owner = null;
// Convert pools to pool IDs.
log.info("Fetching related pools and entitlements...");
for (Pool pool : pools) {
if (owner == null) {
owner = pool.getOwner();
}
poolIds.add(pool.getId());
}
// Fetch pools which are derived from the pools we're going to delete...
poolIds.addAll(this.poolCurator.getDerivedPoolIdsForPools(poolIds));
// Fetch related pools and entitlements (recursively)
Collection<String> pids = poolIds;
int cachedSize;
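// Expand the related ID sets to a fixed point: repeat until a pass discovers
// no new pools.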
do {
// Fetch entitlement IDs for our set of pools
Collection<String> eids = this.poolCurator.getEntitlementIdsForPools(pids);
// Fetch pools which are derived from these entitlements...
pids = this.poolCurator.getPoolIdsForSourceEntitlements(eids);
// Fetch stack derived pools which will be unentitled when we revoke entitlements
// Impl note: This may occasionally miss stack derived pools in cases where our
// entitlement count exceeds the IN block limitations. In those cases, we'll end
// up doing a recursive call into this method, which sucks, but will still work.
pids.addAll(this.poolCurator.getUnentitledStackDerivedPoolIds(eids));
// Fetch pools which are derived from the pools we're going to delete...
pids.addAll(this.poolCurator.getDerivedPoolIdsForPools(pids));
// Add the new entitlement and pool IDs to our list of things to delete
cachedSize = poolIds.size();
entitlementIds.addAll(eids);
poolIds.addAll(pids);
} while (poolIds.size() != cachedSize);
// TODO: Remove this and stop recursively calling into this method.
if (alreadyDeletedPoolIds != null) {
poolIds.removeAll(alreadyDeletedPoolIds);
}
// Lock pools we're going to delete (also, fetch them for event generation/slow deletes)
pools = this.poolCurator.lockAndLoadByIds(poolIds);
if (!pools.isEmpty()) {
log.info("Locked {} pools for deletion...", pools.size());
// Impl note:
// There is a fair bit of duplicated work between the actions below this block and
// methods like revokeEntitlements. However, the decision was made to decouple these
// methods explicitly to avoid situations such as fetching collections of pools, getting
// entitlements from them (a slow process in itself) and then passing it off to another
// standalone method which repeats the process of fetching pools and related entitlements.
//
// More work can be done in revokeEntitlements to optimize that method and maybe make it
// slightly more generic so that this work can be offloaded to it again. Though, at the time
// of writing, that's no small undertaking. Even changing this method has far-reaching
// consequences when trying to remove direct uses of entities as far as interoperability is
// concerned. Going forward we need to be more aware of the amount of duplication we're
// adding to our code when writing standalone/generic utility methods and linking them
// together, and perhaps take steps to avoid getting into situations like these two methods.
// Fetch the list of pools which are related to the entitlements but are *not* being
// deleted. We'll need to update the quantities on these.
Collection<String> affectedPoolIds = this.poolCurator.getPoolIdsForEntitlements(entitlementIds);
affectedPoolIds.removeAll(poolIds);
// Fetch entitlements (uggh).
// TODO: Stop doing this. Update the bits below to not use the entities directly and
// do the updates via queries.
Collection<Entitlement> entitlements = !entitlementIds.isEmpty() ? this.entitlementCurator.listAllByIds(entitlementIds).list() : Collections.<Entitlement>emptySet();
// Mark remaining dependent entitlements dirty for this consumer
this.entitlementCurator.markDependentEntitlementsDirty(entitlementIds);
// Unlink the pools and entitlements we're about to delete so we don't error out while
// trying to delete entitlements.
this.poolCurator.clearPoolSourceEntitlementRefs(poolIds);
// Revoke/delete entitlements
if (!entitlements.isEmpty()) {
log.info("Revoking {} entitlements...", entitlements.size());
this.entitlementCurator.batchDelete(entitlements);
this.entitlementCurator.flush();
log.info("Entitlements successfully revoked");
} else {
log.info("Skipping entitlement revocation; no entitlements to revoke");
}
// Delete pools
log.info("Deleting {} pools...", pools.size());
this.poolCurator.batchDelete(pools, alreadyDeletedPoolIds);
this.poolCurator.flush();
log.info("Pools successfully deleted");
if (!entitlements.isEmpty()) {
// Update entitlement counts on affected, non-deleted pools
log.info("Updating entitlement counts on remaining, affected pools...");
Map<Consumer, List<Entitlement>> consumerStackedEnts = new HashMap<>();
List<Pool> poolsToSave = new LinkedList<>();
Set<String> stackIds = new HashSet<>();
for (Entitlement entitlement : entitlements) {
// Since we're sifting through these already, let's also sort them into consumer lists
// for some of the other methods we'll be calling later
Consumer consumer = entitlement.getConsumer();
Pool pool = entitlement.getPool();
List<Entitlement> stackedEntitlements = consumerStackedEnts.get(consumer);
if (stackedEntitlements == null) {
stackedEntitlements = new LinkedList<>();
consumerStackedEnts.put(consumer, stackedEntitlements);
}
if (!"true".equals(pool.getAttributeValue(Pool.Attributes.DERIVED_POOL)) && pool.hasProductAttribute(Product.Attributes.STACKING_ID)) {
stackedEntitlements.add(entitlement);
stackIds.add(entitlement.getPool().getStackId());
}
// Update quantities if the entitlement quantity is non-zero
int quantity = entitlement.getQuantity() != null ? entitlement.getQuantity() : 0;
if (quantity != 0) {
// Update the pool quantities if we didn't delete it
if (affectedPoolIds.contains(pool.getId())) {
pool.setConsumed(pool.getConsumed() - quantity);
poolsToSave.add(pool);
}
// Update entitlement counts for affected consumers...
consumer.setEntitlementCount(consumer.getEntitlementCount() - quantity);
// Set the number exported if we're working with a manifest distributor
ConsumerType ctype = this.consumerTypeCurator.getConsumerType(consumer);
if (ctype != null && ctype.isManifest()) {
pool.setExported(pool.getExported() - quantity);
}
}
}
this.poolCurator.updateAll(poolsToSave, false, false);
this.consumerCurator.updateAll(consumerStackedEnts.keySet(), false, false);
this.consumerCurator.flush();
log.info("Entitlement counts successfully updated for {} pools and {} consumers", poolsToSave.size(), consumerStackedEnts.size());
// Update stacked entitlements for affected consumers(???)
if (!stackIds.isEmpty()) {
// Get consumer + pool tuples for stack ids
Map<String, Set<String>> consumerStackDerivedPoolIds = this.poolCurator.getConsumerStackDerivedPoolIdMap(stackIds);
if (!consumerStackDerivedPoolIds.isEmpty()) {
log.info("Updating stacked entitlements for {} consumers...", consumerStackDerivedPoolIds.size());
for (Consumer consumer : consumerStackedEnts.keySet()) {
Set<String> subPoolIds = consumerStackDerivedPoolIds.get(consumer.getId());
if (subPoolIds != null && !subPoolIds.isEmpty()) {
// Resolve pool IDs...
Collection<Pool> subPools = this.poolCurator.listAllByIds(subPoolIds).list();
// Invoke the rules engine to update the affected pools
if (subPools != null && !subPools.isEmpty()) {
log.debug("Updating {} stacking pools for consumer: {}", subPools.size(), consumer);
this.poolRules.updatePoolsFromStack(consumer, subPools, alreadyDeletedPoolIds, true);
}
}
}
}
}
this.consumerCurator.flush();
// Hydrate remaining consumer pools so we can skip some extra work during serialization
Set<Pool> poolsToHydrate = new HashSet<>();
for (Consumer consumer : consumerStackedEnts.keySet()) {
for (Entitlement entitlement : consumer.getEntitlements()) {
poolsToHydrate.add(entitlement.getPool());
}
}
this.productCurator.hydratePoolProvidedProducts(poolsToHydrate);
// Fire post-unbind events for revoked entitlements
log.info("Firing post-unbind events for {} entitlements...", entitlements.size());
for (Entitlement entitlement : entitlements) {
this.enforcer.postUnbind(entitlement.getConsumer(), this, entitlement);
}
// Recalculate status for affected consumers
log.info("Recomputing status for {} consumers", consumerStackedEnts.size());
int i = 0;
for (Consumer consumer : consumerStackedEnts.keySet()) {
this.complianceRules.getStatus(consumer);
if (++i % 1000 == 0) {
this.consumerCurator.flush();
}
}
this.consumerCurator.flush();
log.info("All statuses recomputed");
}
// Impl note:
// We don't need to fire entitlement revocation events, since they're all being revoked as
// a consequence of the pools being deleted.
// Fire pool deletion events
// This part hurts so much. Because we output the whole entity, we have to fetch the bloody
// things before we delete them.
log.info("Firing pool deletion events for {} pools...", pools.size());
for (Pool pool : pools) {
this.sink.queueEvent(this.eventFactory.poolDeleted(pool));
}
} else {
log.info("Skipping pool deletion; no pools to delete");
}
}
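A hedged sketch of invoking the batch delete; DeletePoolsExample is hypothetical, and transaction handling is assumed to come from the @Transactional container managing CandlepinPoolManager.

import java.util.Collection;

// Hypothetical caller; Candlepin type imports omitted as in the listing above.
public class DeletePoolsExample {
    public void deleteAll(CandlepinPoolManager poolManager, Collection<Pool> pools) {
        // null is permitted here: the method allocates its own tracking set
        // for already-deleted pool IDs (see the null check at the top).
        poolManager.deletePools(pools, null);
    }
}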
Use of org.candlepin.util.Traceable in project candlepin by candlepin.
From the class ContentManager, the method importContent.
/**
* Creates or updates content from the given content DTOs, omitting product updates for the
* provided Red Hat product IDs.
* <p></p>
* The content DTOs provided in the given map should be mapped by the content's Red Hat ID. If
* the mappings are incorrect or inconsistent, the result of this method is undefined.
*
* @param owner
* The owner for which to import the given content
*
* @param contentData
* A mapping of Red Hat content ID to content DTOs to import
*
* @param importedProductIds
* A set of Red Hat product IDs specifying products which are being imported and should not be
* updated as part of this import operation
*
* @return
* A mapping of Red Hat content ID to content entities representing the imported content
*/
@SuppressWarnings("checkstyle:methodlength")
@Transactional
@Traceable
public ImportResult<Content> importContent(@TraceableParam("owner") Owner owner, Map<String, ContentData> contentData, Set<String> importedProductIds) {
if (owner == null) {
throw new IllegalArgumentException("owner is null");
}
ImportResult<Content> importResult = new ImportResult<>();
if (contentData == null || contentData.isEmpty()) {
// Nothing to import
return importResult;
}
Map<String, Content> skippedContent = importResult.getSkippedEntities();
Map<String, Content> createdContent = importResult.getCreatedEntities();
Map<String, Content> updatedContent = importResult.getUpdatedEntities();
Map<String, Integer> contentVersions = new HashMap<>();
Map<String, Content> sourceContent = new HashMap<>();
Map<String, List<Content>> existingVersions = new HashMap<>();
List<OwnerContent> ownerContentBuffer = new LinkedList<>();
// - Divide imported products into sets of updates and creates
log.debug("Fetching existing content for update...");
for (Content content : this.ownerContentCurator.getContentByIds(owner, contentData.keySet())) {
ContentData update = contentData.get(content.getId());
if (!this.isChangedBy(content, update)) {
// This content won't be changing, so we'll just pretend it's not being imported at all
skippedContent.put(content.getId(), content);
continue;
}
// Content is coming from an upstream source; lock it so only upstream can make
// further changes to it. If we ever use this method for anything other than
// imports, we'll need to stop doing this.
sourceContent.put(content.getId(), content);
content = this.applyContentChanges((Content) content.clone(), update);
updatedContent.put(content.getId(), content);
contentVersions.put(content.getId(), content.getEntityVersion());
}
log.debug("Validating new content...");
for (ContentData update : contentData.values()) {
if (!skippedContent.containsKey(update.getId()) && !updatedContent.containsKey(update.getId())) {
// Ensure content is minimally populated
if (update.getId() == null || update.getType() == null || update.getLabel() == null || update.getName() == null || update.getVendor() == null) {
throw new IllegalStateException("Content data is incomplete: " + update);
}
Content content = this.applyContentChanges(new Content(update.getId()), update);
createdContent.put(content.getId(), content);
contentVersions.put(content.getId(), content.getEntityVersion());
}
}
log.debug("Checking for existing content versions...");
for (Content alt : this.ownerContentCurator.getContentByVersions(owner, contentVersions)) {
List<Content> alternates = existingVersions.get(alt.getId());
if (alternates == null) {
alternates = new LinkedList<>();
existingVersions.put(alt.getId(), alternates);
}
alternates.add(alt);
}
contentVersions.clear();
contentVersions = null;
// We're about to start modifying the maps, so we need to clone the created set before we
// start adding the update forks to it.
Map<String, Content> stagedEntities = new HashMap<>(createdContent);
// Process the created group...
// Check our created set for existing versions:
// - If there's an existing version, we'll remove the staged entity from the creation
// set, and stage an owner-content mapping for the existing version
// - Otherwise, we'll stage the new entity for persistence by leaving it in the created
// set, and stage an owner-content mapping to the new entity
Iterator<Content> iterator = stagedEntities.values().iterator();
createdContentLoop: while (iterator.hasNext()) {
Content created = iterator.next();
List<Content> alternates = existingVersions.get(created.getId());
if (alternates != null) {
for (Content alt : alternates) {
if (created.equals(alt)) {
ownerContentBuffer.add(new OwnerContent(owner, alt));
createdContent.put(alt.getId(), alt);
iterator.remove();
continue createdContentLoop;
}
}
}
ownerContentBuffer.add(new OwnerContent(owner, created));
}
// Process the updated group...
// Check our updated set for existing versions:
// - If there's an existing version, we'll point the update at the existing entity
// - Otherwise, we need to stage the updated entity for persistence
updatedContentLoop: for (Map.Entry<String, Content> entry : updatedContent.entrySet()) {
Content updated = entry.getValue();
List<Content> alternates = existingVersions.get(updated.getId());
if (alternates != null) {
for (Content alt : alternates) {
if (!updated.getUuid().equals(alt.getUuid()) && updated.equals(alt)) {
updated = alt;
entry.setValue(alt);
continue updatedContentLoop;
}
}
}
// We need to stage the updated entity for persistence. Clear its UUID so it
// will be persisted as a new entity alongside the staged created content.
updated.setUuid(null);
stagedEntities.put(updated.getId(), updated);
}
// Persist our staged entities
// We probably don't want to evict the content yet, as they'll appear as unmanaged if
// they're used later. However, the join objects can be evicted safely since they're only
// really used here.
log.debug("Persisting content changes...");
this.contentCurator.saveAll(stagedEntities.values(), true, false);
this.ownerContentCurator.saveAll(ownerContentBuffer, true, true);
// Fetch collection of products affected by this import that aren't being imported themselves
log.debug("Updating non-imported, affected products...");
List<Product> affectedProducts = this.productCurator.getProductsByContent(owner, sourceContent.keySet(), importedProductIds).list();
if (affectedProducts != null && !affectedProducts.isEmpty()) {
// Get the collection of content those products use
Map<String, Content> affectedProductsContent = new HashMap<>();
for (Content content : this.contentCurator.getContentByProducts(affectedProducts)) {
affectedProductsContent.put(content.getId(), content);
}
// Update the content map so it references the updated content
affectedProductsContent.putAll(updatedContent);
Map<String, ProductData> affectedProductData = new HashMap<>();
Map<String, ContentData> contentDTOCache = new HashMap<>();
for (Product product : affectedProducts) {
ProductData pdto = product.toDTO();
for (ProductContent pcdata : product.getProductContent()) {
Content content = pcdata.getContent();
Content updated = updatedContent.get(content.getId());
if (updated != null) {
ContentData cdto = contentDTOCache.get(content.getId());
if (cdto == null) {
cdto = content.toDTO();
contentDTOCache.put(cdto.getId(), cdto);
}
pdto.addContent(cdto, pcdata.isEnabled());
}
}
affectedProductData.put(pdto.getId(), pdto);
}
// Perform a micro-import for these products using the content map we just built
this.productManager.importProducts(owner, affectedProductData, affectedProductsContent);
}
// Perform bulk reference update
Map<String, String> contentUuidMap = new HashMap<>();
for (Content update : updatedContent.values()) {
Content source = sourceContent.get(update.getId());
contentUuidMap.put(source.getUuid(), update.getUuid());
}
this.ownerContentCurator.updateOwnerContentReferences(owner, contentUuidMap);
// Return
return importResult;
}
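A sketch of calling the content import directly, assuming the DTO map is keyed by Red Hat content ID as the javadoc requires; ImportContentExample is illustrative.

import java.util.HashSet;
import java.util.Map;

// Hypothetical caller; Candlepin type imports omitted as in the listing above.
public class ImportContentExample {
    public Map<String, Content> importAll(ContentManager contentManager, Owner owner,
        Map<String, ContentData> contentData) {
        // With no products being imported alongside this content, pass an empty
        // product-ID set so every affected product gets micro-imported.
        return contentManager.importContent(owner, contentData, new HashSet<String>())
            .getImportedEntities();
    }
}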
Use of org.candlepin.util.Traceable in project candlepin by candlepin.
From the class ProductManager, the method importProducts.
/**
* Creates or updates products from the given products DTOs, using the provided content for
* content lookup and resolution.
* <p></p>
* The product DTOs provided in the given map should be mapped by the product's Red Hat ID. If
* the mappings are incorrect or inconsistent, the result of this method is undefined.
*
* @param owner
* The owner for which to import the given product
*
* @param productData
* A mapping of Red Hat product ID to product DTOs to import
*
* @param importedContent
A mapping of Red Hat content ID to content instances used to look up and resolve content
* references on the provided product DTOs.
*
* @return
A mapping of Red Hat product ID to product entities representing the imported products
*/
@Transactional
@Traceable
public ImportResult<Product> importProducts(@TraceableParam("owner") Owner owner, Map<String, ProductData> productData, Map<String, Content> importedContent) {
if (owner == null) {
throw new IllegalArgumentException("owner is null");
}
ImportResult<Product> importResult = new ImportResult<>();
if (productData == null || productData.isEmpty()) {
// Nothing to import
return importResult;
}
Map<String, Product> skippedProducts = importResult.getSkippedEntities();
Map<String, Product> createdProducts = importResult.getCreatedEntities();
Map<String, Product> updatedProducts = importResult.getUpdatedEntities();
Map<String, Integer> productVersions = new HashMap<>();
Map<String, Product> sourceProducts = new HashMap<>();
Map<String, List<Product>> existingVersions = new HashMap<>();
List<OwnerProduct> ownerProductBuffer = new LinkedList<>();
// - Divide imported products into sets of updates and creates
log.debug("Fetching existing products for update...");
for (Product product : this.ownerProductCurator.getProductsByIds(owner, productData.keySet())) {
ProductData update = productData.get(product.getId());
if (!this.isChangedBy(product, update)) {
// This product won't be changing, so we'll just pretend it's not being imported at all
skippedProducts.put(product.getId(), product);
continue;
}
sourceProducts.put(product.getId(), product);
product = this.applyProductChanges((Product) product.clone(), update, importedContent);
updatedProducts.put(product.getId(), product);
productVersions.put(product.getId(), product.getEntityVersion());
}
log.debug("Validating new products...");
for (ProductData update : productData.values()) {
if (!skippedProducts.containsKey(update.getId()) && !updatedProducts.containsKey(update.getId())) {
// Ensure the product is minimally populated
if (update.getId() == null || update.getName() == null) {
throw new IllegalStateException("Product data is incomplete: " + update);
}
Product product = new Product(update.getId(), update.getName());
// TODO: Remove this shim and stop using DTOs in this class
product = this.applyProductChanges(product, update, importedContent);
createdProducts.put(product.getId(), product);
productVersions.put(product.getId(), product.getEntityVersion());
}
}
log.debug("Checking for existing product versions...");
for (Product alt : this.ownerProductCurator.getProductsByVersions(owner, productVersions)) {
List<Product> alternates = existingVersions.get(alt.getId());
if (alternates == null) {
alternates = new LinkedList<>();
existingVersions.put(alt.getId(), alternates);
}
alternates.add(alt);
}
productVersions.clear();
productVersions = null;
// We're about to start modifying the maps, so we need to clone the created set before we
// start adding the update forks to it.
Map<String, Product> stagedEntities = new HashMap<>(createdProducts);
// Process the created group...
// Check our created set for existing versions:
// - If there's an existing version, we'll remove the staged entity from the creation
// set, and stage an owner-product mapping for the existing version
// - Otherwise, we'll stage the new entity for persistence by leaving it in the created
// set, and stage an owner-product mapping to the new entity
Iterator<Product> iterator = stagedEntities.values().iterator();
createdProductLoop: while (iterator.hasNext()) {
Product created = iterator.next();
List<Product> alternates = existingVersions.get(created.getId());
if (alternates != null) {
for (Product alt : alternates) {
if (created.equals(alt)) {
ownerProductBuffer.add(new OwnerProduct(owner, alt));
createdProducts.put(alt.getId(), alt);
iterator.remove();
continue createdProductLoop;
}
}
}
ownerProductBuffer.add(new OwnerProduct(owner, created));
}
// Process the updated group...
// Check our updated set for existing versions:
// - If there's an existing version, we'll point the update at the existing entity
// - Otherwise, we need to stage the updated entity for persistence
updatedProductLoop: for (Map.Entry<String, Product> entry : updatedProducts.entrySet()) {
Product updated = entry.getValue();
List<Product> alternates = existingVersions.get(updated.getId());
if (alternates != null) {
for (Product alt : alternates) {
if (updated.equals(alt)) {
updated = alt;
entry.setValue(alt);
continue updatedProductLoop;
}
}
}
// We need to stage the updated entity for persistence. Clear its UUID so it
// will be persisted as a new entity alongside the staged created products.
updated.setUuid(null);
stagedEntities.put(updated.getId(), updated);
}
// Persist our staged entities
// We probably don't want to evict the products yet, as they'll appear as unmanaged if
// they're used later. However, the join objects can be evicted safely since they're only
// really used here.
log.debug("Persisting product changes...");
this.productCurator.saveAll(stagedEntities.values(), true, false);
this.ownerProductCurator.saveAll(ownerProductBuffer, true, true);
// Perform bulk reference update
Map<String, String> productUuidMap = new HashMap<>();
for (Product update : updatedProducts.values()) {
Product source = sourceProducts.get(update.getId());
productUuidMap.put(source.getUuid(), update.getUuid());
}
this.ownerProductCurator.updateOwnerProductReferences(owner, productUuidMap);
// Return
return importResult;
}
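To close, a sketch tying the two importers together in the same order refreshPoolsWithRegeneration uses: content first, then products resolved against the imported content. ImportProductsExample and its wiring are illustrative.

import java.util.Map;

// Hypothetical caller; Candlepin type imports omitted as in the listing above.
public class ImportProductsExample {
    public ImportResult<Product> importAll(ContentManager contentManager, ProductManager productManager,
        Owner owner, Map<String, ProductData> productData, Map<String, ContentData> contentData) {
        // Import content first so product-content references resolve against
        // managed Content entities, mirroring refreshPoolsWithRegeneration.
        Map<String, Content> importedContent = contentManager
            .importContent(owner, contentData, productData.keySet())
            .getImportedEntities();
        return productManager.importProducts(owner, productData, importedContent);
    }
}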