Use of org.candlepin.model.dto.ContentData in project candlepin by candlepin.
The class CandlepinPoolManager, method refreshPoolsWithRegeneration.
/*
* We need to update/regen entitlements in the same transaction we update pools
* so we don't miss anything
*/
@Transactional
@SuppressWarnings("checkstyle:methodlength")
@Traceable
void refreshPoolsWithRegeneration(SubscriptionServiceAdapter subAdapter, @TraceableParam("owner") Owner owner, boolean lazy) {
Date now = new Date();
owner = this.resolveOwner(owner);
log.info("Refreshing pools for owner: {}", owner);
Map<String, Subscription> subscriptionMap = new HashMap<>();
Map<String, ProductData> productMap = new HashMap<>();
Map<String, ContentData> contentMap = new HashMap<>();
// Resolve all our subscriptions, products and content to ensure we don't have bad or
// duplicate inbound data
log.debug("Fetching subscriptions from adapter...");
List<Subscription> subscriptions = subAdapter.getSubscriptions(owner);
log.debug("Done. Processing subscriptions...");
for (Subscription subscription : subscriptions) {
if (subscription == null) {
continue;
}
if (subscription.getId() == null) {
log.error("subscription does not contain a mappable ID: {}", subscription);
throw new IllegalStateException("subscription does not contain a mappable ID: " + subscription);
}
Subscription existingSub = subscriptionMap.get(subscription.getId());
if (existingSub != null && !existingSub.equals(subscription)) {
log.warn("Multiple versions of the same subscription received during refresh; " + "discarding duplicate: {} => {}, {}", subscription.getId(), existingSub, subscription);
continue;
}
subscriptionMap.put(subscription.getId(), subscription);
List<ProductData> products = new LinkedList<>();
products.add(subscription.getProduct());
products.add(subscription.getDerivedProduct());
products.addAll(subscription.getProvidedProducts());
products.addAll(subscription.getDerivedProvidedProducts());
for (ProductData product : products) {
if (product == null) {
// Null entries are expected here (e.g. a subscription without a derived product); skip them and move forward.
continue;
}
if (product.getId() == null) {
log.error("product does not contain a mappable Red Hat ID: {}", product);
throw new IllegalStateException("product does not contain a mappable Red Hat ID: " + product);
}
// Product is coming from an upstream source; lock it so only upstream can make
// further changes to it.
product.setLocked(true);
ProductData existingProduct = productMap.get(product.getId());
if (existingProduct != null && !existingProduct.equals(product)) {
log.warn("Multiple versions of the same product received during refresh; " + "discarding duplicate: {} => {}, {}", product.getId(), existingProduct, product);
} else {
productMap.put(product.getId(), product);
Collection<ProductContentData> pcdCollection = product.getProductContent();
if (pcdCollection != null) {
for (ProductContentData pcd : pcdCollection) {
if (pcd == null) {
log.error("product contains a null product-content mapping: {}", product);
throw new IllegalStateException("product contains a null product-content mapping: " + product);
}
ContentData content = pcd.getContent();
// Quick mapping sanity check; the import step performs the fuller population validation for us.
if (content == null || content.getId() == null) {
log.error("product contains a null or incomplete product-content mapping: {}", product);
throw new IllegalStateException("product contains a null or incomplete " + "product-content mapping: " + product);
}
// We need to lock the incoming content here, but doing so will affect
// the equality comparison for products. We'll correct them later.
ContentData existingContent = contentMap.get(content.getId());
if (existingContent != null && !existingContent.equals(content)) {
log.warn("Multiple versions of the same content received during refresh; " + "discarding duplicate: {} => {}, {}", content.getId(), existingContent, content);
} else {
contentMap.put(content.getId(), content);
}
}
}
}
}
}
// Persist content changes
log.debug("Importing {} content...", contentMap.size());
// TODO: Find a more efficient way of doing this, preferably within this method
for (ContentData cdata : contentMap.values()) {
cdata.setLocked(true);
}
Map<String, Content> importedContent = this.contentManager.importContent(owner, contentMap, productMap.keySet()).getImportedEntities();
log.debug("Importing {} product(s)...", productMap.size());
ImportResult<Product> importResult = this.productManager.importProducts(owner, productMap, importedContent);
Map<String, Product> importedProducts = importResult.getImportedEntities();
Map<String, Product> updatedProducts = importResult.getUpdatedEntities();
log.debug("Refreshing {} pool(s)...", subscriptionMap.size());
Iterator<Map.Entry<String, Subscription>> subsIterator = subscriptionMap.entrySet().iterator();
while (subsIterator.hasNext()) {
Map.Entry<String, Subscription> entry = subsIterator.next();
Subscription sub = entry.getValue();
if (now.after(sub.getEndDate())) {
log.info("Skipping expired subscription: {}", sub);
subsIterator.remove();
continue;
}
log.debug("Processing subscription: {}", sub);
Pool pool = this.convertToMasterPoolImpl(sub, owner, importedProducts);
this.refreshPoolsForMasterPool(pool, false, lazy, updatedProducts);
}
// delete pools whose subscription disappeared:
log.debug("Deleting pools for absent subscriptions...");
List<Pool> poolsToDelete = new ArrayList<>();
for (Pool pool : poolCurator.getPoolsFromBadSubs(owner, subscriptionMap.keySet())) {
if (this.isManaged(pool)) {
poolsToDelete.add(pool);
}
}
deletePools(poolsToDelete);
// TODO: break this call into smaller pieces. There may be lots of floating pools
log.debug("Updating floating pools...");
List<Pool> floatingPools = poolCurator.getOwnersFloatingPools(owner);
updateFloatingPools(floatingPools, lazy, updatedProducts);
log.info("Refresh pools for owner: {} completed in: {}ms", owner.getKey(), System.currentTimeMillis() - now.getTime());
}
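The collection pass above applies the same first-wins rule to subscriptions, products, and content alike: keep the first DTO seen for a given ID, and warn about and discard any later copy that disagrees with it. A minimal, self-contained sketch of that rule, using a hypothetical generic helper rather than Candlepin's types:

import java.util.HashMap;
import java.util.Map;

final class FirstWinsMap {

    // Keeps the first value mapped to each ID; a conflicting later copy is
    // reported and dropped, mirroring the duplicate handling in the refresh loop.
    static <V> void put(Map<String, V> map, String id, V incoming) {
        V existing = map.get(id);

        if (existing != null && !existing.equals(incoming)) {
            System.err.printf("discarding duplicate for %s: %s, %s%n", id, existing, incoming);
            return;
        }

        map.put(id, incoming);
    }
}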
Use of org.candlepin.model.dto.ContentData in project candlepin by candlepin.
The class ProductDTOTranslatorTest, method verifyOutput.
@Override
protected void verifyOutput(ProductDTO source, ProductData dto, boolean childrenGenerated) {
if (source != null) {
assertEquals(source.getUuid(), dto.getUuid());
assertEquals(source.getId(), dto.getId());
assertEquals(source.getName(), dto.getName());
assertEquals(source.getMultiplier(), dto.getMultiplier());
assertEquals(source.getAttributes(), dto.getAttributes());
assertEquals(source.getDependentProductIds(), dto.getDependentProductIds());
assertNotNull(dto.getProductContent());
if (childrenGenerated) {
for (ProductContentDTO pcdto : source.getProductContent()) {
for (ProductContentData pcdata : dto.getProductContent()) {
ContentDTO cdto = pcdto.getContent();
ContentData cdata = pcdata.getContent();
assertNotNull(cdata);
assertNotNull(cdata.getUuid());
if (cdata.getUuid().equals(cdto.getUuid())) {
assertEquals(pcdto.isEnabled(), pcdata.isEnabled());
// Pass the content off to the ContentTranslatorTest to verify it
this.contentDTOTranslatorTest.verifyOutput(cdto, cdata, true);
}
}
}
} else {
assertTrue(dto.getProductContent().isEmpty());
}
} else {
assertNull(dto);
}
}
Use of org.candlepin.model.dto.ContentData in project candlepin by candlepin.
The class ContentManager, method importContent.
/**
* Creates or updates content from the given content DTOs, omitting product updates for the
* provided Red Hat product IDs.
* <p></p>
* The content DTOs provided in the given map should be mapped by the content's Red Hat ID. If
* the mappings are incorrect or inconsistent, the result of this method is undefined.
*
* @param owner
* The owner for which to import the given content
*
* @param contentData
* A mapping of Red Hat content ID to content DTOs to import
*
* @param importedProductIds
* A set of Red Hat product IDs specifying products which are being imported and should not be
* updated as part of this import operation
*
* @return
* A mapping of Red Hat content ID to content entities representing the imported content
*/
@SuppressWarnings("checkstyle:methodlength")
@Transactional
@Traceable
public ImportResult<Content> importContent(@TraceableParam("owner") Owner owner, Map<String, ContentData> contentData, Set<String> importedProductIds) {
if (owner == null) {
throw new IllegalArgumentException("owner is null");
}
ImportResult<Content> importResult = new ImportResult<>();
if (contentData == null || contentData.isEmpty()) {
// Nothing to import
return importResult;
}
Map<String, Content> skippedContent = importResult.getSkippedEntities();
Map<String, Content> createdContent = importResult.getCreatedEntities();
Map<String, Content> updatedContent = importResult.getUpdatedEntities();
Map<String, Integer> contentVersions = new HashMap<>();
Map<String, Content> sourceContent = new HashMap<>();
Map<String, List<Content>> existingVersions = new HashMap<>();
List<OwnerContent> ownerContentBuffer = new LinkedList<>();
// Divide the imported content into sets of updates and creates
log.debug("Fetching existing content for update...");
for (Content content : this.ownerContentCurator.getContentByIds(owner, contentData.keySet())) {
ContentData update = contentData.get(content.getId());
if (!this.isChangedBy(content, update)) {
// This content won't be changing, so we'll just pretend it's not being imported at all
skippedContent.put(content.getId(), content);
continue;
}
// Content is coming from an upstream source; lock it so only upstream can make
// further changes to it. If we ever use this method for anything other than
// imports, we'll need to stop doing this.
sourceContent.put(content.getId(), content);
content = this.applyContentChanges((Content) content.clone(), update);
updatedContent.put(content.getId(), content);
contentVersions.put(content.getId(), content.getEntityVersion());
}
log.debug("Validating new content...");
for (ContentData update : contentData.values()) {
if (!skippedContent.containsKey(update.getId()) && !updatedContent.containsKey(update.getId())) {
// Ensure content is minimally populated
if (update.getId() == null || update.getType() == null || update.getLabel() == null || update.getName() == null || update.getVendor() == null) {
throw new IllegalStateException("Content data is incomplete: " + update);
}
Content content = this.applyContentChanges(new Content(update.getId()), update);
createdContent.put(content.getId(), content);
contentVersions.put(content.getId(), content.getEntityVersion());
}
}
log.debug("Checking for existing content versions...");
for (Content alt : this.ownerContentCurator.getContentByVersions(owner, contentVersions)) {
List<Content> alternates = existingVersions.get(alt.getId());
if (alternates == null) {
alternates = new LinkedList<>();
existingVersions.put(alt.getId(), alternates);
}
alternates.add(alt);
}
contentVersions.clear();
contentVersions = null;
// We're about to start modifying the maps, so we need to clone the created set before we
// start adding the update forks to it.
Map<String, Content> stagedEntities = new HashMap<>(createdContent);
// Process the created group...
// Check our created set for existing versions:
// - If there's an existing version, we'll remove the staged entity from the creation
// set, and stage an owner-content mapping for the existing version
// - Otherwise, we'll stage the new entity for persistence by leaving it in the created
// set, and stage an owner-content mapping to the new entity
Iterator<Content> iterator = stagedEntities.values().iterator();
createdContentLoop: while (iterator.hasNext()) {
Content created = iterator.next();
List<Content> alternates = existingVersions.get(created.getId());
if (alternates != null) {
for (Content alt : alternates) {
if (created.equals(alt)) {
ownerContentBuffer.add(new OwnerContent(owner, alt));
createdContent.put(alt.getId(), alt);
iterator.remove();
continue createdContentLoop;
}
}
}
ownerContentBuffer.add(new OwnerContent(owner, created));
}
// Process the updated group...
// Check our updated set for existing versions:
// - If an existing version matches the update, we'll convert the update into a
//   reference to that existing version
// - Otherwise, we need to stage the updated entity for persistence
updatedContentLoop: for (Map.Entry<String, Content> entry : updatedContent.entrySet()) {
Content updated = entry.getValue();
List<Content> alternates = existingVersions.get(updated.getId());
if (alternates != null) {
for (Content alt : alternates) {
if (!updated.getUuid().equals(alt.getUuid()) && updated.equals(alt)) {
updated = alt;
entry.setValue(alt);
continue updatedContentLoop;
}
}
}
// No existing version matched; clear the UUID so the update is persisted as a
// new entity version, and stage it alongside the staged creations.
updated.setUuid(null);
stagedEntities.put(updated.getId(), updated);
}
// Persist our staged entities
// We probably don't want to evict the content yet, as they'll appear as unmanaged if
// they're used later. However, the join objects can be evicted safely since they're only
// really used here.
log.debug("Persisting content changes...");
this.contentCurator.saveAll(stagedEntities.values(), true, false);
this.ownerContentCurator.saveAll(ownerContentBuffer, true, true);
// Fetch collection of products affected by this import that aren't being imported themselves
log.debug("Updating non-imported, affected products...");
List<Product> affectedProducts = this.productCurator.getProductsByContent(owner, sourceContent.keySet(), importedProductIds).list();
if (affectedProducts != null && !affectedProducts.isEmpty()) {
// Get the collection of content those products use
Map<String, Content> affectedProductsContent = new HashMap<>();
for (Content content : this.contentCurator.getContentByProducts(affectedProducts)) {
affectedProductsContent.put(content.getId(), content);
}
// Update the content map so it references the updated content
affectedProductsContent.putAll(updatedContent);
Map<String, ProductData> affectedProductData = new HashMap<>();
Map<String, ContentData> contentDTOCache = new HashMap<>();
for (Product product : affectedProducts) {
ProductData pdto = product.toDTO();
for (ProductContent pcdata : product.getProductContent()) {
Content content = pcdata.getContent();
Content updated = updatedContent.get(content.getId());
if (updated != null) {
ContentData cdto = contentDTOCache.get(content.getId());
if (cdto == null) {
cdto = content.toDTO();
contentDTOCache.put(cdto.getId(), cdto);
}
pdto.addContent(cdto, pcdata.isEnabled());
}
}
affectedProductData.put(pdto.getId(), pdto);
}
// Perform a micro-import for these products using the content map we just built
this.productManager.importProducts(owner, affectedProductData, affectedProductsContent);
}
// Perform bulk reference update
Map<String, String> contentUuidMap = new HashMap<>();
for (Content update : updatedContent.values()) {
Content source = sourceContent.get(update.getId());
contentUuidMap.put(source.getUuid(), update.getUuid());
}
this.ownerContentCurator.updateOwnerContentReferences(owner, contentUuidMap);
// Return
return importResult;
}
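A hedged sketch of a caller unpacking importContent's ImportResult; how owner, contentMap, and importedProductIds get populated is assumed to match the refresh code earlier on this page:

// Sketch only: contentManager, owner, contentMap and importedProductIds are
// assumed to be wired up as in refreshPoolsWithRegeneration above.
ImportResult<Content> result = contentManager.importContent(owner, contentMap, importedProductIds);

Map<String, Content> created = result.getCreatedEntities();   // newly persisted content
Map<String, Content> updated = result.getUpdatedEntities();   // changed content, forked to new versions
Map<String, Content> skipped = result.getSkippedEntities();   // inbound DTOs that changed nothing
Map<String, Content> imported = result.getImportedEntities(); // the map the callers above consume

log.debug("Content import: {} created, {} updated, {} skipped", created.size(), updated.size(), skipped.size());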
Use of org.candlepin.model.dto.ContentData in project candlepin by candlepin.
The class ContentManager, method removeContentByUuids.
/**
* Removes all content with the provided UUIDs from the given owner.
*
* @param owner
* The owner from which to remove content
*
* @param contentUuids
* A collection of UUIDs representing the content to remove
*
* @param regenerateEntitlementCerts
* Whether or not changes made to the content should trigger the regeneration of entitlement
* certificates for affected consumers
*
* @throws IllegalArgumentException
* if owner is null
*/
public void removeContentByUuids(Owner owner, Collection<String> contentUuids, boolean regenerateEntitlementCerts) {
if (owner == null) {
throw new IllegalArgumentException("owner is null");
}
if (contentUuids != null && !contentUuids.isEmpty()) {
log.debug("Deleting content with UUIDs: {}", contentUuids);
List<Product> affectedProducts = this.productCurator.getProductsByContentUuids(owner, contentUuids).list();
if (!affectedProducts.isEmpty()) {
log.debug("Updating {} affected products", affectedProducts.size());
if (!(contentUuids instanceof Set)) {
// Convert this to a set so our filtering lookups aren't painfully slow
contentUuids = new HashSet<>(contentUuids);
}
// Get the collection of content those products use, throwing out the ones we'll be
// removing shortly
Map<String, Content> affectedProductsContent = new HashMap<>();
for (Content content : this.contentCurator.getContentByProducts(affectedProducts)) {
if (!contentUuids.contains(content.getUuid())) {
affectedProductsContent.put(content.getId(), content);
}
}
// Convert our affectedProducts into DTOs (hoping Hibernate uses its entity cache
// instead of pulling down the content list for each product...)
Map<String, ProductData> affectedProductData = new HashMap<>();
for (Product product : affectedProducts) {
ProductData pdto = product.toDTO();
Iterator<ProductContentData> pcd = pdto.getProductContent().iterator();
while (pcd.hasNext()) {
ContentData cdto = pcd.next().getContent();
if (!affectedProductsContent.containsKey(cdto.getId())) {
pcd.remove();
}
}
affectedProductData.put(pdto.getId(), pdto);
}
// Perform a micro-import for these products using the content map we just built
log.debug("Performing micro-import for products: {}", affectedProductData);
this.productManager.importProducts(owner, affectedProductData, affectedProductsContent);
if (regenerateEntitlementCerts) {
this.entitlementCertGenerator.regenerateCertificatesOf(Arrays.asList(owner), affectedProducts, true);
}
}
// Remove content references
this.ownerContentCurator.removeOwnerContentReferences(owner, contentUuids);
}
}
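A minimal invocation sketch for removeContentByUuids; the UUID values are hypothetical placeholders, and how they are resolved from content IDs is assumed to happen elsewhere:

// Sketch only: contentManager and owner wired as above; the UUIDs are invented.
List<String> doomedUuids = Arrays.asList("content-uuid-1", "content-uuid-2");

// Passing true regenerates entitlement certificates for consumers of the affected products.
contentManager.removeContentByUuids(owner, doomedUuids, true);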
Use of org.candlepin.model.dto.ContentData in project candlepin by candlepin.
The class Entitler, method getDevProductMap.
/**
* Looks up all Products matching the specified SKU and the consumer's
* installed products.
*
* @param consumer the consumer to pull the installed product id list from.
* @param sku the product id of the SKU.
* @return a {@link DeveloperProducts} object that contains the Product objects
* from the adapter.
*/
private DeveloperProducts getDevProductMap(Consumer consumer, Owner owner, String sku) {
List<String> devProductIds = new ArrayList<>();
devProductIds.add(sku);
for (ConsumerInstalledProduct ip : consumer.getInstalledProducts()) {
devProductIds.add(ip.getProductId());
}
Map<String, ProductData> productMap = new HashMap<>();
Map<String, ContentData> contentMap = new HashMap<>();
log.debug("Importing products for dev pool resolution...");
for (ProductData product : this.productAdapter.getProductsByIds(owner, devProductIds)) {
if (product == null) {
continue;
}
if (sku.equals(product.getId()) && StringUtils.isEmpty(product.getAttributeValue(Product.Attributes.SUPPORT_LEVEL))) {
// if there is no SLA, apply the default
product.setAttribute(Product.Attributes.SUPPORT_LEVEL, this.DEFAULT_DEV_SLA);
}
// Product is coming from an upstream source; lock it so only upstream can make
// further changes to it.
product.setLocked(true);
ProductData existingProduct = productMap.get(product.getId());
if (existingProduct != null && !existingProduct.equals(product)) {
log.warn("Multiple versions of the same product received during dev pool resolution; " + "discarding duplicate: {} => {}, {}", product.getId(), existingProduct, product);
} else {
productMap.put(product.getId(), product);
Collection<ProductContentData> pcdCollection = product.getProductContent();
if (pcdCollection != null) {
for (ProductContentData pcd : pcdCollection) {
if (pcd == null) {
log.error("product contains a null product-content mapping: {}", product);
throw new IllegalStateException("product contains a null product-content mapping: " + product);
}
ContentData content = pcd.getContent();
// Quick mapping sanity check; the import step performs the fuller population validation for us.
if (content == null || content.getId() == null) {
log.error("product contains a null or incomplete product-content mapping: {}", product);
throw new IllegalStateException("product contains a null or incomplete " + "product-content mapping: " + product);
}
// We need to lock the incoming content here, but doing so will affect
// the equality comparison for products. We'll correct them later.
ContentData existingContent = contentMap.get(content.getId());
if (existingContent != null && !existingContent.equals(content)) {
log.warn("Multiple versions of the same content received during dev pool " + "resolution; discarding duplicate: {} => {}, {}", content.getId(), existingContent, content);
} else {
contentMap.put(content.getId(), content);
}
}
}
}
}
log.debug("Importing {} content...", contentMap.size());
for (ContentData cdata : contentMap.values()) {
cdata.setLocked(true);
}
Map<String, Content> importedContent = this.contentManager.importContent(owner, contentMap, productMap.keySet()).getImportedEntities();
log.debug("Importing {} product(s)...", productMap.size());
Map<String, Product> importedProducts = this.productManager.importProducts(owner, productMap, importedContent).getImportedEntities();
log.debug("Resolved {} dev product(s) for sku: {}", productMap.size(), sku);
return new DeveloperProducts(sku, importedProducts);
}
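The content-collection comments above note a subtle ordering constraint: the DTOs can't be locked during collection because the locked flag feeds the equality comparison used for duplicate detection, so locking waits until after the dedupe pass. A self-contained sketch of the hazard, with a hypothetical Dto type standing in for Candlepin's:

import java.util.Objects;

public class LockOrderingSketch {

    // Hypothetical DTO: the locked flag participates in equals(), as the
    // comments above indicate for Candlepin's DTOs.
    static final class Dto {
        final String id;
        boolean locked;

        Dto(String id) {
            this.id = id;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof Dto && ((Dto) o).id.equals(id) && ((Dto) o).locked == locked;
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, locked);
        }
    }

    public static void main(String[] args) {
        Dto first = new Dto("c1");
        Dto duplicate = new Dto("c1");

        first.locked = true; // locking before deduplication...

        // ...makes two identical inbound copies look like conflicting versions,
        // which is why the code above locks content only after the dedupe pass.
        System.out.println(first.equals(duplicate)); // false
    }
}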