use of com.netflix.spinnaker.kork.web.exceptions.NotFoundException in project front50 by spinnaker.
the class AzureStorageService method listObjectVersions.
@Override
public <T extends Timestamped> Collection<T> listObjectVersions(
    ObjectType objectType, String objectKey, int maxResults) throws NotFoundException {
  Set<T> results = new HashSet<>();
  String fullKey = buildKeyPath(objectType.group, objectKey, objectType.defaultMetadataFilename);
  try {
    ResultContinuation token = null;
    EnumSet<BlobListingDetails> listDetails = EnumSet.of(BlobListingDetails.SNAPSHOTS);
    do {
      ResultSegment<ListBlobItem> result =
          getBlobContainer()
              .listBlobsSegmented(
                  fullKey, true, listDetails, NUM_OF_SNAPSHOT_LIMITATION, token, null, null);
      token = result.getContinuationToken();
      // By default, listBlobsSegmented with a maxResults parameter returns the *oldest* blob
      // snapshots, but we want the latest ones, and the storage SDK provides no way to request
      // the N most recent snapshots directly. So fetch everything, then sort and filter
      // (a null snapshot ID marks the latest, i.e. live, blob).
      List<CloudBlockBlob> filteredResults =
          result.getResults().stream()
              .map(item -> (CloudBlockBlob) item)
              .sorted(
                  (a, b) -> {
                    if (a.getSnapshotID() == null) return -1;
                    if (b.getSnapshotID() == null) return 1;
                    return b.getSnapshotID().compareTo(a.getSnapshotID());
                  })
              .limit(maxResults)
              .collect(Collectors.toList());
      for (CloudBlockBlob blob : filteredResults) {
        T blobObject = deserialize(blob, (Class<T>) objectType.clazz);
        blobObject.setLastModified(blob.getProperties().getLastModified().getTime());
        results.add(blobObject);
      }
    } while (token != null);
  } catch (StorageException se) {
    logStorageException(se, fullKey);
  } catch (Exception e) {
    log.error(
        "Error retrieving versions for {} object: {}",
        value("key", fullKey),
        value("exception", e.getMessage()));
  }
  return results;
}
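The comparator above pushes the live blob (null snapshot ID) to the front and orders the remaining snapshot IDs newest-first. A minimal, self-contained sketch of that ordering, assuming snapshot IDs compare lexicographically like Azure's timestamp-style IDs (the class name and data here are illustrative, not part of front50):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SnapshotOrderingSketch {
  public static void main(String[] args) {
    // A null snapshot ID marks the live (latest) blob; the rest are timestamp-style IDs.
    List<String> snapshotIds =
        Arrays.asList("2021-03-01T00:00:00.0000000Z", null, "2021-03-03T00:00:00.0000000Z");
    List<String> latestFirst =
        snapshotIds.stream()
            .sorted(
                (a, b) -> {
                  if (a == null) return -1; // live blob sorts first
                  if (b == null) return 1;
                  return b.compareTo(a); // newer timestamps before older ones
                })
            .limit(2) // keep only the two most recent versions
            .collect(Collectors.toList());
    System.out.println(latestFirst); // [null, 2021-03-03T00:00:00.0000000Z]
  }
}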
use of com.netflix.spinnaker.kork.web.exceptions.NotFoundException in project front50 by spinnaker.
the class ApplicationPermissionsService method deleteApplicationPermission.
public void deleteApplicationPermission(@Nonnull String appName) {
  Permission oldPerm;
  try {
    oldPerm = applicationPermissionDAO().findById(appName);
  } catch (NotFoundException e) {
    // Nothing to see here, we're all done already.
    return;
  }
  performWrite(
      supportingEventListeners(Type.PRE_DELETE),
      supportingEventListeners(Type.POST_DELETE),
      (unused, newPerm) -> {
        applicationPermissionDAO().delete(appName);
        syncUsers(null, oldPerm);
        return newPerm;
      },
      oldPerm,
      null);
}
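Catching NotFoundException and returning early makes the delete idempotent: a permission that is already gone is treated as success rather than an error. A minimal sketch of the same pattern, assuming kork's NotFoundException is unchecked; deleteIfPresent, finder, and deleter are illustrative names, not front50 APIs:

import com.netflix.spinnaker.kork.web.exceptions.NotFoundException;
import java.util.function.Consumer;
import java.util.function.Function;

public class IdempotentDeleteSketch {
  // Returns true if something was deleted, false if it was already absent.
  static <ID> boolean deleteIfPresent(ID id, Function<ID, ?> finder, Consumer<ID> deleter) {
    try {
      finder.apply(id); // assumed to throw NotFoundException when the entity is absent
    } catch (NotFoundException e) {
      return false; // already gone: a no-op, not an error
    }
    deleter.accept(id);
    return true;
  }
}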
use of com.netflix.spinnaker.kork.web.exceptions.NotFoundException in project front50 by spinnaker.
the class StorageServiceSupport method findById.
public T findById(String id) throws NotFoundException {
  CircuitBreaker breaker =
      circuitBreakerRegistry.circuitBreaker(
          getClass().getSimpleName() + "-findById",
          CircuitBreakerConfig.custom()
              .ignoreException(e -> e instanceof NotFoundException)
              .build());
  Supplier<T> recoverableSupplier =
      SupplierUtils.recover(
          () -> service.loadObject(objectType, buildObjectKey(id)),
          e ->
              Optional.ofNullable(allItemsCache.get()).orElseGet(HashSet::new).stream()
                  .filter(item -> item.getId().equalsIgnoreCase(id))
                  .findFirst()
                  .orElseThrow(
                      () ->
                          new NotFoundException(
                              String.format(
                                  "No item found in cache with id of %s",
                                  id == null ? "null" : id.toLowerCase()))));
  return breaker.executeSupplier(recoverableSupplier);
}
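The breaker is configured to ignore NotFoundException, so legitimate misses do not count toward opening the circuit, while SupplierUtils.recover falls back to the in-memory cache when the primary load fails. A minimal, self-contained sketch of that recover-then-breaker composition with Resilience4j (the breaker name and simulated failure are illustrative):

import io.github.resilience4j.circuitbreaker.CircuitBreaker;
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry;
import io.github.resilience4j.core.SupplierUtils;
import java.util.function.Supplier;

public class RecoverableSupplierSketch {
  public static void main(String[] args) {
    CircuitBreaker breaker = CircuitBreakerRegistry.ofDefaults().circuitBreaker("demo-findById");
    // Primary lookup fails; the recovery function serves a cached value instead.
    Supplier<String> primary = () -> { throw new RuntimeException("storage unavailable"); };
    Supplier<String> recoverable = SupplierUtils.recover(primary, e -> "cached-value");
    // Recovery happens inside the supplier, so the breaker records a success here.
    System.out.println(breaker.executeSupplier(recoverable)); // prints "cached-value"
  }
}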
use of com.netflix.spinnaker.kork.web.exceptions.NotFoundException in project front50 by spinnaker.
the class StorageServiceSupport method fetchAllItems.
/**
* Fetch any previously cached applications that have been updated since last retrieved.
*
* @param existingItems Previously cached applications
* @return Refreshed applications
*/
private Set<T> fetchAllItems(Set<T> existingItems) {
  if (existingItems == null) {
    existingItems = new HashSet<>();
  }
  int existingSize = existingItems.size();
  AtomicLong numAdded = new AtomicLong();
  AtomicLong numRemoved = new AtomicLong();
  AtomicLong numUpdated = new AtomicLong();

  Map<String, String> keyToId = new HashMap<>();
  for (T item : existingItems) {
    String id = item.getId();
    keyToId.put(buildObjectKey(id), id);
  }

  Long refreshTime = System.currentTimeMillis();
  Long storageLastModified = readLastModified();
  Map<String, Long> keyUpdateTime = objectKeyLoader.listObjectKeys(objectType);

  // Expanded from a stream collector to avoid DuplicateKeyExceptions.
  Map<String, T> resultMap = new HashMap<>();
  for (T item : existingItems) {
    if (keyUpdateTime.containsKey(buildObjectKey(item))) {
      String itemId = buildObjectKey(item.getId());
      if (resultMap.containsKey(itemId)) {
        log.error("Duplicate item id found, last-write wins: (id: {})", value("id", itemId));
      }
      resultMap.put(itemId, item);
    }
  }

  List<Map.Entry<String, Long>> modifiedKeys =
      keyUpdateTime.entrySet().stream()
          .filter(
              entry -> {
                T existingItem = resultMap.get(entry.getKey());
                if (existingItem == null) {
                  numAdded.getAndIncrement();
                  return true;
                }
                Long modTime = existingItem.getLastModified();
                if (modTime == null || entry.getValue() > modTime) {
                  numUpdated.getAndIncrement();
                  return true;
                }
                return false;
              })
          .collect(Collectors.toList());

  if (!existingItems.isEmpty() && !modifiedKeys.isEmpty()) {
    // Only log keys that have been modified after the initial cache load.
    log.debug("Modified object keys: {}", value("keys", modifiedKeys));
  }

  try {
    List<String> objectKeys =
        modifiedKeys.stream().map(Map.Entry::getKey).collect(Collectors.toList());
    List<T> objects = service.loadObjects(objectType, objectKeys);
    Map<String, T> objectsById =
        objects.stream()
            .collect(Collectors.toMap(this::buildObjectKey, Function.identity(), (o1, o2) -> o1));
    for (String objectKey : objectKeys) {
      if (objectsById.containsKey(objectKey)) {
        resultMap.put(objectKey, objectsById.get(objectKey));
      } else {
        // Equivalent to the NotFoundException handling in the exceptional case below.
        resultMap.remove(keyToId.get(objectKey));
        numRemoved.getAndIncrement();
        log.warn("Unable to find result for {}:{} (filtering!)", objectType, objectKey);
      }
    }
  } catch (UnsupportedOperationException e) {
    // Bulk loading is unsupported: fall back to loading objects individually, in batches of 10.
    Observable.from(modifiedKeys)
        .buffer(10)
        .flatMap(
            ids ->
                Observable.from(ids)
                    .flatMap(
                        entry -> {
                          try {
                            String key = entry.getKey();
                            T object = (T) service.loadObject(objectType, key);
                            if (!key.equals(buildObjectKey(object))) {
                              mismatchedIdCounter.increment();
                              log.warn(
                                  "{} '{}' has non-matching id '{}'",
                                  objectType.group,
                                  key,
                                  buildObjectKey(object));
                              // Should return Observable.empty() to skip caching, but wait
                              // until the logging has been present for a release.
                            }
                            return Observable.just(object);
                          } catch (NotFoundException e2) {
                            resultMap.remove(keyToId.get(entry.getKey()));
                            numRemoved.getAndIncrement();
                            return Observable.empty();
                          }
                        })
                    .subscribeOn(scheduler))
        .subscribeOn(scheduler)
        .toList()
        .toBlocking()
        .single()
        .forEach(item -> resultMap.put(buildObjectKey(item), item));
  }

  Set<T> result = new HashSet<>(resultMap.values());
  this.lastRefreshedTime.set(refreshTime);
  this.lastSeenStorageTime.set(storageLastModified);

  int resultSize = result.size();
  addCounter.increment(numAdded.get());
  updateCounter.increment(numUpdated.get());
  removeCounter.increment(existingSize + numAdded.get() - resultSize);
  if (existingSize != resultSize) {
    log.info(
        "{}={} delta={}",
        value("objectType", objectType.group),
        value("resultSize", resultSize),
        value("delta", resultSize - existingSize));
  }
  return result;
}
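The core of the refresh is the modifiedKeys filter: a key is reloaded only when it is absent from the cache or its storage timestamp is newer than the cached lastModified. A minimal sketch of that delta computation with illustrative data:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class DeltaRefreshSketch {
  public static void main(String[] args) {
    // Cached item -> lastModified, and storage key -> lastModified (illustrative data).
    Map<String, Long> cached = new HashMap<>();
    cached.put("a", 100L);
    cached.put("b", 200L);
    Map<String, Long> storage = new HashMap<>();
    storage.put("a", 100L); // unchanged
    storage.put("b", 300L); // updated in storage
    storage.put("c", 50L);  // newly added
    // Keep only keys that are new or newer than the cached copy.
    List<String> modifiedKeys =
        storage.entrySet().stream()
            .filter(
                entry -> {
                  Long cachedTime = cached.get(entry.getKey());
                  return cachedTime == null || entry.getValue() > cachedTime;
                })
            .map(Map.Entry::getKey)
            .sorted()
            .collect(Collectors.toList());
    System.out.println(modifiedKeys); // [b, c]
  }
}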
use of com.netflix.spinnaker.kork.web.exceptions.NotFoundException in project front50 by spinnaker.
the class PipelineController method validatePipeline.
/**
* Ensure basic validity of the pipeline. Invalid pipelines will raise runtime exceptions.
*
* @param pipeline The Pipeline to validate
*/
private void validatePipeline(final Pipeline pipeline, Boolean staleCheck) {
  // Pipelines must have an application and a name.
  if (StringUtils.isAnyBlank(pipeline.getApplication(), pipeline.getName())) {
    throw new InvalidEntityException("A pipeline requires name and application fields");
  }

  // Templated pipelines must reference a valid template.
  if (TYPE_TEMPLATED.equals(pipeline.getType())) {
    PipelineTemplateDAO templateDAO = getTemplateDAO();
    String source;
    switch (pipeline.getSchema()) {
      case "v2":
        V2TemplateConfiguration v2Config =
            objectMapper.convertValue(pipeline, V2TemplateConfiguration.class);
        source = v2Config.getTemplate().getReference();
        break;
      default:
        TemplateConfiguration v1Config =
            objectMapper.convertValue(pipeline.getConfig(), TemplateConfiguration.class);
        source = v1Config.getPipeline().getTemplate().getSource();
        break;
    }
    // Check that the template id (the part after the :// prefix) exists in the store.
    if (source.startsWith(SPINNAKER_PREFIX)) {
      String templateId = source.substring(SPINNAKER_PREFIX.length());
      try {
        templateDAO.findById(templateId);
      } catch (NotFoundException notFoundEx) {
        throw new BadRequestException("Configured pipeline template not found", notFoundEx);
      }
    }
  }

  checkForDuplicatePipeline(pipeline.getApplication(), pipeline.getName().trim(), pipeline.getId());

  final ValidatorErrors errors = new ValidatorErrors();
  pipelineValidators.forEach(it -> it.validate(pipeline, errors));
  if (staleCheck && !Strings.isNullOrEmpty(pipeline.getId()) && pipeline.getLastModified() != null) {
    checkForStalePipeline(pipeline, errors);
  }
  if (errors.hasErrors()) {
    String message = errors.getAllErrorsMessage();
    throw new ValidationException(message, errors.getAllErrors());
  }
}
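Here a NotFoundException from the template store is translated into a BadRequestException: referencing a missing template is a client error on the pipeline being saved, not a 404 on the pipeline itself. A minimal sketch of the prefix handling, assuming SPINNAKER_PREFIX has a value like "spinnaker://" (an assumption for illustration; check the actual constant in PipelineController):

public class TemplateSourceSketch {
  // Assumed prefix value, for illustration only.
  private static final String SPINNAKER_PREFIX = "spinnaker://";

  public static void main(String[] args) {
    String source = "spinnaker://myTemplate";
    if (source.startsWith(SPINNAKER_PREFIX)) {
      // The id after the prefix is what would be looked up via templateDAO.findById.
      String templateId = source.substring(SPINNAKER_PREFIX.length());
      System.out.println(templateId); // myTemplate
    }
  }
}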