Use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider in project cloudstack by apache.
In class StorageManagerImpl, method preparePrimaryStorageForMaintenance:
@Override
@DB
public PrimaryDataStoreInfo preparePrimaryStorageForMaintenance(Long primaryStorageId) throws ResourceUnavailableException, InsufficientCapacityException {
    StoragePoolVO primaryStorage = _storagePoolDao.findById(primaryStorageId);
    if (primaryStorage == null) {
        String msg = "Unable to obtain lock on the storage pool record in preparePrimaryStorageForMaintenance()";
        s_logger.error(msg);
        throw new InvalidParameterValueException(msg);
    }
    if (!primaryStorage.getStatus().equals(StoragePoolStatus.Up) && !primaryStorage.getStatus().equals(StoragePoolStatus.ErrorInMaintenance)) {
        throw new InvalidParameterValueException("Primary storage with id " + primaryStorageId + " is not ready for migration, as the status is: " + primaryStorage.getStatus().toString());
    }
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(primaryStorage.getStorageProviderName());
    DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
    if (primaryStorage.getPoolType() == StoragePoolType.DatastoreCluster) {
        if (primaryStorage.getStatus() == StoragePoolStatus.PrepareForMaintenance) {
            throw new CloudRuntimeException(String.format("There is already a job running for preparation for maintenance of the storage pool %s", primaryStorage.getUuid()));
        }
        handlePrepareDatastoreClusterMaintenance(lifeCycle, primaryStorageId);
    }
    lifeCycle.maintain(store);
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
}
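The provider/lifecycle lookup above is the recurring pattern in this class: the pool record carries a provider name, the provider supplies the lifecycle, and the lifecycle acts on the engine-level DataStore. A minimal sketch of just that pattern, assuming the same injected _dataStoreProviderMgr and _dataStoreMgr fields shown above; the helper name maintainPool is hypothetical:

    // Hypothetical helper showing the lookup-then-delegate pattern used above.
    private DataStore maintainPool(StoragePoolVO pool) {
        // Resolve the provider registered under the pool's provider name.
        DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
        // Each provider supplies its own lifecycle implementation.
        DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
        // Fetch the engine-level view of the pool and hand it to the lifecycle.
        DataStore store = _dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
        lifeCycle.maintain(store);
        return store;
    }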
Use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider in project cloudstack by apache.
In class StorageManagerImpl, method updateStoragePool:
@Override
public PrimaryDataStoreInfo updateStoragePool(UpdateStoragePoolCmd cmd) throws IllegalArgumentException {
    // Input validation
    Long id = cmd.getId();
    StoragePoolVO pool = _storagePoolDao.findById(id);
    if (pool == null) {
        throw new IllegalArgumentException("Unable to find storage pool with ID: " + id);
    }
    String name = cmd.getName();
    if (StringUtils.isNotBlank(name)) {
        s_logger.debug("Updating Storage Pool name to: " + name);
        pool.setName(name);
        _storagePoolDao.update(pool.getId(), pool);
    }
    final List<String> storagePoolTags = cmd.getTags();
    if (storagePoolTags != null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Updating Storage Pool Tags to: " + storagePoolTags);
        }
        if (pool.getPoolType() == StoragePoolType.DatastoreCluster) {
            List<StoragePoolVO> childStoragePools = _storagePoolDao.listChildStoragePoolsInDatastoreCluster(pool.getId());
            for (StoragePoolVO childPool : childStoragePools) {
                _storagePoolTagsDao.persist(childPool.getId(), storagePoolTags);
            }
        }
        _storagePoolTagsDao.persist(pool.getId(), storagePoolTags);
    }
    Long updatedCapacityBytes = null;
    Long capacityBytes = cmd.getCapacityBytes();
    if (capacityBytes != null && capacityBytes != pool.getCapacityBytes()) {
        updatedCapacityBytes = capacityBytes;
    }
    Long updatedCapacityIops = null;
    Long capacityIops = cmd.getCapacityIops();
    if (capacityIops != null && !capacityIops.equals(pool.getCapacityIops())) {
        updatedCapacityIops = capacityIops;
    }
    if (updatedCapacityBytes != null || updatedCapacityIops != null) {
        StoragePoolVO storagePool = _storagePoolDao.findById(id);
        DataStoreProvider dataStoreProvider = _dataStoreProviderMgr.getDataStoreProvider(storagePool.getStorageProviderName());
        DataStoreLifeCycle dataStoreLifeCycle = dataStoreProvider.getDataStoreLifeCycle();
        if (dataStoreLifeCycle instanceof PrimaryDataStoreLifeCycle) {
            Map<String, String> details = new HashMap<String, String>();
            details.put(PrimaryDataStoreLifeCycle.CAPACITY_BYTES, updatedCapacityBytes != null ? String.valueOf(updatedCapacityBytes) : null);
            details.put(PrimaryDataStoreLifeCycle.CAPACITY_IOPS, updatedCapacityIops != null ? String.valueOf(updatedCapacityIops) : null);
            ((PrimaryDataStoreLifeCycle) dataStoreLifeCycle).updateStoragePool(storagePool, details);
        }
    }
    Boolean enabled = cmd.getEnabled();
    if (enabled != null) {
        if (enabled) {
            enablePrimaryStoragePool(pool);
        } else {
            disablePrimaryStoragePool(pool);
        }
    }
    if (updatedCapacityBytes != null) {
        _storagePoolDao.updateCapacityBytes(id, capacityBytes);
    }
    if (updatedCapacityIops != null) {
        _storagePoolDao.updateCapacityIops(id, capacityIops);
    }
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
}
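Only the capacity portion of the update is pushed down to the provider, and only when its lifecycle is a PrimaryDataStoreLifeCycle; name, tag and enabled changes are handled entirely in the management layer. A sketch of that hand-off in isolation, assuming the same injected fields; pushCapacity is a hypothetical name:

    // Hypothetical helper mirroring the capacity hand-off in updateStoragePool above.
    private void pushCapacity(StoragePoolVO pool, Long newCapacityBytes, Long newCapacityIops) {
        DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
        DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
        // Only primary-storage lifecycles accept capacity updates; other providers are left untouched.
        if (lifeCycle instanceof PrimaryDataStoreLifeCycle) {
            Map<String, String> details = new HashMap<>();
            details.put(PrimaryDataStoreLifeCycle.CAPACITY_BYTES, newCapacityBytes != null ? String.valueOf(newCapacityBytes) : null);
            details.put(PrimaryDataStoreLifeCycle.CAPACITY_IOPS, newCapacityIops != null ? String.valueOf(newCapacityIops) : null);
            ((PrimaryDataStoreLifeCycle) lifeCycle).updateStoragePool(pool, details);
        }
    }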
Use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider in project cloudstack by apache.
In class StorageManagerImpl, method getDataObjectSizeIncludingHypervisorSnapshotReserve:
private long getDataObjectSizeIncludingHypervisorSnapshotReserve(Volume volume, DiskProfile diskProfile, StoragePool pool) {
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
    DataStoreDriver storeDriver = storeProvider.getDataStoreDriver();
    if (storeDriver instanceof PrimaryDataStoreDriver) {
        PrimaryDataStoreDriver primaryStoreDriver = (PrimaryDataStoreDriver) storeDriver;
        VolumeInfo volumeInfo = volFactory.getVolume(volume.getId());
        if (volume.getDiskOfferingId() != diskProfile.getDiskOfferingId()) {
            return diskProfile.getSize();
        }
        return primaryStoreDriver.getDataObjectSizeIncludingHypervisorSnapshotReserve(volumeInfo, pool);
    }
    return volume.getSize();
}
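Here the provider is asked for its DataStoreDriver rather than its lifecycle, because only a PrimaryDataStoreDriver can say how much extra space its backend sets aside for hypervisor snapshots. The reserve is typically a percentage on top of the requested size; a minimal sketch of that idea, not taken from any particular driver, with a hypothetical helper name and an arbitrary example percentage:

    // Hypothetical reserve calculation; real drivers read the reserve from their own pool or offering details.
    private long sizeIncludingSnapshotReserve(long requestedSizeBytes, int reservePercent) {
        // e.g. a 25% reserve turns a 100 GiB request into 125 GiB of provisioned space
        return requestedSizeBytes + (requestedSizeBytes * reservePercent) / 100;
    }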
Use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider in project cloudstack by apache.
In class StorageManagerImpl, method disablePrimaryStoragePool:
@ActionEvent(eventType = EventTypes.EVENT_DISABLE_PRIMARY_STORAGE, eventDescription = "disable storage pool")
private void disablePrimaryStoragePool(StoragePoolVO primaryStorage) {
    if (!primaryStorage.getStatus().equals(StoragePoolStatus.Up)) {
        throw new InvalidParameterValueException("Primary storage with id " + primaryStorage.getId() + " cannot be disabled. Storage pool state: " + primaryStorage.getStatus().toString());
    }
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(primaryStorage.getStorageProviderName());
    DataStoreLifeCycle dataStoreLifeCycle = provider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
    ((PrimaryDataStoreLifeCycle) dataStoreLifeCycle).disableStoragePool(store);
}
Use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider in project cloudstack by apache.
In class StorageManagerImpl, method enablePrimaryStoragePool:
@ActionEvent(eventType = EventTypes.EVENT_ENABLE_PRIMARY_STORAGE, eventDescription = "enable storage pool")
private void enablePrimaryStoragePool(StoragePoolVO primaryStorage) {
    if (!primaryStorage.getStatus().equals(StoragePoolStatus.Disabled)) {
        throw new InvalidParameterValueException("Primary storage with id " + primaryStorage.getId() + " cannot be enabled. Storage pool state: " + primaryStorage.getStatus().toString());
    }
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(primaryStorage.getStorageProviderName());
    DataStoreLifeCycle dataStoreLifeCycle = provider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
    ((PrimaryDataStoreLifeCycle) dataStoreLifeCycle).enableStoragePool(store);
}
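enablePrimaryStoragePool and disablePrimaryStoragePool differ only in the state they require and the lifecycle call they make; the cast is needed because enableStoragePool and disableStoragePool are exposed by the primary-storage specialisation of the lifecycle rather than by DataStoreLifeCycle itself. A sketch of the shared shape, assuming the same injected fields; setPoolEnabled is a hypothetical name:

    // Hypothetical helper folding the two methods above into one toggle.
    private void setPoolEnabled(StoragePoolVO pool, boolean enable) {
        DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
        // Only primary-storage lifecycles can enable or disable a pool, hence the cast in both methods above.
        PrimaryDataStoreLifeCycle lifeCycle = (PrimaryDataStoreLifeCycle) provider.getDataStoreLifeCycle();
        DataStore store = _dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
        if (enable) {
            lifeCycle.enableStoragePool(store);
        } else {
            lifeCycle.disableStoragePool(store);
        }
    }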