
Example 1 with DataStoreLifeCycle

use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle in project cloudstack by apache.

From the class VolumeServiceTest, the method createPrimaryDataStore:

public DataStore createPrimaryDataStore() {
    try {
        DataStoreProvider provider = dataStoreProviderMgr.getDataStoreProvider("sample primary data store provider");
        Map<String, Object> params = new HashMap<String, Object>();
        URI uri = new URI(this.getPrimaryStorageUrl());
        params.put("url", this.getPrimaryStorageUrl());
        params.put("server", uri.getHost());
        params.put("path", uri.getPath());
        params.put("protocol", Storage.StoragePoolType.NetworkFilesystem);
        params.put("dcId", dcId.toString());
        params.put("clusterId", clusterId.toString());
        params.put("name", this.primaryName);
        params.put("port", "1");
        params.put("roles", DataStoreRole.Primary.toString());
        params.put("uuid", UUID.nameUUIDFromBytes(this.getPrimaryStorageUrl().getBytes()).toString());
        params.put("providerName", String.valueOf(provider.getName()));
        DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
        DataStore store = lifeCycle.initialize(params);
        ClusterScope scope = new ClusterScope(clusterId, podId, dcId);
        lifeCycle.attachCluster(store, scope);
        /*
         * Older registration path, kept commented out in the original test:
         *
         * PrimaryDataStoreProvider provider =
         *     primaryDataStoreProviderMgr.getDataStoreProvider("sample primary data store provider");
         * primaryDataStoreProviderMgr.configure("primary data store mgr", new HashMap<String, Object>());
         *
         * List<PrimaryDataStoreVO> ds = primaryStoreDao.findPoolByName(this.primaryName);
         * if (ds.size() >= 1) {
         *     PrimaryDataStoreVO store = ds.get(0);
         *     if (store.getRemoved() == null) {
         *         return provider.getDataStore(store.getId());
         *     }
         * }
         *
         * Map<String, String> params = new HashMap<String, String>();
         * params.put("url", this.getPrimaryStorageUrl());
         * params.put("dcId", dcId.toString());
         * params.put("clusterId", clusterId.toString());
         * params.put("name", this.primaryName);
         * PrimaryDataStoreInfo primaryDataStoreInfo = provider.registerDataStore(params);
         * PrimaryDataStoreLifeCycle lc = primaryDataStoreInfo.getLifeCycle();
         * ClusterScope scope = new ClusterScope(clusterId, podId, dcId);
         * lc.attachCluster(scope);
         * return primaryDataStoreInfo;
         */
        return store;
    } catch (Exception e) {
        // Test helper: swallow the failure and signal it to the caller with a null store.
        return null;
    }
}
Also used : DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) ClusterScope(org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope) HashMap(java.util.HashMap) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) DataObject(org.apache.cloudstack.engine.subsystem.api.storage.DataObject) URI(java.net.URI) URISyntaxException(java.net.URISyntaxException) ExecutionException(java.util.concurrent.ExecutionException)
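
Taken together, the examples on this page exercise a small set of lifecycle calls. For orientation, here is a rough reconstruction of those methods as a sketch interface; it is inferred from the call sites in these examples rather than copied from the CloudStack source, and the boolean return types are an assumption, so the real DataStoreLifeCycle may declare different signatures and additional methods.

// Sketch reconstructed from the call sites in these examples; not the authoritative
// org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle declaration.
public interface DataStoreLifeCycleSketch {

    // Creates/registers a data store from a parameter map ("url", "name", "uuid", "providerName", ...).
    DataStore initialize(Map<String, Object> dsInfos);

    // Makes the store usable at cluster, host, or zone scope (Examples 1-3).
    boolean attachCluster(DataStore store, ClusterScope scope);
    boolean attachHost(DataStore store, HostScope scope, StoragePoolInfo existingInfo);
    boolean attachZone(DataStore store, ZoneScope scope, HypervisorType hypervisorType);

    // Maintenance handling and removal (Examples 4 and 5).
    boolean cancelMaintain(DataStore store);
    boolean deleteDataStore(DataStore store);
}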

Example 2 with DataStoreLifeCycle

use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle in project cloudstack by apache.

From the class StorageManagerImpl, the method createLocalStorage:

@DB
@Override
public DataStore createLocalStorage(Host host, StoragePoolInfo pInfo) throws ConnectionException {
    DataCenterVO dc = _dcDao.findById(host.getDataCenterId());
    if (dc == null) {
        return null;
    }
    boolean useLocalStorageForSystemVM = false;
    Boolean isLocal = ConfigurationManagerImpl.SystemVMUseLocalStorage.valueIn(dc.getId());
    if (isLocal != null) {
        useLocalStorageForSystemVM = isLocal.booleanValue();
    }
    if (!(dc.isLocalStorageEnabled() || useLocalStorageForSystemVM)) {
        return null;
    }
    DataStore store;
    try {
        String hostAddress = pInfo.getHost();
        if (host.getHypervisorType() == Hypervisor.HypervisorType.VMware) {
            hostAddress = "VMFS datastore: " + pInfo.getHostPath();
        }
        StoragePoolVO pool = _storagePoolDao.findPoolByHostPath(host.getDataCenterId(), host.getPodId(), hostAddress, pInfo.getHostPath(), pInfo.getUuid());
        if (pool == null && host.getHypervisorType() == HypervisorType.VMware) {
            // need to perform runtime upgrade here
            if (pInfo.getHostPath().length() > 0) {
                pool = _storagePoolDao.findPoolByHostPath(host.getDataCenterId(), host.getPodId(), hostAddress, "", pInfo.getUuid());
            }
        }
        if (pool == null) {
            // The path can be different, but if they have the same uuid, assume they are the same storage
            pool = _storagePoolDao.findPoolByHostPath(host.getDataCenterId(), host.getPodId(), hostAddress, null, pInfo.getUuid());
            if (pool != null) {
                s_logger.debug("Found a storage pool: " + pInfo.getUuid() + ", but with different hostpath " + pInfo.getHostPath() + ", still treat it as the same pool");
            }
        }
        DataStoreProvider provider = _dataStoreProviderMgr.getDefaultPrimaryDataStoreProvider();
        DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
        if (pool == null) {
            Map<String, Object> params = new HashMap<String, Object>();
            String name = (host.getName() + " Local Storage");
            params.put("zoneId", host.getDataCenterId());
            params.put("clusterId", host.getClusterId());
            params.put("podId", host.getPodId());
            params.put("url", pInfo.getPoolType().toString() + "://" + pInfo.getHost() + "/" + pInfo.getHostPath());
            params.put("name", name);
            params.put("localStorage", true);
            params.put("details", pInfo.getDetails());
            params.put("uuid", pInfo.getUuid());
            params.put("providerName", provider.getName());
            store = lifeCycle.initialize(params);
        } else {
            store = _dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
        }
        pool = _storagePoolDao.findById(store.getId());
        if (pool.getStatus() != StoragePoolStatus.Maintenance && pool.getStatus() != StoragePoolStatus.Removed) {
            HostScope scope = new HostScope(host.getId(), host.getClusterId(), host.getDataCenterId());
            lifeCycle.attachHost(store, scope, pInfo);
        }
    } catch (Exception e) {
        s_logger.warn("Unable to setup the local storage pool for " + host, e);
        throw new ConnectionException(true, "Unable to setup the local storage pool for " + host, e);
    }
    return _dataStoreMgr.getDataStore(store.getId(), DataStoreRole.Primary);
}
Also used : DataCenterVO(com.cloud.dc.DataCenterVO) HashMap(java.util.HashMap) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) HostScope(org.apache.cloudstack.engine.subsystem.api.storage.HostScope) ConnectionException(com.cloud.exception.ConnectionException) AgentUnavailableException(com.cloud.exception.AgentUnavailableException) OperationTimedoutException(com.cloud.exception.OperationTimedoutException) InsufficientCapacityException(com.cloud.exception.InsufficientCapacityException) StorageConflictException(com.cloud.exception.StorageConflictException) ResourceUnavailableException(com.cloud.exception.ResourceUnavailableException) StorageUnavailableException(com.cloud.exception.StorageUnavailableException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) UnknownHostException(java.net.UnknownHostException) ExecutionException(java.util.concurrent.ExecutionException) ResourceInUseException(com.cloud.exception.ResourceInUseException) URISyntaxException(java.net.URISyntaxException) DiscoveryException(com.cloud.exception.DiscoveryException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) ConfigurationException(javax.naming.ConfigurationException) PermissionDeniedException(com.cloud.exception.PermissionDeniedException) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) StoragePoolVO(org.apache.cloudstack.storage.datastore.db.StoragePoolVO) ConnectionException(com.cloud.exception.ConnectionException) DB(com.cloud.utils.db.DB)
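
On the caller side, createLocalStorage is typically reached when a connecting host reports its local storage pool, and the ConnectionException it throws is what aborts the host connection. The sketch below shows a hypothetical invocation; storageManager, host, poolInfo and s_logger are assumed to exist in the caller's context and are not taken from the CloudStack source.

// Hypothetical caller: register the local pool reported by a connecting host.
// All variable names here are placeholders from the assumed caller context.
try {
    DataStore localStore = storageManager.createLocalStorage(host, poolInfo);
    if (localStore == null) {
        // Null means local storage is disabled for the zone (or the zone was not found).
        s_logger.debug("Local storage was not registered for host " + host.getId());
    }
} catch (ConnectionException e) {
    // Setup failures are wrapped in a ConnectionException, which fails the host connection.
    s_logger.warn("Local storage setup failed for host " + host.getId(), e);
}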

Example 3 with DataStoreLifeCycle

use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle in project cloudstack by apache.

From the class StorageManagerImpl, the method createPool:

@Override
public PrimaryDataStoreInfo createPool(CreateStoragePoolCmd cmd) throws ResourceInUseException, IllegalArgumentException, UnknownHostException, ResourceUnavailableException {
    String providerName = cmd.getStorageProviderName();
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(providerName);
    if (storeProvider == null) {
        storeProvider = _dataStoreProviderMgr.getDefaultPrimaryDataStoreProvider();
        if (storeProvider == null) {
            throw new InvalidParameterValueException("can't find storage provider: " + providerName);
        }
    }
    Long clusterId = cmd.getClusterId();
    Long podId = cmd.getPodId();
    Long zoneId = cmd.getZoneId();
    ScopeType scopeType = ScopeType.CLUSTER;
    String scope = cmd.getScope();
    if (scope != null) {
        try {
            scopeType = Enum.valueOf(ScopeType.class, scope.toUpperCase());
        } catch (Exception e) {
            throw new InvalidParameterValueException("invalid scope for pool " + scope);
        }
    }
    if (scopeType == ScopeType.CLUSTER && clusterId == null) {
        throw new InvalidParameterValueException("cluster id can't be null, if scope is cluster");
    } else if (scopeType == ScopeType.ZONE && zoneId == null) {
        throw new InvalidParameterValueException("zone id can't be null, if scope is zone");
    }
    HypervisorType hypervisorType = HypervisorType.KVM;
    if (scopeType == ScopeType.ZONE) {
        // ignore passed clusterId and podId
        clusterId = null;
        podId = null;
        String hypervisor = cmd.getHypervisor();
        if (hypervisor != null) {
            try {
                hypervisorType = HypervisorType.getType(hypervisor);
            } catch (Exception e) {
                throw new InvalidParameterValueException("invalid hypervisor type " + hypervisor);
            }
        } else {
            throw new InvalidParameterValueException("Missing parameter hypervisor. Hypervisor type is required to create zone wide primary storage.");
        }
        if (hypervisorType != HypervisorType.KVM && hypervisorType != HypervisorType.VMware && hypervisorType != HypervisorType.Hyperv && hypervisorType != HypervisorType.LXC && hypervisorType != HypervisorType.Any) {
            throw new InvalidParameterValueException("zone wide storage pool is not supported for hypervisor type " + hypervisor);
        }
    }
    Map<String, String> details = extractApiParamAsMap(cmd.getDetails());
    DataCenterVO zone = _dcDao.findById(cmd.getZoneId());
    if (zone == null) {
        throw new InvalidParameterValueException("unable to find zone by id " + zoneId);
    }
    // Check if zone is disabled
    Account account = CallContext.current().getCallingAccount();
    if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(account.getId())) {
        throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId);
    }
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("zoneId", zone.getId());
    params.put("clusterId", clusterId);
    params.put("podId", podId);
    params.put("url", cmd.getUrl());
    params.put("tags", cmd.getTags());
    params.put("name", cmd.getStoragePoolName());
    params.put("details", details);
    params.put("providerName", storeProvider.getName());
    params.put("managed", cmd.isManaged());
    params.put("capacityBytes", cmd.getCapacityBytes());
    params.put("capacityIops", cmd.getCapacityIops());
    DataStoreLifeCycle lifeCycle = storeProvider.getDataStoreLifeCycle();
    DataStore store = null;
    try {
        store = lifeCycle.initialize(params);
        if (scopeType == ScopeType.CLUSTER) {
            ClusterScope clusterScope = new ClusterScope(clusterId, podId, zoneId);
            lifeCycle.attachCluster(store, clusterScope);
        } else if (scopeType == ScopeType.ZONE) {
            ZoneScope zoneScope = new ZoneScope(zoneId);
            lifeCycle.attachZone(store, zoneScope, hypervisorType);
        }
    } catch (Exception e) {
        s_logger.debug("Failed to add data store: " + e.getMessage(), e);
        try {
            // Roll back the partially created data store; any cleanup failure is logged and absorbed.
            if (store != null) {
                lifeCycle.deleteDataStore(store);
            }
        } catch (Exception ex) {
            s_logger.debug("Failed to clean up storage pool: " + ex.getMessage());
        }
        throw new CloudRuntimeException("Failed to add data store: " + e.getMessage(), e);
    }
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(store.getId(), DataStoreRole.Primary);
}
Also used : DataCenterVO(com.cloud.dc.DataCenterVO) PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) Account(com.cloud.user.Account) HashMap(java.util.HashMap) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) ConnectionException(com.cloud.exception.ConnectionException) AgentUnavailableException(com.cloud.exception.AgentUnavailableException) OperationTimedoutException(com.cloud.exception.OperationTimedoutException) InsufficientCapacityException(com.cloud.exception.InsufficientCapacityException) StorageConflictException(com.cloud.exception.StorageConflictException) ResourceUnavailableException(com.cloud.exception.ResourceUnavailableException) StorageUnavailableException(com.cloud.exception.StorageUnavailableException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) UnknownHostException(java.net.UnknownHostException) ExecutionException(java.util.concurrent.ExecutionException) ResourceInUseException(com.cloud.exception.ResourceInUseException) URISyntaxException(java.net.URISyntaxException) DiscoveryException(com.cloud.exception.DiscoveryException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) ConfigurationException(javax.naming.ConfigurationException) PermissionDeniedException(com.cloud.exception.PermissionDeniedException) HypervisorType(com.cloud.hypervisor.Hypervisor.HypervisorType) ZoneScope(org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) ClusterScope(org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) PermissionDeniedException(com.cloud.exception.PermissionDeniedException)
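
Stripped of its validation, createPool follows the same three-step pattern as the other examples: resolve a provider, initialize a store from a parameter map, then attach it at the requested scope. A condensed restatement, reusing the variables from the method above (illustrative only, not a drop-in replacement):

// Condensed restatement of the createPool flow above.
DataStoreLifeCycle lifeCycle = storeProvider.getDataStoreLifeCycle();
DataStore store = lifeCycle.initialize(params);
if (scopeType == ScopeType.CLUSTER) {
    // Cluster-wide primary storage: visible to a single cluster.
    lifeCycle.attachCluster(store, new ClusterScope(clusterId, podId, zoneId));
} else if (scopeType == ScopeType.ZONE) {
    // Zone-wide primary storage: requires a hypervisor type (KVM, VMware, Hyperv, LXC or Any).
    lifeCycle.attachZone(store, new ZoneScope(zoneId), hypervisorType);
}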

Example 4 with DataStoreLifeCycle

use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle in project cloudstack by apache.

From the class StorageManagerImpl, the method deletePool:

@Override
@DB
public boolean deletePool(DeletePoolCmd cmd) {
    Long id = cmd.getId();
    boolean forced = cmd.isForced();
    StoragePoolVO sPool = _storagePoolDao.findById(id);
    if (sPool == null) {
        s_logger.warn("Unable to find pool:" + id);
        throw new InvalidParameterValueException("Unable to find pool by id " + id);
    }
    if (sPool.getStatus() != StoragePoolStatus.Maintenance) {
        s_logger.warn("Unable to delete storage id: " + id + " due to it is not in Maintenance state");
        throw new InvalidParameterValueException("Unable to delete storage due to it is not in Maintenance state, id: " + id);
    }
    if (sPool.isLocal()) {
        s_logger.warn("Unable to delete local storage id:" + id);
        throw new InvalidParameterValueException("Unable to delete local storage id: " + id);
    }
    Pair<Long, Long> vlms = _volsDao.getCountAndTotalByPool(id);
    if (forced) {
        if (vlms.first() > 0) {
            Pair<Long, Long> nonDstrdVlms = _volsDao.getNonDestroyedCountAndTotalByPool(id);
            if (nonDstrdVlms.first() > 0) {
                throw new CloudRuntimeException("Cannot delete pool " + sPool.getName() + " as there are associated " + "non-destroyed vols for this pool");
            }
            // force expunge non-destroyed volumes
            List<VolumeVO> vols = _volsDao.listVolumesToBeDestroyed();
            for (VolumeVO vol : vols) {
                AsyncCallFuture<VolumeApiResult> future = volService.expungeVolumeAsync(volFactory.getVolume(vol.getId()));
                try {
                    future.get();
                } catch (InterruptedException | ExecutionException e) {
                    s_logger.debug("expunge volume failed:" + vol.getId(), e);
                }
            }
        }
    } else {
        // If the pool still has associated volumes, it cannot be deleted
        if (vlms.first() > 0) {
            throw new CloudRuntimeException("Cannot delete pool " + sPool.getName() + " as there are associated volumes for this pool");
        }
    }
    // First get the host_id from storage_pool_host_ref for given pool id
    StoragePoolVO lock = _storagePoolDao.acquireInLockTable(sPool.getId());
    if (lock == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Failed to acquire lock when deleting PrimaryDataStoreVO with ID: " + sPool.getId());
        }
        return false;
    }
    _storagePoolDao.releaseFromLockTable(lock.getId());
    s_logger.trace("Released lock for storage pool " + id);
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(sPool.getStorageProviderName());
    DataStoreLifeCycle lifeCycle = storeProvider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(sPool.getId(), DataStoreRole.Primary);
    return lifeCycle.deleteDataStore(store);
}
Also used : DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) VolumeApiResult(org.apache.cloudstack.engine.subsystem.api.storage.VolumeService.VolumeApiResult) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) StoragePoolVO(org.apache.cloudstack.storage.datastore.db.StoragePoolVO) ExecutionException(java.util.concurrent.ExecutionException) DB(com.cloud.utils.db.DB)
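
The checks at the top of deletePool amount to a small guard: the pool must be in Maintenance, must not be local storage, and, unless the delete is forced, must have no associated volumes at all; even a forced delete refuses to proceed while non-destroyed volumes remain. The restatement below is illustrative, with sPool, forced, vlms and nonDstrdVlms as in the method above (note that nonDstrdVlms is only computed on the forced path there):

// Illustrative guard distilled from the precondition checks in deletePool above.
boolean deletable = sPool.getStatus() == StoragePoolStatus.Maintenance  // must be in maintenance
        && !sPool.isLocal()                                             // local pools are never deleted here
        && (forced ? nonDstrdVlms.first() == 0                          // forced: only destroyed volumes may remain
                   : vlms.first() == 0);                                // normal: no volumes at all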

Example 5 with DataStoreLifeCycle

use of org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle in project cloudstack by apache.

From the class StorageManagerImpl, the method cancelPrimaryStorageForMaintenance:

@Override
@DB
public PrimaryDataStoreInfo cancelPrimaryStorageForMaintenance(CancelPrimaryStorageMaintenanceCmd cmd) throws ResourceUnavailableException {
    Long primaryStorageId = cmd.getId();
    StoragePoolVO primaryStorage = _storagePoolDao.findById(primaryStorageId);
    if (primaryStorage == null) {
        String msg = "Unable to obtain lock on the storage pool in cancelPrimaryStorageForMaintenance()";
        s_logger.error(msg);
        throw new InvalidParameterValueException(msg);
    }
    if (primaryStorage.getStatus().equals(StoragePoolStatus.Up) || primaryStorage.getStatus().equals(StoragePoolStatus.PrepareForMaintenance)) {
        throw new StorageUnavailableException("Primary storage with id " + primaryStorageId + " is not ready to complete migration, as the status is:" + primaryStorage.getStatus().toString(), primaryStorageId);
    }
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(primaryStorage.getStorageProviderName());
    DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
    lifeCycle.cancelMaintain(store);
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) StorageUnavailableException(com.cloud.exception.StorageUnavailableException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) StoragePoolVO(org.apache.cloudstack.storage.datastore.db.StoragePoolVO) DB(com.cloud.utils.db.DB)
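
All of the StorageManagerImpl examples resolve the lifecycle the same way: look up the pool's provider by name (or fall back to the default primary provider) and ask it for its DataStoreLifeCycle. A minimal helper capturing that lookup, condensed from the examples above (illustrative; it omits the null checks a production version would need):

// Illustrative helper condensed from the provider/lifecycle lookups above.
private DataStoreLifeCycle lifeCycleFor(StoragePoolVO pool) {
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
    return provider.getDataStoreLifeCycle();
}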

Aggregations

DataStoreLifeCycle (org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) 14
DataStoreProvider (org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) 14
DataStore (org.apache.cloudstack.engine.subsystem.api.storage.DataStore) 12
PrimaryDataStoreLifeCycle (org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) 11
InvalidParameterValueException (com.cloud.exception.InvalidParameterValueException) 10
HashMap (java.util.HashMap) 8
URISyntaxException (java.net.URISyntaxException) 6
ExecutionException (java.util.concurrent.ExecutionException) 6
StorageUnavailableException (com.cloud.exception.StorageUnavailableException) 5
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException) 5
StoragePoolVO (org.apache.cloudstack.storage.datastore.db.StoragePoolVO) 5
DataCenterVO (com.cloud.dc.DataCenterVO) 4
AgentUnavailableException (com.cloud.exception.AgentUnavailableException) 4
ConnectionException (com.cloud.exception.ConnectionException) 4
DiscoveryException (com.cloud.exception.DiscoveryException) 4
InsufficientCapacityException (com.cloud.exception.InsufficientCapacityException) 4
OperationTimedoutException (com.cloud.exception.OperationTimedoutException) 4
PermissionDeniedException (com.cloud.exception.PermissionDeniedException) 4
ResourceInUseException (com.cloud.exception.ResourceInUseException) 4
ResourceUnavailableException (com.cloud.exception.ResourceUnavailableException) 4