
Example 1 with PrimaryDataStoreInfo

use of org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo in project cloudstack by apache.

The class ElastistorPrimaryDataStoreLifeCycle, method attachCluster:

@Override
public boolean attachCluster(DataStore store, ClusterScope scope) {
    dataStoreHelper.attachCluster(store);
    StoragePoolVO dataStoreVO = _storagePoolDao.findById(store.getId());
    PrimaryDataStoreInfo primarystore = (PrimaryDataStoreInfo) store;
    // Check if there is host up in this cluster
    List<HostVO> allHosts = _resourceMgr.listAllUpAndEnabledHosts(Host.Type.Routing, primarystore.getClusterId(), primarystore.getPodId(), primarystore.getDataCenterId());
    if (allHosts.isEmpty()) {
        primaryDataStoreDao.expunge(primarystore.getId());
        throw new CloudRuntimeException("No host up to associate a storage pool with in cluster " + primarystore.getClusterId());
    }
    if (!dataStoreVO.isManaged()) {
        boolean success = false;
        for (HostVO h : allHosts) {
            success = createStoragePool(h.getId(), primarystore);
            if (success) {
                break;
            }
        }
    }
    s_logger.debug("In createPool Adding the pool to each of the hosts");
    List<HostVO> poolHosts = new ArrayList<HostVO>();
    for (HostVO h : allHosts) {
        try {
            storageMgr.connectHostToSharedPool(h.getId(), primarystore.getId());
            poolHosts.add(h);
        } catch (Exception e) {
            s_logger.warn("Unable to establish a connection between " + h + " and " + primarystore, e);
        }
    }
    // Check for accessible hosts only after every host has been tried; doing this inside
    // the loop would expunge the pool as soon as the first connection attempt failed.
    if (poolHosts.isEmpty()) {
        s_logger.warn("No host can access storage pool " + primarystore + " on cluster " + primarystore.getClusterId());
        primaryDataStoreDao.expunge(primarystore.getId());
        throw new CloudRuntimeException("Failed to access storage pool");
    }
    return true;
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) StoragePoolVO(org.apache.cloudstack.storage.datastore.db.StoragePoolVO) ArrayList(java.util.ArrayList) HostVO(com.cloud.host.HostVO) StoragePoolHostVO(com.cloud.storage.StoragePoolHostVO)
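For orientation, the attachCluster hook above is invoked by the storage orchestration after the lifecycle's initialize step, as StorageManagerImpl.createPool (Example 3) shows. Below is a minimal caller-side sketch of that sequence, assuming the provider and scope IDs are already resolved; the class and method names are illustrative, not from the project.

import java.util.HashMap;
import java.util.Map;

import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider;

import com.cloud.utils.exception.CloudRuntimeException;

// Illustrative sketch: drives a provider's lifecycle for a cluster-scoped pool,
// mirroring the initialize/attachCluster sequence in Example 3 below.
final class ClusterAttachSketch {

    static DataStore addClusterScopedPool(DataStoreProvider provider, Long zoneId, Long podId, Long clusterId, String url) {
        DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();

        // Parameter keys follow the ones passed by createPool in Example 3.
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("zoneId", zoneId);
        params.put("podId", podId);
        params.put("clusterId", clusterId);
        params.put("url", url);
        params.put("providerName", provider.getName());

        DataStore store = null;
        try {
            store = lifeCycle.initialize(params);
            // attachCluster is the hook shown above: it expunges the pool if no host in the
            // cluster is up, otherwise connects every up-and-enabled host to the shared pool.
            lifeCycle.attachCluster(store, new ClusterScope(clusterId, podId, zoneId));
        } catch (Exception e) {
            if (store != null) {
                try {
                    lifeCycle.deleteDataStore(store); // best-effort rollback, as createPool does
                } catch (Exception cleanupFailure) {
                    // swallow so the original failure is the one reported
                }
            }
            throw new CloudRuntimeException("Failed to add data store: " + e.getMessage(), e);
        }
        return store;
    }
}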

Example 2 with PrimaryDataStoreInfo

use of org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo in project cloudstack by apache.

The class VirtualMachineManagerImpl, method orchestrateStorageMigration:

@ReflectionUse
private Pair<JobInfo.Status, String> orchestrateStorageMigration(final VmWorkStorageMigration work) throws Exception {
    final VMInstanceVO vm = _entityMgr.findById(VMInstanceVO.class, work.getVmId());
    if (vm == null) {
        s_logger.info("Unable to find vm " + work.getVmId());
    }
    assert vm != null;
    final StoragePool pool = (PrimaryDataStoreInfo) dataStoreMgr.getPrimaryDataStore(work.getDestStoragePoolId());
    orchestrateStorageMigration(vm.getUuid(), pool);
    return new Pair<JobInfo.Status, String>(JobInfo.Status.SUCCEEDED, null);
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) StoragePool(com.cloud.storage.StoragePool) JobInfo(org.apache.cloudstack.jobs.JobInfo) Pair(com.cloud.utils.Pair) ReflectionUse(com.cloud.utils.ReflectionUse)
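The cast-and-assign above compiles because PrimaryDataStoreInfo is a StoragePool as well as a DataStore: getPrimaryDataStore hands back the generic store view and the cast recovers the pool-level interface. A minimal sketch of that resolution step; the helper class is illustrative, and the DataStoreManager import path is assumed to sit in the same storage API package as the other interfaces on this page.

import com.cloud.storage.StoragePool;

import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo;

// Illustrative sketch: resolves a destination pool the same way
// orchestrateStorageMigration does in Example 2 above.
final class DestinationPoolSketch {

    static StoragePool resolveDestinationPool(DataStoreManager dataStoreMgr, long destStoragePoolId) {
        // getPrimaryDataStore returns the generic DataStore view; the cast is safe for
        // primary storage because PrimaryDataStoreInfo also implements StoragePool.
        return (PrimaryDataStoreInfo) dataStoreMgr.getPrimaryDataStore(destStoragePoolId);
    }
}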

Example 3 with PrimaryDataStoreInfo

use of org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo in project cloudstack by apache.

The class StorageManagerImpl, method createPool:

@Override
public PrimaryDataStoreInfo createPool(CreateStoragePoolCmd cmd) throws ResourceInUseException, IllegalArgumentException, UnknownHostException, ResourceUnavailableException {
    String providerName = cmd.getStorageProviderName();
    DataStoreProvider storeProvider = _dataStoreProviderMgr.getDataStoreProvider(providerName);
    if (storeProvider == null) {
        storeProvider = _dataStoreProviderMgr.getDefaultPrimaryDataStoreProvider();
        if (storeProvider == null) {
            throw new InvalidParameterValueException("can't find storage provider: " + providerName);
        }
    }
    Long clusterId = cmd.getClusterId();
    Long podId = cmd.getPodId();
    Long zoneId = cmd.getZoneId();
    ScopeType scopeType = ScopeType.CLUSTER;
    String scope = cmd.getScope();
    if (scope != null) {
        try {
            scopeType = Enum.valueOf(ScopeType.class, scope.toUpperCase());
        } catch (Exception e) {
            throw new InvalidParameterValueException("invalid scope for pool " + scope);
        }
    }
    if (scopeType == ScopeType.CLUSTER && clusterId == null) {
        throw new InvalidParameterValueException("cluster id can't be null, if scope is cluster");
    } else if (scopeType == ScopeType.ZONE && zoneId == null) {
        throw new InvalidParameterValueException("zone id can't be null, if scope is zone");
    }
    HypervisorType hypervisorType = HypervisorType.KVM;
    if (scopeType == ScopeType.ZONE) {
        // ignore passed clusterId and podId
        clusterId = null;
        podId = null;
        String hypervisor = cmd.getHypervisor();
        if (hypervisor != null) {
            try {
                hypervisorType = HypervisorType.getType(hypervisor);
            } catch (Exception e) {
                throw new InvalidParameterValueException("invalid hypervisor type " + hypervisor);
            }
        } else {
            throw new InvalidParameterValueException("Missing parameter hypervisor. Hypervisor type is required to create zone wide primary storage.");
        }
        if (hypervisorType != HypervisorType.KVM && hypervisorType != HypervisorType.VMware && hypervisorType != HypervisorType.Hyperv && hypervisorType != HypervisorType.LXC && hypervisorType != HypervisorType.Any) {
            throw new InvalidParameterValueException("zone wide storage pool is not supported for hypervisor type " + hypervisor);
        }
    }
    Map<String, String> details = extractApiParamAsMap(cmd.getDetails());
    DataCenterVO zone = _dcDao.findById(cmd.getZoneId());
    if (zone == null) {
        throw new InvalidParameterValueException("unable to find zone by id " + zoneId);
    }
    // Check if zone is disabled
    Account account = CallContext.current().getCallingAccount();
    if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(account.getId())) {
        throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zoneId);
    }
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("zoneId", zone.getId());
    params.put("clusterId", clusterId);
    params.put("podId", podId);
    params.put("url", cmd.getUrl());
    params.put("tags", cmd.getTags());
    params.put("name", cmd.getStoragePoolName());
    params.put("details", details);
    params.put("providerName", storeProvider.getName());
    params.put("managed", cmd.isManaged());
    params.put("capacityBytes", cmd.getCapacityBytes());
    params.put("capacityIops", cmd.getCapacityIops());
    DataStoreLifeCycle lifeCycle = storeProvider.getDataStoreLifeCycle();
    DataStore store = null;
    try {
        store = lifeCycle.initialize(params);
        if (scopeType == ScopeType.CLUSTER) {
            ClusterScope clusterScope = new ClusterScope(clusterId, podId, zoneId);
            lifeCycle.attachCluster(store, clusterScope);
        } else if (scopeType == ScopeType.ZONE) {
            ZoneScope zoneScope = new ZoneScope(zoneId);
            lifeCycle.attachZone(store, zoneScope, hypervisorType);
        }
    } catch (Exception e) {
        s_logger.debug("Failed to add data store: " + e.getMessage(), e);
        try {
            // Roll back the partially created data store; any failure of the rollback is only
            // logged below so the original error is the one reported to the caller.
            if (store != null) {
                lifeCycle.deleteDataStore(store);
            }
        } catch (Exception ex) {
            s_logger.debug("Failed to clean up storage pool: " + ex.getMessage());
        }
        throw new CloudRuntimeException("Failed to add data store: " + e.getMessage(), e);
    }
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(store.getId(), DataStoreRole.Primary);
}
Also used : DataCenterVO(com.cloud.dc.DataCenterVO) PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) Account(com.cloud.user.Account) HashMap(java.util.HashMap) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) ConnectionException(com.cloud.exception.ConnectionException) AgentUnavailableException(com.cloud.exception.AgentUnavailableException) OperationTimedoutException(com.cloud.exception.OperationTimedoutException) InsufficientCapacityException(com.cloud.exception.InsufficientCapacityException) StorageConflictException(com.cloud.exception.StorageConflictException) ResourceUnavailableException(com.cloud.exception.ResourceUnavailableException) StorageUnavailableException(com.cloud.exception.StorageUnavailableException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) UnknownHostException(java.net.UnknownHostException) ExecutionException(java.util.concurrent.ExecutionException) ResourceInUseException(com.cloud.exception.ResourceInUseException) URISyntaxException(java.net.URISyntaxException) DiscoveryException(com.cloud.exception.DiscoveryException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) ConfigurationException(javax.naming.ConfigurationException) PermissionDeniedException(com.cloud.exception.PermissionDeniedException) HypervisorType(com.cloud.hypervisor.Hypervisor.HypervisorType) ZoneScope(org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) ClusterScope(org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore)
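The PrimaryDataStoreInfo returned by createPool is the same interface the cluster-attach hooks in Examples 1 and 5 consume. A minimal sketch of reading its scope identifiers; the helper class is illustrative, while the accessors all appear in the examples on this page.

import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo;

// Illustrative sketch: summarizes where a newly created primary pool lives,
// using the accessors shown in Examples 1 and 5.
final class PoolInfoSketch {

    static String describe(PrimaryDataStoreInfo pool) {
        return "pool " + pool.getId()
                + " in zone " + pool.getDataCenterId()
                + ", pod " + pool.getPodId()
                + ", cluster " + pool.getClusterId();
    }
}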

Example 4 with PrimaryDataStoreInfo

use of org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo in project cloudstack by apache.

The class StorageManagerImpl, method cancelPrimaryStorageForMaintenance:

@Override
@DB
public PrimaryDataStoreInfo cancelPrimaryStorageForMaintenance(CancelPrimaryStorageMaintenanceCmd cmd) throws ResourceUnavailableException {
    Long primaryStorageId = cmd.getId();
    StoragePoolVO primaryStorage = _storagePoolDao.findById(primaryStorageId);
    if (primaryStorage == null) {
        String msg = "Unable to find primary storage with id " + primaryStorageId + " in cancelPrimaryStorageForMaintenance()";
        s_logger.error(msg);
        throw new InvalidParameterValueException(msg);
    }
    if (primaryStorage.getStatus().equals(StoragePoolStatus.Up) || primaryStorage.getStatus().equals(StoragePoolStatus.PrepareForMaintenance)) {
        throw new StorageUnavailableException("Primary storage with id " + primaryStorageId + " is not ready to complete migration, as the status is:" + primaryStorage.getStatus().toString(), primaryStorageId);
    }
    DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(primaryStorage.getStorageProviderName());
    DataStoreLifeCycle lifeCycle = provider.getDataStoreLifeCycle();
    DataStore store = _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
    lifeCycle.cancelMaintain(store);
    return (PrimaryDataStoreInfo) _dataStoreMgr.getDataStore(primaryStorage.getId(), DataStoreRole.Primary);
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) DataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle) PrimaryDataStoreLifeCycle(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle) StorageUnavailableException(com.cloud.exception.StorageUnavailableException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) DataStoreProvider(org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) StoragePoolVO(org.apache.cloudstack.storage.datastore.db.StoragePoolVO) DB(com.cloud.utils.db.DB)
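A minimal sketch of the state guard applied above, pulled out as a predicate; the helper class is illustrative, and the com.cloud.storage.StoragePoolStatus import path is assumed.

import com.cloud.storage.StoragePoolStatus;

import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

// Illustrative sketch: the state check performed by cancelPrimaryStorageForMaintenance
// in Example 4, expressed as a standalone predicate.
final class MaintenanceGuardSketch {

    static boolean canCancelMaintenance(StoragePoolVO pool) {
        // Pools that are Up, or still preparing for maintenance, are rejected;
        // cancellation only applies once the pool has moved past that point.
        StoragePoolStatus status = pool.getStatus();
        return !(status.equals(StoragePoolStatus.Up) || status.equals(StoragePoolStatus.PrepareForMaintenance));
    }
}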

Example 5 with PrimaryDataStoreInfo

use of org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo in project cloudstack by apache.

The class SolidFireSharedPrimaryDataStoreLifeCycle, method attachCluster:

@Override
public boolean attachCluster(DataStore store, ClusterScope scope) {
    PrimaryDataStoreInfo primaryDataStoreInfo = (PrimaryDataStoreInfo) store;
    // check if there is at least one host up in this cluster
    List<HostVO> allHosts = _resourceMgr.listAllUpAndEnabledHosts(Host.Type.Routing, primaryDataStoreInfo.getClusterId(), primaryDataStoreInfo.getPodId(), primaryDataStoreInfo.getDataCenterId());
    if (allHosts.isEmpty()) {
        _primaryDataStoreDao.expunge(primaryDataStoreInfo.getId());
        throw new CloudRuntimeException("No host up to associate a storage pool with in cluster " + primaryDataStoreInfo.getClusterId());
    }
    boolean success = false;
    for (HostVO host : allHosts) {
        success = createStoragePool(host, primaryDataStoreInfo);
        if (success) {
            break;
        }
    }
    if (!success) {
        throw new CloudRuntimeException("Unable to create storage in cluster " + primaryDataStoreInfo.getClusterId());
    }
    List<HostVO> poolHosts = new ArrayList<HostVO>();
    for (HostVO host : allHosts) {
        try {
            _storageMgr.connectHostToSharedPool(host.getId(), primaryDataStoreInfo.getId());
            poolHosts.add(host);
        } catch (Exception e) {
            s_logger.warn("Unable to establish a connection between " + host + " and " + primaryDataStoreInfo, e);
        }
    }
    if (poolHosts.isEmpty()) {
        s_logger.warn("No host can access storage pool '" + primaryDataStoreInfo + "' on cluster '" + primaryDataStoreInfo.getClusterId() + "'.");
        _primaryDataStoreDao.expunge(primaryDataStoreInfo.getId());
        throw new CloudRuntimeException("Failed to access storage pool");
    }
    _primaryDataStoreHelper.attachCluster(store);
    return true;
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) ArrayList(java.util.ArrayList) HostVO(com.cloud.host.HostVO) StoragePoolHostVO(com.cloud.storage.StoragePoolHostVO)
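Examples 1 and 5 wire hosts to the new pool with the same loop: try every up host, keep the ones that connect, and fail only if none do. A consolidated sketch of that pattern; the helper class is illustrative, and the com.cloud.storage.StorageManager import is assumed to match the injected storageMgr field used in those examples.

import java.util.ArrayList;
import java.util.List;

import com.cloud.host.HostVO;
import com.cloud.storage.StorageManager;
import com.cloud.utils.exception.CloudRuntimeException;

import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo;

// Illustrative sketch: the shared "connect every up host, fail only if none connected"
// pattern from Examples 1 and 5.
final class PoolHostWiringSketch {

    static List<HostVO> connectHosts(StorageManager storageMgr, List<HostVO> allHosts, PrimaryDataStoreInfo pool) {
        List<HostVO> poolHosts = new ArrayList<HostVO>();
        for (HostVO host : allHosts) {
            try {
                storageMgr.connectHostToSharedPool(host.getId(), pool.getId());
                poolHosts.add(host);
            } catch (Exception e) {
                // A single unreachable host is tolerated; the pool only fails if no host connects.
            }
        }
        if (poolHosts.isEmpty()) {
            throw new CloudRuntimeException("No host can access storage pool " + pool + " on cluster " + pool.getClusterId());
        }
        return poolHosts;
    }
}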

Aggregations

PrimaryDataStoreInfo (org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo): 9
InvalidParameterValueException (com.cloud.exception.InvalidParameterValueException): 5
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException): 5
StoragePoolVO (org.apache.cloudstack.storage.datastore.db.StoragePoolVO): 5
DataStoreLifeCycle (org.apache.cloudstack.engine.subsystem.api.storage.DataStoreLifeCycle): 4
DataStoreProvider (org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider): 4
PrimaryDataStoreLifeCycle (org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreLifeCycle): 4
StorageUnavailableException (com.cloud.exception.StorageUnavailableException): 3
HostVO (com.cloud.host.HostVO): 3
StoragePoolHostVO (com.cloud.storage.StoragePoolHostVO): 3
ArrayList (java.util.ArrayList): 3
DataStore (org.apache.cloudstack.engine.subsystem.api.storage.DataStore): 3
StorageConflictException (com.cloud.exception.StorageConflictException): 2
HypervisorType (com.cloud.hypervisor.Hypervisor.HypervisorType): 2
DB (com.cloud.utils.db.DB): 2
URISyntaxException (java.net.URISyntaxException): 2
HashMap (java.util.HashMap): 2
DataCenterVO (com.cloud.dc.DataCenterVO): 1
AgentUnavailableException (com.cloud.exception.AgentUnavailableException): 1
ConcurrentOperationException (com.cloud.exception.ConcurrentOperationException): 1