
Example 91 with HostVO

Use of com.cloud.host.HostVO in project cloudstack by apache.

The class GloboDnsElement, method callCommand.

// /////// Provider control methods ////////////
private Answer callCommand(Command cmd, Long zoneId) {
    HostVO globoDnsHost = getGloboDnsHost(zoneId);
    if (globoDnsHost == null) {
        throw new CloudRuntimeException("Could not find the GloboDNS resource");
    }
    Answer answer = _agentMgr.easySend(globoDnsHost.getId(), cmd);
    if (answer == null || !answer.getResult()) {
        String msg = "Error executing command " + cmd;
        msg = answer == null ? msg : answer.getDetails();
        throw new CloudRuntimeException(msg);
    }
    return answer;
}
Also used : Answer(com.cloud.agent.api.Answer) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) HostVO(com.cloud.host.HostVO)
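
The control flow above is a common CloudStack plug-in pattern: resolve the agent host, send the command, and fail fast when the Answer is null or unsuccessful. Below is a minimal standalone sketch of that pattern using only JDK types; SimpleAnswer, AgentClient, and CommandCaller are hypothetical stand-ins for the CloudStack classes, not part of the project.

interface SimpleAnswer {
    boolean getResult();
    String getDetails();
}

interface AgentClient {
    // Returns null when the agent could not be reached, mirroring easySend().
    SimpleAnswer send(long hostId, String command);
}

class CommandCaller {
    private final AgentClient client;

    CommandCaller(AgentClient client) {
        this.client = client;
    }

    SimpleAnswer call(long hostId, String command) {
        SimpleAnswer answer = client.send(hostId, command);
        if (answer == null || !answer.getResult()) {
            // Keep the generic message when there is no answer; otherwise surface the details.
            String msg = answer == null ? "Error executing command " + command : answer.getDetails();
            throw new RuntimeException(msg);
        }
        return answer;
    }
}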

Example 92 with HostVO

Use of com.cloud.host.HostVO in project cloudstack by apache.

The class SolidFirePrimaryDataStoreDriver, method grantAccess.

@Override
public boolean grantAccess(DataObject dataObject, Host host, DataStore dataStore) {
    Preconditions.checkArgument(dataObject != null, "'dataObject' should not be 'null'");
    Preconditions.checkArgument(host != null, "'host' should not be 'null'");
    Preconditions.checkArgument(dataStore != null, "'dataStore' should not be 'null'");
    long sfVolumeId = getSolidFireVolumeId(dataObject, true);
    long clusterId = host.getClusterId();
    long storagePoolId = dataStore.getId();
    ClusterVO cluster = clusterDao.findById(clusterId);
    GlobalLock lock = GlobalLock.getInternLock(cluster.getUuid());
    if (!lock.lock(SolidFireUtil.LOCK_TIME_IN_SECONDS)) {
        String errMsg = "Couldn't lock the DB (in grantAccess) on the following string: " + cluster.getUuid();
        LOGGER.warn(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
    try {
        List<HostVO> hosts = hostDao.findByClusterId(clusterId);
        String clusterUuId = clusterDao.findById(clusterId).getUuid();
        SolidFireUtil.SolidFireConnection sfConnection = SolidFireUtil.getSolidFireConnection(storagePoolId, storagePoolDetailsDao);
        SolidFireUtil.placeVolumeInVolumeAccessGroups(sfConnection, sfVolumeId, hosts, clusterUuId);
        return true;
    } finally {
        lock.unlock();
        lock.releaseRef();
    }
}
Also used : GlobalLock(com.cloud.utils.db.GlobalLock) ClusterVO(com.cloud.dc.ClusterVO) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) SolidFireUtil(org.apache.cloudstack.storage.datastore.util.SolidFireUtil) HostVO(com.cloud.host.HostVO)
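
The grantAccess body is a lock-guarded critical section: acquire a cluster-scoped lock with a timeout, do the work, and always release in a finally block. The sketch below shows the same shape with a plain JDK ReentrantLock standing in for CloudStack's DB-backed GlobalLock; the timeout constant is an assumed value used only for illustration.

import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

class AccessGroupGuard {
    // Assumed timeout; the original uses SolidFireUtil.LOCK_TIME_IN_SECONDS.
    private static final int LOCK_TIME_IN_SECONDS = 300;
    private final ReentrantLock lock = new ReentrantLock();

    void runLocked(Runnable criticalSection) throws InterruptedException {
        if (!lock.tryLock(LOCK_TIME_IN_SECONDS, TimeUnit.SECONDS)) {
            throw new IllegalStateException("Couldn't acquire the cluster lock");
        }
        try {
            // e.g. place the volume in the volume access group
            criticalSection.run();
        } finally {
            // GlobalLock additionally requires releaseRef() after unlock().
            lock.unlock();
        }
    }
}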

Example 93 with HostVO

Use of com.cloud.host.HostVO in project cloudstack by apache.

The class CloudStackPrimaryDataStoreLifeCycleImpl, method attachZone.

@Override
public boolean attachZone(DataStore dataStore, ZoneScope scope, HypervisorType hypervisorType) {
    List<HostVO> hosts = _resourceMgr.listAllUpAndEnabledHostsInOneZoneByHypervisor(hypervisorType, scope.getScopeId());
    s_logger.debug("In createPool. Attaching the pool to each of the hosts.");
    List<HostVO> poolHosts = new ArrayList<HostVO>();
    for (HostVO host : hosts) {
        try {
            storageMgr.connectHostToSharedPool(host.getId(), dataStore.getId());
            poolHosts.add(host);
        } catch (StorageConflictException se) {
            primaryDataStoreDao.expunge(dataStore.getId());
            throw new CloudRuntimeException("Storage has already been added as local storage to host: " + host.getName());
        } catch (Exception e) {
            s_logger.warn("Unable to establish a connection between " + host + " and " + dataStore, e);
        }
    }
    if (poolHosts.isEmpty()) {
        s_logger.warn("No host can access storage pool " + dataStore + " in this zone.");
        primaryDataStoreDao.expunge(dataStore.getId());
        throw new CloudRuntimeException("Failed to create storage pool as it is not accessible to hosts.");
    }
    dataStoreHelper.attachZone(dataStore, hypervisorType);
    return true;
}
Also used : CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) ArrayList(java.util.ArrayList) StorageConflictException(com.cloud.exception.StorageConflictException) HostVO(com.cloud.host.HostVO) StoragePoolHostVO(com.cloud.storage.StoragePoolHostVO) URISyntaxException(java.net.URISyntaxException) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
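
attachZone uses a try-each-host strategy: attempt to connect every up-and-enabled host to the pool, remember the ones that succeed, and roll the pool back if none do. A minimal sketch of that loop with hypothetical types follows; it omits the StorageConflictException special case, where the original expunges the pool and rethrows immediately.

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiPredicate;

class ZoneAttacher {
    List<Long> attach(List<Long> hostIds, long poolId, BiPredicate<Long, Long> connect) {
        List<Long> poolHosts = new ArrayList<Long>();
        for (Long hostId : hostIds) {
            try {
                if (connect.test(hostId, poolId)) {
                    poolHosts.add(hostId);
                }
            } catch (RuntimeException e) {
                // Mirrors the original: log and continue on a per-host connection failure.
            }
        }
        if (poolHosts.isEmpty()) {
            // Mirrors primaryDataStoreDao.expunge(...) followed by the CloudRuntimeException.
            throw new RuntimeException("Storage pool is not accessible to any host in the zone");
        }
        return poolHosts;
    }
}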

Example 94 with HostVO

Use of com.cloud.host.HostVO in project cloudstack by apache.

The class SolidFireSharedPrimaryDataStoreLifeCycle, method initialize.

// invoked to add primary storage that is based on the SolidFire plug-in
@Override
public DataStore initialize(Map<String, Object> dsInfos) {
    final String CAPACITY_IOPS = "capacityIops";
    String url = (String) dsInfos.get("url");
    Long zoneId = (Long) dsInfos.get("zoneId");
    Long podId = (Long) dsInfos.get("podId");
    Long clusterId = (Long) dsInfos.get("clusterId");
    String storagePoolName = (String) dsInfos.get("name");
    String providerName = (String) dsInfos.get("providerName");
    Long capacityBytes = (Long) dsInfos.get("capacityBytes");
    Long capacityIops = (Long) dsInfos.get(CAPACITY_IOPS);
    String tags = (String) dsInfos.get("tags");
    @SuppressWarnings("unchecked") Map<String, String> details = (Map<String, String>) dsInfos.get("details");
    if (podId == null) {
        throw new CloudRuntimeException("The Pod ID must be specified.");
    }
    if (clusterId == null) {
        throw new CloudRuntimeException("The Cluster ID must be specified.");
    }
    String storageVip = SolidFireUtil.getStorageVip(url);
    int storagePort = SolidFireUtil.getStoragePort(url);
    if (capacityBytes == null || capacityBytes <= 0) {
        throw new IllegalArgumentException("'capacityBytes' must be present and greater than 0.");
    }
    if (capacityIops == null || capacityIops <= 0) {
        throw new IllegalArgumentException("'capacityIops' must be present and greater than 0.");
    }
    HypervisorType hypervisorType = getHypervisorTypeForCluster(clusterId);
    if (!isSupportedHypervisorType(hypervisorType)) {
        throw new CloudRuntimeException(hypervisorType + " is not a supported hypervisor type.");
    }
    String datacenter = SolidFireUtil.getValue(SolidFireUtil.DATACENTER, url, false);
    if (HypervisorType.VMware.equals(hypervisorType) && datacenter == null) {
        throw new CloudRuntimeException("'Datacenter' must be set for hypervisor type of " + HypervisorType.VMware);
    }
    PrimaryDataStoreParameters parameters = new PrimaryDataStoreParameters();
    parameters.setType(getStorageType(hypervisorType));
    parameters.setZoneId(zoneId);
    parameters.setPodId(podId);
    parameters.setClusterId(clusterId);
    parameters.setName(storagePoolName);
    parameters.setProviderName(providerName);
    parameters.setManaged(false);
    parameters.setCapacityBytes(capacityBytes);
    parameters.setUsedBytes(0);
    parameters.setCapacityIops(capacityIops);
    parameters.setHypervisorType(hypervisorType);
    parameters.setTags(tags);
    parameters.setDetails(details);
    String managementVip = SolidFireUtil.getManagementVip(url);
    int managementPort = SolidFireUtil.getManagementPort(url);
    details.put(SolidFireUtil.MANAGEMENT_VIP, managementVip);
    details.put(SolidFireUtil.MANAGEMENT_PORT, String.valueOf(managementPort));
    String clusterAdminUsername = SolidFireUtil.getValue(SolidFireUtil.CLUSTER_ADMIN_USERNAME, url);
    String clusterAdminPassword = SolidFireUtil.getValue(SolidFireUtil.CLUSTER_ADMIN_PASSWORD, url);
    details.put(SolidFireUtil.CLUSTER_ADMIN_USERNAME, clusterAdminUsername);
    details.put(SolidFireUtil.CLUSTER_ADMIN_PASSWORD, clusterAdminPassword);
    if (capacityBytes < SolidFireUtil.MIN_VOLUME_SIZE) {
        capacityBytes = SolidFireUtil.MIN_VOLUME_SIZE;
    }
    long lMinIops = 100;
    long lMaxIops = 15000;
    long lBurstIops = 15000;
    try {
        String minIops = SolidFireUtil.getValue(SolidFireUtil.MIN_IOPS, url);
        if (minIops != null && minIops.trim().length() > 0) {
            lMinIops = Long.parseLong(minIops);
        }
    } catch (Exception ex) {
        LOGGER.info("[ignored] error getting Min IOPS: " + ex.getLocalizedMessage());
    }
    try {
        String maxIops = SolidFireUtil.getValue(SolidFireUtil.MAX_IOPS, url);
        if (maxIops != null && maxIops.trim().length() > 0) {
            lMaxIops = Long.parseLong(maxIops);
        }
    } catch (Exception ex) {
        LOGGER.info("[ignored] error getting Max IOPS: " + ex.getLocalizedMessage());
    }
    try {
        String burstIops = SolidFireUtil.getValue(SolidFireUtil.BURST_IOPS, url);
        if (burstIops != null && burstIops.trim().length() > 0) {
            lBurstIops = Long.parseLong(burstIops);
        }
    } catch (Exception ex) {
        LOGGER.info("[ignored] error getting Burst IOPS: " + ex.getLocalizedMessage());
    }
    if (lMinIops > lMaxIops) {
        throw new CloudRuntimeException("The parameter '" + SolidFireUtil.MIN_IOPS + "' must be less than or equal to the parameter '" + SolidFireUtil.MAX_IOPS + "'.");
    }
    if (lMaxIops > lBurstIops) {
        throw new CloudRuntimeException("The parameter '" + SolidFireUtil.MAX_IOPS + "' must be less than or equal to the parameter '" + SolidFireUtil.BURST_IOPS + "'.");
    }
    if (lMinIops != capacityIops) {
        throw new CloudRuntimeException("The parameter '" + CAPACITY_IOPS + "' must be equal to the parameter '" + SolidFireUtil.MIN_IOPS + "'.");
    }
    if (lMinIops > SolidFireUtil.MAX_MIN_IOPS_PER_VOLUME) {
        throw new CloudRuntimeException("This volume's Min IOPS cannot exceed " + NumberFormat.getInstance().format(SolidFireUtil.MAX_MIN_IOPS_PER_VOLUME) + " IOPS.");
    }
    if (lMaxIops > SolidFireUtil.MAX_IOPS_PER_VOLUME) {
        throw new CloudRuntimeException("This volume's Max IOPS cannot exceed " + NumberFormat.getInstance().format(SolidFireUtil.MAX_IOPS_PER_VOLUME) + " IOPS.");
    }
    if (lBurstIops > SolidFireUtil.MAX_IOPS_PER_VOLUME) {
        throw new CloudRuntimeException("This volume's Burst IOPS cannot exceed " + NumberFormat.getInstance().format(SolidFireUtil.MAX_IOPS_PER_VOLUME) + " IOPS.");
    }
    details.put(SolidFireUtil.MIN_IOPS, String.valueOf(lMinIops));
    details.put(SolidFireUtil.MAX_IOPS, String.valueOf(lMaxIops));
    details.put(SolidFireUtil.BURST_IOPS, String.valueOf(lBurstIops));
    SolidFireUtil.SolidFireConnection sfConnection = new SolidFireUtil.SolidFireConnection(managementVip, managementPort, clusterAdminUsername, clusterAdminPassword);
    SolidFireCreateVolume sfCreateVolume = createSolidFireVolume(sfConnection, storagePoolName, capacityBytes, lMinIops, lMaxIops, lBurstIops);
    SolidFireUtil.SolidFireVolume sfVolume = sfCreateVolume.getVolume();
    String iqn = sfVolume.getIqn();
    details.put(SolidFireUtil.VOLUME_ID, String.valueOf(sfVolume.getId()));
    parameters.setUuid(iqn);
    if (HypervisorType.VMware.equals(hypervisorType)) {
        String datastore = iqn.replace("/", "_");
        String path = "/" + datacenter + "/" + datastore;
        parameters.setHost("VMFS datastore: " + path);
        parameters.setPort(0);
        parameters.setPath(path);
        details.put(SolidFireUtil.DATASTORE_NAME, datastore);
        details.put(SolidFireUtil.IQN, iqn);
        details.put(SolidFireUtil.STORAGE_VIP, storageVip);
        details.put(SolidFireUtil.STORAGE_PORT, String.valueOf(storagePort));
    } else {
        parameters.setHost(storageVip);
        parameters.setPort(storagePort);
        parameters.setPath(iqn);
    }
    ClusterVO cluster = clusterDao.findById(clusterId);
    GlobalLock lock = GlobalLock.getInternLock(cluster.getUuid());
    if (!lock.lock(SolidFireUtil.LOCK_TIME_IN_SECONDS)) {
        String errMsg = "Couldn't lock the DB on the following string: " + cluster.getUuid();
        LOGGER.debug(errMsg);
        throw new CloudRuntimeException(errMsg);
    }
    DataStore dataStore = null;
    try {
        // this adds a row in the cloud.storage_pool table for this SolidFire volume
        dataStore = primaryDataStoreHelper.createPrimaryDataStore(parameters);
        // now that we have a DataStore (we need the id from the DataStore instance), we can create a Volume Access Group, if need be, and
        // place the newly created volume in the Volume Access Group
        List<HostVO> hosts = hostDao.findByClusterId(clusterId);
        String clusterUuId = clusterDao.findById(clusterId).getUuid();
        SolidFireUtil.placeVolumeInVolumeAccessGroups(sfConnection, sfVolume.getId(), hosts, clusterUuId);
        SolidFireUtil.SolidFireAccount sfAccount = sfCreateVolume.getAccount();
        Account csAccount = CallContext.current().getCallingAccount();
        SolidFireUtil.updateCsDbWithSolidFireAccountInfo(csAccount.getId(), sfAccount, dataStore.getId(), accountDetailsDao);
    } catch (Exception ex) {
        if (dataStore != null) {
            primaryDataStoreDao.expunge(dataStore.getId());
        }
        throw new CloudRuntimeException(ex.getMessage());
    } finally {
        lock.unlock();
        lock.releaseRef();
    }
    return dataStore;
}
Also used : Account(com.cloud.user.Account) ClusterVO(com.cloud.dc.ClusterVO) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) HostVO(com.cloud.host.HostVO) StoragePoolHostVO(com.cloud.storage.StoragePoolHostVO) HypervisorType(com.cloud.hypervisor.Hypervisor.HypervisorType) GlobalLock(com.cloud.utils.db.GlobalLock) PrimaryDataStoreParameters(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreParameters) DataStore(org.apache.cloudstack.engine.subsystem.api.storage.DataStore) SolidFireUtil(org.apache.cloudstack.storage.datastore.util.SolidFireUtil) Map(java.util.Map) HashMap(java.util.HashMap)
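
A large part of initialize is defensive parsing of the Min/Max/Burst IOPS values from the URL: each value falls back to a default when it is missing, blank, or malformed, and the parse error is logged and ignored. The sketch below condenses those three try/catch blocks into one helper; the class and method names are hypothetical.

class IopsDefaults {
    static long parseOrDefault(String raw, long defaultValue) {
        if (raw == null || raw.trim().isEmpty()) {
            return defaultValue;
        }
        try {
            return Long.parseLong(raw.trim());
        } catch (NumberFormatException ex) {
            // The original logs "[ignored] error getting ... IOPS" and keeps the default.
            return defaultValue;
        }
    }

    public static void main(String[] args) {
        System.out.println(parseOrDefault(null, 100));      // 100 (default Min IOPS)
        System.out.println(parseOrDefault("12000", 15000)); // 12000
        System.out.println(parseOrDefault("oops", 15000));  // 15000 (malformed value, default kept)
    }
}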

Example 95 with HostVO

Use of com.cloud.host.HostVO in project cloudstack by apache.

The class ScaleIOPrimaryDataStoreLifeCycle, method attachCluster.

@Override
public boolean attachCluster(DataStore dataStore, ClusterScope scope) {
    final ClusterVO cluster = clusterDao.findById(scope.getScopeId());
    if (!isSupportedHypervisorType(cluster.getHypervisorType())) {
        throw new CloudRuntimeException("Unsupported hypervisor type: " + cluster.getHypervisorType().toString());
    }
    List<String> connectedSdcIps = null;
    try {
        ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(dataStore.getId(), storagePoolDetailsDao);
        connectedSdcIps = client.listConnectedSdcIps();
    } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
        LOGGER.error("Failed to create storage pool", e);
        throw new CloudRuntimeException("Failed to establish connection with PowerFlex Gateway to create storage pool");
    }
    if (connectedSdcIps == null || connectedSdcIps.isEmpty()) {
        LOGGER.debug("No connected SDCs found for the PowerFlex storage pool");
        throw new CloudRuntimeException("Failed to create storage pool as connected SDCs not found");
    }
    PrimaryDataStoreInfo primaryDataStoreInfo = (PrimaryDataStoreInfo) dataStore;
    List<HostVO> hostsInCluster = resourceManager.listAllUpAndEnabledHosts(Host.Type.Routing, primaryDataStoreInfo.getClusterId(), primaryDataStoreInfo.getPodId(), primaryDataStoreInfo.getDataCenterId());
    if (hostsInCluster.isEmpty()) {
        primaryDataStoreDao.expunge(primaryDataStoreInfo.getId());
        throw new CloudRuntimeException("No hosts are Up to associate a storage pool with in cluster: " + primaryDataStoreInfo.getClusterId());
    }
    LOGGER.debug("Attaching the pool to each of the hosts in the cluster: " + primaryDataStoreInfo.getClusterId());
    List<HostVO> poolHosts = new ArrayList<HostVO>();
    for (HostVO host : hostsInCluster) {
        try {
            if (connectedSdcIps.contains(host.getPrivateIpAddress())) {
                storageMgr.connectHostToSharedPool(host.getId(), primaryDataStoreInfo.getId());
                poolHosts.add(host);
            }
        } catch (Exception e) {
            LOGGER.warn("Unable to establish a connection between " + host + " and " + primaryDataStoreInfo, e);
        }
    }
    if (poolHosts.isEmpty()) {
        LOGGER.warn("No host can access storage pool '" + primaryDataStoreInfo + "' on cluster '" + primaryDataStoreInfo.getClusterId() + "'.");
        primaryDataStoreDao.expunge(primaryDataStoreInfo.getId());
        throw new CloudRuntimeException("Failed to create storage pool in the cluster: " + primaryDataStoreInfo.getClusterId() + " as it is not accessible to hosts");
    }
    dataStoreHelper.attachCluster(dataStore);
    return true;
}
Also used : PrimaryDataStoreInfo(org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo) ClusterVO(com.cloud.dc.ClusterVO) ArrayList(java.util.ArrayList) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) URISyntaxException(java.net.URISyntaxException) KeyManagementException(java.security.KeyManagementException) HostVO(com.cloud.host.HostVO) StoragePoolHostVO(com.cloud.storage.StoragePoolHostVO) InvalidParameterValueException(com.cloud.exception.InvalidParameterValueException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) CloudRuntimeException(com.cloud.utils.exception.CloudRuntimeException) ScaleIOGatewayClient(org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient)
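
attachCluster only connects hosts whose private IP appears in the list of SDCs currently connected to the PowerFlex gateway, and rolls the pool back when no host qualifies. Below is a standalone sketch of that filter with plain JDK types; the method and parameter names are hypothetical stand-ins.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

class SdcFilter {
    static List<String> connectableHostIps(List<String> hostPrivateIps, Set<String> connectedSdcIps) {
        List<String> poolHostIps = new ArrayList<String>();
        for (String ip : hostPrivateIps) {
            if (connectedSdcIps.contains(ip)) {
                // The original calls storageMgr.connectHostToSharedPool(...) here.
                poolHostIps.add(ip);
            }
        }
        if (poolHostIps.isEmpty()) {
            // The original expunges the pool and throws a CloudRuntimeException.
            throw new RuntimeException("Storage pool is not accessible to any host in the cluster");
        }
        return poolHostIps;
    }
}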

Aggregations

HostVO (com.cloud.host.HostVO): 631
CloudRuntimeException (com.cloud.utils.exception.CloudRuntimeException): 223
ArrayList (java.util.ArrayList): 178
Answer (com.cloud.agent.api.Answer): 105
AgentUnavailableException (com.cloud.exception.AgentUnavailableException): 95
StoragePoolHostVO (com.cloud.storage.StoragePoolHostVO): 91
Test (org.junit.Test): 81
InvalidParameterValueException (com.cloud.exception.InvalidParameterValueException): 75
OperationTimedoutException (com.cloud.exception.OperationTimedoutException): 74
ClusterVO (com.cloud.dc.ClusterVO): 72
Account (com.cloud.user.Account): 67
HashMap (java.util.HashMap): 67
VMInstanceVO (com.cloud.vm.VMInstanceVO): 60
ConfigurationException (javax.naming.ConfigurationException): 60
NoTransitionException (com.cloud.utils.fsm.NoTransitionException): 58
DataCenterVO (com.cloud.dc.DataCenterVO): 50
PhysicalNetworkVO (com.cloud.network.dao.PhysicalNetworkVO): 48
HostPodVO (com.cloud.dc.HostPodVO): 47
DB (com.cloud.utils.db.DB): 47
StoragePoolVO (org.apache.cloudstack.storage.datastore.db.StoragePoolVO): 46