Usage of org.apache.cloudstack.storage.datastore.api.StoragePoolStatistics in the Apache CloudStack project:
the initialize method of the ScaleIOPrimaryDataStoreLifeCycle class.
@SuppressWarnings("unchecked")
@Override
public DataStore initialize(Map<String, Object> dsInfos) {
    // Extract the user-supplied datastore parameters.
    String url = (String) dsInfos.get("url");
    Long zoneId = (Long) dsInfos.get("zoneId");
    Long podId = (Long) dsInfos.get("podId");
    Long clusterId = (Long) dsInfos.get("clusterId");
    String dataStoreName = (String) dsInfos.get("name");
    String providerName = (String) dsInfos.get("providerName");
    Long capacityBytes = (Long) dsInfos.get("capacityBytes");
    Long capacityIops = (Long) dsInfos.get("capacityIops");
    String tags = (String) dsInfos.get("tags");
    Map<String, String> details = (Map<String, String>) dsInfos.get("details");
    if (zoneId == null) {
        throw new CloudRuntimeException("Zone Id must be specified.");
    }
    PrimaryDataStoreParameters parameters = new PrimaryDataStoreParameters();
    if (clusterId != null) {
        // Primary datastore is cluster-wide, check and set the podId and clusterId parameters
        if (podId == null) {
            throw new CloudRuntimeException("Pod Id must also be specified when the Cluster Id is specified for Cluster-wide primary storage.");
        }
        Hypervisor.HypervisorType hypervisorType = getHypervisorTypeForCluster(clusterId);
        if (!isSupportedHypervisorType(hypervisorType)) {
            throw new CloudRuntimeException("Unsupported hypervisor type: " + hypervisorType.toString());
        }
        parameters.setPodId(podId);
        parameters.setClusterId(clusterId);
    } else if (podId != null) {
        throw new CloudRuntimeException("Cluster Id must also be specified when the Pod Id is specified for Cluster-wide primary storage.");
    }
    // Parse and validate the PowerFlex URL: powerflex://username:password@gatewayhost/pool
    URI uri = null;
    try {
        uri = new URI(UriUtils.encodeURIComponent(url));
        if (uri.getScheme() == null || !uri.getScheme().equalsIgnoreCase("powerflex")) {
            throw new InvalidParameterValueException("scheme is invalid for url: " + url + ", should be powerflex://username:password@gatewayhost/pool");
        }
    } catch (Exception ignored) {
        // Any parse failure (URISyntaxException, bad scheme) is reported uniformly as an invalid URI.
        throw new InvalidParameterValueException(url + " is not a valid uri");
    }
    // The URI path (minus its leading '/') is the ScaleIO/PowerFlex storage pool name.
    String storagePoolName = null;
    try {
        storagePoolName = URLDecoder.decode(uri.getPath(), "UTF-8");
    } catch (UnsupportedEncodingException e) {
        LOGGER.error("[ignored] we are on a platform not supporting \"UTF-8\"!?!", e);
    }
    if (storagePoolName == null) {
        // if decoding fails, use getPath() anyway
        storagePoolName = uri.getPath();
    }
    storagePoolName = storagePoolName.replaceFirst("/", "");
    // Build the gateway REST endpoint; omit the port component when none was supplied.
    final String storageHost = uri.getHost();
    final int port = uri.getPort();
    String gatewayApiURL = null;
    if (port == -1) {
        gatewayApiURL = String.format("https://%s/api", storageHost);
    } else {
        gatewayApiURL = String.format("https://%s:%d/api", storageHost, port);
    }
    // Validate the credentials portion explicitly: uri.getUserInfo() is null when the URL has
    // no "user:password@" part, and a missing ':' means no password was supplied. Splitting
    // with limit 2 also preserves passwords that themselves contain a ':' character.
    final String userInfo = uri.getUserInfo();
    if (userInfo == null || userInfo.indexOf(':') < 0) {
        throw new InvalidParameterValueException("Invalid credentials in url: " + url + ", should be powerflex://username:password@gatewayhost/pool");
    }
    final String[] userCredentials = userInfo.split(":", 2);
    final String gatewayUsername = userCredentials[0];
    final String gatewayPassword = userCredentials[1];
    // Reject the request if a PowerFlex pool with the same gateway endpoint and pool name already exists.
    List<StoragePoolVO> storagePoolVO = primaryDataStoreDao.findPoolsByProvider(ScaleIOUtil.PROVIDER_NAME);
    if (CollectionUtils.isNotEmpty(storagePoolVO)) {
        for (StoragePoolVO poolVO : storagePoolVO) {
            Map<String, String> poolDetails = primaryDataStoreDao.getDetails(poolVO.getId());
            String poolUrl = poolDetails.get(ScaleIOGatewayClient.GATEWAY_API_ENDPOINT);
            String poolName = poolDetails.get(ScaleIOGatewayClient.STORAGE_POOL_NAME);
            if (gatewayApiURL.equals(poolUrl) && storagePoolName.equals(poolName)) {
                throw new IllegalArgumentException("PowerFlex storage pool: " + storagePoolName + " already exists, please specify other storage pool.");
            }
        }
    }
    // Look up the pool on the PowerFlex gateway to obtain its id, system id and statistics.
    final org.apache.cloudstack.storage.datastore.api.StoragePool scaleIOPool = this.findStoragePool(gatewayApiURL, gatewayUsername, gatewayPassword, storagePoolName);
    parameters.setZoneId(zoneId);
    parameters.setName(dataStoreName);
    parameters.setProviderName(providerName);
    parameters.setManaged(true);
    parameters.setHost(storageHost);
    parameters.setPath(scaleIOPool.getId());
    parameters.setUserInfo(userInfo);
    parameters.setType(Storage.StoragePoolType.PowerFlex);
    parameters.setHypervisorType(Hypervisor.HypervisorType.KVM);
    parameters.setUuid(UUID.randomUUID().toString());
    parameters.setTags(tags);
    // Use pool statistics for the capacity figures unless the caller overrode capacityBytes.
    StoragePoolStatistics poolStatistics = scaleIOPool.getStatistics();
    if (poolStatistics != null) {
        if (capacityBytes == null) {
            parameters.setCapacityBytes(poolStatistics.getNetMaxCapacityInBytes());
        }
        parameters.setUsedBytes(poolStatistics.getNetUsedCapacityInBytes());
    }
    if (capacityBytes != null) {
        parameters.setCapacityBytes(capacityBytes);
    }
    if (capacityIops != null) {
        parameters.setCapacityIops(capacityIops);
    }
    // NOTE(review): 'details' is assumed non-null here, as in the original code — confirm callers always supply it.
    // Credentials are encrypted before being persisted as pool details.
    details.put(ScaleIOGatewayClient.GATEWAY_API_ENDPOINT, gatewayApiURL);
    details.put(ScaleIOGatewayClient.GATEWAY_API_USERNAME, DBEncryptionUtil.encrypt(gatewayUsername));
    details.put(ScaleIOGatewayClient.GATEWAY_API_PASSWORD, DBEncryptionUtil.encrypt(gatewayPassword));
    details.put(ScaleIOGatewayClient.STORAGE_POOL_NAME, storagePoolName);
    details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, scaleIOPool.getSystemId());
    parameters.setDetails(details);
    return dataStoreHelper.createPrimaryDataStore(parameters);
}
Usage of org.apache.cloudstack.storage.datastore.api.StoragePoolStatistics in the Apache CloudStack project:
the getStorageStats method of the ScaleIOPrimaryDataStoreDriver class.
@Override
public Pair<Long, Long> getStorageStats(StoragePool storagePool) {
    Preconditions.checkArgument(storagePool != null, "storagePool cannot be null");
    try {
        // Query the PowerFlex gateway for the statistics of the pool backing this datastore.
        final ScaleIOGatewayClient client = getScaleIOClient(storagePool.getId());
        final StoragePoolStatistics stats = client.getStoragePoolStatistics(storagePool.getPath());
        // Both figures must be present; otherwise report no stats (null) to the caller.
        if (stats == null || stats.getNetMaxCapacityInBytes() == null || stats.getNetUsedCapacityInBytes() == null) {
            return null;
        }
        // Pair of (total capacity, used capacity) in bytes.
        return new Pair<>(stats.getNetMaxCapacityInBytes(), stats.getNetUsedCapacityInBytes());
    } catch (Exception e) {
        String errMsg = "Unable to get storage stats for the pool: " + storagePool.getId() + " due to " + e.getMessage();
        LOGGER.warn(errMsg);
        // Rethrow with the original exception as cause so the stack trace is preserved.
        throw new CloudRuntimeException(errMsg, e);
    }
}
Aggregations