diff --git a/api/src/main/java/org/apache/cloudstack/api/response/StoragePoolResponse.java b/api/src/main/java/org/apache/cloudstack/api/response/StoragePoolResponse.java
index abc674ff0f9..7867c685bba 100644
--- a/api/src/main/java/org/apache/cloudstack/api/response/StoragePoolResponse.java
+++ b/api/src/main/java/org/apache/cloudstack/api/response/StoragePoolResponse.java
@@ -77,19 +77,24 @@ public class StoragePoolResponse extends BaseResponseWithAnnotations {
     @Param(description = "the name of the cluster for the storage pool")
     private String clusterName;
 
+    @SerializedName(ApiConstants.CAPACITY_BYTES)
+    @Param(description = "bytes CloudStack can provision from this storage pool", since = "4.22.0")
+    private Long capacityBytes;
+
+    @Deprecated(since = "4.22.0")
     @SerializedName("disksizetotal")
     @Param(description = "the total disk size of the storage pool")
     private Long diskSizeTotal;
 
     @SerializedName("disksizeallocated")
-    @Param(description = "the host's currently allocated disk size")
+    @Param(description = "the pool's currently allocated disk size")
     private Long diskSizeAllocated;
 
     @SerializedName("disksizeused")
-    @Param(description = "the host's currently used disk size")
+    @Param(description = "the pool's currently used disk size")
     private Long diskSizeUsed;
 
-    @SerializedName("capacityiops")
+    @SerializedName(ApiConstants.CAPACITY_IOPS)
     @Param(description = "IOPS CloudStack can provision from this storage pool")
     private Long capacityIops;
 
@@ -288,6 +293,14 @@ public class StoragePoolResponse extends BaseResponseWithAnnotations {
         this.clusterName = clusterName;
     }
 
+    public Long getCapacityBytes() {
+        return capacityBytes;
+    }
+
+    public void setCapacityBytes(Long capacityBytes) {
+        this.capacityBytes = capacityBytes;
+    }
+
     public Long getDiskSizeTotal() {
         return diskSizeTotal;
     }
diff --git a/engine/api/src/main/java/org/apache/cloudstack/engine/subsystem/api/storage/PrimaryDataStoreLifeCycle.java b/engine/api/src/main/java/org/apache/cloudstack/engine/subsystem/api/storage/PrimaryDataStoreLifeCycle.java
index 54f3c63f8d7..1acaccf09df 100644
--- a/engine/api/src/main/java/org/apache/cloudstack/engine/subsystem/api/storage/PrimaryDataStoreLifeCycle.java
+++ b/engine/api/src/main/java/org/apache/cloudstack/engine/subsystem/api/storage/PrimaryDataStoreLifeCycle.java
@@ -24,8 +24,8 @@ import com.cloud.hypervisor.Hypervisor;
 import com.cloud.storage.StoragePool;
 
 public interface PrimaryDataStoreLifeCycle extends DataStoreLifeCycle {
-    public static final String CAPACITY_BYTES = "capacityBytes";
-    public static final String CAPACITY_IOPS = "capacityIops";
+    String CAPACITY_BYTES = "capacityBytes";
+    String CAPACITY_IOPS = "capacityIops";
 
     void updateStoragePool(StoragePool storagePool, Map<String, String> details);
     void enableStoragePool(DataStore store);
diff --git a/engine/schema/src/main/java/org/apache/cloudstack/storage/datastore/db/PrimaryDataStoreDaoImpl.java b/engine/schema/src/main/java/org/apache/cloudstack/storage/datastore/db/PrimaryDataStoreDaoImpl.java
index 8b230d03154..6da02d7716b 100644
--- a/engine/schema/src/main/java/org/apache/cloudstack/storage/datastore/db/PrimaryDataStoreDaoImpl.java
+++ b/engine/schema/src/main/java/org/apache/cloudstack/storage/datastore/db/PrimaryDataStoreDaoImpl.java
@@ -320,6 +320,9 @@ public class PrimaryDataStoreDaoImpl extends GenericDaoBase
         pool = super.persist(pool);
         if (details != null) {
             for (Map.Entry<String, String> detail : details.entrySet()) {
+                if (detail.getKey().toLowerCase().contains("password") || detail.getKey().toLowerCase().contains("token")) {
+                    displayDetails = false;
+                }
                 StoragePoolDetailVO vo = new StoragePoolDetailVO(pool.getId(), detail.getKey(), detail.getValue(), displayDetails);
                 _detailsDao.persist(vo);
             }
diff --git a/engine/schema/src/main/resources/META-INF/db/schema-42100to42200.sql b/engine/schema/src/main/resources/META-INF/db/schema-42100to42200.sql
index 62ae10b7cc9..d6087ed9a5f 100644
--- a/engine/schema/src/main/resources/META-INF/db/schema-42100to42200.sql
+++ b/engine/schema/src/main/resources/META-INF/db/schema-42100to42200.sql
@@ -34,3 +34,7 @@ UPDATE `cloud`.`ldap_configuration` SET uuid = UUID() WHERE uuid IS NULL OR uuid
 
 -- Add the column cross_zone_instance_creation to cloud.backup_repository. if enabled it means that new Instance can be created on all Zones from Backups on this Repository.
 CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.backup_repository', 'cross_zone_instance_creation', 'TINYINT(1) DEFAULT NULL COMMENT ''Backup Repository can be used for disaster recovery on another zone''');
+
+-- Updated display to false for password/token detail of the storage pool details
+UPDATE `cloud`.`storage_pool_details` SET display = 0 WHERE name LIKE '%password%';
+UPDATE `cloud`.`storage_pool_details` SET display = 0 WHERE name LIKE '%token%';
diff --git a/engine/storage/src/main/java/org/apache/cloudstack/storage/volume/datastore/PrimaryDataStoreHelper.java b/engine/storage/src/main/java/org/apache/cloudstack/storage/volume/datastore/PrimaryDataStoreHelper.java
index 32a321c2619..d17dae132a0 100644
--- a/engine/storage/src/main/java/org/apache/cloudstack/storage/volume/datastore/PrimaryDataStoreHelper.java
+++ b/engine/storage/src/main/java/org/apache/cloudstack/storage/volume/datastore/PrimaryDataStoreHelper.java
@@ -85,8 +85,7 @@ public class PrimaryDataStoreHelper {
     DataStoreProviderManager dataStoreProviderMgr;
 
     public DataStore createPrimaryDataStore(PrimaryDataStoreParameters params) {
-        if(params == null)
-        {
+        if (params == null) {
             throw new InvalidParameterValueException("createPrimaryDataStore: Input params is null, please check");
         }
         StoragePoolVO dataStoreVO = dataStoreDao.findPoolByUUID(params.getUuid());
diff --git a/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/lifecycle/CloudStackPrimaryDataStoreLifeCycleImpl.java b/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/lifecycle/CloudStackPrimaryDataStoreLifeCycleImpl.java
index 2b44b0b8c53..a98ea8eea3c 100644
--- a/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/lifecycle/CloudStackPrimaryDataStoreLifeCycleImpl.java
+++ b/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/lifecycle/CloudStackPrimaryDataStoreLifeCycleImpl.java
@@ -18,6 +18,7 @@
  */
 package org.apache.cloudstack.storage.datastore.lifecycle;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -139,7 +140,6 @@ public class CloudStackPrimaryDataStoreLifeCycleImpl extends BasePrimaryDataStor
         Long clusterId = (Long)dsInfos.get("clusterId");
         Long podId = (Long)dsInfos.get("podId");
         Long zoneId = (Long)dsInfos.get("zoneId");
-        String url = (String)dsInfos.get("url");
         String providerName = (String)dsInfos.get("providerName");
         HypervisorType hypervisorType = (HypervisorType)dsInfos.get("hypervisorType");
         if (clusterId != null && podId == null) {
@@ -148,19 +148,43 @@ public class CloudStackPrimaryDataStoreLifeCycleImpl extends BasePrimaryDataStor
 
         PrimaryDataStoreParameters parameters = new PrimaryDataStoreParameters();
 
-        String tags = (String)dsInfos.get("tags");
-        String storageAccessGroups = (String)dsInfos.get(ApiConstants.STORAGE_ACCESS_GROUPS);
         Map<String, String> details = (Map<String, String>)dsInfos.get("details");
+        if (dsInfos.get("capacityBytes") != null) {
+            Long capacityBytes = (Long)dsInfos.get("capacityBytes");
+            if (capacityBytes <= 0) {
+                throw new IllegalArgumentException("'capacityBytes' must be greater than 0.");
+            }
+            if (details == null) {
+                details = new HashMap<>();
+            }
+            details.put(PrimaryDataStoreLifeCycle.CAPACITY_BYTES, String.valueOf(capacityBytes));
+            parameters.setCapacityBytes(capacityBytes);
+        }
+
+        if (dsInfos.get("capacityIops") != null) {
+            Long capacityIops = (Long)dsInfos.get("capacityIops");
+            if (capacityIops <= 0) {
+                throw new IllegalArgumentException("'capacityIops' must be greater than 0.");
+            }
+            if (details == null) {
+                details = new HashMap<>();
+            }
+            details.put(PrimaryDataStoreLifeCycle.CAPACITY_IOPS, String.valueOf(capacityIops));
+            parameters.setCapacityIops(capacityIops);
+        }
 
-        parameters.setTags(tags);
-        parameters.setStorageAccessGroups(storageAccessGroups);
-        parameters.setIsTagARule((Boolean)dsInfos.get("isTagARule"));
         parameters.setDetails(details);
 
+        String tags = (String)dsInfos.get("tags");
+        parameters.setTags(tags);
+        parameters.setIsTagARule((Boolean)dsInfos.get("isTagARule"));
+
+        String storageAccessGroups = (String)dsInfos.get(ApiConstants.STORAGE_ACCESS_GROUPS);
+        parameters.setStorageAccessGroups(storageAccessGroups);
+
         String scheme = dsInfos.get("scheme").toString();
         String storageHost = dsInfos.get("host").toString();
         String hostPath = dsInfos.get("hostPath").toString();
-        String uri = String.format("%s://%s%s", scheme, storageHost, hostPath);
 
         Object localStorage = dsInfos.get("localStorage");
         if (localStorage != null) {
diff --git a/server/src/main/java/com/cloud/api/query/dao/StoragePoolJoinDaoImpl.java b/server/src/main/java/com/cloud/api/query/dao/StoragePoolJoinDaoImpl.java
index ce38727e42e..8bfce47b120 100644
--- a/server/src/main/java/com/cloud/api/query/dao/StoragePoolJoinDaoImpl.java
+++ b/server/src/main/java/com/cloud/api/query/dao/StoragePoolJoinDaoImpl.java
@@ -40,6 +40,7 @@ import org.springframework.stereotype.Component;
 import com.cloud.api.ApiDBUtils;
 import com.cloud.api.query.vo.StoragePoolJoinVO;
 import com.cloud.capacity.CapacityManager;
+import com.cloud.server.ResourceTag;
 import com.cloud.storage.DataStoreRole;
 import com.cloud.storage.ScopeType;
 import com.cloud.storage.Storage;
@@ -152,6 +153,7 @@ public class StoragePoolJoinDaoImpl extends GenericDaoBase
+        Map<String, String> details = ApiDBUtils.getResourceDetails(pool.getId(), ResourceTag.ResourceObjectType.Storage);
+        poolResponse.setDetails(details);
 
         // set async job
         if (pool.getJobId() != null) {
@@ -252,6 +256,7 @@ public class StoragePoolJoinDaoImpl extends GenericDaoBase
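
Reviewer note, not part of the patch: the sketch below restates the two behaviours this diff introduces, rejecting non-positive capacityBytes/capacityIops values before they are recorded as pool details, and persisting any storage pool detail whose key contains "password" or "token" with its display flag set to false. It is a minimal plain-Java illustration; the class and method names (StoragePoolCapacitySketch, applyCapacity, isDisplayable) are hypothetical and do not exist in the CloudStack code base, and the per-key display check here only approximates the DAO change, which flips a shared displayDetails flag inside the loop.

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: mirrors the validation and masking rules added by the
// patch, without the CloudStack types (PrimaryDataStoreParameters, StoragePoolDetailVO, ...).
public class StoragePoolCapacitySketch {

    // Mirrors the keys defined in PrimaryDataStoreLifeCycle.
    static final String CAPACITY_BYTES = "capacityBytes";
    static final String CAPACITY_IOPS = "capacityIops";

    // Reject non-positive capacity values and record the accepted value as a pool detail,
    // in the spirit of the initialize() change in CloudStackPrimaryDataStoreLifeCycleImpl.
    static Map<String, String> applyCapacity(Map<String, String> details, String key, Long value) {
        if (value == null) {
            return details;
        }
        if (value <= 0) {
            throw new IllegalArgumentException("'" + key + "' must be greater than 0.");
        }
        if (details == null) {
            details = new HashMap<>();
        }
        details.put(key, String.valueOf(value));
        return details;
    }

    // Approximates the intent of the persist() change: details whose keys contain
    // "password" or "token" should be stored with display = false.
    static boolean isDisplayable(String detailKey) {
        String k = detailKey.toLowerCase();
        return !(k.contains("password") || k.contains("token"));
    }

    public static void main(String[] args) {
        Map<String, String> details = applyCapacity(null, CAPACITY_BYTES, 10_737_418_240L);
        details = applyCapacity(details, CAPACITY_IOPS, 5000L);
        details.put("storagepassword", "secret");

        // Prints each detail with the display flag it would receive.
        details.forEach((k, v) ->
                System.out.printf("%s -> %s (display=%b)%n", k, v, isDisplayable(k)));
    }
}
```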