
  • Right now we cannot build and run DAOS from source, so make sure to run the tests against the RPM install.
  • Install the DAOS RPMs using the command:
sudo yum install -y daos-client daos-tests daos daos-server daos-debuginfo
  • The test code is installed under /usr/lib/daos/TESTING/ftest/, so change into that directory:
cd /usr/lib/daos/TESTING/ftest/
  • Apply the code changes below to the two framework files:


--- util/server_utils_params.py 2021-07-01 21:07:41.391147255 +0000
+++ /home/samirrav/daos_CentOS8/src/tests/ftest/util/server_utils_params.py     2021-07-01 01:32:40.367654887 +0000
@@ -106,7 +106,6 @@

         self.provider = BasicParameter(None, default_provider)
         self.hyperthreads = BasicParameter(None, False)
-        self.disable_vmd = BasicParameter(None, True)
         self.socket_dir = BasicParameter(None, "/var/run/daos_server")
         self.nr_hugepages = BasicParameter(None, 4096)
         self.control_log_mask = BasicParameter(None, "DEBUG")

--- util/command_utils_base.py  2021-07-01 20:52:19.893442825 +0000
+++ /home/samirrav/daos_CentOS8/src/tests/ftest/util/command_utils_base.py      2021-07-01 01:32:40.320654151 +0000
@@ -415,7 +415,7 @@
             yaml_data = {}
         for name in self.get_param_names():
             value = getattr(self, name).value
-            if value is not None:
+            if value is not None and value is not False:
                 yaml_data[name] = value

         return yaml_data if self.title is None else {self.title: yaml_data}
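
Taken together, these two patches let the test yaml control VMD: the first removes the framework's hard-coded disable_vmd default of True, and the second keeps any parameter whose value is False out of the yaml that gets generated for daos_server. Below is a minimal, self-contained sketch of that filtering behaviour; the helper name and plain-dict parameters are illustrative only, not the framework's actual BasicParameter code.

def filter_yaml_data(params, title=None):
    """Drop parameters whose value is None or False before yaml generation."""
    yaml_data = {}
    for name, value in params.items():
        # The patched condition: a False value is omitted entirely, so a
        # test yaml entry such as "disable_vmd: false" never appears in the
        # server config file handed to daos_server.
        if value is not None and value is not False:
            yaml_data[name] = value
    return yaml_data if title is None else {title: yaml_data}

print(filter_yaml_data({"disable_vmd": False, "nr_hugepages": 4096}))
# -> {'nr_hugepages': 4096}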
  • Update the test yaml file nvme/nvme_health.yaml as shown below:
--- /home/samirrav/daos_CentOS8/src/tests/ftest/nvme/nvme_health.yaml   2021-07-01 01:32:40.148651459 +0000
+++ nvme/nvme_health.yaml       2021-07-01 22:18:24.236068631 +0000
@@ -1,10 +1,10 @@
 hosts:
   test_servers:
     - server-A
-    - server-B
   test_clients:
     - client-C
 timeout: 900
+disable_vmd: false
 server_config:
   engines_per_host: 2
   name: daos_server
@@ -12,24 +12,24 @@
     0:
       pinned_numa_node: 0
       nr_xs_helpers: 1
-      fabric_iface: ib0
-      fabric_iface_port: 31317
+      fabric_iface: eth0
+      fabric_iface_port: 31416
       log_file: daos_server0.log
       bdev_class: nvme
-      bdev_list: ["0000:81:00.0"]
-      scm_class: dcpm
-      scm_list: ["/dev/pmem0"]
+      bdev_list: ["0000:5d:05.5"]
+      scm_class: ram
+      scm_size: 32
       scm_mount: /mnt/daos0
     1:
       pinned_numa_node: 1
       nr_xs_helpers: 1
-      fabric_iface: ib1
-      fabric_iface_port: 31417
+      fabric_iface: eth0
+      fabric_iface_port: 31517
       log_file: daos_server1.log
       bdev_class: nvme
-      bdev_list: ["0000:da:00.0"]
-      scm_class: dcpm
-      scm_list: ["/dev/pmem1"]
+      bdev_list: ["0000:85:05.5"]
+      scm_class: ram
+      scm_size: 32
       scm_mount: /mnt/daos1
   transport_config:
     allow_insecure: True
@@ -45,3 +45,4 @@
     control_method: dmg
     number_of_pools: 40
     pool_used_percentage: 75
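
These yaml edits move both engines from Infiniband plus Optane persistent memory to eth0 plus ram-backed SCM, point bdev_list at what appear to be VMD controller addresses, and add the top-level disable_vmd: false entry that the patched framework now honours. A quick sanity check before launching (a sketch only, assuming PyYAML is installed on the node; it just confirms the override landed in the file):

import yaml  # PyYAML

with open("nvme/nvme_health.yaml") as f:
    cfg = yaml.safe_load(f)

# disable_vmd must be present and False: the patched framework drops
# False-valued parameters from the generated server yaml, which leaves
# VMD enabled on the daos_server side.
print("disable_vmd:", cfg.get("disable_vmd"))
print("timeout:", cfg.get("timeout"))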

Execute the Avocado launch command to run the nvme_health test:

  • ./launch.py -c -tc=wolf-[157] -ts=wolf-[157] nvme_health



