From 50258bf53a1dd73537e05a104518960c0c6b8030 Mon Sep 17 00:00:00 2001
From: Neetha John
Date: Wed, 2 Dec 2020 13:02:25 -0800
Subject: [PATCH 1/4] Retrieve headroom pool watermark in saitests

Signed-off-by: Neetha John
---
 tests/saitests/switch.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/tests/saitests/switch.py b/tests/saitests/switch.py
index 5c61342b3ca..e844378e5d7 100644
--- a/tests/saitests/switch.py
+++ b/tests/saitests/switch.py
@@ -758,6 +758,17 @@ def sai_thrift_read_buffer_pool_watermark(client, buffer_pool_id):
         return None
     return wm_vals[0]
 
+def sai_thrift_read_headroom_pool_watermark(client, buffer_pool_id):
+    buffer_pool_wm_ids = [
+        SAI_BUFFER_POOL_STAT_XOFF_ROOM_WATERMARK_BYTES
+    ]
+
+    wm_vals = client.sai_thrift_get_buffer_pool_stats(buffer_pool_id, buffer_pool_wm_ids)
+    if not wm_vals:
+        print >> sys.stderr, "sai_thrift_read_headroom_pool_watermark returns empty list"
+        return None
+    return wm_vals[0]
+
 def sai_thrift_create_vlan_member(client, vlan_id, port_id, tagging_mode):
     vlan_member_attr_list = []
     attribute_value = sai_thrift_attribute_value_t(s32=vlan_id)

From f0b33a86b71f6b9a295a6d52c080fe52d890ab82 Mon Sep 17 00:00:00 2001
From: Neetha John
Date: Wed, 2 Dec 2020 13:03:07 -0800
Subject: [PATCH 2/4] Introduce vars to support headroom pool wm on Th and Th2

Signed-off-by: Neetha John
---
 tests/qos/files/qos.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tests/qos/files/qos.yml b/tests/qos/files/qos.yml
index 5e4475ef2bb..d7e08bab0a7 100644
--- a/tests/qos/files/qos.yml
+++ b/tests/qos/files/qos.yml
@@ -615,6 +615,8 @@ qos_params:
       pkts_num_trig_egr_drp: 9887
       pkts_num_fill_egr_min: 8
       cell_size: 208
+    hdrm_pool_wm_multiplier: 4
+    cell_size: 208
   th2:
     40000_300m:
       pkts_num_leak_out: 0
@@ -811,3 +813,5 @@ qos_params:
       pkts_num_trig_egr_drp: 10692
       pkts_num_fill_egr_min: 16
       cell_size: 208
+    hdrm_pool_wm_multiplier: 4
+    cell_size: 208
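Note: the helper added in PATCH 1 mirrors the existing sai_thrift_read_buffer_pool_watermark but reads SAI_BUFFER_POOL_STAT_XOFF_ROOM_WATERMARK_BYTES, i.e. the peak XOFF (shared headroom) occupancy of the pool in bytes. The two vars from PATCH 2 let tests convert that byte count back into packets: on Th/Th2 each test packet is assumed to occupy hdrm_pool_wm_multiplier cells of cell_size bytes. A minimal usage sketch, assuming an already-connected thrift client and a valid buffer pool RID (both placeholders here):

    from switch import sai_thrift_read_headroom_pool_watermark

    def hdrm_pool_wm_in_packets(client, buf_pool_roid, cell_size=208, wm_multiplier=4):
        # Peak headroom-pool usage since the last watermark clear, in bytes.
        wm_bytes = sai_thrift_read_headroom_pool_watermark(client, buf_pool_roid)
        if wm_bytes is None:
            return None
        # The ASIC accounts in cells, not bytes; each test packet is assumed
        # to occupy wm_multiplier cells of cell_size bytes (the qos.yml values).
        return wm_bytes // (cell_size * wm_multiplier)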
From 65eef6eb33fca296dc61eccf629a6575dac648a7 Mon Sep 17 00:00:00 2001
From: Neetha John
Date: Wed, 2 Dec 2020 13:04:00 -0800
Subject: [PATCH 3/4] Headroom pool watermark tests on Th and Th2

Signed-off-by: Neetha John
---
 tests/qos/qos_sai_base.py       | 11 +++++--
 tests/qos/test_qos_sai.py       | 53 +++++++++++++++++++++++++++++++++
 tests/saitests/sai_qos_tests.py | 44 +++++++++++++++++++++++++++
 3 files changed, 105 insertions(+), 3 deletions(-)

diff --git a/tests/qos/qos_sai_base.py b/tests/qos/qos_sai_base.py
index dac0b61c690..f6d2d08ddab 100644
--- a/tests/qos/qos_sai_base.py
+++ b/tests/qos/qos_sai_base.py
@@ -72,6 +72,13 @@ def __computeBufferThreshold(self, duthost, bufferProfile):
             bufferScale /= (bufferScale + 1)
         bufferProfile.update({"static_th": int(bufferProfile["size"]) + int(bufferScale * bufferSize)})
 
+        poolHeadroom = self.__runRedisCommandOrAssert(
+            duthost,
+            argv = ["redis-cli", "-n", "4", "HGET", pool, "xoff"]
+        )
+        if poolHeadroom:
+            bufferProfile.update({"poolXoff": int(poolHeadroom[0])})
+
     def __updateVoidRoidParams(self, duthost, bufferProfile):
         """
             Updates buffer profile with VOID/ROID params
@@ -142,9 +149,7 @@ def __getBufferProfile(self, request, duthost, table, port, priorityGroup):
         pytest_assert("xon" in bufferProfile.keys() and "xoff" in bufferProfile.keys(),
             "Could not find xon and/or xoff values for profile '{0}'".format(bufferProfileName))
 
-        disableTest = request.config.getoption("--disable_test")
-        if not disableTest:
-            self.__updateVoidRoidParams(duthost, bufferProfile)
+        self.__updateVoidRoidParams(duthost, bufferProfile)
 
         return bufferProfile
 
diff --git a/tests/qos/test_qos_sai.py b/tests/qos/test_qos_sai.py
index ffa8098cfaa..eb992c6c09a 100644
--- a/tests/qos/test_qos_sai.py
+++ b/tests/qos/test_qos_sai.py
@@ -212,6 +212,59 @@ def testQosSaiHeadroomPoolSize(self, ptfhost, dutTestParams, dutConfig, dutQosCo
         self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
 
+    def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, resetWatermark):
+        """
+            Test QoS SAI Headroom pool watermark
+
+            Args:
+                duthosts (AnsibleHost): Dut hosts
+                rand_one_dut_hostname (AnsibleHost): select one of the duts in multi dut testbed
+                ptfhost (AnsibleHost): Packet Test Framework (PTF)
+                dutTestParams (Fixture, dict): DUT host test params
+                dutConfig (Fixture, dict): Map of DUT config containing dut interfaces, test port IDs, test port IPs, and test ports
+                dutQosConfig (Fixture, dict): Map containing DUT host QoS configuration
+                ingressLosslessProfile (Fixture): Map of ingress lossless buffer profile attributes
+                resetWatermark (Fixture): reset watermarks
+
+            Returns:
+                None
+
+            Raises:
+                RunAnsibleModuleFail if ptf test fails
+        """
+        duthost = duthosts[rand_one_dut_hostname]
+        cmd_output = duthost.shell("show headroom-pool watermark", module_ignore_errors=True)
+        if dutTestParams["hwsku"] not in self.SUPPORTED_HEADROOM_SKUS or cmd_output['rc'] != 0:
+            pytest.skip("Headroom pool watermark is not supported")
+
+        portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        qosConfig = dutQosConfig["param"][portSpeedCableLength]
+        testPortIps = dutConfig["testPortIps"]
+
+        testParams = dict()
+        testParams.update(dutTestParams["basicParams"])
+        testParams.update({
+            "testbed_type": dutTestParams["topo"],
+            "dscps": qosConfig["hdrm_pool_size"]["dscps"],
+            "ecn": qosConfig["hdrm_pool_size"]["ecn"],
+            "pgs": qosConfig["hdrm_pool_size"]["pgs"],
+            "src_port_ids": qosConfig["hdrm_pool_size"]["src_port_ids"],
+            "src_port_ips": [testPortIps[port] for port in qosConfig["hdrm_pool_size"]["src_port_ids"]],
+            "dst_port_id": qosConfig["hdrm_pool_size"]["dst_port_id"],
+            "dst_port_ip": testPortIps[qosConfig["hdrm_pool_size"]["dst_port_id"]],
+            "pgs_num": qosConfig["hdrm_pool_size"]["pgs_num"],
+            "pkts_num_leak_out": qosConfig["pkts_num_leak_out"],
+            "pkts_num_trig_pfc": qosConfig["hdrm_pool_size"]["pkts_num_trig_pfc"],
+            "pkts_num_hdrm_full": qosConfig["hdrm_pool_size"]["pkts_num_hdrm_full"],
+            "pkts_num_hdrm_partial": qosConfig["hdrm_pool_size"]["pkts_num_hdrm_partial"],
+            "hdrm_pool_wm_multiplier": dutQosConfig["param"]["hdrm_pool_wm_multiplier"],
+            "cell_size": dutQosConfig["param"]["cell_size"],
+            "buf_pool_roid": ingressLosslessProfile["bufferPoolRoid"],
+            "max_headroom": ingressLosslessProfile["poolXoff"]
+        })
+        self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
+
     @pytest.mark.parametrize("bufPool", ["wm_buf_pool_lossless", "wm_buf_pool_lossy"])
     def testQosSaiBufferPoolWatermark(self, request, bufPool, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, egressLossyProfile, resetWatermark):
         """
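Note: max_headroom above is the pool-level "xoff" field that the qos_sai_base.py hunk reads out of CONFIG_DB and exposes as poolXoff; it caps how high the headroom pool watermark can legitimately climb. The same value can be fetched by hand on the DUT; a sketch assuming the usual ingress lossless pool key (the key name is an assumption, the redis call mirrors the one in this patch):

    import subprocess

    def get_pool_xoff(pool_key="BUFFER_POOL|ingress_lossless_pool"):
        # CONFIG_DB is redis database 4 on SONiC; "xoff" holds the shared
        # headroom pool size in bytes and is absent when SHP is disabled.
        out = subprocess.check_output(
            ["redis-cli", "-n", "4", "HGET", pool_key, "xoff"]).strip()
        return int(out) if out else None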
diff --git a/tests/saitests/sai_qos_tests.py b/tests/saitests/sai_qos_tests.py
index 0e0fafd3075..e1c2a5eeaa2 100644
--- a/tests/saitests/sai_qos_tests.py
+++ b/tests/saitests/sai_qos_tests.py
@@ -26,6 +26,7 @@
                     sai_thrift_read_port_watermarks,
                     sai_thrift_read_pg_counters,
                     sai_thrift_read_buffer_pool_watermark,
+                    sai_thrift_read_headroom_pool_watermark,
                     sai_thrift_port_tx_disable,
                     sai_thrift_port_tx_enable)
 from switch_sai_thrift.ttypes import (sai_thrift_attribute_value_t,
@@ -876,6 +877,18 @@ def setUp(self):
             print >> sys.stderr, ("pkts num: leak_out: %d, trig_pfc: %d, hdrm_full: %d, hdrm_partial: %d, pkt_size %d" % (self.pkts_num_leak_out, self.pkts_num_trig_pfc, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))
         elif self.pkts_num_trig_pfc_shp:
             print >> sys.stderr, ("pkts num: leak_out: {}, trig_pfc: {}, hdrm_full: {}, hdrm_partial: {}, pkt_size {}".format(self.pkts_num_leak_out, self.pkts_num_trig_pfc_shp, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))
+
+        # used only for headroom pool watermark
+        if all(key in self.test_params for key in ['hdrm_pool_wm_multiplier', 'buf_pool_roid', 'cell_size', 'max_headroom']):
+            self.cell_size = int(self.test_params['cell_size'])
+            self.wm_multiplier = self.test_params['hdrm_pool_wm_multiplier']
+            print >> sys.stderr, "Wm multiplier: %d buf_pool_roid: %s" % (self.wm_multiplier, self.test_params['buf_pool_roid'])
+            self.buf_pool_roid = int(self.test_params['buf_pool_roid'], 0)
+            print >> sys.stderr, "buf_pool_roid: 0x%lx" % (self.buf_pool_roid)
+            self.max_headroom = int(self.test_params['max_headroom'])
+        else:
+            self.wm_multiplier = None
+
         sys.stderr.flush()
 
         self.dst_port_mac = self.dataplane.get_mac(0, self.dst_port_id)
@@ -997,6 +1010,15 @@ def runTest(self):
             print >> sys.stderr, "PFC triggered"
             sys.stderr.flush()
 
+            upper_bound = 2
+            if self.wm_multiplier:
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                print >> sys.stderr, "Actual headroom pool watermark value to start: %d" % hdrm_pool_wm
+                assert (hdrm_pool_wm <= (upper_bound * self.cell_size * self.wm_multiplier))
+
+            expected_wm = 0
+            wm_pkt_num = 0
+            upper_bound_wm = 0
             # send packets to all pgs to fill the headroom pool
             for i in range(0, self.pgs_num):
                 # Prepare TCP packet data
@@ -1021,6 +1043,18 @@ def runTest(self):
                 assert(recv_counters[INGRESS_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_DROP])
                 assert(recv_counters[INGRESS_PORT_BUFFER_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_PORT_BUFFER_DROP])
 
+                if self.wm_multiplier:
+                    wm_pkt_num += (self.pkts_num_hdrm_full if i != self.pgs_num - 1 else self.pkts_num_hdrm_partial)
+                    hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                    expected_wm = wm_pkt_num * self.cell_size * self.wm_multiplier
+                    upper_bound_wm = expected_wm + (upper_bound * self.cell_size * self.wm_multiplier)
+                    if upper_bound_wm > self.max_headroom:
+                        upper_bound_wm = self.max_headroom
+
+                    print >> sys.stderr, "pkts sent: %d, lower bound: %d, actual headroom pool watermark: %d, upper_bound: %d" % (wm_pkt_num, expected_wm, hdrm_pool_wm, upper_bound_wm)
+                    assert(expected_wm <= hdrm_pool_wm)
+                    assert(hdrm_pool_wm <= upper_bound_wm)
+
             print >> sys.stderr, "all but the last pg hdrms filled"
             sys.stderr.flush()
 
@@ -1041,6 +1075,16 @@ def runTest(self):
             assert(xmit_counters[EGRESS_PORT_BUFFER_DROP] == xmit_counters_base[EGRESS_PORT_BUFFER_DROP])
             print >> sys.stderr, "pg hdrm filled"
 
+            if self.wm_multiplier:
+                # assert hdrm pool wm still remains the same
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                assert(expected_wm <= hdrm_pool_wm)
+                assert(hdrm_pool_wm <= upper_bound_wm)
+                # at this point headroom pool should be full; send a few more packets to continue causing drops
+                print >> sys.stderr, "overflow headroom pool"
+                send_packet(self, self.src_port_ids[sidx_dscp_pg_tuples[i][0]], pkt, 10)
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                assert(hdrm_pool_wm <= self.max_headroom)
             sys.stderr.flush()
 
         finally:
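Note: the runTest assertions bracket the measured watermark with two packets' worth of cell slack and clamp the upper bound at the pool size. Worked through with the Th values from qos.yml (the packet count and pool size are made up for illustration):

    cell_size = 208                  # bytes per cell (qos.yml, th)
    wm_multiplier = 4                # cells per test packet (qos.yml, th)
    upper_bound = 2                  # allowed slack, in packets
    max_headroom = 2 * 1024 * 1024   # hypothetical pool xoff, in bytes

    wm_pkt_num = 100                 # packets pushed into headroom so far
    expected_wm = wm_pkt_num * cell_size * wm_multiplier              # 83200
    upper_bound_wm = expected_wm + upper_bound * cell_size * wm_multiplier
    upper_bound_wm = min(upper_bound_wm, max_headroom)                # 84864

    # the value read from hardware must then satisfy
    #   expected_wm <= hdrm_pool_wm <= upper_bound_wm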
From 121e170c15ad25b0047ac89724bb11aab18a19ca Mon Sep 17 00:00:00 2001
From: Neetha John
Date: Thu, 7 Jan 2021 15:04:46 -0800
Subject: [PATCH 4/4] Resolve merge conflicts and get pool xoff using the new fixture

Signed-off-by: Neetha John
---
 tests/qos/qos_sai_base.py | 7 -------
 tests/qos/test_qos_sai.py | 4 ++--
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/tests/qos/qos_sai_base.py b/tests/qos/qos_sai_base.py
index f6d2d08ddab..1549b33bcdc 100644
--- a/tests/qos/qos_sai_base.py
+++ b/tests/qos/qos_sai_base.py
@@ -72,13 +72,6 @@ def __computeBufferThreshold(self, duthost, bufferProfile):
             bufferScale /= (bufferScale + 1)
         bufferProfile.update({"static_th": int(bufferProfile["size"]) + int(bufferScale * bufferSize)})
 
-        poolHeadroom = self.__runRedisCommandOrAssert(
-            duthost,
-            argv = ["redis-cli", "-n", "4", "HGET", pool, "xoff"]
-        )
-        if poolHeadroom:
-            bufferProfile.update({"poolXoff": int(poolHeadroom[0])})
-
     def __updateVoidRoidParams(self, duthost, bufferProfile):
         """
             Updates buffer profile with VOID/ROID params
 
diff --git a/tests/qos/test_qos_sai.py b/tests/qos/test_qos_sai.py
index eb992c6c09a..06394ffa028 100644
--- a/tests/qos/test_qos_sai.py
+++ b/tests/qos/test_qos_sai.py
@@ -212,7 +212,7 @@ def testQosSaiHeadroomPoolSize(self, ptfhost, dutTestParams, dutConfig, dutQosCo
         self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
 
-    def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, resetWatermark):
+    def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, sharedHeadroomPoolSize, resetWatermark):
         """
             Test QoS SAI Headroom pool watermark
 
@@ -261,7 +261,7 @@ def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfh
             "hdrm_pool_wm_multiplier": dutQosConfig["param"]["hdrm_pool_wm_multiplier"],
             "cell_size": dutQosConfig["param"]["cell_size"],
             "buf_pool_roid": ingressLosslessProfile["bufferPoolRoid"],
-            "max_headroom": ingressLosslessProfile["poolXoff"]
+            "max_headroom": sharedHeadroomPoolSize
         })
         self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
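Note: PATCH 4 drops the poolXoff plumbing in favor of a sharedHeadroomPoolSize fixture that is defined elsewhere, not in this series. A minimal sketch of the shape the test now assumes (the fixture body and pool key are assumptions; the only contract HdrmPoolSizeTest relies on is an integer size in bytes):

    import pytest

    @pytest.fixture(scope="class")
    def sharedHeadroomPoolSize(duthosts, rand_one_dut_hostname):
        # Returns the shared headroom pool size ("xoff") in bytes,
        # i.e. the same quantity max_headroom carried before this patch.
        duthost = duthosts[rand_one_dut_hostname]
        out = duthost.shell(
            'redis-cli -n 4 HGET "BUFFER_POOL|ingress_lossless_pool" xoff'
        )["stdout"].strip()
        return int(out) if out else None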