Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[qos] Headroom pool watermark test #2614

Merged
merged 4 commits into from
Jan 9, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions tests/qos/files/qos.yml
Original file line number Diff line number Diff line change
Expand Up @@ -615,6 +615,8 @@ qos_params:
pkts_num_trig_egr_drp: 9887
pkts_num_fill_egr_min: 8
cell_size: 208
hdrm_pool_wm_multiplier: 4
cell_size: 208
th2:
40000_300m:
pkts_num_leak_out: 0
Expand Down Expand Up @@ -811,3 +813,5 @@ qos_params:
pkts_num_trig_egr_drp: 10692
pkts_num_fill_egr_min: 16
cell_size: 208
hdrm_pool_wm_multiplier: 4
cell_size: 208
4 changes: 1 addition & 3 deletions tests/qos/qos_sai_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,9 +142,7 @@ def __getBufferProfile(self, request, duthost, table, port, priorityGroup):
pytest_assert("xon" in bufferProfile.keys() and "xoff" in bufferProfile.keys(),
"Could not find xon and/or xoff values for profile '{0}'".format(bufferProfileName))

disableTest = request.config.getoption("--disable_test")
if not disableTest:
self.__updateVoidRoidParams(duthost, bufferProfile)
self.__updateVoidRoidParams(duthost, bufferProfile)

return bufferProfile

Expand Down
53 changes: 53 additions & 0 deletions tests/qos/test_qos_sai.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,59 @@ def testQosSaiHeadroomPoolSize(self, ptfhost, dutTestParams, dutConfig, dutQosCo

self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)

def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, sharedHeadroomPoolSize, resetWatermark):
    """
    Test QoS SAI headroom pool watermark.

    Args:
        duthosts (AnsibleHost): DUT hosts
        rand_one_dut_hostname (AnsibleHost): one DUT selected from a multi-DUT testbed
        ptfhost (AnsibleHost): Packet Test Framework (PTF)
        dutTestParams (Fixture, dict): DUT host test params
        dutConfig (Fixture, dict): Map of DUT config containing dut interfaces,
            test port IDs, test port IPs, and test ports
        dutQosConfig (Fixture, dict): Map containing DUT host QoS configuration
        ingressLosslessProfile (Fixture): Map of ingress lossless buffer profile attributes
        sharedHeadroomPoolSize (Fixture): shared headroom pool size used as the watermark upper bound
        resetWatermark (Fixture): reset watermarks

    Returns:
        None

    Raises:
        RunAnsibleModuleFail if ptf test fails
    """
    duthost = duthosts[rand_one_dut_hostname]
    # Probe the CLI first; a non-zero rc means this platform does not expose
    # the headroom-pool watermark counter at all.
    wmCmdResult = duthost.shell("show headroom-pool watermark", module_ignore_errors=True)
    unsupportedSku = dutTestParams["hwsku"] not in self.SUPPORTED_HEADROOM_SKUS
    if unsupportedSku or wmCmdResult['rc'] != 0:
        pytest.skip("Headroom pool watermark is not supported")

    portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
    qosParams = dutQosConfig["param"]
    # Per speed/cable-length section plus the headroom-pool-specific sub-map.
    speedCfg = qosParams[portSpeedCableLength]
    hdrmCfg = speedCfg["hdrm_pool_size"]
    testPortIps = dutConfig["testPortIps"]

    # Start from the common SAI test parameters, then layer on the
    # headroom-pool watermark specifics consumed by HdrmPoolSizeTest.
    testParams = dict(dutTestParams["basicParams"])
    testParams.update({
        "testbed_type": dutTestParams["topo"],
        "dscps": hdrmCfg["dscps"],
        "ecn": hdrmCfg["ecn"],
        "pgs": hdrmCfg["pgs"],
        "src_port_ids": hdrmCfg["src_port_ids"],
        "src_port_ips": [testPortIps[port] for port in hdrmCfg["src_port_ids"]],
        "dst_port_id": hdrmCfg["dst_port_id"],
        "dst_port_ip": testPortIps[hdrmCfg["dst_port_id"]],
        "pgs_num": hdrmCfg["pgs_num"],
        "pkts_num_leak_out": speedCfg["pkts_num_leak_out"],
        "pkts_num_trig_pfc": hdrmCfg["pkts_num_trig_pfc"],
        "pkts_num_hdrm_full": hdrmCfg["pkts_num_hdrm_full"],
        "pkts_num_hdrm_partial": hdrmCfg["pkts_num_hdrm_partial"],
        # Watermark-specific knobs live at the top level of the QoS params.
        "hdrm_pool_wm_multiplier": qosParams["hdrm_pool_wm_multiplier"],
        "cell_size": qosParams["cell_size"],
        "buf_pool_roid": ingressLosslessProfile["bufferPoolRoid"],
        "max_headroom": sharedHeadroomPoolSize
    })
    self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)

@pytest.mark.parametrize("bufPool", ["wm_buf_pool_lossless", "wm_buf_pool_lossy"])
def testQosSaiBufferPoolWatermark(self, request, bufPool, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, egressLossyProfile, resetWatermark):
"""
Expand Down
44 changes: 44 additions & 0 deletions tests/saitests/sai_qos_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
sai_thrift_read_port_watermarks,
sai_thrift_read_pg_counters,
sai_thrift_read_buffer_pool_watermark,
sai_thrift_read_headroom_pool_watermark,
sai_thrift_port_tx_disable,
sai_thrift_port_tx_enable)
from switch_sai_thrift.ttypes import (sai_thrift_attribute_value_t,
Expand Down Expand Up @@ -876,6 +877,18 @@ def setUp(self):
print >> sys.stderr, ("pkts num: leak_out: %d, trig_pfc: %d, hdrm_full: %d, hdrm_partial: %d, pkt_size %d" % (self.pkts_num_leak_out, self.pkts_num_trig_pfc, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))
elif self.pkts_num_trig_pfc_shp:
print >> sys.stderr, ("pkts num: leak_out: {}, trig_pfc: {}, hdrm_full: {}, hdrm_partial: {}, pkt_size {}".format(self.pkts_num_leak_out, self.pkts_num_trig_pfc_shp, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))

# used only for headroom pool watermark
if all(key in self.test_params for key in ['hdrm_pool_wm_multiplier', 'buf_pool_roid', 'cell_size', 'max_headroom']):
self.cell_size = int(self.test_params['cell_size'])
self.wm_multiplier = self.test_params['hdrm_pool_wm_multiplier']
print >> sys.stderr, "Wm multiplier: %d buf_pool_roid: %s" % (self.wm_multiplier, self.test_params['buf_pool_roid'])
self.buf_pool_roid = int(self.test_params['buf_pool_roid'], 0)
print >> sys.stderr, "buf_pool_roid: 0x%lx" % (self.buf_pool_roid)
self.max_headroom = int(self.test_params['max_headroom'])
else:
self.wm_multiplier = None

sys.stderr.flush()

self.dst_port_mac = self.dataplane.get_mac(0, self.dst_port_id)
Expand Down Expand Up @@ -997,6 +1010,15 @@ def runTest(self):
print >> sys.stderr, "PFC triggered"
sys.stderr.flush()

upper_bound = 2
if self.wm_multiplier:
hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
print >> sys.stderr, "Actual headroom pool watermark value to start: %d" % hdrm_pool_wm
assert (hdrm_pool_wm <= (upper_bound * self.cell_size * self.wm_multiplier))

expected_wm = 0
wm_pkt_num = 0
upper_bound_wm = 0
# send packets to all pgs to fill the headroom pool
for i in range(0, self.pgs_num):
# Prepare TCP packet data
Expand All @@ -1021,6 +1043,18 @@ def runTest(self):
assert(recv_counters[INGRESS_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_DROP])
assert(recv_counters[INGRESS_PORT_BUFFER_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_PORT_BUFFER_DROP])

if self.wm_multiplier:
wm_pkt_num += (self.pkts_num_hdrm_full if i != self.pgs_num - 1 else self.pkts_num_hdrm_partial)
hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
expected_wm = wm_pkt_num * self.cell_size * self.wm_multiplier
upper_bound_wm = expected_wm + (upper_bound * self.cell_size * self.wm_multiplier)
if upper_bound_wm > self.max_headroom:
upper_bound_wm = self.max_headroom

print >> sys.stderr, "pkts sent: %d, lower bound: %d, actual headroom pool watermark: %d, upper_bound: %d" %(wm_pkt_num, expected_wm, hdrm_pool_wm, upper_bound_wm)
assert(expected_wm <= hdrm_pool_wm)
assert(hdrm_pool_wm <= upper_bound_wm)

print >> sys.stderr, "all but the last pg hdrms filled"
sys.stderr.flush()

Expand All @@ -1041,6 +1075,16 @@ def runTest(self):
assert(xmit_counters[EGRESS_PORT_BUFFER_DROP] == xmit_counters_base[EGRESS_PORT_BUFFER_DROP])

print >> sys.stderr, "pg hdrm filled"
if self.wm_multiplier:
# assert hdrm pool wm still remains the same
hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
assert(expected_wm <= hdrm_pool_wm)
assert(hdrm_pool_wm <= upper_bound_wm)
# at this point headroom pool should be full. send few more packets to continue causing drops
print >> sys.stderr, "overflow headroom pool"
send_packet(self, self.src_port_ids[sidx_dscp_pg_tuples[i][0]], pkt, 10)
hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
assert(hdrm_pool_wm <= self.max_headroom)
sys.stderr.flush()

finally:
Expand Down
11 changes: 11 additions & 0 deletions tests/saitests/switch.py
Original file line number Diff line number Diff line change
Expand Up @@ -758,6 +758,17 @@ def sai_thrift_read_buffer_pool_watermark(client, buffer_pool_id):
return None
return wm_vals[0]

def sai_thrift_read_headroom_pool_watermark(client, buffer_pool_id):
    # Read the headroom (XOFF room) watermark, in bytes, for one buffer pool.
    # Returns the counter value, or None when the thrift call yields nothing.
    stat_ids = [
        SAI_BUFFER_POOL_STAT_XOFF_ROOM_WATERMARK_BYTES
    ]

    stats = client.sai_thrift_get_buffer_pool_stats(buffer_pool_id, stat_ids)
    if not stats:
        print >> sys.stderr, "sai_thrift_read_headroom_pool_watermark returns empty list"
        return None
    return stats[0]

def sai_thrift_create_vlan_member(client, vlan_id, port_id, tagging_mode):
vlan_member_attr_list = []
attribute_value = sai_thrift_attribute_value_t(s32=vlan_id)
Expand Down