Skip to content

Commit 7eaf3b0

Browse files
authored
Use cached testbedInfo and host_vars in inventory in dut selection fixtures (#2811)
What is the motivation for this PR? pytest_generate_tests dynamically adds enum_* fixtures that select DUTs on which the tests are to be parameterized. In order to select the DUTs on which the tests are to run, we need to get the TestbedInfo and also all variables defined in the inventory file for each DUT. These operations are time consuming - like creating TestbedInfo taking 1-9 seconds, and getting variables from inventory taking 3-5 seconds. pytest_generate_tests is called for all the tests/fixtures that need to run in a pytest session. This was adding a long time when trying to execute many tests. This issue was reported in issue #2790 - pytest_generate_tests in tests/conftest.py takes much more time than before How did you do it? To fix this, we need to create the TestbedInfo only once, store it, and then use the stored value in the next execution of pytest_generate_tests, rather than re-creating TestbedInfo. Similarly, the variables for all the DUTs in the testbed should be read once from the inventory files, stored, and then re-used in the next execution of pytest_generate_tests. PR #2789 Added caching capability to store any facts using pickle. PR #2856 added caching capability for TestbedInfo PR #2873 added caching capability for variables for DUTs in the inventory files. We use the cached TestbedInfo and DUT variables in the selection of DUTs in pytest_generate_tests. How did you verify/test it? Ran tests that use the dut selection fixtures and validated that the delay is seen only once, and not in every call to pytest_generate_tests. Validated the execution time reduced significantly when using caching when executing iface_namingmode test cases which have ~30 tests.
1 parent ae8fa15 commit 7eaf3b0

2 files changed

Lines changed: 68 additions & 59 deletions

File tree

tests/common/helpers/dut_utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,8 @@ def is_supervisor_node(inv_files, hostname):
1111
the inventory, and it is 'supervisor', then return True, else return False. In future, we can change this
1212
logic if possible to derive it from the DUT.
1313
"""
14-
node_type = get_host_visible_vars(inv_files, hostname, variable='type')
15-
if node_type and node_type == 'supervisor':
14+
dut_vars = get_host_visible_vars(inv_files, hostname)
15+
if 'type' in dut_vars and dut_vars['type'] == 'supervisor':
1616
return True
1717
return False
1818

tests/conftest.py

Lines changed: 66 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -204,9 +204,7 @@ def duthost(duthosts, request):
204204
def rand_one_dut_hostname(request):
205205
"""
206206
"""
207-
tbname, testbedinfo = get_tbinfo(request)
208-
duts_in_testbed = testbedinfo["duts"]
209-
dut_hostnames = generate_params_dut_hostname(duts_in_testbed, tbname)
207+
dut_hostnames = generate_params_dut_hostname(request)
210208
if len(dut_hostnames) > 1:
211209
dut_hostnames = random.sample(dut_hostnames, 1)
212210
return dut_hostnames[0]
@@ -226,16 +224,6 @@ def rand_one_dut_lossless_prio(request):
226224
lossless_prio_list = random.sample(lossless_prio_list, 1)
227225
return lossless_prio_list[0]
228226

229-
@pytest.fixture(scope="module")
230-
def rand_one_frontend_dut_hostname(request):
231-
"""
232-
"""
233-
tbname, testbedinfo = get_tbinfo(request)
234-
duts_in_testbed = testbedinfo["duts"]
235-
frontend_dut_hostnames = generate_params_frontend_hostname(request, duts_in_testbed, tbname)
236-
dut_hostnames = random.sample(frontend_dut_hostnames, 1)
237-
return dut_hostnames[0]
238-
239227
@pytest.fixture(scope="module", autouse=True)
240228
def reset_critical_services_list(duthosts):
241229
"""
@@ -611,28 +599,37 @@ def get_host_data(request, dut):
611599
return get_host_vars(inv_files, dut)
612600

613601

614-
def generate_params_frontend_hostname(request, duts_in_testbed, tbname):
602+
def generate_params_frontend_hostname(request):
615603
frontend_duts = []
604+
tbname, tbinfo = get_tbinfo(request)
605+
duts = tbinfo['duts']
616606
inv_files = get_inventory_files(request)
617-
for dut in duts_in_testbed:
607+
for dut in duts:
618608
if is_frontend_node(inv_files, dut):
619609
frontend_duts.append(dut)
620610
assert len(frontend_duts) > 0, \
621611
"Test selected require at-least one frontend node, " \
622-
"none of the DUTs '{}' in testbed '{}' are a supervisor node".format(duts_in_testbed, tbname)
612+
"none of the DUTs '{}' in testbed '{}' are a supervisor node".format(duts, tbname)
623613
return frontend_duts
624614

625615

626-
def generate_params_frontend_hostname_rand_per_hwsku(request, duts_in_testbed, tbname):
627-
frontend_hosts = generate_params_frontend_hostname(request, duts_in_testbed, tbname)
616+
def generate_params_hostname_rand_per_hwsku(request, frontend_only=False):
617+
tbname, tbinfo = get_tbinfo(request)
618+
hosts = tbinfo['duts']
619+
if frontend_only:
620+
hosts = generate_params_frontend_hostname(request)
628621
inv_files = get_inventory_files(request)
629622
# Create a list of hosts per hwsku
630623
host_hwskus = {}
631-
for a_host in frontend_hosts:
632-
a_host_hwsku = get_host_visible_vars(inv_files, a_host, variable='hwsku')
633-
if not a_host_hwsku:
624+
for a_host in hosts:
625+
host_vars = get_host_visible_vars(inv_files, a_host)
626+
a_host_hwsku = None
627+
if 'hwsku' in host_vars:
628+
a_host_hwsku = host_vars['hwsku']
629+
else:
634630
# Lets try 'sonic_hwsku' as well
635-
a_host_hwsku = get_host_visible_vars(inv_files, a_host, variable='sonic_hwsku')
631+
if 'sonic_hwsku' in host_vars:
632+
a_host_hwsku = host_vars['sonic_hwsku']
636633
if a_host_hwsku:
637634
if a_host_hwsku not in host_hwskus:
638635
host_hwskus[a_host_hwsku] = [a_host]
@@ -641,56 +638,65 @@ def generate_params_frontend_hostname_rand_per_hwsku(request, duts_in_testbed, t
641638
else:
642639
pytest.fail("Test selected require a node per hwsku, but 'hwsku' for '{}' not defined in the inventory".format(a_host))
643640

644-
frontend_hosts_per_hwsku = []
641+
hosts_per_hwsku = []
645642
for hosts in host_hwskus.values():
646643
if len(hosts) == 1:
647-
frontend_hosts_per_hwsku.append(hosts[0])
644+
hosts_per_hwsku.append(hosts[0])
648645
else:
649-
frontend_hosts_per_hwsku.extend(random.sample(hosts, 1))
646+
hosts_per_hwsku.extend(random.sample(hosts, 1))
650647

651-
return frontend_hosts_per_hwsku
648+
return hosts_per_hwsku
652649

653650

654-
def generate_params_supervisor_hostname(request, duts_in_testbed, tbname):
655-
if len(duts_in_testbed) == 1:
651+
def generate_params_supervisor_hostname(request):
652+
tbname, tbinfo = get_tbinfo(request)
653+
duts = tbinfo['duts']
654+
if len(duts) == 1:
656655
# We have a single node - dealing with pizza box, return it
657-
return [duts_in_testbed[0]]
656+
return [duts[0]]
658657
inv_files = get_inventory_files(request)
659-
for dut in duts_in_testbed:
658+
for dut in duts:
660659
# Expecting only a single supervisor node
661660
if is_supervisor_node(inv_files, dut):
662661
return [dut]
663662
pytest.fail("Test selected require a supervisor node, " +
664-
"none of the DUTs '{}' in testbed '{}' are a supervisor node".format(duts_in_testbed, tbname))
665-
663+
"none of the DUTs '{}' in testbed '{}' are a supervisor node".format(duts, tbname))
666664

667-
def generate_param_asic_index(request, duts_in_testbed, dut_indices, param_type):
668-
logging.info("generating {} asic indices for DUT [{}] in ".format(param_type, dut_indices))
665+
def generate_param_asic_index(request, dut_indices, param_type):
666+
_, tbinfo = get_tbinfo(request)
667+
inv_files = get_inventory_files(request)
668+
logging.info("generating {} asic indicies for DUT [{}] in ".format(param_type, dut_indices))
669669
#if the params are not present treat the device as a single asic device
670670
asic_index_params = [DEFAULT_ASIC_ID]
671671

672672
for dut_id in dut_indices:
673-
dut = duts_in_testbed[dut_id]
674-
inv_data = get_host_data(request, dut)
673+
dut = tbinfo['duts'][dut_id]
674+
inv_data = get_host_visible_vars(inv_files, dut)
675675
if inv_data is not None:
676676
if param_type == ASIC_PARAM_TYPE_ALL and ASIC_PARAM_TYPE_ALL in inv_data:
677-
asic_index_params = range(int(inv_data[ASIC_PARAM_TYPE_ALL]))
677+
if int(inv_data[ASIC_PARAM_TYPE_ALL]) == 1:
678+
asic_index_params = [DEFAULT_ASIC_ID]
679+
else:
680+
asic_index_params = range(int(inv_data[ASIC_PARAM_TYPE_ALL]))
678681
elif param_type == ASIC_PARAM_TYPE_FRONTEND and ASIC_PARAM_TYPE_FRONTEND in inv_data:
679682
asic_index_params = inv_data[ASIC_PARAM_TYPE_FRONTEND]
680683
logging.info("dut_index {} dut name {} asics params = {}".format(
681684
dut_id, dut, asic_index_params))
682685
return asic_index_params
683686

684687

685-
def generate_params_dut_index(duts_in_testbeds, tbname):
686-
num_duts = len(duts_in_testbeds)
688+
def generate_params_dut_index(request):
689+
tbname, tbinfo = get_tbinfo(request)
690+
num_duts = len(tbinfo['duts'])
687691
logging.info("Num of duts in testbed '{}' is {}".format(tbname, num_duts))
688692
return range(num_duts)
689693

690694

691-
def generate_params_dut_hostname(duts_in_testbed, tbname):
692-
logging.info("DUTs in testbed '{}' are: {}".format(tbname, str(duts_in_testbed)))
693-
return duts_in_testbed
695+
def generate_params_dut_hostname(request):
696+
tbname, tbinfo = get_tbinfo(request)
697+
duts = tbinfo["duts"]
698+
logging.info("DUTs in testbed '{}' are: {}".format(tbname, str(duts)))
699+
return duts
694700

695701

696702
def generate_port_lists(request, port_scope):
@@ -797,38 +803,41 @@ def generate_priority_lists(request, prio_scope):
797803
return ret if ret else empty
798804

799805
_frontend_hosts_per_hwsku_per_module = {}
806+
_hosts_per_hwsku_per_module = {}
800807
def pytest_generate_tests(metafunc):
801808
# The topology always has atleast 1 dut
802809
dut_indices = [0]
803-
tbname, testbedinfo = get_tbinfo(metafunc)
804-
duts_in_testbed = testbedinfo["duts"]
810+
global _frontend_hosts_per_hwsku_per_module, _hosts_per_hwsku_per_module
805811
# Enumerators ("enum_dut_index", "enum_dut_hostname", "rand_one_dut_hostname") are mutually exclusive
806812
if "enum_dut_index" in metafunc.fixturenames:
807-
dut_indices = generate_params_dut_index(duts_in_testbed, tbname)
813+
dut_indices = generate_params_dut_index(metafunc)
808814
metafunc.parametrize("enum_dut_index", dut_indices, scope="module")
809815
elif "enum_dut_hostname" in metafunc.fixturenames:
810-
dut_hostnames = generate_params_dut_hostname(duts_in_testbed, tbname)
816+
dut_hostnames = generate_params_dut_hostname(metafunc)
811817
metafunc.parametrize("enum_dut_hostname", dut_hostnames, scope="module")
812818
elif "enum_supervisor_dut_hostname" in metafunc.fixturenames:
813-
supervisor_hosts = generate_params_supervisor_hostname(metafunc, duts_in_testbed, tbname)
819+
supervisor_hosts = generate_params_supervisor_hostname(metafunc)
814820
metafunc.parametrize("enum_supervisor_dut_hostname", supervisor_hosts, scope="module")
815821
elif "enum_frontend_dut_hostname" in metafunc.fixturenames:
816-
frontend_hosts = generate_params_frontend_hostname(metafunc, duts_in_testbed, tbname)
822+
frontend_hosts = generate_params_frontend_hostname(metafunc)
817823
metafunc.parametrize("enum_frontend_dut_hostname", frontend_hosts, scope="module")
824+
elif "enum_rand_one_per_hwsku_hostname" in metafunc.fixturenames:
825+
if metafunc.module not in _hosts_per_hwsku_per_module:
826+
hosts_per_hwsku = generate_params_hostname_rand_per_hwsku(metafunc)
827+
_hosts_per_hwsku_per_module[metafunc.module] = hosts_per_hwsku
828+
hosts = _hosts_per_hwsku_per_module[metafunc.module]
829+
metafunc.parametrize("enum_rand_one_per_hwsku_hostname", hosts, scope="module")
818830
elif "enum_rand_one_per_hwsku_frontend_hostname" in metafunc.fixturenames:
819831
if metafunc.module not in _frontend_hosts_per_hwsku_per_module:
820-
frontend_hosts_per_hwsku = generate_params_frontend_hostname_rand_per_hwsku(metafunc, duts_in_testbed, tbname)
821-
_frontend_hosts_per_hwsku_per_module[metafunc.module] = frontend_hosts_per_hwsku
822-
frontend_hosts = _frontend_hosts_per_hwsku_per_module[metafunc.module]
823-
metafunc.parametrize("enum_rand_one_per_hwsku_frontend_hostname", frontend_hosts, scope="module")
824-
832+
hosts_per_hwsku = generate_params_hostname_rand_per_hwsku(metafunc, frontend_only=True)
833+
_frontend_hosts_per_hwsku_per_module[metafunc.module] = hosts_per_hwsku
834+
hosts = _frontend_hosts_per_hwsku_per_module[metafunc.module]
835+
metafunc.parametrize("enum_rand_one_per_hwsku_frontend_hostname", hosts, scope="module")
825836

826837
if "enum_asic_index" in metafunc.fixturenames:
827-
metafunc.parametrize("enum_asic_index",
828-
generate_param_asic_index(metafunc, duts_in_testbed, dut_indices, ASIC_PARAM_TYPE_ALL))
838+
metafunc.parametrize("enum_asic_index", generate_param_asic_index(metafunc, dut_indices, ASIC_PARAM_TYPE_ALL))
829839
if "enum_frontend_asic_index" in metafunc.fixturenames:
830-
metafunc.parametrize("enum_frontend_asic_index",
831-
generate_param_asic_index(metafunc, duts_in_testbed, dut_indices, ASIC_PARAM_TYPE_FRONTEND))
840+
metafunc.parametrize("enum_frontend_asic_index",generate_param_asic_index(metafunc, dut_indices, ASIC_PARAM_TYPE_FRONTEND))
832841

833842
if "enum_dut_portname" in metafunc.fixturenames:
834843
metafunc.parametrize("enum_dut_portname", generate_port_lists(metafunc, "all_ports"))

0 commit comments

Comments
 (0)