Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 33 additions & 12 deletions tests/common/devices.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,13 @@
"""
import json
import logging
from multiprocessing import Process, Queue
import os
from multiprocessing.pool import ThreadPool

from errors import RunAnsibleModuleFail
from errors import UnsupportedAnsibleModule


class AnsibleHostBase(object):
"""
@summary: The base class for various objects.
Expand Down Expand Up @@ -44,13 +46,11 @@ def _run(self, *module_args, **complex_args):
module_async = complex_args.pop('module_async', False)

if module_async:
q = Queue()
def run_module(queue, module_args, complex_args):
res = self.module(*module_args, **complex_args)
q.put(res[self.hostname])
p = Process(target=run_module, args=(q, module_args, complex_args))
p.start()
return p, q
def run_module(module_args, complex_args):
return self.module(*module_args, **complex_args)[self.hostname]
pool = ThreadPool()
result = pool.apply_async(run_module, (module_args, complex_args))
return pool, result

res = self.module(*module_args, **complex_args)[self.hostname]
if res.is_failed and not module_ignore_errors:
Expand Down Expand Up @@ -149,7 +149,8 @@ def is_service_fully_started(self, service):
return True
else:
return False
except:
except Exception as e:
logging.error("Failed to get service status, exception: %s" % repr(e))
return False

def critical_services_fully_started(self):
Expand All @@ -163,7 +164,6 @@ def critical_services_fully_started(self):
logging.debug("Status of critical services: %s" % str(result))
return all(result.values())


def get_crm_resources(self):
"""
@summary: Run the "crm show resources all" command and parse its output
Expand All @@ -185,11 +185,32 @@ def get_crm_resources(self):
fields = line.split()
if len(fields) == 5:
result["acl_resources"].append({"stage": fields[0], "bind_point": fields[1],
"resource_name": fields[2], "used_count": int(fields[3]), "available_count": int(fields[4])})
"resource_name": fields[2], "used_count": int(fields[3]),
"available_count": int(fields[4])})
if current_table == 3: # content of the third table, table resources
fields = line.split()
if len(fields) == 4:
result["table_resources"].append({"table_id": fields[0], "resource_name": fields[1],
"used_count": int(fields[2]), "available_count": int(fields[3])})
"used_count": int(fields[2]), "available_count": int(fields[3])})

return result

def get_pmon_daemon_list(self):
    """
    Determine which pmon daemons are expected on this platform.

    The 201811 image has no pmon_daemon_control.json under
    /usr/share/sonic/device/{platform}/{hwsku}/, so instead probe for the two
    platform plugins: led_control.py (required by ledd) and sfputil.py
    (required by xcvrd). A daemon is expected only when its plugin file exists.
    """
    device_dir = os.path.join('/usr/share/sonic/device', self.facts["platform"])
    # (plugin relative path, daemon that depends on it)
    plugin_to_daemon = (
        ('plugins/led_control.py', 'ledd'),
        ('plugins/sfputil.py', 'xcvrd'),
    )
    daemon_list = [daemon for plugin, daemon in plugin_to_daemon
                   if os.path.isfile(os.path.join(device_dir, plugin))]

    logging.info("Pmon daemon list for this platform is %s" % str(daemon_list))
    return daemon_list
83 changes: 70 additions & 13 deletions tests/common/mellanox_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,15 @@
"psus": {
"number": 2,
"hot_swappable": True
},
"cpu_pack": {
"number": 1
},
"cpu_cores": {
"number": 2
},
"ports": {
"number": 32
}
},
"ACS-MSN2740": {
Expand All @@ -32,6 +41,15 @@
"psus": {
"number": 2,
"hot_swappable": True
},
"cpu_pack": {
"number": 0
},
"cpu_cores": {
"number": 4
},
"ports": {
"number": 32
}
},
"ACS-MSN2410": {
Expand All @@ -47,6 +65,15 @@
"psus": {
"number": 2,
"hot_swappable": True
},
"cpu_pack": {
"number": 1
},
"cpu_cores": {
"number": 2
},
"ports": {
"number": 56
}
},
"ACS-MSN2010": {
Expand All @@ -62,6 +89,15 @@
"psus": {
"number": 2,
"hot_swappable": False
},
"cpu_pack": {
"number": 0
},
"cpu_cores": {
"number": 4
},
"ports": {
"number": 22
}
},
"ACS-MSN2100": {
Expand All @@ -77,6 +113,15 @@
"psus": {
"number": 2,
"hot_swappable": False
},
"cpu_pack": {
"number": 0
},
"cpu_cores": {
"number": 4
},
"ports": {
"number": 16
}
},
"ACS-MSN3800": {
Expand All @@ -92,6 +137,15 @@
"psus": {
"number": 2,
"hot_swappable": True
},
"cpu_pack": {
"number": 1
},
"cpu_cores": {
"number": 4
},
"ports": {
"number": 64
}
},
"ACS-MSN3700": {
Expand All @@ -107,6 +161,15 @@
"psus": {
"number": 2,
"hot_swappable": True
},
"cpu_pack": {
"number": 1
},
"cpu_cores": {
"number": 4
},
"ports": {
"number": 32
}
},
"ACS-MSN3700C": {
Expand All @@ -122,21 +185,15 @@
"psus": {
"number": 2,
"hot_swappable": True
}
},
"ACS-MSN3510": {
"reboot": {
"cold_reboot": True,
"fast_reboot": True,
"warm_reboot": False
},
"fans": {
"number": 6,
"hot_swappable": True
"cpu_pack": {
"number": 1
},
"psus": {
"number": 2,
"hot_swappable": True
"cpu_cores": {
"number": 2
},
"ports": {
"number": 32
}
}
}
36 changes: 26 additions & 10 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,10 @@
import ipaddr as ipaddress

from ansible_host import AnsibleHost
from loganalyzer import LogAnalyzer

pytest_plugins = ('ptf_fixtures', 'ansible_fixtures')

# Add the tests folder to sys.path, for importing the lib package
_current_file_dir = os.path.dirname(os.path.realpath(__file__))
if _current_file_dir not in sys.path:
sys.path.append(current_file_dir)


class TestbedInfo(object):
"""
Expand All @@ -35,7 +31,6 @@ def __init__(self, testbed_file):
name = ''
for key in line:
if ('uniq-name' in key or 'conf-name' in key) and '#' in line[key]:
### skip comment line
continue
elif 'uniq-name' in key or 'conf-name' in key:
name = line[key]
Expand All @@ -52,6 +47,8 @@ def __init__(self, testbed_file):
def pytest_addoption(parser):
    """Register this suite's custom command line options with pytest."""
    # Plain value-storing options share the same action/default shape.
    store_options = (
        ("--testbed", "testbed name"),
        ("--testbed_file", "testbed file name"),
    )
    for flag, description in store_options:
        parser.addoption(flag, action="store", default=None, help=description)
    parser.addoption("--disable_loganalyzer", action="store_true", default=False,
                     help="disable loganalyzer analysis for 'loganalyzer' fixture")


@pytest.fixture(scope="session")
Expand All @@ -78,11 +75,12 @@ def testbed_devices(ansible_adhoc, testbed):
@param testbed: Fixture for parsing testbed configuration file.
@return: Return the created device objects in a dictionary
"""
from common.devices import SonicHost, Localhost
from common.devices import SonicHost, Localhost, PTFHost

devices = {
"localhost": Localhost(ansible_adhoc),
"dut": SonicHost(ansible_adhoc, testbed["dut"], gather_facts=True)}

devices = {}
devices["localhost"] = Localhost(ansible_adhoc)
devices["dut"] = SonicHost(ansible_adhoc, testbed["dut"], gather_facts=True)
if "ptf" in testbed:
devices["ptf"] = PTFHost(ansible_adhoc, testbed["ptf"])

Expand Down Expand Up @@ -121,3 +119,21 @@ def eos():
with open('eos/eos.yml') as stream:
eos = yaml.safe_load(stream)
return eos


@pytest.fixture(autouse=True)
def loganalyzer(duthost, request):
    """
    Autouse fixture wrapping every test with DUT syslog analysis.

    A start marker is written into the DUT syslog before the test runs.
    After the test, the marked syslog section is analyzed unless analysis was
    disabled via the --disable_loganalyzer option or a per-test
    "disable_loganalyzer" keyword.
    """
    analyzer = LogAnalyzer(ansible_host=duthost, marker_prefix=request.node.name)
    # Add start marker into DUT syslog
    start_marker = analyzer.init()
    yield analyzer
    analysis_disabled = (request.config.getoption("--disable_loganalyzer")
                         or "disable_loganalyzer" in request.keywords)
    if analysis_disabled:
        # Add end marker into DUT syslog
        analyzer._add_end_marker(start_marker)
    else:
        # Read existed common regular expressions located with legacy loganalyzer module
        analyzer.load_common_config()
        # Parse syslog and process result. Raise "LogAnalyzerError" exception if: total match or expected missing
        # match is not equal to zero
        analyzer.analyze(start_marker)

Loading