diff --git a/tests/acl/test_acl.py b/tests/acl/test_acl.py index de138dd7ba2..a62068599ef 100644 --- a/tests/acl/test_acl.py +++ b/tests/acl/test_acl.py @@ -13,7 +13,7 @@ import ptf.packet as packet from common import reboot, port_toggle -from loganalyzer import LogAnalyzer, LogAnalyzerError +from common.plugins.loganalyzer.loganalyzer import LogAnalyzer, LogAnalyzerError logger = logging.getLogger(__name__) diff --git a/tests/common/plugins/ansible_fixtures.py b/tests/common/plugins/ansible_fixtures.py new file mode 100644 index 00000000000..ac31dd27012 --- /dev/null +++ b/tests/common/plugins/ansible_fixtures.py @@ -0,0 +1,31 @@ +""" This module provides few pytest-ansible fixtures overridden """ + +import pytest + +# Here we override ansible_adhoc fixture from pytest-ansible plugin to overcome +# scope limitation issue; since we want to be able to use ansible_adhoc in module/class scope +# fixtures we have to override the scope here in global conftest.py +# Let's have it with module scope for now, so if something really breaks next test module run will have +# this fixture reevaluated +@pytest.fixture(scope='module') +def ansible_adhoc(request): + """Return an inventory initialization method.""" + plugin = request.config.pluginmanager.getplugin("ansible") + + def init_host_mgr(**kwargs): + return plugin.initialize(request.config, request, **kwargs) + return init_host_mgr + + +# Same as for ansible_adhoc, let's have localhost fixture with session scope +# as it feels that during session run the localhost object should persist unchanged. 
+# Also, we have autouse=True here to force pytest to evaluate localhost fixture to overcome +# some hidden dependency between localhost and ansible_adhoc (even with default scope) (FIXME) +@pytest.fixture(scope='session', autouse=True) +def localhost(request): + """Return a host manager representing localhost.""" + # NOTE: Do not use ansible_adhoc as a dependent fixture since that will assert specific command-line parameters have + # been supplied. In the case of localhost, the parameters are provided as kwargs below. + plugin = request.config.pluginmanager.getplugin("ansible") + return plugin.initialize(request.config, request, inventory='localhost,', connection='local', + host_pattern='localhost').localhost diff --git a/tests/plugins/dut_monitor/README.md b/tests/common/plugins/dut_monitor/README.md old mode 100755 new mode 100644 similarity index 100% rename from tests/plugins/dut_monitor/README.md rename to tests/common/plugins/dut_monitor/README.md diff --git a/tests/common/plugins/dut_monitor/__init__.py b/tests/common/plugins/dut_monitor/__init__.py new file mode 100644 index 00000000000..72dc5c18485 --- /dev/null +++ b/tests/common/plugins/dut_monitor/__init__.py @@ -0,0 +1,26 @@ +import os +import pytest + +from pytest_dut_monitor import DUTMonitorPlugin + + +def pytest_addoption(parser): + """Describe plugin specified options""" + parser.addoption("--dut_monitor", action="store_true", default=False, + help="Enable DUT hardware resources monitoring") + parser.addoption("--thresholds_file", action="store", default=None, help="Path to the custom thresholds file") + + +def pytest_configure(config): + if config.option.dut_monitor: + thresholds = os.path.join(os.path.split(__file__)[0], "thresholds.yml") + if config.option.thresholds_file: + thresholds = config.option.thresholds_file + config.pluginmanager.register(DUTMonitorPlugin(thresholds), "dut_monitor") + + +def pytest_unconfigure(config): + dut_monitor = getattr(config, "dut_monitor", None) + if 
dut_monitor: + del config.dut_monitor + config.pluginmanager.unregister(dut_monitor) diff --git a/tests/plugins/dut_monitor/dut_monitor.py b/tests/common/plugins/dut_monitor/dut_monitor.py old mode 100755 new mode 100644 similarity index 100% rename from tests/plugins/dut_monitor/dut_monitor.py rename to tests/common/plugins/dut_monitor/dut_monitor.py diff --git a/tests/plugins/dut_monitor/errors.py b/tests/common/plugins/dut_monitor/errors.py old mode 100755 new mode 100644 similarity index 100% rename from tests/plugins/dut_monitor/errors.py rename to tests/common/plugins/dut_monitor/errors.py diff --git a/tests/plugins/dut_monitor/pytest_dut_monitor.py b/tests/common/plugins/dut_monitor/pytest_dut_monitor.py old mode 100755 new mode 100644 similarity index 94% rename from tests/plugins/dut_monitor/pytest_dut_monitor.py rename to tests/common/plugins/dut_monitor/pytest_dut_monitor.py index bf5cbc80d16..2dce1f56148 --- a/tests/plugins/dut_monitor/pytest_dut_monitor.py +++ b/tests/common/plugins/dut_monitor/pytest_dut_monitor.py @@ -16,29 +16,6 @@ DUT_CPU_LOG = "/tmp/cpu.log" DUT_RAM_LOG = "/tmp/ram.log" DUT_HDD_LOG = "/tmp/hdd.log" -THRESHOLDS = os.path.join(os.path.split(__file__)[0], "thresholds.yml") - - -def pytest_addoption(parser): - """Describe plugin specified options""" - parser.addoption("--dut_monitor", action="store_true", default=False, - help="Enable DUT hardware resources monitoring") - parser.addoption("--thresholds_file", action="store", default=None, help="Path to the custom thresholds file") - - -def pytest_configure(config): - if config.option.dut_monitor: - config.pluginmanager.register(DUTMonitorPlugin(), "dut_monitor") - if config.option.thresholds_file: - global THRESHOLDS - THRESHOLDS = config.option.thresholds_file - - -def pytest_unconfigure(config): - dut_monitor = getattr(config, "dut_monitor", None) - if dut_monitor: - del config.dut_monitor - config.pluginmanager.unregister(dut_monitor) class DUTMonitorPlugin(object): @@ -48,6 +25,8 
@@ class DUTMonitorPlugin(object): - handlers to verify that measured CPU, RAM and HDD values during each test item execution does not exceed defined threshold """ + def __init__(self, thresholds): + self.thresholds = thresholds @pytest.fixture(autouse=True, scope="session") def dut_ssh(self, testbed, creds): @@ -67,7 +46,7 @@ def dut_monitor(self, dut_ssh, localhost, duthost, testbed_devices): dut_ssh.start() # Read file with defined thresholds - with open(THRESHOLDS) as stream: + with open(self.thresholds) as stream: general_thresholds = yaml.safe_load(stream) dut_thresholds = general_thresholds["default"] @@ -359,7 +338,7 @@ def read_yml(self, file_pointer): measurements = yaml.safe_load("".join(fp)) if measurements is None: return {} - # Sort json data to process logs chronologically + # Sort json data to process logs chronologically keys = measurements.keys() keys.sort() key_value_pairs = [(item, measurements[item]) for item in keys] diff --git a/tests/plugins/dut_monitor/thresholds.yml b/tests/common/plugins/dut_monitor/thresholds.yml old mode 100755 new mode 100644 similarity index 100% rename from tests/plugins/dut_monitor/thresholds.yml rename to tests/common/plugins/dut_monitor/thresholds.yml diff --git a/tests/fib.py b/tests/common/plugins/fib.py similarity index 96% rename from tests/fib.py rename to tests/common/plugins/fib.py index 46592421dea..43867f980a8 100644 --- a/tests/fib.py +++ b/tests/common/plugins/fib.py @@ -5,8 +5,8 @@ import pytest import ipaddr as ipaddress -def announce_routes(ptfip, port, family, podset_number, tor_number, tor_subnet_number, - spine_asn, leaf_asn_start, tor_asn_start, +def announce_routes(ptfip, port, family, podset_number, tor_number, tor_subnet_number, + spine_asn, leaf_asn_start, tor_asn_start, nexthop, nexthop_v6, tor_subnet_size = 128, max_tor_subnet_number = 16): messages = [] @@ -29,7 +29,7 @@ def announce_routes(ptfip, port, family, podset_number, tor_number, tor_subnet_n suffix = ( (podset * tor_number * 
max_tor_subnet_number * tor_subnet_size) + \ (tor * max_tor_subnet_number * tor_subnet_size) + \ (subnet * tor_subnet_size) ) - octet2 = (168 + (suffix / (256 ** 2))) + octet2 = (168 + (suffix / (256 ** 2))) octet1 = (192 + (octet2 / 256)) octet2 = (octet2 % 256) octet3 = ((suffix / 256) % 256) @@ -37,7 +37,7 @@ def announce_routes(ptfip, port, family, podset_number, tor_number, tor_subnet_n prefixlen_v4 = (32 - int(math.log(tor_subnet_size, 2))) prefix = "{}.{}.{}.{}/{}".format(octet1, octet2, octet3, octet4, prefixlen_v4) - prefix_v6 = "20%02X:%02X%02X:0:%02X::/64" % (octet1, octet2, octet3, octet4) + prefix_v6 = "20%02X:%02X%02X:0:%02X::/64" % (octet1, octet2, octet3, octet4) leaf_asn = leaf_asn_start + podset tor_asn = tor_asn_start + tor diff --git a/tests/loganalyzer/README.md b/tests/common/plugins/loganalyzer/README.md old mode 100755 new mode 100644 similarity index 98% rename from tests/loganalyzer/README.md rename to tests/common/plugins/loganalyzer/README.md index db9cc80fa38..4b6bfe7d9f4 --- a/tests/loganalyzer/README.md +++ b/tests/common/plugins/loganalyzer/README.md @@ -1,135 +1,135 @@ -#### Loganalyzer API usage example - -Below is described possibility of loganalyzer fixture/module usage. - -##### Loganalyzer fixture -In the root conftest there is implemented "loganalyzer" pytest fixture, which starts automatically for all test cases. -Fixture main flow: -- loganalyzer will add start marker before test case start -- loganalyzer will add stop marker after test case finish -- if loganalyzer analysis is not disabled for current test case it will analyze DUT syslog and display results. -If loganalyzer find specified messages which corresponds to defined regular expressions, it will display found messages and pytest will generate 'error'. - -#### To skip loganalyzer analysis for: -- all test cases - use pytest command line option ```--disable_loganalyzer``` -- specific test case: mark test case with ```@pytest.mark.disable_loganalyzer``` decorator. 
Example is shown below. - - -#### Notes: -loganalyzer.init() - can be called several times without calling "loganalyzer.analyze(marker)" between calls. Each call return its unique marker, which is used for "analyze" phase - loganalyzer.analyze(marker). - - -### Loganalyzer usage example - -#### Example calling loganalyzer init/analyze methods automatically by using with statement -```python - # Read existed common regular expressions located with legacy loganalyzer module - loganalyzer.load_common_config() - # Analyze syslog for code executed inside with statement - with loganalyzer as analyzer: - logging.debug("============== Test steps ===================") - # Add test code here ... - time.sleep(1) - - # Separately analyze syslog for code executed inside each with statement - with loganalyzer as analyzer: - # Clear current regexp match list if there is a need to have clear configuration - loganalyzer.match_regex = [] - # Load regular expressions from the specified file - reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH) - # Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file - loganalyzer.match_regex.extend(reg_exp) - # Add test code here ... - # Here will be executed syslog analysis on context manager __exit__ - time.sleep(1) - with loganalyzer as analyzer: - # Clear current regexp match list if there is a need to have clear configuration - loganalyzer.match_regex = [] - # Set match criteria (regular expression) to custom regexp - "test:.*Error" - loganalyzer.match_regex.extend(["test:.*Error"]) - # Add test code here ... - # Here will be executed syslog analysis on context manager __exit__ - time.sleep(1) - with loganalyzer as analyzer: - # Add test code here ... 
- # Here will be executed syslog analysis on context manager __exit__ - time.sleep(1) -``` - -#### Example calling loganalyzer init/analyze methods directly in test case -```python - # Example 1 - # Read existed common regular expressions located with legacy loganalyzer module - loganalyzer.load_common_config() - # Add start marker to the DUT syslog - marker = loganalyzer.init() - # PERFORM TEST CASE STEPS ... - # Verify that error messages were not found in DUT syslog. Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH - loganalyzer.analyze(marker) - - # Example 2 - # Read existed common regular expressions located with legacy loganalyzer module - loganalyzer.load_common_config() - # Add start marker to the DUT syslog - marker = loganalyzer.init() - # PERFORM TEST CASE STEPS ... - # Get summary of analyzed DUT syslog - result = loganalyzer.analyze(marker, fail=False) - # Verify that specific amount of error messages found in syslog # Negative test case - assert result["total"]["match"] == 2, "Not found expected errors: {}".format(result) - - # Example 3 - # Download extracted syslog file from DUT to the local host - loganalyzer.save_extracted_log(dest="/tmp/log/syslog") - - # Example 4 - # Update previously configured marker - # Now start marker will have new prefix - test_bgp - loganalyzer.update_marker_prefix("test_bgp") - - def get_platform_info(dut): - """ - Example callback which gets DUT platform information and returns obtained string - """ - return dut.command("show platform summary") - - # Example 5 - # Execute specific function and analyze logs during function execution - run_cmd_result = loganalyzer.run_cmd(get_platform_info, ans_host) - # Process result of "get_platform_info" callback - assert all(item in run_cmd_result["stdout"] for item in ["Platform", "HwSKU", "ASIC"]) is True, "Unexpected output returned after command execution: {}".format(run_cmd_result) - - # Example 6 - # Clear current 
regexp match list - loganalyzer.match_regex = [] - # Load regular expressions from the specified file defined in COMMON_MATCH variable - reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH) - # Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file - loganalyzer.match_regex.extend(reg_exp) - marker = loganalyzer.init() - # PERFORM TEST CASE STEPS ... - # Verify that error messages were not found in DUT syslog. Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH - loganalyzer.analyze(marker) - - # Example 7 - loganalyzer.expect_regex = [] - # Add specific EXPECTED regular expression - # Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression - # If such message will not be present in DUT syslog, it will raise exception - loganalyzer.expect_regex.append("kernel:.*Oops") - # Add start marker to the DUT syslog - marker = loganalyzer.init() - # PERFORM TEST CASE STEPS ... - # Verify that expected error messages WERE FOUND in DUT syslog. Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression - loganalyzer.analyze(marker) - - # Example 8 - loganalyzer.expect_regex = [] - # Add specific EXPECTED regular expression - # Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression - # If such message will not be present in DUT syslog, it will raise exception - loganalyzer.expect_regex.append("kernel:.*Oops") - # PERFORM TEST CASE STEPS ... - # Verify that expected error messages WERE FOUND in DUT syslog. 
Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression - loganalyzer.run_cmd(ans_host.command, "echo '---------- kernel: says Oops --------------' >> /var/log/syslog") -``` +#### Loganalyzer API usage example + +Below is described possibility of loganalyzer fixture/module usage. + +##### Loganalyzer fixture +In the root conftest there is implemented "loganalyzer" pytest fixture, which starts automatically for all test cases. +Fixture main flow: +- loganalyzer will add start marker before test case start +- loganalyzer will add stop marker after test case finish +- if loganalyzer analysis is not disabled for current test case it will analyze DUT syslog and display results. +If loganalyzer find specified messages which corresponds to defined regular expressions, it will display found messages and pytest will generate 'error'. + +#### To skip loganalyzer analysis for: +- all test cases - use pytest command line option ```--disable_loganalyzer``` +- specific test case: mark test case with ```@pytest.mark.disable_loganalyzer``` decorator. Example is shown below. + + +#### Notes: +loganalyzer.init() - can be called several times without calling "loganalyzer.analyze(marker)" between calls. Each call return its unique marker, which is used for "analyze" phase - loganalyzer.analyze(marker). + + +### Loganalyzer usage example + +#### Example calling loganalyzer init/analyze methods automatically by using with statement +```python + # Read existed common regular expressions located with legacy loganalyzer module + loganalyzer.load_common_config() + # Analyze syslog for code executed inside with statement + with loganalyzer as analyzer: + logging.debug("============== Test steps ===================") + # Add test code here ... 
+ time.sleep(1) + + # Separately analyze syslog for code executed inside each with statement + with loganalyzer as analyzer: + # Clear current regexp match list if there is a need to have clear configuration + loganalyzer.match_regex = [] + # Load regular expressions from the specified file + reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH) + # Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file + loganalyzer.match_regex.extend(reg_exp) + # Add test code here ... + # Here will be executed syslog analysis on context manager __exit__ + time.sleep(1) + with loganalyzer as analyzer: + # Clear current regexp match list if there is a need to have clear configuration + loganalyzer.match_regex = [] + # Set match criteria (regular expression) to custom regexp - "test:.*Error" + loganalyzer.match_regex.extend(["test:.*Error"]) + # Add test code here ... + # Here will be executed syslog analysis on context manager __exit__ + time.sleep(1) + with loganalyzer as analyzer: + # Add test code here ... + # Here will be executed syslog analysis on context manager __exit__ + time.sleep(1) +``` + +#### Example calling loganalyzer init/analyze methods directly in test case +```python + # Example 1 + # Read existed common regular expressions located with legacy loganalyzer module + loganalyzer.load_common_config() + # Add start marker to the DUT syslog + marker = loganalyzer.init() + # PERFORM TEST CASE STEPS ... + # Verify that error messages were not found in DUT syslog. Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH + loganalyzer.analyze(marker) + + # Example 2 + # Read existed common regular expressions located with legacy loganalyzer module + loganalyzer.load_common_config() + # Add start marker to the DUT syslog + marker = loganalyzer.init() + # PERFORM TEST CASE STEPS ... 
+ # Get summary of analyzed DUT syslog + result = loganalyzer.analyze(marker, fail=False) + # Verify that specific amount of error messages found in syslog # Negative test case + assert result["total"]["match"] == 2, "Not found expected errors: {}".format(result) + + # Example 3 + # Download extracted syslog file from DUT to the local host + loganalyzer.save_extracted_log(dest="/tmp/log/syslog") + + # Example 4 + # Update previously configured marker + # Now start marker will have new prefix - test_bgp + loganalyzer.update_marker_prefix("test_bgp") + + def get_platform_info(dut): + """ + Example callback which gets DUT platform information and returns obtained string + """ + return dut.command("show platform summary") + + # Example 5 + # Execute specific function and analyze logs during function execution + run_cmd_result = loganalyzer.run_cmd(get_platform_info, ans_host) + # Process result of "get_platform_info" callback + assert all(item in run_cmd_result["stdout"] for item in ["Platform", "HwSKU", "ASIC"]) is True, "Unexpected output returned after command execution: {}".format(run_cmd_result) + + # Example 6 + # Clear current regexp match list + loganalyzer.match_regex = [] + # Load regular expressions from the specified file defined in COMMON_MATCH variable + reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH) + # Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file + loganalyzer.match_regex.extend(reg_exp) + marker = loganalyzer.init() + # PERFORM TEST CASE STEPS ... + # Verify that error messages were not found in DUT syslog. 
Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH + loganalyzer.analyze(marker) + + # Example 7 + loganalyzer.expect_regex = [] + # Add specific EXPECTED regular expression + # Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression + # If such message will not be present in DUT syslog, it will raise exception + loganalyzer.expect_regex.append("kernel:.*Oops") + # Add start marker to the DUT syslog + marker = loganalyzer.init() + # PERFORM TEST CASE STEPS ... + # Verify that expected error messages WERE FOUND in DUT syslog. Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression + loganalyzer.analyze(marker) + + # Example 8 + loganalyzer.expect_regex = [] + # Add specific EXPECTED regular expression + # Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression + # If such message will not be present in DUT syslog, it will raise exception + loganalyzer.expect_regex.append("kernel:.*Oops") + # PERFORM TEST CASE STEPS ... + # Verify that expected error messages WERE FOUND in DUT syslog. 
Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression + loganalyzer.run_cmd(ans_host.command, "echo '---------- kernel: says Oops --------------' >> /var/log/syslog") +``` diff --git a/tests/common/plugins/loganalyzer/__init__.py b/tests/common/plugins/loganalyzer/__init__.py new file mode 100644 index 00000000000..3810d371d6c --- /dev/null +++ b/tests/common/plugins/loganalyzer/__init__.py @@ -0,0 +1,27 @@ +import logging +import pytest + +from loganalyzer import LogAnalyzer + + +def pytest_addoption(parser): + parser.addoption("--disable_loganalyzer", action="store_true", default=False, + help="disable loganalyzer analysis for 'loganalyzer' fixture") + + +@pytest.fixture(autouse=True) +def loganalyzer(duthost, request): + loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix=request.node.name) + logging.info("Add start marker into DUT syslog") + marker = loganalyzer.init() + yield loganalyzer + if not request.config.getoption("--disable_loganalyzer") and "disable_loganalyzer" not in request.keywords: + logging.info("Load config and analyze log") + # Read existed common regular expressions located with legacy loganalyzer module + loganalyzer.load_common_config() + # Parse syslog and process result. 
Raise "LogAnalyzerError" exception if: total match or expected missing + # match is not equal to zero + loganalyzer.analyze(marker) + else: + logging.info("Add end marker into DUT syslog") + loganalyzer._add_end_marker(marker) diff --git a/tests/loganalyzer/loganalyzer.py b/tests/common/plugins/loganalyzer/loganalyzer.py old mode 100755 new mode 100644 similarity index 97% rename from tests/loganalyzer/loganalyzer.py rename to tests/common/plugins/loganalyzer/loganalyzer.py index 8d9b8b1e663..eee59ce1a52 --- a/tests/loganalyzer/loganalyzer.py +++ b/tests/common/plugins/loganalyzer/loganalyzer.py @@ -1,232 +1,233 @@ -import sys -import logging -import os -import re -import time -import pprint -import system_msg_handler - -from system_msg_handler import AnsibleLogAnalyzer as ansible_loganalyzer -from os.path import join, split -from os.path import normpath - -ANSIBLE_LOGANALYZER_MODULE = system_msg_handler.__file__.replace(r".pyc", ".py") -COMMON_MATCH = join(split(__file__)[0], "loganalyzer_common_match.txt") -COMMON_IGNORE = join(split(__file__)[0], "loganalyzer_common_ignore.txt") -COMMON_EXPECT = join(split(__file__)[0], "loganalyzer_common_expect.txt") -SYSLOG_TMP_FOLDER = "/tmp/pytest-run/syslog" - - -class LogAnalyzerError(Exception): - """Raised when loganalyzer found matches during analysis phase.""" - def __repr__(self): - return pprint.pformat(self.message) - - -class LogAnalyzer: - def __init__(self, ansible_host, marker_prefix, dut_run_dir="/tmp"): - self.ansible_host = ansible_host - self.dut_run_dir = dut_run_dir - self.extracted_syslog = os.path.join(self.dut_run_dir, "syslog") - self.marker_prefix = marker_prefix - self.ansible_loganalyzer = ansible_loganalyzer(self.marker_prefix, False) - - self.match_regex = [] - self.expect_regex = [] - self.ignore_regex = [] - self._markers = [] - - def _add_end_marker(self, marker): - """ - @summary: Add stop marker into syslog on the DUT. 
- - @return: True for successfull execution False otherwise - """ - self.ansible_host.copy(src=ANSIBLE_LOGANALYZER_MODULE, dest=os.path.join(self.dut_run_dir, "loganalyzer.py")) - - cmd = "python {run_dir}/loganalyzer.py --action add_end_marker --run_id {marker}".format(run_dir=self.dut_run_dir, marker=marker) - - logging.debug("Adding end marker '{}'".format(marker)) - self.ansible_host.command(cmd) - - def __enter__(self): - """ - Store start markers which are used in analyze phase. - """ - self._markers.append(self.init()) - - def __exit__(self, *args): - """ - Analyze syslog messages. - """ - self.analyze(self._markers.pop()) - - def _verify_log(self, result): - """ - Verify that total match and expected missing match equals to zero or raise exception otherwise. - Verify that expected_match is not equal to zero when there is configured expected regexp in self.expect_regex list - """ - if not result: - raise LogAnalyzerError("Log analyzer failed - no result.") - if result["total"]["match"] != 0 or result["total"]["expected_missing_match"] != 0: - raise LogAnalyzerError(result) - - # Check for negative case - if self.expect_regex and result["total"]["expected_match"] == 0: - raise LogAnalyzerError(result) - - def update_marker_prefix(self, marker_prefix): - """ - @summary: Update configured marker prefix - """ - self.marker_prefix = marker_prefix - - def load_common_config(self): - """ - @summary: Load regular expressions from common files, which are localted in folder with legacy loganalyzer. - Loaded regular expressions are used by "analyze" method to match expected text in the downloaded log file. - """ - self.match_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_MATCH])[1] - self.ignore_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_IGNORE])[1] - self.expect_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_EXPECT])[1] - - def parse_regexp_file(self, src): - """ - @summary: Get regular expressions defined in src file. 
- """ - return self.ansible_loganalyzer.create_msg_regex([src])[1] - - def run_cmd(self, callback, *args, **kwargs): - """ - @summary: Initialize loganalyzer, execute function and analyze syslog. - - @param callback: Python callable or function to be executed. - @param args: Input arguments for callback function. - @param kwargs: Input key value arguments for callback function. - - @return: Callback execution result - """ - marker = self.init() - try: - call_result = callback(*args, **kwargs) - except Exception as err: - logging.error("Error during callback execution:\n{}".format(err)) - logging.debug("Log analysis result\n".format(self.analyze(marker))) - raise err - self.analyze(marker) - - return call_result - - def init(self): - """ - @summary: Add start marker into syslog on the DUT. - - @return: True for successfull execution False otherwise - """ - logging.debug("Loganalyzer init") - - self.ansible_host.copy(src=ANSIBLE_LOGANALYZER_MODULE, dest=os.path.join(self.dut_run_dir, "loganalyzer.py")) - - start_marker = ".".join((self.marker_prefix, time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime()))) - cmd = "python {run_dir}/loganalyzer.py --action init --run_id {start_marker}".format(run_dir=self.dut_run_dir, start_marker=start_marker) - - logging.debug("Adding start marker '{}'".format(start_marker)) - self.ansible_host.command(cmd) - return start_marker - - def analyze(self, marker, fail=True): - """ - @summary: Extract syslog logs based on the start/stop markers and compose one file. Download composed file, analyze file based on defined regular expressions. - - @param marker: Marker obtained from "init" method. - @param fail: Flag to enable/disable raising exception when loganalyzer find error messages. - - @return: If "fail" is False - return dictionary of parsed syslog summary, if dictionary can't be parsed - return empty dictionary. If "fail" is True and if found match messages - raise exception. 
- """ - logging.debug("Loganalyzer analyze") - analyzer_summary = {"total": {"match": 0, "expected_match": 0, "expected_missing_match": 0}, - "match_files": {}, - "match_messages": {}, - "expect_messages": {}, - "unused_expected_regexp": [] - } - tmp_folder = ".".join((SYSLOG_TMP_FOLDER, time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime()))) - self.ansible_loganalyzer.run_id = marker - - # Add end marker into DUT syslog - self._add_end_marker(marker) - - try: - # Disable logrotate cron task - self.ansible_host.command("sed -i 's/^/#/g' /etc/cron.d/logrotate") - - logging.debug("Waiting for logrotate from previous cron task run to finish") - # Wait for logrotate from previous cron task run to finish - end = time.time() + 60 - while time.time() < end: - # Verify for exception because self.ansible_host automatically handle command return codes and raise exception for none zero code - try: - self.ansible_host.command("pgrep -f logrotate") - except Exception: - break - else: - time.sleep(5) - continue - else: - logging.error("Logrotate from previous task was not finished during 60 seconds") - - # On DUT extract syslog files from /var/log/ and create one file by location - /tmp/syslog - self.ansible_host.extract_log(directory='/var/log', file_prefix='syslog', start_string='start-LogAnalyzer-{}'.format(marker), target_filename=self.extracted_syslog) - finally: - # Enable logrotate cron task back - self.ansible_host.command("sed -i 's/^#//g' /etc/cron.d/logrotate") - - # Download extracted logs from the DUT to the temporal folder defined in SYSLOG_TMP_FOLDER - self.save_extracted_log(dest=tmp_folder) - - match_messages_regex = re.compile('|'.join(self.match_regex)) if len(self.match_regex) else None - ignore_messages_regex = re.compile('|'.join(self.ignore_regex)) if len(self.ignore_regex) else None - expect_messages_regex = re.compile('|'.join(self.expect_regex)) if len(self.expect_regex) else None - - analyzer_parse_result = 
self.ansible_loganalyzer.analyze_file_list([tmp_folder], match_messages_regex, ignore_messages_regex, expect_messages_regex) - # Print syslog file content and remove the file - with open(tmp_folder) as fo: - logging.debug("Syslog content:\n\n{}".format(fo.read())) - os.remove(tmp_folder) - - total_match_cnt = 0 - total_expect_cnt = 0 - expected_lines_total = [] - unused_regex_messages = [] - - for key, value in analyzer_parse_result.iteritems(): - matching_lines, expecting_lines = value - analyzer_summary["total"]["match"] += len(matching_lines) - analyzer_summary["total"]["expected_match"] += len(expecting_lines) - analyzer_summary["match_files"][key] = {"match": len(matching_lines), "expected_match": len(expecting_lines)} - analyzer_summary["match_messages"][key] = matching_lines - analyzer_summary["expect_messages"][key] = expecting_lines - expected_lines_total.extend(expecting_lines) - - # Find unused regex matches - for regex in self.expect_regex: - for line in expected_lines_total: - if re.search(regex, line): - break - else: - unused_regex_messages.append(regex) - analyzer_summary["total"]["expected_missing_match"] = len(unused_regex_messages) - analyzer_summary["unused_expected_regexp"] = unused_regex_messages - - if fail: - self._verify_log(analyzer_summary) - else: - return analyzer_summary - - def save_extracted_log(self, dest): - """ - @summary: Download extracted syslog log file to the ansible host. - - @param dest: File path to store downloaded log file. 
- """ - self.ansible_host.fetch(dest=dest, src=self.extracted_syslog, flat="yes") +import sys +import logging +import os +import re +import time +import pprint + +import system_msg_handler + +from system_msg_handler import AnsibleLogAnalyzer as ansible_loganalyzer +from os.path import join, split +from os.path import normpath + +ANSIBLE_LOGANALYZER_MODULE = system_msg_handler.__file__.replace(r".pyc", ".py") +COMMON_MATCH = join(split(__file__)[0], "loganalyzer_common_match.txt") +COMMON_IGNORE = join(split(__file__)[0], "loganalyzer_common_ignore.txt") +COMMON_EXPECT = join(split(__file__)[0], "loganalyzer_common_expect.txt") +SYSLOG_TMP_FOLDER = "/tmp/pytest-run/syslog" + + +class LogAnalyzerError(Exception): + """Raised when loganalyzer found matches during analysis phase.""" + def __repr__(self): + return pprint.pformat(self.message) + + +class LogAnalyzer: + def __init__(self, ansible_host, marker_prefix, dut_run_dir="/tmp"): + self.ansible_host = ansible_host + self.dut_run_dir = dut_run_dir + self.extracted_syslog = os.path.join(self.dut_run_dir, "syslog") + self.marker_prefix = marker_prefix + self.ansible_loganalyzer = ansible_loganalyzer(self.marker_prefix, False) + + self.match_regex = [] + self.expect_regex = [] + self.ignore_regex = [] + self._markers = [] + + def _add_end_marker(self, marker): + """ + @summary: Add stop marker into syslog on the DUT. + + @return: True for successful execution, False otherwise + """ + self.ansible_host.copy(src=ANSIBLE_LOGANALYZER_MODULE, dest=os.path.join(self.dut_run_dir, "loganalyzer.py")) + + cmd = "python {run_dir}/loganalyzer.py --action add_end_marker --run_id {marker}".format(run_dir=self.dut_run_dir, marker=marker) + + logging.debug("Adding end marker '{}'".format(marker)) + self.ansible_host.command(cmd) + + def __enter__(self): + """ + Store start markers which are used in analyze phase. + """ + self._markers.append(self.init()) + + def __exit__(self, *args): + """ + Analyze syslog messages. 
+ """ + self.analyze(self._markers.pop()) + + def _verify_log(self, result): + """ + Verify that total match and expected missing match are equal to zero or raise exception otherwise. + Verify that expected_match is not equal to zero when there is configured expected regexp in self.expect_regex list + """ + if not result: + raise LogAnalyzerError("Log analyzer failed - no result.") + if result["total"]["match"] != 0 or result["total"]["expected_missing_match"] != 0: + raise LogAnalyzerError(result) + + # Check for negative case + if self.expect_regex and result["total"]["expected_match"] == 0: + raise LogAnalyzerError(result) + + def update_marker_prefix(self, marker_prefix): + """ + @summary: Update configured marker prefix + """ + self.marker_prefix = marker_prefix + + def load_common_config(self): + """ + @summary: Load regular expressions from common files, which are located in folder with legacy loganalyzer. + Loaded regular expressions are used by "analyze" method to match expected text in the downloaded log file. + """ + self.match_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_MATCH])[1] + self.ignore_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_IGNORE])[1] + self.expect_regex = self.ansible_loganalyzer.create_msg_regex([COMMON_EXPECT])[1] + + def parse_regexp_file(self, src): + """ + @summary: Get regular expressions defined in src file. + """ + return self.ansible_loganalyzer.create_msg_regex([src])[1] + + def run_cmd(self, callback, *args, **kwargs): + """ + @summary: Initialize loganalyzer, execute function and analyze syslog. + + @param callback: Python callable or function to be executed. + @param args: Input arguments for callback function. + @param kwargs: Input key value arguments for callback function. 
+ + @return: Callback execution result + """ + marker = self.init() + try: + call_result = callback(*args, **kwargs) + except Exception as err: + logging.error("Error during callback execution:\n{}".format(err)) + logging.debug("Log analysis result\n".format(self.analyze(marker))) + raise err + self.analyze(marker) + + return call_result + + def init(self): + """ + @summary: Add start marker into syslog on the DUT. + + @return: True for successful execution, False otherwise + """ + logging.debug("Loganalyzer init") + + self.ansible_host.copy(src=ANSIBLE_LOGANALYZER_MODULE, dest=os.path.join(self.dut_run_dir, "loganalyzer.py")) + + start_marker = ".".join((self.marker_prefix, time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime()))) + cmd = "python {run_dir}/loganalyzer.py --action init --run_id {start_marker}".format(run_dir=self.dut_run_dir, start_marker=start_marker) + + logging.debug("Adding start marker '{}'".format(start_marker)) + self.ansible_host.command(cmd) + return start_marker + + def analyze(self, marker, fail=True): + """ + @summary: Extract syslog logs based on the start/stop markers and compose one file. Download composed file, analyze file based on defined regular expressions. + + @param marker: Marker obtained from "init" method. + @param fail: Flag to enable/disable raising exception when loganalyzer finds error messages. + + @return: If "fail" is False - return dictionary of parsed syslog summary, if dictionary can't be parsed - return empty dictionary. If "fail" is True and if found match messages - raise exception. 
+ """ + logging.debug("Loganalyzer analyze") + analyzer_summary = {"total": {"match": 0, "expected_match": 0, "expected_missing_match": 0}, + "match_files": {}, + "match_messages": {}, + "expect_messages": {}, + "unused_expected_regexp": [] + } + tmp_folder = ".".join((SYSLOG_TMP_FOLDER, time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime()))) + self.ansible_loganalyzer.run_id = marker + + # Add end marker into DUT syslog + self._add_end_marker(marker) + + try: + # Disable logrotate cron task + self.ansible_host.command("sed -i 's/^/#/g' /etc/cron.d/logrotate") + + logging.debug("Waiting for logrotate from previous cron task run to finish") + # Wait for logrotate from previous cron task run to finish + end = time.time() + 60 + while time.time() < end: + # Verify for exception because self.ansible_host automatically handles command return codes and raises an exception for non-zero code + try: + self.ansible_host.command("pgrep -f logrotate") + except Exception: + break + else: + time.sleep(5) + continue + else: + logging.error("Logrotate from previous task was not finished during 60 seconds") + + # On DUT extract syslog files from /var/log/ and create one file by location - /tmp/syslog + self.ansible_host.extract_log(directory='/var/log', file_prefix='syslog', start_string='start-LogAnalyzer-{}'.format(marker), target_filename=self.extracted_syslog) + finally: + # Enable logrotate cron task back + self.ansible_host.command("sed -i 's/^#//g' /etc/cron.d/logrotate") + + # Download extracted logs from the DUT to the temporary folder defined in SYSLOG_TMP_FOLDER + self.save_extracted_log(dest=tmp_folder) + + match_messages_regex = re.compile('|'.join(self.match_regex)) if len(self.match_regex) else None + ignore_messages_regex = re.compile('|'.join(self.ignore_regex)) if len(self.ignore_regex) else None + expect_messages_regex = re.compile('|'.join(self.expect_regex)) if len(self.expect_regex) else None + + analyzer_parse_result = 
self.ansible_loganalyzer.analyze_file_list([tmp_folder], match_messages_regex, ignore_messages_regex, expect_messages_regex) + # Print syslog file content and remove the file + with open(tmp_folder) as fo: + logging.debug("Syslog content:\n\n{}".format(fo.read())) + os.remove(tmp_folder) + + total_match_cnt = 0 + total_expect_cnt = 0 + expected_lines_total = [] + unused_regex_messages = [] + + for key, value in analyzer_parse_result.iteritems(): + matching_lines, expecting_lines = value + analyzer_summary["total"]["match"] += len(matching_lines) + analyzer_summary["total"]["expected_match"] += len(expecting_lines) + analyzer_summary["match_files"][key] = {"match": len(matching_lines), "expected_match": len(expecting_lines)} + analyzer_summary["match_messages"][key] = matching_lines + analyzer_summary["expect_messages"][key] = expecting_lines + expected_lines_total.extend(expecting_lines) + + # Find unused regex matches + for regex in self.expect_regex: + for line in expected_lines_total: + if re.search(regex, line): + break + else: + unused_regex_messages.append(regex) + analyzer_summary["total"]["expected_missing_match"] = len(unused_regex_messages) + analyzer_summary["unused_expected_regexp"] = unused_regex_messages + + if fail: + self._verify_log(analyzer_summary) + else: + return analyzer_summary + + def save_extracted_log(self, dest): + """ + @summary: Download extracted syslog log file to the ansible host. + + @param dest: File path to store downloaded log file. 
+ """ + self.ansible_host.fetch(dest=dest, src=self.extracted_syslog, flat="yes") diff --git a/tests/common/plugins/loganalyzer/loganalyzer_common_expect.txt b/tests/common/plugins/loganalyzer/loganalyzer_common_expect.txt new file mode 120000 index 00000000000..3887f77b365 --- /dev/null +++ b/tests/common/plugins/loganalyzer/loganalyzer_common_expect.txt @@ -0,0 +1 @@ +../../../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_expect.txt \ No newline at end of file diff --git a/tests/common/plugins/loganalyzer/loganalyzer_common_ignore.txt b/tests/common/plugins/loganalyzer/loganalyzer_common_ignore.txt new file mode 120000 index 00000000000..68c59f2a59c --- /dev/null +++ b/tests/common/plugins/loganalyzer/loganalyzer_common_ignore.txt @@ -0,0 +1 @@ +../../../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_ignore.txt \ No newline at end of file diff --git a/tests/common/plugins/loganalyzer/loganalyzer_common_match.txt b/tests/common/plugins/loganalyzer/loganalyzer_common_match.txt new file mode 120000 index 00000000000..c106be50f4a --- /dev/null +++ b/tests/common/plugins/loganalyzer/loganalyzer_common_match.txt @@ -0,0 +1 @@ +../../../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_match.txt \ No newline at end of file diff --git a/tests/common/plugins/loganalyzer/system_msg_handler.py b/tests/common/plugins/loganalyzer/system_msg_handler.py new file mode 120000 index 00000000000..3627cbb6cf3 --- /dev/null +++ b/tests/common/plugins/loganalyzer/system_msg_handler.py @@ -0,0 +1 @@ +../../../../ansible/roles/test/files/tools/loganalyzer/loganalyzer.py \ No newline at end of file diff --git a/tests/ptfadapter/README.md b/tests/common/plugins/ptfadapter/README.md similarity index 93% rename from tests/ptfadapter/README.md rename to tests/common/plugins/ptfadapter/README.md index 4a51cb15ecc..0b42bf6170e 100644 --- a/tests/ptfadapter/README.md +++ b/tests/common/plugins/ptfadapter/README.md @@ -3,9 +3,9 @@ ## Overview 
```PtfTestAdapter``` provides an interface to send and receive traffic in the same way as ```ptf.base_tests.BaseTest``` object in PTF framework. -It makes use of ```ptf_nn_agent.py``` script running on PTF host, connectes to it over TCP and intialize PTF data plane thread. +It makes use of ```ptf_nn_agent.py``` script running on PTF host, connects to it over TCP and initialize PTF data plane thread. -**NOTE** a good network connection between sonic-mgmt node and PTF host is requiered for traffic tests to be stable. +**NOTE** a good network connection between sonic-mgmt node and PTF host is required for traffic tests to be stable. ## Usage in pytest diff --git a/tests/common/plugins/ptfadapter/__init__.py b/tests/common/plugins/ptfadapter/__init__.py new file mode 100644 index 00000000000..7e890f50c47 --- /dev/null +++ b/tests/common/plugins/ptfadapter/__init__.py @@ -0,0 +1,68 @@ +"""This module provides ptfadapter fixture to be used by tests to send/receive traffic via PTF ports""" +import os +import pytest + +from ptfadapter import PtfTestAdapter +from ansible_host import AnsibleHost + +DEFAULT_PTF_NN_PORT = 10900 +DEFAULT_DEVICE_NUM = 0 +ETH_PFX = 'eth' + + +def get_ifaces(netdev_output): + """ parse /proc/net/dev content + :param netdev_output: content of /proc/net/dev + :return: interface names list + """ + + ifaces = [] + for line in netdev_output.split('\n'): + # Skip a header + if ':' not in line: + continue + + iface = line.split(':')[0].strip() + + # Skip not FP interfaces + if ETH_PFX not in iface: + continue + + ifaces.append(iface) + + return ifaces + + +@pytest.fixture(scope='module') +def ptfadapter(ptfhost, testbed): + """return ptf test adapter object. + The fixture is module scope, because usually there is not need to + restart PTF nn agent and reinitialize data plane thread on every + test class or test function/method. 
Session scope should also be Ok, + however if something goes really wrong in one test module it is safer + to restart PTF before proceeding running other test modules + """ + + # get the eth interfaces from PTF and initialize ifaces_map + res = ptfhost.command('cat /proc/net/dev') + ifaces = get_ifaces(res['stdout']) + ifaces_map = {int(ifname.replace(ETH_PFX, '')): ifname for ifname in ifaces} + + # generate supervisor configuration for ptf_nn_agent + ptfhost.host.options['variable_manager'].extra_vars.update({ + 'device_num': DEFAULT_DEVICE_NUM, + 'ptf_nn_port': DEFAULT_PTF_NN_PORT, + 'ifaces_map': ifaces_map, + }) + + current_file_dir = os.path.dirname(os.path.realpath(__file__)) + + ptfhost.template(src=os.path.join(current_file_dir, 'templates/ptf_nn_agent.conf.ptf.j2'), + dest='/etc/supervisor/conf.d/ptf_nn_agent.conf') + + # reread configuration and update supervisor + ptfhost.command('supervisorctl reread') + ptfhost.command('supervisorctl update') + + with PtfTestAdapter(testbed['ptf_ip'], DEFAULT_PTF_NN_PORT, 0, len(ifaces_map)) as adapter: + yield adapter diff --git a/tests/ptfadapter/ptfadapter.py b/tests/common/plugins/ptfadapter/ptfadapter.py similarity index 99% rename from tests/ptfadapter/ptfadapter.py rename to tests/common/plugins/ptfadapter/ptfadapter.py index 1c7c7b25953..e3364402f4c 100644 --- a/tests/ptfadapter/ptfadapter.py +++ b/tests/common/plugins/ptfadapter/ptfadapter.py @@ -88,4 +88,3 @@ def reinit(self, ptf_config=None): """ self.kill() self._init_ptf_dataplane(self.ptf_ip, self.ptf_nn_port, self.device_num, self.ptf_ports_num, ptf_config) - diff --git a/tests/ptfadapter/templates/ptf_nn_agent.conf.ptf.j2 b/tests/common/plugins/ptfadapter/templates/ptf_nn_agent.conf.ptf.j2 similarity index 100% rename from tests/ptfadapter/templates/ptf_nn_agent.conf.ptf.j2 rename to tests/common/plugins/ptfadapter/templates/ptf_nn_agent.conf.ptf.j2 diff --git a/tests/conftest.py b/tests/conftest.py index d0873b3ebed..4cd2c5c5ea0 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -11,16 +11,16 @@ import ipaddr as ipaddress from ansible_host import AnsibleHost -from loganalyzer import LogAnalyzer from common.sanity_check import check_critical_services, check_links_up from common.devices import SonicHost, Localhost, PTFHost logger = logging.getLogger(__name__) -pytest_plugins = ('ptf_fixtures', - 'ansible_fixtures', - 'plugins.dut_monitor.pytest_dut_monitor', - 'fib', +pytest_plugins = ('common.plugins.ptfadapter', + 'common.plugins.ansible_fixtures', + 'common.plugins.dut_monitor', + 'common.plugins.fib', + 'common.plugins.loganalyzer', 'common.plugins.psu_controller') @@ -65,13 +65,12 @@ def __init__(self, testbed_file): def pytest_addoption(parser): parser.addoption("--testbed", action="store", default=None, help="testbed name") parser.addoption("--testbed_file", action="store", default=None, help="testbed file name") - parser.addoption("--disable_loganalyzer", action="store_true", default=False, - help="disable loganalyzer analysis for 'loganalyzer' fixture") # test_vrf options parser.addoption("--vrf_capacity", action="store", default=None, type=int, help="vrf capacity of dut (4-1000)") parser.addoption("--vrf_test_count", action="store", default=None, type=int, help="number of vrf to be tested (1-997)") + @pytest.fixture(scope="session") def testbed(request): """ @@ -118,6 +117,7 @@ def testbed_devices(ansible_adhoc, testbed): return devices + def disable_ssh_timout(dut): ''' @summary disable ssh session on target dut @@ -130,6 +130,7 @@ def disable_ssh_timout(dut): dut.command("sudo systemctl restart ssh") time.sleep(5) + def enable_ssh_timout(dut): ''' @summary: enable ssh session on target dut @@ -162,6 +163,7 @@ def duthost(testbed_devices, request): if stop_ssh_timeout is not None: enable_ssh_timout(duthost) + @pytest.fixture(scope="module") def ptfhost(testbed_devices): """ @@ -179,22 +181,6 @@ def eos(): return eos -@pytest.fixture(autouse=True) -def loganalyzer(duthost, request): - 
loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix=request.node.name) - # Add start marker into DUT syslog - marker = loganalyzer.init() - yield loganalyzer - if not request.config.getoption("--disable_loganalyzer") and "disable_loganalyzer" not in request.keywords: - # Read existed common regular expressions located with legacy loganalyzer module - loganalyzer.load_common_config() - # Parse syslog and process result. Raise "LogAnalyzerError" exception if: total match or expected missing - # match is not equal to zero - loganalyzer.analyze(marker) - else: - # Add end marker into DUT syslog - loganalyzer._add_end_marker(marker) - @pytest.fixture(scope="session") def creds(): """ read and yield lab configuration """ @@ -205,6 +191,7 @@ def creds(): creds.update(yaml.safe_load(stream)) return creds + @pytest.fixture(scope="module", autouse=True) def base_sanity(duthost): """perform base sanity checks before and after each test""" @@ -217,6 +204,7 @@ def base_sanity(duthost): check_critical_services(duthost) check_links_up(duthost) + @pytest.hookimpl(tryfirst=True, hookwrapper=True) def pytest_runtest_makereport(item, call): # execute all other hooks to obtain the report object @@ -234,6 +222,7 @@ def fetch_dbs(duthost, testname): duthost.shell("redis-dump -d {} --pretty -o {}.json".format(db[0], db[1])) duthost.fetch(src="{}.json".format(db[1]), dest="logs/{}".format(testname)) + @pytest.fixture def collect_techsupport(request, duthost): yield diff --git a/tests/loganalyzer/__init__.py b/tests/loganalyzer/__init__.py deleted file mode 100755 index e32a3567489..00000000000 --- a/tests/loganalyzer/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .loganalyzer import LogAnalyzer, COMMON_MATCH, COMMON_IGNORE, COMMON_EXPECT, LogAnalyzerError diff --git a/tests/loganalyzer/loganalyzer_common_expect.txt b/tests/loganalyzer/loganalyzer_common_expect.txt deleted file mode 120000 index 2ae3246b088..00000000000 --- a/tests/loganalyzer/loganalyzer_common_expect.txt +++ 
/dev/null @@ -1 +0,0 @@ -../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_expect.txt \ No newline at end of file diff --git a/tests/loganalyzer/loganalyzer_common_ignore.txt b/tests/loganalyzer/loganalyzer_common_ignore.txt deleted file mode 120000 index ad2643fb427..00000000000 --- a/tests/loganalyzer/loganalyzer_common_ignore.txt +++ /dev/null @@ -1 +0,0 @@ -../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_ignore.txt \ No newline at end of file diff --git a/tests/loganalyzer/loganalyzer_common_match.txt b/tests/loganalyzer/loganalyzer_common_match.txt deleted file mode 120000 index 1f865571b62..00000000000 --- a/tests/loganalyzer/loganalyzer_common_match.txt +++ /dev/null @@ -1 +0,0 @@ -../../ansible/roles/test/files/tools/loganalyzer/loganalyzer_common_match.txt \ No newline at end of file diff --git a/tests/loganalyzer/system_msg_handler.py b/tests/loganalyzer/system_msg_handler.py deleted file mode 120000 index 4fd54f816fc..00000000000 --- a/tests/loganalyzer/system_msg_handler.py +++ /dev/null @@ -1 +0,0 @@ -../../ansible/roles/test/files/tools/loganalyzer/loganalyzer.py \ No newline at end of file diff --git a/tests/platform/test_platform_info.py b/tests/platform/test_platform_info.py index f6fb0f9ab09..86edbfcb3d8 100644 --- a/tests/platform/test_platform_info.py +++ b/tests/platform/test_platform_info.py @@ -12,7 +12,7 @@ import pytest -from loganalyzer import LogAnalyzer, LogAnalyzerError +from common.plugins.loganalyzer import LogAnalyzer, LogAnalyzerError from common.utilities import wait_until from thermal_control_test_helper import * diff --git a/tests/platform/test_sfp.py b/tests/platform/test_sfp.py index 2e1be3b9d62..ed9fb062746 100644 --- a/tests/platform/test_sfp.py +++ b/tests/platform/test_sfp.py @@ -14,7 +14,7 @@ import pytest from platform_fixtures import conn_graph_facts -from loganalyzer import LogAnalyzer +from common.plugins.loganalyzer import LogAnalyzer ans_host = None port_mapping = None diff --git 
a/tests/plugins/__init__.py b/tests/plugins/__init__.py deleted file mode 100755 index e69de29bb2d..00000000000 diff --git a/tests/plugins/dut_monitor/__init__.py b/tests/plugins/dut_monitor/__init__.py deleted file mode 100755 index e69de29bb2d..00000000000 diff --git a/tests/ptfadapter/__init__.py b/tests/ptfadapter/__init__.py deleted file mode 100644 index c9eb27c8599..00000000000 --- a/tests/ptfadapter/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from ptfadapter import PtfTestAdapter - -__all__ = ['PtfTestAdapter']