Skip to content
8 changes: 6 additions & 2 deletions ansible/library/extract_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,6 @@
from datetime import datetime
from ansible.module_utils.basic import *

from pprint import pprint


def extract_lines(directory, filename, target_string):
path = os.path.join(directory, filename)
Expand All @@ -100,6 +98,7 @@ def extract_lines(directory, filename, target_string):

return result


def extract_number(s):
"""Extracts number from string, if not number found returns 0"""
ns = re.findall(r'\d+', s)
Expand All @@ -112,6 +111,10 @@ def extract_number(s):
def convert_date(s):
dt = None
re_result = re.findall(r'^\S{3}\s{1,2}\d{1,2} \d{2}:\d{2}:\d{2}\.?\d*', s)
# Workaround for pytest-ansible
loc = locale.getlocale()
locale.setlocale(locale.LC_ALL, (None, None))

if len(re_result) > 0:
str_date = re_result[0]
try:
Expand All @@ -122,6 +125,7 @@ def convert_date(s):
re_result = re.findall(r'^\d{4}-\d{2}-\d{2}\.\d{2}:\d{2}:\d{2}\.\d{6}', s)
str_date = re_result[0]
dt = datetime.strptime(str_date, '%Y-%m-%d.%X.%f')
locale.setlocale(locale.LC_ALL, loc)

return dt

Expand Down
78 changes: 49 additions & 29 deletions ansible/roles/test/files/tools/loganalyzer/loganalyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@
comment_key = '#'
system_log_file = '/var/log/syslog'

#-- List of ERROR codes to be returned by LogAnalyzer
#-- List of ERROR codes to be returned by AnsibleLogAnalyzer
err_duplicate_start_marker = -1
err_duplicate_end_marker = -2
err_no_end_marker = -3
err_no_start_marker = -4
err_invalid_string_format = -5
err_invalid_input = -6

class LogAnalyzer:
class AnsibleLogAnalyzer:
'''
@summary: Overview of functionality

Expand All @@ -52,10 +52,10 @@ class LogAnalyzer:
AND will not match set of 'ignore' regex expressions, will be considered a
'match' and will be reported.

LogAnalyzer will be called initially before any test has ran, and will be
AnsibleLogAnalyzer will be called initially before any test has ran, and will be
instructed to place 'start' marker into all log files to be analyzed.
When tests have ran, LogAnalyzer will be instructed to place end-marker
into the log files. After this, LogAnalyzer will be invoked to perform the
When tests have ran, AnsibleLogAnalyzer will be instructed to place end-marker
into the log files. After this, AnsibleLogAnalyzer will be invoked to perform the
analysis of logs. The analysis will be performed on specified log files.
For each log file only the content between start/end markers will be analyzed.

Expand Down Expand Up @@ -105,25 +105,41 @@ def create_end_marker(self):
return self.end_marker_prefix + "-" + self.run_id
#---------------------------------------------------------------------

def place_marker(self, log_file_list, marker):
def place_marker_to_file(self, log_file, marker):
    '''
    @summary: Place marker into the specified log file.
    @param log_file: File path to be applied with marker.
    @param marker: Marker to be placed into the log file.
    '''
    if not len(log_file) or self.is_filename_stdin(log_file):
        self.print_diagnostic_message('Log file {} not found. Skip adding marker.'.format(log_file))
        # Bug fix: the original printed the skip diagnostic but fell through
        # and still attempted to open the empty/stdin path for append.
        return
    self.print_diagnostic_message('log file:{}, place marker {}'.format(log_file, marker))
    with open(log_file, 'a') as file:
        file.write(marker)
        file.write('\n')
        file.flush()

def place_marker_to_syslog(self, marker):
    '''
    @summary: Write the given marker into syslog (via '/dev/log').
    @param marker: Marker string to be emitted to syslog.
    '''
    sys_logger = self.init_sys_logger()
    # Emit the marker followed by a blank record, matching the file-marker layout.
    for entry in (marker, '\n'):
        sys_logger.info(entry)

def place_marker(self, log_file_list, marker):
    '''
    @summary: Place marker into '/dev/log' and each log file specified.
    @param log_file_list: List of file paths to be applied with marker.
    @param marker: Marker to be placed into log files.
    '''
    # NOTE(review): this span contained both the pre-refactor inline body and
    # the new helper calls (diff residue); only the refactored version is kept.
    for log_file in log_file_list:
        # Skip empty entries and stdin pseudo-files; nothing to mark there.
        if not len(log_file) or self.is_filename_stdin(log_file):
            continue
        self.place_marker_to_file(log_file, marker)

    self.place_marker_to_syslog(marker)

    return
#---------------------------------------------------------------------
Expand Down Expand Up @@ -183,11 +199,15 @@ def create_msg_regex(self, file_lsit):
skipinitialspace=True)

for index, row in enumerate(csvreader):
row = [item for item in row if item != ""]
self.print_diagnostic_message('[diagnostic]:processing row:%d' % index)
self.print_diagnostic_message('row:%s'% row)
try:
#-- Ignore commented Lines and Empty Lines
if (not row or row[0].startswith(comment_key)):
#-- Ignore Empty Lines
if not row:
continue
#-- Ignore commented Lines
if row[0].startswith(comment_key):
self.print_diagnostic_message('[diagnostic]:skipping row[0]:%s' % row[0])
continue

Expand All @@ -202,13 +222,10 @@ def create_msg_regex(self, file_lsit):
'must be \'s\'(string) or \'r\'(regex)'
%(filename,index))

#-- One error message per line
error_string = row[1]

if (is_regex):
messages_regex.append(error_string)
messages_regex.extend(row[1:])
else:
messages_regex.append(self.error_to_regx(error_string))
messages_regex.append(self.error_to_regx(row[1:]))

except Exception as e:
print 'ERROR: line %d is formatted incorrectly in file %s. Skipping line' % (index, filename)
Expand Down Expand Up @@ -393,10 +410,11 @@ def usage():
print ' init - initialize analysis by placing start-marker'
print ' to all log files specified in --logs parameter.'
print ' analyze - perform log analysis of files specified in --logs parameter.'
print ' add_end_marker - add end marker to all log files specified in --logs parameter.'
print '--out_dir path Directory path where to place output files, '
print ' must be present when --action == analyze'
print '--logs path{,path} List of full paths to log files to be analyzed.'
print ' Implicetly system log file will be also processed'
print ' Implicitly system log file will be also processed'
print '--run_id string String passed to loganalyzer, uniquely identifying '
print ' analysis session. Used to construct start/end markers. '
print '--match_files_in path{,path} List of paths to files containing strings. A string from log file'
Expand Down Expand Up @@ -426,6 +444,8 @@ def check_action(action, log_files_in, out_dir, match_files_in, ignore_files_in,

if (action == 'init'):
ret_code = True
elif (action == 'add_end_marker'):
ret_code = True
elif (action == 'analyze'):
if out_dir is None or len(out_dir) == 0:
print 'ERROR: missing required out_dir for analyze action'
Expand Down Expand Up @@ -500,12 +520,10 @@ def write_result_file(run_id, out_dir, analysis_result_per_file, messages_regex_
out_file.write('Total matches:%d\n' % match_cnt)
# Find unused regex matches
for regex in messages_regex_e:
regex_used = False
for line in expected_lines_total:
if re.search(regex, line):
regex_used = True
break
if not regex_used:
else:
unused_regex_messages.append(regex)

out_file.write('Total expected and found matches:%d\n' % expected_cnt)
Expand All @@ -515,7 +533,6 @@ def write_result_file(run_id, out_dir, analysis_result_per_file, messages_regex_

out_file.write("\n-------------------------------------------------\n\n")
out_file.flush()

#---------------------------------------------------------------------

def write_summary_file(run_id, out_dir, analysis_result_per_file, unused_regex_messages):
Expand Down Expand Up @@ -607,7 +624,7 @@ def main(argv):
usage()
sys.exit(err_invalid_input)

analyzer = LogAnalyzer(run_id, verbose)
analyzer = AnsibleLogAnalyzer(run_id, verbose)

log_file_list = filter(None, log_files_in.split(tokenizer))

Expand Down Expand Up @@ -635,6 +652,9 @@ def main(argv):
unused_regex_messages = []
write_result_file(run_id, out_dir, result, messages_regex_e, unused_regex_messages)
write_summary_file(run_id, out_dir, result, unused_regex_messages)
elif (action == "add_end_marker"):
analyzer.place_marker(log_file_list, analyzer.create_end_marker())
return 0

else:
print 'Unknown action:%s specified' % action
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
r, "\.ERR", "\.WARN", "crash"
r, "kernel:.*Oops", "kernel:.*hung", "kernel.*oom\s"
r, "kernel:.*scheduling", "kernel:.*atomic", "kernel:.*panic"
r, "kernel:.*\serr", "kernel:.*allocation", "kernel:.*kill",
r, "kernel:.*kmemleak.*","kernel:.* Err:"
r, "kernel:.*\serr", "kernel:.*allocation", "kernel:.*kill"
r, "kernel:.*kmemleak.*", "kernel:.* Err:"
s, "ERR"
18 changes: 18 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import ipaddr as ipaddress

from ansible_host import AnsibleHost
from loganalyzer import LogAnalyzer

pytest_plugins = ('ptf_fixtures', 'ansible_fixtures')

Expand Down Expand Up @@ -52,6 +53,7 @@ def __init__(self, testbed_file):
def pytest_addoption(parser):
parser.addoption("--testbed", action="store", default=None, help="testbed name")
parser.addoption("--testbed_file", action="store", default=None, help="testbed file name")
parser.addoption("--disable_loganalyzer", action="store_true", default=False, help="disable loganalyzer analysis for 'loganalyzer' fixture")


@pytest.fixture(scope="session")
Expand Down Expand Up @@ -121,3 +123,19 @@ def eos():
with open('eos/eos.yml') as stream:
eos = yaml.safe_load(stream)
return eos

@pytest.fixture(autouse=True)
def loganalyzer(duthost, request):
    """Per-test fixture that brackets the DUT syslog with markers and, unless
    disabled, analyzes the marked region after the test finishes."""
    analyzer = LogAnalyzer(ansible_host=duthost, marker_prefix=request.node.name)
    # Add start marker into DUT syslog before the test body runs.
    marker = analyzer.init()
    yield analyzer
    disabled = (request.config.getoption("--disable_loganalyzer")
                or "disable_loganalyzer" in request.keywords)
    if disabled:
        # Analysis skipped: just close the marked region in the DUT syslog.
        analyzer._add_end_marker(marker)
    else:
        # Load common regular expressions shipped with the legacy loganalyzer module.
        analyzer.load_common_config()
        # Parse syslog and process the result; raises LogAnalyzerError when the
        # total match count or missing expected-match count is non-zero.
        analyzer.analyze(marker)

135 changes: 135 additions & 0 deletions tests/loganalyzer/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
#### Loganalyzer API usage example

This document describes how to use the loganalyzer fixture/module.

##### Loganalyzer fixture
The root conftest implements a "loganalyzer" pytest fixture, which runs automatically for all test cases.
Fixture main flow:
- loganalyzer adds a start marker before the test case starts
- loganalyzer adds a stop marker after the test case finishes
- if loganalyzer analysis is not disabled for the current test case, it analyzes the DUT syslog and displays the results.
If loganalyzer finds messages that match the defined regular expressions, it displays the found messages and pytest generates an 'error'.

#### To skip loganalyzer analysis for:
- all test cases - use pytest command line option ```--disable_loganalyzer```
- specific test case: mark test case with ```@pytest.mark.disable_loganalyzer``` decorator. Example is shown below.


#### Notes:
loganalyzer.init() - can be called several times without calling "loganalyzer.analyze(marker)" between calls. Each call returns its own unique marker, which is used for the "analyze" phase - loganalyzer.analyze(marker).


### Loganalyzer usage example

#### Example calling loganalyzer init/analyze methods automatically by using with statement
```python
# Read existed common regular expressions located with legacy loganalyzer module
loganalyzer.load_common_config()
# Analyze syslog for code executed inside with statement
with loganalyzer as analyzer:
logging.debug("============== Test steps ===================")
# Add test code here ...
time.sleep(1)

# Separately analyze syslog for code executed inside each with statement
with loganalyzer as analyzer:
# Clear current regexp match list if there is a need to have clear configuration
loganalyzer.match_regex = []
# Load regular expressions from the specified file
reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH)
# Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file
loganalyzer.match_regex.extend(reg_exp)
# Add test code here ...
# Here will be executed syslog analysis on context manager __exit__
time.sleep(1)
with loganalyzer as analyzer:
# Clear current regexp match list if there is a need to have clear configuration
loganalyzer.match_regex = []
# Set match criteria (regular expression) to custom regexp - "test:.*Error"
loganalyzer.match_regex.extend(["test:.*Error"])
# Add test code here ...
# Here will be executed syslog analysis on context manager __exit__
time.sleep(1)
with loganalyzer as analyzer:
# Add test code here ...
# Here will be executed syslog analysis on context manager __exit__
time.sleep(1)
```

#### Example calling loganalyzer init/analyze methods directly in test case
```python
# Example 1
# Read existed common regular expressions located with legacy loganalyzer module
loganalyzer.load_common_config()
# Add start marker to the DUT syslog
marker = loganalyzer.init()
# PERFORM TEST CASE STEPS ...
# Verify that error messages were not found in DUT syslog. Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH
loganalyzer.analyze(marker)

# Example 2
# Read existed common regular expressions located with legacy loganalyzer module
loganalyzer.load_common_config()
# Add start marker to the DUT syslog
marker = loganalyzer.init()
# PERFORM TEST CASE STEPS ...
# Get summary of analyzed DUT syslog
result = loganalyzer.analyze(marker, fail=False)
# Verify that specific amount of error messages found in syslog # Negative test case
assert result["total"]["match"] == 2, "Not found expected errors: {}".format(result)

# Example 3
# Download extracted syslog file from DUT to the local host
loganalyzer.save_extracted_log(dest="/tmp/log/syslog")

# Example 4
# Update previously configured marker
# Now start marker will have new prefix - test_bgp
loganalyzer.update_marker_prefix("test_bgp")

def get_platform_info(dut):
"""
Example callback which gets DUT platform information and returns obtained string
"""
return dut.command("show platform summary")

# Example 5
# Execute specific function and analyze logs during function execution
run_cmd_result = loganalyzer.run_cmd(get_platform_info, ans_host)
# Process result of "get_platform_info" callback
assert all(item in run_cmd_result["stdout"] for item in ["Platform", "HwSKU", "ASIC"]) is True, "Unexpected output returned after command execution: {}".format(run_cmd_result)

# Example 6
# Clear current regexp match list
loganalyzer.match_regex = []
# Load regular expressions from the specified file defined in COMMON_MATCH variable
reg_exp = loganalyzer.parse_regexp_file(src=COMMON_MATCH)
# Extend currently configured match criteria (regular expressions) with data read from "COMMON_MATCH" file
loganalyzer.match_regex.extend(reg_exp)
marker = loganalyzer.init()
# PERFORM TEST CASE STEPS ...
# Verify that error messages were not found in DUT syslog. Exception will be raised if in DUT syslog will be found messages which fits regexp defined in COMMON_MATCH
loganalyzer.analyze(marker)

# Example 7
loganalyzer.expect_regex = []
# Add specific EXPECTED regular expression
# Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression
# If such message will not be present in DUT syslog, it will raise exception
loganalyzer.expect_regex.append("kernel:.*Oops")
# Add start marker to the DUT syslog
marker = loganalyzer.init()
# PERFORM TEST CASE STEPS ...
# Verify that expected error messages WERE FOUND in DUT syslog. Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression
loganalyzer.analyze(marker)

# Example 8
loganalyzer.expect_regex = []
# Add specific EXPECTED regular expression
# Means that in the DUT syslog loganalyzer will search for message which matches with "kernel:.*Oops" regular expression
# If such message will not be present in DUT syslog, it will raise exception
loganalyzer.expect_regex.append("kernel:.*Oops")
# PERFORM TEST CASE STEPS ...
# Verify that expected error messages WERE FOUND in DUT syslog. Exception will be raised if in DUT syslog will NOT be found messages which fits to "kernel:.*Oops" regular expression
loganalyzer.run_cmd(ans_host.command, "echo '---------- kernel: says Oops --------------' >> /var/log/syslog")
```
Loading