Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions ansible/roles/test/files/tools/loganalyzer/loganalyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,11 @@ def is_filename_stdin(self, file_name):

#---------------------------------------------------------------------

def is_filename_sairedis_rec(self, file_name):
    # True when the given path refers to a sairedis recording file
    # (any path containing the "sairedis.rec" substring).
    return file_name.find("sairedis.rec") != -1

#---------------------------------------------------------------------

def create_end_marker(self):
    # Build the end-marker string written to syslog: "<prefix>-<run id>".
    return "-".join((self.end_marker_prefix, self.run_id))
#---------------------------------------------------------------------
Expand Down Expand Up @@ -313,6 +318,7 @@ def analyze_file(self, log_file_path, match_messages_regex, ignore_messages_rege

#-- indicates whether log analyzer currently is in the log range between start
#-- and end marker. see analyze_file method.
is_sairedis_rec = self.is_filename_sairedis_rec(log_file_path)
in_analysis_range = False
stdin_as_input = self.is_filename_stdin(log_file_path)
matching_lines = []
Expand Down Expand Up @@ -354,15 +360,19 @@ def analyze_file(self, log_file_path, match_messages_regex, ignore_messages_rege
in_analysis_range = False
break

if in_analysis_range :
if in_analysis_range or is_sairedis_rec:
# Skip long logs in sairedis recording since most likely they are bulk set operations for non-default routes
# without much insight while they are time consuming to analyze
if is_sairedis_rec and len(rev_line) > 1000:
continue
if self.line_is_expected(rev_line, expect_messages_regex):
expected_lines.append(rev_line)

elif self.line_matches(rev_line, match_messages_regex, ignore_messages_regex):
matching_lines.append(rev_line)

# care about the markers only if input is not stdin
if not stdin_as_input:
# care about the markers only if input is not stdin or sairedis recording
if not stdin_as_input and not is_sairedis_rec:
if (not found_start_marker):
print 'ERROR: start marker was not found'
sys.exit(err_no_start_marker)
Expand Down
45 changes: 38 additions & 7 deletions tests/common/plugins/loganalyzer/loganalyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def __repr__(self):


class LogAnalyzer:
def __init__(self, ansible_host, marker_prefix, dut_run_dir="/tmp", start_marker=None):
def __init__(self, ansible_host, marker_prefix, dut_run_dir="/tmp", start_marker=None, additional_files={}):
self.ansible_host = ansible_host
self.dut_run_dir = dut_run_dir
self.extracted_syslog = os.path.join(self.dut_run_dir, "syslog")
Expand All @@ -41,6 +41,9 @@ def __init__(self, ansible_host, marker_prefix, dut_run_dir="/tmp", start_marker
self._markers = []
self.fail = True

self.additional_files = list(additional_files.keys())
self.additional_start_str = list(additional_files.values())

def _add_end_marker(self, marker):
"""
@summary: Add stop marker into syslog on the DUT.
Expand Down Expand Up @@ -176,7 +179,8 @@ def analyze(self, marker, fail=True):
"expect_messages": {},
"unused_expected_regexp": []
}
tmp_folder = ".".join((SYSLOG_TMP_FOLDER, time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime())))
timestamp = time.strftime("%Y-%m-%d-%H:%M:%S", time.gmtime())
tmp_folder = ".".join((SYSLOG_TMP_FOLDER, timestamp))
marker = marker.replace(' ', '_')
self.ansible_loganalyzer.run_id = marker

Expand Down Expand Up @@ -209,22 +213,39 @@ def analyze(self, marker, fail=True):

# On DUT extract syslog files from /var/log/ and create one file by location - /tmp/syslog
self.ansible_host.extract_log(directory='/var/log', file_prefix='syslog', start_string=start_string, target_filename=self.extracted_syslog)
for idx, path in enumerate(self.additional_files):
file_dir, file_name = split(path)
extracted_file_name = os.path.join(self.dut_run_dir, file_name)
if self.additional_start_str and self.additional_start_str[idx] != '':
start_str = self.additional_start_str[idx]
else:
start_str = start_string
self.ansible_host.extract_log(directory=file_dir, file_prefix=file_name, start_string=start_str, target_filename=extracted_file_name)
finally:
# Enable logrotate cron task back
self.ansible_host.command("sed -i 's/^#//g' /etc/cron.d/logrotate")

# Download extracted logs from the DUT to the temporal folder defined in SYSLOG_TMP_FOLDER
self.save_extracted_log(dest=tmp_folder)
file_list = [tmp_folder]

for path in self.additional_files:
file_dir, file_name = split(path)
extracted_file_name = os.path.join(self.dut_run_dir, file_name)
tmp_folder = ".".join((extracted_file_name, timestamp))
self.save_extracted_file(dest=tmp_folder, src=extracted_file_name)
file_list.append(tmp_folder)

match_messages_regex = re.compile('|'.join(self.match_regex)) if len(self.match_regex) else None
ignore_messages_regex = re.compile('|'.join(self.ignore_regex)) if len(self.ignore_regex) else None
expect_messages_regex = re.compile('|'.join(self.expect_regex)) if len(self.expect_regex) else None

analyzer_parse_result = self.ansible_loganalyzer.analyze_file_list([tmp_folder], match_messages_regex, ignore_messages_regex, expect_messages_regex)
# Print syslog file content and remove the file
with open(tmp_folder) as fo:
logging.debug("Syslog content:\n\n{}".format(fo.read()))
os.remove(tmp_folder)
analyzer_parse_result = self.ansible_loganalyzer.analyze_file_list(file_list, match_messages_regex, ignore_messages_regex, expect_messages_regex)
# Print file content and remove the file
for folder in file_list:
with open(folder) as fo:
logging.debug("{} file content:\n\n{}".format(folder, fo.read()))
os.remove(folder)

total_match_cnt = 0
total_expect_cnt = 0
Expand Down Expand Up @@ -262,3 +283,13 @@ def save_extracted_log(self, dest):
@param dest: File path to store downloaded log file.
"""
self.ansible_host.fetch(dest=dest, src=self.extracted_syslog, flat="yes")

def save_extracted_file(self, dest, src):
"""
@summary: Download an extracted file from the remote host to the local (ansible) host.

@param dest: Local file path to store the downloaded file.

@param src: Path of the source file on the remote host to download.
"""
self.ansible_host.fetch(dest=dest, src=src, flat="yes")
123 changes: 87 additions & 36 deletions tests/platform_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,40 +62,10 @@ def bring_up_dut_interfaces(request, duthosts, rand_one_dut_hostname, tbinfo):
for port in ports:
duthost.no_shutdown(ifname=port)

@pytest.fixture()
def advanceboot_loganalyzer(duthosts, rand_one_dut_hostname, request):
"""
Advance reboot log analysis.
This fixture starts log analysis at the beginning of the test. At the end,
the collected expect messages are verified and timing of start/stop is calculated.

Args:
duthosts : List of DUT hosts
rand_one_dut_hostname: hostname of a randomly selected DUT
"""
duthost = duthosts[rand_one_dut_hostname]
loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix="test_advanced_reboot")
marker = loganalyzer.init()
loganalyzer.load_common_config()

ignore_file = os.path.join(TEMPLATES_DIR, "ignore_boot_messages")
expect_file = os.path.join(TEMPLATES_DIR, "expect_boot_messages")
ignore_reg_exp = loganalyzer.parse_regexp_file(src=ignore_file)
expect_reg_exp = loganalyzer.parse_regexp_file(src=expect_file)

loganalyzer.ignore_regex.extend(ignore_reg_exp)
loganalyzer.expect_regex = []
loganalyzer.expect_regex.extend(expect_reg_exp)
loganalyzer.match_regex = []

yield

result = loganalyzer.analyze(marker, fail=False)
messages = result["expect_messages"].values()
def analyze_syslog(duthost, messages):
if not messages:
logging.error("Expected messages not found in syslog")
return
messages = messages[0]
return None

service_restart_times = dict()
service_patterns = {
Expand All @@ -120,7 +90,6 @@ def service_time_check(message, status):
if re.search(pattern, message):
service_time_check(message, status)

loganalyzer.save_extracted_log(dest="/tmp/log/syslog")
logging.info(json.dumps(service_restart_times, indent=4))

FMT = "%b %d %H:%M:%S.%f"
Expand All @@ -144,16 +113,98 @@ def service_time_check(message, status):
with open(filepath) as json_file:
report = json.load(json_file)
service_restart_times.update(report)
result = service_restart_times
logging.info(json.dumps(result, indent=4))

return service_restart_times

def analyze_sairedis_rec(messages):
    """
    Analyze sairedis recording lines collected during advanced reboot and
    compute start/stop timing for key SAI operations.

    Args:
        messages: list of sairedis.rec lines; each line starts with a
                  "%Y-%m-%d.%H:%M:%S.%f" timestamp followed by '|'-separated
                  fields.

    Returns:
        dict mapping state name (e.g. "sai_switch_create") to a dict with a
        "timestamp" sub-dict ("Started"/"Stopped" formatted timestamps),
        optional "<status> count" entries for repeated matches, and "time"
        (elapsed seconds) when both markers were seen.
    """
    state_times = dict()
    # Keys are "<state name>|<Started/Stopped>"; the regex identifies the
    # recording line that represents that transition.
    state_patterns = {
        "sai_switch_create|Started": re.compile(r'.*\|c\|SAI_OBJECT_TYPE_SWITCH.*'),
        "sai_switch_create|Stopped": re.compile(r'.*\|g\|SAI_OBJECT_TYPE_SWITCH.*SAI_SWITCH_ATTR_DEFAULT_VIRTUAL_ROUTER_ID.*'),
        "default_route_set|Started": re.compile(r'.*\|(S|s)\|SAI_OBJECT_TYPE_ROUTE_ENTRY.*0\.0\.0\.0/0.*SAI_ROUTE_ENTRY_ATTR_PACKET_ACTION=SAI_PACKET_ACTION_FORWARD.*')
    }

    FMT = "%b %d %H:%M:%S.%f"
    for message in messages:
        for state, pattern in state_patterns.items():
            if not re.search(pattern, message):
                continue
            state_name = state.split("|")[0].strip()
            state_status = state.split("|")[1].strip()
            state_dict = state_times.get(state_name, {"timestamp": {}})
            timestamps = state_dict.get("timestamp")
            # Count repeated matches for the same transition; the stored
            # timestamp is always taken from the latest matching line.
            if state_status in timestamps:
                state_dict[state_status + " count"] = state_dict.get(state_status + " count", 1) + 1
            timestamp = datetime.strptime(message.split("|")[0].strip(), "%Y-%m-%d.%H:%M:%S.%f")
            timestamps[state_status] = timestamp.strftime(FMT)
            state_times.update({state_name: state_dict})

    # Compute elapsed seconds for every state that saw both markers.
    # NOTE: the original had dangling '\' line continuations followed by a
    # blank line here (a syntax error); rewritten with parentheses.
    for _, timings in state_times.items():
        timestamps = timings["timestamp"]
        if "Stopped" in timestamps and "Started" in timestamps:
            timings["time"] = (datetime.strptime(timestamps["Stopped"], FMT) -
                               datetime.strptime(timestamps["Started"], FMT)).total_seconds()

    return state_times

@pytest.fixture()
def advanceboot_loganalyzer(duthosts, rand_one_dut_hostname, request):
    """
    Advance reboot log analysis.

    This fixture starts log analysis at the beginning of the test. At the end,
    the collected expect messages are verified, timing of start/stop events is
    calculated, and the aggregated result is dumped to a per-test JSON report.

    Args:
        duthosts : List of DUT hosts
        rand_one_dut_hostname: hostname of a randomly selected DUT
        request: pytest request object of the running test
    """
    duthost = duthosts[rand_one_dut_hostname]
    # Currently, advanced reboot test would skip for kvm platform if the test
    # has no device_type marker for vs. Doing the same skip logic in this
    # fixture to avoid running loganalyzer without the test executed.
    if duthost.facts['platform'] == 'x86_64-kvm_x86_64-r0':
        device_marks = [arg for mark in request.node.iter_markers(name='device_type') for arg in mark.args]
        if 'vs' not in device_marks:
            pytest.skip('Testcase not supported for kvm')

    # Besides syslog, also collect the sairedis recording; extraction of that
    # file starts at the given "recording on" start string.
    loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix="test_advanced_reboot",
                              additional_files={'/var/log/swss/sairedis.rec': 'recording on: /var/log/swss/sairedis.rec'})
    marker = loganalyzer.init()
    loganalyzer.load_common_config()

    ignore_file = os.path.join(TEMPLATES_DIR, "ignore_boot_messages")
    expect_file = os.path.join(TEMPLATES_DIR, "expect_boot_messages")
    ignore_reg_exp = loganalyzer.parse_regexp_file(src=ignore_file)
    expect_reg_exp = loganalyzer.parse_regexp_file(src=expect_file)

    loganalyzer.ignore_regex.extend(ignore_reg_exp)
    loganalyzer.expect_regex = []
    loganalyzer.expect_regex.extend(expect_reg_exp)
    loganalyzer.match_regex = []

    yield

    result = loganalyzer.analyze(marker, fail=False)
    analyze_result = dict()
    # Expect messages are keyed by the source file they were collected from;
    # route each file's messages to the matching analyzer.
    for key, messages in result["expect_messages"].items():
        if "syslog" in key:
            service_restart_times = analyze_syslog(duthost, messages)
            if service_restart_times is not None:
                analyze_result["Services"] = service_restart_times
        elif "sairedis.rec" in key:
            state_times = analyze_sairedis_rec(messages)
            if state_times is not None:
                analyze_result["sairedis_state"] = state_times

    logging.info(json.dumps(analyze_result, indent=4))
    report_file_name = request.node.name + "_report.json"
    report_file_dir = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                                    "../logs/platform_tests/"))
    report_file_path = report_file_dir + "/" + report_file_name
    if not os.path.exists(report_file_dir):
        os.makedirs(report_file_dir)
    with open(report_file_path, 'w') as fp:
        # Dump only the aggregated analysis result. The original text dumped
        # both the raw "result" and "analyze_result" into the same file, which
        # would produce invalid (concatenated) JSON.
        json.dump(analyze_result, fp, indent=4)


def pytest_addoption(parser):
Expand Down
5 changes: 4 additions & 1 deletion tests/platform_tests/templates/expect_boot_messages
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
r, ".* NOTICE admin: (Stopping|Stopped|Starting|Started).* (?!syncd|swss|gbsyncd).* service.*"
r, ".* NOTICE admin: Stopped.* swss.* service.*"
r, ".* NOTICE root: (Stopping|Stopped|Starting|Started).* (swss|syncd|gbsyncd).* service.*"
r, ".* NOTICE root: WARMBOOT_FINALIZER.*"
r, ".* NOTICE root: WARMBOOT_FINALIZER.*"
r, ".*\|c\|SAI_OBJECT_TYPE_SWITCH.*"
r, ".*\|g\|SAI_OBJECT_TYPE_SWITCH.*SAI_SWITCH_ATTR_DEFAULT_VIRTUAL_ROUTER_ID.*"
r, ".*\|(S|s)\|SAI_OBJECT_TYPE_ROUTE_ENTRY.*0\.0\.0\.0/0.*SAI_ROUTE_ENTRY_ATTR_PACKET_ACTION=SAI_PACKET_ACTION_FORWARD.*"