diff --git a/.azure-pipelines/run-test-template.yml b/.azure-pipelines/run-test-template.yml index 4341300a72a..f97ac53ce76 100644 --- a/.azure-pipelines/run-test-template.yml +++ b/.azure-pipelines/run-test-template.yml @@ -25,7 +25,7 @@ steps: pipeline: 1 artifact: sonic-buildimage.vs runVersion: 'latestFromBranch' - runBranch: 'refs/heads/master' + runBranch: 'refs/heads/202012' displayName: "Download sonic kvm image" - script: | diff --git a/ansible/library/extract_log.py b/ansible/library/extract_log.py index dc939844248..b8c2d9b3c72 100644 --- a/ansible/library/extract_log.py +++ b/ansible/library/extract_log.py @@ -77,6 +77,7 @@ import gzip import re import sys +import hashlib import logging import logging.handlers from datetime import datetime @@ -100,7 +101,8 @@ def extract_lines(directory, filename, target_string): # messes up with the comparator. # Prehandle lines to remove these sub-strings dt = datetime.datetime.fromtimestamp(os.path.getctime(path)) - result = [(filename, dt, line.replace('\x00', '')) for line in file if target_string in line and 'nsible' not in line] + sz = os.path.getsize(path) + result = [(filename, dt, line.replace('\x00', ''), sz) for line in file if target_string in line and 'nsible' not in line] return result @@ -219,33 +221,50 @@ def calculate_files_to_copy(filenames, file_with_latest_line): return files_to_copy -def combine_logs_and_save(directory, filenames, start_string, target_filename): +def combine_logs_and_save(directory, filenames, start_string, target_string, target_filename): do_copy = False + line_processed = 0 + line_copied = 0 with open(target_filename, 'w') as fp: for filename in reversed(filenames): path = os.path.join(directory, filename) + dt = datetime.datetime.fromtimestamp(os.path.getctime(path)) + sz = os.path.getsize(path) + logger.debug("extract_log combine_logs from file {} create time {}, size {}".format(path, dt, sz)) file = None if 'gz' in path: file = gzip.GzipFile(path) else: file = open(path) + 
with file: for line in file: - if line == start_string: - do_copy = True - if do_copy: + line_processed += 1 + if do_copy == False: + if line == start_string or target_string in line: + do_copy = True + fp.write(line) + line_copied += 1 + else: fp.write(line) + line_copied += 1 + + logger.debug("extract_log combine_logs from file {}, {} lines processed, {} lines copied".format(path, line_processed, line_copied)) def extract_log(directory, prefixname, target_string, target_filename): logger.debug("extract_log for start string {}".format(target_string.replace("start-", ""))) filenames = list_files(directory, prefixname) logger.debug("extract_log from files {}".format(filenames)) - file_with_latest_line, file_create_time, latest_line = extract_latest_line_with_string(directory, filenames, target_string) - logger.debug("extract_log start file {}".format(file_with_latest_line)) + file_with_latest_line, file_create_time, latest_line, file_size = extract_latest_line_with_string(directory, filenames, target_string) + m = hashlib.md5() + m.update(latest_line) + logger.debug("extract_log start file {} size {}, ctime {}, latest line md5sum {}".format(file_with_latest_line, file_size, file_create_time, m.hexdigest())) files_to_copy = calculate_files_to_copy(filenames, file_with_latest_line) logger.debug("extract_log subsequent files {}".format(files_to_copy)) - combine_logs_and_save(directory, files_to_copy, latest_line, target_filename) + combine_logs_and_save(directory, files_to_copy, latest_line, target_string, target_filename) + filenames = list_files(directory, prefixname) + logger.debug("extract_log check logs files {}".format(filenames)) def main(): diff --git a/ansible/roles/test/files/tools/loganalyzer/loganalyzer.py b/ansible/roles/test/files/tools/loganalyzer/loganalyzer.py index d931105de5c..e5b5bd884eb 100644 --- a/ansible/roles/test/files/tools/loganalyzer/loganalyzer.py +++ b/ansible/roles/test/files/tools/loganalyzer/loganalyzer.py @@ -21,7 +21,9 @@ import 
getopt import re import os +import os.path import csv +import time import pprint import logging import logging.handlers @@ -180,7 +182,48 @@ def place_marker_to_syslog(self, marker): syslogger.info('\n') self.flush_rsyslogd() - def place_marker(self, log_file_list, marker): + def wait_for_marker(self, marker, timeout=60, polling_interval=10): + ''' + @summary: Wait for the marker to appear in the /var/log/syslog file + @param marker: Marker to be placed into log files. + @param timeout: Maximum time in seconds to wait till Marker in /var/log/syslog + @param polling_interval: Polling interval during the wait + ''' + + wait_time = 0 + last_check_pos = 0 + syslog_file = "/var/log/syslog" + prev_syslog_file = "/var/log/syslog.1" + last_dt = os.path.getctime(syslog_file) + while wait_time <= timeout: + with open(syslog_file, 'r') as fp: + dt = os.path.getctime(syslog_file) + if last_dt != dt: + try: + with open(prev_syslog_file, 'r') as pfp: + pfp.seek(last_check_pos) + for l in pfp: + if marker in l: + return True + except (IOError, OSError): + print("cannot find file {}".format(prev_syslog_file)) + last_check_pos = 0 + last_dt = dt + # resume from last search position + if last_check_pos: + fp.seek(last_check_pos) + # check if marker in the file + for l in fp: + if marker in l: + return True + # record last search position + last_check_pos = fp.tell() + time.sleep(polling_interval) + wait_time += polling_interval + + return False + + def place_marker(self, log_file_list, marker, wait_for_marker=False): ''' @summary: Place marker into '/dev/log' and each log file specified. @param log_file_list : List of file paths, to be applied with marker. 
@@ -191,6 +234,9 @@ def place_marker(self, log_file_list, marker): self.place_marker_to_file(log_file, marker) self.place_marker_to_syslog(marker) + if wait_for_marker: + if self.wait_for_marker(marker) is False: + raise RuntimeError("cannot find marker {} in /var/log/syslog".format(marker)) return #--------------------------------------------------------------------- @@ -697,7 +743,7 @@ def main(argv): ignore_file_list = ignore_files_in.split(tokenizer) expect_file_list = expect_files_in.split(tokenizer) - analyzer.place_marker(log_file_list, analyzer.create_end_marker()) + analyzer.place_marker(log_file_list, analyzer.create_end_marker(), wait_for_marker=True) match_messages_regex, messages_regex_m = analyzer.create_msg_regex(match_file_list) ignore_messages_regex, messages_regex_i = analyzer.create_msg_regex(ignore_file_list) @@ -713,7 +759,7 @@ def main(argv): write_result_file(run_id, out_dir, result, messages_regex_e, unused_regex_messages) write_summary_file(run_id, out_dir, result, unused_regex_messages) elif (action == "add_end_marker"): - analyzer.place_marker(log_file_list, analyzer.create_end_marker()) + analyzer.place_marker(log_file_list, analyzer.create_end_marker(), wait_for_marker=True) return 0 else: