Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .azure-pipelines/run-test-template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ steps:
pipeline: 1
artifact: sonic-buildimage.vs
runVersion: 'latestFromBranch'
runBranch: 'refs/heads/master'
runBranch: 'refs/heads/202012'
displayName: "Download sonic kvm image"

- script: |
Expand Down
35 changes: 27 additions & 8 deletions ansible/library/extract_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import gzip
import re
import sys
import hashlib
import logging
import logging.handlers
from datetime import datetime
Expand All @@ -100,7 +101,8 @@ def extract_lines(directory, filename, target_string):
# messes up with the comparator.
# Prehandle lines to remove these sub-strings
dt = datetime.datetime.fromtimestamp(os.path.getctime(path))
result = [(filename, dt, line.replace('\x00', '')) for line in file if target_string in line and 'nsible' not in line]
sz = os.path.getsize(path)
result = [(filename, dt, line.replace('\x00', ''), sz) for line in file if target_string in line and 'nsible' not in line]

return result

Expand Down Expand Up @@ -219,33 +221,50 @@ def calculate_files_to_copy(filenames, file_with_latest_line):
return files_to_copy


def combine_logs_and_save(directory, filenames, start_string, target_string, target_filename):
    """Concatenate the relevant tail of rotated log files into one output file.

    Iterates ``filenames`` in reverse (the list is ordered newest first, so
    reversal yields chronological order) and copies every line starting from
    the first line that either equals ``start_string`` exactly or contains
    ``target_string`` as a substring; all subsequent lines are copied too.

    @param directory: Directory containing the log files.
    @param filenames: Log file names, newest first; iterated in reverse.
    @param start_string: Exact marker line at which copying begins.
    @param target_string: Substring that also triggers the start of copying.
    @param target_filename: Path of the combined output file to write.
    """
    do_copy = False
    line_processed = 0
    line_copied = 0
    with open(target_filename, 'w') as fp:
        for filename in reversed(filenames):
            path = os.path.join(directory, filename)
            dt = datetime.datetime.fromtimestamp(os.path.getctime(path))
            sz = os.path.getsize(path)
            logger.debug("extract_log combine_logs from file {} create time {}, size {}".format(path, dt, sz))
            # NOTE(review): substring test matches any path containing "gz",
            # not just ".gz" suffixes -- path.endswith('.gz') would be
            # stricter; confirm no caller relies on the loose match.
            if 'gz' in path:
                file = gzip.GzipFile(path)
            else:
                file = open(path)

            with file:
                for line in file:
                    line_processed += 1
                    # Idiom fix: "not do_copy" instead of "do_copy == False".
                    if not do_copy:
                        if line == start_string or target_string in line:
                            do_copy = True
                            fp.write(line)
                            line_copied += 1
                    else:
                        fp.write(line)
                        line_copied += 1

            logger.debug("extract_log combine_logs from file {}, {} lines processed, {} lines copied".format(path, line_processed, line_copied))


def extract_log(directory, prefixname, target_string, target_filename):
    """Extract log content that follows the newest occurrence of a marker.

    Finds the most recent line containing ``target_string`` across the
    rotated log files matching ``prefixname``, then combines everything from
    that line onward into ``target_filename``.

    @param directory: Directory holding the (possibly rotated) log files.
    @param prefixname: Common prefix of the log file names to scan.
    @param target_string: Marker substring locating the extraction start.
    @param target_filename: Path of the combined output file.
    """
    logger.debug("extract_log for start string {}".format(target_string.replace("start-", "")))
    filenames = list_files(directory, prefixname)
    logger.debug("extract_log from files {}".format(filenames))
    file_with_latest_line, file_create_time, latest_line, file_size = extract_latest_line_with_string(directory, filenames, target_string)
    # Debug aid: md5 of the marker line. Fix: hashlib.update() requires
    # bytes on Python 3, so encode str input (gzip-read lines may already
    # be bytes, hence the isinstance guard).
    m = hashlib.md5()
    m.update(latest_line if isinstance(latest_line, bytes) else latest_line.encode('utf-8'))
    logger.debug("extract_log start file {} size {}, ctime {}, latest line md5sum {}".format(file_with_latest_line, file_size, file_create_time, m.hexdigest()))
    files_to_copy = calculate_files_to_copy(filenames, file_with_latest_line)
    logger.debug("extract_log subsequent files {}".format(files_to_copy))
    combine_logs_and_save(directory, files_to_copy, latest_line, target_string, target_filename)
    filenames = list_files(directory, prefixname)
    logger.debug("extract_log check logs files {}".format(filenames))


def main():
Expand Down
52 changes: 49 additions & 3 deletions ansible/roles/test/files/tools/loganalyzer/loganalyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@
import getopt
import re
import os
import os.path
import csv
import time
import pprint
import logging
import logging.handlers
Expand Down Expand Up @@ -180,7 +182,48 @@ def place_marker_to_syslog(self, marker):
syslogger.info('\n')
self.flush_rsyslogd()

def place_marker(self, log_file_list, marker):
def wait_for_marker(self, marker, timeout=60, polling_interval=10):
    '''
    @summary: Wait for the marker to appear in the /var/log/syslog file.
    @param marker: Marker string expected to show up in the log file.
    @param timeout: Maximum time in seconds to wait for the marker.
    @param polling_interval: Seconds to sleep between polls.
    @return: True if the marker was found, False after the timeout elapsed.
    '''

    wait_time = 0
    last_check_pos = 0
    syslog_file = "/var/log/syslog"
    prev_syslog_file = "/var/log/syslog.1"
    last_dt = os.path.getctime(syslog_file)
    while wait_time <= timeout:
        with open(syslog_file, 'r') as fp:
            dt = os.path.getctime(syslog_file)
            if last_dt != dt:
                # Log rotation happened: finish scanning the rotated file
                # from where the previous pass stopped before moving on.
                try:
                    with open(prev_syslog_file, 'r') as pfp:
                        pfp.seek(last_check_pos)
                        # Fix: iterate the rotated file handle (pfp); the
                        # original looped over fp here, so the rotated
                        # file's tail was never actually searched.
                        for l in pfp:
                            if marker in l:
                                return True
                # NOTE(review): FileNotFoundError is Python-3-only; if this
                # still runs under Python 2, catch IOError instead -- confirm
                # the target interpreter.
                except FileNotFoundError:
                    print("cannot find file {}".format(prev_syslog_file))
                last_check_pos = 0
                last_dt = dt
            # resume from the last search position in the current file
            if last_check_pos:
                fp.seek(last_check_pos)
            # check if the marker appears in the newly written portion
            for l in fp:
                if marker in l:
                    return True
            # record last search position for the next poll
            last_check_pos = fp.tell()
        time.sleep(polling_interval)
        wait_time += polling_interval

    return False

def place_marker(self, log_file_list, marker, wait_for_marker=False):
'''
@summary: Place marker into '/dev/log' and each log file specified.
@param log_file_list : List of file paths, to be applied with marker.
Expand All @@ -191,6 +234,9 @@ def place_marker(self, log_file_list, marker):
self.place_marker_to_file(log_file, marker)

self.place_marker_to_syslog(marker)
if wait_for_marker:
if self.wait_for_marker(marker) is False:
raise RuntimeError("cannot find marker {} in /var/log/syslog".format(marker))

return
#---------------------------------------------------------------------
Expand Down Expand Up @@ -697,7 +743,7 @@ def main(argv):
ignore_file_list = ignore_files_in.split(tokenizer)
expect_file_list = expect_files_in.split(tokenizer)

analyzer.place_marker(log_file_list, analyzer.create_end_marker())
analyzer.place_marker(log_file_list, analyzer.create_end_marker(), wait_for_marker=True)

match_messages_regex, messages_regex_m = analyzer.create_msg_regex(match_file_list)
ignore_messages_regex, messages_regex_i = analyzer.create_msg_regex(ignore_file_list)
Expand All @@ -713,7 +759,7 @@ def main(argv):
write_result_file(run_id, out_dir, result, messages_regex_e, unused_regex_messages)
write_summary_file(run_id, out_dir, result, unused_regex_messages)
elif (action == "add_end_marker"):
analyzer.place_marker(log_file_list, analyzer.create_end_marker())
analyzer.place_marker(log_file_list, analyzer.create_end_marker(), wait_for_marker=True)
return 0

else:
Expand Down