diff --git a/tests/common/fixtures/advanced_reboot.py b/tests/common/fixtures/advanced_reboot.py index 799f61c3cf5..a074e9dd147 100644 --- a/tests/common/fixtures/advanced_reboot.py +++ b/tests/common/fixtures/advanced_reboot.py @@ -91,6 +91,8 @@ def __init__(self, request, duthosts, duthost, ptfhost, localhost, tbinfo, creds self.allowMacJump = kwargs["allow_mac_jumping"] if "allow_mac_jumping" in kwargs else False self.advanceboot_loganalyzer = kwargs["advanceboot_loganalyzer"] if "advanceboot_loganalyzer"\ in kwargs else None + self.consistency_checker_provider = kwargs["consistency_checker_provider"] if "consistency_checker_provider"\ + in kwargs else None self.other_vendor_nos = kwargs['other_vendor_nos'] if 'other_vendor_nos' in kwargs else False self.__dict__.update(kwargs) self.__extractTestParam() @@ -543,6 +545,50 @@ def acl_manager_checker(self, error_list): if int(acl_proc_count) != 1: error_list.append("Expected one ACL manager process running. Actual: {}".format(acl_proc_count)) + def check_asic_and_db_consistency(self): + """ + Check ASIC_DB and ASIC consistency, logging out any inconsistencies that are found. 
+ """ + if not self.consistency_checker_provider.is_consistency_check_supported(self.duthost): + os_version = self.duthost.image_facts()["ansible_facts"]["ansible_image_facts"]["current"] + platform = self.duthost.facts['platform'] + logger.info((f"Consistency check is not supported on this platform ({platform}) and " + f"version ({os_version})")) + return + + with self.consistency_checker_provider.get_consistency_checker(self.duthost) as consistency_checker: + inconsistencies = consistency_checker.check_consistency() + not_implemented_attributes = set() + mismatched_attributes = {} + failed_to_query_asic_attributes = {} + + for sai_object, summary in inconsistencies.items(): + # Not implemented attributes + object_name = sai_object.split(":")[1] + for attr in summary["attributeNotImplemented"]: + not_implemented_attributes.add(f"{object_name}.{attr}") + + # Mismatched attributes + mismatched_attributes = { + attr: summary["attributes"][attr] for attr + in summary["mismatchedAttributes"] + } + if mismatched_attributes: + mismatched_attributes[sai_object] = mismatched_attributes + + # Failed to query ASIC attributes + if summary["failedToQueryAsic"]: + failed_to_query_asic_attributes[sai_object] = summary["failedToQueryAsic"] + + if not_implemented_attributes: + logger.warning(f"Not implemented attributes: {not_implemented_attributes}") + + if mismatched_attributes: + logger.error(f"Mismatched attributes found: {mismatched_attributes}") + + if failed_to_query_asic_attributes: + logger.error(f"Failed to query ASIC attributes: {failed_to_query_asic_attributes}") + def runRebootTest(self): # Run advanced-reboot.ReloadTest for item in preboot/inboot list count = 0 @@ -582,6 +628,8 @@ def runRebootTest(self): logger.error("Exception caught while running advanced-reboot test on ptf: \n{}".format(traceback_msg)) test_results[test_case_name].append("Exception caught while running advanced-reboot test on ptf") finally: + if self.consistency_checker_provider: + 
    def __init__(self, duthost, libsairedis_download_url=None, python3_pysairedis_download_url=None):
        """
        Context manager that stages the ASIC query tooling inside the DUT's syncd container.

        If the libsairedis_download_url and python3_pysairedis_download_url are provided, then these
        artifacts are downloaded and installed on the DUT, otherwise it's assumed that the environment
        is already setup for the consistency checker.

        :param duthost: SonicHost-like object used to run commands/copies on the DUT
        :param libsairedis_download_url: Optional URL of a libsairedis .deb to stage
        :param python3_pysairedis_download_url: Optional URL of a python3-pysairedis .deb to stage
        """
        self._duthost = duthost
        self._libsairedis_download_url = libsairedis_download_url
        self._python3_pysairedis_download_url = python3_pysairedis_download_url

    def __enter__(self):
        """
        Stage the query script/parser (and optional .deb artifacts) into the syncd container.

        Files are first copied to DUT_DST_PATH_HOST on the DUT, then moved wholesale into
        the container via `docker cp`; the host-side staging dir is removed afterwards.
        """
        logger.info("Initializing consistency checker on dut...")

        self._duthost.file(path=DUT_DST_PATH_HOST, state="directory")
        self._duthost.copy(src=QUERY_ASIC_SCRIPT_PATH_SRC, dest=QUERY_ASIC_SCRIPT_PATH_DST_HOST)
        self._duthost.copy(src=QUERY_ASIC_PARSER_PATH_SRC, dest=QUERY_ASIC_PARSER_PATH_DST_HOST)

        # Optional artifacts are fetched directly on the DUT (the DUT, not the test host,
        # needs network reachability to these URLs).
        if self._libsairedis_download_url is not None:
            self._duthost.command(f"curl -o {DUT_DST_PATH_HOST}/{LIBSAIREDIS_DEB} {self._libsairedis_download_url}")
        if self._python3_pysairedis_download_url is not None:
            self._duthost.command(
                f"curl -o {DUT_DST_PATH_HOST}/{PYTHON3_PYSAIREDIS_DEB} {self._python3_pysairedis_download_url}")

        # Move everything into syncd container
        self._duthost.shell((
            f"docker cp {DUT_DST_PATH_HOST} {SYNCD_CONTAINER}:/ && "
            f"rm -rf {DUT_DST_PATH_HOST}"
        ))

        if self._python3_pysairedis_download_url is not None:
            # Install python3-sairedis in syncd container
            self._duthost.shell((f"docker exec {SYNCD_CONTAINER} bash -c "
                                 f"'cd {DUT_DST_PATH_CONTAINER} && "
                                 f"dpkg --install {DUT_DST_PATH_CONTAINER}/{PYTHON3_PYSAIREDIS_DEB}'"))

        if self._libsairedis_download_url is not None:
            # Extract (not install) the libsairedis deb to be used by the query script via
            # LD_LIBRARY_PATH, so the container's installed libsairedis is left untouched.
            self._duthost.shell((f"docker exec {SYNCD_CONTAINER} bash -c "
                                 f"'cd {DUT_DST_PATH_CONTAINER} && "
                                 f"dpkg --extract {DUT_DST_PATH_CONTAINER}/{LIBSAIREDIS_DEB} {LIBSAIREDIS_TEMP}'"))

        logger.info("Consistency checker setup complete.")

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Undo the setup done in __enter__: uninstall the optional python package and delete
        the staged files from the syncd container. Exceptions are not suppressed.
        """
        logger.info("Cleaning up consistency checker on dut...")

        if self._python3_pysairedis_download_url is not None:
            # Uninstall python3-sairedis in syncd container
            self._duthost.command(f"docker exec {SYNCD_CONTAINER} dpkg --remove python3-pysairedis")

        # Remove all the files from the syncd container
        self._duthost.command(f"docker exec {SYNCD_CONTAINER} rm -rf {DUT_DST_PATH_CONTAINER}")

        # NOTE: If consistency checker is used to do write operations (currently it's read-only), then syncd
        # should be restarted or minigraph reloaded to re-align the ASIC_DB and ASIC state.

        logger.info("Consistency checker cleanup complete.")
+ } + """ + + db_attributes = self._get_db_attributes(keys) + asic_attributes = self._get_asic_attributes_from_db_results(db_attributes) + + results = defaultdict(dict) + + for object in db_attributes: + db_object = db_attributes[object] + asic_object = asic_attributes[object] + + for attr in db_object["value"].keys(): + db_value = db_object["value"][attr] + asic_value = asic_object[attr]["asicValue"] + + if db_value.startswith("oid:0x"): + # Convert the asic one to the same format + try: + asic_value = f"oid:{hex(int(asic_value))}" + except Exception: + # keep the value as is + pass + + results[object][attr] = { + "dbValue": db_value, + "asicValue": asic_value, + "asicQuerySuccess": asic_object[attr]["success"] + } + + if not asic_object[attr]["success"]: + results[object][attr]["asicQueryErrorMsg"] = asic_object[attr]["error"] + + return dict(results) + + def check_consistency(self, keys=None) -> dict: + """ + Get the out-of-sync ASIC_DB and ASIC attributes. Differences are indicative of an error state. + Same arg style as the get_objects function but returns a list of objects that don't match or couldn't + be queried from the ASIC. If it was successfully queried and has a matching value, then it won't be + included in the response. + + :param keys: Optional list of glob search strings that correspond to the --key arg of sonic-db-dump. + sonic-db-dump doesn't take multiple keys, so a list is passed in to support multiple + keys at the API level. If not provided, then the default keys are used. + :return: Dictionary containing the out-of-sync ASIC_DB and ASIC attributes. 
+ + Example return val (matching): + {} + + Example return val (mismatch): + { + "ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_PROFILE:oid:0x1900000000154f": { + "attributes": { + "SAI_BUFFER_PROFILE_ATTR_SHARED_DYNAMIC_TH": { + "dbValue": "0", + "asicValue": -1, + }, + "SAI_BUFFER_PROFILE_ATTR_THRESHOLD_MODE": { + "dbValue": "SAI_BUFFER_PROFILE_THRESHOLD_MODE_DYNAMIC", + "asicValue": "SAI_BUFFER_PROFILE_THRESHOLD_MODE_STATIC" + }, + ... + }, + "failedToQueryAsic": [ + {"SAI_BUFFER_PROFILE_ATTR_SHARED_DYNAMIC_TH": "Failed to query attribute value"} + ], + "mismatchedAttributes": ["SAI_BUFFER_PROFILE_ATTR_THRESHOLD_MODE"], + "attributeNotImplemented": ["SAI_BUFFER_PROFILE_ATTR_POOL_ID"] + }, + ... + } + """ + if keys is None: + platform = self._duthost.facts['platform'] + os_version = self._duthost.image_facts()["ansible_facts"]["ansible_image_facts"]["current"] + keys = self._get_consistency_checker_keys(platform, os_version) + + db_attributes = self._get_db_attributes(keys) + asic_attributes = self._get_asic_attributes_from_db_results(db_attributes) + + inconsistencies = defaultdict(lambda: { + "attributes": {}, + "failedToQueryAsic": [], + "mismatchedAttributes": [], + "attributeNotImplemented": [], + }) + + for object in db_attributes: + db_object = db_attributes[object] + asic_object = asic_attributes[object] + + for attr in db_object["value"].keys(): + db_value = db_object["value"][attr] + asic_value = asic_object[attr]["asicValue"] + asic_query_success = asic_object[attr]["success"] + + if asic_query_success and db_value == asic_value: + continue + + if db_value.startswith("oid:0x"): + # Convert the asic one to the same format + try: + asic_value = f"oid:{hex(int(asic_value))}" + if db_value == asic_value: + continue + except Exception: + # true error - let below code handle it + pass + + inconsistencies[object]["attributes"][attr] = { + "dbValue": db_value, + "asicValue": asic_value + } + + if asic_query_success: + 
inconsistencies[object]["mismatchedAttributes"].append(attr) + else: + error = asic_object[attr]["error"] + if "ATTR_NOT_IMPLEMENTED" in error: + inconsistencies[object]["attributeNotImplemented"].append(attr) + else: + inconsistencies[object]["failedToQueryAsic"].append({attr: error}) + + return dict(inconsistencies) + + def _get_consistency_checker_keys(self, platform, os_version) -> List[str]: + """ + Get the keys for the given platform and OS version. + + :param platform: Platform name + :param os_version: OS version + :return: List of keys + """ + + if platform not in SUPPORTED_PLATFORMS_AND_VERSIONS: + raise Exception(f"Unsupported platform: {platform}") + + supported_versions = SUPPORTED_PLATFORMS_AND_VERSIONS[platform] + for version in supported_versions: + if version in os_version: + return supported_versions[version] + + raise Exception(f"Unsupported OS version: {os_version}") + + def _get_db_attributes(self, keys: List[ConsistencyCheckQueryKey]) -> dict: + """ + Fetchs and merges the attributes of the objects returned by the search key from the DB. + """ + db_attributes = {} + for key in keys: + result = self._duthost.command(f"sonic-db-dump -k '{key.key}' -n ASIC_DB") + if result['rc'] != 0: + raise Exception((f"Failed to fetch attributes for key '{key}' from ASIC_DB. " + f"Return code: {result['rc']}, stdout: {result['stdout']}, " + f"stderr: {result['stderr']}")) + + query_result = json.loads(result['stdout']) + + # Filter for attributes that we want ... 
+ objects_with_no_attrs = [] + for object in query_result: + + if "NULL" in query_result[object]["value"]: + logger.debug(f"Ignoring attribute 'NULL' for object '{object}'") + del query_result[object]["value"]["NULL"] + + if ALL_ATTRIBUTES in key.attributes: + logger.debug(f"Retaining all attributes for object '{object}'") + else: + attributes_to_remove = set(query_result[object]["value"].keys()) - set(key.attributes) + for attr in attributes_to_remove: + logger.debug(f"Ignoring attribute '{attr}' for object '{object}'") + del query_result[object]["value"][attr] + + if len(query_result[object]["value"]) == 0: + objects_with_no_attrs.append(object) + + # ... then remove the objects that have no attributes left + for object in objects_with_no_attrs: + logger.debug(f"Ignoring empty object '{object}'") + del query_result[object] + + db_attributes.update(query_result) + + return db_attributes + + def _get_asic_attributes_from_db_results(self, db_attributes: dict) -> dict: + """ + Queries the ASIC for the attributes of the objects in db_attributes which are the results + from the ASIC DB query. + + Example return value: + { + "ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_POOL:oid:0x18000000000628": { + "SAI_BUFFER_POOL_ATTR_THRESHOLD_MODE": { + "asicValue": "SAI_BUFFER_POOL_THRESHOLD_MODE_STATIC", + "success": true + }, + "SAI_BUFFER_POOL_ATTR_SIZE": { + "success" false, + "error": "Failed to query attribute value" + }, + "SAI_BUFFER_POOL_ATTR_TYPE": { + "asicValue": "SAI_BUFFER_POOL_TYPE_EGRESS", + "success": true + } + }, + ... 
+ } + """ + # Map to format expected by the query-asic.py + asic_query = {k: list(v["value"].keys()) for k, v in db_attributes.items()} + asic_query_input_filename = f"query-input-{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}.json" + with open(f"/tmp/{asic_query_input_filename}", 'w') as f: + json.dump(asic_query, f) + + # Copy the input file to the syncd container + self._duthost.copy(src=f"/tmp/{asic_query_input_filename}", dest=f"/tmp/{asic_query_input_filename}") + self._duthost.shell((f"docker cp /tmp/{asic_query_input_filename} " + f"{SYNCD_CONTAINER}:{DUT_DST_PATH_CONTAINER} && " + f"rm /tmp/{asic_query_input_filename}")) + + ld_lib_path_arg = f"LD_LIBRARY_PATH={LIBSAIREDIS_TEMP}/usr/lib/x86_64-linux-gnu"\ + if self._libsairedis_download_url is not None else "" + + res = self._duthost.shell((f"docker exec {SYNCD_CONTAINER} bash -c " + f"'cd {DUT_DST_PATH_CONTAINER} && " + f"{ld_lib_path_arg} python3 {QUERY_ASIC_SCRIPT_PATH_DST_CONTAINER} " + f"--input {asic_query_input_filename}'")) + if res['rc'] != 0: + raise Exception((f"Failed to query ASIC attributes. Return code: {res['rc']}, stdout: {res['stdout']}, " + f"stderr: {res['stderr']}")) + asic_results = json.loads(res['stdout']) + + return asic_results + + +class ConsistencyCheckerProvider: + + def __init__(self, libsairedis_url_template: Optional[str], + python3_pysairedis_url_template: Optional[str]) -> None: + """ + The libsairedis_url_template and python3_pysairedis_url_template are optional URL templates that the + consistency checker can use to download the libsairedis and python3-pysairedis debs respectively. 
+ + :param libsairedis_url_template: Optional URL template for the libsairedis deb + :param python3_pysairedis_url_template: Optional URL template for the python3-pysairedis deb + """ + self._libsairedis_url_template = libsairedis_url_template + self._python3_pysairedis_url_template = python3_pysairedis_url_template + + def is_consistency_check_supported(self, dut) -> bool: + """ + Checks if the provided DUT is supported for consistency checking. + + :param dut: SonicHost object + :return bool: True if the DUT is supported, False otherwise + """ + + platform = dut.facts['platform'] + if platform not in SUPPORTED_PLATFORMS_AND_VERSIONS: + return False + + current_version = dut.image_facts()['ansible_facts']['ansible_image_facts']['current'] + supported_versions = SUPPORTED_PLATFORMS_AND_VERSIONS[platform] + if any(v in current_version for v in supported_versions.keys()): + return True + + return False + + def get_consistency_checker(self, dut) -> ConsistencyChecker: + """ + Get a new instance of the ConsistencyChecker class. 
+ + :param dut: SonicHost object + :return ConsistencyChecker: New instance of the ConsistencyChecker class + """ + + os_version = dut.image_facts()["ansible_facts"]["ansible_image_facts"]["current"] + + if self._libsairedis_url_template or self._python3_pysairedis_url_template: + if "202305" in os_version: + sonic_version_template_param = "202305" + else: + raise Exception(f"Unsupported OS version: {os_version}") + + libsairedis_download_url = self._libsairedis_url_template\ + .format(sonic_version=sonic_version_template_param)\ + if self._libsairedis_url_template else None + + python3_pysairedis_download_url = self._python3_pysairedis_url_template\ + .format(sonic_version=sonic_version_template_param)\ + if self._python3_pysairedis_url_template else None + + return ConsistencyChecker(dut, libsairedis_download_url, python3_pysairedis_download_url) + + +@pytest.fixture +def consistency_checker_provider(request): + """ + Fixture that provides the ConsistencyCheckerProvider class. + + :param request: pytest request object + """ + + if not request.config.getoption("enable_consistency_checker"): + logger.info("Consistency checker is not enabled. 
Skipping check.") + return None + + consistency_checker_libsairedis_url_template = request.config.getoption( + "consistency_checker_libsairedis_url_template") + consistency_checker_python3_pysairedis_url_template = request.config.getoption( + "consistency_checker_python3_pysairedis_url_template") + + return ConsistencyCheckerProvider(consistency_checker_libsairedis_url_template, + consistency_checker_python3_pysairedis_url_template) diff --git a/tests/common/fixtures/consistency_checker/constants.py b/tests/common/fixtures/consistency_checker/constants.py new file mode 100644 index 00000000000..70f8b254d28 --- /dev/null +++ b/tests/common/fixtures/consistency_checker/constants.py @@ -0,0 +1,49 @@ +from dataclasses import dataclass +from typing import List + +ALL_ATTRIBUTES = "all" + + +@dataclass +class ConsistencyCheckQueryKey: + key: str + attributes: List[str] + + +BROADCOM_KEYS: List[ConsistencyCheckQueryKey] = [ + ConsistencyCheckQueryKey("ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_POOL:*", attributes=[ALL_ATTRIBUTES]), + ConsistencyCheckQueryKey("ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_PROFILE:*", attributes=[ALL_ATTRIBUTES]), + ConsistencyCheckQueryKey("ASIC_STATE:SAI_OBJECT_TYPE_SWITCH:*", attributes=[ALL_ATTRIBUTES]), + ConsistencyCheckQueryKey("ASIC_STATE:SAI_OBJECT_TYPE_WRED:*", attributes=[ALL_ATTRIBUTES]), + ConsistencyCheckQueryKey( + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:*", + attributes=[ + "SAI_PORT_ATTR_QOS_TC_TO_QUEUE_MAP", + "SAI_PORT_ATTR_QOS_TC_TO_PRIORITY_GROUP_MAP", + "SAI_PORT_ATTR_QOS_PFC_PRIORITY_TO_QUEUE_MAP", + "SAI_PORT_ATTR_QOS_DSCP_TO_TC_MAP", + "SAI_PORT_ATTR_MTU", + "SAI_PORT_ATTR_INGRESS_ACL", + "SAI_PORT_ATTR_AUTO_NEG_MODE", + "SAI_PORT_ATTR_PRIORITY_FLOW_CONTROL", + "SAI_PORT_ATTR_ADMIN_STATE", + "SAI_PORT_ATTR_FEC_MODE", + # The "get" implementation of the SAI_PORT_ATTR_SPEED attribute sometimes has a side effect of changing + # the port speed. 
"""
This module contains utilities for parsing the primitive values out of the ASIC query results.
"""
import ctypes
from sairedis import pysairedis


def mac_address_str_from_swig_uint8_t_arr(swig_uint8_p) -> str:
    """
    Given a swig pointer to a uint8_t array, return the MAC address string representation

    :param swig_uint8_p: The swig pointer to the uint8_t array
    :return: The MAC address string representation (e.g. "AA:BB:00:01:02:FF")
    """
    pointer = ctypes.cast(swig_uint8_p.__int__(), ctypes.POINTER(ctypes.c_uint8))
    octets = [pointer[i] for i in range(6)]
    fmtd_mac_address = ":".join([f"{octet:02X}" for octet in octets])
    return fmtd_mac_address


# Mapping from the NAME of a pysairedis SAI_ATTR_VALUE_TYPE_* constant to the field of the
# sai_attribute_value_t union that carries that type's payload. This replaces the original
# ~90-branch if/elif chain, which contained many duplicated branches (UINT8, INT8, UINT16,
# INT16, UINT32, INT32, UINT64, INT64, BOOL, OBJECT_ID and UINT32_LIST each appeared twice).
# Keyed by name (not by the constant itself) so a pysairedis build that lacks some of these
# constants still imports cleanly and handles the types it does know about.
_VALUE_FIELD_BY_TYPE_NAME = {
    "SAI_ATTR_VALUE_TYPE_BOOL": "booldata",
    "SAI_ATTR_VALUE_TYPE_UINT8": "u8",
    "SAI_ATTR_VALUE_TYPE_INT8": "s8",
    "SAI_ATTR_VALUE_TYPE_UINT16": "u16",
    "SAI_ATTR_VALUE_TYPE_INT16": "s16",
    "SAI_ATTR_VALUE_TYPE_UINT32": "u32",
    "SAI_ATTR_VALUE_TYPE_INT32": "s32",
    "SAI_ATTR_VALUE_TYPE_UINT64": "u64",
    "SAI_ATTR_VALUE_TYPE_INT64": "s64",
    "SAI_ATTR_VALUE_TYPE_OBJECT_ID": "oid",
    "SAI_ATTR_VALUE_TYPE_UINT8_LIST": "u8list",
    "SAI_ATTR_VALUE_TYPE_INT8_LIST": "s8list",
    "SAI_ATTR_VALUE_TYPE_UINT16_LIST": "u16list",
    "SAI_ATTR_VALUE_TYPE_INT16_LIST": "s16list",
    "SAI_ATTR_VALUE_TYPE_UINT32_LIST": "u32list",
    "SAI_ATTR_VALUE_TYPE_INT32_LIST": "s32list",
    "SAI_ATTR_VALUE_TYPE_OBJECT_LIST": "objlist",
    "SAI_ATTR_VALUE_TYPE_CHARDATA": "chardata",
    "SAI_ATTR_VALUE_TYPE_POINTER": "ptr",
    "SAI_ATTR_VALUE_TYPE_TIMESPEC": "timespec",
    "SAI_ATTR_VALUE_TYPE_JSON": "json",
    "SAI_ATTR_VALUE_TYPE_IPV4": "ip4",
    "SAI_ATTR_VALUE_TYPE_IPV6": "ipv6",
    "SAI_ATTR_VALUE_TYPE_IP_ADDRESS": "ipaddr",
    # NOTE(review): "ipaddlist" is kept verbatim from the original code, but looks like a
    # typo for "ipaddrlist" -- confirm against the pysairedis bindings.
    "SAI_ATTR_VALUE_TYPE_IP_ADDRESS_LIST": "ipaddlist",
    "SAI_ATTR_VALUE_TYPE_IP_PREFIX": "ipprefix",
    "SAI_ATTR_VALUE_TYPE_IP_PREFIX_LIST": "ipprefixlist",
    "SAI_ATTR_VALUE_TYPE_VLAN_LIST": "vlanlist",
    "SAI_ATTR_VALUE_TYPE_UINT16_RANGE_LIST": "u16rangelist",
    "SAI_ATTR_VALUE_TYPE_INT32_RANGE": "s32range",
    "SAI_ATTR_VALUE_TYPE_LATCH_STATUS": "latch",
    "SAI_ATTR_VALUE_TYPE_NAT_ENTRY_DATA": "natentrydata",
    "SAI_ATTR_VALUE_TYPE_FABRIC_PORT_REACHABILITY": "fabricportreachability",
    "SAI_ATTR_VALUE_TYPE_TLV_LIST": "tlvlist",
    "SAI_ATTR_VALUE_TYPE_SEGMENT_LIST": "segmentlist",
    "SAI_ATTR_VALUE_TYPE_MAP_LIST": "maplist",
    "SAI_ATTR_VALUE_TYPE_QOS_MAP_LIST": "qosmaplist",
    "SAI_ATTR_VALUE_TYPE_SYSTEM_PORT_CONFIG": "systemportconfig",
    "SAI_ATTR_VALUE_TYPE_SYSTEM_PORT_CONFIG_LIST": "systemportconfiglist",
    "SAI_ATTR_VALUE_TYPE_TWAMP_STATS_DATA": "twampstatsdata",
    "SAI_ATTR_VALUE_TYPE_PORT_SNR_LIST": "portsnrlist",
    "SAI_ATTR_VALUE_TYPE_PORT_ERR_STATUS_LIST": "porterrstatuslist",
    "SAI_ATTR_VALUE_TYPE_PORT_EYE_VALUES_LIST": "porteyevalueslist",
    "SAI_ATTR_VALUE_TYPE_PORT_FREQUENCY_OFFSET_PPM_LIST": "portfreqoffsetppmlist",
    "SAI_ATTR_VALUE_TYPE_PORT_LANE_LATCH_STATUS_LIST": "portlanelatchstatuslist",
    "SAI_ATTR_VALUE_TYPE_PRBS_RX_STATE": "prbsrxstate",
    "SAI_ATTR_VALUE_TYPE_AUTH_KEY": "authkey",
    "SAI_ATTR_VALUE_TYPE_ENCRYPT_KEY": "encryptkey",
    "SAI_ATTR_VALUE_TYPE_MACSEC_SAK": "macsecsak",
    "SAI_ATTR_VALUE_TYPE_MACSEC_SALT": "macsecsalt",
    "SAI_ATTR_VALUE_TYPE_MACSEC_SCI": "macsecsci",
    "SAI_ATTR_VALUE_TYPE_MACSEC_SSCI": "macsecssci",
    "SAI_ATTR_VALUE_TYPE_MACSEC_AUTH_KEY": "macsecauthkey",
    "SAI_ATTR_VALUE_TYPE_ACL_CAPABILITY": "aclcapability",
    "SAI_ATTR_VALUE_TYPE_ACL_RESOURCE_LIST": "aclresourcelist",
    "SAI_ATTR_VALUE_TYPE_ACL_CHAIN_LIST": "aclchainlist",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_BOOL": "aclfielddatabool",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_UINT8": "aclfielddatau8",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_INT8": "aclfielddataint8",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_UINT16": "aclfielddatau16",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_INT16": "aclfielddatas16",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_UINT32": "aclfielddatau32",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_INT32": "aclfielddatas32",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_UINT64": "aclfielddatau64",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_MAC": "aclfielddatamac",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_IPV4": "aclfielddataipv4",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_IPV6": "aclfielddataipv6",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_MACSEC_SCI": "aclfielddatamacsecsci",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_OBJECT_ID": "aclfielddataobjid",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_OBJECT_LIST": "aclfielddataobjlist",
    "SAI_ATTR_VALUE_TYPE_ACL_FIELD_DATA_UINT8_LIST": "aclfielddatau8list",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_BOOL": "aclactiondatabool",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_UINT8": "aclactiondatau8",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_INT8": "aclactiondataint8",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_UINT16": "aclactiondatau16",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_INT16": "aclactiondatas16",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_UINT32": "aclactiondatau32",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_INT32": "aclactiondatas32",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_MAC": "aclactiondatamac",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_IPV4": "aclactiondataipv4",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_IPV6": "aclactiondataipv6",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_IP_ADDRESS": "aclactiondataipaddr",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_OBJECT_ID": "aclactiondataobjid",
    "SAI_ATTR_VALUE_TYPE_ACL_ACTION_DATA_OBJECT_LIST": "aclactiondataobjlist",
}

# Lazily-built {SAI_ATTR_VALUE_TYPE constant value: union field name} lookup; populated on
# first use so that importing this module never touches pysairedis constants.
_field_by_type_value = None


def _get_field_map() -> dict:
    """Build (once) and return the {attr value type constant: union field name} table."""
    global _field_by_type_value
    if _field_by_type_value is None:
        _field_by_type_value = {}
        for type_name, field in _VALUE_FIELD_BY_TYPE_NAME.items():
            type_value = getattr(pysairedis, type_name, None)
            if type_value is not None:
                _field_by_type_value[type_value] = field
    return _field_by_type_value


def extract_attr_value(attr_metadata, attr):
    """
    Extract the value from the attribute based on the attribute metadata

    :param attr_metadata: The attribute metadata
    :param attr: The attribute
    :return: The value of the attribute (enum values are mapped to their string names)
    :raises NotImplementedError: If the attribute value type has no known union field
    """
    if attr_metadata.attrvaluetype == pysairedis.SAI_ATTR_VALUE_TYPE_MAC:
        # MAC is the one type needing real decoding: a raw uint8_t[6] behind a swig pointer
        attr_value = mac_address_str_from_swig_uint8_t_arr(attr.value.mac)
    else:
        field = _get_field_map().get(attr_metadata.attrvaluetype)
        if field is None:
            raise NotImplementedError(f"Unsupported attribute value type: {attr_metadata.attrvaluetype}")
        attr_value = getattr(attr.value, field)

    if attr_metadata.isenum:
        # Map to the string value if enum
        enum_metadata = attr_metadata.enummetadata
        attr_value = pysairedis.sai_metadata_get_enum_value_name(enum_metadata, attr_value)

    return attr_value
+ } + +The results will be printed to stdout, in the following format: + { + "ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_POOL:oid:0x18000000000628": { + "SAI_BUFFER_POOL_ATTR_THRESHOLD_MODE": { + "asicValue": "SAI_BUFFER_POOL_THRESHOLD_MODE_STATIC", + "success": true + }, + "SAI_BUFFER_POOL_ATTR_SIZE": { + "asicValue": null, + "success": false, + "error": "Failed to query attribute value" + }, + "SAI_BUFFER_POOL_ATTR_TYPE": { + "asicValue": "SAI_BUFFER_POOL_TYPE_EGRESS", + "success": true + } + }, + ... + } +""" + + +def load_input(input_file: argparse.FileType) -> dict: + """ + Read the opened input JSON file with contents like so: + { + "ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_POOL:oid:0x18000000000628": [ + "SAI_BUFFER_POOL_ATTR_THRESHOLD_MODE", + "SAI_BUFFER_POOL_ATTR_SIZE", + "SAI_BUFFER_POOL_ATTR_TYPE" + ], + ... + } + + Closes the file after reading. + + :param input_file: The opened input JSON file object + :return: The loaded JSON data + """ + with input_file as f: + return json.load(f) + + +def get_numeric_oid_from_label(oid_label: str) -> int: + """ + From a label like "ASIC_STATE:SAI_OBJECT_TYPE_BUFFER_POOL:oid:0x18000000000628", + extracts and returns the numeric oid part 0x18000000000628. + + NOTE: There's also another form like so: + ASIC_STATE:SAI_OBJECT_TYPE_FDB_ENTRY:{\"bvid\":\"oid:0x260000000008da\",\"mac\":\"98:03:9B:03:22:14\",\"switch_id\":\"oid:0x21000000000000\"} + which isn't currently supported. + + :param oid_label: The label to extract the oid from + :return: The numeric oid value + """ + # Extract the value segment e.g.
oid:0x18000000000628 + value_segment = oid_label.split(":", 2)[2] + if not value_segment.startswith("oid:"): + raise NotImplementedError(f"Unsupported oid format: {oid_label}") + + oid_value = value_segment.split(":", 1)[1] + return int(oid_value, 16) + + +def lookup_attribute_value_in_pysairedis(attr: str) -> int: + """ + Given an attribute name like "SAI_BUFFER_POOL_ATTR_THRESHOLD_MODE", return the corresponding + attribute oid from pysairedis. + + :param attr: The attribute name + :return: The attribute oid value + """ + return getattr(pysairedis, attr) + + +# Generate a one-time lookup table for all SAI status codes in the query results +sai_status_map = { + "single": {}, + "range": defaultdict(list), +} +for key, value in vars(pysairedis).items(): + if key.startswith("SAI_STATUS_"): + key = key.replace("SAI_STATUS_", "") + if key.endswith("_0") or key.endswith("_MAX"): + # Range + range_key = key[:-2] if key.endswith("_0") else key[:-4] + sai_status_map["range"][range_key].append(value) # Add to end of list + sai_status_map["range"][range_key].sort() # Only ever 0-2 elements, so this won't be expensive + else: + # Single value + sai_status_map["single"][value] = key + + +def map_sai_status_to_str(status_code: int) -> str: + """ + Given a SAI status code e.g. -196608, return the string representation e.g. SAI_STATUS_ATTR_NOT_SUPPORTED + + :param status_code: The numeric SAI status code + :return: The string representation of the status code + """ + if status_code in sai_status_map["single"]: + return sai_status_map["single"][status_code] + + # See if it falls in range of any status + for status_str, status_code_range in sai_status_map["range"].items(): + if status_code_range[0] <= status_code and status_code <= status_code_range[1]: + return status_str + + return "UNKNOWN_SAI_STATUS" + + +def get_attribute_value_from_asic(oid, attribute_oid): + """ + Given an oid and attribute_oid, query the ASIC for the attribute value. 
The attribute value + is transformed to match the format of the ASIC_DB. + + :param oid: The oid of the object to query + :param attribute_oid: The attribute oid of the object to query + :return: The attribute value from the ASIC in the format of the ASIC_DB + """ + + oid_type = pysairedis.sai_object_type_query(oid) + object_type_name = pysairedis.sai_metadata_get_object_type_name(oid_type).replace("SAI_OBJECT_TYPE_", "") + class_name = object_type_name.lower() + # Handle special cases where the class name is different + if object_type_name in ["BUFFER_POOL", "BUFFER_PROFILE"]: + class_name = "buffer" + api = getattr(pysairedis, f"sai_{class_name}_api_t")() + status = getattr(pysairedis, f"sai_get_{class_name}_api")(api) + assert status == pysairedis.SAI_STATUS_SUCCESS, (f"Failed to get sai API {api}. " + f"Status: {map_sai_status_to_str(status)} ({status})") + + attr_metadata = pysairedis.sai_metadata_get_attr_metadata(oid_type, attribute_oid) + + attr = pysairedis.sai_attribute_t() + attr.id = attribute_oid + if attr_metadata.attrvaluetype == pysairedis.SAI_ATTR_VALUE_TYPE_UINT32_LIST: + # Extra initialization for reading into a list + attr.value.u32list.count = 32 + attr.value.u32list.list = pysairedis.new_uint32_t_arr(attr.value.u32list.count) + + # Read the attribute from the ASIC into attr + func_name = f"get_{object_type_name.lower()}_attribute" + status = getattr(api, func_name)(oid, 1, attr) + assert status == pysairedis.SAI_STATUS_SUCCESS, \ + (f"Failed to call SAI API {func_name} for oid {oid} and attribute " + f"{attribute_oid}. Status: {map_sai_status_to_str(status)} ({status})") + + # Extract the attribute value from attr + attr_value = extract_attr_value(attr_metadata, attr) + return attr_value + + +def query_asic_objects(query_objects) -> dict: + """ + Query the ASIC for the attributes of the objects provided in deserialized JSON input file format. 
+ + :param query_objects: The deserialized JSON input file format + :return: The deserialized JSON output format + """ + + results = defaultdict(dict) + + for oid_label_key, attributes in query_objects.items(): + try: + logger.debug(f"Querying ASIC for object key {oid_label_key}") + oid = get_numeric_oid_from_label(oid_label_key) + except Exception as e: + err_msg = f"Failed to extract oid from label '{oid_label_key}': {e}" + logger.warning(err_msg) + for attribute in attributes: + results[oid_label_key][attribute] = {"success": False, "error": err_msg, "asicValue": None} + continue + + for attribute in attributes: + try: + logger.debug(f"Querying ASIC object {oid_label_key} ({oid}) for attribute {attribute}") + attribute_oid = lookup_attribute_value_in_pysairedis(attribute) + asic_value = get_attribute_value_from_asic(oid, attribute_oid) + + # Convert to str to match how values are represented in ASIC_DB + if asic_value in [True, False]: + # ASIC_DB represents these as lowercase + asic_value = str(asic_value).lower() + elif asic_value is None: + asic_value = "NULL" + else: + asic_value = str(asic_value) + + # Success + results[oid_label_key][attribute] = {"asicValue": asic_value, "success": True} + logger.debug((f"Got ASIC object {oid_label_key} ({oid}) -> attribute {attribute} ({attribute_oid}) " + f"value {asic_value}")) + + except Exception as e: + err_msg = f"Failed to lookup attribute '{attribute}': {e}" + logger.warning(err_msg) + results[oid_label_key][attribute] = {"success": False, "error": err_msg, "asicValue": None} + + return dict(results) + + +def initialize_sai_api(): + """ + Initialize the SAI API + """ + logger.info("Initializing SAI API") + profileMap = dict() + profileMap[pysairedis.SAI_REDIS_KEY_ENABLE_CLIENT] = "true" + status = pysairedis.sai_api_initialize(0, profileMap) + assert status == pysairedis.SAI_STATUS_SUCCESS, "Failed to initialize SAI API" + logger.info("SAI API initialized") + + +def uninitialize_sai_api(): + """ + Uninitialize 
the SAI API + """ + logger.info("Uninitializing SAI API") + status = pysairedis.sai_api_uninitialize() + assert status == pysairedis.SAI_STATUS_SUCCESS, "Failed to uninitialize SAI API" + logger.info("SAI API uninitialized") + + +def main(args): + parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=HELP_TEXT) + parser.add_argument("-i", "--input", type=argparse.FileType("r"), help="Input JSON file", required=True) + args = parser.parse_args(args) + + try: + query_objects = load_input(args.input) + except Exception as e: + sys.exit(f"Failed to parse JSON input file {args.input}: {e}") + + initialize_sai_api() + + try: + results = query_asic_objects(query_objects) + finally: + uninitialize_sai_api() + + print(json.dumps(results)) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/tests/platform_tests/args/advanced_reboot_args.py b/tests/platform_tests/args/advanced_reboot_args.py index 300dffba7ad..392c538ff2e 100644 --- a/tests/platform_tests/args/advanced_reboot_args.py +++ b/tests/platform_tests/args/advanced_reboot_args.py @@ -149,3 +149,30 @@ def add_advanced_reboot_args(parser): "sad(3h45m), multi_sad(5h), sad_bgp(1h5m), sad_lag_member(1h15m), sad_lag(1h15m), " + "sad_vlan_port(1h10m), sad_inboot(1h20m)", ) + + parser.addoption( + "--enable_consistency_checker", + action="store_true", + default=False, + help="Enables the consistency checker between the ASIC_DB and ASIC itself for the test" + ) + + parser.addoption( + "--consistency_checker_libsairedis_url_template", + default="", + help="Optional URL template for downloading and using an alternative version of libsairedis deb package " + + "during the consistency check. E.g. " + + "http://build-server.example/sonic-buildimage/{sonic_version}/debs/libsairedis_1.0.0_amd64.deb " + + "sonic_version is a template token that will be replaced with the actual sonic version of the device under " + + "test. e.g. 
202305" + ) + + parser.addoption( + "--consistency_checker_python3_pysairedis_url_template", + default="", + help="Optional URL template for downloading and using an alternative version of python3-pysairedis deb " + + "package during the consistency check. E.g. " + + "http://build-server.example/sonic-buildimage/{sonic_version}/debs/python3-pysairedis_1.0.0_amd64.deb " + + "sonic_version is a template token that will be replaced with the actual sonic version of the device under " + + "test. e.g. 202305" + ) diff --git a/tests/platform_tests/test_advanced_reboot.py b/tests/platform_tests/test_advanced_reboot.py index c1bd133a622..988145886bc 100644 --- a/tests/platform_tests/test_advanced_reboot.py +++ b/tests/platform_tests/test_advanced_reboot.py @@ -7,6 +7,7 @@ from tests.common.fixtures.duthost_utils import backup_and_restore_config_db # noqa F401 from tests.common.fixtures.advanced_reboot import get_advanced_reboot # noqa F401 from tests.platform_tests.verify_dut_health import verify_dut_health # noqa F401 +from tests.common.fixtures.consistency_checker.consistency_checker import consistency_checker_provider # noqa F401 from tests.platform_tests.verify_dut_health import add_fail_step_to_reboot # noqa F401 from tests.platform_tests.warmboot_sad_cases import get_sad_case_list, SAD_CASE_LIST @@ -60,7 +61,8 @@ def pytest_generate_tests(metafunc): # Tetcases to verify normal reboot procedure ### def test_fast_reboot(request, get_advanced_reboot, verify_dut_health, # noqa F811 - advanceboot_loganalyzer, capture_interface_counters): + advanceboot_loganalyzer, consistency_checker_provider, # noqa F811 + capture_interface_counters): ''' Fast reboot test case is run using advanced reboot test fixture @@ -68,13 +70,15 @@ def test_fast_reboot(request, get_advanced_reboot, verify_dut_health, @param get_advanced_reboot: advanced reboot test fixture ''' advancedReboot = get_advanced_reboot(rebootType='fast-reboot', - advanceboot_loganalyzer=advanceboot_loganalyzer) + 
advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) advancedReboot.runRebootTestcase() def test_fast_reboot_from_other_vendor(duthosts, rand_one_dut_hostname, request, - get_advanced_reboot, verify_dut_health, # noqa F811 - advanceboot_loganalyzer, capture_interface_counters): + get_advanced_reboot, verify_dut_health, # noqa F811 + advanceboot_loganalyzer, consistency_checker_provider, # noqa F811 + capture_interface_counters): ''' Fast reboot test from other vendor case is run using advanced reboot test fixture @@ -83,7 +87,8 @@ def test_fast_reboot_from_other_vendor(duthosts, rand_one_dut_hostname, request ''' duthost = duthosts[rand_one_dut_hostname] advancedReboot = get_advanced_reboot(rebootType='fast-reboot', other_vendor_nos=True, - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) # Before rebooting, we will flush all unnecessary databases, to mimic reboot from other vendor. 
flush_dbs(duthost) advancedReboot.runRebootTestcase() @@ -91,7 +96,8 @@ def test_fast_reboot_from_other_vendor(duthosts, rand_one_dut_hostname, request @pytest.mark.device_type('vs') def test_warm_reboot(request, testing_config, get_advanced_reboot, verify_dut_health, # noqa F811 - duthosts, advanceboot_loganalyzer, capture_interface_counters, + duthosts, advanceboot_loganalyzer, consistency_checker_provider, # noqa F811 + capture_interface_counters, toggle_all_simulator_ports, enum_rand_one_per_hwsku_frontend_hostname, # noqa F811 toggle_simulator_port_to_upper_tor): # noqa F811 ''' @@ -114,12 +120,14 @@ def test_warm_reboot(request, testing_config, get_advanced_reboot, verify_dut_he toggle_simulator_port_to_upper_tor(itfs) advancedReboot = get_advanced_reboot(rebootType='warm-reboot', - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, # noqa F811 + consistency_checker_provider=consistency_checker_provider) advancedReboot.runRebootTestcase() def test_warm_reboot_mac_jump(request, get_advanced_reboot, verify_dut_health, # noqa F811 - advanceboot_loganalyzer, capture_interface_counters): + advanceboot_loganalyzer, consistency_checker_provider, # noqa F811 + capture_interface_counters): ''' Warm reboot testcase with one MAC address (00-06-07-08-09-0A) jumping from all VLAN ports. @@ -134,7 +142,8 @@ def test_warm_reboot_mac_jump(request, get_advanced_reboot, verify_dut_health, generated during warm reboot will cause META checker failure resulting to Orchagent crash. 
''' advancedReboot = get_advanced_reboot(rebootType='warm-reboot', allow_mac_jumping=True, - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) advancedReboot.runRebootTestcase() @@ -142,8 +151,8 @@ def test_warm_reboot_mac_jump(request, get_advanced_reboot, verify_dut_health, @pytest.mark.device_type('vs') def test_warm_reboot_sad(duthosts, rand_one_dut_hostname, nbrhosts, fanouthosts, vmhost, tbinfo, get_advanced_reboot, verify_dut_health, advanceboot_loganalyzer, # noqa F811 - backup_and_restore_config_db, advanceboot_neighbor_restore, # noqa F811 - sad_case_type): + consistency_checker_provider, backup_and_restore_config_db, # noqa F811 + advanceboot_neighbor_restore, sad_case_type): # noqa F811 ''' Warm reboot with sad path @param get_advanced_reboot: Fixture located in advanced_reboot.py @@ -155,7 +164,8 @@ def test_warm_reboot_sad(duthosts, rand_one_dut_hostname, nbrhosts, fanouthosts, ''' duthost = duthosts[rand_one_dut_hostname] advancedReboot = get_advanced_reboot(rebootType='warm-reboot', - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) sad_preboot_list, sad_inboot_list = get_sad_case_list( duthost, nbrhosts, fanouthosts, vmhost, tbinfo, sad_case_type) diff --git a/tests/upgrade_path/test_upgrade_path.py b/tests/upgrade_path/test_upgrade_path.py index 5452ceec269..a4c2b3d9492 100644 --- a/tests/upgrade_path/test_upgrade_path.py +++ b/tests/upgrade_path/test_upgrade_path.py @@ -5,10 +5,11 @@ from tests.common import reboot from tests.common.reboot import get_reboot_cause from tests.common.reboot import REBOOT_TYPE_COLD -from tests.upgrade_path.upgrade_helpers import check_services, install_sonic, check_sonic_version,\ +from tests.upgrade_path.upgrade_helpers import check_services, install_sonic, check_sonic_version, \ 
get_reboot_command, check_copp_config from tests.upgrade_path.upgrade_helpers import restore_image # noqa F401 from tests.common.fixtures.advanced_reboot import get_advanced_reboot # noqa F401 +from tests.common.fixtures.consistency_checker.consistency_checker import consistency_checker_provider # noqa F401 from tests.platform_tests.verify_dut_health import verify_dut_health # noqa F401 from tests.common.fixtures.duthost_utils import backup_and_restore_config_db # noqa F401 @@ -51,7 +52,7 @@ def upgrade_path_lists(request): def test_upgrade_path(localhost, duthosts, ptfhost, rand_one_dut_hostname, nbrhosts, fanouthosts, tbinfo, restore_image, # noqa F811 get_advanced_reboot, verify_dut_health, advanceboot_loganalyzer, # noqa F811 - upgrade_path_lists): + upgrade_path_lists, consistency_checker_provider): # noqa F811 duthost = duthosts[rand_one_dut_hostname] upgrade_type, from_list_images, to_list_images, _ = upgrade_path_lists from_list = from_list_images.split(',') @@ -92,7 +93,8 @@ def test_upgrade_path(localhost, duthosts, ptfhost, rand_one_dut_hostname, reboot(duthost, localhost) else: advancedReboot = get_advanced_reboot(rebootType=get_reboot_command(duthost, upgrade_type), - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) advancedReboot.runRebootTestcase() reboot_cause = get_reboot_cause(duthost) logger.info("Check reboot cause. 
Expected cause {}".format(upgrade_type)) @@ -107,7 +109,8 @@ def test_warm_upgrade_sad_path(localhost, duthosts, ptfhost, rand_one_dut_hostna nbrhosts, fanouthosts, vmhost, tbinfo, restore_image, # noqa F811 get_advanced_reboot, verify_dut_health, advanceboot_loganalyzer, # noqa F811 upgrade_path_lists, backup_and_restore_config_db, # noqa F811 - advanceboot_neighbor_restore, sad_case_type): # noqa F811 + advanceboot_neighbor_restore, consistency_checker_provider, # noqa F811 + sad_case_type): # noqa F811 duthost = duthosts[rand_one_dut_hostname] upgrade_type, from_list_images, to_list_images, _ = upgrade_path_lists from_list = from_list_images.split(',') @@ -128,7 +131,8 @@ def test_warm_upgrade_sad_path(localhost, duthosts, ptfhost, rand_one_dut_hostna logger.info("Upgrading to {}".format(to_image)) install_sonic(duthost, to_image, tbinfo) advancedReboot = get_advanced_reboot(rebootType=get_reboot_command(duthost, "warm"), - advanceboot_loganalyzer=advanceboot_loganalyzer) + advanceboot_loganalyzer=advanceboot_loganalyzer, + consistency_checker_provider=consistency_checker_provider) sad_preboot_list, sad_inboot_list = get_sad_case_list( duthost, nbrhosts, fanouthosts, vmhost, tbinfo, sad_case_type) advancedReboot.runRebootTestcase(