diff --git a/dump/main.py b/dump/main.py index b936f18101..eb33c95ad6 100644 --- a/dump/main.py +++ b/dump/main.py @@ -6,11 +6,10 @@ from tabulate import tabulate from sonic_py_common import multi_asic from utilities_common.constants import DEFAULT_NAMESPACE +from dump.match_infra import RedisSource, JsonSource, MatchEngine, CONN from swsscommon.swsscommon import ConfigDBConnector -from dump.match_infra import RedisSource, JsonSource, ConnectionPool from dump import plugins - # Autocompletion Helper def get_available_modules(ctx, args, incomplete): return [k for k in plugins.dump_modules.keys() if incomplete in k] @@ -29,8 +28,9 @@ def show_modules(ctx, param, value): @click.group() -def dump(): - pass +@click.pass_context +def dump(ctx): + ctx.obj = MatchEngine() @dump.command() @@ -70,8 +70,7 @@ def state(ctx, module, identifier, db, table, key_map, verbose, namespace): else: os.environ["VERBOSE"] = "0" - ctx.module = module - obj = plugins.dump_modules[module]() + obj = plugins.dump_modules[module](ctx.obj) if identifier == "all": ids = obj.get_all_args(namespace) @@ -91,10 +90,10 @@ def state(ctx, module, identifier, db, table, key_map, verbose, namespace): if len(db) > 0: collected_info = filter_out_dbs(db, collected_info) - vidtorid = extract_rid(collected_info, namespace) + vidtorid = extract_rid(collected_info, namespace, ctx.obj.conn_pool) if not key_map: - collected_info = populate_fv(collected_info, module, namespace) + collected_info = populate_fv(collected_info, module, namespace, ctx.obj.conn_pool) for id in vidtorid.keys(): collected_info[id]["ASIC_DB"]["vidtorid"] = vidtorid[id] @@ -104,8 +103,8 @@ def state(ctx, module, identifier, db, table, key_map, verbose, namespace): return -def extract_rid(info, ns): - r = RedisSource(ConnectionPool()) +def extract_rid(info, ns, conn_pool): + r = RedisSource(conn_pool) r.connect("ASIC_DB", ns) vidtorid = {} vid_cache = {} # Cache Entries to reduce number of Redis Calls @@ -146,19 +145,20 @@ def 
filter_out_dbs(db_list, collected_info): return collected_info -def populate_fv(info, module, namespace): +def populate_fv(info, module, namespace, conn_pool): all_dbs = set() for id in info.keys(): for db_name in info[id].keys(): all_dbs.add(db_name) db_cfg_file = JsonSource() - db_conn = ConnectionPool().initialize_connector(namespace) for db_name in all_dbs: if db_name == "CONFIG_FILE": db_cfg_file.connect(plugins.dump_modules[module].CONFIG_FILE, namespace) else: - db_conn.connect(db_name) + conn_pool.get(db_name, namespace) + + db_conn = conn_pool.cache.get(namespace, {}).get(CONN, None) final_info = {} for id in info.keys(): diff --git a/dump/match_infra.py b/dump/match_infra.py index ec493d2bcf..8b15f69a05 100644 --- a/dump/match_infra.py +++ b/dump/match_infra.py @@ -7,6 +7,10 @@ from sonic_py_common import multi_asic from utilities_common.constants import DEFAULT_NAMESPACE +# Constants +CONN = "conn" +CONN_TO = "connected_to" + EXCEP_DICT = { "INV_REQ": "Argument should be of type MatchRequest", "INV_DB": "DB provided is not valid", @@ -249,12 +253,12 @@ def get(self, db_name, ns, update=False): """ Returns a SonicV2Connector Object and caches it for further requests """ if ns not in self.cache: self.cache[ns] = {} - self.cache[ns]["conn"] = self.initialize_connector(ns) - self.cache[ns]["connected_to"] = set() - if update or db_name not in self.cache[ns]["connected_to"]: - self.cache[ns]["conn"].connect(db_name) - self.cache[ns]["connected_to"].add(db_name) - return self.cache[ns]["conn"] + self.cache[ns][CONN] = self.initialize_connector(ns) + self.cache[ns][CONN_TO] = set() + if update or db_name not in self.cache[ns][CONN_TO]: + self.cache[ns][CONN].connect(db_name) + self.cache[ns][CONN_TO].add(db_name) + return self.cache[ns][CONN] def clear(self, namespace=None): if not namespace: @@ -264,7 +268,7 @@ def clear(self, namespace=None): def fill(self, ns, conn, connected_to): """ Update internal cache """ - self.cache[ns] = {'conn': conn, 
'connected_to': set(connected_to)} + self.cache[ns] = {CONN: conn, CONN_TO: set(connected_to)} class MatchEngine: diff --git a/tests/dump_input/dump/asic0/appl_db.json b/tests/dump_input/dump/asic0/appl_db.json new file mode 100644 index 0000000000..64a0b6a63c --- /dev/null +++ b/tests/dump_input/dump/asic0/appl_db.json @@ -0,0 +1,14 @@ +{ + "PORT_TABLE:Ethernet0": { + "lanes": "33,34,35,36", + "description": "ARISTA01T2:Ethernet3/1/1", + "pfc_asym": "off", + "mtu": "9100", + "alias": "Ethernet1/1", + "oper_status": "up", + "admin_status": "up", + "role": "Ext", + "speed": "40000", + "asic_port_name": "Eth0-ASIC0" + } +} \ No newline at end of file diff --git a/tests/dump_input/dump/asic0/asic_db.json b/tests/dump_input/dump/asic0/asic_db.json new file mode 100644 index 0000000000..1a769b82b5 --- /dev/null +++ b/tests/dump_input/dump/asic0/asic_db.json @@ -0,0 +1,6 @@ +{ + "ASIC_STATE:SAI_OBJECT_TYPE_SWITCH:oid:0x21000000000000": { + "SAI_SWITCH_ATTR_INIT_SWITCH": "true", + "SAI_SWITCH_ATTR_SRC_MAC_ADDRESS": "DE:AD:BE:EF:CA:FE" + } +} diff --git a/tests/dump_input/dump/asic0/config_db.json b/tests/dump_input/dump/asic0/config_db.json new file mode 100644 index 0000000000..edf50afb96 --- /dev/null +++ b/tests/dump_input/dump/asic0/config_db.json @@ -0,0 +1,13 @@ +{ + "PORT|Ethernet0": { + "admin_status": "up", + "alias": "Ethernet1/1", + "asic_port_name": "Eth0-ASIC0", + "description": "ARISTA01T2:Ethernet3/1/1", + "lanes": "33,34,35,36", + "mtu": "9100", + "pfc_asym": "off", + "role": "Ext", + "speed": "40000" + } +} diff --git a/tests/dump_input/dump/asic0/state_db.json b/tests/dump_input/dump/asic0/state_db.json new file mode 100644 index 0000000000..cdea61851d --- /dev/null +++ b/tests/dump_input/dump/asic0/state_db.json @@ -0,0 +1,20 @@ +{ + "TRANSCEIVER_INFO|Ethernet0": { + "type": "QSFP28 or later", + "vendor_rev": "AC", + "serial": "MT1706FT02064", + "manufacturer": "Mellanox", + "model": "MFA1A00-C003", + "vendor_oui": "00-02-c9", + "vendor_date": 
"2017-01-13 ", + "connector": "No separable connector", + "encoding": "64B66B", + "ext_identifier": "Power Class 3(2.5W max), CDR present in Rx Tx", + "ext_rateselect_compliance": "QSFP+ Rate Select Version 1", + "cable_type": "Length Cable Assembly(m)", + "cable_length": "3", + "specification_compliance": "{'10/40G Ethernet Compliance Code': '40G Active Cable (XLPPI)'}", + "nominal_bit_rate": "255", + "application_advertisement": "N/A" + } +} diff --git a/tests/dump_input/dump/asic1/appl_db.json b/tests/dump_input/dump/asic1/appl_db.json new file mode 100644 index 0000000000..35ec025315 --- /dev/null +++ b/tests/dump_input/dump/asic1/appl_db.json @@ -0,0 +1,13 @@ +{ + "PORT_TABLE:Ethernet-BP256": { + "oper_status": "up", + "lanes": "61,62,63,64", + "description": "ASIC0:Eth16-ASIC0", + "pfc_asym": "off", + "mtu": "9100", + "alias": "Ethernet-BP256", + "admin_status": "up", + "speed": "40000", + "asic_port_name": "Eth0-ASIC1" + } +} diff --git a/tests/dump_input/dump/asic1/asic_db.json b/tests/dump_input/dump/asic1/asic_db.json new file mode 100644 index 0000000000..642549040a --- /dev/null +++ b/tests/dump_input/dump/asic1/asic_db.json @@ -0,0 +1,6 @@ +{ + "ASIC_STATE:SAI_OBJECT_TYPE_SWITCH:oid:0x21000000000000": { + "SAI_SWITCH_ATTR_INIT_SWITCH": "true", + "SAI_SWITCH_ATTR_SRC_MAC_ADDRESS": "DE:AD:BE:EF:CA:FF" + } +} diff --git a/tests/dump_input/dump/asic1/config_db.json b/tests/dump_input/dump/asic1/config_db.json new file mode 100644 index 0000000000..a7b8ed40a3 --- /dev/null +++ b/tests/dump_input/dump/asic1/config_db.json @@ -0,0 +1,13 @@ +{ + "PORT|Ethernet-BP256": { + "admin_status": "up", + "alias": "Ethernet-BP256", + "asic_port_name": "Eth0-ASIC1", + "description": "ASIC0:Eth16-ASIC0", + "lanes": "61,62,63,64", + "mtu": "9100", + "pfc_asym": "off", + "role": "Int", + "speed": "40000" + } +} diff --git a/tests/dump_input/dump/asic1/state_db.json b/tests/dump_input/dump/asic1/state_db.json new file mode 100644 index 0000000000..f5ccda4771 --- /dev/null 
+++ b/tests/dump_input/dump/asic1/state_db.json @@ -0,0 +1,20 @@ +{ + "TRANSCEIVER_INFO|Ethernet-BP256": { + "type": "QSFP28 or later", + "vendor_rev": "AC", + "serial": "MT1706FT02064", + "manufacturer": "Mellanox", + "model": "MFA1A00-C003", + "vendor_oui": "00-02-c9", + "vendor_date": "2017-01-13 ", + "connector": "No separable connector", + "encoding": "64B66B", + "ext_identifier": "Power Class 3(2.5W max), CDR present in Rx Tx", + "ext_rateselect_compliance": "QSFP+ Rate Select Version 1", + "cable_type": "Length Cable Assembly(m)", + "cable_length": "3", + "specification_compliance": "{'10/40G Ethernet Compliance Code': '40G Active Cable (XLPPI)'}", + "nominal_bit_rate": "255", + "application_advertisement": "N/A" + } +} \ No newline at end of file diff --git a/tests/dump_input/dump/default/appl_db.json b/tests/dump_input/dump/default/appl_db.json new file mode 100644 index 0000000000..99b9488ae0 --- /dev/null +++ b/tests/dump_input/dump/default/appl_db.json @@ -0,0 +1,59 @@ +{ + "PORT_TABLE:Ethernet176": { + "index": "0", + "lanes": "0", + "alias": "etp45", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + }, + "PORT_TABLE:Ethernet160": { + "index": "0", + "lanes": "0", + "alias": "etp41", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + }, + "PORT_TABLE:Ethernet164": { + "index": "0", + "lanes": "0", + "alias": "etp42", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + }, + "PORT_TABLE:Ethernet0": { + "index": "0", + "lanes": "0", + "alias": "Ethernet0", + "description": "ARISTA01T2:Ethernet1", + "speed": "25000", + "oper_status": "down", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs" + }, + "PORT_TABLE:Ethernet200": { + "index": "200", + "lanes": "200,201,202,203", + "alias": "Ethernet200", + "description": "Ethernet200", + "speed": 
"100000", + "oper_status": "down", + "fec": "rs", + "mtu": "9100", + "tpid": "0x8100", + "pfc_asym": "off", + "admin_status": "up" + } +} \ No newline at end of file diff --git a/tests/dump_input/dump/default/asic_db.json b/tests/dump_input/dump/default/asic_db.json new file mode 100644 index 0000000000..346f38f41d --- /dev/null +++ b/tests/dump_input/dump/default/asic_db.json @@ -0,0 +1,50 @@ +{ + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4d":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x100000000036a", + "SAI_HOSTIF_ATTR_NAME" : "Ethernet176", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x100000000036a": { + "SAI_PORT_ATTR_ADMIN_STATE" : "true", + "SAI_PORT_ATTR_SPEED" : "25000", + "SAI_PORT_ATTR_MTU" : "9122" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a49":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000002e6", + "SAI_HOSTIF_ATTR_NAME" : "Ethernet160", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4a":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x1000000000307", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x1000000000307": { + "SAI_PORT_ATTR_ADMIN_STATE" : "true", + "SAI_PORT_ATTR_SPEED" : "25000", + "SAI_PORT_ATTR_MTU" : "9122" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_SWITCH:oid:0x21000000000000": { + "SAI_SWITCH_ATTR_INIT_SWITCH": "true", + "SAI_SWITCH_ATTR_SRC_MAC_ADDRESS": "DE:AD:BE:EF:CA:FE" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": { + "SAI_HOSTIF_ATTR_NAME": "Ethernet0", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", + "SAI_HOSTIF_ATTR_OPER_STATUS": "true", + "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": { + "NULL": 
"NULL", + "SAI_PORT_ATTR_ADMIN_STATE": "true", + "SAI_PORT_ATTR_MTU": "9122", + "SAI_PORT_ATTR_SPEED": "100000" + }, + "VIDTORID":{ + "oid:0xd00000000056d": "oid:0xd", + "oid:0x10000000004a4": "oid:0x1690000000001" + } +} diff --git a/tests/dump_input/dump/default/config_db.json b/tests/dump_input/dump/default/config_db.json new file mode 100644 index 0000000000..ff1830b945 --- /dev/null +++ b/tests/dump_input/dump/default/config_db.json @@ -0,0 +1,89 @@ +{ + "PORT|Ethernet176": { + "admin_status" : "up", + "alias": "etp45", + "index": "45", + "lanes": "176", + "speed": "25000" + }, + "PORT|Ethernet164": { + "admin_status" : "up", + "alias": "etp42", + "index": "42", + "lanes": "164", + "speed": "25000" + }, + "PORT|Ethernet160": { + "admin_status" : "up", + "alias": "etp41", + "index": "41", + "lanes": "160", + "speed": "25000" + }, + "PORT|Ethernet156": { + "admin_status" : "up", + "alias": "etp40", + "index": "40", + "lanes": "156", + "speed": "25000" + }, + "PORT|Ethernet0": { + "alias": "etp1", + "description": "etp1", + "index": "0", + "lanes": "25,26,27,28", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + }, + "PORT|Ethernet4": { + "admin_status": "up", + "alias": "etp2", + "description": "Servers0:eth0", + "index": "1", + "lanes": "29,30,31,32", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + }, + "SFLOW_COLLECTOR|prod": { + "collector_ip": "fe80::6e82:6aff:fe1e:cd8e", + "collector_port": "6343", + "collector_vrf": "mgmt" + }, + "SFLOW_COLLECTOR|ser5": { + "collector_ip": "172.21.35.15", + "collector_port": "6343", + "collector_vrf": "default" + }, + "ACL_RULE|EVERFLOW|RULE_6": { + "PACKET_ACTION": "FORWARD", + "PRIORITY": "9994", + "TCP_FLAGS": "0x12/0x12" + }, + "ACL_RULE|EVERFLOW|RULE_08": { + "PACKET_ACTION": "FORWARD", + "PRIORITY": "9992", + "SRC_IP": "10.0.0.3/32" + }, + "ACL_TABLE|SSH_ONLY": { + "policy_desc": "SSH_ONLY", + "services@": "SSH", + "type": "CTRLPLANE" + }, + "SFLOW|global": { + "admin_state": "up", + 
"polling_interval": "0" + }, + "PORT|Ethernet60": { + "admin_status": "up", + "alias": "etp16", + "description": "Servers14:eth0", + "index": "15", + "lanes": "61,62,63,64", + "mtu": "9100", + "tpid": "0x8100", + "pfc_asym": "off", + "speed": "40000" + } +} diff --git a/tests/dump_input/dump/default/state_db.json b/tests/dump_input/dump/default/state_db.json new file mode 100644 index 0000000000..3b5a0d7604 --- /dev/null +++ b/tests/dump_input/dump/default/state_db.json @@ -0,0 +1,66 @@ +{ + "PORT_TABLE|Ethernet176":{ + "state" : "ok", + "netdev_oper_status" : "up" + }, + "PORT_TABLE|Ethernet160":{ + "state" : "ok", + "netdev_oper_status" : "up" + }, + "PORT_TABLE|Ethernet164":{ + "state" : "ok", + "netdev_oper_status" : "up" + }, + "PORT_TABLE|Ethernet0": { + "speed" : "100000", + "supported_speeds": "10000,25000,40000,100000" + }, + "REBOOT_CAUSE|2020_10_09_04_53_58": { + "cause": "warm-reboot", + "time": "Fri Oct 9 04:51:47 UTC 2020", + "user": "admin", + "comment": "N/A" + }, + "REBOOT_CAUSE|2020_10_09_02_33_06": { + "cause": "reboot", + "time": "Fri Oct 9 02:29:44 UTC 2020", + "user": "admin", + "comment": "N/A" + }, + "CHASSIS_MODULE_TABLE|FABRIC-CARD1": { + "desc": "fabric-card", + "oper_status": "Offline", + "slot": "18" + }, + "VXLAN_TUNNEL_TABLE|EVPN_25.25.25.25": { + "src_ip": "1.1.1.1", + "dst_ip": "25.25.25.25", + "tnl_src": "EVPN", + "operstatus": "down" + }, + "VXLAN_TUNNEL_TABLE|EVPN_25.25.25.26": { + "src_ip": "1.1.1.1", + "dst_ip": "25.25.25.26", + "tnl_src": "EVPN", + "operstatus": "down" + }, + "VXLAN_TUNNEL_TABLE|EVPN_25.25.25.27": { + "src_ip": "1.1.1.1", + "dst_ip": "25.25.25.27", + "tnl_src": "EVPN", + "operstatus": "down" + }, + "FAN_INFO|fan1": { + "drawer_name": "drawer1", + "presence": "True", + "model": "N/A", + "serial": "N/A", + "status": "True", + "direction": "intake", + "speed": "30", + "speed_tolerance": "50", + "speed_target": "20", + "led_status": "red", + "timestamp": "20200813 01:32:30" + } +} diff --git 
a/tests/dump_tests/dump_state_test.py b/tests/dump_tests/dump_state_test.py index 4aeb4b57fd..5468d966ee 100644 --- a/tests/dump_tests/dump_state_test.py +++ b/tests/dump_tests/dump_state_test.py @@ -2,22 +2,28 @@ import sys import json import pytest -from unittest import mock, TestCase -from click.testing import CliRunner +import traceback import dump.main as dump -from deepdiff import DeepDiff + +from unittest import mock, TestCase from importlib import reload +from click.testing import CliRunner from utilities_common.db import Db -import traceback +from dump.match_infra import ConnectionPool, MatchEngine, CONN +from dump.helper import populate_mock +from deepdiff import DeepDiff from utilities_common.constants import DEFAULT_NAMESPACE from pyfakefs.fake_filesystem_unittest import Patcher - +from swsscommon.swsscommon import SonicV2Connector +from ..mock_tables import dbconnector def compare_json_output(exp_json, rec, exclude_paths=None): print("EXPECTED: \n") print(json.dumps(exp_json, indent=4)) try: rec_json = json.loads(rec) + print("RECIEVED: \n") + print(json.dumps(rec_json, indent=4)) except Exception as e: print(rec) assert False, "CLI Output is not in JSON Format" @@ -25,22 +31,20 @@ def compare_json_output(exp_json, rec, exclude_paths=None): table_display_output = '''\ -+-------------+-----------+--------------------------------------------------------------------------------+ -| port_name | DB_NAME | DUMP | -+=============+===========+================================================================================+ -| Ethernet0 | STATE_DB | +----------------------+-----------------------------------------------------+ | -| | | | Keys | field-value pairs | | -| | | +======================+=====================================================+ | -| | | | PORT_TABLE|Ethernet0 | +----------------------+--------------------------+ | | -| | | | | | field | value | | | -| | | | | |----------------------+--------------------------| | | -| | | | | | 
rmt_adv_speeds | 10,100,1000 | | | -| | | | | | speed | 100000 | | | -| | | | | | supported_speeds | 10000,25000,40000,100000 | | | -| | | | | | link_training_status | not_trained | | | -| | | | | +----------------------+--------------------------+ | | -| | | +----------------------+-----------------------------------------------------+ | -+-------------+-----------+--------------------------------------------------------------------------------+ ++-------------+-----------+----------------------------------------------------------------------------+ +| port_name | DB_NAME | DUMP | ++=============+===========+============================================================================+ +| Ethernet0 | STATE_DB | +----------------------+-------------------------------------------------+ | +| | | | Keys | field-value pairs | | +| | | +======================+=================================================+ | +| | | | PORT_TABLE|Ethernet0 | +------------------+--------------------------+ | | +| | | | | | field | value | | | +| | | | | |------------------+--------------------------| | | +| | | | | | speed | 100000 | | | +| | | | | | supported_speeds | 10000,25000,40000,100000 | | | +| | | | | +------------------+--------------------------+ | | +| | | +----------------------+-------------------------------------------------+ | ++-------------+-----------+----------------------------------------------------------------------------+ ''' @@ -108,53 +112,71 @@ def compare_json_output(exp_json, rec, exclude_paths=None): +-----------+-------------+---------------------------------------------------------------+ ''' -class TestDumpState(object): +@pytest.fixture(scope="class") +def match_engine(): + print("SETUP") + os.environ["VERBOSE"] = "1" + + dump_port_input = os.path.join(os.path.dirname(__file__), "../dump_input/dump/default") + + dedicated_dbs = {} + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_port_input, "config_db.json") + dedicated_dbs['APPL_DB'] = 
os.path.join(dump_port_input, "appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_port_input, "state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_port_input, "asic_db.json") + + conn = SonicV2Connector() + # popualate the db ,with mock data + db_names = list(dedicated_dbs.keys()) + try: + populate_mock(conn, db_names, dedicated_dbs) + except Exception as e: + assert False, "Mock initialization failed: " + str(e) + + conn_pool = ConnectionPool() + conn_pool.fill(DEFAULT_NAMESPACE, conn, db_names) + match_engine = MatchEngine(conn_pool) - @classmethod - def setup_class(cls): - print("SETUP") - os.environ["UTILITIES_UNIT_TESTING"] = "1" - mock_db_path = os.path.join(os.path.dirname(__file__), "../mock_tables/") + yield match_engine + print("TEARDOWN") - def test_identifier_single(self): + +@pytest.mark.usefixtures("match_engine") +class TestDumpState: + + def test_identifier_single(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0"]) + result = runner.invoke(dump.state, ["port", "Ethernet0"], obj=match_engine) expected = {'Ethernet0': {'CONFIG_DB': {'keys': [{'PORT|Ethernet0': {'alias': 'etp1', 'description': 'etp1', 'index': '0', 'lanes': '25,26,27,28', 'mtu': '9100', 'pfc_asym': 'off', 'speed': '40000'}}], 'tables_not_found': []}, - 'APPL_DB': {'keys': [{'PORT_TABLE:Ethernet0': {'index': '0', 'lanes': '0', 'alias': 'Ethernet0', 'description': 'ARISTA01T2:Ethernet1', 'speed': '25000', 'oper_status': 'down', 'pfc_asym': 'off', 'mtu': '9100', 'fec': 'rs', 'admin_status': 'up'}}], 'tables_not_found': []}, + 'APPL_DB': {'keys': [{'PORT_TABLE:Ethernet0': {'index': '0', 'lanes': '0', 'alias': 'Ethernet0', 'description': 'ARISTA01T2:Ethernet1', 'speed': '25000', 'oper_status': 'down', 'pfc_asym': 'off', 'mtu': '9100', 'fec': 'rs'}}], 'tables_not_found': []}, 'ASIC_DB': {'keys': [{'ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d': {'SAI_HOSTIF_ATTR_NAME': 'Ethernet0', 'SAI_HOSTIF_ATTR_OBJ_ID': 
'oid:0x10000000004a4', 'SAI_HOSTIF_ATTR_OPER_STATUS': 'true', 'SAI_HOSTIF_ATTR_TYPE': 'SAI_HOSTIF_TYPE_NETDEV', 'SAI_HOSTIF_ATTR_VLAN_TAG': 'SAI_HOSTIF_VLAN_TAG_STRIP'}}, {'ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4': {'NULL': 'NULL', 'SAI_PORT_ATTR_ADMIN_STATE': 'true', 'SAI_PORT_ATTR_MTU': '9122', 'SAI_PORT_ATTR_SPEED': '100000'}}], 'tables_not_found': [], 'vidtorid': {'oid:0xd00000000056d': 'oid:0xd', 'oid:0x10000000004a4': 'oid:0x1690000000001'}}, - 'STATE_DB': {'keys': [{'PORT_TABLE|Ethernet0': {'rmt_adv_speeds': '10,100,1000', 'speed': '100000', 'supported_speeds': '10000,25000,40000,100000', 'link_training_status': 'not_trained'}}], 'tables_not_found': []}}} + 'STATE_DB': {'keys': [{'PORT_TABLE|Ethernet0': {'speed': '100000', 'supported_speeds': '10000,25000,40000,100000'}}], 'tables_not_found': []}}} assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) - # Cause other tests depend and change these paths in the mock_db, this test would fail everytime when a field or a value in changed in this path, creating noise - # and therefore ignoring these paths. 
field-value dump capability of the utility is nevertheless verified using f-v dumps of ASIC_DB & STATE_DB - pths = ["root['Ethernet0']['CONFIG_DB']['keys'][0]['PORT|Ethernet0']", "root['Ethernet0']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet0']"] - ddiff = compare_json_output(expected, result.output, exclude_paths=pths) + ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_identifier_multiple(self): + def test_identifier_multiple(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0,Ethernet4"]) + result = runner.invoke(dump.state, ["port", "Ethernet0,Ethernet4"], obj=match_engine) print(result.output) expected = {"Ethernet0": {"CONFIG_DB": {"keys": [{"PORT|Ethernet0": {"alias": "etp1", "description": "etp1", "index": "0", "lanes": "25,26,27,28", "mtu": "9100", "pfc_asym": "off", "speed": "40000"}}], "tables_not_found": []}, - "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet0": {"index": "0", "lanes": "0", "alias": "Ethernet0", "description": "ARISTA01T2:Ethernet1", "speed": "25000", "oper_status": "down", "pfc_asym": "off", "mtu": "9100", "fec": "rs", "admin_status": "up"}}], "tables_not_found": []}, + "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet0": {"index": "0", "lanes": "0", "alias": "Ethernet0", "description": "ARISTA01T2:Ethernet1", "speed": "25000", "oper_status": "down", "pfc_asym": "off", "mtu": "9100", "fec": "rs"}}], "tables_not_found": []}, "ASIC_DB": {"keys": [{"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": {"SAI_HOSTIF_ATTR_NAME": "Ethernet0", "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", "SAI_HOSTIF_ATTR_OPER_STATUS": "true", "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"}}, {"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": {"NULL": "NULL", "SAI_PORT_ATTR_ADMIN_STATE": "true", "SAI_PORT_ATTR_MTU": "9122", "SAI_PORT_ATTR_SPEED": "100000"}}], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": 
"oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, - "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"rmt_adv_speeds": "10,100,1000", "speed": "100000", "supported_speeds": "10000,25000,40000,100000", "link_training_status": "not_trained"}}], "tables_not_found": []}}, + "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"speed": "100000", "supported_speeds": "10000,25000,40000,100000"}}], "tables_not_found": []}}, "Ethernet4": {"CONFIG_DB": {"keys": [{"PORT|Ethernet4": {"admin_status": "up", "alias": "etp2", "description": "Servers0:eth0", "index": "1", "lanes": "29,30,31,32", "mtu": "9100", "pfc_asym": "off", "speed": "40000"}}], "tables_not_found": []}, "APPL_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}, "ASIC_DB": {"keys": [], "tables_not_found": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", "ASIC_STATE:SAI_OBJECT_TYPE_PORT"]}, "STATE_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}}} assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) - pths = ["root['Ethernet0']['CONFIG_DB']['keys'][0]['PORT|Ethernet0']", "root['Ethernet0']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet0']"] - pths.extend(["root['Ethernet4']['CONFIG_DB']['keys'][0]['PORT|Ethernet4']", "root['Ethernet4']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet4']"]) - ddiff = compare_json_output(expected, result.output, pths) + ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_option_key_map(self): + def test_option_key_map(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--key-map"]) - print(result.output) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--key-map"], obj=match_engine) expected = {"Ethernet0": {"CONFIG_DB": {"keys": ["PORT|Ethernet0"], "tables_not_found": []}, "APPL_DB": {"keys": ["PORT_TABLE:Ethernet0"], "tables_not_found": []}, "ASIC_DB": {"keys": 
["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d", "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4"], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, @@ -163,36 +185,31 @@ def test_option_key_map(self): ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_option_db_filtering(self): + def test_option_db_filtering(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "ASIC_DB", "--db", "STATE_DB"]) - print(result.output) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "ASIC_DB", "--db", "STATE_DB"], obj=match_engine) expected = {"Ethernet0": {"ASIC_DB": {"keys": [{"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": {"SAI_HOSTIF_ATTR_NAME": "Ethernet0", "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", "SAI_HOSTIF_ATTR_OPER_STATUS": "true", "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"}}, {"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": {"NULL": "NULL", "SAI_PORT_ATTR_ADMIN_STATE": "true", "SAI_PORT_ATTR_MTU": "9122", "SAI_PORT_ATTR_SPEED": "100000"}}], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, - "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"rmt_adv_speeds": "10,100,1000", "speed": "100000", "supported_speeds": "10000,25000,40000,100000", "link_training_status": "not_trained"}}], "tables_not_found": []}}} + "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"speed": "100000", "supported_speeds": "10000,25000,40000,100000"}}], "tables_not_found": []}}} assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_option_tabular_display(self): + def 
test_option_tabular_display(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "STATE_DB", "--table"]) - print(result.output) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "STATE_DB", "--table"], obj=match_engine) assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) assert table_display_output == result.output - def test_option_tabular_display_no_db_filter(self): + def test_option_tabular_display_no_db_filter(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--table", "--key-map"]) - print(result.output) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--table", "--key-map"], obj=match_engine) assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) assert table_display_output_no_filtering == result.output - def test_identifier_all_with_filtering(self): + def test_identifier_all_with_filtering(self, match_engine): runner = CliRunner() - expected_entries = [] - for i in range(0, 125, 4): - expected_entries.append("Ethernet" + str(i)) - result = runner.invoke(dump.state, ["port", "all", "--db", "CONFIG_DB", "--key-map"]) + expected_entries = ["Ethernet0", "Ethernet4", "Ethernet156", "Ethernet160", "Ethernet164", "Ethernet176", "Ethernet60"] + result = runner.invoke(dump.state, ["port", "all", "--db", "CONFIG_DB", "--key-map"], obj=match_engine) print(result.output) try: rec_json = json.loads(result.output) @@ -201,13 +218,13 @@ def test_identifier_all_with_filtering(self): ddiff = DeepDiff(set(expected_entries), set(rec_json.keys())) assert not ddiff, "Expected Entries were not recieved when passing all keyword" - def test_namespace_single_asic(self): + def test_namespace_single_asic(self, match_engine): runner = CliRunner() - result = runner.invoke(dump.state, 
["port", "Ethernet0", "--table", "--key-map", "--namespace", "asic0"]) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--table", "--key-map", "--namespace", "asic0"], obj=match_engine) print(result.output) assert result.output == "Namespace option is not valid for a single-ASIC device\n" - def test_populate_fv_config_file(self): + def test_populate_fv_config_file(self, match_engine): test_data = { "COPP_TRAP": { "bgp": { @@ -226,33 +243,64 @@ def test_populate_fv_config_file(self): with Patcher() as patcher: patcher.fs.create_file("/etc/sonic/copp_cfg.json", contents=json.dumps(test_data)) runner = CliRunner() - result = runner.invoke(dump.state, ["copp", "bgp", "--table", "--db", "CONFIG_FILE"]) + result = runner.invoke(dump.state, ["copp", "bgp", "--table", "--db", "CONFIG_FILE"], obj=match_engine) print(result) print(result.output) assert result.output == table_config_file_copp - @classmethod - def teardown(cls): - print("TEARDOWN") - os.environ["UTILITIES_UNIT_TESTING"] = "0" +@pytest.fixture(scope="class") +def match_engine_masic(): + print("SETUP") + os.environ["VERBOSE"] = "1" + + from ..mock_tables import mock_multi_asic + reload(mock_multi_asic) + from ..mock_tables import dbconnector + dbconnector.load_namespace_config() + + dump_input = os.path.join(os.path.dirname(__file__), "../dump_input/") + dedicated_dbs = {} + + conn = SonicV2Connector() + # popualate the db ,with mock data + db_names = list(dedicated_dbs.keys()) + try: + populate_mock(conn, db_names, dedicated_dbs) + except Exception as e: + assert False, "Mock initialization failed: " + str(e) + + conn_pool = ConnectionPool() + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/default/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/default/appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/default/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/default/asic_db.json") + 
conn_pool.fill(DEFAULT_NAMESPACE, conn_pool.initialize_connector(DEFAULT_NAMESPACE), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache[DEFAULT_NAMESPACE][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/asic0/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/asic0/appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/asic0/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/asic0/asic_db.json") + conn_pool.fill("asic0", conn_pool.initialize_connector("asic0"), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache["asic0"][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/asic1/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/asic1/appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/asic1/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/asic1/asic_db.json") + conn_pool.fill("asic1", conn_pool.initialize_connector("asic1"), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache["asic1"][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + match_engine = MatchEngine(conn_pool) + yield match_engine + print("TEARDOWN") +@pytest.mark.usefixtures("match_engine_masic") class TestDumpStateMultiAsic(object): - @classmethod - def setup_class(cls): - print("SETUP") - os.environ["UTILITIES_UNIT_TESTING"] = "2" - os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic" - from ..mock_tables import mock_multi_asic - reload(mock_multi_asic) - from ..mock_tables import dbconnector - dbconnector.load_namespace_config() - - def test_default_namespace(self): + def test_default_namespace(self, match_engine_masic): runner = CliRunner() - db = Db() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--key-map"], obj=db) + result = runner.invoke(dump.state, ["port", "Ethernet0", 
"--key-map"], obj=match_engine_masic) expected = {"Ethernet0": {"CONFIG_DB": {"keys": ["PORT|Ethernet0"], "tables_not_found": []}, "APPL_DB": {"keys": ["PORT_TABLE:Ethernet0"], "tables_not_found": []}, "ASIC_DB": {"keys": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d", "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4"], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, @@ -261,10 +309,9 @@ def test_default_namespace(self): ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_namespace_asic0(self): + def test_namespace_asic0(self, match_engine_masic): runner = CliRunner() - db = Db() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic0"], obj=db) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic0"], obj=match_engine_masic) expected = {"Ethernet0": {"CONFIG_DB": {"keys": [{"PORT|Ethernet0": {"admin_status": "up", "alias": "Ethernet1/1", "asic_port_name": "Eth0-ASIC0", "description": "ARISTA01T2:Ethernet3/1/1", "lanes": "33,34,35,36", "mtu": "9100", "pfc_asym": "off", "role": "Ext", "speed": "40000"}}], "tables_not_found": []}, "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet0": {"lanes": "33,34,35,36", "description": "ARISTA01T2:Ethernet3/1/1", "pfc_asym": "off", "mtu": "9100", "alias": "Ethernet1/1", "oper_status": "up", "admin_status": "up", "role": "Ext", "speed": "40000", "asic_port_name": "Eth0-ASIC0"}}], "tables_not_found": []}, "ASIC_DB": {"keys": [], "tables_not_found": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", "ASIC_STATE:SAI_OBJECT_TYPE_PORT"]}, "STATE_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}}} @@ -273,10 +320,9 @@ def test_namespace_asic0(self): ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_namespace_asic1(self): + def test_namespace_asic1(self, match_engine_masic): runner = CliRunner() - db = Db() - result = runner.invoke(dump.state, 
["port", "Ethernet-BP256", "--namespace", "asic1"], obj=db) + result = runner.invoke(dump.state, ["port", "Ethernet-BP256", "--namespace", "asic1"], obj=match_engine_masic) expected = {"Ethernet-BP256": {"CONFIG_DB": {"keys": [{"PORT|Ethernet-BP256": {"admin_status": "up", "alias": "Ethernet-BP256", "asic_port_name": "Eth0-ASIC1", "description": "ASIC0:Eth16-ASIC0", "lanes": "61,62,63,64", "mtu": "9100", "pfc_asym": "off", "role": "Int", "speed": "40000"}}], "tables_not_found": []}, "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet-BP256": {"oper_status": "up", "lanes": "61,62,63,64", "description": "ASIC0:Eth16-ASIC0", "pfc_asym": "off", "mtu": "9100", "alias": "Ethernet-BP256", "admin_status": "up", "speed": "40000", "asic_port_name": "Eth0-ASIC1"}}], "tables_not_found": []}, @@ -286,13 +332,8 @@ def test_namespace_asic1(self): ddiff = compare_json_output(expected, result.output) assert not ddiff, ddiff - def test_invalid_namespace(self): + def test_invalid_namespace(self, match_engine_masic): runner = CliRunner() - db = Db() - result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic3"], obj=db) + result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic3"], obj=match_engine_masic) assert result.output == "Namespace option is not valid. 
Choose one of ['asic0', 'asic1']\n", result - def teardown_class(cls): - print("TEARDOWN") - os.environ["UTILITIES_UNIT_TESTING"] = "0" - os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" diff --git a/tests/dump_tests/match_engine_test.py b/tests/dump_tests/match_engine_test.py index 2085dc3e4d..be4a8f6f45 100644 --- a/tests/dump_tests/match_engine_test.py +++ b/tests/dump_tests/match_engine_test.py @@ -2,31 +2,67 @@ import sys import unittest import pytest -from dump.match_infra import MatchEngine, EXCEP_DICT, MatchRequest, MatchRequestOptimizer +from dump.match_infra import MatchEngine, EXCEP_DICT, MatchRequest, MatchRequestOptimizer, ConnectionPool, CONN +from utilities_common.constants import DEFAULT_NAMESPACE +from dump.helper import populate_mock from unittest.mock import MagicMock from deepdiff import DeepDiff from importlib import reload +from swsscommon.swsscommon import SonicV2Connector +from ..mock_tables import dbconnector + test_path = os.path.join(os.path.dirname(__file__), "../") dump_test_input = os.path.join(test_path, "dump_input") -sys.path.append(test_path) - - @pytest.fixture(scope="module", autouse=True) -def mock_setup(): +def match_engine(): print("SETUP") os.environ["VERBOSE"] = "1" - yield - print("TEARDOWN") - os.environ["VERBOSE"] = "0" + from ..mock_tables import mock_multi_asic + reload(mock_multi_asic) + from ..mock_tables import dbconnector + dbconnector.load_namespace_config() + + dump_input = os.path.join(os.path.dirname(__file__), "../dump_input/") + dedicated_dbs = {} + + conn = SonicV2Connector() + # populate the db with mock data + db_names = list(dedicated_dbs.keys()) + try: + populate_mock(conn, db_names, dedicated_dbs) + except Exception as e: + assert False, "Mock initialization failed: " + str(e) + + conn_pool = ConnectionPool() + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/default/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/default/appl_db.json") +
dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/default/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/default/asic_db.json") + conn_pool.fill(DEFAULT_NAMESPACE, conn_pool.initialize_connector(DEFAULT_NAMESPACE), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache[DEFAULT_NAMESPACE][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/asic0/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/asic0/appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/asic0/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/asic0/asic_db.json") + conn_pool.fill("asic0", conn_pool.initialize_connector("asic0"), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache["asic0"][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + + dedicated_dbs['CONFIG_DB'] = os.path.join(dump_input, "dump/asic1/config_db.json") + dedicated_dbs['APPL_DB'] = os.path.join(dump_input, "dump/asic1/appl_db.json") + dedicated_dbs['STATE_DB'] = os.path.join(dump_input, "dump/asic1/state_db.json") + dedicated_dbs['ASIC_DB'] = os.path.join(dump_input, "dump/asic1/asic_db.json") + conn_pool.fill("asic1", conn_pool.initialize_connector("asic1"), list(dedicated_dbs.keys())) + populate_mock(conn_pool.cache["asic1"][CONN], list(dedicated_dbs.keys()), dedicated_dbs) + + match_engine = MatchEngine(conn_pool) + yield match_engine + print("TEARDOWN") -class TestMatchRequestValidation(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestMatchRequestValidation, self).__init__(*args, **kwargs) - self.match_engine = MatchEngine() +class TestMatchRequestValidation: def assertRaisesWithMessage(self, msg, func, *args, **kwargs): try: @@ -36,9 +72,9 @@ def assertRaisesWithMessage(self, msg, func, *args, **kwargs): print(inst) assert msg in str(inst) - def test_bad_request(self): + def test_bad_request(self, match_engine): req = 
[] - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["INV_REQ"] def test_no_source(self): @@ -57,9 +93,9 @@ def test_invalid_namespace(self): self.assertRaisesWithMessage(EXCEP_DICT["INV_NS"], MatchRequest, db="APPL_DB", table="PORT_TABLE", field="lanes", value="202", ns="asic4") - def test_bad_key_pattern(self): + def test_bad_key_pattern(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_KEY"] def test_no_value(self): @@ -72,9 +108,9 @@ def test_just_keys_return_fields_compat(self): self.assertRaisesWithMessage(EXCEP_DICT["JUST_KEYS_COMPAT"], MatchRequest, db="APPL_DB", return_fields=["trap_group"], table="COPP_TABLE", key_pattern="*", field="trap_ids", value="", just_keys=False) - def test_invalid_combination(self): + def test_invalid_combination(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="COPP_TRAP", key_pattern="*", field="trap_ids", value="sample_packet") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_MATCHES"] def test_return_fields_bad_format(self): @@ -86,94 +122,90 @@ def test_valid_match_request(self): except Exception as e: assert False, "Exception Raised for a Valid MatchRequest" + str(e) +@pytest.mark.usefixtures("match_engine") +class TestMatchEngine: -class TestMatchEngine(unittest.TestCase): - - def __init__(self, *args, **kwargs): - super(TestMatchEngine, self).__init__(*args, **kwargs) - self.match_engine = MatchEngine() - - def test_key_pattern_wildcard(self): + def test_key_pattern_wildcard(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="SFLOW_COLLECTOR", key_pattern="*") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 2 assert "SFLOW_COLLECTOR|ser5" in ret['keys'] assert 
"SFLOW_COLLECTOR|prod" in ret['keys'] - def test_key_pattern_complex(self): + def test_key_pattern_complex(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="ACL_RULE", key_pattern="EVERFLOW*") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 2 assert "ACL_RULE|EVERFLOW|RULE_6" in ret['keys'] assert "ACL_RULE|EVERFLOW|RULE_08" in ret['keys'] - def test_field_value_match(self): + def test_field_value_match(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="ACL_TABLE", field="policy_desc", value="SSH_ONLY") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "ACL_TABLE|SSH_ONLY" in ret['keys'] - def test_field_value_match_list_type(self): + def test_field_value_match_list_type(self, match_engine): req = MatchRequest(db="APPL_DB", table="PORT_TABLE", field="lanes", value="202") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "PORT_TABLE:Ethernet200" in ret['keys'] - def test_for_no_match(self): + def test_for_no_match(self, match_engine): req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", field="SAI_SWITCH_ATTR_SRC_MAC_ADDRESS", value="DE:AD:EE:EE:EE") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] assert len(ret["keys"]) == 0 - def test_for_no_key_match(self): + def test_for_no_key_match(self, match_engine): req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", key_pattern="oid:0x22*") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_MATCHES"] - def test_field_value_no_match(self): + def test_field_value_no_match(self, match_engine): req = MatchRequest(db="STATE_DB", table="FAN_INFO", key_pattern="*", field="led_status", value="yellow") - 
ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] assert len(ret["keys"]) == 0 - def test_return_keys(self): + def test_return_keys(self, match_engine): req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", return_fields=["cause"]) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 2 assert "warm-reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_04_53_58"]["cause"] assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] - def test_return_fields_with_key_filtering(self): + def test_return_fields_with_key_filtering(self, match_engine): req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", key_pattern="2020_10_09_02*", return_fields=["cause"]) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] - def test_return_fields_with_field_value_filtering(self): + def test_return_fields_with_field_value_filtering(self, match_engine): req = MatchRequest(db="STATE_DB", table="CHASSIS_MODULE_TABLE", field="oper_status", value="Offline", return_fields=["slot"]) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "18" == ret["return_values"]["CHASSIS_MODULE_TABLE|FABRIC-CARD1"]["slot"] - def test_return_fields_with_all_filtering(self): + def test_return_fields_with_all_filtering(self, match_engine): req = MatchRequest(db="STATE_DB", table="VXLAN_TUNNEL_TABLE", key_pattern="EVPN_25.25.25.2*", field="operstatus", value="down", return_fields=["src_ip"]) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 3 assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.25"]["src_ip"] assert "1.1.1.1" 
== ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.26"]["src_ip"] assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.27"]["src_ip"] - def test_just_keys_false(self): + def test_just_keys_false(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="SFLOW", key_pattern="global", just_keys=False) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 recv_dict = ret["keys"][0] @@ -182,26 +214,26 @@ def test_just_keys_false(self): ddiff = DeepDiff(exp_dict, recv_dict) assert not ddiff, ddiff - def test_file_source(self): + def test_file_source(self, match_engine): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_TRAP", field="trap_ids", value="arp_req") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "COPP_TRAP|arp" in ret["keys"] - def test_file_source_with_key_ptrn(self): + def test_file_source_with_key_ptrn(self, match_engine): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop") - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "COPP_GROUP|queue4_group2" in ret["keys"] - def test_file_source_with_not_only_return_keys(self): + def test_file_source_with_not_only_return_keys(self, match_engine): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop", just_keys=False) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 recv_dict = ret["keys"][0] @@ -209,32 +241,17 @@ def test_file_source_with_not_only_return_keys(self): ddiff = DeepDiff(exp_dict, recv_dict) assert not ddiff, 
ddiff - def test_match_entire_list(self): + def test_match_entire_list(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="lanes", value="61,62,63,64", match_entire_list=True, just_keys=True) - ret = self.match_engine.fetch(req) + ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "PORT|Ethernet60" in ret["keys"] +@pytest.mark.usefixtures("match_engine") +class TestNonDefaultNameSpace: -class TestNonDefaultNameSpace(unittest.TestCase): - - @classmethod - def setup_class(cls): - print("SETUP") - os.environ["UTILITIES_UNIT_TESTING"] = "2" - os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic" - from ..mock_tables import mock_multi_asic - reload(mock_multi_asic) - from ..mock_tables import dbconnector - dbconnector.load_namespace_config() - - def teardown_class(cls): - print("TEARDOWN") - os.environ["UTILITIES_UNIT_TESTING"] = "0" - os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" - - def test_namespace_asic0(self): + def test_namespace_asic0(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="asic_port_name", value="Eth0-ASIC0", ns="asic0") match_engine = MatchEngine() ret = match_engine.fetch(req) @@ -242,7 +259,7 @@ def test_namespace_asic0(self): assert len(ret["keys"]) == 1 assert "PORT|Ethernet0" in ret["keys"] - def test_namespace_asic1(self): + def test_namespace_asic1(self, match_engine): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="Ethernet-BP256", ns="asic1") match_engine = MatchEngine() ret = match_engine.fetch(req) @@ -250,7 +267,7 @@ def test_namespace_asic1(self): assert len(ret["keys"]) == 1 assert "PORT|Ethernet-BP256" in ret["keys"] -class TestMatchEngineOptimizer(unittest.TestCase): +class TestMatchEngineOptimizer: def test_caching(self): rv = {"COPP_GROUP|queue4_group2": {"trap_action": "copy", "trap_priority": "4", "queue": "4", "meter_type": "packets", "mode": "sr_tcm", "cir": "600", "cbs": "600", 
"red_action": "drop"}} diff --git a/tests/dump_tests/module_tests/acl_test.py b/tests/dump_tests/module_tests/acl_test.py index 67698b034a..105b948a53 100755 --- a/tests/dump_tests/module_tests/acl_test.py +++ b/tests/dump_tests/module_tests/acl_test.py @@ -6,6 +6,7 @@ from dump.plugins.acl_rule import Acl_Rule from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -38,9 +39,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/copp_test.py b/tests/dump_tests/module_tests/copp_test.py index 37e9e53c91..56ed3dc63c 100644 --- a/tests/dump_tests/module_tests/copp_test.py +++ b/tests/dump_tests/module_tests/copp_test.py @@ -10,6 +10,7 @@ from dump.plugins.copp import Copp from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -45,9 +46,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/evpn_test.py b/tests/dump_tests/module_tests/evpn_test.py index ff4085f0f6..822b5c8069 100644 --- a/tests/dump_tests/module_tests/evpn_test.py +++ b/tests/dump_tests/module_tests/evpn_test.py @@ -6,6 +6,7 @@ 
from dump.plugins.evpn import Evpn from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -40,9 +41,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/fdb_test.py b/tests/dump_tests/module_tests/fdb_test.py index 7e2e8b88ed..52b705fb39 100644 --- a/tests/dump_tests/module_tests/fdb_test.py +++ b/tests/dump_tests/module_tests/fdb_test.py @@ -10,6 +10,7 @@ from dump.plugins.fdb import Fdb from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -45,9 +46,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/interface_test.py b/tests/dump_tests/module_tests/interface_test.py index 969272b069..ec5ef6998a 100644 --- a/tests/dump_tests/module_tests/interface_test.py +++ b/tests/dump_tests/module_tests/interface_test.py @@ -10,6 +10,7 @@ from dump.plugins.interface import Interface from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT 
module_tests_path = os.path.dirname(__file__) @@ -45,9 +46,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/port_test.py b/tests/dump_tests/module_tests/port_test.py index 011f401478..1021e3a044 100644 --- a/tests/dump_tests/module_tests/port_test.py +++ b/tests/dump_tests/module_tests/port_test.py @@ -10,6 +10,7 @@ from dump.plugins.port import Port from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -45,9 +46,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/portchannel_member_test.py b/tests/dump_tests/module_tests/portchannel_member_test.py index 8af90968e5..029aba1ed3 100644 --- a/tests/dump_tests/module_tests/portchannel_member_test.py +++ b/tests/dump_tests/module_tests/portchannel_member_test.py @@ -8,7 +8,7 @@ from dump.plugins.portchannel_member import Portchannel_Member from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector - +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -44,9 +44,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - 
conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/portchannel_test.py b/tests/dump_tests/module_tests/portchannel_test.py index 9ae2b54cd2..0512e50829 100644 --- a/tests/dump_tests/module_tests/portchannel_test.py +++ b/tests/dump_tests/module_tests/portchannel_test.py @@ -8,7 +8,7 @@ from dump.plugins.portchannel import Portchannel from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector - +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -44,9 +44,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/route_test.py b/tests/dump_tests/module_tests/route_test.py index 22707593b0..118e6aeedc 100644 --- a/tests/dump_tests/module_tests/route_test.py +++ b/tests/dump_tests/module_tests/route_test.py @@ -8,7 +8,7 @@ from dump.plugins.route import Route from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector - +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -43,9 +43,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git 
a/tests/dump_tests/module_tests/vlan_test.py b/tests/dump_tests/module_tests/vlan_test.py index c0451ca44a..5c991436ec 100644 --- a/tests/dump_tests/module_tests/vlan_test.py +++ b/tests/dump_tests/module_tests/vlan_test.py @@ -9,6 +9,7 @@ from dump.plugins.vlan_member import Vlan_Member from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE module_tests_path = os.path.dirname(__file__) dump_tests_path = os.path.join(module_tests_path, "../") @@ -50,9 +51,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/vxlan_tunnel_map_test.py b/tests/dump_tests/module_tests/vxlan_tunnel_map_test.py index 157eb8f539..e98034f2dd 100644 --- a/tests/dump_tests/module_tests/vxlan_tunnel_map_test.py +++ b/tests/dump_tests/module_tests/vxlan_tunnel_map_test.py @@ -6,6 +6,7 @@ from dump.plugins.vxlan_tunnel_map import Vxlan_tunnel_map from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -40,9 +41,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool) diff --git a/tests/dump_tests/module_tests/vxlan_tunnel_test.py b/tests/dump_tests/module_tests/vxlan_tunnel_test.py index a3c7f09a70..9ebdc8bf0a 100644 --- 
a/tests/dump_tests/module_tests/vxlan_tunnel_test.py +++ b/tests/dump_tests/module_tests/vxlan_tunnel_test.py @@ -6,6 +6,7 @@ from dump.plugins.vxlan_tunnel import Vxlan_tunnel from dump.match_infra import MatchEngine, ConnectionPool from swsscommon.swsscommon import SonicV2Connector +from utilities_common.constants import DEFAULT_NAMESPACE # Location for dedicated db's used for UT module_tests_path = os.path.dirname(__file__) @@ -40,9 +41,7 @@ def match_engine(): # Initialize connection pool conn_pool = ConnectionPool() - DEF_NS = '' # Default Namespace - conn_pool.cache = {DEF_NS: {'conn': db, - 'connected_to': set(db_names)}} + conn_pool.fill(DEFAULT_NAMESPACE, db, db_names) # Initialize match_engine match_engine = MatchEngine(conn_pool)