"brian_polling_manager/interface_stats/nokia.py" did not exist on "6bb115f52088ffdf37777649d5a48ada2ce46bed"
Select Git revision
svg_replace.js
# conftest.py
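"""Shared pytest fixtures and test data helpers for the interface_stats test suite."""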
import json
import pathlib

import pytest

from brian_polling_manager.interface_stats.cli import set_app_params

DATA_DIR = pathlib.Path(__file__).parent / "data"

JUNIPER_DATA_FILENAME_EXTENSION = "-interface-info.xml"
# Juniper router fqdns, derived from the "<fqdn>-interface-info.xml" files in DATA_DIR.
JUNIPER_ROUTERS = [
    path.name[: -len(JUNIPER_DATA_FILENAME_EXTENSION)]
    for path in DATA_DIR.iterdir()
    if path.name.endswith(JUNIPER_DATA_FILENAME_EXTENSION)
]

# Nokia router fqdns, derived from the "<fqdn>-ports.xml" and "<fqdn>-lags.xml" files
# in DATA_DIR; the set comprehension deduplicates routers that have both files.
NOKIA_ROUTERS = list({
    path.name[: -len(suffix)]
    for suffix in {"-ports.xml", "-lags.xml"}
    for path in DATA_DIR.iterdir()
    if path.name.endswith(suffix)
})


@pytest.fixture
def data_dir():
    return DATA_DIR


@pytest.fixture(autouse=True)
def app_params():
    """
    ER: I think this is a smell: putting special-purpose code in the production
    release that runs iff a condition path that only runs under test is not taken.
    Mocking isn't an anti-pattern, and "explicit is better than implicit".

    (An illustrative sketch of the explicit-mocking alternative follows this
    fixture.)
    """
    params = {
        "testing": {
            "dry_run": True,
            "no-out": False,
            "netconf-source-dir": DATA_DIR,
        }
    }
    set_app_params(params)
    yield params
    set_app_params({})
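

# Hedged sketch of the explicit-mocking alternative ER suggests above: instead of a
# test-only code path in the production release, a fixture could patch whatever
# function fetches live netconf data. The patch target `get_netconf` is a
# hypothetical name, not a confirmed part of this package:
#
#     from unittest import mock
#
#     @pytest.fixture
#     def mocked_netconf():
#         def _from_file(router, *args, **kwargs):
#             return (DATA_DIR / (router + "-interface-info.xml")).read_text()
#
#         with mock.patch(
#             "brian_polling_manager.interface_stats.get_netconf",  # hypothetical target
#             side_effect=_from_file,
#         ) as patched:
#             yield patched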


def poller_interfaces():
    file = DATA_DIR / "poller-interfaces.json"
    return json.loads(file.read_text())


@pytest.fixture(scope="session")
def polled_interfaces():
    """Map each router fqdn to the set of its interface names that appear on a dashboard."""
    polled = {}
    for ifc in poller_interfaces():
        if ifc["dashboards"]:
            polled.setdefault(ifc["router"], set()).add(ifc["name"])
    return polled


@pytest.fixture
def all_juniper_routers():
    return JUNIPER_ROUTERS


@pytest.fixture
def all_nokia_routers():
    return NOKIA_ROUTERS


@pytest.fixture(params=JUNIPER_ROUTERS)
def juniper_router_fqdn(request):
    return request.param


@pytest.fixture(params=NOKIA_ROUTERS)
def nokia_router_fqdn(request):
    return request.param


@pytest.fixture()
def single_router_fqdn():
    return JUNIPER_ROUTERS[0]
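

# Minimal usage sketch (illustrative only, not part of the original file): a test
# module in this suite could consume the parametrized fixtures above roughly like
# this; the test name and assertion are assumptions:
#
#     def test_juniper_data_file_exists(juniper_router_fqdn, data_dir):
#         xml_file = data_dir / (juniper_router_fqdn + "-interface-info.xml")
#         assert xml_file.exists()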