diff --git a/brian_dashboard_manager/grafana/provision.py b/brian_dashboard_manager/grafana/provision.py
index aeab79ee54f7fd26d82c5298c43ce6c88c11872c..0259d48414956946ce6b2f990af970e0a195e1ed 100644
--- a/brian_dashboard_manager/grafana/provision.py
+++ b/brian_dashboard_manager/grafana/provision.py
@@ -33,13 +33,14 @@ from brian_dashboard_manager.templating.helpers import \
     get_aggregate_dashboard_data, get_interface_data, \
     get_nren_interface_data, get_dashboard_data, \
     get_nren_dashboard_data, get_aggregate_interface_data, \
-    get_nren_interface_data_old
+    get_nren_interface_data_old, get_re_peer_dashboard_data, get_re_peer_interface_data
 
-from brian_dashboard_manager.templating.gws import generate_gws, \
-    generate_indirect
-from brian_dashboard_manager.templating.eumetsat \
-    import generate_eumetsat_multicast
-from brian_dashboard_manager.templating.render import render_dashboard
+from brian_dashboard_manager.templating.gws import generate_gws, generate_indirect
+from brian_dashboard_manager.templating.eumetsat import generate_eumetsat_multicast
+from brian_dashboard_manager.templating.render import (
+    render_complex_dashboard,
+    render_simple_dashboard,
+)
 
 logger = logging.getLogger(__name__)
 
@@ -214,10 +215,10 @@ def provision_folder(token_request, folder_name, dash,
     # dashboard should include error panels
     errors = dash.get('errors', False)
 
-    # needed for POL1-642 BETA
-    is_nren_beta = folder_name == 'NREN Access BETA'
+    is_nren_beta = folder_name == "NREN Access BETA"  # needed for POL1-642 BETA
+    is_nren = folder_name == "NREN Access"
+    is_re_peer = folder_name == "RE Peer"
 
-    is_nren = folder_name == 'NREN Access'
     if is_nren:
         data = get_nren_interface_data_old(interfaces)
         dash_data = get_nren_dashboard_data(data, ds_name, tag)
@@ -227,6 +228,9 @@ def provision_folder(token_request, folder_name, dash,
         data = get_nren_interface_data(
             services, interfaces, excluded_dashboards)
         dash_data = get_nren_dashboard_data(data, ds_name, tag)
+    elif is_re_peer:
+        data = get_re_peer_interface_data(interfaces)
+        dash_data = get_re_peer_dashboard_data(data, ds_name, tag)
     else:
         data = get_interface_data(interfaces)
         dash_data = get_dashboard_data(
@@ -239,11 +243,13 @@ def provision_folder(token_request, folder_name, dash,
 
     with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
         for dashboard in dash_data:
-            rendered = render_dashboard(
-                dashboard, nren=is_nren or is_nren_beta)
-            if rendered.get('title').lower() in excluded_dashboards:
-                executor.submit(delete_dashboard, token_request,
-                                rendered, folder['id'])
+            if is_nren or is_nren_beta or is_re_peer:
+                rendered = render_complex_dashboard(**dashboard)
+            else:
+                rendered = render_simple_dashboard(**dashboard)
+
+            if rendered.get("title").lower() in excluded_dashboards:
+                executor.submit(delete_dashboard, token_request, rendered, folder["id"])
                 continue
             provisioned.append(executor.submit(create_dashboard, token_request,
                                                rendered, folder['id']))
@@ -273,7 +279,7 @@ def provision_aggregate(token_request, folder,
 
     dashboard = get_aggregate_dashboard_data(
         f'Aggregate - {name}', data, ds_name, tag)
-    rendered = render_dashboard(dashboard)
+    rendered = render_simple_dashboard(**dashboard)
     return create_dashboard(token_request, rendered, folder['id'])
 
 
@@ -460,7 +466,7 @@ def _provision_gws_indirect(config, org_config, ds_name, token):
             provisioned = []
             dashes = generate_indirect(gws_indirect_data, ds_name)
             for dashboard in dashes:
-                rendered = render_dashboard(dashboard)
+                rendered = render_simple_dashboard(**dashboard)
                 provisioned.append(executor.submit(create_dashboard,
                                                    token,
                                                    rendered, folder['id']))
@@ -495,7 +501,7 @@ def _provision_gws_direct(config, org_config, ds_name, token):
 
         provisioned = []
         for dashboard in generate_gws(gws_data, ds_name):
-            rendered = render_dashboard(dashboard)
+            rendered = render_simple_dashboard(**dashboard)
             provisioned.append(executor.submit(create_dashboard,
                                                token,
                                                rendered, folder['id']))
@@ -530,7 +536,7 @@ def _provision_eumetsat_multicast(config, org_config, ds_name, token):
 
         for dashboard in generate_eumetsat_multicast(
                 subscriptions, ds_name):
-            rendered = render_dashboard(dashboard)
+            rendered = render_simple_dashboard(**dashboard)
             provisioned.append(
                 executor.submit(
                     create_dashboard,
diff --git a/brian_dashboard_manager/templating/helpers.py b/brian_dashboard_manager/templating/helpers.py
index b4ccaceb9135dc056d099dfc1bf1ff7c8d31c143..31c47cf52313f91c65792d1aee8b030f3754e038 100644
--- a/brian_dashboard_manager/templating/helpers.py
+++ b/brian_dashboard_manager/templating/helpers.py
@@ -152,6 +152,70 @@ def get_nren_interface_data_old(interfaces):
     return result
 
 
+def get_re_peer_interface_data(interfaces):
+    """
+    Helper for grouping interfaces into groups of R&E Peers
+    See POL1-579
+    Aggregate (AGGREGATES) all service interfaces (logical) (ipv4 only)
+    Services (SERVICES) contain all logical interfaces (both ipv4 and ipv6)
+    Interfaces (PHYSICAL) contain physical interfaces and LAGs (AGGREGATE)
+    """
+    result = {}
+
+    for interface in interfaces:
+
+        description = interface['description'].strip()
+        interface_name = interface['name']
+        host = interface['router']
+
+        router = host.replace('.geant.net', '')
+        location = host.split('.')[1].upper()
+        panel_title = f"{router} - {{}} - {interface_name} - {description}"
+
+        dashboards_info = interface['dashboards_info']
+
+        for info in dashboards_info:
+            dashboard_name = info['name']
+
+            dashboard = result.get(dashboard_name, {
+                'AGGREGATES': [],
+                'SERVICES': [],
+                'PHYSICAL': []
+            })
+
+            if info['interface_type'] == 'AGGREGATE':
+                # link aggregates are shown under the physical dropdown
+                dashboard['PHYSICAL'].append({
+                    'title': panel_title,
+                    'hostname': host,
+                    'interface': interface_name
+                })
+
+            elif info['interface_type'] == 'LOGICAL':
+                dashboard['AGGREGATES'].append({
+                    'interface': interface_name,
+                    'hostname': host,
+                    'alias':
+                        f"{location} - {dashboard_name} ({interface_name})"
+                })
+
+                dashboard['SERVICES'].append({
+                    'title': panel_title,
+                    'hostname': host,
+                    'interface': interface_name,
+                    'has_v6': len(interface.get('ipv6', [])) > 0
+                })
+            elif info['interface_type'] == 'PHYSICAL':
+                dashboard['PHYSICAL'].append({
+                    'title': panel_title,
+                    'hostname': host,
+                    'interface': interface_name
+                })
+
+            result[dashboard_name] = dashboard
+    return result
+
+
 def get_nren_interface_data(services, interfaces, excluded_dashboards):
     """
     Helper for grouping interface data to be used for generating
@@ -652,6 +716,100 @@ def get_nren_dashboard_data(data, datasource, tag):
             yield dash
 
 
+def get_re_peer_dashboard_data_single(data, datasource, tag):
+    """
+    Helper for generating dashboard definitions for a single R&E Peer.
+
+    R&E Peer dashboards have two aggregate panels (ingress and egress),
+    and two dropdown panels for services and interfaces.
+
+    :param data: data for the dashboard, including the R&E Peer name and
+    the panel data
+    :param datasource: datasource to use for the panels
+    :param tag: tag to use for the dashboard, used for dashboard dropdowns on
+    the home dashboard.
+
+    :return: dashboard definition for the R&E Peer dashboard
+    """
+
+    peer, dash = data
+    id_gen = num_generator()
+
+    if len(dash['AGGREGATES']) > 0:
+        agg_panels = create_aggregate_panel(
+            f'Aggregate - {peer}',
+            gridPos_generator(id_gen, agg=True),
+            dash['AGGREGATES'], datasource)
+        gridPos = gridPos_generator(id_gen, start=2)
+    else:
+        gridPos = gridPos_generator(id_gen)
+        agg_panels = []
+
+    panel_gen = default_interface_panel_generator(gridPos, use_all_traffic=True, use_ipv6=True)
+
+    services_dropdown = create_dropdown_panel('Services', **next(gridPos))
+
+    def sort_key(panel):
+        sort = panel.get('sort')
+        if not sort:
+            return 'ZZZ'+panel.get('hostname')  # sort to end
+        return sort
+
+    service_panels = panel_gen(
+        sorted(dash['SERVICES'], key=sort_key), datasource)
+
+    iface_dropdown = create_dropdown_panel('Interfaces', **next(gridPos))
+    phys_panels = panel_gen(dash['PHYSICAL'], datasource, True)
+
+    dropdown_groups = [{
+        'dropdown': services_dropdown,
+        'panels': service_panels,
+    }]
+
+    dropdown_groups.append({
+        'dropdown': iface_dropdown,
+        'panels': phys_panels,
+    })
+
+    result = {
+        'nren_name': peer,
+        'datasource': datasource,
+        'aggregate_panels': agg_panels,
+        'dropdown_groups': dropdown_groups
+    }
+    if isinstance(tag, list):
+        result['tags'] = tag
+    else:
+        result['tag'] = tag
+
+    return result
+
+
+def get_re_peer_dashboard_data(data, datasource, tag):
+    """
+    Helper for generating dashboard definitions for all R&E Peers.
+    Uses multiprocessing to speed up generation.
+
+    :param data: the names and the panel data for each R&E Peer
+    :param datasource: datasource to use for the panels
+    :param tag: tag to use for the dashboard, used for dashboard dropdowns on
+    the home dashboard.
+
+    :return: generator for dashboard definitions for each R&E Peer
+    """
+
+    with ProcessPoolExecutor(max_workers=NUM_PROCESSES) as executor:
+        for dash in executor.map(
+            partial(
+                get_re_peer_dashboard_data_single,
+                datasource=datasource,
+                tag=tag),
+            data.items()
+        ):
+
+            yield dash
+
+
 def get_dashboard_data_single(
         data, datasource, tag,
         panel_generator=default_interface_panel_generator,
diff --git a/brian_dashboard_manager/templating/render.py b/brian_dashboard_manager/templating/render.py
index e77573a2343c884a57b5fc3e55973c2b53bf9756..19b4cdfab4d7ac3d84fb0de47ad6d1d135f598ea 100644
--- a/brian_dashboard_manager/templating/render.py
+++ b/brian_dashboard_manager/templating/render.py
@@ -259,39 +259,23 @@ def create_panel(
     return result
 
 
-def render_dashboard(dashboard, nren=False):
-    """
-    Renders the dashboard template using the given data.
-    NREN dashboards are rendered using a different template that uses
-    a different layout than other dashboards.
-
-    :param dashboard: data to be used in the template
-    :param nren: whether the dashboard is an NREN dashboard
-
-    :return: rendered dashboard JSON
-    """
-
-    if nren:
-        return render_nren_dashboard(**dashboard)
-    else:
-        return render_standard_dashboard(**dashboard)
+def create_infobox():
+    return {
+        "datasource": None,
+        "gridPos": {"h": 1, "w": 24, "x": 0, "y": 0},
+        "id": 1,
+        "options": {"content": "", "mode": "html"},
+        "pluginVersion": "8.2.5",
+        "title": "INFO: The average values displayed are only mean values for timescales of 2 days or less",
+        "type": "text",
+    }
 
 
-def render_nren_dashboard(
+def render_complex_dashboard(
     nren_name, aggregate_panels, dropdown_groups, tag=None, tags=None, **_
 ):
     assert tag or tags
-    panels = [
-        {
-            "datasource": None,
-            "gridPos": {"h": 1, "w": 24, "x": 0, "y": 0},
-            "id": 1,
-            "options": {"content": "", "mode": "html"},
-            "pluginVersion": "8.2.5",
-            "title": "INFO: The average values displayed are only mean values for timescales of 2 days or less",
-            "type": "text",
-        }
-    ]
+    panels = [create_infobox()]
     panels.extend(aggregate_panels)
     for group in dropdown_groups:
         panels.append(group["dropdown"])
@@ -330,7 +314,7 @@ def render_nren_dashboard(
     }
 
 
-def render_standard_dashboard(title, tag=None, tags=None, panels=None, **_):
+def render_simple_dashboard(title, tag=None, tags=None, panels=None, **_):
     assert tag or tags
     return {
         "id": None,
@@ -362,15 +346,7 @@ def render_standard_dashboard(title, tag=None, tags=None, panels=None, **_):
         "version": 1,
         "links": [],
         "panels": [
-            {
-                "datasource": None,
-                "gridPos": {"h": 1, "w": 24, "x": 0, "y": 0},
-                "id": 1,
-                "options": {"content": "", "mode": "html"},
-                "pluginVersion": "8.2.5",
-                "title": "INFO: The average values displayed are only mean values for timescales of 2 days or less",
-                "type": "text",
-            },
+            create_infobox(),
             *(panels or []),
         ],
     }
diff --git a/test/conftest.py b/test/conftest.py
index f071048336179bf35eac2005305bd7272cc9443f..dbc0f52723b405677db6d72a7fef7ecf827d9981 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -3,9 +3,10 @@ import datetime
 import itertools
 import json
 import os
+import pathlib
 import re
 import string
-import tempfile
+import threading
 from brian_dashboard_manager import environment
 from brian_dashboard_manager.grafana.utils.request import TokenRequest
 import pytest
@@ -59,21 +60,22 @@ def data_config():
     }
 
 
-def get_test_data(filename):
-    data_filename = os.path.join(
-        os.path.dirname(__file__),
-        'data',
-        filename)
-    with open(data_filename) as f:
-        return json.loads(f.read())
+DATA_DIR = pathlib.Path(__file__).parent / "data"
 
 
 @pytest.fixture
-def data_config_filename(data_config):
-    with tempfile.NamedTemporaryFile() as f:
-        f.write(json.dumps(data_config).encode('utf-8'))
-        f.flush()
-        yield f.name
+def get_test_data():
+    def _get_test_data(filename):
+        return json.loads((DATA_DIR / filename).read_text())
+
+    return _get_test_data
+
+
+@pytest.fixture
+def data_config_filename(data_config, tmp_path):
+    file = tmp_path / "data_config.json"
+    file.write_text(json.dumps(data_config))
+    return str(file)
 
 
 @pytest.fixture
@@ -92,10 +94,20 @@ def mock_grafana(data_config):
         for result in itertools.permutations(string.ascii_lowercase, 8)
     )
 
+    lock = threading.RLock()
+
+    def synchronized(fun):
+        def _sync(*args, **kwargs):
+            with lock:
+                return fun(*args, **kwargs)
+
+        return _sync
+
     class MockGrafana:
         def __init__(self) -> None:
             self.folders = {}
             self.dashboards = {}
+            self.dashboards_by_folder_uid = {}
             self.datasources = {}
             self.organizations = {}
             self.api_tokens = {}
@@ -103,9 +115,11 @@ def mock_grafana(data_config):
             self.ids = itertools.count(start=1)
             self.request = TokenRequest(data_config["hostname"], token="abc")
 
+        @synchronized
         def list_api_tokens(self):
             return list(copy.copy(val) for val in self.api_tokens.values())
 
+        @synchronized
         def create_api_token(self, payload):
             lifetime = payload.get("secondsToLive")
             return self._create_object(
@@ -123,12 +137,15 @@ def mock_grafana(data_config):
                 self.api_tokens,
             )
 
+        @synchronized
         def delete_api_token(self, uid):
            return self._delete_object(uid, self.api_tokens, name="api token")
 
+        @synchronized
         def list_organizations(self):
             return list(copy.copy(val) for val in self.organizations.values())
 
+        @synchronized
         def create_organization(self, organization):
             organization = {
                 **organization,
@@ -141,27 +158,35 @@ def mock_grafana(data_config):
                 "message": "Organization created",
             }
 
+        @synchronized
         def delete_organization(self, uid):
             return self._delete_object(uid, self.organizations, name="organization")
 
+        @synchronized
         def list_datasources(self):
             return list(copy.copy(val) for val in self.datasources.values())
 
+        @synchronized
         def create_datasource(self, datasource):
             return self._create_object(datasource, self.datasources)
 
+        @synchronized
         def delete_datasource(self, uid):
             return self._delete_object(uid, self.datasources, name="datasource")
 
+        @synchronized
         def list_folders(self):
             return list(self.folders.values())
 
+        @synchronized
         def create_folder(self, folder):
             return self._create_object(folder, self.folders)
 
+        @synchronized
         def delete_folder(self, uid):
             return self._delete_object(uid, self.folders, name="folder")
 
+        @synchronized
         def list_dashboards(self, title=None, folder_id=None):
             return [
                 copy.copy(db)
@@ -170,11 +195,31 @@ def mock_grafana(data_config):
                 and (folder_id is None or db.get("folderId") == folder_id)
             ]
 
-        def create_dashboard(self, dashboard):
-            return self._create_object(dashboard, self.dashboards)
-
+        @synchronized
+        def create_dashboard(self, dashboard, folder_id=None):
+            result = self._create_object(dashboard, self.dashboards)
+            folder_uid = next(
+                iter(f["uid"] for f in self.folders.values() if f["id"] == folder_id),
+                None,
+            )
+            for idx, db in enumerate(self.dashboards_by_folder_uid.get(folder_uid, [])):
+                if db["uid"] == result["uid"]:
+                    self.dashboards_by_folder_uid[folder_uid].pop(idx)
+                    self.dashboards_by_folder_uid[folder_uid].insert(idx, result)
+                    break
+            else:
+                self.dashboards_by_folder_uid.setdefault(folder_uid, []).append(result)
+            return result
+
+        @synchronized
         def delete_dashboard(self, uid):
-            return self._delete_object(uid, self.dashboards, name="dashboard")
+            result = self._delete_object(uid, self.dashboards, name="dashboard")
+            for dashboards in self.dashboards_by_folder_uid.values():
+                for idx, db in enumerate(dashboards):
+                    if db["uid"] == result["uid"]:
+                        dashboards.pop(idx)
+                        break
+            return result
 
         def _create_object(self, obj, all_objects):
             id = obj.get("id")
@@ -329,7 +374,6 @@ def mock_grafana(data_config):
     @json_request_cb
     def search_dashboard_callback(_, request):
         query = request.params
-        print(query)
         return (
             200,
             grafana.list_dashboards(query.get("title"), query.get("folderIds")),
@@ -344,8 +388,9 @@ def mock_grafana(data_config):
     @json_request_cb
     def create_dashboard_callback(payload, request):
         dashboard = payload["dashboard"]
+        folder_id = payload.get("folderId")
 
-        return (200, grafana.create_dashboard(dashboard))
+        return (200, grafana.create_dashboard(dashboard, folder_id=folder_id))
 
     responses.add_callback(
         method=responses.POST,
diff --git a/test/test_gws_direct.py b/test/test_gws_direct.py
index 0a263c0f33a2c07ae2ec3fcd4c98020c42c1ca78..c8712279c937f760e2b9724a225f2abc04d2a36d 100644
--- a/test/test_gws_direct.py
+++ b/test/test_gws_direct.py
@@ -1,22 +1,17 @@
 import responses
-from test.conftest import get_test_data
 from brian_dashboard_manager.templating.gws import generate_gws
-from brian_dashboard_manager.inventory_provider.interfaces import \
-    get_gws_direct
-
-TEST_DATA = get_test_data('gws-direct-data.json')
+from brian_dashboard_manager.inventory_provider.interfaces import get_gws_direct
 
 
 @responses.activate
-def test_gws(data_config, client):
-
+def test_gws(data_config, get_test_data):
     responses.add(
         method=responses.GET,
         url=f"{data_config['inventory_provider']}/poller/gws/direct",
-        json=TEST_DATA)
+        json=get_test_data("gws-direct-data.json"),
+    )
 
     gws_data = get_gws_direct(data_config['inventory_provider'])
-
     dashboards = list(generate_gws(gws_data, 'testdatasource'))
 
     assert len(dashboards) == 4
diff --git a/test/test_helpers.py b/test/test_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..05c96559636c3aa0f1455ee0dbb94a9a69288b41
--- /dev/null
+++ b/test/test_helpers.py
@@ -0,0 +1,70 @@
+from brian_dashboard_manager.templating.helpers import get_re_peer_interface_data
+import pytest
+
+
+@pytest.mark.parametrize(
+    "interface_type, expected",
+    [
+        (
+            "LOGICAL",
+            {
+                "AGGREGATES": [
+                    {
+                        "interface": "xe-0/0/0.1",
+                        "hostname": "mx1.dub2.ie.geant.net",
+                        "alias": "DUB2 - ESNET (xe-0/0/0.1)",
+                    }
+                ],
+                "SERVICES": [
+                    {
+                        "title": "mx1.dub2.ie - {} - xe-0/0/0.1 - description",
+                        "interface": "xe-0/0/0.1",
+                        "hostname": "mx1.dub2.ie.geant.net",
+                        "has_v6": True,
+                    }
+                ],
+            },
+        ),
+        (
+            "PHYSICAL",
+            {
+                "PHYSICAL": [
+                    {
+                        "title": "mx1.dub2.ie - {} - xe-0/0/0.1 - description",
+                        "interface": "xe-0/0/0.1",
+                        "hostname": "mx1.dub2.ie.geant.net",
+                    }
+                ],
+            },
+        ),
+        (
+            "AGGREGATE",
+            {
+                "PHYSICAL": [
+                    {
+                        "title": "mx1.dub2.ie - {} - xe-0/0/0.1 - description",
+                        "interface": "xe-0/0/0.1",
+                        "hostname": "mx1.dub2.ie.geant.net",
+                    }
+                ],
+            },
+        ),
+    ],
+)
+def test_re_peer_interface_data(interface_type, expected):
+    interfaces = [
+        {
+            "router": "mx1.dub2.ie.geant.net",
+            "name": "xe-0/0/0.1",
+            "description": "description",
+            "dashboards": ["RE_PEER"],
+            "dashboard_info": {"name": "ESNET", "interface_type": interface_type},
+            "dashboards_info": [{"name": "ESNET", "interface_type": interface_type}],
+            "ipv4": ["1.1.1.1"],
+            "ipv6": ["::2"],
+        },
+    ]
+
+    assert get_re_peer_interface_data(interfaces) == {
+        "ESNET": {"AGGREGATES": [], "SERVICES": [], "PHYSICAL": [], **expected}
+    }
diff --git a/test/test_update.py b/test/test_update.py
index 88ce867c3b7528363dede0c5ab378d67962844bd..e407e600de890cfc03178033a42fdb6df6e1486e 100644
--- a/test/test_update.py
+++ b/test/test_update.py
@@ -2,7 +2,6 @@ import pytest
 import responses
 
 from brian_dashboard_manager.grafana.provision import provision_folder, provision
-from test.conftest import get_test_data
 
 TEST_INTERFACES = [
     {
@@ -601,6 +600,32 @@ def generate_folder(data):
     }
 
 
+@pytest.fixture
+def reporting_provider(get_test_data, data_config):
+    responses.add(
+        method=responses.GET,
+        url=f"{data_config['reporting_provider']}/scid/current",
+        json=get_test_data("services.json"),
+    )
+
+
+@pytest.fixture
+def populate_inventory(data_config):
+    """Fixture for mocking the inventory provider. Call it with a dictionary of
+    {url_path: contents}, e.g. {"/poller/interfaces": [...]}
+    """
+
+    def _populate(contents_dict):
+        for path, contents in contents_dict.items():
+            responses.add(
+                method=responses.GET,
+                url=f"{data_config['inventory_provider']}{path}",
+                json=contents,
+            )
+
+    return _populate
+
+
 @responses.activate
 @pytest.mark.parametrize(
     "folder_name, excluded_nrens, expected_nrens",
@@ -621,7 +646,13 @@ def generate_folder(data):
     ],
 )
 def test_provision_nren_folder(
-    folder_name, excluded_nrens, expected_nrens, data_config, mock_grafana
+    folder_name,
+    excluded_nrens,
+    expected_nrens,
+    data_config,
+    mock_grafana,
+    reporting_provider,
+    populate_inventory,
 ):
     dashboards = {
         "NREN": {
@@ -639,10 +670,12 @@ def test_provision_nren_folder(
             ],
         },
     }
-    responses.add(
-        method=responses.GET,
-        url=f"{data_config['reporting_provider']}/scid/current",
-        json=get_test_data("services.json"),
+    populate_inventory(
+        {
+            "/poller/interfaces": NREN_INTERFACES,
+            "/data/interfaces": NREN_INTERFACES,
+            "/poller/eumetsat-multicast": EUMETSAT_MULTICAST,
+        }
     )
 
     result = provision_folder(
@@ -663,28 +696,9 @@ def test_provision_nren_folder(
 
 
 @responses.activate
-def test_provision(data_config, mocker, mock_grafana):
-
-    responses.add(
-        method=responses.GET,
-        url=f"{data_config['reporting_provider']}/scid/current",
-        json=get_test_data('services.json'))
-
-    responses.add(
-        method=responses.GET,
-        url=f"{data_config['inventory_provider']}/poller/interfaces",
-        json=NREN_INTERFACES)
-
-    responses.add(
-        method=responses.GET,
-        url=f"{data_config['inventory_provider']}/data/interfaces",
-        json=NREN_INTERFACES)
-
-    responses.add(
-        method=responses.GET,
-        url=f'{data_config["inventory_provider"]}/poller/eumetsat-multicast',
-        json=EUMETSAT_MULTICAST)
-
+def test_provision(
+    data_config, mocker, mock_grafana, reporting_provider, populate_inventory
+):
     mock_grafana.create_datasource(
         {
             "name": "brian-influx-datasource",
@@ -697,12 +711,17 @@ def test_provision(
             "readOnly": False,
         }
     )
-
+    populate_inventory(
+        {
+            "/poller/interfaces": NREN_INTERFACES,
+            "/data/interfaces": NREN_INTERFACES,
+            "/poller/eumetsat-multicast": EUMETSAT_MULTICAST,
+        }
+    )
     for org in data_config["organizations"][1:]:
         mock_grafana.create_organization(org)
 
-    _mocked_get_dashboard_definitions = mocker.patch(
-        'brian_dashboard_manager.grafana.provision.get_dashboard_definitions')
+    mock_grafana.create_dashboard({"title": "testdashboard", "version": 1})
 
     _mocked_gws = mocker.patch(
         'brian_dashboard_manager.grafana.provision.get_gws_direct')
@@ -712,13 +731,54 @@ def test_provision(
         'brian_dashboard_manager.grafana.provision.get_gws_indirect')
     _mocked_gws_indirect.return_value = []
 
-    UID = 1
-    ID = 1
-    VERSION = 1
-    TITLE = 'testdashboard'
-    dashboard = {'id': ID, 'uid': UID, 'title': TITLE, 'version': VERSION}
-    _mocked_get_dashboard_definitions.return_value = [
-        dashboard  # test dashboard
+    provision(data_config, raise_exceptions=True)
+
+
+@responses.activate
+def test_provision_re_peer_dashboard(
+    mocker, data_config, mock_grafana, reporting_provider, populate_inventory
+):
+    interfaces = [
+        {
+            "router": "mx1.dub2.ie.geant.net",
+            "name": "xe-0/0/0.1",
+            "description": "PHY SVC P_AE10 SRF9948758 | HEANET-AP2-LL3",  # noqa: E501
+            "dashboards": ["RE_PEER"],
+            "dashboard_info": {"name": "ESNET", "interface_type": "LOGICAL"},
+            "dashboards_info": [{"name": "ESNET", "interface_type": "LOGICAL"}],
+            "ipv4": ["1.1.1.1"],
+            "ipv6": ["::2"],
+        },
     ]
+    populate_inventory(
+        {
+            "/poller/interfaces": interfaces,
+            "/data/interfaces": interfaces,
+            "/poller/eumetsat-multicast": EUMETSAT_MULTICAST,
+        }
+    )
+    _mocked_gws = mocker.patch(
+        "brian_dashboard_manager.grafana.provision.get_gws_direct"
+    )
+    _mocked_gws.return_value = []
+    _mocked_gws_indirect = mocker.patch(
+        "brian_dashboard_manager.grafana.provision.get_gws_indirect"
+    )
+    _mocked_gws_indirect.return_value = []
+    data_config["organizations"] = [
+        {"name": "Testorg1", "excluded_nrens": ["GEANT"], "excluded_dashboards": []},
+    ]
 
     provision(data_config, raise_exceptions=True)
+    folder_uid = "RE_Peer"
+    assert len(mock_grafana.dashboards_by_folder_uid[folder_uid]) == 1
+    panels = mock_grafana.dashboards_by_folder_uid[folder_uid][0]["panels"]
+    expected_types = ["text", "graph", "graph", "row", "graph", "graph", "row"]
+    assert [p["type"] for p in panels] == expected_types
+    assert "INFO" in panels[0]["title"]
+    assert "ingress" in panels[1]["title"]
+    assert "egress" in panels[2]["title"]
+    assert "Services" in panels[3]["title"]
+    assert "traffic" in panels[4]["title"]
+    assert "IPv6" in panels[5]["title"]
+    assert "Interfaces" in panels[6]["title"]
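-- 
Reviewer note: a minimal sketch of how the new R&E Peer pieces compose, mirroring what
provision_folder() now does for the "RE Peer" folder. The datasource name and tag below
are illustrative placeholders, and `interfaces` is assumed to be a poller interface list
shaped like the fixture in test_helpers.py.

    from brian_dashboard_manager.templating.helpers import (
        get_re_peer_interface_data, get_re_peer_dashboard_data)
    from brian_dashboard_manager.templating.render import render_complex_dashboard

    # group poller interfaces per R&E peer, e.g. {'ESNET': {'AGGREGATES': [...], ...}}
    data = get_re_peer_interface_data(interfaces)
    # one dashboard definition per peer (built in a process pool), each rendered with
    # the complex (aggregate panels + dropdown groups) layout
    for dashboard in get_re_peer_dashboard_data(data, 'brian-influx-datasource', 'RE Peer'):
        rendered = render_complex_dashboard(**dashboard)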