diff --git a/MANIFEST.in b/MANIFEST.in
index c9ad86290416cde3e77c618bb77bc574c5482b09..199eef4d0314ba203287f5e6ef266505bfaebadb 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,4 @@
 include brian_dashboard_manager/logging_default_config.json
 include brian_dashboard_manager/dashboards/*
-include brian_dashboard_manager/datasources/*
\ No newline at end of file
+include brian_dashboard_manager/datasources/*
+recursive-include brian_dashboard_manager/templating/templates *
\ No newline at end of file
diff --git a/brian_dashboard_manager/grafana/dashboard.py b/brian_dashboard_manager/grafana/dashboard.py
index bac998e7b23299eec91ff8d3c81b0cd66355dc85..19c1fee56a16c75aa97aca48bb6bdbec43fc50c7 100644
--- a/brian_dashboard_manager/grafana/dashboard.py
+++ b/brian_dashboard_manager/grafana/dashboard.py
@@ -122,8 +122,8 @@ def create_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
         payload['folderId'] = folder_id
 
     try:
-        action = "Updating" if existing_dashboard else "Creating"
-        logger.info(f'{action} dashboard: {title}')
+        # action = "Updating" if existing_dashboard else "Creating"
+        # logger.info(f'{action} dashboard: {title}')
         r = request.post('api/dashboards/db', json=payload)
         return r
     except HTTPError:
diff --git a/brian_dashboard_manager/grafana/folder.py b/brian_dashboard_manager/grafana/folder.py
index 181ea8e3171ad2863e7bbe13bf364496c942ae41..fe3700f7fdde128088bd469ebcb9a08a9aaba50d 100644
--- a/brian_dashboard_manager/grafana/folder.py
+++ b/brian_dashboard_manager/grafana/folder.py
@@ -6,6 +6,20 @@ from brian_dashboard_manager.grafana.utils.request import TokenRequest
 logger = logging.getLogger(__name__)
 
 
+def find_folder(token_request, title):
+    folders = get_folders(token_request)
+    try:
+        folder = next(
+            f for f in folders if f['title'].lower() == title.lower())
+    except StopIteration:
+        folder = None
+
+    if not folder:
+        logger.info(f'Created folder: {title}')
+        folder = create_folder(token_request, title)
+    return folder
+
+
 def get_folders(request: TokenRequest):
     return request.get('api/folders')
 
@@ -14,9 +28,7 @@ def create_folder(request: TokenRequest, title):
     try:
         data = {'title': title, 'uid': title.replace(' ', '_')}
         r = request.post('api/folders', json=data)
-    except HTTPError as e:
-        message = e.content.get("message", "")
-        logger.exception(
-            f'Error when creating folder {title} ({message})')
+    except HTTPError:
+        logger.exception(f'Error when creating folder {title}')
        return None
    return r
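find_folder is the new get-or-create helper used by the provisioning code below: it looks a folder up by title and only calls create_folder when nothing matches. A minimal usage sketch; the token value and config mapping are illustrative, not taken from this change:

    from brian_dashboard_manager.grafana.folder import find_folder
    from brian_dashboard_manager.grafana.utils.request import TokenRequest

    # config stands for the mapping the app already loads
    # (hostname, admin credentials, datasources, ...)
    token_request = TokenRequest(token='example-api-key', **config)

    # returns the existing folder, or POSTs api/folders and returns the new one
    folder = find_folder(token_request, 'NREN Access')
    print(folder['id'], folder['uid'])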
diff --git a/brian_dashboard_manager/grafana/provision.py b/brian_dashboard_manager/grafana/provision.py
index bafbbd1c79cbb73e5679ab8b55b97adbb8e61c2b..2a69308e49df7abce49541b078506853b3e1afb0 100644
--- a/brian_dashboard_manager/grafana/provision.py
+++ b/brian_dashboard_manager/grafana/provision.py
@@ -1,4 +1,6 @@
 import logging
+import time
+from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
 from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS
 from brian_dashboard_manager.grafana.utils.request import \
     AdminRequest, \
@@ -10,8 +12,7 @@ from brian_dashboard_manager.grafana.dashboard import \
     get_dashboard_definitions, create_dashboard, find_dashboard
 from brian_dashboard_manager.grafana.datasource import \
     check_provisioned, create_datasource
-from brian_dashboard_manager.grafana.folder import \
-    get_folders, create_folder
+from brian_dashboard_manager.grafana.folder import find_folder
 from brian_dashboard_manager.inventory_provider.interfaces import \
     get_interfaces
 from brian_dashboard_manager.templating.nren_access import generate_nrens
@@ -28,6 +29,38 @@ from brian_dashboard_manager.templating.render import render_dashboard
 
 logger = logging.getLogger(__name__)
 
 
+def generate_all_nrens(token_request, nrens, folder_id, datasource_name):
+    with ThreadPoolExecutor(max_workers=12) as executor:
+        for dashboard in generate_nrens(nrens, datasource_name):
+            executor.submit(create_dashboard, token_request,
+                            dashboard, folder_id)
+
+
+def provision_folder(token_request, folder_name,
+                     dash, excluded_interfaces, datasource_name):
+
+    folder = find_folder(token_request, folder_name)
+
+    predicate = dash['predicate']
+    tag = dash['tag']
+
+    # dashboard will include error panel
+    errors = dash.get('errors')
+
+    # custom parsing function for description to dashboard name
+    parse_func = dash.get('parse_func')
+
+    relevant_interfaces = filter(predicate, excluded_interfaces)
+    data = get_interface_data(relevant_interfaces, parse_func)
+    dash_data = get_dashboard_data(data, datasource_name, tag, errors)
+
+    with ThreadPoolExecutor(max_workers=12) as executor:
+        for dashboard in dash_data:
+            rendered = render_dashboard(dashboard)
+            executor.submit(create_dashboard, token_request,
+                            rendered, folder['id'])
+
+
 def provision(config):
 
     request = AdminRequest(**config)
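Both helpers rely on the ThreadPoolExecutor context manager to wait for every submitted job before returning; any exception raised inside create_dashboard stays inside the corresponding Future and is otherwise discarded. If worker errors should surface in the logs, the futures can be kept and checked afterwards. A possible variant of generate_all_nrens, sketched here for illustration and not part of this change:

    def generate_all_nrens(token_request, nrens, folder_id, datasource_name):
        with ThreadPoolExecutor(max_workers=12) as executor:
            futures = [
                executor.submit(create_dashboard, token_request,
                                dashboard, folder_id)
                for dashboard in generate_nrens(nrens, datasource_name)
            ]
        # the with-block has already waited for completion at this point
        for future in futures:
            exc = future.exception()
            if exc is not None:
                logger.error('NREN dashboard provisioning failed: %r', exc)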
@@ -43,147 +76,156 @@ def provision(config):
         all_orgs.append(org_data)
 
     interfaces = get_interfaces(config['inventory_provider'])
 
-    for org in all_orgs:
-        org_id = org['id']
-        delete_expired_api_tokens(request, org_id)
-        token = create_api_token(request, org_id)
-        token_request = TokenRequest(token=token['key'], **config)
+    tokens = []
 
-        folders = get_folders(token_request)
+    start = time.time()
 
-        def find_folder(title):
+    with ProcessPoolExecutor(max_workers=4) as org_executor, \
+            ThreadPoolExecutor(max_workers=12) as thread_executor:
+        for org in all_orgs:
+            org_id = org['id']
+            delete_expired_api_tokens(request, org_id)
+            token = create_api_token(request, org_id)
+            token_request = TokenRequest(token=token['key'], **config)
+            tokens.append((org_id, token['id']))
+
+            logger.info(
+                f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
             try:
-                folder = next(
-                    f for f in folders if f['title'].lower() == title.lower())
+                org_config = next(
+                    o for o in orgs_to_provision if o['name'] == org['name'])
             except StopIteration:
-                folder = None
-
-            if not folder:
-                logger.info(f'Created folder: {title}')
-                folder = create_folder(token_request, title)
-                folders.append(folder)
-            return folder
-
-        logger.info(
-            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
-
-        try:
-            org_config = next(
-                o for o in orgs_to_provision if o['name'] == org['name'])
-        except StopIteration:
-            org_config = None
-
-        if not org_config:
-            logger.error(
-                f'Org {org["name"]} does not have valid configuration.')
-            org['info'] = 'Org exists in grafana but is not configured'
-            continue
-
-        # Only provision influxdb datasource for now
-        datasource = config.get('datasources').get('influxdb')
-
-        # Provision missing data sources
-        if not check_provisioned(token_request, datasource):
-            ds = create_datasource(token_request,
-                                   datasource,
-                                   config.get('datasources'))
-            if ds:
-                logger.info(
-                    f'Provisioned datasource: {datasource["name"]}')
-
-        excluded_nrens = org_config.get('excluded_nrens', [])
-        excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))
-
-        def excluded(interface):
-            desc = interface.get('description', '').lower()
-            return not any(nren.lower() in desc for nren in excluded_nrens)
-
-        excluded_interfaces = list(filter(excluded, interfaces))
-
-        dashboards = {
-            'CLS': {'predicate': is_cls, 'tag': 'CLS'},
-            'RE PEER': {'predicate': is_re_peer, 'tag': 'RE_PEER'},
-            'RE CUST': {'predicate': is_re_customer, 'tag': 'RE_CUST'},
-            'GEANTOPEN': {'predicate': is_geantopen, 'tag': 'GEANTOPEN'},
-            'GCS': {'predicate': is_gcs, 'tag': 'AUTOMATED_L2_CIRCUITS'},
-            'L2 CIRCUIT': {'predicate': is_l2circuit, 'tag': 'L2_CIRCUITS'},
-            'LHCONE PEER': {'predicate': is_lhcone_peer, 'tag': 'LHCONE_PEER'},
-            'LHCONE CUST': {
-                'predicate': is_lhcone_customer,
-                'tag': 'LHCONE_CUST'
-            },
-            'MDVPN Customers': {'predicate': is_mdvpn, 'tag': 'MDVPN'},
-            'Infrastructure Backbone': {
-                'predicate': is_lag_backbone,
-                'tag': 'BACKBONE',
-                'errors': True,
-                'parse_func': parse_backbone_name
-            },
-            'IAS PRIVATE': {'predicate': is_ias_private, 'tag': 'IAS_PRIVATE'},
-            'IAS PUBLIC': {'predicate': is_ias_public, 'tag': 'IAS_PUBLIC'},
-            'IAS CUSTOMER': {
-                'predicate': is_ias_customer,
-                'tag': 'IAS_CUSTOMER'
-            },
-            'IAS UPSTREAM': {
-                'predicate': is_ias_upstream,
-                'tag': 'IAS_UPSTREAM'
-            },
-            'GWS PHY Upstream': {
-                'predicate': is_phy_upstream,
-                'tag': 'GWS_UPSTREAM',
-                'errors': True,
-                'parse_func': parse_phy_upstream_name
+                org_config = None
+
+            if not org_config:
+                logger.error(
+                    f'Org {org["name"]} does not have valid configuration.')
+                org['info'] = 'Org exists in grafana but is not configured'
+                continue
+
+            # Only provision influxdb datasource for now
+            datasource = config.get('datasources').get('influxdb')
+
+            # Provision missing data sources
+            if not check_provisioned(token_request, datasource):
+                ds = create_datasource(token_request,
+                                       datasource,
+                                       config.get('datasources'))
+                if ds:
+                    logger.info(
+                        f'Provisioned datasource: {datasource["name"]}')
+
+            excluded_nrens = org_config.get('excluded_nrens', [])
+            excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))
+
+            def excluded(interface):
+                desc = interface.get('description', '').lower()
+                return not any(nren.lower() in desc for nren in excluded_nrens)
+
+            excluded_interfaces = list(filter(excluded, interfaces))
+
+            dashboards = {
+                'CLS': {
+                    'predicate': is_cls,
+                    'tag': 'CLS'
+                },
+                'RE PEER': {
+                    'predicate': is_re_peer,
+                    'tag': 'RE_PEER'
+                },
+                'RE CUST': {
+                    'predicate': is_re_customer,
+                    'tag': 'RE_CUST'
+                },
+                'GEANTOPEN': {
+                    'predicate': is_geantopen,
+                    'tag': 'GEANTOPEN'
+                },
+                'GCS': {
+                    'predicate': is_gcs,
+                    'tag': 'AUTOMATED_L2_CIRCUITS'
+                },
+                'L2 CIRCUIT': {
+                    'predicate': is_l2circuit,
+                    'tag': 'L2_CIRCUITS'
+                },
+                'LHCONE PEER': {
+                    'predicate': is_lhcone_peer,
+                    'tag': 'LHCONE_PEER'
+                },
+                'LHCONE CUST': {
+                    'predicate': is_lhcone_customer,
+                    'tag': 'LHCONE_CUST'
+                },
+                'MDVPN Customers': {
+                    'predicate': is_mdvpn,
+                    'tag': 'MDVPN'
+                },
+                'Infrastructure Backbone': {
+                    'predicate': is_lag_backbone,
+                    'tag': 'BACKBONE',
+                    'errors': True,
+                    'parse_func': parse_backbone_name
+                },
+                'IAS PRIVATE': {
+                    'predicate': is_ias_private,
+                    'tag': 'IAS_PRIVATE'
+                },
+                'IAS PUBLIC': {
+                    'predicate': is_ias_public,
+                    'tag': 'IAS_PUBLIC'
+                },
+                'IAS CUSTOMER': {
+                    'predicate': is_ias_customer,
+                    'tag': 'IAS_CUSTOMER'
+                },
+                'IAS UPSTREAM': {
+                    'predicate': is_ias_upstream,
+                    'tag': 'IAS_UPSTREAM'
+                },
+                'GWS PHY Upstream': {
+                    'predicate': is_phy_upstream,
+                    'tag': 'GWS_UPSTREAM',
+                    'errors': True,
+                    'parse_func': parse_phy_upstream_name
+                }
             }
-
-        }
-        # Provision dashboards, overwriting existing ones.
-
-        datasource_name = datasource.get('name', 'PollerInfluxDB')
-        for folder_name, dash in dashboards.items():
-            folder = find_folder(folder_name)
-            predicate = dash['predicate']
-            tag = dash['tag']
-
-            # dashboard will include error panel
-            errors = dash.get('errors')
-
-            # custom parsing function for description to dashboard name
-            parse_func = dash.get('parse_func')
-
-            logger.info(f'Provisioning {folder_name} dashboards')
-
-            relevant_interfaces = filter(predicate, excluded_interfaces)
-            data = get_interface_data(relevant_interfaces, parse_func)
-            dash_data = get_dashboard_data(data, datasource_name, tag, errors)
-            for dashboard in dash_data:
-                rendered = render_dashboard(dashboard)
-                create_dashboard(token_request, rendered, folder['id'])
-
-        # NREN Access dashboards
-        # uses a different template than the above.
-        logger.info('Provisioning NREN Access dashboards')
-        folder = find_folder('NREN Access')
-        nrens = filter(is_nren, excluded_interfaces)
-        for dashboard in generate_nrens(nrens, datasource_name):
-            create_dashboard(token_request, dashboard, folder['id'])
-
-        # Non-generated dashboards
-        excluded_dashboards = org_config.get('excluded_dashboards', [])
-        logger.info('Provisioning static dashboards')
-        for dashboard in get_dashboard_definitions():
-            if dashboard['title'] not in excluded_dashboards:
-                if dashboard['title'].lower() == 'home':
-                    dashboard['uid'] = 'home'
-                create_dashboard(token_request, dashboard)
-
-        # Home dashboard is always called "Home"
-        # Make sure it's set for the organization
-        home_dashboard = find_dashboard(token_request, 'Home')
-        if home_dashboard:
-            set_home_dashboard(token_request, home_dashboard['id'])
-
-        delete_api_token(request, org_id, token['id'])
+            # Provision dashboards, overwriting existing ones.
+            datasource_name = datasource.get('name', 'PollerInfluxDB')
+            for folder_name, dash in dashboards.items():
+                logger.info(
+                    f'Provisioning {org["name"]}/{folder_name} dashboards')
+                org_executor.submit(provision_folder, token_request,
+                                    folder_name, dash,
+                                    excluded_interfaces, datasource_name)
+
+            # NREN Access dashboards
+            # uses a different template than the above.
+            logger.info('Provisioning NREN Access dashboards')
+            folder = find_folder(token_request, 'NREN Access')
+            nrens = filter(is_nren, excluded_interfaces)
+            org_executor.submit(generate_all_nrens, token_request,
+                                nrens, folder['id'], datasource_name)
+
+            # Non-generated dashboards
+            excluded_dashboards = org_config.get('excluded_dashboards', [])
+            logger.info('Provisioning static dashboards')
+            for dashboard in get_dashboard_definitions():
+                if dashboard['title'] not in excluded_dashboards:
+                    if dashboard['title'].lower() == 'home':
+                        dashboard['uid'] = 'home'
+                    thread_executor.submit(
+                        create_dashboard, token_request, dashboard)
+
+            # Home dashboard is always called "Home"
+            # Make sure it's set for the organization
+            home_dashboard = find_dashboard(token_request, 'Home')
+            if home_dashboard:
+                set_home_dashboard(token_request, home_dashboard['id'])
+
+    logger.info(f'Time to complete: {time.time() - start}')
+    for org_id, token in tokens:
+        delete_api_token(request, org_id, token)
 
     return all_orgs
diff --git a/brian_dashboard_manager/grafana/utils/request.py b/brian_dashboard_manager/grafana/utils/request.py
index 0d128cb5c6c2577e30e6b2732860ed73642f0190..d55dc878a3dc4057e4e612417790a4698e620477 100644
--- a/brian_dashboard_manager/grafana/utils/request.py
+++ b/brian_dashboard_manager/grafana/utils/request.py
@@ -1,5 +1,4 @@
 import requests
-import json
 
 
 class Request(object):
@@ -20,10 +19,7 @@ class Request(object):
             **kwargs
         )
         r.raise_for_status()
-        try:
-            return r.json()
-        except json.JSONDecodeError:
-            return None
+        return r.json()
 
     def post(self, endpoint: str, headers=None, **kwargs):
 
@@ -33,10 +29,8 @@ class Request(object):
             **kwargs
         )
         r.raise_for_status()
-        try:
-            return r.json()
-        except json.JSONDecodeError:
-            return None
+
+        return r.json()
 
     def put(self, endpoint: str, headers=None, **kwargs):
 
@@ -46,10 +40,7 @@ class Request(object):
             **kwargs
         )
         r.raise_for_status()
-        try:
-            return r.json()
-        except json.JSONDecodeError:
-            return None
+        return r.json()
 
     def delete(self, endpoint: str, headers=None, **kwargs):
 
@@ -59,10 +50,8 @@ class Request(object):
             **kwargs
         )
         r.raise_for_status()
-        try:
-            return r.json()
-        except json.JSONDecodeError:
-            return None
+
+        return r.json()
 
 
 class AdminRequest(Request):
diff --git a/brian_dashboard_manager/inventory_provider/interfaces.py b/brian_dashboard_manager/inventory_provider/interfaces.py
index 8368d0c02f29d08cc0c4c6e21cba1aed318b4659..a6cbbcc0898919aa79634004807bc56e46c1e23d 100644
--- a/brian_dashboard_manager/inventory_provider/interfaces.py
+++ b/brian_dashboard_manager/inventory_provider/interfaces.py
@@ -5,6 +5,6 @@ logger = logging.getLogger(__name__)
 
 
 def get_interfaces(host):
-    r = requests.get(f'http://{host}/poller/interfaces')
+    r = requests.get(f'{host}/poller/interfaces')
     r.raise_for_status()
     return r.json()
diff --git a/brian_dashboard_manager/routes/update.py b/brian_dashboard_manager/routes/update.py
index eb7c2d5b935813c505c42b4d03688d320d5bb3b6..681c8679fd366fef2ecd856a695aca24d39a207a 100644
--- a/brian_dashboard_manager/routes/update.py
+++ b/brian_dashboard_manager/routes/update.py
@@ -1,3 +1,4 @@
+from concurrent.futures import ThreadPoolExecutor
 from flask import Blueprint, current_app
 from brian_dashboard_manager.routes import common
 from brian_dashboard_manager.grafana.provision import provision
@@ -13,5 +14,6 @@ def after_request(resp):
 
 @routes.route('/', methods=['GET'])
 def update():
-    success = provision(current_app.config[CONFIG_KEY])
-    return {'data': success}
+    executor = ThreadPoolExecutor(max_workers=1)
+    executor.submit(provision, current_app.config[CONFIG_KEY])
+    return {'data': {'message': 'Provisioning dashboards!'}}
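update() now answers as soon as the job is handed to the executor, so the HTTP response no longer reflects the provisioning result; it also builds a fresh single-worker pool on every request. One way to reuse one worker across calls is a module-level executor — a sketch of an alternative, not part of this change:

    from concurrent.futures import ThreadPoolExecutor
    from flask import Blueprint, current_app

    # one shared worker: a second /update/ call queues behind a running one
    PROVISION_EXECUTOR = ThreadPoolExecutor(max_workers=1)

    @routes.route('/', methods=['GET'])
    def update():
        PROVISION_EXECUTOR.submit(provision, current_app.config[CONFIG_KEY])
        return {'data': {'message': 'Provisioning dashboards!'}}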
diff --git a/brian_dashboard_manager/templating/helpers.py b/brian_dashboard_manager/templating/helpers.py
index f808f8be95c4d177f3b152ade487ae8800935f55..5d878abf60f54e313e2de21b6001d404fffee049 100644
--- a/brian_dashboard_manager/templating/helpers.py
+++ b/brian_dashboard_manager/templating/helpers.py
@@ -1,4 +1,6 @@
 import re
+import logging
+from multiprocessing.pool import Pool
 from itertools import product
 from string import ascii_uppercase
 from brian_dashboard_manager.templating.render import create_panel
@@ -6,6 +8,8 @@ from brian_dashboard_manager.templating.render import create_panel
 PANEL_HEIGHT = 12
 PANEL_WIDTH = 24
 
+logger = logging.getLogger(__file__)
+
 
 def get_description(interface):
     return interface.get('description', '').strip()
@@ -217,26 +221,36 @@ def get_panel_fields(panel, panel_type, datasource):
     })
 
 
+def get_panel_definitions(panel, datasource, errors=False):
+    result = []
+
+    result.append(get_panel_fields(
+        panel, 'traffic', datasource))
+    result.append(get_panel_fields(
+        panel, 'IPv6', datasource))
+    if errors:
+        result.append(get_panel_fields(
+            panel, 'errors', datasource))
+    return result
+
+
+def flatten(t): return [item for sublist in t for item in sublist]
+
+
 def get_dashboard_data(data, datasource, tag, errors=False):
     id_gen = num_generator()
     gridPos = gridPos_generator(id_gen)
-
-    def get_panel_definitions(panels, datasource):
-        result = []
-        for panel in panels:
-            result.append(get_panel_fields(
-                {**panel, **next(gridPos)}, 'traffic', datasource))
-            result.append(get_panel_fields(
-                {**panel, **next(gridPos)}, 'IPv6', datasource))
-            if errors:
-                result.append(get_panel_fields(
-                    {**panel, **next(gridPos)}, 'errors', datasource))
-        return result
+    pool = Pool(processes=2)
 
     for peer, panels in data.items():
+        otherpanels = [({**panel, **next(gridPos)}, datasource, errors)
+                       for panel in panels]
+
+        all_panels = flatten(pool.starmap(get_panel_definitions, otherpanels))
+
         yield {
             'title': peer,
             'datasource': datasource,
-            'panels': get_panel_definitions(panels, datasource),
+            'panels': all_panels,
             'tag': tag
         }
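get_panel_definitions and flatten move to module level because multiprocessing can only ship top-level callables to worker processes. The Pool opened inside get_dashboard_data is never closed, though; scoping it with a context manager would release the workers once the generator is exhausted. A sketch under that assumption, not part of this change:

    def get_dashboard_data(data, datasource, tag, errors=False):
        id_gen = num_generator()
        gridPos = gridPos_generator(id_gen)

        with Pool(processes=2) as pool:
            for peer, panels in data.items():
                args = [({**panel, **next(gridPos)}, datasource, errors)
                        for panel in panels]
                yield {
                    'title': peer,
                    'datasource': datasource,
                    'panels': flatten(
                        pool.starmap(get_panel_definitions, args)),
                    'tag': tag,
                }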
diff --git a/brian_dashboard_manager/templating/nren_access.py b/brian_dashboard_manager/templating/nren_access.py
index 4430515addc4c0b3ccb79da89a86038797e69da5..03eaa1f0b3a8c82cc445c8be6e9fabbc3158c1d0 100644
--- a/brian_dashboard_manager/templating/nren_access.py
+++ b/brian_dashboard_manager/templating/nren_access.py
@@ -1,11 +1,13 @@
 import json
 import os
 import jinja2
+from concurrent.futures import ProcessPoolExecutor
 from brian_dashboard_manager.templating.render import create_dropdown_panel, \
     create_panel_target
 from brian_dashboard_manager.templating.helpers import \
     is_aggregate_interface, is_logical_interface, is_physical_interface, \
-    num_generator, gridPos_generator, letter_generator, get_panel_fields
+    num_generator, gridPos_generator, letter_generator, \
+    get_panel_definitions, flatten
 
 
 def get_nrens(interfaces):
@@ -95,46 +97,48 @@ def get_aggregate_targets(aggregates):
     return ingress, egress
 
 
-def get_panel_definitions(panels, datasource, errors=False):
-    result = []
-    for panel in panels:
-        result.append(get_panel_fields(
-            {**panel, **next(gridPos)}, 'traffic', datasource))
-        result.append(get_panel_fields(
-            {**panel, **next(gridPos)}, 'IPv6', datasource))
-        if errors:
-            result.append(get_panel_fields(
-                {**panel, **next(gridPos)}, 'errors', datasource))
-    return result
-
-
 def get_dashboard_data(interfaces,
                        datasource):
-
-    nren_data = get_nrens(interfaces)
-    for nren, data in nren_data.items():
-
-        agg_ingress, agg_egress = get_aggregate_targets(data['AGGREGATES'])
-        services_dropdown = create_dropdown_panel('Services', **next(gridPos))
-        service_panels = get_panel_definitions(data['SERVICES'], datasource)
-        iface_dropdown = create_dropdown_panel('Interfaces', **next(gridPos))
-        phys_panels = get_panel_definitions(data['PHYSICAL'], datasource, True)
-
-        yield {
-            'nren_name': nren,
-            'datasource': datasource,
-            'ingress_aggregate_targets': agg_ingress,
-            'egress_aggregate_targets': agg_egress,
-            'dropdown_groups': [
-                {
-                    'dropdown': services_dropdown,
-                    'panels': service_panels,
-                },
-                {
-                    'dropdown': iface_dropdown,
-                    'panels': phys_panels,
-                }
-            ]
-        }
+    with ProcessPoolExecutor(max_workers=4) as executor:
+
+        def get_panels(data):
+            for panel in data:
+                yield executor.submit(
+                    get_panel_definitions,
+                    {
+                        **panel,
+                        **next(gridPos)
+                    },
+                    datasource
+                )
+
+        nren_data = executor.submit(get_nrens, interfaces).result()
+        for nren, data in nren_data.items():
+
+            services_dropdown = create_dropdown_panel(
+                'Services', **next(gridPos))
+            services = get_panels(data['SERVICES'])
+            iface_dropdown = create_dropdown_panel(
+                'Interfaces', **next(gridPos))
+            physical = get_panels(data['PHYSICAL'])
+            agg_ingress, agg_egress = executor.submit(
+                get_aggregate_targets, data['AGGREGATES']).result()
+
+            yield {
+                'nren_name': nren,
+                'datasource': datasource,
+                'ingress_aggregate_targets': agg_ingress,
+                'egress_aggregate_targets': agg_egress,
+                'dropdown_groups': [
+                    {
+                        'dropdown': services_dropdown,
+                        'panels': flatten([p.result() for p in services]),
+                    },
+                    {
+                        'dropdown': iface_dropdown,
+                        'panels': flatten([p.result() for p in physical]),
+                    }
+                ]
+            }
 
 
 def generate_nrens(interfaces, datasource):
diff --git a/changelog.md b/changelog.md
index bee869ded4d5eb609bbcda3d6cc0470b0b168235..80f71dd0df0df1e75c53703f104086fc1534d734 100644
--- a/changelog.md
+++ b/changelog.md
@@ -1,7 +1,9 @@
-
 # Changelog
 All notable changes to this project will be documented in this file.
 
+## [0.2] - 2021-03-03
+- Generate and provision dashboards concurrently to reduce time to provision
+
 ## [0.1] - 2021-02-24
 - initial skeleton
diff --git a/setup.py b/setup.py
index f27cce609b1e7bcf634d2af82eed1e3d4690100a..2d8566df62a0a0f8b9fb2e98b29923854fa832ff 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='brian-dashboard-manager',
-    version="0.1",
+    version="0.2",
     author='GEANT',
     author_email='swd@geant.org',
     description='',
diff --git a/test/conftest.py b/test/conftest.py
index 148685debfcb07b9863544c69595c9a85819730c..22b968d8e27d238fb211cbcefff95e65da445146 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -11,7 +11,7 @@ def data_config():
         "admin_username": "fakeadmin",
         "admin_password": "fakeadmin",
         "hostname": "myfakehostname.org",
-        "inventory_provider": "inventory-provider01.geant.org:8080",
+        "inventory_provider": "http://inventory-provider01.geant.org:8080",
         "organizations": [
             {
                 "name": "Testorg1",
diff --git a/test/test_grafana_datasource.py b/test/test_grafana_datasource.py
index 488b7023c3a577082c5c37da0b7698e28de186b6..a453962530eb8378e1d0b7cd438d2a01391de22e 100644
--- a/test/test_grafana_datasource.py
+++ b/test/test_grafana_datasource.py
@@ -28,7 +28,7 @@ def test_get_missing_datasource_definitions(data_config):
     request = AdminRequest(**data_config)
     responses.add(method=responses.GET,
                   url=request.BASE_URL +
-                  'api/datasources')
+                  'api/datasources', json={})
 
     dir = '/tmp/dirthatreallyshouldnotexistsousealonganduniquestring'
     # it returns a generator, so iterate :)
diff --git a/test/test_grafana_folder.py b/test/test_grafana_folder.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fa6b174576ecc239bd5f3824a0c2b92c0e56d90
--- /dev/null
+++ b/test/test_grafana_folder.py
@@ -0,0 +1,52 @@
+
+import json
+import responses
+from brian_dashboard_manager.grafana.folder import find_folder
+from brian_dashboard_manager.grafana.utils.request import TokenRequest
+
+
+def generate_folder(data):
+    return {
+        "id": 555,
+        "uid": data['uid'],
+        "title": data['title'],
+        "url": f"/dashboards/f/{data['uid']}/{data['title'].lower()}",
+        "hasAcl": False,
+        "canSave": True,
+        "canEdit": True,
+        "canAdmin": True,
+        "createdBy": "Anonymous",
+        "created": "2021-02-23T15:33:46Z",
+        "updatedBy": "Anonymous",
+        "updated": "2021-02-23T15:33:46Z",
+        "version": 1
+    }
+
+
+@responses.activate
+def test_find_folder(data_config):
+
+    TITLE = 'testfolder123'
+
+    request = TokenRequest(**data_config, token='test')
+
+    def folder_get(request):
+        return 200, {}, json.dumps([])
+
+    responses.add_callback(
+        method=responses.GET,
+        url=f"http://{data_config['hostname']}/api/folders",
+        callback=folder_get)
+
+    def folder_post(request):
+        data = json.loads(request.body)
+        return 200, {}, json.dumps(generate_folder(data))
+
+    responses.add_callback(
+        method=responses.POST,
+        url=f"http://{data_config['hostname']}/api/folders",
+        callback=folder_post)
+
+    folder = find_folder(request, TITLE)
+    assert folder['id'] == 555
+    assert folder['title'] == TITLE
diff --git a/test/test_grafana_request.py b/test/test_grafana_request.py
index d90b191bf0245b129c10d3c3ceeb198f1c7d7863..f110a9553be841ae88b8c90c41d7d58501e2b2ba 100644
--- a/test/test_grafana_request.py
+++ b/test/test_grafana_request.py
@@ -18,7 +18,7 @@ def test_admin_request(data_config):
 
     def get_callback(request):
         assert request.path_url[1:] == ENDPOINT
-        return 200, {}, ''
+        return 200, {}, '{}'
 
     responses.add_callback(
         method=responses.GET,
@@ -40,7 +40,7 @@ def test_token_request(data_config):
     def get_callback(request):
         assert request.path_url[1:] == ENDPOINT
         assert TOKEN in request.headers['authorization']
-        return 200, {}, ''
+        return 200, {}, '{}'
 
     responses.add_callback(
         method=responses.GET,
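Request.get/post/put/delete now call r.json() unconditionally instead of swallowing JSONDecodeError, which is why the mocked endpoints above switch from empty bodies to '{}' (and json={}). A minimal illustration of the failure mode, using the responses library and an example URL:

    import requests
    import responses

    @responses.activate
    def demo():
        responses.add(responses.GET, 'http://grafana.example.org/api/health',
                      body='')
        r = requests.get('http://grafana.example.org/api/health')
        try:
            r.json()
        except ValueError:
            # json.JSONDecodeError is a ValueError: empty bodies now propagate
            print('r.json() raised on an empty body')

    demo()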
diff --git a/test/test_update.py b/test/test_update.py
index 607d02d1b0ee512efef191ef04b4979679459b5b..0c77cf7c6f8c109945ceb4c44f4179bc2c4ba44b 100644
--- a/test/test_update.py
+++ b/test/test_update.py
@@ -1,5 +1,13 @@
 import responses
 import json
+from brian_dashboard_manager.templating.nren_access import get_nrens
+from brian_dashboard_manager.grafana.provision import provision_folder, \
+    generate_all_nrens
+from brian_dashboard_manager.grafana.provision import is_re_customer, \
+    is_cls, is_ias_customer, is_ias_private, is_ias_public, is_ias_upstream, \
+    is_lag_backbone, is_phy_upstream, is_re_peer, is_gcs, \
+    is_geantopen, is_l2circuit, is_lhcone_peer, is_lhcone_customer, is_mdvpn,\
+    parse_backbone_name, parse_phy_upstream_name
 
 DEFAULT_REQUEST_HEADERS = {
     "Content-type": "application/json",
@@ -115,6 +123,15 @@ TEST_INTERFACES = [
         "snmp-index": 694,
         "description": "PHY RESERVED | Prime Telecom Sofia-Bucharest 3_4",
         "circuits": []
+    },
+    {
+        "router": "mx1.sof.bg.geant.net",
+        "name": "xe-2/0/5",
+        "bundle": [],
+        "bundle-parents": [],
+        "snmp-index": 694,
+        "description": "SRV_GLOBAL CUSTOMER HEANET TESTDESCRIPTION |",
+        "circuits": []
     }
 ]
 
@@ -137,6 +154,157 @@ def generate_folder(data):
     }
 
 
+@responses.activate
+def test_provision_folder(data_config, mocker):
+    dashboards = {
+        'CLS TESTDASHBOARD': {
+            'predicate': is_cls,
+            'tag': 'CLS'
+        },
+        'RE PEER TESTDASHBOARD': {
+            'predicate': is_re_peer,
+            'tag': 'RE_PEER'
+        },
+        'RE CUST TESTDASHBOARD': {
+            'predicate': is_re_customer,
+            'tag': 'RE_CUST'
+        },
+        'GEANTOPEN TESTDASHBOARD': {
+            'predicate': is_geantopen,
+            'tag': 'GEANTOPEN'
+        },
+        'GCS TESTDASHBOARD': {
+            'predicate': is_gcs,
+            'tag': 'AUTOMATED_L2_CIRCUITS'
+        },
+        'L2 CIRCUIT TESTDASHBOARD': {
+            'predicate': is_l2circuit,
+            'tag': 'L2_CIRCUITS'
+        },
+        'LHCONE PEER TESTDASHBOARD': {
+            'predicate': is_lhcone_peer,
+            'tag': 'LHCONE_PEER'
+        },
+        'LHCONE CUST TESTDASHBOARD': {
+            'predicate': is_lhcone_customer,
+            'tag': 'LHCONE_CUST'
+        },
+        'MDVPN Customers TESTDASHBOARD': {
+            'predicate': is_mdvpn,
+            'tag': 'MDVPN'
+        },
+        'Infrastructure Backbone TESTDASHBOARD': {
+            'predicate': is_lag_backbone,
+            'tag': 'BACKBONE',
+            'errors': True,
+            'parse_func': parse_backbone_name
+        },
+        'IAS PRIVATE TESTDASHBOARD': {
+            'predicate': is_ias_private,
+            'tag': 'IAS_PRIVATE'
+        },
+        'IAS PUBLIC TESTDASHBOARD': {
+            'predicate': is_ias_public,
+            'tag': 'IAS_PUBLIC'
+        },
+        'IAS CUSTOMER TESTDASHBOARD': {
+            'predicate': is_ias_customer,
+            'tag': 'IAS_CUSTOMER'
+        },
+        'IAS UPSTREAM TESTDASHBOARD': {
+            'predicate': is_ias_upstream,
+            'tag': 'IAS_UPSTREAM'
+        },
+        'GWS PHY Upstream TESTDASHBOARD': {
+            'predicate': is_phy_upstream,
+            'tag': 'GWS_UPSTREAM',
+            'errors': True,
+            'parse_func': parse_phy_upstream_name
+        }
+    }
+
+    # just return a generated folder
+    _mocked_find_folder = mocker.patch(
+        'brian_dashboard_manager.grafana.provision.find_folder')
+    _mocked_find_folder.return_value = generate_folder(
+        {'uid': 'testfolderuid', 'title': 'testfolder'})
+
+    # we don't care about testing create_dashboard
+    _mocked_create_dashboard = mocker.patch(
+        'brian_dashboard_manager.grafana.provision.create_dashboard')
+    _mocked_create_dashboard.return_value = None
+
+    for dashboard in dashboards:
+        provision_folder(None, 'testfolder', dashboards[dashboard],
+                         TEST_INTERFACES, 'testdatasource')
+
+
+def test_provision_nrens(data_config, mocker):
+    NREN_INTERFACES = [
+        # physical
+        {
+            "router": "mx1.dub2.ie.geant.net",
+            "name": "xe-0/0/0",
+            "bundle": ["ae10"],
+            "bundle-parents": [],
+            "snmp-index": 554,
+            "description": "PHY CUSTOMER HEANET P_AE10 SRF9948758 | HEANET AP2-3 LL",  # noqa: E501
+            "circuits": []
+        },
+        # aggregate
+        {
+            "router": "mx1.dub2.ie.geant.net",
+            "name": "ae10",
+            "bundle": [],
+            "bundle-parents": ["xe-0/0/0", "xe-1/0/1", "xe-1/1/0"],
+            "snmp-index": 596,
+            "description": "LAG CUSTOMER HEANET SRF9925909 |",
+            "circuits": [
+                {
+                    "id": 25909,
+                    "name": "HEANET AP2 LAG",
+                    "type": "",
+                    "status": "operational"
+                }
+            ]
+        },
+        # logical
+        {
+            "router": "mx1.dub2.ie.geant.net",
+            "name": "ae10.12",
+            "bundle": [],
+            "bundle-parents": [
+                "xe-0/0/0",
+                "xe-1/0/1",
+                "xe-1/1/0"
+            ],
+            "snmp-index": 713,
+            "description": "SRV_GLOBAL CUSTOMER HEANET #HEANET_AP2 | ASN1213 | ",  # noqa: E501
+            "circuits": [
+                {
+                    "id": 48776,
+                    "name": "HEANET AP2",
+                    "type": "ip access",
+                    "status": "operational"
+                },
+                {
+                    "id": 31347,
+                    "name": "HEANET AP2 L2c",
+                    "type": "",
+                    "status": "operational"
+                }
+            ]
+        }
+    ]
+
+    nrens = get_nrens(NREN_INTERFACES)
+    assert len(nrens) == 1 and nrens.get('HEANET') is not None
+    assert len(nrens.get('HEANET').get('AGGREGATES')) == 1
+    assert len(nrens.get('HEANET').get('SERVICES')) == 1
+    assert len(nrens.get('HEANET').get('PHYSICAL')) == 2
+    generate_all_nrens(None, NREN_INTERFACES, 1, 'testdatasource')
+
+
 @responses.activate
 def test_provision(data_config, mocker, client):
 
@@ -145,7 +313,7 @@ def test_provision(data_config, mocker, client):
 
     responses.add_callback(
         method=responses.GET,
-        url=f"http://{data_config['inventory_provider']}/poller/interfaces",
+        url=f"{data_config['inventory_provider']}/poller/interfaces",
         callback=get_callback)
 
     def folder_get(request):
@@ -251,4 +419,4 @@ def test_provision(data_config, mocker, client):
     response = client.get('/update/', headers=DEFAULT_REQUEST_HEADERS)
     assert response.status_code == 200
     data = json.loads(response.data.decode('utf-8'))['data']
-    assert data == EXISTING_ORGS + [PROVISIONED_ORGANIZATION]
+    assert data is not None  # == EXISTING_ORGS + [PROVISIONED_ORGANIZATION]
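Since /update/ now only schedules the work, the endpoint answers immediately and the test can no longer compare the response to the provisioning result. A quick manual check against a running instance (the base URL is illustrative):

    import requests

    resp = requests.get('http://localhost:5000/update/')
    print(resp.json())
    # expected: {'data': {'message': 'Provisioning dashboards!'}}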