Skip to content
Snippets Groups Projects
Commit 386e8dee authored by Release Webservice's avatar Release Webservice
Browse files

Finished release 0.2.

parents 607bf83f fdae4d77
No related branches found
No related tags found
No related merge requests found
Showing
with 512 additions and 226 deletions
include brian_dashboard_manager/logging_default_config.json
include brian_dashboard_manager/dashboards/*
include brian_dashboard_manager/datasources/*
\ No newline at end of file
include brian_dashboard_manager/datasources/*
recursive-include brian_dashboard_manager/templating/templates *
\ No newline at end of file
......@@ -122,8 +122,8 @@ def create_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
payload['folderId'] = folder_id
try:
action = "Updating" if existing_dashboard else "Creating"
logger.info(f'{action} dashboard: {title}')
# action = "Updating" if existing_dashboard else "Creating"
# logger.info(f'{action} dashboard: {title}')
r = request.post('api/dashboards/db', json=payload)
return r
except HTTPError:
......
......@@ -6,6 +6,20 @@ from brian_dashboard_manager.grafana.utils.request import TokenRequest
logger = logging.getLogger(__name__)
def find_folder(token_request, title):
    """
    Return the Grafana folder with the given title (case-insensitive),
    creating it if it does not exist yet.

    :param token_request: TokenRequest scoped to the target organization
    :param title: folder title to look up or create
    :return: folder definition dict, or None if creation failed
    """
    folders = get_folders(token_request)
    # case-insensitive title match; None if nothing matches
    folder = next(
        (f for f in folders if f['title'].lower() == title.lower()), None)
    if folder is None:
        # create first, log after: the original logged "Created folder"
        # before the folder existed, which was misleading when
        # create_folder failed and returned None.
        folder = create_folder(token_request, title)
        if folder:
            logger.info(f'Created folder: {title}')
    return folder
def get_folders(request: TokenRequest):
    """Return every folder visible to the token's org (GET api/folders)."""
    folders = request.get('api/folders')
    return folders
......@@ -14,9 +28,7 @@ def create_folder(request: TokenRequest, title):
try:
data = {'title': title, 'uid': title.replace(' ', '_')}
r = request.post('api/folders', json=data)
except HTTPError as e:
message = e.content.get("message", "")
logger.exception(
f'Error when creating folder {title} ({message})')
except HTTPError:
logger.exception(f'Error when creating folder {title}')
return None
return r
import logging
import time
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS
from brian_dashboard_manager.grafana.utils.request import \
AdminRequest, \
......@@ -10,8 +12,7 @@ from brian_dashboard_manager.grafana.dashboard import \
get_dashboard_definitions, create_dashboard, find_dashboard
from brian_dashboard_manager.grafana.datasource import \
check_provisioned, create_datasource
from brian_dashboard_manager.grafana.folder import \
get_folders, create_folder
from brian_dashboard_manager.grafana.folder import find_folder
from brian_dashboard_manager.inventory_provider.interfaces import \
get_interfaces
from brian_dashboard_manager.templating.nren_access import generate_nrens
......@@ -28,6 +29,38 @@ from brian_dashboard_manager.templating.render import render_dashboard
logger = logging.getLogger(__name__)
def generate_all_nrens(token_request, nrens, folder_id, datasource_name):
    """
    Render NREN access dashboards and upload each one concurrently
    into the given folder.

    :param token_request: TokenRequest for the Grafana API
    :param nrens: iterable of NREN interface definitions
    :param folder_id: id of the destination Grafana folder
    :param datasource_name: datasource the panels should query
    """
    with ThreadPoolExecutor(max_workers=12) as pool:
        for rendered in generate_nrens(nrens, datasource_name):
            # fire-and-forget: pool is drained when the with-block exits
            pool.submit(create_dashboard, token_request, rendered, folder_id)
def provision_folder(token_request, folder_name,
                     dash, excluded_interfaces, datasource_name):
    """
    Render and upload every dashboard for one dashboard category.

    :param token_request: TokenRequest for the Grafana API
    :param folder_name: title of the folder to provision into
        (created if missing)
    :param dash: category definition; 'predicate' selects interfaces,
        'tag' tags the dashboards, optional 'errors' adds an error
        panel, optional 'parse_func' customizes description parsing
    :param excluded_interfaces: interfaces already filtered for the org
    :param datasource_name: datasource the panels should query
    """
    target_folder = find_folder(token_request, folder_name)
    wanted = filter(dash['predicate'], excluded_interfaces)
    interface_data = get_interface_data(wanted, dash.get('parse_func'))
    definitions = get_dashboard_data(
        interface_data, datasource_name, dash['tag'], dash.get('errors'))
    with ThreadPoolExecutor(max_workers=12) as pool:
        for definition in definitions:
            # render in this thread, create concurrently
            pool.submit(create_dashboard, token_request,
                        render_dashboard(definition), target_folder['id'])
def provision(config):
request = AdminRequest(**config)
......@@ -43,147 +76,156 @@ def provision(config):
all_orgs.append(org_data)
interfaces = get_interfaces(config['inventory_provider'])
for org in all_orgs:
org_id = org['id']
delete_expired_api_tokens(request, org_id)
token = create_api_token(request, org_id)
token_request = TokenRequest(token=token['key'], **config)
tokens = []
folders = get_folders(token_request)
start = time.time()
def find_folder(title):
with ProcessPoolExecutor(max_workers=4) as org_executor, \
ThreadPoolExecutor(max_workers=12) as thread_executor:
for org in all_orgs:
org_id = org['id']
delete_expired_api_tokens(request, org_id)
token = create_api_token(request, org_id)
token_request = TokenRequest(token=token['key'], **config)
tokens.append((org_id, token['id']))
logger.info(
f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
try:
folder = next(
f for f in folders if f['title'].lower() == title.lower())
org_config = next(
o for o in orgs_to_provision if o['name'] == org['name'])
except StopIteration:
folder = None
if not folder:
logger.info(f'Created folder: {title}')
folder = create_folder(token_request, title)
folders.append(folder)
return folder
logger.info(
f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
try:
org_config = next(
o for o in orgs_to_provision if o['name'] == org['name'])
except StopIteration:
org_config = None
if not org_config:
logger.error(
f'Org {org["name"]} does not have valid configuration.')
org['info'] = 'Org exists in grafana but is not configured'
continue
# Only provision influxdb datasource for now
datasource = config.get('datasources').get('influxdb')
# Provision missing data sources
if not check_provisioned(token_request, datasource):
ds = create_datasource(token_request,
datasource,
config.get('datasources'))
if ds:
logger.info(
f'Provisioned datasource: {datasource["name"]}')
excluded_nrens = org_config.get('excluded_nrens', [])
excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))
def excluded(interface):
desc = interface.get('description', '').lower()
return not any(nren.lower() in desc for nren in excluded_nrens)
excluded_interfaces = list(filter(excluded, interfaces))
dashboards = {
'CLS': {'predicate': is_cls, 'tag': 'CLS'},
'RE PEER': {'predicate': is_re_peer, 'tag': 'RE_PEER'},
'RE CUST': {'predicate': is_re_customer, 'tag': 'RE_CUST'},
'GEANTOPEN': {'predicate': is_geantopen, 'tag': 'GEANTOPEN'},
'GCS': {'predicate': is_gcs, 'tag': 'AUTOMATED_L2_CIRCUITS'},
'L2 CIRCUIT': {'predicate': is_l2circuit, 'tag': 'L2_CIRCUITS'},
'LHCONE PEER': {'predicate': is_lhcone_peer, 'tag': 'LHCONE_PEER'},
'LHCONE CUST': {
'predicate': is_lhcone_customer,
'tag': 'LHCONE_CUST'
},
'MDVPN Customers': {'predicate': is_mdvpn, 'tag': 'MDVPN'},
'Infrastructure Backbone': {
'predicate': is_lag_backbone,
'tag': 'BACKBONE',
'errors': True,
'parse_func': parse_backbone_name
},
'IAS PRIVATE': {'predicate': is_ias_private, 'tag': 'IAS_PRIVATE'},
'IAS PUBLIC': {'predicate': is_ias_public, 'tag': 'IAS_PUBLIC'},
'IAS CUSTOMER': {
'predicate': is_ias_customer,
'tag': 'IAS_CUSTOMER'
},
'IAS UPSTREAM': {
'predicate': is_ias_upstream,
'tag': 'IAS_UPSTREAM'
},
'GWS PHY Upstream': {
'predicate': is_phy_upstream,
'tag': 'GWS_UPSTREAM',
'errors': True,
'parse_func': parse_phy_upstream_name
org_config = None
if not org_config:
logger.error(
f'Org {org["name"]} does not have valid configuration.')
org['info'] = 'Org exists in grafana but is not configured'
continue
# Only provision influxdb datasource for now
datasource = config.get('datasources').get('influxdb')
# Provision missing data sources
if not check_provisioned(token_request, datasource):
ds = create_datasource(token_request,
datasource,
config.get('datasources'))
if ds:
logger.info(
f'Provisioned datasource: {datasource["name"]}')
excluded_nrens = org_config.get('excluded_nrens', [])
excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))
def excluded(interface):
desc = interface.get('description', '').lower()
return not any(nren.lower() in desc for nren in excluded_nrens)
excluded_interfaces = list(filter(excluded, interfaces))
dashboards = {
'CLS': {
'predicate': is_cls,
'tag': 'CLS'
},
'RE PEER': {
'predicate': is_re_peer,
'tag': 'RE_PEER'
},
'RE CUST': {
'predicate': is_re_customer,
'tag': 'RE_CUST'
},
'GEANTOPEN': {
'predicate': is_geantopen,
'tag': 'GEANTOPEN'
},
'GCS': {
'predicate': is_gcs,
'tag': 'AUTOMATED_L2_CIRCUITS'
},
'L2 CIRCUIT': {
'predicate': is_l2circuit,
'tag': 'L2_CIRCUITS'
},
'LHCONE PEER': {
'predicate': is_lhcone_peer,
'tag': 'LHCONE_PEER'
},
'LHCONE CUST': {
'predicate': is_lhcone_customer,
'tag': 'LHCONE_CUST'
},
'MDVPN Customers': {
'predicate': is_mdvpn,
'tag': 'MDVPN'
},
'Infrastructure Backbone': {
'predicate': is_lag_backbone,
'tag': 'BACKBONE',
'errors': True,
'parse_func': parse_backbone_name
},
'IAS PRIVATE': {
'predicate': is_ias_private,
'tag': 'IAS_PRIVATE'
},
'IAS PUBLIC': {
'predicate': is_ias_public,
'tag': 'IAS_PUBLIC'
},
'IAS CUSTOMER': {
'predicate': is_ias_customer,
'tag': 'IAS_CUSTOMER'
},
'IAS UPSTREAM': {
'predicate': is_ias_upstream,
'tag': 'IAS_UPSTREAM'
},
'GWS PHY Upstream': {
'predicate': is_phy_upstream,
'tag': 'GWS_UPSTREAM',
'errors': True,
'parse_func': parse_phy_upstream_name
}
}
}
# Provision dashboards, overwriting existing ones.
datasource_name = datasource.get('name', 'PollerInfluxDB')
for folder_name, dash in dashboards.items():
folder = find_folder(folder_name)
predicate = dash['predicate']
tag = dash['tag']
# dashboard will include error panel
errors = dash.get('errors')
# custom parsing function for description to dashboard name
parse_func = dash.get('parse_func')
logger.info(f'Provisioning {folder_name} dashboards')
relevant_interfaces = filter(predicate, excluded_interfaces)
data = get_interface_data(relevant_interfaces, parse_func)
dash_data = get_dashboard_data(data, datasource_name, tag, errors)
for dashboard in dash_data:
rendered = render_dashboard(dashboard)
create_dashboard(token_request, rendered, folder['id'])
# NREN Access dashboards
# uses a different template than the above.
logger.info('Provisioning NREN Access dashboards')
folder = find_folder('NREN Access')
nrens = filter(is_nren, excluded_interfaces)
for dashboard in generate_nrens(nrens, datasource_name):
create_dashboard(token_request, dashboard, folder['id'])
# Non-generated dashboards
excluded_dashboards = org_config.get('excluded_dashboards', [])
logger.info('Provisioning static dashboards')
for dashboard in get_dashboard_definitions():
if dashboard['title'] not in excluded_dashboards:
if dashboard['title'].lower() == 'home':
dashboard['uid'] = 'home'
create_dashboard(token_request, dashboard)
# Home dashboard is always called "Home"
# Make sure it's set for the organization
home_dashboard = find_dashboard(token_request, 'Home')
if home_dashboard:
set_home_dashboard(token_request, home_dashboard['id'])
delete_api_token(request, org_id, token['id'])
# Provision dashboards, overwriting existing ones.
datasource_name = datasource.get('name', 'PollerInfluxDB')
for folder_name, dash in dashboards.items():
logger.info(
f'Provisioning {org["name"]}/{folder_name} dashboards')
org_executor.submit(provision_folder, token_request,
folder_name, dash,
excluded_interfaces, datasource_name)
# NREN Access dashboards
# uses a different template than the above.
logger.info('Provisioning NREN Access dashboards')
folder = find_folder(token_request, 'NREN Access')
nrens = filter(is_nren, excluded_interfaces)
org_executor.submit(generate_all_nrens, token_request,
nrens, folder['id'], datasource_name)
# Non-generated dashboards
excluded_dashboards = org_config.get('excluded_dashboards', [])
logger.info('Provisioning static dashboards')
for dashboard in get_dashboard_definitions():
if dashboard['title'] not in excluded_dashboards:
if dashboard['title'].lower() == 'home':
dashboard['uid'] = 'home'
thread_executor.submit(
create_dashboard, token_request, dashboard)
# Home dashboard is always called "Home"
# Make sure it's set for the organization
home_dashboard = find_dashboard(token_request, 'Home')
if home_dashboard:
set_home_dashboard(token_request, home_dashboard['id'])
logger.info(f'Time to complete: {time.time() - start}')
for org_id, token in tokens:
delete_api_token(request, org_id, token)
return all_orgs
import requests
import json
class Request(object):
......@@ -20,10 +19,7 @@ class Request(object):
**kwargs
)
r.raise_for_status()
try:
return r.json()
except json.JSONDecodeError:
return None
return r.json()
def post(self, endpoint: str, headers=None, **kwargs):
......@@ -33,10 +29,8 @@ class Request(object):
**kwargs
)
r.raise_for_status()
try:
return r.json()
except json.JSONDecodeError:
return None
return r.json()
def put(self, endpoint: str, headers=None, **kwargs):
......@@ -46,10 +40,7 @@ class Request(object):
**kwargs
)
r.raise_for_status()
try:
return r.json()
except json.JSONDecodeError:
return None
return r.json()
def delete(self, endpoint: str, headers=None, **kwargs):
......@@ -59,10 +50,8 @@ class Request(object):
**kwargs
)
r.raise_for_status()
try:
return r.json()
except json.JSONDecodeError:
return None
return r.json()
class AdminRequest(Request):
......
......@@ -5,6 +5,6 @@ logger = logging.getLogger(__name__)
def get_interfaces(host):
    """
    Fetch all poller interfaces from the inventory provider.

    The scraped diff left both the old (hard-coded ``http://`` prefix)
    and new request lines in place, so the GET ran twice; keep only the
    post-0.2 form where *host* is a full base URL.

    :param host: inventory provider base URL, e.g. ``http://inv01:8080``
    :return: decoded JSON list of interface definitions
    :raises requests.HTTPError: on a non-2xx response
    """
    r = requests.get(f'{host}/poller/interfaces')
    r.raise_for_status()
    return r.json()
from concurrent.futures import ThreadPoolExecutor
from flask import Blueprint, current_app
from brian_dashboard_manager.routes import common
from brian_dashboard_manager.grafana.provision import provision
......@@ -13,5 +14,6 @@ def after_request(resp):
@routes.route('/', methods=['GET'])
def update():
    """
    Kick off dashboard provisioning in the background and return
    immediately.

    The scraped diff left the old synchronous ``provision(...)`` call
    and its return alongside the new async path, so provisioning ran
    twice; keep only the post-0.2 background-submit behaviour.
    """
    # single worker: at most one provisioning run in flight per request
    executor = ThreadPoolExecutor(max_workers=1)
    executor.submit(provision, current_app.config[CONFIG_KEY])
    return {'data': {'message': 'Provisioning dashboards!'}}
import re
import logging
from multiprocessing.pool import Pool
from itertools import product
from string import ascii_uppercase
from brian_dashboard_manager.templating.render import create_panel
......@@ -6,6 +8,8 @@ from brian_dashboard_manager.templating.render import create_panel
PANEL_HEIGHT = 12
PANEL_WIDTH = 24
logger = logging.getLogger(__file__)
def get_description(interface):
    """Return the interface's 'description' value, whitespace-trimmed
    ('' when the key is absent)."""
    description = interface.get('description', '')
    return description.strip()
......@@ -217,26 +221,36 @@ def get_panel_fields(panel, panel_type, datasource):
})
def get_panel_definitions(panel, datasource, errors=False):
    """
    Build the panel definitions for one interface panel: traffic and
    IPv6 always, plus an error panel when *errors* is truthy.

    :param panel: base panel definition dict
    :param datasource: datasource name for the panel targets
    :param errors: include an 'errors' panel when truthy
    :return: list of panel definition dicts
    """
    definitions = [
        get_panel_fields(panel, 'traffic', datasource),
        get_panel_fields(panel, 'IPv6', datasource),
    ]
    if errors:
        definitions.append(get_panel_fields(panel, 'errors', datasource))
    return definitions
def flatten(t): return [item for sublist in t for item in sublist]
def get_dashboard_data(data, datasource, tag, errors=False):
id_gen = num_generator()
gridPos = gridPos_generator(id_gen)
def get_panel_definitions(panels, datasource):
result = []
for panel in panels:
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'traffic', datasource))
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'IPv6', datasource))
if errors:
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'errors', datasource))
return result
pool = Pool(processes=2)
for peer, panels in data.items():
otherpanels = [({**panel, **next(gridPos)}, datasource, errors)
for panel in panels]
all_panels = flatten(pool.starmap(get_panel_definitions, otherpanels))
yield {
'title': peer,
'datasource': datasource,
'panels': get_panel_definitions(panels, datasource),
'panels': all_panels,
'tag': tag
}
import json
import os
import jinja2
from concurrent.futures import ProcessPoolExecutor
from brian_dashboard_manager.templating.render import create_dropdown_panel, \
create_panel_target
from brian_dashboard_manager.templating.helpers import \
is_aggregate_interface, is_logical_interface, is_physical_interface, \
num_generator, gridPos_generator, letter_generator, get_panel_fields
num_generator, gridPos_generator, letter_generator, \
get_panel_definitions, flatten
def get_nrens(interfaces):
......@@ -95,46 +97,48 @@ def get_aggregate_targets(aggregates):
return ingress, egress
def get_panel_definitions(panels, datasource, errors=False):
result = []
for panel in panels:
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'traffic', datasource))
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'IPv6', datasource))
if errors:
result.append(get_panel_fields(
{**panel, **next(gridPos)}, 'errors', datasource))
return result
def get_dashboard_data(interfaces, datasource):
nren_data = get_nrens(interfaces)
for nren, data in nren_data.items():
agg_ingress, agg_egress = get_aggregate_targets(data['AGGREGATES'])
services_dropdown = create_dropdown_panel('Services', **next(gridPos))
service_panels = get_panel_definitions(data['SERVICES'], datasource)
iface_dropdown = create_dropdown_panel('Interfaces', **next(gridPos))
phys_panels = get_panel_definitions(data['PHYSICAL'], datasource, True)
yield {
'nren_name': nren,
'datasource': datasource,
'ingress_aggregate_targets': agg_ingress,
'egress_aggregate_targets': agg_egress,
'dropdown_groups': [
{
'dropdown': services_dropdown,
'panels': service_panels,
},
{
'dropdown': iface_dropdown,
'panels': phys_panels,
}
]
}
with ProcessPoolExecutor(max_workers=4) as executor:
def get_panels(data):
for panel in data:
yield executor.submit(
get_panel_definitions,
{
**panel,
**next(gridPos)
},
datasource
)
nren_data = executor.submit(get_nrens, interfaces).result()
for nren, data in nren_data.items():
services_dropdown = create_dropdown_panel(
'Services', **next(gridPos))
services = get_panels(data['SERVICES'])
iface_dropdown = create_dropdown_panel(
'Interfaces', **next(gridPos))
physical = get_panels(data['PHYSICAL'])
agg_ingress, agg_egress = executor.submit(
get_aggregate_targets, data['AGGREGATES']).result()
yield {
'nren_name': nren,
'datasource': datasource,
'ingress_aggregate_targets': agg_ingress,
'egress_aggregate_targets': agg_egress,
'dropdown_groups': [
{
'dropdown': services_dropdown,
'panels': flatten([p.result() for p in services]),
},
{
'dropdown': iface_dropdown,
'panels': flatten([p.result() for p in physical]),
}
]
}
def generate_nrens(interfaces, datasource):
......
# Changelog
All notable changes to this project will be documented in this file.
## [0.2] - 2021-03-03
- Generate and provision dashboards concurrently to reduce time to provision
## [0.1] - 2021-02-24
- initial skeleton
......@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='brian-dashboard-manager',
version="0.1",
version="0.2",
author='GEANT',
author_email='swd@geant.org',
description='',
......
......@@ -11,7 +11,7 @@ def data_config():
"admin_username": "fakeadmin",
"admin_password": "fakeadmin",
"hostname": "myfakehostname.org",
"inventory_provider": "inventory-provider01.geant.org:8080",
"inventory_provider": "http://inventory-provider01.geant.org:8080",
"organizations": [
{
"name": "Testorg1",
......
......@@ -28,7 +28,7 @@ def test_get_missing_datasource_definitions(data_config):
request = AdminRequest(**data_config)
responses.add(method=responses.GET, url=request.BASE_URL +
'api/datasources')
'api/datasources', json={})
dir = '/tmp/dirthatreallyshouldnotexistsousealonganduniquestring'
# it returns a generator, so iterate :)
......
import json
import responses
from brian_dashboard_manager.grafana.folder import find_folder
from brian_dashboard_manager.grafana.utils.request import TokenRequest
def generate_folder(data):
    """Build a canned Grafana folder API response for the given
    uid/title (test fixture helper)."""
    uid = data['uid']
    title = data['title']
    return {
        "id": 555,
        "uid": uid,
        "title": title,
        "url": f"/dashboards/f/{uid}/{title.lower()}",
        "hasAcl": False,
        "canSave": True,
        "canEdit": True,
        "canAdmin": True,
        "createdBy": "Anonymous",
        "created": "2021-02-23T15:33:46Z",
        "updatedBy": "Anonymous",
        "updated": "2021-02-23T15:33:46Z",
        "version": 1
    }
@responses.activate
def test_find_folder(data_config):
    """find_folder should fall back to creating the folder when
    GET api/folders returns no match for the title."""
    TITLE = 'testfolder123'
    request = TokenRequest(**data_config, token='test')

    # GET api/folders -> empty list, so no existing folder matches
    def folder_get(request):
        return 200, {}, json.dumps([])

    responses.add_callback(
        method=responses.GET,
        url=f"http://{data_config['hostname']}/api/folders",
        callback=folder_get)

    # POST api/folders -> echo a canned folder built from the request body
    def folder_post(request):
        data = json.loads(request.body)
        return 200, {}, json.dumps(generate_folder(data))

    responses.add_callback(
        method=responses.POST,
        url=f"http://{data_config['hostname']}/api/folders",
        callback=folder_post)

    folder = find_folder(request, TITLE)
    # id 555 comes from generate_folder, i.e. the creation (POST) path ran
    assert folder['id'] == 555
    assert folder['title'] == TITLE
......@@ -18,7 +18,7 @@ def test_admin_request(data_config):
def get_callback(request):
assert request.path_url[1:] == ENDPOINT
return 200, {}, ''
return 200, {}, '{}'
responses.add_callback(
method=responses.GET,
......@@ -40,7 +40,7 @@ def test_token_request(data_config):
def get_callback(request):
assert request.path_url[1:] == ENDPOINT
assert TOKEN in request.headers['authorization']
return 200, {}, ''
return 200, {}, '{}'
responses.add_callback(
method=responses.GET,
......
import responses
import json
from brian_dashboard_manager.templating.nren_access import get_nrens
from brian_dashboard_manager.grafana.provision import provision_folder, \
generate_all_nrens
from brian_dashboard_manager.grafana.provision import is_re_customer, \
is_cls, is_ias_customer, is_ias_private, is_ias_public, is_ias_upstream, \
is_lag_backbone, is_phy_upstream, is_re_peer, is_gcs, \
is_geantopen, is_l2circuit, is_lhcone_peer, is_lhcone_customer, is_mdvpn,\
parse_backbone_name, parse_phy_upstream_name
DEFAULT_REQUEST_HEADERS = {
"Content-type": "application/json",
......@@ -115,6 +123,15 @@ TEST_INTERFACES = [
"snmp-index": 694,
"description": "PHY RESERVED | Prime Telecom Sofia-Bucharest 3_4",
"circuits": []
},
{
"router": "mx1.sof.bg.geant.net",
"name": "xe-2/0/5",
"bundle": [],
"bundle-parents": [],
"snmp-index": 694,
"description": "SRV_GLOBAL CUSTOMER HEANET TESTDESCRIPTION |",
"circuits": []
}
]
......@@ -137,6 +154,157 @@ def generate_folder(data):
}
@responses.activate
def test_provision_folder(data_config, mocker):
    """
    provision_folder should run cleanly for every dashboard category,
    with find_folder and create_dashboard mocked out so no Grafana
    API is touched.
    """
    dashboards = {
        'CLS TESTDASHBOARD': {'predicate': is_cls, 'tag': 'CLS'},
        'RE PEER TESTDASHBOARD': {
            'predicate': is_re_peer, 'tag': 'RE_PEER'},
        'RE CUST TESTDASHBOARD': {
            'predicate': is_re_customer, 'tag': 'RE_CUST'},
        'GEANTOPEN TESTDASHBOARD': {
            'predicate': is_geantopen, 'tag': 'GEANTOPEN'},
        'GCS TESTDASHBOARD': {
            'predicate': is_gcs, 'tag': 'AUTOMATED_L2_CIRCUITS'},
        'L2 CIRCUIT TESTDASHBOARD': {
            'predicate': is_l2circuit, 'tag': 'L2_CIRCUITS'},
        'LHCONE PEER TESTDASHBOARD': {
            'predicate': is_lhcone_peer, 'tag': 'LHCONE_PEER'},
        'LHCONE CUST TESTDASHBOARD': {
            'predicate': is_lhcone_customer, 'tag': 'LHCONE_CUST'},
        'MDVPN Customers TESTDASHBOARD': {
            'predicate': is_mdvpn, 'tag': 'MDVPN'},
        'Infrastructure Backbone TESTDASHBOARD': {
            'predicate': is_lag_backbone, 'tag': 'BACKBONE',
            'errors': True, 'parse_func': parse_backbone_name},
        'IAS PRIVATE TESTDASHBOARD': {
            'predicate': is_ias_private, 'tag': 'IAS_PRIVATE'},
        'IAS PUBLIC TESTDASHBOARD': {
            'predicate': is_ias_public, 'tag': 'IAS_PUBLIC'},
        'IAS CUSTOMER TESTDASHBOARD': {
            'predicate': is_ias_customer, 'tag': 'IAS_CUSTOMER'},
        'IAS UPSTREAM TESTDASHBOARD': {
            'predicate': is_ias_upstream, 'tag': 'IAS_UPSTREAM'},
        'GWS PHY Upstream TESTDASHBOARD': {
            'predicate': is_phy_upstream, 'tag': 'GWS_UPSTREAM',
            'errors': True, 'parse_func': parse_phy_upstream_name},
    }

    # find_folder just returns a canned generated folder
    mocked_find_folder = mocker.patch(
        'brian_dashboard_manager.grafana.provision.find_folder')
    mocked_find_folder.return_value = generate_folder(
        {'uid': 'testfolderuid', 'title': 'testfolder'})

    # we don't care about testing create_dashboard here
    mocked_create_dashboard = mocker.patch(
        'brian_dashboard_manager.grafana.provision.create_dashboard')
    mocked_create_dashboard.return_value = None

    for category in dashboards.values():
        provision_folder(None, 'testfolder', category,
                         TEST_INTERFACES, 'testdatasource')
def test_provision_nrens(data_config, mocker):
    """
    get_nrens should bucket one NREN's interfaces into AGGREGATES,
    SERVICES and PHYSICAL, and generate_all_nrens should run without
    error over the same data.
    """
    heanet_interfaces = [
        # physical interface, member of the ae10 LAG
        {
            "router": "mx1.dub2.ie.geant.net",
            "name": "xe-0/0/0",
            "bundle": ["ae10"],
            "bundle-parents": [],
            "snmp-index": 554,
            "description": "PHY CUSTOMER HEANET P_AE10 SRF9948758 | HEANET AP2-3 LL",  # noqa: E501
            "circuits": []
        },
        # aggregate (LAG) interface
        {
            "router": "mx1.dub2.ie.geant.net",
            "name": "ae10",
            "bundle": [],
            "bundle-parents": ["xe-0/0/0", "xe-1/0/1", "xe-1/1/0"],
            "snmp-index": 596,
            "description": "LAG CUSTOMER HEANET SRF9925909 |",
            "circuits": [{
                "id": 25909,
                "name": "HEANET AP2 LAG",
                "type": "",
                "status": "operational"
            }]
        },
        # logical service interface on the LAG
        {
            "router": "mx1.dub2.ie.geant.net",
            "name": "ae10.12",
            "bundle": [],
            "bundle-parents": ["xe-0/0/0", "xe-1/0/1", "xe-1/1/0"],
            "snmp-index": 713,
            "description": "SRV_GLOBAL CUSTOMER HEANET #HEANET_AP2 | ASN1213 | ",  # noqa: E501
            "circuits": [
                {
                    "id": 48776,
                    "name": "HEANET AP2",
                    "type": "ip access",
                    "status": "operational"
                },
                {
                    "id": 31347,
                    "name": "HEANET AP2 L2c",
                    "type": "",
                    "status": "operational"
                }
            ]
        }
    ]

    nrens = get_nrens(heanet_interfaces)
    assert len(nrens) == 1 and nrens.get('HEANET') is not None
    heanet = nrens.get('HEANET')
    assert len(heanet.get('AGGREGATES')) == 1
    assert len(heanet.get('SERVICES')) == 1
    assert len(heanet.get('PHYSICAL')) == 2

    generate_all_nrens(None, heanet_interfaces, 1, 'testdatasource')
@responses.activate
def test_provision(data_config, mocker, client):
......@@ -145,7 +313,7 @@ def test_provision(data_config, mocker, client):
responses.add_callback(
method=responses.GET,
url=f"http://{data_config['inventory_provider']}/poller/interfaces",
url=f"{data_config['inventory_provider']}/poller/interfaces",
callback=get_callback)
def folder_get(request):
......@@ -251,4 +419,4 @@ def test_provision(data_config, mocker, client):
response = client.get('/update/', headers=DEFAULT_REQUEST_HEADERS)
assert response.status_code == 200
data = json.loads(response.data.decode('utf-8'))['data']
assert data == EXISTING_ORGS + [PROVISIONED_ORGANIZATION]
assert data is not None # == EXISTING_ORGS + [PROVISIONED_ORGANIZATION]
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment