Commit ab351f7d authored by Erik Reid

Finished feature POL1-465-eumetsat-multicast-dashboards.

parents 112a625e f485c8eb
Showing with 803 additions and 501 deletions
@@ -7,9 +7,8 @@ import logging
import time
import json
import datetime
from concurrent.futures import Future
from concurrent.futures import ThreadPoolExecutor
from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS, STATE_PATH
from brian_dashboard_manager.grafana.utils.request import AdminRequest, \
TokenRequest
@@ -24,7 +23,8 @@ from brian_dashboard_manager.grafana.datasource import \
from brian_dashboard_manager.grafana.folder import find_folder, \
delete_folder, get_folders
from brian_dashboard_manager.inventory_provider.interfaces import \
get_gws_direct, get_gws_indirect, get_interfaces, \
get_eumetsat_multicast_subscriptions
from brian_dashboard_manager.templating.helpers import \
get_aggregate_dashboard_data, get_interface_data, \
@@ -33,11 +33,150 @@ from brian_dashboard_manager.templating.helpers import \
from brian_dashboard_manager.templating.gws import generate_gws, \
generate_indirect
from brian_dashboard_manager.templating.eumetsat \
import generate_eumetsat_multicast
from brian_dashboard_manager.templating.render import render_dashboard
logger = logging.getLogger(__name__)
DASHBOARD_CHANGES = None # will be an instance of DashboardChanges
MAX_WORKERS = 1
DASHBOARDS = {
'NREN': {
'tag': ['customers'],
'folder_name': 'NREN Access',
'interfaces': []
},
'CLS': {
'tag': 'CLS',
'folder_name': 'CLS',
'interfaces': []
},
'RE_PEER': {
'tag': 'RE_PEER',
'folder_name': 'RE Peer',
'interfaces': []
},
'RE_CUST': {
'tag': 'RE_CUST',
'folder_name': 'RE Customer',
'interfaces': []
},
'GEANTOPEN': {
'tag': 'GEANTOPEN',
'folder_name': 'GEANTOPEN',
'interfaces': []
},
'GCS': {
'tag': 'AUTOMATED_L2_CIRCUITS',
'folder_name': 'GCS',
'interfaces': []
},
'L2_CIRCUIT': {
'tag': 'L2_CIRCUITS',
'folder_name': 'L2 Circuit',
'interfaces': []
},
'LHCONE_PEER': {
'tag': 'LHCONE_PEER',
'folder_name': 'LHCONE Peer',
'interfaces': []
},
'LHCONE_CUST': {
'tag': 'LHCONE_CUST',
'folder_name': 'LHCONE Customer',
'interfaces': []
},
'MDVPN_CUSTOMERS': {
'tag': 'MDVPN',
'folder_name': 'MDVPN Customers',
'interfaces': []
},
'INFRASTRUCTURE_BACKBONE': {
'tag': 'BACKBONE',
'errors': True,
'folder_name': 'Infrastructure Backbone',
'interfaces': []
},
'IAS_PRIVATE': {
'tag': 'IAS_PRIVATE',
'folder_name': 'IAS Private',
'interfaces': []
},
'IAS_PUBLIC': {
'tag': 'IAS_PUBLIC',
'folder_name': 'IAS Public',
'interfaces': []
},
'IAS_CUSTOMER': {
'tag': 'IAS_CUSTOMER',
'folder_name': 'IAS Customer',
'interfaces': []
},
'IAS_UPSTREAM': {
'tag': ['IAS_UPSTREAM', 'UPSTREAM'],
'folder_name': 'IAS Upstream',
'interfaces': []
},
'GWS_PHY_UPSTREAM': {
'tag': ['GWS_UPSTREAM', 'UPSTREAM'],
'errors': True,
'folder_name': 'GWS PHY Upstream',
'interfaces': []
}
}
AGG_DASHBOARDS = {
'CLS_PEERS': {
'tag': 'cls_peers',
'dashboard_name': 'CLS Peers',
'interfaces': []
},
'IAS_PEERS': {
'tag': 'ias_peers',
'dashboard_name': 'IAS Peers',
'interfaces': []
},
'IAS_UPSTREAM': {
'tag': 'gws_upstreams',
'dashboard_name': 'GWS Upstreams',
'interfaces': []
},
'LHCONE': {
'tag': 'lhcone',
'dashboard_name': 'LHCONE',
'interfaces': []
},
'CAE1': {
'tag': 'cae',
'dashboard_name': 'CAE1',
'interfaces': []
}
}
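# note: the 'interfaces' lists in DASHBOARDS and AGG_DASHBOARDS above start
# out empty and are filled in by _provision_interfaces() below, which buckets
# every relevant interface into the dashboard groups named in its
# 'dashboards' attribute.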
class DashboardChanges(object):
def __init__(self, token):
# Map of dashboard UID -> whether it has been updated.
# This is used to remove stale dashboards at the end.
all_dashboards = find_dashboard(token) or []
self.updated = {d['uid']: False for d in all_dashboards}
def update_dash_list(self, dashboards):
for dashboard in dashboards:
if isinstance(dashboard, Future):
dashboard = dashboard.result()
if dashboard is None:
continue
self.updated[dashboard.get('uid')] = True
def delete_untouched(self, token):
for uid, provisioned in self.updated.items():
if not provisioned:
logger.info(f'Deleting stale dashboard with UID {uid}')
delete_dashboard(token, {'uid': uid})
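# Sketch of the intended lifecycle, mirroring how provision() below uses it:
# one DashboardChanges instance is created per organization token, each
# _provision_* helper passes the dashboards (or Futures) it created to
# update_dash_list(), and delete_untouched() finally removes any dashboard
# that was not re-provisioned during this run.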
def provision_folder(token_request, folder_name, dash,
ds_name, excluded_dashboards):
@@ -61,7 +200,11 @@ def provision_folder(token_request, folder_name, dash,
dash_data = get_nren_dashboard_data(data, ds_name, tag)
else:
data = get_interface_data(interfaces)
dash_data = get_dashboard_data(
data=data,
datasource=ds_name,
tag=tag,
errors=errors)
if not isinstance(excluded_dashboards, list):
excluded_dashboards = []
@@ -70,7 +213,7 @@ def provision_folder(token_request, folder_name, dash,
provisioned = []
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
for dashboard in dash_data:
rendered = render_dashboard(dashboard, nren=is_nren)
if rendered.get('title').lower() in excluded_dashboards:
@@ -119,13 +262,266 @@ def provision_maybe(config):
write_timestamp(now.timestamp(), False)
def is_excluded_folder(org_config, folder_name):
excluded_folders = org_config.get('excluded_folders', {})
excluded = excluded_folders.get(folder_name, False)
# boolean True means entire folder excluded
# if list, it is specific dashboard names not to provision
# so is handled at provision time.
return isinstance(excluded, bool) and excluded
def excluded_folder_dashboards(org_config, folder_name):
excluded_folders = org_config.get('excluded_folders', {})
excluded = excluded_folders.get(folder_name, [])
return excluded if isinstance(excluded, list) else []
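# Illustrative excluded_folders shape these two helpers expect (the folder
# and dashboard names here are only examples):
#   'excluded_folders': {
#       'NREN Access': True,                       # drop the whole folder
#       'Aggregates': ['CLS Peers', 'IAS Peers']   # drop only these dashboards
#   }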
def _provision_interfaces(config, org_config, ds_name, token):
# Provision dashboards, overwriting existing ones.
interfaces = get_interfaces(config['inventory_provider'])
excluded_nrens = org_config['excluded_nrens']
def excluded(interface):
desc = interface['description'].lower()
lab = 'lab.office' in interface['router'].lower()
to_exclude = any(nren.lower() in desc for nren in excluded_nrens)
return not (to_exclude or lab)
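# note: despite its name, excluded() is used as a keep-predicate below; it
# returns True for interfaces that should be provisioned (not on a lab
# router and not belonging to an excluded NREN).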
relevant_interfaces = list(filter(excluded, interfaces))
# loop over interfaces and add them to the dashboard_name
# -> folder mapping structure `dashboards` above, for convenience.
for iface in relevant_interfaces:
for dash_name in iface['dashboards']:
# add interface to matched dashboard
if dash_name in DASHBOARDS:
ifaces = DASHBOARDS[dash_name]['interfaces']
ifaces.append(iface)
# add to matched aggregate dashboard
if dash_name in AGG_DASHBOARDS:
ifaces = AGG_DASHBOARDS[dash_name]['interfaces']
ifaces.append(iface)
# provision dashboards and their folders
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
provisioned = []
for folder in DASHBOARDS.values():
folder_name = folder['folder_name']
# boolean True means entire folder excluded
# if list, it is specific dashboard names not to provision
# so is handled at provision time.
if is_excluded_folder(org_config, folder_name):
executor.submit(
delete_folder, token, title=folder_name)
continue
logger.info(
f'Provisioning {org_config["name"]}/{folder_name} dashboards')
res = executor.submit(
provision_folder, token,
folder_name, folder, ds_name,
excluded_folder_dashboards(org_config, folder_name))
provisioned.append(res)
for result in provisioned:
folder = result.result()
if folder is None:
continue
DASHBOARD_CHANGES.update_dash_list(folder)
def _provision_gws_indirect(config, org_config, ds_name, token):
# fetch GWS indirect data and provision related dashboards
logger.info('Provisioning GWS Indirect dashboards')
folder_name = 'GWS Indirect'
if is_excluded_folder(org_config, folder_name):
# don't provision GWS Indirect folder
delete_folder(token, title=folder_name)
else:
folder = find_folder(token, folder_name)
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
gws_indirect_data = get_gws_indirect(
config['inventory_provider'])
provisioned = []
dashes = generate_indirect(gws_indirect_data, ds_name)
for dashboard in dashes:
rendered = render_dashboard(dashboard)
provisioned.append(executor.submit(create_dashboard,
token,
rendered, folder['id']))
DASHBOARD_CHANGES.update_dash_list(provisioned)
def _provision_gws_direct(config, org_config, ds_name, token):
# fetch GWS direct data and provision related dashboards
logger.info('Provisioning GWS Direct dashboards')
folder_name = 'GWS Direct'
if is_excluded_folder(org_config, folder_name):
# don't provision GWS Direct folder
delete_folder(token, title=folder_name)
else:
folder = find_folder(token, folder_name)
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
gws_data = get_gws_direct(config['inventory_provider'])
provisioned = []
for dashboard in generate_gws(gws_data, ds_name):
rendered = render_dashboard(dashboard)
provisioned.append(executor.submit(create_dashboard,
token,
rendered, folder['id']))
DASHBOARD_CHANGES.update_dash_list(provisioned)
def _provision_eumetsat_multicast(config, org_config, ds_name, token):
# fetch EUMETSAT multicast subscriptions and provision related dashboards
logger.info('Provisioning EUMETSAT Multicast dashboards')
folder_name = 'EUMETSAT Multicast'
if is_excluded_folder(org_config, folder_name):
# don't provision EUMETSAT Multicast folder
delete_folder(token, title=folder_name)
else:
folder = find_folder(token, folder_name)
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
subscriptions = get_eumetsat_multicast_subscriptions(
config['inventory_provider'])
provisioned = []
for dashboard in generate_eumetsat_multicast(
subscriptions, ds_name):
rendered = render_dashboard(dashboard)
provisioned.append(
executor.submit(
create_dashboard,
token,
rendered,
folder['id']))
DASHBOARD_CHANGES.update_dash_list(provisioned)
def _provision_aggregates(config, org_config, ds_name, token):
if is_excluded_folder(org_config, 'Aggregates'):
# don't provision aggregate folder
delete_folder(token, title='Aggregates')
else:
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
provisioned = []
agg_folder = find_folder(token, 'Aggregates')
for dash in AGG_DASHBOARDS.values():
excluded_dashboards = excluded_folder_dashboards(
org_config, 'Aggregates')
if dash['dashboard_name'] in excluded_dashboards:
dash_name = {
'title': f'Aggregate - {dash["dashboard_name"]}'}
executor.submit(delete_dashboard,
token, dash_name,
agg_folder['id'])
continue
logger.info(f'Provisioning {org_config["name"]}' +
f'/Aggregate {dash["dashboard_name"]} dashboards')
res = executor.submit(
provision_aggregate, token,
agg_folder, dash, ds_name)
provisioned.append(res)
DASHBOARD_CHANGES.update_dash_list(provisioned)
def _provision_static_dashboards(config, org_config, ds_name, token):
# Statically defined dashboards from json files
excluded_dashboards = org_config.get('excluded_dashboards', [])
logger.info('Provisioning static dashboards')
for dashboard in get_dashboard_definitions():
if dashboard['title'] not in excluded_dashboards:
res = create_dashboard(token, dashboard)
if res:
DASHBOARD_CHANGES.updated[res.get('uid')] = True
else:
delete_dashboard(token, dashboard)
# Home dashboard is always called "Home"
# Make sure it's set for the organization
logger.info('Configuring Home dashboard')
set_home_dashboard(token, org_config['name'] == 'GÉANT Staff')
# just hardcode that we updated home dashboard
DASHBOARD_CHANGES.updated['home'] = True
def _set_ignored_folders_as_updated(config, org_config, token):
# get dashboard UIDs from ignored folders
# and make sure we don't touch them
ignored_folders = config.get('ignored_folders', [])
for name in ignored_folders:
logger.info(
'Ignoring dashboards under '
f'the folder {org_config["name"]}/{name}')
folder = find_folder(token, name, create=False)
if folder is None:
continue
to_ignore = find_dashboard(token, folder_id=folder['id'])
if to_ignore is None:
continue
for dash in to_ignore:
# mark it updated, so we don't modify it.
DASHBOARD_CHANGES.updated[dash['uid']] = True
def _delete_unknown_folders(config, token):
all_folders = get_folders(token)
folders_to_keep = [
# General is a base folder present in Grafana
'General',
# other folders, created outside of the DASHBOARDS list
'GWS Indirect',
'GWS Direct',
'Aggregates',
'EUMETSAT Multicast'
]
folders_to_keep.extend([dash['folder_name']
for dash in DASHBOARDS.values()])
ignored_folders = config.get('ignored_folders', [])
folders_to_keep.extend(ignored_folders)
folders_to_keep = set(folders_to_keep) # de-dupe
for folder in all_folders:
if folder['title'] in folders_to_keep:
continue
delete_folder(token, uid=folder['uid'])
def _provision_datasource(config, token):
# Only provision influxdb datasource for now
datasource = config.get('datasources').get('influxdb')
# Provision missing data sources
if not check_provisioned(token, datasource):
ds = create_datasource(token,
datasource,
config.get('datasources'))
if ds:
logger.info(
f'Provisioned datasource: {datasource["name"]}')
return datasource
def _provision_orgs(config):
request = AdminRequest(**config)
all_orgs = get_organizations(request)
orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
missing = (org['name'] for org in orgs_to_provision
if org['name'] not in [org['name'] for org in all_orgs])
@@ -134,10 +530,28 @@ def provision(config):
org_data = create_organization(request, org_name)
all_orgs.append(org_data)
return all_orgs
def provision(config):
global DASHBOARD_CHANGES
start = time.time()
tokens = []
all_orgs = _provision_orgs(config)
request = AdminRequest(**config)
def _find_org_config(org):
orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
try:
return next(
o for o in orgs_to_provision if o['name'] == org['name'])
except StopIteration:
logger.error(
f'Org {org["name"]} does not have valid configuration.')
org['info'] = 'Org exists in grafana but is not configured'
return None
for org in all_orgs:
org_id = org['id']
@@ -146,346 +560,36 @@ def provision(config):
token_request = TokenRequest(token=token['key'], **config)
tokens.append((org_id, token['id']))
DASHBOARD_CHANGES = DashboardChanges(token_request)
logger.info(
f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
org_config = _find_org_config(org)
if not org_config:
# message logged from _find_org_config
continue
datasource = _provision_datasource(config, token_request)
ds_name = datasource.get('name', 'PollerInfluxDB')
_provision_interfaces(config, org_config, ds_name, token_request)
_provision_gws_indirect(config, org_config, ds_name, token_request)
_provision_gws_direct(config, org_config, ds_name, token_request)
_provision_eumetsat_multicast(
config, org_config, ds_name, token_request)
_provision_aggregates(config, org_config, ds_name, token_request)
_provision_static_dashboards(
config, org_config, ds_name, token_request)
_set_ignored_folders_as_updated(config, org_config, token_request)
DASHBOARD_CHANGES.delete_untouched(token_request)
_delete_unknown_folders(config, token_request)
folders_to_keep = ['General', 'GWS Indirect',
'GWS Direct', 'Aggregates']
folders_to_keep.extend([dash['folder_name']
for dash in dashboards.values()])
def update_dash_list(dashboards):
for dashboard in dashboards:
if isinstance(dashboard, Future):
dashboard = dashboard.result()
if dashboard is None:
continue
updated[dashboard.get('uid')] = True
# loop over interfaces and add them to the dashboard_name
# -> folder mapping structure `dashboards` above, for convenience.
for iface in relevant_interfaces:
for dash_name in iface['dashboards']:
# add interface to matched dashboard
if dash_name in dashboards:
ifaces = dashboards[dash_name]['interfaces']
ifaces.append(iface)
# add to matched aggregate dashboard
if dash_name in agg_dashboards:
ifaces = agg_dashboards[dash_name]['interfaces']
ifaces.append(iface)
# provision dashboards and their folders
with ProcessPoolExecutor(max_workers=4) as executor:
provisioned = []
for folder in dashboards.values():
folder_name = folder['folder_name']
exclude = excluded_folders.get(folder_name)
# boolean True means entire folder excluded
# if list, it is specific dashboard names not to provision
# so is handled at provision time.
if exclude:
if isinstance(exclude, bool):
executor.submit(
delete_folder, token_request, folder_name)
continue
logger.info(
f'Provisioning {org["name"]}/{folder_name} dashboards')
res = executor.submit(provision_folder, token_request,
folder_name, folder, ds_name,
exclude)
provisioned.append(res)
for result in provisioned:
folder = result.result()
if folder is None:
continue
update_dash_list(folder)
# fetch GWS direct data and provision related dashboards
logger.info('Provisioning GWS Indirect dashboards')
folder_name = 'GWS Indirect'
exclude_indirect = excluded_folders.get(folder_name, False)
if isinstance(exclude_indirect, bool) and exclude_indirect:
# don't provision GWS Direct folder
delete_folder(token_request, folder_name)
else:
folder = find_folder(token_request, folder_name)
with ThreadPoolExecutor(max_workers=4) as executor:
gws_indirect_data = get_gws_indirect(
config['inventory_provider'])
provisioned = []
dashes = generate_indirect(gws_indirect_data, ds_name)
for dashboard in dashes:
rendered = render_dashboard(dashboard)
provisioned.append(executor.submit(create_dashboard,
token_request,
rendered, folder['id']))
update_dash_list(provisioned)
# fetch GWS direct data and provision related dashboards
logger.info('Provisioning GWS Direct dashboards')
folder_name = 'GWS Direct'
exclude_gws = excluded_folders.get(folder_name, False)
if isinstance(exclude_gws, bool) and exclude_gws:
# don't provision GWS Direct folder
delete_folder(token_request, folder_name)
else:
folder = find_folder(token_request, folder_name)
with ThreadPoolExecutor(max_workers=4) as executor:
gws_data = get_gws_direct(config['inventory_provider'])
provisioned = []
for dashboard in generate_gws(gws_data, ds_name):
rendered = render_dashboard(dashboard)
provisioned.append(executor.submit(create_dashboard,
token_request,
rendered, folder['id']))
update_dash_list(provisioned)
exclude_agg = excluded_folders.get('Aggregates', [])
if isinstance(exclude_agg, bool) and exclude_agg:
# don't provision aggregate folder
delete_folder(token_request, 'Aggregates')
else:
with ProcessPoolExecutor(max_workers=4) as executor:
provisioned = []
agg_folder = find_folder(token_request, 'Aggregates')
for dash in agg_dashboards.values():
if dash['dashboard_name'] in exclude_agg:
dash_name = {
'title': f'Aggregate - {dash["dashboard_name"]}'}
executor.submit(delete_dashboard,
token_request, dash_name,
agg_folder['id'])
continue
logger.info(f'Provisioning {org["name"]}' +
f'/Aggregate {dash["dashboard_name"]} dashboards') # noqa: E501
res = executor.submit(
provision_aggregate, token_request,
agg_folder, dash, ds_name)
provisioned.append(res)
update_dash_list(provisioned)
# Statically defined dashboards from json files
excluded_dashboards = org_config.get('excluded_dashboards', [])
logger.info('Provisioning static dashboards')
for dashboard in get_dashboard_definitions():
if dashboard['title'] not in excluded_dashboards:
res = create_dashboard(token_request, dashboard)
if res:
updated[res.get('uid')] = True
else:
delete_dashboard(token_request, dashboard)
# Home dashboard is always called "Home"
# Make sure it's set for the organization
logger.info('Configuring Home dashboard')
is_staff = org['name'] == 'GÉANT Staff'
set_home_dashboard(token_request, is_staff)
# just hardcode that we updated home dashboard
updated['home'] = True
# get dashboard UIDs from ignored folders
# and make sure we don't touch them
for name in ignored_folders:
folders_to_keep.append(name)
logger.info(
f'Ignoring dashboards under the folder {org["name"]}/{name}')
folder = find_folder(token_request, name, create=False)
if folder is None:
continue
to_ignore = find_dashboard(token_request, folder_id=folder['id'])
if to_ignore is None:
continue
for dash in to_ignore:
# mark it updated, so we don't modify it.
updated[dash['uid']] = True
for dash, provisioned in updated.items():
if not provisioned:
logger.info(f'Deleting stale dashboard with UID {dash}')
delete_dashboard(token_request, {'uid': dash})
all_folders = get_folders(token_request)
folders_to_keep = set(folders_to_keep)
for folder in all_folders:
if folder['title'] not in folders_to_keep:
delete_folder(token_request, uid=folder['uid'])
for org_id, token in tokens:
delete_api_token(request, org_id, token)
logger.info(f'Time to complete: {time.time() - start}')
return all_orgs
@@ -64,3 +64,9 @@ def get_gws_indirect(host):
r.raise_for_status()
interfaces = r.json()
return interfaces
def get_eumetsat_multicast_subscriptions(host):
r = requests.get(f'{host}/poller/eumetsat-multicast')
r.raise_for_status()
return r.json()
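# Each element of the returned list is expected to carry the fields used by
# the EUMETSAT templating code, e.g. (values taken from the test fixtures):
#   {'router': 'mx1.ams.nl.geant.net', 'oid': '1.3.6.1.2.1.83...',
#    'community': '...', 'subscription': '232.223.222.1',
#    'endpoint': '193.17.9.3'}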
@@ -45,7 +45,7 @@
},
"loggers": {
"brian_dashboard_manager": {
"level": "DEBUG",
"handlers": ["console", "syslog_handler"],
"propagate": false
......
import operator
from brian_dashboard_manager.templating.helpers \
import get_dashboard_data, letter_generator, create_panel
def get_panel_data(all_subscriptions):
result = dict()
def _panel(s):
return {
'measurement': 'multicast_rates',
'title': f'{s["subscription"]} on {s["router"]}',
'subscription': s['subscription'],
'hostname': s['router']
}
for subscription in all_subscriptions:
dashboard_name = f'{subscription["router"]} subscriptions'
result.setdefault(dashboard_name, []).append(_panel(subscription))
# make the panels sorted deterministically
for name in result.keys():
result[name] = sorted(
result[name],
key=operator.itemgetter('subscription'))
return result
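# Example of the mapping returned for a single subscription entry
# (a sketch based on the fields set in _panel() above):
#   {'mx1.ams.nl.geant.net subscriptions': [
#       {'measurement': 'multicast_rates',
#        'title': '232.223.222.1 on mx1.ams.nl.geant.net',
#        'subscription': '232.223.222.1',
#        'hostname': 'mx1.ams.nl.geant.net'}]}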
def get_panel_fields(panel, panel_type, datasource):
"""
Helper for generating a single multicast panel
"""
letters = letter_generator()
def get_target_data(alias, field):
return {
# panel includes identifying information
# such as hostname, subscription, etc.
**panel,
'alias': alias,
'refId': next(letters),
'select_field': field
# 'percentile': 'percentile' in alias.lower(),
}
targets = [('Multicast Traffic', 'octets')]
return create_panel({
**panel,
'datasource': datasource,
'linewidth': 1,
'title': panel['title'].format(panel_type),
'panel_targets': [get_target_data(*target) for target in targets],
'y_axis_type': 'bits',
})
def subscription_panel_generator(gridPos):
"""
Generates panels used for multicast traffic dashboards
"""
def get_panel_definitions(panels, datasource, errors=False):
result = []
for panel in panels:
result.append(get_panel_fields({
**panel,
**next(gridPos)
}, 'traffic', datasource))
if panel.get('has_v6', False):
result.append(get_panel_fields({
**panel,
**next(gridPos)
}, 'IPv6', datasource))
if errors:
result.append(get_panel_fields({
**panel,
**next(gridPos)
}, 'errors', datasource))
return result
return get_panel_definitions
def generate_eumetsat_multicast(subscriptions, datasource):
panel_data = get_panel_data(subscriptions)
for dash in get_dashboard_data(
data=panel_data,
datasource=datasource,
tag='EUMET_MULTICAST',
panel_generator=subscription_panel_generator):
yield dash
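# Rough usage sketch, mirroring _provision_eumetsat_multicast() in
# provision.py above (token and folder handling elided):
#   subscriptions = get_eumetsat_multicast_subscriptions(inventory_provider)
#   for dashboard in generate_eumetsat_multicast(subscriptions, ds_name):
#       rendered = render_dashboard(dashboard)
#       create_dashboard(token, rendered, folder['id'])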
@@ -78,11 +78,17 @@ def get_gws_indirect_panel_data(interfaces):
def generate_gws(gws_data, datasource):
panel_data = get_panel_data(gws_data)
for dash in get_dashboard_data(
data=panel_data,
datasource=datasource,
tag='GWS_DIRECT'):
yield dash
def generate_indirect(gws_data, datasource):
panel_data = get_gws_indirect_panel_data(gws_data)
for dash in get_dashboard_data(
data=panel_data,
datasource=datasource,
tag='GWS_INDIRECT'):
yield dash
@@ -275,7 +275,7 @@ def get_panel_fields(panel, panel_type, datasource):
})
def default_interface_panel_generator(gridPos):
"""
Shared wrapper for shorter calls without
gridPos to generate panels.
@@ -318,7 +318,7 @@ def get_nren_dashboard_data(data, datasource, tag):
gridPos = gridPos_generator(id_gen, start=1)
panel_gen = default_interface_panel_generator(gridPos)
if len(dash['AGGREGATES']) > 0:
agg_panels = create_aggregate_panel(
@@ -358,7 +358,10 @@ def get_nren_dashboard_data(data, datasource, tag):
yield result
def get_dashboard_data(
data, datasource, tag,
panel_generator=default_interface_panel_generator,
errors=False):
"""
Generates all panels used in a normal dashboard without aggregate panels
"""
......
@@ -45,37 +45,50 @@
]
],
"tags": [
{% if isp %}
{
"condition": null,
"key": "tag",
"operator": "=",
"value": "{{ interface_tag }}"
},
{
"condition": "AND",
"key": "isp",
"operator": "=",
"value": "{{ isp }}"
}
{% elif subscription %}
{
"condition": null,
"key": "hostname",
"operator": "=",
"value": "{{ hostname }}"
},
{
"condition": "AND",
"key": "subscription",
"operator": "=",
"value": "{{ subscription }}"
}
{% else %}
{
"condition": null,
"key": "hostname",
"operator": "=",
"value": "{{ hostname }}"
},
{
"condition": "AND",
"key": "interface_name",
"operator": "=",
"value": "{{ interface }}"
},
{
"condition": "AND",
"key": "nren",
"operator": "=",
"value": "{{ nren }}"
}
{% endif %}
]
......
import responses
from brian_dashboard_manager.inventory_provider.interfaces import \
get_eumetsat_multicast_subscriptions
from brian_dashboard_manager.templating.eumetsat \
import generate_eumetsat_multicast
TEST_DATA = [
{
'router': 'mx1.ams.nl.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.ams.nl.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.lon.uk.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.lon.uk.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.fra.de.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.fra.de.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
}
]
@responses.activate
def test_eumetsat_subscriptions(data_config, client):
responses.add(
method=responses.GET,
url=f'{data_config["inventory_provider"]}/poller/eumetsat-multicast',
json=TEST_DATA)
subscription_data = {}
for s in TEST_DATA:
subscription_data.setdefault(s['router'], set()).add(s['subscription'])
expected_titles = [
f'{name} subscriptions' for name in subscription_data.keys()]
subscriptions = get_eumetsat_multicast_subscriptions(
data_config['inventory_provider'])
dashboards = list(
generate_eumetsat_multicast(subscriptions, 'testdatasource'))
assert len(dashboards) == len(expected_titles)
assert all(d['title'] in expected_titles for d in dashboards)
# just use 2, instead of something smart, since the data is above
assert all(len(d['panels']) == 2 for d in dashboards)
@@ -16,20 +16,15 @@ def test_get_dashboard(data_config):
method=responses.GET,
url=request.BASE_URL +
f'api/dashboards/uid/{UID}',
callback=lambda f: (404, {}, ''))
data = dashboard._get_dashboard(request, UID)
assert data is None
responses.add(
method=responses.GET,
url=request.BASE_URL + f'api/dashboards/uid/{UID+1}',
json={'uid': 1})
data = dashboard._get_dashboard(request, UID + 1)
assert data['uid'] == 1
@@ -42,24 +37,15 @@ def test_delete_dashboards(data_config):
request = TokenRequest(**data_config, token='test')
responses.add(
method=responses.GET,
url=request.BASE_URL + f'api/dashboards/uid/{UID}',
json=dashboards[0])
responses.add(
method=responses.GET,
url=request.BASE_URL + 'api/search',
json=dashboards)
def delete_callback(request):
uid = request.path_url.split('/')[-1]
@@ -79,10 +65,7 @@ def test_delete_dashboards(data_config):
method=responses.DELETE,
url=request.BASE_URL +
f'api/dashboards/uid/{UID+1}',
callback=lambda f: (400, {}, ''))
data = dashboard._delete_dashboard(request, UID + 1)
assert data is False
@@ -98,19 +81,15 @@ def test_delete_dashboard(data_config):
dash = {'id': ID, 'uid': UID, 'title': TITLE, 'version': VERSION}
request = TokenRequest(**data_config, token='test')
responses.add(
method=responses.DELETE,
url=request.BASE_URL + f'api/dashboards/uid/{UID}',
json={'message': 'deleted dashboard'})
responses.add(
method=responses.GET,
url=request.BASE_URL + 'api/search',
json=dash)
deleted = dashboard.delete_dashboard(request, dash)
assert deleted
@@ -127,24 +106,15 @@ def test_search_dashboard(data_config):
request = TokenRequest(**data_config, token='test')
responses.add(
method=responses.GET,
url=request.BASE_URL + 'api/search',
json=dashboards)
responses.add(
method=responses.GET,
url=request.BASE_URL + f'api/dashboards/uid/{UID}',
json=dashboards[0])
data = dashboard._search_dashboard(
request, {'title': dashboards[0]['title']})
@@ -160,7 +130,8 @@ def test_search_dashboard_error(data_config):
responses.add_callback(
method=responses.GET,
url=request.BASE_URL + 'api/search',
callback=lambda f: (400, {}, ''))
data = dashboard._search_dashboard(request, {'title': 'DoesNotExist'})
assert data is None
@@ -175,16 +146,15 @@ def test_create_dashboard(data_config):
dashboard = {'id': ID, 'uid': UID, 'title': TITLE, 'version': VERSION}
request = TokenRequest(**data_config, token='test')
responses.add(
method=responses.GET,
url=request.BASE_URL + f'api/dashboards/uid/{UID}',
json={'dashboard': dashboard})
responses.add_callback(
method=responses.GET,
url=request.BASE_URL + 'api/search',
callback=lambda f: (400, {}, ''))
def post_callback(request):
body = json.loads(request.body)
@@ -192,7 +162,8 @@ def test_create_dashboard(data_config):
responses.add_callback(
method=responses.POST,
url=request.BASE_URL + 'api/dashboards/db',
callback=post_callback)
data = provision.create_dashboard(request, dashboard)
assert data == dashboard
@@ -208,7 +179,8 @@ def test_create_dashboard_no_uid_error(data_config):
responses.add_callback(
method=responses.GET,
url=request.BASE_URL + 'api/search',
callback=lambda f: (400, {}, ''))
def post_callback(request):
body = json.loads(request.body)
@@ -221,7 +193,8 @@ def test_create_dashboard_no_uid_error(data_config):
responses.add_callback(
method=responses.POST,
url=request.BASE_URL + 'api/dashboards/db',
callback=post_callback)
data = provision.create_dashboard(request, dashboard)
assert data is None
@@ -30,13 +30,10 @@ def test_find_folder(data_config):
request = TokenRequest(**data_config, token='test')
responses.add(
method=responses.GET,
url=f"http://{data_config['hostname']}/api/folders",
json=[])
def folder_post(request):
data = json.loads(request.body)
......
import responses
from brian_dashboard_manager.templating.gws import generate_gws
from brian_dashboard_manager.inventory_provider.interfaces import \
get_gws_direct
@@ -110,15 +109,12 @@ TEST_DATA = [
@responses.activate
def test_gws(data_config, client):
responses.add(
method=responses.GET,
url=f"{data_config['inventory_provider']}/poller/gws/direct",
json=TEST_DATA)
gws_data = get_gws_direct(data_config['inventory_provider'])
......
import responses
from brian_dashboard_manager.templating.gws import generate_indirect
from brian_dashboard_manager.inventory_provider.interfaces import \
get_gws_indirect
@@ -76,15 +75,12 @@ TEST_DATA = [
@responses.activate
def test_gws(data_config, client):
responses.add(
method=responses.GET,
url=f"{data_config['inventory_provider']}/poller/gws/indirect",
json=TEST_DATA)
gws_data = get_gws_indirect(data_config['inventory_provider'])
......
import responses
import json
from brian_dashboard_manager.grafana.provision import provision_folder, \
provision
TEST_INTERFACES = [
{
"router": "srx2.ch.office.geant.net",
@@ -469,6 +461,51 @@ NREN_INTERFACES = [
}
]
EUMETSAT_MULTICAST = [
{
'router': 'mx1.ams.nl.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.ams.nl.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.lon.uk.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.lon.uk.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.fra.de.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.1',
'endpoint': '193.17.9.3'
},
{
'router': 'mx1.fra.de.geant.net',
'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255', # noqa: E501
'community': '0pBiFbD',
'subscription': '232.223.222.2',
'endpoint': '193.17.9.3'
}
]
def generate_folder(data):
return {
@@ -494,12 +531,16 @@ def test_provision_folder(data_config, mocker):
'NREN': {
'tag': ['customers'],
'folder_name': 'NREN Access',
'interfaces': [
iface for iface in TEST_INTERFACES
if 'NREN' in iface['dashboards']]
},
'RE_CUST': {
'tag': 'RE_CUST',
'folder_name': 'RE Customer',
'interfaces': [
iface for iface in TEST_INTERFACES
if 'RE_CUST' in iface['dashboards']]
},
}
@@ -544,34 +585,35 @@ def test_provision_folder(data_config, mocker):
@responses.activate
def test_provision(data_config, mocker, client):
responses.add(
method=responses.GET,
url=f"{data_config['inventory_provider']}/poller/interfaces",
json=NREN_INTERFACES)
responses.add(
method=responses.GET,
url=f"{data_config['inventory_provider']}/data/interfaces",
json=NREN_INTERFACES)
responses.add(
method=responses.GET,
url=f'{data_config["inventory_provider"]}/poller/eumetsat-multicast',
json=EUMETSAT_MULTICAST)
responses.add(
method=responses.DELETE,
url=f"http://{data_config['hostname']}/api/folders",
json={"message": "Deleted folder"})
responses.add(
method=responses.GET,
url=f"http://{data_config['hostname']}/api/folders",
json=[])
responses.add(
method='get',
url=f"http://{data_config['hostname']}/api/folders",
json=[])
def folder_post(request):
data = json.loads(request.body)
@@ -582,48 +624,41 @@ def test_provision(data_config, mocker, client):
url=f"http://{data_config['hostname']}/api/folders",
callback=folder_post)
def search_responses(request):
if request.params.get('query', None) == 'Home':
return 200, {}, json.dumps([])
if request.params.get('type', None) == 'dash-db':
return 200, {}, json.dumps([])
assert False  # no other queries expected
responses.add_callback(
method=responses.GET,
url=f"http://{data_config['hostname']}/api/search",
callback=search_responses)
responses.add(
method=responses.GET,
url=f"http://{data_config['hostname']}/api/datasources",
json=[{
"name": "brian-influx-datasource",
"type": "influxdb",
"access": "proxy",
"url": "http://test-brian-datasource.geant.org:8086",
"database": "test-db",
"basicAuth": False,
"isDefault": True,
"readOnly": False
}])
responses.add(
method=responses.POST,
url=f"http://{data_config['hostname']}/api/dashboards/db",
json={'id': 666})
responses.add(
method=responses.PUT,
url=f"http://{data_config['hostname']}/api/org/preferences",
json={'message': 'Preferences updated'})
def homedashboard(request):
return 404, {}, ''
......