Commit d22d8970 authored by Bjarke Madsen

Provision dashboards generated with jinja

parent bf51cad2
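
The commit title refers to dashboards rendered from Jinja templates via render_dashboard (imported below from brian_dashboard_manager.templating.render). As a rough illustration only, a Jinja2-based renderer might look like the sketch below; the template directory, file name and variables are assumptions, not taken from this commit.

import json

from jinja2 import Environment, FileSystemLoader

# hypothetical template location, for illustration only
env = Environment(loader=FileSystemLoader('templates'))


def render_dashboard_sketch(dashboard_data: dict) -> dict:
    # render a dashboard JSON document from a Jinja2 template and parse it
    # into the dict passed on to create_dashboard()
    template = env.get_template('dashboard.json.j2')  # assumed file name
    rendered = template.render(**dashboard_data)
    return json.loads(rendered)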
import logging

from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS
from brian_dashboard_manager.grafana.utils.request import \
    AdminRequest, \
    TokenRequest
from brian_dashboard_manager.grafana.organization import \
    get_organizations, create_organization, create_api_token, \
    delete_api_token, delete_expired_api_tokens, set_home_dashboard
from brian_dashboard_manager.grafana.dashboard import \
    get_dashboard_definitions, create_dashboard, find_dashboard
from brian_dashboard_manager.grafana.datasource import \
    check_provisioned, create_datasource
from brian_dashboard_manager.grafana.folder import \
    get_folders, create_folder
from brian_dashboard_manager.inventory_provider.interfaces import \
    get_interfaces
from brian_dashboard_manager.templating.nren_access import generate_nrens
from brian_dashboard_manager.templating.helpers import is_re_customer, \
    is_cls, is_ias_customer, is_ias_private, is_ias_public, is_ias_upstream, \
    is_lag_backbone, is_nren, is_phy_upstream, is_re_peer, is_gcs, \
    is_geantopen, is_l2circuit, is_lhcone_peer, is_lhcone_customer, is_mdvpn,\
    get_interface_data, parse_backbone_name, parse_phy_upstream_name, \
    get_dashboard_data
from brian_dashboard_manager.templating.render import render_dashboard

logger = logging.getLogger(__name__)
@@ -24,36 +33,156 @@ def provision(config):
    request = AdminRequest(**config)
    all_orgs = get_organizations(request)

    orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)

    missing = (org['name'] for org in orgs_to_provision
               if org['name'] not in [org['name'] for org in all_orgs])

    for org_name in missing:
        org_data = create_organization(request, org_name)
        all_orgs.append(org_data)

    interfaces = get_interfaces(config['inventory_provider'])
    for org in all_orgs:
        org_id = org['id']
        delete_expired_api_tokens(request, org_id)
        token = create_api_token(request, org_id)
        token_request = TokenRequest(token=token['key'], **config)

        folders = get_folders(token_request)

        def find_folder(title):
            # reuse an existing folder with a matching title, or create it
            try:
                folder = next(
                    f for f in folders if f['title'].lower() == title.lower())
            except StopIteration:
                folder = None
            if not folder:
                logger.info(f'Created folder: {title}')
                folder = create_folder(token_request, title)
                folders.append(folder)
            return folder

        logger.info(
            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')

        try:
            org_config = next(
                o for o in orgs_to_provision if o['name'] == org['name'])
        except StopIteration:
            org_config = None

        if not org_config:
            logger.error(
                f'Org {org["name"]} does not have valid configuration.')
            org['info'] = 'Org exists in grafana but is not configured'
            continue
        # Only provision influxdb datasource for now
        datasource = config.get('datasources').get('influxdb')

        if datasource:
            if not check_provisioned(token_request, datasource):
                ds = create_datasource(token_request,
                                       datasource,
                                       config.get('datasources'))
                if ds:
                    logger.info(
                        f'Provisioned datasource: {datasource["name"]}')
        excluded_nrens = org_config.get('excluded_nrens', [])
        excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))

        def excluded(interface):
            # keep only interfaces whose description does not mention
            # an excluded NREN
            desc = interface.get('description', '').lower()
            return not any(nren.lower() in desc for nren in excluded_nrens)

        excluded_interfaces = list(filter(excluded, interfaces))

        dashboards = {
            'CLS': {'predicate': is_cls, 'tag': 'CLS'},
            'RE PEER': {'predicate': is_re_peer, 'tag': 'RE_PEER'},
            'RE CUST': {'predicate': is_re_customer, 'tag': 'RE_CUST'},
            'GEANTOPEN': {'predicate': is_geantopen, 'tag': 'GEANTOPEN'},
            'GCS': {'predicate': is_gcs, 'tag': 'AUTOMATED_L2_CIRCUITS'},
            'L2 CIRCUIT': {'predicate': is_l2circuit, 'tag': 'L2_CIRCUITS'},
            'LHCONE PEER': {'predicate': is_lhcone_peer, 'tag': 'LHCONE_PEER'},
            'LHCONE CUST': {
                'predicate': is_lhcone_customer,
                'tag': 'LHCONE_CUST'
            },
            'MDVPN Customers': {'predicate': is_mdvpn, 'tag': 'MDVPN'},
            'Infrastructure Backbone': {
                'predicate': is_lag_backbone,
                'tag': 'BACKBONE',
                'errors': True,
                'parse_func': parse_backbone_name
            },
            'IAS PRIVATE': {'predicate': is_ias_private, 'tag': 'IAS_PRIVATE'},
            'IAS PUBLIC': {'predicate': is_ias_public, 'tag': 'IAS_PUBLIC'},
            'IAS CUSTOMER': {
                'predicate': is_ias_customer,
                'tag': 'IAS_CUSTOMER'
            },
            'IAS UPSTREAM': {
                'predicate': is_ias_upstream,
                'tag': 'IAS_UPSTREAM'
            },
            'GWS PHY Upstream': {
                'predicate': is_phy_upstream,
                'tag': 'GWS_UPSTREAM',
                'errors': True,
                'parse_func': parse_phy_upstream_name
            }
        }
        # Provision dashboards, overwriting existing ones.
        datasource_name = datasource.get('name', 'PollerInfluxDB')

        for folder_name, dash in dashboards.items():
            folder = find_folder(folder_name)
            predicate = dash['predicate']
            tag = dash['tag']
            # dashboards will include an error panel
            errors = dash.get('errors')
            # custom function for parsing a description into a dashboard name
            parse_func = dash.get('parse_func')
            logger.info(f'Provisioning {folder_name} dashboards')

            relevant_interfaces = filter(predicate, excluded_interfaces)
            data = get_interface_data(relevant_interfaces, parse_func)
            dash_data = get_dashboard_data(data, datasource_name, tag, errors)
            for dashboard in dash_data:
                rendered = render_dashboard(dashboard)
                create_dashboard(token_request, rendered, folder['id'])

        # NREN Access dashboards use a different template than the above.
        logger.info('Provisioning NREN Access dashboards')
        folder = find_folder('NREN Access')
        nrens = filter(is_nren, excluded_interfaces)
        for dashboard in generate_nrens(nrens, datasource_name):
            create_dashboard(token_request, dashboard, folder['id'])
        # Non-generated (static) dashboards
        excluded_dashboards = org_config.get('excluded_dashboards', [])
        logger.info('Provisioning static dashboards')
        for dashboard in get_dashboard_definitions():
            if dashboard['title'] not in excluded_dashboards:
                if dashboard['title'].lower() == 'home':
                    dashboard['uid'] = 'home'
                create_dashboard(token_request, dashboard)

        # The home dashboard is always called "Home";
        # make sure it is set as the home dashboard for the organization.
        home_dashboard = find_dashboard(token_request, 'Home')
        if home_dashboard:
            set_home_dashboard(token_request, home_dashboard['id'])

        delete_api_token(request, org_id, token['id'])
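
For reference, the provisioning code above reads all of its settings from a single config dict. A minimal sketch of the shape it appears to expect is shown below; the concrete values, and any keys not read in this diff (such as the Grafana URL and admin credentials consumed by AdminRequest/TokenRequest), are assumptions for illustration only.

# Illustrative only: a minimal config shape inferred from the code above.
EXAMPLE_CONFIG = {
    # queried by get_interfaces(); assumed to be a base URL
    'inventory_provider': 'http://localhost:8080',
    'datasources': {
        # only the influxdb datasource is provisioned for now
        'influxdb': {
            'name': 'PollerInfluxDB',
            # ... connection details expected by create_datasource()
        }
    },
    'organizations': [
        {
            # matched against the Grafana organization name
            'name': 'Example Org',
            # NRENs whose interfaces are left out of generated dashboards
            'excluded_nrens': [],
            # static dashboard titles to skip for this organization
            'excluded_dashboards': []
        }
    ]
    # ... plus whatever AdminRequest(**config)/TokenRequest(**config) need,
    # e.g. Grafana admin credentials (assumed, not shown in this diff)
}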