provision.py
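"""
Provision Grafana organizations with BRIAN dashboards.

Fetches interface data from the inventory provider and, for each
configured organization, creates (or overwrites) the datasource,
folders and dashboards via the Grafana HTTP API.
"""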
import logging
import time
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS
from brian_dashboard_manager.grafana.utils.request import \
AdminRequest, \
TokenRequest
from brian_dashboard_manager.grafana.organization import \
get_organizations, create_organization, create_api_token, \
delete_api_token, delete_expired_api_tokens, set_home_dashboard
from brian_dashboard_manager.grafana.dashboard import \
get_dashboard_definitions, create_dashboard, find_dashboard
from brian_dashboard_manager.grafana.datasource import \
check_provisioned, create_datasource
from brian_dashboard_manager.grafana.folder import find_folder
from brian_dashboard_manager.inventory_provider.interfaces import \
get_interfaces
from brian_dashboard_manager.templating.nren_access import generate_nrens
from brian_dashboard_manager.templating.helpers import is_re_customer, \
is_cls_peer, is_cls, is_ias_customer, is_ias_private, is_ias_public, \
is_ias_upstream, is_ias_peer, is_lag_backbone, is_nren, is_phy_upstream, \
is_re_peer, is_gcs, is_geantopen, is_l2circuit, is_lhcone_peer, \
is_lhcone_customer, is_lhcone, is_mdvpn, get_aggregate_dashboard_data, \
get_interface_data, parse_backbone_name, parse_phy_upstream_name, \
get_dashboard_data, get_aggregate_interface_data
from brian_dashboard_manager.templating.render import render_dashboard

logger = logging.getLogger(__name__)


def generate_all_nrens(token_request, nrens, folder_id, datasource_name):
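    """Render one NREN Access dashboard per NREN and submit the
    uploads to a thread pool."""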
with ThreadPoolExecutor(max_workers=8) as executor:
for dashboard in generate_nrens(nrens, datasource_name):
executor.submit(create_dashboard, token_request,
dashboard, folder_id)


def provision_folder(token_request, folder_name,
                     dash, excluded_interfaces, datasource_name):
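    """Render the dashboards described by `dash` and upload them
    to the named folder."""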
folder = find_folder(token_request, folder_name)
predicate = dash['predicate']
tag = dash['tag']
    # whether the generated dashboards should include an error panel
    errors = dash.get('errors', False)
    # optional function for parsing a dashboard name out of the
    # interface description
    parse_func = dash.get('parse_func')
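    # NB: despite its name, excluded_interfaces holds the interfaces
    # that remain after the excluded NRENs were filtered out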
relevant_interfaces = filter(predicate, excluded_interfaces)
data = get_interface_data(relevant_interfaces, parse_func)
dash_data = get_dashboard_data(data, datasource_name, tag, errors)
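    # rendering happens inline; only the create_dashboard uploads
    # (HTTP calls to Grafana) are submitted to the thread pool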
with ThreadPoolExecutor(max_workers=4) as executor:
for dashboard in dash_data:
rendered = render_dashboard(dashboard)
executor.submit(create_dashboard, token_request,
rendered, folder['id'])


def provision_aggregate(token_request, agg_type, aggregate_folder,
                        dash, excluded_interfaces, datasource_name):
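    """Render a single aggregate dashboard and upload it."""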
predicate = dash['predicate']
tag = dash['tag']
relevant_interfaces = filter(predicate, excluded_interfaces)
data = get_aggregate_interface_data(relevant_interfaces, agg_type)
dashboard = get_aggregate_dashboard_data(
f'Aggregate - {agg_type}', data, datasource_name, tag)
rendered = render_dashboard(dashboard)
create_dashboard(token_request, rendered, aggregate_folder['id'])


def provision(config):
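    """Provision datasources and dashboards for every organization.

    Returns the list of organizations that were processed.
    """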
request = AdminRequest(**config)
all_orgs = get_organizations(request)
orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
    existing_org_names = {org['name'] for org in all_orgs}
    missing = (org['name'] for org in orgs_to_provision
               if org['name'] not in existing_org_names)
for org_name in missing:
org_data = create_organization(request, org_name)
all_orgs.append(org_data)
interfaces = get_interfaces(config['inventory_provider'])
tokens = []
start = time.time()
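    # Grafana API tokens are scoped to a single organization, so a
    # short-lived token is created here (and deleted at the end)
    # for each org being provisioned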
for org in all_orgs:
org_id = org['id']
delete_expired_api_tokens(request, org_id)
token = create_api_token(request, org_id)
token_request = TokenRequest(token=token['key'], **config)
tokens.append((org_id, token['id']))
logger.info(
f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
        org_config = next(
            (o for o in orgs_to_provision if o['name'] == org['name']),
            None)
if not org_config:
logger.error(
f'Org {org["name"]} does not have valid configuration.')
            org['info'] = 'Org exists in Grafana but is not configured'
continue
# Only provision influxdb datasource for now
datasource = config.get('datasources').get('influxdb')
# Provision missing data sources
if not check_provisioned(token_request, datasource):
ds = create_datasource(token_request,
datasource,
config.get('datasources'))
if ds:
logger.info(
f'Provisioned datasource: {datasource["name"]}')
        excluded_nrens = org_config.get('excluded_nrens', [])
        excluded_nrens = [nren.lower() for nren in excluded_nrens]

        def keep(interface):
            desc = interface.get('description', '').lower()
            return not any(nren in desc for nren in excluded_nrens)

        excluded_interfaces = list(filter(keep, interfaces))
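        # folder name -> interface predicate and Grafana tag for the
        # dashboards generated in that folder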
dashboards = {
'CLS': {
'predicate': is_cls,
'tag': 'CLS'
},
'RE PEER': {
'predicate': is_re_peer,
'tag': 'RE_PEER'
},
'RE CUST': {
'predicate': is_re_customer,
'tag': 'RE_CUST'
},
'GEANTOPEN': {
'predicate': is_geantopen,
'tag': 'GEANTOPEN'
},
'GCS': {
'predicate': is_gcs,
'tag': 'AUTOMATED_L2_CIRCUITS'
},
'L2 CIRCUIT': {
'predicate': is_l2circuit,
'tag': 'L2_CIRCUITS'
},
'LHCONE PEER': {
'predicate': is_lhcone_peer,
'tag': 'LHCONE_PEER'
},
'LHCONE CUST': {
'predicate': is_lhcone_customer,
'tag': 'LHCONE_CUST'
},
'MDVPN Customers': {
'predicate': is_mdvpn,
'tag': 'MDVPN'
},
'Infrastructure Backbone': {
'predicate': is_lag_backbone,
'tag': 'BACKBONE',
'errors': True,
'parse_func': parse_backbone_name
},
'IAS PRIVATE': {
'predicate': is_ias_private,
'tag': 'IAS_PRIVATE'
},
'IAS PUBLIC': {
'predicate': is_ias_public,
'tag': 'IAS_PUBLIC'
},
'IAS CUSTOMER': {
'predicate': is_ias_customer,
'tag': 'IAS_CUSTOMER'
},
'IAS UPSTREAM': {
'predicate': is_ias_upstream,
'tag': 'IAS_UPSTREAM'
},
'GWS PHY Upstream': {
'predicate': is_phy_upstream,
'tag': 'GWS_UPSTREAM',
'errors': True,
'parse_func': parse_phy_upstream_name
}
}
# Provision dashboards, overwriting existing ones.
datasource_name = datasource.get('name', 'PollerInfluxDB')
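        # one worker process per folder; provision_folder fans out
        # further with its own thread pool for the uploads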
with ProcessPoolExecutor(max_workers=4) as executor:
for folder_name, dash in dashboards.items():
logger.info(
f'Provisioning {org["name"]}/{folder_name} dashboards')
executor.submit(provision_folder, token_request,
folder_name, dash,
excluded_interfaces, datasource_name)
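        # aggregate dashboards: one dashboard per aggregate type,
        # built from all interfaces matching the predicate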
aggregate_dashboards = {
'CLS PEERS': {
'predicate': is_cls_peer,
'tag': 'cls_peers',
},
'IAS PEERS': {
'predicate': is_ias_peer,
'tag': 'ias_peers',
},
'GWS UPSTREAMS': {
'predicate': is_ias_upstream,
'tag': 'gws_upstreams',
},
'LHCONE': {
'predicate': is_lhcone,
'tag': 'lhcone',
},
# 'CAE1': {
# 'predicate': is_cae1,
# 'tag': 'cae',
# }
}
with ProcessPoolExecutor(max_workers=4) as executor:
aggregate_folder = find_folder(token_request, 'Aggregates')
for agg_type, dash in aggregate_dashboards.items():
                logger.info(
                    f'Provisioning {org["name"]}'
                    f'/Aggregate {agg_type} dashboards')
executor.submit(provision_aggregate, token_request, agg_type,
aggregate_folder, dash,
excluded_interfaces, datasource_name)
        # NREN Access dashboards use a different template
        # than the dashboards above
logger.info('Provisioning NREN Access dashboards')
folder = find_folder(token_request, 'NREN Access')
nrens = filter(is_nren, excluded_interfaces)
generate_all_nrens(token_request,
nrens, folder['id'], datasource_name)
# Non-generated dashboards
excluded_dashboards = org_config.get('excluded_dashboards', [])
logger.info('Provisioning static dashboards')
for dashboard in get_dashboard_definitions():
if dashboard['title'] not in excluded_dashboards:
if dashboard['title'].lower() == 'home':
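                    # pin the uid so the home dashboard can be looked
                    # up by a fixed id across provisioning runs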
dashboard['uid'] = 'home'
create_dashboard(token_request, dashboard)
# Home dashboard is always called "Home"
# Make sure it's set for the organization
logger.info('Configuring Home dashboard')
home_dashboard = find_dashboard(token_request, 'Home')
if home_dashboard:
set_home_dashboard(token_request, home_dashboard['id'])
    logger.info(f'Time to complete: {time.time() - start:.2f} seconds')
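    # clean up the per-org API tokens created above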
for org_id, token in tokens:
delete_api_token(request, org_id, token)
return all_orgs