# NOTE(review): removed stray diff-viewer artifacts ("Newer" / "Older") that
# were paste residue, not valid Python.
import logging
import time
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor

from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS
from brian_dashboard_manager.grafana.utils.request import \
    AdminRequest, \
    TokenRequest
from brian_dashboard_manager.grafana.organization import \
    get_organizations, create_organization, create_api_token, \
    delete_api_token, delete_expired_api_tokens, set_home_dashboard
# NOTE(review): the following two imports were fused onto the organization
# import without continuations; split per the package layout — confirm the
# module paths against the repository.
from brian_dashboard_manager.grafana.dashboard import \
    get_dashboard_definitions, create_dashboard, find_dashboard
from brian_dashboard_manager.grafana.datasource import \
    check_provisioned, create_datasource
from brian_dashboard_manager.grafana.folder import find_folder
from brian_dashboard_manager.inventory_provider.interfaces import \
    get_interfaces
from brian_dashboard_manager.templating.nren_access import generate_nrens
from brian_dashboard_manager.templating.helpers import is_re_customer, \
    is_cls_peer, is_cls, is_ias_customer, is_ias_private, is_ias_public, \
    is_ias_upstream, is_ias_peer, is_lag_backbone, is_nren, is_phy_upstream, \
    is_re_peer, is_gcs, is_geantopen, is_l2circuit, is_lhcone_peer, \
    is_lhcone_customer, is_lhcone, is_mdvpn, get_aggregate_dashboard_data, \
    get_interface_data, parse_backbone_name, parse_phy_upstream_name, \
    get_dashboard_data, get_aggregate_interface_data
from brian_dashboard_manager.templating.render import render_dashboard
logger = logging.getLogger(__name__)
def generate_all_nrens(token_request, nrens, folder_id, datasource_name):
    """Create one NREN access dashboard per rendered template.

    Dashboards are produced by ``generate_nrens`` and pushed to Grafana
    concurrently; the pool is drained before returning.

    :param token_request: org-scoped TokenRequest used for the API calls
    :param nrens: iterable of NREN interfaces to build dashboards for
    :param folder_id: Grafana folder ID the dashboards are created in
    :param datasource_name: name of the datasource the panels query
    """
    rendered_dashboards = generate_nrens(nrens, datasource_name)
    with ThreadPoolExecutor(max_workers=8) as pool:
        for rendered in rendered_dashboards:
            pool.submit(create_dashboard, token_request, rendered, folder_id)
def provision_folder(token_request, folder_name,
                     dash, excluded_interfaces, datasource_name):
    """Create a Grafana folder and fill it with dashboards for one category.

    Interfaces are filtered with the category's predicate, turned into
    dashboard definitions and created concurrently in the folder.

    :param token_request: org-scoped TokenRequest used for the API calls
    :param folder_name: name of the Grafana folder to find or create
    :param dash: category config dict with keys 'predicate', 'tag' and
        optional 'errors' (include an error panel) and 'parse_func'
    :param excluded_interfaces: interfaces remaining after NREN exclusions
    :param datasource_name: name of the datasource the panels query
    """
    folder = find_folder(token_request, folder_name)
    predicate = dash['predicate']
    tag = dash['tag']
    # dashboard will include error panel
    # BUG FIX: 'errors' was read below without ever being bound (NameError);
    # it comes from the category config, e.g. the 'Infrastructure Backbone'
    # and 'GWS PHY Upstream' entries set 'errors': True.
    errors = dash.get('errors')
    # custom parsing function for description to dashboard name
    parse_func = dash.get('parse_func')
    relevant_interfaces = filter(predicate, excluded_interfaces)
    data = get_interface_data(relevant_interfaces, parse_func)
    dash_data = get_dashboard_data(data, datasource_name, tag, errors)
    with ThreadPoolExecutor(max_workers=4) as executor:
        for dashboard in dash_data:
            rendered = render_dashboard(dashboard)
            executor.submit(create_dashboard, token_request,
                            rendered, folder['id'])
def provision_aggregate(token_request, agg_type, aggregate_folder,
                        dash, excluded_interfaces, datasource_name):
    """Build and upload a single aggregate dashboard for one peer type.

    :param token_request: org-scoped TokenRequest used for the API calls
    :param agg_type: label of the aggregate (e.g. 'CLS PEERS'), used in
        the dashboard title and data grouping
    :param aggregate_folder: Grafana folder dict the dashboard goes into
    :param dash: config dict with the 'predicate' filter and 'tag'
    :param excluded_interfaces: interfaces remaining after NREN exclusions
    :param datasource_name: name of the datasource the panels query
    """
    matching = filter(dash['predicate'], excluded_interfaces)
    aggregate_data = get_aggregate_interface_data(matching, agg_type)
    definition = get_aggregate_dashboard_data(
        f'Aggregate - {agg_type}', aggregate_data,
        datasource_name, dash['tag'])
    create_dashboard(token_request, render_dashboard(definition),
                     aggregate_folder['id'])
def provision(config):
    """Provision datasources and dashboards for every Grafana organization.

    NOTE(review): the paste lost this function's header and its two
    initializers (``start``, ``tokens``) as well as the closing brace of
    the ``dashboards`` dict — all reconstructed here; the statements
    themselves are unchanged. Diff-gutter line-number junk that was
    interleaved with the code has been dropped.

    Workflow: create any organizations named in the config that are
    missing, then for each org obtain a short-lived API token and
    (1) provision the influxdb datasource if absent,
    (2) create per-category dashboard folders in parallel,
    (3) create aggregate dashboards,
    (4) create NREN access dashboards,
    (5) create the static dashboard definitions and set 'Home' as the
        org's home dashboard.
    All tokens created here are deleted before returning.

    :param config: dict with 'inventory_provider', 'datasources' and
        optional 'organizations' keys, plus the Grafana admin credentials
        consumed by AdminRequest/TokenRequest
    """
    start = time.time()
    tokens = []

    request = AdminRequest(**config)
    all_orgs = get_organizations(request)

    orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)

    # orgs present in config but not yet in Grafana
    missing = (org['name'] for org in orgs_to_provision
               if org['name'] not in [org['name'] for org in all_orgs])

    for org_name in missing:
        org_data = create_organization(request, org_name)
        all_orgs.append(org_data)

    interfaces = get_interfaces(config['inventory_provider'])

    for org in all_orgs:
        org_id = org['id']
        delete_expired_api_tokens(request, org_id)
        token = create_api_token(request, org_id)
        token_request = TokenRequest(token=token['key'], **config)
        tokens.append((org_id, token['id']))

        logger.info(
            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')

        try:
            org_config = next(
                o for o in orgs_to_provision if o['name'] == org['name'])
        except StopIteration:
            org_config = None

        if not org_config:
            logger.error(
                f'Org {org["name"]} does not have valid configuration.')
            org['info'] = 'Org exists in grafana but is not configured'
            continue

        # Only provision influxdb datasource for now
        datasource = config.get('datasources').get('influxdb')

        # Provision missing data sources
        if not check_provisioned(token_request, datasource):
            ds = create_datasource(token_request,
                                   datasource,
                                   config.get('datasources'))
            if ds:
                logger.info(
                    f'Provisioned datasource: {datasource["name"]}')

        excluded_nrens = org_config.get('excluded_nrens', [])
        excluded_nrens = list(map(lambda f: f.lower(), excluded_nrens))

        def excluded(interface):
            # keep interfaces whose description mentions no excluded NREN
            desc = interface.get('description', '').lower()
            return not any(nren.lower() in desc for nren in excluded_nrens)

        excluded_interfaces = list(filter(excluded, interfaces))

        # folder name -> interface predicate / dashboard tag (+ optional
        # 'errors' panel flag and description-parsing function)
        dashboards = {
            'CLS': {
                'predicate': is_cls,
                'tag': 'CLS'
            },
            'RE PEER': {
                'predicate': is_re_peer,
                'tag': 'RE_PEER'
            },
            'RE CUST': {
                'predicate': is_re_customer,
                'tag': 'RE_CUST'
            },
            'GEANTOPEN': {
                'predicate': is_geantopen,
                'tag': 'GEANTOPEN'
            },
            'GCS': {
                'predicate': is_gcs,
                'tag': 'AUTOMATED_L2_CIRCUITS'
            },
            'L2 CIRCUIT': {
                'predicate': is_l2circuit,
                'tag': 'L2_CIRCUITS'
            },
            'LHCONE PEER': {
                'predicate': is_lhcone_peer,
                'tag': 'LHCONE_PEER'
            },
            'LHCONE CUST': {
                'predicate': is_lhcone_customer,
                'tag': 'LHCONE_CUST'
            },
            'MDVPN Customers': {
                'predicate': is_mdvpn,
                'tag': 'MDVPN'
            },
            'Infrastructure Backbone': {
                'predicate': is_lag_backbone,
                'tag': 'BACKBONE',
                'errors': True,
                'parse_func': parse_backbone_name
            },
            'IAS PRIVATE': {
                'predicate': is_ias_private,
                'tag': 'IAS_PRIVATE'
            },
            'IAS PUBLIC': {
                'predicate': is_ias_public,
                'tag': 'IAS_PUBLIC'
            },
            'IAS CUSTOMER': {
                'predicate': is_ias_customer,
                'tag': 'IAS_CUSTOMER'
            },
            'IAS UPSTREAM': {
                'predicate': is_ias_upstream,
                'tag': 'IAS_UPSTREAM'
            },
            'GWS PHY Upstream': {
                'predicate': is_phy_upstream,
                'tag': 'GWS_UPSTREAM',
                'errors': True,
                'parse_func': parse_phy_upstream_name
            }
        }  # BUG FIX: closing brace was missing in the pasted source

        # Provision dashboards, overwriting existing ones.
        datasource_name = datasource.get('name', 'PollerInfluxDB')
        with ProcessPoolExecutor(max_workers=4) as executor:
            for folder_name, dash in dashboards.items():
                logger.info(
                    f'Provisioning {org["name"]}/{folder_name} dashboards')
                executor.submit(provision_folder, token_request,
                                folder_name, dash,
                                excluded_interfaces, datasource_name)

        aggregate_dashboards = {
            'CLS PEERS': {
                'predicate': is_cls_peer,
                'tag': 'cls_peers',
            },
            'IAS PEERS': {
                'predicate': is_ias_peer,
                'tag': 'ias_peers',
            },
            'GWS UPSTREAMS': {
                'predicate': is_ias_upstream,
                'tag': 'gws_upstreams',
            },
            'LHCONE': {
                'predicate': is_lhcone,
                'tag': 'lhcone',
            },
            # 'CAE1': {
            #     'predicate': is_cae1,
            #     'tag': 'cae',
            # }
        }

        with ProcessPoolExecutor(max_workers=4) as executor:
            aggregate_folder = find_folder(token_request, 'Aggregates')
            for agg_type, dash in aggregate_dashboards.items():
                logger.info(
                    f'Provisioning {org["name"]}' +
                    f'/Aggregate {agg_type} dashboards')
                executor.submit(provision_aggregate, token_request, agg_type,
                                aggregate_folder, dash,
                                excluded_interfaces, datasource_name)

        # NREN Access dashboards
        # uses a different template than the above.
        logger.info('Provisioning NREN Access dashboards')
        folder = find_folder(token_request, 'NREN Access')
        nrens = filter(is_nren, excluded_interfaces)
        generate_all_nrens(token_request,
                           nrens, folder['id'], datasource_name)

        # Non-generated dashboards
        excluded_dashboards = org_config.get('excluded_dashboards', [])
        logger.info('Provisioning static dashboards')
        for dashboard in get_dashboard_definitions():
            if dashboard['title'] not in excluded_dashboards:
                if dashboard['title'].lower() == 'home':
                    dashboard['uid'] = 'home'
                create_dashboard(token_request, dashboard)

        # Home dashboard is always called "Home"
        # Make sure it's set for the organization
        logger.info('Configuring Home dashboard')
        home_dashboard = find_dashboard(token_request, 'Home')
        if home_dashboard:
            set_home_dashboard(token_request, home_dashboard['id'])

    logger.info(f'Time to complete: {time.time() - start}')

    # clean up the per-org API tokens created above
    for org_id, token in tokens:
        delete_api_token(request, org_id, token)