Skip to content
Snippets Groups Projects
Commit f9907a4a authored by Bjarke Madsen's avatar Bjarke Madsen
Browse files

Minor refactor and add lots of documentation.

parent 0bcc98f9
No related branches found
No related tags found
No related merge requests found
Showing with 1187 additions and 285 deletions
......@@ -4,16 +4,20 @@ Grafana Dashboard API endpoints wrapper functions.
import logging
import os
import json
from typing import Dict
from requests.models import HTTPError
from requests.exceptions import HTTPError
from brian_dashboard_manager.grafana.utils.request import TokenRequest
logger = logging.getLogger(__name__)
# Returns dictionary for each dashboard JSON definition in supplied directory
def get_dashboard_definitions(dir=None): # pragma: no cover
def get_dashboard_definitions(dir=None):
"""
Returns dictionary for each dashboard JSON definition in supplied directory
:param dir: directory to search for dashboard definitions
:return: generator of dashboard definitions
"""
dashboard_dir = dir or os.path.join(
os.path.dirname(__file__), '../dashboards/')
for (dirpath, _, filenames) in os.walk(dashboard_dir):
......@@ -24,57 +28,99 @@ def get_dashboard_definitions(dir=None): # pragma: no cover
yield dashboard
def delete_dashboard(request: TokenRequest, dashboard, folder_id=None):
def delete_dashboard(request: TokenRequest, dashboard: dict, folder_id=None):
"""
Deletes a single dashboard for the organization
the API token is registered to.
Dashboard can be specified by UID or title.
If a folder ID is not supplied, dashboard title should be globally unique.
:param request: TokenRequest object
:param dashboard: dashboard object with either a UID or title
:param folder_id: folder ID to search for dashboard in
:return: True if dashboard is considered deleted, False otherwise
"""
try:
r = None
uid = dashboard.get('uid')
if uid:
return _delete_dashboard(request, uid)
elif dashboard.get('title'):
logger.info(f'Deleting dashboard: {dashboard.get("title")}')
# if a folder ID is not supplied,
# dashboard title should be globally unique
dash = _search_dashboard(request, dashboard, folder_id)
if dash is None:
return True
_delete_dashboard(request, dash.get(
'dashboard', {}).get('uid', ''))
logger.info(f'Deleted dashboard: {dashboard.get("title")}')
return r is not None
uid = dash.get('dashboard', {}).get('uid', '')
if uid:
return _delete_dashboard(request, uid)
else:
return True
return False
except HTTPError:
dump = json.dumps(dashboard, indent=2)
except HTTPError as e:
if e.response is not None and e.response.status_code == 404:
return True
title = dashboard.get('title')
logger.exception(
f'Error when deleting dashboard:\n{dump}')
return None
f'Error when deleting dashboard: {title or ""}')
return False
# Deletes a single dashboard for the organization
# the API token is registered to.
def _delete_dashboard(request: TokenRequest, uid: int):
"""
Deletes a single dashboard for the organization
the API token is registered to.
:param request: TokenRequest object
:param uid: dashboard UID
:return: True if dashboard is considered deleted, False otherwise
"""
try:
r = request.delete(f'api/dashboards/uid/{uid}')
if r and 'deleted' in r.get('message', ''):
resp = r.json()
if resp and 'deleted' in resp.get('message', ''):
return True
except HTTPError as e:
if e.response is not None and e.response.status_code == 404:
return True
logger.exception(f'Error when deleting dashboard with UID #{uid}')
return False
raise e
return False
# Deletes all dashboards for the organization
# the API token is registered to.
def delete_dashboards(request: TokenRequest):
"""
Deletes all dashboards for the organization
the API token is registered to.
:param request: TokenRequest object
:return: True if all dashboards are considered deleted, False otherwise
"""
r = request.get('api/search')
if r and len(r) > 0:
for dash in r:
_delete_dashboard(request, dash['uid'])
dashboards = r.json()
if dashboards and len(dashboards) > 0:
for dash in dashboards:
try:
_delete_dashboard(request, dash['uid'])
except HTTPError:
logger.exception(
f'Error when deleting dashboard with UID #{dash["uid"]}')
return True
# Searches for a dashboard with given title
def find_dashboard(request: TokenRequest, title=None, folder_id=None):
def list_dashboards(request: TokenRequest, title=None, folder_id=None):
"""
Searches for dashboard(s) with given title.
If no title is provided, all dashboards are returned,
filtered by folder ID if provided.
:param request: TokenRequest object
:param title: optional dashboard title to search for
:param folder_id: optional folder ID to search for dashboards in
:return: list of dashboards matching the search criteria
"""
param = {
**({'query': title} if title else {}),
'type': 'dash-db',
......@@ -84,37 +130,40 @@ def find_dashboard(request: TokenRequest, title=None, folder_id=None):
if folder_id is not None:
param['folderIds'] = folder_id
r = request.get('api/search', params=param)
if r and len(r) > 0:
if title:
return r[0]
dashboards = []
while True:
r = request.get('api/search', params=param)
page = r.json()
if page:
dashboards.extend(page)
if len(page) < param['limit']:
break
param['page'] += 1
else:
while True:
param['page'] += 1
page = request.get('api/search', params=param)
if len(page) > 0:
r.extend(page)
else:
break
return r
break
return None
return dashboards
# Searches Grafana for a dashboard
# matching the title of the provided dashboard.
def _search_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
def _search_dashboard(request: TokenRequest, dashboard: dict, folder_id=None):
"""
Searches Grafana for a dashboard with given title from the supplied dict.
Primarily used to get the provisioned dashboard definition if it exists
:param request: TokenRequest object
:param dashboard: dashboard dictionary with a title
:param folder_id: optional folder ID to search for dashboards in
:return: dashboard definition if found, None otherwise
"""
try:
params = {
'query': dashboard["title"]
}
if folder_id is not None:
params['folderIds'] = folder_id
r = request.get('api/search', params=params)
if r and isinstance(r, list):
if len(r) >= 1:
for dash in r:
title = dashboard['title']
dashboards = list_dashboards(request, title, folder_id)
if dashboards and isinstance(dashboards, list):
if len(dashboards) >= 1:
for dash in dashboards:
if dash['title'] == dashboard['title']:
definition = _get_dashboard(request, dash['uid'])
return definition
......@@ -123,19 +172,32 @@ def _search_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
return None
def _get_dashboard(request: TokenRequest, uid):
    """
    Fetches the dashboard with supplied UID for the token's organization.

    :param request: TokenRequest object
    :param uid: dashboard UID
    :return: dashboard definition (parsed JSON) if found, None otherwise
    """
    try:
        r = request.get(f'api/dashboards/uid/{uid}')
    except HTTPError:
        # dashboard does not exist (or the API call failed); treat as absent
        return None
    return r.json()
def create_dashboard(request: TokenRequest, dashboard: dict, folder_id=None):
"""
Creates the given dashboard for the organization tied to the token.
If the dashboard already exists, it will be updated.
# Creates or updates (if exists) given dashboard for the token's organization.
# supplied dashboards are JSON blobs exported from GUI with a UID.
def create_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
:param request: TokenRequest object
:param dashboard: dashboard dictionary
:param folder_id: optional folder ID to search for the dashboard in
:return: dashboard definition if dashboard was created, None otherwise
"""
title = dashboard['title']
existing_dashboard = None
......@@ -170,10 +232,8 @@ def create_dashboard(request: TokenRequest, dashboard: Dict, folder_id=None):
payload['folderId'] = folder_id
try:
# action = "Updating" if existing_dashboard else "Creating"
# logger.info(f'{action} dashboard: {title}')
r = request.post('api/dashboards/db', json=payload)
return r
return r.json()
except HTTPError:
logger.exception(f'Error when provisioning dashboard {title}')
return None
import logging
import os
import json
from typing import Dict
from requests.exceptions import HTTPError
from brian_dashboard_manager.grafana.utils.request import Request, TokenRequest
......@@ -10,7 +9,18 @@ from brian_dashboard_manager.grafana.utils.request import Request, TokenRequest
logger = logging.getLogger(__name__)
def _datasource_provisioned(datasource_to_check, provisioned_datasources):
def _datasource_exists(datasource_to_check, provisioned_datasources):
"""
Checks if a datasource exists in the list of provisioned datasources
A datasource exists iff all the config on the provisioned version
is the same as the local datasource (identified by its name)
:param datasource_to_check: datasource to check
:param provisioned_datasources: list of provisioned datasources
:return: True if datasource exists, False otherwise
"""
if len(datasource_to_check.keys()) == 0:
return True
for datasource in provisioned_datasources:
......@@ -22,55 +32,84 @@ def _datasource_provisioned(datasource_to_check, provisioned_datasources):
def get_missing_datasource_definitions(request: Request, dir=None):
"""
Returns a list of datasource definitions that are not yet provisioned
:param request: Request session to use
:param dir: directory to search for datasource definitions
:return: generator of datasource definitions
"""
datasource_dir = dir or os.path.join(
os.path.dirname(__file__), '../datasources/')
existing_datasources = get_datasources(request)
def check_ds_not_provisioned(filename):
datasource = json.load(open(filename, 'r'))
if not _datasource_provisioned(datasource, existing_datasources):
return datasource
for (dirpath, _, filenames) in os.walk(datasource_dir): # pragma: no cover
for (dirpath, _, filenames) in os.walk(datasource_dir):
for file in filenames:
if not file.endswith('.json'):
continue
filename = os.path.join(dirpath, file)
yield check_ds_not_provisioned(filename)
datasource = json.load(open(filename, 'r'))
if not _datasource_exists(datasource, existing_datasources):
yield datasource
def check_provisioned(request: TokenRequest, datasource):
def datasource_exists(request: TokenRequest, datasource):
"""
Checks if a datasource exists in the organization
the API token is registered to.
A datasource exists iff all the config on the provisioned version
is the same as the local datasource (identified by its name)
:param request: TokenRequest object
:param datasource: datasource to check
:return: True if datasource exists, False otherwise
"""
existing = get_datasources(request)
exists = _datasource_provisioned(datasource, existing)
exists = _datasource_exists(datasource, existing)
name = datasource.get('name')
if not exists and any([ds['name'] == name for ds in existing]):
# delete datasource
duplicate_exists = any([ds['name'] == name for ds in existing])
if not exists and duplicate_exists:
delete_datasource(request, name)
return False
return exists
def get_datasources(request: Request):
    """
    Returns list of all provisioned datasources.

    :param request: Request session to use
    :return: list of datasources (parsed JSON)
    """
    return request.get('api/datasources').json()
def create_datasource(request: TokenRequest, datasource: dict):
"""
Creates a datasource for the organization
the API token is registered to.
def create_datasource(request: TokenRequest, datasource: Dict, datasources):
:param request: TokenRequest object
:param datasource: datasource to create
:return: datasource definition
"""
try:
ds_type = datasource["type"]
# find out which params
# we need to configure for this datasource type
config = datasources.get(ds_type, None)
if config is None:
logger.exception(
f'No datasource config could be found for {ds_type}')
return None
datasource.update(config)
r = request.post('api/datasources', json=datasource)
logger.info(f'Provisioned datasource: {datasource["name"]}')
except HTTPError:
logger.exception('Error when provisioning datasource')
return None
return r
return r.json()
def delete_datasource(request: TokenRequest, name: str):
    """
    Deletes a datasource for the organization
    the API token is registered to.

    :param request: TokenRequest object
    :param name: name of datasource to delete
    :return: parsed JSON delete response
    """
    return request.delete(f'api/datasources/name/{name}').json()
......@@ -7,19 +7,39 @@ logger = logging.getLogger(__name__)
def delete_folder(request: TokenRequest, title=None, uid=None):
"""
Deletes a single folder for the organization
the API token is registered to.
Folder can be specified by UID or title.
:param request: TokenRequest object
:param title: folder title
:param uid: folder UID
:return: True if folder is considered deleted, False otherwise
"""
if uid:
r = request.delete(f'api/folders/{uid}')
r = request.delete(f'api/folders/{uid}').json()
return r is not None
else:
folder = find_folder(request, title, False)
if folder is None:
return True
r = request.delete(f'api/folders/{folder.get("uid")}')
r = request.delete(f'api/folders/{folder.get("uid")}').json()
logger.info(f'Deleted folder: {title}')
return r is not None
def find_folder(request: TokenRequest, title, create=True):
"""
Finds a folder by title. If create is True, creates the folder if it does
not exist.
:param request: TokenRequest object
:param title: folder title
:param create: create folder if it does not exist
:return: folder definition
"""
folders = get_folders(request)
try:
folder = next(
......@@ -34,14 +54,46 @@ def find_folder(request: TokenRequest, title, create=True):
def get_folders(request: TokenRequest):
    """
    Returns all folders for the organization
    the API token is registered to.

    :param request: TokenRequest object
    :return: list of folder definitions (parsed JSON)
    """
    return request.get('api/folders').json()
def create_folder(request: TokenRequest, title):
    """
    Creates a folder for the organization
    the API token is registered to.

    The folder UID is derived from the title by replacing
    spaces with underscores.

    :param request: TokenRequest object
    :param title: folder title
    :return: folder definition (parsed JSON), or None if the call failed
    """
    try:
        data = {'title': title, 'uid': title.replace(' ', '_')}
        r = request.post('api/folders', json=data)
    except HTTPError:
        logger.exception(f'Error when creating folder {title}')
        return None
    return r.json()
def delete_unknown_folders(token, folders_to_keep: set):
    """
    Deletes all folders that are not in the folders_to_keep list.

    :param token: TokenRequest object
    :param folders_to_keep: set of folder titles to keep
    """
    for existing in get_folders(token):
        if existing['title'] not in folders_to_keep:
            logger.info(f'Deleting unknown folder: {existing.get("title")}')
            delete_folder(token, uid=existing['uid'])
......@@ -18,22 +18,45 @@ logger = logging.getLogger(__name__)
def switch_active_organization(request: AdminRequest, org_id: int):
    """
    Switches the active organization for the current session.

    :param request: AdminRequest object
    :param org_id: organization ID
    :return: response JSON
    """
    assert org_id
    logger.debug(f'Switched {str(request)} active organization to #{org_id}')
    return request.post(f'api/user/using/{org_id}', {}).json()
def get_organizations(request: AdminRequest) -> List[Dict]:
    """
    Returns all organizations.

    :param request: AdminRequest object
    :return: list of organization definitions (parsed JSON)
    """
    return request.get('api/orgs').json()
def create_organization(request: AdminRequest, name: str) -> Union[Dict, None]:
"""
Creates a new organization with the given name.
:param request: AdminRequest object
:param name: organization name
:return: organization definition or None if unsuccessful
"""
assert name
result = request.post('api/orgs', json={
'name': name
})
}).json()
if result.get('message', '').lower() == 'organization created':
id = result.get('orgId')
......@@ -43,14 +66,16 @@ def create_organization(request: AdminRequest, name: str) -> Union[Dict, None]:
return None
def delete_organization(request: AdminRequest, id: int) -> bool:
    """
    Deletes the organization with the given ID.

    :param request: AdminRequest object
    :param id: organization ID
    :return: True if the API reported the organization as deleted
    """
    # NOTE(review): sibling endpoints in this module parse the response
    # with .json(); confirm request.delete() here returns a dict (and not
    # a raw response object) so that result.get(...) behaves as intended.
    result = request.delete(f'api/orgs/{id}')
    return result.get('message', '').lower() == 'organization deleted'
def create_api_token(request: AdminRequest, org_id: int, key_data=None):
"""
Creates a new API token for the given organization.
:param request: AdminRequest object
:param org_id: organization ID
:param key_data: additional key data
:return: API token definition
"""
def create_api_token(request: AdminRequest, org_id: int, key_data=None):
characters = string.ascii_uppercase + string.digits
name = ''.join(random.choices(characters, k=16))
data = {
......@@ -62,7 +87,7 @@ def create_api_token(request: AdminRequest, org_id: int, key_data=None):
data.update(key_data)
switch_active_organization(request, org_id)
result = request.post('api/auth/keys', json=data)
result = request.post('api/auth/keys', json=data).json()
token_id = result.get('id')
logger.debug(f'Created API token #{token_id} for organization #{org_id}')
......@@ -71,6 +96,15 @@ def create_api_token(request: AdminRequest, org_id: int, key_data=None):
def delete_api_token(request: AdminRequest, token_id: int, org_id=None):
"""
Deletes an API token.
:param request: AdminRequest object
:param token_id: API token ID
:param org_id: organization ID
:return: delete response
"""
assert token_id
if org_id:
switch_active_organization(request, org_id)
......@@ -80,8 +114,15 @@ def delete_api_token(request: AdminRequest, token_id: int, org_id=None):
def delete_expired_api_tokens(request: AdminRequest) -> bool:
"""
Deletes all expired API tokens.
tokens = request.get('api/auth/keys', params={'includeExpired': True})
:param request: AdminRequest object
:return: True if successful
"""
tokens = request.get(
'api/auth/keys', params={'includeExpired': True}).json()
now = datetime.utcnow()
......@@ -97,6 +138,15 @@ def delete_expired_api_tokens(request: AdminRequest) -> bool:
def set_home_dashboard(request: TokenRequest, is_staff):
"""
Sets the home dashboard for the organization
the API token is registered to.
:param request: TokenRequest object
:param is_staff: True if the organization is the staff organization
:return: True if successful
"""
file = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'..',
......@@ -111,5 +161,5 @@ def set_home_dashboard(request: TokenRequest, is_staff):
dashboard = create_dashboard(request, rendered)
r = request.put('api/org/preferences', json={
'homeDashboardId': dashboard.get('id')
})
}).json()
return r and r.get('message') == 'Preferences updated'
This diff is collapsed.
from enum import Enum, auto
import requests
import logging
import jsonschema
from functools import reduce
logger = logging.getLogger(__name__)
def _get_ip_info(host): # pragma: no cover
class INTERFACE_TYPES(Enum):
    """Interface classifications; valid `interface_type` values in
    INTERFACE_LIST_SCHEMA (via _INTERFACE_TYPES)."""
    UNKNOWN = auto()
    LOGICAL = auto()
    PHYSICAL = auto()
    AGGREGATE = auto()
class BRIAN_DASHBOARDS(Enum):
    """Dashboard identifiers; valid `dashboards` values in
    INTERFACE_LIST_SCHEMA (via _DASHBOARD_IDS)."""
    CLS = auto()
    RE_PEER = auto()
    RE_CUST = auto()
    GEANTOPEN = auto()
    GCS = auto()
    L2_CIRCUIT = auto()
    LHCONE_PEER = auto()
    LHCONE_CUST = auto()
    MDVPN_CUSTOMERS = auto()
    INFRASTRUCTURE_BACKBONE = auto()
    IAS_PRIVATE = auto()
    IAS_PUBLIC = auto()
    IAS_CUSTOMER = auto()
    IAS_UPSTREAM = auto()
    GWS_PHY_UPSTREAM = auto()
    GBS_10G = auto()

    # aggregate dashboards
    CLS_PEERS = auto()
    IAS_PEERS = auto()
    GWS_UPSTREAMS = auto()
    LHCONE = auto()
    CAE1 = auto()
    COPERNICUS = auto()

    # NREN customer
    NREN = auto()
class PORT_TYPES(Enum):
    """Port classifications; valid `port_type` values in
    INTERFACE_LIST_SCHEMA (via _PORT_TYPES)."""
    ACCESS = auto()
    SERVICE = auto()
    UNKNOWN = auto()
# only used in INTERFACE_LIST_SCHEMA and sphinx docs
# (Enum classes are directly iterable; no need to wrap them in list())
_DASHBOARD_IDS = [d.name for d in BRIAN_DASHBOARDS]
_PORT_TYPES = [t.name for t in PORT_TYPES]
_INTERFACE_TYPES = [i.name for i in INTERFACE_TYPES]
# JSON schema for the response of the /data/interfaces endpoint
# (validated in _get_ip_info): a list of interface records carrying
# router name, addressing and optional bundle/logical-system info.
ROUTER_INTERFACES_SCHEMA = {
    "$schema": "https://json-schema.org/draft-07/schema#",
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "description": {"type": "string"},
            "router": {"type": "string"},
            "bundle": {
                "type": "array",
                "items": {"type": "string"}
            },
            "ipv4": {
                "type": "array",
                "items": {"type": "string"}
            },
            "ipv6": {
                "type": "array",
                "items": {"type": "string"}
            },
            "logical-system": {"type": "string"},
        },
        # description, bundle and logical-system are optional
        "required": ["name", "router", "ipv4", "ipv6"]
    }
}
# JSON schema for the response of the /poller/interfaces endpoint
# (validated in get_interfaces): interfaces annotated with the
# dashboards they belong to (names from BRIAN_DASHBOARDS).
INTERFACE_LIST_SCHEMA = {
    '$schema': 'https://json-schema.org/draft-07/schema#',
    'definitions': {
        'service': {
            'type': 'object',
            'properties': {
                'id': {'type': 'integer'},
                'name': {'type': 'string'},
                'type': {'type': 'string'},
                'status': {'type': 'string'},
            },
            'required': ['id', 'name', 'type', 'status']
        },
        'db_info': {
            'type': 'object',
            'properties': {
                'name': {'type': 'string'},
                # one of INTERFACE_TYPES member names
                'interface_type': {'enum': _INTERFACE_TYPES}
            },
            'required': ['name', 'interface_type']
        },
        'interface': {
            'type': 'object',
            'properties': {
                'router': {'type': 'string'},
                'name': {'type': 'string'},
                'description': {'type': 'string'},
                'dashboards': {
                    'type': 'array',
                    # one of BRIAN_DASHBOARDS member names
                    'items': {'enum': _DASHBOARD_IDS}
                },
                'dashboards_info': {
                    'type': 'array',
                    'items': {'$ref': '#/definitions/db_info'}
                },
                # one of PORT_TYPES member names
                'port_type': {'enum': _PORT_TYPES}
            },
            'required': [
                'router', 'name', 'description',
                'dashboards']
        },
    },
    'type': 'array',
    'items': {'$ref': '#/definitions/interface'}
}
# JSON schema for the response of the /poller/gws/direct endpoint
# (validated in get_gws_direct): per-interface SNMP counter
# definitions, with either v2 (community) or v3 credentials.
GWS_DIRECT_DATA_SCHEMA = {
    '$schema': 'https://json-schema.org/draft-07/schema#',
    'definitions': {
        # dotted-decimal SNMP OID, e.g. "1.3.6.1.2.1"
        'oid': {
            'type': 'string',
            'pattern': r'^(\d+\.)*\d+$'
        },
        'snmp-v2': {
            'type': 'object',
            'properties': {
                'community': {'type': 'string'}
            },
            'required': ['community']
        },
        'snmp-v3-cred': {
            'type': 'object',
            'properties': {
                'protocol': {'enum': ['MD5', 'DES']},
                'password': {'type': 'string'}
            },
            'required': ['protocol', 'password']
        },
        'snmp-v3': {
            'type': 'object',
            'properties': {
                'sec-name': {'type': 'string'},
                'auth': {'$ref': '#/definitions/snmp-v3-cred'},
                'priv': {'$ref': '#/definitions/snmp-v3-cred'}
            },
            'required': ['sec-name']
        },
        'counter': {
            'type': 'object',
            'properties': {
                'field': {
                    'enum': [
                        'discards_in',
                        'discards_out',
                        'errors_in',
                        'errors_out',
                        'traffic_in',
                        'traffic_out'
                    ]
                },
                'oid': {'$ref': '#/definitions/oid'},
                # credentials may be v2 or v3, but not both
                'snmp': {
                    'oneOf': [
                        {'$ref': '#/definitions/snmp-v2'},
                        {'$ref': '#/definitions/snmp-v3'}
                    ]
                }
            },
            'required': ['field', 'oid']
        },
        'interface-counters': {
            'type': 'object',
            'properties': {
                'nren': {'type': 'string'},
                'isp': {'type': 'string'},
                'hostname': {'type': 'string'},
                'tag': {'type': 'string'},
                'counters': {
                    'type': 'array',
                    'items': {'$ref': '#/definitions/counter'},
                    'minItems': 1
                },
                'info': {'type': 'string'}
            },
            'required': ['nren', 'isp', 'hostname', 'tag', 'counters']
        }
    },
    'type': 'array',
    'items': {'$ref': '#/definitions/interface-counters'}
}
# JSON schema for the response of the /poller/eumetsat-multicast
# endpoint: per-router multicast subscription records polled via SNMP.
MULTICAST_SUBSCRIPTION_LIST_SCHEMA = {
    '$schema': 'https://json-schema.org/draft-07/schema#',
    'definitions': {
        # dotted-quad IPv4 address, e.g. "62.40.109.193"
        'ipv4-address': {
            'type': 'string',
            'pattern': r'^(\d+\.){3}\d+$'
        },
        'subscription': {
            'type': 'object',
            'properties': {
                'router': {'type': 'string'},
                'subscription': {'$ref': '#/definitions/ipv4-address'},
                'endpoint': {'$ref': '#/definitions/ipv4-address'},
                'oid': {
                    'type': 'string',
                    'pattern': r'^(\d+\.)*\d+$'
                },
                'community': {'type': 'string'}
            },
            'required': [
                'router', 'subscription', 'endpoint', 'oid', 'community']
        },
    },
    'type': 'array',
    'items': {'$ref': '#/definitions/subscription'}
}
def _get_ip_info(host):
"""
Get IP information for all interfaces on all routers.
:param host: Hostname to perform the request to.
:return: A lookup table of the form:
{
'router1': {
'interface1': {
'ipv4': [
'62.40.109.193/30'
],
'ipv6': [
'2001:798:cc:1::4a/126'
]
},
'interface2': {
'ipv4': [
'62.40.109.193/30'
],
'ipv6': [
'2001:798:cc:1::4a/126'
]
}
},
'router2': {
'interface1': {
'ipv4': [
'62.40.109.193/30'
],
'ipv6': [
'2001:798:cc:1::4a/126'
]
},
}
}
"""
def reduce_func(prev, curr):
"""
Reduce function to build the lookup table.
:param prev: The accumulator. The lookup table.
:param curr: The current interface.
:return: The updated lookup table.
"""
interface_name = curr.get('name')
router_name = curr.get('router')
......@@ -24,13 +303,23 @@ def _get_ip_info(host): # pragma: no cover
r = requests.get(f'{host}/data/interfaces')
r.raise_for_status()
interfaces = r.json()
jsonschema.validate(interfaces, ROUTER_INTERFACES_SCHEMA)
return reduce(reduce_func, interfaces, {})
def get_interfaces(host): # pragma: no cover
def get_interfaces(host):
"""
Get all interfaces that have dashboards assigned to them.
:param host: Hostname to perform the request to.
:return: A list of interfaces with IP information added, if present.
"""
r = requests.get(f'{host}/poller/interfaces')
r.raise_for_status()
interfaces = r.json()
jsonschema.validate(interfaces, INTERFACE_LIST_SCHEMA)
ip_info = _get_ip_info(host)
def enrich(interface):
......@@ -53,13 +342,29 @@ def get_interfaces(host): # pragma: no cover
def get_gws_direct(host):
    """
    Fetch all GWS Direct data.

    The payload is validated against GWS_DIRECT_DATA_SCHEMA before
    being returned.

    :param host: Hostname to perform the request to.
    :return: GWS direct data
    """
    response = requests.get(f'{host}/poller/gws/direct')
    response.raise_for_status()
    payload = response.json()
    jsonschema.validate(payload, GWS_DIRECT_DATA_SCHEMA)
    return payload
def get_gws_indirect(host):
"""
Get all GWS Indirect data.
:param host: Hostname to perform the request to.
:return: GWS Indirect data
"""
r = requests.get(f'{host}/poller/gws/indirect')
r.raise_for_status()
interfaces = r.json()
......@@ -67,6 +372,15 @@ def get_gws_indirect(host):
def get_eumetsat_multicast_subscriptions(host):
    """
    Get all EUMETSAT multicast subscriptions.

    :param host: Hostname to perform the request to.
    :return: EUMETSAT multicast subscriptions
    """
    r = requests.get(f'{host}/poller/eumetsat-multicast')
    r.raise_for_status()
    data = r.json()
    # validate before returning (previously an early return made
    # this validation unreachable)
    jsonschema.validate(data, MULTICAST_SUBSCRIPTION_LIST_SCHEMA)
    return data
......@@ -13,7 +13,7 @@ from brian_dashboard_manager.config import STATE_PATH
routes = Blueprint("update", __name__)
UPDATE_RESPONSE_SCHEMA = {
'$schema': 'http://json-schema.org/draft-07/schema#',
'$schema': 'https://json-schema.org/draft-07/schema#',
'type': 'object',
'properties': {
'message': {
......@@ -29,6 +29,16 @@ def after_request(resp):
def should_provision():
"""
Check if we should provision by checking the state file.
Multiple workers can call this function at the same time,
so we need to make sure we don't provision twice while
the first provisioning is still running.
:return: tuple of (bool, datetime) representing if we can provision
and the timestamp of the last provisioning, respectively.
"""
try:
with open(STATE_PATH, 'r+') as f:
try:
......
......@@ -4,6 +4,13 @@ from brian_dashboard_manager.templating.helpers \
def get_panel_data(all_subscriptions):
"""
Helper for generating multicast panel data from subscriptions
which are duplicated across all routers
:param all_subscriptions: list of subscriptions
:return: dict of dashboard name to list of panels.
"""
result = dict()
......@@ -31,6 +38,11 @@ def get_panel_data(all_subscriptions):
def get_panel_fields(panel, panel_type, datasource):
"""
Helper for generating a single multicast panel
:param panel: panel data
:param panel_type: type of panel (traffic, errors, etc.)
:param datasource: datasource to use
:return: panel data
"""
letters = letter_generator()
......@@ -60,6 +72,9 @@ def get_panel_fields(panel, panel_type, datasource):
def subscription_panel_generator(gridPos):
"""
Generates panels used for multicast traffic dashboards
:param gridPos: generator of grid positions
:return: function that generates panels
"""
def get_panel_definitions(panels, datasource, errors=False):
result = []
......@@ -86,6 +101,14 @@ def subscription_panel_generator(gridPos):
def generate_eumetsat_multicast(subscriptions, datasource):
"""
Generates EUMETSAT multicast dashboards
:param subscriptions: list of subscriptions
:param datasource: datasource to use
:return: generator of dashboards
"""
panel_data = get_panel_data(subscriptions)
for dash in get_dashboard_data(
data=panel_data,
......
......@@ -3,6 +3,13 @@ from brian_dashboard_manager.templating.helpers import get_dashboard_data
def get_panel_data(interfaces):
"""
Helper for generating GWS panel data
:param interfaces: list of interfaces
:return: dict of dashboard name to list of data used for generating panels.
"""
result: Dict[str, List[Dict]] = {}
count = {}
......@@ -54,6 +61,13 @@ def get_panel_data(interfaces):
def get_gws_indirect_panel_data(interfaces):
"""
Helper for generating GWS indirect panel data
:param interfaces: list of interfaces
:return: dict of dashboard name to list of data used for generating panels.
"""
result: Dict[str, List[Dict]] = {}
for interface in interfaces:
......@@ -76,6 +90,13 @@ def get_gws_indirect_panel_data(interfaces):
def generate_gws(gws_data, datasource):
"""
Generates GWS Direct dashboards
:param gws_data: data from GWS Direct API
:param datasource: datasource to use
:return: generator of GWS Direct dashboards
"""
panel_data = get_panel_data(gws_data)
for dash in get_dashboard_data(
......@@ -86,6 +107,13 @@ def generate_gws(gws_data, datasource):
def generate_indirect(gws_data, datasource):
"""
Generates GWS Indirect dashboards
:param gws_data: data from GWS Indirect API
:param datasource: datasource to use
:return: generator of GWS Indirect dashboards
"""
panel_data = get_gws_indirect_panel_data(gws_data)
for dash in get_dashboard_data(
data=panel_data,
......
......@@ -21,6 +21,13 @@ logger = logging.getLogger(__file__)
def num_generator(start=1):
"""
Generator for numbers starting from the value of `start`
:param start: number to start at
:return: generator of numbers
"""
num = start
while True:
yield num
......@@ -28,6 +35,17 @@ def num_generator(start=1):
def gridPos_generator(id_generator, start=0, agg=False):
"""
Generator of gridPos objects used in Grafana dashboards to position panels.
:param id_generator: generator of panel ids
:param start: panel number to start from
:param agg: whether to generate a panel for the aggregate dashboards,
which has two panels per row
:return: generator of gridPos objects
"""
num = start
while True:
yield {
......@@ -49,6 +67,11 @@ def gridPos_generator(id_generator, start=0, agg=False):
def letter_generator():
"""
Generator for letters used to generate refIds for panel targets.
:return: generator of strings
"""
i = 0
j = 0
num_letters = len(ascii_uppercase)
......@@ -132,11 +155,19 @@ def get_nren_interface_data_old(interfaces):
def get_nren_interface_data(services, interfaces, excluded_dashboards):
"""
Helper for grouping interfaces into groups of NRENs
Helper for grouping interface data to be used for generating
dashboards for NRENs.
Extracts information from interfaces to be used in panels.
NREN dashboards have aggregate panels at the top and
dropdowns for services / physical interfaces.
:param services: list of services
:param interfaces: list of interfaces
:param excluded_dashboards: list of dashboards to exclude for
the organization we are generating dashboards for
:return: dictionary of dashboards and their service/interface data
"""
result = {}
customers = defaultdict(list)
......@@ -171,24 +202,24 @@ def get_nren_interface_data(services, interfaces, excluded_dashboards):
lag_service = 'GA-' in sid and service_type == 'ETHERNET'
if len(_interfaces) == 0:
continue
if 'interface' in _interfaces[0]:
if_name = _interfaces[0].get('interface')
router = _interfaces[0].get('hostname')
else:
if_name = _interfaces[0].get('port')
router = _interfaces[0].get('equipment')
router = router.replace('.geant.net', '')
title = f'{router} - {if_name} - {name} ({sid})'
if lag_service:
if len(_interfaces) == 0:
continue
if len(_interfaces) > 1:
logger.info(
f'{sid} {name} aggregate service has > 1 interface')
continue
if 'interface' in _interfaces[0]:
if_name = _interfaces[0].get('interface')
router = _interfaces[0].get('hostname')
else:
if_name = _interfaces[0].get('port')
router = _interfaces[0].get('equipment')
router = router.replace('.geant.net', '')
location = router.split('.')[1].upper()
title = f'{location} - {customer} ({if_name}) | {name}'
aggregate_interfaces[f'{router}:::{if_name}'] = True
dashboard['AGGREGATES'].append({
'measurement': measurement,
......@@ -200,11 +231,11 @@ def get_nren_interface_data(services, interfaces, excluded_dashboards):
# MDVPN type services don't have data in BRIAN
continue
title = f'{name} ({sid})'
dashboard['SERVICES'].append({
'measurement': measurement,
'title': title,
'scid': scid
'scid': scid,
'sort': (sid[:2], name)
})
def _check_in_aggregate(router, interface):
......@@ -218,7 +249,6 @@ def get_nren_interface_data(services, interfaces, excluded_dashboards):
port_type = interface.get('port_type', 'unknown').lower()
router = host.replace('.geant.net', '')
location = host.split('.')[1].upper()
panel_title = f"{router} - {{}} - {interface_name} - {description}"
dashboards_info = interface['dashboards_info']
......@@ -238,7 +268,7 @@ def get_nren_interface_data(services, interfaces, excluded_dashboards):
'interface': interface_name,
'hostname': host,
'alias':
f"{location} - {dashboard_name} ({interface_name})"
f"{router} - {interface_name} - {dashboard_name} "
})
if info['interface_type'] == 'AGGREGATE':
......@@ -268,9 +298,16 @@ def get_nren_interface_data(services, interfaces, excluded_dashboards):
def get_interface_data(interfaces):
"""
Helper for grouping interfaces into dashboards.
Helper for grouping interface data to be used for generating
various dashboards
Extracts information from interfaces to be used in panels.
:param interfaces: list of interfaces
:return: dictionary of dashboards and their interface data
"""
result = {}
for interface in interfaces:
......@@ -299,16 +336,23 @@ def get_interface_data(interfaces):
return result
def get_aggregate_interface_data(interfaces, agg_type, group_field):
def get_aggregate_interface_data(interfaces, agg_name, group_field):
"""
Helper for grouping interfaces into groups by fields, eg. remotes
(ISP/NREN/...) used for aggregate dashboards
Extracts information from interfaces to be used in panels.
Helper for grouping interface data to be used for generating
aggregate dashboards.
Aggregate dashboards have panels with multiple targets (timeseries)
that are grouped by a field (`group_field`). This function
groups the interfaces by the `group_field` and returns a dictionary
of aggregate dashboards and their interface data.
One of the panels is a special panel that has all the targets
in a single panel, as an aggregate of all data for that dashboard.
Aggregate dashboards have aggregates at the top for all groups
as well as aggregate panels for specific groups.
This builds a dict with interfaces for each group
and one with all interfaces.
:param interfaces: list of interfaces
:param agg_name: name of the aggregate dashboard
:param group_field: field to group the interfaces by
:return: dictionary of aggregate dashboards and their interface data
"""
result = []
......@@ -328,27 +372,37 @@ def get_aggregate_interface_data(interfaces, agg_type, group_field):
interface_name = interface.get('name')
host = interface.get('router', '')
router = host.replace('.geant.net', '')
for info in interface['dashboards_info']:
remote = info['name']
location = host.split('.')[1].upper()
result.append({
'type': agg_type,
'type': agg_name,
'interface': interface_name,
'hostname': host,
'remote': remote,
'location': location,
'alias': f"{location} - {remote} ({interface_name})",
'alias': f"{router} - {remote} - {interface_name}",
})
return reduce(get_reduce_func_for_field(group_field), result, {})
def get_aggregate_targets(targets):
"""
Helper used for generating panel fields for aggregate panels
with multiple target fields (ingress/egress)
Helper for generating targets for aggregate panels.
Aggregate panels have multiple targets (timeseries) that are
grouped by a field (`group_field`).
This function generates the targets for the aggregate panel.
:param targets: list of targets
:return: tuple of ingress and egress targets for the ingress and egress
aggregate panels respectively
"""
ingress = []
egress = []
......@@ -379,9 +433,17 @@ def get_aggregate_targets(targets):
def get_panel_fields(panel, panel_type, datasource):
"""
Helper for generating a single panel,
with ingress/egress and percentile targets
Helper for generating panels.
Generates the fields for the panel based on the panel type.
:param panel: panel data
:param panel_type: type of panel (traffic, errors, etc.)
:param datasource: datasource to use for the panel
:return: generated panel definition from the panel data and panel type
"""
letters = letter_generator()
def get_target_data(alias, field):
......@@ -426,13 +488,30 @@ def get_panel_fields(panel, panel_type, datasource):
def default_interface_panel_generator(gridPos):
"""
Shared wrapper for shorter calls without
gridPos to generate panels.
Helper for generating panel definitions for dashboards.
Generates the panel definitions for the dashboard based on the
panel data and panel type.
Generates panels used in a normal dashboard
for all traffic + (conditionally) IPv6 + Errors
:param gridPos: generator for grid positions
:return: function that generates panel definitions
"""
def get_panel_definitions(panels, datasource, errors=False):
"""
Generates the panel definitions for the dashboard based on the
panel data for the panel types (traffic, errors, IPv6).
IPv6 and errors are optional / determined by the presence of the
`has_v6` field in the panel data, and the `errors` parameter.
:param panels: panel data
:param datasource: datasource to use for the panel
:param errors: whether or not to include an error panel
:return: list of panel definitions
"""
result = []
for panel in panels:
......@@ -457,6 +536,20 @@ def default_interface_panel_generator(gridPos):
def get_nren_dashboard_data_single(data, datasource, tag):
"""
Helper for generating dashboard definitions for a single NREN.
NREN dashboards have two aggregate panels (ingress and egress),
and two dropdown panels for services and interfaces.
:param data: data for the dashboard, including the NREN name and
the panel data
:param datasource: datasource to use for the panels
:param tag: tag to use for the dashboard, used for dashboard dropdowns on
the home dashboard.
:return: dashboard definition for the NREN dashboard
"""
nren, dash = data
id_gen = num_generator()
......@@ -476,7 +569,15 @@ def get_nren_dashboard_data_single(data, datasource, tag):
panel_gen = default_interface_panel_generator(gridPos)
services_dropdown = create_dropdown_panel('Services', **next(gridPos))
service_panels = panel_gen(dash['SERVICES'], datasource)
def sort_key(panel):
    """
    Sort key for service panels on an NREN dashboard.

    Panels carrying an explicit 'sort' value (a (sid-prefix, name)
    tuple) are ordered by it; panels without one are pushed to the
    end, ordered by hostname.

    :param panel: panel data dict
    :return: tuple used for ordering
    """
    sort = panel.get('sort')
    if not sort:
        # Return a tuple rather than a string so this fallback is
        # comparable with the (sid[:2], name) tuples carried by the
        # other panels; a bare 'ZZZ' + hostname string would raise
        # TypeError when mixed with tuples in sorted(), and would
        # also fail if 'hostname' is missing (None concatenation).
        # 'ZZZ' sorts after any two-letter SID prefix -> sort to end.
        return ('ZZZ', panel.get('hostname') or '')
    return sort
service_panels = panel_gen(
sorted(dash['SERVICES'], key=sort_key), datasource)
iface_dropdown = create_dropdown_panel('Interfaces', **next(gridPos))
phys_panels = panel_gen(dash['PHYSICAL'], datasource, True)
......@@ -505,8 +606,15 @@ def get_nren_dashboard_data_single(data, datasource, tag):
def get_nren_dashboard_data(data, datasource, tag):
"""
Generates all panels used in a NREN dashboard,
including dropdowns and aggregate panels.
Helper for generating dashboard definitions for all NRENs.
Uses multiprocessing to speed up generation.
:param data: the NREN names and the panel data for each NREN
:param datasource: datasource to use for the panels
:param tag: tag to use for the dashboard, used for dashboard dropdowns on
the home dashboard.
:return: generator for dashboard definitions for each NREN
"""
with ProcessPoolExecutor(max_workers=NUM_PROCESSES) as executor:
......@@ -526,8 +634,19 @@ def get_dashboard_data_single(
panel_generator=default_interface_panel_generator,
errors=False):
"""
Generates all panels used in a normal dashboard without aggregate panels
Helper for generating dashboard definitions for non-NREN dashboards.
:param data: data for the dashboard, including the dashboard name and
the panel data
:param datasource: datasource to use for the panels
:param tag: tag to use for the dashboard, used for dashboard dropdowns on
the home dashboard.
:param panel_generator: function for generating panel definitions
:param errors: whether or not to include an error panel for each interface
:return: dashboard definition for the NREN dashboard
"""
id_gen = num_generator()
gridPos = gridPos_generator(id_gen)
panel_gen = panel_generator(gridPos)
......@@ -552,7 +671,17 @@ def get_dashboard_data(
panel_generator=default_interface_panel_generator,
errors=False):
"""
Generates all panels used in a normal dashboard without aggregate panels
Helper for generating dashboard definitions for all non-NREN dashboards.
Uses multiprocessing to speed up generation.
:param data: the dashboard names and the panel data for each dashboard
:param datasource: datasource to use for the panels
:param tag: tag to use for the dashboard, used for dashboard dropdowns on
the home dashboard.
:param panel_generator: function for generating panel definitions
:param errors: whether or not to include an error panel for each interface
:return: generator for dashboard definitions for each dashboard
"""
with ProcessPoolExecutor(max_workers=NUM_PROCESSES) as executor:
......@@ -571,12 +700,19 @@ def get_dashboard_data(
def create_aggregate_panel(title, gridpos, targets, datasource):
"""
Generates a single panel with multiple targets.
Each target is one interface / line on the graph
Helper for generating aggregate panels. Creates two panels, one for
ingress and one for egress.
:param title: title for the panel
:param gridpos: generator for grid position
:param targets: list of targets for the panels, used to build separate
targets for both ingress and egress.
:param datasource: datasource to use for the panels
:return: tuple of aggregate panels, one for ingress and one for egress
"""
ingress_targets, egress_targets = get_aggregate_targets(targets)
result = []
ingress_pos = next(gridpos)
egress_pos = next(gridpos)
......@@ -595,7 +731,7 @@ def create_aggregate_panel(title, gridpos, targets, datasource):
ingress_colors = reduce(reduce_alias, ingress_targets, {})
egress_colors = reduce(reduce_alias, egress_targets, {})
result.append(create_panel({
ingress = create_panel({
**ingress_pos,
'stack': True,
'linewidth': 0 if is_total else 1,
......@@ -604,9 +740,9 @@ def create_aggregate_panel(title, gridpos, targets, datasource):
'targets': ingress_targets,
'y_axis_type': 'bits',
'alias_colors': json.dumps(ingress_colors) if is_total else {}
}))
})
result.append(create_panel({
egress = create_panel({
**egress_pos,
'stack': True,
'linewidth': 0 if is_total else 1,
......@@ -615,30 +751,40 @@ def create_aggregate_panel(title, gridpos, targets, datasource):
'targets': egress_targets,
'y_axis_type': 'bits',
'alias_colors': json.dumps(egress_colors) if is_total else {}
}))
})
return result
return ingress, egress
def get_aggregate_dashboard_data(title, targets, datasource, tag):
def get_aggregate_dashboard_data(title, remotes, datasource, tag):
"""
Creates three types of aggregate panels:
Aggregate Ingress/Egress that contain
every target (interface) given as parameter
Totals Ingress/Egress which is the same as above,
but with a different line color.
Aggregates for each remote
(all interfaces for each remote (ISP/NREN/...)
on separate graphs
Helper for generating aggregate dashboard definitions.
Aggregate dashboards consist only of aggregate panels that are
panels with data for multiple interfaces.
At the top of the dashboard are two aggregate panels showing
total ingress and egress data for all interfaces.
Below that are two aggregate panels for each target, one for
ingress and one for egress.
:param title: title for the dashboard
:param targets: dictionary of targets for the panels, the key is the
remote (usually a customer) and the value is a list of targets
for that remote. A single target represents how to fetch
data for one interface.
:param datasource: datasource to use for the panels
:param tag: tag to use for the dashboard, used for dashboard dropdowns on
the home dashboard.
:return: dashboard definition for the aggregate dashboard
"""
id_gen = num_generator()
gridPos = gridPos_generator(id_gen, agg=True)
panels = []
all_targets = targets.get('EVERYSINGLETARGET', [])
all_targets = remotes.get('EVERYSINGLETARGET', [])
ingress, egress = create_aggregate_panel(
title, gridPos, all_targets, datasource)
......@@ -649,12 +795,12 @@ def get_aggregate_dashboard_data(title, targets, datasource, tag):
totals_title, gridPos, all_targets, datasource)
panels.extend([t_in, t_eg])
if 'EVERYSINGLETARGET' in targets:
del targets['EVERYSINGLETARGET']
if 'EVERYSINGLETARGET' in remotes:
del remotes['EVERYSINGLETARGET']
for target in targets:
for remote in remotes:
_in, _out = create_aggregate_panel(
title + f' - {target}', gridPos, targets[target], datasource)
title + f' - {remote}', gridPos, remotes[remote], datasource)
panels.extend([_in, _out])
result = {
......
......@@ -8,6 +8,15 @@ import jinja2
def create_dropdown_panel(title, **kwargs):
"""
Creates a dropdown panel from the given data.
:param title: title of the dropdown panel
:param kwargs: data to be used in the template
:return: rendered dropdown panel JSON
"""
TEMPLATE_FILENAME = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'templates',
......@@ -18,8 +27,15 @@ def create_dropdown_panel(title, **kwargs):
return template.render({**kwargs, 'title': title})
# wrapper around bits/s and err/s panel labels
def create_yaxes(type):
"""
Creates the yaxes JSON for the given type, used in the panel template.
:param type: type of yaxes to create (bits/s or errors/s)
:return: rendered yaxes JSON
"""
file = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'templates',
......@@ -31,6 +47,15 @@ def create_yaxes(type):
def create_panel_target(data):
"""
Creates a panel target from the given data.
A panel target defines how to query data for a single timeseries.
:param data: data to be used in the template
:return: rendered panel target JSON
"""
file = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'templates',
......@@ -42,6 +67,15 @@ def create_panel_target(data):
def create_panel(data):
"""
Creates a panel from the given data. Constructs the yaxes and panel targets
and renders the panel template using these.
:param data: data to be used in the template
:return: rendered panel JSON
"""
file = os.path.abspath(os.path.join(
os.path.dirname(__file__),
'templates',
......@@ -57,6 +91,17 @@ def create_panel(data):
def render_dashboard(dashboard, nren=False):
"""
Renders the dashboard template using the given data.
NREN dashboards are rendered using a different template that uses
a different layout than other dashboards.
:param dashboard: data to be used in the template
:param nren: whether the dashboard is an NREN dashboard
:return: rendered dashboard JSON
"""
if nren:
file = os.path.abspath(os.path.join(
os.path.dirname(__file__),
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment