diff --git a/brian_dashboard_manager/grafana/provision.py b/brian_dashboard_manager/grafana/provision.py
index d9a608d9ac32dc66d2a513f7e6c428ac64c6e3b7..053fb7cdefdcaf3e6a303f10200f454ae529e30d 100644
--- a/brian_dashboard_manager/grafana/provision.py
+++ b/brian_dashboard_manager/grafana/provision.py
@@ -2,14 +2,14 @@
 This module is responsible for the
 entire provisioning lifecycle.
 """
+import itertools
 import os
 import logging
 import time
 import json
 import datetime
-from functools import reduce
 from concurrent.futures import Future
-from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor
 from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS, STATE_PATH
 from brian_dashboard_manager.grafana.utils.request import AdminRequest, \
     TokenRequest
@@ -24,7 +24,8 @@ from brian_dashboard_manager.grafana.datasource import \
 from brian_dashboard_manager.grafana.folder import find_folder, \
     delete_folder, get_folders
 from brian_dashboard_manager.inventory_provider.interfaces import \
-    get_gws_direct, get_gws_indirect, get_interfaces
+    get_gws_direct, get_gws_indirect, get_interfaces, \
+    get_eumetsat_multicast_subscriptions
 
 from brian_dashboard_manager.templating.helpers import \
     get_aggregate_dashboard_data, get_interface_data, \
@@ -33,11 +34,126 @@ from brian_dashboard_manager.templating.helpers import \
 
 from brian_dashboard_manager.templating.gws import generate_gws, \
     generate_indirect
-
+from brian_dashboard_manager.templating.eumetsat \
+    import generate_eumetsat_multicast
 from brian_dashboard_manager.templating.render import render_dashboard
 
 logger = logging.getLogger(__name__)
 
+MAX_WORKERS = 1
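+# the keys of DASHBOARDS match the dashboard names that the inventory
+# provider reports for each interface; matched interfaces are collected
+# in 'interfaces' and provisioned into the grafana folder 'folder_name'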
+DASHBOARDS = {
+    'NREN': {
+        'tag': ['customers'],
+        'folder_name': 'NREN Access',
+        'interfaces': []
+    },
+    'CLS': {
+        'tag': 'CLS',
+        'folder_name': 'CLS',
+        'interfaces': []
+    },
+    'RE_PEER': {
+        'tag': 'RE_PEER',
+        'folder_name': 'RE Peer',
+        'interfaces': []
+    },
+    'RE_CUST': {
+        'tag': 'RE_CUST',
+        'folder_name': 'RE Customer',
+        'interfaces': []
+    },
+    'GEANTOPEN': {
+        'tag': 'GEANTOPEN',
+        'folder_name': 'GEANTOPEN',
+        'interfaces': []
+    },
+    'GCS': {
+        'tag': 'AUTOMATED_L2_CIRCUITS',
+        'folder_name': 'GCS',
+        'interfaces': []
+    },
+    'L2_CIRCUIT': {
+        'tag': 'L2_CIRCUITS',
+        'folder_name': 'L2 Circuit',
+        'interfaces': []
+    },
+    'LHCONE_PEER': {
+        'tag': 'LHCONE_PEER',
+        'folder_name': 'LHCONE Peer',
+        'interfaces': []
+    },
+    'LHCONE_CUST': {
+        'tag': 'LHCONE_CUST',
+        'folder_name': 'LHCONE Customer',
+        'interfaces': []
+    },
+    'MDVPN_CUSTOMERS': {
+        'tag': 'MDVPN',
+        'folder_name': 'MDVPN Customers',
+        'interfaces': []
+    },
+    'INFRASTRUCTURE_BACKBONE': {
+        'tag': 'BACKBONE',
+        'errors': True,
+        'folder_name': 'Infrastructure Backbone',
+        'interfaces': []
+    },
+    'IAS_PRIVATE': {
+        'tag': 'IAS_PRIVATE',
+        'folder_name': 'IAS Private',
+        'interfaces': []
+    },
+    'IAS_PUBLIC': {
+        'tag': 'IAS_PUBLIC',
+        'folder_name': 'IAS Public',
+        'interfaces': []
+    },
+    'IAS_CUSTOMER': {
+        'tag': 'IAS_CUSTOMER',
+        'folder_name': 'IAS Customer',
+        'interfaces': []
+    },
+    'IAS_UPSTREAM': {
+        'tag': ['IAS_UPSTREAM', 'UPSTREAM'],
+        'folder_name': 'IAS Upstream',
+        'interfaces': []
+    },
+    'GWS_PHY_UPSTREAM': {
+        'tag': ['GWS_UPSTREAM', 'UPSTREAM'],
+        'errors': True,
+        'folder_name': 'GWS PHY Upstream',
+        'interfaces': []
+    }
+}
+
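+# aggregate dashboards are provisioned into the 'Aggregates' folder;
+# their 'interfaces' lists are filled in the same way as DASHBOARDS above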
+AGG_DASHBOARDS = {
+    'CLS_PEERS': {
+        'tag': 'cls_peers',
+        'dashboard_name': 'CLS Peers',
+        'interfaces': []
+    },
+    'IAS_PEERS': {
+        'tag': 'ias_peers',
+        'dashboard_name': 'IAS Peers',
+        'interfaces': []
+    },
+    'IAS_UPSTREAM': {
+        'tag': 'gws_upstreams',
+        'dashboard_name': 'GWS Upstreams',
+        'interfaces': []
+    },
+    'LHCONE': {
+        'tag': 'lhcone',
+        'dashboard_name': 'LHCONE',
+        'interfaces': []
+    },
+    'CAE1': {
+        'tag': 'cae',
+        'dashboard_name': 'CAE1',
+        'interfaces': []
+    }
+}
+
 
 def provision_folder(token_request, folder_name, dash,
                      ds_name, excluded_dashboards):
@@ -61,7 +177,11 @@ def provision_folder(token_request, folder_name, dash,
         dash_data = get_nren_dashboard_data(data, ds_name, tag)
     else:
         data = get_interface_data(interfaces)
-        dash_data = get_dashboard_data(data, ds_name, tag, errors)
+        dash_data = get_dashboard_data(
+            data=data,
+            datasource=ds_name,
+            tag=tag,
+            errors=errors)
 
     if not isinstance(excluded_dashboards, list):
         excluded_dashboards = []
@@ -70,7 +190,7 @@ def provision_folder(token_request, folder_name, dash,
 
     provisioned = []
 
-    with ThreadPoolExecutor(max_workers=4) as executor:
+    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
         for dashboard in dash_data:
             rendered = render_dashboard(dashboard, nren=is_nren)
             if rendered.get('title').lower() in excluded_dashboards:
@@ -119,13 +239,277 @@ def provision_maybe(config):
             write_timestamp(now.timestamp(), False)
 
 
-def provision(config):
+def is_excluded_folder(org_config, folder_name):
+    excluded_folders = org_config.get('excluded_folders', {})
+    excluded = excluded_folders.get(folder_name, False)
+    # a boolean True means the entire folder is excluded;
+    # a list names specific dashboards not to provision,
+    # which is handled at provision time
+    return isinstance(excluded, bool) and excluded
+
+
+def excluded_folder_dashboards(org_config, folder_name):
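+    # return the list of dashboard names excluded for this folder
+    # (empty when nothing, or the entire folder, is excluded)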
+    excluded_folders = org_config.get('excluded_folders', {})
+    excluded = excluded_folders.get(folder_name, [])
+    return excluded if isinstance(excluded, list) else []
+
+
+def _provision_interfaces(config, org_config, ds_name, token):
+    """
+    Provision dashboards, overwriting existing ones.
+
+    :param config:
+    :param org_config:
+    :param ds_name:
+    :param token:
+    :return: yields dashboards that were created
+    """
+
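+    # DASHBOARDS and AGG_DASHBOARDS are module-level, so clear any interfaces
+    # collected for a previously provisioned organization before repopulating
+    for dash in itertools.chain(DASHBOARDS.values(), AGG_DASHBOARDS.values()):
+        dash['interfaces'] = []
+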
+    interfaces = get_interfaces(config['inventory_provider'])
+
+    excluded_nrens = org_config['excluded_nrens']
+
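+    # predicate: keep interfaces that are neither on a lab router
+    # nor attached to an excluded NREN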
+    def excluded(interface):
+        desc = interface['description'].lower()
+        lab = 'lab.office' in interface['router'].lower()
+        to_exclude = any(nren.lower() in desc for nren in excluded_nrens)
+        return not (to_exclude or lab)
+
+    relevant_interfaces = list(filter(excluded, interfaces))
+
+    # loop over interfaces and add them to the matching entries in the
+    # DASHBOARDS and AGG_DASHBOARDS mapping structures above, for convenience
+    for iface in relevant_interfaces:
+        for dash_name in iface['dashboards']:
+
+            # add interface to matched dashboard
+            if dash_name in DASHBOARDS:
+                ifaces = DASHBOARDS[dash_name]['interfaces']
+                ifaces.append(iface)
+
+            # add to matched aggregate dashboard
+            if dash_name in AGG_DASHBOARDS:
+                ifaces = AGG_DASHBOARDS[dash_name]['interfaces']
+                ifaces.append(iface)
+
+    # provision dashboards and their folders
+    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+        provisioned = []
+        for folder in DASHBOARDS.values():
+            folder_name = folder['folder_name']
+
+            # a boolean True excludes the entire folder; a list of dashboard
+            # names to skip is handled at provision time by provision_folder
+            if is_excluded_folder(org_config, folder_name):
+                executor.submit(
+                    delete_folder, token, title=folder_name)
+                continue
+
+            logger.info(
+                f'Provisioning {org_config["name"]}/{folder_name} dashboards')
+            res = executor.submit(
+                provision_folder, token,
+                folder_name, folder, ds_name,
+                excluded_folder_dashboards(org_config, folder_name))
+            provisioned.append(res)
+
+        for result in provisioned:
+            folder = result.result()
+            if folder is None:
+                continue
+            yield from folder
+
+
+def _provision_gws_indirect(config, org_config, ds_name, token):
+    # fetch GWS indirect data and provision related dashboards
+    logger.info('Provisioning GWS Indirect dashboards')
+    folder_name = 'GWS Indirect'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision the GWS Indirect folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            gws_indirect_data = get_gws_indirect(
+                config['inventory_provider'])
+            provisioned = []
+            dashes = generate_indirect(gws_indirect_data, ds_name)
+            for dashboard in dashes:
+                rendered = render_dashboard(dashboard)
+                provisioned.append(executor.submit(create_dashboard,
+                                                   token,
+                                                   rendered, folder['id']))
+
+            yield from provisioned
+
+
+def _provision_gws_direct(config, org_config, ds_name, token):
+    # fetch GWS direct data and provision related dashboards
+    logger.info('Provisioning GWS Direct dashboards')
+    folder_name = 'GWS Direct'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision GWS Direct folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            gws_data = get_gws_direct(config['inventory_provider'])
+            provisioned = []
+
+            for dashboard in generate_gws(gws_data, ds_name):
+                rendered = render_dashboard(dashboard)
+                provisioned.append(executor.submit(create_dashboard,
+                                                   token,
+                                                   rendered, folder['id']))
+
+            yield from provisioned
+
+
+def _provision_eumetsat_multicast(config, org_config, ds_name, token):
+    # fetch EUMETSAT multicast subscriptions and provision related dashboards
+    logger.info('Provisioning EUMETSAT Multicast dashboards')
+    folder_name = 'EUMETSAT Multicast'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision EUMETSAT Multicast folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            subscriptions = get_eumetsat_multicast_subscriptions(
+                config['inventory_provider'])
+            provisioned = []
+
+            for dashboard in generate_eumetsat_multicast(
+                    subscriptions, ds_name):
+                rendered = render_dashboard(dashboard)
+                provisioned.append(
+                    executor.submit(
+                        create_dashboard,
+                        token,
+                        rendered,
+                        folder['id']))
+
+            yield from provisioned
+
+
+def _provision_aggregates(config, org_config, ds_name, token):
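+    # provision the aggregate dashboards into the 'Aggregates' folder,
+    # yielding a Future per provisioned dashboard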
+    if is_excluded_folder(org_config, 'Aggregates'):
+        # don't provision aggregate folder
+        delete_folder(token, title='Aggregates')
+    else:
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            provisioned = []
+            agg_folder = find_folder(token, 'Aggregates')
+            for dash in AGG_DASHBOARDS.values():
+                excluded_dashboards = excluded_folder_dashboards(
+                    org_config, 'Aggregates')
+                if dash['dashboard_name'] in excluded_dashboards:
+                    dash_name = {
+                        'title': f'Aggregate - {dash["dashboard_name"]}'}
+                    executor.submit(delete_dashboard,
+                                    token, dash_name,
+                                    agg_folder['id'])
+                    continue
+                logger.info(f'Provisioning {org_config["name"]}' +
+                            f'/Aggregate {dash["dashboard_name"]} dashboards')
+                res = executor.submit(
+                    provision_aggregate, token,
+                    agg_folder, dash, ds_name)
+                provisioned.append(res)
+
+            yield from provisioned
+
+
+def _provision_static_dashboards(config, org_config, ds_name, token):
+    # Statically defined dashboards from json files
+    excluded_dashboards = org_config.get('excluded_dashboards', [])
+    logger.info('Provisioning static dashboards')
+    for dashboard in get_dashboard_definitions():
+        if dashboard['title'] not in excluded_dashboards:
+            res = create_dashboard(token, dashboard)
+            if res:
+                # yield a fake dashboard dict
+                # ... only the 'uid' element is referenced
+                yield {'uid': res.get('uid')}
+        else:
+            delete_dashboard(token, dashboard)
+
+    # Home dashboard is always called "Home"
+    # Make sure it's set for the organization
+    logger.info('Configuring Home dashboard')
+    set_home_dashboard(token, org_config['name'] == 'GÉANT Staff')
+
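+    # the home dashboard is hard-coded as managed so it is never
+    # deleted as stale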
+    yield {'uid': 'home'}
+
+
+def _get_ignored_dashboards(config, org_config, token):
+    # get dashboard UIDs from ignored folders
+    # and make sure we don't touch them
+    ignored_folders = config.get('ignored_folders', [])
+    for name in ignored_folders:
+        logger.info(
+            'Ignoring dashboards under '
+            f'the folder {org_config["name"]}/{name}')
+        folder = find_folder(token, name, create=False)
+        if folder is None:
+            continue
+        to_ignore = find_dashboard(token, folder_id=folder['id'])
+
+        if to_ignore is None:
+            continue
+
+        for dash in to_ignore:
+            # yield a minimal dashboard dict
+            # ... only the 'uid' element is referenced
+            yield {'uid': dash['uid']}  # could just yield dash
+
+
+def _delete_unknown_folders(config, token):
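+    # delete any grafana folder that is neither provisioned by this tool
+    # nor explicitly ignored in the config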
+    all_folders = get_folders(token)
+
+    folders_to_keep = [
+        # General is a base folder present in Grafana
+        'General',
+        # other folders, created outside of the DASHBOARDS list
+        'GWS Indirect',
+        'GWS Direct',
+        'Aggregates',
+        'EUMETSAT Multicast'
+    ]
+    folders_to_keep.extend([dash['folder_name']
+                            for dash in DASHBOARDS.values()])
+    ignored_folders = config.get('ignored_folders', [])
+    folders_to_keep.extend(ignored_folders)
+    folders_to_keep = set(folders_to_keep)  # de-dupe
+
+    for folder in all_folders:
+        if folder['title'] in folders_to_keep:
+            continue
+        delete_folder(token, uid=folder['uid'])
+
 
+def _provision_datasource(config, token):
+    # Only provision influxdb datasource for now
+    datasource = config.get('datasources').get('influxdb')
+
+    # Provision missing data sources
+    if not check_provisioned(token, datasource):
+        ds = create_datasource(token,
+                               datasource,
+                               config.get('datasources'))
+        if ds:
+            logger.info(
+                f'Provisioned datasource: {datasource["name"]}')
+
+    return datasource
+
+
+def _provision_orgs(config):
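+    # ensure that every organization in the config exists in grafana,
+    # creating any that are missing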
     request = AdminRequest(**config)
     all_orgs = get_organizations(request)
 
     orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
-    ignored_folders = config.get('ignored_folders', [])
 
     missing = (org['name'] for org in orgs_to_provision
                if org['name'] not in [org['name'] for org in all_orgs])
@@ -134,358 +518,84 @@ def provision(config):
         org_data = create_organization(request, org_name)
         all_orgs.append(org_data)
 
-    interfaces = get_interfaces(config['inventory_provider'])
-    tokens = []
+    return all_orgs
 
-    start = time.time()
 
-    for org in all_orgs:
-        org_id = org['id']
-        delete_expired_api_tokens(request, org_id)
-        token = create_api_token(request, org_id)
-        token_request = TokenRequest(token=token['key'], **config)
-        tokens.append((org_id, token['id']))
+def provision(config):
 
-        logger.info(
-            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
+    start = time.time()
+    tokens = []
+    all_orgs = _provision_orgs(config)
+    request = AdminRequest(**config)
 
+    def _find_org_config(org):
+        orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
         try:
-            org_config = next(
+            return next(
                 o for o in orgs_to_provision if o['name'] == org['name'])
         except StopIteration:
-            org_config = None
-
-        if not org_config:
             logger.error(
                 f'Org {org["name"]} does not have valid configuration.')
             org['info'] = 'Org exists in grafana but is not configured'
-            continue
-
-        # Only provision influxdb datasource for now
-        datasource = config.get('datasources').get('influxdb')
-
-        # Provision missing data sources
-        if not check_provisioned(token_request, datasource):
-            ds = create_datasource(token_request,
-                                   datasource,
-                                   config.get('datasources'))
-            if ds:
-                logger.info(
-                    f'Provisioned datasource: {datasource["name"]}')
-
-        excluded_nrens = org_config['excluded_nrens']
-
-        def excluded(interface):
-            desc = interface['description'].lower()
-            lab = 'lab.office' in interface['router'].lower()
-            to_exclude = any(nren.lower() in desc for nren in excluded_nrens)
-            return not (to_exclude or lab)
-
-        relevant_interfaces = list(filter(excluded, interfaces))
-
-        dashboards = {
-            'NREN': {
-                'tag': ['customers'],
-                'folder_name': 'NREN Access',
-                'interfaces': []
-            },
-            'CLS': {
-                'tag': 'CLS',
-                'folder_name': 'CLS',
-                'interfaces': []
-            },
-            'RE_PEER': {
-                'tag': 'RE_PEER',
-                'folder_name': 'RE Peer',
-                'interfaces': []
-            },
-            'RE_CUST': {
-                'tag': 'RE_CUST',
-                'folder_name': 'RE Customer',
-                'interfaces': []
-            },
-            'GEANTOPEN': {
-                'tag': 'GEANTOPEN',
-                'folder_name': 'GEANTOPEN',
-                'interfaces': []
-            },
-            'GCS': {
-                'tag': 'AUTOMATED_L2_CIRCUITS',
-                'folder_name': 'GCS',
-                'interfaces': []
-            },
-            'L2_CIRCUIT': {
-                'tag': 'L2_CIRCUITS',
-                'folder_name': 'L2 Circuit',
-                'interfaces': []
-            },
-            'LHCONE_PEER': {
-                'tag': 'LHCONE_PEER',
-                'folder_name': 'LHCONE Peer',
-                'interfaces': []
-            },
-            'LHCONE_CUST': {
-                'tag': 'LHCONE_CUST',
-                'folder_name': 'LHCONE Customer',
-                'interfaces': []
-            },
-            'MDVPN_CUSTOMERS': {
-                'tag': 'MDVPN',
-                'folder_name': 'MDVPN Customers',
-                'interfaces': []
-            },
-            'INFRASTRUCTURE_BACKBONE': {
-                'tag': 'BACKBONE',
-                'errors': True,
-                'folder_name': 'Infrastructure Backbone',
-                'interfaces': []
-            },
-            'IAS_PRIVATE': {
-                'tag': 'IAS_PRIVATE',
-                'folder_name': 'IAS Private',
-                'interfaces': []
-            },
-            'IAS_PUBLIC': {
-                'tag': 'IAS_PUBLIC',
-                'folder_name': 'IAS Public',
-                'interfaces': []
-            },
-            'IAS_CUSTOMER': {
-                'tag': 'IAS_CUSTOMER',
-                'folder_name': 'IAS Customer',
-                'interfaces': []
-            },
-            'IAS_UPSTREAM': {
-                'tag': ['IAS_UPSTREAM', 'UPSTREAM'],
-                'folder_name': 'IAS Upstream',
-                'interfaces': []
-            },
-            'GWS_PHY_UPSTREAM': {
-                'tag': ['GWS_UPSTREAM', 'UPSTREAM'],
-                'errors': True,
-                'folder_name': 'GWS PHY Upstream',
-                'interfaces': []
-            }
-        }
-
-        agg_dashboards = {
-            'CLS_PEERS': {
-                'tag': 'cls_peers',
-                'dashboard_name': 'CLS Peers',
-                'interfaces': []
-            },
-            'IAS_PEERS': {
-                'tag': 'ias_peers',
-                'dashboard_name': 'IAS Peers',
-                'interfaces': []
-            },
-            'IAS_UPSTREAM': {
-                'tag': 'gws_upstreams',
-                'dashboard_name': 'GWS Upstreams',
-                'interfaces': []
-            },
-            'LHCONE': {
-                'tag': 'lhcone',
-                'dashboard_name': 'LHCONE',
-                'interfaces': []
-            },
-            'CAE1': {
-                'tag': 'cae',
-                'dashboard_name': 'CAE1',
-                'interfaces': []
-            }
-        }
-        # Provision dashboards, overwriting existing ones.
-        ds_name = datasource.get('name', 'PollerInfluxDB')
-        excluded_folders = org_config.get('excluded_folders', {})
-
-        def get_uid(prev, curr):
-            prev[curr.get('uid')] = False
-            return prev
-
-        # Map of dashboard UID -> whether it has been updated.
-        # This is used to remove stale dashboards at the end.
-        updated = find_dashboard(token_request) or []
-        updated = reduce(get_uid, updated, {})
-
-        # General is a base folder present in Grafana
-        folders_to_keep = ['General', 'GWS Indirect',
-                           'GWS Direct', 'Aggregates']
-        folders_to_keep.extend([dash['folder_name']
-                                for dash in dashboards.values()])
-
-        def update_dash_list(dashboards):
-            for dashboard in dashboards:
-                if isinstance(dashboard, Future):
-                    dashboard = dashboard.result()
-                if dashboard is None:
-                    continue
-                updated[dashboard.get('uid')] = True
-
-        # loop over interfaces and add them to the dashboard_name
-        # -> folder mapping structure `dashboards` above, for convenience.
-        for iface in relevant_interfaces:
-            for dash_name in iface['dashboards']:
-
-                # add interface to matched dashboard
-                if dash_name in dashboards:
-                    ifaces = dashboards[dash_name]['interfaces']
-                    ifaces.append(iface)
+            return None
 
-                # add to matched aggregate dashboard
-                if dash_name in agg_dashboards:
-                    ifaces = agg_dashboards[dash_name]['interfaces']
-                    ifaces.append(iface)
-
-        # provision dashboards and their folders
-        with ProcessPoolExecutor(max_workers=4) as executor:
-            provisioned = []
-            for folder in dashboards.values():
-                folder_name = folder['folder_name']
-                exclude = excluded_folders.get(folder_name)
-
-                # boolean True means entire folder excluded
-                # if list, it is specific dashboard names not to provision
-                # so is handled at provision time.
-                if exclude:
-                    if isinstance(exclude, bool):
-                        executor.submit(
-                            delete_folder, token_request, folder_name)
-                        continue
-
-                logger.info(
-                    f'Provisioning {org["name"]}/{folder_name} dashboards')
-                res = executor.submit(provision_folder, token_request,
-                                      folder_name, folder, ds_name,
-                                      exclude)
-                provisioned.append(res)
+    for org in all_orgs:
+        org_id = org['id']
 
-            for result in provisioned:
-                folder = result.result()
-                if folder is None:
-                    continue
-                update_dash_list(folder)
-
-        # fetch GWS direct data and provision related dashboards
-        logger.info('Provisioning GWS Indirect dashboards')
-        folder_name = 'GWS Indirect'
-        exclude_indirect = excluded_folders.get(folder_name, False)
-        if isinstance(exclude_indirect, bool) and exclude_indirect:
-            # don't provision GWS Direct folder
-            delete_folder(token_request, folder_name)
-        else:
-            folder = find_folder(token_request, folder_name)
-            with ThreadPoolExecutor(max_workers=4) as executor:
-                gws_indirect_data = get_gws_indirect(
-                    config['inventory_provider'])
-                provisioned = []
-                dashes = generate_indirect(gws_indirect_data, ds_name)
-                for dashboard in dashes:
-                    rendered = render_dashboard(dashboard)
-                    provisioned.append(executor.submit(create_dashboard,
-                                                       token_request,
-                                                       rendered, folder['id']))
-
-                update_dash_list(provisioned)
-
-        # fetch GWS direct data and provision related dashboards
-        logger.info('Provisioning GWS Direct dashboards')
-        folder_name = 'GWS Direct'
-        exclude_gws = excluded_folders.get(folder_name, False)
-        if isinstance(exclude_gws, bool) and exclude_gws:
-            # don't provision GWS Direct folder
-            delete_folder(token_request, folder_name)
-        else:
-            folder = find_folder(token_request, folder_name)
-            with ThreadPoolExecutor(max_workers=4) as executor:
-                gws_data = get_gws_direct(config['inventory_provider'])
-                provisioned = []
+        logger.info(
+            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
 
-                for dashboard in generate_gws(gws_data, ds_name):
-                    rendered = render_dashboard(dashboard)
-                    provisioned.append(executor.submit(create_dashboard,
-                                                       token_request,
-                                                       rendered, folder['id']))
+        delete_expired_api_tokens(request, org_id)
+        token = create_api_token(request, org_id)
+        token_request = TokenRequest(token=token['key'], **config)
+        tokens.append((org_id, token['id']))
 
-                update_dash_list(provisioned)
+        org_config = _find_org_config(org)
+        if not org_config:
+            # message logged from _find_org_config
+            continue
 
-        exclude_agg = excluded_folders.get('Aggregates', [])
+        all_original_dashboards = find_dashboard(token_request) or []
+        all_original_dashboard_uids = {
+            d['uid'] for d in all_original_dashboards}
 
-        if isinstance(exclude_agg, bool) and exclude_agg:
-            # don't provision aggregate folder
-            delete_folder(token_request, 'Aggregates')
-        else:
-            with ProcessPoolExecutor(max_workers=4) as executor:
-                provisioned = []
-                agg_folder = find_folder(token_request, 'Aggregates')
-                for dash in agg_dashboards.values():
-                    if dash['dashboard_name'] in exclude_agg:
-                        dash_name = {
-                            'title': f'Aggregate - {dash["dashboard_name"]}'}
-                        executor.submit(delete_dashboard,
-                                        token_request, dash_name,
-                                        agg_folder['id'])
-                        continue
-                    logger.info(f'Provisioning {org["name"]}' +
-                                f'/Aggregate {dash["dashboard_name"]} dashboards')  # noqa: E501
-                    res = executor.submit(
-                        provision_aggregate, token_request,
-                        agg_folder, dash, ds_name)
-                    provisioned.append(res)
-
-                update_dash_list(provisioned)
-
-        # Statically defined dashboards from json files
-        excluded_dashboards = org_config.get('excluded_dashboards', [])
-        logger.info('Provisioning static dashboards')
-        for dashboard in get_dashboard_definitions():
-            if dashboard['title'] not in excluded_dashboards:
-                res = create_dashboard(token_request, dashboard)
-                if res:
-                    updated[res.get('uid')] = True
-            else:
-                delete_dashboard(token_request, dashboard)
-
-        # Home dashboard is always called "Home"
-        # Make sure it's set for the organization
-        logger.info('Configuring Home dashboard')
-        is_staff = org['name'] == 'GÉANT Staff'
-        set_home_dashboard(token_request, is_staff)
-        # just hardcode that we updated home dashboard
-        updated['home'] = True
-
-        # get dashboard UIDs from ignored folders
-        # and make sure we don't touch them
-        for name in ignored_folders:
-            folders_to_keep.append(name)
-            logger.info(
-                f'Ignoring dashboards under the folder {org["name"]}/{name}')
-            folder = find_folder(token_request, name, create=False)
-            if folder is None:
-                continue
-            to_ignore = find_dashboard(token_request, folder_id=folder['id'])
+        datasource = _provision_datasource(config, token_request)
+        ds_name = datasource.get('name', 'PollerInfluxDB')
 
-            if to_ignore is None:
+        managed_dashboards = itertools.chain(
+            _provision_interfaces(
+                config, org_config, ds_name, token_request),
+            _provision_gws_indirect(
+                config, org_config, ds_name, token_request),
+            _provision_gws_direct(
+                config, org_config, ds_name, token_request),
+            _provision_eumetsat_multicast(
+                config, org_config, ds_name, token_request),
+            _provision_aggregates(
+                config, org_config, ds_name, token_request),
+            _provision_static_dashboards(
+                config, org_config, ds_name, token_request),
+            _get_ignored_dashboards(
+                config, org_config, token_request)
+        )
+
+        managed_dashboard_uids = set()
+        for dashboard in managed_dashboards:
+            if isinstance(dashboard, Future):
+                dashboard = dashboard.result()
+            if dashboard is None:
                 continue
+            managed_dashboard_uids.add(dashboard['uid'])
 
-            for dash in to_ignore:
-                # mark it updated, so we don't modify it.
-                updated[dash['uid']] = True
-
-        for dash, provisioned in updated.items():
-            if not provisioned:
-                logger.info(f'Deleting stale dashboard with UID {dash}')
-                delete_dashboard(token_request, {'uid': dash})
-
-        all_folders = get_folders(token_request)
-        folders_to_keep = set(folders_to_keep)
+        for uid in all_original_dashboard_uids - managed_dashboard_uids:
+            logger.info(f'Deleting stale dashboard with UID {uid}')
+            delete_dashboard(token_request, {'uid': uid})
 
-        for folder in all_folders:
-            if folder['title'] not in folders_to_keep:
-                delete_folder(token_request, uid=folder['uid'])
+        _delete_unknown_folders(config, token_request)
 
-    logger.info(f'Time to complete: {time.time() - start}')
     for org_id, token in tokens:
         delete_api_token(request, org_id, token)
 
+    logger.info(f'Time to complete: {time.time() - start}')
+
     return all_orgs
diff --git a/brian_dashboard_manager/inventory_provider/interfaces.py b/brian_dashboard_manager/inventory_provider/interfaces.py
index e023ea4c9da20d5731887383acfcc5948d4b630b..bc92dabca4424581b939d71db0a86349fd6367ab 100644
--- a/brian_dashboard_manager/inventory_provider/interfaces.py
+++ b/brian_dashboard_manager/inventory_provider/interfaces.py
@@ -64,3 +64,9 @@ def get_gws_indirect(host):
     r.raise_for_status()
     interfaces = r.json()
     return interfaces
+
+
+def get_eumetsat_multicast_subscriptions(host):
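+    # fetch the EUMETSAT multicast subscriptions from the inventory provider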
+    r = requests.get(f'{host}/poller/eumetsat-multicast')
+    r.raise_for_status()
+    return r.json()
diff --git a/brian_dashboard_manager/logging_default_config.json b/brian_dashboard_manager/logging_default_config.json
index c100d56cad34389f015313d906c69f1a85c6417e..cccf0a3d7d0bacac6c126440c3ecbd0a0c117c48 100644
--- a/brian_dashboard_manager/logging_default_config.json
+++ b/brian_dashboard_manager/logging_default_config.json
@@ -45,7 +45,7 @@
     },
 
     "loggers": {
-        "api": {
+        "brian_dashboard_manager": {
             "level": "DEBUG",
             "handlers": ["console", "syslog_handler"],
             "propagate": false
diff --git a/brian_dashboard_manager/templating/eumetsat.py b/brian_dashboard_manager/templating/eumetsat.py
new file mode 100644
index 0000000000000000000000000000000000000000..17a417b5db0cb409ef1551f20991dc4157fc0560
--- /dev/null
+++ b/brian_dashboard_manager/templating/eumetsat.py
@@ -0,0 +1,96 @@
+import operator
+from brian_dashboard_manager.templating.helpers \
+    import get_dashboard_data, letter_generator, create_panel
+
+
+def get_panel_data(all_subscriptions):
+
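+    # group panels by router: one dashboard per router,
+    # one panel per subscription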
+    result = dict()
+
+    def _panel(s):
+        return {
+            'measurement': 'multicast_rates',
+            'title': f'{s["subscription"]} on {s["router"]}',
+            'subscription': s['subscription'],
+            'hostname': s['router']
+        }
+
+    for subscription in all_subscriptions:
+        dashboard_name = f'{subscription["router"]} subscriptions'
+        result.setdefault(dashboard_name, []).append(_panel(subscription))
+
+    # make the panels sorted deterministically
+    for name in result.keys():
+        result[name] = sorted(
+            result[name],
+            key=operator.itemgetter('subscription'))
+
+    return result
+
+
+def get_panel_fields(panel, panel_type, datasource):
+    """
+    Helper for generating a single multicast panel
+    """
+    letters = letter_generator()
+
+    def get_target_data(alias, field):
+        return {
+            # panel includes identifying information
+            # such as hostname, subscription, etc.
+            **panel,
+            'alias': alias,
+            'refId': next(letters),
+            'select_field': field
+            # 'percentile': 'percentile' in alias.lower(),
+        }
+
+    targets = [('Multicast Traffic', 'octets')]
+
+    return create_panel({
+        **panel,
+        'datasource': datasource,
+        'linewidth': 1,
+        'title': panel['title'].format(panel_type),
+        'panel_targets': [get_target_data(*target) for target in targets],
+        'y_axis_type': 'bits',
+    })
+
+
+def subscription_panel_generator(gridPos):
+    """
+    Generates panels used for multicast traffic dashboards
+    """
+    def get_panel_definitions(panels, datasource, errors=False):
+        result = []
+
+        for panel in panels:
+            result.append(get_panel_fields({
+                **panel,
+                **next(gridPos)
+            }, 'traffic', datasource))
+            if panel.get('has_v6', False):
+                result.append(get_panel_fields({
+                    **panel,
+                    **next(gridPos)
+                }, 'IPv6', datasource))
+            if errors:
+                result.append(get_panel_fields({
+                    **panel,
+                    **next(gridPos)
+                }, 'errors', datasource))
+
+        return result
+
+    return get_panel_definitions
+
+
+def generate_eumetsat_multicast(subscriptions, datasource):
+    panel_data = get_panel_data(subscriptions)
+    for dash in get_dashboard_data(
+            data=panel_data,
+            datasource=datasource,
+            tag='EUMET_MULTICAST',
+            panel_generator=subscription_panel_generator):
+
+        yield dash
diff --git a/brian_dashboard_manager/templating/gws.py b/brian_dashboard_manager/templating/gws.py
index c517d72a6d1258789c12f29c71290ac823689ec2..dd8a49b35ade68d88b3a0441fed4336e9636f666 100644
--- a/brian_dashboard_manager/templating/gws.py
+++ b/brian_dashboard_manager/templating/gws.py
@@ -1,9 +1,9 @@
-from typing import DefaultDict
+from typing import Dict, List
 from brian_dashboard_manager.templating.helpers import get_dashboard_data
 
 
 def get_panel_data(interfaces):
-    result = DefaultDict(list)
+    result: Dict[str, List[Dict]] = {}
 
     count = {}
 
@@ -41,7 +41,7 @@ def get_panel_data(interfaces):
 
         gws_measurement = 'gwsd_rates'
         title = f'{nren} GWS Direct {isp} Interface {if_num} ({hostname})'
-        result[f'GWS Direct - {isp}'].append({
+        result.setdefault(f'GWS Direct - {isp}', []).append({
             'isp': isp,
             'nren': nren,
             'measurement': gws_measurement,
@@ -54,7 +54,7 @@ def get_panel_data(interfaces):
 
 
 def get_gws_indirect_panel_data(interfaces):
-    result = DefaultDict(list)
+    result: Dict[str, List[Dict]] = {}
 
     for interface in interfaces:
 
@@ -65,7 +65,7 @@ def get_gws_indirect_panel_data(interfaces):
 
         measurement = 'dscp32_rates'
         panel_title = f'{hostname} - {{}} - {if_name} - #{service_name} IASGWS'
-        result[f'GWS Indirect - {customer}'].append({
+        result.setdefault(f'GWS Indirect - {customer}', []).append({
             'measurement': measurement,
             'title': panel_title,
             'interface': if_name,
@@ -78,11 +78,17 @@ def get_gws_indirect_panel_data(interfaces):
 def generate_gws(gws_data, datasource):
 
     panel_data = get_panel_data(gws_data)
-    for dash in get_dashboard_data(panel_data, datasource, 'GWS_DIRECT'):
+    for dash in get_dashboard_data(
+            data=panel_data,
+            datasource=datasource,
+            tag='GWS_DIRECT'):
         yield dash
 
 
 def generate_indirect(gws_data, datasource):
     panel_data = get_gws_indirect_panel_data(gws_data)
-    for dash in get_dashboard_data(panel_data, datasource, 'GWS_INDIRECT'):
+    for dash in get_dashboard_data(
+            data=panel_data,
+            datasource=datasource,
+            tag='GWS_INDIRECT'):
         yield dash
diff --git a/brian_dashboard_manager/templating/helpers.py b/brian_dashboard_manager/templating/helpers.py
index b7c9e0e87bc89ae28be0f7a7e71a1f30bd40b204..a5cb297b0bb9ed587420ecb9164cbbe2993271c4 100644
--- a/brian_dashboard_manager/templating/helpers.py
+++ b/brian_dashboard_manager/templating/helpers.py
@@ -275,7 +275,7 @@ def get_panel_fields(panel, panel_type, datasource):
     })
 
 
-def panel_generator(gridPos):
+def default_interface_panel_generator(gridPos):
     """
     Shared wrapper for shorter calls without
     gridPos to generate panels.
@@ -318,7 +318,7 @@ def get_nren_dashboard_data(data, datasource, tag):
 
         gridPos = gridPos_generator(id_gen, start=1)
 
-        panel_gen = panel_generator(gridPos)
+        panel_gen = default_interface_panel_generator(gridPos)
 
         if len(dash['AGGREGATES']) > 0:
             agg_panels = create_aggregate_panel(
@@ -358,7 +358,10 @@ def get_nren_dashboard_data(data, datasource, tag):
         yield result
 
 
-def get_dashboard_data(data, datasource, tag, errors=False):
+def get_dashboard_data(
+        data, datasource, tag,
+        panel_generator=default_interface_panel_generator,
+        errors=False):
     """
     Generates all panels used in a normal dashboard without aggregate panels
     """
diff --git a/brian_dashboard_manager/templating/templates/shared/panel_target.json.j2 b/brian_dashboard_manager/templating/templates/shared/panel_target.json.j2
index 0bcd9f694d3153a187f63012bb032a8bbc796cb4..7247e1723a84bbca4e9ad1de260f6101d3ef6958 100644
--- a/brian_dashboard_manager/templating/templates/shared/panel_target.json.j2
+++ b/brian_dashboard_manager/templating/templates/shared/panel_target.json.j2
@@ -45,37 +45,50 @@
         ]
     ],
     "tags": [
-        {% if not isp %}
+        {% if isp %}
         {
             "condition": null,
-            "key": "hostname",
+            "key": "tag",
             "operator": "=",
-            "value": "{{ hostname }}"
+            "value": "{{ interface_tag }}"
         },
         {
             "condition": "AND",
-            "key": "interface_name",
+            "key": "isp",
             "operator": "=",
-            "value": "{{ interface }}"
+            "value": "{{ isp }}"
+        },
+        {
+            "condition": "AND",
+            "key": "nren",
+            "operator": "=",
+            "value": "{{ nren }}"
         }
-        {% else %}
+        {% elif subscription %}
         {
             "condition": null,
-            "key": "tag",
+            "key": "hostname",
             "operator": "=",
-            "value": "{{ interface_tag }}"
+            "value": "{{ hostname }}"
         },
         {
             "condition": "AND",
-            "key": "isp",
+            "key": "subscription",
             "operator": "=",
-            "value": "{{ isp }}"
+            "value": "{{ subscription }}"
+        }
+        {% else %}
+        {
+            "condition": null,
+            "key": "hostname",
+            "operator": "=",
+            "value": "{{ hostname }}"
         },
         {
             "condition": "AND",
-            "key": "nren",
+            "key": "interface_name",
             "operator": "=",
-            "value": "{{ nren }}"
+            "value": "{{ interface }}"
         }
         {% endif %}
     ]
diff --git a/setup.py b/setup.py
index 3f417c84291c9188ad3bda4f42c3176ba1fe6dd6..5c54dd5fd059d1dac458781628e1a1336c2c5ade 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='brian-dashboard-manager',
-    version="0.23",
+    version="0.24",
     author='GEANT',
     author_email='swd@geant.org',
     description='',
diff --git a/test/test_eumetsat_multicast.py b/test/test_eumetsat_multicast.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9a38ff2e170277faf5f9f95d0851f91bc6a3a09
--- /dev/null
+++ b/test/test_eumetsat_multicast.py
@@ -0,0 +1,77 @@
+import responses
+from brian_dashboard_manager.inventory_provider.interfaces import \
+    get_eumetsat_multicast_subscriptions
+from brian_dashboard_manager.templating.eumetsat \
+    import generate_eumetsat_multicast
+
+
+TEST_DATA = [
+    {
+        'router': 'mx1.ams.nl.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.ams.nl.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.lon.uk.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.lon.uk.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.fra.de.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.fra.de.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    }
+]
+
+
+@responses.activate
+def test_eumetsat_subscriptions(data_config, client):
+
+    responses.add(
+        method=responses.GET,
+        url=f'{data_config["inventory_provider"]}/poller/eumetsat-multicast',
+        json=TEST_DATA)
+
+    subscription_data = {}
+    for s in TEST_DATA:
+        subscription_data.setdefault(s['router'], set()).add(s['subscription'])
+
+    expected_titles = [
+        f'{name} subscriptions' for name in subscription_data.keys()]
+
+    subscriptions = get_eumetsat_multicast_subscriptions(
+        data_config['inventory_provider'])
+    dashboards = list(
+        generate_eumetsat_multicast(subscriptions, 'testdatasource'))
+
+    assert len(dashboards) == len(expected_titles)
+    assert all(d['title'] in expected_titles for d in dashboards)
+    # each dashboard should have 2 panels, matching the test data above
+    assert all(len(d['panels']) == 2 for d in dashboards)
diff --git a/test/test_grafana_dashboard.py b/test/test_grafana_dashboard.py
index 7cb29b70affc65a3ab995fb9790dc11853bf38ba..31991c1a0966811fa042cef5d760a1604844b2a3 100644
--- a/test/test_grafana_dashboard.py
+++ b/test/test_grafana_dashboard.py
@@ -16,20 +16,15 @@ def test_get_dashboard(data_config):
         method=responses.GET,
         url=request.BASE_URL +
         f'api/dashboards/uid/{UID}',
-        callback=lambda f: (
-            404,
-            {},
-            ''))
+        callback=lambda f: (404, {}, ''))
 
     data = dashboard._get_dashboard(request, UID)
     assert data is None
 
-    responses.add_callback(method=responses.GET,
-                           url=request.BASE_URL +
-                           f'api/dashboards/uid/{UID+1}',
-                           callback=lambda f: (200,
-                                               {},
-                                               json.dumps({"uid": 1})))
+    responses.add(
+        method=responses.GET,
+        url=request.BASE_URL + f'api/dashboards/uid/{UID+1}',
+        json={'uid': 1})
 
     data = dashboard._get_dashboard(request, UID + 1)
     assert data['uid'] == 1
@@ -42,24 +37,15 @@ def test_delete_dashboards(data_config):
 
     request = TokenRequest(**data_config, token='test')
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
-        url=request.BASE_URL +
-        f'api/dashboards/uid/{UID}',
-        callback=lambda f: (
-            200,
-            {},
-            json.dumps(
-                dashboards[0])))
+        url=request.BASE_URL + f'api/dashboards/uid/{UID}',
+        json=dashboards[0])
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
-        url=request.BASE_URL +
-        'api/search',
-        callback=lambda f: (
-            200,
-            {},
-            json.dumps(dashboards)))
+        url=request.BASE_URL + 'api/search',
+        json=dashboards)
 
     def delete_callback(request):
         uid = request.path_url.split('/')[-1]
@@ -79,10 +65,7 @@ def test_delete_dashboards(data_config):
         method=responses.DELETE,
         url=request.BASE_URL +
         f'api/dashboards/uid/{UID+1}',
-        callback=lambda f: (
-            400,
-            {},
-            ''))
+        callback=lambda f: (400, {}, ''))
 
     data = dashboard._delete_dashboard(request, UID + 1)
     assert data is False
@@ -98,19 +81,15 @@ def test_delete_dashboard(data_config):
     dash = {'id': ID, 'uid': UID, 'title': TITLE, 'version': VERSION}
     request = TokenRequest(**data_config, token='test')
 
-    def delete_callback(request):
-        return 200, {}, json.dumps({'message': 'deleted dashboard'})
-
-    responses.add_callback(method=responses.DELETE,
-                           url=request.BASE_URL + f'api/dashboards/uid/{UID}',
-                           callback=delete_callback)
-
-    def search_callback(request):
-        return 200, {}, json.dumps(dash)
+    responses.add(
+        method=responses.DELETE,
+        url=request.BASE_URL + f'api/dashboards/uid/{UID}',
+        json={'message': 'deleted dashboard'})
 
-    responses.add_callback(method=responses.GET,
-                           url=request.BASE_URL + 'api/search',
-                           callback=search_callback)
+    responses.add(
+        method=responses.GET,
+        url=request.BASE_URL + 'api/search',
+        json=dash)
 
     deleted = dashboard.delete_dashboard(request, dash)
     assert deleted
@@ -127,24 +106,15 @@ def test_search_dashboard(data_config):
 
     request = TokenRequest(**data_config, token='test')
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
-        url=request.BASE_URL +
-        'api/search',
-        callback=lambda f: (
-            200,
-            {},
-            json.dumps(dashboards)))
+        url=request.BASE_URL + 'api/search',
+        json=dashboards)
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
-        url=request.BASE_URL +
-        f'api/dashboards/uid/{UID}',
-        callback=lambda f: (
-            200,
-            {},
-            json.dumps(
-                dashboards[0])))
+        url=request.BASE_URL + f'api/dashboards/uid/{UID}',
+        json=dashboards[0])
 
     data = dashboard._search_dashboard(
         request, {'title': dashboards[0]['title']})
@@ -160,7 +130,8 @@ def test_search_dashboard_error(data_config):
 
     responses.add_callback(
         method=responses.GET,
-        url=request.BASE_URL + 'api/search', callback=lambda f: (400, {}, ''))
+        url=request.BASE_URL + 'api/search',
+        callback=lambda f: (400, {}, ''))
 
     data = dashboard._search_dashboard(request, {'title': 'DoesNotExist'})
     assert data is None
@@ -175,16 +146,15 @@ def test_create_dashboard(data_config):
     dashboard = {'id': ID, 'uid': UID, 'title': TITLE, 'version': VERSION}
     request = TokenRequest(**data_config, token='test')
 
-    def get_callback(request):
-        return 200, {}, json.dumps({'dashboard': dashboard})
-
-    responses.add_callback(method=responses.GET,
-                           url=request.BASE_URL + f'api/dashboards/uid/{UID}',
-                           callback=get_callback)
+    responses.add(
+        method=responses.GET,
+        url=request.BASE_URL + f'api/dashboards/uid/{UID}',
+        json={'dashboard': dashboard})
 
     responses.add_callback(
         method=responses.GET,
-        url=request.BASE_URL + 'api/search', callback=lambda f: (400, {}, ''))
+        url=request.BASE_URL + 'api/search',
+        callback=lambda f: (400, {}, ''))
 
     def post_callback(request):
         body = json.loads(request.body)
@@ -192,7 +162,8 @@ def test_create_dashboard(data_config):
 
     responses.add_callback(
         method=responses.POST,
-        url=request.BASE_URL + 'api/dashboards/db', callback=post_callback)
+        url=request.BASE_URL + 'api/dashboards/db',
+        callback=post_callback)
 
     data = provision.create_dashboard(request, dashboard)
     assert data == dashboard
@@ -208,7 +179,8 @@ def test_create_dashboard_no_uid_error(data_config):
 
     responses.add_callback(
         method=responses.GET,
-        url=request.BASE_URL + 'api/search', callback=lambda f: (400, {}, ''))
+        url=request.BASE_URL + 'api/search',
+        callback=lambda f: (400, {}, ''))
 
     def post_callback(request):
         body = json.loads(request.body)
@@ -221,7 +193,8 @@ def test_create_dashboard_no_uid_error(data_config):
 
     responses.add_callback(
         method=responses.POST,
-        url=request.BASE_URL + 'api/dashboards/db', callback=post_callback)
+        url=request.BASE_URL + 'api/dashboards/db',
+        callback=post_callback)
 
     data = provision.create_dashboard(request, dashboard)
     assert data is None
diff --git a/test/test_grafana_folder.py b/test/test_grafana_folder.py
index 3fa6b174576ecc239bd5f3824a0c2b92c0e56d90..e9baedc9e356078094bbd1b71b7b2db5f5af74ea 100644
--- a/test/test_grafana_folder.py
+++ b/test/test_grafana_folder.py
@@ -30,13 +30,10 @@ def test_find_folder(data_config):
 
     request = TokenRequest(**data_config, token='test')
 
-    def folder_get(request):
-        return 200, {}, json.dumps([])
-
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"http://{data_config['hostname']}/api/folders",
-        callback=folder_get)
+        json=[])
 
     def folder_post(request):
         data = json.loads(request.body)
diff --git a/test/test_gws_direct.py b/test/test_gws_direct.py
index 688db385a1a0d1371876168d4f46b4066ed5cb5b..cbbd2380cc47cdd9f07af637096191d6e08c5b79 100644
--- a/test/test_gws_direct.py
+++ b/test/test_gws_direct.py
@@ -1,5 +1,4 @@
 import responses
-import json
 from brian_dashboard_manager.templating.gws import generate_gws
 from brian_dashboard_manager.inventory_provider.interfaces import \
     get_gws_direct
@@ -110,15 +109,12 @@ TEST_DATA = [
 
 
 @responses.activate
-def test_gws(data_config, mocker, client):
+def test_gws(data_config, client):
 
-    def get_callback(request):
-        return 200, {}, json.dumps(TEST_DATA)
-
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"{data_config['inventory_provider']}/poller/gws/direct",
-        callback=get_callback)
+        json=TEST_DATA)
 
     gws_data = get_gws_direct(data_config['inventory_provider'])
 
diff --git a/test/test_gws_indirect.py b/test/test_gws_indirect.py
index 9edd611152bbd469f5ba3deec6a38e4069ed86e4..ba26780233982a47877ccc72f4e8d60234044067 100644
--- a/test/test_gws_indirect.py
+++ b/test/test_gws_indirect.py
@@ -1,5 +1,4 @@
 import responses
-import json
 from brian_dashboard_manager.templating.gws import generate_indirect
 from brian_dashboard_manager.inventory_provider.interfaces import \
     get_gws_indirect
@@ -76,15 +75,12 @@ TEST_DATA = [
 
 
 @responses.activate
-def test_gws(data_config, mocker, client):
+def test_gws(data_config, client):
 
-    def get_callback(request):
-        return 200, {}, json.dumps(TEST_DATA)
-
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"{data_config['inventory_provider']}/poller/gws/indirect",
-        callback=get_callback)
+        json=TEST_DATA)
 
     gws_data = get_gws_indirect(data_config['inventory_provider'])
 
diff --git a/test/test_update.py b/test/test_update.py
index 16735679523bb7a3d7a69e8348bc9f18beb23e11..94600c1ceb6da2928010c1859056c2e98c40081c 100644
--- a/test/test_update.py
+++ b/test/test_update.py
@@ -1,17 +1,9 @@
 import responses
 import json
-import re
 
 from brian_dashboard_manager.grafana.provision import provision_folder, \
     provision
 
-
-DEFAULT_REQUEST_HEADERS = {
-    "Content-type": "application/json",
-    "Accept": ["application/json"]
-}
-
-
 TEST_INTERFACES = [
     {
         "router": "srx2.ch.office.geant.net",
@@ -469,6 +461,51 @@ NREN_INTERFACES = [
     }
 ]
 
+EUMETSAT_MULTICAST = [
+    {
+        'router': 'mx1.ams.nl.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.ams.nl.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.lon.uk.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.lon.uk.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.fra.de.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.1.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.1',
+        'endpoint': '193.17.9.3'
+    },
+    {
+        'router': 'mx1.fra.de.geant.net',
+        'oid': '1.3.6.1.2.1.83.1.1.2.1.16.232.223.222.2.193.17.9.3.255.255.255.255',  # noqa: E501
+        'community': '0pBiFbD',
+        'subscription': '232.223.222.2',
+        'endpoint': '193.17.9.3'
+    }
+]
+
 
 def generate_folder(data):
     return {
@@ -494,12 +531,16 @@ def test_provision_folder(data_config, mocker):
         'NREN': {
             'tag': ['customers'],
             'folder_name': 'NREN Access',
-            'interfaces': [iface for iface in TEST_INTERFACES if 'NREN' in iface['dashboards']]  # noqa: E501
+            'interfaces': [
+                iface for iface in TEST_INTERFACES
+                if 'NREN' in iface['dashboards']]
         },
         'RE_CUST': {
             'tag': 'RE_CUST',
             'folder_name': 'RE Customer',
-            'interfaces': [iface for iface in TEST_INTERFACES if 'RE_CUST' in iface['dashboards']]  # noqa: E501
+            'interfaces': [
+                iface for iface in TEST_INTERFACES
+                if 'RE_CUST' in iface['dashboards']]
         },
 
     }
@@ -544,34 +585,35 @@ def test_provision_folder(data_config, mocker):
 @responses.activate
 def test_provision(data_config, mocker, client):
 
-    def get_callback(request):
-        return 200, {}, json.dumps(NREN_INTERFACES)
-
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"{data_config['inventory_provider']}/poller/interfaces",
-        callback=get_callback)
+        json=NREN_INTERFACES)
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"{data_config['inventory_provider']}/data/interfaces",
-        callback=get_callback)
-
-    def folder_get(request):
-        return 200, {}, json.dumps([])
+        json=NREN_INTERFACES)
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
+        url=f'{data_config["inventory_provider"]}/poller/eumetsat-multicast',
+        json=EUMETSAT_MULTICAST)
+
+    responses.add(
+        method=responses.DELETE,
         url=f"http://{data_config['hostname']}/api/folders",
-        callback=folder_get)
+        json={"message": "Deleted folder"})
 
-    def folder_delete(request):
-        return 200, {}, json.dumps({"message": "Deleted folder"})
+    responses.add(
+        method=responses.GET,
+        url=f"http://{data_config['hostname']}/api/folders",
+        json=[])
 
-    responses.add_callback(
-        method=responses.DELETE,
-        url=re.compile(f"http://{data_config['hostname']}/api/folders"),
-        callback=folder_delete, )
+    responses.add(
+        method='get',
+        url=f"http://{data_config['hostname']}/api/folders",
+        json=[])
 
     def folder_post(request):
         data = json.loads(request.body)
@@ -582,48 +624,41 @@ def test_provision(data_config, mocker, client):
         url=f"http://{data_config['hostname']}/api/folders",
         callback=folder_post)
 
-    def home_dashboard(request):
-        return 200, {}, json.dumps([])
+    def search_responses(request):
+        if request.params.get('query', None) == 'Home':
+            return 200, {}, json.dumps([])
+        if request.params.get('type', None) == 'dash-db':
+            return 200, {}, json.dumps([])
+        assert False  # no other queries expected
 
     responses.add_callback(
         method=responses.GET,
-        url=f"http://{data_config['hostname']}/api/search?query=Home",
-        callback=home_dashboard)
-
-    TEST_DATASOURCE = [{
-        "name": "brian-influx-datasource",
-        "type": "influxdb",
-        "access": "proxy",
-        "url": "http://test-brian-datasource.geant.org:8086",
-        "database": "test-db",
-        "basicAuth": False,
-        "isDefault": True,
-        "readOnly": False
-    }]
-
-    def datasources(request):
-        return 200, {}, json.dumps(TEST_DATASOURCE)
+        url=f"http://{data_config['hostname']}/api/search",
+        callback=search_responses)
 
-    responses.add_callback(
+    responses.add(
         method=responses.GET,
         url=f"http://{data_config['hostname']}/api/datasources",
-        callback=datasources)
-
-    def createdashboard(request):
-        return 200, {}, json.dumps({'id': 666})
-
-    responses.add_callback(
+        json=[{
+            "name": "brian-influx-datasource",
+            "type": "influxdb",
+            "access": "proxy",
+            "url": "http://test-brian-datasource.geant.org:8086",
+            "database": "test-db",
+            "basicAuth": False,
+            "isDefault": True,
+            "readOnly": False
+        }])
+
+    responses.add(
         method=responses.POST,
         url=f"http://{data_config['hostname']}/api/dashboards/db",
-        callback=createdashboard)
+        json={'uid': '999', 'id': 666})
 
-    def preferences(request):
-        return 200, {}, json.dumps({'message': 'Preferences updated'})
-
-    responses.add_callback(
+    responses.add(
         method=responses.PUT,
         url=f"http://{data_config['hostname']}/api/org/preferences",
-        callback=preferences)
+        json={'message': 'Preferences updated'})
 
     def homedashboard(request):
         return 404, {}, ''
@@ -691,7 +726,7 @@ def test_provision(data_config, mocker, client):
         'brian_dashboard_manager.grafana.provision.create_dashboard')
     # we dont care about this, just mark it created
     # we dont care about this, tested separately
-    _mocked_create_dashboard.return_value = {'id': 666}
+    _mocked_create_dashboard.return_value = {'uid': '999', 'id': 666}
 
     _mocked_delete_api_token = mocker.patch(
         'brian_dashboard_manager.grafana.provision.delete_api_token')