diff --git a/brian_dashboard_manager/grafana/provision.py b/brian_dashboard_manager/grafana/provision.py
index f96acada7aa228d9dfbd0e80d38bca376329ceab..e5449bc0fa8c1bad33017445229cc186a4374fe2 100644
--- a/brian_dashboard_manager/grafana/provision.py
+++ b/brian_dashboard_manager/grafana/provision.py
@@ -7,7 +7,6 @@ import logging
 import time
 import json
 import datetime
-from functools import reduce
 from concurrent.futures import Future
 from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
 from brian_dashboard_manager.config import DEFAULT_ORGANIZATIONS, STATE_PATH
@@ -39,6 +38,120 @@ from brian_dashboard_manager.templating.render import render_dashboard
 
 logger = logging.getLogger(__name__)
 
+MAX_WORKERS = 1
+DASHBOARDS = {
+    'NREN': {
+        'tag': ['customers'],
+        'folder_name': 'NREN Access',
+        'interfaces': []
+    },
+    'CLS': {
+        'tag': 'CLS',
+        'folder_name': 'CLS',
+        'interfaces': []
+    },
+    'RE_PEER': {
+        'tag': 'RE_PEER',
+        'folder_name': 'RE Peer',
+        'interfaces': []
+    },
+    'RE_CUST': {
+        'tag': 'RE_CUST',
+        'folder_name': 'RE Customer',
+        'interfaces': []
+    },
+    'GEANTOPEN': {
+        'tag': 'GEANTOPEN',
+        'folder_name': 'GEANTOPEN',
+        'interfaces': []
+    },
+    'GCS': {
+        'tag': 'AUTOMATED_L2_CIRCUITS',
+        'folder_name': 'GCS',
+        'interfaces': []
+    },
+    'L2_CIRCUIT': {
+        'tag': 'L2_CIRCUITS',
+        'folder_name': 'L2 Circuit',
+        'interfaces': []
+    },
+    'LHCONE_PEER': {
+        'tag': 'LHCONE_PEER',
+        'folder_name': 'LHCONE Peer',
+        'interfaces': []
+    },
+    'LHCONE_CUST': {
+        'tag': 'LHCONE_CUST',
+        'folder_name': 'LHCONE Customer',
+        'interfaces': []
+    },
+    'MDVPN_CUSTOMERS': {
+        'tag': 'MDVPN',
+        'folder_name': 'MDVPN Customers',
+        'interfaces': []
+    },
+    'INFRASTRUCTURE_BACKBONE': {
+        'tag': 'BACKBONE',
+        'errors': True,
+        'folder_name': 'Infrastructure Backbone',
+        'interfaces': []
+    },
+    'IAS_PRIVATE': {
+        'tag': 'IAS_PRIVATE',
+        'folder_name': 'IAS Private',
+        'interfaces': []
+    },
+    'IAS_PUBLIC': {
+        'tag': 'IAS_PUBLIC',
+        'folder_name': 'IAS Public',
+        'interfaces': []
+    },
+    'IAS_CUSTOMER': {
+        'tag': 'IAS_CUSTOMER',
+        'folder_name': 'IAS Customer',
+        'interfaces': []
+    },
+    'IAS_UPSTREAM': {
+        'tag': ['IAS_UPSTREAM', 'UPSTREAM'],
+        'folder_name': 'IAS Upstream',
+        'interfaces': []
+    },
+    'GWS_PHY_UPSTREAM': {
+        'tag': ['GWS_UPSTREAM', 'UPSTREAM'],
+        'errors': True,
+        'folder_name': 'GWS PHY Upstream',
+        'interfaces': []
+    }
+}
+
+AGG_DASHBOARDS = {
+    'CLS_PEERS': {
+        'tag': 'cls_peers',
+        'dashboard_name': 'CLS Peers',
+        'interfaces': []
+    },
+    'IAS_PEERS': {
+        'tag': 'ias_peers',
+        'dashboard_name': 'IAS Peers',
+        'interfaces': []
+    },
+    'IAS_UPSTREAM': {
+        'tag': 'gws_upstreams',
+        'dashboard_name': 'GWS Upstreams',
+        'interfaces': []
+    },
+    'LHCONE': {
+        'tag': 'lhcone',
+        'dashboard_name': 'LHCONE',
+        'interfaces': []
+    },
+    'CAE1': {
+        'tag': 'cae',
+        'dashboard_name': 'CAE1',
+        'interfaces': []
+    }
+}
+
 
 def provision_folder(token_request, folder_name, dash,
                      ds_name, excluded_dashboards):
@@ -62,7 +175,11 @@ def provision_folder(token_request, folder_name, dash,
         dash_data = get_nren_dashboard_data(data, ds_name, tag)
     else:
         data = get_interface_data(interfaces)
-        dash_data = get_dashboard_data(data, ds_name, tag, errors)
+        dash_data = get_dashboard_data(
+            data=data,
+            datasource=ds_name,
+            tag=tag,
+            errors=errors)
 
     if not isinstance(excluded_dashboards, list):
         excluded_dashboards = []
@@ -71,7 +188,7 @@ def provision_folder(token_request, folder_name, dash,
 
     provisioned = []
 
-    with ThreadPoolExecutor(max_workers=4) as executor:
+    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
         for dashboard in dash_data:
             rendered = render_dashboard(dashboard, nren=is_nren)
             if rendered.get('title').lower() in excluded_dashboards:
@@ -120,394 +237,347 @@ def provision_maybe(config):
             write_timestamp(now.timestamp(), False)
 
 
-def provision(config):
+def is_excluded_folder(org_config, folder_name):
+    excluded_folders = org_config.get('excluded_folders', {})
+    excluded = excluded_folders.get(folder_name, False)
+    # boolean True means entire folder excluded
+    # if list, it is specific dashboard names not to provision
+    # so is handled at provision time.
+    return isinstance(excluded, bool) and excluded
 
-    request = AdminRequest(**config)
-    all_orgs = get_organizations(request)
 
-    orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
-    ignored_folders = config.get('ignored_folders', [])
+def excluded_folder_dashboards(org_config, folder_name):
+    excluded_folders = org_config.get('excluded_folders', {})
+    excluded = excluded_folders.get(folder_name, [])
+    return excluded if isinstance(excluded, list) else []
 
-    missing = (org['name'] for org in orgs_to_provision
-               if org['name'] not in [org['name'] for org in all_orgs])
 
-    for org_name in missing:
-        org_data = create_organization(request, org_name)
-        all_orgs.append(org_data)
+class DashboardChanges(object):
+    def __init__(self, token):
+        # Map of dashboard UID -> whether it has been updated.
+        # This is used to remove stale dashboards at the end.
+        all_dashboards = find_dashboard(token) or []
+        self.updated = {d['uid']: False for d in all_dashboards}
+
+    def update_dash_list(self, dashboards):
+        for dashboard in dashboards:
+            if isinstance(dashboard, Future):
+                dashboard = dashboard.result()
+            if dashboard is None:
+                continue
+            self.updated[dashboard.get('uid')] = True
+
+    def delete_untouched(self, token):
+        for uid, provisioned in self.updated.items():
+            if not provisioned:
+                logger.info(f'Deleting stale dashboard with UID {uid}')
+                delete_dashboard(token, {'uid': uid})
+
+
+DASHBOARD_CHANGES = None  # will be an instance of DashboardChanges
 
+def _provision_interfaces(config, org_config, ds_name, token):
     interfaces = get_interfaces(config['inventory_provider'])
-    tokens = []
 
-    start = time.time()
+    excluded_nrens = org_config['excluded_nrens']
 
-    for org in all_orgs:
-        org_id = org['id']
-        delete_expired_api_tokens(request, org_id)
-        token = create_api_token(request, org_id)
-        token_request = TokenRequest(token=token['key'], **config)
-        tokens.append((org_id, token['id']))
+    def excluded(interface):
+        desc = interface['description'].lower()
+        lab = 'lab.office' in interface['router'].lower()
+        to_exclude = any(nren.lower() in desc for nren in excluded_nrens)
+        return not (to_exclude or lab)
 
-        logger.info(
-            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
 
-        try:
-            org_config = next(
-                o for o in orgs_to_provision if o['name'] == org['name'])
-        except StopIteration:
-            org_config = None
+    relevant_interfaces = list(filter(excluded, interfaces))
 
-        if not org_config:
-            logger.error(
-                f'Org {org["name"]} does not have valid configuration.')
-            org['info'] = 'Org exists in grafana but is not configured'
-            continue
+    # Provision dashboards, overwriting existing ones.
 
-        # Only provision influxdb datasource for now
-        datasource = config.get('datasources').get('influxdb')
-
-        # Provision missing data sources
-        if not check_provisioned(token_request, datasource):
-            ds = create_datasource(token_request,
-                                   datasource,
-                                   config.get('datasources'))
-            if ds:
-                logger.info(
-                    f'Provisioned datasource: {datasource["name"]}')
-
-        excluded_nrens = org_config['excluded_nrens']
-
-        def excluded(interface):
-            desc = interface['description'].lower()
-            lab = 'lab.office' in interface['router'].lower()
-            to_exclude = any(nren.lower() in desc for nren in excluded_nrens)
-            return not (to_exclude or lab)
-
-        relevant_interfaces = list(filter(excluded, interfaces))
-
-        dashboards = {
-            'NREN': {
-                'tag': ['customers'],
-                'folder_name': 'NREN Access',
-                'interfaces': []
-            },
-            'CLS': {
-                'tag': 'CLS',
-                'folder_name': 'CLS',
-                'interfaces': []
-            },
-            'RE_PEER': {
-                'tag': 'RE_PEER',
-                'folder_name': 'RE Peer',
-                'interfaces': []
-            },
-            'RE_CUST': {
-                'tag': 'RE_CUST',
-                'folder_name': 'RE Customer',
-                'interfaces': []
-            },
-            'GEANTOPEN': {
-                'tag': 'GEANTOPEN',
-                'folder_name': 'GEANTOPEN',
-                'interfaces': []
-            },
-            'GCS': {
-                'tag': 'AUTOMATED_L2_CIRCUITS',
-                'folder_name': 'GCS',
-                'interfaces': []
-            },
-            'L2_CIRCUIT': {
-                'tag': 'L2_CIRCUITS',
-                'folder_name': 'L2 Circuit',
-                'interfaces': []
-            },
-            'LHCONE_PEER': {
-                'tag': 'LHCONE_PEER',
-                'folder_name': 'LHCONE Peer',
-                'interfaces': []
-            },
-            'LHCONE_CUST': {
-                'tag': 'LHCONE_CUST',
-                'folder_name': 'LHCONE Customer',
-                'interfaces': []
-            },
-            'MDVPN_CUSTOMERS': {
-                'tag': 'MDVPN',
-                'folder_name': 'MDVPN Customers',
-                'interfaces': []
-            },
-            'INFRASTRUCTURE_BACKBONE': {
-                'tag': 'BACKBONE',
-                'errors': True,
-                'folder_name': 'Infrastructure Backbone',
-                'interfaces': []
-            },
-            'IAS_PRIVATE': {
-                'tag': 'IAS_PRIVATE',
-                'folder_name': 'IAS Private',
-                'interfaces': []
-            },
-            'IAS_PUBLIC': {
-                'tag': 'IAS_PUBLIC',
-                'folder_name': 'IAS Public',
-                'interfaces': []
-            },
-            'IAS_CUSTOMER': {
-                'tag': 'IAS_CUSTOMER',
-                'folder_name': 'IAS Customer',
-                'interfaces': []
-            },
-            'IAS_UPSTREAM': {
-                'tag': ['IAS_UPSTREAM', 'UPSTREAM'],
-                'folder_name': 'IAS Upstream',
-                'interfaces': []
-            },
-            'GWS_PHY_UPSTREAM': {
-                'tag': ['GWS_UPSTREAM', 'UPSTREAM'],
-                'errors': True,
-                'folder_name': 'GWS PHY Upstream',
-                'interfaces': []
-            }
-        }
-
-        agg_dashboards = {
-            'CLS_PEERS': {
-                'tag': 'cls_peers',
-                'dashboard_name': 'CLS Peers',
-                'interfaces': []
-            },
-            'IAS_PEERS': {
-                'tag': 'ias_peers',
-                'dashboard_name': 'IAS Peers',
-                'interfaces': []
-            },
-            'IAS_UPSTREAM': {
-                'tag': 'gws_upstreams',
-                'dashboard_name': 'GWS Upstreams',
-                'interfaces': []
-            },
-            'LHCONE': {
-                'tag': 'lhcone',
-                'dashboard_name': 'LHCONE',
-                'interfaces': []
-            },
-            'CAE1': {
-                'tag': 'cae',
-                'dashboard_name': 'CAE1',
-                'interfaces': []
-            }
-        }
-        # Provision dashboards, overwriting existing ones.
-        ds_name = datasource.get('name', 'PollerInfluxDB')
-        excluded_folders = org_config.get('excluded_folders', {})
 
-        def get_uid(prev, curr):
-            prev[curr.get('uid')] = False
-            return prev
 
-        # Map of dashboard UID -> whether it has been updated.
-        # This is used to remove stale dashboards at the end.
-        updated = find_dashboard(token_request) or []
-        updated = reduce(get_uid, updated, {})
 
-        # General is a base folder present in Grafana
-        folders_to_keep = ['General', 'GWS Indirect',
-                           'GWS Direct', 'Aggregates']
-        folders_to_keep.extend([dash['folder_name']
-                                for dash in dashboards.values()])
-
-        def update_dash_list(dashboards):
-            for dashboard in dashboards:
-                if isinstance(dashboard, Future):
-                    dashboard = dashboard.result()
-                if dashboard is None:
-                    continue
-                updated[dashboard.get('uid')] = True
+    # loop over interfaces and add them to the dashboard_name
+    # -> folder mapping structure `dashboards` above, for convenience.
+    for iface in relevant_interfaces:
+        for dash_name in iface['dashboards']:
+
+            # add interface to matched dashboard
+            if dash_name in DASHBOARDS:
+                ifaces = DASHBOARDS[dash_name]['interfaces']
+                ifaces.append(iface)
 
-        # loop over interfaces and add them to the dashboard_name
-        # -> folder mapping structure `dashboards` above, for convenience.
-        for iface in relevant_interfaces:
-            for dash_name in iface['dashboards']:
+            # add to matched aggregate dashboard
+            if dash_name in AGG_DASHBOARDS:
+                ifaces = AGG_DASHBOARDS[dash_name]['interfaces']
+                ifaces.append(iface)
 
-                # add interface to matched dashboard
-                if dash_name in dashboards:
-                    ifaces = dashboards[dash_name]['interfaces']
-                    ifaces.append(iface)
+    # provision dashboards and their folders
+    with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
+        provisioned = []
+        for folder in DASHBOARDS.values():
+            folder_name = folder['folder_name']
 
-                # add to matched aggregate dashboard
-                if dash_name in agg_dashboards:
-                    ifaces = agg_dashboards[dash_name]['interfaces']
-                    ifaces.append(iface)
+            # boolean True means entire folder excluded
+            # if list, it is specific dashboard names not to provision
+            # so is handled at provision time.
+            if is_excluded_folder(org_config, folder_name):
+                executor.submit(
+                    delete_folder, token, title=folder_name)
+                continue
 
-        # provision dashboards and their folders
-        with ProcessPoolExecutor(max_workers=4) as executor:
+            logger.info(
+                f'Provisioning {org_config["name"]}/{folder_name} dashboards')
+            res = executor.submit(provision_folder, token,
+                                  folder_name, folder, ds_name,
+                                  excluded_folder_dashboards(org_config, folder_name))
+            provisioned.append(res)
+
+        for result in provisioned:
+            folder = result.result()
+            if folder is None:
+                continue
+            DASHBOARD_CHANGES.update_dash_list(folder)
+
+
+def _provision_gws_indirect(config, org_config, ds_name, token):
+    # fetch GWS indirect data and provision related dashboards
+    logger.info('Provisioning GWS Indirect dashboards')
+    folder_name = 'GWS Indirect'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision GWS Indirect folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            gws_indirect_data = get_gws_indirect(
+                config['inventory_provider'])
+            provisioned = []
+            dashes = generate_indirect(gws_indirect_data, ds_name)
+            for dashboard in dashes:
+                rendered = render_dashboard(dashboard)
+                provisioned.append(executor.submit(create_dashboard,
+                                                   token,
+                                                   rendered, folder['id']))
+
+            DASHBOARD_CHANGES.update_dash_list(provisioned)
+
+
+def _provision_gws_direct(config, org_config, ds_name, token):
+    # fetch GWS direct data and provision related dashboards
+    logger.info('Provisioning GWS Direct dashboards')
+    folder_name = 'GWS Direct'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision GWS Direct folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            gws_data = get_gws_direct(config['inventory_provider'])
             provisioned = []
-            for folder in dashboards.values():
-                folder_name = folder['folder_name']
-                exclude = excluded_folders.get(folder_name)
-
-                # boolean True means entire folder excluded
-                # if list, it is specific dashboard names not to provision
-                # so is handled at provision time.
-                if exclude:
-                    if isinstance(exclude, bool):
-                        executor.submit(
-                            delete_folder, token_request, folder_name)
-                        continue
-
-                logger.info(
-                    f'Provisioning {org["name"]}/{folder_name} dashboards')
-                res = executor.submit(provision_folder, token_request,
-                                      folder_name, folder, ds_name,
-                                      exclude)
-                provisioned.append(res)
 
-            for result in provisioned:
-                folder = result.result()
-                if folder is None:
+            for dashboard in generate_gws(gws_data, ds_name):
+                rendered = render_dashboard(dashboard)
+                provisioned.append(executor.submit(create_dashboard,
+                                                   token,
+                                                   rendered, folder['id']))
+
+            DASHBOARD_CHANGES.update_dash_list(provisioned)
+
+
+def _provision_eumetsat_multicast(config, org_config, ds_name, token):
+    # fetch EUMETSAT multicast data and provision related dashboards
+    logger.info('Provisioning EUMETSAT Multicast dashboards')
+    folder_name = 'EUMETSAT Multicast'
+    if is_excluded_folder(org_config, folder_name):
+        # don't provision EUMETSAT Multicast folder
+        delete_folder(token, title=folder_name)
+    else:
+        folder = find_folder(token, folder_name)
+        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            subscriptions = get_eumetsat_multicast_subscriptions(config['inventory_provider'])
+            provisioned = []
+
+            for dashboard in generate_eumetsat_multicast(subscriptions, ds_name):
+                rendered = render_dashboard(dashboard)
+                provisioned.append(executor.submit(create_dashboard,
+                                                   token,
+                                                   rendered, folder['id']))
+
+            DASHBOARD_CHANGES.update_dash_list(provisioned)
+
+
+def _provision_aggregates(config, org_config, ds_name, token):
+    if is_excluded_folder(org_config, 'Aggregates'):
+        # don't provision aggregate folder
+        delete_folder(token, title='Aggregates')
+    else:
+        with ProcessPoolExecutor(max_workers=MAX_WORKERS) as executor:
+            provisioned = []
+            agg_folder = find_folder(token, 'Aggregates')
+            for dash in AGG_DASHBOARDS.values():
+                excluded_dashboards = excluded_folder_dashboards(org_config, 'Aggregates')
+                if dash['dashboard_name'] in excluded_dashboards:
+                    dash_name = {
+                        'title': f'Aggregate - {dash["dashboard_name"]}'}
+                    executor.submit(delete_dashboard,
+                                    token, dash_name,
+                                    agg_folder['id'])
                     continue
-                update_dash_list(folder)
-
-        # fetch GWS direct data and provision related dashboards
-        logger.info('Provisioning GWS Indirect dashboards')
-        folder_name = 'GWS Indirect'
-        exclude_indirect = excluded_folders.get(folder_name, False)
-        if isinstance(exclude_indirect, bool) and exclude_indirect:
-            # don't provision GWS Direct folder
-            delete_folder(token_request, folder_name)
-        else:
-            folder = find_folder(token_request, folder_name)
-            with ThreadPoolExecutor(max_workers=4) as executor:
-                gws_indirect_data = get_gws_indirect(
-                    config['inventory_provider'])
-                provisioned = []
-                dashes = generate_indirect(gws_indirect_data, ds_name)
-                for dashboard in dashes:
-                    rendered = render_dashboard(dashboard)
-                    provisioned.append(executor.submit(create_dashboard,
-                                                       token_request,
-                                                       rendered, folder['id']))
-
-                update_dash_list(provisioned)
-
-        # fetch GWS direct data and provision related dashboards
-        logger.info('Provisioning GWS Direct dashboards')
-        folder_name = 'GWS Direct'
-        exclude_gws = excluded_folders.get(folder_name, False)
-        if isinstance(exclude_gws, bool) and exclude_gws:
-            # don't provision GWS Direct folder
-            delete_folder(token_request, folder_name)
-        else:
-            folder = find_folder(token_request, folder_name)
-            with ThreadPoolExecutor(max_workers=4) as executor:
-                gws_data = get_gws_direct(config['inventory_provider'])
-                provisioned = []
-
-                for dashboard in generate_gws(gws_data, ds_name):
-                    rendered = render_dashboard(dashboard)
-                    provisioned.append(executor.submit(create_dashboard,
-                                                       token_request,
-                                                       rendered, folder['id']))
-
-                update_dash_list(provisioned)
-
-        # fetch EUMETSAT multicast provision related dashboards
-        logger.info('Provisioning EUMETSAT Multicast dashboards')
-        folder_name = 'EUMETSAT Multicast'
-        exclude_eumet_mc = excluded_folders.get(folder_name, False)
-        if isinstance(exclude_eumet_mc, bool) and exclude_eumet_mc:
-            # don't provision EUMETSAT Multicast folder
-            delete_folder(token_request, folder_name)
+                logger.info(f'Provisioning {org_config["name"]}' +
+                            f'/Aggregate {dash["dashboard_name"]} dashboards')  # noqa: E501
+                res = executor.submit(
+                    provision_aggregate, token,
+                    agg_folder, dash, ds_name)
+                provisioned.append(res)
+
+            DASHBOARD_CHANGES.update_dash_list(provisioned)
+
+
+def _provision_static_dashboards(config, org_config, ds_name, token):
+    # Statically defined dashboards from json files
+    excluded_dashboards = org_config.get('excluded_dashboards', [])
+    logger.info('Provisioning static dashboards')
+    for dashboard in get_dashboard_definitions():
+        if dashboard['title'] not in excluded_dashboards:
+            res = create_dashboard(token, dashboard)
+            if res:
+                DASHBOARD_CHANGES.updated[res.get('uid')] = True
         else:
-            folder = find_folder(token_request, folder_name)
-            with ThreadPoolExecutor(max_workers=4) as executor:
-                subscriptions = get_eumetsat_multicast_subscriptions(config['inventory_provider'])
-                provisioned = []
+            delete_dashboard(token, dashboard)
 
-                for dashboard in generate_eumetsat_multicast(subscriptions, ds_name):
-                    rendered = render_dashboard(dashboard)
-                    provisioned.append(executor.submit(create_dashboard,
-                                                       token_request,
-                                                       rendered, folder['id']))
+    # Home dashboard is always called "Home"
+    # Make sure it's set for the organization
+    logger.info('Configuring Home dashboard')
+    set_home_dashboard(token, org_config['name'] == 'GÉANT Staff')
+    # just hardcode that we updated home dashboard
+    DASHBOARD_CHANGES.updated['home'] = True
 
-                update_dash_list(provisioned)
 
-        exclude_agg = excluded_folders.get('Aggregates', [])
+def _set_ignored_folders_as_updated(config, org_config, token):
+    # get dashboard UIDs from ignored folders
+    # and make sure we don't touch them
+    ignored_folders = config.get('ignored_folders', [])
+    for name in ignored_folders:
+        logger.info(
+            f'Ignoring dashboards under the folder {org_config["name"]}/{name}')
+        folder = find_folder(token, name, create=False)
+        if folder is None:
+            continue
+        to_ignore = find_dashboard(token, folder_id=folder['id'])
 
-        if isinstance(exclude_agg, bool) and exclude_agg:
-            # don't provision aggregate folder
-            delete_folder(token_request, 'Aggregates')
-        else:
-            with ProcessPoolExecutor(max_workers=4) as executor:
-                provisioned = []
-                agg_folder = find_folder(token_request, 'Aggregates')
-                for dash in agg_dashboards.values():
-                    if dash['dashboard_name'] in exclude_agg:
-                        dash_name = {
-                            'title': f'Aggregate - {dash["dashboard_name"]}'}
-                        executor.submit(delete_dashboard,
-                                        token_request, dash_name,
-                                        agg_folder['id'])
-                        continue
-                    logger.info(f'Provisioning {org["name"]}' +
-                                f'/Aggregate {dash["dashboard_name"]} dashboards')  # noqa: E501
-                    res = executor.submit(
-                        provision_aggregate, token_request,
-                        agg_folder, dash, ds_name)
-                    provisioned.append(res)
-
-                update_dash_list(provisioned)
-
-        # Statically defined dashboards from json files
-        excluded_dashboards = org_config.get('excluded_dashboards', [])
-        logger.info('Provisioning static dashboards')
-        for dashboard in get_dashboard_definitions():
-            if dashboard['title'] not in excluded_dashboards:
-                res = create_dashboard(token_request, dashboard)
-                if res:
-                    updated[res.get('uid')] = True
-            else:
-                delete_dashboard(token_request, dashboard)
-
-        # Home dashboard is always called "Home"
-        # Make sure it's set for the organization
-        logger.info('Configuring Home dashboard')
-        is_staff = org['name'] == 'GÉANT Staff'
-        set_home_dashboard(token_request, is_staff)
-        # just hardcode that we updated home dashboard
-        updated['home'] = True
-
-        # get dashboard UIDs from ignored folders
-        # and make sure we don't touch them
-        for name in ignored_folders:
-            folders_to_keep.append(name)
+        if to_ignore is None:
+            continue
+
+        for dash in to_ignore:
+            # mark it updated, so we don't modify it.
+            DASHBOARD_CHANGES.updated[dash['uid']] = True
+
+
+def _delete_unknown_folders(config, token):
+    all_folders = get_folders(token)
+
+    folders_to_keep = [
+        # General is a base folder present in Grafana
+        'General', 'GWS Indirect', 'GWS Direct', 'Aggregates']
+    folders_to_keep.extend([dash['folder_name']
+                            for dash in DASHBOARDS.values()])
+    ignored_folders = config.get('ignored_folders', [])
+    folders_to_keep.extend(ignored_folders)
+    folders_to_keep = set(folders_to_keep)  # de-dupe
+
+    for folder in all_folders:
+        if folder['title'] in folders_to_keep:
+            continue
+        delete_folder(token, uid=folder['uid'])
+
+
+def _provision_datasource(config, token):
+    # Only provision influxdb datasource for now
+    datasource = config.get('datasources').get('influxdb')
+
+    # Provision missing data sources
+    if not check_provisioned(token, datasource):
+        ds = create_datasource(token,
+                               datasource,
+                               config.get('datasources'))
+        if ds:
             logger.info(
-                f'Ignoring dashboards under the folder {org["name"]}/{name}')
-            folder = find_folder(token_request, name, create=False)
-            if folder is None:
-                continue
-            to_ignore = find_dashboard(token_request, folder_id=folder['id'])
+                f'Provisioned datasource: {datasource["name"]}')
 
-            if to_ignore is None:
-                continue
+    return datasource
 
-            for dash in to_ignore:
-                # mark it updated, so we don't modify it.
-                updated[dash['uid']] = True
 
-        for dash, provisioned in updated.items():
-            if not provisioned:
-                logger.info(f'Deleting stale dashboard with UID {dash}')
-                delete_dashboard(token_request, {'uid': dash})
+def _provision_orgs(config):
+    request = AdminRequest(**config)
+    all_orgs = get_organizations(request)
 
-        all_folders = get_folders(token_request)
-        folders_to_keep = set(folders_to_keep)
+    orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
 
-        for folder in all_folders:
-            if folder['title'] not in folders_to_keep:
-                delete_folder(token_request, uid=folder['uid'])
+    missing = (org['name'] for org in orgs_to_provision
+               if org['name'] not in [org['name'] for org in all_orgs])
+
+    for org_name in missing:
+        org_data = create_organization(request, org_name)
+        all_orgs.append(org_data)
+
+    return all_orgs
+
+
+def provision(config):
+
+    global DASHBOARD_CHANGES
+
+    start = time.time()
+    tokens = []
+    all_orgs = _provision_orgs(config)
+    request = AdminRequest(**config)
+
+    def _find_org_config(org):
+        orgs_to_provision = config.get('organizations', DEFAULT_ORGANIZATIONS)
+        try:
+            return next(
+                o for o in orgs_to_provision if o['name'] == org['name'])
+        except StopIteration:
+            logger.error(
+                f'Org {org["name"]} does not have valid configuration.')
+            org['info'] = 'Org exists in grafana but is not configured'
+            return None
+
+
+    for org in all_orgs:
+        org_id = org['id']
+        delete_expired_api_tokens(request, org_id)
+        token = create_api_token(request, org_id)
+        token_request = TokenRequest(token=token['key'], **config)
+        tokens.append((org_id, token['id']))
+
+        DASHBOARD_CHANGES = DashboardChanges(token_request)
+
+        logger.info(
+            f'--- Provisioning org {org["name"]} (ID #{org_id}) ---')
+
+        org_config = _find_org_config(org)
+        if not org_config:
+            # message logged from _find_org_config
+            continue
+
+        datasource = _provision_datasource(config, token_request)
+        ds_name = datasource.get('name', 'PollerInfluxDB')
+
+        _provision_interfaces(config, org_config, ds_name, token_request)
+        _provision_gws_indirect(config, org_config, ds_name, token_request)
+        _provision_gws_direct(config, org_config, ds_name, token_request)
+        _provision_eumetsat_multicast(config, org_config, ds_name, token_request)
+        _provision_aggregates(config, org_config, ds_name, token_request)
+        _provision_static_dashboards(config, org_config, ds_name, token_request)
+        _set_ignored_folders_as_updated(config, org_config, token_request)
+
+        DASHBOARD_CHANGES.delete_untouched(token_request)
+
+        _delete_unknown_folders(config, token_request)
 
-    logger.info(f'Time to complete: {time.time() - start}')
     for org_id, token in tokens:
         delete_api_token(request, org_id, token)
 
+    logger.info(f'Time to complete: {time.time() - start}')
+
     return all_orgs