Skip to content
Snippets Groups Projects
Commit fd4d13f6 authored by Erik Reid's avatar Erik Reid
Browse files

load reporting provider SCIDs

parent 2779e06d
Branches
Tags
No related merge requests found
...@@ -3,22 +3,77 @@ import logging ...@@ -3,22 +3,77 @@ import logging
import os import os
import time import time
from threading import Event from threading import Event
import concurrent.futures
import requests import requests
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _load_and_process_services(inventory_base_uri: str, working_dirname: str) -> None: POLLER_INTERFACES_CACHE_FILENAME = 'poller-interfaces.json'
""" REPORTING_SCID_CURRENT_CACHE_FILENAME = 'reporting-scid-current.json'
Load the services from the inventory and process them. MAP_SERVICES_CACHE_FILENAME = 'map-services.json'
def _load_and_cache_json(
key: str,
url: str,
cache_filename: str,
working_dirname: str) -> None:
""" """
Load the JSON from the URL, return and cache it.
:param key: key to return with the loaded data for distinguishing futures
:param url: URL to load the data from
:param cache_filename: filename to cache the data under
:param working_dirname: directory to cache the data under
"""
# TODO: proper error handling # TODO: proper error handling
# TODO: actually process the services # TODO: maybe try to return cached data if http fails?
rv = requests.get(f'{inventory_base_uri}/poller/interfaces') rv = requests.get(url)
rv.raise_for_status() rv.raise_for_status()
with open(os.path.join(working_dirname, 'poller-interfaces.json'), 'w+') as f: with open(os.path.join(working_dirname, cache_filename), 'w+') as f:
f.write(json.dumps(rv.json())) f.write(json.dumps(rv.json()))
return {
'key': cache_filename,
'value': rv.json()
}
def _load_and_process_services(inventory_base_uri: str, reporting_base_uri: str, working_dirname: str) -> None:
"""
Load and process service info from inventory+reporting provider.
:param inventory_base_uri: base URI of the inventory provider
:param reporting_base_uri: base URI of the reporting provider
:param working_dirname: directory to cache the services
"""
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
futures = [
executor.submit(
_load_and_cache_json,
key='poller-interfaces',
url=f'{inventory_base_uri}/poller/interfaces',
cache_filename=POLLER_INTERFACES_CACHE_FILENAME,
working_dirname=working_dirname),
executor.submit(
_load_and_cache_json,
key='map-services',
url=f'{inventory_base_uri}/map/services',
cache_filename=MAP_SERVICES_CACHE_FILENAME,
working_dirname=working_dirname),
executor.submit(
_load_and_cache_json,
key='scid-current',
url=f'{reporting_base_uri}/scid/current',
cache_filename=REPORTING_SCID_CURRENT_CACHE_FILENAME,
working_dirname=working_dirname),
]
responses = {}
for _f in concurrent.futures.as_completed(futures):
responses[_f.result()['key']] = _f.result()['value']
print(responses)
def worker_proc( def worker_proc(
......
...@@ -27,6 +27,7 @@ class SentryConfig(BaseModel): ...@@ -27,6 +27,7 @@ class SentryConfig(BaseModel):
class Configuration(BaseModel): class Configuration(BaseModel):
sentry: SentryConfig | None = None sentry: SentryConfig | None = None
inventory: HttpUrl inventory: HttpUrl
reporting: HttpUrl
rmq: RMQConnectionParams | None = None rmq: RMQConnectionParams | None = None
correlator_exchange: str = 'dashboard.alarms.broadcast' correlator_exchange: str = 'dashboard.alarms.broadcast'
......
...@@ -16,7 +16,8 @@ def dummy_config(): ...@@ -16,7 +16,8 @@ def dummy_config():
'dsn': 'https://token@hostname.geant.org:1111/a/b', 'dsn': 'https://token@hostname.geant.org:1111/a/b',
'environment': 'unit tests' 'environment': 'unit tests'
}, },
'inventory': 'https://dummy-hostname.dummy.domain', 'inventory': 'https://inventory.bogus.domain',
'reporting': 'http://reporting.another-bogus.domain',
# no rmq param to skip correlator thread # no rmq param to skip correlator thread
# 'rmq': { # 'rmq': {
# 'brokers': [ # 'brokers': [
......
This diff is collapsed.
...@@ -5,8 +5,7 @@ import tempfile ...@@ -5,8 +5,7 @@ import tempfile
import responses import responses
from mapping_provider.backends.services import _load_and_process_services from mapping_provider.backends import services
from .common import load_test_data from .common import load_test_data
...@@ -16,19 +15,40 @@ def test_inventory_service_download(): ...@@ -16,19 +15,40 @@ def test_inventory_service_download():
tmp bogus test - just to have something tmp bogus test - just to have something
""" """
inventory_base_uri = 'https://dummy-hostname.dummy.domain'
reporting_base_uri = 'https://another-dummy-hostname.dummy.domain'
responses.add( responses.add(
method=responses.GET, method=responses.GET,
url=re.compile(r'.*/poller/interfaces$'), url=f'{inventory_base_uri}/poller/interfaces',
json=load_test_data('poller-interfaces.json') json=load_test_data('poller-interfaces.json')
) )
responses.add(
method=responses.GET,
url=f'{reporting_base_uri}/scid/current',
json=load_test_data('scid-current.json')
)
responses.add(
method=responses.GET,
url=f'{inventory_base_uri}/map/services',
json=load_test_data('inprov-services.json')
)
with tempfile.TemporaryDirectory() as tmp_dir: with tempfile.TemporaryDirectory() as tmp_dir:
_load_and_process_services( services._load_and_process_services(
inventory_base_uri='https://dummy-hostname.dummy.domain', inventory_base_uri='https://dummy-hostname.dummy.domain',
reporting_base_uri='https://another-dummy-hostname.dummy.domain',
working_dirname=tmp_dir) working_dirname=tmp_dir)
assert os.path.exists(os.path.join(tmp_dir, 'poller-interfaces.json')) # assert os.path.exists(os.path.join(tmp_dir, services.POLLER_INTERFACES_CACHE_FILENAME))
with open(os.path.join(tmp_dir, 'poller-interfaces.json')) as f: with open(os.path.join(tmp_dir, services.POLLER_INTERFACES_CACHE_FILENAME)) as f:
assert json.load(f) == load_test_data('poller-interfaces.json') assert json.load(f) == load_test_data('poller-interfaces.json')
# assert os.path.exists(os.path.join(tmp_dir, services.MAP_SERVICES_CACHE_FILENAME))
with open(os.path.join(tmp_dir, services.MAP_SERVICES_CACHE_FILENAME)) as f:
assert json.load(f) == load_test_data('inprov-services.json')
# assert os.path.exists(os.path.join(tmp_dir, services.REPORTING_SCID_CURRENT_CACHE_FILENAME))
with open(os.path.join(tmp_dir, services.REPORTING_SCID_CURRENT_CACHE_FILENAME)) as f:
assert json.load(f) == load_test_data('scid-current.json')
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment