Commit 725960bc authored by Erik Reid

added/updated schemas for reporting/inventory requests & cache

parent 2d97a5f6
import concurrent.futures
import jsonschema
import logging
import time
from threading import Event
@@ -10,15 +11,104 @@ from . import cache
logger = logging.getLogger(__name__)
INPROV_POLLER_INTERFACES_CACHE_FILENAME = 'inprov-poller-interfaces.json'
# INPROV_POLLER_INTERFACES_CACHE_FILENAME = 'inprov-poller-interfaces.json'
REPORTING_SCID_CURRENT_CACHE_FILENAME = 'reporting-scid-current.json'
INPROV_MAP_SERVICES_CACHE_FILENAME = 'inprov-map-services.json'
# INPROV_MAP_SERVICES_CACHE_FILENAME = 'inprov-map-services.json'
INPROV_EQUIPMENT_CACHE_FILENAME = 'inprov-equipment.json'
REPORTING_SCID_CURRENT_CACHE_SCHEMA = {
'$schema': 'https://json-schema.org/draft/2020-12/schema',
'definitions': {
'interface': {
'type': 'object',
'properties': {
'hostname': {'type': 'string'},
# 'interface': {'type': 'string'},
# 'addresses': {
# 'type': 'array',
# 'items': {'type': 'string'}
# }
},
'required': ['hostname']
# 'required': ['hostname', 'interface']
},
'lambda_interface': {
'type': 'object',
'properties': {
'equipment': {'type': 'string'},
# 'port': {'type': 'string'},
},
'required': ['equipment']
# 'required': ['equipment', 'port']
},
'service': {
'type': 'object',
'properties': {
'scid': {'type': 'string'},
'sid': {'type': 'string'},
'name': {'type': 'string'},
'speed': {'type': 'integer'},
'status': {'type': 'string'},
# 'monitored': {'type': 'boolean'},
'service_type': {'type': ['string', 'null']},
# 'imsid': {'type': 'integer'},
# 'customers': {
# 'type': 'array',
# 'items': {'type': 'string'}
# },
'endpoints': {
'type': 'array',
'items': {
'anyOf': [
{'$ref': '#/definitions/interface'},
{'$ref': '#/definitions/lambda_interface'},
]
}
}
},
'required': ['scid', 'sid', 'name', 'speed', 'status', 'service_type', 'endpoints'],
# 'required': ['scid', 'sid', 'name',
# 'speed', 'status', 'monitored',
# 'service_type', 'imsid', 'customers', 'endpoints'],
# 'additionalProperties': False
},
},
'type': 'array',
'items': {'$ref': '#/definitions/service'}
}
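# Illustrative example only (not part of this commit): a minimal document
# that satisfies REPORTING_SCID_CURRENT_CACHE_SCHEMA. All values are
# invented; each endpoint only has to match one of the 'interface' or
# 'lambda_interface' definitions via anyOf.
_EXAMPLE_SCID_CURRENT = [
    {
        'scid': 'abc123',
        'sid': 'SVC-00001',
        'name': 'EXAMPLE-SERVICE',
        'speed': 100000,
        'status': 'operational',
        'service_type': None,  # null is explicitly allowed
        'endpoints': [
            {'hostname': 'router1.example.org'},  # matches 'interface'
            {'equipment': 'OPTICAL-NODE-1'},      # matches 'lambda_interface'
        ],
    }
]
jsonschema.validate(
    instance=_EXAMPLE_SCID_CURRENT,
    schema=REPORTING_SCID_CURRENT_CACHE_SCHEMA)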
INPROV_EQUIPMENT_LIST_SCHEMA = {
'$schema': 'https://json-schema.org/draft/2020-12/schema',
'definitions': {
'equipment': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'pop': {'type': 'string'},
'status': {'type': 'string'},
},
'required': ['name', 'pop', 'status'],
# 'additionalProperties': False
},
},
'type': 'array',
'items': {'$ref': '#/definitions/equipment'}
}
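# Illustrative example only (not part of this commit): the equipment list is
# a flat array of objects carrying at least 'name', 'pop' and 'status' as
# strings; extra fields are currently tolerated because additionalProperties
# is left commented out. Example values are invented.
_EXAMPLE_EQUIPMENT_LIST = [
    {'name': 'ROUTER-1', 'pop': 'CITY-A', 'status': 'operational'},
    {'name': 'ROUTER-2', 'pop': 'CITY-B', 'status': 'installed'},
]
jsonschema.validate(
    instance=_EXAMPLE_EQUIPMENT_LIST,
    schema=INPROV_EQUIPMENT_LIST_SCHEMA)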
def _load_and_cache_json(
key: str,
url: str,
cache_filename: str) -> dict[str, Any]:
cache_filename: str,
schema: dict[str, Any] | None = None) -> dict[str, Any]:
"""
Load the JSON from the URL, return and cache it.
@@ -32,6 +122,9 @@ def _load_and_cache_json(
rv.raise_for_status()
rsp_object = rv.json()
if schema:
jsonschema.validate(instance=rsp_object, schema=schema)
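# (illustrative note, not from the commit) jsonschema.validate raises
# jsonschema.exceptions.ValidationError when rsp_object does not match the
# supplied schema, so a malformed upstream response is rejected here, before
# it is written to the cache below.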
cache.set(cache_filename, rsp_object)
return {
'key': key,
@@ -50,21 +143,23 @@ def _load_all_inventory(inventory_base_uri: str, reporting_base_uri: str) -> dic
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
futures = [
executor.submit(
_load_and_cache_json,
key='poller-interfaces',
url=f'{inventory_base_uri}/poller/interfaces',
cache_filename=INPROV_POLLER_INTERFACES_CACHE_FILENAME),
# executor.submit(
# _load_and_cache_json,
# key='poller-interfaces',
# url=f'{inventory_base_uri}/poller/interfaces',
# cache_filename=INPROV_POLLER_INTERFACES_CACHE_FILENAME),
executor.submit(
_load_and_cache_json,
key='map-services',
url=f'{inventory_base_uri}/map/services',
cache_filename=INPROV_MAP_SERVICES_CACHE_FILENAME),
url=f'{inventory_base_uri}/map/equipment',
cache_filename=INPROV_EQUIPMENT_CACHE_FILENAME,
schema=INPROV_EQUIPMENT_LIST_SCHEMA),
executor.submit(
_load_and_cache_json,
key='scid-current',
url=f'{reporting_base_uri}/scid/current',
cache_filename=REPORTING_SCID_CURRENT_CACHE_FILENAME),
cache_filename=REPORTING_SCID_CURRENT_CACHE_FILENAME,
schema=REPORTING_SCID_CURRENT_CACHE_SCHEMA),
]
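# (illustrative note, not from the commit) an exception raised inside a
# worker -- e.g. jsonschema.exceptions.ValidationError from the schema checks
# above -- is re-raised when .result() is called on its future in the
# as_completed() loop below, so a hypothetical caller could handle a bad
# upstream payload roughly like this:
#
#     try:
#         data = _load_all_inventory(inventory_base_uri, reporting_base_uri)
#     except jsonschema.exceptions.ValidationError:
#         logger.exception('upstream payload failed schema validation')
#         raise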
responses = {}
for _f in concurrent.futures.as_completed(futures):
@@ -16,11 +16,11 @@ def test_inventory_service_download():
inventory_base_uri = 'https://dummy-hostname.dummy.domain'
reporting_base_uri = 'https://another-dummy-hostname.dummy.domain'
responses.add(
method=responses.GET,
url=f'{inventory_base_uri}/poller/interfaces',
json=load_test_data('poller-interfaces.json')
)
# responses.add(
# method=responses.GET,
# url=f'{inventory_base_uri}/poller/interfaces',
# json=load_test_data('poller-interfaces.json')
# )
responses.add(
method=responses.GET,
url=f'{reporting_base_uri}/scid/current',
@@ -28,8 +28,8 @@ def test_inventory_service_download():
)
responses.add(
method=responses.GET,
url=f'{inventory_base_uri}/map/services',
json=load_test_data('inprov-services.json')
url=f'{inventory_base_uri}/map/equipment',
json=load_test_data('inprov-equipment.json')
)
with tempfile.TemporaryDirectory() as tmp_dir:
@@ -41,11 +41,11 @@ def test_inventory_service_download():
# assert os.path.exists(os.path.join(tmp_dir, services.POLLER_INTERFACES_CACHE_FILENAME))
cached_data = cache.get(inventory.INPROV_POLLER_INTERFACES_CACHE_FILENAME)
assert cached_data == load_test_data('poller-interfaces.json')
# cached_data = cache.get(inventory.INPROV_POLLER_INTERFACES_CACHE_FILENAME)
# assert cached_data == load_test_data('poller-interfaces.json')
cached_data = cache.get(inventory.INPROV_MAP_SERVICES_CACHE_FILENAME)
assert cached_data == load_test_data('inprov-services.json')
cached_data = cache.get(inventory.INPROV_EQUIPMENT_CACHE_FILENAME)
assert cached_data == load_test_data('inprov-equipment.json')
cached_data = cache.get(inventory.REPORTING_SCID_CURRENT_CACHE_FILENAME)
assert cached_data == load_test_data('scid-current.json')
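# (illustrative, not from the commit) because the scid/current response is
# now validated against REPORTING_SCID_CURRENT_CACHE_SCHEMA, registering a
# mock payload that misses required fields would be expected to make the
# download raise a ValidationError instead of silently caching it, e.g.:
#
#     responses.add(
#         method=responses.GET,
#         url=f'{reporting_base_uri}/scid/current',
#         json=[{'scid': 'abc123'}])  # missing sid/name/speed/... fields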