diff --git a/Changelog.md b/Changelog.md
index 1d550008c02b8c0e4bdcc9e0e9a004e6de8cce42..e671bf3370461f75da8879f43ad1f151169a2faf 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -3,6 +3,9 @@
 
 All notable changes to this project will be documented in this file.
 
+## [0.106] - 2023-07-14
+- DBOARD-771 : Updated inventory provider APIs to include all sites even without nodes.
+
 ## [0.105] - 2023-06-16
 - DBOARD-754 : Adding redis authentication.
 - DBOARD-743 : Updated Poller Services caching.
diff --git a/inventory_provider/db/ims_data.py b/inventory_provider/db/ims_data.py
index 81cf11b7e430398537410e29ec27c2e085f871b6..f2ac1089dad1114d0a1d0d5c97754a85b076510b 100644
--- a/inventory_provider/db/ims_data.py
+++ b/inventory_provider/db/ims_data.py
@@ -67,6 +67,12 @@ NODE_LOCATION_SCHEMA = {
 }
 
 
+SITE_LOCATION_SCHEMA = {
+    '$schema': 'https://json-schema.org/draft-07/schema#',
+    **_POP_LOCATION_SCHEMA_STRUCT
+}
+
+
 def get_flexils_by_circuitid(ds: IMS):
     by_circuit = defaultdict(list)
     found_keys = set()
@@ -437,6 +443,45 @@ def get_circuit_hierarchy(ds: IMS):
         }
 
 
+@log_entry_and_exit
+def get_site_locations(ds: IMS):
+    """
+    return location info for all Sites
+
+    yields dictionaries formatted as:
+
+    .. as_json::
+        inventory_provider.db.ims_data.SITE_LOCATION_SCHEMA
+
+    :param ds:
+    :return: yields dicts as above
+    """
+    site_nav_props = [
+        ims.SITE_PROPERTIES['City'],
+        ims.SITE_PROPERTIES['SiteAliases'],
+        ims.SITE_PROPERTIES['Country'],
+        ims.SITE_PROPERTIES['Nodes']
+    ]
+    sites = ds.get_all_entities('Site', site_nav_props, step_count=500)
+    for site in sites:
+        city = site['city']
+        try:
+            abbreviation = site['sitealiases'][0]['aliasname']
+        except IndexError:
+            abbreviation = ''  # no alias - ignore silently
+
+        has_geo_data = bool(site.get('longitude') and site.get('latitude'))
+        if abbreviation and has_geo_data:
+            yield site['name'], {
+                'name': site['name'],
+                'city': city['name'],
+                'country': city['country']['name'],
+                'abbreviation': abbreviation,
+                'longitude': site['longitude'],
+                'latitude': site['latitude'],
+            }
+
+
 @log_entry_and_exit
 def get_node_locations(ds: IMS):
     """
@@ -459,11 +504,11 @@ def get_node_locations(ds: IMS):
     sites = ds.get_all_entities('Site', site_nav_props, step_count=500)
     for site in sites:
         city = site['city']
-        abbreviation = ''
+
         try:
             abbreviation = site['sitealiases'][0]['aliasname']
         except IndexError:
-            pass  # no alias - ignore silently
+            abbreviation = ''  # no alias - ignore silently
 
         for node in site['nodes']:
             if node['inventorystatusid'] in STATUSES_TO_IGNORE:
diff --git a/inventory_provider/routes/neteng.py b/inventory_provider/routes/neteng.py
index 37e32c7bdd39ea185d1bf6ee37acdf2c314fe09a..2b38808d394fceca730a31cff01a77a147dc2c63 100644
--- a/inventory_provider/routes/neteng.py
+++ b/inventory_provider/routes/neteng.py
@@ -103,9 +103,9 @@ def get_pop_names():
 
     def _pops():
         r = common.get_current_redis()
-        for k in r.scan_iter('ims:pop:*', count=1000):
+        for k in r.scan_iter('ims:site:*', count=1000):
             k = k.decode('utf-8')
-            m = re.match('^ims:pop:(.+)$', k)
+            m = re.match('^ims:site:(.+)$', k)
             yield m.group(1)
 
     return jsonify(sorted(list(_pops())))
@@ -131,7 +131,7 @@ def get_pop_location(abbreviation):
 
     r = common.get_current_redis()
 
-    value = r.get(f'ims:pop:{abbreviation}')
+    value = r.get(f'ims:site:{abbreviation}')
     if not value:
         return Response(
             response=f'no location information available for "{abbreviation}"',
diff --git a/inventory_provider/tasks/worker.py b/inventory_provider/tasks/worker.py
index cfc62657f91a34a485adf48931338166efc3c36c..f4f8b33940cc0a5fd6de3faf899f13ecece7e159 100644
--- a/inventory_provider/tasks/worker.py
+++ b/inventory_provider/tasks/worker.py
@@ -708,8 +708,7 @@ def ims_task(self, use_current=False):
         extracted_data = extract_ims_data()
         cache_extracted_ims_data(extracted_data)
         transformed_data = transform_ims_data(extracted_data)
-        transformed_data['locations'] = extracted_data['locations']
-        transformed_data['lg_routers'] = extracted_data['lg_routers']
+
         persist_ims_data(transformed_data, use_current)
     except Exception as e:
         logger.error(e)
@@ -739,6 +738,7 @@ def _extract_ims_data(ims_api_url, ims_username, ims_password):
     _ds().clear_dynamic_context_cache()
 
     locations = {}
+    site_locations = {}
     lg_routers = []
     geant_nodes = []
     customer_contacts = {}
@@ -758,6 +758,11 @@ def _extract_ims_data(ims_api_url, ims_username, ims_password):
         nonlocal locations
         locations = {k: v for k, v in ims_data.get_node_locations(ds=_ds())}
 
+    @log_task_entry_and_exit
+    def _populate_site_locations():
+        nonlocal site_locations
+        site_locations = {k: v for k, v in ims_data.get_site_locations(ds=_ds())}
+
     @log_task_entry_and_exit
     def _populate_lg_routers():
         nonlocal lg_routers
@@ -803,6 +808,7 @@ def _extract_ims_data(ims_api_url, ims_username, ims_password):
     with concurrent.futures.ThreadPoolExecutor() as executor:
         futures = {
             executor.submit(_populate_locations): 'locations',
+            executor.submit(_populate_site_locations): 'site_locations',
             executor.submit(_populate_geant_nodes): 'geant_nodes',
             executor.submit(_populate_lg_routers): 'lg_routers',
             executor.submit(_populate_customer_contacts): 'customer_contacts',
@@ -871,6 +877,7 @@ def _extract_ims_data(ims_api_url, ims_username, ims_password):
 
     return {
         'locations': locations,
+        'site_locations': site_locations,
        'lg_routers': lg_routers,
         'customer_contacts': customer_contacts,
         'planned_work_contacts': planned_work_contacts,
@@ -1173,13 +1180,17 @@ def transform_ims_data(data):
         'services_by_type': services_by_type,
         'node_pair_services': node_pair_services,
         'sid_services': sid_services,
-        'pop_nodes': pop_nodes
+        'pop_nodes': pop_nodes,
+        'locations': data['locations'],
+        'site_locations': data['site_locations'],
+        'lg_routers': data['lg_routers'],
     }
 
 
 def persist_ims_data(data, use_current=False):
     hierarchy = data['hierarchy']
     locations = data['locations']
+    site_locations = data['site_locations']
     lg_routers = data['lg_routers']
     interface_services = data['interface_services']
     services_by_type = data['services_by_type']
@@ -1187,13 +1198,14 @@
     sid_services = data['sid_services']
     pop_nodes = data['pop_nodes']
 
-    def _get_pops():
+    def _get_sites():
         # de-dupe the sites (by abbreviation)
-        pops = {
-            equip['pop']['abbreviation']: equip['pop']
-            for equip in locations.values()
-            if equip['pop']['abbreviation']}
-        return pops.values()
+        sites = {
+            site_location['abbreviation']: site_location
+            for site_location in site_locations.values()
+        }
+
+        return sites.values()
 
     if use_current:
         r = get_current_redis(InventoryTask.config)
@@ -1203,7 +1215,6 @@
         # only need to delete the individual keys if it's just an IMS update
         # rather than a complete update (the db will have been flushed)
        for key_pattern in [
-            'ims:pop:*',
             'ims:location:*',
             'ims:lg:*',
             'ims:circuit_hierarchy:*',
@@ -1223,8 +1234,9 @@
     rp = r.pipeline()
     for h, d in locations.items():
         rp.set(f'ims:location:{h}', json.dumps([d]))
-    for pop in _get_pops():
-        rp.set(f'ims:pop:{pop["abbreviation"]}', json.dumps(pop))
+    for site in _get_sites():
+        rp.set(f'ims:site:{site["abbreviation"]}', json.dumps(site))
+
     rp.execute()
     rp = r.pipeline()
     for router in lg_routers:
diff --git a/setup.py b/setup.py
index 808206ba3b3e211b0c81f61621e0ecfe0949b215..96cf18d4bc886bb179fc4850b5c46dcb7ab28415 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='inventory-provider',
-    version="0.105",
+    version="0.106",
     author='GEANT',
     author_email='swd@geant.org',
     description='Dashboard inventory provider',
diff --git a/test/data/router-info.json b/test/data/router-info.json
index b022aaeac9509d17e9e3a0fede0f46c6c1b9fe73..9240589f9a38297af9720fc2c44ceaad5328357d 100644
Binary files a/test/data/router-info.json and b/test/data/router-info.json differ
diff --git a/test/data/update-test-db.py b/test/data/update-test-db.py
index de58dc68b245531430d39aa4b9b177d29b334197..4cff6c37fcd3548442ebd81bd419b0a47477fb3c 100644
--- a/test/data/update-test-db.py
+++ b/test/data/update-test-db.py
@@ -21,7 +21,7 @@ def _ignore_key(key):
 
 
 def _redis_client_proc(key_queue, value_queue, hostname, db_index):
-    r = redis.StrictRedis(host=hostname, db=db_index)
+    r = redis.StrictRedis(host=hostname, db=db_index, password='bsgjdfjyot7JnAQJbMj6',)
 
     while True:
         key = key_queue.get()
@@ -60,7 +60,7 @@ def docs(loaders, thread_count):
         t.start()
         threads.append({'thread': t, 'queue': q})
 
-        r = redis.StrictRedis(host=loader['hostname'], db=loader['db-index'])
+        r = redis.StrictRedis(host=loader['hostname'], db=loader['db-index'], password='bsgjdfjyot7JnAQJbMj6')
 
         for pattern in loader['key-patterns']:
             logging.debug(pattern)
diff --git a/test/test_ims_data.py b/test/test_ims_data.py
index 1c89f5a9b18254a253c4897282dd378f59566f98..436be65d8d956a2d936e4b7be86e08422dba0e67 100644
--- a/test/test_ims_data.py
+++ b/test/test_ims_data.py
@@ -9,7 +9,7 @@ from inventory_provider.db.ims_data import lookup_lg_routers, \
     get_node_locations, IMS_OPSDB_STATUS_MAP, \
     get_port_id_services, get_port_details, \
     get_circuit_hierarchy, get_ids_and_sids, NODE_LOCATION_SCHEMA, \
-    get_flexils_by_circuitid
+    get_flexils_by_circuitid, SITE_LOCATION_SCHEMA, get_site_locations
 
 
 def _json_test_data(filename):
@@ -337,6 +337,24 @@
     })
 
 
+def test_get_site_location(mocker):
+    ims = mocker.patch('inventory_provider.db.ims.IMS')
+    resp_data = _json_test_data('ims_nodes_data.json')
+    ims.return_value.get_all_entities.return_value = resp_data
+
+    ds = inventory_provider.db.ims.IMS(
+        'dummy_base', 'dummy_username', 'dummy_password')
+    res = list(get_site_locations(ds))
+    for name, site in res:
+        assert isinstance(name, str)
+        jsonschema.validate(site, SITE_LOCATION_SCHEMA)
+
+    assert len(res) == 2
+    assert res[0] == ('LONDON 3 POWERGATE',
+                      {'abbreviation': 'LON3', 'city': 'LONDON', 'country': 'UNITED KINGDOM', 'latitude': 51.5308142,
+                       'longitude': -0.257712, 'name': 'LONDON 3 POWERGATE'})
+
+
 def test_get_circuit_ids_and_sids(mocker):
     ims = mocker.patch('inventory_provider.db.ims.IMS')
     ims.return_value.get_filtered_entities.return_value = \
diff --git a/test/test_neteng_routes.py b/test/test_neteng_routes.py
index 78fe102b1dea739d3a22604963c5d0ed6d0ef605..6b52565c98352e483acd1994c04e94b9768d2dad 100644
--- a/test/test_neteng_routes.py
+++ b/test/test_neteng_routes.py
@@ -40,7 +40,7 @@ def test_get_pops(client, mocked_redis):
 
 
 @pytest.mark.parametrize('pop_name', [
-    'AMS', 'LON', 'LON2', 'ORB', 'ORBE'
+    'AMS', 'LON', 'COR', 'ORB', 'ORBE',
 ])
 def test_pop_location(client, mocked_redis, pop_name):
     rv = client.get(
diff --git a/test/test_worker.py b/test/test_worker.py
index b48432250be7e638eec052b010264fe6d96b1fad..37e2c7e998395b0f9983d150e76435f3368ea39d 100644
--- a/test/test_worker.py
+++ b/test/test_worker.py
@@ -8,7 +8,6 @@ from inventory_provider.tasks.worker import transform_ims_data, \
 
 
 def test_extract_ims_data(mocker):
-
     mocker.patch(
         'inventory_provider.tasks.worker.InventoryTask.config'
     )
@@ -16,6 +15,16 @@
         'inventory_provider.tasks.worker.ims_data.get_node_locations',
         return_value=[('loc_a', 'LOC A'), ('loc_b', 'LOC B')]
     )
+    mocker.patch(
+        'inventory_provider.tasks.worker.ims_data.get_site_locations',
+        return_value=[
+            ('JEN-SPL',
+             {
+                 'abbreviation': 'JEN', 'city': 'MILAN', 'country': 'ITALY', 'latitude': 12.12, 'longitude': 12.45,
+                 'name': 'JEN-SPL'
+             }),
+        ]
+    )
     mocker.patch(
         'inventory_provider.tasks.worker.IMS.clear_dynamic_context_cache'
     )
@@ -99,6 +108,9 @@
     )
     res = extract_ims_data()
     assert res['locations'] == {'loc_a': 'LOC A', 'loc_b': 'LOC B'}
+    assert res['site_locations'] == {
+        'JEN-SPL': {'abbreviation': 'JEN', 'city': 'MILAN', 'country': 'ITALY', 'latitude': 12.12, 'longitude': 12.45,
+                    'name': 'JEN-SPL'}}
     assert res['lg_routers'] == ['lg router 1', 'lg router 2']
     assert res['customer_contacts'] == {'123': 'CON A', '456': 'CON B'}
     assert res['planned_work_contacts'] == \
@@ -164,6 +176,16 @@
         }
     }
+    site_locations = {
+        'JEN-SPL': {'abbreviation': 'JEN', 'city': 'MILAN', 'country': 'ITALY',
+                    'latitude': 12.12323, 'longitude': 4.90123,
+                    'name': 'JEN-SPL'}
+    }
+
+    lg_routers = [
+        {"equipment name": "lg_eq1"}, {"equipment name": "lg_eq2"}
+    ]
+
     additional_circuit_customer_ids = {
         "circ_id_1": [
             {"id": "cu_1_1", "name": "customer_1"}
         ]
@@ -379,6 +401,7 @@
     }
     data = {
         "locations": locations,
+        "site_locations": site_locations,
         "customer_contacts": customer_contacts,
         "planned_work_contacts": planned_work_contacts,
         "circuit_ids_to_monitor": ["sub_circuit_2"],
@@ -389,7 +412,8 @@
         "circuit_ids_sids": circuit_ids_and_sids,
         "geant_nodes": ["eq_b"],
         "flexils_data": flexils_data,
-        "customers": customers
+        "customers": customers,
+        "lg_routers": lg_routers,
     }
     orig_port_id_services_len = len(port_id_services.keys())
     res = transform_ims_data(data)
@@ -488,6 +512,9 @@ def test_persist_ims_data(mocker, data_config, mocked_redis):
                 'pop': {'name': "LOC B", 'abbreviation': 'bbb'}
             },
         },
+        "site_locations": {
+            'JEN-SPL': {'abbreviation': 'JEN', 'city': 'MILAN', 'country': 'ITALY', 'latitude': 12.1, 'longitude': -1.2,
+                        'name': 'JEN-SPL'}},
         "lg_routers": [
            {"equipment name": "lg_eq1"}, {"equipment name": "lg_eq2"}
         ],