Commit 087068df authored by Release Webservice

Finished release 0.71.

parents 850b9254 deb4afae
Tags 0.71
@@ -2,6 +2,9 @@
 All notable changes to this project will be documented in this file.
 
+## [0.71] - 2021-08-20
+- DBOARD3-433: fix missing classification data during Inventory refresh
+
 ## [0.70] - 2021-08-20
 - DBOARD3-459: fix performance issue with /poller/interfaces
 - POL1-483: add dashboard mappings to /poller/interfaces response
@@ -443,6 +443,7 @@ def reload_router_config(self, hostname):
         self.log_info(f'updated configuration for {hostname}')
 
 
+# updated with transaction
 def _erase_next_db(config):
     """
     flush next db, but first save latch and then restore afterwards
@@ -453,8 +454,23 @@ def _erase_next_db(config):
     """
     r = get_next_redis(config)
     saved_latch = get_latch(r)
-    r.flushdb()
+
     if saved_latch:
+        # execute as transaction to ensure that latch is always available in
+        # db that is being flushed
+        rp = r.pipeline()
+        rp.multi()
+        rp.flushdb()
+        set_single_latch(
+            rp,
+            saved_latch['this'],
+            saved_latch['current'],
+            saved_latch['next'],
+            saved_latch.get('timestamp', 0)
+        )
+        rp.execute()
+
+        # ensure latch is consistent in all dbs
         set_latch(
             config,
             new_current=saved_latch['current'],
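Note on the hunk above: the fix queues FLUSHDB and the latch re-write on a Redis pipeline and runs them as a single MULTI/EXEC transaction, so no other client can ever observe the "next" db in a flushed, latch-less state. A minimal standalone sketch of the same pattern, assuming a plain redis-py client; the function name and latch key below are hypothetical and this is not the project's get_latch/set_single_latch code:

import redis

def flush_but_keep_latch(r: redis.Redis, latch_key: str = 'db:latch'):
    # hypothetical stand-in for get_latch(r)
    saved = r.get(latch_key)
    if saved is None:
        # the original still carries a TODO for the no-latch case
        return
    pipe = r.pipeline()
    pipe.multi()                 # start queueing commands as one transaction
    pipe.flushdb()               # wipe the current db ...
    pipe.set(latch_key, saved)   # ... and restore the latch in the same transaction
    pipe.execute()               # server applies FLUSHDB + SET atomically (MULTI ... EXEC)

Because the two commands are applied atomically at EXEC, readers only ever see the state before the flush or the state with the latch already restored.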
@@ -1167,7 +1183,7 @@ def update_entry_point(self):
     )
     lab_routers = InventoryTask.config.get('lab-routers', [])
 
-    _erase_next_db_chorded(InventoryTask.config)
+    _erase_next_db(InventoryTask.config)
     update_latch_status(InventoryTask.config, pending=True)
 
     tasks = chord(
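For context, the entry point dispatches the refresh as a Celery chord: a group of per-router tasks runs in parallel and a single callback fires once all of them have finished. A generic sketch of that pattern; the task names and broker URL are illustrative, not taken from this repository:

from celery import Celery, chord

app = Celery('sketch', broker='redis://localhost:6379/0')  # hypothetical broker

@app.task
def refresh_router(hostname):
    # stand-in for the per-router refresh work
    return hostname

@app.task
def finalize(results):
    # runs once, after every refresh_router task has completed
    return f'refreshed {len(results)} routers'

def launch(hostnames):
    # header: one task per router; body: the callback that closes the refresh
    return chord(refresh_router.s(h) for h in hostnames)(finalize.s())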
@@ -1238,41 +1254,6 @@ def retrieve_and_persist_neteng_managed_device_list(
     return netdash_equipment
 
 
-# updated with transaction
-def _erase_next_db_chorded(config):
-    """
-    flush next db, but first save latch and then restore afterwards
-
-    TODO: handle the no latch scenario nicely
-    :param config:
-    :return:
-    """
-    r = get_next_redis(config)
-    saved_latch = get_latch(r)
-
-    if saved_latch:
-        # execute as transaction to ensure that latch is always available in
-        # db that is being flushed
-        rp = r.pipeline()
-        rp.multi()
-        rp.flushdb()
-        set_single_latch(
-            rp,
-            saved_latch['this'],
-            saved_latch['current'],
-            saved_latch['next'],
-            saved_latch.get('timestamp', 0)
-        )
-        rp.execute()
-
-        # ensure latch is consistent in all dbs
-        set_latch(
-            config,
-            new_current=saved_latch['current'],
-            new_next=saved_latch['next'],
-            timestamp=saved_latch.get('timestamp', 0))
-
-
 # updated
 @app.task(base=InventoryTask, bind=True, name='reload_lab_router_config')
 @log_task_entry_and_exit
@@ -1837,7 +1818,7 @@ def final_task(self):
 
 def populate_poller_interfaces_cache(warning_callback=lambda s: None):
-    no_lab_cache_key = 'classifier-cache:poller-interfaces:no-lab'
+    no_lab_cache_key = 'classifier-cache:poller-interfaces:all:no-lab'
     all_cache_key = 'classifier-cache:poller-interfaces:all'
 
     non_lab_populated_interfaces = None
     all_populated_interfaces = None
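With the rename, both cache entries now sit under the classifier-cache:poller-interfaces:all prefix. A rough sketch of how the two keys could be populated, assuming the function serialises the interface lists to JSON before storing them; only the key names come from the diff, the write logic here is an assumption:

import json
import redis

NO_LAB_KEY = 'classifier-cache:poller-interfaces:all:no-lab'
ALL_KEY = 'classifier-cache:poller-interfaces:all'

def cache_poller_interfaces(r: redis.Redis, no_lab_interfaces, all_interfaces):
    # store both views as JSON strings; readers (see the test below) fetch,
    # decode and json.loads them back
    r.set(NO_LAB_KEY, json.dumps(no_lab_interfaces))
    r.set(ALL_KEY, json.dumps(all_interfaces))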
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='inventory-provider',
-    version="0.70",
+    version="0.71",
     author='GEANT',
     author_email='swd@geant.org',
     description='Dashboard inventory provider',
@@ -499,9 +499,10 @@ def test_populate_poller_interfaces_cache(
         return_value=r)
 
     populate_poller_interfaces_cache()
-    assert r.exists("classifier-cache:poller-interfaces:no-lab")
+    assert r.exists("classifier-cache:poller-interfaces:all:no-lab")
     assert r.exists("classifier-cache:poller-interfaces:all")
-    no_lab = r.get("classifier-cache:poller-interfaces:no-lab").decode("utf-8")
+    no_lab = \
+        r.get("classifier-cache:poller-interfaces:all:no-lab").decode("utf-8")
     all = r.get("classifier-cache:poller-interfaces:all").decode("utf-8")
     assert json.loads(no_lab) == no_lab_res
     all_res = no_lab_res + lab_res