Commit df58eef6 authored by Robert Latta

switched update to chorded and updated relevant tests

parent f70979f4
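For context, "chorded" here refers to Celery's chord primitive: a group of tasks that run in parallel plus a callback that fires once every task in the group has finished. The sketch below is illustrative only and uses made-up task names (refresh_router, refresh_finished); only update_entry_point appears in this commit, and the project's real implementation lives in inventory_provider.tasks.worker.

    from celery import shared_task, chord

    @shared_task
    def refresh_router(hostname):
        # placeholder: collect netconf/snmp data for one router
        return hostname

    @shared_task
    def refresh_finished(results):
        # chord callback: runs once every refresh_router task has completed
        return {'refreshed': results}

    @shared_task
    def update_entry_point():
        routers = ['router-a', 'router-b']  # placeholder inventory
        # launch the chord and hand back its id so callers can poll it
        task = chord(refresh_router.s(r) for r in routers)(refresh_finished.s())
        return task.id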
@@ -17,12 +17,6 @@ These endpoints are intended for use by Dashboard V3.
 .. autofunction:: inventory_provider.routes.classifier.get_juniper_link_info
-/classifier/infinera-lambda-info
---------------------------------
-.. autofunction:: inventory_provider.routes.classifier.get_infinera_lambda_info
 /classifier/infinera-fiberlink-info
 ------------------------------------
......
@@ -108,13 +108,12 @@ def after_request(resp):
     return common.after_request(resp)
-@routes.route("/update", methods=['GET', 'POST'])
-@common.require_accepts_json
+@routes.route("update", methods=['GET', 'POST'])
 def update():
     """
     Handler for `/jobs/update`.
-    This resource updates the inventory network data for juniper devices.
+    This resource updates the inventory network data.
     The function completes asynchronously and a list of outstanding
     task id's is returned so the caller can
     use `/jobs/check-task-status` to determine when all jobs
@@ -135,17 +134,15 @@ def update():
            response='an update is already in progress',
            status=503,
            mimetype="text/html")
-    phase2_task_id = worker.launch_refresh_cache_all(config)
-    r.set('classifier-cache:update-task-id', phase2_task_id.encode('utf-8'))
-    return jsonify({'task id': phase2_task_id})
+    update_task_id = worker.update_entry_point.delay().get()
+    r.set('classifier-cache:update-task-id', update_task_id.encode('utf-8'))
+    return jsonify({'task id': update_task_id})
 @routes.route("reload-router-config/<equipment_name>", methods=['GET', 'POST'])
 @common.require_accepts_json
 def reload_router_config(equipment_name):
-    result = worker.reload_router_config.delay(equipment_name)
+    result = worker.reload_router_config_chorded.delay(equipment_name)
     return jsonify({'task id': result.id})
......
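With this change, the /jobs/update handler enqueues worker.update_entry_point and blocks on .get() until that task returns the id of the chord it launched; the id is cached under classifier-cache:update-task-id and returned to the caller. A hedged usage sketch follows; the base URL and the Accept header are assumptions, not taken from the repository.

    import requests

    # trigger the chorded update; the handler responds with the chord's task id
    resp = requests.post(
        'http://localhost:8080/jobs/update',       # placeholder base URL
        headers={'Accept': 'application/json'})    # may not be required
    task_id = resp.json()['task id']
    # the id can then be passed to /jobs/check-task-status to watch progress
    print(task_id)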
@@ -18,12 +18,6 @@ routes = Blueprint("inventory-data-testing-support-routes", __name__)
 logger = logging.getLogger(__name__)
-@routes.route("chord-update", methods=['GET', 'POST'])
-def chord_update():
-    r = worker.update_entry_point.delay().get()
-    return jsonify(r)
 @routes.route("flushdb", methods=['GET', 'POST'])
 def flushdb():
     common.get_current_redis().flushdb()
......
This diff is collapsed.
@@ -19,7 +19,7 @@ def backend_db():
 def test_netconf_refresh_config(mocked_worker_module, router):
     del backend_db()['netconf:' + router]
-    worker.netconf_refresh_config(router)
+    worker.reload_router_config_chorded(router)
     assert backend_db()['netconf:' + router]
@@ -51,7 +51,7 @@ def test_snmp_refresh_peerings(mocked_worker_module, router):
     for k in list(_ifc_keys()):
         del backend_db()[k]
-    worker.snmp_refresh_peerings(router, 'fake-community', [])
+    worker.snmp_refresh_peerings_chorded(router, 'fake-community', [])
     assert list(_ifc_keys())
@@ -80,15 +80,15 @@ def test_reload_router_config(mocked_worker_module, router, mocker):
         key = 'netconf:' + args[0]
         backend_db()[key] = saved_data[key]
     mocker.patch(
-        'inventory_provider.tasks.worker.netconf_refresh_config.apply',
+        'inventory_provider.tasks.worker.reload_router_config_chorded.apply',
         _mocked_netconf_refresh_config_apply)
-    def _mocked_snmp_refresh_peerings_apply(args):
+    def _mocked_reload_router_config_chorded_apply(args):
         assert len(args) == 3
         backend_db().update(saved_peerings)
     mocker.patch(
-        'inventory_provider.tasks.worker.snmp_refresh_peerings.apply',
+        'inventory_provider.tasks.worker.reload_router_config_chorded.apply',
-        _mocked_snmp_refresh_peerings_apply)
+        _mocked_reload_router_config_chorded_apply)
     def _mocked_snmp_refresh_interfaces_apply(args):
         assert len(args) == 3
@@ -105,6 +105,6 @@ def test_reload_router_config(mocked_worker_module, router, mocker):
         'inventory_provider.tasks.worker.InventoryTask.update_state',
         _mocked_update_status)
-    worker.reload_router_config(router)
+    worker.reload_router_config_chorded(router)
     assert 'netconf:' + router in backend_db()
     assert 'snmp-interfaces:' + router in backend_db()
from inventory_provider.tasks import worker
from inventory_provider.tasks.common import _get_redis


def backend_db():
    return _get_redis({
        'redis': {
            'hostname': None,
            'port': None
        },
        'redis-databases': [0, 7]
    }).db


def test_clear_classifier_cache(
        router,
        mocked_redis,
        data_config,
        classifier_cache_test_entries):
    worker.InventoryTask.config = data_config
    backend_db().update(classifier_cache_test_entries)
    worker.clear_cached_classifier_responses(router)
    for k in backend_db():
        assert not k.startswith('classifier-cache:%s:' % router)
@@ -24,9 +24,9 @@ def backend_db():
 def test_job_update_all(client, mocker):
     expected_task_id = 'xyz@123#456'
-    launch_refresh_cache_all = mocker.patch(
-        'inventory_provider.tasks.worker.launch_refresh_cache_all')
-    launch_refresh_cache_all.return_value = expected_task_id
+    update_entry_point = mocker.patch(
+        'inventory_provider.tasks.worker.update_entry_point.delay')
+    update_entry_point.return_value.get.return_value = expected_task_id
     rv = client.post(
         'jobs/update',
@@ -43,9 +43,9 @@ def test_job_update_all(client, mocker):
 def test_job_update_force_pending(client, mocker):
     expected_task_id = 'asf#asdf%111'
-    launch_refresh_cache_all = mocker.patch(
-        'inventory_provider.tasks.worker.launch_refresh_cache_all')
-    launch_refresh_cache_all.return_value = expected_task_id
+    update_entry_point = mocker.patch(
+        'inventory_provider.tasks.worker.update_entry_point.delay')
+    update_entry_point.return_value.get.return_value = expected_task_id
     mocked_get_latch = mocker.patch(
         'inventory_provider.routes.jobs.get_latch')
@@ -64,7 +64,7 @@ def test_job_update_pending_force_false(client, mocker):
     def _assert_if_called(*args, **kwargs):
         assert False
     mocker.patch(
-        'inventory_provider.tasks.worker.launch_refresh_cache_all',
+        'inventory_provider.tasks.worker.update_entry_point',
         _assert_if_called)
     mocked_get_latch = mocker.patch(
@@ -81,7 +81,7 @@ def test_job_update_pending(client, mocker):
     def _assert_if_called(*args, **kwargs):
         assert False
     mocker.patch(
-        'inventory_provider.tasks.worker.launch_refresh_cache_all',
+        'inventory_provider.tasks.worker.update_entry_point',
         _assert_if_called)
     mocked_get_latch = mocker.patch(
@@ -104,7 +104,7 @@ class MockedAsyncResult(object):
 def test_reload_router_config(client, mocker):
     delay_result = mocker.patch(
-        'inventory_provider.tasks.worker.reload_router_config.delay')
+        'inventory_provider.tasks.worker.reload_router_config_chorded.delay')
     delay_result.return_value = MockedAsyncResult('bogus task id')
     rv = client.post(
......
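A note on the mocking pattern in test_job_update_all and test_job_update_force_pending above: patching worker.update_entry_point.delay replaces it with a MagicMock, so the handler's update_entry_point.delay().get() call walks a chain of auto-created mock attributes. A standalone sketch of the same behaviour (not project code):

    from unittest import mock

    delay = mock.MagicMock()
    delay.return_value.get.return_value = 'xyz@123#456'

    async_result = delay()                  # same object as delay.return_value
    assert async_result.get() == 'xyz@123#456'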