diff --git a/README.md b/README.md
index fe15468ac5ab845a066b938bbc85a44aacd0523c..4c0efb8a76365491a6facab3c99902d7dd64f44a 100644
--- a/README.md
+++ b/README.md
@@ -116,11 +116,13 @@ configuration are beyond the scope of this document.
 
 The following resources can be requested from the webservice.
 
-### synchronous resources
+### resources
+
+Any non-empty response from a `/data` resource is a JSON-formatted message.
 
 * `/data/version`
 
-  The response will be a JSON message formatted object
+  The response will be an object
   containing the module and protocol versions of the
   running server and will be formatted as follows:
 
@@ -143,3 +145,20 @@ The following resources can be requested from the webservice.
     }
   ```
 
+* `/data/routers`
+
+  The response will be a list of router hostnames
+  for which information is available and will be
+  formatted as follows:
+
+  ```json
+  {
+      "$schema": "http://json-schema.org/draft-07/schema#",
+      "type": "array",
+      "items": {"type": "string"}
+  }
+  ```
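+
+  A minimal sketch of querying this resource with the `requests`
+  library (the library and the base URL are assumptions, not part of
+  this project's requirements):
+
+  ```python
+  import requests
+
+  BASE_URL = "http://localhost:5000"  # assumption: local development server
+
+  rv = requests.get(
+      BASE_URL + "/data/routers",
+      headers={"Accept": "application/json"})
+  rv.raise_for_status()
+  for hostname in rv.json():
+      print(hostname)
+  ```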
+
+* `/jobs/update`
+
+  This resource triggers a refresh of the inventory network data:
+  details for every configured router are re-collected and stored in
+  Redis. The request blocks until the refresh has finished, and the
+  response body is the string `OK`.
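+
+  A rough sketch of triggering the refresh (same assumed base URL as in
+  the sketch above):
+
+  ```python
+  import requests
+
+  BASE_URL = "http://localhost:5000"  # assumption: local development server
+
+  # blocks until the refresh has finished; the body is the string "OK"
+  rv = requests.post(BASE_URL + "/jobs/update")
+  rv.raise_for_status()
+  assert rv.text == "OK"
+  ```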
\ No newline at end of file
diff --git a/inventory_provider/__init__.py b/inventory_provider/__init__.py
index 2918d68e2776b9ce8c3de4f05f80c839742755b2..3eb4592480884d54a4bde47e961fea4ec900c2e3 100644
--- a/inventory_provider/__init__.py
+++ b/inventory_provider/__init__.py
@@ -20,6 +20,9 @@ def create_app():
     from inventory_provider import data_routes
     app.register_blueprint(data_routes.routes, url_prefix='/data')
 
+    from inventory_provider import job_routes
+    app.register_blueprint(job_routes.routes, url_prefix='/jobs')
+
     if "SETTINGS_FILENAME" not in os.environ:
         assert False, \
             "environment variable SETTINGS_FILENAME' must be defined"
@@ -36,7 +39,7 @@ def create_app():
     from inventory_provider import config
     with open(app.config["INVENTORY_PROVIDER_CONFIG_FILENAME"]) as f:
         # test the config file can be loaded
-        config.load(f)
+        app.config["INVENTORY_PROVIDER_CONFIG"] = config.load(f)
 
     logging.debug(app.config)
 
diff --git a/inventory_provider/data_routes.py b/inventory_provider/data_routes.py
index 3c3238edbf12fd71a99cd30bd89b9616df514f96..25518cdccac8e0cb7f7afe9506ba6f93668fde79 100644
--- a/inventory_provider/data_routes.py
+++ b/inventory_provider/data_routes.py
@@ -1,10 +1,10 @@
 import functools
 import json
 
-from flask import Blueprint, request, Response
-# render_template, url_for
+from flask import Blueprint, request, Response, current_app
+import redis
 
-routes = Blueprint("python-utils-ui-routes", __name__)
+routes = Blueprint("inventory-data-query-routes", __name__)
 
 VERSION = {
     "api": "0.1",
@@ -38,3 +38,15 @@ def version():
         json.dumps(VERSION),
         mimetype="application/json"
     )
+
+
+@routes.route("/routers", methods=['GET', 'POST'])
+@require_accepts_json
+def routers():
+    redis_config = current_app.config["INVENTORY_PROVIDER_CONFIG"]["redis"]
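+    # the top-level redis keys are the router hostnames written by the
+    # /jobs/update job; decode_responses=True so they come back as str
+    # values that json.dumps can serialize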
+    r = redis.StrictRedis(
+        host=redis_config["hostname"],
+        port=redis_config["port"],
+        decode_responses=True)
+    return Response(
+        json.dumps(list(r.keys("*"))),
+        mimetype="application/json")
diff --git a/inventory_provider/job_routes.py b/inventory_provider/job_routes.py
new file mode 100644
index 0000000000000000000000000000000000000000..41f8b7a0b466574cad024aacae4ad007ed9e4bf5
--- /dev/null
+++ b/inventory_provider/job_routes.py
@@ -0,0 +1,12 @@
+from flask import Blueprint, Response, current_app
+
+from inventory_provider import router_details
+
+routes = Blueprint("inventory-data-job-routes", __name__)
+
+
+@routes.route("/update", methods=['GET', 'POST'])
+def update():
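+    # synchronous refresh: the request blocks until update_network_details
+    # has re-collected all router data and stored it in redis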
+    router_details.update_network_details(
+        current_app.config["INVENTORY_PROVIDER_CONFIG"])
+    return Response("OK")
diff --git a/inventory_provider/router_details.py b/inventory_provider/router_details.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe38136e2755020167a0b7f104b781bb8332e263
--- /dev/null
+++ b/inventory_provider/router_details.py
@@ -0,0 +1,113 @@
+import json
+import logging
+from multiprocessing import Process, Queue
+
+import redis
+
+from inventory_provider import constants
+from inventory_provider import snmp
+from inventory_provider import juniper
+
+
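+# queue-based wrappers: these run as multiprocessing.Process targets and
+# hand the collected results back to the parent process via the supplied Queue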
+def get_router_interfaces_q(router, params, q):
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
+    threading_logger.debug("[ENTER>>] get_router_interfaces_q: %r" % router)
+    q.put(list(snmp.get_router_interfaces(router, params)))
+    threading_logger.debug("[<<EXIT]  get_router_interfaces_q: %r" % router)
+
+
+def ssh_exec_commands_q(hostname, ssh_params, commands, q):
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
+    threading_logger.debug("[ENTER>>] exec_router_commands_q: %r" % hostname)
+    q.put(list(juniper.ssh_exec_commands(hostname, ssh_params, commands)))
+    threading_logger.debug("[<<EXIT] exec_router_commands_q: %r" % hostname)
+
+
+def get_router_details(router, params):
+
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
+
+    threading_logger.debug("[ENTER>>]get_router_details: %r" % router)
+
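+    # the snmp interface walk and the ssh command batch run in parallel child
+    # processes; their output is stored as fields of a redis hash keyed by
+    # the router hostname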
+    commands = list(juniper.shell_commands())
+
+    snmpifc_proc_queue = Queue()
+    snmpifc_proc = Process(
+        target=get_router_interfaces_q,
+        args=(router, params, snmpifc_proc_queue))
+    snmpifc_proc.start()
+
+    commands_proc_queue = Queue()
+    commands_proc = Process(
+        target=ssh_exec_commands_q,
+        args=(
+            router["hostname"],
+            params["ssh"],
+            [c["command"] for c in commands],
+            commands_proc_queue))
+    commands_proc.start()
+
+    threading_logger.debug("waiting for commands result: %r" % router)
+    command_output = commands_proc_queue.get()
+    assert len(command_output) == len(commands)
+
+    r = redis.StrictRedis(
+        host=params["redis"]["hostname"],
+        port=params["redis"]["port"])
+    for c, o in zip(commands, command_output):
+        if c["key"]:
+            r.hset(
+                name=router["hostname"],
+                key=c["key"],
+                value=json.dumps(c["parser"](o)))
+    commands_proc.join()
+    threading_logger.debug("... got commands result & joined: %r" % router)
+
+    threading_logger.debug("waiting for snmp ifc results: %r" % router)
+    r.hset(
+        name=router["hostname"],
+        key="snmp-interfaces",
+        value=json.dumps(snmpifc_proc_queue.get()))
+    snmpifc_proc.join()
+    threading_logger.debug("... got snmp ifc result & joined: %r" % router)
+
+    threading_logger.debug("[<<EXIT]get_router_details: %r" % router)
+
+
+def update_network_details(params):
+
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
+
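+    # collect details for all routers concurrently, one child process each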
+    processes = []
+    for r in params["routers"]:
+        p = Process(target=get_router_details, args=(r, params))
+        p.start()
+        processes.append({"router": r, "process": p})
+
+    for p in processes:
+        threading_logger.debug(
+            "waiting for get_router_details result: %r" % p["router"])
+        p["process"].join()
+        threading_logger.debug(
+            "got result and joined get_router_details proc: %r" % p["router"])
+
+
+def load_network_details(redis_params):
+
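+    # rebuild the full inventory from redis: one entry per router hostname,
+    # with every stored field json-decoded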
+    r = redis.StrictRedis(
+        host=redis_params["hostname"],
+        port=redis_params["port"])
+
+    result = {}
+    for hostname in r.keys():
+        host = {}
+        for key in r.hkeys(name=hostname):
+            host[key.decode("utf-8")] = json.loads(
+                r.hget(hostname, key).decode("utf-8")
+            )
+        result[hostname.decode("utf-8")] = host
+
+    return result
diff --git a/inventory_provider/router_interfaces.py b/inventory_provider/router_interfaces.py
index 95a09be5492c59710b33c6377bf50a951dd26839..431ed64154bcad217a76d8e6724e3b3e5aefb29d 100644
--- a/inventory_provider/router_interfaces.py
+++ b/inventory_provider/router_interfaces.py
@@ -1,102 +1,13 @@
 import json
 import logging
-from multiprocessing import Process, Queue
 
 import click
-import redis
 
 from inventory_provider import constants
-from inventory_provider import snmp
-from inventory_provider import juniper
+from inventory_provider import router_details
 from inventory_provider import config
 
 
-def get_router_interfaces_q(router, params, q):
-    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
-    threading_logger.debug("[ENTER>>] get_router_interfaces_q: %r" % router)
-    q.put(list(snmp.get_router_interfaces(router, params)))
-    threading_logger.debug("[<<EXIT]  get_router_interfaces_q: %r" % router)
-
-
-def ssh_exec_commands_q(hostname, ssh_params, commands, q):
-    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
-    threading_logger.debug("[ENTER>>] exec_router_commands_q: %r" % hostname)
-    q.put(list(juniper.ssh_exec_commands(hostname, ssh_params, commands)))
-    threading_logger.debug("[<<EXIT] exec_router_commands_q: %r" % hostname)
-
-
-def get_router_details(router, params):
-
-    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
-
-    threading_logger.debug("[ENTER>>]get_router_details: %r" % router)
-
-    commands = list(juniper.shell_commands())
-
-    snmpifc_proc_queue = Queue()
-    snmpifc_proc = Process(
-        target=get_router_interfaces_q,
-        args=(router, params, snmpifc_proc_queue))
-    snmpifc_proc.start()
-
-    commands_proc_queue = Queue()
-    commands_proc = Process(
-        target=ssh_exec_commands_q,
-        args=(
-            router["hostname"],
-            params["ssh"],
-            [c["command"] for c in commands],
-            commands_proc_queue))
-    commands_proc.start()
-
-    threading_logger.debug("waiting for commands result: %r" % router)
-    command_output = commands_proc_queue.get()
-    assert len(command_output) == len(commands)
-
-    r = redis.StrictRedis(
-        host=params["redis"]["hostname"],
-        port=params["redis"]["port"])
-    for c, o in zip(commands, command_output):
-        if c["key"]:
-            r.hset(
-                name=router["hostname"],
-                key=c["key"],
-                value=json.dumps(c["parser"](o)))
-    commands_proc.join()
-    threading_logger.debug("... got commands result & joined: %r" % router)
-
-    threading_logger.debug("waiting for snmp ifc results: %r" % router)
-    r.hset(
-        name=router["hostname"],
-        key="snmp-interfaces",
-        value=json.dumps(snmpifc_proc_queue.get()))
-    snmpifc_proc.join()
-    threading_logger.debug("... got snmp ifc result & joined: %r" % router)
-
-    threading_logger.debug("[<<EXIT]get_router_details: %r" % router)
-
-
-def load_network_details(params):
-
-    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
-
-    processes = []
-    for r in params["routers"]:
-        p = Process(target=get_router_details, args=(r, params))
-        p.start()
-        processes.append({"router": r, "process": p})
-
-    result = {}
-    for p in processes:
-        threading_logger.debug(
-            "waiting for get_router_details result: %r" % p["router"])
-        p["process"].join()
-        threading_logger.debug(
-            "got result and joined get_router_details proc: %r" % p["router"])
-
-    return result
-
-
 def _validate_config(ctx, param, value):
     return config.load(value)
 
@@ -110,20 +21,8 @@ def _validate_config(ctx, param, value):
     default=open("config.json"),
     callback=_validate_config)
 def cli(params):
-    load_network_details(params)
-
-    r = redis.StrictRedis(
-        host=params["redis"]["hostname"],
-        port=params["redis"]["port"])
-
-    result = {}
-    for hostname in r.keys():
-        host = {}
-        for key in r.hkeys(name=hostname):
-            host[key.decode("utf-8")] = json.loads(
-                r.hget(hostname, key).decode("utf-8")
-            )
-        result[hostname.decode("utf-8")] = host
+    router_details.update_network_details(params)
+    result = router_details.load_network_details(params["redis"])
 
     filename = "/tmp/router-info.json"
     logging.debug("writing output to: " + filename)
diff --git a/requirements.txt b/requirements.txt
index 9d2440c74c133be6ad934a9d78f3bb2df2bd1bfe..5ae83dfda7753c6fc1080dade3f4fe7f4696835a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,3 +7,4 @@ flask
 redis
 
 pytest
+pytest-mock
diff --git a/test/test_data_routes.py b/test/test_data_routes.py
index 811adb0df69bac7f190d05c5e42a8a8860a8bf97..c034ec9fa3863db2a724a8e5b1c25a1106e516c8 100644
--- a/test/test_data_routes.py
+++ b/test/test_data_routes.py
@@ -135,8 +135,6 @@ def app_config():
 @pytest.fixture
 def client(app_config):
     os.environ["SETTINGS_FILENAME"] = app_config
-    # with release_webservice.create_app().test_client() as c:
-    #         yield c
     with inventory_provider.create_app().test_client() as c:
         yield c
 
@@ -166,3 +164,48 @@ def test_version_request(client):
     jsonschema.validate(
         json.loads(rv.data.decode("utf-8")),
         version_schema)
+
+
+TEST_DATA_FILENAME = os.path.join(
+    os.path.dirname(__file__),
+    "router-info.json")
+
+
+class MockedRedis(object):
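+    # minimal stand-in for redis.StrictRedis: the test fixture file is loaded
+    # once into a class-level dict that all MockedRedis instances share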
+
+    db = None
+
+    def __init__(self, *args, **kwargs):
+        if MockedRedis.db is None:
+            with open(TEST_DATA_FILENAME) as f:
+                MockedRedis.db = json.loads(f.read())
+
+    def set(self, key, value):
+        MockedRedis.db[key] = value
+
+    def keys(self, *args, **kwargs):
+        return MockedRedis.db.keys()
+
+
+def test_routers_list(mocker, client):
+
+    routers_list_schema = {
+        "$schema": "http://json-schema.org/draft-07/schema#",
+        "type": "array",
+        "items": {"type": "string"}
+    }
+
+    mocker.patch(
+        'inventory_provider.router_details.redis.StrictRedis',
+        MockedRedis)
+    mocker.patch(
+        'inventory_provider.data_routes.redis.StrictRedis',
+        MockedRedis)
+    rv = client.post(
+        "data/routers",
+        headers=DEFAULT_REQUEST_HEADERS)
+    assert rv.status_code == 200
+
+    response = json.loads(rv.data.decode("utf-8"))
+    jsonschema.validate(response, routers_list_schema)
+    assert response  # shouldn't be empty