diff --git a/inventory_provider/routes/jobs.py b/inventory_provider/routes/jobs.py
index 9a8080c87a48174a27fcc50935b585ed738961a7..fc07171113483ca7af59850b8af23ec98dcf1f1b 100644
--- a/inventory_provider/routes/jobs.py
+++ b/inventory_provider/routes/jobs.py
@@ -23,9 +23,11 @@ def update():
         force = False
 
     config = current_app.config['INVENTORY_PROVIDER_CONFIG']
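+    # reuse one redis connection for the latch check and the task-id write below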
+    r = get_current_redis(config)
 
     if not force:
-        latch = get_latch(get_current_redis(config))
+        latch = get_latch(r)
         if latch and latch['pending']:
             return Response(
                 response='an update is already in progress',
@@ -34,7 +36,6 @@ def update():
 
     phase2_task_id = worker.launch_refresh_cache_all(config)
 
-    r = common.get_current_redis()
     r.set('classifier-cache:update-task-id', phase2_task_id.encode('utf-8'))
     return jsonify({'task id': phase2_task_id})
 
diff --git a/inventory_provider/static/update.html b/inventory_provider/static/update.html
index cb2ce23af994576ac8e4fff946eb006a958f393d..0d615b21b475c422331786b2957594cb78129725 100644
--- a/inventory_provider/static/update.html
+++ b/inventory_provider/static/update.html
@@ -29,6 +29,27 @@
           {{ update_request_status }}
         </div>
 
+        <div class="grid-item" ng-show="latch_error||latch_pending">
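+          <!-- one row per task in $scope.tasks, filled by refresh_update_status() in update.js -->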
+          <table class="table table-striped">
+            <tr>
+              <th colspan="4" scope="colgroup">update tasks</th>
+            </tr>
+            <tr>
+              <th scope="col">name</th>
+              <th scope="col">status</th>
+              <th scope="col">success</th>
+              <th scope="col">message</th>
+            </tr>
+            <tr ng-repeat="t in tasks">
+              <td>{{ t.name }}</td>
+              <td>{{ t.status }}</td>
+              <td>{{ t.success }}</td>
+              <td>{{ t.message }}</td>
+            </tr>
+          </table>
+        </div>
+
       </div>
     </div>
   </body>
diff --git a/inventory_provider/static/update.js b/inventory_provider/static/update.js
index d2b729b9158968ee894fb462390680afaa828e71..64e6807348e3d7fa065be42a01deb5a5c1334912 100644
--- a/inventory_provider/static/update.js
+++ b/inventory_provider/static/update.js
@@ -9,6 +9,8 @@ myApp.controller('update', function($scope, $http, $timeout) {
     $scope.update_request_status = "";
     $scope.update_request_error = false;
 
+    $scope.tasks = [];
+
     $scope.check_status = function() {
 
         $http({
@@ -25,6 +27,10 @@ myApp.controller('update', function($scope, $http, $timeout) {
                     $scope.update_request_status = "";
                 }
                 $timeout($scope.check_status, 5000);
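+                // while an update is pending (or has failed), also poll per-task status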
+                if ($scope.latch_pending || $scope.latch_error) {
+                    $scope.refresh_update_status();
+                }
             },
             /* error response */
             function(rsp) {
@@ -32,11 +38,39 @@ myApp.controller('update', function($scope, $http, $timeout) {
                 $scope.latch_info = "communication error";
                 $scope.latch_pending = false;
                 $scope.latch_error = true;
-                $timeout($scope.check_status(), 5000);
+                $timeout($scope.check_status, 5000);
             }
         );
     }
 
+    $scope.refresh_update_status = function() {
+
+        $http({
+            method: 'GET',
+            url: window.location.origin + "/jobs/check-update-status"
+        }).then(
+            /* ok response */
+            function(rsp) {
+                console.log('got update status rsp: ' + JSON.stringify(rsp.data).substring(0,30));
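+                // project each task record into the row shape rendered by update.html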
+                $scope.tasks = rsp.data.map(t => ({
+                    id: t.id,
+                    parent: t.parent,
+                    status: t.status,
+                    success: t.ready ? (t.success ? "OK" : "NO") : "-",
+                    message: (t.result && t.result.message) ? t.result.message.substring(0,100) : "",
+                    name: t.result ? t.result.task : "",
+                }));
+            },
+            /* error response */
+            function(rsp) {
+                // treat any error (e.g. 404 when no update status is available) as "no tasks"
+                $scope.tasks = [];
+            }
+        );
+
+    }
+
     $scope.launch_update = function() {
         $scope.update_request_status = "sending update request";
         $scope.update_request_error = false;
diff --git a/inventory_provider/tasks/worker.py b/inventory_provider/tasks/worker.py
index bab1b31db43b5ff78728733d0c24568390c517bc..6c965ae95e535624b5d838495e9df8df00780989 100644
--- a/inventory_provider/tasks/worker.py
+++ b/inventory_provider/tasks/worker.py
@@ -471,7 +471,7 @@ def reload_router_config(self, hostname):
         state=states.STARTED,
         meta={
             'hostname': hostname,
-            'message': 'loading router netconf data'
+            'message': f'loading netconf data for {hostname}'
         })
 
     # get the timestamp for the current netconf data
@@ -495,8 +495,9 @@ def reload_router_config(self, hostname):
     assert new_netconf_timestamp, \
         'no timestamp available for new netconf data'
     if new_netconf_timestamp == current_netconf_timestamp:
-        logger.debug('no netconf change timestamp change, aborting')
-        return self.success(f'no change (timestamp not updated)')
+        msg = f'no timestamp change for {hostname} netconf data'
+        logger.debug(msg)
+        return self.success(msg)
 
     # clear cached classifier responses for this router, and
     # refresh peering data
@@ -504,7 +505,8 @@ def reload_router_config(self, hostname):
         state=states.STARTED,
         meta={
             'hostname': hostname,
-            'message': 'refreshing peers & clearing cache'
+            'message': f'refreshing peers & clearing cache for {hostname}'
         })
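+    # rebuild public ix peering & vpn route-reflector data from the new netconf doc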
     refresh_ix_public_peers(hostname, netconf_doc)
     refresh_vpn_rr_peers(hostname, netconf_doc)
@@ -516,19 +518,20 @@ def reload_router_config(self, hostname):
     community = juniper.snmp_community_string(netconf_doc)
     if not community:
         raise InventoryTaskError(
-            'error extracting community string for %r' % hostname)
+            f'error extracting community string for {hostname}')
     else:
         self.update_state(
             state=states.STARTED,
             meta={
                 'hostname': hostname,
-                'message': 'refreshing snmp interface indexes'
+                'message': f'refreshing snmp interface indexes for {hostname}'
             })
+        # refresh snmp interface data synchronously, in this worker (celery apply)
         snmp_refresh_interfaces.apply(args=[hostname, community])
 
     clear_cached_classifier_responses(None)
 
-    return self.success(f'updated config for {hostname}')
+    return self.success(f'updated configuration for {hostname}')
 
 
 def _erase_next_db(config):