diff --git a/inventory_provider/routes/classifier.py b/inventory_provider/routes/classifier.py
index ad3b5db05214a1133e607b1a49b8e6ec7a55c0ac..9e9b48456a412ebd8cb2aed12e03502d5d2805a2 100644
--- a/inventory_provider/routes/classifier.py
+++ b/inventory_provider/routes/classifier.py
@@ -384,13 +384,15 @@ def peer_info(address):
                     _location_from_service_dict(s) for s in i['services']]
 
         snmp_info = r.get(
-            f'snmp-peerings:{address}')
+            f'snmp-peerings:remote:{address}')
         if snmp_info:
             snmp_info = json.loads(snmp_info.decode('utf-8'))
-            result['snmp'] = {
-                'community': snmp_info['community'],
-                'oid': snmp_info['oid']
-            }
+            result['snmp'] = [
+                {
+                    'hostname': h['hostname'],
+                    'community': h['community'],
+                    'oid': h['oid']
+                } for h in snmp_info]
 
         result['locations'] = _remove_duplicates_from_list(result['locations'])
         result = json.dumps(result)
diff --git a/inventory_provider/tasks/worker.py b/inventory_provider/tasks/worker.py
index 9c877975e218cf5955370e171c26e3ccd5043492..f8b491e264a39ed4cdb8637339adb3722a8d7eb4 100644
--- a/inventory_provider/tasks/worker.py
+++ b/inventory_provider/tasks/worker.py
@@ -96,7 +96,7 @@ def snmp_refresh_peerings(self, hostname, community, logical_systems):
         logger.exception(msg)
         self.log_warning(msg)
         r = get_current_redis(InventoryTask.config)
-        peerings = r.get(f'snmp-peerings:{hostname}:all')
+        peerings = r.get(f'snmp-peerings:hosts:{hostname}')
         if peerings is None:
             raise InventoryTaskError(
                 f'snmp error with {peerings}'
@@ -106,16 +106,7 @@ def snmp_refresh_peerings(self, hostname, community, logical_systems):
         self.log_warning(f'using cached snmp peering data for {hostname}')
 
     r = get_next_redis(InventoryTask.config)
-
-    rp = r.pipeline()
-    rp.set(f'snmp-peerings:{hostname}:all', json.dumps(peerings))
-
-    for session in peerings:
-        rp.set(
-            f'snmp-peerings:{hostname}:{session["remote"]}',
-            json.dumps(session))
-
-    rp.execute()
+    r.set(f'snmp-peerings:hosts:{hostname}', json.dumps(peerings))
 
     self.log_info(f'snmp peering info loaded from {hostname}')
 
@@ -726,6 +717,7 @@ def refresh_finalizer(self, pending_task_ids_json):
 
         _wait_for_tasks(task_ids, update_callback=self.log_info)
         _build_subnet_db(update_callback=self.log_info)
+        _build_peering_db(update_callback=self.log_info)
 
     except (jsonschema.ValidationError,
             json.JSONDecodeError,
@@ -760,6 +752,32 @@ def _build_subnet_db(update_callback=lambda s: None):
     rp.execute()
 
 
+def _build_peering_db(update_callback=lambda s: None):
+
+    r = get_next_redis(InventoryTask.config)
+
+    update_callback('loading all network peerings')
+    peerings = {}
+
+    # use a larger SCAN COUNT hint to reduce round-trips (network latency)
+    key_prefix = 'snmp-peerings:hosts:'
+    for k in r.scan_iter(f'{key_prefix}*', count=1000):
+        key_name = k.decode('utf-8')
+        hostname = key_name[len(key_prefix):]
+        host_peerings = r.get(key_name).decode('utf-8')
+        host_peerings = json.loads(host_peerings)
+        for p in host_peerings:
+            p['hostname'] = hostname
+            peerings.setdefault(p['remote'], []).append(p)
+
+    update_callback(f'saving {len(peerings)} remote peers')
+
+    rp = r.pipeline()
+    for k, v in peerings.items():
+        rp.set(f'snmp-peerings:remote:{k}', json.dumps(v))
+    rp.execute()
+
+
 def check_task_status(task_id, parent=None, forget=False):
     r = AsyncResult(task_id, app=app)
     assert r.id == task_id  # sanity