Skip to content
Snippets Groups Projects
Commit 556fb7e4 authored by Erik Reid's avatar Erik Reid
Browse files

optimization - avoid expensive deletes during refresh

parent c9fb15c1
No related branches found
No related tags found
No related merge requests found
...@@ -277,15 +277,18 @@ def _refresh_peers(hostname, key_base, peers): ...@@ -277,15 +277,18 @@ def _refresh_peers(hostname, key_base, peers):
logger.debug( logger.debug(
'removing cached %s for %r' % (key_base, hostname)) 'removing cached %s for %r' % (key_base, hostname))
r = get_next_redis(InventoryTask.config) r = get_next_redis(InventoryTask.config)
for k in r.scan_iter(key_base + ':*'): # WARNING (optimization): this is an expensive query if
# potential race condition: another proc could have # the redis connection is slow, and we currently only
# deleted this element between the time we read the # # deleted this element between the time we read the
# keys and the next statement ... check for None below # for k in r.scan_iter(key_base + ':*'):
value = r.get(k.decode('utf-8')) # # potential race condition: another proc could have
if value: # # delete this element between the time we read the
value = json.loads(value.decode('utf-8')) # # keys and the next statement ... check for None below
if value['router'] == hostname: # value = r.get(k.decode('utf-8'))
r.delete(k) # if value:
# value = json.loads(value.decode('utf-8'))
# if value['router'] == hostname:
# r.delete(k)
rp = r.pipeline() rp = r.pipeline()
for peer in peers: for peer in peers:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment