Skip to content
Snippets Groups Projects
Commit 556fb7e4 authored by Erik Reid's avatar Erik Reid
Browse files

optimization - avoid expensive deletes during refresh

parent c9fb15c1
No related branches found
No related tags found
No related merge requests found
......@@ -277,15 +277,18 @@ def _refresh_peers(hostname, key_base, peers):
logger.debug(
'removing cached %s for %r' % (key_base, hostname))
r = get_next_redis(InventoryTask.config)
for k in r.scan_iter(key_base + ':*'):
# potential race condition: another proc could have
# deleted this element between the time we read the
# keys and the next statement ... check for None below
value = r.get(k.decode('utf-8'))
if value:
value = json.loads(value.decode('utf-8'))
if value['router'] == hostname:
r.delete(k)
# WARNING (optimization): this is an expensive query if
# the redis connection is slow, and we currently only
# call this method during a full refresh
# for k in r.scan_iter(key_base + ':*'):
# # potential race condition: another proc could have
# # deleted this element between the time we read the
# # keys and the next statement ... check for None below
# value = r.get(k.decode('utf-8'))
# if value:
# value = json.loads(value.decode('utf-8'))
# if value['router'] == hostname:
# r.delete(k)
rp = r.pipeline()
for peer in peers:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment