Skip to content
Snippets Groups Projects
Commit af277012 authored by Erik Reid's avatar Erik Reid
Browse files

don't delete log records in another thread

much simpler, hopefully the scan_iter count param helps
parent 190ac489
Branches
Tags
No related merge requests found
......@@ -114,19 +114,15 @@ def run():
t['thread'].join()
def clear_joblog(r):
    """
    Delete all 'joblog:*' keys from the given redis database.

    Deletions are queued on a single pipeline and sent in one
    round trip when the scan completes.

    :param r: connection to a redis database
    :return: None
    """
    rp = r.pipeline()
    # scan with bigger batches, to mitigate network latency effects
    for key in r.scan_iter('joblog:*', count=1000):
        rp.delete(key)
    rp.execute()
......
import json
import logging
import os
import threading
import time
from redis.exceptions import RedisError
......@@ -563,18 +562,7 @@ def launch_refresh_cache_all(config):
_erase_next_db(config)
update_latch_status(config, pending=True)
# call monitor.clear_joblog in a thread, since
# deletion might be slow and can be done in parallel
def _clear_log_proc(wait_event):
monitor.clear_joblog(get_current_redis(config), wait_event)
keys_captured_event = threading.Event()
threading.Thread(
target=_clear_log_proc,
args=[keys_captured_event]).start()
if not keys_captured_event.wait(timeout=60.0):
# wait a reasonable time
logging.error('timed out waiting for log keys to be read')
monitor.clear_joblog(get_current_redis(config))
# first batch of subtasks: refresh cached opsdb data
subtasks = [
......
0% Loading.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment