Commit 4c1d04e7 authored by Valentin Pocotilenco

refactor for delayed logrotate

parent fd39ff5d
@@ -123,6 +123,10 @@ def parseLog(lines,criteria):
             continue
         rowDate = datetime.strptime(clearDateString(row[1]), '%a %b %d %H:%M:%S %Y').strftime('%Y-%m-%d')
+        # check for entries to be in range of date of rotated log
+        if 'prev_date' in criteria and criteria['date_from'] != rowDate:
+            continue
         if ('date_from' in criteria and 'date_to' in criteria and criteria['date_from'] <= rowDate <= criteria['date_to']) or ('date_from' not in criteria and 'date_to' not in criteria and rowDate):
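The new guard only takes effect when a rotated log is read for a specific day: any row whose timestamp falls outside criteria['date_from'] is skipped. A minimal standalone sketch of the same check follows; the keep_row helper and the sample values are illustrative, not taken from the ECCS code base.

from datetime import datetime

# Minimal sketch of the rotated-log date guard (illustrative helper, not the
# production parseLog).
def keep_row(row_timestamp, criteria):
    row_date = datetime.strptime(row_timestamp, '%a %b %d %H:%M:%S %Y').strftime('%Y-%m-%d')
    if 'prev_date' in criteria and criteria['date_from'] != row_date:
        return False  # entry belongs to a different day than the one requested
    return True

# A Sunday entry is dropped when Monday's data was requested:
print(keep_row('Sun Mar 10 08:15:42 2024',
               {'prev_date': '2024-03-10', 'date_from': '2024-03-11'}))  # False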
@@ -178,6 +182,28 @@ def parseReqURL(url):
     return result

+def filterParsedData(json_data,criteria):
+    # idp = {}
+    # reg_auth = {}
+    # if 'idp' in criteria and criteria['idp'] in json_data['idp'].keys():
+    #     for entry in json_data[criteria['prev_date']]['idp']:
+    #         # if criteria['idp'] == entry.key():
+    #         if entry.key().find(criteria['idp']) >= 0:
+    #             idp[entry.key()] = entry
+    #         # search substring
+    # if 'reg_auth' in criteria and criteria['reg_auth'] in json_data['reg_auth'].keys():
+    #     for entry in json_data[criteria['reg_auth']]:
+    #         if criteria['reg_auth'] == entry.key():
+    #             reg_auth[criteria['reg_auth']] = entry
+    # json_data[criteria['prev_date']] = {
+    #     'request_uniq' : {'idp':len(idp),'reg_auth':len(reg_auth)},
+    #     'idp' : idp,
+    #     'reg_auth' : reg_auth
+    # }
+    return json_data
+
 ### Classes
 # /api/test
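filterParsedData lands as a pass-through stub; its commented-out body hints at narrowing a cached day to one IdP (substring match on the key) and one registration authority (exact match). Below is a hedged sketch of that idea, assuming a cached layout of {day: {'idp': {...}, 'reg_auth': {...}, 'request_uniq': {...}}}; neither the layout nor the function name is confirmed by this commit.

# Hedged sketch only: the cached JSON layout assumed here is not confirmed
# by this commit, and filter_parsed_data_sketch is an illustrative name.
def filter_parsed_data_sketch(json_data, criteria):
    for day, day_data in json_data.items():
        if 'idp' in criteria:
            # substring match on the IdP key, as the commented code suggests
            day_data['idp'] = {k: v for k, v in day_data.get('idp', {}).items()
                               if criteria['idp'] in k}
        if 'reg_auth' in criteria:
            # exact match on the registration authority key
            day_data['reg_auth'] = {k: v for k, v in day_data.get('reg_auth', {}).items()
                                    if k == criteria['reg_auth']}
        day_data['request_uniq'] = {'idp': len(day_data.get('idp', {})),
                                    'reg_auth': len(day_data.get('reg_auth', {}))}
    return json_data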
@@ -408,6 +434,8 @@ class WebData(Resource):
        criteria = {}
        criteria['date_from'] = criteria['date_to'] = e_p.DAY
        eccsLogRotated = True
+        useParsedFile = True
+
        in_data = request.args
        if ('dateFrom' in in_data and isValidDate(in_data['dateFrom'])):
@@ -434,51 +462,59 @@ class WebData(Resource):
        if ('idp' in in_data):
            criteria['idp'] = in_data['idp']

+        # here I have to parse eccs-log file
        lines = []
        results = {}
        cur_date = criteria['date_from']

        if eccsLogRotated == True:
            while cur_date <= criteria['date_to']:
                json_data = {}
+                criteria['prev_date'] = (datetime.strptime(cur_date, '%Y-%m-%d') - timedelta(days=1)).strftime('%Y-%m-%d')
                tmpDate = datetime.strptime(cur_date, '%Y-%m-%d').strftime('%Y%m%d')
                file_path = f"{e_p.ECCS_LOGSDIR}/eccs-uwsgi-req.log-{tmpDate}"
-                json_file_path = f"{e_p.ECCS_DIR}/parsed/eccs-uwsgi-req-json-{tmpDate}"

-                try:
-                    f = open(json_file_path)
-                    json_data = json.load(f)
-                except (ValueError, FileNotFoundError) as e:
-                    #print(e)
-                    pass
+                if useParsedFile == True:
+                    tmpDate = (datetime.strptime(cur_date, '%Y-%m-%d') - timedelta(days=1)).strftime('%Y%m%d')
+                    json_file_path = f"{e_p.ECCS_DIR}/parsed/eccs-uwsgi-req-json-{tmpDate}"
+                    try:
+                        f = open(json_file_path)
+                        json_data = json.load(f)
+                    except (ValueError, FileNotFoundError) as e:
+                        #print(e)
+                        pass
+
+                if len(json_data) > 0 and ('idp' in criteria or 'reg_auth' in criteria):
+                    json_data = filterParsedData(json_data, criteria)

                if len(json_data) == 0:
                    try:
                        with open(file_path,"r",encoding="utf-8") as fo:
                            lines = fo.readlines()
                        json_data = parseLog(lines, criteria)
-                        storeParsedDay(json_file_path, json_data)
+
+                        if useParsedFile == True and 'idp' not in criteria and 'reg_auth' not in criteria:
+                            storeParsedDay(json_file_path, json_data)
                    except FileNotFoundError as e:
                        #print(e)
                        pass

                results.update(json_data)
                cur_date = (datetime.strptime(cur_date, '%Y-%m-%d') + timedelta(days=1)).strftime('%Y-%m-%d')
        else:
            try:
                with open(file_path,"r",encoding="utf-8") as fo:
                    lines = fo.readlines()
            except FileNotFoundError as e:
                print(e)
                results = {}

            results = parseLog(lines, criteria)

        return json.dumps(results)

# /api/
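The core of the request-handler change is the path arithmetic: the raw uwsgi log keeps the current day's stamp, while the parsed JSON cache is looked up under the previous day's stamp, matching the delayed rotation. A small runnable sketch of just that arithmetic; the directory values are placeholders for e_p.ECCS_LOGSDIR and e_p.ECCS_DIR.

from datetime import datetime, timedelta

ECCS_LOGSDIR = "/var/log/eccs"   # placeholder for e_p.ECCS_LOGSDIR
ECCS_DIR = "/opt/eccs"           # placeholder for e_p.ECCS_DIR

def day_paths(cur_date):
    # raw log: stamped with the requested day; parsed cache: previous day
    day = datetime.strptime(cur_date, '%Y-%m-%d')
    raw_stamp = day.strftime('%Y%m%d')
    cache_stamp = (day - timedelta(days=1)).strftime('%Y%m%d')
    return (f"{ECCS_LOGSDIR}/eccs-uwsgi-req.log-{raw_stamp}",
            f"{ECCS_DIR}/parsed/eccs-uwsgi-req-json-{cache_stamp}")

print(day_paths('2024-03-11'))
# ('/var/log/eccs/eccs-uwsgi-req.log-20240311',
#  '/opt/eccs/parsed/eccs-uwsgi-req-json-20240310')

Note that the diff writes the cache file only when no idp or reg_auth filter is active, so filtered views never overwrite the full per-day cache.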