eduGAIN Connectivity Check · Commits

Commit fd39ff5d authored 1 year ago by Valentin Pocotilenco

    refactor to work logrotate files

Parent: 86c36b42
Changes: 1 changed file

api.py  +56 −27  (56 additions, 27 deletions)
--- a/api.py
+++ b/api.py
@@ -63,7 +63,8 @@ def getSimpleDict(aux):
 def isValidDate(date_text):
     try:
         date.fromisoformat(date_text)
-    except ValueError:
+    except ValueError as e:
+        print(e)
         return False
     return True
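The change above keeps isValidDate() returning a boolean but prints the underlying ValueError first. A minimal standalone sketch of the same check (assuming, as in api.py, that `date` comes from the datetime module):

from datetime import date

def is_valid_date(date_text):
    # Same shape as the refactored isValidDate(): report the error, then refuse.
    try:
        date.fromisoformat(date_text)
    except ValueError as e:
        print(e)            # the ValueError message is printed before returning False
        return False
    return True

print(is_valid_date("2024-02-29"))  # True  (valid ISO date)
print(is_valid_date("2024-13-01"))  # False (month out of range)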
@@ -73,6 +74,13 @@ def clearDateString(str):
         str = str.replace(elem, '')
     return str
 
+def storeParsedDay(path, data):
+    try:
+        with open(path, "w") as outfile:
+            outfile.write(json.dumps(data))
+    except FileNotFoundError as e:
+        print(e)
+
 # Log will be parsed using predefined format
 # %(addr)|[%(ctime)]|%(method)|%(uri)|%(uagent)|%(referer)
 # target result is array like:
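For orientation, a minimal sketch of splitting one line in the pipe-delimited format documented above and caching the parsed result to disk in the spirit of the new storeParsedDay() helper; the sample log line, field handling, and output path are illustrative, not taken from the repository:

import json

# Illustrative line in the %(addr)|[%(ctime)]|%(method)|%(uri)|%(uagent)|%(referer) format
sample = "192.0.2.10|[Mon Jan  1 12:00:00 2024]|GET|/eccs/api/?idp=https://idp.example.org|curl/8.0|-"

addr, ctime, method, uri, uagent, referer = sample.split("|")
parsed = {"addr": addr, "ctime": ctime.strip("[]"), "method": method, "uri": uri}

def store_parsed_day(path, data):
    # Same idea as storeParsedDay() above: persist one day's parsed data as JSON.
    try:
        with open(path, "w") as outfile:
            outfile.write(json.dumps(data))
    except FileNotFoundError as e:   # e.g. the target directory does not exist yet
        print(e)

store_parsed_day("/tmp/eccs-parsed-sample.json", [parsed])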
@@ -133,7 +141,7 @@ def parseLog(lines,criteria):
         if rowGET['idp']:
             request_param['idp'] += 1
-            if ('idp' in criteria and criteria['idp'] == rowGET['idp']) or 'idp' not in criteria:
+            if ('idp' in criteria and criteria['idp'] in rowGET['idp']) or 'idp' not in criteria:
                 if rowGET['idp'] not in idp.keys():
                     idp[rowGET['idp']] = 0
                 idp[rowGET['idp']] += 1
@@ -141,7 +149,7 @@ def parseLog(lines,criteria):
         if rowGET['reg_auth']:
             request_param['reg_auth'] += 1
-            if ('reg_auth' in criteria and criteria['reg_auth'] == rowGET['reg_auth']) or 'reg_auth' not in criteria:
+            if ('reg_auth' in criteria and criteria['reg_auth'] in rowGET['reg_auth']) or 'reg_auth' not in criteria:
                 if rowGET['reg_auth'] not in reg_auth.keys():
                     reg_auth[rowGET['reg_auth']] = 0
                 reg_auth[rowGET['reg_auth']] += 1
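Both hunks above relax the criteria check from exact equality (==) to a substring test (in), so a filter value can match part of the logged entityID or registration authority. A tiny sketch with hypothetical values:

# Hypothetical values: a partial entityID now matches, where == required the full string.
criteria_idp = "idp.example.org"
row_idp = "https://idp.example.org/idp/shibboleth"

print(criteria_idp == row_idp)  # False: exact comparison
print(criteria_idp in row_idp)  # True: substring comparison, as in the new check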
@@ -154,7 +162,7 @@ def parseLog(lines,criteria):
         'reg_auth': reg_auth
     }
-    return json.dumps(result)
+    return result
 
 # Parse URL from log line. Used to get only idp and reg_auth.
 def parseReqURL(url):
@@ -396,26 +404,31 @@ class WebData(Resource):
         list_feds = get_list_from_url(e_p.ECCS_LISTFEDSURL, e_p.ECCS_LISTFEDSFILE)
         regAuthDict = get_reg_auth_dict(list_feds)
-        file_path = f"{e_p.ECCS_OUTPUTDIR}/{e_p.ECCS_RESULTSLOG}"
+        file_path = f"{e_p.ECCS_LOGSDIR}/eccs-uwsgi-req.log"  # will this name be moved to properties definer file ?
         criteria = {}
         criteria['date_from'] = criteria['date_to'] = e_p.DAY
-        eccsLogRotated = False
+        eccsLogRotated = True
         in_data = request.args
 
-        if ('date_from' in in_data and isValidDate(in_data['date_from'])):
-            criteria['date_from'] = in_data['date_from']
-            if ('date_to' not in in_data):
-                criteria['date_to'] = criteria['date_from'] + timedelta(days=30)
+        if ('dateFrom' in in_data and isValidDate(in_data['dateFrom'])):
+            criteria['date_from'] = in_data['dateFrom']
+            if ('dateTo' not in in_data):
+                criteria['date_to'] = (datetime.strptime(criteria['date_from'], '%Y-%m-%d') + timedelta(days=30)).strftime('%Y-%m-%d')
+                if datetime.today().strftime('%Y-%m-%d') < criteria['date_to']:
+                    diff = (datetime.strptime(criteria['date_to'], '%Y-%m-%d') - datetime.today()).days
+                    criteria['date_from'] = (datetime.strptime(criteria['date_from'], '%Y-%m-%d') - timedelta(days=diff)).strftime('%Y-%m-%d')
+                    criteria['date_to'] = datetime.today().strftime('%Y-%m-%d')
 
-        if ('date_to' in in_data and isValidDate(in_data['date_to'])):
-            criteria['date_to'] = in_data['date_to']
-            if ('date_from' not in in_data):
-                criteria['date_from'] = criteria['date_to'] - timedelta(days=30)
+        if ('dateTo' in in_data and isValidDate(in_data['dateTo'])):
+            criteria['date_to'] = in_data['dateTo']
+            if ('dateFrom' not in in_data):
+                criteria['date_from'] = (datetime.strptime(criteria['date_to'], '%Y-%m-%d') + timedelta(days=30)).strftime('%Y-%m-%d')
 
-        if ('request_source' in in_data and in_data['request_source'] == 'divided'):
+        if ('requestSource' in in_data and in_data['requestSource'] == 'divided'):
             criteria['request_source'] = 'divided'
 
-        if ('reg_auth' in in_data and in_data['reg_auth'] in regAuthDict):
+        if ('regAuth' in in_data and in_data['regAuth'] in regAuthDict):
             criteria['reg_auth'] = in_data['reg_auth']
 
         if ('idp' in in_data):
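After this hunk, WebData reads camelCase query parameters (dateFrom, dateTo, requestSource, regAuth) and normalizes the date window with strptime/strftime. A hedged client-side sketch of calling the endpoint; the route, host, and parameter values are placeholders (the URL mapping of the WebData resource is not part of this diff), and requests is assumed as the HTTP client:

import requests  # assumed third-party HTTP client, not part of api.py

params = {
    "dateFrom": "2024-01-01",              # must pass isValidDate()
    "dateTo": "2024-01-31",                # optional; otherwise derived from dateFrom
    "requestSource": "divided",            # only the literal value 'divided' is recognized
    "regAuth": "https://rr.example.org",   # placeholder; must appear in regAuthDict
}
# Placeholder host and route; adjust to wherever the WebData resource is actually mounted.
resp = requests.get("https://eccs.example.org/api/webdata", params=params)
print(resp.text)  # WebData now returns json.dumps(results)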
@@ -423,34 +436,50 @@ class WebData(Resource):
         # here I have to parse eccs-log file
         lines = []
-        results = []
+        results = {}
         cur_date = criteria['date_from']
         if eccsLogRotated == True:
             while cur_date <= criteria['date_to']:
-                file_path = f"{e_p.ECCS_OUTPUTDIR}/eccs_{cur_date}.log"
+                json_data = {}
+                tmpDate = datetime.strptime(cur_date, '%Y-%m-%d').strftime('%Y%m%d')
+                file_path = f"{e_p.ECCS_LOGSDIR}/eccs-uwsgi-req.log-{tmpDate}"
+                json_file_path = f"{e_p.ECCS_DIR}/parsed/eccs-uwsgi-req-json-{tmpDate}"
                 try:
-                    with open(file_path, "r", encoding="utf-8") as fo:
-                        lines = fo.readlines()
-                except FileNotFoundError as e:
-                    results[cur_date] = []
-                results = parseLog(lines, criteria)
-                cur_date += timedelta(days=1)
+                    f = open(json_file_path)
+                    json_data = json.load(f)
+                except (ValueError, FileNotFoundError) as e:
+                    #print(e)
+                    pass
+                if len(json_data) == 0:
+                    try:
+                        with open(file_path, "r", encoding="utf-8") as fo:
+                            lines = fo.readlines()
+                        json_data = parseLog(lines, criteria)
+                        storeParsedDay(json_file_path, json_data)
+                    except FileNotFoundError as e:
+                        #print(e)
+                        pass
+                results.update(json_data)
+                cur_date = (datetime.strptime(cur_date, '%Y-%m-%d') + timedelta(days=1)).strftime('%Y-%m-%d')
         else:
             try:
                 with open(file_path, "r", encoding="utf-8") as fo:
                     lines = fo.readlines()
             except FileNotFoundError as e:
-                results = []
+                print(e)
+                results = {}
             results = parseLog(lines, criteria)
 
-        return results
+        return json.dumps(results)
 
 # /api/
 class Help(Resource):
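The rotated-log branch above derives one input file and one JSON cache path per day and steps cur_date forward as a string. A small sketch of that filename/date walk, with placeholder directories standing in for e_p.ECCS_LOGSDIR and e_p.ECCS_DIR:

from datetime import datetime, timedelta

LOGS_DIR = "/var/log/eccs"   # placeholder for e_p.ECCS_LOGSDIR
ECCS_DIR = "/opt/eccs"       # placeholder for e_p.ECCS_DIR

cur_date, date_to = "2024-01-01", "2024-01-03"
while cur_date <= date_to:   # ISO date strings compare correctly as strings
    tmpDate = datetime.strptime(cur_date, '%Y-%m-%d').strftime('%Y%m%d')
    print(f"{LOGS_DIR}/eccs-uwsgi-req.log-{tmpDate}")           # rotated uwsgi request log
    print(f"{ECCS_DIR}/parsed/eccs-uwsgi-req-json-{tmpDate}")   # per-day parsed JSON cache
    # cur_date stays a string, so the loop advances via strptime/strftime as in the hunk above
    cur_date = (datetime.strptime(cur_date, '%Y-%m-%d') + timedelta(days=1)).strftime('%Y-%m-%d')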