Commit f4c8e106 authored by Karel van Klink

rework pipeline to use ruff instead of flake8, isort, and black

parent 186046df
1 merge request: !62 Feature/update documentation
[MAIN]
extension-pkg-whitelist=pydantic
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
# Note that it does not contain TODO, only the default FIXME and XXX
notes=FIXME,
XXX
@@ -4,21 +4,19 @@ import os

from fastapi.testclient import TestClient

import lso

-config_filename = os.path.join(
-    os.path.dirname(__file__),
-    '..', 'config.json.example')
+config_filename = os.path.join(os.path.dirname(__file__), "..", "config.json.example")
output_filename = os.path.join(
-    os.path.dirname(__file__),
-    'source', '_static', 'openapi.json')
+    os.path.dirname(__file__), "source", "_static", "openapi.json"
+)

-os.environ['SETTINGS_FILENAME'] = config_filename
+os.environ["SETTINGS_FILENAME"] = config_filename

app = lso.create_app()
client = TestClient(app)
-rsp = client.get('/openapi.json')
+rsp = client.get("/openapi.json")
openapi_doc = json.dumps(rsp.json(), indent=2)

-with open(output_filename, 'w') as f:
+with open(output_filename, "w") as f:
    f.write(openapi_doc)

-print(f'wrote {output_filename}')
+print(f"wrote {output_filename}")
@@ -16,10 +16,9 @@ import json
import os
import sys

-sys.path.insert(0, os.path.abspath(
-    os.path.join(
-        os.path.dirname(__file__),
-        '..', '..', 'lso')))
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "lso"))
+)


class RenderAsJSON(Directive):
@@ -28,56 +27,56 @@ class RenderAsJSON(Directive):
    required_arguments = 1

    def run(self):
-        module_path, member_name = self.arguments[0].rsplit('.', 1)
+        module_path, member_name = self.arguments[0].rsplit(".", 1)
        member_data = getattr(import_module(module_path), member_name)
        code = json.dumps(member_data, indent=2)
        literal = nodes.literal_block(code, code)
-        literal['language'] = 'json'
+        literal["language"] = "json"
        return [
            addnodes.desc_name(text=member_name),
-            addnodes.desc_content('', literal)
+            addnodes.desc_content("", literal),
        ]


def setup(app):
-    app.add_directive('asjson', RenderAsJSON)
+    app.add_directive("asjson", RenderAsJSON)


# -- Project information -----------------------------------------------------
-project = 'Lightweight Service Orchestrator'
-copyright = '2023, GÉANT Vereniging'
-author = 'GÉANT Orchestration & Automation Team'
+project = "Lightweight Service Orchestrator"
+copyright = "2023, GÉANT Vereniging"
+author = "GÉANT Orchestration & Automation Team"

# -- General configuration ---------------------------------------------------
extensions = [
-    'sphinx_rtd_theme',
-    'sphinx.ext.autodoc',
-    'sphinx.ext.coverage',
-    'sphinx.ext.todo'
+    "sphinx_rtd_theme",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.coverage",
+    "sphinx.ext.todo",
]

-templates_path = ['templates']
+templates_path = ["templates"]
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------
-html_theme = 'sphinx_rtd_theme'
-html_static_path = ['_static']
+html_theme = "sphinx_rtd_theme"
+html_static_path = ["_static"]
html_theme_options = {
-    'style_nav_header_background': 'rgb(0 63 95)',
+    "style_nav_header_background": "rgb(0 63 95)",
}
-html_css_files = ['custom.css']
-html_logo = '_static/geant_logo_white.svg'
+html_css_files = ["custom.css"]
+html_logo = "_static/geant_logo_white.svg"

# Both the class' and the ``__init__`` method's docstring are concatenated and inserted.
-autoclass_content = 'both'
-autodoc_typehints = 'none'
+autoclass_content = "both"
+autodoc_typehints = "none"

# Display todos by setting to True
todo_include_todos = True
"""Automatically invoked app factory.""" """Automatically invoked app factory."""
import logging import logging
from fastapi import FastAPI from fastapi import FastAPI
...@@ -13,7 +14,6 @@ def create_app() -> FastAPI: ...@@ -13,7 +14,6 @@ def create_app() -> FastAPI:
:return: a new flask app instance :return: a new flask app instance
""" """
app = FastAPI() app = FastAPI()
# app = FastAPI(dependencies=[Depends(get_query_token)]) # app = FastAPI(dependencies=[Depends(get_query_token)])
......
"""Default app creation.""" """Default app creation."""
import lso import lso
app = lso.create_app() app = lso.create_app()
......
@@ -7,16 +7,22 @@ import os

LOGGING_DEFAULT_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
-    "formatters": {"simple": {"format": "%(asctime)s - %(name)s " "(%(lineno)d) - %(levelname)s - %(message)s"}},
+    "formatters": {"simple": {"format": "%(asctime)s - %(name)s (%(lineno)d) - %(levelname)s - %(message)s"}},
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": "simple",
            "stream": "ext://sys.stdout",
-        }
+        },
    },
-    "loggers": {"resource_management": {"level": "DEBUG", "handlers": ["console"], "propagate": False}},
+    "loggers": {
+        "resource_management": {
+            "level": "DEBUG",
+            "handlers": ["console"],
+            "propagate": False,
+        },
+    },
    "root": {"level": "INFO", "handlers": ["console"]},
}
......
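For reference, a minimal sketch of how a dictConfig-style mapping like the one above is typically activated; this is plain standard-library usage and not code from this commit, and the trimmed-down config and logger name are reused only for illustration:

import logging
import logging.config

# Hypothetical, trimmed-down config in the same shape as LOGGING_DEFAULT_CONFIG.
DEMO_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {"simple": {"format": "%(asctime)s - %(name)s (%(lineno)d) - %(levelname)s - %(message)s"}},
    "handlers": {"console": {"class": "logging.StreamHandler", "level": "DEBUG", "formatter": "simple"}},
    "root": {"level": "INFO", "handlers": ["console"]},
}

logging.config.dictConfig(DEMO_LOGGING_CONFIG)  # apply the mapping
logging.getLogger("resource_management").info("logging configured")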
"""Module that gathers common API responses and data models.""" """Module that gathers common API responses and data models."""
import enum import enum
import json import json
import logging import logging
...@@ -137,7 +138,13 @@ def _process_json_output(runner: ansible_runner.Runner) -> list[dict[Any, Any]]: ...@@ -137,7 +138,13 @@ def _process_json_output(runner: ansible_runner.Runner) -> list[dict[Any, Any]]:
return parsed_output return parsed_output
def _run_playbook_proc(job_id: str, playbook_path: str, extra_vars: dict, inventory: list[str], callback: str) -> None: def _run_playbook_proc(
job_id: str,
playbook_path: str,
extra_vars: dict,
inventory: list[str],
callback: str,
) -> None:
"""Run a playbook, internal function. """Run a playbook, internal function.
:param str job_id: Identifier of the job that's executed. :param str job_id: Identifier of the job that's executed.
...@@ -177,7 +184,6 @@ def run_playbook(playbook_path: str, extra_vars: dict, inventory: str, callback: ...@@ -177,7 +184,6 @@ def run_playbook(playbook_path: str, extra_vars: dict, inventory: str, callback:
:return: Result of playbook launch, this could either be successful or unsuccessful. :return: Result of playbook launch, this could either be successful or unsuccessful.
:rtype: :class:`PlaybookLaunchResponse` :rtype: :class:`PlaybookLaunchResponse`
""" """
job_id = str(uuid.uuid4()) job_id = str(uuid.uuid4())
thread = threading.Thread( thread = threading.Thread(
target=_run_playbook_proc, target=_run_playbook_proc,
......
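As context for the reworked signature, a hedged usage sketch of the public run_playbook helper; the import path, playbook path, inventory, and callback URL are placeholders guessed for illustration, not values from this repository:

# Hypothetical caller of run_playbook; only the parameter names come from the diff above.
from lso.playbook import run_playbook  # import path is an assumption

response = run_playbook(
    playbook_path="/opt/ansible/playbooks/iptrunk.yaml",      # placeholder
    extra_vars={"dry_run": True},                              # placeholder
    inventory="router1.example.net",                           # placeholder
    callback="https://orchestrator.example.net/api/callback",  # placeholder
)
# run_playbook hands the work to _run_playbook_proc on a background thread and
# returns a PlaybookLaunchResponse immediately; results are reported to the callback URL.
print(response)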
"""Module of all routes that are available in LSO."""
@@ -2,6 +2,7 @@
For now only includes a single endpoint that responds with the current version of the API and LSO.
"""
from importlib import metadata

from fastapi import APIRouter
......
@@ -28,7 +28,7 @@ class IPTrunkProvisioningParams(IPTrunkParams):
    #: also making it an optional parameter.
    dry_run: bool | None = True
    #: The type of object that is changed.
-    object: str
+    object: str  # noqa: A003


class IPTrunkModifyParams(IPTrunkParams):
......
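The new noqa marker silences ruff's flake8-builtins rule A003, which flags class attributes that shadow a Python builtin (here the field is literally named object). A minimal sketch of the pattern, using an invented class rather than the real model:

class ExampleParams:  # hypothetical class, not from this repository
    # A003 ("builtin attribute shadowing") fires because the attribute name
    # collides with the builtin object; the suppression keeps the required field name.
    object: str = "router"  # noqa: A003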
-[tool.isort]
-profile = "black"
-line_length = 120
-skip = ["venv", ".tox", "docs"]
-known_third_party = ["pydantic", "migrations"]
-known_first_party = ["test", "docs"]
-
-[tool.black]
-line-length = 120
-target-version = ["py310"]
-exclude = '''
-(
-  /(
-    geant_service_orchestrator\.egg-info  # exclude a few common directories in the
-    | \.git                               # root of the project
-    | \.*_cache
-    | \.tox
-    | venv
-    | docs
-  )/
-)
-'''

[tool.mypy]
exclude = [
    "venv",
    "test/*",
    "docs"
]
ignore_missing_imports = true
@@ -44,64 +22,81 @@ show_error_codes = true
show_column_numbers = true

# Suppress "note: By default the bodies of untyped functions are not checked"
disable_error_code = "annotation-unchecked"
+# Forbid the use of a generic "type: ignore" without specifying the exact error that is ignored
+enable_error_code = "ignore-without-code"

[tool.ruff]
-exclude = [
-    ".git",
-    ".*_cache",
-    ".tox",
-    "*.egg-info",
-    "__pycache__",
+extend-exclude = [
    "htmlcov",
-    "venv",
-    "docs"
+    "docs",
]
ignore = [
-    "C417",
-    "D100",
-    "D101",
-    "D102",
-    "D103",
-    "D104",
-    "D105",
-    "D106",
-    "D107",
-    "D202",
    "D203",
    "D213",
-    "E501",
-    "N806",
-    "B905",
    "N805",
-    "B904",
-    "N803",
-    "N801",
-    "N815",
-    "N802",
-    "S101",
-    "S104"
+    "PLR0913",
+    "PLR0904",
+    "PLW1514"
]
line-length = 120
select = [
+    "A",
+    "ARG",
    "B",
+    "BLE",
    "C",
+    "COM",
+    "C4",
+    "C90",
    "D",
+    "DTZ",
    "E",
+    "EM",
+    "ERA",
    "F",
+    "FA",
+    "FBT",
+    "FLY",
+    "FURB",
+    "G",
    "I",
+    "ICN",
+    "INP",
+    "ISC",
+    "LOG",
    "N",
+    "PERF",
+    "PGH",
+    "PIE",
+    "PL",
+    "PT",
+    "PTH",
+    "PYI",
+    "Q",
    "RET",
-    "R",
+    "RSE",
+    "RUF",
    "S",
+    "SIM",
+    "SLF",
    "T",
+    "T20",
+    "TID",
+    "TRY",
+    "UP",
    "W",
+    "YTT"
]
-target-version = "py310"
+target-version = "py311"

[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "all"

[tool.ruff.per-file-ignores]
-"test/*" = ["B033", "N816", "N802"]
+"test/*" = ["D", "S101", "PLR2004"]
+"setup.py" = ["D100"]

[tool.ruff.isort]
known-third-party = ["pydantic", "migrations"]
......
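For orientation, the per-file ignores give test code room the package itself does not get: D covers the docstring rules, S101 the use of plain assert, and PLR2004 comparisons against magic values. A throwaway sketch of a test that would otherwise trip all three:

# Hypothetical test body; the client fixture name mirrors conftest.py below.
def test_version_endpoint(client):    # D103: no docstring required under test/*
    rsp = client.get("/version")      # endpoint path is an assumption
    assert rsp.status_code == 200     # S101 (assert) and PLR2004 (magic value) allowed here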
@@ -25,9 +25,6 @@ sphinx~=7.2.6
sphinx-rtd-theme~=1.3.0
requests~=2.31.0
docutils~=0.18.1
-isort~=5.12.0
-black~=23.11.0
-flake8~=6.1.0
mypy~=1.7.0
ruff~=0.1.6
types-setuptools~=68.2.0.1
......
@@ -4,9 +4,9 @@ setup(
    name="goat-lso",
    version="0.1",
    author="GÉANT Orchestration & Automation Team",
-    author_email="TBD",
+    author_email="goat@geant.org",
    description="Lightweight Service Orchestrator",
-    url="https://gitlab.geant.org/goat/gap/lso",
+    url="https://gitlab.software.geant.org/goat/gap/lso",
    packages=find_packages(),
    install_requires=[
        "jsonschema~=4.18.0",

@@ -34,11 +34,10 @@ setup(
    license_files=("LICENSE.txt",),
    classifiers=[
        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Development Status :: 2 - Pre-Alpha",
    ],
-    python_requires=">=3.10",
+    python_requires=">=3.11",
)
import json
import os
import tempfile
+from collections.abc import Callable, Generator
from io import StringIO
-from typing import Any, Callable, Generator
+from typing import Any

import pytest
from faker import Faker

@@ -11,7 +12,7 @@ from fastapi.testclient import TestClient
import lso


-@pytest.fixture
+@pytest.fixture()
def mocked_ansible_runner_run() -> Callable:
    class Runner:
        def __init__(self) -> None:

@@ -19,7 +20,7 @@ def mocked_ansible_runner_run() -> Callable:
            self.rc = 0
            self.stdout = StringIO("[{'step one': 'results'}, {'step two': 2}]")

-    def run(*args: Any, **kwargs: Any) -> Runner:
+    def run(*args: Any, **kwargs: Any) -> Runner:  # noqa: ARG001
        return Runner()

    return run

@@ -44,11 +45,11 @@ def data_config_filename(configuration_data: dict[str, str]) -> Generator[str, A
@pytest.fixture(scope="session")
-def client(data_config_filename: str) -> Generator[TestClient, Any, None]:
+def client(data_config_filename: str) -> TestClient:
    """Return a client that can be used to test the server."""
    os.environ["SETTINGS_FILENAME"] = data_config_filename
    app = lso.create_app()
-    yield TestClient(app)  # wait here until calling context ends
+    return TestClient(app)  # wait here until calling context ends


@pytest.fixture(scope="session")
......
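The import reshuffle above follows ruff's pyupgrade-style checks: since Python 3.9, Callable and Generator are importable from collections.abc, and the typing aliases are deprecated (rule UP035). A self-contained before/after sketch, independent of this repository:

# Before (flagged by UP035): from typing import Any, Callable, Generator
# After: only Any still comes from typing.
from collections.abc import Callable, Generator
from typing import Any


def apply_twice(func: Callable[[int], int], value: int) -> int:
    # Tiny demo so the Callable import is actually exercised.
    return func(func(value))


def count_up(limit: int) -> Generator[int, Any, None]:
    yield from range(limit)


assert apply_twice(lambda x: x + 1, 0) == 2
assert list(count_up(3)) == [0, 1, 2]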
import time
-from typing import Callable
+from collections.abc import Callable
from unittest.mock import patch

import jsonschema

@@ -66,7 +66,10 @@ def subscription_object(faker: Faker) -> dict:
                    "iptrunk_side_ae_members": ["ge-0/0/0", "ge-0/0/1"],
                    "subscription_instance_id": faker.uuid4(),
                    "iptrunk_side_ae_geant_a_sid": "SID-11112",
-                    "iptrunk_side_ae_members_description": [faker.pystr(), faker.pystr()],
+                    "iptrunk_side_ae_members_description": [
+                        faker.pystr(),
+                        faker.pystr(),
+                    ],
                },
                {
                    "name": "IptrunkSideBlock",

@@ -110,7 +113,10 @@ def subscription_object(faker: Faker) -> dict:
                    "iptrunk_side_ae_members": ["ge-0/0/0", "ge-0/0/1"],
                    "subscription_instance_id": faker.uuid4(),
                    "iptrunk_side_ae_geant_a_sid": "SID-11112",
-                    "iptrunk_side_ae_members_description": [faker.pystr(), faker.pystr()],
+                    "iptrunk_side_ae_members_description": [
+                        faker.pystr(),
+                        faker.pystr(),
+                    ],
                },
            ],
        },

@@ -160,7 +166,7 @@ def migration_object(faker: Faker) -> dict:
@responses.activate
def test_ip_trunk_provisioning(
-    client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable
+    client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable,
) -> None:
    responses.post(url=TEST_CALLBACK_URL, status=200)

@@ -189,7 +195,7 @@ def test_ip_trunk_provisioning(
@responses.activate
def test_ip_trunk_modification(
-    client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable
+    client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable,
) -> None:
    responses.post(url=TEST_CALLBACK_URL, status=200)

@@ -244,7 +250,10 @@ def test_ip_trunk_deletion(client: TestClient, subscription_object: dict, mocked
@responses.activate
def test_ip_trunk_migration(
-    client: TestClient, subscription_object: dict, migration_object: dict, mocked_ansible_runner_run: Callable
+    client: TestClient,
+    subscription_object: dict,
+    migration_object: dict,
+    mocked_ansible_runner_run: Callable,
) -> None:
    responses.post(url=TEST_CALLBACK_URL, status=204)
......
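The trailing commas added to the test signatures come from the newly enabled COM (flake8-commas) rules: COM812 asks for a trailing comma once arguments are split over multiple lines, and that "magic trailing comma" also keeps the multi-line layout stable under the formatter. A throwaway illustration with invented values:

def launch(playbook: str, inventory: str, dry_run: bool = True) -> None:
    # Single-line signature: no trailing comma required.
    print(playbook, inventory, dry_run)


launch(
    "iptrunk.yaml",         # placeholder values
    "router1.example.net",
    dry_run=False,          # COM812: trailing comma after the last element
)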
import time
-from typing import Callable
+from collections.abc import Callable
from unittest.mock import patch

import jsonschema

@@ -31,7 +31,10 @@ def test_router_provisioning(client: TestClient, faker: Faker, mocked_ansible_ru
        "lo_iso_address": "1.2.3.4.5.6",
        "snmp_location": "city,country[1.2,3.4]",
        "si_ipv4_network": faker.ipv4() + "/24",
-        "ias_lt_network": {"v4": faker.ipv4() + "/24", "v6": faker.ipv6() + "/64"},
+        "ias_lt_network": {
+            "v4": faker.ipv4() + "/24",
+            "v6": faker.ipv6() + "/64",
+        },
        "site_country_code": faker.country_code(),
        "site_city": faker.city(),
        "site_latitude": float(faker.latitude()),
......
"""Set of tests that verify correct config is accepted and incorrect config is not.""" """Set of tests that verify correct config is accepted and incorrect config is not."""
import io import io
import json import json
import os import os
...@@ -20,7 +21,12 @@ def test_validate_testenv_config(data_config_filename: str) -> None: ...@@ -20,7 +21,12 @@ def test_validate_testenv_config(data_config_filename: str) -> None:
@pytest.mark.parametrize( @pytest.mark.parametrize(
"bad_config", [{"name": "bad version", "version": 123}, {"name": "missing version"}, {"version": "missing name"}] "bad_config",
[
{"name": "bad version", "version": 123},
{"name": "missing version"},
{"version": "missing name"},
],
) )
def test_bad_config(bad_config: dict) -> None: def test_bad_config(bad_config: dict) -> None:
with io.StringIO(json.dumps(bad_config)) as file: with io.StringIO(json.dumps(bad_config)) as file:
......
[tox]
envlist = py311

-[flake8]
-ignore = W503
-exclude = .git,.*_cache,.eggs,*.egg-info,__pycache__,venv,.tox,docs
-enable-extensions = G
-select = B,C,D,E,F,G,I,N,S,T,W,B902,B903,R
-max-line-length = 120
-ban-relative-imports = true
-per-file-ignores =
-    # Allow first argument to be cls instead of self for pydantic validators
-    gso/*: B902
-
[testenv]
passenv = XDG_CACHE_HOME,USE_COMPOSE
setenv =

@@ -21,11 +10,9 @@ deps =
    -r requirements.txt
commands =
-    isort -c .
-    ruff .
-    black --check .
+    ruff --respect-gitignore --preview .
+    ruff format --respect-gitignore --preview --check .
    mypy .
-    flake8
    coverage erase
    coverage run --source lso -m pytest
    coverage xml
......