Verified commit e3fb6ceb authored by Karel van Klink

pass mypy linting, and make flake8 stricter

parent 95f4e54c
1 merge request: !38 Nat 189 ansible return json
Pipeline #83756 passed
@@ -24,7 +24,6 @@ exclude = '''
 [tool.mypy]
 exclude = [
     "venv",
-    "test/*",
     "docs"
 ]
 ignore_missing_imports = true
......
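Dropping "test/*" from the mypy exclude list is what puts the test suite under type checking, which is why the fixtures and helpers below gain annotations. As a quick local check, mypy can be invoked through its Python API; a minimal sketch, where the "lso" and "test" targets are directory names taken from the imports visible in this diff:

# Minimal sketch: run mypy programmatically with the project configuration.
from mypy import api

report, errors, exit_status = api.run(["--config-file", "pyproject.toml", "lso", "test"])
print(report)
assert exit_status == 0, "mypy reported type errors"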
 import json
 import os
 import tempfile
+from typing import Any, Generator

 import pytest
 from fastapi.testclient import TestClient
@@ -11,13 +12,13 @@ TEST_CONFIG = {"collection-name": "kvklink.echo", "test-role": "kvklink.echo.ech
 @pytest.fixture
-def config_data():
+def config_data() -> dict[str, str]:
     """Start the server with valid configuration data."""
     return {"ansible_playbooks_root_dir": "/"}


 @pytest.fixture
-def config_file(config_data):
+def config_file(config_data: dict[str, str]) -> Generator[str, Any, None]:
     """Fixture that will yield a filename that contains a valid configuration.

     :return: Path to valid configuration file
@@ -29,7 +30,7 @@ def config_file(config_data):
 @pytest.fixture
-def client(config_file):
+def client(config_file: str) -> Generator[TestClient, Any, None]:
     """Return a client that can be used to test the server."""
     os.environ["SETTINGS_FILENAME"] = config_file
     app = lso.create_app()
......
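The new Generator[str, Any, None] return type on config_file matches the usual yield-style pytest fixture: it yields a path to the written configuration and can clean up after the test. The fixture body is collapsed in this diff, so the following is only a sketch of the pattern using the tempfile import shown above, not the project's actual implementation:

import json
import os
import tempfile
from typing import Any, Generator

import pytest


@pytest.fixture
def config_file(config_data: dict[str, str]) -> Generator[str, Any, None]:
    """Sketch of a yield fixture: write the config to a temporary file and yield its path."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
        json.dump(config_data, f)
        path = f.name
    yield path
    os.unlink(path)  # clean up once the test that used the fixture has finished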
 from io import StringIO
+from typing import Any

 TEST_CALLBACK_URL = "https://fqdn.abc.xyz/api/resume"


-def test_ansible_runner_run(**kwargs):
-    class Runner:
-        def __init__(self):
-            self.status = "success"
-            self.rc = 0
-            self.stdout = StringIO("some initial text data")
+class Runner:
+    def __init__(self) -> None:
+        self.status = "success"
+        self.rc = 0
+        self.stdout = StringIO("[{'step one': 'results'}, {'step two': 2}]")
+
+
+def test_ansible_runner_run(**kwargs: Any) -> Runner:
     return Runner()
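test_ansible_runner_run acts as a stand-in for ansible_runner.run: it swallows whatever keyword arguments the application passes and returns a Runner whose status, rc and stdout attributes mimic a finished playbook run, with stdout now carrying JSON-like step results instead of free text. The route tests below import it next to unittest.mock.patch; a sketch of how such a stub is typically swapped in (the patch target is an assumption, it has to match wherever ansible_runner is actually looked up):

from unittest.mock import patch

from test.routes import test_ansible_runner_run

# Assumption: the application calls ansible_runner.run from lso.playbook.
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run):
    ...  # exercise the API endpoint that would normally launch a playbook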
@@ -3,13 +3,14 @@ from unittest.mock import patch
 import jsonschema
 import responses
+from starlette.testclient import TestClient

 from lso.playbook import PlaybookLaunchResponse
 from test.routes import TEST_CALLBACK_URL, test_ansible_runner_run


 @responses.activate
-def test_router_provisioning(client):
+def test_router_provisioning(client: TestClient) -> None:
     responses.put(url=TEST_CALLBACK_URL, status=204)

     params = {
@@ -43,7 +44,7 @@ def test_router_provisioning(client):
     # wait two seconds for the run thread to finish
     time.sleep(2)

-    jsonschema.validate(response, PlaybookLaunchResponse.schema())
+    jsonschema.validate(response, PlaybookLaunchResponse.model_json_schema())
     responses.assert_call_count(TEST_CALLBACK_URL, 1)

     assert response["status"] == "ok"
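The .schema() to .model_json_schema() switch is the pydantic v2 spelling of the same thing: both return a JSON-Schema dict for the model, which jsonschema.validate() then checks the raw response payload against. A small self-contained illustration of the pattern, using a made-up model rather than the project's PlaybookLaunchResponse:

import jsonschema
from pydantic import BaseModel


class LaunchResponse(BaseModel):  # stand-in model for illustration only
    status: str


schema = LaunchResponse.model_json_schema()  # pydantic v2; .schema() in v1
jsonschema.validate({"status": "ok"}, schema)  # raises ValidationError on a non-conforming payload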
@@ -3,6 +3,7 @@ from unittest.mock import patch
 import jsonschema
 import responses
+from starlette.testclient import TestClient

 from lso.playbook import PlaybookLaunchResponse
 from test.routes import TEST_CALLBACK_URL, test_ansible_runner_run

@@ -91,7 +92,7 @@ _SUBSCRIPTION_OBJECT = {
 @responses.activate
-def test_ip_trunk_provisioning(client):
+def test_ip_trunk_provisioning(client: TestClient) -> None:
     responses.put(url=TEST_CALLBACK_URL, status=204)

     params = {
@@ -109,14 +110,14 @@ def test_ip_trunk_provisioning(client):
     # wait a second for the run thread to finish
     time.sleep(1)

-    jsonschema.validate(response, PlaybookLaunchResponse.schema())
+    jsonschema.validate(response, PlaybookLaunchResponse.model_json_schema())
     responses.assert_call_count(TEST_CALLBACK_URL, 1)

     assert response["status"] == "ok"


 @responses.activate
-def test_ip_trunk_modification(client):
+def test_ip_trunk_modification(client: TestClient) -> None:
     responses.put(url=TEST_CALLBACK_URL, status=204)

     params = {
@@ -134,14 +135,14 @@ def test_ip_trunk_modification(client):
     # wait a second for the run thread to finish
     time.sleep(1)

-    jsonschema.validate(response, PlaybookLaunchResponse.schema())
+    jsonschema.validate(response, PlaybookLaunchResponse.model_json_schema())
     responses.assert_call_count(TEST_CALLBACK_URL, 1)

     assert response["status"] == "ok"


 @responses.activate
-def test_ip_trunk_deletion(client):
+def test_ip_trunk_deletion(client: TestClient) -> None:
     responses.put(url=TEST_CALLBACK_URL, status=204)

     params = {"callback": TEST_CALLBACK_URL, "dry_run": True, "verb": "terminate", "subscription": _SUBSCRIPTION_OBJECT}
@@ -153,7 +154,7 @@ def test_ip_trunk_deletion(client):
     # wait a second for the run thread to finish
     time.sleep(1)

-    jsonschema.validate(response, PlaybookLaunchResponse.schema())
+    jsonschema.validate(response, PlaybookLaunchResponse.model_json_schema())
     responses.assert_call_count(TEST_CALLBACK_URL, 1)

     assert response["status"] == "ok"
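All three IP trunk tests share one shape: register the callback URL with responses, patch ansible_runner.run with the stub, POST the parameters through the TestClient, give the worker thread a moment, then validate the immediate response against the PlaybookLaunchResponse schema and check the callback was hit exactly once. The middle of each test is collapsed in this diff; the skeleton below is a hedged reconstruction, with the endpoint path, payload and patch target as assumptions:

import time
from unittest.mock import patch

import jsonschema
import responses
from starlette.testclient import TestClient

from lso.playbook import PlaybookLaunchResponse
from test.routes import TEST_CALLBACK_URL, test_ansible_runner_run


@responses.activate
def test_ip_trunk_deletion_sketch(client: TestClient) -> None:
    responses.put(url=TEST_CALLBACK_URL, status=204)

    params = {"callback": TEST_CALLBACK_URL, "dry_run": True, "verb": "terminate"}  # trimmed payload

    with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run):
        rv = client.post("/api/ip_trunk/", json=params)  # endpoint path is an assumption
    response = rv.json()

    # wait a second for the run thread to finish
    time.sleep(1)

    jsonschema.validate(response, PlaybookLaunchResponse.model_json_schema())
    responses.assert_call_count(TEST_CALLBACK_URL, 1)
    assert response["status"] == "ok"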
@@ -9,7 +9,7 @@ import pytest
 from lso import config


-def test_validate_testenv_config(config_file):
+def test_validate_testenv_config(config_file: str) -> None:
     """Load a configuration from a file.

     :param config_file: Configuration file pytest fixture
@@ -22,7 +22,7 @@ def test_validate_testenv_config(config_file):
 @pytest.mark.parametrize(
     "bad_config", [{"name": "bad version", "version": 123}, {"name": "missing version"}, {"version": "missing name"}]
 )
-def test_bad_config(bad_config):
+def test_bad_config(bad_config: dict) -> None:
     with io.StringIO(json.dumps(bad_config)) as file:
         file.seek(0)  # rewind file position to the beginning
         with pytest.raises(jsonschema.ValidationError):
......
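Each parametrized bad configuration breaks one of the schema's requirements (a non-string version, a missing version, a missing name), so loading it is expected to raise jsonschema.ValidationError. For illustration, the same failure mode against a hypothetical schema requiring both fields; the project's real schema lives in lso.config and is not shown in this diff:

import jsonschema
import pytest

# Hypothetical schema for illustration only; the real one is defined in lso.config.
CONFIG_SCHEMA = {
    "type": "object",
    "properties": {"name": {"type": "string"}, "version": {"type": "string"}},
    "required": ["name", "version"],
}

with pytest.raises(jsonschema.ValidationError):
    jsonschema.validate({"name": "missing version"}, CONFIG_SCHEMA)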
 [flake8]
-ignore = D100,D101,D102,D103,D104,D105,D106,D107,D202,E501,RST301,RST304,W503,E203,C417,T202,S101
-; extend-ignore = E203
+ignore = W503
 exclude = .git,.*_cache,.eggs,*.egg-info,__pycache__,venv,.tox,docs
 enable-extensions = G
 select = B,C,D,E,F,G,I,N,S,T,W,B902,B903,R
......
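Collapsing the ignore list down to W503 is where flake8 becomes stricter: the previous exemptions for docstrings (D1xx), line length (E501), assert use (S101) and the rest are gone, so those checks apply again. W503 itself flags a line break before a binary operator, a layout Black-formatted code produces on purpose, which is why it is commonly the one rule left ignored. A short example of the wrapping W503 would otherwise complain about:

# W503 would flag the line break before "and"; leaving it ignored keeps this
# Black-style wrapping of long boolean expressions acceptable.
def run_succeeded(status: str, rc: int) -> bool:
    return (
        status == "success"
        and rc == 0
    )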