Skip to content
Snippets Groups Projects
Commit 90afb9b2 authored by Mohammad Torkashvand's avatar Mohammad Torkashvand
Browse files

Add ansible collection and roles to the Dockerfile

parent cd623471
No related branches found
No related tags found
1 merge request !57: add ansible collection and roles to the Dockerfile
Pipeline #84623 passed
......@@ -13,4 +13,7 @@ docs/vale/styles/*
venv/
.venv/
.DS_Store # macOS
Thumbs.db # Windows
config.json
......@@ -12,7 +12,7 @@ run-tox-pipeline:
stage: tox
tags:
- docker-executor
image: python:3.10
image: python:3.11
# Change pip's cache directory to be inside the project directory since we can
# only cache local items.
......
FROM python:3.11
FROM python:3.11-alpine
ARG ARTIFACT_VERSION
WORKDIR /app
RUN apt update && apt install -y gcc libc-dev libffi-dev curl vim && \
RUN apk add --update --no-cache gcc libc-dev libffi-dev curl vim ansible bash openssh && \
addgroup -S appgroup && adduser -S appuser -G appgroup -h /app
# Create ansible.cfg file and set custom paths for collections and roles
RUN mkdir -p /app/gap/collections /app/gap/roles /etc/ansible && \
printf "[defaults]\ncollections_paths = /app/gap/collections\nroles_path = /app/gap/roles" > /etc/ansible/ansible.cfg
RUN pip install \
--pre \
--extra-index-url https://artifactory.software.geant.org/artifactory/api/pypi/geant-swd-pypi/simple \
--target /app \
goat-lso==${ARTIFACT_VERSION}
goat-lso==${ARTIFACT_VERSION} && \
ansible-galaxy collection install \
community.general \
juniper.device \
junipernetworks.junos \
geant.gap_ansible -p /app/gap/collections && \
ansible-galaxy role install Juniper.junos -p /app/gap/roles
RUN chown -R appuser:appgroup /app
USER appuser
EXPOSE 8000
ENTRYPOINT []
CMD ["python", "-m", "uvicorn", "lso.app:app", "--host", "0.0.0.0", "--port", "8000"]
{
"ansible_playbooks_root_dir": "/"
"ansible_playbooks_root_dir": "/app/gap/collections/ansible_collections/geant/gap_ansible/playbooks"
}
FROM python:alpine3.17
LABEL version="1.0"
LABEL maintainer="Geant LSO Team <@geant.org>"

# Build tools (gcc, libc-dev, libffi-dev) are needed to compile Python C
# extensions; bash/curl/vim are included for in-container debugging.
RUN apk add --no-cache bash curl vim gcc libc-dev libffi-dev

WORKDIR /opt/lso
COPY . .

# Install the package in editable mode together with the documentation and
# test tooling, in a single layer to keep the image smaller.
RUN pip install -e . && \
    pip install httpx sphinx sphinx_rtd_theme vale ansible

# Generate documentation
RUN ./build-docs.sh

# Use the bundled example configuration; remove any stale config.json first.
# (`rm -f` already succeeds silently when the file is absent, so no output
# redirection is needed.)
RUN rm -f config.json && ln -s config.json.example config.json

ENTRYPOINT ["./docker/app-run.sh"]
#!/usr/bin/env bash
# Launch the LSO application with the bundled configuration file.
#
# This script is the Docker ENTRYPOINT; `exec` replaces the shell so the
# Python process becomes PID 1 and receives container signals (SIGTERM)
# directly, allowing graceful shutdown instead of being killed after the
# stop timeout.
export SETTINGS_FILENAME=./config.json
exec python -m lso.app
#!/usr/bin/env bash
# Build (if needed), create/start, and health-check the goat-lso container.
goat_name="goat-lso"
goat_image="goat/lso"
goat_tag="1.0"

# Build the image only when it is not already present locally.
# `docker image inspect` is an exact-match check, unlike grepping the
# output of `docker image list`, which can false-match similar names.
if ! docker image inspect "${goat_image}:${goat_tag}" >/dev/null 2>&1; then
    docker build -f docker/Dockerfile -t "${goat_image}:${goat_tag}" .
fi

# Create the container if one does not exist yet (running or stopped).
if [[ $(docker ps -a | grep -c "${goat_image}:${goat_tag}") -eq 0 ]]; then
    docker run -d -p 44444:44444 --name "${goat_name}" "${goat_image}:${goat_tag}" >/dev/null 2>&1
fi

# Start the container if it exists but is not currently running.
if [[ "$(docker container inspect -f '{{.State.Status}}' "${goat_name}")" != "running" ]]; then
    docker start "${goat_name}" >/dev/null 2>&1
fi

# Give the service a moment to bind its port before probing.
sleep 1

# Check endpoints: curl -f exits non-zero on HTTP errors, so test it
# directly instead of inspecting $? afterwards.
if curl -f http://localhost:44444/docs >/dev/null 2>&1; then
    echo "LSO is running. OpenAPI available at http://localhost:44444/docs"
else
    echo "LSO is not running"
fi
#!/usr/bin/env bash
# Stop and remove the goat-lso container, ignoring errors if it is
# already stopped or absent.
container="goat-lso"
docker stop "${container}" >/dev/null 2>&1
docker rm "${container}" >/dev/null 2>&1
......@@ -10,7 +10,7 @@ import os
from typing import TextIO
import jsonschema
from pydantic import BaseModel, DirectoryPath
from pydantic import BaseModel
CONFIG_SCHEMA = {
"$schema": "http://json-schema.org/draft-07/schema#",
......@@ -25,7 +25,7 @@ DEFAULT_REQUEST_TIMEOUT = 10
class Config(BaseModel):
"""Simple Config class that only contains the path to the used Ansible playbooks."""
ansible_playbooks_root_dir: DirectoryPath
ansible_playbooks_root_dir: str
def load_from_file(file: TextIO) -> Config:
......
......@@ -18,8 +18,6 @@ from lso.config import DEFAULT_REQUEST_TIMEOUT
logger = logging.getLogger(__name__)
config_params = config.load()
# enum.StrEnum is only available in python 3.11
class PlaybookJobStatus(str, enum.Enum):
......@@ -50,6 +48,7 @@ class PlaybookLaunchResponse(BaseModel):
def get_playbook_path(playbook_name: str) -> str:
    """Resolve a playbook name to a path under the configured playbooks root.

    :param playbook_name: File name of the playbook, relative to the
        ``ansible_playbooks_root_dir`` setting.
    :return: Joined filesystem path to the playbook.
    """
    # Re-load configuration on every call so the path always reflects the
    # currently active settings file.
    settings = config.load()
    return os.path.join(settings.ansible_playbooks_root_dir, playbook_name)
......
import json
import os
import tempfile
from typing import Any, Generator
from io import StringIO
from typing import Any, Callable, Generator
import pytest
from faker import Faker
......@@ -10,28 +11,42 @@ from fastapi.testclient import TestClient
import lso
@pytest.fixture
def mocked_ansible_runner_run() -> Callable:
    """Yield a drop-in replacement for ``ansible_runner.run``.

    The returned callable ignores all arguments and produces an object that
    mimics a successful runner result (``status``, ``rc`` and ``stdout``).
    """

    class SuccessfulRunner:
        def __init__(self) -> None:
            self.status = "success"
            self.rc = 0
            self.stdout = StringIO("[{'step one': 'results'}, {'step two': 2}]")

    def _run(*args: Any, **kwargs: Any) -> SuccessfulRunner:
        return SuccessfulRunner()

    return _run
@pytest.fixture(scope="session")
def config_data() -> dict[str, str]:
def configuration_data() -> dict[str, str]:
"""Start the server with valid configuration data."""
return {"ansible_playbooks_root_dir": "/"}
return {"ansible_playbooks_root_dir": "/app/gap/collections/ansible_collections/geant/gap_ansible/playbooks"}
@pytest.fixture(scope="session")
def config_file(config_data: dict[str, str]) -> Generator[str, Any, None]:
def data_config_filename(configuration_data: dict[str, str]) -> Generator[str, Any, None]:
"""Fixture that will yield a filename that contains a valid configuration.
:return: Path to valid configuration file
"""
with tempfile.NamedTemporaryFile(mode="w") as file:
file.write(json.dumps(config_data))
file.write(json.dumps(configuration_data))
file.flush()
yield file.name
@pytest.fixture(scope="session")
def client(config_file: str) -> Generator[TestClient, Any, None]:
def client(data_config_filename: str) -> Generator[TestClient, Any, None]:
"""Return a client that can be used to test the server."""
os.environ["SETTINGS_FILENAME"] = config_file
os.environ["SETTINGS_FILENAME"] = data_config_filename
app = lso.create_app()
yield TestClient(app) # wait here until calling context ends
......
from io import StringIO
from typing import Any

# Callback endpoint that the tests expect job-status updates to be sent to.
TEST_CALLBACK_URL = "https://fqdn.abc.xyz/api/resume"


class Runner:
    """Minimal stand-in for an ``ansible_runner`` result object."""

    def __init__(self) -> None:
        # Mirror the attributes the application reads from a real run result.
        self.status = "success"
        self.rc = 0
        self.stdout = StringIO("[{'step one': 'results'}, {'step two': 2}]")


def test_ansible_runner_run(**kwargs: Any) -> Runner:
    """Replacement for ``ansible_runner.run`` that always reports success."""
    return Runner()
import time
from typing import Callable
from unittest.mock import patch
import jsonschema
......@@ -8,7 +9,8 @@ from faker import Faker
from starlette.testclient import TestClient
from lso.playbook import PlaybookLaunchResponse
from test.routes import TEST_CALLBACK_URL, test_ansible_runner_run
TEST_CALLBACK_URL = "https://fqdn.abc.xyz/api/resume"
@pytest.fixture(scope="session")
......@@ -157,7 +159,9 @@ def migration_object(faker: Faker) -> dict:
@responses.activate
def test_ip_trunk_provisioning(client: TestClient, subscription_object: dict) -> None:
def test_ip_trunk_provisioning(
client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable
) -> None:
responses.post(url=TEST_CALLBACK_URL, status=200)
params = {
......@@ -170,7 +174,7 @@ def test_ip_trunk_provisioning(client: TestClient, subscription_object: dict) ->
"subscription": subscription_object,
}
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run) as _:
with patch("lso.playbook.ansible_runner.run", new=mocked_ansible_runner_run) as _:
rv = client.post("/api/ip_trunk/", json=params)
assert rv.status_code == 200
response = rv.json()
......@@ -184,7 +188,9 @@ def test_ip_trunk_provisioning(client: TestClient, subscription_object: dict) ->
@responses.activate
def test_ip_trunk_modification(client: TestClient, subscription_object: dict) -> None:
def test_ip_trunk_modification(
client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable
) -> None:
responses.post(url=TEST_CALLBACK_URL, status=200)
params = {
......@@ -197,7 +203,7 @@ def test_ip_trunk_modification(client: TestClient, subscription_object: dict) ->
"old_subscription": subscription_object,
}
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run) as _:
with patch("lso.playbook.ansible_runner.run", new=mocked_ansible_runner_run) as _:
rv = client.put("/api/ip_trunk/", json=params)
assert rv.status_code == 200
response = rv.json()
......@@ -211,7 +217,7 @@ def test_ip_trunk_modification(client: TestClient, subscription_object: dict) ->
@responses.activate
def test_ip_trunk_deletion(client: TestClient, subscription_object: dict) -> None:
def test_ip_trunk_deletion(client: TestClient, subscription_object: dict, mocked_ansible_runner_run: Callable) -> None:
responses.post(url=TEST_CALLBACK_URL, status=204)
params = {
......@@ -223,7 +229,7 @@ def test_ip_trunk_deletion(client: TestClient, subscription_object: dict) -> Non
"subscription": subscription_object,
}
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run) as _:
with patch("lso.playbook.ansible_runner.run", new=mocked_ansible_runner_run) as _:
rv = client.request(url="/api/ip_trunk/", method=responses.DELETE, json=params)
assert rv.status_code == 200
response = rv.json()
......@@ -237,7 +243,9 @@ def test_ip_trunk_deletion(client: TestClient, subscription_object: dict) -> Non
@responses.activate
def test_ip_trunk_migration(client: TestClient, subscription_object: dict, migration_object: dict) -> None:
def test_ip_trunk_migration(
client: TestClient, subscription_object: dict, migration_object: dict, mocked_ansible_runner_run: Callable
) -> None:
responses.post(url=TEST_CALLBACK_URL, status=204)
params = {
......@@ -251,7 +259,7 @@ def test_ip_trunk_migration(client: TestClient, subscription_object: dict, migra
"new_side": migration_object,
}
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run) as _:
with patch("lso.playbook.ansible_runner.run", new=mocked_ansible_runner_run) as _:
rv = client.post(url="/api/ip_trunk/migrate", json=params)
assert rv.status_code == 200
response = rv.json()
......
import time
from typing import Callable
from unittest.mock import patch
import jsonschema
......@@ -7,11 +8,12 @@ from faker import Faker
from starlette.testclient import TestClient
from lso.playbook import PlaybookLaunchResponse
from test.routes import TEST_CALLBACK_URL, test_ansible_runner_run
TEST_CALLBACK_URL = "https://fqdn.abc.xyz/api/resume"
@responses.activate
def test_router_provisioning(client: TestClient, faker: Faker) -> None:
def test_router_provisioning(client: TestClient, faker: Faker, mocked_ansible_runner_run: Callable) -> None:
responses.put(url=TEST_CALLBACK_URL, status=200)
params = {
......@@ -40,7 +42,7 @@ def test_router_provisioning(client: TestClient, faker: Faker) -> None:
},
}
with patch("lso.playbook.ansible_runner.run", new=test_ansible_runner_run) as _:
with patch("lso.playbook.ansible_runner.run", new=mocked_ansible_runner_run) as _:
rv = client.post("/api/router/", json=params)
assert rv.status_code == 200
response = rv.json()
......
......@@ -9,12 +9,12 @@ import pytest
from lso import config
def test_validate_testenv_config(config_file: str) -> None:
def test_validate_testenv_config(data_config_filename: str) -> None:
"""Load a configuration from a file.
:param config_file: Configuration file pytest fixture
:param data_config_filename: Configuration file pytest fixture
"""
os.environ["SETTINGS_FILENAME"] = config_file
os.environ["SETTINGS_FILENAME"] = data_config_filename
params = config.load()
assert params
......
[tox]
envlist = py310
envlist = py311
[flake8]
ignore = W503
......@@ -15,19 +15,19 @@ per-file-ignores =
[testenv]
passenv = XDG_CACHE_HOME,USE_COMPOSE
setenv =
SETTINGS_FILENAME = config.json.example
SETTINGS_FILENAME = dummy.json
deps =
coverage
-r requirements.txt
commands =
coverage erase
coverage run --source lso -m pytest
coverage xml
coverage html
coverage report --fail-under 80
isort -c .
ruff .
black --check .
mypy .
flake8
coverage erase
coverage run --source lso -m pytest
coverage xml
coverage html
coverage report --fail-under 80
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment