Skip to content
Snippets Groups Projects
Select Git revision
  • 94d6671e299e0ac169fcd2c90282537b98442eba
  • develop default
  • master protected
  • feature/frontend-tests
  • 0.99
  • 0.98
  • 0.97
  • 0.96
  • 0.95
  • 0.94
  • 0.93
  • 0.92
  • 0.91
  • 0.90
  • 0.89
  • 0.88
  • 0.87
  • 0.86
  • 0.85
  • 0.84
  • 0.83
  • 0.82
  • 0.81
  • 0.80
24 results

setup.py

Blame
  • conftest.py 8.87 KiB
    import contextlib
    import ipaddress
    import json
    import os
    import socket
    import tempfile
    from pathlib import Path
    
    import orchestrator
    import pytest
    from alembic import command
    from alembic.config import Config
    from faker import Faker
    from faker.providers import BaseProvider
    from orchestrator import app_settings
    from orchestrator.db import Database, db
    from orchestrator.db.database import ENGINE_ARGUMENTS, SESSION_ARGUMENTS, BaseModel
    from sqlalchemy import create_engine
    from sqlalchemy.engine import make_url
    from sqlalchemy.orm import scoped_session, sessionmaker
    from starlette.testclient import TestClient
    
    from gso.main import init_gso_app
    
    
    class FakerProvider(BaseProvider):
        """Faker provider that generates random IPv4 and IPv6 networks."""

        def ipv4_network(self):
            """Return a random ``ipaddress.IPv4Network`` with a /24 prefix."""
            address = self.generator.ipv4()
            # Interface arithmetic masks out the host bits, giving the /24 the address lives in.
            return ipaddress.IPv4Interface(address + "/24").network

        def ipv6_network(self):
            """Return a random ``ipaddress.IPv6Network`` with a /64 prefix."""
            address = self.generator.ipv6()
            return ipaddress.IPv6Interface(address + "/64").network
    
    
    @pytest.fixture(scope="session")
    def faker() -> Faker:
        """Provide a session-scoped Faker instance extended with the network provider."""
        generator = Faker()
        generator.add_provider(FakerProvider)
        return generator
    
    
    @pytest.fixture(scope="session")
    def configuration_data() -> dict:
        """Yield the OSS parameter dictionary used throughout the test session.

        A throw-away TCP socket is bound to an ephemeral port and held open for
        the lifetime of the fixture; the yielded dictionary mirrors the structure
        of the production OSS parameters file.
        """
        with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
            s.bind(("", 0))
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            oss_params = {
                "GENERAL": {"public_hostname": "https://gap.geant.org"},
                "RESOURCE_MANAGEMENT": {"todo": "todo"},
                "IPAM": {
                    "INFOBLOX": {
                        "scheme": "https",
                        "wapi_version": "v2.12",
                        "host": "10.0.0.1",
                        "username": "robot-user",
                        "password": "robot-user-password",
                    },
                    "LO": {
                        "V4": {"containers": [], "networks": ["10.255.255.0/26"], "mask": 32},
                        "V6": {"containers": [], "networks": ["dead:beef::/80"], "mask": 128},
                        "domain_name": ".lo",
                        "dns_view": "default",
                    },
                    "TRUNK": {
                        "V4": {"containers": ["10.255.255.0/24", "10.255.254.0/24"], "networks": [], "mask": 31},
                        "V6": {"containers": ["dead:beef::/64", "dead:beee::/64"], "networks": [], "mask": 126},
                        "domain_name": ".trunk",
                        "dns_view": "default",
                    },
                    "GEANT_IP": {
                        "V4": {"containers": ["10.255.255.0/24", "10.255.254.0/24"], "networks": [], "mask": 31},
                        "V6": {"containers": ["dead:beef::/64", "dead:beee::/64"], "networks": [], "mask": 126},
                        "domain_name": ".geantip",
                        "dns_view": "default",
                    },
                    "SI": {
                        "V4": {"containers": ["10.255.253.128/25"], "networks": [], "mask": 31},
                        "V6": {"containers": [], "networks": [], "mask": 126},
                        "domain_name": ".geantip",
                        "dns_view": "default",
                    },
                    "LT_IAS": {
                        "V4": {"containers": ["10.255.255.0/24"], "networks": [], "mask": 31},
                        "V6": {"containers": ["dead:beef:cc::/48"], "networks": [], "mask": 126},
                        "domain_name": ".geantip",
                        "dns_view": "default",
                    },
                },
                "PROVISIONING_PROXY": {
                    "scheme": "https",
                    "api_base": "localhost:44444",
                    "auth": "Bearer <token>",
                    "api_version": 1123,
                },
            }
            yield oss_params
    
    
    @pytest.fixture(scope="session")
    def data_config_filename(configuration_data) -> str:
        """Write the configuration data to a temporary JSON file and publish its path.

        The file is created atomically with ``tempfile.mkstemp`` instead of the
        original hand-rolled ``os.urandom`` name plus exclusive ``open`` (which
        was race-prone and redundant), and it is removed again when the session
        ends so no stale files accumulate in the temp directory. The path is
        also exported via the ``OSS_PARAMS_FILENAME`` environment variable, as
        the application expects.

        Args:
        ----
        configuration_data: The configuration dictionary to serialise.

        Yields:
        ------
        The path of the temporary configuration file.
        """

        fd, file_name = tempfile.mkstemp(suffix=".json")
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as f:
                json.dump(configuration_data, f)

            os.environ["OSS_PARAMS_FILENAME"] = file_name

            yield file_name
        finally:
            # The original fixture leaked the temp file; clean it up after the session.
            with contextlib.suppress(OSError):
                os.remove(file_name)
    
    
    @pytest.fixture(scope="session")
    def db_uri():
        """Return the test-database URI, overridable via the DATABASE_URI_TEST environment variable."""

        default_uri = "postgresql://nwa:nwa@localhost/gso-test-db"
        return os.environ.get("DATABASE_URI_TEST", default_uri)
    
    
    def run_migrations(db_uri: str) -> None:
        """Configure alembic and apply all migrations to the given database.

        Args:
        ----
        db_uri: The database uri configuration to run the migration on.

        Returns:
        -------
        None
        """

        here = Path(__file__).resolve().parent
        app_settings.DATABASE_URI = db_uri

        config = Config(file_=here / "../gso/alembic.ini")
        config.set_main_option("sqlalchemy.url", db_uri)
        config.set_main_option("script_location", str(here / "../gso/migrations"))

        # Extend the version locations so the core orchestrator schema migrations
        # are applied alongside the GSO-specific ones.
        existing_locations = config.get_main_option("version_locations")
        orchestrator_versions = f"{os.path.dirname(orchestrator.__file__)}/migrations/versions/schema"
        config.set_main_option("version_locations", f"{existing_locations} {orchestrator_versions}")

        command.upgrade(config, "heads")
    
    
    @pytest.fixture(scope="session")
    def database(db_uri):
        """(Re)create the test database, run migrations, and drop it again afterwards.

        Args:
        ----
        db_uri: The database uri configuration to run the migration on.
        """

        db.update(Database(db_uri))
        url = make_url(db_uri)
        db_to_create = url.database
        # Connect to the maintenance "postgres" database so the test database can be dropped/created.
        engine = create_engine(url.set(database="postgres"))

        with engine.connect() as conn:
            conn.execute("COMMIT;")
            # Kick any lingering sessions off the test database before dropping it.
            conn.execute(f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname='{db_to_create}';")  # noqa
            conn.execute(f'DROP DATABASE IF EXISTS "{db_to_create}";')  # noqa
            conn.execute("COMMIT;")
            conn.execute(f'CREATE DATABASE "{db_to_create}";')  # noqa

        run_migrations(db_uri)
        db.wrapped_database.engine = create_engine(db_uri, **ENGINE_ARGUMENTS)

        try:
            yield
        finally:
            db.wrapped_database.engine.dispose()
            with engine.connect() as conn:
                conn.execute("COMMIT;")
                # Terminate all connections to the database
                conn.execute(
                    f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname='{db_to_create}';"  # noqa
                )
                conn.execute(f'DROP DATABASE IF EXISTS "{db_to_create}";')  # noqa
    
    
    @pytest.fixture(autouse=True)
    def db_session(database):
        """Ensure that tests are executed within a transactional scope that automatically rolls back after completion.

        This fixture facilitates a pattern known as 'transactional tests'. At the start, it establishes a connection and
        begins an overarching transaction. Any database operations performed within the test function—whether they commit
        or not happen within the context of this master transaction.

        From the perspective of the test function, it seems as though changes are getting committed to the database,
        enabling the tests to query and assert the persistence of data. Yet, once the test completes, this fixture
        intervenes to roll back the master transaction. This ensures a clean slate after each test, preventing tests from
        polluting the database state for subsequent tests.

        Benefits:
        - Each test runs in isolation with a pristine database state.
        - Avoids the overhead of recreating the database schema or re-seeding data between tests.

        Args:
        ----
        database: A fixture reference that initializes the database.
        """

        # Bind a dedicated connection for this test; closing() guarantees it is released even on failure.
        with contextlib.closing(db.wrapped_database.engine.connect()) as test_connection:
            # Create a new session factory for this context.
            session_factory = sessionmaker(bind=test_connection, **SESSION_ARGUMENTS)
            # NOTE(review): reaches into the private _scopefunc of the orchestrator Database wrapper so that
            # session scoping matches the production wiring — confirm this stays valid across orchestrator upgrades.
            scoped_session_instance = scoped_session(session_factory, scopefunc=db.wrapped_database._scopefunc)

            # Point the database session to this new scoped session.
            db.wrapped_database.session_factory = session_factory
            db.wrapped_database.scoped_session = scoped_session_instance

            # Set the query for the base model.
            BaseModel.set_query(scoped_session_instance.query_property())
            # Master transaction: everything the test does on this connection nests inside it.
            transaction = test_connection.begin()
            try:
                yield
            finally:
                # Undo whatever the test did, then detach the scoped session so the next test starts clean.
                transaction.rollback()
                scoped_session_instance.remove()
    
    
    @pytest.fixture(scope="session", autouse=True)
    def fastapi_app(database, db_uri):
        """Initialise and return the GSO FastAPI application for the test session."""

        app_settings.DATABASE_URI = db_uri
        gso_app = init_gso_app(settings=app_settings)
        return gso_app
    
    
    @pytest.fixture(scope="session")
    def test_client(fastapi_app):
        """Return a Starlette test client wrapping the GSO FastAPI application."""
        return TestClient(fastapi_app)