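"""Tests for the CLI import commands in gso.cli.imports."""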
import json
from collections.abc import Callable
from pathlib import Path
from unittest.mock import patch

import pytest

from gso.cli.imports import (
    import_iptrunks,
    import_office_routers,
    import_opengear,
    import_routers,
    import_sites,
    import_super_pop_switches,
)
from gso.products import Router, Site
from gso.products.product_blocks.iptrunk import IptrunkType
from gso.products.product_blocks.router import RouterRole
from gso.products.product_blocks.site import SiteTier
from gso.utils.helpers import iso_from_ipv4
from gso.utils.shared_enums import Vendor
from gso.utils.types.interfaces import PhysicalPortCapacity


##############
#  FIXTURES  #
##############
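#  Each data fixture below is a factory: calling it builds a payload dict (optionally
#  overridden via keyword arguments), writes it to `temp_file` as a single-element JSON
#  list, and returns both the file path and the raw payload for use in assertions.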
@pytest.fixture()
def temp_file(tmp_path) -> Path:
    return tmp_path / "data.json"


@pytest.fixture()
def iptrunk_data(temp_file, router_subscription_factory, faker) -> Callable[..., dict]:
    def _iptrunk_data(
        *,
        ipv4_network=None,
        ipv6_network=None,
        min_links=None,
        isis_metric=None,
        side_a_node=None,
        side_b_node=None,
        side_a_members=None,
        side_b_members=None,
        side_a_ae_name=None,
        side_b_ae_name=None,
    ):
        router_side_a = router_subscription_factory()
        router_side_b = router_subscription_factory()
        ipv4_network = ipv4_network or str(faker.ipv4_network(max_subnet=31))
        ipv6_network = ipv6_network or str(faker.ipv6_network(max_subnet=126))

        iptrunk_data = {
            "id": faker.geant_sid(),
            "config": {
                "common": {
                    "link_speed": PhysicalPortCapacity.HUNDRED_GIGABIT_PER_SECOND,
                    "minimum_links": min_links or 3,
                    "isis_metric": isis_metric or 500,
                    "type": IptrunkType.DARK_FIBER,
                },
                "nodeA": {
                    "name": side_a_node or Router.from_subscription(router_side_a).router.router_fqdn,
                    "ae_name": side_a_ae_name or faker.network_interface(),
                    "port_sid": faker.geant_sid(),
                    "members": side_a_members
                    or [
                        {
                            "interface_name": faker.network_interface(),
                            "interface_description": faker.sentence(),
                        }
                        for _ in range(5)
                    ],
                    "ipv4_address": ipv4_network,
                    "ipv6_address": ipv6_network,
                },
                "nodeB": {
                    "name": side_b_node or Router.from_subscription(router_side_b).router.router_fqdn,
                    "ae_name": side_b_ae_name or faker.network_interface(),
                    "port_sid": faker.geant_sid(),
                    "members": side_b_members
                    or [
                        {
                            "interface_name": faker.network_interface(),
                            "interface_description": faker.sentence(),
                        }
                        for _ in range(5)
                    ],
                    "ipv4_address": ipv4_network,
                    "ipv6_address": ipv6_network,
                },
            },
        }

        temp_file.write_text(json.dumps([iptrunk_data]))
        return {"path": str(temp_file), "data": iptrunk_data}

    return _iptrunk_data


@pytest.fixture()
def site_data(faker, temp_file):
    def _site_data(**kwargs):
        site_data = {
            "site_name": faker.site_name(),
            "site_city": faker.city(),
            "site_country": faker.country(),
            "site_country_code": faker.country_code(),
            "site_latitude": str(faker.latitude()),
            "site_longitude": str(faker.longitude()),
            "site_bgp_community_id": faker.pyint(),
            "site_internal_id": faker.pyint(),
            "site_tier": SiteTier.TIER1,
            "site_ts_address": faker.ipv4(),
        }
        site_data.update(**kwargs)

        temp_file.write_text(json.dumps([site_data]))
        return {"path": str(temp_file), "data": site_data}

    return _site_data


@pytest.fixture()
def router_data(temp_file, faker, site_subscription_factory):
    def _router_data(**kwargs):
        mock_ipv4 = faker.ipv4()
        router_data = {
            "router_site": Site.from_subscription(site_subscription_factory()).site.site_name,
            "hostname": str(faker.ipv4()),
            "ts_port": faker.port_number(is_user=True),
            "router_role": RouterRole.PE,
            "router_vendor": Vendor.JUNIPER,
            "router_lo_ipv4_address": mock_ipv4,
            "router_lo_ipv6_address": str(faker.ipv6()),
            "router_lo_iso_address": iso_from_ipv4(mock_ipv4),
        }
        router_data.update(**kwargs)

        temp_file.write_text(json.dumps([router_data]))
        return {"path": str(temp_file), "data": router_data}

    return _router_data


@pytest.fixture()
def super_pop_switch_data(temp_file, faker, site_subscription_factory):
    def _super_pop_switch_data(**kwargs):
        super_pop_switch_data = {
            "hostname": str(faker.ipv4()),
            "super_pop_switch_site": Site.from_subscription(site_subscription_factory()).site.site_name,
            "super_pop_switch_ts_port": faker.port_number(is_user=True),
            "super_pop_switch_mgmt_ipv4_address": str(faker.ipv4()),
        }
        super_pop_switch_data.update(**kwargs)

        temp_file.write_text(json.dumps([super_pop_switch_data]))
        return {"path": str(temp_file), "data": super_pop_switch_data}

    return _super_pop_switch_data


@pytest.fixture()
def office_router_data(temp_file, faker, site_subscription_factory):
    def _office_router_data(**kwargs):
        office_router_data = {
            "office_router_fqdn": faker.domain_name(levels=4),
            "office_router_site": Site.from_subscription(site_subscription_factory()).site.site_name,
            "office_router_ts_port": faker.port_number(is_user=True),
            "office_router_lo_ipv4_address": str(faker.ipv4()),
            "office_router_lo_ipv6_address": str(faker.ipv6()),
        }
        office_router_data.update(**kwargs)

        temp_file.write_text(json.dumps([office_router_data]))
        return {"path": str(temp_file), "data": office_router_data}

    return _office_router_data


@pytest.fixture()
def opengear_data(temp_file, faker, site_subscription_factory):
    def _opengear_data(**kwargs):
        opengear_data = {
            "opengear_site": Site.from_subscription(site_subscription_factory()).site.site_name,
            "opengear_hostname": faker.domain_name(levels=4),
            "opengear_wan_address": str(faker.ipv4()),
            "opengear_wan_netmask": str(faker.ipv4()),
            "opengear_wan_gateway": str(faker.ipv4()),
        }
        opengear_data.update(**kwargs)

        temp_file.write_text(json.dumps([opengear_data]))
        return {"path": str(temp_file), "data": opengear_data}

    return _opengear_data


###########
#  TESTS  #
###########
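#  All tests patch gso.cli.imports.start_process (and time.sleep where used) so the
#  importers run against the JSON files written by the fixtures without starting real
#  workflows. Successful imports are asserted via call_count; rejected imports via the
#  validation errors captured from stdout with capfd.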


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_iptrunk_success(mock_start_process, mock_sleep, iptrunk_data):
    import_iptrunks(iptrunk_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_site_success(mock_start_process, mock_sleep, site_data):
    import_sites(site_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_site_twice(mock_start_process, mock_sleep, site_data, site_subscription_factory, capfd):
    site_import_data = site_data()
    #  Create an initial site
    site_subscription_factory(
        site_bgp_community_id=site_import_data["data"]["site_bgp_community_id"],
        site_internal_id=site_import_data["data"]["site_internal_id"],
        site_ts_address=site_import_data["data"]["site_ts_address"],
        site_name=site_import_data["data"]["site_name"],
    )

    #  Second identical import should print ValidationError to stdout
    import_sites(site_import_data["path"])

    captured_output, _ = capfd.readouterr()

    assert "Validation error: 4 validation errors for SiteImportModel" in captured_output
    assert "Value error, site_bgp_community_id must be unique [type=value_error, input_value=" in captured_output
    assert "Value error, site_internal_id must be unique [type=value_error, input_value=" in captured_output
    assert "Value error, site_ts_address must be unique [type=value_error, input_value=" in captured_output
    assert "Value error, site_name must be unique [type=value_error, input_value=" in captured_output

    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_site_with_invalid_data(mock_start_process, mock_sleep, site_data, capfd):
    #  Invalid data: site_latitude is None and site_longitude is not a valid coordinate
    incorrect_site_data = site_data(site_latitude=None, site_longitude="broken")

    import_sites(incorrect_site_data["path"])

    captured_output, _ = capfd.readouterr()
    assert "Validation error: 2 validation errors for SiteImportModel" in captured_output
    assert (
        """site_latitude
  Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]"""
        in captured_output
    )
    assert (
        """site_longitude
  Value error, Invalid longitude coordinate. Valid examples: '40.7128', '-74.0060', '180', '-180', '0'. [type=value_e"""
        in captured_output
    )

    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_router_success(mock_start_process, mock_sleep, site_subscription_factory, router_data):
    import_routers(router_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_router_with_invalid_data(mock_start_process, mock_sleep, router_data, capfd):
    broken_data = router_data(hostname="", router_lo_ipv6_address="Not an IP address")
    import_routers(broken_data["path"])

    captured_output, _ = capfd.readouterr()
    #  The extra space at the end of the next line is required, and not dangling by accident.
    assert (
        """Validation error: 1 validation error for RouterImportModel
router_lo_ipv6_address
  Input is not a valid IPv6 address [type=ip_v6_address, input_value='Not an IP address', input_type=str]"""
        in captured_output
    )
    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_iptrunk_successful(mock_start_process, mock_sleep, iptrunk_data):
    import_iptrunks(iptrunk_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, mock_sleep, iptrunk_data, capfd):
    broken_data = iptrunk_data(side_a_node="Doesn't exist", side_b_node="Also doesn't exist")
    import_iptrunks(broken_data["path"])

    captured_output, _ = capfd.readouterr()
    assert (
        """Validation error: 2 validation errors for IptrunkImportModel
side_a_node_id
  Value error, Router  not found [type=value_error, input_value='', input_type=str]
    For further information visit https://errors.pydantic.dev/2.7/v/value_error
side_b_node_id
  Value error, Router  not found [type=value_error, input_value='', input_type=str]"""
        in captured_output
    )
    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_iptrunk_non_unique_members_side_a_and_b(mock_start_process, mock_sleep, iptrunk_data, faker, capfd):
    duplicate_interface = {"interface_name": faker.network_interface(), "interface_description": faker.sentence()}
    side_a_members = [duplicate_interface for _ in range(5)]
    side_b_members = [duplicate_interface for _ in range(5)]
    broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
    import_iptrunks(broken_data["path"])

    captured_output, _ = capfd.readouterr()

    assert "Validation error: 2 validation errors for IptrunkImportModel" in captured_output
    assert (
        """side_a_ae_members
  List must be unique [type=unique_list, input_value=[{'interface_name':"""
    ) in captured_output
    assert (
        """side_b_ae_members
  List must be unique [type=unique_list, input_value=[{'interface_name':"""
    ) in captured_output

    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_iptrunk_side_a_member_count_mismatch(mock_start_process, mock_sleep, iptrunk_data, faker, capfd):
    side_a_members = [
        {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(5)
    ]
    side_b_members = [
        {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(6)
    ]
    broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
    import_iptrunks(broken_data["path"])

    captured_output, _ = capfd.readouterr()
    assert (
        """Validation error: 1 validation error for IptrunkImportModel
  Value error, Mismatch between Side A and B members [type=value_error, input_value={'partner': 'GEANT',"""
        in captured_output
    )
    assert mock_start_process.call_count == 0


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_office_router_success(mock_start_process, mock_sleep, office_router_data):
    import_office_routers(office_router_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.time.sleep")
@patch("gso.cli.imports.start_process")
def test_import_super_pop_switch_success(mock_start_process, mock_sleep, super_pop_switch_data):
    import_super_pop_switches(super_pop_switch_data()["path"])
    assert mock_start_process.call_count == 1


@patch("gso.cli.imports.start_process")
def test_import_opengear_success(mock_start_process, opengear_data):
    import_opengear(opengear_data()["path"])
    assert mock_start_process.call_count == 1