diff --git a/gso/__init__.py b/gso/__init__.py
index cf1b39a29faa0d570fd822bbd783433ab45dbe27..94c8f5fa273a584cf542da257c0289952567857d 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -16,7 +16,6 @@ from gso.middlewares import ModifyProcessEndpointResponse
 def init_gso_app() -> OrchestratorCore:
     """Initialise the :term:`GSO` app."""
     app = OrchestratorCore(base_settings=app_settings)
-    # app.register_graphql() # TODO: uncomment this line when the GUI V2 is ready
     app.include_router(api_router, prefix="/api")
     app.add_middleware(ModifyProcessEndpointResponse)
     return app
diff --git a/gso/api/v1/imports.py b/gso/api/v1/imports.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/gso/cli/imports.py b/gso/cli/imports.py
index 014781cbe60b223032dd669f1d9f5f9ee9abe781..27dae014c18fe9a0f1b59f686f5721d4509fb679 100644
--- a/gso/cli/imports.py
+++ b/gso/cli/imports.py
@@ -6,14 +6,14 @@ import json
 import time
 from datetime import UTC, datetime
 from pathlib import Path
-from typing import Any, TypeVar
+from typing import Self, TypeVar
 
 import typer
 import yaml
 from orchestrator.db import db
 from orchestrator.services.processes import start_process
 from orchestrator.types import SubscriptionLifecycle
-from pydantic import BaseModel, ValidationError, root_validator, validator
+from pydantic import BaseModel, ValidationError, field_validator, model_validator
 from sqlalchemy.exc import SQLAlchemyError
 
 from gso.db.models import PartnerTable
@@ -28,7 +28,7 @@ from gso.services.subscriptions import (
     get_subscriptions,
 )
 from gso.utils.helpers import BaseSiteValidatorModel, LAGMember
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 
 app: typer.Typer = typer.Typer()
@@ -58,8 +58,8 @@ class RouterImportModel(BaseModel):
     ts_port: int
     router_vendor: Vendor
     router_role: RouterRole
-    router_lo_ipv4_address: ipaddress.IPv4Address
-    router_lo_ipv6_address: ipaddress.IPv6Address
+    router_lo_ipv4_address: IPv4AddressType
+    router_lo_ipv6_address: IPv6AddressType
     router_lo_iso_address: str
 
 
@@ -112,7 +112,7 @@ class IptrunkImportModel(BaseModel):
             str(router["subscription_id"])
             for router in get_active_router_subscriptions(includes=["subscription_id"])
         }
-    @validator("partner")
+    @field_validator("partner")
     def check_if_partner_exists(cls, value: str) -> str:
         """Validate that the partner exists."""
         try:
@@ -123,7 +123,7 @@
 
         return value
 
-    @validator("side_a_node_id", "side_b_node_id")
+    @field_validator("side_a_node_id", "side_b_node_id")
     def check_if_router_side_is_available(cls, value: str) -> str:
         """Both sides of the trunk must exist in :term:`GSO`."""
         if value not in cls._get_active_routers():
@@ -132,7 +132,7 @@
 
         return value
 
-    @validator("side_a_ae_members", "side_b_ae_members")
+    @field_validator("side_a_ae_members", "side_b_ae_members")
     def check_side_uniqueness(cls, value: list[str]) -> list[str]:
         """:term:`LAG` members must be unique."""
         if len(value) != len(set(value)):
@@ -141,25 +141,21 @@
 
         return value
 
-    @root_validator
-    def check_members(cls, values: dict[str, Any]) -> dict[str, Any]:
+    @model_validator(mode="after")
+    def check_members(self) -> Self:
         """Amount of :term:`LAG` members has to match on side A and B, and meet the minimum requirement."""
-        min_links = values["iptrunk_minimum_links"]
-        side_a_members = values.get("side_a_ae_members", [])
-        side_b_members = values.get("side_b_ae_members", [])
+        len_a = len(self.side_a_ae_members)
+        len_b = len(self.side_b_ae_members)
 
-        len_a = len(side_a_members)
-        len_b = len(side_b_members)
-
-        if len_a < min_links:
-            msg = f"Side A members should be at least {min_links} (iptrunk_minimum_links)"
+        if len_a < self.iptrunk_minimum_links:
+            msg = f"Side A members should be at least {self.iptrunk_minimum_links} (iptrunk_minimum_links)"
             raise ValueError(msg)
 
         if len_a != len_b:
             msg = "Mismatch between Side A and B members"
             raise ValueError(msg)
 
-        return values
+        return self
 
 
 T = TypeVar(
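Note on the gso/cli/imports.py changes above: the import models move from the Pydantic v1 validator API to v2, where per-field checks use @field_validator and cross-field checks use @model_validator(mode="after"), an instance method that returns the validated model. Below is a minimal, self-contained sketch of that pattern; the model and field names are illustrative only and are not part of the codebase.

from typing import Self

from pydantic import BaseModel, ValidationError, field_validator, model_validator


class TrunkSketch(BaseModel):
    # Hypothetical model, only to illustrate the v1 -> v2 validator migration.
    minimum_links: int
    side_a_members: list[str]
    side_b_members: list[str]

    @field_validator("side_a_members", "side_b_members")
    def members_must_be_unique(cls, value: list[str]) -> list[str]:
        # Replaces the v1 @validator; still a per-field check.
        if len(value) != len(set(value)):
            msg = "Items must be unique"
            raise ValueError(msg)
        return value

    @model_validator(mode="after")
    def sides_must_match(self) -> Self:
        # Replaces the v1 @root_validator; runs on the constructed instance.
        if len(self.side_a_members) < self.minimum_links:
            msg = f"Side A members should be at least {self.minimum_links}"
            raise ValueError(msg)
        if len(self.side_a_members) != len(self.side_b_members):
            msg = "Mismatch between Side A and B members"
            raise ValueError(msg)
        return self


try:
    TrunkSketch(minimum_links=1, side_a_members=["ge-0/0/0"], side_b_members=[])
except ValidationError as error:
    # Pydantic v2 reports "Value error, Mismatch ..." with a [type=value_error, input_value=...] suffix.
    print(error)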
diff --git a/gso/migrations/versions/2024-04-20_1ec810b289c0_add_orchestrator_2_1_2_migrations.py b/gso/migrations/versions/2024-04-20_1ec810b289c0_add_orchestrator_2_1_2_migrations.py
deleted file mode 100644
index 89fcbfcd056850a331ca92e379bf392a51a0cdc4..0000000000000000000000000000000000000000
--- a/gso/migrations/versions/2024-04-20_1ec810b289c0_add_orchestrator_2_1_2_migrations.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""remove subscription cancellation workflow.
-
-Revision ID: 1ec810b289c0
-Revises:
-Create Date: 2024-04-02 10:21:08.539591
-
-"""
-from alembic import op
-from orchestrator.migrations.helpers import create_workflow, delete_workflow
-
-# revision identifiers, used by Alembic.
-revision = '1ec810b289c0'
-down_revision = '393acfa175c0'
-branch_labels = None
-# TODO: check it carefuly
-depends_on = '048219045729' # in this revision, SURF has added a new columns to the workflow table like delted_at, so we need to add a dependency on the revision that added the columns to the workflow table.
-
-new_workflows = [
-    {
-        "name": "import_site",
-        "target": "SYSTEM",
-        "description": "Import a site without provisioning it.",
-        "product_type": "Site"
-    },
-    {
-        "name": "import_router",
-        "target": "SYSTEM",
-        "description": "Import a router without provisioning it.",
-        "product_type": "Router"
-    },
-    {
-        "name": "import_iptrunk",
-        "target": "SYSTEM",
-        "description": "Import an IP trunk without provisioning it.",
-        "product_type": "Iptrunk"
-    },
-    {
-        "name": "import_super_pop_switch",
-        "target": "SYSTEM",
-        "description": "Import a Super PoP switch without provisioning it.",
-        "product_type": "SuperPopSwitch"
-    },
-    {
-        "name": "import_office_router",
-        "target": "SYSTEM",
-        "description": "Import an office router without provisioning it.",
-        "product_type": "OfficeRouter"
-    },
-]
-
-
-def upgrade() -> None:
-    conn = op.get_bind()
-    for workflow in new_workflows:
-        create_workflow(conn, workflow)
-
-
-def downgrade() -> None:
-    conn = op.get_bind()
-    for workflow in new_workflows:
-        delete_workflow(conn, workflow["name"])
diff --git a/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py b/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2acc5c1d08cc53b3f7cabebf927a2e7959ec085
--- /dev/null
+++ b/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py
@@ -0,0 +1,24 @@
+"""remove subscription cancellation workflow.
+
+Revision ID: 1ec810b289c0
+Revises:
+Create Date: 2024-04-02 10:21:08.539591
+
+"""
+from alembic import op
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+# revision identifiers, used by Alembic.
+revision = '1ec810b289c0'
+down_revision = '32cad119b7c4'
+branch_labels = None
+# TODO: check it carefully
+depends_on = '048219045729'  # In this revision, SURF added new columns to the workflow table (such as deleted_at), so we need a dependency on the revision that added those columns.
+
+
+def upgrade() -> None:
+    pass
+
+
+def downgrade() -> None:
+    pass
diff --git a/gso/products/product_blocks/iptrunk.py b/gso/products/product_blocks/iptrunk.py
index 2d12b9c55924844338ce68d2af0a4a975e7b7c89..dd65242b30b9e6277699921df1403a8230d6de17 100644
--- a/gso/products/product_blocks/iptrunk.py
+++ b/gso/products/product_blocks/iptrunk.py
@@ -8,6 +8,7 @@ from orchestrator.domain.base import ProductBlockModel, T
 from orchestrator.types import SubscriptionLifecycle, strEnum
 from pydantic import AfterValidator
 from pydantic_forms.validators import validate_unique_list
+from typing_extensions import Doc
 
 from gso.products.product_blocks.router import (
     RouterBlock,
@@ -35,8 +36,15 @@ class IptrunkType(strEnum):
     LEASED = "Leased"
 
 
-LAGMemberList = Annotated[list[T], AfterValidator(validate_unique_list), Len(min_length=0)]
-IptrunkSides = Annotated[list[T], AfterValidator(validate_unique_list), Len(min_length=2, max_length=2)]
+LAGMemberList = Annotated[
+    list[T], AfterValidator(validate_unique_list), Len(min_length=0), Doc("A list of :term:`LAG` member interfaces.")
+]
+IptrunkSides = Annotated[
+    list[T],
+    AfterValidator(validate_unique_list),
+    Len(min_length=2, max_length=2),
+    Doc("A list of IP trunk interfaces that make up one side of a link."),
+]
 
 
 class IptrunkInterfaceBlockInactive(
diff --git a/gso/products/product_blocks/pop_vlan.py b/gso/products/product_blocks/pop_vlan.py
index 9942a97991ed4ea0e72f3ea2ad5f95b5fc580e83..eb54241e4e727bb6c23ea2aa024fbc556b1e7a2e 100644
--- a/gso/products/product_blocks/pop_vlan.py
+++ b/gso/products/product_blocks/pop_vlan.py
@@ -8,6 +8,7 @@ from orchestrator.types import SubscriptionLifecycle
 from pydantic import AfterValidator
 from pydantic_forms.types import strEnum
 from pydantic_forms.validators import validate_unique_list
+from typing_extensions import Doc
 
 from gso.products.product_blocks.lan_switch_interconnect import (
     LanSwitchInterconnectBlock,
@@ -25,7 +26,7 @@ class LayerPreference(strEnum):
     L3 = "L3"
 
 
-PortList = Annotated[list[T], AfterValidator(validate_unique_list)]
+PortList = Annotated[list[T], AfterValidator(validate_unique_list), Doc("A list of unique ports.")]
 
 
 class PopVlanPortBlockInactive(
diff --git a/gso/products/product_blocks/site.py b/gso/products/product_blocks/site.py
index fc34c4d2b33fad86fe7a3068d0307aacc7ce09c1..c6e932c4974c043953593a1b73fb499855bf540f 100644
--- a/gso/products/product_blocks/site.py
+++ b/gso/products/product_blocks/site.py
@@ -3,10 +3,10 @@
 import re
 from typing import Annotated
 
-from annotated_types import doc
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle, strEnum
 from pydantic import AfterValidator, Field
+from typing_extensions import Doc
 
 
 class SiteTier(strEnum):
@@ -48,7 +48,7 @@ LatitudeCoordinate = Annotated[
         le=90,
     ),
     AfterValidator(validate_latitude),
-    doc(
+    Doc(
         "A latitude coordinate, modeled as a string. "
         "The coordinate must match the format conforming to the latitude range of -90 to +90 degrees. "
         "It can be a floating-point number or an integer. Valid examples: 40.7128, -74.0060, 90, -90, 0."
@@ -62,11 +62,11 @@ LongitudeCoordinate = Annotated[
         le=180,
     ),
     AfterValidator(validate_longitude),
-    doc(
-        "A longitude coordinate, modeled as a string. "
-        "The coordinate must match the format conforming to the longitude "
-        "range of -180 to +180 degrees. It can be a floating-point number or an integer. "
-        "Valid examples: 40.7128, -74.0060, 180, -180, 0."
+    Doc(
+        "A longitude coordinate, modeled as a string. "
+        "The coordinate must match the format conforming to the longitude "
+        "range of -180 to +180 degrees. It can be a floating-point number or an integer. "
+        "Valid examples: 40.7128, -74.0060, 180, -180, 0."
     ),
 ]
diff --git a/gso/services/infoblox.py b/gso/services/infoblox.py
index 140b4c851b29b649a05243e9f12c44e729debc03..514c55cbc571d7127429f53fa9eedbc12e4cf9af 100644
--- a/gso/services/infoblox.py
+++ b/gso/services/infoblox.py
@@ -273,7 +273,7 @@ def find_host_by_ip(ip_addr: IPv4AddressType | ipaddress.IPv6Address) -> objects
     """Find a host record in Infoblox by its associated IP address.
 
     :param ip_addr: The IP address of a host that is searched for.
-    :type ip_addr: FancyIPV4Address | ipaddress.IPv6Address
+    :type ip_addr: IPv4AddressType | ipaddress.IPv6Address
     """
     conn, _ = _setup_connection()
     if ip_addr.version == 4:  # noqa: PLR2004, the 4 in IPv4 is well-known and not a "magic value."
@@ -322,7 +322,7 @@ def delete_host_by_ip(ip_addr: IPv4AddressType | ipaddress.IPv6Address) -> None:
     :class:`DeletionError` if no record can be found in Infoblox.
 
     :param ip_addr: The IP address of the host record that should get deleted.
-    :type ip_addr: FancyIPV4Address | ipaddress.IPv6Address
+    :type ip_addr: IPv4AddressType | ipaddress.IPv6Address
     """
     host = find_host_by_ip(ip_addr)
     if host:
diff --git a/gso/settings.py b/gso/settings.py
index f550c0cc90a3bc749dd581af31500cab30de8729..87f42dbc2d63ba882a66c212eecbb862a729b851 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -13,6 +13,7 @@ from typing import Annotated
 
 from pydantic import Field
 from pydantic_settings import BaseSettings
+from typing_extensions import Doc
 
 
 logger = logging.getLogger(__name__)
@@ -46,8 +47,8 @@ class InfoBloxParams(BaseSettings):
     password: str
 
 
-V4Netmask = Annotated[int, Field(ge=0, le=32)]
-V6Netmask = Annotated[int, Field(ge=0, le=128)]
+V4Netmask = Annotated[int, Field(ge=0, le=32), Doc("A valid netmask for an IPv4 network or address.")]
+V6Netmask = Annotated[int, Field(ge=0, le=128), Doc("A valid netmask for an IPv6 network or address.")]
 
 
 class V4NetworkParams(BaseSettings):
diff --git a/gso/utils/shared_enums.py b/gso/utils/shared_enums.py
index 07dbe34641ce5abac50105dbb00387f4e12f08e7..1054876fb8823a002c278455e0fa450c2da31ae6 100644
--- a/gso/utils/shared_enums.py
+++ b/gso/utils/shared_enums.py
@@ -3,9 +3,9 @@
 import ipaddress
 from typing import Annotated
 
-from annotated_types import doc
 from pydantic import Field, PlainSerializer
 from pydantic_forms.types import strEnum
+from typing_extensions import Doc
 
 
 class Vendor(strEnum):
@@ -21,10 +21,10 @@ PortNumber = Annotated[
         gt=0,
         le=49151,
     ),
-    doc(
-        "Constrained integer for valid port numbers. The range from 49152 to 65535 is marked as ephemeral, "
-        "and can therefore not be selected for permanent allocation."
-),
+    Doc(
+        "Constrained integer for valid port numbers. The range from 49152 to 65535 is marked as ephemeral, "
+        "and can therefore not be selected for permanent allocation."
+    ),
 ]
 
 
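Note on the Doc(...) metadata introduced above: annotated_types.doc is replaced by typing_extensions.Doc inside Annotated aliases, which attaches documentation without changing validation behaviour. A small sketch of that pattern under that assumption; the alias name below is hypothetical and not taken from the codebase.

from typing import Annotated

from pydantic import Field, TypeAdapter
from typing_extensions import Doc

# Hypothetical alias: a constrained integer documented via Doc metadata.
SketchPortNumber = Annotated[
    int,
    Field(gt=0, le=49151),
    Doc("Constrained integer for valid port numbers; the ephemeral range 49152-65535 is excluded."),
]

# Doc() is inert metadata for tooling and docs; validation still comes from the Field constraint.
print(TypeAdapter(SketchPortNumber).validate_python(8080))  # prints 8080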
diff --git a/requirements.txt b/requirements.txt
index e9164c69a1c47d6ccd52d42bcd97b8a22b8bf994..25b297607030fdf534b44612cf6417a836e18e13 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-orchestrator-core==2.1.2
+orchestrator-core==2.2.1
 requests==2.31.0
 infoblox-client~=0.6.0
 pycountry==23.12.11
diff --git a/setup.py b/setup.py
index f60d9aaf1a405d3e3ae13839a27258770a582ba6..08fa01b80709230ebc0707717025a2bdbcfaec9e 100644
--- a/setup.py
+++ b/setup.py
@@ -9,13 +9,13 @@ setup(
     url="https://gitlab.software.geant.org/goat/gap/geant-service-orchestrator",
     packages=find_packages(),
     install_requires=[
-        "orchestrator-core==1.3.4",
+        "orchestrator-core==2.2.1",
         "requests==2.31.0",
        "infoblox-client~=0.6.0",
-        "pycountry==22.3.5",
-        "pynetbox==7.2.0",
-        "celery-redbeat==2.1.1",
-        "celery==5.3.4",
+        "pycountry==23.12.11",
+        "pynetbox==7.3.3",
+        "celery-redbeat==2.2.0",
+        "celery==5.3.6",
     ],
     include_package_data=True,
 )
diff --git a/test/cli/test_imports.py b/test/cli/test_imports.py
index 73c0d031c9ebc2a09f4f341e7264a61c04a9cd59..637baa34492b80e0d0a605f5492ae81dd7d4630c 100644
--- a/test/cli/test_imports.py
+++ b/test/cli/test_imports.py
@@ -206,19 +206,14 @@ def test_import_site_twice(mock_start_process, site_data, site_subscription_fact
     # Second identical import should print ValidationError to stdout
     import_sites(site_import_data["path"])
 
-    out, _ = capfd.readouterr()
-    assert (
-        """Validation error: 4 validation errors for SiteImportModel
-site_bgp_community_id
-  site_bgp_community_id must be unique (type=value_error)
-site_internal_id
-  site_internal_id must be unique (type=value_error)
-site_ts_address
-  site_ts_address must be unique (type=value_error)
-site_name
-  site_name must be unique (type=value_error)"""
-        in out
-    )
+    captured_output, _ = capfd.readouterr()
+
+    assert "Validation error: 4 validation errors for SiteImportModel" in captured_output
+    assert "Value error, site_bgp_community_id must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_internal_id must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_ts_address must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_name must be unique [type=value_error, input_value=" in captured_output
+
     assert mock_start_process.call_count == 0
 
 
@@ -229,15 +224,19 @@ def test_import_site_with_invalid_data(mock_start_process, site_data, capfd):
     import_sites(incorrect_site_data["path"])
 
-    out, _ = capfd.readouterr()
+    captured_output, _ = capfd.readouterr()
+    assert "Validation error: 2 validation errors for SiteImportModel" in captured_output
+    assert (
+        """site_latitude
+  Input should be a valid number [type=float_type, input_value=None, input_type=NoneType]"""
+        in captured_output
+    )
     assert (
-        """Validation error: 2 validation errors for SiteImportModel
-site_latitude
-  none is not an allowed value (type=type_error.none.not_allowed)
-site_longitude
-  value is not a valid float (type=type_error.float)"""
-        in out
+        """site_longitude
+  Input should be a valid number, unable to parse string as a number [type=float_parsing, input_value='broken',"""
+        in captured_output
     )
+
     assert mock_start_process.call_count == 0
 
 
@@ -252,13 +251,13 @@ def test_import_router_with_invalid_data(mock_start_process, router_data, capfd)
     broken_data = router_data(hostname="", router_lo_ipv6_address="Not an IP address")
     import_routers(broken_data["path"])
 
-    out, _ = capfd.readouterr()
+    captured_output, _ = capfd.readouterr()
     # The extra space at the end of the next line is required, and not dangling by accident.
-    assert "Validation error: 1 validation error for RouterImportModel" in out
     assert (
-        """router_lo_ipv6_address
-  value is not a valid IPv6 address (type=value_error.ipv6address)"""
-        in out
+        """Validation error: 1 validation error for RouterImportModel
+router_lo_ipv6_address
+  Input is not a valid IPv6 address [type=ip_v6_address, input_value='Not an IP address', input_type=str]"""
+        in captured_output
     )
 
     assert mock_start_process.call_count == 0
@@ -274,14 +273,15 @@ def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, iptru
     broken_data = iptrunk_data(side_a_node="Doesn't exist", side_b_node="Also doesn't exist")
     import_iptrunks(broken_data["path"])
 
-    out, _ = capfd.readouterr()
+    captured_output, _ = capfd.readouterr()
     assert (
         """Validation error: 2 validation errors for IptrunkImportModel
 side_a_node_id
-  Router not found (type=value_error)
+  Value error, Router not found [type=value_error, input_value='', input_type=str]
+    For further information visit https://errors.pydantic.dev/2.5/v/value_error
 side_b_node_id
-  Router not found (type=value_error)"""
-        in out
+  Value error, Router not found [type=value_error, input_value='', input_type=str]"""
+        in captured_output
     )
 
     assert mock_start_process.call_count == 0
@@ -294,14 +294,20 @@ def test_import_iptrunk_non_unique_members_side_a_and_b(mock_start_process, iptr
     broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
     import_iptrunks(broken_data["path"])
 
-    out, _ = capfd.readouterr()
+    captured_output, _ = capfd.readouterr()
+    assert SubscriptionTable.query.count() == 4
+    assert len(get_active_iptrunk_subscriptions()) == 0
     assert (
-        """Validation error: 3 validation errors for IptrunkImportModel
+        """Validation error: 2 validation errors for IptrunkImportModel
 side_a_ae_members
-  Items must be unique (type=value_error)
+  Value error, Items must be unique [type=value_error, input_value=[{'interface_name':"""
+        in captured_output
+    )
+    assert (
+        """
 side_b_ae_members
-  Items must be unique (type=value_error)"""
-        in out
+  Value error, Items must be unique [type=value_error, input_value=[{'interface_name':"""
+        in captured_output
     )
 
     assert mock_start_process.call_count == 0
@@ -317,12 +323,11 @@ def test_import_iptrunk_side_a_member_count_mismatch(mock_start_process, iptrunk
     broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
     import_iptrunks(broken_data["path"])
 
-    out, _ = capfd.readouterr()
+    captured_output, _ = capfd.readouterr()
     assert (
         """Validation error: 1 validation error for IptrunkImportModel
-__root__
-  Mismatch between Side A and B members (type=value_error)"""
-        in out
+  Value error, Mismatch between Side A and B members [type=value_error, input_value={'partner': 'GEANT',"""
+        in captured_output
     )
 
     assert mock_start_process.call_count == 0
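Note on the test/cli/test_imports.py changes above: the assertions now follow Pydantic v2's ValidationError rendering, which prefixes messages with "Value error," or "Input should be ..." and appends [type=..., input_value=..., input_type=...], so the tests check substrings of the captured output instead of exact v1-style blocks. A rough, hypothetical illustration of the difference, using a single-field model that is not part of the test suite:

import ipaddress

from pydantic import BaseModel, ValidationError


class RouterSketch(BaseModel):
    # Hypothetical single-field model, only to show the error rendering.
    loopback_v6: ipaddress.IPv6Address


text = ""
try:
    RouterSketch(loopback_v6="Not an IP address")
except ValidationError as error:
    text = str(error)

# Pydantic v1 rendered roughly:
#   loopback_v6
#     value is not a valid IPv6 address (type=value_error.ipv6address)
# Pydantic v2 renders roughly:
#   loopback_v6
#     Input is not a valid IPv6 address [type=ip_v6_address, input_value='Not an IP address', input_type=str]
# hence the substring-style assertions on the captured CLI output.
assert "Input is not a valid IPv6 address" in text
assert "[type=ip_v6_address" in text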