Skip to content
Snippets Groups Projects
Commit 38e8ada3 authored by geant-release-service's avatar geant-release-service
Browse files

Finished release 2.0.

parents 19fb9f05 c069564e
No related branches found
No related tags found
No related merge requests found
Pipeline #86950 passed
Showing
with 596 additions and 468 deletions
``gso.workflows.tasks`` ``gso.workflows.super_pop_switch``
======================= ==================================
.. automodule:: gso.workflows.tasks .. automodule:: gso.workflows.super_pop_switch
:members: :members:
:show-inheritance: :show-inheritance:
...@@ -12,8 +12,5 @@ Submodules ...@@ -12,8 +12,5 @@ Submodules
:maxdepth: 2 :maxdepth: 2
:titlesonly: :titlesonly:
create_imported_super_pop_switch
import_super_pop_switch import_super_pop_switch
import_office_router
import_iptrunk
import_router
import_site
``gso.workflows.tasks.import_office_router``
============================================
.. automodule:: gso.workflows.tasks.import_office_router
:members:
:show-inheritance:
``gso.workflows.tasks.import_site``
===================================
.. automodule:: gso.workflows.tasks.import_site
:members:
:show-inheritance:
...@@ -4,15 +4,15 @@ Quickstart ...@@ -4,15 +4,15 @@ Quickstart
Development environment and dependencies Development environment and dependencies
---------------------------------------- ----------------------------------------
- Install python 3.11 if you do not have it already: - Install python 3.12 if you do not have it already:
- ``add-apt-repository ppa:deadsnakes/ppa`` - ``add-apt-repository ppa:deadsnakes/ppa``
- ``apt install python3.11 python3.11-distutils`` - ``apt install python3.12 python3.12-distutils``
- Follow Steps 1 and 2 from here to install dependencies and setup DB: - Follow Steps 1 and 2 from here to install dependencies and setup DB:
`<https://workfloworchestrator.org/orchestrator-core/workshops/beginner/debian/>`_ `<https://workfloworchestrator.org/orchestrator-core/workshops/beginner/debian/>`_
- To install the orchestrator GUI, you can follow the steps 5 and 6 from the previous link. - To install the orchestrator GUI, you can follow the steps 5 and 6 from the previous link.
- Create a virtual environment: - Create a virtual environment:
- ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh`` - ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh``
- ``mkvirtualenv --python python3.11 gso`` - ``mkvirtualenv --python python3.12 gso``
- To use the virtual environment: - To use the virtual environment:
- ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh`` - ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh``
- ``workon gso`` - ``workon gso``
...@@ -25,7 +25,7 @@ Do all this inside the virtual environment. ...@@ -25,7 +25,7 @@ Do all this inside the virtual environment.
- Clone this repository - Clone this repository
- ``pip install -r requirements.txt`` - ``pip install -r requirements.txt``
- If you get an error because your pip version is too old, run this: - If you get an error because your pip version is too old, run this:
``curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11`` ``curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12``
- ``pip install -e .`` - ``pip install -e .``
- Create an ``oss-params.json`` based on the ``oss-params-example.json`` file inside ``/gso``. - Create an ``oss-params.json`` based on the ``oss-params-example.json`` file inside ``/gso``.
- Export the oss-params file: ``export OSS_PARAMS_FILENAME="/path/to/oss-params.json"`` - Export the oss-params file: ``export OSS_PARAMS_FILENAME="/path/to/oss-params.json"``
......
...@@ -26,3 +26,7 @@ TWAMP ...@@ -26,3 +26,7 @@ TWAMP
Pydantic Pydantic
UUID UUID
SNMP SNMP
V?LAN
OPA
OIDC
HTTPBearer
...@@ -5,19 +5,22 @@ from gso import monkeypatches # noqa: F401, isort:skip ...@@ -5,19 +5,22 @@ from gso import monkeypatches # noqa: F401, isort:skip
import typer import typer
from orchestrator import OrchestratorCore, app_settings from orchestrator import OrchestratorCore, app_settings
from orchestrator.cli.main import app as cli_app from orchestrator.cli.main import app as cli_app
from orchestrator.graphql import SCALAR_OVERRIDES
# noinspection PyUnresolvedReferences # noinspection PyUnresolvedReferences
import gso.products import gso.products
import gso.workflows # noqa: F401 import gso.workflows # noqa: F401
from gso.api import router as api_router from gso.api import router as api_router
from gso.middlewares import ModifyProcessEndpointResponse from gso.graphql_api.types import GSO_SCALAR_OVERRIDES
SCALAR_OVERRIDES.update(GSO_SCALAR_OVERRIDES)
def init_gso_app() -> OrchestratorCore: def init_gso_app() -> OrchestratorCore:
"""Initialise the :term:`GSO` app.""" """Initialise the :term:`GSO` app."""
app = OrchestratorCore(base_settings=app_settings) app = OrchestratorCore(base_settings=app_settings)
app.register_graphql()
app.include_router(api_router, prefix="/api") app.include_router(api_router, prefix="/api")
app.add_middleware(ModifyProcessEndpointResponse)
return app return app
......
...@@ -2,14 +2,12 @@ ...@@ -2,14 +2,12 @@
from fastapi import APIRouter from fastapi import APIRouter
from gso.api.v1.imports import router as imports_router
from gso.api.v1.network import router as network_router from gso.api.v1.network import router as network_router
from gso.api.v1.processes import router as processes_router from gso.api.v1.processes import router as processes_router
from gso.api.v1.subscriptions import router as subscriptions_router from gso.api.v1.subscriptions import router as subscriptions_router
router = APIRouter() router = APIRouter()
router.include_router(imports_router)
router.include_router(subscriptions_router) router.include_router(subscriptions_router)
router.include_router(processes_router) router.include_router(processes_router)
router.include_router(network_router) router.include_router(network_router)
""":term:`GSO` :term:`API` endpoints that import different types of existing services."""
import ipaddress
from typing import Any
from uuid import UUID
from fastapi import Depends, HTTPException, status
from fastapi.routing import APIRouter
from orchestrator.services import processes
from pydantic import BaseModel, root_validator, validator
from gso.auth.security import opa_security_default
from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
from gso.products.product_blocks.router import RouterRole
from gso.products.product_blocks.site import SiteTier
from gso.services import subscriptions
from gso.services.partners import PartnerNotFoundError, get_partner_by_name
from gso.utils.helpers import BaseSiteValidatorModel, LAGMember
from gso.utils.shared_enums import PortNumber, Vendor
# Router for every import endpoint; all routes share the /imports prefix and
# require the OPA authorisation dependency to pass before the handler runs.
router = APIRouter(prefix="/imports", tags=["Imports"], dependencies=[Depends(opa_security_default)])
class ImportResponseModel(BaseModel):
    """The model of a response given when services are imported using the :term:`API`."""
    pid: UUID  # id of the workflow process that was started for this import
    detail: str  # human-readable status message returned to the caller
class SiteImportModel(BaseSiteValidatorModel):
    """The required input for importing an existing :class:`gso.products.product_types.site`."""
    site_name: str
    site_city: str
    site_country: str
    site_country_code: str
    site_latitude: float
    site_longitude: float
    site_bgp_community_id: int
    site_internal_id: int
    site_tier: SiteTier
    site_ts_address: str  # terminal-server address — presumably an IP or FQDN; any validation lives in BaseSiteValidatorModel (TODO confirm)
    partner: str  # name of the partner that owns this site
class RouterImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.router`."""
    partner: str  # name of the partner that owns this router
    router_site: str  # presumably the name of the site hosting the router — verify against the workflow
    hostname: str
    ts_port: int  # terminal-server port number
    router_vendor: Vendor
    router_role: RouterRole
    router_lo_ipv4_address: ipaddress.IPv4Address  # loopback IPv4 address
    router_lo_ipv6_address: ipaddress.IPv6Address  # loopback IPv6 address
    router_lo_iso_address: str  # loopback ISO address (IS-IS)
class IptrunkImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.products.product_types.iptrunk`."""

    partner: str  # name of the partner that owns this trunk; must exist (see validator below)
    geant_s_sid: str | None  # GÉANT service id, when known
    iptrunk_type: IptrunkType
    iptrunk_description: str
    iptrunk_speed: PhysicalPortCapacity
    iptrunk_minimum_links: int  # lower bound for the number of LAG members on each side
    iptrunk_isis_metric: int
    side_a_node_id: str  # subscription id of the router on side A; must be an active router
    side_a_ae_iface: str
    side_a_ae_geant_a_sid: str | None
    side_a_ae_members: list[LAGMember]
    side_b_node_id: str  # subscription id of the router on side B; must be an active router
    side_b_ae_iface: str
    side_b_ae_geant_a_sid: str | None
    side_b_ae_members: list[LAGMember]
    iptrunk_ipv4_network: ipaddress.IPv4Network
    iptrunk_ipv6_network: ipaddress.IPv6Network

    @classmethod
    def _get_active_routers(cls) -> set[str]:
        """Return the subscription ids of all currently active router subscriptions."""
        return {
            str(router["subscription_id"])
            for router in subscriptions.get_active_router_subscriptions(includes=["subscription_id"])
        }

    @validator("partner")
    def check_if_partner_exists(cls, value: str) -> str:
        """Validate that the partner exists."""
        try:
            get_partner_by_name(value)
        except PartnerNotFoundError as e:
            msg = f"partner {value} not found"
            raise ValueError(msg) from e
        return value

    @validator("side_a_node_id", "side_b_node_id")
    def check_if_router_side_is_available(cls, value: str) -> str:
        """Both sides of the trunk must exist in :term:`GSO`."""
        if value not in cls._get_active_routers():
            msg = f"Router {value} not found"
            raise ValueError(msg)
        return value

    @validator("side_a_ae_members", "side_b_ae_members")
    def check_side_uniqueness(cls, value: list[str]) -> list[str]:
        """:term:`LAG` members must be unique."""
        if len(value) != len(set(value)):
            msg = "Items must be unique"
            raise ValueError(msg)
        return value

    @root_validator
    def check_members(cls, values: dict[str, Any]) -> dict[str, Any]:
        """Amount of :term:`LAG` members has to match on side A and B, and meet the minimum requirement.

        In pydantic v1, fields that already failed their own validation are absent from
        ``values``, so index access would raise ``KeyError`` and mask the real validation
        error. Skip the cross-field check when the minimum-links field is missing.
        """
        min_links = values.get("iptrunk_minimum_links")
        if min_links is None:
            return values
        side_a_members = values.get("side_a_ae_members", [])
        side_b_members = values.get("side_b_ae_members", [])
        len_a = len(side_a_members)
        len_b = len(side_b_members)
        if len_a < min_links:
            msg = f"Side A members should be at least {min_links} (iptrunk_minimum_links)"
            raise ValueError(msg)
        if len_a != len_b:
            msg = "Mismatch between Side A and B members"
            raise ValueError(msg)
        return values
class SuperPopSwitchImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.super_pop_switch`."""
    partner: str  # name of the partner that owns this switch
    super_pop_switch_site: str  # presumably the name of the hosting site — verify against the workflow
    hostname: str
    super_pop_switch_ts_port: PortNumber  # terminal-server port
    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address  # management IPv4 address
class OfficeRouterImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.office_router`."""
    partner: str  # name of the partner that owns this office router
    office_router_site: str  # presumably the name of the hosting site — verify against the workflow
    office_router_fqdn: str
    office_router_ts_port: PortNumber  # terminal-server port
    office_router_lo_ipv4_address: ipaddress.IPv4Address  # loopback IPv4 address
    office_router_lo_ipv6_address: ipaddress.IPv6Address  # loopback IPv6 address
def _start_process(process_name: str, data: dict) -> UUID:
    """Kick off a workflow process and translate failures into HTTP errors.

    :param process_name: Name of the workflow to start.
    :param data: Initial input handed to the workflow as its single form entry.
    :return: The process id of the started workflow.
    :raises HTTPException: With status 500 when the process cannot be started,
        or when the started process immediately reports the ``failed`` status.
    """
    pid: UUID = processes.start_process(process_name, [data])
    if pid is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to start the process.",
        )
    started = processes._get_process(pid)  # noqa: SLF001
    if started.last_status != "failed":
        return pid
    raise HTTPException(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        detail=f"Process {pid} failed because of an internal error. {started.failed_reason}",
    )
@router.post("/sites", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
def import_site(site: SiteImportModel) -> dict[str, Any]:
"""Import a site by running the import_site workflow.
:param site: The site information to be imported.
:type site: SiteImportModel
:return: A dictionary containing the process id of the started process and detail message.
:rtype: dict[str, Any]
:raises HTTPException: If the site already exists or if there's an error in the process.
"""
pid = _start_process("import_site", site.dict())
return {"detail": "Site added successfully.", "pid": pid}
@router.post("/routers", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
def import_router(router_data: RouterImportModel) -> dict[str, Any]:
"""Import a router by running the import_router workflow.
:param router_data: The router information to be imported.
:type router_data: RouterImportModel
:return: A dictionary containing the process id of the started process and detail message.
:rtype: dict[str, Any]
:raises HTTPException: If there's an error in the process.
"""
pid = _start_process("import_router", router_data.dict())
return {"detail": "Router has been added successfully", "pid": pid}
@router.post("/iptrunks", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
def import_iptrunk(iptrunk_data: IptrunkImportModel) -> dict[str, Any]:
"""Import an iptrunk by running the import_iptrunk workflow.
:param iptrunk_data: The iptrunk information to be imported.
:type iptrunk_data: IptrunkImportModel
:return: A dictionary containing the process id of the started process and detail message.
:rtype: dict[str, Any]
:raises HTTPException: If there's an error in the process.
"""
pid = _start_process("import_iptrunk", iptrunk_data.dict())
return {"detail": "Iptrunk has been added successfully", "pid": pid}
@router.post("/super-pop-switches", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
def import_super_pop_switch(super_pop_switch_data: SuperPopSwitchImportModel) -> dict[str, Any]:
"""Import a Super PoP switch by running the import_super_pop_switch workflow.
:param super_pop_switch_data: The Super PoP switch information to be imported.
:type super_pop_switch_data: SuperPopSwitchImportModel
:return: A dictionary containing the process id of the started process and detail message.
:rtype: dict[str, Any]
:raises HTTPException: If there's an error in the process.
"""
pid = _start_process("import_super_pop_switch", super_pop_switch_data.dict())
return {"detail": "Super PoP switch has been added successfully", "pid": pid}
@router.post("/office-routers", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
def import_office_router(office_router_data: OfficeRouterImportModel) -> dict[str, Any]:
"""Import a office router by running the import_office_router workflow.
:param office_router_data: The office router information to be imported.
:type office_router_data: OfficeRouterImportModel
:return: A dictionary containing the process id of the started process and detail message.
:rtype: dict[str, Any]
:raises HTTPException: If there's an error in the process.
"""
pid = _start_process("import_office_router", office_router_data.dict())
return {"detail": "Office router has been added successfully", "pid": pid}
...@@ -252,7 +252,7 @@ class OIDCUser(HTTPBearer): ...@@ -252,7 +252,7 @@ class OIDCUser(HTTPBearer):
return return
response = await async_request.get(self.openid_url + "/.well-known/openid-configuration") response = await async_request.get(self.openid_url + "/.well-known/openid-configuration")
self.openid_config = OIDCConfig.parse_obj(response.json()) self.openid_config = OIDCConfig.model_validate(response.json())
async def userinfo(self, async_request: AsyncClient, token: str) -> OIDCUserModel: async def userinfo(self, async_request: AsyncClient, token: str) -> OIDCUserModel:
"""Get the userinfo from the openid server. """Get the userinfo from the openid server.
......
...@@ -6,7 +6,8 @@ with external authentication providers for enhanced security management. ...@@ -6,7 +6,8 @@ with external authentication providers for enhanced security management.
Todo: Remove token and sensitive data from OPA console and API. Todo: Remove token and sensitive data from OPA console and API.
""" """
from pydantic import BaseSettings, Field from pydantic import Field
from pydantic_settings import BaseSettings
class Oauth2LibSettings(BaseSettings): class Oauth2LibSettings(BaseSettings):
......
""":term:`CLI` command for importing data to coreDB.""" """:term:`CLI` commands for importing data to coreDB."""
import csv import csv
import ipaddress import ipaddress
import json import json
import time
from datetime import UTC, datetime from datetime import UTC, datetime
from pathlib import Path from pathlib import Path
from typing import TypeVar from typing import Self, TypeVar
import typer import typer
import yaml import yaml
from orchestrator.db import db from orchestrator.db import db
from pydantic import ValidationError from orchestrator.services.processes import start_process
from orchestrator.types import SubscriptionLifecycle
from pydantic import BaseModel, ValidationError, field_validator, model_validator
from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.exc import SQLAlchemyError
from gso.api.v1.imports import (
IptrunkImportModel,
OfficeRouterImportModel,
RouterImportModel,
SiteImportModel,
SuperPopSwitchImportModel,
import_iptrunk,
import_office_router,
import_router,
import_site,
import_super_pop_switch,
)
from gso.db.models import PartnerTable from gso.db.models import PartnerTable
from gso.services.subscriptions import get_active_subscriptions_by_field_and_value from gso.products import ProductType
from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
from gso.products.product_blocks.router import RouterRole
from gso.services.partners import PartnerNotFoundError, get_partner_by_name
from gso.services.subscriptions import (
get_active_router_subscriptions,
get_active_subscriptions_by_field_and_value,
get_subscriptions,
)
from gso.utils.helpers import BaseSiteValidatorModel, LAGMember
from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
app: typer.Typer = typer.Typer() app: typer.Typer = typer.Typer()
class SiteImportModel(BaseSiteValidatorModel):
    """The required input for importing an existing :class:`gso.products.product_types.site`.

    The class body is empty: all fields and validation rules come from
    :class:`gso.utils.helpers.BaseSiteValidatorModel`.
    """
class RouterImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.router`."""
    partner: str  # name of the partner that owns this router
    router_site: str  # presumably the name of the site hosting the router — verify against the workflow
    hostname: str
    ts_port: int  # terminal-server port number
    router_vendor: Vendor
    router_role: RouterRole
    router_lo_ipv4_address: IPv4AddressType  # loopback IPv4 address
    router_lo_ipv6_address: IPv6AddressType  # loopback IPv6 address
    router_lo_iso_address: str  # loopback ISO address (IS-IS)
class SuperPopSwitchImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.super_pop_switch`."""
    partner: str  # name of the partner that owns this switch
    super_pop_switch_site: str  # presumably the name of the hosting site — verify against the workflow
    hostname: str
    super_pop_switch_ts_port: PortNumber  # terminal-server port
    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address  # management IPv4 address
class OfficeRouterImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.product.product_types.office_router`."""
    partner: str  # name of the partner that owns this office router
    office_router_site: str  # presumably the name of the hosting site — verify against the workflow
    office_router_fqdn: str
    office_router_ts_port: PortNumber  # terminal-server port
    office_router_lo_ipv4_address: ipaddress.IPv4Address  # loopback IPv4 address
    office_router_lo_ipv6_address: ipaddress.IPv6Address  # loopback IPv6 address
class IptrunkImportModel(BaseModel):
    """Required fields for importing an existing :class:`gso.products.product_types.iptrunk`."""

    partner: str  # name of the partner that owns this trunk; must exist (see validator below)
    geant_s_sid: str | None  # GÉANT service id, when known
    iptrunk_type: IptrunkType
    iptrunk_description: str | None = None
    iptrunk_speed: PhysicalPortCapacity
    iptrunk_minimum_links: int  # lower bound for the number of LAG members on each side
    iptrunk_isis_metric: int
    side_a_node_id: str  # subscription id of the router on side A; must be an active router
    side_a_ae_iface: str
    side_a_ae_geant_a_sid: str | None
    side_a_ae_members: list[LAGMember]
    side_b_node_id: str  # subscription id of the router on side B; must be an active router
    side_b_ae_iface: str
    side_b_ae_geant_a_sid: str | None
    side_b_ae_members: list[LAGMember]
    iptrunk_ipv4_network: ipaddress.IPv4Network
    iptrunk_ipv6_network: ipaddress.IPv6Network

    @classmethod
    def _get_active_routers(cls) -> set[str]:
        # Subscription ids of all active router subscriptions, used to validate both trunk sides.
        return {
            str(router["subscription_id"]) for router in get_active_router_subscriptions(includes=["subscription_id"])
        }

    @field_validator("partner")
    def check_if_partner_exists(cls, value: str) -> str:
        """Validate that the partner exists."""
        try:
            get_partner_by_name(value)
        except PartnerNotFoundError as e:
            msg = f"partner {value} not found"
            raise ValueError(msg) from e
        return value

    @field_validator("side_a_node_id", "side_b_node_id")
    def check_if_router_side_is_available(cls, value: str) -> str:
        """Both sides of the trunk must exist in :term:`GSO`."""
        if value not in cls._get_active_routers():
            msg = f"Router {value} not found"
            raise ValueError(msg)
        return value

    @field_validator("side_a_ae_members", "side_b_ae_members")
    def check_side_uniqueness(cls, value: list[str]) -> list[str]:
        """:term:`LAG` members must be unique."""
        # NOTE(review): relies on LAGMember being hashable for the set() dedup — confirm.
        if len(value) != len(set(value)):
            msg = "Items must be unique"
            raise ValueError(msg)
        return value

    @model_validator(mode="after")
    def check_members(self) -> Self:
        """Amount of :term:`LAG` members has to match on side A and B, and meet the minimum requirement."""
        # "after" mode: runs on the fully validated model, so all fields are guaranteed present.
        len_a = len(self.side_a_ae_members)
        len_b = len(self.side_b_ae_members)
        if len_a < self.iptrunk_minimum_links:
            msg = f"Side A members should be at least {self.iptrunk_minimum_links} (iptrunk_minimum_links)"
            raise ValueError(msg)
        if len_a != len_b:
            msg = "Mismatch between Side A and B members"
            raise ValueError(msg)
        return self
T = TypeVar( T = TypeVar(
"T", SiteImportModel, RouterImportModel, IptrunkImportModel, SuperPopSwitchImportModel, OfficeRouterImportModel "T", SiteImportModel, RouterImportModel, IptrunkImportModel, SuperPopSwitchImportModel, OfficeRouterImportModel
) )
...@@ -40,10 +155,9 @@ common_filepath_option = typer.Option( ...@@ -40,10 +155,9 @@ common_filepath_option = typer.Option(
) )
def read_data(filepath: str) -> dict: def _read_data(file_path: Path) -> dict:
"""Read data from a JSON or YAML file.""" """Read data from a JSON or YAML file."""
typer.echo(f"Starting import from {filepath}") typer.echo(f"Starting import from {file_path!s}")
file_path = Path(filepath)
file_extension = file_path.suffix.lower() file_extension = file_path.suffix.lower()
with file_path.open("r") as f: with file_path.open("r") as f:
...@@ -58,95 +172,107 @@ def read_data(filepath: str) -> dict: ...@@ -58,95 +172,107 @@ def read_data(filepath: str) -> dict:
raise typer.Exit(code=1) raise typer.Exit(code=1)
def generic_import_data( def _get_router_subscription_id(node_name: str) -> str | None:
filepath: str, """Get the subscription id for a router by its node name."""
import_model: type[T], subscriptions = get_active_subscriptions_by_field_and_value(
import_function: callable, # type: ignore[valid-type] "router_fqdn",
name_key: str, node_name,
)
if subscriptions:
return str(subscriptions[0].subscription_id)
return None
def _import_partners_from_csv(file_path: Path) -> list[dict]:
"""Read partners from a CSV file."""
with Path.open(file_path, encoding="utf-8") as csv_file:
csv_reader = csv.DictReader(csv_file)
return list(csv_reader)
def _generic_import_product(
file_path: Path, imported_product_type: ProductType, workflow_suffix: str, name_key: str, import_model: type[T]
) -> None: ) -> None:
"""Import data from a JSON or YAML file.""" """Import subscriptions from a JSON or YAML file."""
successfully_imported_data = [] successfully_imported_data = []
data = read_data(filepath) data = _read_data(file_path)
for details in data: for details in data:
details["partner"] = "GEANT" details["partner"] = "GEANT"
typer.echo(f"Importing {name_key}: {details[name_key]}") typer.echo(f"Creating imported {name_key}: {details[name_key]}")
try: try:
initial_data = import_model(**details) initial_data = import_model(**details)
import_function(initial_data) # type: ignore[misc] start_process(f"create_imported_{workflow_suffix}", [initial_data.dict()])
successfully_imported_data.append(getattr(initial_data, name_key)) successfully_imported_data.append(getattr(initial_data, name_key))
typer.echo( typer.echo(
f"Successfully imported {name_key}: {getattr(initial_data, name_key)}", f"Successfully created {name_key}: {getattr(initial_data, name_key)}",
) )
except ValidationError as e: except ValidationError as e:
typer.echo(f"Validation error: {e}") typer.echo(f"Validation error: {e}")
typer.echo("Waiting for the dust to settle before moving on the importing new products...")
time.sleep(1)
# Migrate new products from imported to "full" counterpart.
imported_products = get_subscriptions(
[imported_product_type], lifecycles=[SubscriptionLifecycle.ACTIVE], includes=["subscription_id"]
)
for subscription_id in imported_products:
typer.echo(f"Importing {subscription_id}")
start_process(f"import_{workflow_suffix}", [subscription_id])
if successfully_imported_data: if successfully_imported_data:
typer.echo(f"Successfully imported {name_key}s:") typer.echo(f"Successfully created imported {name_key}s:")
for item in successfully_imported_data: for item in successfully_imported_data:
typer.echo(f"- {item}") typer.echo(f"- {item}")
typer.echo(f"Please validate no more imported {workflow_suffix} products exist anymore in the database.")
@app.command() @app.command()
def import_sites(filepath: str = common_filepath_option) -> None: def import_sites(filepath: str = common_filepath_option) -> None:
"""Import sites into GSO.""" """Import sites into GSO."""
# Use the import_data function to handle common import logic _generic_import_product(Path(filepath), ProductType.IMPORTED_SITE, "site", "site_name", SiteImportModel)
generic_import_data(filepath, SiteImportModel, import_site, "site_name")
@app.command() @app.command()
def import_routers(filepath: str = common_filepath_option) -> None: def import_routers(filepath: str = common_filepath_option) -> None:
"""Import routers into GSO.""" """Import routers into GSO."""
# Use the import_data function to handle common import logic _generic_import_product(Path(filepath), ProductType.IMPORTED_ROUTER, "router", "hostname", RouterImportModel)
generic_import_data(filepath, RouterImportModel, import_router, "hostname")
@app.command() @app.command()
def import_super_pop_switches(filepath: str = common_filepath_option) -> None: def import_super_pop_switches(filepath: str = common_filepath_option) -> None:
"""Import Super PoP Switches into GSO.""" """Import Super PoP Switches into GSO."""
# Use the import_data function to handle common import logic _generic_import_product(
generic_import_data(filepath, SuperPopSwitchImportModel, import_super_pop_switch, "hostname") Path(filepath),
ProductType.IMPORTED_SUPER_POP_SWITCH,
"super_pop_switch",
"hostname",
SuperPopSwitchImportModel,
)
@app.command() @app.command()
def import_office_routers(filepath: str = common_filepath_option) -> None: def import_office_routers(filepath: str = common_filepath_option) -> None:
"""Import office routers into GSO.""" """Import office routers into GSO."""
# Use the import_data function to handle common import logic _generic_import_product(
generic_import_data(filepath, OfficeRouterImportModel, import_office_router, "office_router_fqdn") Path(filepath),
ProductType.IMPORTED_OFFICE_ROUTER,
"office_router",
def get_router_subscription_id(node_name: str) -> str | None: "office_router_fqdn",
"""Get the subscription id for a router by its node name.""" OfficeRouterImportModel,
subscriptions = get_active_subscriptions_by_field_and_value(
"router_fqdn",
node_name,
) )
if subscriptions:
return str(subscriptions[0].subscription_id)
return None
@app.command() @app.command()
def import_iptrunks(filepath: str = common_filepath_option) -> None: def import_iptrunks(filepath: str = common_filepath_option) -> None:
"""Import IP trunks into GSO.""" """Import IP trunks into GSO."""
successfully_imported_data = [] successfully_imported_data = []
data = read_data(filepath) data = _read_data(Path(filepath))
for trunk in data: for trunk in data:
ipv4_network_a = ipaddress.ip_network( ipv4_network_a = ipaddress.IPv4Network(trunk["config"]["nodeA"]["ipv4_address"], strict=False)
trunk["config"]["nodeA"]["ipv4_address"], ipv4_network_b = ipaddress.IPv4Network(trunk["config"]["nodeB"]["ipv4_address"], strict=False)
strict=False, ipv6_network_a = ipaddress.IPv6Network(trunk["config"]["nodeA"]["ipv6_address"], strict=False)
) ipv6_network_b = ipaddress.IPv6Network(trunk["config"]["nodeB"]["ipv6_address"], strict=False)
ipv4_network_b = ipaddress.ip_network(
trunk["config"]["nodeB"]["ipv4_address"],
strict=False,
)
ipv6_network_a = ipaddress.ip_network(
trunk["config"]["nodeA"]["ipv6_address"],
strict=False,
)
ipv6_network_b = ipaddress.ip_network(
trunk["config"]["nodeB"]["ipv6_address"],
strict=False,
)
# Check if IPv4 networks are equal # Check if IPv4 networks are equal
if ipv4_network_a == ipv4_network_b: if ipv4_network_a == ipv4_network_b:
iptrunk_ipv4_network = ipv4_network_a iptrunk_ipv4_network = ipv4_network_a
...@@ -175,48 +301,45 @@ def import_iptrunks(filepath: str = common_filepath_option) -> None: ...@@ -175,48 +301,45 @@ def import_iptrunks(filepath: str = common_filepath_option) -> None:
iptrunk_speed=trunk["config"]["common"]["link_speed"], iptrunk_speed=trunk["config"]["common"]["link_speed"],
iptrunk_minimum_links=trunk["config"]["common"]["minimum_links"], iptrunk_minimum_links=trunk["config"]["common"]["minimum_links"],
iptrunk_isis_metric=trunk["config"]["common"]["isis_metric"], iptrunk_isis_metric=trunk["config"]["common"]["isis_metric"],
side_a_node_id=get_router_subscription_id( side_a_node_id=_get_router_subscription_id(trunk["config"]["nodeA"]["name"]) or "",
trunk["config"]["nodeA"]["name"],
)
or "",
side_a_ae_iface=trunk["config"]["nodeA"]["ae_name"], side_a_ae_iface=trunk["config"]["nodeA"]["ae_name"],
side_a_ae_geant_a_sid=trunk["config"]["nodeA"]["port_sid"], side_a_ae_geant_a_sid=trunk["config"]["nodeA"]["port_sid"],
side_a_ae_members=trunk["config"]["nodeA"]["members"], side_a_ae_members=trunk["config"]["nodeA"]["members"],
side_b_node_id=get_router_subscription_id( side_b_node_id=_get_router_subscription_id(trunk["config"]["nodeB"]["name"]) or "",
trunk["config"]["nodeB"]["name"],
)
or "",
side_b_ae_iface=trunk["config"]["nodeB"]["ae_name"], side_b_ae_iface=trunk["config"]["nodeB"]["ae_name"],
side_b_ae_geant_a_sid=trunk["config"]["nodeB"]["port_sid"], side_b_ae_geant_a_sid=trunk["config"]["nodeB"]["port_sid"],
side_b_ae_members=trunk["config"]["nodeB"]["members"], side_b_ae_members=trunk["config"]["nodeB"]["members"],
iptrunk_ipv4_network=iptrunk_ipv4_network, # type:ignore[arg-type] iptrunk_ipv4_network=iptrunk_ipv4_network,
iptrunk_ipv6_network=iptrunk_ipv6_network, # type:ignore[arg-type] iptrunk_ipv6_network=iptrunk_ipv6_network,
) )
import_iptrunk(initial_data) start_process("create_imported_iptrunk", [initial_data.dict()])
successfully_imported_data.append(trunk["id"]) successfully_imported_data.append(trunk["id"])
typer.echo(f"Successfully imported IP Trunk: {trunk['id']}") typer.echo(f"Successfully imported IP Trunk: {trunk['id']}")
except ValidationError as e: except ValidationError as e:
typer.echo(f"Validation error: {e}") typer.echo(f"Validation error: {e}")
typer.echo("Waiting for the dust to settle before moving on the importing new products...")
time.sleep(1)
trunk_ids = get_subscriptions(
[ProductType.IMPORTED_IP_TRUNK], lifecycles=[SubscriptionLifecycle.ACTIVE], includes=["subscription_id"]
)
for subscription_id in trunk_ids:
typer.echo(f"Migrating iptrunk {subscription_id}")
start_process("import_iptrunk", [subscription_id])
if successfully_imported_data: if successfully_imported_data:
typer.echo("Successfully imported IP Trunks:") typer.echo("Successfully imported IP Trunks:")
for item in successfully_imported_data: for item in successfully_imported_data:
typer.echo(f"- {item}") typer.echo(f"- {item}")
def import_partners_from_csv(file_path: Path) -> list[dict]:
"""Read partners from a CSV file."""
with Path.open(file_path, encoding="utf-8") as csv_file:
csv_reader = csv.DictReader(csv_file)
return list(csv_reader)
@app.command() @app.command()
def import_partners(file_path: str = typer.Argument(..., help="Path to the CSV file containing partners")) -> None: def import_partners(file_path: str = typer.Argument(..., help="Path to the CSV file containing partners")) -> None:
"""Import partners from a CSV file into the database.""" """Import partners from a CSV file into the database."""
typer.echo(f"Importing partners from {file_path} ...") typer.echo(f"Importing partners from {file_path} ...")
partners = import_partners_from_csv(Path(file_path)) partners = _import_partners_from_csv(Path(file_path))
try: try:
for partner in partners: for partner in partners:
......
"""graphql module."""
"""Map some Orchestrator types to scalars."""
from ipaddress import IPv4Network, IPv6Network
from typing import Any, NewType
import strawberry
from orchestrator.graphql.types import serialize_to_string
from strawberry.custom_scalar import ScalarDefinition, ScalarWrapper
IPv4NetworkType = strawberry.scalar(
NewType("IPv4NetworkType", str),
description="Represent the Orchestrator IPv4Network data type",
serialize=serialize_to_string,
parse_value=lambda v: v,
)
IPv6NetworkType = strawberry.scalar(
NewType("IPv6NetworkType", str),
description="Represent the Orchestrator IPv6Network data type",
serialize=serialize_to_string,
parse_value=lambda v: v,
)
GSO_SCALAR_OVERRIDES: dict[object, Any | ScalarWrapper | ScalarDefinition] = {
IPv4Network: IPv4NetworkType,
IPv6Network: IPv6NetworkType,
}
"""Custom middlewares for the GSO API."""
import json
import re
from collections.abc import Callable
from typing import Any
from fastapi import Request
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import Response
from starlette.status import HTTP_200_OK
class ModifyProcessEndpointResponse(BaseHTTPMiddleware):
    """Middleware to modify the response for Process details endpoint.

    Oversized ``callback_result`` outputs embedded in a process' step states
    are replaced by a URL pointing at the dedicated callback-results endpoint,
    which keeps the process-details payload small.
    """

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        """Middleware to modify the response for Process details endpoint.

        :param request: The incoming HTTP request.
        :type request: Request
        :param call_next: The next middleware or endpoint in the stack.
        :type call_next: Callable
        :return: The modified HTTP response.
        :rtype: Response
        """
        response = await call_next(request)
        # NOTE(review): `match` (not `fullmatch`) also matches sub-paths like
        # /api/processes/<uuid>/...; _modify_response_body is a no-op for
        # bodies that lack the expected shape, so this is harmless.
        path_pattern = re.compile(
            r"/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
        )

        match = path_pattern.match(request.url.path)

        if match and response.status_code == HTTP_200_OK:
            # Drain the streaming body so it can be inspected and rewritten.
            response_body = b""
            async for chunk in response.body_iterator:
                response_body += chunk
            try:
                json_content = json.loads(response_body)
                await self._modify_response_body(json_content, request)
                modified_response_body = json.dumps(json_content).encode()
                # The body length changed, so content-length must be updated.
                headers = dict(response.headers)
                headers["content-length"] = str(len(modified_response_body))
                return Response(
                    content=modified_response_body,
                    status_code=response.status_code,
                    headers=headers,
                    media_type=response.media_type,
                )
            except json.JSONDecodeError:
                # The body iterator was already consumed above, so the original
                # response can no longer stream itself; rebuild it from the
                # buffered bytes instead of returning an empty body.
                return Response(
                    content=response_body,
                    status_code=response.status_code,
                    headers=dict(response.headers),
                    media_type=response.media_type,
                )

        return response

    @staticmethod
    async def _get_token(request: Request) -> str:
        """Get the token from the request headers.

        :param request: The incoming HTTP request.
        :type request: Request
        :return: The token from the request headers in specific format.
        :rtype: str
        """
        authorization_header = request.headers.get("Authorization")
        if authorization_header:
            # removeprefix (rather than str.replace) only strips a leading
            # "Bearer " and cannot corrupt a token that happens to contain
            # that substring somewhere inside it.
            token = authorization_header.removeprefix("Bearer ")
            return f"?token={token}"
        return ""

    async def _modify_response_body(self, response_body: dict[str, Any], request: Request) -> None:
        """Modify the response body in place, as needed.

        :param response_body: The response body in dictionary format.
        :type response_body: dict[str, Any]
        :param request: The incoming HTTP request.
        :type request: Request
        :return: None
        """
        max_output_length = 500
        token = await self._get_token(request)
        try:
            for step in response_body["steps"]:
                if step["state"].get("callback_result", None):
                    callback_result = step["state"]["callback_result"]
                    if callback_result and isinstance(callback_result, str):
                        callback_result = json.loads(callback_result)
                    if callback_result.get("output") and len(callback_result["output"]) > max_output_length:
                        # Replace the oversized inline output with a link to the
                        # callback-results endpoint (token appended if present).
                        callback_result[
                            "output"
                        ] = f'{request.base_url}api/v1/processes/steps/{step["step_id"]}/callback-results{token}'
                    step["state"]["callback_result"] = callback_result
        except (AttributeError, KeyError, TypeError):
            # Body does not have the expected process-details shape; leave it
            # untouched rather than failing the whole request.
            pass
...@@ -15,7 +15,7 @@ config = context.config ...@@ -15,7 +15,7 @@ config = context.config
# This line sets up loggers basically. # This line sets up loggers basically.
logger = logging.getLogger("alembic.env") logger = logging.getLogger("alembic.env")
config.set_main_option("sqlalchemy.url", app_settings.DATABASE_URI) config.set_main_option("sqlalchemy.url", str(app_settings.DATABASE_URI))
target_metadata = BaseModel.metadata target_metadata = BaseModel.metadata
......
...@@ -10,9 +10,9 @@ from alembic import op ...@@ -10,9 +10,9 @@ from alembic import op
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '393acfa175c0' revision = '393acfa175c0'
down_revision = None down_revision = '4ec89ab289c0'
branch_labels = ('data',) branch_labels = None
depends_on = '4ec89ab289c0' depends_on = None
def upgrade() -> None: def upgrade() -> None:
......
"""Add imported varieties of existing products.
Revision ID: 3b73ee683cec
Revises: 393acfa175c0
Create Date: 2024-04-19 15:57:38.082516
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '3b73ee683cec'
down_revision = '393acfa175c0'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the five 'Imported ...' product variants and link their product blocks."""
    conn = op.get_bind()
    # Register one imported product per existing product type. These act as
    # entry points for subscriptions that already exist in the network.
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported IP trunk', 'An IP trunk that already existed in the network, and is imported into GSO', 'ImportedIptrunk', 'IMP_IP_TRUNK', 'active') RETURNING products.product_id
    """))
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported router', 'A router that already existed in the network, and is imported into GSO', 'ImportedRouter', 'IMP_RTR', 'active') RETURNING products.product_id
    """))
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported site', 'A site that already existed in the network, and is imported into GSO', 'ImportedSite', 'IMP_SITE', 'active') RETURNING products.product_id
    """))
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported super PoP switch', 'A super PoP switch that already existed, and is imported into GSO', 'ImportedSuperPopSwitch', 'IMP_SPOP_SWITCH', 'active') RETURNING products.product_id
    """))
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported office router', 'An office router that already existed in the network, and is imported into GSO', 'ImportedOfficeRouter', 'IMP_OFFICE_RTR', 'active') RETURNING products.product_id
    """))
    # Link each imported product to the product block of its non-imported
    # counterpart, so both share the exact same data model.
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported IP trunk')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('IptrunkBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported router')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('RouterBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported site')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SuperPopSwitchBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported office router')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OfficeRouterBlock')))
    """))
def downgrade() -> None:
    """Remove the imported products and everything that depends on them."""
    conn = op.get_bind()
    # First detach the shared product blocks from the imported products.
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported IP trunk')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('IptrunkBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported router')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('RouterBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported site')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SuperPopSwitchBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported office router')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OfficeRouterBlock'))
    """))
    # Then delete processes, subscription instances and subscriptions of the
    # imported products, in dependency order, before removing the products.
    conn.execute(sa.text("""
    DELETE FROM processes WHERE processes.pid IN (SELECT processes_subscriptions.pid FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site'))))
    """))
    conn.execute(sa.text("""
    DELETE FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instances WHERE subscription_instances.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site'))
    """))
    conn.execute(sa.text("""
    DELETE FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')
    """))
"""Add creation workflows for imported products.
Revision ID: ab8d805d27b3
Revises: 3b73ee683cec
Create Date: 2024-04-19 16:21:07.304696
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'ab8d805d27b3'
down_revision = '3b73ee683cec'
branch_labels = None
depends_on = None
from orchestrator.migrations.helpers import create_workflow, delete_workflow
# Creation workflows to register for the imported products. Each entry is
# passed verbatim to orchestrator-core's ``create_workflow`` migration helper.
new_workflows = [
    {
        "name": "create_imported_site",
        "target": "CREATE",
        "description": "Import Site",
        "product_type": "ImportedSite"
    },
    {
        "name": "create_imported_router",
        "target": "CREATE",
        "description": "Import router",
        "product_type": "ImportedRouter"
    },
    {
        "name": "create_imported_iptrunk",
        "target": "CREATE",
        "description": "Import iptrunk",
        "product_type": "ImportedIptrunk"
    },
    {
        "name": "create_imported_super_pop_switch",
        "target": "CREATE",
        "description": "Import Super PoP switch",
        "product_type": "ImportedSuperPopSwitch"
    },
    {
        "name": "create_imported_office_router",
        "target": "CREATE",
        "description": "Import office router",
        "product_type": "ImportedOfficeRouter"
    }
]
def upgrade() -> None:
    """Register every workflow listed in ``new_workflows``."""
    connection = op.get_bind()
    for wf in new_workflows:
        create_workflow(connection, wf)
def downgrade() -> None:
    """Remove the workflows that were registered in ``upgrade``."""
    connection = op.get_bind()
    for wf in new_workflows:
        delete_workflow(connection, wf["name"])
"""Add product import workflows.
Revision ID: c12ec1d9bd92
Revises: ab8d805d27b3
Create Date: 2024-04-23 12:57:51.227269
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'c12ec1d9bd92'
down_revision = 'ab8d805d27b3'
branch_labels = None
depends_on = None
from orchestrator.migrations.helpers import create_workflow, delete_workflow
# Import (MODIFY-target) workflows to register for the imported products. Each
# entry is passed verbatim to orchestrator-core's ``create_workflow`` helper.
new_workflows = [
    {
        "name": "import_site",
        "target": "MODIFY",
        "description": "Import Site",
        "product_type": "ImportedSite"
    },
    {
        "name": "import_office_router",
        "target": "MODIFY",
        "description": "Import OfficeRouter",
        "product_type": "ImportedOfficeRouter"
    },
    {
        "name": "import_super_pop_switch",
        "target": "MODIFY",
        "description": "Import SuperPopSwitch",
        "product_type": "ImportedSuperPopSwitch"
    },
    {
        "name": "import_router",
        "target": "MODIFY",
        "description": "Import Router",
        "product_type": "ImportedRouter"
    },
    {
        "name": "import_iptrunk",
        "target": "MODIFY",
        "description": "Import Iptrunk",
        "product_type": "ImportedIptrunk"
    }
]
def upgrade() -> None:
    """Register every workflow listed in ``new_workflows``."""
    connection = op.get_bind()
    for wf in new_workflows:
        create_workflow(connection, wf)
def downgrade() -> None:
    """Remove the workflows that were registered in ``upgrade``."""
    connection = op.get_bind()
    for wf in new_workflows:
        delete_workflow(connection, wf["name"])
"""Add Opengear product.
Revision ID: 32cad119b7c4
Revises: c12ec1d9bd92
Create Date: 2024-04-26 11:12:36.852353
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '32cad119b7c4'
down_revision = 'c12ec1d9bd92'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the Opengear product, its product block and its resource types."""
    conn = op.get_bind()
    # The product and its (new) product block.
    conn.execute(sa.text("""
    INSERT INTO products (name, description, product_type, tag, status) VALUES ('Opengear', 'An Opengear', 'Opengear', 'OPENGEAR', 'active') RETURNING products.product_id
    """))
    conn.execute(sa.text("""
    INSERT INTO product_blocks (name, description, tag, status) VALUES ('OpengearBlock', 'An OpengearBlock', 'OPENGEAR_BLOCK', 'active') RETURNING product_blocks.product_block_id
    """))
    # Resource types that make up the Opengear block.
    conn.execute(sa.text("""
    INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_address', 'The WAN address of the Opengear device.') RETURNING resource_types.resource_type_id
    """))
    conn.execute(sa.text("""
    INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_netmask', 'The WAN netmask of the Opengear device.') RETURNING resource_types.resource_type_id
    """))
    conn.execute(sa.text("""
    INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_gateway', 'The WAN gateway of the Opengear device.') RETURNING resource_types.resource_type_id
    """))
    conn.execute(sa.text("""
    INSERT INTO resource_types (resource_type, description) VALUES ('opengear_hostname', 'The hostname of the Opengear device.') RETURNING resource_types.resource_type_id
    """))
    # Wire product -> block, block -> SiteBlock, and block -> resource types.
    conn.execute(sa.text("""
    INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Opengear')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_block_relations (in_use_by_id, depends_on_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask')))
    """))
    conn.execute(sa.text("""
    INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway')))
    """))
def downgrade() -> None:
    """Remove the Opengear product, its block, resource types and dependants."""
    conn = op.get_bind()
    # Detach each resource type from the OpengearBlock and delete any
    # subscription instance values recorded for it.
    conn.execute(sa.text("""
    DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname'))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address'))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask'))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway'))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway'))
    """))
    # Remove the resource types themselves (and any remaining values).
    conn.execute(sa.text("""
    DELETE FROM subscription_instance_values WHERE subscription_instance_values.resource_type_id IN (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address', 'opengear_wan_netmask', 'opengear_wan_gateway', 'opengear_hostname'))
    """))
    conn.execute(sa.text("""
    DELETE FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address', 'opengear_wan_netmask', 'opengear_wan_gateway', 'opengear_hostname')
    """))
    # Unlink and remove the OpengearBlock.
    conn.execute(sa.text("""
    DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_block_relations WHERE product_block_relations.in_use_by_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_relations.depends_on_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instances WHERE subscription_instances.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))
    """))
    conn.execute(sa.text("""
    DELETE FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')
    """))
    # Finally delete processes, subscriptions and the product, in dependency order.
    conn.execute(sa.text("""
    DELETE FROM processes WHERE processes.pid IN (SELECT processes_subscriptions.pid FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear'))))
    """))
    conn.execute(sa.text("""
    DELETE FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscription_instances WHERE subscription_instances.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')))
    """))
    conn.execute(sa.text("""
    DELETE FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear'))
    """))
    conn.execute(sa.text("""
    DELETE FROM products WHERE products.name IN ('Opengear')
    """))
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment