diff --git a/Changelog.md b/Changelog.md
index 3d8ebd30e5a1783cf83be60d86362ce4445689cb..d51bd0965f45cdf749cef2b164f513fac6e34e16 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -2,10 +2,17 @@
 
 All notable changes to this project will be documented in this file.
 
-## [2.7] - 2024-07-18
-- Nothing new. Jenkins pipeline was broken.
+## [2.8] - 2024-08-01
+- Reworked the authentication components.
+- Added a task that sends email notifications when validation workflows fail.
+- Fixed the Netbox device type for tier 2 sites.
+- Added a Kentik client.
+- Added tasks for creating, modifying, and deleting partners.
+- Fixed a bug in the IP trunk validation workflow.
+- Updated the router termination workflow.
+- Updated `orchestrator-core` from 2.2.1 to 2.6.1.
 
-## [2.6] - 2024-07-17
+## [2.7] - 2024-07-18
 - Added verification workflows.
 
 ## [2.5] - 2024-07-16
diff --git a/docs/source/module/api/v1/index.rst b/docs/source/module/api/v1/index.rst
index 265f493adbd6250449e16e8be08f129d62a6741e..58f463a02e5dd718b1cd0afad035d6542cbc66fe 100644
--- a/docs/source/module/api/v1/index.rst
+++ b/docs/source/module/api/v1/index.rst
@@ -13,5 +13,4 @@ Submodules
    :titlesonly:
 
    subscriptions
-   processes
    network
diff --git a/docs/source/module/api/v1/processes.rst b/docs/source/module/api/v1/processes.rst
deleted file mode 100644
index 436d9887a5f7e126f9d78c3e6fc09fa374528cf7..0000000000000000000000000000000000000000
--- a/docs/source/module/api/v1/processes.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.api.v1.processes``
-========================
-
-.. automodule:: gso.api.v1.processes
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/auth/api_key_auth.rst b/docs/source/module/auth/api_key_auth.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d17327cf8866e4064597b57681d72b97c8da5091
--- /dev/null
+++ b/docs/source/module/auth/api_key_auth.rst
@@ -0,0 +1,6 @@
+``gso.auth.api_key_auth``
+=========================
+
+.. automodule:: gso.auth.api_key_auth
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/auth/index.rst b/docs/source/module/auth/index.rst
index 5d818a853cd02ffe3b7e4ca1a8b43528c2c2a003..ec638c781d5e7f24338dc62eadc94c3fa587132e 100644
--- a/docs/source/module/auth/index.rst
+++ b/docs/source/module/auth/index.rst
@@ -11,6 +11,6 @@ Subpackages
 .. toctree::
    :maxdepth: 1
 
-   oidc_policy_helper
-   security
-   settings
+   api_key_auth
+   oidc
+   opa
diff --git a/docs/source/module/auth/oidc.rst b/docs/source/module/auth/oidc.rst
new file mode 100644
index 0000000000000000000000000000000000000000..aa4eda9408ead8c993f4ffd7f75fabcd867a5d98
--- /dev/null
+++ b/docs/source/module/auth/oidc.rst
@@ -0,0 +1,6 @@
+``gso.auth.oidc``
+=================
+
+.. automodule:: gso.auth.oidc
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/auth/oidc_policy_helper.rst b/docs/source/module/auth/oidc_policy_helper.rst
deleted file mode 100644
index b01d9cdf938f149ee927af3120cd080e3bd719c2..0000000000000000000000000000000000000000
--- a/docs/source/module/auth/oidc_policy_helper.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.auth.oidc_policy_helper``
-====================================
-
-.. automodule:: gso.auth.oidc_policy_helper
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/auth/opa.rst b/docs/source/module/auth/opa.rst
new file mode 100644
index 0000000000000000000000000000000000000000..bd91aaccbce26f41a22157f42041be67c05dfbb8
--- /dev/null
+++ b/docs/source/module/auth/opa.rst
@@ -0,0 +1,6 @@
+``gso.auth.opa``
+================
+
+.. automodule:: gso.auth.opa
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/auth/security.rst b/docs/source/module/auth/security.rst
deleted file mode 100644
index c933054270634dd3dd7500b7277fd657e16600c1..0000000000000000000000000000000000000000
--- a/docs/source/module/auth/security.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.auth.security``
-====================================
-
-.. automodule:: gso.auth.security
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/auth/settings.rst b/docs/source/module/auth/settings.rst
deleted file mode 100644
index 2bc37fa8b5285b23bd956d1fcede332261ea5c88..0000000000000000000000000000000000000000
--- a/docs/source/module/auth/settings.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.auth.settings``
-====================================
-
-.. automodule:: gso.auth.settings
-   :members:
-   :show-inheritance:
diff --git a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
index 738c2785ffbd1dfec50374c80b0f6fdf8a576ef4..9d05c0923f215fd59a4b0729ffec2b91301d8e93 100644
--- a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
+++ b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
@@ -30,3 +30,4 @@ V?LAN
 OPA
 OIDC
 HTTPBearer
+Kentik
diff --git a/gso/__init__.py b/gso/__init__.py
index 143c93ddbe0fc3786bc0990bc86a82ee108d5cc3..2df27bcb762638dee24e7c6449b34f7e99b4782d 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -1,7 +1,8 @@
 """The main entrypoint for :term:`GSO`, and the different ways in which it can be run."""
 
-from gso import monkeypatches  # noqa: F401, isort:skip
+import os
 
+import sentry_sdk
 import typer
 from orchestrator import OrchestratorCore, app_settings
 from orchestrator.cli.main import app as cli_app
@@ -11,7 +12,10 @@ from orchestrator.graphql import SCALAR_OVERRIDES
 import gso.products
 import gso.workflows  # noqa: F401
 from gso.api import router as api_router
+from gso.auth.oidc import oidc_instance
+from gso.auth.opa import graphql_opa_instance, opa_instance
 from gso.graphql_api.types import GSO_SCALAR_OVERRIDES
+from gso.settings import load_oss_params
 
 SCALAR_OVERRIDES.update(GSO_SCALAR_OVERRIDES)
 
@@ -19,6 +23,9 @@ SCALAR_OVERRIDES.update(GSO_SCALAR_OVERRIDES)
 def init_gso_app() -> OrchestratorCore:
     """Initialise the :term:`GSO` app."""
     app = OrchestratorCore(base_settings=app_settings)
+    app.register_authentication(oidc_instance)
+    app.register_authorization(opa_instance)
+    app.register_graphql_authorization(graphql_opa_instance)
     app.register_graphql()
     app.include_router(api_router, prefix="/api")
     return app
@@ -36,3 +43,13 @@ def init_cli_app() -> typer.Typer:
     cli_app.add_typer(imports.app, name="import-cli")
     cli_app.add_typer(netbox.app, name="netbox-cli")
     return cli_app()
+
+
+def init_sentry() -> None:
+    """Only initialize Sentry if not in testing mode."""
+    if os.getenv("TESTING", "false").lower() == "false" and (sentry_config := load_oss_params().SENTRY):
+        sentry_sdk.init(dsn=sentry_config.DSN, environment=sentry_config.environment, traces_sample_rate=1.0)
+
+
+init_sentry()
diff --git a/gso/api/v1/__init__.py b/gso/api/v1/__init__.py
index 5407a81644e5253922b40b2f110fcb9a50804ce3..4694b6c55ab142a01a45037e93827ed1a2ca439c 100644
--- a/gso/api/v1/__init__.py
+++ b/gso/api/v1/__init__.py
@@ -3,11 +3,9 @@
 from fastapi import APIRouter
 
 from gso.api.v1.network import router as network_router
-from gso.api.v1.processes import router as processes_router
 from gso.api.v1.subscriptions import router as subscriptions_router
 
 router = APIRouter()
 
 router.include_router(subscriptions_router)
-router.include_router(processes_router)
 router.include_router(network_router)
diff --git a/gso/api/v1/network.py b/gso/api/v1/network.py
index 6dd5ac75e3fc62b64fd4a30f69880e13227497f5..9109fd6fcde9ca14825b64d506082fb68037eb54 100644
--- a/gso/api/v1/network.py
+++ b/gso/api/v1/network.py
@@ -6,17 +6,17 @@ from uuid import UUID
 from fastapi import APIRouter, Depends
 from orchestrator.domain import SubscriptionModel
 from orchestrator.schemas.base import OrchestratorBaseModel
+from orchestrator.security import authorize
 from orchestrator.services.subscriptions import build_extended_domain_model
 from starlette import status
 
-from gso.auth.security import opa_security_default
 from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate
 from gso.services.subscriptions import get_active_iptrunk_subscriptions
 from gso.utils.shared_enums import Vendor
 
-router = APIRouter(prefix="/networks", tags=["Network"], dependencies=[Depends(opa_security_default)])
+router = APIRouter(prefix="/networks", tags=["Network"], dependencies=[Depends(authorize)])
 
 
 class SiteBlock(OrchestratorBaseModel):
diff --git a/gso/api/v1/processes.py b/gso/api/v1/processes.py
deleted file mode 100644
index 32eb104c5860c60e99d6ae34cdbf37edba075a34..0000000000000000000000000000000000000000
--- a/gso/api/v1/processes.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""Process related endpoints."""
-
-from typing import Any
-from uuid import UUID
-
-from fastapi import APIRouter, Depends, HTTPException, status
-from orchestrator.db import ProcessStepTable
-from orchestrator.schemas.base import OrchestratorBaseModel
-
-from gso.auth.security import opa_security_default
-
-router = APIRouter(prefix="/processes", tags=["Processes"], dependencies=[Depends(opa_security_default)])
-
-
-class CallBackResultsBaseModel(OrchestratorBaseModel):
-    """Base model for callback results."""
-
-    callback_results: dict
-
-
-@router.get(
-    "/steps/{step_id}/callback-results", status_code=status.HTTP_200_OK, response_model=CallBackResultsBaseModel
-)
-def callback_results(step_id: UUID) -> dict[str, Any]:
-    """Retrieve callback results for a specific process step.
-
-    :param step_id: The unique identifier of the process step.
-    :type step_id: UUID
-
-    :return: Dictionary containing callback results.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: 404 status code if the specified step_id is not found or if the 'callback_result' key
-    is not present in the state.
-    """
-    step = ProcessStepTable.query.filter(ProcessStepTable.step_id == step_id).first()
-
-    if not (step and step.state.get("callback_result", None)):
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Callback result not found.")
-
-    return {"callback_results": step.state["callback_result"]}
diff --git a/gso/auth/oidc.py b/gso/auth/oidc.py
new file mode 100644
index 0000000000000000000000000000000000000000..b65de5428bddc6abf734ec9ebeadd47fc1b2e296
--- /dev/null
+++ b/gso/auth/oidc.py
@@ -0,0 +1,162 @@
+"""Module contains the OIDC Authentication class."""
+
+import re
+from collections.abc import Callable
+from functools import wraps
+from http import HTTPStatus
+from json import JSONDecodeError
+from typing import Any
+
+from fastapi.exceptions import HTTPException
+from fastapi.requests import Request
+from httpx import AsyncClient
+from oauth2_lib.fastapi import OIDCAuth, OIDCUserModel
+from oauth2_lib.settings import oauth2lib_settings
+from structlog import get_logger
+
+logger = get_logger(__name__)
+
+_CALLBACK_STEP_API_URL_PATTERN = re.compile(
+    r"^/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
+    r"/callback/([0-9a-zA-Z\-_]+)$"
+)
+
+
+def _is_client_credentials_token(intercepted_token: dict) -> bool:
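+    """Tokens issued through the client credentials grant carry no "sub" (user subject) claim."""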
+    return "sub" not in intercepted_token
+
+
+def _is_callback_step_endpoint(request: Request) -> bool:
+    """Check if the request is a callback step API call."""
+    return re.match(_CALLBACK_STEP_API_URL_PATTERN, request.url.path) is not None
+
+
+def ensure_openid_config_loaded(func: Callable) -> Callable:
+    """Ensure that the openid_config is loaded before calling the function."""
+
+    @wraps(func)
+    async def wrapper(self: OIDCAuth, async_request: AsyncClient, *args: Any, **kwargs: Any) -> dict:
+        await self.check_openid_config(async_request)
+        return await func(self, async_request, *args, **kwargs)
+
+    return wrapper
+
+
+class OIDCAuthentication(OIDCAuth):
+    """OIDCUser class extends the :term:`HTTPBearer` class to do extra verification.
+
+    The class will act as follows:
+        1. Validate the Credentials at :term: `AAI` proxy by calling the UserInfo endpoint
+    """
+
+    @staticmethod
+    async def is_bypassable_request(request: Request) -> bool:
+        """Check if the request is a callback step API call."""
+        return _is_callback_step_endpoint(request=request)
+
+    @ensure_openid_config_loaded
+    async def userinfo(self, async_request: AsyncClient, token: str) -> OIDCUserModel:
+        """Get the userinfo from the openid server.
+
+        :param AsyncClient async_request: The async request
+        :param str token: the access_token
+        :return: OIDCUserModel: OIDC user model from openid server
+
+        """
+        assert self.openid_config is not None, "OpenID config is not loaded"  # noqa: S101
+
+        intercepted_token = await self.introspect_token(async_request, token)
+        client_id = intercepted_token.get("client_id")
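+        # Client credentials (machine-to-machine) tokens have no end user, so skip the userinfo call.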
+        if _is_client_credentials_token(intercepted_token):
+            return OIDCUserModel(client_id=client_id)
+
+        response = await async_request.post(
+            self.openid_config.userinfo_endpoint,
+            data={"token": token},
+            headers={"Authorization": f"Bearer {token}"},
+        )
+        try:
+            data = dict(response.json())
+        except JSONDecodeError as err:
+            logger.debug(
+                "Unable to parse userinfo response",
+                detail=response.text,
+                resource_server_id=self.resource_server_id,
+                openid_url=self.openid_url,
+            )
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text) from err
+        logger.debug("Response from openid userinfo", response=data)
+
+        if response.status_code not in range(200, 300):
+            logger.debug(
+                "Userinfo cannot find an active token, user unauthorized",
+                detail=response.text,
+                resource_server_id=self.resource_server_id,
+                openid_url=self.openid_url,
+            )
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text)
+
+        data["client_id"] = client_id
+
+        return OIDCUserModel(data)
+
+    @ensure_openid_config_loaded
+    async def introspect_token(self, async_request: AsyncClient, token: str) -> dict:
+        """Introspect the access token to see if it is a valid token.
+
+        :param async_request: The async request
+        :param token: the access_token
+        :return: dict from openid server
+        """
+        assert self.openid_config is not None, "OpenID config is not loaded"  # noqa: S101
+
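+        # Identity providers differ in which metadata key names the token introspection endpoint.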
+        endpoint = self.openid_config.introspect_endpoint or self.openid_config.introspection_endpoint or ""
+        response = await async_request.post(
+            endpoint,
+            data={"token": token, "client_id": self.resource_server_id},
+            headers={"Content-Type": "application/x-www-form-urlencoded"},
+        )
+
+        try:
+            data = dict(response.json())
+        except JSONDecodeError as err:
+            logger.debug(
+                "Unable to parse introspect response",
+                detail=response.text,
+                resource_server_id=self.resource_server_id,
+                openid_url=self.openid_url,
+            )
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text) from err
+
+        logger.debug("Response from openid introspect", response=data)
+
+        if response.status_code not in range(200, 300):
+            logger.debug(
+                "Introspect cannot find an active token, user unauthorized",
+                detail=response.text,
+                resource_server_id=self.resource_server_id,
+                openid_url=self.openid_url,
+            )
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text)
+
+        if "active" not in data:
+            logger.error("Token doesn't have the mandatory 'active' key, probably caused by a caching problem")
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail="Missing active key")
+        if not data.get("active", False):
+            logger.info("User is not active", user_info=data)
+            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail="User is not active")
+
+        return data
+
+
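+# A single shared instance; init_gso_app() registers it as the app's authentication backend.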
+oidc_instance = OIDCAuthentication(
+    openid_url=oauth2lib_settings.OIDC_BASE_URL,
+    openid_config_url=oauth2lib_settings.OIDC_CONF_URL,
+    resource_server_id=oauth2lib_settings.OAUTH2_RESOURCE_SERVER_ID,
+    resource_server_secret=oauth2lib_settings.OAUTH2_RESOURCE_SERVER_SECRET,
+    oidc_user_model_cls=OIDCUserModel,
+)
diff --git a/gso/auth/oidc_policy_helper.py b/gso/auth/oidc_policy_helper.py
deleted file mode 100644
index 51b6bf01a4d88e65aca6209557d6176c486228c9..0000000000000000000000000000000000000000
--- a/gso/auth/oidc_policy_helper.py
+++ /dev/null
@@ -1,447 +0,0 @@
-"""OpenID Connect and Open Policy Agent Integration for GSO Application.
-
-This module provides helper functions and classes for handling OpenID Connect (OIDC) and
-Open Policy Agent (OPA) related functionalities within the GSO application. It includes
-implementations for OIDC-based user authentication and user information modeling. Additionally,
-it facilitates making authorization decisions based on policies defined in OPA. Key components
-comprise OIDCUser, OIDCUserModel, OPAResult, and opa_decision. These elements integrate with
-FastAPI to ensure secure API development.
-"""
-
-import re
-import ssl
-from collections.abc import AsyncGenerator, Awaitable, Callable, Mapping
-from http import HTTPStatus
-from json import JSONDecodeError
-from typing import Any, ClassVar, cast
-
-from fastapi.exceptions import HTTPException
-from fastapi.param_functions import Depends
-from fastapi.requests import Request
-from fastapi.security.http import HTTPBearer
-from httpx import AsyncClient, NetworkError
-from pydantic import BaseModel
-from starlette.requests import ClientDisconnect
-from structlog import get_logger
-
-from gso.auth.settings import oauth2lib_settings
-
-logger = get_logger(__name__)
-
-HTTPX_SSL_CONTEXT = ssl.create_default_context()  # https://github.com/encode/httpx/issues/838
-
-_CALLBACK_STEP_API_URL_PATTERN = re.compile(
-    r"^/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
-    r"/callback/([0-9a-zA-Z\-_]+)$"
-)
-
-
-def _is_callback_step_endpoint(request: Request) -> bool:
-    """Check if the request is a callback step API call."""
-    return re.match(_CALLBACK_STEP_API_URL_PATTERN, request.url.path) is not None
-
-
-class InvalidScopeValueError(ValueError):
-    """Exception raised for invalid scope values in OIDC."""
-
-
-class OIDCUserModel(dict):
-    """The standard claims of a OIDCUserModel object. Defined per `Section 5.1`_ and AAI attributes.
-
-    .. _`Section 5.1`: http://openid.net/specs/openid-connect-core-1_0.html#StandardClaims
-    """
-
-    #: registered claims that OIDCUserModel supports
-    REGISTERED_CLAIMS: ClassVar[list[str]] = [
-        "sub",
-        "name",
-        "given_name",
-        "family_name",
-        "middle_name",
-        "nickname",
-        "preferred_username",
-        "profile",
-        "picture",
-        "website",
-        "email",
-        "email_verified",
-        "gender",
-        "birthdate",
-        "zoneinfo",
-        "locale",
-        "phone_number",
-        "phone_number_verified",
-        "address",
-        "updated_at",
-    ]
-
-    def __getattr__(self, key: str) -> Any:
-        """Get an attribute value using key.
-
-        Overrides the default behavior to return the value from the dictionary
-        if the attribute is one of the registered claims or raises an AttributeError
-        if the key is not found.
-
-        :param str key: The attribute name to retrieve.
-        :return: The value of the attribute if it exists, otherwise raises AttributeError.
-        """
-        try:
-            return object.__getattribute__(self, key)
-        except AttributeError as error:
-            if key in self.REGISTERED_CLAIMS:
-                return self.get(key)
-            raise error from None
-
-    @property
-    def client_id(self) -> str:
-        """Return the client id."""
-        return self.get("client_id") or ""
-
-    @property
-    def user_name(self) -> str:
-        """Return the username of the user."""
-        if "user_name" in self.keys():
-            return cast(str, self["user_name"])
-        if "unspecified_id" in self.keys():
-            return cast(str, self["unspecified_id"])
-        return ""
-
-    @property
-    def display_name(self) -> str:
-        """Return the display name of the user."""
-        return self.get("display_name", "")
-
-    @property
-    def principal_name(self) -> str:
-        """Return the principal name of the user."""
-        return self.get("eduperson_principal_name", "")
-
-    @property
-    def scopes(self) -> set[str]:
-        """Return the scopes of the user."""
-        scope_value = self.get("scope")
-        if scope_value is None:
-            return set()
-
-        if isinstance(scope_value, list):
-            return {item for item in scope_value if isinstance(item, str)}
-        if isinstance(scope_value, str):
-            return set(filter(None, re.split("[ ,]", scope_value)))
-
-        message = f"Invalid scope value: {scope_value}"
-        raise InvalidScopeValueError(message)
-
-
-async def _make_async_client() -> AsyncGenerator[AsyncClient, None]:
-    async with AsyncClient(http1=True, verify=HTTPX_SSL_CONTEXT) as client:
-        yield client
-
-
-class OIDCConfig(BaseModel):
-    """Configuration for OpenID Connect (OIDC) authentication and token validation."""
-
-    issuer: str
-    authorization_endpoint: str
-    token_endpoint: str
-    userinfo_endpoint: str
-    introspect_endpoint: str | None = None
-    introspection_endpoint: str | None = None
-    jwks_uri: str
-    response_types_supported: list[str]
-    response_modes_supported: list[str]
-    grant_types_supported: list[str]
-    subject_types_supported: list[str]
-    id_token_signing_alg_values_supported: list[str]
-    scopes_supported: list[str]
-    token_endpoint_auth_methods_supported: list[str]
-    claims_supported: list[str]
-    claims_parameter_supported: bool
-    request_parameter_supported: bool
-    code_challenge_methods_supported: list[str]
-
-
-class OPAResult(BaseModel):
-    """Represents the outcome of an authorization decision made by the Open Policy Agent (OPA).
-
-    Attributes
-    ----------
-    - result (bool): Indicates whether the access request is allowed or denied.
-    - decision_id (str): A unique identifier for the decision made by OPA.
-
-    """
-
-    result: bool = False
-    decision_id: str
-
-
-class OIDCUser(HTTPBearer):
-    """OIDCUser class extends the :term:`HTTPBearer` class to do extra verification.
-
-    The class will act as follows:
-        1. Validate the Credentials at :term: `AAI` proxy by calling the UserInfo endpoint
-    """
-
-    openid_config: OIDCConfig | None = None
-    openid_url: str
-    resource_server_id: str
-    resource_server_secret: str
-
-    def __init__(
-        self,
-        openid_url: str,
-        resource_server_id: str,
-        resource_server_secret: str,
-        *,
-        auto_error: bool = True,
-        scheme_name: str | None = None,
-    ):
-        """Set up OIDCUser with specified OpenID Connect configurations and credentials."""
-        super().__init__(auto_error=auto_error)
-        self.openid_url = openid_url
-        self.resource_server_id = resource_server_id
-        self.resource_server_secret = resource_server_secret
-        self.scheme_name = scheme_name or self.__class__.__name__
-
-    async def __call__(  # type: ignore[override]
-        self, request: Request, token: str | None = None
-    ) -> OIDCUserModel | None:
-        """Return the OIDC user from OIDC introspect endpoint.
-
-        This is used as a security module in Fastapi projects
-
-
-        :param Request request: Starlette request method.
-        :param str token: Optional value to directly pass a token.
-        :return: OIDCUserModel object.
-        """
-        if not oauth2lib_settings.OAUTH2_ACTIVE:
-            return None
-
-        async with AsyncClient(http1=True, verify=HTTPX_SSL_CONTEXT) as async_request:
-            if not token:
-                credentials = await super().__call__(request)
-                if not credentials:
-                    return None
-                token = credentials.credentials
-            elif _is_callback_step_endpoint(request):
-                logger.debug(
-                    "callback step endpoint is called. verification will be done by endpoint itself.", url=request.url
-                )
-                return None
-
-            await self.check_openid_config(async_request)
-            intercepted_token = await self.introspect_token(async_request, token)
-
-            if "active" not in intercepted_token:
-                logger.error("Token doesn't have the mandatory 'active' key, probably caused by a caching problem")
-                raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail="Missing active key")
-            if not intercepted_token.get("active", False):
-                logger.info("User is not active", url=request.url, user_info=intercepted_token)
-                raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail="User is not active")
-
-            client_id = intercepted_token.get("client_id")
-            if "sub" not in intercepted_token:
-                return OIDCUserModel(client_id=client_id)
-
-            user_info = await self.userinfo(async_request, token)
-            user_info["client_id"] = client_id
-            logger.debug("OIDCUserModel object.", intercepted_token=intercepted_token)
-
-            return user_info
-
-    async def check_openid_config(self, async_request: AsyncClient) -> None:
-        """Check of openid config is loaded and load if not."""
-        if self.openid_config is not None:
-            return
-
-        response = await async_request.get(self.openid_url + "/.well-known/openid-configuration")
-        self.openid_config = OIDCConfig.model_validate(response.json())
-
-    async def userinfo(self, async_request: AsyncClient, token: str) -> OIDCUserModel:
-        """Get the userinfo from the openid server.
-
-        :param AsyncClient async_request: The async request
-        :param str token: the access_token
-        :return: OIDCUserModel: OIDC user model from openid server
-
-        """
-        await self.check_openid_config(async_request)
-        assert self.openid_config, "OpenID config should be loaded"  # noqa: S101
-
-        response = await async_request.post(
-            self.openid_config.userinfo_endpoint,
-            data={"token": token},
-            headers={"Authorization": f"Bearer {token}"},
-        )
-        try:
-            data = dict(response.json())
-        except JSONDecodeError as err:
-            logger.debug(
-                "Unable to parse userinfo response",
-                detail=response.text,
-                resource_server_id=self.resource_server_id,
-                openid_url=self.openid_url,
-            )
-            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text) from err
-        logger.debug("Response from openid userinfo", response=data)
-
-        if response.status_code not in range(200, 300):
-            logger.debug(
-                "Userinfo cannot find an active token, user unauthorized",
-                detail=response.text,
-                resource_server_id=self.resource_server_id,
-                openid_url=self.openid_url,
-            )
-            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text)
-
-        return OIDCUserModel(data)
-
-    async def introspect_token(self, async_request: AsyncClient, token: str) -> dict:
-        """Introspect the access token to see if it is a valid token.
-
-        :param async_request: The async request
-        :param token: the access_token
-        :return: dict from openid server
-        """
-        await self.check_openid_config(async_request)
-        assert self.openid_config, "OpenID config should be loaded"  # noqa: S101
-
-        endpoint = self.openid_config.introspect_endpoint or self.openid_config.introspection_endpoint or ""
-        response = await async_request.post(
-            endpoint,
-            data={"token": token, "client_id": self.resource_server_id},
-            headers={"Content-Type": "application/x-www-form-urlencoded"},
-        )
-
-        try:
-            data = dict(response.json())
-        except JSONDecodeError as err:
-            logger.debug(
-                "Unable to parse introspect response",
-                detail=response.text,
-                resource_server_id=self.resource_server_id,
-                openid_url=self.openid_url,
-            )
-            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text) from err
-
-        logger.debug("Response from openid introspect", response=data)
-
-        if response.status_code not in range(200, 300):
-            logger.debug(
-                "Introspect cannot find an active token, user unauthorized",
-                detail=response.text,
-                resource_server_id=self.resource_server_id,
-                openid_url=self.openid_url,
-            )
-            raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED, detail=response.text)
-
-        return data
-
-
-async def _get_decision(async_request: AsyncClient, opa_url: str, opa_input: dict) -> OPAResult:
-    logger.debug("Posting input json to Policy agent", opa_url=opa_url, input=opa_input)
-    try:
-        response = await async_request.post(opa_url, json=opa_input)
-    except (NetworkError, TypeError) as exc:
-        logger.debug("Could not get decision from policy agent", error=str(exc))
-        raise HTTPException(status_code=HTTPStatus.SERVICE_UNAVAILABLE, detail="Policy agent is unavailable") from exc
-
-    result = response.json()
-    logger.debug("Received response from Policy agent", response=result)
-    return OPAResult(result=result["result"]["allow"], decision_id=result["decision_id"])
-
-
-def _evaluate_decision(decision: OPAResult, *, auto_error: bool, **context: dict[str, Any]) -> bool:
-    did = decision.decision_id
-
-    if decision.result:
-        logger.debug("User is authorized to access the resource", decision_id=did, **context)
-        return True
-
-    logger.debug("User is not allowed to access the resource", decision_id=did, **context)
-    if not auto_error:
-        return False
-
-    raise HTTPException(
-        status_code=HTTPStatus.FORBIDDEN,
-        detail=f"User is not allowed to access resource: {context.get("resource")} Decision was taken with id: {did}",
-    )
-
-
-def opa_decision(
-    opa_url: str,
-    oidc_security: OIDCUser,
-    *,
-    auto_error: bool = True,
-    opa_kwargs: Mapping[str, str] | None = None,
-) -> Callable[[Request, OIDCUserModel, AsyncClient], Awaitable[bool | None]]:
-    """Create a decision function for Open Policy Agent (OPA) authorization checks.
-
-    This function generates an asynchronous decision function that can be used in FastAPI endpoints
-    to authorize requests based on OPA policies. It utilizes OIDC for user information and makes a
-    call to the OPA service to determine authorization.
-
-    :param str opa_url: URL of the Open Policy Agent service.
-    :param OIDCUser oidc_security: An instance of OIDCUser for user authentication.
-    :param bool auto_error: If True, automatically raises an HTTPException on authorization failure.
-    :param Mapping[str, str] | None opa_kwargs: Additional keyword arguments to be passed to the OPA input.
-
-    :return: An asynchronous decision function that can be used as a dependency in FastAPI endpoints.
-    """
-
-    async def _opa_decision(
-        request: Request,
-        user_info: OIDCUserModel = Depends(oidc_security),  # noqa: B008
-        async_request: AsyncClient = Depends(_make_async_client),  # noqa: B008
-    ) -> bool | None:
-        """Check OIDCUserModel against the OPA policy.
-
-        This is used as a security module in Fastapi projects
-        This method will make an async call towards the Policy agent.
-
-        Args:
-        ----
-            request: Request object that will be used to retrieve request metadata.
-            user_info: The OIDCUserModel object that will be checked
-            async_request: The :term:`httpx` client.
-
-        """
-        if not (oauth2lib_settings.OAUTH2_ACTIVE and oauth2lib_settings.OAUTH2_AUTHORIZATION_ACTIVE):
-            return None
-
-        if _is_callback_step_endpoint(request):
-            return None
-
-        try:
-            json = await request.json()
-        # Silencing the Decode error or Type error when request.json() does not return anything sane.
-        # Some requests do not have a json response therefore as this code gets called on every request
-        # we need to suppress the `None` case (TypeError) or the `other than json` case (JSONDecodeError)
-        # Suppress AttributeError in case of websocket request, it doesn't have .json
-        except (JSONDecodeError, TypeError, ClientDisconnect, AttributeError):
-            json = {}
-
-        # defaulting to GET request method for WebSocket request, it doesn't have .method
-        request_method = request.method if hasattr(request, "method") else "GET"
-        opa_input = {
-            "input": {
-                **(opa_kwargs or {}),
-                **user_info,
-                "resource": request.url.path,
-                "method": request_method.upper(),
-                "arguments": {"path": request.path_params, "query": {**request.query_params}, "json": json},
-            }
-        }
-
-        decision = await _get_decision(async_request, opa_url, opa_input)
-
-        context = {
-            "resource": opa_input["input"]["resource"],
-            "method": opa_input["input"]["method"],
-            "user_info": user_info,
-            "input": opa_input,
-            "url": request.url,
-        }
-        return _evaluate_decision(decision, auto_error=auto_error, **context)
-
-    return _opa_decision
diff --git a/gso/auth/opa.py b/gso/auth/opa.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d801a8b958b7283f23aca3ac12a530451e85d62
--- /dev/null
+++ b/gso/auth/opa.py
@@ -0,0 +1,50 @@
+"""Module contains the OPA authorization class that is used to get decisions from the OPA server."""
+
+from http import HTTPStatus
+
+from fastapi.exceptions import HTTPException
+from httpx import AsyncClient, NetworkError
+from oauth2_lib.fastapi import GraphQLOPAAuthorization, OPAAuthorization, OPAResult
+from oauth2_lib.settings import oauth2lib_settings
+from structlog import get_logger
+
+logger = get_logger(__name__)
+
+
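+# Shared by both authorizer classes so HTTP and GraphQL requests are evaluated identically.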
+async def _get_decision(opa_url: str, async_request: AsyncClient, opa_input: dict) -> OPAResult:
+    logger.debug("Posting input json to Policy agent", opa_url=opa_url, input=opa_input)
+    try:
+        result = await async_request.post(opa_url, json=opa_input)
+    except (NetworkError, TypeError) as exc:
+        logger.debug("Could not get decision from policy agent", error=str(exc))
+        raise HTTPException(status_code=HTTPStatus.SERVICE_UNAVAILABLE, detail="Policy agent is unavailable") from exc
+
+    json_result = result.json()
+    logger.debug("Received decision from policy agent", decision=json_result)
+
+    return OPAResult(decision_id=json_result["decision_id"], result=json_result["result"]["allow"])
+
+
+class OPAAuthZ(OPAAuthorization):
+    """Applies OPA decisions to HTTP requests for authorization."""
+
+    async def get_decision(self, async_request: AsyncClient, opa_input: dict) -> OPAResult:
+        """Get the decision from the OPA server."""
+        return await _get_decision(self.opa_url, async_request, opa_input)
+
+
+class GraphQLOPAAuthZ(GraphQLOPAAuthorization):
+    """Specializes OPA authorization for GraphQL operations."""
+
+    async def get_decision(self, async_request: AsyncClient, opa_input: dict) -> OPAResult:
+        """Get the decision from the OPA server."""
+        return await _get_decision(self.opa_url, async_request, opa_input)
+
+
+opa_instance = OPAAuthZ(
+    opa_url=oauth2lib_settings.OPA_URL,
+)
+graphql_opa_instance = GraphQLOPAAuthZ(
+    opa_url=oauth2lib_settings.OPA_URL,
+)
diff --git a/gso/auth/security.py b/gso/auth/security.py
deleted file mode 100644
index e1d5376479e9e95f50847af06fa7120272135a4a..0000000000000000000000000000000000000000
--- a/gso/auth/security.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""Module for initializing OAuth client credentials and OIDC user."""
-
-from authlib.integrations.starlette_client import OAuth
-from nwastdlib.url import URL
-
-from gso.auth.oidc_policy_helper import HTTPX_SSL_CONTEXT, OIDCUser, opa_decision
-from gso.auth.settings import oauth2_settings
-
-oauth_client_credentials = OAuth()
-
-well_known_endpoint = URL(oauth2_settings.OIDC_CONF_WELL_KNOWN_URL)
-
-oauth_client_credentials.register(
-    "connext",
-    server_metadata_url=well_known_endpoint / ".well-known" / "openid-configuration",
-    client_id=oauth2_settings.OAUTH2_RESOURCE_SERVER_ID,
-    client_secret=oauth2_settings.OAUTH2_RESOURCE_SERVER_SECRET,
-    request_token_params={"grant_type": "client_credentials"},
-    client_kwargs={"verify": HTTPX_SSL_CONTEXT},
-)
-
-oidc_user = OIDCUser(
-    oauth2_settings.OIDC_CONF_WELL_KNOWN_URL,
-    oauth2_settings.OAUTH2_RESOURCE_SERVER_ID,
-    oauth2_settings.OAUTH2_RESOURCE_SERVER_SECRET,
-)
-
-opa_security_default = opa_decision(oauth2_settings.OPA_URL, oidc_user)
-
-
-def get_oidc_user() -> OIDCUser:
-    """Retrieve the global OIDCUser instance.
-
-    This function returns the instance of OIDCUser initialized in the module.
-    It is typically used for accessing the OIDCUser across different parts of the application.
-
-    :return OIDCUser: The instance of OIDCUser configured with OAuth2 settings.
-    """
-    return oidc_user
diff --git a/gso/auth/settings.py b/gso/auth/settings.py
deleted file mode 100644
index b3ab1a6a569e2e594e181c23c231366e212f4905..0000000000000000000000000000000000000000
--- a/gso/auth/settings.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""Security configurations and utilities for the GSO application. Handles OAuth2 and OpenID Connect.
-
-authentication and authorization, including token validation and user authentication. Integrates
-with external authentication providers for enhanced security management.
-
-Todo: Remove token and sensitive data from OPA console and API.
-"""
-
-from pydantic import Field
-from pydantic_settings import BaseSettings
-
-
-class Oauth2LibSettings(BaseSettings):
-    """Common settings for applications depending on oauth2."""
-
-    ENVIRONMENT: str = "local"
-    SERVICE_NAME: str = ""
-    MUTATIONS_ENABLED: bool = False
-    ENVIRONMENT_IGNORE_MUTATION_DISABLED: list[str] = Field(
-        default_factory=list, description="Environments for which to allow unauthenticated mutations"
-    )
-    OAUTH2_ACTIVE: bool = True
-    OAUTH2_AUTHORIZATION_ACTIVE: bool = True
-
-
-oauth2lib_settings = Oauth2LibSettings()
-
-
-class Oauth2Settings(BaseSettings):
-    """Configuration settings for OAuth2 and OpenID Connect (OIDC)."""
-
-    OAUTH2_RESOURCE_SERVER_ID: str = ""
-    OAUTH2_RESOURCE_SERVER_SECRET: str = ""
-    OAUTH2_TOKEN_URL: str = ""
-    OIDC_CONF_WELL_KNOWN_URL: str = ""
-    OPA_URL: str = "http://localhost:8181/v1/data/gap/gso/api/access"
-
-
-oauth2_settings = Oauth2Settings()
diff --git a/gso/cli/imports.py b/gso/cli/imports.py
index 3a6dcf1c814aecb96a2ecc1a406a78fd9fda9f9a..406fa85d1dbc0e11d58187909a63dc5f6285285c 100644
--- a/gso/cli/imports.py
+++ b/gso/cli/imports.py
@@ -13,14 +13,19 @@ import yaml
 from orchestrator.db import db
 from orchestrator.services.processes import start_process
 from orchestrator.types import SubscriptionLifecycle
-from pydantic import BaseModel, ValidationError, field_validator, model_validator
+from pydantic import BaseModel, EmailStr, ValidationError, field_validator, model_validator
 from sqlalchemy.exc import SQLAlchemyError
 
 from gso.db.models import PartnerTable
 from gso.products import ProductType
 from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
 from gso.products.product_blocks.router import RouterRole
-from gso.services.partners import PartnerNotFoundError, get_partner_by_name
+from gso.services.partners import (
+    PartnerNotFoundError,
+    filter_partners_by_email,
+    filter_partners_by_name,
+    get_partner_by_name,
+)
 from gso.services.subscriptions import (
     get_active_router_subscriptions,
     get_active_subscriptions_by_field_and_value,
@@ -32,6 +37,33 @@ from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber,
 app: typer.Typer = typer.Typer()
 
 
+class CreatePartner(BaseModel):
+    """Required inputs for creating a partner."""
+
+    name: str
+    email: EmailStr
+
+    @field_validator("name")
+    def validate_name(cls, name: str) -> str:
+        """Validate name."""
+        if filter_partners_by_name(name=name, case_sensitive=False):
+            msg = "Partner with this name already exists."
+            raise ValueError(msg)
+
+        return name
+
+    @field_validator("email")
+    def validate_email(cls, email: str) -> EmailStr:
+        """Validate email."""
+        email = email.lower()
+        if filter_partners_by_email(email=email, case_sensitive=False):
+            msg = "Partner with this email already exists."
+            raise ValueError(msg)
+
+        return email
+
+
 class SiteImportModel(BaseSiteValidatorModel):
     """The required input for importing an existing :class:`gso.products.product_types.site`."""
 
@@ -375,7 +406,7 @@ def import_partners(file_path: str = typer.Argument(..., help="Path to the CSV f
             if partner.get("created_at"):
                 partner["created_at"] = datetime.strptime(partner["created_at"], "%Y-%m-%d").replace(tzinfo=UTC)
 
-            new_partner = PartnerTable(**partner)
+            new_partner = PartnerTable(**{**partner, **CreatePartner(**partner).model_dump()})
             db.session.add(new_partner)
 
         db.session.commit()
diff --git a/gso/db/models.py b/gso/db/models.py
index 02d8c59ce72e6e1ecb47bab5c513853a9f59551b..c6382b1c81d06f9192ed4f416e186a2d990a5a45 100644
--- a/gso/db/models.py
+++ b/gso/db/models.py
@@ -1,50 +1,25 @@
 """Database model definitions and table mappings for the GSO system."""
 
-import enum
-
 import structlog
 from orchestrator.db import UtcTimestamp
 from orchestrator.db.database import BaseModel
 from sqlalchemy import (
-    Enum,
     String,
     text,
 )
-from sqlalchemy.dialects.postgresql import ARRAY
 from sqlalchemy.orm import mapped_column
 
 logger = structlog.get_logger(__name__)
 
 
-class PartnerType(str, enum.Enum):
-    """Defining different types of partners in the GSO system."""
-
-    NREN = "NREN"
-    RE_PEER = "RE_PEER"
-    PUBLIC_PEER = "PUBLIC_PEER"
-    PRIVATE_PEER = "PRIVATE_PEER"
-    UPSTREAM = "UPSTREAM"
-    GEANT = "GEANT"
-
-
 class PartnerTable(BaseModel):
     """Database table for the partners in the GSO system."""
 
     __tablename__ = "partners"
 
     partner_id = mapped_column(String, server_default=text("uuid_generate_v4"), primary_key=True)
-    name = mapped_column(String, unique=True, nullable=True)
+    name = mapped_column(String, unique=True, nullable=False)
     email = mapped_column(String, unique=True, nullable=False)
-    partner_type = mapped_column(Enum(PartnerType), nullable=False)
-
-    as_number = mapped_column(
-        String, unique=True, nullable=True
-    )  # the as_number and as_set are mutually exclusive. if you give me one I don't need the other
-    as_set = mapped_column(String, nullable=True)
-    route_set = mapped_column(String, nullable=True)
-    black_listed_as_sets = mapped_column(ARRAY(String), nullable=True)
-    additional_routers = mapped_column(ARRAY(String), nullable=True)
-    additional_bgp_speakers = mapped_column(ARRAY(String), nullable=True)
 
     created_at = mapped_column(UtcTimestamp, server_default=text("current_timestamp"), nullable=False)
     updated_at = mapped_column(
diff --git a/gso/migrations/versions/2024-07-29_3111c27972af_add_email_notification_task.py b/gso/migrations/versions/2024-07-29_3111c27972af_add_email_notification_task.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b4b806139e74e54cf14d2f0e1e80b93d53161e4
--- /dev/null
+++ b/gso/migrations/versions/2024-07-29_3111c27972af_add_email_notification_task.py
@@ -0,0 +1,45 @@
+"""Add email notification task.
+
+Revision ID: 3111c27972af
+Revises: 31fd1ae8d5bb
+Create Date: 2024-07-29 17:38:37.786347
+
+"""
+
+from uuid import uuid4
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '3111c27972af'
+down_revision = '31fd1ae8d5bb'
+branch_labels = None
+depends_on = None
+
+workflows = [
+    {
+        "name": "task_send_email_notifications",
+        "target": "SYSTEM",
+        "description": "Send email notifications for all failed tasks",
+        "workflow_id": uuid4(),
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
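+    # ON CONFLICT DO NOTHING keeps the insert idempotent if the workflow is already registered.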
+    for workflow in workflows:
+        conn.execute(
+            sa.text(
+                "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING"
+            ),
+            workflow,
+        )
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]})
diff --git a/gso/migrations/versions/2024-07-31_41fd1ae225aq_create_modify_delete_partner_task.py b/gso/migrations/versions/2024-07-31_41fd1ae225aq_create_modify_delete_partner_task.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fa4e4eed5191185f72bb0e0f7130e39034ee956
--- /dev/null
+++ b/gso/migrations/versions/2024-07-31_41fd1ae225aq_create_modify_delete_partner_task.py
@@ -0,0 +1,50 @@
+"""Add task_create_partners, task_modify_partners and task_delete_partners.
+
+Revision ID: 41fd1ae225aq
+Revises: 3111c27972af
+Create Date: 2024-07-29 17:11:00.00000
+
+"""
+
+from uuid import uuid4
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "41fd1ae225aq"
+down_revision = "3111c27972af"
+branch_labels = None
+depends_on = None
+
+workflows = [
+    {"name": "task_create_partners", "description": "Create partners", "workflow_id": uuid4(), "target": "SYSTEM"},
+    {"name": "task_modify_partners", "description": "Modify partners", "workflow_id": uuid4(), "target": "SYSTEM"},
+    {"name": "task_delete_partners", "description": "Delete partners", "workflow_id": uuid4(), "target": "SYSTEM"},
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(
+            sa.text(
+                "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING"
+            ),
+            workflow,
+        )
+
+    op.alter_column("partners", "name", existing_type=sa.String(), nullable=False)
+    op.drop_column("partners", "partner_type")
+    op.drop_column("partners", "as_number")
+    op.drop_column("partners", "as_set")
+    op.drop_column("partners", "route_set")
+    op.drop_column("partners", "black_listed_as_sets")
+    op.drop_column("partners", "additional_routers")
+    op.drop_column("partners", "additional_bgp_speakers")
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]})
diff --git a/gso/monkeypatches.py b/gso/monkeypatches.py
deleted file mode 100644
index 1b71f634ac0677b741b9670c756b79bc6a929f4e..0000000000000000000000000000000000000000
--- a/gso/monkeypatches.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Override certain classes and settings in the oauth2_lib.fastapi package with custom implementations.
-
-This adjustment is typically done to extend or modify the functionality of the original
-oauth2_lib package to meet specific requirements of the gso application.
-"""
-
-import oauth2_lib.fastapi
-import oauth2_lib.settings
-
-from gso.auth.oidc_policy_helper import HTTPX_SSL_CONTEXT, OIDCUser, OIDCUserModel, _get_decision, opa_decision
-from gso.auth.settings import oauth2lib_settings
-
-oauth2_lib.fastapi.OIDCUser = OIDCUser  # type: ignore[assignment, misc]
-oauth2_lib.fastapi.OIDCUserModel = OIDCUserModel  # type: ignore[assignment, misc]
-oauth2_lib.fastapi.opa_decision = opa_decision  # type: ignore[assignment]
-oauth2_lib.fastapi._get_decision = _get_decision  # type: ignore[assignment] # noqa: SLF001
-oauth2_lib.fastapi.HTTPX_SSL_CONTEXT = HTTPX_SSL_CONTEXT
-oauth2_lib.settings.oauth2lib_settings = oauth2lib_settings  # type: ignore[assignment]
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index 2a40269346ad446e4b8fbac6a4f9ed4dafc8a3ea..f9523a724d2b433b761ef4c5f33544077e6d850b 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -85,12 +85,13 @@
     "Application_2": "another_REALY_random_AND_3cure_T0keN"
   },
   "EMAIL": {
-    "from_address": "noreply@nren.local",
+    "from_address": "noreply@example.com",
     "smtp_host": "smtp.nren.local",
     "smtp_port": 487,
     "starttls_enabled": true,
     "smtp_username": "username",
-    "smtp_password": "password"
+    "smtp_password": "password",
+    "notification_email_destinations": "oc@nren.local, neteng@nren.local, ceo@nren.local"
   },
   "SHAREPOINT": {
     "client_id": "UUID",
@@ -102,5 +103,23 @@
       "p_router": "UUID"
     },
     "scopes": ["https://graph.microsoft.com/.default"]
+  },
+  "KENTIK": {
+    "api_base": "https://api.kentik.com/api/",
+    "user_email": "robot-user@geant.org",
+    "api_key": "kentik_api_key",
+    "device_type": "router",
+    "minimize_snmp": false,
+    "placeholder_license_key": "placeholder license",
+    "sample_rate": 100,
+    "bgp_type": "device",
+    "bgp_lookup_strategy": "lu_global_fallback",
+    "ASN": 137,
+    "snmp_community": "secret community string",
+    "md5_password": "snmp password"
+  },
+  "SENTRY": {
+    "DSN": "https://sentry-dsn-url",
+    "environment": "development"
   }
 }
diff --git a/gso/schedules/send_email_notifications.py b/gso/schedules/send_email_notifications.py
new file mode 100644
index 0000000000000000000000000000000000000000..61e5a6765ca343002a514d83aef86caef6948bff
--- /dev/null
+++ b/gso/schedules/send_email_notifications.py
@@ -0,0 +1,14 @@
+"""Task that sends out email notifications for failed tasks."""
+
+from orchestrator.services.processes import start_process
+
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
+from gso.worker import celery
+
+
+@celery.task
+@scheduler(CronScheduleConfig(name="Send email notifications", hour="2", minute="30"))
+def send_email_notifications() -> None:
+    """Run this task every night at 2:30 AM."""
+    start_process("task_send_email_notifications")
diff --git a/gso/schema/__init__.py b/gso/schema/__init__.py
deleted file mode 100644
index 20b21e2c5736e9d2890482561fd61bc06a84e3c9..0000000000000000000000000000000000000000
--- a/gso/schema/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""It is used to group the schema files together as a package."""
diff --git a/gso/schema/partner.py b/gso/schema/partner.py
deleted file mode 100644
index b1c58c2cf91bf544501f6b2e316117b8b83a70c9..0000000000000000000000000000000000000000
--- a/gso/schema/partner.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Partner schema module."""
-
-from datetime import datetime
-from uuid import uuid4
-
-from pydantic import BaseModel, ConfigDict, EmailStr, Field
-
-from gso.db.models import PartnerType
-
-
-class PartnerCreate(BaseModel):
-    """Partner create schema."""
-
-    partner_id: str = Field(default_factory=lambda: str(uuid4()))
-    name: str
-    email: EmailStr | None = None
-    as_number: str | None = None
-    as_set: str | None = None
-    route_set: str | None = None
-    black_listed_as_sets: list[str] | None = None
-    additional_routers: list[str] | None = None
-    additional_bgp_speakers: list[str] | None = None
-    partner_type: PartnerType
-    created_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
-    updated_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
-    model_config = ConfigDict(from_attributes=True)
diff --git a/gso/services/kentik_client.py b/gso/services/kentik_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f62d169186b28848ea696b791834bd0192fddee
--- /dev/null
+++ b/gso/services/kentik_client.py
@@ -0,0 +1,165 @@
+"""The Kentik service is used for external interactions with Kentik."""
+
+import logging
+from typing import Any, Literal
+
+import requests
+from pydantic import BaseModel
+from requests import Response
+
+from gso.products.product_blocks.site import SiteTier
+from gso.settings import load_oss_params
+
+logger = logging.getLogger(__name__)
+
+
+class NewKentikDevice(BaseModel):
+    """API model for creating a new device in Kentik."""
+
+    device_name: str
+    device_description: str
+    sending_ips: list[str]
+    site_id: int
+    site_tier: SiteTier
+    device_snmp_ip: str
+    device_bgp_flowspec: bool
+    device_bgp_neighbor_ip: str
+    device_bgp_neighbor_ip6: str
+
+
+class KentikClient:
+    """The client for Kentik that interacts with an external instance."""
+
+    def __init__(self) -> None:
+        """Instantiate a new Kentik Client."""
+        self.config = load_oss_params().KENTIK
+        self.session = requests.Session()
+        self.session.headers.update({
+            "X-CH-Auth-Email": self.config.user_email,
+            "X-CH-Auth-API-Token": self.config.api_key,
+            "Content-Type": "application/json",
+        })
+
+    def _send_request(
+        self, method: Literal["GET", "POST", "PUT", "DELETE"], endpoint: str, data: dict[str, Any] | None = None
+    ) -> Response:
+        url = self.config.api_base + endpoint
+        logger.debug("Kentik - Sending request", extra={"method": method, "endpoint": url, "form_data": data})
+        result = self.session.request(method, url, json=data)
+        logger.debug("Kentik - Received response", extra=result.__dict__)
+
+        return result
+
+    def get_devices(self) -> list[dict[str, Any]]:
+        """List all devices in Kentik."""
+        return self._send_request("GET", "v5/devices").json()["devices"]
+
+    def get_device(self, device_id: str) -> dict[str, Any]:
+        """Get a device by ID."""
+        return self._send_request("GET", f"v5/device/{device_id}").json()
+
+    def get_device_by_name(self, device_name: str) -> dict[str, Any]:
+        """Fetch a device in Kentik by its :term:`FQDN`.
+
+        If the device is not found, returns an empty dict.
+
+        :param str device_name: The :term:`FQDN` of the sought device.
+        """
+        devices = self.get_devices()
+        for device in devices:
+            if device["name"] == device_name:
+                return device
+
+        return {}
+
+    def get_sites(self) -> list[dict[str, Any]]:
+        """Get a list of all available sites in Kentik."""
+        return self._send_request("GET", "v5/sites").json()["sites"]
+
+    def get_site(self, site_id: str) -> dict[str, Any]:
+        """Get a site by ID."""
+        return self._send_request("GET", f"v5/site/{site_id}").json()
+
+    def get_site_by_name(self, site_slug: str) -> dict[str, Any]:
+        """Get a Kentik site by its name.
+
+        If the site is not found, return an empty dict.
+
+        :param str site_slug: The name of the site, should be a three-letter slug like COR or POZ.
+        """
+        sites = self.get_sites()
+        for site in sites:
+            if site["site_name"] == site_slug:
+                return site
+
+        return {}
+
+    def get_plans(self) -> list[dict[str, Any]]:
+        """Get all Kentik plans available."""
+        return self._send_request("GET", "v5/plans").json()["plans"]
+
+    def get_plan_by_name(self, plan_name: str) -> dict[str, Any]:
+        """Get a Kentik plan by its name.
+
+        If the plan is not found, returns an empty dict.
+
+        :param str plan_name: The name of the plan.
+        """
+        plans = self.get_plans()
+        for plan in plans:
+            if plan["name"] == plan_name:
+                return plan
+
+        return {}
+
+    def create_device(self, device: NewKentikDevice) -> dict[str, Any]:
+        """Add a new device to Kentik."""
+        plan_id = self.get_plan_by_name(self.config.placeholder_license_key)["id"]
+        request_body = {
+            "device": {
+                **device.model_dump(exclude={"device_name", "site_tier"}),
+                "device_name": device.device_description,
+                "device_type": self.config.device_type,
+                "device_subtype": self.config.device_type,
+                "minimize_snmp": self.config.minimize_snmp,
+                "device_sample_rate": self.config.sample_rate,
+                "plan_id": plan_id,
+                "device_snmp_community": self.config.snmp_community,
+                "device_bgp_type": self.config.bgp_type,
+                "bgp_lookup_strategy": self.config.bgp_lookup_strategy,
+                "device_bgp_neighbor_asn": str(self.config.ASN),
+                "device_bgp_password": self.config.md5_password,
+            }
+        }
+
+        new_device = self._send_request("POST", "v5/device", request_body).json()
+
+        # The name of the device has to be updated from the subscription ID to its FQDN.
+        # This is a limitation of the Kentik API that disallows setting device names containing a "." symbol.
+        self.update_device(new_device["device"]["id"], {"device": {"device_name": device.device_name}})
+        new_device["device"]["device_name"] = device.device_name
+
+        return new_device
+
+    def update_device(self, device_id: str, updated_device: dict[str, Any]) -> dict[str, Any]:
+        """Update an existing device in Kentik."""
+        return self._send_request("PUT", f"v5/device/{device_id}", updated_device).json()
+
+    def remove_device(self, device_id: str, *, archive: bool) -> None:
+        """Remove a device from Kentik.
+
+        :param str device_id: The Kentik internal ID of the device that is to be removed.
+        :param bool archive: Archive the device instead of completely deleting it.
+        """
+        # Kentik archives a device on the first DELETE call; a second DELETE removes it
+        # permanently. A full delete therefore issues the request twice.
+        if not archive:
+            self._send_request("DELETE", f"v5/device/{device_id}")
+
+        self._send_request("DELETE", f"v5/device/{device_id}")
+
+    def remove_device_by_fqdn(self, fqdn: str, *, archive: bool = True) -> None:
+        """Remove a device from Kentik, by its :term:`FQDN`."""
+        device_id = self.get_device_by_name(fqdn)["id"]
+        self.remove_device(device_id, archive=archive)
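+
+
+# A minimal usage sketch (illustrative only; assumes the OSS params contain a KENTIK
+# section and that the referenced site exists in Kentik):
+#
+#     client = KentikClient()
+#     site = client.get_site_by_name("COR")
+#     new_device = client.create_device(
+#         NewKentikDevice(
+#             device_name="rt1.cor.ie.geant.net",  # hypothetical FQDN
+#             device_description="<subscription ID>",
+#             sending_ips=["192.0.2.1"],
+#             site_id=site["id"],
+#             site_tier=SiteTier.TIER1,
+#             device_snmp_ip="192.0.2.1",
+#             device_bgp_flowspec=False,
+#             device_bgp_neighbor_ip="192.0.2.1",
+#             device_bgp_neighbor_ip6="2001:db8::1",
+#         )
+#     )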
diff --git a/gso/services/mailer.py b/gso/services/mailer.py
index 3cd85b7369fd176d55d02674f9b55e2c02a47a72..c98ae7f8b108a82b7de4425b4b4fecfca13907f7 100644
--- a/gso/services/mailer.py
+++ b/gso/services/mailer.py
@@ -7,19 +7,18 @@ from ssl import create_default_context
 from gso.settings import load_oss_params
 
 
-def send_mail(recipient: str, subject: str, body: str) -> None:
-    """Send an email message to the given address.
+def send_mail(subject: str, body: str) -> None:
+    """Send an email message to the given addresses.
 
     Only supports STARTTLS, not SSL.
 
-    :param str recipient: The destination address.
     :param str subject: The email subject.
     :param str body: The contents of the email message.
     """
     email_params = load_oss_params().EMAIL
     msg = EmailMessage()
     msg["From"] = email_params.from_address
-    msg["To"] = recipient
+    msg["To"] = email_params.notification_email_destinations
     msg["Subject"] = subject
     msg.set_content(body)
 
diff --git a/gso/services/partners.py b/gso/services/partners.py
index 6c425bdad2432634d27780c85541bd9a02b94b7f..63d55bb3f3d4044f60ce90f4f6cbd839e1efa069 100644
--- a/gso/services/partners.py
+++ b/gso/services/partners.py
@@ -1,12 +1,27 @@
 """A module that returns the partners available in :term:`GSO`."""
 
+from datetime import datetime
 from typing import Any
+from uuid import uuid4
 
 from orchestrator.db import db
-from sqlalchemy.exc import NoResultFound, SQLAlchemyError
+from pydantic import BaseModel, ConfigDict, EmailStr, Field
+from sqlalchemy import func
+from sqlalchemy.exc import NoResultFound
 
 from gso.db.models import PartnerTable
-from gso.schema.partner import PartnerCreate
+
+
+class PartnerSchema(BaseModel):
+    """Partner schema."""
+
+    partner_id: str = Field(default_factory=lambda: str(uuid4()))
+    name: str
+    email: EmailStr
+
+    created_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
+    updated_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
+    model_config = ConfigDict(from_attributes=True)
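+
+# A minimal sketch of how this schema is used (names illustrative; the ID and
+# timestamps are filled in by the default factories):
+#
+#     create_partner(PartnerSchema(name="SURF", email="noc@surf.example"))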
 
 
 class PartnerNotFoundError(Exception):
@@ -22,46 +37,84 @@ def get_all_partners() -> list[dict]:
 def get_partner_by_name(name: str) -> dict[str, Any]:
     """Try to get a partner by their name."""
     try:
-        partner = PartnerTable.query.filter(PartnerTable.name == name).one()
+        partner = db.session.query(PartnerTable).filter(PartnerTable.name == name).one()
         return partner.__json__()
     except NoResultFound as e:
         msg = f"partner {name} not found"
         raise PartnerNotFoundError(msg) from e
 
 
+def get_partner_by_id(partner_id: str) -> PartnerTable:
+    """Try to get a partner by their id."""
+    partner = db.session.query(PartnerTable).filter_by(partner_id=partner_id).first()
+    if not partner:
+        msg = f"partner with ID {partner_id} not found"
+        raise PartnerNotFoundError(msg)
+
+    return partner
+
+
+def filter_partners_by_attribute(
+    attribute: str, value: str, *, case_sensitive: bool = True
+) -> list[dict[str, Any]] | None:
+    """Filter the list of partners by a specified attribute."""
+    if case_sensitive:
+        partners = db.session.query(PartnerTable).filter(getattr(PartnerTable, attribute) == value).all()
+    else:
+        partners = (
+            db.session.query(PartnerTable)
+            .filter(func.lower(getattr(PartnerTable, attribute)) == func.lower(value))
+            .all()
+        )
+
+    return [partner.__json__() for partner in partners] if partners else None
+
+
+def filter_partners_by_name(name: str, *, case_sensitive: bool = True) -> list[dict[str, Any]] | None:
+    """Filter the list of partners by name."""
+    return filter_partners_by_attribute("name", name, case_sensitive=case_sensitive)
+
+
+def filter_partners_by_email(email: str, *, case_sensitive: bool = True) -> list[dict[str, Any]] | None:
+    """Filter the list of partners by email."""
+    return filter_partners_by_attribute("email", email, case_sensitive=case_sensitive)
+
+
 def create_partner(
-    partner_data: PartnerCreate,
+    partner_data: PartnerSchema,
 ) -> dict:
     """Create a new partner and add it to the database using Pydantic schema for validation.
 
     :param partner_data: Partner data validated by Pydantic schema.
     :return: JSON representation of the created partner.
     """
-    try:
-        new_partner = PartnerTable(**partner_data.model_dump())
+    new_partner = PartnerTable(**partner_data.model_dump())
+    db.session.add(new_partner)
+    db.session.commit()
 
-        db.session.add(new_partner)
-        db.session.commit()
+    return new_partner.__json__()
 
-        return new_partner.__json__()
-    except SQLAlchemyError:
-        db.session.rollback()
-        raise
-    finally:
-        db.session.close()
 
+def edit_partner(
+    partner_data: PartnerSchema,
+) -> PartnerTable:
+    """Edit an existing partner and update it in the database."""
+    partner = get_partner_by_id(partner_id=partner_data.partner_id)
 
-def delete_partner_by_name(name: str) -> None:
-    """Delete a partner by their name."""
-    try:
-        partner = PartnerTable.query.filter(PartnerTable.name == name).one()
-        db.session.delete(partner)
-        db.session.commit()
-    except NoResultFound as e:
-        msg = f"partner {name} not found"
-        raise PartnerNotFoundError(msg) from e
-    except SQLAlchemyError:
-        db.session.rollback()
-        raise
-    finally:
-        db.session.close()
+    if partner_data.name:
+        partner.name = partner_data.name
+    if partner_data.email:
+        partner.email = partner_data.email
+
+    partner.updated_at = datetime.now().astimezone()
+
+    db.session.commit()
+
+    return partner
+
+
+def delete_partner(partner_id: str) -> None:
+    """Delete an existing partner from the database."""
+    partner = get_partner_by_id(partner_id=partner_id)
+
+    db.session.delete(partner)
+    db.session.commit()
diff --git a/gso/services/subscriptions.py b/gso/services/subscriptions.py
index 27f075a19ee0926311b3c360ee28254e60f10e75..d82c66c9b0a42aa83b62fe731e40710a41c85005 100644
--- a/gso/services/subscriptions.py
+++ b/gso/services/subscriptions.py
@@ -8,30 +8,33 @@ from typing import Any
 from uuid import UUID
 
 from orchestrator.db import (
+    ProcessSubscriptionTable,
     ProcessTable,
     ProductTable,
     ResourceTypeTable,
     SubscriptionInstanceTable,
     SubscriptionInstanceValueTable,
     SubscriptionTable,
+    db,
 )
+from orchestrator.domain import SubscriptionModel
 from orchestrator.services.subscriptions import query_in_use_by_subscriptions
 from orchestrator.types import SubscriptionLifecycle
+from orchestrator.workflow import ProcessStatus
 from pydantic_forms.types import UUIDstr
 
 from gso.products import ProductName, ProductType
-from gso.products.product_blocks.router import RouterRole
-from gso.products.product_types.router import Router
 from gso.products.product_types.site import Site
 
 SubscriptionType = dict[str, Any]
 
 
 def get_subscriptions(
-    product_types: list[ProductType],
+    product_types: list[ProductType] | None = None,
     lifecycles: list[SubscriptionLifecycle] | None = None,
     includes: list[str] | None = None,
     excludes: list[str] | None = None,
+    partner_id: UUIDstr | None = None,
 ) -> list[SubscriptionType]:
     """Retrieve active subscriptions for a specific product type.
 
@@ -39,13 +42,11 @@ def get_subscriptions(
     :param SubscriptionLifecycle lifecycles: The lifecycles that the products must be in.
     :param list[str] includes: List of fields to be included in the returned Subscription objects.
     :param list[str] excludes: List of fields to be excluded from the returned Subscription objects.
+    :param UUIDstr partner_id: Only return subscriptions that belong to this partner (customer) ID.
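+
+    Example (illustrative)::
+
+        get_subscriptions(product_types=[ProductType.ROUTER], lifecycles=[SubscriptionLifecycle.ACTIVE])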
 
     :return: A list of Subscription objects that match the query.
     :rtype: list[Subscription]
     """
-    if not lifecycles:
-        lifecycles = list(SubscriptionLifecycle)
-
     if not includes:
         includes = [col.name for col in SubscriptionTable.__table__.columns]
 
@@ -54,10 +55,16 @@ def get_subscriptions(
 
     dynamic_fields = [getattr(SubscriptionTable, field) for field in includes]
 
-    query = SubscriptionTable.query.join(ProductTable).filter(
-        ProductTable.product_type.in_([str(product_type) for product_type in product_types]),
-        SubscriptionTable.status.in_([str(lifecycle) for lifecycle in lifecycles]),
-    )
+    query = db.session.query(SubscriptionTable).join(ProductTable)
+
+    if product_types:
+        query = query.filter(ProductTable.product_type.in_([str(product_type) for product_type in product_types]))
+
+    if lifecycles:
+        query = query.filter(SubscriptionTable.status.in_([str(lifecycle) for lifecycle in lifecycles]))
+
+    if partner_id:
+        query = query.filter(SubscriptionTable.customer_id == partner_id)
 
     results = query.with_entities(*dynamic_fields).all()
 
@@ -194,17 +201,32 @@ def get_active_subscriptions_by_field_and_value(field_name: str, field_value: st
 
 
 def count_incomplete_validate_products() -> int:
-    """Count the number of incomplete validate_products processes.
+    """Count the number of incomplete validate_geant_products processes.
 
-    :return: The count of incomplete 'validate_products' processes.
+    :return: The count of incomplete 'validate_geant_products' processes.
     :rtype: int
     """
     return ProcessTable.query.filter(
-        ProcessTable.workflow_name == "validate_products",
-        ProcessTable.last_status != "completed",
+        ProcessTable.workflow_name == "validate_geant_products",
+        ProcessTable.last_status != ProcessStatus.COMPLETED.value,
     ).count()
 
 
+def get_failed_tasks() -> list[ProcessTable]:
+    """Get all tasks that have failed."""
+    return ProcessTable.query.filter(
+        ProcessTable.is_task.is_(True), ProcessTable.last_status == ProcessStatus.FAILED.value
+    ).all()
+
+
+def get_subscription_by_process_id(process_id: str) -> SubscriptionModel | None:
+    """Get a subscription from a process ID."""
+    subscription_table = ProcessSubscriptionTable.query.filter(
+        ProcessSubscriptionTable.process_id == process_id
+    ).first()
+    return SubscriptionModel.from_subscription(subscription_table.subscription_id) if subscription_table else None
+
+
 def get_insync_subscriptions() -> list[SubscriptionTable]:
     """Retrieve all subscriptions that are currently in sync."""
     return SubscriptionTable.query.join(ProductTable).filter(SubscriptionTable.insync.is_(True)).all()
@@ -214,7 +236,7 @@ def get_active_insync_subscriptions() -> list[SubscriptionTable]:
     """Retrieve all subscriptions that are currently active and in sync."""
     return (
         SubscriptionTable.query.join(ProductTable)
-        .filter(SubscriptionTable.insync.is_(True), SubscriptionTable.status.is_(SubscriptionLifecycle.ACTIVE))
+        .filter(SubscriptionTable.insync.is_(True), SubscriptionTable.status == SubscriptionLifecycle.ACTIVE.value)
         .all()
     )
 
@@ -231,18 +253,3 @@ def get_site_by_name(site_name: str) -> Site:
         raise ValueError(msg)
 
     return Site.from_subscription(subscription[0].subscription_id)
-
-
-def get_active_pe_router_dict() -> dict[str, Any]:
-    """Generate an Ansible-compatible inventory for executing playbooks. Contains all active PE routers."""
-    all_routers = [Router.from_subscription(r["subscription_id"]) for r in get_active_router_subscriptions()]
-
-    return {
-        router.router.router_fqdn: {
-            "lo4": str(router.router.router_lo_ipv4_address),
-            "lo6": str(router.router.router_lo_ipv6_address),
-            "vendor": str(router.router.vendor),
-        }
-        for router in all_routers
-        if router.router.router_role == RouterRole.PE
-    }
diff --git a/gso/settings.py b/gso/settings.py
index 21f517bc5323003558bb0ef30c042b25b61386ca..c105fffc8fbb23933a12a572f208d0bd07cd5b9e 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -11,11 +11,13 @@ import os
 from pathlib import Path
 from typing import Annotated
 
-from pydantic import Field
-from pydantic_forms.types import UUIDstr
+from pydantic import EmailStr, Field
+from pydantic_forms.types import UUIDstr, strEnum
 from pydantic_settings import BaseSettings
 from typing_extensions import Doc
 
+from gso.utils.shared_enums import PortNumber
+
 logger = logging.getLogger(__name__)
 
 
@@ -150,13 +152,15 @@ class NetBoxParams(BaseSettings):
 class EmailParams(BaseSettings):
     """Parameters for the email service."""
 
-    # TODO: Use more strict types after we've migrated to Pydantic 2.x
-    from_address: str
+    from_address: EmailStr
     smtp_host: str
-    smtp_port: int
+    smtp_port: PortNumber
     starttls_enabled: bool
     smtp_username: str | None = None
     smtp_password: str | None = None
+    #: List of email addresses that should receive notifications when validation of a subscription fails.
+    #: Can be a comma-separated list of multiple addresses.
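+    #: e.g. (illustrative) ``"alice@example.org,bob@example.org"``.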
+    notification_email_destinations: str
 
 
 class SharepointParams(BaseSettings):
@@ -171,6 +175,39 @@ class SharepointParams(BaseSettings):
     scopes: list[str]
 
 
+class KentikParams(BaseSettings):
+    """Settings for accessing Kentik's API."""
+
+    api_base: str
+    user_email: str
+    api_key: str
+    device_type: str
+    minimize_snmp: bool
+    placeholder_license_key: str
+    sample_rate: int
+    bgp_type: str
+    bgp_lookup_strategy: str
+    ASN: int
+    snmp_community: str
+    md5_password: str
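+
+    # The corresponding OSS-params section might look like this (all values illustrative):
+    #   "KENTIK": {"api_base": "https://api.kentik.example/api/", "user_email": "gap@geant.example",
+    #              "api_key": "...", "device_type": "router", "minimize_snmp": false, ...}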
+
+
+class EnvironmentEnum(strEnum):
+    """The different environments in which the GSO system can run."""
+
+    DEVELOPMENT = "development"
+    TEST = "test"
+    UAT = "uat"
+    PRODUCTION = "production"
+
+
+class SentryParams(BaseSettings):
+    """Settings for Sentry."""
+
+    DSN: str
+    environment: EnvironmentEnum
+
+
 class OSSParams(BaseSettings):
     """The set of parameters required for running :term:`GSO`."""
 
@@ -183,6 +220,8 @@ class OSSParams(BaseSettings):
     THIRD_PARTY_API_KEYS: dict[str, str]
     EMAIL: EmailParams
     SHAREPOINT: SharepointParams
+    KENTIK: KentikParams
+    SENTRY: SentryParams | None = None
 
 
 def load_oss_params() -> OSSParams:
diff --git a/gso/translations/en-GB.json b/gso/translations/en-GB.json
index 81af4e6b786d2b92c132ce9aaf894a65b5e58de9..c74df450a1c5b332b7dbc02cb96742dbb92d5901 100644
--- a/gso/translations/en-GB.json
+++ b/gso/translations/en-GB.json
@@ -67,6 +67,10 @@
         "import_opengear": "NOT FOR HUMANS -- Finalize import into an OpenGear",
         "validate_iptrunk": "Validate IP Trunk configuration",
         "validate_router": "Validate router configuration",
-        "task_validate_geant_products": "Validation task for GEANT products"
+        "task_validate_geant_products": "Validation task for GEANT products",
+        "task_send_email_notifications": "Send email notifications for failed tasks",
+        "task_create_partners": "Create partner task",
+        "task_modify_partners": "Modify partner task",
+        "task_delete_partners": "Delete partner task"
     }
 }
diff --git a/gso/utils/device_info.py b/gso/utils/device_info.py
index 03b911f7cacea8386cad8a578d54f12a156f0179..cfc81d06562775bf7709a6d7fbc68e4f884618c4 100644
--- a/gso/utils/device_info.py
+++ b/gso/utils/device_info.py
@@ -26,7 +26,7 @@ class TierInfo:
             total_10g_interfaces=80,
         )
         self.tier2 = ModuleInfo(
-            device_type="7750-SR7s",
+            device_type="7750 SR-7s",
             module_bays_slots=[1, 2],
             module_type="XMA2-s-36p-400g",
             breakout_interfaces_per_slot=[36, 35, 34, 33],
diff --git a/gso/utils/helpers.py b/gso/utils/helpers.py
index edd01009e7eb139a0b8481a875fb3ad9879110bc..fdf851e8495c697e95ad45747d07269f29e7a2db 100644
--- a/gso/utils/helpers.py
+++ b/gso/utils/helpers.py
@@ -13,10 +13,11 @@ from pydantic_forms.validators import Choice
 
 from gso import settings
 from gso.products.product_blocks.iptrunk import IptrunkInterfaceBlock
+from gso.products.product_blocks.router import RouterRole
 from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate, SiteTier
 from gso.products.product_types.router import Router
 from gso.services.netbox_client import NetboxClient
-from gso.services.subscriptions import get_active_subscriptions_by_field_and_value
+from gso.services.subscriptions import get_active_router_subscriptions, get_active_subscriptions_by_field_and_value
 from gso.utils.shared_enums import IPv4AddressType, Vendor
 
 
@@ -300,3 +301,34 @@ def generate_fqdn(hostname: str, site_name: str, country_code: str) -> str:
     """Generate an :term:`FQDN` from a hostname, site name, and a country code."""
     oss = settings.load_oss_params()
     return f"{hostname}.{site_name.lower()}.{country_code.lower()}{oss.IPAM.LO.domain_name}"
+
+
+def generate_inventory_for_active_routers(
+    router_role: RouterRole,
+    exclude_routers: list[str] | None = None,
+) -> dict:
+    """Generate an Ansible-compatible inventory for executing playbooks.
+
+    Contains all active routers of a specific role. Optionally, routers can be excluded from the inventory.
+
+    :param RouterRole router_role: The role of the routers to include in the inventory.
+    :param list exclude_routers: List of router FQDNs to exclude from the inventory.
+    :return: A dictionary representing the inventory of active routers.
+    :rtype: dict[str, Any]
+    """
+    all_routers = [Router.from_subscription(r["subscription_id"]) for r in get_active_router_subscriptions()]
+    exclude_routers = exclude_routers or []
+
+    return {
+        "all": {
+            "hosts": {
+                router.router.router_fqdn: {
+                    "lo4": str(router.router.router_lo_ipv4_address),
+                    "lo6": str(router.router.router_lo_ipv6_address),
+                    "vendor": str(router.router.vendor),
+                }
+                for router in all_routers
+                if router.router.router_role == router_role and router.router.router_fqdn not in exclude_routers
+            }
+        }
+    }
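+
+
+# The returned structure is a plain Ansible inventory dict, for example (illustrative values):
+#
+#     {
+#         "all": {
+#             "hosts": {
+#                 "rt1.ams.nl.geant.net": {"lo4": "192.0.2.1", "lo6": "2001:db8::1", "vendor": "nokia"}
+#             }
+#         }
+#     }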
diff --git a/gso/utils/workflow_steps.py b/gso/utils/workflow_steps.py
index 176b83258459d86dc51dbe92ba447c68892fca49..93891456bd96a02fb52f40b7b250e40825983047 100644
--- a/gso/utils/workflow_steps.py
+++ b/gso/utils/workflow_steps.py
@@ -13,7 +13,7 @@ from pydantic_forms.types import FormGenerator
 from pydantic_forms.validators import Label
 
 from gso.products.product_types.iptrunk import Iptrunk
-from gso.services import lso_client, subscriptions
+from gso.services import lso_client
 from gso.settings import load_oss_params
 
 
@@ -121,42 +121,3 @@ def prompt_sharepoint_checklist_url(checklist_url: str) -> FormGenerator:
     yield SharepointPrompt
 
     return {}
-
-
-@step("[DRY RUN] Add all PE routers to P router iBGP group")
-def add_all_pe_to_p_dry(subscription: dict[str, Any], callback_route: str) -> None:
-    """Perform a dry run of adding the list of all PE routers to the new P router."""
-    extra_vars = {
-        "dry_run": True,
-        "subscription": subscription,
-        "pe_router_list": subscriptions.get_active_pe_router_dict(),
-        "verb": "add_pe_to_p",
-    }
-
-    lso_client.execute_playbook(
-        playbook_name="update_ibgp_mesh.yaml",
-        callback_route=callback_route,
-        inventory=subscription["router"]["router_fqdn"],
-        extra_vars=extra_vars,
-    )
-
-
-@step("[FOR REAL] Add all PE routers to P router iBGP group")
-def add_all_pe_to_p_real(
-    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
-) -> None:
-    """Add the list of all PE routers to the new P router."""
-    extra_vars = {
-        "dry_run": False,
-        "subscription": subscription,
-        "pe_router_list": subscriptions.get_active_pe_router_dict(),
-        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
-        "verb": "add_pe_to_p",
-    }
-
-    lso_client.execute_playbook(
-        playbook_name="update_ibgp_mesh.yaml",
-        callback_route=callback_route,
-        inventory=subscription["router"]["router_fqdn"],
-        extra_vars=extra_vars,
-    )
diff --git a/gso/worker.py b/gso/worker.py
index b2abfe6f5a52192454d3d691ba1715df313fc6ac..807c1edb9f41d15e1e099b03e9c0a2ae4845839e 100644
--- a/gso/worker.py
+++ b/gso/worker.py
@@ -24,6 +24,7 @@ celery = OrchestratorCelery(
         "gso.schedules.task_vacuum",
         "gso.schedules.validate_products",
         "gso.schedules.validate_subscriptions",
+        "gso.schedules.send_email_notifications",
     ],
 )
 
diff --git a/gso/workflows/__init__.py b/gso/workflows/__init__.py
index b6072c1c6bae54d2d6af93abdba533655902957c..56561838557882cac784faed6745c1d80661a0cd 100644
--- a/gso/workflows/__init__.py
+++ b/gso/workflows/__init__.py
@@ -67,4 +67,8 @@ LazyWorkflowInstance("gso.workflows.opengear.create_imported_opengear", "create_
 LazyWorkflowInstance("gso.workflows.opengear.import_opengear", "import_opengear")
 
 #  Tasks
+LazyWorkflowInstance("gso.workflows.tasks.send_email_notifications", "task_send_email_notifications")
 LazyWorkflowInstance("gso.workflows.tasks.validate_geant_products", "task_validate_geant_products")
+LazyWorkflowInstance("gso.workflows.tasks.create_partners", "task_create_partners")
+LazyWorkflowInstance("gso.workflows.tasks.modify_partners", "task_modify_partners")
+LazyWorkflowInstance("gso.workflows.tasks.delete_partners", "task_delete_partners")
diff --git a/gso/workflows/iptrunk/validate_iptrunk.py b/gso/workflows/iptrunk/validate_iptrunk.py
index 8a9e964bcc6d97b47e946ecd6ca29bd063422f6d..5b2e3f50cf4615962096af32f4838579f418a4cc 100644
--- a/gso/workflows/iptrunk/validate_iptrunk.py
+++ b/gso/workflows/iptrunk/validate_iptrunk.py
@@ -49,7 +49,7 @@ def verify_ipam_records(subscription: Iptrunk) -> None:
         )
 
     for index, side in enumerate(subscription.iptrunk.iptrunk_sides):
-        lag_fqdn = f"{side.iptrunk_side_ae_iface}.{side.iptrunk_side_node.router_fqdn}"
+        lag_fqdn = f"{side.iptrunk_side_ae_iface}-0.{side.iptrunk_side_node.router_fqdn}"
         side_v4 = subscription.iptrunk.iptrunk_ipv4_network[index]
         side_v6 = subscription.iptrunk.iptrunk_ipv6_network[index + 1]
         #  Validate IPv4 address allocation
diff --git a/gso/workflows/router/terminate_router.py b/gso/workflows/router/terminate_router.py
index b9e521da831dbcc091286c21cb2d67862b963074..688e3ecc21543ddc5ca678296b763fe7c6194bb5 100644
--- a/gso/workflows/router/terminate_router.py
+++ b/gso/workflows/router/terminate_router.py
@@ -18,10 +18,13 @@ from orchestrator.workflows.steps import (
 )
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
 
+from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
-from gso.services import infoblox
+from gso.services import infoblox, lso_client
+from gso.services.librenms_client import LibreNMSClient
 from gso.services.lso_client import execute_playbook, lso_interaction
 from gso.services.netbox_client import NetboxClient
+from gso.utils.helpers import generate_inventory_for_active_routers
 from gso.utils.shared_enums import Vendor
 
 logger = logging.getLogger(__name__)
@@ -42,9 +45,14 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
         tt_number: str
         termination_label: Label = "Please confirm whether configuration should get removed from the router."
         remove_configuration: bool = True
+        update_ibgp_mesh_label: Label = "Please confirm whether the iBGP mesh should get updated."
+        update_ibgp_mesh: bool = True
 
     user_input = yield TerminateForm
-    return user_input.model_dump() | {"router_is_nokia": router.router.vendor == Vendor.NOKIA}
+    return user_input.model_dump() | {
+        "router_is_nokia": router.router.vendor == Vendor.NOKIA,
+        "router_role": router.router.router_role,
+    }
 
 
 @step("Deprovision loopback IPs from IPAM")
@@ -90,7 +98,7 @@ def remove_config_from_router_real(
     }
 
     execute_playbook(
-        playbook_name="base_config.yaml",
+        playbook_name="base_config.yaml",  # FIX: need to use correct playbook.
         callback_route=callback_route,
         inventory=subscription.router.router_fqdn,
         extra_vars=extra_vars,
@@ -104,6 +112,131 @@ def remove_device_from_netbox(subscription: Router) -> dict[str, Router]:
     return {"subscription": subscription}
 
 
+@step("[DRY RUN] Remove P router from all the PE routers")
+def remove_p_from_all_pe_dry(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run of removing the terminated router from all the PE routers."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from all the PE routers",
+        "verb": "remove_p_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[REAL RUN] Remove P router from all the PE routers")
+def remove_p_from_all_pe_real(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a real run of removing the terminated router from all the PE routers."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from all the PE routers",
+        "verb": "remove_p_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Remove PE router from all the PE routers")
+def remove_pe_from_all_pe_dry(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run of removing the terminated PE router from the PE router mesh."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from all the PE routers",
+        "verb": "remove_pe_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(
+            RouterRole.PE, exclude_routers=[subscription.router.router_fqdn]
+        ),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[REAL RUN] Remove all PE routers from all the PE routers")
+def remove_pe_from_all_pe_real(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a real run of removing terminated PE router from PE the router mesh."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from iBGP mesh",
+        "verb": "remove_pe_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(
+            RouterRole.PE, exclude_routers=[subscription.router.router_fqdn]
+        ),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Remove PE router from all the P routers")
+def remove_pe_from_all_p_dry(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run of removing PE router from all P routers."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from all the P routers",
+        "verb": "remove_pe_from_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.P),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[REAL RUN] Remove PE router from all P routers")
+def remove_pe_from_all_p_real(subscription: Router, callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a real run of removing PE router from all P routers."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": json.loads(json_dumps(subscription)),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove {subscription.router.router_fqdn} from all the P routers",
+        "verb": "remove_pe_from_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.P),
+        extra_vars=extra_vars,
+    )
+
+
+@step("Remove Device from Librenms")
+def remove_device_from_librenms(subscription: Router) -> dict[str, Router]:
+    """Remove the device from LibreNMS."""
+    LibreNMSClient().remove_device(subscription.router.router_fqdn)
+    return {"subscription": subscription}
+
+
 @workflow(
     "Terminate router",
     initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
@@ -118,16 +251,26 @@ def terminate_router() -> StepList:
     * Mark the subscription as terminated in the service database
     """
     run_config_steps = conditional(lambda state: state["remove_configuration"])
+    update_ibgp_mesh = conditional(lambda state: state["update_ibgp_mesh"])
     router_is_nokia = conditional(lambda state: state["router_is_nokia"])
+    router_is_pe = conditional(lambda state: state["router_role"] == RouterRole.PE)
+    router_is_p = conditional(lambda state: state["router_role"] == RouterRole.P)
 
     return (
         begin
         >> store_process_subscription(Target.TERMINATE)
         >> unsync
+        >> update_ibgp_mesh(router_is_p(lso_interaction(remove_p_from_all_pe_dry)))
+        >> update_ibgp_mesh(router_is_p(lso_interaction(remove_p_from_all_pe_real)))
+        >> update_ibgp_mesh(router_is_pe(lso_interaction(remove_pe_from_all_pe_dry)))
+        >> update_ibgp_mesh(router_is_pe(lso_interaction(remove_pe_from_all_pe_real)))
+        >> update_ibgp_mesh(router_is_pe(lso_interaction(remove_pe_from_all_p_dry)))
+        >> update_ibgp_mesh(router_is_pe(lso_interaction(remove_pe_from_all_p_real)))
         >> deprovision_loopback_ips
         >> run_config_steps(lso_interaction(remove_config_from_router_dry))
         >> run_config_steps(lso_interaction(remove_config_from_router_real))
         >> router_is_nokia(remove_device_from_netbox)
+        >> remove_device_from_librenms
         >> set_status(SubscriptionLifecycle.TERMINATED)
         >> resync
         >> done
diff --git a/gso/workflows/router/update_ibgp_mesh.py b/gso/workflows/router/update_ibgp_mesh.py
index bf0d0416d9f4ae39fa4b18e2640e8be04444866f..58d207029c9431d7252ddf2b5c87e171282944ba 100644
--- a/gso/workflows/router/update_ibgp_mesh.py
+++ b/gso/workflows/router/update_ibgp_mesh.py
@@ -12,12 +12,12 @@ from orchestrator.workflows.steps import resync, store_process_subscription, uns
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
 from pydantic import ConfigDict, model_validator
 
+from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
-from gso.services import librenms_client, lso_client, subscriptions
+from gso.services import librenms_client, lso_client
 from gso.services.lso_client import lso_interaction
 from gso.services.subscriptions import get_trunks_that_terminate_on_router
-from gso.utils.helpers import SNMPVersion
-from gso.utils.workflow_steps import add_all_pe_to_p_dry, add_all_pe_to_p_real
+from gso.utils.helpers import SNMPVersion, generate_inventory_for_active_routers
 
 
 def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
@@ -62,7 +62,7 @@ def add_p_to_mesh_dry(subscription: dict[str, Any], callback_route: str, tt_numb
     lso_client.execute_playbook(
         playbook_name="update_ibgp_mesh.yaml",
         callback_route=callback_route,
-        inventory={"all": {"hosts": subscriptions.get_active_pe_router_dict()}},
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
         extra_vars=extra_vars,
     )
 
@@ -80,7 +80,46 @@ def add_p_to_mesh_real(subscription: dict[str, Any], callback_route: str, tt_num
     lso_client.execute_playbook(
         playbook_name="update_ibgp_mesh.yaml",
         callback_route=callback_route,
-        inventory={"all": {"hosts": subscriptions.get_active_pe_router_dict()}},
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Add all PE routers to P router iBGP group")
+def add_all_pe_to_p_dry(subscription: dict[str, Any], callback_route: str) -> None:
+    """Perform a dry run of adding the list of all PE routers to the new P router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE),
+        "verb": "add_pe_to_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add all PE routers to P router iBGP group")
+def add_all_pe_to_p_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Add the list of all PE routers to the new P router."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE),
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
+        "verb": "add_pe_to_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
         extra_vars=extra_vars,
     )
 
diff --git a/gso/workflows/router/validate_router.py b/gso/workflows/router/validate_router.py
index f5f078db007654e4a7a2b0a22984252fb85ba61b..87d4870061ebf51e0c64900b135213f22602667b 100644
--- a/gso/workflows/router/validate_router.py
+++ b/gso/workflows/router/validate_router.py
@@ -11,11 +11,13 @@ from orchestrator.workflows.steps import resync, store_process_subscription, uns
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
 from pydantic_forms.types import State, UUIDstr
 
+from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
-from gso.services import infoblox, lso_client, subscriptions
+from gso.services import infoblox, lso_client
 from gso.services.librenms_client import LibreNMSClient
 from gso.services.lso_client import anonymous_lso_interaction, execute_playbook
 from gso.services.netbox_client import NetboxClient
+from gso.utils.helpers import generate_inventory_for_active_routers
 from gso.utils.shared_enums import Vendor
 
 
@@ -57,7 +59,7 @@ def verify_p_ibgp(subscription: dict[str, Any], callback_route: str) -> None:
     extra_vars = {
         "dry_run": True,
         "subscription": subscription,
-        "pe_router_list": subscriptions.get_active_pe_router_dict(),
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE)["all"]["hosts"],
         "verb": "verify_p_ibgp",
         "is_verification_workflow": "true",
     }
diff --git a/gso/workflows/tasks/create_partners.py b/gso/workflows/tasks/create_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..b04c5c68d4f2ba7ddc350c87d047313af111e23c
--- /dev/null
+++ b/gso/workflows/tasks/create_partners.py
@@ -0,0 +1,66 @@
+"""A creation workflow that create a partner."""
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State
+from orchestrator.workflow import StepList, begin, done, step, workflow
+from pydantic import ConfigDict, EmailStr, field_validator
+
+from gso.services.partners import PartnerSchema, create_partner, filter_partners_by_email, filter_partners_by_name
+
+
+def initial_input_form_generator() -> FormGenerator:
+    """Gather input from the user needed for creating a partner."""
+
+    class CreatePartnerForm(FormPage):
+        model_config = ConfigDict(title="Create a Partner")
+
+        name: str
+        email: EmailStr
+
+        @field_validator("name")
+        def validate_name(cls, name: str) -> str:
+            if filter_partners_by_name(name=name, case_sensitive=False):
+                msg = "Partner with this name already exists."
+                raise ValueError(msg)
+
+            return name
+
+        @field_validator("email")
+        def validate_email(cls, email: str) -> EmailStr:
+            email = email.lower()
+            if filter_partners_by_email(email=email, case_sensitive=False):
+                msg = "Partner with this email already exists."
+                raise ValueError(msg)
+
+            return email
+
+    initial_user_input = yield CreatePartnerForm
+
+    return initial_user_input.model_dump()
+
+
+@step("Save partner information to database")
+def save_partner_to_database(
+    name: str,
+    email: EmailStr,
+) -> State:
+    """Save user input as a new partner in database."""
+    partner = create_partner(
+        partner_data=PartnerSchema(
+            name=name,
+            email=email,
+        )
+    )
+
+    return {"created_partner": partner}
+
+
+@workflow(
+    "Create partners",
+    initial_input_form=initial_input_form_generator,
+    target=Target.SYSTEM,
+)
+def task_create_partners() -> StepList:
+    """Create a new Partner."""
+    return begin >> save_partner_to_database >> done
diff --git a/gso/workflows/tasks/delete_partners.py b/gso/workflows/tasks/delete_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd06cc06d541163d5de3e6a6c2797ddf1f9e016d
--- /dev/null
+++ b/gso/workflows/tasks/delete_partners.py
@@ -0,0 +1,63 @@
+"""A delete workflow that remove a partner."""
+
+from enum import Enum
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State
+from orchestrator.workflow import StepList, begin, done, step, workflow
+from pydantic import ConfigDict, EmailStr, field_validator
+from pydantic_forms.types import UUIDstr
+from pydantic_forms.validators import Choice
+
+from gso.services.partners import delete_partner, get_all_partners, get_partner_by_name
+from gso.services.subscriptions import get_subscriptions
+
+
+def initial_input_form_generator() -> FormGenerator:
+    """Gather input from the user needed for deleting a partner."""
+    partners = {}
+    for partner in get_all_partners():
+        partners[partner["partner_id"]] = partner["name"]
+
+    partner_choice = Choice("Select a partner", zip(partners.values(), partners.items(), strict=True))  # type: ignore[arg-type]
+
+    class SelectPartnerForm(FormPage):
+        model_config = ConfigDict(title="Delete a Partner")
+        partners: partner_choice  # type: ignore[valid-type]
+
+        @field_validator("partners")
+        def validate_partners(cls, value: Enum) -> Enum:
+            if get_subscriptions(partner_id=str(value)):
+                msg = "This partner has associated data and cannot be removed."
+                raise ValueError(msg)
+
+            return value
+
+    initial_user_input = yield SelectPartnerForm
+
+    partner = get_partner_by_name(name=initial_user_input.partners.name)
+
+    return {"email": partner["email"], "name": partner["name"], "partner_id": partner["partner_id"]}
+
+
+@step("Delete partner information from database")
+def delete_partner_from_database(
+    partner_id: UUIDstr,
+    name: str,
+    email: EmailStr,
+) -> State:
+    """Delete a partner from database."""
+    delete_partner(partner_id=partner_id)
+
+    return {"deleted_partner": {"name": name, "email": email, "partner_id": partner_id}}
+
+
+@workflow(
+    "Delete partners",
+    initial_input_form=initial_input_form_generator,
+    target=Target.SYSTEM,
+)
+def task_delete_partners() -> StepList:
+    """Delete a Partner."""
+    return begin >> delete_partner_from_database >> done
diff --git a/gso/workflows/tasks/modify_partners.py b/gso/workflows/tasks/modify_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e82521c3ee72cbc8912b5cdde136387c1948cea
--- /dev/null
+++ b/gso/workflows/tasks/modify_partners.py
@@ -0,0 +1,90 @@
+"""A modification workflow that modifies a partner."""
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State
+from orchestrator.workflow import StepList, begin, done, step, workflow
+from pydantic import ConfigDict, EmailStr, field_validator
+from pydantic_forms.types import UUIDstr
+from pydantic_forms.validators import Choice
+
+from gso.services.partners import (
+    PartnerSchema,
+    edit_partner,
+    filter_partners_by_email,
+    filter_partners_by_name,
+    get_all_partners,
+    get_partner_by_name,
+)
+
+
+def initial_input_form_generator() -> FormGenerator:
+    """Gather input from the user needed for modifying a partner."""
+    partners = {}
+    for partner in get_all_partners():
+        partners[partner["partner_id"]] = partner["name"]
+
+    partner_choice = Choice("Select a partner", zip(partners.values(), partners.items(), strict=True))  # type: ignore[arg-type]
+
+    class SelectPartnerForm(FormPage):
+        model_config = ConfigDict(title="Choose a Partner")
+
+        partners: partner_choice  # type: ignore[valid-type]
+
+    initial_user_input = yield SelectPartnerForm
+
+    partner = get_partner_by_name(name=initial_user_input.partners.name)
+
+    class ModifyPartnerForm(FormPage):
+        model_config = ConfigDict(title="Modify a Partner")
+
+        name: str = partner["name"]
+        email: EmailStr = partner["email"]
+
+        @field_validator("name")
+        def validate_name(cls, name: str) -> str:
+            if partner["name"] != name and filter_partners_by_name(name=name, case_sensitive=False):
+                msg = "Partner with this name already exists."
+                raise ValueError(msg)
+
+            return name
+
+        @field_validator("email")
+        def validate_email(cls, email: str) -> EmailStr:
+            if partner["email"] != email and filter_partners_by_email(email=email, case_sensitive=False):
+                msg = "Partner with this email already exists."
+                raise ValueError(msg)
+
+            return email
+
+    user_input = yield ModifyPartnerForm
+
+    return user_input.model_dump() | {"partner_id": partner["partner_id"]}
+
+
+@step("Save partner information to database")
+def save_partner_to_database(
+    partner_id: UUIDstr,
+    name: str,
+    email: EmailStr,
+) -> State:
+    """Save modified partner in database."""
+    partner = edit_partner(
+        partner_data=PartnerSchema(
+            partner_id=partner_id,
+            name=name,
+            email=email,
+        )
+    )
+
+    return {"modified_partner": partner}
+
+
+@workflow(
+    "Modify partners",
+    initial_input_form=initial_input_form_generator,
+    target=Target.SYSTEM,
+)
+def task_modify_partners() -> StepList:
+    """Modify a Partner."""
+    return begin >> save_partner_to_database >> done
diff --git a/gso/workflows/tasks/send_email_notifications.py b/gso/workflows/tasks/send_email_notifications.py
new file mode 100644
index 0000000000000000000000000000000000000000..e30916cc69ac75ca3114fd46ae94fb6ccf275f1f
--- /dev/null
+++ b/gso/workflows/tasks/send_email_notifications.py
@@ -0,0 +1,52 @@
+"""Send email notifications for all tasks that have failed."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State
+from orchestrator.workflow import StepList, conditional, done, init, step, workflow
+
+from gso.services.mailer import send_mail
+from gso.services.subscriptions import get_failed_tasks, get_subscription_by_process_id
+from gso.settings import load_oss_params
+
+
+@step("Gather all tasks that recently failed")
+def gather_failed_tasks() -> State:
+    """Gather all tasks that have failed."""
+    return {"failed_tasks": get_failed_tasks()}
+
+
+@step("Send notification emails for all failed tasks")
+def send_email_notifications(state: State) -> None:
+    """Send out an email notification for all tasks that have failed."""
+    base_url = load_oss_params().GENERAL.public_hostname
+    all_alerts = ""
+    for failure in state["failed_tasks"]:
+        failed_task_url = f"{base_url}/workflows/{failure["process_id"]}"
+        failed_subscription = get_subscription_by_process_id(failure["process_id"])
+        all_alerts = f"{all_alerts}------\n\n"
+        if failed_subscription:
+            all_alerts = (
+                f"{all_alerts}Product name: {failed_subscription.product.name}\n"
+                f"Description: {failed_subscription.description}\n"
+            )
+        all_alerts = (
+            f'{all_alerts}The step "{failure["last_step"]}" failed for the following reason: '
+            f'"{failure["failed_reason"]}".\n\nPlease inspect the full workflow at the following link: '
+            f'{failed_task_url}.\n\n'
+        )
+
+    send_mail(
+        "GAP - One or more tasks have failed!",
+        (
+            f"Please check the following tasks in GAP which have failed.\n\n{all_alerts}------"
+            f"\n\nRegards, the GÉANT Automation Platform.\n\n"
+        ),
+    )
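+
+# The assembled body contains one "------"-separated block per failed task, e.g. (illustrative):
+#
+#     ------
+#     Product name: IP trunk
+#     Description: IP trunk ams.nl - lon.uk
+#     The step "Deploy twin trunk" failed for the following reason: "...".
+#
+#     Please inspect the full workflow at the following link: https://gap.example.org/workflows/<process_id>.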
+
+
+@workflow("Send email notifications for all failed tasks", target=Target.SYSTEM)
+def task_send_email_notifications() -> StepList:
+    """Gather all failed tasks, and send an email notification if needed."""
+    tasks_have_failed = conditional(lambda state: len(state["failed_tasks"]) > 0)
+
+    return init >> gather_failed_tasks >> tasks_have_failed(send_email_notifications) >> done
diff --git a/requirements.txt b/requirements.txt
index 3463f0c52bfebe389377947e0ae1d39065e0fdc0..8878a45c8d77880d89dfacc73fa89a5cc88cdf5f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-orchestrator-core==2.2.1
+orchestrator-core==2.6.1
 requests==2.31.0
 infoblox-client~=0.6.0
 pycountry==23.12.11
diff --git a/setup.py b/setup.py
index b3ed00ea328cdf5ec4b5f03e304bfb7e694b81a5..a70761c82a2232f6bd63b2cb01f9deb5e1f0a184 100644
--- a/setup.py
+++ b/setup.py
@@ -4,14 +4,14 @@ from setuptools import find_packages, setup
 
 setup(
     name="geant-service-orchestrator",
-    version="2.7",
+    version="2.8",
     author="GÉANT Orchestration and Automation Team",
     author_email="goat@geant.org",
     description="GÉANT Service Orchestrator",
     url="https://gitlab.software.geant.org/goat/gap/geant-service-orchestrator",
     packages=find_packages(),
     install_requires=[
-        "orchestrator-core==2.2.1",
+        "orchestrator-core==2.6.1",
         "requests==2.31.0",
         "infoblox-client~=0.6.0",
         "pycountry==23.12.11",
diff --git a/test/__init__.py b/test/__init__.py
index 433e89b0fcb66b0730237cc78cded4048140751c..d9001c9314bacef63647c108731562e2f4194f99 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -1,3 +1,4 @@
+import os
 from uuid import uuid4
 
 LSO_RESULT_SUCCESS = {
@@ -19,3 +20,5 @@ LSO_RESULT_FAILURE = {
 }
 
 USER_CONFIRM_EMPTY_FORM = [{}]
+
+os.environ["TESTING"] = "true"
diff --git a/test/api/test_imports.py b/test/api/test_imports.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/test/api/test_processes.py b/test/api/test_processes.py
deleted file mode 100644
index f56fe52640d587928531f5171712f98ca57f8f1e..0000000000000000000000000000000000000000
--- a/test/api/test_processes.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from uuid import uuid4
-
-import pytest
-from orchestrator.db import (
-    ProcessStepTable,
-    ProcessSubscriptionTable,
-    ProcessTable,
-    db,
-)
-from orchestrator.workflow import ProcessStatus
-
-
-@pytest.fixture()
-def create_process(test_workflow, nokia_router_subscription_factory):
-    process_id = uuid4()
-    process = ProcessTable(
-        process_id=process_id, workflow_id=test_workflow.workflow_id, last_status=ProcessStatus.SUSPENDED
-    )
-    subscription = nokia_router_subscription_factory()
-    process_subscription = ProcessSubscriptionTable(process_id=process_id, subscription_id=subscription)
-
-    db.session.add(process)
-    db.session.add(process_subscription)
-    db.session.commit()
-
-    return process_id
-
-
-def test_callback_results_endpoint(test_client, create_process, faker):
-    expected_result = {"id": 1, "output": faker.sentence()}
-
-    step = ProcessStepTable(
-        process_id=create_process,
-        name="Modify",
-        status="suspend",
-        state={"subscription_id": uuid4(), "callback_result": expected_result},
-    )
-    db.session.add(step)
-    db.session.commit()
-
-    response = test_client.get(f"/api/v1/processes/steps/{step.step_id}/callback-results")
-
-    assert response.status_code == 200
-    assert response.json() == {"callback_results": expected_result}
-
-
-def test_callback_results_endpoint_with_wrong_step_id(test_client):
-    response = test_client.get(f"/api/v1/processes/steps/{uuid4()}/callback-results")
-
-    assert response.status_code == 404
-    assert response.json() == {"detail": "Callback result not found."}
diff --git a/test/auth/test_oidc_policy_helper.py b/test/auth/test_oidc.py
similarity index 61%
rename from test/auth/test_oidc_policy_helper.py
rename to test/auth/test_oidc.py
index 17bed723430d913ef88e1f95f09b7a6b2b6e088d..350d52779d573bc5530bef1d3489a35ec62578f2 100644
--- a/test/auth/test_oidc_policy_helper.py
+++ b/test/auth/test_oidc.py
@@ -1,21 +1,18 @@
 from http import HTTPStatus
-from unittest.mock import AsyncMock, MagicMock, Mock, patch
+from unittest.mock import AsyncMock, Mock
 
 import pytest
 from fastapi import HTTPException, Request
 from httpx import AsyncClient, NetworkError, Response
+from oauth2_lib.fastapi import OIDCConfig
+from oauth2_lib.settings import oauth2lib_settings
 
-from gso.auth.oidc_policy_helper import (
-    OIDCConfig,
-    OIDCUser,
+from gso.auth.oidc import (
+    OIDCAuthentication,
     OIDCUserModel,
-    OPAResult,
-    _evaluate_decision,
-    _get_decision,
     _is_callback_step_endpoint,
-    opa_decision,
 )
-from gso.auth.settings import oauth2lib_settings
+from gso.auth.opa import _get_decision
 
 
 @pytest.fixture(scope="module", autouse=True)
@@ -52,10 +49,12 @@ def mock_openid_config():
 
 @pytest.fixture()
 def oidc_user(mock_openid_config):
-    user = OIDCUser(
+    user = OIDCAuthentication(
         openid_url="https://example.proxy.aai.geant.org",
         resource_server_id="resource_server",
         resource_server_secret="secret",  # noqa: S106
+        openid_config_url="https://example.proxy.aai.geant.org/.well-known/openid-configuration",
+        oidc_user_model_cls=OIDCUserModel,
     )
     user.openid_config = OIDCConfig.model_validate(mock_openid_config)
     return user
@@ -76,7 +75,7 @@ def mock_request():
 @pytest.fixture()
 def mock_oidc_user():
     oidc_user = AsyncMock(
-        OIDCUser,
+        OIDCAuthentication,
         openid_url="https://example.com",
         resource_server_id="test",
         resource_server_secret="secret",  # noqa: S106
@@ -133,8 +132,25 @@ async def test_introspect_token_unauthorized(oidc_user, mock_async_client):
 
 @pytest.mark.asyncio()
 async def test_userinfo_success(oidc_user, mock_async_client):
-    mock_response = {"sub": "1234", "name": "John Doe", "email": "johndoe@example.com"}
-    mock_async_client.post = AsyncMock(return_value=Response(200, json=mock_response))
+    mock_response_introspect_token = {
+        "active": True,
+        "scope": "openid profile email aarc",
+        "client_id": "APP-775F0BD8-B1D7-4936-BE2C-A300A6509F00",
+        "exp": 1721395275,
+        "iat": 1721391675,
+        "sub": "ed145263-b652-3d4x-8f96-4abae9c98124@aai.geant.org",
+        "iss": "https://proxy.aai.geant.org",
+        "token_type": "Bearer",
+        "aud": ["APP-775F0BD8-B1D7-4936-BE2C-A300A6509F00"],
+    }
+    mock_response_userinfo = {"sub": "1234", "name": "John Doe", "email": "johndoe@example.com"}
+
+    mock_async_client.post = AsyncMock(
+        side_effect=[
+            Response(200, json=mock_response_introspect_token),
+            Response(200, json=mock_response_userinfo),
+        ]
+    )
 
     response = await oidc_user.userinfo(mock_async_client, "test_token")
 
@@ -145,21 +161,25 @@ async def test_userinfo_success(oidc_user, mock_async_client):
 
 
 @pytest.mark.asyncio()
-async def test_opa_decision_success(mock_request, mock_async_client):
-    mock_user_info = OIDCUserModel({"sub": "123", "name": "John Doe", "email": "johndoe@example.com"})
-
-    mock_oidc_user = AsyncMock(spec=OIDCUser)
-    mock_oidc_user.return_value = AsyncMock(return_value=mock_user_info)
+async def test_userinfo_success_client_credential(oidc_user, mock_async_client):
+    mock_response_introspect_token = {
+        "active": True,
+        "scope": "openid profile email eduperson_assurance eduperson_entitlement entitlements",
+        "client_id": "APP-48A7986C-8776-49D9-AB40-52EFBD432AB1",
+        "exp": 1721395701,
+        "iat": 1721392101,
+        "iss": "https://proxy.aai.geant.org",
+        "token_type": "Bearer",
+    }
 
-    with patch(
-        "gso.auth.oidc_policy_helper._get_decision",
-        return_value=AsyncMock(return_value=OPAResult(result=True, decision_id="1234")),
-    ):
-        decision_function = opa_decision("http://mock-opa-url", oidc_security=mock_oidc_user)
+    mock_async_client.post = AsyncMock(
+        return_value=Response(200, json=mock_response_introspect_token),
+    )
 
-        result = await decision_function(mock_request, mock_user_info, mock_async_client)
+    response = await oidc_user.userinfo(mock_async_client, "test_token")
 
-        assert result is True
+    assert isinstance(response, OIDCUserModel)
+    assert response == {"client_id": "APP-48A7986C-8776-49D9-AB40-52EFBD432AB1"}
 
 
 @pytest.mark.asyncio()
@@ -191,7 +211,7 @@ async def test_get_decision_success(mock_async_client):
 
     opa_url = "http://mock-opa-url"
     opa_input = {"some_input": "value"}
-    decision = await _get_decision(mock_async_client, opa_url, opa_input)
+    decision = await _get_decision(opa_url, mock_async_client, opa_input)
 
     assert decision.result is True
     assert decision.decision_id == "123"
@@ -205,102 +225,23 @@ async def test_get_decision_network_error(mock_async_client):
     opa_input = {"some_input": "value"}
 
     with pytest.raises(HTTPException) as exc_info:
-        await _get_decision(mock_async_client, opa_url, opa_input)
+        await _get_decision(opa_url, mock_async_client, opa_input)
 
     assert exc_info.value.status_code == HTTPStatus.SERVICE_UNAVAILABLE
     assert exc_info.value.detail == "Policy agent is unavailable"
 
 
-def test_evaluate_decision_allow():
-    decision = OPAResult(result=True, decision_id="123")
-    result = _evaluate_decision(decision, auto_error=True)
-
-    assert result is True
-
-
-def test_evaluate_decision_deny_without_auto_error():
-    decision = OPAResult(result=False, decision_id="123")
-    result = _evaluate_decision(decision, auto_error=False)
-
-    assert result is False
-
-
-def test_evaluate_decision_deny_with_auto_error():
-    decision = OPAResult(result=False, decision_id="123")
-
-    with pytest.raises(HTTPException) as exc_info:
-        _evaluate_decision(decision, auto_error=True)
-
-    assert exc_info.value.status_code == HTTPStatus.FORBIDDEN
-    assert "Decision was taken with id: 123" in str(exc_info.value.detail)
-
-
-@pytest.mark.asyncio()
-async def test_oidc_user_call_with_token(oidc_user, mock_request, mock_async_client):
-    oidc_user.introspect_token = AsyncMock(return_value={"active": True, "sub": "123", "client_id": "test_client"})
-    oidc_user.userinfo = AsyncMock(return_value=OIDCUserModel({"sub": "123", "name": "John Doe"}))
-
-    result = await oidc_user.__call__(mock_request, token="test_token")  # noqa: S106
-
-    assert isinstance(result, OIDCUserModel)
-    assert result["sub"] == "123"
-    assert result["name"] == "John Doe"
-    assert result["client_id"] == "test_client"
-
-
-@pytest.mark.asyncio()
-async def test_oidc_user_call_with_client_credential_token(oidc_user, mock_request, mock_async_client):
-    oidc_user.introspect_token = AsyncMock(return_value={"active": True})
-    oidc_user.userinfo = AsyncMock(return_value=OIDCUserModel({"sub": "123", "name": "John Doe"}))
-
-    result = await oidc_user.__call__(mock_request, token="test_token")  # noqa: S106
-
-    assert isinstance(result, OIDCUserModel)
-    assert result["client_id"] is None
-    oidc_user.userinfo.assert_not_called()
-
-
 @pytest.mark.asyncio()
-async def test_oidc_user_call_inactive_token(oidc_user, mock_request, mock_async_client):
-    oidc_user.introspect_token = AsyncMock(return_value={"active": False, "sub": "123"})
+async def test_oidc_user_call_inactive_token(oidc_user, mock_async_client):
+    mock_async_client.post = AsyncMock(return_value=Response(200, json={"active": False, "sub": "123"}))
 
     with pytest.raises(HTTPException) as exc_info:
-        await oidc_user.__call__(mock_request, token="test_token")  # noqa: S106
+        await oidc_user.userinfo(mock_async_client, token="test_token")  # noqa: S106
 
     assert exc_info.value.status_code == HTTPStatus.UNAUTHORIZED
     assert "User is not active" in str(exc_info.value.detail)
 
 
-@pytest.mark.asyncio()
-async def test_oidc_user_call_no_token(oidc_user, mock_request):
-    with (
-        patch("fastapi.security.http.HTTPBearer.__call__", return_value=None),
-        patch("httpx.AsyncClient.post", new_callable=MagicMock) as mock_post,
-        patch("httpx.AsyncClient.get", new_callable=MagicMock) as mock_get,
-    ):
-        mock_post.return_value = MagicMock(status_code=200, json=lambda: {"active": False})
-        mock_get.return_value = MagicMock(status_code=200, json=dict)
-
-        result = await oidc_user.__call__(mock_request)  # noqa: PLC2801
-
-    assert result is None
-
-
-@pytest.mark.asyncio()
-async def test_oidc_user_call_token_from_request(oidc_user, mock_request, mock_async_client):
-    mock_request.state.credentials = Mock()
-    mock_request.state.credentials.credentials = "request_token"
-
-    oidc_user.introspect_token = AsyncMock(return_value={"active": True, "sub": "123"})
-    oidc_user.userinfo = AsyncMock(return_value=OIDCUserModel({"sub": "123", "name": "John Doe"}))
-
-    result = await oidc_user.__call__(mock_request)  # noqa: PLC2801
-
-    assert isinstance(result, OIDCUserModel)
-    assert result["sub"] == "123"
-    assert result["name"] == "John Doe"
-
-
 @pytest.mark.parametrize(
     ("path", "expected"),
     [
@@ -320,3 +261,35 @@ def test_is_callback_step_endpoint(path, expected):
         }
     )
     assert _is_callback_step_endpoint(request) is expected
+
+
+@pytest.mark.asyncio()
+async def test_userinfo_invalid_token(oidc_user, mock_async_client):
+    mock_async_client.post = AsyncMock(return_value=Response(401, json={"error": "invalid_token"}))
+
+    with pytest.raises(HTTPException) as exc_info:
+        await oidc_user.userinfo(mock_async_client, "invalid_token")
+
+    assert exc_info.value.status_code == 401
+
+
+@pytest.mark.asyncio()
+async def test_introspect_token_missing_active_key(oidc_user, mock_async_client):
+    mock_async_client.post = AsyncMock(return_value=Response(200, json={}))
+
+    with pytest.raises(HTTPException) as exc_info:
+        await oidc_user.introspect_token(mock_async_client, "token")
+
+    assert exc_info.value.status_code == 401
+    assert "Missing active key" in str(exc_info.value.detail)
+
+
+@pytest.mark.asyncio()
+async def test_introspect_token_inactive(oidc_user, mock_async_client):
+    mock_async_client.post = AsyncMock(return_value=Response(200, json={"active": False}))
+
+    with pytest.raises(HTTPException) as exc_info:
+        await oidc_user.introspect_token(mock_async_client, "token")
+
+    assert exc_info.value.status_code == 401
+    assert "User is not active" in str(exc_info.value.detail)
diff --git a/test/auth/test_opa.py b/test/auth/test_opa.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1239f1f27def116e2386758348998126fbb66ed
--- /dev/null
+++ b/test/auth/test_opa.py
@@ -0,0 +1,88 @@
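+"""Tests for gso.auth.opa: the _get_decision helper and the OPAAuthZ and GraphQLOPAAuthZ wrappers."""
+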
+from unittest.mock import AsyncMock
+
+import pytest
+from fastapi.exceptions import HTTPException
+from httpx import AsyncClient, NetworkError, Response
+from oauth2_lib.fastapi import OPAResult
+
+from gso.auth.opa import GraphQLOPAAuthZ, OPAAuthZ, _get_decision
+
+
+@pytest.fixture()
+def mock_async_client():
+    return AsyncClient(verify=False)  # noqa: S501
+
+
+@pytest.mark.asyncio()
+async def test_get_decision_success(mock_async_client):
+    opa_url = "http://opa-server/v1/data/test"
+    opa_input = {"input": {"user": "test_user", "action": "test_action"}}
+    mock_response = {"decision_id": "1234", "result": {"allow": True}}
+
+    mock_async_client.post = AsyncMock(return_value=Response(200, json=mock_response))
+
+    result = await _get_decision(opa_url, mock_async_client, opa_input)
+
+    assert isinstance(result, OPAResult)
+    assert result.decision_id == "1234"
+    assert result.result is True
+
+
+@pytest.mark.asyncio()
+async def test_get_decision_network_error(mock_async_client):
+    opa_url = "http://opa-server/v1/data/test"
+    opa_input = {"input": {"user": "test_user", "action": "test_action"}}
+
+    mock_async_client.post = AsyncMock(side_effect=NetworkError("Network error"))
+
+    with pytest.raises(HTTPException) as exc_info:
+        await _get_decision(opa_url, mock_async_client, opa_input)
+
+    assert exc_info.value.status_code == 503
+    assert exc_info.value.detail == "Policy agent is unavailable"
+
+
+@pytest.mark.asyncio()
+async def test_get_decision_invalid_response(mock_async_client):
+    opa_url = "http://opa-server/v1/data/test"
+    opa_input = {"input": {"user": "test_user", "action": "test_action"}}
+    mock_response = {"invalid_key": "value"}
+
+    mock_async_client.post = AsyncMock(return_value=Response(200, json=mock_response))
+
+    with pytest.raises(KeyError):
+        await _get_decision(opa_url, mock_async_client, opa_input)
+
+
+@pytest.mark.asyncio()
+async def test_opaauthz_get_decision(mock_async_client):
+    opa_url = "http://opa-server/v1/data/test"
+    opa_input = {"input": {"user": "test_user", "action": "test_action"}}
+    mock_response = {"decision_id": "1234", "result": {"allow": True}}
+
+    opa_instance = OPAAuthZ(opa_url=opa_url)
+    mock_async_client.post = AsyncMock(return_value=Response(200, json=mock_response))
+
+    result = await opa_instance.get_decision(mock_async_client, opa_input)
+
+    assert isinstance(result, OPAResult)
+    assert result.decision_id == "1234"
+    assert result.result is True
+
+
+@pytest.mark.asyncio()
+async def test_graphql_opaauthz_get_decision(mock_async_client):
+    opa_url = "http://opa-server/v1/data/test"
+    opa_input = {"input": {"user": "test_user", "action": "test_action"}}
+    mock_response = {"decision_id": "1234", "result": {"allow": True}}
+
+    graphql_opa_instance = GraphQLOPAAuthZ(opa_url=opa_url)
+    mock_async_client.post = AsyncMock(return_value=Response(200, json=mock_response))
+
+    result = await graphql_opa_instance.get_decision(mock_async_client, opa_input)
+
+    assert isinstance(result, OPAResult)
+    assert result.decision_id == "1234"
+    assert result.result is True
diff --git a/test/cli/test_imports.py b/test/cli/test_imports.py
index c7331665c791a594119e1c2d64cb4f5ebe6834b8..8933627b82b06a35ca17e5a2a4417dc9cd9859c6 100644
--- a/test/cli/test_imports.py
+++ b/test/cli/test_imports.py
@@ -305,7 +305,7 @@ def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, mock_
         """Validation error: 2 validation errors for IptrunkImportModel
 side_a_node_id
   Value error, Router  not found [type=value_error, input_value='', input_type=str]
-    For further information visit https://errors.pydantic.dev/2.5/v/value_error
+    For further information visit https://errors.pydantic.dev/2.7/v/value_error
 side_b_node_id
   Value error, Router  not found [type=value_error, input_value='', input_type=str]"""
         in captured_output
diff --git a/test/conftest.py b/test/conftest.py
index d0391c02ab4c83b0c3d0cc98a418dabe11c9e5ad..0c4e36e4f52d60d8270493b9dd234ffb080ba507 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -11,6 +11,7 @@ from alembic import command
 from alembic.config import Config
 from faker import Faker
 from faker.providers import BaseProvider
+from oauth2_lib.settings import oauth2lib_settings
 from orchestrator import app_settings
 from orchestrator.db import (
     Database,
@@ -29,12 +30,10 @@ from sqlalchemy import create_engine, select, text
 from sqlalchemy.engine import make_url
 from sqlalchemy.orm import scoped_session, sessionmaker
 from starlette.testclient import TestClient
+from urllib3_mock import Responses
 
-from gso.auth.settings import oauth2lib_settings
-from gso.db.models import PartnerType
 from gso.main import init_gso_app
-from gso.schema.partner import PartnerCreate
-from gso.services.partners import create_partner
+from gso.services.partners import PartnerSchema, create_partner
 from gso.utils.helpers import LAGMember
 from test.fixtures import (  # noqa: F401
     iptrunk_side_subscription_factory,
@@ -267,8 +266,25 @@ def test_client(fastapi_app):
 
 
 @pytest.fixture(scope="session")
-def geant_partner():
-    return create_partner(PartnerCreate(name="GEANT-TEST", partner_type=PartnerType.GEANT, email="goat-test@geant.org"))
+def partner_factory():
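+    """Return a factory that creates a partner from a name and email via PartnerSchema."""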
+    def _create_partner(
+        name: str,
+        email: str,
+    ) -> dict:
+        return create_partner(
+            PartnerSchema(
+                name=name,
+                email=email,
+            )
+        )
+
+    return _create_partner
+
+
+@pytest.fixture(scope="session")
+def geant_partner(partner_factory):
+    return partner_factory(name="GEANT-TEST", email="goat-test@geant.org")
 
 
 @pytest.fixture()
@@ -516,3 +531,33 @@ def product_type_1_subscriptions_factory(product_type_1_subscription_factory):
 @pytest.fixture()
 def generic_subscription_1(product_type_1_subscription_factory):
     return product_type_1_subscription_factory()
+
+
+@pytest.fixture(autouse=True)
+def responses():
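+    """Mock urllib3 so a test fails on any unmocked HTTP call and on any registered mock that goes unused."""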
+    responses_mock = Responses("requests.packages.urllib3")
+
+    def _find_request(call):
+        mock_url = responses_mock._find_match(call.request)  # noqa: SLF001
+        if not mock_url:
+            pytest.fail(f"Call not mocked: {call.request}")
+        return mock_url
+
+    def _to_tuple(url_mock):
+        return url_mock["url"], url_mock["method"], url_mock["match_querystring"]
+
+    with responses_mock:
+        yield responses_mock
+
+        mocked_urls = map(_to_tuple, responses_mock._urls)  # noqa: SLF001
+        used_urls = map(_to_tuple, map(_find_request, responses_mock.calls))
+        not_used = set(mocked_urls) - set(used_urls)
+        if not_used:
+            pytest.fail(f"Found unused responses mocks: {not_used}", pytrace=False)
+
+
+@pytest.fixture(autouse=True)
+def _no_mail(monkeypatch):
+    """Remove sending mails from all tests."""
+    monkeypatch.delattr("smtplib.SMTP")
diff --git a/test/fixtures.py b/test/fixtures.py
index 14ce92aec90a4a5371b8b929f0c21ccb25b42e33..84481eda9c9d816e438414636cd335ecb2ef1b63 100644
--- a/test/fixtures.py
+++ b/test/fixtures.py
@@ -6,12 +6,19 @@ from uuid import uuid4
 import pytest
 from orchestrator import step, workflow
 from orchestrator.config.assignee import Assignee
-from orchestrator.db import db
+from orchestrator.db import (
+    ProductTable,
+    SubscriptionInstanceTable,
+    SubscriptionInstanceValueTable,
+    SubscriptionTable,
+    db,
+)
 from orchestrator.domain import SubscriptionModel
 from orchestrator.types import SubscriptionLifecycle, UUIDstr
+from orchestrator.utils.datetime import nowtz
 from orchestrator.workflow import done, init, inputstep
 from pydantic_forms.core import FormPage
-from pydantic_forms.types import FormGenerator
+from pydantic_forms.types import FormGenerator, SubscriptionMapping
 from pydantic_forms.validators import Choice
 
 from gso.products import ProductName
@@ -559,3 +566,77 @@ def test_workflow(generic_subscription_1: UUIDstr, generic_product_type_1) -> Ge
 
     with WorkflowInstanceForTests(workflow_for_testing_processes_py, "workflow_for_testing_processes_py") as wf:
         yield wf
+
+
+def create_subscription_for_mapping(
+    product: ProductTable, mapping: SubscriptionMapping, values: dict[str, Any], **kwargs: Any
+) -> SubscriptionTable:
+    """Create a subscription in the test coredb for the given subscription_mapping and values.
+
+    Value keys prefixed with a ? in the mapping mark optional resource types, which are skipped when no value is supplied.
+
+    Args:
+        product: the ProductTable to create a sub for
+        mapping: the subscription_mapping belonging to that product
+        values: a dictionary of keys from the sub_map and their corresponding test values
+        kwargs: Additional keyword arguments, passed through to create_subscription
+
+    Returns: The conforming subscription.
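+
+    Example (hypothetical resource types and values):
+        mapping = {"PB_1": [{"rt_1": "ip", "rt_2": "?vlan"}]}
+        values = {"ip": "10.0.0.1"}  # "vlan" is absent, so the optional rt_2 entry is dropped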
+    """
+
+    def build_instance(name, value_mapping):
+        block = product.find_block_by_name(name)
+
+        def build_value(rt, value):
+            resource_type = block.find_resource_type_by_name(rt)
+            return SubscriptionInstanceValueTable(resource_type_id=resource_type.resource_type_id, value=value)
+
+        return SubscriptionInstanceTable(
+            product_block_id=block.product_block_id,
+            values=[
+                build_value(resource_type, values[value_key]) for (resource_type, value_key) in value_mapping.items()
+            ],
+        )
+
+    # Rebuild the mapping: keep mandatory value keys, strip the ? prefix from optional keys present in values, drop the rest
+    mapping = {
+        name: [
+            {
+                **{k: value_map[k] for k in value_map if not value_map[k].startswith("?")},
+                **{
+                    k: value_map[k][1:]
+                    for k in value_map
+                    if value_map[k].startswith("?") and value_map[k][1:] in values
+                },
+            }
+            for value_map in mapping[name]
+        ]
+        for name in mapping
+    }
+
+    instances = [
+        build_instance(name, value_mapping)
+        for (name, value_mappings) in mapping.items()
+        for value_mapping in value_mappings
+    ]
+
+    return create_subscription(instances=instances, product=product, **kwargs)
+
+
+def create_subscription(**kwargs):
+    attrs = {
+        "description": "A subscription.",
+        "customer_id": kwargs.get("customer_id", "85938c4c-0a11-e511-80d0-005056956c1a"),
+        "start_date": nowtz(),
+        "status": "active",
+        "insync": True,
+        **kwargs,
+    }
+    o = SubscriptionTable(**attrs)
+    db.session.add(o)
+    db.session.commit()
+    return o
diff --git a/test/utils/test_helpers.py b/test/utils/test_helpers.py
index e80e6f30049635fdcc4691c9c75f8e13dfbbfb67..dc7854eaa3287b4b98132a8243bf2068636a684a 100644
--- a/test/utils/test_helpers.py
+++ b/test/utils/test_helpers.py
@@ -1,9 +1,16 @@
 from unittest.mock import patch
 
 import pytest
+from orchestrator.types import SubscriptionLifecycle
 
+from gso.products import Router
 from gso.products.product_blocks.iptrunk import IptrunkInterfaceBlock
-from gso.utils.helpers import available_interfaces_choices_including_current_members, validate_tt_number
+from gso.products.product_blocks.router import RouterRole
+from gso.utils.helpers import (
+    available_interfaces_choices_including_current_members,
+    generate_inventory_for_active_routers,
+    validate_tt_number,
+)
 from gso.utils.shared_enums import Vendor
 
 
@@ -90,3 +97,45 @@ def test_tt_number(generate_tt_numbers):
 
             with pytest.raises(ValueError, match=err_msg):
                 validate_tt_number(tt_number)
+
+
+def test_generate_inventory_for_active_routers_with_single_active_router(nokia_router_subscription_factory):
+    """Test the generation of inventory for a single active P router."""
+    router = Router.from_subscription(nokia_router_subscription_factory(router_role=RouterRole.P))
+    expected_result = {
+        "all": {
+            "hosts": {
+                router.router.router_fqdn: {
+                    "lo4": str(router.router.router_lo_ipv4_address),
+                    "lo6": str(router.router.router_lo_ipv6_address),
+                    "vendor": str(router.router.vendor),
+                }
+            }
+        }
+    }
+    assert generate_inventory_for_active_routers(RouterRole.P) == expected_result
+
+
+def test_generate_inventory_for_active_routers_with_multiple_routers(nokia_router_subscription_factory):
+    """Test the generation of inventory for multiple active P and PE routers"""
+    for _ in range(5):
+        nokia_router_subscription_factory(router_role=RouterRole.P)
+    for _ in range(3):
+        nokia_router_subscription_factory(router_role=RouterRole.PE)
+    nokia_router_subscription_factory(status=SubscriptionLifecycle.TERMINATED)
+    nokia_router_subscription_factory(status=SubscriptionLifecycle.INITIAL)
+    #  Only active routers should appear in the generated inventories.
+    inventory = generate_inventory_for_active_routers(RouterRole.P)
+    assert len(inventory["all"]["hosts"]) == 5
+    inventory = generate_inventory_for_active_routers(RouterRole.PE)
+    assert len(inventory["all"]["hosts"]) == 3
+
+
+def test_generate_inventory_for_active_routers_with_excluded_router(nokia_router_subscription_factory):
+    """Test the generation of inventory for active P routers with an excluded router."""
+    for _ in range(5):
+        nokia_router_subscription_factory(router_role=RouterRole.P)
+    router = nokia_router_subscription_factory(router_role=RouterRole.P)
+    excluded_routers = [Router.from_subscription(router).router.router_fqdn]
+    inventory = generate_inventory_for_active_routers(RouterRole.P, exclude_routers=excluded_routers)
+    assert len(inventory["all"]["hosts"]) == 5  # Six P routers exist; the excluded one leaves five in the inventory.
diff --git a/test/workflows/conftest.py b/test/workflows/conftest.py
deleted file mode 100644
index 9d298a779f3e4f190e009973caa321658eb2433b..0000000000000000000000000000000000000000
--- a/test/workflows/conftest.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import pytest
-from urllib3_mock import Responses
-
-
-@pytest.fixture(autouse=True)
-def responses():
-    responses_mock = Responses("requests.packages.urllib3")
-
-    def _find_request(call):
-        mock_url = responses_mock._find_match(call.request)  # noqa: SLF001
-        if not mock_url:
-            pytest.fail(f"Call not mocked: {call.request}")
-        return mock_url
-
-    def _to_tuple(url_mock):
-        return url_mock["url"], url_mock["method"], url_mock["match_querystring"]
-
-    with responses_mock:
-        yield responses_mock
-
-        mocked_urls = map(_to_tuple, responses_mock._urls)  # noqa: SLF001
-        used_urls = map(_to_tuple, map(_find_request, responses_mock.calls))
-        not_used = set(mocked_urls) - set(used_urls)
-        if not_used:
-            pytest.fail(f"Found unused responses mocks: {not_used}", pytrace=False)
diff --git a/test/workflows/router/test_terminate_router.py b/test/workflows/router/test_terminate_router.py
index e52826820084981798541f1189a134eda5d1d9c0..a41e8b32e6dde2ddca6ee716b7ce4227751ec62f 100644
--- a/test/workflows/router/test_terminate_router.py
+++ b/test/workflows/router/test_terminate_router.py
@@ -3,19 +3,24 @@ from unittest.mock import patch
 import pytest
 
 from gso.products import Router
+from gso.products.product_blocks.router import RouterRole
 from test.workflows import assert_complete, assert_lso_interaction_success, extract_state, run_workflow
 
 
 @pytest.mark.workflow()
 @pytest.mark.parametrize("remove_configuration", [True, False])
+@pytest.mark.parametrize("update_ibgp_mesh", [True, False])
 @patch("gso.services.lso_client._send_request")
 @patch("gso.workflows.router.terminate_router.NetboxClient.delete_device")
 @patch("gso.workflows.router.terminate_router.infoblox.delete_host_by_ip")
-def test_terminate_router_full_success(
+@patch("gso.workflows.router.terminate_router.LibreNMSClient.remove_device")
+def test_terminate_pe_router_full_success(
+    mock_librenms_remove_device,
     mock_delete_host_by_ip,
     mock_delete_device,
     mock_execute_playbook,
     remove_configuration,
+    update_ibgp_mesh,
     nokia_router_subscription_factory,
     faker,
     data_config_filename,
@@ -25,9 +30,65 @@
     router_termination_input_form_data = {
         "tt_number": faker.tt_number(),
         "remove_configuration": remove_configuration,
+        "update_ibgp_mesh": update_ibgp_mesh,
     }
-    lso_interaction_count = 2 if remove_configuration else 0
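+    #  Config removal adds two LSO playbook runs; the iBGP mesh update adds four more for a PE router.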
+    lso_interaction_count = 0
+    if remove_configuration:
+        lso_interaction_count += 2
+    if update_ibgp_mesh:
+        lso_interaction_count += 4
+    #  Run workflow
+    initial_router_data = [{"subscription_id": product_id}, router_termination_input_form_data]
+    result, process_stat, step_log = run_workflow("terminate_router", initial_router_data)
+
+    for _ in range(lso_interaction_count):
+        result, step_log = assert_lso_interaction_success(result, process_stat, step_log)
+
+    assert_complete(result)
+
+    state = extract_state(result)
+    subscription_id = state["subscription_id"]
+    subscription = Router.from_subscription(subscription_id)
+
+    assert subscription.status == "terminated"
+    assert mock_delete_device.call_count == 1
+    assert mock_delete_host_by_ip.call_count == 1
+    assert mock_librenms_remove_device.call_count == 1
+    assert mock_execute_playbook.call_count == lso_interaction_count
+
 
+@pytest.mark.workflow()
+@pytest.mark.parametrize("remove_configuration", [True, False])
+@pytest.mark.parametrize("update_ibgp_mesh", [True, False])
+@patch("gso.services.lso_client._send_request")
+@patch("gso.workflows.router.terminate_router.NetboxClient.delete_device")
+@patch("gso.workflows.router.terminate_router.infoblox.delete_host_by_ip")
+@patch("gso.workflows.router.terminate_router.LibreNMSClient.remove_device")
+def test_terminate_p_router_full_success(
+    mock_librenms_remove_device,
+    mock_delete_host_by_ip,
+    mock_delete_device,
+    mock_execute_playbook,
+    remove_configuration,
+    update_ibgp_mesh,
+    nokia_router_subscription_factory,
+    faker,
+    data_config_filename,
+):
+    #  Prepare mock values and expected results
+    product_id = nokia_router_subscription_factory(router_role=RouterRole.P)
+    router_termination_input_form_data = {
+        "tt_number": faker.tt_number(),
+        "remove_configuration": remove_configuration,
+        "update_ibgp_mesh": update_ibgp_mesh,
+    }
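+    #  Config removal adds two LSO playbook runs; the iBGP mesh update adds two more for a P router.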
+    lso_interaction_count = 0
+    if remove_configuration:
+        lso_interaction_count += 2
+    if update_ibgp_mesh:
+        lso_interaction_count += 2
     #  Run workflow
     initial_router_data = [{"subscription_id": product_id}, router_termination_input_form_data]
     result, process_stat, step_log = run_workflow("terminate_router", initial_router_data)
@@ -44,4 +103,5 @@ def test_terminate_router_full_success(
     assert subscription.status == "terminated"
     assert mock_delete_device.call_count == 1
     assert mock_delete_host_by_ip.call_count == 1
+    assert mock_librenms_remove_device.call_count == 1
     assert mock_execute_playbook.call_count == lso_interaction_count
diff --git a/test/workflows/tasks/test_create_partners.py b/test/workflows/tasks/test_create_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a902aa42771f74ee6cebb4b8ba78c92a79b2253
--- /dev/null
+++ b/test/workflows/tasks/test_create_partners.py
@@ -0,0 +1,66 @@
+import pytest
+from pydantic_forms.exceptions import FormValidationError
+
+from gso.services.partners import get_partner_by_name
+from test.workflows import assert_complete, extract_state, run_workflow
+
+
+@pytest.mark.workflow()
+def test_create_partner_success():
+    result, _, _ = run_workflow(
+        "task_create_partners",
+        [
+            {
+                "name": "GEANT-TEST-CREATION",
+                "email": "goat-test-creation@geant.org",
+            }
+        ],
+    )
+    assert_complete(result)
+    state = extract_state(result)
+
+    partner = get_partner_by_name(state["name"])
+    assert partner["name"] == "GEANT-TEST-CREATION"
+    assert partner["email"] == "goat-test-creation@geant.org"
+
+
+@pytest.mark.workflow()
+def test_create_partner_with_invalid_input_fails():
+    with pytest.raises(FormValidationError) as error:
+        run_workflow(
+            "task_create_partners",
+            [
+                {
+                    "name": "Kenneth Boyle",
+                    "email": "invalid_email",
+                }
+            ],
+        )
+
+    errors = error.value.errors
+
+    email_error = errors[0]
+    assert email_error["loc"] == ("email",)
+    assert "valid email address" in email_error["msg"]
+
+
+@pytest.mark.workflow()
+def test_create_partner_with_duplicate_name_or_email_fails(partner_factory):
+    partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+
+    with pytest.raises(FormValidationError) as error:
+        run_workflow(
+            "task_create_partners",
+            [
+                {
+                    "name": "NEW_name",
+                    "email": "myemail@gmail.com",
+                }
+            ],
+        )
+
+    assert error.value.errors[0]["msg"] == "Partner with this name already exists."
+    assert error.value.errors[1]["msg"] == "Partner with this email already exists."
diff --git a/test/workflows/tasks/test_delete_partners.py b/test/workflows/tasks/test_delete_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0964bdfc93be71624a8f947876cae956a4810b2
--- /dev/null
+++ b/test/workflows/tasks/test_delete_partners.py
@@ -0,0 +1,61 @@
+from uuid import uuid4
+
+import pytest
+from orchestrator.db import ProductTable, db
+from pydantic_forms.exceptions import FormValidationError
+from sqlalchemy import select
+
+from gso.services.partners import filter_partners_by_name
+from test.fixtures import create_subscription_for_mapping
+from test.workflows import assert_complete, run_workflow
+
+CORRECT_SUBSCRIPTION = str(uuid4())
+
+
+def get_one_product(product_name):
+    return db.session.scalars(select(ProductTable).where(ProductTable.name == product_name)).one()
+
+
+@pytest.mark.workflow()
+def test_delete_partner_success(partner_factory):
+    partner = partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+
+    assert filter_partners_by_name(name="new_name", case_sensitive=False)
+
+    result, _, _ = run_workflow(
+        "task_delete_partners",
+        [
+            {"partners": partner["partner_id"]},
+        ],
+    )
+    assert_complete(result)
+
+    assert filter_partners_by_name(name="new_name", case_sensitive=False) is None
+
+
+@pytest.mark.workflow()
+def test_delete_partner_with_associated_data_fails(generic_product_3, partner_factory):
+    partner = partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+
+    subscription_mapping = {"PB_2": [{"rt_3": "info.id", "rt_2": "info2.id"}]}
+    values = {"info.id": "0", "info2.id": "X"}
+    product = get_one_product("Product 3")
+    create_subscription_for_mapping(
+        product, subscription_mapping, values, subscription_id=CORRECT_SUBSCRIPTION, customer_id=partner["partner_id"]
+    )
+
+    with pytest.raises(FormValidationError) as error:
+        run_workflow(
+            "task_delete_partners",
+            [
+                {"partners": partner["partner_id"]},
+            ],
+        )
+
+    assert error.value.errors[0]["msg"] == "This partner has associated data and cannot be removed."
diff --git a/test/workflows/tasks/test_modify_partners.py b/test/workflows/tasks/test_modify_partners.py
new file mode 100644
index 0000000000000000000000000000000000000000..23559734df77b9a558b8eacbaf0677a0da172a0b
--- /dev/null
+++ b/test/workflows/tasks/test_modify_partners.py
@@ -0,0 +1,103 @@
+import pytest
+from pydantic_forms.exceptions import FormValidationError
+
+from gso.services.partners import get_partner_by_id
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_modify_partner_success(partner_factory):
+    partner = partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+    result, _, _ = run_workflow(
+        "task_modify_partners",
+        [
+            {"partners": partner["partner_id"]},
+            {
+                "name": "GEANT-TEST-CREATION",
+                "email": "goat-test-creation@geant.org",
+            },
+        ],
+    )
+    assert_complete(result)
+
+    partner_db = get_partner_by_id(partner_id=partner["partner_id"])
+    assert partner_db.name == "GEANT-TEST-CREATION"
+    assert partner_db.email == "goat-test-creation@geant.org"
+
+
+@pytest.mark.workflow()
+def test_modify_partner_with_same_data_success(partner_factory):
+    partner = partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+    result, _, _ = run_workflow(
+        "task_modify_partners",
+        [
+            {"partners": partner["partner_id"]},
+            {
+                "name": "new_name",
+                "email": "myemail@gmail.com",
+            },
+        ],
+    )
+    assert_complete(result)
+
+    partner_db = get_partner_by_id(partner_id=partner["partner_id"])
+    assert partner_db.name == "new_name"
+    assert partner_db.email == "myemail@gmail.com"
+
+
+@pytest.mark.workflow()
+def test_modify_partner_with_duplicate_name_or_email_fails(partner_factory):
+    partner_factory(
+        name="new_name",
+        email="myemail@gmail.com",
+    )
+    partner_2 = partner_factory(
+        name="new_name_2",
+        email="myemail2@gmail.com",
+    )
+
+    with pytest.raises(FormValidationError) as error:
+        run_workflow(
+            "task_modify_partners",
+            [
+                {"partners": partner_2["partner_id"]},
+                {
+                    "name": "new_name",
+                    "email": "myemail@gmail.com",
+                },
+            ],
+        )
+
+    assert error.value.errors[0]["msg"] == "Partner with this name already exists."
+    assert error.value.errors[1]["msg"] == "Partner with this email already exists."
+
+
+@pytest.mark.workflow()
+def test_modify_partner_with_invalid_input_fails(partner_factory):
+    partner = partner_factory(
+        name="new_name_2",
+        email="myemail2@gmail.com",
+    )
+    with pytest.raises(FormValidationError) as error:
+        run_workflow(
+            "task_modify_partners",
+            [
+                {"partners": partner["partner_id"]},
+                {
+                    "name": "Kenneth Boyle",
+                    "email": "invalid_email",
+                },
+            ],
+        )
+
+    errors = error.value.errors
+
+    email_error = errors[0]
+    assert email_error["loc"] == ("email",)
+    assert "valid email address" in email_error["msg"]
diff --git a/test/workflows/tasks/test_task_validate_products.py b/test/workflows/tasks/test_task_validate_products.py
index 0dba0817344e426db93a3603815bc669ff7fc510..b12b3a4a6b0190f7d150faf54d9c61d466f4f8de 100644
--- a/test/workflows/tasks/test_task_validate_products.py
+++ b/test/workflows/tasks/test_task_validate_products.py
@@ -4,7 +4,7 @@ from test.workflows import assert_complete, extract_state, run_workflow
 
 
 @pytest.mark.workflow()
-def test_task_validate_geant_products(responses, faker):
+def test_task_validate_geant_products(responses):
     result, _, _ = run_workflow("task_validate_geant_products", [{}])
     assert_complete(result)
     state = extract_state(result)