diff --git a/.gitignore b/.gitignore
index 5133b408045b4490147ccf3ca8c43db90d1fa667..fccb8082923cbdf2088055e1489862884abd02c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@ docs/build
 docs/vale/styles/*
 !docs/vale/styles/config/
 !docs/vale/styles/custom/
+.DS_Store
 
 .idea
 .venv
diff --git a/Changelog.md b/Changelog.md
index ae5a2918b153c7bb9a469b3d1f1231309c80cc79..c33bdc46ccddf0b7944387da9bff0a005bfa1c4e 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -2,6 +2,9 @@
 
 All notable changes to this project will be documented in this file.
 
+## [0.3] - 2024-01-23
+- Fixed issues related to authentication, along with some small improvements.
+
 ## [0.2] - 2024-01-16
 - Initial release
 
diff --git a/docs/source/module/api/v1/index.rst b/docs/source/module/api/v1/index.rst
index bc6d80c3820d5ad9e6619925a230412f9e47f070..743e1a2814efd7e63d07f87a8150b234eaaa909a 100644
--- a/docs/source/module/api/v1/index.rst
+++ b/docs/source/module/api/v1/index.rst
@@ -14,3 +14,4 @@ Submodules
 
    imports
    subscriptions
+   processes
diff --git a/docs/source/module/api/v1/processes.rst b/docs/source/module/api/v1/processes.rst
new file mode 100644
index 0000000000000000000000000000000000000000..82aa87628771ec27ee360bf8c2ac4a79eef248ed
--- /dev/null
+++ b/docs/source/module/api/v1/processes.rst
@@ -0,0 +1,6 @@
+``gso.api.v1.processes``
+========================
+
+.. automodule:: gso.api.v1.processes
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/auth/index.rst b/docs/source/module/auth/index.rst
index 0ec5cd1fad1966607dbfaf3797235286bd08503c..5d818a853cd02ffe3b7e4ca1a8b43528c2c2a003 100644
--- a/docs/source/module/auth/index.rst
+++ b/docs/source/module/auth/index.rst
@@ -1,5 +1,5 @@
-``gso.products``
-================
+``gso.auth``
+============
 
 .. automodule:: gso.auth
    :members:
diff --git a/docs/source/module/services/index.rst b/docs/source/module/services/index.rst
index 26190e2968a848d3fb06e491b52572dd4fca8faa..d74c2ba8ffee28fad531d1b74ef54c5c8edab88b 100644
--- a/docs/source/module/services/index.rst
+++ b/docs/source/module/services/index.rst
@@ -14,6 +14,7 @@ Submodules
 
    crm
    infoblox
+   librenms_client
    netbox_client
    provisioning_proxy
    subscriptions
diff --git a/docs/source/module/services/librenms_client.rst b/docs/source/module/services/librenms_client.rst
new file mode 100644
index 0000000000000000000000000000000000000000..70243bb70a328a8097bb584e86981e3c7298b99d
--- /dev/null
+++ b/docs/source/module/services/librenms_client.rst
@@ -0,0 +1,6 @@
+``gso.services.librenms_client``
+================================
+
+.. automodule:: gso.services.librenms_client
+   :members:
+   :show-inheritance:
diff --git a/docs/source/modules.rst b/docs/source/modules.rst
index 36fb6ff1775e08a69e3a706bb289e146b8ff99d7..ab1adef70f18ef8a4021143bd71d53bd3eae4f86 100644
--- a/docs/source/modules.rst
+++ b/docs/source/modules.rst
@@ -19,6 +19,7 @@ Subpackages
    :titlesonly:
 
    module/api/index
+   module/auth/index
    module/cli/index
    module/products/index
    module/schedules/index
diff --git a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
index aba8e760989154433f87263909a310907fe4a667..081f7913cba3d95518c76bfb75278bdf9300dbf8 100644
--- a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
+++ b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
@@ -14,3 +14,4 @@ Dark_fiber
 PHASE 1
 [Mm]odify
 AAI
+[Mm]iddleware
\ No newline at end of file
diff --git a/gso/__init__.py b/gso/__init__.py
index 0227c19d7d29838dc2952ac34abb13d6a9ebc3ea..ecdfd940ffefe85df1613e4a6cbbc74f56bf80dc 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -10,12 +10,14 @@ from orchestrator.cli.main import app as cli_app
 import gso.products
 import gso.workflows  # noqa: F401
 from gso.api import router as api_router
+from gso.middlewares import ModifyProcessEndpointResponse
 
 
 def init_gso_app() -> OrchestratorCore:
     """Initialise the :term:`GSO` app."""
     app = OrchestratorCore(base_settings=app_settings)
     app.include_router(api_router, prefix="/api")
+    app.add_middleware(ModifyProcessEndpointResponse)
     return app
 
 
diff --git a/gso/api/v1/__init__.py b/gso/api/v1/__init__.py
index c14de2e3eec324fed3a12e7a85c3c39f236ed83d..983408986b827a35bf32cbc538d39eb7b6e208e1 100644
--- a/gso/api/v1/__init__.py
+++ b/gso/api/v1/__init__.py
@@ -3,9 +3,11 @@
 from fastapi import APIRouter
 
 from gso.api.v1.imports import router as imports_router
+from gso.api.v1.processes import router as processes_router
 from gso.api.v1.subscriptions import router as subscriptions_router
 
 router = APIRouter()
 
 router.include_router(imports_router)
 router.include_router(subscriptions_router)
+router.include_router(processes_router)
diff --git a/gso/api/v1/processes.py b/gso/api/v1/processes.py
new file mode 100644
index 0000000000000000000000000000000000000000..32eb104c5860c60e99d6ae34cdbf37edba075a34
--- /dev/null
+++ b/gso/api/v1/processes.py
@@ -0,0 +1,41 @@
+"""Process related endpoints."""
+
+from typing import Any
+from uuid import UUID
+
+from fastapi import APIRouter, Depends, HTTPException, status
+from orchestrator.db import ProcessStepTable
+from orchestrator.schemas.base import OrchestratorBaseModel
+
+from gso.auth.security import opa_security_default
+
+router = APIRouter(prefix="/processes", tags=["Processes"], dependencies=[Depends(opa_security_default)])
+
+
+class CallBackResultsBaseModel(OrchestratorBaseModel):
+    """Base model for callback results."""
+
+    callback_results: dict
+
+
+@router.get(
+    "/steps/{step_id}/callback-results", status_code=status.HTTP_200_OK, response_model=CallBackResultsBaseModel
+)
+def callback_results(step_id: UUID) -> dict[str, Any]:
+    """Retrieve callback results for a specific process step.
+
+    :param step_id: The unique identifier of the process step.
+    :type step_id: UUID
+
+    :return: Dictionary containing callback results.
+    :rtype: dict[str, Any]
+
+    :raises HTTPException: 404 status code if the specified step_id is not found or if the 'callback_result' key
+        is not present in the state.
+    """
+    step = ProcessStepTable.query.filter(ProcessStepTable.step_id == step_id).first()
+
+    if not (step and step.state.get("callback_result", None)):
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Callback result not found.")
+
+    return {"callback_results": step.state["callback_result"]}
diff --git a/gso/auth/oidc_policy_helper.py b/gso/auth/oidc_policy_helper.py
index 945b7496d36f457574140dd191437af179b95f95..04e2fc8e5ec419fb5f80a6a5646bb3429512cad9 100644
--- a/gso/auth/oidc_policy_helper.py
+++ b/gso/auth/oidc_policy_helper.py
@@ -30,6 +30,16 @@ logger = get_logger(__name__)
 
 HTTPX_SSL_CONTEXT = ssl.create_default_context()  # https://github.com/encode/httpx/issues/838
 
+_CALLBACK_STEP_API_URL_PATTERN = re.compile(
+    r"^/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
+    r"/callback/([0-9a-zA-Z\-_]+)$"
+)
+
+
+def _is_callback_step_endpoint(request: Request) -> bool:
+    """Check if the request is a callback step API call."""
+    return re.match(_CALLBACK_STEP_API_URL_PATTERN, request.url.path) is not None
+
 
 class InvalidScopeValueError(ValueError):
     """Exception raised for invalid scope values in OIDC."""
@@ -212,14 +222,18 @@ class OIDCUser(HTTPBearer):
             return None
 
         async with AsyncClient(http1=True, verify=HTTPX_SSL_CONTEXT) as async_request:
-            await self.check_openid_config(async_request)
-
             if not token:
                 credentials = await super().__call__(request)
                 if not credentials:
                     return None
                 token = credentials.credentials
+            elif _is_callback_step_endpoint(request):
+                logger.debug(
+                    "callback step endpoint is called. verification will be done by endpoint itself.", url=request.url
+                )
+                return None
 
+            await self.check_openid_config(async_request)
             intercepted_token = await self.introspect_token(async_request, token)
 
             if "active" not in intercepted_token:
@@ -397,6 +411,9 @@ def opa_decision(
         if not (oauth2lib_settings.OAUTH2_ACTIVE and oauth2lib_settings.OAUTH2_AUTHORIZATION_ACTIVE):
             return None
 
+        if _is_callback_step_endpoint(request):
+            return None
+
         try:
             json = await request.json()
         # Silencing the Decode error or Type error when request.json() does not return anything sane.
diff --git a/gso/middlewares.py b/gso/middlewares.py
new file mode 100644
index 0000000000000000000000000000000000000000..58106502b70a794cde29cfb714ce61101d056dbb
--- /dev/null
+++ b/gso/middlewares.py
@@ -0,0 +1,101 @@
+"""Custom middlewares for the GSO API."""
+
+import json
+import re
+from collections.abc import Callable
+from typing import Any
+
+from fastapi import Request
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.responses import Response
+from starlette.status import HTTP_200_OK
+
+
+class ModifyProcessEndpointResponse(BaseHTTPMiddleware):
+    """Middleware to modify the response for Process details endpoint."""
+
+    async def dispatch(self, request: Request, call_next: Callable) -> Response:
+        """Middleware to modify the response for Process details endpoint.
+
+        :param request: The incoming HTTP request.
+        :type request: Request
+
+        :param call_next: The next middleware or endpoint in the stack.
+        :type call_next: Callable
+
+        :return: The modified HTTP response.
+        :rtype: Response
+        """
+        response = await call_next(request)
+        path_pattern = re.compile(
+            r"/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
+        )
+
+        match = path_pattern.match(request.url.path)
+
+        if match and response.status_code == HTTP_200_OK:
+            # Modify the response body as needed
+            response_body = b""
+            async for chunk in response.body_iterator:
+                response_body += chunk
+            try:
+                json_content = json.loads(response_body)
+                await self._modify_response_body(json_content, request)
+                modified_response_body = json.dumps(json_content).encode()
+                headers = dict(response.headers)
+                headers["content-length"] = str(len(modified_response_body))
+                return Response(
+                    content=modified_response_body,
+                    status_code=response.status_code,
+                    headers=headers,
+                    media_type=response.media_type,
+                )
+
+            except json.JSONDecodeError:
+                pass
+
+        return response
+
+    @staticmethod
+    async def _get_token(request: Request) -> str:
+        """Get the token from the request headers.
+
+        :param request: The incoming HTTP request.
+        :type request: Request
+
+        :return: The bearer token formatted as a ``?token=...`` query string, or an empty string if absent.
+        :rtype: str
+        """
+        bearer_prefix = "Bearer "
+        authorization_header = request.headers.get("Authorization")
+        if authorization_header:
+            # Remove the "Bearer " prefix from the token
+            token = authorization_header.replace(bearer_prefix, "")
+            return f"?token={token}"
+        return ""
+
+    async def _modify_response_body(self, response_body: dict[str, Any], request: Request) -> None:
+        """Modify the response body as needed.
+
+        :param response_body: The response body in dictionary format.
+        :type response_body: dict[str, Any]
+        :param request: The incoming HTTP request.
+        :type request: Request
+
+        :return: None
+        """
+        max_output_length = 500
+        token = await self._get_token(request)
+        try:
+            for step in response_body["steps"]:
+                if step["state"].get("callback_result", None):
+                    callback_result = step["state"]["callback_result"]
+                    if callback_result and isinstance(callback_result, str):
+                        callback_result = json.loads(callback_result)
+                    if callback_result.get("output") and len(callback_result["output"]) > max_output_length:
+                        callback_result[
+                            "output"
+                        ] = f'{request.base_url}api/v1/processes/steps/{step["step_id"]}/callback-results{token}'
+                    step["state"]["callback_result"] = callback_result
+        except (AttributeError, KeyError, TypeError):
+            pass
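
To illustrate the effect of this middleware (the values below are placeholders, not taken from this change): when a
process step's ``callback_result`` carries an ``output`` longer than 500 characters, the detail response no longer
inlines it but points at the new callback-results endpoint instead.

    # Minimal before/after sketch of the rewritten step state.
    before = {"status": "ok", "output": "x" * 10_000}  # oversized playbook output
    after = {
        "status": "ok",
        "output": "https://gso.example.org/api/v1/processes/steps/<step_id>/callback-results?token=<token>",
    }
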
diff --git a/gso/migrations/versions/2023-12-18_bacd55c26106_add_ibgp_mesh_workflow.py b/gso/migrations/versions/2023-12-18_bacd55c26106_add_ibgp_mesh_workflow.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e52b8c0d611b31f03beef7c62c24e8810284495
--- /dev/null
+++ b/gso/migrations/versions/2023-12-18_bacd55c26106_add_ibgp_mesh_workflow.py
@@ -0,0 +1,39 @@
+"""Add iBGP mesh workflow.
+
+Revision ID: bacd55c26106
+Revises: f0764c6f392c
+Create Date: 2023-12-18 17:58:29.581963
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'bacd55c26106'
+down_revision = 'f0764c6f392c'
+branch_labels = None
+depends_on = None
+
+
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+new_workflows = [
+    {
+        "name": "update_ibgp_mesh",
+        "target": "MODIFY",
+        "description": "Update iBGP mesh",
+        "product_type": "Router"
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        create_workflow(conn, workflow)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        delete_workflow(conn, workflow["name"])
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index 4ee622611c12c48be7874eeab7c8ffadac395889..fa8616e4151a50175db13e52b48ed28513861133 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -45,6 +45,25 @@
       "dns_view": "default"
     }
   },
+  "MONITORING": {
+    "LIBRENMS": {
+      "base_url": "https://librenms/api/v0",
+      "token": "<token>"
+    },
+    "SNMP": {
+      "v2c": {
+        "community": "secret-community"
+      },
+      "v3": {
+        "authlevel": "AuthPriv",
+        "authname": "librenms",
+        "authpass": "<password1>",
+        "authalgo": "sha",
+        "cryptopass": "<password2>",
+        "cryptoalgo": "aes"
+      }
+    }
+  },
   "PROVISIONING_PROXY": {
     "scheme": "https",
     "api_base": "localhost:44444",
diff --git a/gso/services/crm.py b/gso/services/crm.py
index e0b8c61c39650ebc81ac438d8ea281792b74f51f..810df588e5b3c93081be1796d78bf9b21f4ae23f 100644
--- a/gso/services/crm.py
+++ b/gso/services/crm.py
@@ -6,8 +6,6 @@ For the time being, it's hardcoded to only contain GÉANT as a customer, since t
 
 from typing import Any
 
-from pydantic_forms.validators import Choice
-
 
 class CustomerNotFoundError(Exception):
     """Exception raised when a customer is not found."""
@@ -31,12 +29,3 @@ def get_customer_by_name(name: str) -> dict[str, Any]:
 
     msg = f"Customer {name} not found"
     raise CustomerNotFoundError(msg)
-
-
-def customer_selector() -> Choice:
-    """GUI input field for selecting a customer."""
-    customers = {}
-    for customer in all_customers():
-        customers[customer["id"]] = customer["name"]
-
-    return Choice("Select a customer", zip(customers.keys(), customers.items(), strict=True))  # type: ignore[arg-type]
diff --git a/gso/services/librenms_client.py b/gso/services/librenms_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4564b832482346569e29bae3fa3d1dc6f06d686
--- /dev/null
+++ b/gso/services/librenms_client.py
@@ -0,0 +1,112 @@
+"""The LibreNMS module interacts with the inventory management system of :term:`GAP`."""
+
+import logging
+from http import HTTPStatus
+from importlib import metadata
+from typing import Any
+
+import requests
+from requests import HTTPError
+
+from gso.settings import load_oss_params
+from gso.utils.helpers import SNMPVersion
+
+logger = logging.getLogger(__name__)
+
+
+class LibreNMSClient:
+    """The client for LibreNMS that interacts with the inventory management system."""
+
+    def __init__(self) -> None:
+        """Initialise a new LibreNMS client with an authentication token."""
+        config = load_oss_params().MONITORING
+        token = config.LIBRENMS.token
+
+        self.base_url = config.LIBRENMS.base_url
+        self.snmp_config = config.SNMP
+
+        self.headers = {
+            "User-Agent": f"geant-service-orchestrator/{metadata.version('geant-service-orchestrator')}",
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+            "X-Auth-Token": token,
+        }
+
+    def get_device(self, fqdn: str) -> dict[str, Any]:
+        """Get an existing device from LibreNMS.
+
+        :param str fqdn: The :term:`FQDN` of a device that is retrieved.
+        :return dict[str, Any]: A :term:`JSON` formatted list of devices that match the queried :term:`FQDN`.
+        :raises HTTPError: Raises an HTTP error 404 when the device is not found
+        """
+        response = requests.get(f"{self.base_url}/devices/{fqdn}", headers=self.headers, timeout=(0.5, 75))
+        response.raise_for_status()
+
+        return response.json()
+
+    def device_exists(self, fqdn: str) -> bool:
+        """Check whether a device exists in LibreNMS.
+
+        :param str fqdn: The hostname that should be checked for.
+        :return bool: Whether the device exists or not.
+        """
+        try:
+            device = self.get_device(fqdn)
+        except HTTPError as e:
+            if e.response.status_code == HTTPStatus.NOT_FOUND:
+                return False
+            raise
+
+        return device["status"] == "ok"
+
+    def add_device(self, fqdn: str, snmp_version: SNMPVersion) -> dict[str, Any]:
+        """Add a new device to LibreNMS.
+
+        :param str fqdn: The hostname of the newly added device.
+        :param SNMPVersion snmp_version: The SNMP version of the new device, which determines the authentication
+                                         parameters that LibreNMS should use to poll the device.
+        """
+        device_data = {
+            "display": fqdn,
+            "hostname": fqdn,
+            "sysName": fqdn,
+            "snmpver": snmp_version.value,
+        }
+        device_data.update(getattr(self.snmp_config, snmp_version))
+
+        device = requests.post(f"{self.base_url}/devices", headers=self.headers, json=device_data, timeout=(0.5, 75))
+        device.raise_for_status()
+
+        return device.json()
+
+    def remove_device(self, fqdn: str) -> dict[str, Any]:
+        """Remove a device from LibreNMS.
+
+        :param str fqdn: The :term:`FQDN` of the hostname that should get deleted.
+        :return dict[str, Any]: A JSON representation of the device that got removed.
+        :raises HTTPError: Raises an exception if the request did not succeed.
+        """
+        device = requests.delete(f"{self.base_url}/devices/{fqdn}", headers=self.headers, timeout=(0.5, 75))
+        device.raise_for_status()
+
+        return device.json()
+
+    def validate_device(self, fqdn: str) -> list[str]:
+        """Validate a device in LibreNMS by fetching the record match the queried :term:`FQDN` against its hostname.
+
+        :param str fqdn: The :term:`FQDN` of the host that is validated.
+        :return list[str]: A list of errors; an empty list means the device validated successfully.
+        """
+        errors = []
+        try:
+            device = self.get_device(fqdn)
+
+            if device["devices"][0]["hostname"] != fqdn:
+                errors += ["Device hostname in LibreNMS does not match FQDN."]
+        except HTTPError as e:
+            if e.response.status_code == HTTPStatus.NOT_FOUND:
+                errors += ["Device does not exist in LibreNMS."]
+            else:
+                raise
+
+        return errors
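
A small usage sketch for the new client (the FQDN below is a placeholder; the OSS parameters must contain the
MONITORING section shown in ``oss-params-example.json``):

    from gso.services.librenms_client import LibreNMSClient
    from gso.utils.helpers import SNMPVersion

    client = LibreNMSClient()
    fqdn = "rt1.example.geant.org"  # hypothetical router FQDN

    if not client.device_exists(fqdn):
        client.add_device(fqdn, SNMPVersion.V2C)

    errors = client.validate_device(fqdn)
    if errors:
        print(errors)  # e.g. ["Device hostname in LibreNMS does not match FQDN."]
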
diff --git a/gso/services/netbox_client.py b/gso/services/netbox_client.py
index 6209fa9aec76f49de3b628510cdc79204d0526ab..74f56982ae0ca33a3a4cb400f50c3f5e61ad44f4 100644
--- a/gso/services/netbox_client.py
+++ b/gso/services/netbox_client.py
@@ -300,7 +300,7 @@ class NetboxClient:
         ]
 
         # Generate all feasible LAGs
-        all_feasible_lags = [f"LAG-{i}" for i in FEASIBLE_IP_TRUNK_LAG_RANGE]
+        all_feasible_lags = [f"lag-{i}" for i in FEASIBLE_IP_TRUNK_LAG_RANGE]
 
         # Return available LAGs not assigned to the device
         return [lag for lag in all_feasible_lags if lag not in lag_interface_names]
diff --git a/gso/services/subscriptions.py b/gso/services/subscriptions.py
index f4bc0b348424549ff1ed3f8184478315b0f9e303..9eb0583e6c74461b7155b7d94538ab1afad4a0e1 100644
--- a/gso/services/subscriptions.py
+++ b/gso/services/subscriptions.py
@@ -15,7 +15,9 @@ from orchestrator.db import (
     SubscriptionInstanceValueTable,
     SubscriptionTable,
 )
+from orchestrator.services.subscriptions import query_in_use_by_subscriptions
 from orchestrator.types import SubscriptionLifecycle
+from pydantic_forms.types import UUIDstr
 
 from gso.products import ProductType
 
@@ -85,6 +87,43 @@ def get_active_router_subscriptions(
     return get_active_subscriptions(product_type="Router", includes=includes)
 
 
+def get_active_iptrunk_subscriptions(
+    includes: list[str] | None = None,
+) -> list[SubscriptionType]:
+    """Retrieve active subscriptions specifically for IP trunks.
+
+    :param includes: The fields to be included in the returned Subscription objects.
+    :type includes: list[str]
+
+    :return: A list of Subscription objects for IP trunks.
+    :rtype: list[Subscription]
+    """
+    return get_active_subscriptions(product_type="Iptrunk", includes=includes)
+
+
+def get_active_trunks_that_terminate_on_router(subscription_id: UUIDstr) -> list[SubscriptionTable]:
+    """Get all IP trunk subscriptions that are active, and terminate on the given ``subscription_id`` of a Router.
+
+    Given a ``subscription_id`` of a Router subscription, this method gives a list of all active IP trunk subscriptions
+    that terminate on this Router.
+
+    :param subscription_id: Subscription ID of a Router
+    :type subscription_id: UUIDstr
+
+    :return: A list of IP trunk subscriptions
+    :rtype: list[SubscriptionTable]
+    """
+    return (
+        query_in_use_by_subscriptions(UUID(subscription_id))
+        .join(ProductTable)
+        .filter(
+            ProductTable.product_type == "Iptrunk",
+            SubscriptionTable.status == "active",
+        )
+        .all()
+    )
+
+
 def get_product_id_by_name(product_name: ProductType) -> UUID:
     """Retrieve the :term:`UUID` of a product by its name.
 
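
The new helper backs the pre-condition check in the iBGP mesh workflow further down in this change; a minimal
sketch of calling it (the subscription ID is a placeholder):

    from gso.services.subscriptions import get_active_trunks_that_terminate_on_router

    router_subscription_id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"  # hypothetical Router subscription ID
    trunks = get_active_trunks_that_terminate_on_router(router_subscription_id)
    if not trunks:
        msg = "Selected router does not terminate any active IP trunks."
        raise ValueError(msg)
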
diff --git a/gso/settings.py b/gso/settings.py
index 78a27d756b03de700f1f7afcbcca6fe681a960f9..f05632ed3968d8f727c6d108618c0c80a5065ca8 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -94,6 +94,46 @@ class IPAMParams(BaseSettings):
     LT_IAS: ServiceNetworkParams
 
 
+class MonitoringSNMPV2Params(BaseSettings):
+    """Parameters related to SNMPv2."""
+
+    community: str
+
+
+class MonitoringSNMPV3Params(BaseSettings):
+    """Parameters related to SNMPv3."""
+
+    authlevel: str
+    authname: str
+    authpass: str
+    authalgo: str
+    cryptopass: str
+    cryptoalgo: str
+
+
+class MonitoringLibreNMSParams(BaseSettings):
+    """Parameters related to LibreNMS."""
+
+    base_url: str
+    token: str
+
+
+class SNMPParams(BaseSettings):
+    """Parameters for SNMP in LibreNMS."""
+
+    v2c: MonitoringSNMPV2Params
+    #: .. versionadded:: 2.0
+    #:    Support for :term:`SNMP` v3 will be added in a later version of :term:`GSO`. Parameters are optional for now.
+    v3: MonitoringSNMPV3Params | None
+
+
+class MonitoringParams(BaseSettings):
+    """Parameters related to the monitoring."""
+
+    LIBRENMS: MonitoringLibreNMSParams
+    SNMP: SNMPParams
+
+
 class ProvisioningProxyParams(BaseSettings):
     """Parameters for the provisioning proxy."""
 
@@ -118,6 +158,7 @@ class OSSParams(BaseSettings):
     GENERAL: GeneralParams
     IPAM: IPAMParams
     NETBOX: NetBoxParams
+    MONITORING: MonitoringParams
     PROVISIONING_PROXY: ProvisioningProxyParams
     CELERY: CeleryParams
 
diff --git a/gso/translations/en-GB.json b/gso/translations/en-GB.json
index ef849f987e5670543adea2eb5b41f7a0ba9d0c29..c8f8d2410f4b94719e127739b2a6e711e01e2efe 100644
--- a/gso/translations/en-GB.json
+++ b/gso/translations/en-GB.json
@@ -41,6 +41,7 @@
         "migrate_iptrunk": "Migrate IP Trunk",
         "modify_isis_metric": "Modify the ISIS metric",
         "modify_trunk_interface": "Modify IP Trunk interface",
-        "redeploy_base_config": "Redeploy base config"
+        "redeploy_base_config": "Redeploy base config",
+        "update_ibgp_mesh": "Update iBGP mesh"
     }
 }
diff --git a/gso/utils/helpers.py b/gso/utils/helpers.py
index 4bcef047fba589b42e332c080574d5ea9995e3b7..8c28dd2569032907b028f6f60278f454370ac43c 100644
--- a/gso/utils/helpers.py
+++ b/gso/utils/helpers.py
@@ -2,6 +2,7 @@
 
 import ipaddress
 import re
+from enum import StrEnum
 from ipaddress import IPv4Address
 from uuid import UUID
 
@@ -30,6 +31,13 @@ class LAGMember(BaseModel):
         return hash((self.interface_name, self.interface_description))
 
 
+class SNMPVersion(StrEnum):
+    """An enumerator for the two relevant versions of :term:`SNMP`: v2c and 3."""
+
+    V2C = "v2c"
+    V3 = "v3"
+
+
 def available_interfaces_choices(router_id: UUID, speed: str) -> Choice | None:
     """Return a list of available interfaces for a given router and speed.
 
@@ -39,7 +47,7 @@ def available_interfaces_choices(router_id: UUID, speed: str) -> Choice | None:
     if get_router_vendor(router_id) != RouterVendor.NOKIA:
         return None
     interfaces = {
-        interface["name"]: f"{interface['name']} - {interface['module']['display']} - {interface['description']}"
+        interface["name"]: f"{interface['name']} - {interface['description']}"
         for interface in NetboxClient().get_available_interfaces(router_id, speed)
     }
     return Choice("ae member", zip(interfaces.keys(), interfaces.items(), strict=True))  # type: ignore[arg-type]
diff --git a/gso/workflows/__init__.py b/gso/workflows/__init__.py
index 28ba2b51525cee346a26401e7f17c89e18229845..56f8a191946a79a786bc8764f70c03c77d9abd1b 100644
--- a/gso/workflows/__init__.py
+++ b/gso/workflows/__init__.py
@@ -11,6 +11,7 @@ LazyWorkflowInstance("gso.workflows.iptrunk.terminate_iptrunk", "terminate_iptru
 LazyWorkflowInstance("gso.workflows.router.create_router", "create_router")
 LazyWorkflowInstance("gso.workflows.router.redeploy_base_config", "redeploy_base_config")
 LazyWorkflowInstance("gso.workflows.router.terminate_router", "terminate_router")
+LazyWorkflowInstance("gso.workflows.router.update_ibgp_mesh", "update_ibgp_mesh")
 LazyWorkflowInstance("gso.workflows.site.create_site", "create_site")
 LazyWorkflowInstance("gso.workflows.site.modify_site", "modify_site")
 LazyWorkflowInstance("gso.workflows.site.terminate_site", "terminate_site")
diff --git a/gso/workflows/iptrunk/create_iptrunk.py b/gso/workflows/iptrunk/create_iptrunk.py
index 32834486e9f5b8ff9b2b1b1a86e6443a12b90b8d..acbfe3757568c2257e828e4ddbd506fcaac25bfd 100644
--- a/gso/workflows/iptrunk/create_iptrunk.py
+++ b/gso/workflows/iptrunk/create_iptrunk.py
@@ -12,6 +12,7 @@ from orchestrator.workflow import StepList, conditional, done, init, step, workf
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
 from pydantic import validator
+from pydantic_forms.core import ReadOnlyField
 from pynetbox.models.dcim import Interfaces
 
 from gso.products.product_blocks.iptrunk import (
@@ -24,7 +25,7 @@ from gso.products.product_blocks.router import RouterVendor
 from gso.products.product_types.iptrunk import IptrunkInactive, IptrunkProvisioning
 from gso.products.product_types.router import Router
 from gso.services import infoblox, subscriptions
-from gso.services.crm import customer_selector
+from gso.services.crm import get_customer_by_name
 from gso.services.netbox_client import NetboxClient
 from gso.services.provisioning_proxy import execute_playbook, pp_interaction
 from gso.utils.helpers import (
@@ -49,7 +50,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
             title = product_name
 
         tt_number: str
-        customer: customer_selector()  # type: ignore[valid-type]
+        customer: str = ReadOnlyField("GÉANT")
         geant_s_sid: str
         iptrunk_description: str
         iptrunk_type: IptrunkType
@@ -172,9 +173,9 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
 
 
 @step("Create subscription")
-def create_subscription(product: UUIDstr, customer: UUIDstr) -> State:
+def create_subscription(product: UUIDstr, customer: str) -> State:
     """Create a new subscription object in the database."""
-    subscription = IptrunkInactive.from_product_id(product, customer)
+    subscription = IptrunkInactive.from_product_id(product, get_customer_by_name(customer)["id"])
 
     return {
         "subscription": subscription,
@@ -215,6 +216,8 @@ def initialize_subscription(
     side_b_ae_members: list[dict],
 ) -> State:
     """Take all input from the user, and store it in the database."""
+    side_a = Router.from_subscription(side_a_node_id).router
+    side_b = Router.from_subscription(side_b_node_id).router
     subscription.iptrunk.geant_s_sid = geant_s_sid
     subscription.iptrunk.iptrunk_description = iptrunk_description
     subscription.iptrunk.iptrunk_type = iptrunk_type
@@ -222,7 +225,7 @@ def initialize_subscription(
     subscription.iptrunk.iptrunk_isis_metric = 90000
     subscription.iptrunk.iptrunk_minimum_links = iptrunk_minimum_links
 
-    subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node = Router.from_subscription(side_a_node_id).router
+    subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node = side_a
     subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_iface = side_a_ae_iface
     subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_geant_a_sid = side_a_ae_geant_a_sid
     for member in side_a_ae_members:
@@ -230,15 +233,15 @@ def initialize_subscription(
             IptrunkInterfaceBlockInactive.new(subscription_id=uuid4(), **member),
         )
 
-    subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node = Router.from_subscription(side_b_node_id).router
+    subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node = side_b
     subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_iface = side_b_ae_iface
     subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_geant_a_sid = side_b_ae_geant_a_sid
     for member in side_b_ae_members:
         subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members.append(
             IptrunkInterfaceBlockInactive.new(subscription_id=uuid4(), **member),
         )
-
-    subscription.description = f"IP trunk, geant_s_sid:{geant_s_sid}"
+    side_names = sorted([side_a.router_site.site_name, side_b.router_site.site_name])
+    subscription.description = f"IP trunk {side_names[0]} {side_names[1]}, geant_s_sid:{geant_s_sid}"
     subscription = IptrunkProvisioning.from_other_lifecycle(subscription, SubscriptionLifecycle.PROVISIONING)
 
     return {"subscription": subscription}
diff --git a/gso/workflows/router/create_router.py b/gso/workflows/router/create_router.py
index f07cec4e287057a495cd5038793e40c738261a45..6bec84a6a1eca393da95d68e28258e3c70d1946c 100644
--- a/gso/workflows/router/create_router.py
+++ b/gso/workflows/router/create_router.py
@@ -10,6 +10,7 @@ from orchestrator.workflow import StepList, conditional, done, init, step, workf
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
 from pydantic import validator
+from pydantic_forms.core import ReadOnlyField
 
 from gso.products.product_blocks.router import (
     PortNumber,
@@ -20,7 +21,7 @@ from gso.products.product_blocks.router import (
 from gso.products.product_types.router import RouterInactive, RouterProvisioning
 from gso.products.product_types.site import Site
 from gso.services import infoblox, subscriptions
-from gso.services.crm import customer_selector
+from gso.services.crm import get_customer_by_name
 from gso.services.netbox_client import NetboxClient
 from gso.services.provisioning_proxy import pp_interaction
 from gso.utils.helpers import iso_from_ipv4
@@ -44,7 +45,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
             title = product_name
 
         tt_number: str
-        customer: customer_selector()  # type: ignore[valid-type]
+        customer: str = ReadOnlyField("GÉANT")
         vendor: RouterVendor
         router_site: _site_selector()  # type: ignore[valid-type]
         hostname: str
@@ -72,9 +73,9 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
 
 
 @step("Create subscription")
-def create_subscription(product: UUIDstr, customer: UUIDstr) -> State:
+def create_subscription(product: UUIDstr, customer: str) -> State:
     """Create a new subscription object."""
-    subscription = RouterInactive.from_product_id(product, customer)
+    subscription = RouterInactive.from_product_id(product, get_customer_by_name(customer)["id"])
 
     return {
         "subscription": subscription,
diff --git a/gso/workflows/router/update_ibgp_mesh.py b/gso/workflows/router/update_ibgp_mesh.py
new file mode 100644
index 0000000000000000000000000000000000000000..32353fcf12856cf73e76b2ad16d6a8e87c72d4ad
--- /dev/null
+++ b/gso/workflows/router/update_ibgp_mesh.py
@@ -0,0 +1,228 @@
+"""Update iBGP mesh workflow. Adds a new P router to the mesh of PE routers in the network."""
+
+from typing import Any
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import root_validator
+
+from gso.products.product_blocks.router import RouterRole
+from gso.products.product_types.router import Router
+from gso.services import librenms_client, provisioning_proxy, subscriptions
+from gso.services.provisioning_proxy import pp_interaction
+from gso.services.subscriptions import get_active_trunks_that_terminate_on_router
+from gso.utils.helpers import SNMPVersion
+
+
+def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
+    """Show a confirmation window before running the workflow.
+
+    Does not allow for user input, but does run a validation check. The workflow is only allowed to run if the router
+    is already connected to at least one IP trunk.
+    """
+    subscription = Router.from_subscription(subscription_id)
+
+    class AddBGPSessionForm(FormPage):
+        class Config:
+            title = f"Add {subscription.router.router_fqdn} to the iBGP mesh?"
+
+        tt_number: str
+
+        @root_validator(allow_reuse=True)
+        def router_has_a_trunk(cls, values: dict[str, Any]) -> dict[str, Any]:
+            if len(get_active_trunks_that_terminate_on_router(subscription_id)) == 0:
+                msg = "Selected router does not terminate any active IP trunks."
+                raise ValueError(msg)
+
+            return values
+
+    user_input = yield AddBGPSessionForm
+
+    return user_input.dict()
+
+
+@step("Calculate list of all active PE routers")
+def calculate_pe_router_list() -> State:
+    """Calculate a list of all active PE routers in the network."""
+    all_routers = [
+        Router.from_subscription(r["subscription_id"]) for r in subscriptions.get_active_router_subscriptions()
+    ]
+    all_pe_routers = [router for router in all_routers if router.router.router_role == RouterRole.PE]
+
+    return {"pe_router_list": all_pe_routers}
+
+
+def _generate_pe_inventory(pe_router_list: list[Router]) -> dict[str, Any]:
+    """Generate an Ansible-compatible inventory for executing playbooks. Contains all active PE routers."""
+    return {
+        "_meta": {
+            "vars": {
+                router.router.router_fqdn: {
+                    "lo4": str(router.router.router_lo_ipv4_address),
+                    "lo6": str(router.router.router_lo_ipv6_address),
+                    "vendor": router.router.vendor,
+                }
+                for router in pe_router_list
+            }
+        },
+        "all": {"hosts": {router.router.router_fqdn: None for router in pe_router_list}},
+    }
+
+
+@step("[DRY RUN] Add P router to iBGP mesh")
+def add_p_to_mesh_dry(
+    subscription: Router, callback_route: str, pe_router_list: list[Router], tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of adding the new P router to the PE router mesh."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
+        "verb": "add_p_to_pe",
+    }
+
+    provisioning_proxy.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=_generate_pe_inventory(pe_router_list),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add P router to iBGP mesh")
+def add_p_to_mesh_real(
+    subscription: Router, callback_route: str, pe_router_list: list[Router], tt_number: str, process_id: UUIDstr
+) -> None:
+    """Add the P router to the mesh of PE routers."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
+        "verb": "add_p_to_pe",
+    }
+
+    provisioning_proxy.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=_generate_pe_inventory(pe_router_list),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Add all PE routers to P router iBGP table")
+def add_all_pe_to_p_dry(
+    subscription: Router, pe_router_list: list[Router], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of adding the list of all PE routers to the new P router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "pe_router_list": {
+            router.router.router_fqdn: {
+                "lo4": str(router.router.router_lo_ipv4_address),
+                "lo6": str(router.router.router_lo_ipv6_address),
+                "vendor": router.router.vendor,
+            }
+            for router in pe_router_list
+        },
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
+        "verb": "add_pe_to_p",
+    }
+
+    provisioning_proxy.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription.router.router_fqdn,
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add all PE routers to P router iBGP table")
+def add_all_pe_to_p_real(
+    subscription: Router, pe_router_list: list[Router], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Add the list of all PE routers to the new P router."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "pe_router_list": {
+            router.router.router_fqdn: {
+                "lo4": str(router.router.router_lo_ipv4_address),
+                "lo6": str(router.router.router_lo_ipv6_address),
+                "vendor": router.router.vendor,
+            }
+            for router in pe_router_list
+        },
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Update iBGP mesh",
+        "verb": "add_pe_to_p",
+    }
+
+    provisioning_proxy.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription.router.router_fqdn,
+        extra_vars=extra_vars,
+    )
+
+
+@step("Verify iBGP session health")
+def check_ibgp_session(subscription: Router, callback_route: str) -> None:
+    """Run a playbook using the provisioning proxy, to check the health of the new iBGP session."""
+    provisioning_proxy.execute_playbook(
+        playbook_name="check_ibgp.yaml",
+        callback_route=callback_route,
+        inventory=subscription.router.router_fqdn,
+        extra_vars={},
+    )
+
+
+@step("Add the router to LibreNMS")
+def add_device_to_librenms(subscription: Router) -> State:
+    """Add the router as a device to LibreNMS."""
+    client = librenms_client.LibreNMSClient()
+    librenms_result = client.add_device(subscription.router.router_fqdn, SNMPVersion.V2C)
+
+    return {"librenms_device": librenms_result}
+
+
+@step("Update subscription model")
+def update_subscription_model(subscription: Router) -> State:
+    """Update the database model, such that it should not be reached via :term:`OOB` access anymore."""
+    subscription.router.router_access_via_ts = False
+
+    return {"subscription": subscription}
+
+
+@workflow(
+    "Update iBGP mesh",
+    initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
+    target=Target.MODIFY,
+)
+def update_ibgp_mesh() -> StepList:
+    """Update the iBGP mesh with a new P router.
+
+    * Add the new P-router to all other PE-routers in the network, including a dry run.
+    * Add all PE-routers to the P-router, including a dry run.
+    * Verify that the iBGP session is up.
+    * Add the new P-router to LibreNMS.
+    * Update the subscription model.
+    """
+    return (
+        init
+        >> store_process_subscription(Target.MODIFY)
+        >> unsync
+        >> calculate_pe_router_list
+        >> pp_interaction(add_p_to_mesh_dry)
+        >> pp_interaction(add_p_to_mesh_real)
+        >> pp_interaction(add_all_pe_to_p_dry)
+        >> pp_interaction(add_all_pe_to_p_real)
+        >> pp_interaction(check_ibgp_session)
+        >> add_device_to_librenms
+        >> update_subscription_model
+        >> resync
+        >> done
+    )
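
For reference, ``_generate_pe_inventory`` above produces an Ansible-compatible structure along these lines (router
names and addresses are placeholders):

    example_inventory = {
        "_meta": {
            "vars": {
                "rt1.example.geant.org": {"lo4": "10.0.0.1", "lo6": "2001:db8::1", "vendor": "nokia"},
                "rt2.example.geant.org": {"lo4": "10.0.0.2", "lo6": "2001:db8::2", "vendor": "juniper"},
            }
        },
        "all": {"hosts": {"rt1.example.geant.org": None, "rt2.example.geant.org": None}},
    }
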
diff --git a/gso/workflows/site/create_site.py b/gso/workflows/site/create_site.py
index d94c119629f95070fc68d87f8161cdb7c79f5053..8e6e13f8436375b4465959985cc115c1e74c40cc 100644
--- a/gso/workflows/site/create_site.py
+++ b/gso/workflows/site/create_site.py
@@ -6,11 +6,12 @@ from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUID
 from orchestrator.workflow import StepList, done, init, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
+from pydantic_forms.core import ReadOnlyField
 
 from gso.products.product_blocks import site as site_pb
 from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate
 from gso.products.product_types import site
-from gso.services.crm import customer_selector
+from gso.services.crm import get_customer_by_name
 from gso.utils.helpers import BaseSiteValidatorModel
 
 
@@ -21,7 +22,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
         class Config:
             title = product_name
 
-        customer: customer_selector()  # type: ignore[valid-type]
+        customer: str = ReadOnlyField("GÉANT")
         site_name: str
         site_city: str
         site_country: str
@@ -39,9 +40,9 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
 
 
 @step("Create subscription")
-def create_subscription(product: UUIDstr, customer: UUIDstr) -> State:
+def create_subscription(product: UUIDstr, customer: str) -> State:
     """Create a new subscription object in the service database."""
-    subscription = site.SiteInactive.from_product_id(product, customer)
+    subscription = site.SiteInactive.from_product_id(product, get_customer_by_name(customer)["id"])
 
     return {
         "subscription": subscription,
diff --git a/setup.py b/setup.py
index afc43afa32e88cf09460abbe3db122473eb538ae..ecb402620217e9355290ad5f0ff3d325bffe7faa 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="geant-service-orchestrator",
-    version="0.2",
+    version="0.3",
     author="GÉANT",
     author_email="swd@geant.org",
     description="GÉANT Service Orchestrator",
diff --git a/test/imports/__init__.py b/test/api/__init__.py
similarity index 100%
rename from test/imports/__init__.py
rename to test/api/__init__.py
diff --git a/test/imports/conftest.py b/test/api/conftest.py
similarity index 100%
rename from test/imports/conftest.py
rename to test/api/conftest.py
diff --git a/test/imports/test_imports.py b/test/api/test_imports.py
similarity index 100%
rename from test/imports/test_imports.py
rename to test/api/test_imports.py
diff --git a/test/api/test_processes.py b/test/api/test_processes.py
new file mode 100644
index 0000000000000000000000000000000000000000..671218400c022a96eaa1e119be60db4fa5ec0d7b
--- /dev/null
+++ b/test/api/test_processes.py
@@ -0,0 +1,49 @@
+from uuid import uuid4
+
+import pytest
+from orchestrator.db import (
+    ProcessStepTable,
+    ProcessSubscriptionTable,
+    ProcessTable,
+    db,
+)
+from orchestrator.workflow import ProcessStatus
+
+
+@pytest.fixture()
+def create_process(faker, nokia_router_subscription_factory):
+    process_id = uuid4()
+    process = ProcessTable(process_id=process_id, workflow_name=faker.sentence(), last_status=ProcessStatus.SUSPENDED)
+    subscription = nokia_router_subscription_factory()
+    process_subscription = ProcessSubscriptionTable(process_id=process_id, subscription_id=subscription)
+
+    db.session.add(process)
+    db.session.add(process_subscription)
+    db.session.commit()
+
+    return process_id
+
+
+def test_callback_results_endpoint(test_client, create_process, faker):
+    expected_result = {"id": 1, "output": faker.sentence()}
+
+    step = ProcessStepTable(
+        process_id=create_process,
+        name="Modify",
+        status="suspend",
+        state={"subscription_id": uuid4(), "callback_result": expected_result},
+    )
+    db.session.add(step)
+    db.session.commit()
+
+    response = test_client.get(f"/api/v1/processes/steps/{step.step_id}/callback-results")
+
+    assert response.status_code == 200
+    assert response.json() == {"callback_results": expected_result}
+
+
+def test_callback_results_endpoint_with_wrong_step_id(test_client):
+    response = test_client.get(f"/api/v1/processes/steps/{uuid4()}/callback-results")
+
+    assert response.status_code == 404
+    assert response.json() == {"detail": "Callback result not found."}
diff --git a/test/subscriptions/test_subscriptions.py b/test/api/test_subscriptions.py
similarity index 100%
rename from test/subscriptions/test_subscriptions.py
rename to test/api/test_subscriptions.py
diff --git a/test/auth/test_oidc_policy_helper.py b/test/auth/test_oidc_policy_helper.py
index 500b18cb6ee8420497768ee7ebde0ed364b46cdf..14af9f6b4ee55c5025aaef64414017f85a8f7513 100644
--- a/test/auth/test_oidc_policy_helper.py
+++ b/test/auth/test_oidc_policy_helper.py
@@ -12,6 +12,7 @@ from gso.auth.oidc_policy_helper import (
     OPAResult,
     _evaluate_decision,
     _get_decision,
+    _is_callback_step_endpoint,
     opa_decision,
 )
 from gso.auth.settings import oauth2lib_settings
@@ -285,3 +286,24 @@ async def test_oidc_user_call_token_from_request(oidc_user, mock_request, mock_a
     assert isinstance(result, OIDCUserModel)
     assert result["sub"] == "123"
     assert result["name"] == "John Doe"
+
+
+@pytest.mark.parametrize(
+    ("path", "expected"),
+    [
+        (
+            "/api/processes/daa171b3-7a76-4ac5-9528-11aefa5a6222/callback/9MS2tkFLl-TvWUHD2yhftfFSnPLR-koQolXBeG8OE-o",
+            True,
+        ),
+        ("/api/some/other/path", False),
+    ],
+)
+def test_is_callback_step_endpoint(path, expected):
+    request = Request(
+        scope={
+            "type": "http",
+            "path": path,
+            "headers": [(b"host", b"example.com")],
+        }
+    )
+    assert _is_callback_step_endpoint(request) is expected
diff --git a/test/conftest.py b/test/conftest.py
index 779fc39d0f50addb3875baa4a0836adc071f8d86..edb1dffba401f2f57b5c140d171570468c7ee8e7 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -174,6 +174,25 @@ def configuration_data() -> dict:
                     "dns_view": "default",
                 },
             },
+            "MONITORING": {
+                "LIBRENMS": {
+                    "base_url": "http://librenms",
+                    "token": "secret-token",
+                },
+                "SNMP": {
+                    "v2c": {
+                        "community": "fake-community",
+                    },
+                    "v3": {
+                        "authlevel": "AuthPriv",
+                        "authname": "librenms",
+                        "authpass": "<password1>",
+                        "authalgo": "sha",
+                        "cryptopass": "<password2>",
+                        "cryptoalgo": "aes",
+                    },
+                },
+            },
             "PROVISIONING_PROXY": {
                 "scheme": "https",
                 "api_base": "localhost:44444",
diff --git a/test/services/conftest.py b/test/services/conftest.py
index ce6c6c45b2d21861300120d7b35885a98e6ca23d..9fc3d191369b863e151721b7eed5ced2c4ee8d7c 100644
--- a/test/services/conftest.py
+++ b/test/services/conftest.py
@@ -9,7 +9,7 @@ class MockedNetboxClient:
 
     @staticmethod
     def get_available_lags() -> list[str]:
-        return [f"LAG{lag}" for lag in range(1, 5)]
+        return [f"lag-{lag}" for lag in range(1, 5)]
 
     @staticmethod
     def get_available_interfaces():
diff --git a/test/services/test_librenms_client.py b/test/services/test_librenms_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..55df5ce176329a66587b53c366988b325ecf49e0
--- /dev/null
+++ b/test/services/test_librenms_client.py
@@ -0,0 +1,458 @@
+from http import HTTPStatus
+from unittest.mock import patch
+
+import pytest
+from requests import HTTPError
+
+from gso.services.librenms_client import LibreNMSClient
+from gso.utils.helpers import SNMPVersion
+
+
+@pytest.fixture()
+def mock_get_device_success(faker):
+    with patch("gso.services.librenms_client.requests.get") as mock_get_device:
+        mock_get_device().status_code = HTTPStatus.OK
+        mock_get_device().json.return_value = {
+            "status": "ok",
+            "devices": [
+                {
+                    "device_id": 1,
+                    "inserted": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "hostname": "localhost",
+                    "sysName": "librenms",
+                    "display": None,
+                    "ip": faker.ipv4(),
+                    "overwrite_ip": None,
+                    "community": "librenms-community",
+                    "authlevel": None,
+                    "authname": None,
+                    "authpass": None,
+                    "authalgo": None,
+                    "cryptopass": None,
+                    "cryptoalgo": None,
+                    "snmpver": "v2c",
+                    "port": faker.port_number(),
+                    "transport": "udp",
+                    "timeout": None,
+                    "retries": None,
+                    "snmp_disable": 0,
+                    "bgpLocalAs": None,
+                    "sysObjectID": ".1.3.6.1.4.1.8072.3.2.10",
+                    "sysDescr": "Linux librenms 5.15.0-79-generic #86-Ubuntu SMP Mon Jul 10 16:07:21 UTC 2023 x86_64",
+                    "sysContact": "Your Name <your@email.address>",
+                    "version": "5.15.0-79-generic",
+                    "hardware": "Generic x86 64-bit",
+                    "features": "Ubuntu 22.04",
+                    "location_id": 1,
+                    "os": "linux",
+                    "status": True,
+                    "status_reason": "",
+                    "ignore": 0,
+                    "disabled": 0,
+                    "uptime": faker.pyint(),
+                    "agent_uptime": 0,
+                    "last_polled": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_poll_attempted": None,
+                    "last_polled_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "purpose": None,
+                    "type": "server",
+                    "serial": None,
+                    "icon": "images/os/ubuntu.svg",
+                    "poller_group": 0,
+                    "override_sysLocation": 0,
+                    "notes": None,
+                    "port_association_mode": 1,
+                    "max_depth": 0,
+                    "disable_notify": 0,
+                    "location": "Rack, Room, Building, City, Country [Lat, Lon]",
+                    "lat": None,
+                    "lng": None,
+                },
+            ],
+            "count": 1,
+        }
+
+        yield mock_get_device
+
+
+@pytest.fixture()
+def mock_get_device_not_found():
+    with patch("gso.services.librenms_client.requests.get") as mock_get_not_found:
+        mock_get_not_found().status_code = HTTPStatus.NOT_FOUND
+        mock_get_not_found().json.return_value = {
+            "status": "error",
+            "message": "Device non-existent-url does not exist",
+        }
+        mock_get_not_found().raise_for_status.side_effect = HTTPError(
+            "404 Client Error: Not Found for url: http://librenms/devices/non-existent-url",
+            response=mock_get_not_found(),
+        )
+
+        yield mock_get_not_found
+
+
+@pytest.fixture()
+def mock_get_device_misconfigured(faker):
+    with patch("gso.services.librenms_client.requests.get") as mock_get_device:
+        mock_get_device().status_code = HTTPStatus.OK
+        mock_get_device().json.return_value = {
+            "status": "ok",
+            "devices": [
+                {
+                    "device_id": 1,
+                    "inserted": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "hostname": "127.0.0.1",
+                    "sysName": "librenms",
+                    "display": None,
+                    "ip": faker.ipv4(),
+                    "overwrite_ip": None,
+                    "community": "librenms-community",
+                    "authlevel": None,
+                    "authname": None,
+                    "authpass": None,
+                    "authalgo": None,
+                    "cryptopass": None,
+                    "cryptoalgo": None,
+                    "snmpver": "v2c",
+                    "port": faker.port_number(),
+                    "transport": "udp",
+                    "timeout": None,
+                    "retries": None,
+                    "snmp_disable": 0,
+                    "bgpLocalAs": None,
+                    "sysObjectID": ".1.3.6.1.4.1.8072.3.2.10",
+                    "sysDescr": "Linux librenms 5.15.0-79-generic #86-Ubuntu SMP Mon Jul 10 16:07:21 UTC 2023 x86_64",
+                    "sysContact": "Your Name <your@email.address>",
+                    "version": "5.15.0-79-generic",
+                    "hardware": "Generic x86 64-bit",
+                    "features": "Ubuntu 22.04",
+                    "location_id": 1,
+                    "os": "linux",
+                    "status": True,
+                    "status_reason": "",
+                    "ignore": 0,
+                    "disabled": 0,
+                    "uptime": faker.pyint(),
+                    "agent_uptime": 0,
+                    "last_polled": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_poll_attempted": None,
+                    "last_polled_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "purpose": None,
+                    "type": "server",
+                    "serial": None,
+                    "icon": "images/os/ubuntu.svg",
+                    "poller_group": 0,
+                    "override_sysLocation": 0,
+                    "notes": None,
+                    "port_association_mode": 1,
+                    "max_depth": 0,
+                    "disable_notify": 0,
+                    "location": "Rack, Room, Building, City, Country [Lat, Lon]",
+                    "lat": None,
+                    "lng": None,
+                },
+            ],
+            "count": 1,
+        }
+
+        yield mock_get_device
+
+
+@pytest.fixture()
+def mock_get_device_unauthenticated():
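+    """Mock a 401 Unauthenticated response, patching both the HTTP call and get_device so the error propagates."""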
+    with patch("gso.services.librenms_client.requests.get") as mock_get_unauthorized, patch(
+        "gso.services.librenms_client.LibreNMSClient.get_device",
+    ) as mock_get_device:
+        mock_get_unauthorized().status_code = HTTPStatus.UNAUTHORIZED
+        mock_get_unauthorized().json.return_value = {"message": "Unauthenticated."}
+        mock_get_device.side_effect = HTTPError(
+            "401 Client Error: Unauthorized for url: http://librenms/devices/naughty-url",
+            response=mock_get_unauthorized(),
+        )
+
+        yield mock_get_unauthorized
+
+
+@pytest.fixture()
+def mock_add_device_success():
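+    """Mock a successful POST request that adds a new device to LibreNMS."""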
+    with patch("gso.services.librenms_client.requests.post") as mock_post_device:
+        mock_post_device().status_code = HTTPStatus.OK
+        mock_post_device().json.return_value = {
+            "status": "ok",
+            "devices": [
+                {
+                    "community": "secret-community",
+                    "display": "localhost",
+                    "hostname": "localhost",
+                    "snmpver": "v2c",
+                    "port": 161,
+                    "transport": "udp",
+                    "poller_group": 0,
+                    "os": "linux",
+                    "status_reason": "",
+                    "sysName": "librenms",
+                    "port_association_mode": 1,
+                    "authlevel": None,
+                    "authname": None,
+                    "authalgo": None,
+                    "cryptopass": None,
+                    "cryptoalgo": None,
+                    "sysDescr": "Linux librenms 5.15.0-79-generic #86-Ubuntu SMP Mon Jul 10 16:07:21 UTC 2023 x86_64",
+                    "sysObjectID": ".1.3.6.1.4.1.8072.3.2.10",
+                    "device_id": 2,
+                },
+            ],
+            "message": "Device localhost has been added successfully",
+            "count": 1,
+        }
+
+        yield mock_post_device
+
+
+@pytest.fixture()
+def mock_add_device_bad_url():
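+    """Mock a failed device addition caused by a hostname that does not resolve to an IP address."""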
+    with patch("gso.services.librenms_client.requests.post") as mock_post_device:
+        mock_post_device().status_code = HTTPStatus.INTERNAL_SERVER_ERROR
+        mock_post_device().json.return_value = {
+            "status": "error",
+            "message": "Could not ping non-existent-url (Hostname did not resolve to IP)",
+        }
+        mock_post_device().raise_for_status.side_effect = HTTPError(
+            "500 Server Error: Internal server error for url: http://librenms/devices",
+            response=mock_post_device(),
+        )
+
+        yield mock_post_device
+
+
+@pytest.fixture()
+def mock_add_device_unreachable():
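+    """Mock a failed device addition caused by a host that is not reachable over SNMP."""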
+    with patch("gso.services.librenms_client.requests.post") as mock_post_device:
+        mock_post_device().status_code = HTTPStatus.INTERNAL_SERVER_ERROR
+        mock_post_device().json.return_value = {
+            "status": "error",
+            "message": "Could not connect to non-existent-url, please check the snmp details and snmp reachability",
+        }
+        mock_post_device().raise_for_status.side_effect = HTTPError(
+            "500 Server Error: Internal server error for url: http://librenms/devices",
+            response=mock_post_device(),
+        )
+
+        yield mock_post_device
+
+
+@pytest.fixture()
+def mock_remove_device_success(faker):
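+    """Mock a successful DELETE request that removes a device from LibreNMS."""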
+    with patch("gso.services.librenms_client.requests.delete") as mock_remove_device:
+        mock_remove_device().status_code = HTTPStatus.OK
+        mock_remove_device().json.return_value = {
+            "status": "ok",
+            "devices": [
+                {
+                    "device_id": 2,
+                    "inserted": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "hostname": "localhost",
+                    "sysName": "librenms",
+                    "display": "localhost",
+                    "ip": faker.ipv4(),
+                    "overwrite_ip": None,
+                    "community": "snmp-community",
+                    "authlevel": None,
+                    "authname": None,
+                    "authpass": None,
+                    "authalgo": None,
+                    "cryptopass": None,
+                    "cryptoalgo": None,
+                    "snmpver": "v2c",
+                    "port": 161,
+                    "transport": "udp",
+                    "timeout": None,
+                    "retries": None,
+                    "snmp_disable": 0,
+                    "bgpLocalAs": None,
+                    "sysObjectID": ".1.3.6.1.4.1.8072.3.2.10",
+                    "sysDescr": "Linux librenms 5.15.0-79-generic #86-Ubuntu SMP Mon Jul 10 16:07:21 UTC 2023 x86_64",
+                    "sysContact": "Your Name <your@email.address>",
+                    "version": "5.15.0-79-generic",
+                    "hardware": "Generic x86 64-bit",
+                    "features": "Ubuntu 22.04",
+                    "location_id": 1,
+                    "os": "linux",
+                    "status": True,
+                    "status_reason": "",
+                    "ignore": 0,
+                    "disabled": 0,
+                    "uptime": 8057430,
+                    "agent_uptime": 0,
+                    "last_polled": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_poll_attempted": None,
+                    "last_polled_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "last_discovered": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping": faker.date("%Y-%m-%dT%H:%M:%S.%fZ"),
+                    "last_ping_timetaken": faker.pyfloat(left_digits=1, positive=True),
+                    "purpose": None,
+                    "type": "server",
+                    "serial": None,
+                    "icon": "images/os/ubuntu.svg",
+                    "poller_group": 0,
+                    "override_sysLocation": 0,
+                    "notes": None,
+                    "port_association_mode": 1,
+                    "max_depth": 0,
+                    "disable_notify": 0,
+                    "location": "Rack, Room, Building, City, Country [Lat, Lon]",
+                    "lat": None,
+                    "lng": None,
+                },
+            ],
+            "message": "Removed device localhost\n",
+            "count": 1,
+        }
+
+        yield mock_remove_device
+
+
+@pytest.fixture()
+def mock_remove_device_non_existent(faker):
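+    """Mock a 404 Not Found response when removing a device that does not exist in LibreNMS."""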
+    with patch("gso.services.librenms_client.requests.delete") as mock_remove_device:
+        mock_remove_device().status_code = HTTPStatus.NOT_FOUND
+        mock_remove_device().json.return_value = {"status": "error", "message": "Device non-existent-url not found"}
+        mock_remove_device().raise_for_status.side_effect = HTTPError(
+            "404 Client Error: Not Found for url: http://librenms/devices/non-existent-url",
+            response=mock_remove_device(),
+        )
+
+        yield mock_remove_device
+
+
+def test_get_device_success(mock_get_device_success):
+    client = LibreNMSClient()
+    device = client.get_device("localhost")
+
+    assert device["status"] == "ok"
+    assert device["devices"][0]["hostname"] == "localhost"
+
+
+def test_get_device_not_found(mock_get_device_not_found):
+    client = LibreNMSClient()
+
+    with pytest.raises(HTTPError) as e:
+        client.get_device("non-existent-url")
+
+    assert e.value.response.status_code == HTTPStatus.NOT_FOUND
+    assert e.value.response.json() == {"status": "error", "message": "Device non-existent-url does not exist"}
+    assert e.value.args[0] == "404 Client Error: Not Found for url: http://librenms/devices/non-existent-url"
+
+
+def test_device_exists_true(mock_get_device_success):
+    client = LibreNMSClient()
+
+    assert client.device_exists("localhost")
+
+
+def test_device_exists_false(mock_get_device_not_found):
+    client = LibreNMSClient()
+
+    assert not client.device_exists("non-existent-url")
+
+
+def test_device_exists_unauthenticated(mock_get_device_unauthenticated):
+    client = LibreNMSClient()
+
+    with pytest.raises(HTTPError) as e:
+        client.device_exists("naughty-url")
+
+    assert e.value.response.status_code == HTTPStatus.UNAUTHORIZED
+    assert e.value.response.json() == {"message": "Unauthenticated."}
+    assert e.value.args[0] == "401 Client Error: Unauthorized for url: http://librenms/devices/naughty-url"
+
+
+def test_add_device_success(mock_add_device_success):
+    fqdn = "localhost"
+    client = LibreNMSClient()
+    new_device = client.add_device(fqdn, SNMPVersion.V2C)
+
+    assert new_device["status"] == "ok"
+    assert new_device["devices"][0]["hostname"] == fqdn
+    assert new_device["devices"][0]["snmpver"] == SNMPVersion.V2C.value
+
+
+def test_add_device_bad_fqdn(mock_add_device_bad_url):
+    fqdn = "non-existent-url"
+    client = LibreNMSClient()
+
+    with pytest.raises(HTTPError) as e:
+        client.add_device(fqdn, SNMPVersion.V2C)
+
+    assert e.value.response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
+    assert e.value.response.json() == {
+        "status": "error",
+        "message": "Could not ping non-existent-url (Hostname did not resolve to IP)",
+    }
+    assert e.value.args[0] == "500 Server Error: Internal server error for url: http://librenms/devices"
+
+
+def test_add_device_no_ping(mock_add_device_unreachable):
+    fqdn = "non-existent-url"
+    client = LibreNMSClient()
+
+    with pytest.raises(HTTPError) as e:
+        client.add_device(fqdn, SNMPVersion.V2C)
+
+    assert e.value.response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
+    assert e.value.response.json() == {
+        "status": "error",
+        "message": "Could not connect to non-existent-url, please check the snmp details and snmp reachability",
+    }
+    assert e.value.args[0] == "500 Server Error: Internal server error for url: http://librenms/devices"
+
+
+def test_remove_device_success(mock_remove_device_success):
+    client = LibreNMSClient()
+    device = client.remove_device("localhost")
+
+    assert device["status"] == "ok"
+    assert device["devices"][0]["hostname"] == "localhost"
+
+
+def test_remove_non_existent_device(mock_remove_device_non_existent):
+    client = LibreNMSClient()
+
+    with pytest.raises(HTTPError) as e:
+        client.remove_device("non-existent-url")
+
+    assert e.value.response.status_code == HTTPStatus.NOT_FOUND
+    assert e.value.response.json() == {"status": "error", "message": "Device non-existent-url not found"}
+    assert e.value.args[0] == "404 Client Error: Not Found for url: http://librenms/devices/non-existent-url"
+
+
+def test_validate_device_success(mock_get_device_success):
+    client = LibreNMSClient()
+    errors = client.validate_device("localhost")
+
+    assert not errors
+
+
+def test_validate_device_non_existent(mock_get_device_not_found):
+    client = LibreNMSClient()
+    errors = client.validate_device("non-existent-url")
+
+    assert len(errors) == 1
+    assert errors[0] == "Device does not exist in LibreNMS."
+
+
+def test_validate_device_misconfigured(mock_get_device_misconfigured):
+    client = LibreNMSClient()
+    errors = client.validate_device("localhost")
+
+    assert len(errors) == 1
+    assert errors[0] == "Device hostname in LibreNMS does not match FQDN."
diff --git a/test/services/test_netbox.py b/test/services/test_netbox_client.py
similarity index 98%
rename from test/services/test_netbox.py
rename to test/services/test_netbox_client.py
index c4dc6bdd768c36a7f0624022454b1fb97e677468..d03bf828cc5af5add0b53171ef403d8f07ab1609 100644
--- a/test/services/test_netbox.py
+++ b/test/services/test_netbox_client.py
@@ -107,12 +107,12 @@ def test_create_device(
 @patch("gso.services.netbox_client.pynetbox.api")
 def test_get_available_lags(mock_api, mock_from_subscription, data_config_filename: PathLike):
     router_id = uuid.uuid4()
-    feasible_lags = [f"LAG-{i}" for i in range(1, 11)]
+    feasible_lags = [f"lag-{i}" for i in range(1, 11)]
 
     # Mock the pynetbox API instance
     mock_netbox = mock_api.return_value
     mock_filter = mock_netbox.dcim.interfaces.filter
-    mock_filter.return_value = [{"name": f"LAG-{i}", "type": "lag"} for i in range(1, 4)]
+    mock_filter.return_value = [{"name": f"lag-{i}", "type": "lag"} for i in range(1, 4)]
 
     # Mock the Router.from_subscription method
     mock_subscription = mock_from_subscription.return_value
@@ -123,7 +123,7 @@ def test_get_available_lags(mock_api, mock_from_subscription, data_config_filena
     result = netbox_client.get_available_lags(router_id)
 
     # Check the result of the function
-    assert result == [lag for lag in feasible_lags if lag not in [f"LAG-{i}" for i in range(1, 4)]]
+    assert result == [lag for lag in feasible_lags if lag not in [f"lag-{i}" for i in range(1, 4)]]
 
 
 @patch("gso.services.netbox_client.pynetbox.api")
diff --git a/test/subscriptions/__init__.py b/test/subscriptions/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/test/subscriptions/conftest.py b/test/subscriptions/conftest.py
deleted file mode 100644
index 428b0a147c77514e35b1c674499c0eacced44ab2..0000000000000000000000000000000000000000
--- a/test/subscriptions/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-from test.fixtures import nokia_router_subscription_factory, site_subscription_factory  # noqa: F401
diff --git a/test/workflows/iptrunk/test_create_iptrunk.py b/test/workflows/iptrunk/test_create_iptrunk.py
index 773dabb38be8eb6861d47524e3ac316c60cabdb8..59ecc85e9e5fc3901ebf519246c9ca6fdb0a95ec 100644
--- a/test/workflows/iptrunk/test_create_iptrunk.py
+++ b/test/workflows/iptrunk/test_create_iptrunk.py
@@ -6,7 +6,6 @@ import pytest
 from gso.products import Iptrunk, ProductType
 from gso.products.product_blocks.iptrunk import IptrunkType, PhyPortCapacity
 from gso.products.product_blocks.router import RouterVendor
-from gso.services.crm import customer_selector, get_customer_by_name
 from gso.services.subscriptions import get_product_id_by_name
 from gso.utils.helpers import LAGMember
 from test.services.conftest import MockedNetboxClient
@@ -60,7 +59,6 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
 
     create_ip_trunk_step = {
         "tt_number": faker.tt_number(),
-        "customer": getattr(customer_selector(), get_customer_by_name("GÉANT")["id"]),
         "geant_s_sid": faker.geant_sid(),
         "iptrunk_type": IptrunkType.DARK_FIBER,
         "iptrunk_description": faker.sentence(),
@@ -69,7 +67,7 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
     }
     create_ip_trunk_side_a_router_name = {"side_a_node_id": router_side_a}
     create_ip_trunk_side_a_step = {
-        "side_a_ae_iface": "LAG1",
+        "side_a_ae_iface": "lag-1",
         "side_a_ae_geant_a_sid": faker.geant_sid(),
         "side_a_ae_members": [
             LAGMember(
@@ -81,7 +79,7 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
     }
     create_ip_trunk_side_b_router_name = {"side_b_node_id": router_side_b}
     create_ip_trunk_side_b_step = {
-        "side_b_ae_iface": "LAG4",
+        "side_b_ae_iface": "lag-4",
         "side_b_ae_geant_a_sid": faker.geant_sid(),
         "side_b_ae_members": side_b_members,
     }
@@ -125,8 +123,16 @@ def test_successful_iptrunk_creation_with_standard_lso_result(
     subscription_id = state["subscription_id"]
     subscription = Iptrunk.from_subscription(subscription_id)
 
+    sorted_sides = sorted(
+        [
+            subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
+            subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
+        ]
+    )
     assert subscription.status == "active"
-    assert subscription.description == f"IP trunk, geant_s_sid:{input_form_wizard_data[0]['geant_s_sid']}"
+    assert subscription.description == (
+        f"IP trunk {sorted_sides[0]} {sorted_sides[1]}, geant_s_sid:{input_form_wizard_data[0]['geant_s_sid']}"
+    )
 
     assert mock_execute_playbook.call_count == 6
 
diff --git a/test/workflows/iptrunk/test_migrate_iptrunk.py b/test/workflows/iptrunk/test_migrate_iptrunk.py
index e7852bc13a2d0cfd61f9b1942690e94e0ffb01b2..8dc3acf02b5cf854ab8de257bed30647acdf8311 100644
--- a/test/workflows/iptrunk/test_migrate_iptrunk.py
+++ b/test/workflows/iptrunk/test_migrate_iptrunk.py
@@ -49,7 +49,7 @@ def migrate_form_input(
         new_router = nokia_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
         new_side_ae_members = faker.link_members_nokia()[0:2]
-        lag_name = "LAG1"
+        lag_name = "lag-1"
     elif use_juniper == UseJuniperSide.SIDE_BOTH:
         # Juniper -> Juniper
         old_side_a_node = juniper_router_subscription_factory()
@@ -69,7 +69,7 @@ def migrate_form_input(
         new_router = nokia_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
         new_side_ae_members = faker.link_members_nokia()[0:2]
-        lag_name = "LAG1"
+        lag_name = "lag-1"
 
     return [
         {"subscription_id": product_id},
diff --git a/test/workflows/router/test_create_router.py b/test/workflows/router/test_create_router.py
index 6c29760615bc2bd9b2a7500ca5de8431c906e443..957c3bda41026db88a7d7fc70ce2efa556770774 100644
--- a/test/workflows/router/test_create_router.py
+++ b/test/workflows/router/test_create_router.py
@@ -6,7 +6,6 @@ from infoblox_client import objects
 from gso.products import ProductType, Site
 from gso.products.product_blocks.router import RouterRole, RouterVendor
 from gso.products.product_types.router import Router
-from gso.services.crm import customer_selector, get_customer_by_name
 from gso.services.subscriptions import get_product_id_by_name
 from test.workflows import (
     assert_complete,
@@ -23,7 +22,6 @@ def router_creation_input_form_data(site_subscription_factory, faker):
 
     return {
         "tt_number": faker.tt_number(),
-        "customer": getattr(customer_selector(), get_customer_by_name("GÉANT")["id"]),
         "router_site": router_site,
         "hostname": faker.pystr(),
         "ts_port": faker.pyint(),
diff --git a/test/workflows/router/test_update_ibgp_mesh.py b/test/workflows/router/test_update_ibgp_mesh.py
new file mode 100644
index 0000000000000000000000000000000000000000..14543066130d06d5d6f5d148b2d126a44d267adc
--- /dev/null
+++ b/test/workflows/router/test_update_ibgp_mesh.py
@@ -0,0 +1,50 @@
+from unittest.mock import patch
+
+import pytest
+from orchestrator.workflow import StepStatus
+from pydantic_forms.exceptions import FormValidationError
+
+from gso.products import Iptrunk
+from gso.products.product_blocks.router import RouterRole
+from test.workflows import assert_pp_interaction_success, extract_state, run_workflow
+
+
+@pytest.fixture()
+def ibgp_mesh_input_form_data(iptrunk_subscription_factory, faker):
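+    """Build input form data that selects the side A router of a newly created IP trunk."""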
+    ip_trunk = Iptrunk.from_subscription(iptrunk_subscription_factory())
+
+    return {"subscription_id": ip_trunk.iptrunk.iptrunk_sides[0].iptrunk_side_node.owner_subscription_id}
+
+
+@pytest.mark.workflow()
+@patch("gso.workflows.router.update_ibgp_mesh.provisioning_proxy.execute_playbook")
+@patch("gso.workflows.router.update_ibgp_mesh.librenms_client.LibreNMSClient.add_device")
+def test_update_ibgp_mesh_success(
+    mock_librenms_add_device,
+    mock_execute_playbook,
+    ibgp_mesh_input_form_data,
+    data_config_filename,
+    faker,
+):
+    result, process_stat, step_log = run_workflow(
+        "update_ibgp_mesh", [ibgp_mesh_input_form_data, {"tt_number": faker.tt_number()}]
+    )
+
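+    # The workflow performs five provisioning proxy interactions; each must complete successfully.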
+    for _ in range(5):
+        result, step_log = assert_pp_interaction_success(result, process_stat, step_log)
+
+    state = extract_state(result)
+
+    assert mock_execute_playbook.call_count == 5
+    assert mock_librenms_add_device.call_count == 1
+    assert result.status == StepStatus.COMPLETE
+    assert state["subscription"]["router"]["router_access_via_ts"] is False
+
+
+@pytest.mark.workflow()
+def test_update_ibgp_mesh_isolated_router(nokia_router_subscription_factory, data_config_filename):
+    router_id = nokia_router_subscription_factory(router_role=RouterRole.P)
+
+    exception_message = "Selected router does not terminate any active IP trunks."
+    with pytest.raises(FormValidationError, match=exception_message):
+        run_workflow("update_ibgp_mesh", [{"subscription_id": router_id}, {}])
diff --git a/test/workflows/site/test_create_site.py b/test/workflows/site/test_create_site.py
index a1122f59ef08297bc1ffbeaf7baa244cad620a3a..6a260bc111f055b5724af699da61d4d6a145886f 100644
--- a/test/workflows/site/test_create_site.py
+++ b/test/workflows/site/test_create_site.py
@@ -25,7 +25,6 @@ def test_create_site(responses, faker):
             "site_internal_id": faker.pyint(),
             "site_tier": SiteTier.TIER1,
             "site_ts_address": faker.ipv4(),
-            "customer": get_customer_by_name("GÉANT")["id"],
         },
     ]
     result, _, _ = run_workflow("create_site", initial_site_data)