diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6cc76019e6555e40b0c8a4aca3db42defbad859c..5e54ad156d4279d0bba424c9ee36aeff2548dc19 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -17,7 +17,6 @@ run-tox-pipeline:
   services:
     - postgres:15.4
 
-
   # Change pip's cache directory to be inside the project directory since we can
   # only cache local items.
   variables:
@@ -27,14 +26,11 @@ run-tox-pipeline:
     DATABASE_URI_TEST: 'postgresql://nwa:nwa@postgres:5432/gso-test-db'
     PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
 
-  # Pip's cache doesn't store the python packages
-  # https://pip.pypa.io/en/stable/topics/caching/
-  #
-  # If you want to also cache the installed packages, you have to install
-  # them in a virtualenv and cache it as well.
   cache:
+    key: tox-virtualenv
     paths:
       - .cache/pip
+      - venv/
 
   before_script:
     - pip install virtualenv
diff --git a/Changelog.md b/Changelog.md
index e0fb0c1148fefe55ce431d5331ff14627e1a73b7..94ab4fd8d9c7cbd456b9a990830cb0d40bafc706 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [2.12] - 2024-08-22
+- Add promote P to PE workflow.
+- Add new cleanup task.
+- Rework the Kentik client.
+- Update documentation of services and schedules modules.
+- Enhanced Infoblox: Now pings all hosts, searches for all IPs, and checks for overlapping IP trunk subscriptions in allocated networks when creating a trunk.
+
 ## [2.11] - 2024-08-19
 - (fix) Make LibreNMS retry when a request times out
 - (fix) Adjust the mechanics of the minimum amount of links when creating an IP trunk
diff --git a/docs/source/module/schedules/clean_old_tasks.rst b/docs/source/module/schedules/clean_old_tasks.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5d279b5ce7b8ef3955b72752a6e5d2280d89563b
--- /dev/null
+++ b/docs/source/module/schedules/clean_old_tasks.rst
@@ -0,0 +1,6 @@
+``gso.schedules.clean_old_tasks``
+=================================
+
+.. automodule:: gso.schedules.clean_old_tasks
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/schedules/index.rst b/docs/source/module/schedules/index.rst
index 7e14245d8dde018c0bdff7f1f51a51f19c79666c..947b330a8198a5a4bcbe059bf1b79a55ba2808b1 100644
--- a/docs/source/module/schedules/index.rst
+++ b/docs/source/module/schedules/index.rst
@@ -14,7 +14,9 @@ Submodules
 .. toctree::
    :maxdepth: 1
 
+   clean_old_tasks
    scheduling
+   send_email_notifications
    task_vacuum
    validate_products
    validate_subscriptions
diff --git a/docs/source/module/schedules/send_email_notifications.rst b/docs/source/module/schedules/send_email_notifications.rst
new file mode 100644
index 0000000000000000000000000000000000000000..ddc597afb7d4ad4c93979138fad3ef1a304909c5
--- /dev/null
+++ b/docs/source/module/schedules/send_email_notifications.rst
@@ -0,0 +1,6 @@
+``gso.schedules.send_email_notifications``
+==========================================
+
+.. automodule:: gso.schedules.send_email_notifications
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/services/index.rst b/docs/source/module/services/index.rst
index d93ddc5a42b90420c033482720600ba21530568c..e7b01712599bd9602b934499e00a810319b1f25f 100644
--- a/docs/source/module/services/index.rst
+++ b/docs/source/module/services/index.rst
@@ -13,9 +13,12 @@ Submodules
    :titlesonly:
 
    infoblox
+   kentik_client
    librenms_client
    lso_client
    mailer
    netbox_client
    partners
+   processes
+   sharepoint
    subscriptions
diff --git a/docs/source/module/services/kentik_client.rst b/docs/source/module/services/kentik_client.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b8090e23ea353f160882dd14a57a24bac82915cd
--- /dev/null
+++ b/docs/source/module/services/kentik_client.rst
@@ -0,0 +1,6 @@
+``gso.services.kentik_client``
+==============================
+
+.. automodule:: gso.services.kentik_client
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/services/processes.rst b/docs/source/module/services/processes.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7d7ffe61220edfd365b1d49f57ae27f13673e3df
--- /dev/null
+++ b/docs/source/module/services/processes.rst
@@ -0,0 +1,6 @@
+``gso.services.processes``
+==========================
+
+.. automodule:: gso.services.processes
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/services/sharepoint.rst b/docs/source/module/services/sharepoint.rst
new file mode 100644
index 0000000000000000000000000000000000000000..13d5d5b6351261bcb91eefc82c49b00175eb740a
--- /dev/null
+++ b/docs/source/module/services/sharepoint.rst
@@ -0,0 +1,6 @@
+``gso.services.sharepoint``
+===========================
+
+.. automodule:: gso.services.sharepoint
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/utils/types.rst b/docs/source/module/utils/types.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c70c8dd0c61a4fa29cc2f123ec4d0643ab7bdf5d
--- /dev/null
+++ b/docs/source/module/utils/types.rst
@@ -0,0 +1,6 @@
+``gso.utils.types``
+===================
+
+.. automodule:: gso.utils.types
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/router/index.rst b/docs/source/module/workflows/router/index.rst
index 3e086871b91f672162ac31337e17844002b249df..f10b52e24360c774aca837a6fba99429a4445629 100644
--- a/docs/source/module/workflows/router/index.rst
+++ b/docs/source/module/workflows/router/index.rst
@@ -17,6 +17,8 @@ Submodules
    create_router
    import_router
    modify_connection_strategy
+   modify_kentik_license
+   promote_p_to_pe
    redeploy_base_config
    terminate_router
    update_ibgp_mesh
diff --git a/docs/source/module/workflows/router/modify_kentik_license.rst b/docs/source/module/workflows/router/modify_kentik_license.rst
new file mode 100644
index 0000000000000000000000000000000000000000..72bc88ba59262f4595879a2b34a3625a4905c85f
--- /dev/null
+++ b/docs/source/module/workflows/router/modify_kentik_license.rst
@@ -0,0 +1,6 @@
+``gso.workflows.router.modify_kentik_license``
+==============================================
+
+.. automodule:: gso.workflows.router.modify_kentik_license
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/router/promote_p_to_pe.rst b/docs/source/module/workflows/router/promote_p_to_pe.rst
new file mode 100644
index 0000000000000000000000000000000000000000..bb100c11b769b8d0e20d37c4dd62d20defe29a04
--- /dev/null
+++ b/docs/source/module/workflows/router/promote_p_to_pe.rst
@@ -0,0 +1,6 @@
+``gso.workflows.router.promote_p_to_pe``
+========================================
+
+.. automodule:: gso.workflows.router.promote_p_to_pe
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/clean_old_tasks.rst b/docs/source/module/workflows/tasks/clean_old_tasks.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1a2c7bce0bea4195fc00c5fd12526b1640cca2a2
--- /dev/null
+++ b/docs/source/module/workflows/tasks/clean_old_tasks.rst
@@ -0,0 +1,6 @@
+``gso.workflows.tasks.clean_old_tasks``
+=======================================
+
+.. automodule:: gso.workflows.tasks.clean_old_tasks
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/create_partners.rst b/docs/source/module/workflows/tasks/create_partners.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3755d6bca54d485a9c7130d11db965ef67ac28f0
--- /dev/null
+++ b/docs/source/module/workflows/tasks/create_partners.rst
@@ -0,0 +1,6 @@
+``gso.workflows.tasks.create_partners``
+=======================================
+
+.. automodule:: gso.workflows.tasks.create_partners
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/delete_partners.rst b/docs/source/module/workflows/tasks/delete_partners.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0a731a1d4b45a6d6bab1522c266ef3ee6b5ec854
--- /dev/null
+++ b/docs/source/module/workflows/tasks/delete_partners.rst
@@ -0,0 +1,6 @@
+``gso.workflows.tasks.delete_partners``
+=======================================
+
+.. automodule:: gso.workflows.tasks.delete_partners
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/index.rst b/docs/source/module/workflows/tasks/index.rst
index 640f1184ec743d2bab5f0b183e85103ca8a6b8e1..b4a6c9ddf5a7d9e347e38f9847a0c6a3f79dffc1 100644
--- a/docs/source/module/workflows/tasks/index.rst
+++ b/docs/source/module/workflows/tasks/index.rst
@@ -12,4 +12,9 @@ Submodules
    :maxdepth: 2
    :titlesonly:
 
+   clean_old_tasks
+   create_partners
+   delete_partners
+   modify_partners
+   send_email_notifications
    validate_geant_products
diff --git a/docs/source/module/workflows/tasks/modify_partners.rst b/docs/source/module/workflows/tasks/modify_partners.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b360165a36b16a7e8d8372d8e94532569532b2fa
--- /dev/null
+++ b/docs/source/module/workflows/tasks/modify_partners.rst
@@ -0,0 +1,6 @@
+``gso.workflows.tasks.modify_partners``
+=======================================
+
+.. automodule:: gso.workflows.tasks.modify_partners
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/send_email_notifications.rst b/docs/source/module/workflows/tasks/send_email_notifications.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7be5d9492efc2e3067265a359b96ffb8629fe68a
--- /dev/null
+++ b/docs/source/module/workflows/tasks/send_email_notifications.rst
@@ -0,0 +1,6 @@
+``gso.workflows.tasks.send_email_notifications``
+================================================
+
+.. automodule:: gso.workflows.tasks.send_email_notifications
+   :members:
+   :show-inheritance:
diff --git a/gso/migrations/versions/2024-08-02_87a05eddee3e_add_router_modification_workflow_for_.py b/gso/migrations/versions/2024-08-02_87a05eddee3e_add_router_modification_workflow_for_.py
new file mode 100644
index 0000000000000000000000000000000000000000..84d335c7fb2b8d643132a6e14afeb98634ae46e3
--- /dev/null
+++ b/gso/migrations/versions/2024-08-02_87a05eddee3e_add_router_modification_workflow_for_.py
@@ -0,0 +1,39 @@
+"""Add router modification workflow for kentik licenses.
+
+Revision ID: 87a05eddee3e
+Revises: 844aa61c09ce
+Create Date: 2024-08-02 15:09:42.597063
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '87a05eddee3e'
+down_revision = '844aa61c09ce'
+branch_labels = None
+depends_on = None
+
+
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+new_workflows = [
+    {
+        "name": "modify_router_kentik_license",
+        "target": "MODIFY",
+        "description": "Modify Kentik license",
+        "product_type": "Router"
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        create_workflow(conn, workflow)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        delete_workflow(conn, workflow["name"])
diff --git a/gso/migrations/versions/2024-08-07_88dd5a44150d_add_promote_p_to_pe_workflows.py b/gso/migrations/versions/2024-08-07_88dd5a44150d_add_promote_p_to_pe_workflows.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ad1235c2c676c1c7164647d5fb8b483247ea919
--- /dev/null
+++ b/gso/migrations/versions/2024-08-07_88dd5a44150d_add_promote_p_to_pe_workflows.py
@@ -0,0 +1,39 @@
+"""Add promote P to PE workflows.
+
+Revision ID: 88dd5a44150d
+Revises: 41fd1ae225aq
+Create Date: 2024-08-07 13:54:44.362435
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '88dd5a44150d'
+down_revision = '41fd1ae225aq'
+branch_labels = None
+depends_on = None
+
+
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+new_workflows = [
+    {
+        "name": "promote_p_to_pe",
+        "target": "MODIFY",
+        "description": "Promote P router to PE router",
+        "product_type": "Router"
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        create_workflow(conn, workflow)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        delete_workflow(conn, workflow["name"])
diff --git a/gso/migrations/versions/2024-08-13_844aa61c09ce_add_new_cleanup_task.py b/gso/migrations/versions/2024-08-13_844aa61c09ce_add_new_cleanup_task.py
new file mode 100644
index 0000000000000000000000000000000000000000..037c092e05a740b230b11f490aa0730829c0499b
--- /dev/null
+++ b/gso/migrations/versions/2024-08-13_844aa61c09ce_add_new_cleanup_task.py
@@ -0,0 +1,44 @@
+"""Add new cleanup task.
+
+Revision ID: 844aa61c09ce
+Revises: 88dd5a44150d
+Create Date: 2024-08-13 12:23:11.043293
+
+"""
+
+from uuid import uuid4
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '844aa61c09ce'
+down_revision = '88dd5a44150d'
+branch_labels = None
+depends_on = None
+
+workflows = [
+    {
+        "name": "task_clean_old_tasks",
+        "target": "SYSTEM",
+        "description": "Remove old cleanup tasks",
+        "workflow_id": uuid4(),
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(
+            sa.text(
+                "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now())"
+            ),
+            workflow,
+        )
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]})
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index 069672ffda3c401e850b7d146d37259898ebbf46..a1bbe5d0054d9dbfbaeb9290fd68bc69c2cfa90f 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -113,6 +113,7 @@
     "device_type": "router",
     "minimize_snmp": false,
     "placeholder_license_key": "placeholder license",
+    "archive_license_key": "archiving license",
     "sample_rate": 100,
     "bgp_type": "device",
     "bgp_lookup_strategy": "lu_global_fallback",
diff --git a/gso/schedules/clean_old_tasks.py b/gso/schedules/clean_old_tasks.py
new file mode 100644
index 0000000000000000000000000000000000000000..c05543d355ac359f579f4517956bf85283da3113
--- /dev/null
+++ b/gso/schedules/clean_old_tasks.py
@@ -0,0 +1,13 @@
+"""Metatask that cleans up old cleanup tasks."""
+
+from orchestrator.services.processes import start_process
+
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
+from gso.worker import celery
+
+
+@celery.task
+@scheduler(CronScheduleConfig(name="Clean up old tasks", hour="23"))
+def clean_old_tasks() -> None:
+    """Remove old cleanup tasks, daily at 11 PM UTC."""
+    start_process("task_clean_old_tasks")
diff --git a/gso/schedules/task_vacuum.py b/gso/schedules/task_vacuum.py
index be04380f8caaf53152f77ca129ba74b9d63bd164..de4d44842f485cc8df41f9c01eed3ead2d651a30 100644
--- a/gso/schedules/task_vacuum.py
+++ b/gso/schedules/task_vacuum.py
@@ -7,7 +7,7 @@ from gso.worker import celery
 
 
 @celery.task
-@scheduler(CronScheduleConfig(name="Clean up tasks", hour="*/6"))
+@scheduler(CronScheduleConfig(name="Clean up tasks", hour="1"))
 def vacuum_tasks() -> None:
-    """Run all cleanup tasks every 6 hours."""
+    """Run all cleanup tasks daily at 1 AM UTC."""
     start_process("task_clean_up_tasks")
diff --git a/gso/schedules/validate_products.py b/gso/schedules/validate_products.py
index eeb689a247e337272a5f96eb5992a87e26227dc7..9d8e6f18a2ab6dcbf684f16f0a100b530df02b6c 100644
--- a/gso/schedules/validate_products.py
+++ b/gso/schedules/validate_products.py
@@ -3,7 +3,7 @@
 from orchestrator.services.processes import start_process
 
 from gso.schedules.scheduling import CronScheduleConfig, scheduler
-from gso.services.subscriptions import count_incomplete_validate_products
+from gso.services.processes import count_incomplete_validate_products
 from gso.worker import celery
 
 
diff --git a/gso/services/infoblox.py b/gso/services/infoblox.py
index d340ce979f14a8c4d767f0f0e9ccf10331174387..22e56ab5a7215ed8026d1a3a80e7d72ee1ba916e 100644
--- a/gso/services/infoblox.py
+++ b/gso/services/infoblox.py
@@ -268,11 +268,11 @@ def create_host_by_ip(
     new_host.update()
 
 
-def find_host_by_ip(ip_addr: IPv4AddressType | ipaddress.IPv6Address) -> objects.HostRecord | None:
+def find_host_by_ip(ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address) -> objects.HostRecord | None:
     """Find a host record in Infoblox by its associated IP address.
 
     :param ip_addr: The IP address of a host that is searched for.
-    :type ip_addr: IPv4AddressType | ipaddress.IPv6Address
+    :type ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address
     """
     conn, _ = _setup_connection()
     if ip_addr.version == 4:  # noqa: PLR2004, the 4 in IPv4 is well-known and not a "magic value."
diff --git a/gso/services/kentik_client.py b/gso/services/kentik_client.py
index 7f62d169186b28848ea696b791834bd0192fddee..95d55e22d53f056d08bd9376956808c9a11785c0 100644
--- a/gso/services/kentik_client.py
+++ b/gso/services/kentik_client.py
@@ -4,10 +4,10 @@ import logging
 from typing import Any, Literal
 
 import requests
+from orchestrator.utils.errors import ProcessFailureError
 from pydantic import BaseModel
 from requests import Response
 
-from gso.products.product_blocks.site import SiteTier
 from gso.settings import load_oss_params
 
 logger = logging.getLogger(__name__)
@@ -20,7 +20,6 @@ class NewKentikDevice(BaseModel):
     device_description: str
     sending_ips: list[str]
     site_id: int
-    site_tier: SiteTier
     device_snmp_ip: str
     device_bgp_flowspec: bool
     device_bgp_neighbor_ip: str
@@ -41,22 +40,37 @@ class KentikClient:
         })
 
     def _send_request(
-        self, method: Literal["GET", "POST", "PUT", "DELETE"], endpoint: str, data: dict[str, Any] | None = None
-    ) -> Response:
+        self, method: Literal["GET", "POST", "PUT"], endpoint: str, data: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
         url = self.config.api_base + endpoint
-        logger.debug("Kentik - Sending request", extra={"method": method, "endpoint": url, "form_data": data})
-        result = self.session.request(method, url, json=data)
-        logger.debug("Kentik - Received response", extra=result.__dict__)
+        logger.debug("Kentik - Sending %s request to %s with payload %s", method, url, data)
+        result = self.session.request(method, url, json=data).json()
 
+        if "error" in result or "kentik_error" in result:
+            msg = "Failed to process request in Kentik"
+            raise ProcessFailureError(msg, details=result)
+
+        logger.debug("Kentik - Received response %s", result)
         return result
 
+    def _send_delete(self, endpoint: str, data: dict[str, Any] | None = None) -> Response:
+        url = self.config.api_base + endpoint
+        logger.debug("Kentik - Sending delete request to %s with payload %s", url, data)
+        return self.session.delete(url, json=data)
+
     def get_devices(self) -> list[dict[str, Any]]:
-        """List all devices in Kentik."""
-        return [self._send_request("GET", "v5/devices").json()]
+        """List all devices in Kentik.
+
+        Returns a list of shape ``[{**device_1}, {**device_2}, ..., {**device_n}]``.
+        """
+        return self._send_request("GET", "v5/devices")["devices"]
 
     def get_device(self, device_id: str) -> dict[str, Any]:
         """Get a device by ID."""
-        return self._send_request("GET", f"v5/device/{device_id}").json()
+        device = self._send_request("GET", f"v5/device/{device_id}")
+        device.pop("custom_column_data", None)
+        device.pop("custom_columns", None)
+        return device
 
     def get_device_by_name(self, device_name: str) -> dict[str, Any]:
         """Fetch a device in Kentik by its :term:`FQDN`.
@@ -67,18 +81,18 @@ class KentikClient:
         """
         devices = self.get_devices()
         for device in devices:
-            if device["name"] == device_name:
+            if device["device_name"] == device_name:
                 return device
 
         return {}
 
     def get_sites(self) -> list[dict[str, Any]]:
         """Get a list of all available sites in Kentik."""
-        return self._send_request("GET", "v5/sites").json()["sites"]
+        return self._send_request("GET", "v5/sites")["sites"]
 
     def get_site(self, site_id: str) -> dict[str, Any]:
         """Get a site by ID."""
-        return self._send_request("GET", f"v5/site/{site_id}").json()
+        return self._send_request("GET", f"v5/site/{site_id}")
 
     def get_site_by_name(self, site_slug: str) -> dict[str, Any]:
         """Get a Kentik site by its name.
@@ -96,7 +110,11 @@ class KentikClient:
 
     def get_plans(self) -> list[dict[str, Any]]:
         """Get all Kentik plans available."""
-        return self._send_request("GET", "v5/plans").json()["plans"]
+        return self._send_request("GET", "v5/plans")["plans"]
+
+    def get_plan(self, plan_id: int) -> dict[str, Any]:
+        """Get a Kentik plan by ID."""
+        return self._send_request("GET", f"v5/plan/{plan_id}")
 
     def get_plan_by_name(self, plan_name: str) -> dict[str, Any]:
         """Get a Kentik plan by its name.
@@ -117,7 +135,7 @@ class KentikClient:
         plan_id = self.get_plan_by_name(self.config.placeholder_license_key)["id"]
         request_body = {
             "device": {
-                **device.model_dump(exclude=set("device_name" "site_tier")),
+                **device.model_dump(exclude={"device_name"}),
                 "device_name": device.device_description,
                 "device_type": self.config.device_type,
                 "device_subtype": self.config.device_type,
@@ -132,18 +150,23 @@ class KentikClient:
             }
         }
 
-        new_device = self._send_request("POST", "v5/device", request_body).json()
+        new_device = self._send_request("POST", "v5/device", request_body)["device"]
 
         # The name of the device has to be updated from the subscription ID to its FQDN.
         # This is a limitation of the Kentik API that disallows settings device names containing a . symbol.
-        self.update_device(new_device["device"]["id"], {"device": {"device_name": device.device_name}})
-        new_device["device"]["device_name"] = device.device_name
+        self.update_device(new_device["id"], {"device": {"device_name": device.device_name}})
+        new_device["device_name"] = device.device_name
+        new_device.pop("custom_column_data", None)
+        new_device.pop("custom_columns", None)
 
         return new_device
 
     def update_device(self, device_id: str, updated_device: dict[str, Any]) -> dict[str, Any]:
         """Update an existing device in Kentik."""
-        return self._send_request("PUT", f"v5/device/{device_id}", updated_device).json()
+        device = self._send_request("PUT", f"v5/device/{device_id}", updated_device)["device"]
+        device.pop("custom_column_data", None)
+        device.pop("custom_columns", None)
+        return device
 
     def remove_device(self, device_id: str, *, archive: bool) -> None:
         """Remove a device from Kentik.
@@ -152,11 +175,11 @@ class KentikClient:
         :param bool archive: Archive the device instead of completely deleting it.
         """
         if not archive:
-            self._send_request("DELETE", f"v5/device/{device_id}")
+            self._send_delete(f"v5/device/{device_id}")
 
-        self._send_request("DELETE", f"v5/device/{device_id}")
+        self._send_delete(f"v5/device/{device_id}")
 
-    def remove_device_by_fqdn(self, fqdn: str, *, archive: bool = True) -> None:
+    def remove_device_by_fqdn(self, fqdn: str, *, archive: bool) -> None:
         """Remove a device from Kentik, by its :term:`FQDN`."""
         device_id = self.get_device_by_name(fqdn)["id"]
         self.remove_device(device_id, archive=archive)
diff --git a/gso/services/processes.py b/gso/services/processes.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6470a8aee5586d85bbb98204b4b734052fec4cc
--- /dev/null
+++ b/gso/services/processes.py
@@ -0,0 +1,51 @@
+"""A collection of methods that make interaction with coreDB more straight-forward.
+
+This prevents someone from having to re-write database statements many times, that might turn out to be erroneous
+or inconsistent when not careful. These methods are related to operations regarding processes and workflows.
+"""
+
+from orchestrator.db import ProcessTable, WorkflowTable, db
+from orchestrator.types import UUIDstr
+from orchestrator.workflow import ProcessStatus
+from sqlalchemy import ScalarResult, or_, select
+
+
+def count_incomplete_validate_products() -> int:
+    """Count the number of incomplete validate_geant_products processes.
+
+    :return: The count of incomplete 'validate_geant_products' processes.
+    :rtype: int
+    """
+    return ProcessTable.query.filter(
+        ProcessTable.workflow_name == "validate_geant_products",
+        ProcessTable.last_status != ProcessStatus.COMPLETED.value,
+    ).count()
+
+
+def get_failed_tasks() -> list[ProcessTable]:
+    """Get all tasks that have failed."""
+    return ProcessTable.query.filter(
+        ProcessTable.is_task.is_(True), ProcessTable.last_status == ProcessStatus.FAILED.value
+    ).all()
+
+
+def get_all_cleanup_tasks() -> list[WorkflowTable]:
+    """Get a list of all cleanup tasks that run on a schedule."""
+    return WorkflowTable.query.filter(
+        or_(WorkflowTable.name == "task_clean_up_tasks", WorkflowTable.name == "task_clean_old_tasks")
+    ).all()
+
+
+def get_created_and_completed_processes_by_id(workflow_id: UUIDstr) -> ScalarResult:
+    """Get all processes that are either created or completed, by workflow ID."""
+    return db.session.scalars(
+        select(ProcessTable)
+        .filter(ProcessTable.is_task.is_(True))
+        .filter(ProcessTable.workflow_id == workflow_id)
+        .filter(
+            or_(
+                ProcessTable.last_status == ProcessStatus.COMPLETED.value,
+                ProcessTable.last_status == ProcessStatus.CREATED.value,
+            )
+        )
+    )
diff --git a/gso/services/subscriptions.py b/gso/services/subscriptions.py
index d82c66c9b0a42aa83b62fe731e40710a41c85005..25ed84e0593b2963656167737f17b4697ef2b0df 100644
--- a/gso/services/subscriptions.py
+++ b/gso/services/subscriptions.py
@@ -1,7 +1,7 @@
 """A collection of methods that make interaction with coreDB more straight-forward.
 
 This prevents someone from having to re-write database statements many times, that might turn out to be erroneous
-or inconsistent when not careful.
+or inconsistent when not careful. These methods relate to operations on subscriptions.
 """
 
 from typing import Any
@@ -9,7 +9,6 @@ from uuid import UUID
 
 from orchestrator.db import (
     ProcessSubscriptionTable,
-    ProcessTable,
     ProductTable,
     ResourceTypeTable,
     SubscriptionInstanceTable,
@@ -20,7 +19,6 @@ from orchestrator.db import (
 from orchestrator.domain import SubscriptionModel
 from orchestrator.services.subscriptions import query_in_use_by_subscriptions
 from orchestrator.types import SubscriptionLifecycle
-from orchestrator.workflow import ProcessStatus
 from pydantic_forms.types import UUIDstr
 
 from gso.products import ProductName, ProductType
@@ -138,6 +136,19 @@ def get_active_iptrunk_subscriptions(includes: list[str] | None = None) -> list[
     )
 
 
+def get_non_terminated_iptrunk_subscriptions(includes: list[str] | None = None) -> list[SubscriptionType]:
+    """Retrieve all IP trunk subscriptions that are not terminated.
+
+    :param list[Subscription] includes: Fields to be included in the returned Subscription objects.
+    :return list[Subscription]: A list of IP trunk subscriptions.
+    """
+    return get_subscriptions(
+        product_types=[ProductType.IP_TRUNK],
+        lifecycles=[SubscriptionLifecycle.INITIAL, SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
+        includes=includes,
+    )
+
+
 def get_trunks_that_terminate_on_router(
     subscription_id: UUIDstr, lifecycle_state: SubscriptionLifecycle
 ) -> list[SubscriptionTable]:
@@ -200,25 +211,6 @@ def get_active_subscriptions_by_field_and_value(field_name: str, field_value: st
     )
 
 
-def count_incomplete_validate_products() -> int:
-    """Count the number of incomplete validate_geant_products processes.
-
-    :return: The count of incomplete 'validate_geant_products' processes.
-    :rtype: int
-    """
-    return ProcessTable.query.filter(
-        ProcessTable.workflow_name == "validate_geant_products",
-        ProcessTable.last_status != ProcessStatus.COMPLETED.value,
-    ).count()
-
-
-def get_failed_tasks() -> list[ProcessTable]:
-    """Get all tasks that have failed."""
-    return ProcessTable.query.filter(
-        ProcessTable.is_task.is_(True), ProcessTable.last_status == ProcessStatus.FAILED.value
-    ).all()
-
-
 def get_subscription_by_process_id(process_id: str) -> SubscriptionModel | None:
     """Get a subscription from a process ID."""
     subscription_table = ProcessSubscriptionTable.query.filter(
@@ -253,3 +245,18 @@ def get_site_by_name(site_name: str) -> Site:
         raise ValueError(msg)
 
     return Site.from_subscription(subscription[0].subscription_id)
+
+
+def get_all_active_sites() -> list[dict[str, Any]]:
+    """Retrieve all active sites subscription together with instance values.
+
+    :return: A list of active sites with their subscription IDs and site instances.
+    :rtype: list[dict[str, Any]]
+    """
+    return [
+        {
+            "subscription_id": subscription["subscription_id"],
+            "site": Site.from_subscription(subscription["subscription_id"]).site,
+        }
+        for subscription in get_active_site_subscriptions(includes=["subscription_id"])
+    ]
diff --git a/gso/settings.py b/gso/settings.py
index 6c6378dd196915b26626a80ac31f8282a795791d..d2a845f3e96b526675d8d0de57972a15218a43ce 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -195,6 +195,7 @@ class KentikParams(BaseSettings):
     device_type: str
     minimize_snmp: bool
     placeholder_license_key: str
+    archive_license_key: str
     sample_rate: int
     bgp_type: str
     bgp_lookup_strategy: str
diff --git a/gso/translations/en-GB.json b/gso/translations/en-GB.json
index c74df450a1c5b332b7dbc02cb96742dbb92d5901..108a28522a74747461352dbd5133ba5ccf63f3ec 100644
--- a/gso/translations/en-GB.json
+++ b/gso/translations/en-GB.json
@@ -48,6 +48,7 @@
         "modify_site": "Modify Site",
         "modify_trunk_interface": "Modify IP Trunk interface",
         "modify_connection_strategy": "Modify connection strategy",
+        "modify_router_kentik_license": "Modify device license in Kentik",
         "terminate_iptrunk": "Terminate IP Trunk",
         "terminate_router": "Terminate Router",
         "terminate_site": "Terminate Site",
@@ -71,6 +72,8 @@
         "task_send_email_notifications": "Send email notifications for failed tasks",
         "task_create_partners": "Create partner task",
         "task_modify_partners": "Modify partner task",
-        "task_delete_partners": "Delete partner task"
+        "task_delete_partners": "Delete partner task",
+        "task_clean_old_tasks": "Remove old cleanup tasks",
+        "promote_p_to_pe": "Promote P to PE"
     }
 }
diff --git a/gso/utils/helpers.py b/gso/utils/helpers.py
index cca681a4be8d56c0772aaae40f5929f516c3dbc5..13016c76f13028db17e8397ebff7ce23455a997a 100644
--- a/gso/utils/helpers.py
+++ b/gso/utils/helpers.py
@@ -306,6 +306,7 @@ def generate_fqdn(hostname: str, site_name: str, country_code: str) -> str:
 def generate_inventory_for_active_routers(
     router_role: RouterRole,
     exclude_routers: list[str] | None = None,
+    router_vendor: Vendor | None = None,
 ) -> dict:
     """Generate an Ansible-compatible inventory for executing playbooks.
 
@@ -313,6 +314,7 @@ def generate_inventory_for_active_routers(
 
     :param RouterRole router_role: The role of the routers to include in the inventory.
     :param list exclude_routers: List of routers to exclude from the inventory.
+    :param Vendor router_vendor: The vendor of the routers to include in the inventory.
     :return: A dictionary representing the inventory of active routers.
     :rtype: dict[str, Any]
     """
@@ -328,7 +330,9 @@ def generate_inventory_for_active_routers(
                     "vendor": str(router.router.vendor),
                 }
                 for router in all_routers
-                if router.router.router_role == router_role and router.router.router_fqdn not in exclude_routers
+                if router.router.router_role == router_role
+                and router.router.router_fqdn not in exclude_routers
+                and (router_vendor is None or router.router.vendor == router_vendor)
             }
         }
     }
diff --git a/gso/utils/types.py b/gso/utils/types.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e1b4091b127d9a572c12c7fad462dc4887de9f7
--- /dev/null
+++ b/gso/utils/types.py
@@ -0,0 +1,9 @@
+"""Define custom types for use across the application."""
+
+from typing import Annotated
+
+from pydantic import AfterValidator
+
+from gso.utils.helpers import validate_tt_number
+
+TTNumber = Annotated[str, AfterValidator(validate_tt_number)]
diff --git a/gso/worker.py b/gso/worker.py
index 807c1edb9f41d15e1e099b03e9c0a2ae4845839e..300eb5908457aead3fbbaa7425e8c05bf71d0de5 100644
--- a/gso/worker.py
+++ b/gso/worker.py
@@ -25,6 +25,7 @@ celery = OrchestratorCelery(
         "gso.schedules.validate_products",
         "gso.schedules.validate_subscriptions",
         "gso.schedules.send_email_notifications",
+        "gso.schedules.clean_old_tasks",
     ],
 )
 
diff --git a/gso/workflows/__init__.py b/gso/workflows/__init__.py
index 56561838557882cac784faed6745c1d80661a0cd..96638b85b66ca805f1f01d32053f65724d66f807 100644
--- a/gso/workflows/__init__.py
+++ b/gso/workflows/__init__.py
@@ -20,6 +20,7 @@ WF_USABLE_MAP.update({
     "terminate_site": ALL_ALIVE_STATES,
     "terminate_router": ALL_ALIVE_STATES,
     "terminate_iptrunk": ALL_ALIVE_STATES,
+    "promote_p_to_pe": [SubscriptionLifecycle.ACTIVE],
 })
 
 #  IP trunk workflows
@@ -44,6 +45,8 @@ LazyWorkflowInstance("gso.workflows.router.modify_connection_strategy", "modify_
 LazyWorkflowInstance("gso.workflows.router.import_router", "import_router")
 LazyWorkflowInstance("gso.workflows.router.create_imported_router", "create_imported_router")
 LazyWorkflowInstance("gso.workflows.router.validate_router", "validate_router")
+LazyWorkflowInstance("gso.workflows.router.promote_p_to_pe", "promote_p_to_pe")
+LazyWorkflowInstance("gso.workflows.router.modify_kentik_license", "modify_router_kentik_license")
 
 #  Site workflows
 LazyWorkflowInstance("gso.workflows.site.create_site", "create_site")
@@ -72,3 +75,4 @@ LazyWorkflowInstance("gso.workflows.tasks.validate_geant_products", "task_valida
 LazyWorkflowInstance("gso.workflows.tasks.create_partners", "task_create_partners")
 LazyWorkflowInstance("gso.workflows.tasks.modify_partners", "task_modify_partners")
 LazyWorkflowInstance("gso.workflows.tasks.delete_partners", "task_delete_partners")
+LazyWorkflowInstance("gso.workflows.tasks.clean_old_tasks", "task_clean_old_tasks")
diff --git a/gso/workflows/iptrunk/create_iptrunk.py b/gso/workflows/iptrunk/create_iptrunk.py
index 373dfc03b63e378b2889c62aca3496011121f909..3cd8d81cd14b6932f7ceafac247e752ec54d625a 100644
--- a/gso/workflows/iptrunk/create_iptrunk.py
+++ b/gso/workflows/iptrunk/create_iptrunk.py
@@ -1,6 +1,7 @@
 """A creation workflow that deploys a new IP trunk service."""
 
 import json
+from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
 from typing import Annotated
 from uuid import uuid4
 
@@ -9,10 +10,12 @@ from orchestrator.forms import FormPage
 from orchestrator.forms.validators import Choice, Label
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUIDstr
+from orchestrator.utils.errors import ProcessFailureError
 from orchestrator.utils.json import json_dumps
-from orchestrator.workflow import StepList, begin, conditional, done, step, workflow
+from orchestrator.workflow import StepList, begin, conditional, done, step, step_group, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
+from ping3 import ping
 from pydantic import AfterValidator, ConfigDict, field_validator
 from pydantic_forms.validators import ReadOnlyField, validate_unique_list
 from pynetbox.models.dcim import Interfaces
@@ -23,13 +26,14 @@ from gso.products.product_blocks.iptrunk import (
     IptrunkType,
     PhysicalPortCapacity,
 )
-from gso.products.product_types.iptrunk import IptrunkInactive, IptrunkProvisioning
+from gso.products.product_types.iptrunk import Iptrunk, IptrunkInactive, IptrunkProvisioning
 from gso.products.product_types.router import Router
 from gso.services import infoblox, subscriptions
 from gso.services.lso_client import execute_playbook, lso_interaction
 from gso.services.netbox_client import NetboxClient
 from gso.services.partners import get_partner_by_name
 from gso.services.sharepoint import SharePointClient
+from gso.services.subscriptions import get_non_terminated_iptrunk_subscriptions
 from gso.settings import load_oss_params
 from gso.utils.helpers import (
     LAGMember,
@@ -40,9 +44,9 @@ from gso.utils.helpers import (
     validate_interface_name_list,
     validate_iptrunk_unique_interface,
     validate_router_in_netbox,
-    validate_tt_number,
 )
 from gso.utils.shared_enums import Vendor
+from gso.utils.types import TTNumber
 from gso.utils.workflow_steps import prompt_sharepoint_checklist_url
 
 
@@ -58,7 +62,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     class CreateIptrunkForm(FormPage):
         model_config = ConfigDict(title=product_name)
 
-        tt_number: str
+        tt_number: TTNumber
         partner: ReadOnlyField("GEANT", default_type=str)  # type: ignore[valid-type]
         geant_s_sid: str | None = None
         iptrunk_description: str | None = None
@@ -66,10 +70,6 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
         iptrunk_speed: PhysicalPortCapacity
         iptrunk_number_of_members: int
 
-        @field_validator("tt_number")
-        def validate_tt_number(cls, tt_number: str) -> str:
-            return validate_tt_number(tt_number)
-
     initial_user_input = yield CreateIptrunkForm
     recommended_minimum_links = calculate_recommended_minimum_links(
         initial_user_input.iptrunk_number_of_members, initial_user_input.iptrunk_speed
@@ -214,16 +214,94 @@ def create_subscription(product: UUIDstr, partner: str) -> State:
 @step("Get information from IPAM")
 def get_info_from_ipam(subscription: IptrunkInactive) -> State:
     """Allocate IP resources in :term:`IPAM`."""
-    subscription.iptrunk.iptrunk_ipv4_network = infoblox.allocate_v4_network(
+    new_ipv4_network = infoblox.allocate_v4_network(
         "TRUNK",
         subscription.iptrunk.iptrunk_description,
     )
-    subscription.iptrunk.iptrunk_ipv6_network = infoblox.allocate_v6_network(
+    new_ipv6_network = infoblox.allocate_v6_network(
         "TRUNK",
         subscription.iptrunk.iptrunk_description,
     )
+    subscription.iptrunk.iptrunk_ipv4_network = new_ipv4_network
+    subscription.iptrunk.iptrunk_ipv6_network = new_ipv6_network
 
-    return {"subscription": subscription}
+    return {
+        "subscription": subscription,
+        "new_ipv4_network": str(new_ipv4_network),
+        "new_ipv6_network": str(new_ipv6_network),
+    }
+
+
+@step("Check if assigned networks are already taken by other trunk subscription")
+def check_existing_trunk_allocations(subscription: IptrunkInactive) -> None:
+    """Check if there already is a trunk with the same network resources assigned to it."""
+    if not subscription.iptrunk.iptrunk_ipv4_network or not subscription.iptrunk.iptrunk_ipv6_network:
+        msg = "Missing IP resources in subscription object."
+        raise ProcessFailureError(
+            msg, details=[subscription.iptrunk.iptrunk_ipv4_network, subscription.iptrunk.iptrunk_ipv6_network]
+        )
+
+    all_trunks = [
+        Iptrunk.from_subscription(trunk["subscription_id"])
+        for trunk in get_non_terminated_iptrunk_subscriptions()
+        if trunk["subscription_id"] != subscription.subscription_id
+    ]
+    overlapping_ipv4_networks = [
+        (trunk.description, trunk.iptrunk.iptrunk_ipv4_network)
+        for trunk in all_trunks
+        if trunk.iptrunk.iptrunk_ipv4_network.overlaps(subscription.iptrunk.iptrunk_ipv4_network)
+    ]
+    overlapping_ipv6_networks = [
+        (trunk.description, trunk.iptrunk.iptrunk_ipv6_network)
+        for trunk in all_trunks
+        if trunk.iptrunk.iptrunk_ipv6_network.overlaps(subscription.iptrunk.iptrunk_ipv6_network)
+    ]
+
+    if overlapping_ipv4_networks or overlapping_ipv6_networks:
+        msg = "Newly assigned IP networks overlap with existing IP trunk subscriptions, please investigate."
+        raise ProcessFailureError(msg, details=[overlapping_ipv4_networks, overlapping_ipv6_networks])
+
+
+@step("Check for existing DNS records in the assigned IPv4 network")
+def dig_all_hosts_v4(new_ipv4_network: str) -> None:
+    """Check if any hosts have already been assigned inside the IPv4 network in Infoblox."""
+    registered_hosts = [host for host in IPv4Network(new_ipv4_network) if infoblox.find_host_by_ip(IPv4Address(host))]
+
+    if registered_hosts:
+        msg = "One or more hosts in the assigned IPv4 network are already registered, please investigate."
+        raise ProcessFailureError(msg, details=registered_hosts)
+
+
+@step("Check for existing DNS records in the assigned IPv6 network")
+def dig_all_hosts_v6(new_ipv6_network: str) -> None:
+    """Check if any hosts have already been assigned inside the IPv6 network in Infoblox."""
+    registered_hosts = [host for host in IPv6Network(new_ipv6_network) if infoblox.find_host_by_ip(IPv6Address(host))]
+
+    if registered_hosts:
+        msg = "One or more hosts in the assigned IPv6 network are already registered, please investigate."
+        raise ProcessFailureError(msg, details=registered_hosts)
+
+
+@step("Ping all hosts in the assigned IPv4 network")
+def ping_all_hosts_v4(new_ipv4_network: str) -> None:
+    """Ping all hosts in the IPv4 network to verify they're not in use."""
+    unavailable_hosts = [host for host in IPv4Network(new_ipv4_network) if ping(str(host), timeout=1)]
+
+    if unavailable_hosts:
+        msg = "One or more hosts in the assigned IPv4 network are responding to ping, please investigate."
+        raise ProcessFailureError(msg, details=unavailable_hosts)
+
+
+@step("Ping all hosts in the assigned IPv6 network")
+def ping_all_hosts_v6(new_ipv6_network: str) -> State:
+    """Ping all hosts in the IPv6 network to verify they're not in use."""
+    unavailable_hosts = [host for host in IPv6Network(new_ipv6_network) if ping(str(host), timeout=1)]
+
+    if unavailable_hosts:
+        msg = "One or more hosts in the assigned IPv6 network are responding to ping, please investigate."
+        raise ProcessFailureError(msg, details=unavailable_hosts)
+
+    return {"__remove_keys": ["new_ipv4_network", "new_ipv6_network"]}
 
 
 @step("Initialize subscription")
@@ -531,12 +609,25 @@ def create_iptrunk() -> StepList:
     side_a_is_nokia = conditional(lambda state: get_router_vendor(state["side_a_node_id"]) == Vendor.NOKIA)
     side_b_is_nokia = conditional(lambda state: get_router_vendor(state["side_b_node_id"]) == Vendor.NOKIA)
 
+    assign_ip_networks = step_group(
+        name="Assign IP networks",
+        steps=(
+            begin
+            >> get_info_from_ipam
+            >> check_existing_trunk_allocations
+            >> dig_all_hosts_v4
+            >> dig_all_hosts_v6
+            >> ping_all_hosts_v4
+            >> ping_all_hosts_v6
+        ),
+    )
+
     return (
         begin
         >> create_subscription
         >> store_process_subscription(Target.CREATE)
         >> initialize_subscription
-        >> get_info_from_ipam
+        >> assign_ip_networks
         >> reserve_interfaces_in_netbox
         >> lso_interaction(provision_ip_trunk_iface_dry)
         >> lso_interaction(provision_ip_trunk_iface_real)
diff --git a/gso/workflows/iptrunk/deploy_twamp.py b/gso/workflows/iptrunk/deploy_twamp.py
index 92e37fd5777105be180674296194ea24ae70cf8f..a45b5eca61144577c5dbf58a251b0b1ca7c76f6d 100644
--- a/gso/workflows/iptrunk/deploy_twamp.py
+++ b/gso/workflows/iptrunk/deploy_twamp.py
@@ -10,11 +10,10 @@ from orchestrator.utils.json import json_dumps
 from orchestrator.workflow import StepList, begin, done, step, workflow
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import field_validator
 
 from gso.products.product_types.iptrunk import Iptrunk
 from gso.services.lso_client import execute_playbook, lso_interaction
-from gso.utils.helpers import validate_tt_number
+from gso.utils.types import TTNumber
 
 
 def _initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
@@ -26,11 +25,7 @@ def _initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             f"{trunk.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn} to "
             f"{trunk.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn}"
         )
-        tt_number: str
-
-        @field_validator("tt_number")
-        def validate_tt_number(cls, tt_number: str) -> str:
-            return validate_tt_number(tt_number)
+        tt_number: TTNumber
 
     user_input = yield DeployTWAMPForm
 
diff --git a/gso/workflows/iptrunk/migrate_iptrunk.py b/gso/workflows/iptrunk/migrate_iptrunk.py
index 908fcdbc1879b64e284914d6347c2ffa05640364..be15848ae73f933fa262211e765b0901313dac66 100644
--- a/gso/workflows/iptrunk/migrate_iptrunk.py
+++ b/gso/workflows/iptrunk/migrate_iptrunk.py
@@ -38,9 +38,9 @@ from gso.utils.helpers import (
     available_lags_choices,
     get_router_vendor,
     validate_interface_name_list,
-    validate_tt_number,
 )
 from gso.utils.shared_enums import Vendor
+from gso.utils.types import TTNumber
 from gso.utils.workflow_steps import set_isis_to_max
 
 
@@ -65,16 +65,12 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class IPTrunkMigrateForm(FormPage):
         model_config = ConfigDict(title=form_title)
 
-        tt_number: str
+        tt_number: TTNumber
         replace_side: replaced_side_enum  # type: ignore[valid-type]
         warning_label: Label = "Are we moving to a different Site?"
         migrate_to_different_site: bool = False
         restore_isis_metric: bool = True
 
-        @field_validator("tt_number", mode="before")
-        def validate_tt_number(cls, tt_number: str) -> str:
-            return validate_tt_number(tt_number)
-
     migrate_form_input = yield IPTrunkMigrateForm
 
     current_routers = [
diff --git a/gso/workflows/iptrunk/modify_isis_metric.py b/gso/workflows/iptrunk/modify_isis_metric.py
index 2e4a4586430cf3698d900924737c352463ed3343..285907b45508249794bb8c5fd486b62ed0b4dac6 100644
--- a/gso/workflows/iptrunk/modify_isis_metric.py
+++ b/gso/workflows/iptrunk/modify_isis_metric.py
@@ -12,6 +12,7 @@ from orchestrator.workflows.utils import wrap_modify_initial_input_form
 
 from gso.products.product_types.iptrunk import Iptrunk
 from gso.services.lso_client import execute_playbook, lso_interaction
+from gso.utils.types import TTNumber
 
 
 def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
@@ -19,7 +20,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     subscription = Iptrunk.from_subscription(subscription_id)
 
     class ModifyIptrunkForm(FormPage):
-        tt_number: str
+        tt_number: TTNumber
         isis_metric: int = subscription.iptrunk.iptrunk_isis_metric
 
     user_input = yield ModifyIptrunkForm
diff --git a/gso/workflows/iptrunk/modify_trunk_interface.py b/gso/workflows/iptrunk/modify_trunk_interface.py
index f5d17d7752503d8fdb8979d0eefadd7818af4936..394e369a88d1f64481750080410ffcb8f4335064 100644
--- a/gso/workflows/iptrunk/modify_trunk_interface.py
+++ b/gso/workflows/iptrunk/modify_trunk_interface.py
@@ -32,9 +32,9 @@ from gso.utils.helpers import (
     get_router_vendor,
     validate_interface_name_list,
     validate_iptrunk_unique_interface,
-    validate_tt_number,
 )
 from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, Vendor
+from gso.utils.types import TTNumber
 from gso.workflows.iptrunk.migrate_iptrunk import check_ip_trunk_optical_levels_pre
 from gso.workflows.iptrunk.validate_iptrunk import check_ip_trunk_isis
 
@@ -85,7 +85,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     subscription = Iptrunk.from_subscription(subscription_id)
 
     class ModifyIptrunkForm(FormPage):
-        tt_number: str
+        tt_number: TTNumber
         geant_s_sid: str | None = subscription.iptrunk.geant_s_sid
         iptrunk_description: str | None = subscription.iptrunk.iptrunk_description
         iptrunk_type: IptrunkType = subscription.iptrunk.iptrunk_type
@@ -103,10 +103,6 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             str(subscription.iptrunk.iptrunk_ipv6_network), default_type=IPv6AddressType
         )
 
-        @field_validator("tt_number")
-        def validate_tt_number(cls, tt_number: str) -> str:
-            return validate_tt_number(tt_number)
-
     initial_user_input = yield ModifyIptrunkForm
 
     recommended_minimum_links = calculate_recommended_minimum_links(
diff --git a/gso/workflows/iptrunk/terminate_iptrunk.py b/gso/workflows/iptrunk/terminate_iptrunk.py
index bee9739a9732ea3db1415e8d7955886b9dcaa2bb..bb1a6fd90b3d9a5e3b9aa9bb633db58cc2eb1cd4 100644
--- a/gso/workflows/iptrunk/terminate_iptrunk.py
+++ b/gso/workflows/iptrunk/terminate_iptrunk.py
@@ -16,15 +16,15 @@ from orchestrator.workflows.steps import (
     unsync,
 )
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import field_validator
 
 from gso.products.product_blocks.iptrunk import IptrunkSideBlock
 from gso.products.product_types.iptrunk import Iptrunk
 from gso.services import infoblox
 from gso.services.lso_client import execute_playbook, lso_interaction
 from gso.services.netbox_client import NetboxClient
-from gso.utils.helpers import get_router_vendor, validate_tt_number
+from gso.utils.helpers import get_router_vendor
 from gso.utils.shared_enums import Vendor
+from gso.utils.types import TTNumber
 from gso.utils.workflow_steps import set_isis_to_max
 
 
@@ -40,16 +40,12 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             )
             info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
-        tt_number: str
+        tt_number: TTNumber
         termination_label: Label = (
             "Please confirm whether configuration should get removed from the A and B sides of the trunk."
         )
         remove_configuration: bool = True
 
-        @field_validator("tt_number")
-        def validate_tt_number(cls, tt_number: str) -> str:
-            return validate_tt_number(tt_number)
-
     user_input = yield TerminateForm
     return user_input.model_dump()
 
diff --git a/gso/workflows/router/create_router.py b/gso/workflows/router/create_router.py
index 8382e227181233c145371f00aeb835ba91797f58..59dd6210578d9556fdf79ecacf7df0b5518fdbd5 100644
--- a/gso/workflows/router/create_router.py
+++ b/gso/workflows/router/create_router.py
@@ -25,6 +25,7 @@ from gso.services.sharepoint import SharePointClient
 from gso.settings import load_oss_params
 from gso.utils.helpers import generate_fqdn, iso_from_ipv4
 from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.types import TTNumber
 from gso.utils.workflow_steps import (
     deploy_base_config_dry,
     deploy_base_config_real,
@@ -48,7 +49,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     class CreateRouterForm(FormPage):
         model_config = ConfigDict(title=product_name)
 
-        tt_number: str
+        tt_number: TTNumber
         partner: ReadOnlyField("GEANT", default_type=str)  # type: ignore[valid-type]
         vendor: Vendor
         router_site: _site_selector()  # type: ignore[valid-type]
diff --git a/gso/workflows/router/modify_kentik_license.py b/gso/workflows/router/modify_kentik_license.py
new file mode 100644
index 0000000000000000000000000000000000000000..28d9d21f945a55001f5b1601227f26c5494067ec
--- /dev/null
+++ b/gso/workflows/router/modify_kentik_license.py
@@ -0,0 +1,68 @@
+"""A workflow that modifies the Kentik license of a router."""
+
+import logging
+from typing import Any
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State, UUIDstr
+from orchestrator.utils.errors import ProcessFailureError
+from orchestrator.workflow import StepList, begin, done, step, workflow
+from orchestrator.workflows.steps import store_process_subscription
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import model_validator
+from pydantic_forms.validators import Choice
+
+from gso.products.product_blocks.router import RouterRole
+from gso.products.product_types.router import Router
+from gso.services.kentik_client import KentikClient
+
+logger = logging.getLogger()
+
+
+def _initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
+    router = Router.from_subscription(subscription_id)
+    active_kentik_plans = {str(plan["id"]): plan["name"] for plan in KentikClient().get_plans() if plan["active"]}
+    available_kentik_plans = Choice(
+        "Select a Kentik license",
+        zip(active_kentik_plans.keys(), active_kentik_plans.values(), strict=True),  # type: ignore[arg-type]
+    )
+
+    class ModifyKentikLicenseForm(FormPage):
+        new_plan_id: available_kentik_plans  # type: ignore[valid-type]
+
+        @model_validator(mode="before")
+        def router_must_be_pe(cls, data: Any) -> Any:
+            if router.router.router_role != RouterRole.PE:
+                msg = "Can only update Kentik license for PE routers."
+                raise ValueError(msg)
+
+            return data
+
+    user_input = yield ModifyKentikLicenseForm
+
+    return user_input.model_dump() | {"subscription": router}
+
+
+@step("Update device license in Kentik")
+def update_kentik_license(subscription: Router, new_plan_id: int) -> State:
+    """Update a Kentik device with a new license attached to it."""
+    kentik_client = KentikClient()
+    kentik_device = kentik_client.get_device_by_name(subscription.router.router_fqdn)
+    if "id" not in kentik_device:
+        msg = "Failed to find Kentik device by name"
+        raise ProcessFailureError(msg, details=subscription.router.router_fqdn)
+
+    updated_kentik_device = kentik_client.update_device(kentik_device["id"], {"device": {"plan_id": new_plan_id}})
+
+    return {"kentik_device": updated_kentik_device}
+
+
+@workflow(
+    "Modify Kentik license",
+    initial_input_form=wrap_modify_initial_input_form(_initial_input_form),
+    target=Target.MODIFY,
+)
+def modify_router_kentik_license() -> StepList:
+    """Apply a selected Kentik license on an existing PE router."""
+    return begin >> store_process_subscription(Target.MODIFY) >> update_kentik_license >> done
diff --git a/gso/workflows/router/promote_p_to_pe.py b/gso/workflows/router/promote_p_to_pe.py
new file mode 100644
index 0000000000000000000000000000000000000000..74f478c075e615900ea5e3a8c0e996aca94b364c
--- /dev/null
+++ b/gso/workflows/router/promote_p_to_pe.py
@@ -0,0 +1,603 @@
+"""Promote a P router to a PE router."""
+
+import json
+from typing import Any
+
+from orchestrator.config.assignee import Assignee
+from orchestrator.forms import FormPage
+from orchestrator.forms.validators import Label
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State, UUIDstr
+from orchestrator.utils.errors import ProcessFailureError
+from orchestrator.utils.json import json_dumps
+from orchestrator.workflow import StepList, begin, done, inputstep, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import ConfigDict, model_validator
+
+from gso.products.product_blocks.router import RouterRole
+from gso.products.product_types.router import Router
+from gso.services import lso_client
+from gso.services.kentik_client import KentikClient, NewKentikDevice
+from gso.services.lso_client import lso_interaction
+from gso.services.subscriptions import get_all_active_sites
+from gso.utils.helpers import generate_inventory_for_active_routers
+from gso.utils.shared_enums import Vendor
+from gso.utils.types import TTNumber
+
+
+def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
+    """Promote P router to PE router."""
+    subscription = Router.from_subscription(subscription_id)
+
+    class PromotePToPEForm(FormPage):
+        model_config = ConfigDict(title=f"Promote {subscription.router.router_fqdn} to PE router?")
+
+        tt_number: TTNumber
+
+        @model_validator(mode="before")
+        def router_must_be_nokia_p(cls, data: Any) -> Any:
+            if not (subscription.router.router_role == RouterRole.P and subscription.router.vendor == Vendor.NOKIA):
+                msg = "PE promotion workflow can only be run on Nokia P routers."
+                raise ValueError(msg)
+
+            return data
+
+    user_input = yield PromotePToPEForm
+
+    return user_input.model_dump() | {"subscription": subscription}
+
+
+@step("Evacuate the router by setting isis_overload")
+def set_isis_overload(subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Evacuate the router by setting isis overload."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Set ISIS overload",
+        "verb": "set_isis_overload",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Deploy PE base config")
+def deploy_pe_base_config_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of adding the base config to the router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - deploy PE base config",
+        "verb": "deploy_pe_base_config",
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE)["all"]["hosts"],
+        "geant_sites": json.loads(json_dumps(get_all_active_sites())),
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Deploy PE base config")
+def deploy_pe_base_config_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of adding the base config to the router."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - deploy PE base config",
+        "verb": "deploy_pe_base_config",
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE)["all"]["hosts"],
+        "geant_sites": json.loads(json_dumps(get_all_active_sites())),
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@inputstep("Prompt EARL insertion", assignee=Assignee.SYSTEM)
+def prompt_insert_in_earl(subscription: dict[str, Any]) -> FormGenerator:
+    """Wait for confirmation from an operator that the router has been inserted in EARL."""
+
+    class EARLPrompt(FormPage):
+        model_config = ConfigDict(title="Insert router in EARL")
+
+        info_label: Label = f"Please add the router {subscription['router']['router_fqdn']} to EARL."
+
+    yield EARLPrompt
+
+    return {}
+
+
+@step("Create Kentik device")
+def create_kentik_device(subscription: Router) -> State:
+    """Create a new device in Kentik."""
+    if not (
+        subscription.router.router_site
+        and subscription.router.router_site.site_name
+        and subscription.router.router_site.site_tier
+        and subscription.router.router_fqdn
+    ):
+        msg = "Router object is missing required properties."
+        raise ProcessFailureError(msg)
+
+    kentik_client = KentikClient()
+    kentik_site = kentik_client.get_site_by_name(subscription.router.router_site.site_name)
+
+    if not kentik_site:
+        msg = "Site could not be found in Kentik."
+        raise ProcessFailureError(msg, details=subscription.router.router_site.site_name)
+
+    new_device = NewKentikDevice(
+        device_name=subscription.router.router_fqdn,
+        device_description=str(subscription.subscription_id),
+        sending_ips=[str(subscription.router.router_lo_ipv4_address)],
+        site_id=kentik_site["id"],
+        device_snmp_ip=str(subscription.router.router_lo_ipv4_address),
+        device_bgp_flowspec=False,
+        device_bgp_neighbor_ip=str(subscription.router.router_lo_ipv4_address),
+        device_bgp_neighbor_ip6=str(subscription.router.router_lo_ipv6_address),
+    )
+    kentik_device = kentik_client.create_device(new_device)
+
+    return {"kentik_device": kentik_device}
+
+
+@step("[DRY RUN] Include new PE into SDP mesh on other Nokia PEs")
+def update_sdp_mesh_dry(subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run for updating the SDP mesh with the new router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Update the SDP mesh for L2circuits(epipes) config on PE NOKIA routers",
+        "verb": "update_sdp_mesh",
+        "pe_router_list": {
+            subscription["router"]["router_fqdn"]: {
+                "lo4": str(subscription["router"]["router_lo_ipv4_address"]),
+                "lo6": str(subscription["router"]["router_lo_ipv6_address"]),
+            }
+        },
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_pe_sdp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(router_role=RouterRole.PE, router_vendor=Vendor.NOKIA),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Include new PE into SDP mesh on other Nokia PEs")
+def update_sdp_mesh_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Update the SDP mesh for L2 circuits(epipes) config on PE NOKIA routers."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Update the SDP mesh for l2circuits(epipes) config on PE NOKIA routers",
+        "verb": "update_sdp_mesh",
+        "pe_router_list": {
+            subscription["router"]["router_fqdn"]: {
+                "lo4": str(subscription["router"]["router_lo_ipv4_address"]),
+                "lo6": str(subscription["router"]["router_lo_ipv6_address"]),
+            }
+        },
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_pe_sdp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(router_role=RouterRole.PE, router_vendor=Vendor.NOKIA),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Remove P from all PEs")
+def remove_p_from_pe_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of removing the P router from all the PE routers."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove P-only router neighbour {subscription["router"]["router_fqdn"]} from PE routers P-only group",
+        "verb": "remove_p_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Remove P from all PEs")
+def remove_p_from_pe_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Remove the P router from all the PE routers."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Remove P-only router neighbour {subscription["router"]["router_fqdn"]} from PE routers P-only group",
+        "verb": "remove_p_from_pe",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Add PE mesh to PE")
+def add_pe_mesh_to_pe_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of adding list of PE routers into iGEANT/iGEANT6 of promoted router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add list of PE routers into iGEANT/iGEANT6 of promoted router",
+        "verb": "add_pe_mesh_to_pe",
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE)["all"]["hosts"],
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add PE mesh to PE")
+def add_pe_mesh_to_pe_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of adding list of PE routers into iGEANT/iGEANT6 of promoted router."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add list of PE routers into iGEANT/iGEANT6 of promoted router",
+        "verb": "add_pe_mesh_to_pe",
+        "pe_router_list": generate_inventory_for_active_routers(RouterRole.PE)["all"]["hosts"],
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Add PE to PE mesh")
+def add_pe_to_pe_mesh_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of adding the promoted router to all PE routers in iGEANT/iGEANT6."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add promoted router to all PE routers in iGEANT/iGEANT6.",
+        "verb": "add_pe_to_pe_mesh",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add PE to PE mesh")
+def add_pe_to_pe_mesh_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of adding the promoted router to all PE routers in iGEANT/iGEANT6."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add promoted router to all PE routers in iGEANT/iGEANT6.",
+        "verb": "add_pe_to_pe_mesh",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.PE),
+        extra_vars=extra_vars,
+    )
+
+
+@step("Check iBGP session")
+def check_pe_ibgp(subscription: dict[str, Any], callback_route: str) -> None:
+    """Check the iBGP session."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "verb": "check_pe_ibgp",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="check_ibgp.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Deploy routing instances")
+def deploy_routing_instances_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of deploying routing instances."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Deploy PE base config",
+        "verb": "deploy_routing_instances",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Deploy routing instances")
+def deploy_routing_instances_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of deploying routing instances."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Deploy PE base config",
+        "verb": "deploy_routing_instances",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("Check L3 services")
+def check_l3_services(subscription: dict[str, Any], callback_route: str) -> None:
+    """Check L3 services."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "verb": "check_base_ris",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="check_l3_services.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("Remove ISIS overload")
+def remove_isis_overload(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Remove ISIS overload."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Remove ISIS overload",
+        "verb": "remove_isis_overload",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("Set router role to PE (Update subscription model)")
+def update_subscription_model(subscription: Router) -> State:
+    """Update the subscription model to set router role to PE.
+
+    :param subscription: The promoted router subscription.
+    :return: A state dictionary with the updated subscription, its role now ``RouterRole.PE``.
+    """
+    subscription.router.router_role = RouterRole.PE
+
+    return {"subscription": subscription}
+
+
+@step("[DRY RUN] Add all P to PE")
+def add_all_p_to_pe_dry(subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run of adding all P routers to the PE router."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Add all P-routers to this new PE",
+        "verb": "add_all_p_to_pe",
+        "p_router_list": generate_inventory_for_active_routers(RouterRole.P)["all"]["hosts"],
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add all P to PE")
+def add_all_p_to_pe_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of adding all P routers to the PE router."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - Add all P-routers to this new PE",
+        "verb": "add_all_p_to_pe",
+        "p_router_list": generate_inventory_for_active_routers(RouterRole.P)["all"]["hosts"],
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Add PE to all P")
+def add_pe_to_all_p_dry(subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr) -> None:
+    """Perform a dry run of adding promoted router to all PE routers in iGEANT/iGEANT6."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add promoted router to all PE routers in iGEANT/iGEANT6",
+        "verb": "add_pe_to_all_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.P),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Add PE to all P")
+def add_pe_to_all_p_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of adding promoted router to all PE routers in iGEANT/iGEANT6."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Add promoted router to all PE routers in iGEANT/iGEANT6",
+        "verb": "add_pe_to_all_p",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="update_ibgp_mesh.yaml",
+        callback_route=callback_route,
+        inventory=generate_inventory_for_active_routers(RouterRole.P),
+        extra_vars=extra_vars,
+    )
+
+
+@step("[DRY RUN] Delete default routes")
+def delete_default_routes_dry(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a dry run of deleting the default routes."""
+    extra_vars = {
+        "dry_run": True,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Delete static default routes (part of P base-config)",
+        "verb": "delete_default_routes",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@step("[FOR REAL] Delete default routes")
+def delete_default_routes_real(
+    subscription: dict[str, Any], callback_route: str, tt_number: str, process_id: UUIDstr
+) -> None:
+    """Perform a real run of deleting the default routes."""
+    extra_vars = {
+        "dry_run": False,
+        "subscription": subscription,
+        "commit_comment": f"GSO_PROCESS_ID: {process_id} - TT_NUMBER: {tt_number} - "
+        f"Delete static default routes (part of P base-config)",
+        "verb": "delete_default_routes",
+    }
+
+    lso_client.execute_playbook(
+        playbook_name="promote_p_to_pe.yaml",
+        callback_route=callback_route,
+        inventory=subscription["router"]["router_fqdn"],
+        extra_vars=extra_vars,
+    )
+
+
+@workflow(
+    "Promote P router to PE router",
+    initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
+    target=Target.MODIFY,
+)
+def promote_p_to_pe() -> StepList:
+    """Promote a P router to a PE router.
+
+    Most configuration steps run as an LSO playbook interaction, first as a dry
+    run and then for real against the same inventory.
+    """
+    return (
+        begin
+        >> store_process_subscription(Target.MODIFY)
+        >> unsync
+        >> lso_interaction(set_isis_overload)
+        #  Deploy PE base config and register the router in EARL and Kentik.
+        >> lso_interaction(deploy_pe_base_config_dry)
+        >> lso_interaction(deploy_pe_base_config_real)
+        >> prompt_insert_in_earl
+        >> create_kentik_device
+        #  Rework the iBGP mesh: drop the P-only role, then peer with the PEs.
+        >> lso_interaction(remove_p_from_pe_dry)
+        >> lso_interaction(remove_p_from_pe_real)
+        >> lso_interaction(add_pe_mesh_to_pe_dry)
+        >> lso_interaction(add_pe_mesh_to_pe_real)
+        >> lso_interaction(add_pe_to_pe_mesh_dry)
+        >> lso_interaction(add_pe_to_pe_mesh_real)
+        >> lso_interaction(check_pe_ibgp)
+        #  Deploy routing instances and verify L3 services.
+        >> lso_interaction(deploy_routing_instances_dry)
+        >> lso_interaction(deploy_routing_instances_real)
+        >> lso_interaction(check_l3_services)
+        #  Include the new PE in the SDP mesh and the P routers' sessions.
+        >> lso_interaction(update_sdp_mesh_dry)
+        >> lso_interaction(update_sdp_mesh_real)
+        >> lso_interaction(add_all_p_to_pe_dry)
+        >> lso_interaction(add_all_p_to_pe_real)
+        >> lso_interaction(add_pe_to_all_p_dry)
+        >> lso_interaction(add_pe_to_all_p_real)
+        #  Clean up P-specific config and put the router back in service.
+        >> lso_interaction(delete_default_routes_dry)
+        >> lso_interaction(delete_default_routes_real)
+        >> lso_interaction(remove_isis_overload)
+        >> update_subscription_model
+        >> resync
+        >> done
+    )
diff --git a/gso/workflows/router/redeploy_base_config.py b/gso/workflows/router/redeploy_base_config.py
index c1a24c8340dc6c129fa99c1bb93be528bd85bd18..b30d02f16b6bb197a20257f9016eabb6dad0d8fa 100644
--- a/gso/workflows/router/redeploy_base_config.py
+++ b/gso/workflows/router/redeploy_base_config.py
@@ -10,6 +10,7 @@ from orchestrator.workflows.utils import wrap_modify_initial_input_form
 
 from gso.products.product_types.router import Router
 from gso.services.lso_client import lso_interaction
+from gso.utils.types import TTNumber
 from gso.utils.workflow_steps import deploy_base_config_dry, deploy_base_config_real
 
 
@@ -18,7 +19,7 @@ def _initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
 
     class RedeployBaseConfigForm(FormPage):
         info_label: Label = f"Redeploy base config on {router.router.router_fqdn}?"
-        tt_number: str
+        tt_number: TTNumber
 
     user_input = yield RedeployBaseConfigForm
 
diff --git a/gso/workflows/router/terminate_router.py b/gso/workflows/router/terminate_router.py
index 688e3ecc21543ddc5ca678296b763fe7c6194bb5..5b0c6ef546c9cbb08d93c5cd277f8e31e71f57c1 100644
--- a/gso/workflows/router/terminate_router.py
+++ b/gso/workflows/router/terminate_router.py
@@ -3,11 +3,13 @@
 import ipaddress
 import json
 import logging
+from typing import Any
 
 from orchestrator.forms import FormPage
 from orchestrator.forms.validators import Label
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, SubscriptionLifecycle, UUIDstr
+from orchestrator.utils.errors import ProcessFailureError
 from orchestrator.utils.json import json_dumps
 from orchestrator.workflow import StepList, begin, conditional, done, step, workflow
 from orchestrator.workflows.steps import (
@@ -21,11 +23,14 @@ from orchestrator.workflows.utils import wrap_modify_initial_input_form
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
 from gso.services import infoblox, lso_client
+from gso.services.kentik_client import KentikClient
 from gso.services.librenms_client import LibreNMSClient
 from gso.services.lso_client import execute_playbook, lso_interaction
 from gso.services.netbox_client import NetboxClient
+from gso.settings import load_oss_params
 from gso.utils.helpers import generate_inventory_for_active_routers
 from gso.utils.shared_enums import Vendor
+from gso.utils.types import TTNumber
 
 logger = logging.getLogger(__name__)
 
@@ -42,7 +47,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             )
             info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
-        tt_number: str
+        tt_number: TTNumber
         termination_label: Label = "Please confirm whether configuration should get removed from the router."
         remove_configuration: bool = True
         update_ibgp_mesh_label: Label = "Please confirm whether the iBGP mesh should get updated."
@@ -231,10 +236,28 @@ def remove_pe_from_all_p_real(subscription: Router, callback_route: str, tt_numb
 
 
 @step("Remove Device from Librenms")
-def remove_device_from_librenms(subscription: Router) -> dict[str, Router]:
+def remove_device_from_librenms(subscription: Router) -> None:
     """Remove the device from LibreNMS."""
     LibreNMSClient().remove_device(subscription.router.router_fqdn)
-    return {"subscription": subscription}
+
+
+@step("Apply the archiving license in Kentik")
+def kentik_apply_archive_license(subscription: Router) -> dict[str, dict[str, Any]]:
+    """Apply the archiving license to a PE router in Kentik.
+
+    This includes setting the flow rate to one flow per second.
+    """
+    kentik_client = KentikClient()
+    kentik_archive_plan_id = kentik_client.get_plan_by_name(load_oss_params().KENTIK.archive_license_key)["id"]
+    kentik_device = kentik_client.get_device_by_name(subscription.router.router_fqdn)
+    if "id" not in kentik_device:
+        msg = "Failed to find Kentik device by name"
+        raise ProcessFailureError(msg, details=kentik_device)
+
+    updated_device = {"device": {"plan_id": kentik_archive_plan_id, "device_sample_rate": 1}}
+    kentik_device = kentik_client.update_device(kentik_device["id"], updated_device)
+
+    return {"kentik_device": kentik_device}
 
 
 @workflow(
@@ -271,6 +294,7 @@ def terminate_router() -> StepList:
         >> run_config_steps(lso_interaction(remove_config_from_router_real))
         >> router_is_nokia(remove_device_from_netbox)
         >> remove_device_from_librenms
+        >> router_is_pe(kentik_apply_archive_license)
         >> set_status(SubscriptionLifecycle.TERMINATED)
         >> resync
         >> done
diff --git a/gso/workflows/router/update_ibgp_mesh.py b/gso/workflows/router/update_ibgp_mesh.py
index 58d207029c9431d7252ddf2b5c87e171282944ba..8fbb2813c94cff8443647a033c146e93f34b7ca4 100644
--- a/gso/workflows/router/update_ibgp_mesh.py
+++ b/gso/workflows/router/update_ibgp_mesh.py
@@ -18,6 +18,7 @@ from gso.services import librenms_client, lso_client
 from gso.services.lso_client import lso_interaction
 from gso.services.subscriptions import get_trunks_that_terminate_on_router
 from gso.utils.helpers import SNMPVersion, generate_inventory_for_active_routers
+from gso.utils.types import TTNumber
 
 
 def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
@@ -31,7 +32,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class AddBGPSessionForm(FormPage):
         model_config = ConfigDict(title=f"Add {subscription.router.router_fqdn} to the iBGP mesh?")
 
-        tt_number: str
+        tt_number: TTNumber
 
         @model_validator(mode="before")
         def router_has_a_trunk(cls, data: Any) -> Any:
diff --git a/gso/workflows/router/validate_router.py b/gso/workflows/router/validate_router.py
index 87d4870061ebf51e0c64900b135213f22602667b..aa22b773d6004862aaab0b8c7ec2bf08b89f24be 100644
--- a/gso/workflows/router/validate_router.py
+++ b/gso/workflows/router/validate_router.py
@@ -14,6 +14,7 @@ from pydantic_forms.types import State, UUIDstr
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
 from gso.services import infoblox, lso_client
+from gso.services.kentik_client import KentikClient
 from gso.services.librenms_client import LibreNMSClient
 from gso.services.lso_client import anonymous_lso_interaction, execute_playbook
 from gso.services.netbox_client import NetboxClient
@@ -84,6 +85,20 @@ def check_librenms_entry_exists(subscription: Router) -> None:
         raise ProcessFailureError(message="LibreNMS configuration error", details=errors)
 
 
+@step("Verify Kentik entry for PE router")
+def check_kentik_entry_exists(subscription: Router) -> None:
+    """Validate the Kentik entry for a PE Router.
+
+    Raises a ``ProcessFailureError`` when no device is found in Kentik for the
+    router's FQDN.
+    """
+    client = KentikClient()
+    device = client.get_device_by_name(subscription.router.router_fqdn)
+    if not device:
+        raise ProcessFailureError(
+            message="Device not found in Kentik", details={"device": subscription.router.router_fqdn}
+        )
+
+
 @step("Check base config for drift")
 def verify_base_config(subscription: Router, callback_route: str) -> None:
     """Workflow step for running a playbook that checks whether base config has drifted."""
@@ -113,6 +128,7 @@ def validate_router() -> StepList:
     * Validate configuration of the iBGP mesh
     """
     is_juniper_router = conditional(lambda state: state["subscription"]["router"]["vendor"] == Vendor.JUNIPER)
+    is_pe_router = conditional(lambda state: state["subscription"]["router"]["router_role"] == RouterRole.PE)
 
     return (
         begin
@@ -123,6 +139,7 @@ def validate_router() -> StepList:
         >> verify_ipam_loopback
         >> check_netbox_entry_exists
         >> check_librenms_entry_exists
+        >> is_pe_router(check_kentik_entry_exists)
         >> anonymous_lso_interaction(verify_base_config)
         >> anonymous_lso_interaction(verify_p_ibgp)
         >> resync
diff --git a/gso/workflows/tasks/clean_old_tasks.py b/gso/workflows/tasks/clean_old_tasks.py
new file mode 100644
index 0000000000000000000000000000000000000000..13563459262da03f8089d54523da50503cfdf035
--- /dev/null
+++ b/gso/workflows/tasks/clean_old_tasks.py
@@ -0,0 +1,30 @@
+"""A cleanup task for removing past runs."""
+
+from orchestrator.db import db
+from orchestrator.targets import Target
+from orchestrator.workflow import StepList, begin, done, step, workflow
+
+from gso.services.processes import get_all_cleanup_tasks, get_created_and_completed_processes_by_id
+
+
+@step("Remove old cleanup tasks")
+def remove_old_cleanup_tasks() -> None:
+    """Find and remove runs of old cleanup tasks."""
+    cleanup_tasks = get_all_cleanup_tasks()
+
+    for cleanup_task in cleanup_tasks:
+        tasks = get_created_and_completed_processes_by_id(cleanup_task.workflow_id)
+
+        for task in tasks:
+            db.session.delete(task)
+
+
+@workflow("Remove old cleanup tasks", target=Target.SYSTEM)
+def task_clean_old_tasks() -> StepList:
+    """Remove all runs of old cleanup tasks.
+
+    This will look for all past executions of the ``orchestrator-core`` built-in cleanup task, and this current cleanup
+    task. Once they have run to completion, or are stuck in a "created" state, they serve no purpose in the database and
+    can therefore be removed.
+
+    The single ``remove_old_cleanup_tasks`` step performs the deletion.
+    """
+    return begin >> remove_old_cleanup_tasks >> done
diff --git a/gso/workflows/tasks/send_email_notifications.py b/gso/workflows/tasks/send_email_notifications.py
index f346cf1f5d14db2f1b00e9307ae22b83b609066d..42b94bd421f4d2e45af354efa00205f975530c07 100644
--- a/gso/workflows/tasks/send_email_notifications.py
+++ b/gso/workflows/tasks/send_email_notifications.py
@@ -5,7 +5,8 @@ from orchestrator.types import State
 from orchestrator.workflow import StepList, conditional, done, init, step, workflow
 
 from gso.services.mailer import send_mail
-from gso.services.subscriptions import get_failed_tasks, get_subscription_by_process_id
+from gso.services.processes import get_failed_tasks
+from gso.services.subscriptions import get_subscription_by_process_id
 from gso.settings import load_oss_params
 
 
diff --git a/requirements.txt b/requirements.txt
index 8878a45c8d77880d89dfacc73fa89a5cc88cdf5f..b751e9c033cdc1edb98813563e8174876037a363 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,6 +7,7 @@ celery-redbeat==2.2.0
 celery==5.3.6
 azure-identity==1.16.0
 msgraph-sdk==1.2.0
+ping3==4.0.8
 
 # Test and linting dependencies
 celery-stubs==0.1.3
diff --git a/setup.py b/setup.py
index c61a2ba1d1da316081a80efe62328a2888138868..3eb6376d0268c2b5f6291676a11ac698f24314d1 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="geant-service-orchestrator",
-    version="2.11",
+    version="2.12",
     author="GÉANT Orchestration and Automation Team",
     author_email="goat@geant.org",
     description="GÉANT Service Orchestrator",
@@ -20,6 +20,7 @@ setup(
         "celery==5.3.6",
         "azure-identity==1.16.0",
         "msgraph-sdk==1.2.0",
+        "ping3==4.0.8",
     ],
     include_package_data=True,
 )
diff --git a/test-docs.sh b/test-docs.sh
index cdc50473e43e7e6c7356e89a5c994bac86f141c2..2adbe6b0680a8ee99fcb245ae1fec3713f191f4f 100755
--- a/test-docs.sh
+++ b/test-docs.sh
@@ -1,8 +1,4 @@
 #!/bin/bash
 
-if [ ! -d ./docs/vale/styles/proselint ] || [ ! -d ./docs/vale/styles/Microsoft ]; then
-  docker run -it --rm -v "$(pwd)"/docs:/docs jdkato/vale:latest --config="/docs/vale/.vale.ini" sync
-fi
-
-docker run -it --rm -v "$(pwd)":/gso jdkato/vale:latest --glob='!*/migrations/*' \
---config="/gso/docs/vale/.vale.ini" /gso/docs/source /gso/gso
+vale --config=docs/vale/.vale.ini sync
+vale --glob='!*/migrations/*' --config=docs/vale/.vale.ini docs/source gso
diff --git a/test/services/conftest.py b/test/services/conftest.py
index 9aee570a13f12985576708e9bac7740e5670b103..9557b1c5b091e51150ab78cf4b58c62f8018df8e 100644
--- a/test/services/conftest.py
+++ b/test/services/conftest.py
@@ -1,3 +1,8 @@
+from typing import Any
+
+from gso.services.kentik_client import NewKentikDevice
+
+
 class MockedNetboxClient:
     class BaseMockObject:
         def __init__(self, **kwargs):
@@ -57,3 +62,58 @@ class MockedSharePointClient:
     @staticmethod
     def add_list_item(list_name: str, fields: dict[str, str]) -> str:
         return f"http://{list_name}/{fields.popitem()}"
+
+
+class MockedKentikClient:
+    """Static stand-in for ``KentikClient`` that returns canned payloads for tests."""
+
+    class BaseMockObject:
+        #  Generic attribute bag: any keyword argument becomes an instance attribute.
+        def __init__(self, **kwargs):
+            for key, value in kwargs.items():
+                setattr(self, key, value)
+
+    @staticmethod
+    def get_devices() -> list[dict[str, Any]]:
+        return [{"id": 0, "device_name": "device-1.internal"}, {"id": 1, "device_name": "device-2.internal"}]
+
+    @staticmethod
+    def get_device(device_id: str) -> dict[str, Any]:
+        #  Echoes the requested ID back with a fixed name.
+        return {"id": device_id, "device_name": "device-1.internal"}
+
+    @staticmethod
+    def get_device_by_name(fqdn: str) -> dict[str, Any]:
+        #  Always "finds" the device, echoing the requested FQDN.
+        return {"id": 0, "device_name": fqdn}
+
+    @staticmethod
+    def get_sites() -> list[dict[str, Any]]:
+        return [{"id": 0, "site_name": "AMS"}, {"id": 1, "site_name": "BRU"}, {"id": 2, "site_name": "LUX"}]
+
+    @staticmethod
+    def get_site_by_name(site_name: str) -> dict[str, Any]:
+        return {"id": 0, "site_name": site_name}
+
+    @staticmethod
+    def get_plans() -> list[dict[str, Any]]:
+        return [{"id": 0, "plan_name": "kentik-plan-1"}, {"id": 1, "plan_name": "kentik-plan-2"}]
+
+    @staticmethod
+    def get_plan(plan_id: int) -> dict[str, Any]:
+        return {"id": plan_id, "plan_name": "kentik-mocked-plan"}
+
+    @staticmethod
+    def get_plan_by_name(plan_name: str) -> dict[str, Any]:
+        return {"id": 0, "plan_name": plan_name}
+
+    @staticmethod
+    def create_device(device: NewKentikDevice) -> dict[str, Any]:
+        #  Returns the would-be created device as a plain dict.
+        return device.model_dump()
+
+    @staticmethod
+    def update_device(device_id: int, updated_device: dict[str, Any]) -> dict[str, Any]:
+        #  Merges the update payload with the given ID, as a real update response would.
+        return {"id": device_id, **updated_device}
+
+    @staticmethod
+    def remove_device(device_id: int, *, archive: bool) -> None:
+        pass
+
+    @staticmethod
+    def remove_device_by_fqdn(fqdn: str, *, archive: bool) -> None:
+        pass
diff --git a/test/workflows/iptrunk/test_create_iptrunk.py b/test/workflows/iptrunk/test_create_iptrunk.py
index de15a7124e0311e4a3127ccc10477fd39a4a02b7..117444b7a33b5725df7558c2a3f7cd67b1cd00f1 100644
--- a/test/workflows/iptrunk/test_create_iptrunk.py
+++ b/test/workflows/iptrunk/test_create_iptrunk.py
@@ -2,6 +2,7 @@ from os import PathLike
 from unittest.mock import patch
 
 import pytest
+from infoblox_client.objects import HostRecord
 
 from gso.products import Iptrunk, ProductName
 from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
@@ -11,6 +12,7 @@ from test import USER_CONFIRM_EMPTY_FORM
 from test.services.conftest import MockedNetboxClient, MockedSharePointClient
 from test.workflows import (
     assert_complete,
+    assert_failed,
     assert_lso_interaction_failure,
     assert_lso_interaction_success,
     assert_suspended,
@@ -102,9 +104,13 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.create_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.find_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.ping")
 @patch("gso.workflows.iptrunk.create_iptrunk.SharePointClient")
 def test_successful_iptrunk_creation_with_standard_lso_result(
     mock_sharepoint_client,
+    mock_ping,
+    mock_find_host_by_ip,
     mock_create_host,
     mock_allocate_v4_network,
     mock_allocate_v6_network,
@@ -117,8 +123,10 @@ def test_successful_iptrunk_creation_with_standard_lso_result(
     test_client,
 ):
     mock_create_host.return_value = None
-    mock_allocate_v4_network.return_value = faker.ipv4_network(max_subnet=31)
-    mock_allocate_v6_network.return_value = faker.ipv6_network(max_subnet=126)
+    mock_allocate_v4_network.return_value = faker.ipv4_network(min_subnet=31, max_subnet=31)
+    mock_allocate_v6_network.return_value = faker.ipv6_network(min_subnet=126, max_subnet=126)
+    mock_find_host_by_ip.return_value = None
+    mock_ping.return_value = False
     mock_sharepoint_client.return_value = MockedSharePointClient
 
     product_id = get_product_id_by_name(ProductName.IP_TRUNK)
@@ -147,13 +155,20 @@ def test_successful_iptrunk_creation_with_standard_lso_result(
     )
 
     assert mock_execute_playbook.call_count == 6
+    #  We search for 6 hosts in total, 2 in a /31 and 4 in a /126
+    assert mock_find_host_by_ip.call_count == 6
+    assert mock_ping.call_count == 6
 
 
 @pytest.mark.workflow()
 @patch("gso.workflows.iptrunk.create_iptrunk.execute_playbook")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.find_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.ping")
 def test_iptrunk_creation_fails_when_lso_return_code_is_one(
+    mock_ping,
+    mock_find_host_by_ip,
     mock_allocate_v4_network,
     mock_allocate_v6_network,
     mock_execute_playbook,
@@ -163,8 +178,10 @@ def test_iptrunk_creation_fails_when_lso_return_code_is_one(
     _netbox_client_mock,  # noqa: PT019
     data_config_filename: PathLike,
 ):
-    mock_allocate_v4_network.return_value = faker.ipv4_network(max_subnet=31)
-    mock_allocate_v6_network.return_value = faker.ipv6_network(max_subnet=126)
+    mock_allocate_v4_network.return_value = faker.ipv4_network(min_subnet=31, max_subnet=31)
+    mock_allocate_v6_network.return_value = faker.ipv6_network(min_subnet=126, max_subnet=126)
+    mock_find_host_by_ip.return_value = None
+    mock_ping.return_value = False
     product_id = get_product_id_by_name(ProductName.IP_TRUNK)
 
     initial_site_data = [{"product": product_id}, *input_form_wizard_data]
@@ -175,6 +192,8 @@ def test_iptrunk_creation_fails_when_lso_return_code_is_one(
     assert_lso_interaction_failure(result, process_stat, step_log)
 
     assert mock_execute_playbook.call_count == 2
+    assert mock_find_host_by_ip.call_count == 6
+    assert mock_ping.call_count == 6
 
 
 @pytest.mark.parametrize("input_form_wizard_data", [Vendor.JUNIPER], indirect=True)
@@ -183,9 +202,13 @@ def test_iptrunk_creation_fails_when_lso_return_code_is_one(
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.create_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.find_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.ping")
 @patch("gso.workflows.iptrunk.create_iptrunk.SharePointClient")
 def test_successful_iptrunk_creation_with_juniper_interface_names(
     mock_sharepoint_client,
+    mock_ping,
+    mock_find_host_by_ip,
     mock_create_host,
     mock_allocate_v4_network,
     mock_allocate_v6_network,
@@ -198,8 +221,11 @@ def test_successful_iptrunk_creation_with_juniper_interface_names(
     test_client,
 ):
     mock_create_host.return_value = None
-    mock_allocate_v4_network.return_value = faker.ipv4_network(max_subnet=31)
-    mock_allocate_v6_network.return_value = faker.ipv6_network(max_subnet=126)
+    mock_allocate_v4_network.return_value = faker.ipv4_network(min_subnet=31, max_subnet=31)
+    mock_allocate_v6_network.return_value = faker.ipv6_network(min_subnet=126, max_subnet=126)
+    mock_find_host_by_ip.return_value = None
+    mock_ping.return_value = False
+
     mock_sharepoint_client.return_value = MockedSharePointClient
     product_id = get_product_id_by_name(ProductName.IP_TRUNK)
     initial_site_data = [{"product": product_id}, *input_form_wizard_data]
@@ -213,3 +239,71 @@ def test_successful_iptrunk_creation_with_juniper_interface_names(
 
     assert_complete(result)
     assert mock_execute_playbook.call_count == 6
+    assert mock_find_host_by_ip.call_count == 6
+    assert mock_ping.call_count == 6
+
+
+@pytest.mark.workflow()
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.find_host_by_ip")
+def test_iptrunk_creation_with_taken_dns_record(
+    mock_find_host_by_ip,
+    mock_allocate_v4_network,
+    mock_allocate_v6_network,
+    input_form_wizard_data,
+    faker,
+    _netbox_client_mock,  # noqa: PT019
+):
+    mock_allocate_v4_network.return_value = faker.ipv4_network(min_subnet=31, max_subnet=31)
+    mock_allocate_v6_network.return_value = faker.ipv6_network(min_subnet=126, max_subnet=126)
+    mock_find_host_by_ip.return_value = HostRecord(connector=None, hostname="fake.internal")
+
+    product_id = get_product_id_by_name(ProductName.IP_TRUNK)
+    initial_site_data = [{"product": product_id}, *input_form_wizard_data]
+    result, _, _ = run_workflow("create_iptrunk", initial_site_data)
+
+    assert_failed(result)
+
+    state = extract_state(result)
+    assert (
+        state["error"] == "One or more hosts in the assigned IPv4 network are already registered, please investigate."
+    )
+
+    #  We search for 2 hosts in a /31 and then fail the workflow
+    assert mock_find_host_by_ip.call_count == 2
+
+
+@pytest.mark.workflow()
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
+@patch("gso.workflows.iptrunk.create_iptrunk.infoblox.find_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.ping")
+def test_iptrunk_creation_with_taken_ip_address(
+    mock_ping,
+    mock_find_host_by_ip,
+    mock_allocate_v4_network,
+    mock_allocate_v6_network,
+    input_form_wizard_data,
+    faker,
+    _netbox_client_mock,  # noqa: PT019
+):
+    mock_allocate_v4_network.return_value = faker.ipv4_network(min_subnet=31, max_subnet=31)
+    mock_allocate_v6_network.return_value = faker.ipv6_network(min_subnet=126, max_subnet=126)
+    mock_find_host_by_ip.return_value = None
+    mock_ping.return_value = True
+
+    product_id = get_product_id_by_name(ProductName.IP_TRUNK)
+    initial_site_data = [{"product": product_id}, *input_form_wizard_data]
+    result, _, _ = run_workflow("create_iptrunk", initial_site_data)
+
+    assert_failed(result)
+
+    state = extract_state(result)
+    assert (
+        state["error"] == "One or more hosts in the assigned IPv4 network are responding to ping, please investigate."
+    )
+
+    assert mock_find_host_by_ip.call_count == 6
+    #  We ping 2 hosts in a /31 and then fail the workflow
+    assert mock_ping.call_count == 2
diff --git a/test/workflows/router/test_promote_p_to_pe.py b/test/workflows/router/test_promote_p_to_pe.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bd7fdca1a65e0ad7bd0e1df59883cd020e9aca0
--- /dev/null
+++ b/test/workflows/router/test_promote_p_to_pe.py
@@ -0,0 +1,91 @@
+from unittest.mock import patch
+
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+from pydantic_forms.exceptions import FormValidationError
+
+from gso.products.product_blocks.router import RouterRole
+from test import USER_CONFIRM_EMPTY_FORM
+from test.services.conftest import MockedKentikClient
+from test.workflows import (
+    assert_complete,
+    assert_lso_interaction_success,
+    extract_state,
+    resume_workflow,
+    run_workflow,
+)
+
+
+@pytest.mark.workflow()
+@patch("gso.workflows.router.promote_p_to_pe.lso_client.execute_playbook")
+@patch("gso.workflows.router.promote_p_to_pe.KentikClient")
+def test_promote_p_to_pe_success(
+    mock_kentik_client,
+    mock_execute_playbook,
+    nokia_router_subscription_factory,
+    data_config_filename,
+    faker,
+):
+    """Test the successful promotion of a Nokia P router to a PE router."""
+    mock_kentik_client.return_value = MockedKentikClient
+    router_id = nokia_router_subscription_factory(router_role=RouterRole.P, status=SubscriptionLifecycle.ACTIVE)
+    input_data = [{"subscription_id": router_id}, {"tt_number": faker.tt_number()}]
+    result, process_stat, step_log = run_workflow("promote_p_to_pe", input_data)
+    for _ in range(3):
+        result, step_log = assert_lso_interaction_success(result, process_stat, step_log)
+    result, step_log = resume_workflow(process_stat, step_log, input_data=USER_CONFIRM_EMPTY_FORM)
+    for _ in range(19):
+        result, step_log = assert_lso_interaction_success(result, process_stat, step_log)
+    state = extract_state(result)
+    assert_complete(result)
+    assert mock_execute_playbook.call_count == 22
+    assert state["subscription"]["router"]["router_role"] == RouterRole.PE
+
+
+@pytest.mark.workflow()
+def test_promote_p_to_pe_juniper_router(juniper_router_subscription_factory, data_config_filename, faker):
+    """Test that the workflow does not run for a Juniper P router since this workflow is only for Nokia routers."""
+    router_id = juniper_router_subscription_factory(router_role=RouterRole.P, status=SubscriptionLifecycle.ACTIVE)
+    input_data = [{"subscription_id": router_id}, {"tt_number": faker.tt_number()}]
+    with pytest.raises(FormValidationError) as error:
+        run_workflow("promote_p_to_pe", input_data)
+    error = error.value.errors[0]
+    assert error["msg"] == "PE promotion workflow can only be run on Nokia P routers."
+    assert error["loc"][0] == "__root__"
+
+
+@pytest.mark.workflow()
+@patch("gso.workflows.router.promote_p_to_pe.lso_client.execute_playbook")
+def test_promote_p_to_pe_nokia_pe_router(
+    mock_execute_playbook, nokia_router_subscription_factory, data_config_filename, faker
+):
+    """Test that the workflow does not run for a Nokia PE router since it is already a PE router."""
+    router_id = nokia_router_subscription_factory(router_role=RouterRole.PE, status=SubscriptionLifecycle.ACTIVE)
+    input_data = [{"subscription_id": router_id}, {"tt_number": faker.tt_number()}]
+    with pytest.raises(FormValidationError) as error:
+        run_workflow("promote_p_to_pe", input_data)
+    error = error.value.errors[0]
+    assert error["msg"] == "PE promotion workflow can only be run on Nokia P routers."
+    assert error["loc"][0] == "__root__"
+
+
+def test_promote_p_to_pe_missing_tt_number(nokia_router_subscription_factory):
+    """Test that a missing TT number results in a validation error."""
+    router_id = nokia_router_subscription_factory(router_role=RouterRole.P, status=SubscriptionLifecycle.ACTIVE)
+    with pytest.raises(FormValidationError) as error:
+        run_workflow("promote_p_to_pe", [{"subscription_id": router_id}, {}])
+    error = error.value.errors[0]
+    assert error["msg"] == "Field required"
+    assert error["loc"][0] == "tt_number"
+
+
+def test_promote_p_to_pe_with_invalid_router_life_cycle(nokia_router_subscription_factory, faker):
+    """Test that the router life cycle must be ACTIVE to run this workflow."""
+    router_id = nokia_router_subscription_factory(router_role=RouterRole.P, status=SubscriptionLifecycle.PROVISIONING)
+    with pytest.raises(FormValidationError) as error:
+        run_workflow("promote_p_to_pe", [{"subscription_id": router_id}, {"tt_number": faker.tt_number()}])
+    error = error.value.errors[0]
+    assert error["msg"] == (
+        "This workflow cannot be started: This subscription can not be modified because of the status it has"
+    )
+    assert error["loc"][0] == "subscription_id"
diff --git a/test/workflows/router/test_terminate_router.py b/test/workflows/router/test_terminate_router.py
index a41e8b32e6dde2ddca6ee716b7ce4227751ec62f..f2cb638ab1cf5f918184e218c0f2d31d4f65cc87 100644
--- a/test/workflows/router/test_terminate_router.py
+++ b/test/workflows/router/test_terminate_router.py
@@ -4,6 +4,7 @@ import pytest
 
 from gso.products import Router
 from gso.products.product_blocks.router import RouterRole
+from test.services.conftest import MockedKentikClient
 from test.workflows import assert_complete, assert_lso_interaction_success, extract_state, run_workflow
 
 
@@ -13,9 +14,11 @@ from test.workflows import assert_complete, assert_lso_interaction_success, extr
 @patch("gso.services.lso_client._send_request")
 @patch("gso.workflows.router.terminate_router.NetboxClient.delete_device")
 @patch("gso.workflows.router.terminate_router.infoblox.delete_host_by_ip")
+@patch("gso.workflows.router.terminate_router.KentikClient")
 @patch("gso.workflows.router.terminate_router.LibreNMSClient.remove_device")
 def test_terminate_pe_router_full_success(
     mock_librenms_remove_device,
+    mock_kentik_client,
     mock_delete_host_by_ip,
     mock_delete_device,
     mock_execute_playbook,
@@ -37,6 +40,7 @@ def test_terminate_pe_router_full_success(
         lso_interaction_count += 2
     if update_ibgp_mesh:
         lso_interaction_count += 4
+    mock_kentik_client.return_value = MockedKentikClient
     #  Run workflow
     initial_router_data = [{"subscription_id": product_id}, router_termination_input_form_data]
     result, process_stat, step_log = run_workflow("terminate_router", initial_router_data)
diff --git a/test/workflows/router/test_validate_router.py b/test/workflows/router/test_validate_router.py
index 98ac2ad979e3cbb05ada44233214b3341c92001a..924577e23fa83471e1206a32ea554f5558733ab2 100644
--- a/test/workflows/router/test_validate_router.py
+++ b/test/workflows/router/test_validate_router.py
@@ -4,6 +4,7 @@ import pytest
 from infoblox_client import objects
 
 from gso.products.product_types.router import Router
+from test.services.conftest import MockedKentikClient
 from test.workflows import (
     assert_complete,
     assert_lso_success,
@@ -17,7 +18,9 @@ from test.workflows import (
 @patch("gso.services.lso_client.execute_playbook")
 @patch("gso.services.netbox_client.NetboxClient.get_device_by_name")
 @patch("gso.services.librenms_client.LibreNMSClient.validate_device")
+@patch("gso.services.kentik_client.KentikClient")
 def test_validate_nokia_router_success(
+    mock_kentik_client,
     mock_validate_librenms_device,
     mock_get_device_by_name,
     mock_execute_playbook,
@@ -28,6 +31,7 @@ def test_validate_nokia_router_success(
     geant_partner,
 ):
     mock_validate_librenms_device.return_value = None
+    mock_kentik_client.return_value = MockedKentikClient
     #  Run workflow
     subscription_id = nokia_router_subscription_factory()
     mock_fqdn = Router.from_subscription(subscription_id).router.router_fqdn