diff --git a/Changelog.md b/Changelog.md
index 6a9206e1dd3b14427089b11501ac23acbe81f4ff..0b67a304a44bc42e797276063a4a2af94c52847a 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,5 +1,15 @@
 # Changelog
 
+## [3.12] - 2025-06-26
+- Add a pre-check command to the CLI to get BGP status from LSO.
+- Add an option to the router termination workflow that skips deleting the loopback address from IPAM.
+- Add a redeploy workflow to all Layer 3 services.
+- Add BGP local preference and MED attributes to R&E Layer 3 services.
+- Clean up the confirmation page for LSO interactions.
+- Enable the `modify_note` workflow on all existing products.
+- Improve error handling in the mass base config redeploy task.
+- Allow skipping nightly validation by applying a note of the form `SKIP VALIDATION: <reason>` to a subscription.
+
 ## [3.11] - 2025-06-18
 - Update subscription descriptions for Layer 2 Circuit products.
 
diff --git a/gso/__init__.py b/gso/__init__.py
index 99f1d386081a807a5c7b34527ba890cafe7aab1e..bd869b6657e6253a2888ac6a60f7e27b52276a18 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -65,11 +65,12 @@ def init_gso_app() -> OrchestratorCore:
 
 def init_cli_app() -> typer.Typer:
     """Initialise GSO as a CLI application."""
-    from gso.cli import imports, netbox, schedule  # noqa: PLC0415
+    from gso.cli import imports, lso_calls, netbox, schedule  # noqa: PLC0415
 
     cli_app.add_typer(imports.app, name="import-cli")
     cli_app.add_typer(netbox.app, name="netbox-cli")
     cli_app.add_typer(schedule.app, name="schedule-cli")
+    cli_app.add_typer(lso_calls.app, name="lso-cli")
     return cli_app()
 
 
diff --git a/gso/cli/lso_calls.py b/gso/cli/lso_calls.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8f228efc18d1839d3f3b75fb708c47525d87842
--- /dev/null
+++ b/gso/cli/lso_calls.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+"""CLI for GSO pre-check using LSO remote exec endpoint."""
+
+import json
+import logging
+from pathlib import Path
+
+import click
+import httpx
+import structlog
+import typer
+from orchestrator.db import db
+from orchestrator.db.database import transactional
+from pydantic import ValidationError
+
+from gso import settings
+from gso.db.models import BgpStatusPreCheckTable
+from gso.services.partners import filter_partners_by_name
+from gso.utils.types.lso_response import ExecutableRunResponse
+
+logger = structlog.get_logger(__name__)
+app = typer.Typer()
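+
+#  Exit codes used by this command: 1 for partner-lookup and LSO call failures,
+#  2 for import-file and database failures.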
+
+_IMPORT_FILE_ARG = typer.Argument(
+    ...,
+    exists=True,
+    file_okay=True,
+    dir_okay=False,
+    readable=True,
+    help="Path to the JSON import file to embed in the check",
+)
+
+
+def _validate_partner(partner: str) -> None:
+    if not filter_partners_by_name(name=partner, case_sensitive=True):
+        typer.echo(f"Error: partner '{partner}' not found in database.")
+        raise typer.Exit(1)
+
+
+def _load_import_file(import_file_path: Path) -> str:
+    """Read a JSON file from the given path, return it as a compact JSON string, Exits on error."""
+    try:
+        with import_file_path.open("r", encoding="utf-8") as f:
+            data = json.load(f)
+        return json.dumps(data, separators=(",", ":"))
+    except Exception as e:
+        logger.exception("Failed to read import file")
+        typer.echo(f"Error: could not read or parse '{import_file_path}': {e}")
+        raise typer.Exit(2) from e
+
+
+def _call_lso(
+    host: str,
+    partner: str,
+    import_json_str: str,
+) -> ExecutableRunResponse:
+    oss = settings.load_oss_params()
+    proxy = oss.PROVISIONING_PROXY
+    url = f"{proxy.scheme}://{proxy.api_base}/api/execute/"
+    payload = {
+        "executable_name": "bgp_status_pre_check.py",
+        "args": [host, partner, import_json_str],
+        "is_async": False,
+    }
+    try:
+        resp = httpx.post(url, json=payload, timeout=30)
+        resp.raise_for_status()
+    except Exception as e:
+        logger.exception("LSO call failed")
+        typer.echo(f"Error: failed to call LSO: {e}")
+        raise typer.Exit(1) from e
+
+    try:
+        return ExecutableRunResponse(**resp.json())
+    except ValidationError as e:
+        logger.exception("Invalid response from LSO")
+        typer.echo("Error: invalid JSON returned by LSO:")
+        typer.echo(str(e))
+        raise typer.Exit(1) from e
+
+
+def _print_full(exec_resp: ExecutableRunResponse) -> None:
+    full_json = exec_resp.model_dump(mode="json")
+    typer.echo(typer.style("\nFull LSO response:", fg=typer.colors.GREEN))
+    typer.echo(json.dumps(full_json, indent=2))
+
+
+def _print_parsed_output(exec_resp: ExecutableRunResponse) -> None:
+    output_str = exec_resp.result.output if exec_resp.result else ""
+    typer.echo(typer.style("\nParsed `result.output` as JSON:", fg=typer.colors.CYAN))
+
+    try:
+        parsed = json.loads(output_str)
+        rendered = json.dumps(parsed, indent=2)
+        max_lines = settings.load_oss_params().GENERAL.pre_check_cli_max_output_lines
+        if rendered.count("\n") > max_lines:
+            click.echo_via_pager(rendered)
+        else:
+            typer.echo(rendered)
+    except json.JSONDecodeError:
+        typer.echo("(not valid JSON, raw string below)")
+        typer.echo(output_str)
+
+
+def _maybe_save(
+    host: str,
+    partner: str,
+    exec_resp: ExecutableRunResponse,
+) -> None:
+    prompt = (
+        f"\nHappy with the above output for router '{host}' "
+        f"(partner: {partner})? Save it to the database?"
+    )
+    if not typer.confirm(prompt, default=False):
+        typer.echo("Alright, not saving. You can re-run when ready.")
+        return
+
+    try:
+        with db.database_scope(), transactional(db, logger):
+            record = BgpStatusPreCheckTable(
+                router_fqdn=host,
+                partner=partner,
+                result=exec_resp.result.model_dump(mode="json") if exec_resp.result else {},
+            )
+            db.session.add(record)
+    except Exception as e:
+        logger.exception("Failed to save pre-check record")
+        typer.echo("Error: could not save pre-check to database.")
+        raise typer.Exit(2) from e
+
+    typer.echo("Pre-check result saved.")
+
+
+@app.command()
+def bgp_status_precheck(
+    host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
+    partner: str = typer.Argument(..., help="Partner name for import file path"),
+    import_file_path: Path = _IMPORT_FILE_ARG,
+) -> None:
+    """Trigger the bgp_status_pre-check script on LSO, print results, and optionally save."""
+    _validate_partner(partner)
+    import_json_str = _load_import_file(import_file_path)
+    exec_resp = _call_lso(host, partner, import_json_str)
+    _print_full(exec_resp)
+    _print_parsed_output(exec_resp)
+    _maybe_save(host, partner, exec_resp)
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+    app()
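+
+#  Illustrative standalone invocation (argument values are hypothetical; with a single registered
+#  command, Typer exposes it directly):
+#
+#    python -m gso.cli.lso_calls rt1.ams.geant.org SURF ./pre_check_import.json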
diff --git a/gso/db/models.py b/gso/db/models.py
index c6382b1c81d06f9192ed4f416e186a2d990a5a45..0649003e8d5222536d3ad75b73c23259c451bda9 100644
--- a/gso/db/models.py
+++ b/gso/db/models.py
@@ -4,6 +4,7 @@ import structlog
 from orchestrator.db import UtcTimestamp
 from orchestrator.db.database import BaseModel
 from sqlalchemy import (
+    JSON,
     String,
     text,
 )
@@ -25,3 +26,43 @@ class PartnerTable(BaseModel):
     updated_at = mapped_column(
         UtcTimestamp, server_default=text("current_timestamp"), nullable=False, onupdate=text("current_timestamp")
     )
+
+
+class BgpStatusPreCheckTable(BaseModel):
+    """Database table for storing per router BGP satus pre-check results."""
+
+    __tablename__ = "bgp_status_pre_checks"
+
+    pre_check_id = mapped_column(
+        String,
+        server_default=text("uuid_generate_v4"),
+        primary_key=True,
+    )
+    router_fqdn = mapped_column(
+        String,
+        nullable=False,
+        index=True,
+        comment="The FQDN of the router under check",
+    )
+    partner = mapped_column(
+        String,
+        nullable=False,
+        comment="Name of the partner (used in import file path)",
+    )
+    result = mapped_column(
+        JSON,
+        nullable=False,
+        comment="Raw JSON blob returned by LSO bgp_status_pre_check script",
+    )
+
+    created_at = mapped_column(
+        UtcTimestamp,
+        server_default=text("current_timestamp"),
+        nullable=False,
+    )
+    updated_at = mapped_column(
+        UtcTimestamp,
+        server_default=text("current_timestamp"),
+        nullable=False,
+        onupdate=text("current_timestamp"),
+    )
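+
+
+#  Illustrative lookup of the most recent pre-check for a router (values are hypothetical;
+#  the `query` property comes from the orchestrator's BaseModel):
+#
+#    BgpStatusPreCheckTable.query.filter(
+#        BgpStatusPreCheckTable.router_fqdn == "rt1.ams.geant.org"
+#    ).order_by(BgpStatusPreCheckTable.created_at.desc()).first()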
diff --git a/gso/migrations/env.py b/gso/migrations/env.py
index fb535946527dd0a487e75c52d3f6856b9f4eda34..198b6499028754c353efc9e76c1fb7eaa7822bc4 100644
--- a/gso/migrations/env.py
+++ b/gso/migrations/env.py
@@ -5,7 +5,7 @@ from orchestrator.db.database import BaseModel
 from orchestrator.settings import app_settings
 from sqlalchemy import engine_from_config, pool, text
 
-from gso.db.models import PartnerTable  # noqa: F401
+from gso.db.models import BgpStatusPreCheckTable, PartnerTable  # noqa: F401
 
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
diff --git a/gso/migrations/versions/2025-06-20_24858fd1d805_add_modify_note_workflow_to_existing_.py b/gso/migrations/versions/2025-06-20_24858fd1d805_add_modify_note_workflow_to_existing_.py
new file mode 100644
index 0000000000000000000000000000000000000000..7414647ef21f79530bef8db6e0f022605f218c74
--- /dev/null
+++ b/gso/migrations/versions/2025-06-20_24858fd1d805_add_modify_note_workflow_to_existing_.py
@@ -0,0 +1,70 @@
+"""Add modify note workflow to existing products.
+
+Revision ID: 24858fd1d805
+Revises: 550e3aebc1c5
+Create Date: 2025-06-20 10:51:57.321841
+
+"""
+from alembic import op
+from orchestrator.migrations.helpers import (
+    add_products_to_workflow_by_product_tag,
+    remove_products_from_workflow_by_product_tag,
+)
+
+# revision identifiers, used by Alembic.
+revision = '24858fd1d805'
+down_revision = '550e3aebc1c5'
+branch_labels = None
+depends_on = None
+
+
+product_tags = [
+    "ER",
+    "COP",
+    "EP",
+    "G_IP",
+    "G_PLUS",
+    "IAS",
+    "IMP_ER",
+    "IMP_COP",
+    "IMP_EP",
+    "IMP_G_IP",
+    "IMP_G_PLUS",
+    "IMP_IAS",
+    "IMP_IP_TRUNK",
+    "IMP_LSI",
+    "IMP_LHC",
+    "IMP_OFFICE_RTR",
+    "IMPORTED_OPENGEAR",
+    "IMP_RE_LHCONE",
+    "IMP_RE_PEER",
+    "IMP_RTR",
+    "IMP_SITE",
+    "IMP_SPOP_SWITCH",
+    "IMP_SWITCH",
+    "IPTRUNK",
+    "LSI",
+    "LHC",
+    "OFFICE_ROUTER",
+    "OPENGEAR",
+    "POP_VLAN",
+    "RE_LHCONE",
+    "RE_PEER",
+    "RTR",
+    "SITE",
+    "Super_POP_SWITCH",
+    "SWITCH",
+    "VRF",
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for product in product_tags:
+        add_products_to_workflow_by_product_tag(conn, "modify_note", product)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for product in product_tags:
+        remove_products_from_workflow_by_product_tag(conn, "modify_note", product)
diff --git a/gso/migrations/versions/2025-06-20_7c3094cd282a_remove_obsolete_validation_task.py b/gso/migrations/versions/2025-06-20_7c3094cd282a_remove_obsolete_validation_task.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c64047723ba152582b3c1633b46d8845f5f641a
--- /dev/null
+++ b/gso/migrations/versions/2025-06-20_7c3094cd282a_remove_obsolete_validation_task.py
@@ -0,0 +1,52 @@
+"""Remove obsolete validation task.
+
+Revision ID: 7c3094cd282a
+Revises: 24858fd1d805
+Create Date: 2025-06-20 11:34:08.439370
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from orchestrator.migrations.helpers import create_task, delete_workflow
+
+# revision identifiers, used by Alembic.
+revision = '7c3094cd282a'
+down_revision = '24858fd1d805'
+branch_labels = None
+depends_on = None
+
+old_task = {
+    "name": "task_validate_geant_products",
+    "description": "Validate GEANT products"
+}
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    conn.execute(sa.text(f"""
+DO $$DECLARE wf_id UUID;BEGIN
+  SELECT workflow_id
+  INTO   wf_id
+  FROM   workflows
+  WHERE  NAME = '{old_task["name"]}';
+  
+  DELETE
+  FROM   input_states
+  WHERE  pid IN
+         (
+                SELECT pid
+                FROM   processes
+                WHERE  workflow_id = wf_id);
+  
+  DELETE
+  FROM   processes
+  WHERE  workflow_id = wf_id;
+
+END$$;
+    """))
+    delete_workflow(conn, old_task["name"])
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    create_task(conn, old_task)
diff --git a/gso/migrations/versions/2025-06-20_b2b5137ef0c7_add_attributes_to_r_e_product_block.py b/gso/migrations/versions/2025-06-20_b2b5137ef0c7_add_attributes_to_r_e_product_block.py
new file mode 100644
index 0000000000000000000000000000000000000000..188bcd79a088d6321679189522c9510d780e71a3
--- /dev/null
+++ b/gso/migrations/versions/2025-06-20_b2b5137ef0c7_add_attributes_to_r_e_product_block.py
@@ -0,0 +1,113 @@
+"""Add attributes to R&E product block.
+
+Revision ID: b2b5137ef0c7
+Revises: 7c3094cd282a
+Create Date: 2025-06-20 16:45:01.403416
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'b2b5137ef0c7'
+down_revision = '7c3094cd282a'
+branch_labels = None
+depends_on = None
+
+
+resource_type_descriptions = {
+    "v6_bgp_local_preference": "BGP Local Preference for IPv6",
+    "v4_bgp_local_preference": "BGP Local Preference for IPv4",
+    "v4_bgp_med": "BGP Multi Exit Discriminator for IPv4",
+    "v6_bgp_med": "BGP Multi Exit Discriminator for IPv6",
+}
+product_block_names = ("RAndELHCOneBlock", "RAndEPeerBlock")
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for resource_type, description in resource_type_descriptions.items():
+        conn.execute(
+            sa.text("INSERT INTO resource_types (resource_type, description) VALUES (:rt, :description)"),
+            {"rt": resource_type, "description": description},
+        )
+    for block_name in product_block_names:
+        for resource_type in resource_type_descriptions:
+            conn.execute(
+                sa.text(
+                    "INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ("
+                    "(SELECT product_block_id FROM product_blocks WHERE name = :block_name), "
+                    "(SELECT resource_type_id FROM resource_types WHERE resource_type = :rt))"
+                ),
+                {"block_name": block_name, "rt": resource_type},
+            )
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for block_name in product_block_names:
+        for resource_type in resource_type_descriptions:
+            conn.execute(
+                sa.text(
+                    "DELETE FROM product_block_resource_types WHERE product_block_id = "
+                    "(SELECT product_block_id FROM product_blocks WHERE name = :block_name) "
+                    "AND resource_type_id = "
+                    "(SELECT resource_type_id FROM resource_types WHERE resource_type = :rt)"
+                ),
+                {"block_name": block_name, "rt": resource_type},
+            )
+    conn.execute(sa.text(
+        "DELETE FROM subscription_instance_values WHERE resource_type_id IN "
+        "(SELECT resource_type_id FROM resource_types WHERE resource_type IN "
+        "('v6_bgp_local_preference', 'v4_bgp_local_preference', 'v4_bgp_med', 'v6_bgp_med'))"
+    ))
+    conn.execute(sa.text(
+        "DELETE FROM resource_types WHERE resource_type IN "
+        "('v6_bgp_local_preference', 'v4_bgp_local_preference', 'v4_bgp_med', 'v6_bgp_med')"
+    ))
diff --git a/gso/migrations/versions/2025-06-24_06242291zb30_add_bgp_status_pre_check_table.py b/gso/migrations/versions/2025-06-24_06242291zb30_add_bgp_status_pre_check_table.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5cfb9290139c34b35f560c609fe63672ce3eab3
--- /dev/null
+++ b/gso/migrations/versions/2025-06-24_06242291zb30_add_bgp_status_pre_check_table.py
@@ -0,0 +1,40 @@
+"""Add bgp_status_pre_checks table.
+
+Revision ID: 06242291zb30
+Revises: b2b5137ef0c7
+Create Date: 2025-06-24 11:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from orchestrator.db import UtcTimestamp
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '06242291zb30'
+down_revision = 'b2b5137ef0c7'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        'bgp_status_pre_checks',
+        sa.Column('pre_check_id', sa.String(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('router_fqdn', sa.String(), nullable=False),
+        sa.Column('partner', sa.String(), nullable=False),
+        sa.Column('result', postgresql.JSON(), nullable=False),  # type: ignore[no-untyped-call]
+        sa.Column('created_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'), nullable=False),
+        sa.Column('updated_at',   UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'),
+                  nullable=False, onupdate=sa.text('current_timestamp')),
+        sa.PrimaryKeyConstraint('pre_check_id'),
+    )
+    # index for faster lookups by router FQDN
+    op.create_index('ix_bgp_status_pre_checks_router_fqdn', 'bgp_status_pre_checks', ['router_fqdn'])
+
+
+def downgrade() -> None:
+    # drop the index, then the table
+    op.drop_index('ix_bgp_status_pre_checks_router_fqdn', table_name='bgp_status_pre_checks')
+    op.drop_table('bgp_status_pre_checks')
diff --git a/gso/migrations/versions/2025-06-24_285954f5ec04_add_l3_service_redeploy_workflow.py b/gso/migrations/versions/2025-06-24_285954f5ec04_add_l3_service_redeploy_workflow.py
new file mode 100644
index 0000000000000000000000000000000000000000..36f256ce7a0cae9a1103809024a7f4bdec26a5e1
--- /dev/null
+++ b/gso/migrations/versions/2025-06-24_285954f5ec04_add_l3_service_redeploy_workflow.py
@@ -0,0 +1,42 @@
+"""Add L3 service redeploy workflow.
+
+Revision ID: 285954f5ec04
+Revises: 06242291zb30
+Create Date: 2025-06-24 16:49:06.495691
+
+"""
+from alembic import op
+from orchestrator.migrations.helpers import (
+    add_products_to_workflow_by_product_tag,
+    create_workflow,
+    delete_workflow,
+    remove_products_from_workflow_by_product_tag
+)
+
+# revision identifiers, used by Alembic.
+revision = '285954f5ec04'
+down_revision = '06242291zb30'
+branch_labels = None
+depends_on = None
+
+new_workflow = {
+    "name": "redeploy_l3_core_service",
+    "target": "MODIFY",
+    "description": "Redeploy Layer 3 service",
+    "product_type": "GeantIP"
+}
+additional_product_tags = ["IAS", "LHC", "COP", "RE_LHCONE", "RE_PEER"]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    create_workflow(conn, new_workflow)
+    for product in additional_product_tags:
+        add_products_to_workflow_by_product_tag(conn, new_workflow["name"], product)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for product in additional_product_tags:
+        remove_products_from_workflow_by_product_tag(conn, new_workflow["name"], product)
+    delete_workflow(conn, new_workflow["name"])
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index 98e41280d1f1d47480b64702d2d587452d846d08..decd0de5331c94fb604f09da978a2d5a62708c6c 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -3,7 +3,8 @@
     "public_hostname": "https://gap.geant.org",
     "internal_hostname": "http://gso-api:9000",
     "isis_high_metric": 999999,
-    "environment": "development"
+    "environment": "development",
+    "pre_check_cli_max_output_lines": 50
   },
   "NETBOX": {
     "api": "https://127.0.0.1:8000",
diff --git a/gso/products/product_blocks/r_and_e_lhcone.py b/gso/products/product_blocks/r_and_e_lhcone.py
index e4dda2e22a52953ddf2dcf333640bed98a9ef76e..8b5248678e65b572d18b6ed273aa4c6d0936b51b 100644
--- a/gso/products/product_blocks/r_and_e_lhcone.py
+++ b/gso/products/product_blocks/r_and_e_lhcone.py
@@ -2,12 +2,14 @@
 
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle
+from pydantic import NonNegativeInt
 
 from gso.products.product_blocks.l3_core_service import (
     L3CoreServiceBlock,
     L3CoreServiceBlockInactive,
     L3CoreServiceBlockProvisioning,
 )
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 
 
 class RAndELHCOneBlockInactive(
@@ -16,15 +18,27 @@ class RAndELHCOneBlockInactive(
     """An inactive R&E LHCONE product block. See `RAndELHCOneBlock`."""
 
     l3_core: L3CoreServiceBlockInactive
+    v4_bgp_local_preference: NonNegativeInt = 100
+    v4_bgp_med: MultiExitDiscriminator = "igp"
+    v6_bgp_local_preference: NonNegativeInt = 100
+    v6_bgp_med: MultiExitDiscriminator = "igp"
 
 
 class RAndELHCOneBlockProvisioning(RAndELHCOneBlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
     """A provisioning R&E LHCONE product block. See `RAndELHCOneBlock`."""
 
     l3_core: L3CoreServiceBlockProvisioning
+    v4_bgp_local_preference: NonNegativeInt
+    v4_bgp_med: MultiExitDiscriminator
+    v6_bgp_local_preference: NonNegativeInt
+    v6_bgp_med: MultiExitDiscriminator
 
 
 class RAndELHCOneBlock(RAndELHCOneBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
     """An active R&E LHCONE product block."""
 
     l3_core: L3CoreServiceBlock
+    v4_bgp_local_preference: NonNegativeInt
+    v4_bgp_med: MultiExitDiscriminator
+    v6_bgp_local_preference: NonNegativeInt
+    v6_bgp_med: MultiExitDiscriminator
diff --git a/gso/products/product_blocks/r_and_e_peer.py b/gso/products/product_blocks/r_and_e_peer.py
index 8fbbeb84e62ee88b944eeb4782ee1800d723a405..c83222d7371abebedae61e6e901feb39c6416ba2 100644
--- a/gso/products/product_blocks/r_and_e_peer.py
+++ b/gso/products/product_blocks/r_and_e_peer.py
@@ -2,12 +2,14 @@
 
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle
+from pydantic import NonNegativeInt
 
 from gso.products.product_blocks.l3_core_service import (
     L3CoreServiceBlock,
     L3CoreServiceBlockInactive,
     L3CoreServiceBlockProvisioning,
 )
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 
 
 class RAndEPeerBlockInactive(
@@ -16,15 +18,27 @@ class RAndEPeerBlockInactive(
     """An inactive R&E Peer product block. See `RAndEPeerBlock`."""
 
     l3_core: L3CoreServiceBlockInactive
+    v4_bgp_local_preference: NonNegativeInt = 100
+    v4_bgp_med: MultiExitDiscriminator = "igp"
+    v6_bgp_local_preference: NonNegativeInt = 100
+    v6_bgp_med: MultiExitDiscriminator = "igp"
 
 
 class RAndEPeerBlockProvisioning(RAndEPeerBlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
     """A provisioning R&E Peer product block. See `RAndEPeerBlock`."""
 
     l3_core: L3CoreServiceBlockProvisioning
+    v4_bgp_local_preference: NonNegativeInt
+    v4_bgp_med: MultiExitDiscriminator
+    v6_bgp_local_preference: NonNegativeInt
+    v6_bgp_med: MultiExitDiscriminator
 
 
 class RAndEPeerBlock(RAndEPeerBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
     """An active R&E Peer product block."""
 
     l3_core: L3CoreServiceBlock
+    v4_bgp_local_preference: NonNegativeInt
+    v4_bgp_med: MultiExitDiscriminator
+    v6_bgp_local_preference: NonNegativeInt
+    v6_bgp_med: MultiExitDiscriminator
diff --git a/gso/schedules/validate_products.py b/gso/schedules/validate_products.py
index efc441ad21f3a4894b82a030e99fd7f0653e0a99..1e693fe3ac48458741710d6a46be65bf548d5c25 100644
--- a/gso/schedules/validate_products.py
+++ b/gso/schedules/validate_products.py
@@ -4,12 +4,10 @@ from celery import shared_task
 from orchestrator.services.processes import start_process
 
 from gso.schedules.scheduling import CronScheduleConfig, scheduler
-from gso.services.processes import count_incomplete_validate_products
 
 
 @shared_task
 @scheduler(CronScheduleConfig(name="Validate Products and inactive subscriptions", minute="30", hour="2"))
 def validate_products() -> None:
     """Validate all products."""
-    if count_incomplete_validate_products() == 0:
-        start_process("task_validate_geant_products")
+    start_process("task_validate_products")
diff --git a/gso/schedules/validate_subscriptions.py b/gso/schedules/validate_subscriptions.py
index db3573802cd6d9fb29438068fa3737a57f0b4bf9..fb728a1a967d1b24156bd2d48f6cf17201e36a5c 100644
--- a/gso/schedules/validate_subscriptions.py
+++ b/gso/schedules/validate_subscriptions.py
@@ -7,6 +7,8 @@ From this list, each workflow is selected that meets the following:
     * The name of the workflow follows the pattern `validate_*`.
 """
 
+import re
+
 import structlog
 from celery import shared_task
 from orchestrator.services.processes import get_execution_context
@@ -37,6 +39,17 @@ def validate_subscriptions() -> None:
         return
 
     for subscription in subscriptions:
+        if re.search(r"SKIP VALIDATION: .+", subscription.note or ""):
+            #  The subscription is marked to skip validation altogether. We continue to the next subscription.
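+            #  An illustrative matching note: "SKIP VALIDATION: awaiting partner maintenance window".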
+            logger.warning(
+                "Manually skipped validation workflows for a subscription.",
+                product=subscription.product.name,
+                subscription_id=subscription.subscription_id,
+                subscription_description=subscription.description,
+                skip_reason=subscription.note,
+            )
+            continue
+
         found_a_validation_workflow = False
         for workflow in subscription.product.workflows:
             if workflow.target == Target.SYSTEM and workflow.name.startswith("validate_"):
diff --git a/gso/services/lso_client.py b/gso/services/lso_client.py
index 8cfa6785a55d693283093ed80872a999908cf9b9..23c2ecf47b106b05d2eb48970460da2da9f47e91 100644
--- a/gso/services/lso_client.py
+++ b/gso/services/lso_client.py
@@ -15,7 +15,7 @@ from orchestrator.utils.errors import ProcessFailureError
 from orchestrator.workflow import Step, StepList, begin, callback_step, conditional, inputstep
 from pydantic import ConfigDict
 from pydantic_forms.types import FormGenerator, State, UUIDstr
-from pydantic_forms.validators import Label, LongText, ReadOnlyField
+from pydantic_forms.validators import Label, LongText
 from unidecode import unidecode
 
 from gso import settings
@@ -146,13 +146,11 @@ def _show_results(state: State) -> FormGenerator:
     class ConfirmRunPage(SubmitFormPage):
         model_config = ConfigDict()
 
-        if "lso_result_extra_label" in state:
-            extra_label: Label = state["lso_result_extra_label"]
-        run_status: ReadOnlyField(state["callback_result"]["status"], default_type=str)  # type: ignore[valid-type]
-        run_results: ReadOnlyField(json.dumps(state["callback_result"], indent=4), default_type=LongText)  # type: ignore[valid-type]
+        run_status: Label = f"Callback result: {state["callback_result"]["status"]}"
+        run_results: LongText = json.dumps(state["callback_result"], indent=4)
 
     yield ConfirmRunPage
-    return state
+    return {}
 
 
 @step("Clean up keys from state")
@@ -160,8 +158,6 @@ def _clean_state() -> State:
     return {
         "__remove_keys": [
             "run_results",
-            "lso_result_title",
-            "lso_result_extra_label",
             "callback_result",
             "playbook_name",
             "callback_route",
@@ -198,10 +194,6 @@ def lso_interaction(provisioning_step: Step) -> StepList:
     to provision service subscriptions. If the playbook fails, this step will also fail, allowing for the user to retry
     provisioning from the UI.
 
-    Optionally, the keys `lso_result_title` and `lso_result_extra_label` can be added to the state before running
-    this interaction. They will be used to customise the input step that shows the outcome of the LSO
-    interaction.
-
     Args:
         provisioning_step: A workflow step that performs an operation remotely using the provisioning proxy.
 
@@ -216,7 +208,6 @@ def lso_interaction(provisioning_step: Step) -> StepList:
             >> callback_step(
                 name=RUNNING_ANSIBLE_PLAYBOOK_STEP_NAME, action_step=_execute_playbook, validate_step=_evaluate_results
             )
-            >> step("Inject result title")(lambda: {"lso_result_title": provisioning_step.name})
             >> _show_results
         )
         >> _clean_state
@@ -248,7 +239,6 @@ def indifferent_lso_interaction(provisioning_step: Step) -> StepList:
             >> callback_step(
                 name=RUNNING_ANSIBLE_PLAYBOOK_STEP_NAME, action_step=_execute_playbook, validate_step=_ignore_results
             )
-            >> step("Inject result title")(lambda: {"lso_result_title": provisioning_step.name})
             >> _show_results
         )
         >> _clean_state
diff --git a/gso/services/processes.py b/gso/services/processes.py
index 30caa96fddf3d997fa9ac91254d5934f5707a830..22bfe4e0d7cdcf4aa08141d2a3bce5c78c07ff8d 100644
--- a/gso/services/processes.py
+++ b/gso/services/processes.py
@@ -18,19 +18,6 @@ def get_processes_by_workflow_name(workflow_name: str) -> Query:
     return ProcessTable.query.join(WorkflowTable).filter(WorkflowTable.name == workflow_name)
 
 
-def count_incomplete_validate_products() -> int:
-    """Count the number of incomplete validate_geant_products processes.
-
-    Returns:
-        The count of incomplete 'validate_geant_products' processes.
-    """
-    return (
-        get_processes_by_workflow_name("validate_geant_products")
-        .filter(ProcessTable.last_status != ProcessStatus.COMPLETED)
-        .count()
-    )
-
-
 def get_failed_tasks() -> list[ProcessTable]:
     """Get all tasks that have failed."""
     return ProcessTable.query.filter(
diff --git a/gso/settings.py b/gso/settings.py
index 8c1b0ec8e2d00c52e89c133ee7474d78289face3..7bcb5549f5c41b0ed2029b91b2c44dd24c211747 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -41,13 +41,16 @@ class GeneralParams(BaseSettings):
     """The hostname of GSO that is for internal use, such as the provisioning proxy."""
     isis_high_metric: int
     environment: EnvironmentEnum
+    """The environment in which GSO is running, such as development, test, uat, or production."""
+    pre_check_cli_max_output_lines: int = 50
+    """The maximum number of lines to print when displaying the output of a bgp_status_precheck CLI command."""
 
 
 class CelerySettings(BaseSettings):
     """Parameters for Celery."""
 
     broker_url: str = "redis://localhost:6379/0"
-    result_backend: str = "rpc://localhost:6379/0"
+    result_backend: str = "redis://localhost:6379/0"
     result_expires: int = 3600
 
     class Config:
diff --git a/gso/tasks/massive_redeploy_base_config.py b/gso/tasks/massive_redeploy_base_config.py
index 011df8dac042af59d2d580484917c947852c59fb..7dda4e42cb4e12591bf38cbc41ee7466d124ddb7 100644
--- a/gso/tasks/massive_redeploy_base_config.py
+++ b/gso/tasks/massive_redeploy_base_config.py
@@ -23,10 +23,11 @@ def process_one_router(router_id: UUIDstr, tt_number: TTNumber) -> tuple[str, bo
 
     Returns (router_fqdn, succeeded:bool, message:str).
     """
-    router_fqdn = Router.from_subscription(router_id).router.router_fqdn
+    router_fqdn = router_id  # fall back to the subscription ID until the FQDN can be resolved
     succeeded = False
     message = ""
     try:
+        router_fqdn = Router.from_subscription(router_id).router.router_fqdn
         pid = start_process(
             "redeploy_base_config",
             user_inputs=[
@@ -50,7 +51,7 @@ def process_one_router(router_id: UUIDstr, tt_number: TTNumber) -> tuple[str, bo
     except FormValidationError as e:
         message = f"Validation error: {e}"
     except Exception as e:  # noqa: BLE001
-        message = f"Unexpected error: {e}"
+        message = f"Unexpected error: {e}, router_fqdn: {router_fqdn}"
 
     return router_fqdn, succeeded, message
 
diff --git a/gso/translations/en-GB.json b/gso/translations/en-GB.json
index e3ea7bc6c1dd75b253f09bbec50844103087f84b..d2fa50f4813747a140061cda8f108434fe7c8ea4 100644
--- a/gso/translations/en-GB.json
+++ b/gso/translations/en-GB.json
@@ -63,6 +63,8 @@
             "v4_bgp_is_passive":  "IPv4 - BGP is passive",
             "v4_bgp_send_default_route": "IPv4 - BGP send default route",
             "v4_bgp_add_v4_multicast":  "IPv4 - BGP add multicast",
+            "v4_bgp_local_preference": "IPv4 - BGP local preference",
+            "v4_bgp_med": "IPv4 - BGP Multi Exit Discriminator",
             "v6_bfd_enabled": "IPv6 - BFD enabled",
             "v6_bfd_multiplier":  "IPv6 - BFD multiplier",
             "v6_bfd_interval_rx": "IPv6 - BFD interval RX",
@@ -76,7 +78,9 @@
             "v6_bgp_ttl_security":  "IPv6 - BGP TTL security",
             "v6_bgp_is_passive": "IPv6 - BGP is passive",
             "v6_bgp_send_default_route":  "IPv6 - BGP send default route",
-            "v6_bgp_add_v6_multicast": "IPv6 - BGP add multicast"
+            "v6_bgp_add_v6_multicast": "IPv6 - BGP add multicast",
+            "v6_bgp_local_preference": "IPv6 - BGP local preference",
+            "v6_bgp_med": "IPv6 - BGP Multi Exit Discriminator"
         }
     },
     "workflow": {
@@ -155,6 +159,7 @@
         "modify_r_and_e_lhcone": "Modify R&E LHCONE",
         "promote_p_to_pe": "Promote P to PE",
         "redeploy_base_config": "Redeploy base config",
+        "redeploy_l3_core_service": "Redeploy Layer 3 service",
         "redeploy_vrf": "Redeploy VRF router list",
         "task_check_site_connectivity": "Check NETCONF connectivity of a Site",
         "task_clean_old_tasks": "Remove old cleanup tasks",
@@ -163,7 +168,6 @@
         "task_modify_partners": "Modify partner task",
         "task_redeploy_base_config": "Redeploy base config on multiple routers",
         "task_send_email_notifications": "Send email notifications for failed tasks",
-        "task_validate_geant_products": "Validation task for GEANT products",
         "terminate_edge_port": "Terminate Edge Port",
         "terminate_iptrunk": "Terminate IP Trunk",
         "terminate_geant_ip": "Terminate GÉANT IP",
diff --git a/gso/utils/shared_enums.py b/gso/utils/shared_enums.py
index dfbf5361d696896f7c14746ff224032c7e973fc1..34599a0fe3585b4de9b020f4a245f53bfd7baefc 100644
--- a/gso/utils/shared_enums.py
+++ b/gso/utils/shared_enums.py
@@ -41,6 +41,8 @@ class APType(strEnum):
     """Backup."""
     LOAD_BALANCED = "LOAD_BALANCED"
     """Load-balanced."""
+    IGNORE = "IGNORE"
+    """Ignored."""
 
 
 class SBPType(strEnum):
diff --git a/gso/utils/types/lso_response.py b/gso/utils/types/lso_response.py
new file mode 100644
index 0000000000000000000000000000000000000000..d67808a0ce162056ff3a9f9f2905d94be5f477b8
--- /dev/null
+++ b/gso/utils/types/lso_response.py
@@ -0,0 +1,28 @@
+"""This module defines types used for pre-check operations."""
+
+from enum import StrEnum
+from uuid import UUID
+
+from pydantic import BaseModel
+
+
+class JobStatus(StrEnum):
+    """Enumeration of possible job statuses."""
+
+    SUCCESSFUL = "successful"
+    FAILED = "failed"
+
+
+class ExecutionResult(BaseModel):
+    """Model for capturing the result of an executable run."""
+
+    output: str
+    return_code: int
+    status: JobStatus
+
+
+class ExecutableRunResponse(BaseModel):
+    """Response for running an arbitrary executable."""
+
+    job_id: UUID
+    #  A run may come back without a result, hence optional; callers should guard for None.
+    result: ExecutionResult | None = None
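+
+
+#  Illustrative payload this model parses (shape inferred from the CLI in gso/cli/lso_calls.py;
+#  the "output" field carries the executable's stdout, often itself a JSON document):
+#
+#    {
+#        "job_id": "0b9c6a1e-8f4d-4f8e-9c2a-1d2e3f4a5b6c",
+#        "result": {"output": "{...}", "return_code": 0, "status": "successful"}
+#    }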
diff --git a/gso/utils/types/multi_exit_discriminator.py b/gso/utils/types/multi_exit_discriminator.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0dd02c0978f158f57b2ecd3e0e11263263835a8
--- /dev/null
+++ b/gso/utils/types/multi_exit_discriminator.py
@@ -0,0 +1,22 @@
+"""Type definition for a BGP Multi Exit Discriminator."""
+
+import contextlib
+from typing import Annotated
+
+from pydantic import AfterValidator, BeforeValidator
+
+
+def _multi_exit_discriminator_valid(value: str) -> str:
+    with contextlib.suppress(ValueError):
+        int_value = int(value)
+        if int_value >= 0:
+            return value
+
+    if value in {"igp", "min-igp"}:
+        return value
+
+    msg = "Multi Exit Discriminator must be either a positive integer, 'igp', or 'min-igp'"
+    raise ValueError(msg)
+
+
+MultiExitDiscriminator = Annotated[str, BeforeValidator(str), AfterValidator(_multi_exit_discriminator_valid)]
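+
+#  Illustrative behaviour, checked via pydantic's TypeAdapter (example values only):
+#
+#    TypeAdapter(MultiExitDiscriminator).validate_python(10)     # -> "10"
+#    TypeAdapter(MultiExitDiscriminator).validate_python("igp")  # -> "igp"
+#    TypeAdapter(MultiExitDiscriminator).validate_python("-5")   # raises ValidationError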
diff --git a/gso/workflows/__init__.py b/gso/workflows/__init__.py
index 3456115a4439eb0bdfe98e2291b9bcb65bde0952..d37cd65309eb7fdd4da0edfe5c8af299c77a7890 100644
--- a/gso/workflows/__init__.py
+++ b/gso/workflows/__init__.py
@@ -104,7 +104,6 @@ LazyWorkflowInstance("gso.workflows.opengear.import_opengear", "import_opengear"
 
 #  Tasks
 LazyWorkflowInstance("gso.workflows.tasks.send_email_notifications", "task_send_email_notifications")
-LazyWorkflowInstance("gso.workflows.tasks.validate_geant_products", "task_validate_geant_products")
 LazyWorkflowInstance("gso.workflows.tasks.create_partners", "task_create_partners")
 LazyWorkflowInstance("gso.workflows.tasks.modify_partners", "task_modify_partners")
 LazyWorkflowInstance("gso.workflows.tasks.delete_partners", "task_delete_partners")
@@ -121,6 +120,9 @@ LazyWorkflowInstance("gso.workflows.edge_port.create_imported_edge_port", "creat
 LazyWorkflowInstance("gso.workflows.edge_port.import_edge_port", "import_edge_port")
 LazyWorkflowInstance("gso.workflows.edge_port.migrate_edge_port", "migrate_edge_port")
 
+# All L3 core services
+LazyWorkflowInstance("gso.workflows.l3_core_service.redeploy_l3_core_service", "redeploy_l3_core_service")
+
 #  IAS workflows
 LazyWorkflowInstance("gso.workflows.l3_core_service.ias.create_ias", "create_ias")
 LazyWorkflowInstance("gso.workflows.l3_core_service.ias.modify_ias", "modify_ias")
diff --git a/gso/workflows/l3_core_service/base_create_l3_core_service.py b/gso/workflows/l3_core_service/base_create_l3_core_service.py
index 90221ec0ed482de386762c097c88eb7a9e8298e0..0fb2c84357aa58e891f6578ae0c99b0de91838cc 100644
--- a/gso/workflows/l3_core_service/base_create_l3_core_service.py
+++ b/gso/workflows/l3_core_service/base_create_l3_core_service.py
@@ -38,12 +38,17 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
         model_config = ConfigDict(title=f"{product_name} - Select partner")
 
         tt_number: TTNumber
+        label_a: Label = Field(f"Please select the partner for this {product_name}.", exclude=True)
         partner: partner_choice()  # type: ignore[valid-type]
+        label_b: Label = Field(
+            f"Please select the partner who owns the Edge Port this {product_name} will be deployed on.", exclude=True
+        )
+        edge_port_partner: partner_choice()  # type: ignore[valid-type]
 
     initial_user_input = yield CreateL3CoreServiceForm
 
     class EdgePortSelection(BaseModel):
-        edge_port: active_edge_port_selector(partner_id=initial_user_input.partner)  # type: ignore[valid-type]
+        edge_port: active_edge_port_selector(partner_id=initial_user_input.edge_port_partner)  # type: ignore[valid-type]
         ap_type: APType
         custom_service_name: str | None = None
 
diff --git a/gso/workflows/l3_core_service/base_migrate_l3_core_service.py b/gso/workflows/l3_core_service/base_migrate_l3_core_service.py
index de2cae8950883b8c43affa8631339902cf09bc0e..8a18543b36302693aa0a2e9bfb1078abde5cfdb1 100644
--- a/gso/workflows/l3_core_service/base_migrate_l3_core_service.py
+++ b/gso/workflows/l3_core_service/base_migrate_l3_core_service.py
@@ -25,6 +25,7 @@ from gso.products.product_types.edge_port import EdgePort
 from gso.services.lso_client import LSOState
 from gso.services.partners import get_partner_by_id
 from gso.services.subscriptions import get_active_edge_port_subscriptions
+from gso.utils.helpers import partner_choice
 from gso.utils.types.tt_number import TTNumber
 from gso.utils.workflow_steps import IS_HUMAN_INITIATED_WF_KEY, MOODI_EXTRA_KWARGS_KEY, SKIP_MOODI_KEY
 from gso.workflows.shared import create_summary_form
@@ -33,9 +34,17 @@ from gso.workflows.shared import create_summary_form
 def initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
     """Gather input from the operator on what destination Edge Ports this L3 Core Service should be migrated to."""
     subscription = SubscriptionModel.from_subscription(subscription_id)
-    partner_id = subscription.customer_id
     ap_list = subscription.l3_core.ap_list  # type: ignore[attr-defined]
 
+    class PartnerSelectionForm(FormPage):
+        model_config = ConfigDict(title=f"Migrating a(n) {subscription.product.name} AP to a new Edge Port")
+        label: Label = Field(
+            "Please select the partner who owns the Edge Port which we are migrating to.", exclude=True
+        )
+        edge_port_partner: partner_choice() = subscription.customer_id  # type: ignore[valid-type]
+
+    partner_input = yield PartnerSelectionForm
+
     current_ep_list = {
         str(
             ap.sbp.edge_port.owner_subscription_id
@@ -51,14 +60,16 @@ def initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
         model_config = ConfigDict(title=f"Migrating a(n) {subscription.product.name} AP to a new Edge Port")
 
         tt_number: TTNumber
-        divider: Divider = Field(None, exclude=True)
+        divider_a: Divider = Field(None, exclude=True)
         skip_moodi: bool = False
         is_human_initiated_wf: bool = True
-        source_edge_port: source_edge_port_selector | str  # type: ignore[valid-type]
         expected_number_of_ipv4_received_routes: int | None = None
         expected_number_of_ipv4_advertised_routes: int | None = None
         expected_number_of_ipv6_received_routes: int | None = None
         expected_number_of_ipv6_advertised_routes: int | None = None
+        divider_b: Divider = Field(None, exclude=True)
+        label: Label = Field("Source Edge Port", exclude=True)
+        source_edge_port: source_edge_port_selector | str  # type: ignore[valid-type]
 
     source_ep_input = yield L3CoreServiceSourceEdgePortSelectionForm
 
@@ -78,7 +89,8 @@ def initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
         )
 
     class L3CoreServiceEdgePortSelectionForm(FormPage):
-        destination_edge_port: _destination_edge_port_selector(partner_id) | str  # type: ignore[valid-type]
+        label: Label = Field("Destination Edge Port", exclude=True)
+        destination_edge_port: _destination_edge_port_selector(partner_input.edge_port_partner) | str  # type: ignore[valid-type]
 
     destination_ep_user_input = yield L3CoreServiceEdgePortSelectionForm
     if source_ep_input.is_human_initiated_wf:
diff --git a/gso/workflows/l3_core_service/base_modify_l3_core_service.py b/gso/workflows/l3_core_service/base_modify_l3_core_service.py
index e16364ad1095734f4904c7288d54f46f9dbd808c..4b04a4c6a2bb0c19fb8189ac1dc382c4af4483ea 100644
--- a/gso/workflows/l3_core_service/base_modify_l3_core_service.py
+++ b/gso/workflows/l3_core_service/base_modify_l3_core_service.py
@@ -15,6 +15,7 @@ from gso.products.product_blocks.l3_core_service import AccessPort
 from gso.products.product_blocks.service_binding_port import BFDSettings, ServiceBindingPort
 from gso.products.product_types.edge_port import EdgePort
 from gso.services.subscriptions import generate_unique_id, get_active_edge_port_subscriptions
+from gso.utils.helpers import partner_choice
 from gso.utils.shared_enums import APType, SBPType
 from gso.utils.types.geant_ids import IMPORTED_GS_ID
 from gso.utils.types.ip_address import IPv4AddressType, IPv4Netmask, IPv6AddressType, IPv6Netmask
@@ -115,6 +116,15 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     match initial_input.operation:
         case Operation.ADD:
 
+            class PartnerSelectionForm(FormPage):
+                model_config = ConfigDict(title=f"Add an Edge Port to a {product_name}")
+                label: Label = Field(
+                    "Please select the partner who owns the Edge Port which is to be added.", exclude=True
+                )
+                edge_port_partner: partner_choice() = subscription.customer_id  # type: ignore[valid-type]
+
+            partner_input = yield PartnerSelectionForm
+
             class AccessPortListItem(BaseModel):
                 edge_port: str
                 ap_type: str
@@ -122,7 +132,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
 
             def available_new_edge_port_selector() -> TypeAlias:
                 """Generate a dropdown selector for choosing an active Edge Port in an input form."""
-                edge_ports = get_active_edge_port_subscriptions(partner_id=subscription.customer_id)
+                edge_ports = get_active_edge_port_subscriptions(partner_id=partner_input.edge_port_partner)
 
                 options = {
                     str(edge_port.subscription_id): edge_port.description
diff --git a/gso/workflows/l3_core_service/r_and_e_lhcone/create_imported_r_and_e_lhcone.py b/gso/workflows/l3_core_service/r_and_e_lhcone/create_imported_r_and_e_lhcone.py
index a4447da019a640d0c0832ebf8f5baa5be0a1a2c9..f3f50c825fa6440004cab5e26a62f90586a05ec5 100644
--- a/gso/workflows/l3_core_service/r_and_e_lhcone/create_imported_r_and_e_lhcone.py
+++ b/gso/workflows/l3_core_service/r_and_e_lhcone/create_imported_r_and_e_lhcone.py
@@ -1,19 +1,42 @@
 """A creation workflow for adding an existing Imported R&E LHCONE to the service database."""
 
 from orchestrator import workflow
+from orchestrator.forms import SubmitFormPage
 from orchestrator.targets import Target
 from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, done, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator
 
 from gso.products import ProductName
 from gso.products.product_types.r_and_e_lhcone import ImportedRAndELHCOneInactive
 from gso.services.partners import get_partner_by_name
 from gso.services.subscriptions import get_product_id_by_name
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.workflows.l3_core_service.base_create_imported_l3_core_service import (
-    initial_input_form_generator,
+    ServiceBindingPort,
     initialize_subscription,
 )
+from gso.workflows.l3_core_service.r_and_e_lhcone.shared import update_r_and_e_lhcone_subscription_model
+from gso.workflows.l3_core_service.shared import L3ProductNameType
+
+
+def initial_input_form_generator() -> FormGenerator:
+    """Initial input form generator for creating a new imported R&E LHCOne subscription."""
+
+    class ImportL3CoreServiceForm(SubmitFormPage):
+        partner: str
+        service_binding_ports: list[ServiceBindingPort]
+        product_name: L3ProductNameType
+        v4_bgp_local_preference: NonNegativeInt = 100
+        v4_bgp_med: MultiExitDiscriminator = "igp"
+        v6_bgp_local_preference: NonNegativeInt = 100
+        v6_bgp_med: MultiExitDiscriminator = "igp"
+
+    user_input = yield ImportL3CoreServiceForm
+
+    return user_input.model_dump()
 
 
 @step("Create subscription")
@@ -37,6 +60,7 @@ def create_imported_r_and_e_lhcone() -> StepList:
         >> create_subscription
         >> store_process_subscription(Target.CREATE)
         >> initialize_subscription
+        >> update_r_and_e_lhcone_subscription_model
         >> set_status(SubscriptionLifecycle.ACTIVE)
         >> resync
         >> done
diff --git a/gso/workflows/l3_core_service/r_and_e_lhcone/create_r_and_e_lhcone.py b/gso/workflows/l3_core_service/r_and_e_lhcone/create_r_and_e_lhcone.py
index 2063c415f9f2ed1e6cb49a3b32ea4b5fabefecc7..195c70bdb84c0a8a3fa551569336140db1798d0d 100644
--- a/gso/workflows/l3_core_service/r_and_e_lhcone/create_r_and_e_lhcone.py
+++ b/gso/workflows/l3_core_service/r_and_e_lhcone/create_r_and_e_lhcone.py
@@ -1,14 +1,17 @@
 """Create R&E LHCONE subscription workflow."""
 
+from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, done, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
-from pydantic_forms.types import State, UUIDstr
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator, State, UUIDstr
 
 from gso.products.product_types.r_and_e_lhcone import RAndELHCOneInactive
 from gso.services.lso_client import lso_interaction
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.utils.workflow_steps import prompt_sharepoint_checklist_url, start_moodi, stop_moodi
 from gso.workflows.l3_core_service.base_create_l3_core_service import (
     check_bgp_peers,
@@ -16,12 +19,31 @@ from gso.workflows.l3_core_service.base_create_l3_core_service import (
     create_new_sharepoint_checklist,
     deploy_bgp_peers_dry,
     deploy_bgp_peers_real,
-    initial_input_form_generator,
     initialize_subscription,
     provision_sbp_dry,
     provision_sbp_real,
     update_dns_records,
 )
+from gso.workflows.l3_core_service.base_create_l3_core_service import (
+    initial_input_form_generator as base_initial_input_form_generator,
+)
+from gso.workflows.l3_core_service.r_and_e_lhcone.shared import update_r_and_e_lhcone_subscription_model
+
+
+def initial_input_form_generator(product_name: str) -> FormGenerator:
+    """Initial input form generator for creating a new R&E LHCOne subscription."""
+    initial_generator = base_initial_input_form_generator(product_name)
+    initial_user_input = yield from initial_generator
+
+    # Additional R&E LHCOne step
+    class RAndELHCOneExtraForm(FormPage):
+        v4_bgp_local_preference: NonNegativeInt = 100
+        v4_bgp_med: MultiExitDiscriminator = "igp"
+        v6_bgp_local_preference: NonNegativeInt = 100
+        v6_bgp_med: MultiExitDiscriminator = "igp"
+
+    r_and_e_lhcone_extra_form = yield RAndELHCOneExtraForm
+    return initial_user_input | r_and_e_lhcone_extra_form.model_dump()
 
 
 @step("Create subscription")
@@ -44,6 +66,7 @@ def create_r_and_e_lhcone() -> StepList:
         >> create_subscription
         >> store_process_subscription(Target.CREATE)
         >> initialize_subscription
+        >> update_r_and_e_lhcone_subscription_model
         >> start_moodi()
         >> lso_interaction(provision_sbp_dry)
         >> lso_interaction(provision_sbp_real)
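
The import alias above enables a wrapping pattern worth spelling out: `yield from` replays every page of the base generator and evaluates to its return value, after which one extra page is appended and the two result dicts are merged. A minimal sketch under those assumptions:

```python
from orchestrator.forms import FormPage
from pydantic_forms.types import FormGenerator


def base_form() -> FormGenerator:
    class BasePage(FormPage):
        name: str

    base_input = yield BasePage
    return base_input.model_dump()


def extended_form() -> FormGenerator:
    base_result = yield from base_form()  # replays BasePage, keeps its result

    class ExtraPage(FormPage):
        bgp_local_preference: int = 100

    extra = yield ExtraPage
    return base_result | extra.model_dump()
```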
diff --git a/gso/workflows/l3_core_service/r_and_e_lhcone/modify_r_and_e_lhcone.py b/gso/workflows/l3_core_service/r_and_e_lhcone/modify_r_and_e_lhcone.py
index 7182a48b221894630cd96da21d426ad1ea19dceb..82bf3c71bfeab4a6fda595cc550bf0a96aceaff9 100644
--- a/gso/workflows/l3_core_service/r_and_e_lhcone/modify_r_and_e_lhcone.py
+++ b/gso/workflows/l3_core_service/r_and_e_lhcone/modify_r_and_e_lhcone.py
@@ -1,11 +1,16 @@
 """Modification workflow for an R&E LHCONE subscription."""
 
 from orchestrator import begin, conditional, done, workflow
+from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.workflow import StepList
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator, UUIDstr
 
+from gso.products.product_types.r_and_e_lhcone import RAndELHCOne
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.workflows.l3_core_service.base_modify_l3_core_service import (
     Operation,
     create_new_sbp,
@@ -13,11 +18,30 @@ from gso.workflows.l3_core_service.base_modify_l3_core_service import (
     modify_existing_sbp,
     remove_old_sbp,
 )
+from gso.workflows.l3_core_service.r_and_e_lhcone.shared import update_r_and_e_lhcone_subscription_model
+
+
+def modify_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
+    """Initial form generator for modifying the custom attributes of an existing IAS subscription."""
+    initial_generator = initial_input_form_generator(subscription_id)
+    initial_user_input = yield from initial_generator
+
+    subscription = RAndELHCOne.from_subscription(subscription_id)
+
+    # Additional R&E LHCOne step
+    class RAndELHCOneExtraForm(FormPage):
+        v4_bgp_local_preference: NonNegativeInt = subscription.r_and_e_lhcone.v4_bgp_local_preference
+        v4_bgp_med: MultiExitDiscriminator = subscription.r_and_e_lhcone.v4_bgp_med
+        v6_bgp_local_preference: NonNegativeInt = subscription.r_and_e_lhcone.v6_bgp_local_preference
+        v6_bgp_med: MultiExitDiscriminator = subscription.r_and_e_lhcone.v6_bgp_med
+
+    r_and_e_lhcone_extra_form = yield RAndELHCOneExtraForm
+    return initial_user_input | r_and_e_lhcone_extra_form.model_dump()
 
 
 @workflow(
     "Modify R&E LHCONE",
-    initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
+    initial_input_form=wrap_modify_initial_input_form(modify_input_form_generator),
     target=Target.MODIFY,
 )
 def modify_r_and_e_lhcone() -> StepList:
@@ -30,6 +54,7 @@ def modify_r_and_e_lhcone() -> StepList:
         begin
         >> store_process_subscription(Target.MODIFY)
         >> unsync
+        >> update_r_and_e_lhcone_subscription_model
         >> access_port_is_added(create_new_sbp)
         >> access_port_is_removed(remove_old_sbp)
         >> access_port_is_modified(modify_existing_sbp)
diff --git a/gso/workflows/l3_core_service/r_and_e_lhcone/shared.py b/gso/workflows/l3_core_service/r_and_e_lhcone/shared.py
new file mode 100644
index 0000000000000000000000000000000000000000..532eb69b267549899810d07c22a4f50fd7038105
--- /dev/null
+++ b/gso/workflows/l3_core_service/r_and_e_lhcone/shared.py
@@ -0,0 +1,25 @@
+"""Shared logic for R&E LHCOne service workflows."""
+
+from orchestrator import step
+from orchestrator.domain import SubscriptionModel
+from pydantic import NonNegativeInt
+from pydantic_forms.types import State
+
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
+
+
+@step("Update R&E LHCOne-specific attributes")
+def update_r_and_e_lhcone_subscription_model(
+    subscription: SubscriptionModel,
+    v4_bgp_local_preference: NonNegativeInt,
+    v4_bgp_med: MultiExitDiscriminator,
+    v6_bgp_local_preference: NonNegativeInt,
+    v6_bgp_med: MultiExitDiscriminator,
+) -> State:
+    """Update the subscription model of an R&E LHCOne subscription."""
+    subscription.r_and_e_lhcone.v4_bgp_local_preference = v4_bgp_local_preference  # type: ignore[attr-defined]
+    subscription.r_and_e_lhcone.v4_bgp_med = v4_bgp_med  # type: ignore[attr-defined]
+    subscription.r_and_e_lhcone.v6_bgp_local_preference = v6_bgp_local_preference  # type: ignore[attr-defined]
+    subscription.r_and_e_lhcone.v6_bgp_med = v6_bgp_med  # type: ignore[attr-defined]
+
+    return {"subscription": subscription}
diff --git a/gso/workflows/l3_core_service/r_and_e_peer/create_imported_r_and_e_peer.py b/gso/workflows/l3_core_service/r_and_e_peer/create_imported_r_and_e_peer.py
index 6e56e435edefda5fcfd09cab33ad291483093898..6a7d87725d382c6ff343f76d860b2984454ab1d9 100644
--- a/gso/workflows/l3_core_service/r_and_e_peer/create_imported_r_and_e_peer.py
+++ b/gso/workflows/l3_core_service/r_and_e_peer/create_imported_r_and_e_peer.py
@@ -1,19 +1,42 @@
 """A creation workflow for adding an existing Imported R&E Peer to the service database."""
 
 from orchestrator import workflow
+from orchestrator.forms import SubmitFormPage
 from orchestrator.targets import Target
 from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, done, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator
 
 from gso.products import ProductName
 from gso.products.product_types.r_and_e_peer import ImportedRAndEPeerInactive
 from gso.services.partners import get_partner_by_name
 from gso.services.subscriptions import get_product_id_by_name
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.workflows.l3_core_service.base_create_imported_l3_core_service import (
-    initial_input_form_generator,
+    ServiceBindingPort,
     initialize_subscription,
 )
+from gso.workflows.l3_core_service.r_and_e_peer.shared import update_r_and_e_peer_subscription_model
+from gso.workflows.l3_core_service.shared import L3ProductNameType
+
+
+def initial_input_form_generator() -> FormGenerator:
+    """Initial input form generator for creating a new imported R&E Peer subscription."""
+
+    class ImportL3CoreServiceForm(SubmitFormPage):
+        partner: str
+        service_binding_ports: list[ServiceBindingPort]
+        product_name: L3ProductNameType
+        v4_bgp_local_preference: NonNegativeInt = 100
+        v4_bgp_med: MultiExitDiscriminator = "igp"
+        v6_bgp_local_preference: NonNegativeInt = 100
+        v6_bgp_med: MultiExitDiscriminator = "igp"
+
+    user_input = yield ImportL3CoreServiceForm
+
+    return user_input.model_dump()
 
 
 @step("Create subscription")
@@ -37,6 +60,7 @@ def create_imported_r_and_e_peer() -> StepList:
         >> create_subscription
         >> store_process_subscription(Target.CREATE)
         >> initialize_subscription
+        >> update_r_and_e_peer_subscription_model
         >> set_status(SubscriptionLifecycle.ACTIVE)
         >> resync
         >> done
diff --git a/gso/workflows/l3_core_service/r_and_e_peer/create_r_and_e_peer.py b/gso/workflows/l3_core_service/r_and_e_peer/create_r_and_e_peer.py
index 93493e9d3749b6b7ff7811914e5d90189f174b5e..2ba1dab69d5db4294a6da5d5e059fda95492b8d0 100644
--- a/gso/workflows/l3_core_service/r_and_e_peer/create_r_and_e_peer.py
+++ b/gso/workflows/l3_core_service/r_and_e_peer/create_r_and_e_peer.py
@@ -1,14 +1,17 @@
 """Create R&E Peer subscription workflow."""
 
+from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import SubscriptionLifecycle
 from orchestrator.workflow import StepList, begin, done, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
-from pydantic_forms.types import State, UUIDstr
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator, State, UUIDstr
 
 from gso.products.product_types.r_and_e_peer import RAndEPeerInactive
 from gso.services.lso_client import lso_interaction
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.utils.workflow_steps import prompt_sharepoint_checklist_url, start_moodi, stop_moodi
 from gso.workflows.l3_core_service.base_create_l3_core_service import (
     check_bgp_peers,
@@ -16,12 +19,31 @@ from gso.workflows.l3_core_service.base_create_l3_core_service import (
     create_new_sharepoint_checklist,
     deploy_bgp_peers_dry,
     deploy_bgp_peers_real,
-    initial_input_form_generator,
     initialize_subscription,
     provision_sbp_dry,
     provision_sbp_real,
     update_dns_records,
 )
+from gso.workflows.l3_core_service.base_create_l3_core_service import (
+    initial_input_form_generator as base_initial_input_form_generator,
+)
+from gso.workflows.l3_core_service.r_and_e_peer.shared import update_r_and_e_peer_subscription_model
+
+
+def initial_input_form_generator(product_name: str) -> FormGenerator:
+    """Initial input form generator for creating a new R&E Peer subscription."""
+    initial_generator = base_initial_input_form_generator(product_name)
+    initial_user_input = yield from initial_generator
+
+    # Additional R&E Peer step
+    class RAndEPeerExtraForm(FormPage):
+        v4_bgp_local_preference: NonNegativeInt = 100
+        v4_bgp_med: MultiExitDiscriminator = "igp"
+        v6_bgp_local_preference: NonNegativeInt = 100
+        v6_bgp_med: MultiExitDiscriminator = "igp"
+
+    r_and_e_peer_extra_form = yield RAndEPeerExtraForm
+    return initial_user_input | r_and_e_peer_extra_form.model_dump()
 
 
 @step("Create subscription")
@@ -44,6 +66,7 @@ def create_r_and_e_peer() -> StepList:
         >> create_subscription
         >> store_process_subscription(Target.CREATE)
         >> initialize_subscription
+        >> update_r_and_e_peer_subscription_model
         >> start_moodi()
         >> lso_interaction(provision_sbp_dry)
         >> lso_interaction(provision_sbp_real)
diff --git a/gso/workflows/l3_core_service/r_and_e_peer/modify_r_and_e_peer.py b/gso/workflows/l3_core_service/r_and_e_peer/modify_r_and_e_peer.py
index f04009dfe04e2e9c7fac2c1c0e766baebecf7886..7ed008a073f4361a70be110ee8043ae3056ec17d 100644
--- a/gso/workflows/l3_core_service/r_and_e_peer/modify_r_and_e_peer.py
+++ b/gso/workflows/l3_core_service/r_and_e_peer/modify_r_and_e_peer.py
@@ -1,11 +1,16 @@
 """Modification workflow for an R&E Peer subscription."""
 
 from orchestrator import begin, conditional, done, workflow
+from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.workflow import StepList
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import NonNegativeInt
+from pydantic_forms.types import FormGenerator, UUIDstr
 
+from gso.products.product_types.r_and_e_peer import RAndEPeer
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
 from gso.workflows.l3_core_service.base_modify_l3_core_service import (
     Operation,
     create_new_sbp,
@@ -13,11 +18,30 @@ from gso.workflows.l3_core_service.base_modify_l3_core_service import (
     modify_existing_sbp,
     remove_old_sbp,
 )
+from gso.workflows.l3_core_service.r_and_e_peer.shared import update_r_and_e_peer_subscription_model
+
+
+def modify_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
+    """Initial form generator for modifying the custom attributes of an existing IAS subscription."""
+    initial_generator = initial_input_form_generator(subscription_id)
+    initial_user_input = yield from initial_generator
+
+    subscription = RAndEPeer.from_subscription(subscription_id)
+
+    # Additional R&E Peer step
+    class RAndEPeerExtraForm(FormPage):
+        v4_bgp_local_preference: NonNegativeInt = subscription.r_and_e_peer.v4_bgp_local_preference
+        v4_bgp_med: MultiExitDiscriminator = subscription.r_and_e_peer.v4_bgp_med
+        v6_bgp_local_preference: NonNegativeInt = subscription.r_and_e_peer.v6_bgp_local_preference
+        v6_bgp_med: MultiExitDiscriminator = subscription.r_and_e_peer.v6_bgp_med
+
+    r_and_e_peer_extra_form = yield RAndEPeerExtraForm
+    return initial_user_input | r_and_e_peer_extra_form.model_dump()
 
 
 @workflow(
     "Modify R&E Peer",
-    initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
+    initial_input_form=wrap_modify_initial_input_form(modify_input_form_generator),
     target=Target.MODIFY,
 )
 def modify_r_and_e_peer() -> StepList:
@@ -30,6 +54,7 @@ def modify_r_and_e_peer() -> StepList:
         begin
         >> store_process_subscription(Target.MODIFY)
         >> unsync
+        >> update_r_and_e_peer_subscription_model
         >> access_port_is_added(create_new_sbp)
         >> access_port_is_removed(remove_old_sbp)
         >> access_port_is_modified(modify_existing_sbp)
diff --git a/gso/workflows/l3_core_service/r_and_e_peer/shared.py b/gso/workflows/l3_core_service/r_and_e_peer/shared.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6af00f10cc2a35cb6816e2c0086b04fd727754c
--- /dev/null
+++ b/gso/workflows/l3_core_service/r_and_e_peer/shared.py
@@ -0,0 +1,25 @@
+"""Shared logic for R&E Peer service workflows."""
+
+from orchestrator import step
+from orchestrator.domain import SubscriptionModel
+from pydantic import NonNegativeInt
+from pydantic_forms.types import State
+
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
+
+
+@step("Update R&E Peer-specific attributes")
+def update_r_and_e_peer_subscription_model(
+    subscription: SubscriptionModel,
+    v4_bgp_local_preference: NonNegativeInt,
+    v4_bgp_med: MultiExitDiscriminator,
+    v6_bgp_local_preference: NonNegativeInt,
+    v6_bgp_med: MultiExitDiscriminator,
+) -> State:
+    """Update the subscription model of an R&E Peer subscription."""
+    subscription.r_and_e_peer.v4_bgp_local_preference = v4_bgp_local_preference  # type: ignore[attr-defined]
+    subscription.r_and_e_peer.v4_bgp_med = v4_bgp_med  # type: ignore[attr-defined]
+    subscription.r_and_e_peer.v6_bgp_local_preference = v6_bgp_local_preference  # type: ignore[attr-defined]
+    subscription.r_and_e_peer.v6_bgp_med = v6_bgp_med  # type: ignore[attr-defined]
+
+    return {"subscription": subscription}
diff --git a/gso/workflows/l3_core_service/redeploy_l3_core_service.py b/gso/workflows/l3_core_service/redeploy_l3_core_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfaaacc7f75dff4462488fd6519b30adf07b9054
--- /dev/null
+++ b/gso/workflows/l3_core_service/redeploy_l3_core_service.py
@@ -0,0 +1,87 @@
+"""Base functionality for modifying an L3 Core Service subscription."""
+
+from typing import TypeAlias, cast
+
+from orchestrator import workflow
+from orchestrator.domain import SubscriptionModel
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.workflow import StepList, begin, done
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import ConfigDict
+from pydantic_forms.types import FormGenerator, UUIDstr
+from pydantic_forms.validators import Choice
+
+from gso.products.product_blocks.l3_core_service import AccessPort
+from gso.products.product_types.edge_port import EdgePort
+from gso.services.lso_client import lso_interaction
+from gso.services.partners import get_partner_by_id
+from gso.utils.types.tt_number import TTNumber
+from gso.workflows.l3_core_service.base_create_l3_core_service import (
+    deploy_bgp_peers_dry,
+    deploy_bgp_peers_real,
+    provision_sbp_dry,
+    provision_sbp_real,
+)
+
+
+def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
+    """Get input which Access Port should be re-deployed."""
+    subscription = SubscriptionModel.from_subscription(subscription_id)
+    product_name = subscription.product.name
+
+    def access_port_selector() -> TypeAlias:
+        """Generate a dropdown selector for choosing an Access Port in an input form."""
+        access_ports = subscription.l3_core.ap_list  # type: ignore[attr-defined]
+        options = {
+            str(access_port.subscription_instance_id): (
+                f"{access_port.sbp.gs_id} on "
+                f"{EdgePort.from_subscription(access_port.sbp.edge_port.owner_subscription_id).description} "
+                f"({access_port.ap_type})"
+            )
+            for access_port in access_ports
+        }
+
+        return cast(
+            type[Choice],
+            Choice.__call__(
+                "Select an Access Port",
+                zip(options.keys(), options.items(), strict=True),
+            ),
+        )
+
+    class AccessPortSelectionForm(FormPage):
+        model_config = ConfigDict(title=f"Re-deploy {product_name} subscription")
+
+        tt_number: TTNumber
+        access_port: access_port_selector()  # type: ignore[valid-type]
+
+    user_input = yield AccessPortSelectionForm
+    partner_name = get_partner_by_id(subscription.customer_id).name
+    access_port = AccessPort.from_db(user_input.access_port)
+    access_port_fqdn = EdgePort.from_subscription(
+        access_port.sbp.edge_port.owner_subscription_id
+    ).edge_port.node.router_fqdn
+
+    return user_input.model_dump() | {"edge_port_fqdn_list": [access_port_fqdn], "partner_name": partner_name}
+
+
+@workflow(
+    "Redeploy Layer 3 service",
+    initial_input_form=wrap_modify_initial_input_form(initial_input_form_generator),
+    target=Target.MODIFY,
+)
+def redeploy_l3_core_service() -> StepList:
+    """Redeploy a Layer 3 subscription."""
+    return (
+        begin
+        >> store_process_subscription(Target.MODIFY)
+        >> unsync
+        >> lso_interaction(provision_sbp_dry)
+        >> lso_interaction(provision_sbp_real)
+        >> lso_interaction(deploy_bgp_peers_dry)
+        >> lso_interaction(deploy_bgp_peers_real)
+        >> resync
+        >> done
+    )
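
The `zip(options.keys(), options.items(), strict=True)` idiom in the selector pairs each subscription-instance ID with a `(key, label)` tuple, which pydantic-forms' `Choice` turns into dropdown members whose value is the ID and whose label is the human-readable description. A standalone sketch, assuming those `Choice` semantics:

```python
from pydantic_forms.validators import Choice

options = {"id-1": "GS-00001 on rt1.ams (PRIMARY)", "id-2": "GS-00002 on rt1.lon (BACKUP)"}
selector = Choice("Select an Access Port", zip(options.keys(), options.items(), strict=True))

for member in selector:
    print(member.value, "->", member.label)  # the ID is submitted, the label is displayed
```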
diff --git a/gso/workflows/router/terminate_router.py b/gso/workflows/router/terminate_router.py
index 4599c6f099555b79a9a5a6a5f1229f4878dd8013..75a78281d7cb8914059e69b44bedeb90148a9f97 100644
--- a/gso/workflows/router/terminate_router.py
+++ b/gso/workflows/router/terminate_router.py
@@ -15,6 +15,7 @@ The workflow consists of the following steps:
 - Set the subscription status to `TERMINATED`.
 """
 
+import datetime
 import ipaddress
 import json
 import logging
@@ -34,6 +35,7 @@ from orchestrator.workflows.steps import (
     unsync,
 )
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import Field
 from pydantic_forms.types import FormGenerator, State, UUIDstr
 from requests import HTTPError
 
@@ -63,19 +65,28 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
 
     class TerminateForm(SubmitFormPage):
         if router.status == SubscriptionLifecycle.INITIAL:
-            info_label_2: Label = (
+            info_label_2: Label = Field(
                 "This will immediately mark the subscription as terminated, preventing any other workflows from "
-                "interacting with this product subscription."
+                "interacting with this product subscription.",
+                exclude=True,
+            )
+            info_label_3: Label = Field(
+                "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING.", exclude=True
             )
-            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
         tt_number: TTNumber
-        termination_label: Label = "Please confirm whether configuration should get removed from the router."
+        termination_label: Label = Field(
+            "Please confirm whether configuration should get removed from the router.", exclude=True
+        )
         remove_configuration: bool = False
-        update_ibgp_mesh_label: Label = "Please confirm whether the iBGP mesh should get updated."
+        update_ibgp_mesh_label: Label = Field("Please confirm whether the iBGP mesh should get updated.", exclude=True)
         update_ibgp_mesh: bool = True
-        update_sdp_mesh_label: Label = "Please confirm whether the SDP mesh should get updated."
+        update_sdp_mesh_label: Label = Field("Please confirm whether the SDP mesh should get updated.", exclude=True)
         update_sdp_mesh: bool = True
+        remove_loopback_from_ipam_label: Label = Field(
+            "Please confirm whether the loopback address should be released in IPAM.", exclude=True
+        )
+        remove_loopback_from_ipam: bool = False
 
     user_input = yield TerminateForm
     return user_input.model_dump() | {
@@ -85,11 +96,20 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
 
 
 @step("Deprovision loopback IPs from IPAM")
-def deprovision_loopback_ips(subscription: Router) -> dict:
+def deprovision_loopback_ips(subscription: Router, remove_loopback_from_ipam: bool, process_id: UUIDstr) -> None:  # noqa: FBT001
     """Clear up the loopback addresses from IPAM."""
-    infoblox.delete_host_by_ip(ipaddress.IPv4Address(subscription.router.router_lo_ipv4_address))
-
-    return {"subscription": subscription}
+    if remove_loopback_from_ipam:
+        infoblox.delete_host_by_ip(ipaddress.IPv4Address(subscription.router.router_lo_ipv4_address))
+    else:
+        record = infoblox.find_host_by_fqdn(subscription.router.router_fqdn)
+        if record:
+            #  We keep the record in IPAM but add a comment stating that this router is terminated.
+            #  This is done to prevent an address from being re-used.
+            record.comment = (
+                f"This router was terminated by GAP process {process_id} on "
+                f"{datetime.datetime.now(tz=datetime.UTC).strftime("%d/%m/%Y")}."
+            )
+            record.update()
 
 
 @step("[DRY RUN] Remove configuration from router")
diff --git a/gso/workflows/tasks/validate_geant_products.py b/gso/workflows/tasks/validate_geant_products.py
deleted file mode 100644
index 8fe39f61b762f46683336f802af1885d447ab339..0000000000000000000000000000000000000000
--- a/gso/workflows/tasks/validate_geant_products.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""A task that checks for all products in the database to be well-kept."""
-
-# <!-- vale off -->
-# Copyright 2019-2020 SURF.
-# Copyright 2024 GÉANT Vereniging.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# <!-- vale on -->
-
-from orchestrator.targets import Target
-from orchestrator.workflow import StepList, done, init, workflow
-from orchestrator.workflows.tasks.validate_products import (
-    check_all_workflows_are_in_db,
-    check_db_fixed_input_config,
-    check_subscription_models,
-    check_that_products_have_create_modify_and_terminate_workflows,
-    check_workflows_for_matching_targets_and_descriptions,
-)
-
-
-@workflow("Validate GEANT products", target=Target.SYSTEM)
-def task_validate_geant_products() -> StepList:
-    """Validate products in the database.
-
-    This task is based on the ``task_validate_products`` present in ``orchestrator-core`` but it does not check for the
-    existence of the ``modify_note`` workflow on all products, since this workflow is currently not used in GEANT.
-    """
-    return (
-        init
-        >> check_all_workflows_are_in_db
-        >> check_workflows_for_matching_targets_and_descriptions
-        # >> check_that_products_have_at_least_one_workflow  FIXME: Uncomment as soon as this would pass again
-        >> check_db_fixed_input_config
-        >> check_that_products_have_create_modify_and_terminate_workflows
-        >> check_subscription_models
-        >> done
-    )
diff --git a/setup.py b/setup.py
index 7a013462e2b7170b142309c2ea0c5fb685505bed..727d17cf74949905824a31810779dcf37388681b 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="geant-service-orchestrator",
-    version="3.11",
+    version="3.12",
     author="GÉANT Orchestration and Automation Team",
     author_email="goat@geant.org",
     description="GÉANT Service Orchestrator",
diff --git a/test/cli/test_imports.py b/test/cli/test_imports.py
index 371980aa4aae8827603f57f33a045d05dd192bb5..45a64f7780e921fddf8c12a63e95c70a519cbca1 100644
--- a/test/cli/test_imports.py
+++ b/test/cli/test_imports.py
@@ -73,12 +73,12 @@ def iptrunk_data(temp_file, router_subscription_factory, faker) -> (Path, dict):
                 },
                 "nodeA": {
                     "name": side_a_node or router_side_a.router.router_fqdn,
-                    "ae_name": side_a_ae_name or faker.nokia_lag_interface_name(),
+                    "ae_name": side_a_ae_name or faker.unique.nokia_lag_interface_name(),
                     "port_ga_id": faker.imported_ga_id(),
                     "members": side_a_members
                     or [
                         {
-                            "interface_name": faker.nokia_physical_interface_name(),
+                            "interface_name": faker.unique.nokia_physical_interface_name(),
                             "interface_description": faker.sentence(),
                         }
                         for _ in range(5)
@@ -88,12 +88,12 @@ def iptrunk_data(temp_file, router_subscription_factory, faker) -> (Path, dict):
                 },
                 "nodeB": {
                     "name": side_b_node or router_side_b.router.router_fqdn,
-                    "ae_name": side_b_ae_name or faker.nokia_lag_interface_name(),
+                    "ae_name": side_b_ae_name or faker.unique.nokia_lag_interface_name(),
                     "port_ga_id": faker.imported_ga_id(),
                     "members": side_b_members
                     or [
                         {
-                            "interface_name": faker.nokia_physical_interface_name(),
+                            "interface_name": faker.unique.nokia_physical_interface_name(),
                             "interface_description": faker.sentence(),
                         }
                         for _ in range(5)
@@ -234,18 +234,21 @@ def lan_switch_interconnect_data(temp_file, faker, switch_subscription_factory,
             "minimum_links": 1,
             "router_side": {
                 "node": str(router_subscription_factory().subscription_id),
-                "ae_iface": faker.nokia_lag_interface_name(),
+                "ae_iface": faker.unique.nokia_lag_interface_name(),
                 "ae_members": [
-                    {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+                    {
+                        "interface_name": faker.unique.nokia_physical_interface_name(),
+                        "interface_description": faker.sentence(),
+                    }
                     for _ in range(2)
                 ],
             },
             "switch_side": {
                 "switch": str(switch_subscription_factory().subscription_id),
-                "ae_iface": faker.juniper_ae_interface_name(),
+                "ae_iface": faker.unique.juniper_ae_interface_name(),
                 "ae_members": [
                     {
-                        "interface_name": faker.juniper_physical_interface_name(),
+                        "interface_name": faker.unique.juniper_physical_interface_name(),
                         "interface_description": faker.sentence(),
                     }
                     for _ in range(2)
@@ -277,7 +280,7 @@ def edge_port_data(temp_file, faker, router_subscription_factory, partner_factor
             "ignore_if_down": False,
             "ae_members": [
                 {
-                    "interface_name": faker.nokia_physical_interface_name(),
+                    "interface_name": faker.unique.nokia_physical_interface_name(),
                     "interface_description": faker.sentence(),
                 }
                 for _ in range(2)
@@ -584,7 +587,7 @@ def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, mock_
 @patch("gso.cli.imports.start_process")
 def test_import_iptrunk_non_unique_members_side_a_and_b(mock_start_process, mock_sleep, iptrunk_data, faker, capfd):
     duplicate_interface = {
-        "interface_name": faker.nokia_physical_interface_name(),
+        "interface_name": faker.unique.nokia_physical_interface_name(),
         "interface_description": faker.sentence(),
     }
     side_a_members = [duplicate_interface for _ in range(5)]
@@ -611,11 +614,11 @@ def test_import_iptrunk_non_unique_members_side_a_and_b(mock_start_process, mock
 @patch("gso.cli.imports.start_process")
 def test_import_iptrunk_side_a_member_count_mismatch(mock_start_process, mock_sleep, iptrunk_data, faker, capfd):
     side_a_members = [
-        {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+        {"interface_name": faker.unique.nokia_physical_interface_name(), "interface_description": faker.sentence()}
         for _ in range(5)
     ]
     side_b_members = [
-        {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+        {"interface_name": faker.unique.nokia_physical_interface_name(), "interface_description": faker.sentence()}
         for _ in range(6)
     ]
     broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
diff --git a/test/cli/test_lso_calls.py b/test/cli/test_lso_calls.py
new file mode 100644
index 0000000000000000000000000000000000000000..86eec0b6cace3f633644847a7c779632f0f832d4
--- /dev/null
+++ b/test/cli/test_lso_calls.py
@@ -0,0 +1,197 @@
+import json
+
+import click
+import httpx
+import pytest
+from orchestrator.db import db
+from typer.testing import CliRunner
+
+from gso.cli.lso_calls import app
+from gso.db.models import BgpStatusPreCheckTable
+
+runner = CliRunner()
+
+# A valid LSO response payload
+BASE_RESPONSE = {
+    "job_id": "2c19843b-c721-4662-8014-b1a7a22f1734",
+    "result": {
+        "output": json.dumps({"bgp": {"asn": 65001}, "neighbors": []}),
+        "return_code": 0,
+        "status": "successful",
+    },
+}
+
+
+class DummyResponse:
+    def __init__(self, json_data, status=200):
+        self._json = json_data
+        self.status_code = status
+
+    def json(self):
+        return self._json
+
+    def raise_for_status(self):
+        if not (200 <= self.status_code < 300):
+            raise httpx.HTTPStatusError("error", request=None, response=self)  # noqa: RUF100,EM101
+
+
+@pytest.fixture()
+def import_file(tmp_path):
+    """Create a temporary JSON import file."""
+    path = tmp_path / "import.json"
+    path.write_text(json.dumps({"foo": "bar"}))
+    return path
+
+
+@pytest.fixture()
+def mock_http_success(monkeypatch):
+    """Return a successful dummy response for httpx.post."""
+    dummy_resp = DummyResponse(BASE_RESPONSE)
+    monkeypatch.setattr("httpx.post", lambda *args, **kwargs: dummy_resp)  # noqa: ARG005
+    return dummy_resp
+
+
+@pytest.fixture()
+def mock_http_error(monkeypatch):
+    """Simulate httpx.post throwing an exception."""
+
+    def raise_exc(*args, **kwargs):
+        """Raise an exception to simulate a network error."""
+        msg = "timeout"
+        raise Exception(msg)  # noqa: TRY002
+
+    monkeypatch.setattr("httpx.post", raise_exc)
+    return raise_exc
+
+
+@pytest.fixture()
+def mock_http_bad_shape(monkeypatch):
+    """Return JSON that does not fit ExecutableRunResponse."""
+    bad = {"unexpected": "data"}
+    dummy_resp = DummyResponse(bad)
+    monkeypatch.setattr("httpx.post", lambda *args, **kwargs: dummy_resp)  # noqa: ARG005
+    return dummy_resp
+
+
+def test_invalid_partner_name(mock_http_success, import_file):
+    """Step 0: unknown partner should abort before any HTTP call."""
+    result = runner.invoke(
+        app,
+        ["rt1.ams.geant.org", "UNKNOWN", str(import_file)],
+        input="",
+    )
+    assert result.exit_code == 1
+    assert "partner 'unknown' not found" in result.stdout.lower()
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_no_save_leaves_table_empty(mock_http_success, partner_factory, import_file):
+    """If user declines save, table remains empty."""
+    partner_factory("SURF")
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(import_file)],
+        input="n\n",
+    )
+    assert result.exit_code == 0
+    assert "not saving" in result.stdout.lower()
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_prompt_save_yes_persists_record(mock_http_success, partner_factory, import_file):
+    """Typing 'y' at prompt should also persist."""
+    partner_factory("SURF")
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(import_file)],
+        input="y\n",
+    )
+    assert result.exit_code == 0
+    assert db.session.query(BgpStatusPreCheckTable).count() == 1
+
+
+def test_http_failure_aborts(mock_http_error, partner_factory, import_file):
+    """Network/timeout errors should abort with exit code 1."""
+    partner_factory("SURF")
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(import_file)],
+    )
+    assert result.exit_code == 1
+    # The CLI echoes the error message to stdout:
+    assert "error: failed to call lso: timeout" in result.stdout.lower()
+
+    # Table still empty
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_invalid_shape_aborts(mock_http_bad_shape, partner_factory, import_file):
+    """Malformed top-level JSON shape should abort."""
+    partner_factory("SURF")
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(import_file)],
+    )
+    assert result.exit_code == 1
+    assert "invalid JSON returned by LSO" in result.stdout
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_parse_output_nonjson(monkeypatch, partner_factory, import_file):
+    """If output is not valid JSON, we still complete without saving."""
+    partner_factory("SURF")
+    # Patch BASE_RESPONSE to use non-JSON output
+    bad = dict(BASE_RESPONSE)
+    bad["result"] = dict(bad["result"])
+    bad["result"]["output"] = "not a json"
+    monkeypatch.setattr("httpx.post", lambda *args, **kwargs: DummyResponse(bad))  # noqa: ARG005
+
+    result = runner.invoke(app, ["rt1.example.com", "SURF", str(import_file)], input="n\n")
+    assert result.exit_code == 0
+    assert "(not valid JSON, raw string below)" in result.stdout
+
+
+def test_pagination_on_large_output(monkeypatch, partner_factory, import_file):
+    """Parsed output >50 lines should trigger click.echo_via_pager."""
+    partner_factory("SURF")
+    big = {"x": ["line"] * 100}
+    payload = dict(BASE_RESPONSE)
+    payload["result"] = dict(payload["result"])
+    payload["result"]["output"] = json.dumps(big)
+    monkeypatch.setattr("httpx.post", lambda *args, **kwargs: DummyResponse(payload))  # noqa: ARG005
+
+    paged = False
+
+    def fake_pager(text):
+        nonlocal paged
+        paged = True
+
+    monkeypatch.setattr(click, "echo_via_pager", fake_pager)
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(import_file)],
+        input="n\n",
+    )
+    assert result.exit_code == 0
+    assert paged, "Expected parsed-output pager for large JSON"
+
+
+def test_invalid_import_file(tmp_path, partner_factory):
+    """Invalid JSON import file should abort with exit code 2 and no DB write."""
+    # create invalid JSON file
+    bad_file = tmp_path / "bad_import.json"
+    bad_file.write_text("{invalid_json}")
+    partner_factory("SURF")
+
+    # Invoke with the malformed JSON file
+    result = runner.invoke(
+        app,
+        ["rt1.example.com", "SURF", str(bad_file)],
+    )
+
+    # Expect exit code 2 from _load_import_file
+    assert result.exit_code == 2
+    assert "Error: could not read or parse" in result.stdout
+
+    # Ensure no record was written
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
diff --git a/test/conftest.py b/test/conftest.py
index 3e4a765d70cfefbd4ae800de2e18d35423ec8d6f..71112c3390527c4f4051f95964027e9e2070ab81 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -160,6 +160,15 @@ def faker() -> Faker:
     return fake
 
 
+@pytest.fixture(autouse=True)
+def _clear_faker_uniqueness(faker):
+    """Reset the already seen values generated by faker.
+
+    Faker generators that enforce uniqueness only do so on a per-test basis, so we clear the set of previously seen
+    values after every test to avoid ``UniquenessException``s."""
+    faker.unique.clear()
+
+
 @pytest.fixture(scope="session")
 def db_uri():
     """Provide a unique database URI for each pytest-xdist worker, or a default URI if running without xdist."""
diff --git a/test/fixtures/edge_port_fixtures.py b/test/fixtures/edge_port_fixtures.py
index 57f8481c380c83f355074662ed8d0124d3e6573b..7913e3324b1523278a7b1e5f46f9fd8b7cfe5c08 100644
--- a/test/fixtures/edge_port_fixtures.py
+++ b/test/fixtures/edge_port_fixtures.py
@@ -56,7 +56,7 @@ def edge_port_subscription_factory(faker, geant_partner, router_subscription_fac
         edge_port_subscription.edge_port.edge_port_description = description or faker.text(max_nb_chars=30)
         edge_port_subscription.edge_port.ga_id = ga_id or faker.ga_id()
         edge_port_subscription.edge_port.node = node
-        edge_port_subscription.edge_port.edge_port_name = name or faker.nokia_lag_interface_name()
+        edge_port_subscription.edge_port.edge_port_name = name or faker.unique.nokia_lag_interface_name()
         edge_port_subscription.edge_port.edge_port_description = edge_port_description or faker.sentence()
         edge_port_subscription.edge_port.enable_lacp = enable_lacp
         edge_port_subscription.edge_port.encapsulation = encapsulation
@@ -69,16 +69,16 @@ def edge_port_subscription_factory(faker, geant_partner, router_subscription_fac
         edge_port_subscription.edge_port.edge_port_ae_members = edge_port_ae_members or [
             EdgePortAEMemberBlock.new(
                 faker.uuid(),
-                interface_name=faker.nokia_physical_interface_name()
+                interface_name=faker.unique.nokia_physical_interface_name()
                 if node.vendor == Vendor.NOKIA
-                else faker.juniper_physical_interface_name(),
+                else faker.unique.juniper_physical_interface_name(),
                 interface_description=faker.sentence(),
             ),
             EdgePortAEMemberBlock.new(
                 faker.uuid(),
-                interface_name=faker.nokia_physical_interface_name()
+                interface_name=faker.unique.nokia_physical_interface_name()
                 if node.vendor == Vendor.NOKIA
-                else faker.juniper_physical_interface_name(),
+                else faker.unique.juniper_physical_interface_name(),
                 interface_description=faker.sentence(),
             ),
         ]
diff --git a/test/fixtures/iptrunk_fixtures.py b/test/fixtures/iptrunk_fixtures.py
index 94f409d47461529cf8d2d858f7e321145daa6d7d..fc241447a0261c99f95efc766e4a48bc262aca4b 100644
--- a/test/fixtures/iptrunk_fixtures.py
+++ b/test/fixtures/iptrunk_fixtures.py
@@ -31,18 +31,18 @@ def iptrunk_side_subscription_factory(router_subscription_factory, faker):
             iptrunk_side_node=iptrunk_side_node.router
             if iptrunk_side_node
             else router_subscription_factory(vendor=Vendor.NOKIA, router_access_via_ts=side_node_access_via_ts).router,
-            iptrunk_side_ae_iface=iptrunk_side_ae_iface or faker.nokia_lag_interface_name(),
+            iptrunk_side_ae_iface=iptrunk_side_ae_iface or faker.unique.nokia_lag_interface_name(),
             ga_id=ga_id or faker.ga_id(),
             iptrunk_side_ae_members=iptrunk_side_ae_members
             or [
                 IptrunkInterfaceBlock.new(
                     faker.uuid(),
-                    interface_name=faker.nokia_physical_interface_name(),
+                    interface_name=faker.unique.nokia_physical_interface_name(),
                     interface_description=faker.sentence(),
                 ),
                 IptrunkInterfaceBlock.new(
                     faker.uuid(),
-                    interface_name=faker.nokia_physical_interface_name(),
+                    interface_name=faker.unique.nokia_physical_interface_name(),
                     interface_description=faker.sentence(),
                 ),
             ],
diff --git a/test/fixtures/lan_switch_interconnect_fixtures.py b/test/fixtures/lan_switch_interconnect_fixtures.py
index 50cea443cf5c70cf1593193137b2b8cd4b8496eb..094de9758a14680bfafa328cf015031d9ac6c66c 100644
--- a/test/fixtures/lan_switch_interconnect_fixtures.py
+++ b/test/fixtures/lan_switch_interconnect_fixtures.py
@@ -60,9 +60,9 @@ def lan_switch_interconnect_subscription_factory(
             LanSwitchInterconnectInterfaceBlockInactive.new(
                 uuid4(),
                 interface_name=(
-                    faker.nokia_physical_interface_name()
+                    faker.unique.nokia_physical_interface_name()
                     if router_side_node.vendor == Vendor.NOKIA
-                    else faker.juniper_physical_interface_name()
+                    else faker.unique.juniper_physical_interface_name()
                 ),
                 interface_description=faker.sentence(),
             )
@@ -70,7 +70,9 @@ def lan_switch_interconnect_subscription_factory(
         ]
         switch_side_ae_members = switch_side_ae_members or [
             LanSwitchInterconnectInterfaceBlockInactive.new(
-                uuid4(), interface_name=faker.juniper_physical_interface_name(), interface_description=faker.sentence()
+                uuid4(),
+                interface_name=faker.unique.juniper_physical_interface_name(),
+                interface_description=faker.sentence(),
             )
             for _ in range(2)
         ]
@@ -84,16 +86,16 @@ def lan_switch_interconnect_subscription_factory(
             node=router_side_node,
             ae_iface=router_side_ae_iface
             or (
-                faker.nokia_lag_interface_name()
+                faker.unique.nokia_lag_interface_name()
                 if router_side_node.vendor == Vendor.NOKIA
-                else faker.juniper_ae_interface_name()
+                else faker.unique.juniper_ae_interface_name()
             ),
             ae_members=router_side_ae_members,
         )
         subscription.lan_switch_interconnect.switch_side = LanSwitchInterconnectSwitchSideBlockInactive.new(
             uuid4(),
             switch=switch_side_switch.site if switch_side_switch else switch_subscription_factory().switch,
-            ae_iface=switch_side_ae_iface or faker.juniper_ae_interface_name(),
+            ae_iface=switch_side_ae_iface or faker.unique.juniper_ae_interface_name(),
             ae_members=switch_side_ae_members,
         )
         subscription.lan_switch_interconnect.dcn_management_vlan_id = (
diff --git a/test/schedules/test_scheduling.py b/test/schedules/test_scheduling.py
index a1eb56b48a4d0023c527635701ba85dbfa5a4bab..8f6feafcc1b62b7522e4f85786e8a4e288217bb8 100644
--- a/test/schedules/test_scheduling.py
+++ b/test/schedules/test_scheduling.py
@@ -93,7 +93,10 @@ def test_subscriptions_without_system_target_workflow(
     mock_logger,
     validate_subscriptions,
 ):
-    mock_get_active_subscriptions.return_value = [MagicMock(product=MagicMock(workflows=[]))]
+    subscription_mock = MagicMock()
+    subscription_mock.product.workflows = []
+    subscription_mock.note = None
+    mock_get_active_subscriptions.return_value = [subscription_mock]
     validate_subscriptions()
     mock_logger.warning.assert_called_once()
 
@@ -106,6 +109,7 @@ def test_subscription_status_not_usable(
     subscription_mock = MagicMock()
     subscription_mock.product.workflows = [MagicMock(target=Target.SYSTEM, name="workflow_name")]
     subscription_mock.status = "Not Usable Status"
+    subscription_mock.note = None
 
     mock_get_active_subscriptions.return_value = [subscription_mock]
     validate_subscriptions()
@@ -123,6 +127,7 @@ def test_valid_subscriptions_for_validation(
     mocked_workflow = MagicMock(target=Target.SYSTEM, name="workflow_name")
     subscription_mock.product.workflows = [mocked_workflow]
     subscription_mock.status = "active"
+    subscription_mock.note = None
     mock_get_active_subscriptions.return_value = [subscription_mock]
     validate_subscriptions()
     validate_func = mock_get_execution_context()["validate"]
@@ -130,3 +135,18 @@ def test_valid_subscriptions_for_validation(
         mocked_workflow.name,
         json=[{"subscription_id": str(subscription_mock.subscription_id)}],
     )
+
+
+def test_subscription_skipped_with_note(
+    mock_get_active_subscriptions,
+    mock_get_execution_context,
+    validate_subscriptions,
+):
+    subscription_mock = MagicMock()
+    subscription_mock.product.workflows = [MagicMock(target=Target.SYSTEM, name="workflow_name")]
+    subscription_mock.note = "SKIP VALIDATION: Because we don't want to."
+    mock_get_active_subscriptions.return_value = [subscription_mock]
+    validate_subscriptions()
+
+    validate_func = mock_get_execution_context()["validate"]
+    validate_func.assert_not_called()
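
These tests pin down the `SKIP VALIDATION:` note behaviour announced in the changelog. A hedged sketch of the check they imply (the actual logic inside `validate_subscriptions` may differ):

```python
SKIP_MARKER = "SKIP VALIDATION:"


def should_validate(note: str | None) -> bool:
    """Return False when an operator opted the subscription out via its note."""
    return not (note or "").startswith(SKIP_MARKER)


assert should_validate(None)
assert not should_validate("SKIP VALIDATION: Because we don't want to.")
```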
diff --git a/test/tasks/test_masssive_redeploy_base_config.py b/test/tasks/test_massive_redeploy_base_config.py
similarity index 98%
rename from test/tasks/test_masssive_redeploy_base_config.py
rename to test/tasks/test_massive_redeploy_base_config.py
index 394b2ca21f2d01139f57d49563f317bd26a7b34d..690c90f1151acb0829663b9f05ff54af391b2e84 100644
--- a/test/tasks/test_masssive_redeploy_base_config.py
+++ b/test/tasks/test_massive_redeploy_base_config.py
@@ -133,7 +133,7 @@ def test_timeout_and_validation_and_unexpected(
     expected_failed = {
         "t1.example.com": "Timed out waiting for workflow to complete",
         "t2.example.com": f"Validation error: {validation_exc}",
-        "t3.example.com": "Unexpected error: boom",
+        "t3.example.com": "Unexpected error: boom, router_fqdn: t3.example.com",
     }
     expected_payload = {"successful_wfs": {}, "failed_wfs": expected_failed}
 
diff --git a/test/utils/types/__init__.py b/test/utils/types/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/utils/types/test_multi_exit_discriminator.py b/test/utils/types/test_multi_exit_discriminator.py
new file mode 100644
index 0000000000000000000000000000000000000000..e24ffba7bb8e54f37de1ea7e2075f9c19f763ff5
--- /dev/null
+++ b/test/utils/types/test_multi_exit_discriminator.py
@@ -0,0 +1,30 @@
+import pytest
+from pydantic import BaseModel, ValidationError
+
+from gso.utils.types.multi_exit_discriminator import MultiExitDiscriminator
+
+
+class BGPDiscriminator(BaseModel):
+    bgp_med: MultiExitDiscriminator
+
+
+@pytest.mark.parametrize(
+    ("input_value", "is_valid"),
+    [
+        ("igp", True),
+        ("min-igp", True),
+        ("40", True),
+        ("0", True),
+        (43, True),
+        ("-74", False),
+        ("45.6", False),
+        (-91, False),
+        ("abc", False),
+    ],
+)
+def test_multi_exit_discriminator(input_value, is_valid):
+    if is_valid:
+        assert BGPDiscriminator(bgp_med=input_value).bgp_med == str(input_value)
+    else:
+        with pytest.raises(ValidationError):
+            BGPDiscriminator(bgp_med=input_value)
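
The parametrised cases pin down the type's contract: `igp`, `min-igp`, or a non-negative integer, always normalised to a string. A hedged sketch of a validator that satisfies exactly these tests (the real `gso.utils.types.multi_exit_discriminator` may be implemented differently):

```python
from typing import Annotated

from pydantic import AfterValidator


def _validate_med(value: str | int) -> str:
    value = str(value)
    if value in {"igp", "min-igp"} or value.isdigit():
        return value  # "-74" and "45.6" fail isdigit(), so they are rejected
    msg = f"invalid multi-exit discriminator: {value}"
    raise ValueError(msg)


MultiExitDiscriminator = Annotated[str | int, AfterValidator(_validate_med)]
```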
diff --git a/test/workflows/edge_port/test_create_edge_port.py b/test/workflows/edge_port/test_create_edge_port.py
index 39ac848fb8e7647bc7b64896ceebc6895d672efb..ca49dd9ef67f4e8ff60d862c479d332f50d73ac5 100644
--- a/test/workflows/edge_port/test_create_edge_port.py
+++ b/test/workflows/edge_port/test_create_edge_port.py
@@ -64,13 +64,13 @@ def input_form_wizard_data(request, router_subscription_factory, partner_factory
             "ga_id": "GA-12345",
         }
         create_edge_port_interface_step = {
-            "name": faker.nokia_lag_interface_name(),
+            "name": faker.unique.nokia_lag_interface_name(),
             "description": faker.sentence(),
             "ae_members": [
                 {
-                    "interface_name": faker.juniper_physical_interface_name()
+                    "interface_name": faker.unique.juniper_physical_interface_name()
                     if vendor == Vendor.JUNIPER
-                    else faker.nokia_physical_interface_name(),
+                    else faker.unique.nokia_physical_interface_name(),
                     "interface_description": faker.sentence(),
                 }
                 for _ in range(2)
diff --git a/test/workflows/edge_port/test_create_imported_edge_port.py b/test/workflows/edge_port/test_create_imported_edge_port.py
index d13614eb1e0b5328e55c506875ac507e290ebf84..66b279f59e3369c15ffe408b24f4df5c50f24e04 100644
--- a/test/workflows/edge_port/test_create_imported_edge_port.py
+++ b/test/workflows/edge_port/test_create_imported_edge_port.py
@@ -15,7 +15,7 @@ def imported_edge_port_creation_input_form_data(router_subscription_factory, par
         "service_type": EdgePortType.CUSTOMER,
         "speed": PhysicalPortCapacity.TEN_GIGABIT_PER_SECOND,
         "encapsulation": EncapsulationType.DOT1Q,
-        "name": faker.nokia_lag_interface_name(),
+        "name": faker.unique.nokia_lag_interface_name(),
         "minimum_links": 2,
         "ga_id": faker.imported_ga_id(),
         "mac_address": faker.mac_address(),
@@ -24,11 +24,11 @@ def imported_edge_port_creation_input_form_data(router_subscription_factory, par
         "ignore_if_down": False,
         "ae_members": [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             },
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             },
         ],
diff --git a/test/workflows/edge_port/test_migrate_edge_port.py b/test/workflows/edge_port/test_migrate_edge_port.py
index d81536c6c9d6c848e82f85ebd11dc6677e928880..a235a8f66f1e93ffff2cc8485d4da6db23aa6689 100644
--- a/test/workflows/edge_port/test_migrate_edge_port.py
+++ b/test/workflows/edge_port/test_migrate_edge_port.py
@@ -54,11 +54,11 @@ def input_form_wizard_data(request, router_subscription_factory, partner, faker)
         "node": str(router_subscription_factory(vendor=Vendor.NOKIA).subscription_id),
     }
     create_edge_port_interface_step = {
-        "name": faker.nokia_lag_interface_name(),
+        "name": faker.unique.nokia_lag_interface_name(),
         "description": faker.sentence(),
         "ae_members": [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
diff --git a/test/workflows/edge_port/test_modify_edge_port.py b/test/workflows/edge_port/test_modify_edge_port.py
index aa915148c95dba7390bffbb9138f0333f1cf2294..1d7e4aa04edb3299a4425d4517ef8bdbeb25f74c 100644
--- a/test/workflows/edge_port/test_modify_edge_port.py
+++ b/test/workflows/edge_port/test_modify_edge_port.py
@@ -36,9 +36,9 @@ def input_form_wizard_data(
                 "description": faker.sentence(),
                 "ae_members": [
                     {
-                        "interface_name": faker.nokia_physical_interface_name()
+                        "interface_name": faker.unique.nokia_physical_interface_name()
                         if vendor == Vendor.NOKIA
-                        else faker.juniper_physical_interface_name(),
+                        else faker.unique.juniper_physical_interface_name(),
                         "interface_description": faker.sentence(),
                     }
                 ],
diff --git a/test/workflows/iptrunk/test_create_imported_iptrunk.py b/test/workflows/iptrunk/test_create_imported_iptrunk.py
index 7f83775bb0d8ba1309bc008a8440f90b541e0f53..9373e8f8502b2c28a567fa34063d451e6d993fd3 100644
--- a/test/workflows/iptrunk/test_create_imported_iptrunk.py
+++ b/test/workflows/iptrunk/test_create_imported_iptrunk.py
@@ -24,17 +24,17 @@ def workflow_input_data(faker, router_subscription_factory):
         "iptrunk_isis_metric": 10000,
         "iptrunk_description_suffix": faker.word(),
         "side_a_node_id": str(router_subscription_factory().subscription_id),
-        "side_a_ae_iface": faker.nokia_lag_interface_name(),
+        "side_a_ae_iface": faker.unique.nokia_lag_interface_name(),
         "side_a_ga_id": faker.imported_ga_id(),
         "side_a_ae_members": [
-            {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+            {"interface_name": faker.unique.nokia_physical_interface_name(), "interface_description": faker.sentence()}
             for _ in range(3)
         ],
         "side_b_node_id": str(router_subscription_factory().subscription_id),
-        "side_b_ae_iface": faker.nokia_lag_interface_name(),
+        "side_b_ae_iface": faker.unique.nokia_lag_interface_name(),
         "side_b_ga_id": faker.imported_ga_id(),
         "side_b_ae_members": [
-            {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+            {"interface_name": faker.unique.nokia_physical_interface_name(), "interface_description": faker.sentence()}
             for _ in range(3)
         ],
         "iptrunk_ipv4_network": faker.ipv4_network(max_subnet=31),
diff --git a/test/workflows/iptrunk/test_create_iptrunk.py b/test/workflows/iptrunk/test_create_iptrunk.py
index 9e3b297efd94b62581859a7a767d5adaae1dee5d..1341d20f9a5aab053de799f348bde95665ef9b6b 100644
--- a/test/workflows/iptrunk/test_create_iptrunk.py
+++ b/test/workflows/iptrunk/test_create_iptrunk.py
@@ -55,7 +55,7 @@ def input_form_wizard_data(request, router_subscription_factory, faker):
         router_side_b = str(router_subscription_factory(vendor=Vendor.JUNIPER).subscription_id)
         side_b_members = [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
@@ -63,7 +63,7 @@ def input_form_wizard_data(request, router_subscription_factory, faker):
     else:
         router_side_b = str(router_subscription_factory().subscription_id)
         side_b_members = [
-            {"interface_name": faker.nokia_physical_interface_name(), "interface_description": faker.sentence()}
+            {"interface_name": faker.unique.nokia_physical_interface_name(), "interface_description": faker.sentence()}
             for _ in range(2)
         ]
 
@@ -79,10 +79,10 @@ def input_form_wizard_data(request, router_subscription_factory, faker):
     create_ip_trunk_confirm_step = {"iptrunk_minimum_links": 1}
     create_ip_trunk_side_a_router_name = {"side_a_node_id": router_side_a}
     create_ip_trunk_side_a_step = {
-        "side_a_ae_iface": faker.nokia_lag_interface_name(),
+        "side_a_ae_iface": faker.unique.nokia_lag_interface_name(),
         "side_a_ae_members": [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
@@ -90,7 +90,7 @@ def input_form_wizard_data(request, router_subscription_factory, faker):
     }
     create_ip_trunk_side_b_router_name = {"side_b_node_id": router_side_b}
     create_ip_trunk_side_b_step = {
-        "side_b_ae_iface": faker.nokia_lag_interface_name(),
+        "side_b_ae_iface": faker.unique.nokia_lag_interface_name(),
         "side_b_ae_members": side_b_members,
     }
     summary_view_step = {}
diff --git a/test/workflows/iptrunk/test_migrate_iptrunk.py b/test/workflows/iptrunk/test_migrate_iptrunk.py
index 179788307a3b963858aedc80f7e7d27b2473b89d..05f2544f1330784d0690f30b37539f4d497489ae 100644
--- a/test/workflows/iptrunk/test_migrate_iptrunk.py
+++ b/test/workflows/iptrunk/test_migrate_iptrunk.py
@@ -31,14 +31,14 @@ def migrate_form_input(
     use_juniper = getattr(request, "param", UseJuniperSide.NONE)
     new_side_ae_members_nokia = [
         {
-            "interface_name": faker.nokia_physical_interface_name(),
+            "interface_name": faker.unique.nokia_physical_interface_name(),
             "interface_description": faker.sentence(),
         }
         for _ in range(2)
     ]
     new_side_ae_members_juniper = [
         {
-            "interface_name": faker.juniper_physical_interface_name(),
+            "interface_name": faker.unique.juniper_physical_interface_name(),
             "interface_description": faker.sentence(),
         }
         for _ in range(2)
@@ -50,7 +50,7 @@ def migrate_form_input(
         new_router = str(router_subscription_factory(vendor=Vendor.JUNIPER, router_access_via_ts=False).subscription_id)
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.owner_subscription_id)
         new_side_ae_members = new_side_ae_members_juniper
-        lag_name = faker.juniper_ae_interface_name()
+        lag_name = faker.unique.juniper_ae_interface_name()
     elif use_juniper == UseJuniperSide.SIDE_B:
         # Juniper -> Nokia
         old_side_a_node = router_subscription_factory(vendor=Vendor.JUNIPER)
@@ -81,7 +81,7 @@ def migrate_form_input(
         new_router = str(router_subscription_factory().subscription_id)
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
         new_side_ae_members = new_side_ae_members_nokia
-        lag_name = faker.nokia_lag_interface_name()
+        lag_name = faker.unique.nokia_lag_interface_name()
 
     return [
         {"subscription_id": str(old_subscription.subscription_id)},
diff --git a/test/workflows/iptrunk/test_modify_trunk_interface.py b/test/workflows/iptrunk/test_modify_trunk_interface.py
index b6117a8c3161a2c19d822f1b5acf026b265b36db..7edfe553261282248aa13aa39ee6f53fbbb19051 100644
--- a/test/workflows/iptrunk/test_modify_trunk_interface.py
+++ b/test/workflows/iptrunk/test_modify_trunk_interface.py
@@ -32,14 +32,14 @@ def input_form_iptrunk_data(
         side_b_node = iptrunk_side_subscription_factory()
         new_side_a_ae_members = [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
         ]
         new_side_b_ae_members = [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
@@ -50,14 +50,14 @@ def input_form_iptrunk_data(
         side_b_node = iptrunk_side_subscription_factory(iptrunk_side_node=side_node)
         new_side_a_ae_members = [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
         ]
         new_side_b_ae_members = [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
@@ -69,14 +69,14 @@ def input_form_iptrunk_data(
         side_b_node = iptrunk_side_subscription_factory(iptrunk_side_node=side_node_2)
         new_side_a_ae_members = [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
         ]
         new_side_b_ae_members = [
             {
-                "interface_name": faker.juniper_physical_interface_name(),
+                "interface_name": faker.unique.juniper_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
@@ -86,14 +86,14 @@ def input_form_iptrunk_data(
         side_b_node = iptrunk_side_subscription_factory()
         new_side_a_ae_members = [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
         ]
         new_side_b_ae_members = [
             {
-                "interface_name": faker.nokia_physical_interface_name(),
+                "interface_name": faker.unique.nokia_physical_interface_name(),
                 "interface_description": faker.sentence(),
             }
             for _ in range(2)
diff --git a/test/workflows/l3_core_service/test_create_imported_l3_core_service.py b/test/workflows/l3_core_service/test_create_imported_l3_core_service.py
index 21a00b73bbd76383a94b4e3985f8e6f06388544c..b6984fb90fb4af6b510da1c39a8cde3203857e7f 100644
--- a/test/workflows/l3_core_service/test_create_imported_l3_core_service.py
+++ b/test/workflows/l3_core_service/test_create_imported_l3_core_service.py
@@ -72,6 +72,9 @@ def test_create_imported_l3_core_service_success(faker, partner_factory, edge_po
     }
     if product_name == ProductName.IAS:
         creation_form_input_data["ias_flavor"] = IASFlavor.IASGWS
+    elif product_name in {ProductName.R_AND_E_PEER, ProductName.R_AND_E_LHCONE}:
+        creation_form_input_data["v6_bgp_local_preference"] = 9999
+        creation_form_input_data["v6_bgp_med"] = 8888
     result, _, _ = run_workflow(f"{L3_CREAT_IMPORTED_WF_MAP[product_name]}", creation_form_input_data)
     state = extract_state(result)
     subscription = SubscriptionModel.from_subscription(state["subscription_id"])
@@ -81,3 +84,13 @@ def test_create_imported_l3_core_service_success(faker, partner_factory, edge_po
 
     if product_name == ProductName.IAS:
         assert subscription.ias.ias_flavor == IASFlavor.IASGWS
+    elif product_name == ProductName.R_AND_E_PEER:
+        assert subscription.r_and_e_peer.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_peer.v4_bgp_med == "igp"
+        assert subscription.r_and_e_peer.v6_bgp_local_preference == 9999
+        assert subscription.r_and_e_peer.v6_bgp_med == "8888"
+    elif product_name == ProductName.R_AND_E_LHCONE:
+        assert subscription.r_and_e_lhcone.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_lhcone.v4_bgp_med == "igp"
+        assert subscription.r_and_e_lhcone.v6_bgp_local_preference == 9999
+        assert subscription.r_and_e_lhcone.v6_bgp_med == "8888"
diff --git a/test/workflows/l3_core_service/test_create_l3_core_service.py b/test/workflows/l3_core_service/test_create_l3_core_service.py
index a8ee5bc6a27b090acfaf936010999fc8ab0be7fe..6edf078fe4bffb344399ecf5d87cc200cfc2a82f 100644
--- a/test/workflows/l3_core_service/test_create_l3_core_service.py
+++ b/test/workflows/l3_core_service/test_create_l3_core_service.py
@@ -55,7 +55,7 @@ def test_create_l3_core_service_success(
 
     form_input_data = [
         {"product": product_id},
-        {"tt_number": faker.tt_number(), "partner": partner["partner_id"]},
+        {"tt_number": faker.tt_number(), "partner": partner["partner_id"], "edge_port_partner": partner["partner_id"]},
         {"edge_port": {"edge_port": edge_port_a, "ap_type": APType.PRIMARY, "custom_service_name": faker.sentence()}},
         {
             "is_tagged": faker.boolean(),
@@ -89,6 +89,12 @@ def test_create_l3_core_service_success(
             "ias_flavor": IASFlavor.IASGWS,
         }
         form_input_data.append(extra_ias_data)
+    elif product_name in {ProductName.R_AND_E_PEER, ProductName.R_AND_E_LHCONE}:
+        extra_r_and_e_data = {
+            "v6_bgp_local_preference": 5555,
+            "v6_bgp_med": "min-igp",
+        }
+        form_input_data.append(extra_r_and_e_data)
 
     lso_interaction_count = 7
 
@@ -117,3 +123,13 @@
 
     if product_name == ProductName.IAS:
         assert subscription.ias.ias_flavor == IASFlavor.IASGWS
+    elif product_name == ProductName.R_AND_E_PEER:
+        assert subscription.r_and_e_peer.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_peer.v4_bgp_med == "igp"
+        assert subscription.r_and_e_peer.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_peer.v6_bgp_med == "min-igp"
+    elif product_name == ProductName.R_AND_E_LHCONE:
+        assert subscription.r_and_e_lhcone.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_lhcone.v4_bgp_med == "igp"
+        assert subscription.r_and_e_lhcone.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_lhcone.v6_bgp_med == "min-igp"
diff --git a/test/workflows/l3_core_service/test_migrate_l3_core_service.py b/test/workflows/l3_core_service/test_migrate_l3_core_service.py
index eb1b292a90b769ffd091e1d677a91df16518803f..f9960a053f988d455898164fa0f195927719417d 100644
--- a/test/workflows/l3_core_service/test_migrate_l3_core_service.py
+++ b/test/workflows/l3_core_service/test_migrate_l3_core_service.py
@@ -35,6 +35,7 @@ def test_migrate_l3_core_service_success(
     subscription = SubscriptionModel.from_subscription(subscription_id)
     form_input_data = [
         {"subscription_id": subscription_id},
+        {"edge_port_partner": partner["partner_id"]},
         {
             "tt_number": faker.tt_number(),
             "source_edge_port": subscription.l3_core.ap_list[0].sbp.edge_port.owner_subscription_id,
@@ -86,6 +87,7 @@ def test_migrate_l3_core_service_scoped_emission(
 
     form_input_data = [
         {"subscription_id": str(subscription.subscription_id)},
+        {"edge_port_partner": partner["partner_id"]},
         {
             "tt_number": faker.tt_number(),
             "source_edge_port": source_edge_port,
diff --git a/test/workflows/l3_core_service/test_modify_l3_core_service.py b/test/workflows/l3_core_service/test_modify_l3_core_service.py
index 93f8680b68dc20273030535499364f8a5a0e121f..e9a7df9d36f5888a9481b34aff3e3e30e606f1fc 100644
--- a/test/workflows/l3_core_service/test_modify_l3_core_service.py
+++ b/test/workflows/l3_core_service/test_modify_l3_core_service.py
@@ -26,6 +26,12 @@ def test_modify_l3_core_service_remove_edge_port_success(faker, l3_core_service_
             "ias_flavor": IASFlavor.IASGWS,
         }
         input_form_data.append(extra_ias_data)
+    elif product_name in {ProductName.R_AND_E_PEER, ProductName.R_AND_E_LHCONE}:
+        extra_r_and_e_data = {
+            "v6_bgp_local_preference": 5555,
+            "v6_bgp_med": "min-igp",
+        }
+        input_form_data.append(extra_r_and_e_data)
 
     result, _, _ = run_workflow(L3_MODIFICATION_WF_MAP[product_name], input_form_data)
 
@@ -36,6 +42,16 @@ def test_modify_l3_core_service_remove_edge_port_success(faker, l3_core_service_
     assert ap_list[0].ap_type == APType.BACKUP
     if product_name == ProductName.IAS:
         assert subscription.ias.ias_flavor == IASFlavor.IASGWS
+    elif product_name == ProductName.R_AND_E_PEER:
+        assert subscription.r_and_e_peer.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_peer.v4_bgp_med == "igp"
+        assert subscription.r_and_e_peer.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_peer.v6_bgp_med == "min-igp"
+    elif product_name == ProductName.R_AND_E_LHCONE:
+        assert subscription.r_and_e_lhcone.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_lhcone.v4_bgp_med == "igp"
+        assert subscription.r_and_e_lhcone.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_lhcone.v6_bgp_med == "min-igp"
 
 
 @pytest.mark.parametrize("product_name", L3_PRODUCT_NAMES)
@@ -53,6 +69,7 @@ def test_modify_l3_core_service_add_new_edge_port_success(
     input_form_data = [
         {"subscription_id": str(subscription.subscription_id)},
         {"tt_number": faker.tt_number(), "operation": Operation.ADD},
+        {"edge_port_partner": partner["partner_id"]},
         {  # Adding configuration for the new SBP
             "edge_port": str(new_edge_port),
             "ap_type": APType.BACKUP,
@@ -86,6 +103,12 @@ def test_modify_l3_core_service_add_new_edge_port_success(
             "ias_flavor": IASFlavor.IASGWS,
         }
         input_form_data.append(extra_ias_data)
+    elif product_name in {ProductName.R_AND_E_PEER, ProductName.R_AND_E_LHCONE}:
+        extra_r_and_e_data = {
+            "v6_bgp_local_preference": 5555,
+            "v6_bgp_med": "123123",
+        }
+        input_form_data.append(extra_r_and_e_data)
 
     result, _, _ = run_workflow(L3_MODIFICATION_WF_MAP[product_name], input_form_data)
 
@@ -94,15 +117,25 @@ def test_modify_l3_core_service_add_new_edge_port_success(
     ap_list = subscription.l3_core.ap_list
     new_ap = ap_list[-1]
     assert new_ap.ap_type == APType.BACKUP
-    assert new_ap.sbp.gs_id == input_form_data[2]["gs_id"]
-    assert new_ap.sbp.vlan_id == input_form_data[2]["vlan_id"]
-    assert str(new_ap.sbp.ipv4_address) == input_form_data[2]["ipv4_address"]
-    assert new_ap.sbp.ipv4_mask == input_form_data[2]["ipv4_mask"]
-    assert str(new_ap.sbp.ipv6_address) == input_form_data[2]["ipv6_address"]
-    assert new_ap.sbp.ipv6_mask == input_form_data[2]["ipv6_mask"]
+    assert new_ap.sbp.gs_id == input_form_data[3]["gs_id"]
+    assert new_ap.sbp.vlan_id == input_form_data[3]["vlan_id"]
+    assert str(new_ap.sbp.ipv4_address) == input_form_data[3]["ipv4_address"]
+    assert new_ap.sbp.ipv4_mask == input_form_data[3]["ipv4_mask"]
+    assert str(new_ap.sbp.ipv6_address) == input_form_data[3]["ipv6_address"]
+    assert new_ap.sbp.ipv6_mask == input_form_data[3]["ipv6_mask"]
     assert len(ap_list) == 3
     if product_name == ProductName.IAS:
         assert subscription.ias.ias_flavor == IASFlavor.IASGWS
+    elif product_name == ProductName.R_AND_E_PEER:
+        assert subscription.r_and_e_peer.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_peer.v4_bgp_med == "igp"
+        assert subscription.r_and_e_peer.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_peer.v6_bgp_med == "123123"
+    elif product_name == ProductName.R_AND_E_LHCONE:
+        assert subscription.r_and_e_lhcone.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_lhcone.v4_bgp_med == "igp"
+        assert subscription.r_and_e_lhcone.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_lhcone.v6_bgp_med == "123123"
 
 
 @pytest.fixture()
@@ -152,7 +185,7 @@ def sbp_input_form_data(faker):
 
 @pytest.mark.parametrize("product_name", L3_PRODUCT_NAMES)
 @pytest.mark.workflow()
-def test_modify_l3_core_service_modify_edge_port_success(
+def test_modify_l3_core_service_modify_edge_port_success(  # noqa: PLR0915
     faker, l3_core_service_subscription_factory, product_name, sbp_input_form_data
 ):
     subscription = l3_core_service_subscription_factory(product_name=product_name)
@@ -170,6 +203,12 @@ def test_modify_l3_core_service_modify_edge_port_success(
             "ias_flavor": IASFlavor.IASGWS,
         }
         input_form_data.append(extra_ias_data)
+    elif product_name in {ProductName.R_AND_E_PEER, ProductName.R_AND_E_LHCONE}:
+        extra_r_and_e_data = {
+            "v6_bgp_local_preference": 5555,
+            "v6_bgp_med": "min-igp",
+        }
+        input_form_data.append(extra_r_and_e_data)
 
     result, _, _ = run_workflow(L3_MODIFICATION_WF_MAP[product_name], input_form_data)
 
@@ -222,3 +261,13 @@ def test_modify_l3_core_service_modify_edge_port_success(
 
     if product_name == ProductName.IAS:
         assert subscription.ias.ias_flavor == IASFlavor.IASGWS
+    elif product_name == ProductName.R_AND_E_PEER:
+        assert subscription.r_and_e_peer.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_peer.v4_bgp_med == "igp"
+        assert subscription.r_and_e_peer.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_peer.v6_bgp_med == "min-igp"
+    elif product_name == ProductName.R_AND_E_LHCONE:
+        assert subscription.r_and_e_lhcone.v4_bgp_local_preference == 100
+        assert subscription.r_and_e_lhcone.v4_bgp_med == "igp"
+        assert subscription.r_and_e_lhcone.v6_bgp_local_preference == 5555
+        assert subscription.r_and_e_lhcone.v6_bgp_med == "min-igp"
diff --git a/test/workflows/l3_core_service/test_redeploy_l3_core_service.py b/test/workflows/l3_core_service/test_redeploy_l3_core_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..46b7c52b53b6568379aeb8d9737ba25689a84dbc
--- /dev/null
+++ b/test/workflows/l3_core_service/test_redeploy_l3_core_service.py
@@ -0,0 +1,66 @@
+from copy import deepcopy
+
+import pytest
+from orchestrator.domain import SubscriptionModel
+
+from gso.workflows.l3_core_service.shared import L3_PRODUCT_NAMES
+from test.workflows import assert_complete, assert_lso_interaction_success, extract_state, run_workflow
+
+
+@pytest.mark.parametrize("product_name", L3_PRODUCT_NAMES)
+@pytest.mark.workflow()
+def test_redeploy_l3_core_service_success(faker, l3_core_service_subscription_factory, product_name):
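+    """Redeploy an L3 core service and verify that the subscription is left unchanged."""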
+    subscription = l3_core_service_subscription_factory(product_name=product_name)
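+    # Keep a deep copy of the subscription so the state after redeployment can be compared field by field.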
+    old_subscription: SubscriptionModel = deepcopy(subscription)
+    access_port = subscription.l3_core.ap_list[0]
+    input_form_data = [
+        {"subscription_id": str(subscription.subscription_id)},
+        {"tt_number": faker.tt_number(), "access_port": str(access_port.subscription_instance_id)},
+    ]
+
+    result, process_stat, step_log = run_workflow("redeploy_l3_core_service", input_form_data)
+
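+    # The redeploy workflow triggers four LSO interactions, each of which must succeed.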
+    for _ in range(4):
+        result, step_log = assert_lso_interaction_success(result, process_stat, step_log)
+
+    assert_complete(result)
+    state = extract_state(result)
+    subscription = SubscriptionModel.from_subscription(state["subscription_id"])
+    ap_list = subscription.l3_core.ap_list
+    old_ap_list = old_subscription.l3_core.ap_list
+
+    # Assertions that ensure the subscription is unchanged
+    for old_access_port, access_port in zip(old_ap_list, ap_list, strict=True):
+        assert access_port.sbp.gs_id == old_access_port.sbp.gs_id
+        assert access_port.sbp.is_tagged == old_access_port.sbp.is_tagged
+        assert access_port.sbp.vlan_id == old_access_port.sbp.vlan_id
+        assert str(access_port.sbp.ipv4_address) == str(old_access_port.sbp.ipv4_address)
+        assert access_port.sbp.ipv4_mask == old_access_port.sbp.ipv4_mask
+        assert str(access_port.sbp.ipv6_address) == str(old_access_port.sbp.ipv6_address)
+        assert access_port.sbp.ipv6_mask == old_access_port.sbp.ipv6_mask
+        assert access_port.sbp.custom_firewall_filters == old_access_port.sbp.custom_firewall_filters
+
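+        # Both BGP sessions on the SBP must be untouched by the redeploy.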
+        for i in range(2):
+            new_session = access_port.sbp.bgp_session_list[i]
+            old_session = old_access_port.sbp.bgp_session_list[i]
+            assert new_session.bfd_enabled == old_session.bfd_enabled
+            assert new_session.has_custom_policies == old_session.has_custom_policies
+            assert new_session.authentication_key == old_session.authentication_key
+            assert new_session.multipath_enabled == old_session.multipath_enabled
+            assert new_session.send_default_route == old_session.send_default_route
+            assert new_session.is_passive == old_session.is_passive
+
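+        # BFD settings for both address families must also be unchanged.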
+        assert access_port.sbp.v4_bfd_settings.bfd_enabled == old_access_port.sbp.v4_bfd_settings.bfd_enabled
+        assert access_port.sbp.v4_bfd_settings.bfd_interval_rx == old_access_port.sbp.v4_bfd_settings.bfd_interval_rx
+        assert access_port.sbp.v4_bfd_settings.bfd_interval_tx == old_access_port.sbp.v4_bfd_settings.bfd_interval_tx
+        assert access_port.sbp.v4_bfd_settings.bfd_multiplier == old_access_port.sbp.v4_bfd_settings.bfd_multiplier
+        assert access_port.sbp.v6_bfd_settings.bfd_enabled == old_access_port.sbp.v6_bfd_settings.bfd_enabled
+        assert access_port.sbp.v6_bfd_settings.bfd_interval_rx == old_access_port.sbp.v6_bfd_settings.bfd_interval_rx
+        assert access_port.sbp.v6_bfd_settings.bfd_interval_tx == old_access_port.sbp.v6_bfd_settings.bfd_interval_tx
+        assert access_port.sbp.v6_bfd_settings.bfd_multiplier == old_access_port.sbp.v6_bfd_settings.bfd_multiplier
diff --git a/test/workflows/lan_switch_interconnect/test_create_imported_lan_switch_interconnect.py b/test/workflows/lan_switch_interconnect/test_create_imported_lan_switch_interconnect.py
index bce3d1fa01b7bbd3a431e0ed160c2dcf8c60c0de..3b93dc456e42b5a5b9a339028da17a0108048d84 100644
--- a/test/workflows/lan_switch_interconnect/test_create_imported_lan_switch_interconnect.py
+++ b/test/workflows/lan_switch_interconnect/test_create_imported_lan_switch_interconnect.py
@@ -18,20 +18,20 @@ def workflow_input_data(faker, router_subscription_factory, switch_subscription_
         "minimum_links": 1,
         "router_side": {
             "node": str(router_subscription_factory().subscription_id),
-            "ae_iface": faker.nokia_lag_interface_name(),
+            "ae_iface": faker.unique.nokia_lag_interface_name(),
             "ae_members": [
                 {
-                    "interface_name": faker.nokia_physical_interface_name(),
+                    "interface_name": faker.unique.nokia_physical_interface_name(),
                     "interface_description": faker.sentence(),
                 }
             ],
         },
         "switch_side": {
             "switch": str(switch_subscription_factory().subscription_id),
-            "ae_iface": faker.juniper_ae_interface_name(),
+            "ae_iface": faker.unique.juniper_ae_interface_name(),
             "ae_members": [
                 {
-                    "interface_name": faker.juniper_physical_interface_name(),
+                    "interface_name": faker.unique.juniper_physical_interface_name(),
                     "interface_description": faker.sentence(),
                 }
             ],
diff --git a/test/workflows/lan_switch_interconnect/test_create_lan_switch_interconnect.py b/test/workflows/lan_switch_interconnect/test_create_lan_switch_interconnect.py
index f5e423c16a7b8428627518d42f3e1d76c2e4e1f1..614a12c970196878df4fb01476b55e0667d748a8 100644
--- a/test/workflows/lan_switch_interconnect/test_create_lan_switch_interconnect.py
+++ b/test/workflows/lan_switch_interconnect/test_create_lan_switch_interconnect.py
@@ -47,20 +47,20 @@ def input_form_data(faker, router_subscription_factory, switch_subscription_fact
                 "minimum_link_count": 2,
             },
             {
-                "router_side_iface": faker.nokia_lag_interface_name(),
+                "router_side_iface": faker.unique.nokia_lag_interface_name(),
                 "router_side_ae_members": [
                     {
-                        "interface_name": faker.nokia_physical_interface_name(),
+                        "interface_name": faker.unique.nokia_physical_interface_name(),
                         "interface_description": faker.sentence(),
                     }
                     for _ in range(2)
                 ],
             },
             {
-                "switch_side_iface": faker.juniper_ae_interface_name(),
+                "switch_side_iface": faker.unique.juniper_ae_interface_name(),
                 "switch_side_ae_members": [
                     {
-                        "interface_name": faker.juniper_physical_interface_name(),
+                        "interface_name": faker.unique.juniper_physical_interface_name(),
                         "interface_description": faker.sentence(),
                     }
                     for _ in range(2)
diff --git a/test/workflows/router/test_terminate_router.py b/test/workflows/router/test_terminate_router.py
index a7303181ecbf3ab50106ed9627f539e96b1add6f..2aeaca372b0a97042dc8e9bd9d858bcba5095790 100644
--- a/test/workflows/router/test_terminate_router.py
+++ b/test/workflows/router/test_terminate_router.py
@@ -37,6 +37,7 @@ def test_terminate_pe_router_full_success(
         "remove_configuration": remove_configuration,
         "update_ibgp_mesh": update_ibgp_mesh,
         "update_sdp_mesh": update_sdp_mesh,
+        "remove_loopback_from_ipam": True,
     }
     lso_interaction_count = 0
     if remove_configuration:
@@ -89,6 +90,7 @@ def test_terminate_p_router_full_success(
         "tt_number": faker.tt_number(),
         "remove_configuration": remove_configuration,
         "update_ibgp_mesh": update_ibgp_mesh,
+        "remove_loopback_from_ipam": True,
     }
     lso_interaction_count = 0
     if remove_configuration:
diff --git a/test/workflows/tasks/test_task_validate_products.py b/test/workflows/tasks/test_task_validate_products.py
index 66853d257d838b423cd7595e2a8ee2b015d94ecf..6c7398b920d392fb86b688d105efd49b01006ae1 100644
--- a/test/workflows/tasks/test_task_validate_products.py
+++ b/test/workflows/tasks/test_task_validate_products.py
@@ -4,14 +4,15 @@ from test.workflows import assert_complete, extract_state, run_workflow
 
 
 @pytest.mark.workflow()
-def test_task_validate_geant_products():
-    result, _, _ = run_workflow("task_validate_geant_products", [{}])
+def test_task_validate_products():
+    result, _, _ = run_workflow("task_validate_products", [{}])
     assert_complete(result)
     state = extract_state(result)
 
     assert state["check_all_workflows_are_in_db"]
     assert state["check_workflows_for_matching_targets_and_descriptions"]
-    # assert state["check_that_products_have_at_least_one_workflow"]  FIXME: Uncomment when the task is reverted again
+    assert state["check_that_products_have_at_least_one_workflow"]
+    assert state["check_that_active_products_have_a_modify_note"]
     assert state["check_db_fixed_input_config"]
     assert state["check_that_products_have_create_modify_and_terminate_workflows"]
     assert state["check_subscription_models"]