diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 61d37c99aa91a89f8c195261fd647924c051007e..6cc76019e6555e40b0c8a4aca3db42defbad859c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,7 +12,7 @@ run-tox-pipeline:
   stage: tox
   tags:
     - docker-executor
-  image: python:3.11
+  image: python:3.12
 
   services:
     - postgres:15.4
diff --git a/Changelog.md b/Changelog.md
index a696905e59c451aed5443afc340c8208a8071340..3169eaec23dacf10c94272ce43829400e6cb2324 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -2,6 +2,16 @@
 
 All notable changes to this project will be documented in this file.
 
+## [2.0] - 2024-05-23
+- Upgraded `orchestrator-core` to version 2!
+- Added Opengear products.
+- Added separate imported product types for importing existing subscriptions.
+- Added workflows that migrate imported products to their full counterparts.
+- Removed the API endpoints for importing products; importing is now CLI-only (see the sketch below).
+- Added a SharePoint service that can create new checklist items.
+    - A new checklist is created in the Router creation workflow.
+    - A new checklist is created when creating an IP trunk.
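+
+A minimal sketch of the CLI-only import flow (the sub-command names follow the `@app.command()` functions in
+`gso/cli/imports.py`; how the importer Typer app is mounted on the main CLI and the exact file-path option syntax
+are assumptions and may differ per deployment):
+
+```bash
+# Hypothetical invocation: create imported Site subscriptions from a JSON/YAML file,
+# then migrate them to their full counterparts via the import_site workflow.
+python main.py import-sites --filepath /path/to/sites.json
+```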
+
 ## [1.5] - 2024-04-22
 - Added deploy TWAMP functionality.
 - Made some changes on IP Trunk creation/modification workflow including:
diff --git a/Dockerfile b/Dockerfile
index 5be8cb5440ee78244ae72f0d8df2073660e576f6..b2802dd07a91be30f25831fc71f97361e2ac46d1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11-alpine
+FROM python:3.12-alpine
 WORKDIR /app
 
 ARG ARTIFACT_VERSION
diff --git a/docs/source/glossary.rst b/docs/source/glossary.rst
index 4dc66b8eac5e2e408f220c93b8c4be80cf8e3301..5a090e7e00d1662129843c2637af81bcd54d3368 100644
--- a/docs/source/glossary.rst
+++ b/docs/source/glossary.rst
@@ -57,15 +57,24 @@ Glossary of terms
   LAG
     Link Aggregation: a bundle of multiple network connections.
 
+  LAN
+    Local Area Network
+
   LSO
     Lightweight Service Orchestrator
 
   NET
     Network Entity Title: used for :term:`ISIS` routing.
 
+  OIDC
+    OpenID Connect
+
   OOB
     Out-of-band access
 
+  OPA
+    Open Policy Agent
+
   OSS
     Operational Support Systems
 
@@ -79,8 +88,8 @@ Glossary of terms
   UUID
     Universally Unique Identifier
 
+  VLAN
+    Virtual LAN
+
   WFO
     `Workflow Orchestrator <https://workfloworchestrator.org/>`_
-
-  LAN
-    Local Area Network
diff --git a/docs/source/module/api/v1/index.rst b/docs/source/module/api/v1/index.rst
index a40080e2019e321c1f4427fd41a0c58c564b5ba2..265f493adbd6250449e16e8be08f129d62a6741e 100644
--- a/docs/source/module/api/v1/index.rst
+++ b/docs/source/module/api/v1/index.rst
@@ -12,7 +12,6 @@ Submodules
    :maxdepth: 2
    :titlesonly:
 
-   imports
    subscriptions
    processes
-   networks
+   network
diff --git a/docs/source/module/api/v1/imports.rst b/docs/source/module/api/v1/network.rst
similarity index 50%
rename from docs/source/module/api/v1/imports.rst
rename to docs/source/module/api/v1/network.rst
index b40e8edaed706c8656407fa1fc8d1b3d9bc060be..a37f21d3055339f356bdec88964f992cecc83963 100644
--- a/docs/source/module/api/v1/imports.rst
+++ b/docs/source/module/api/v1/network.rst
@@ -1,6 +1,6 @@
-``gso.api.v1.imports``
+``gso.api.v1.network``
 ======================
 
-.. automodule:: gso.api.v1.imports
+.. automodule:: gso.api.v1.network
    :members:
    :show-inheritance:
diff --git a/docs/source/module/api/v1/networks.rst b/docs/source/module/api/v1/networks.rst
deleted file mode 100644
index e85dda9ead0424f293134cfba61a0429887c88bd..0000000000000000000000000000000000000000
--- a/docs/source/module/api/v1/networks.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.api.v1.subscriptions``
-============================
-
-.. automodule:: gso.api.v1.networks
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/api/v1/processes.rst b/docs/source/module/api/v1/processes.rst
index 82aa87628771ec27ee360bf8c2ac4a79eef248ed..436d9887a5f7e126f9d78c3e6fc09fa374528cf7 100644
--- a/docs/source/module/api/v1/processes.rst
+++ b/docs/source/module/api/v1/processes.rst
@@ -1,5 +1,5 @@
 ``gso.api.v1.processes``
-============================
+========================
 
 .. automodule:: gso.api.v1.processes
    :members:
diff --git a/docs/source/module/products/product_blocks/index.rst b/docs/source/module/products/product_blocks/index.rst
index d106bb2ca83b6c2ffca86694249113bd310c0560..da2852b9e7b85abfc06c2ca1906b71663d538d45 100644
--- a/docs/source/module/products/product_blocks/index.rst
+++ b/docs/source/module/products/product_blocks/index.rst
@@ -19,3 +19,8 @@ Submodules
    iptrunk
    router
    site
+   switch
+   lan_switch_interconnect
+   pop_vlan
+   opengear
+
diff --git a/docs/source/module/products/product_blocks/lan_switch_interconnect.rst b/docs/source/module/products/product_blocks/lan_switch_interconnect.rst
new file mode 100644
index 0000000000000000000000000000000000000000..2b9805da2592c30f5c2a750a6ee456b1d124930d
--- /dev/null
+++ b/docs/source/module/products/product_blocks/lan_switch_interconnect.rst
@@ -0,0 +1,6 @@
+``gso.products.product_blocks.lan_switch_interconnect``
+=======================================================
+
+.. automodule:: gso.products.product_blocks.lan_switch_interconnect
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_blocks/opengear.rst b/docs/source/module/products/product_blocks/opengear.rst
new file mode 100644
index 0000000000000000000000000000000000000000..14f5c3d38c557bde994d11d40d55a1e5fa9adcd0
--- /dev/null
+++ b/docs/source/module/products/product_blocks/opengear.rst
@@ -0,0 +1,6 @@
+``gso.products.product_blocks.opengear``
+========================================
+
+.. automodule:: gso.products.product_blocks.opengear
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_blocks/pop_vlan.rst b/docs/source/module/products/product_blocks/pop_vlan.rst
new file mode 100644
index 0000000000000000000000000000000000000000..de5f3a833406c3733109f0b9c9606b6f5eb63b73
--- /dev/null
+++ b/docs/source/module/products/product_blocks/pop_vlan.rst
@@ -0,0 +1,6 @@
+``gso.products.product_blocks.pop_vlan``
+========================================
+
+.. automodule:: gso.products.product_blocks.pop_vlan
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_blocks/switch.rst b/docs/source/module/products/product_blocks/switch.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a2041d513a7ed2c2d9579bad6de5bbe1ce39faa7
--- /dev/null
+++ b/docs/source/module/products/product_blocks/switch.rst
@@ -0,0 +1,6 @@
+``gso.products.product_blocks.switch``
+======================================
+
+.. automodule:: gso.products.product_blocks.switch
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_types/index.rst b/docs/source/module/products/product_types/index.rst
index de30f01e26c15cb6f279a2720c1d90d1c21f4f15..06c706298b1badf546a36c7c84cfb47966c96ce2 100644
--- a/docs/source/module/products/product_types/index.rst
+++ b/docs/source/module/products/product_types/index.rst
@@ -19,3 +19,7 @@ Submodules
    iptrunk
    router
    site
+   switch
+   lan_switch_interconnect
+   pop_vlan
+   opengear
diff --git a/docs/source/module/products/product_types/lan_switch_interconnect.rst b/docs/source/module/products/product_types/lan_switch_interconnect.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c740da278c33c5d381dbbd70f8ab05916b8df3ca
--- /dev/null
+++ b/docs/source/module/products/product_types/lan_switch_interconnect.rst
@@ -0,0 +1,6 @@
+``gso.products.product_types.lan_switch_interconnect``
+=======================================================
+
+.. automodule:: gso.products.product_types.lan_switch_interconnect
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_types/opengear.rst b/docs/source/module/products/product_types/opengear.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9bb65249993947a538cc5d623a1afd012e9d8a39
--- /dev/null
+++ b/docs/source/module/products/product_types/opengear.rst
@@ -0,0 +1,6 @@
+``gso.products.product_types.opengear``
+=======================================
+
+.. automodule:: gso.products.product_types.opengear
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_types/pop_vlan.rst b/docs/source/module/products/product_types/pop_vlan.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0549d5bd76b503e245cff208c97ca274e42719d7
--- /dev/null
+++ b/docs/source/module/products/product_types/pop_vlan.rst
@@ -0,0 +1,6 @@
+``gso.products.product_types.pop_vlan``
+=======================================
+
+.. automodule:: gso.products.product_types.pop_vlan
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/products/product_types/switch.rst b/docs/source/module/products/product_types/switch.rst
new file mode 100644
index 0000000000000000000000000000000000000000..530222ba2ec87772b834c7d682dd0ef391d41f52
--- /dev/null
+++ b/docs/source/module/products/product_types/switch.rst
@@ -0,0 +1,6 @@
+``gso.products.product_types.switch``
+=====================================
+
+.. automodule:: gso.products.product_types.switch
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/services/crm.rst b/docs/source/module/services/crm.rst
deleted file mode 100644
index cee4e5018343626463809b6a9cb7a4bb499937ff..0000000000000000000000000000000000000000
--- a/docs/source/module/services/crm.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.services.crm``
-====================
-
-.. automodule:: gso.services.crm
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/services/index.rst b/docs/source/module/services/index.rst
index 7b1a1d5540b8c52e0f64c466184864339b36723a..d93ddc5a42b90420c033482720600ba21530568c 100644
--- a/docs/source/module/services/index.rst
+++ b/docs/source/module/services/index.rst
@@ -12,9 +12,10 @@ Submodules
    :maxdepth: 2
    :titlesonly:
 
-   crm
    infoblox
    librenms_client
    lso_client
+   mailer
    netbox_client
+   partners
    subscriptions
diff --git a/docs/source/module/services/mailer.rst b/docs/source/module/services/mailer.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f54148cfec985ad723d9319a5558f7641563883a
--- /dev/null
+++ b/docs/source/module/services/mailer.rst
@@ -0,0 +1,6 @@
+``gso.services.mailer``
+=======================
+
+.. automodule:: gso.services.mailer
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/services/partners.rst b/docs/source/module/services/partners.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a828b5a3142af6439a3f63b35590310a916aa204
--- /dev/null
+++ b/docs/source/module/services/partners.rst
@@ -0,0 +1,6 @@
+``gso.services.partners``
+=========================
+
+.. automodule:: gso.services.partners
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/utils/index.rst b/docs/source/module/utils/index.rst
index 0bb43e3e0af577d3d15f9ec1cc6e189a19928eb4..52f992ba035f17541d1e5314844ee59490a883bf 100644
--- a/docs/source/module/utils/index.rst
+++ b/docs/source/module/utils/index.rst
@@ -12,8 +12,8 @@ Submodules
    :maxdepth: 2
    :titlesonly:
 
-   shared_choices
    device_info
    exceptions
    helpers
+   shared_enums
    workflow_steps
diff --git a/docs/source/module/utils/shared_choices.rst b/docs/source/module/utils/shared_choices.rst
deleted file mode 100644
index 46460a304905ef81cdd14481cb316d242adc62bf..0000000000000000000000000000000000000000
--- a/docs/source/module/utils/shared_choices.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.utils.shared_choices``
-============================
-
-.. automodule:: gso.utils.shared_choices
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/utils/shared_enums.rst b/docs/source/module/utils/shared_enums.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5fe261d499b076c1ba85418caea676f10c5321da
--- /dev/null
+++ b/docs/source/module/utils/shared_enums.rst
@@ -0,0 +1,6 @@
+``gso.utils.shared_enums``
+==========================
+
+.. automodule:: gso.utils.shared_enums
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/index.rst b/docs/source/module/workflows/index.rst
index 97204c39c36965957ae9c741e9297ecf7e11aecf..9ef57ae8414ab3e0d46154237f5aadc7b0817ede 100644
--- a/docs/source/module/workflows/index.rst
+++ b/docs/source/module/workflows/index.rst
@@ -13,6 +13,7 @@ Subpackages
    :titlesonly:
 
    iptrunk/index
+   office_router/index
    router/index
    site/index
-   tasks/index
+   super_pop_switch/index
diff --git a/docs/source/module/workflows/iptrunk/create_imported_iptrunk.rst b/docs/source/module/workflows/iptrunk/create_imported_iptrunk.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1e0d42c73a3757c4e3e82cdc7b60686638a4af09
--- /dev/null
+++ b/docs/source/module/workflows/iptrunk/create_imported_iptrunk.rst
@@ -0,0 +1,6 @@
+``gso.workflows.iptrunk.create_imported_iptrunk``
+=================================================
+
+.. automodule:: gso.workflows.iptrunk.create_imported_iptrunk
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/iptrunk/import_iptrunk.rst b/docs/source/module/workflows/iptrunk/import_iptrunk.rst
new file mode 100644
index 0000000000000000000000000000000000000000..4d5379f9c8782b62feabd735bd5d0c494576477d
--- /dev/null
+++ b/docs/source/module/workflows/iptrunk/import_iptrunk.rst
@@ -0,0 +1,6 @@
+``gso.workflows.iptrunk.import_iptrunk``
+========================================
+
+.. automodule:: gso.workflows.iptrunk.import_iptrunk
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/iptrunk/index.rst b/docs/source/module/workflows/iptrunk/index.rst
index f046983fd0c9650d0aa72315a48d875525b312f7..127d1985c0a8c6b86c62182fb0d81ee2951aed59 100644
--- a/docs/source/module/workflows/iptrunk/index.rst
+++ b/docs/source/module/workflows/iptrunk/index.rst
@@ -13,7 +13,9 @@ Submodules
    :titlesonly:
 
    activate_iptrunk
+   create_imported_iptrunk
    create_iptrunk
+   import_iptrunk
    migrate_iptrunk
    modify_isis_metric
    modify_trunk_interface
diff --git a/docs/source/module/workflows/office_router/create_imported_office_router.rst b/docs/source/module/workflows/office_router/create_imported_office_router.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a3136a813822ebe8657650cebfed3cef5d4b521c
--- /dev/null
+++ b/docs/source/module/workflows/office_router/create_imported_office_router.rst
@@ -0,0 +1,6 @@
+``gso.workflows.office_router.create_imported_office_router``
+=============================================================
+
+.. automodule:: gso.workflows.office_router.create_imported_office_router
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/office_router/import_office_router.rst b/docs/source/module/workflows/office_router/import_office_router.rst
new file mode 100644
index 0000000000000000000000000000000000000000..50c55eb5fb6f727468d8c746b7730f66b8ee7665
--- /dev/null
+++ b/docs/source/module/workflows/office_router/import_office_router.rst
@@ -0,0 +1,6 @@
+``gso.workflows.office_router.import_office_router``
+====================================================
+
+.. automodule:: gso.workflows.office_router.import_office_router
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/office_router/index.rst b/docs/source/module/workflows/office_router/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d2aa53dfa3d28f4a06b40b86bc4a1320a0fa8623
--- /dev/null
+++ b/docs/source/module/workflows/office_router/index.rst
@@ -0,0 +1,16 @@
+``gso.workflows.office_router``
+===============================
+
+.. automodule:: gso.workflows.office_router
+   :members:
+   :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+   :maxdepth: 2
+   :titlesonly:
+
+   create_imported_office_router
+   import_office_router
diff --git a/docs/source/module/workflows/router/create_imported_router.rst b/docs/source/module/workflows/router/create_imported_router.rst
new file mode 100644
index 0000000000000000000000000000000000000000..8124fd0de31b8033b723494635c1cfa2dd9aedc9
--- /dev/null
+++ b/docs/source/module/workflows/router/create_imported_router.rst
@@ -0,0 +1,6 @@
+``gso.workflows.router.create_imported_router``
+===============================================
+
+.. automodule:: gso.workflows.router.create_imported_router
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/router/import_router.rst b/docs/source/module/workflows/router/import_router.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c0544a275a90226723e71dd3f18cc9fc27d86dde
--- /dev/null
+++ b/docs/source/module/workflows/router/import_router.rst
@@ -0,0 +1,6 @@
+``gso.workflows.router.import_router``
+======================================
+
+.. automodule:: gso.workflows.router.import_router
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/router/index.rst b/docs/source/module/workflows/router/index.rst
index e582a4402c8c17ba1292fcb1763cdcce193109a0..1913157882cab48dc6d3ddeaafac26acdbf9e8ea 100644
--- a/docs/source/module/workflows/router/index.rst
+++ b/docs/source/module/workflows/router/index.rst
@@ -13,7 +13,10 @@ Submodules
    :titlesonly:
 
    activate_router
+   create_imported_router
    create_router
+   import_router
+   modify_connection_strategy
    redeploy_base_config
    terminate_router
    update_ibgp_mesh
diff --git a/docs/source/module/workflows/router/modify_connection_strategy.rst b/docs/source/module/workflows/router/modify_connection_strategy.rst
index b60db9b2fcee862a351408d2d66a693c3fc61024..ddecdb9f5e084f5d2b7ce8d273330daa3ed904df 100644
--- a/docs/source/module/workflows/router/modify_connection_strategy.rst
+++ b/docs/source/module/workflows/router/modify_connection_strategy.rst
@@ -1,5 +1,5 @@
 ``gso.workflows.router.modify_connection_strategy``
-=========================================
+===================================================
 
 .. automodule:: gso.workflows.router.modify_connection_strategy
    :members:
diff --git a/docs/source/module/workflows/site/create_imported_site.rst b/docs/source/module/workflows/site/create_imported_site.rst
new file mode 100644
index 0000000000000000000000000000000000000000..52fab323a19e5c27cc114dd03f52565d09cadc3e
--- /dev/null
+++ b/docs/source/module/workflows/site/create_imported_site.rst
@@ -0,0 +1,6 @@
+``gso.workflows.site.create_imported_site``
+===========================================
+
+.. automodule:: gso.workflows.site.create_imported_site
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/site/import_site.rst b/docs/source/module/workflows/site/import_site.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5b5a94bc621843407151a025996d7322b7c10247
--- /dev/null
+++ b/docs/source/module/workflows/site/import_site.rst
@@ -0,0 +1,6 @@
+``gso.workflows.site.import_site``
+==================================
+
+.. automodule:: gso.workflows.site.import_site
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/site/index.rst b/docs/source/module/workflows/site/index.rst
index c07dc9b901088cf268cec9709286403c60c2bbcd..c1b8b06baae294c3316c9c39966914188e5f839d 100644
--- a/docs/source/module/workflows/site/index.rst
+++ b/docs/source/module/workflows/site/index.rst
@@ -12,6 +12,8 @@ Submodules
    :maxdepth: 2
    :titlesonly:
 
+   create_imported_site
    create_site
+   import_site
    modify_site
    terminate_site
diff --git a/docs/source/module/workflows/super_pop_switch/create_imported_super_pop_switch.rst b/docs/source/module/workflows/super_pop_switch/create_imported_super_pop_switch.rst
new file mode 100644
index 0000000000000000000000000000000000000000..58beb83ef8627beb73bec56980ac23e2d8059ba6
--- /dev/null
+++ b/docs/source/module/workflows/super_pop_switch/create_imported_super_pop_switch.rst
@@ -0,0 +1,6 @@
+``gso.workflows.super_pop_switch.create_imported_super_pop_switch``
+===================================================================
+
+.. automodule:: gso.workflows.super_pop_switch.create_imported_super_pop_switch
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/super_pop_switch/import_super_pop_switch.rst b/docs/source/module/workflows/super_pop_switch/import_super_pop_switch.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b7cb7e2ed4fbaa1480baea89b43a97c3646402eb
--- /dev/null
+++ b/docs/source/module/workflows/super_pop_switch/import_super_pop_switch.rst
@@ -0,0 +1,6 @@
+``gso.workflows.super_pop_switch.import_super_pop_switch``
+==========================================================
+
+.. automodule:: gso.workflows.super_pop_switch.import_super_pop_switch
+   :members:
+   :show-inheritance:
diff --git a/docs/source/module/workflows/super_pop_switch/index.rst b/docs/source/module/workflows/super_pop_switch/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..48672f720f79b81b2441827f5c0a38a3b71789fe
--- /dev/null
+++ b/docs/source/module/workflows/super_pop_switch/index.rst
@@ -0,0 +1,16 @@
+``gso.workflows.super_pop_switch``
+==================================
+
+.. automodule:: gso.workflows.super_pop_switch
+   :members:
+   :show-inheritance:
+
+Submodules
+----------
+
+.. toctree::
+   :maxdepth: 2
+   :titlesonly:
+
+   create_imported_super_pop_switch
+   import_super_pop_switch
diff --git a/docs/source/module/workflows/tasks/import_iptrunk.rst b/docs/source/module/workflows/tasks/import_iptrunk.rst
deleted file mode 100644
index 24a4be08f99a22f03e107ebef8d1cb58e5d0815b..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/import_iptrunk.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.workflows.tasks.import_iptrunk``
-======================================
-
-.. automodule:: gso.workflows.tasks.import_iptrunk
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/import_office_router.rst b/docs/source/module/workflows/tasks/import_office_router.rst
deleted file mode 100644
index a6b37df5df88e9e57590be22495dfa213cfba177..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/import_office_router.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.workflows.tasks.import_office_router``
-============================================
-
-.. automodule:: gso.workflows.tasks.import_office_router
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/import_router.rst b/docs/source/module/workflows/tasks/import_router.rst
deleted file mode 100644
index 65a6e60741a9d13b4dd3fc2dc3d796550b12b22e..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/import_router.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.workflows.tasks.import_router``
-=====================================
-
-.. automodule:: gso.workflows.tasks.import_router
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/import_site.rst b/docs/source/module/workflows/tasks/import_site.rst
deleted file mode 100644
index eb9280dff87c304ac2d7853931ce64203929851e..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/import_site.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.workflows.tasks.import_site``
-===================================
-
-.. automodule:: gso.workflows.tasks.import_site
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/import_super_pop_switch.rst b/docs/source/module/workflows/tasks/import_super_pop_switch.rst
deleted file mode 100644
index 575db1e0e343ffbe1436c6822276524bbcf456a0..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/import_super_pop_switch.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-``gso.workflows.tasks.import_super_pop_switch``
-===============================================
-
-.. automodule:: gso.workflows.tasks.import_super_pop_switch
-   :members:
-   :show-inheritance:
diff --git a/docs/source/module/workflows/tasks/index.rst b/docs/source/module/workflows/tasks/index.rst
deleted file mode 100644
index 1931adef62a66b169c5e5b423df4f31022cfa4d5..0000000000000000000000000000000000000000
--- a/docs/source/module/workflows/tasks/index.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-``gso.workflows.tasks``
-=======================
-
-.. automodule:: gso.workflows.tasks
-   :members:
-   :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
-   :maxdepth: 2
-   :titlesonly:
-
-   import_super_pop_switch
-   import_office_router
-   import_iptrunk
-   import_router
-   import_site
diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst
index a7cc2d9058a220cfd6db4e43bb3a1cb2a4ad9181..d82a04c1816b3ebb75e3f05213d5163df3ad6fd3 100644
--- a/docs/source/quickstart.rst
+++ b/docs/source/quickstart.rst
@@ -4,15 +4,15 @@ Quickstart
 Development environment and dependencies
 ----------------------------------------
 
-- Install python 3.11 if you do not have it already:
+- Install python 3.12 if you do not have it already:
     - ``add-apt-repository ppa:deadsnakes/ppa``
-    - ``apt install python3.11 python3.11-distutils``
+    - ``apt install python3.12``
 - Follow Steps 1 and 2 from here to install dependencies and setup DB:
   `<https://workfloworchestrator.org/orchestrator-core/workshops/beginner/debian/>`_
 - To install the orchestrator GUI, you can follow the steps 5 and 6 from the previous link.
 - Create a virtual environment:
     - ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh``
-    - ``mkvirtualenv --python python3.11 gso``
+    - ``mkvirtualenv --python python3.12 gso``
 - To use the virtual environment:
     - ``source /usr/share/virtualenvwrapper/virtualenvwrapper.sh``
     - ``workon gso``
@@ -25,7 +25,7 @@ Do all this inside the virtual environment.
 - Clone this repository
 - ``pip install -r requirements.txt``
     - If you get an error because you pip version is too old, run this:
-      ``curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11``
+      ``curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12``
 - ``pip install -e .``
 - Create an ``oss-params.json`` based on the ``oss-params-example.json`` file inside ``/gso``.
 - Export the oss-params file: ``export OSS_PARAMS_FILENAME="/path/to/oss-params.json"``
diff --git a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
index af07d476da370b4fda5d5f2902f784731ae6d5d4..738c2785ffbd1dfec50374c80b0f6fdf8a576ef4 100644
--- a/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
+++ b/docs/vale/styles/config/vocabularies/geant-jargon/accept.txt
@@ -26,3 +26,7 @@ TWAMP
 Pydantic
 UUID
 SNMP
+V?LAN
+OPA
+OIDC
+HTTPBearer
diff --git a/gso/__init__.py b/gso/__init__.py
index ecdfd940ffefe85df1613e4a6cbbc74f56bf80dc..143c93ddbe0fc3786bc0990bc86a82ee108d5cc3 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -5,19 +5,22 @@ from gso import monkeypatches  # noqa: F401, isort:skip
 import typer
 from orchestrator import OrchestratorCore, app_settings
 from orchestrator.cli.main import app as cli_app
+from orchestrator.graphql import SCALAR_OVERRIDES
 
 # noinspection PyUnresolvedReferences
 import gso.products
 import gso.workflows  # noqa: F401
 from gso.api import router as api_router
-from gso.middlewares import ModifyProcessEndpointResponse
+from gso.graphql_api.types import GSO_SCALAR_OVERRIDES
+
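+# Register GSO-specific GraphQL scalars (IPv4Network, IPv6Network) with the orchestrator schema.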
+SCALAR_OVERRIDES.update(GSO_SCALAR_OVERRIDES)
 
 
 def init_gso_app() -> OrchestratorCore:
     """Initialise the :term:`GSO` app."""
     app = OrchestratorCore(base_settings=app_settings)
+    app.register_graphql()
     app.include_router(api_router, prefix="/api")
-    app.add_middleware(ModifyProcessEndpointResponse)
     return app
 
 
diff --git a/gso/api/v1/__init__.py b/gso/api/v1/__init__.py
index c25422efbc6c7aafd29f0751f1104c819d051160..5407a81644e5253922b40b2f110fcb9a50804ce3 100644
--- a/gso/api/v1/__init__.py
+++ b/gso/api/v1/__init__.py
@@ -2,14 +2,12 @@
 
 from fastapi import APIRouter
 
-from gso.api.v1.imports import router as imports_router
 from gso.api.v1.network import router as network_router
 from gso.api.v1.processes import router as processes_router
 from gso.api.v1.subscriptions import router as subscriptions_router
 
 router = APIRouter()
 
-router.include_router(imports_router)
 router.include_router(subscriptions_router)
 router.include_router(processes_router)
 router.include_router(network_router)
diff --git a/gso/api/v1/imports.py b/gso/api/v1/imports.py
deleted file mode 100644
index 643b4bc09a8295a35ca1b6f4ee1bcc08a978371e..0000000000000000000000000000000000000000
--- a/gso/api/v1/imports.py
+++ /dev/null
@@ -1,257 +0,0 @@
-""":term:`GSO` :term:`API` endpoints that import different types of existing services."""
-
-import ipaddress
-from typing import Any
-from uuid import UUID
-
-from fastapi import Depends, HTTPException, status
-from fastapi.routing import APIRouter
-from orchestrator.services import processes
-from pydantic import BaseModel, root_validator, validator
-
-from gso.auth.security import opa_security_default
-from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
-from gso.products.product_blocks.router import RouterRole
-from gso.products.product_blocks.site import SiteTier
-from gso.services import subscriptions
-from gso.services.partners import PartnerNotFoundError, get_partner_by_name
-from gso.utils.helpers import BaseSiteValidatorModel, LAGMember
-from gso.utils.shared_enums import PortNumber, Vendor
-
-router = APIRouter(prefix="/imports", tags=["Imports"], dependencies=[Depends(opa_security_default)])
-
-
-class ImportResponseModel(BaseModel):
-    """The model of a response given when services are imported using the :term:`API`."""
-
-    pid: UUID
-    detail: str
-
-
-class SiteImportModel(BaseSiteValidatorModel):
-    """The required input for importing an existing :class:`gso.products.product_types.site`."""
-
-    site_name: str
-    site_city: str
-    site_country: str
-    site_country_code: str
-    site_latitude: float
-    site_longitude: float
-    site_bgp_community_id: int
-    site_internal_id: int
-    site_tier: SiteTier
-    site_ts_address: str
-    partner: str
-
-
-class RouterImportModel(BaseModel):
-    """Required fields for importing an existing :class:`gso.product.product_types.router`."""
-
-    partner: str
-    router_site: str
-    hostname: str
-    ts_port: int
-    router_vendor: Vendor
-    router_role: RouterRole
-    router_lo_ipv4_address: ipaddress.IPv4Address
-    router_lo_ipv6_address: ipaddress.IPv6Address
-    router_lo_iso_address: str
-
-
-class IptrunkImportModel(BaseModel):
-    """Required fields for importing an existing :class:`gso.products.product_types.iptrunk`."""
-
-    partner: str
-    geant_s_sid: str | None
-    iptrunk_type: IptrunkType
-    iptrunk_description: str
-    iptrunk_speed: PhysicalPortCapacity
-    iptrunk_minimum_links: int
-    iptrunk_isis_metric: int
-    side_a_node_id: str
-    side_a_ae_iface: str
-    side_a_ae_geant_a_sid: str | None
-    side_a_ae_members: list[LAGMember]
-    side_b_node_id: str
-    side_b_ae_iface: str
-    side_b_ae_geant_a_sid: str | None
-    side_b_ae_members: list[LAGMember]
-
-    iptrunk_ipv4_network: ipaddress.IPv4Network
-    iptrunk_ipv6_network: ipaddress.IPv6Network
-
-    @classmethod
-    def _get_active_routers(cls) -> set[str]:
-        return {
-            str(router["subscription_id"])
-            for router in subscriptions.get_active_router_subscriptions(includes=["subscription_id"])
-        }
-
-    @validator("partner")
-    def check_if_partner_exists(cls, value: str) -> str:
-        """Validate that the partner exists."""
-        try:
-            get_partner_by_name(value)
-        except PartnerNotFoundError as e:
-            msg = f"partner {value} not found"
-            raise ValueError(msg) from e
-
-        return value
-
-    @validator("side_a_node_id", "side_b_node_id")
-    def check_if_router_side_is_available(cls, value: str) -> str:
-        """Both sides of the trunk must exist in :term:`GSO`."""
-        if value not in cls._get_active_routers():
-            msg = f"Router {value} not found"
-            raise ValueError(msg)
-
-        return value
-
-    @validator("side_a_ae_members", "side_b_ae_members")
-    def check_side_uniqueness(cls, value: list[str]) -> list[str]:
-        """:term:`LAG` members must be unique."""
-        if len(value) != len(set(value)):
-            msg = "Items must be unique"
-            raise ValueError(msg)
-
-        return value
-
-    @root_validator
-    def check_members(cls, values: dict[str, Any]) -> dict[str, Any]:
-        """Amount of :term:`LAG` members has to match on side A and B, and meet the minimum requirement."""
-        min_links = values["iptrunk_minimum_links"]
-        side_a_members = values.get("side_a_ae_members", [])
-        side_b_members = values.get("side_b_ae_members", [])
-
-        len_a = len(side_a_members)
-        len_b = len(side_b_members)
-
-        if len_a < min_links:
-            msg = f"Side A members should be at least {min_links} (iptrunk_minimum_links)"
-            raise ValueError(msg)
-
-        if len_a != len_b:
-            msg = "Mismatch between Side A and B members"
-            raise ValueError(msg)
-
-        return values
-
-
-class SuperPopSwitchImportModel(BaseModel):
-    """Required fields for importing an existing :class:`gso.product.product_types.super_pop_switch`."""
-
-    partner: str
-    super_pop_switch_site: str
-    hostname: str
-    super_pop_switch_ts_port: PortNumber
-    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address
-
-
-class OfficeRouterImportModel(BaseModel):
-    """Required fields for importing an existing :class:`gso.product.product_types.office_router`."""
-
-    partner: str
-    office_router_site: str
-    office_router_fqdn: str
-    office_router_ts_port: PortNumber
-    office_router_lo_ipv4_address: ipaddress.IPv4Address
-    office_router_lo_ipv6_address: ipaddress.IPv6Address
-
-
-def _start_process(process_name: str, data: dict) -> UUID:
-    """Start a process and handle common exceptions."""
-    pid: UUID = processes.start_process(process_name, [data])
-    if pid is None:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to start the process.",
-        )
-
-    process = processes._get_process(pid)  # noqa: SLF001
-    if process.last_status == "failed":
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Process {pid} failed because of an internal error. {process.failed_reason}",
-        )
-
-    return pid
-
-
-@router.post("/sites", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
-def import_site(site: SiteImportModel) -> dict[str, Any]:
-    """Import a site by running the import_site workflow.
-
-    :param site: The site information to be imported.
-    :type site: SiteImportModel
-
-    :return: A dictionary containing the process id of the started process and detail message.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: If the site already exists or if there's an error in the process.
-    """
-    pid = _start_process("import_site", site.dict())
-    return {"detail": "Site added successfully.", "pid": pid}
-
-
-@router.post("/routers", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
-def import_router(router_data: RouterImportModel) -> dict[str, Any]:
-    """Import a router by running the import_router workflow.
-
-    :param router_data: The router information to be imported.
-    :type router_data: RouterImportModel
-
-    :return: A dictionary containing the process id of the started process and detail message.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: If there's an error in the process.
-    """
-    pid = _start_process("import_router", router_data.dict())
-    return {"detail": "Router has been added successfully", "pid": pid}
-
-
-@router.post("/iptrunks", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
-def import_iptrunk(iptrunk_data: IptrunkImportModel) -> dict[str, Any]:
-    """Import an iptrunk by running the import_iptrunk workflow.
-
-    :param iptrunk_data: The iptrunk information to be imported.
-    :type iptrunk_data: IptrunkImportModel
-
-    :return: A dictionary containing the process id of the started process and detail message.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: If there's an error in the process.
-    """
-    pid = _start_process("import_iptrunk", iptrunk_data.dict())
-    return {"detail": "Iptrunk has been added successfully", "pid": pid}
-
-
-@router.post("/super-pop-switches", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
-def import_super_pop_switch(super_pop_switch_data: SuperPopSwitchImportModel) -> dict[str, Any]:
-    """Import a Super PoP switch by running the import_super_pop_switch workflow.
-
-    :param super_pop_switch_data: The Super PoP switch information to be imported.
-    :type super_pop_switch_data: SuperPopSwitchImportModel
-
-    :return: A dictionary containing the process id of the started process and detail message.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: If there's an error in the process.
-    """
-    pid = _start_process("import_super_pop_switch", super_pop_switch_data.dict())
-    return {"detail": "Super PoP switch has been added successfully", "pid": pid}
-
-
-@router.post("/office-routers", status_code=status.HTTP_201_CREATED, response_model=ImportResponseModel)
-def import_office_router(office_router_data: OfficeRouterImportModel) -> dict[str, Any]:
-    """Import a office router by running the import_office_router workflow.
-
-    :param office_router_data: The office router information to be imported.
-    :type office_router_data: OfficeRouterImportModel
-
-    :return: A dictionary containing the process id of the started process and detail message.
-    :rtype: dict[str, Any]
-
-    :raises HTTPException: If there's an error in the process.
-    """
-    pid = _start_process("import_office_router", office_router_data.dict())
-    return {"detail": "Office router has been added successfully", "pid": pid}
diff --git a/gso/auth/oidc_policy_helper.py b/gso/auth/oidc_policy_helper.py
index 1ba2eb3b8cdb3db63babe30beabf2c6186e3ae3c..96edfbf3e08b21951a320dc40dfb6e1c8012a10e 100644
--- a/gso/auth/oidc_policy_helper.py
+++ b/gso/auth/oidc_policy_helper.py
@@ -252,7 +252,7 @@ class OIDCUser(HTTPBearer):
             return
 
         response = await async_request.get(self.openid_url + "/.well-known/openid-configuration")
-        self.openid_config = OIDCConfig.parse_obj(response.json())
+        self.openid_config = OIDCConfig.model_validate(response.json())
 
     async def userinfo(self, async_request: AsyncClient, token: str) -> OIDCUserModel:
         """Get the userinfo from the openid server.
diff --git a/gso/auth/settings.py b/gso/auth/settings.py
index 29c1fc806a8589b38158a3f95dddf3f10cb8bdf3..b3ab1a6a569e2e594e181c23c231366e212f4905 100644
--- a/gso/auth/settings.py
+++ b/gso/auth/settings.py
@@ -6,7 +6,8 @@ with external authentication providers for enhanced security management.
 Todo: Remove token and sensitive data from OPA console and API.
 """
 
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
 class Oauth2LibSettings(BaseSettings):
diff --git a/gso/cli/imports.py b/gso/cli/imports.py
index 64a6af56cdb691a15c796fc27fce8e0d297b7950..c4dc1adf2cd9080a4401ceb34415de674d1d561e 100644
--- a/gso/cli/imports.py
+++ b/gso/cli/imports.py
@@ -1,35 +1,150 @@
-""":term:`CLI` command for importing data to coreDB."""
+""":term:`CLI` commands for importing data to coreDB."""
 
 import csv
 import ipaddress
 import json
+import time
 from datetime import UTC, datetime
 from pathlib import Path
-from typing import TypeVar
+from typing import Self, TypeVar
 
 import typer
 import yaml
 from orchestrator.db import db
-from pydantic import ValidationError
+from orchestrator.services.processes import start_process
+from orchestrator.types import SubscriptionLifecycle
+from pydantic import BaseModel, ValidationError, field_validator, model_validator
 from sqlalchemy.exc import SQLAlchemyError
 
-from gso.api.v1.imports import (
-    IptrunkImportModel,
-    OfficeRouterImportModel,
-    RouterImportModel,
-    SiteImportModel,
-    SuperPopSwitchImportModel,
-    import_iptrunk,
-    import_office_router,
-    import_router,
-    import_site,
-    import_super_pop_switch,
-)
 from gso.db.models import PartnerTable
-from gso.services.subscriptions import get_active_subscriptions_by_field_and_value
+from gso.products import ProductType
+from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
+from gso.products.product_blocks.router import RouterRole
+from gso.services.partners import PartnerNotFoundError, get_partner_by_name
+from gso.services.subscriptions import (
+    get_active_router_subscriptions,
+    get_active_subscriptions_by_field_and_value,
+    get_subscriptions,
+)
+from gso.utils.helpers import BaseSiteValidatorModel, LAGMember
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 app: typer.Typer = typer.Typer()
 
+
+class SiteImportModel(BaseSiteValidatorModel):
+    """The required input for importing an existing :class:`gso.products.product_types.site`."""
+
+
+class RouterImportModel(BaseModel):
+    """Required fields for importing an existing :class:`gso.product.product_types.router`."""
+
+    partner: str
+    router_site: str
+    hostname: str
+    ts_port: int
+    router_vendor: Vendor
+    router_role: RouterRole
+    router_lo_ipv4_address: IPv4AddressType
+    router_lo_ipv6_address: IPv6AddressType
+    router_lo_iso_address: str
+
+
+class SuperPopSwitchImportModel(BaseModel):
+    """Required fields for importing an existing :class:`gso.product.product_types.super_pop_switch`."""
+
+    partner: str
+    super_pop_switch_site: str
+    hostname: str
+    super_pop_switch_ts_port: PortNumber
+    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address
+
+
+class OfficeRouterImportModel(BaseModel):
+    """Required fields for importing an existing :class:`gso.product.product_types.office_router`."""
+
+    partner: str
+    office_router_site: str
+    office_router_fqdn: str
+    office_router_ts_port: PortNumber
+    office_router_lo_ipv4_address: ipaddress.IPv4Address
+    office_router_lo_ipv6_address: ipaddress.IPv6Address
+
+
+class IptrunkImportModel(BaseModel):
+    """Required fields for importing an existing :class:`gso.products.product_types.iptrunk`."""
+
+    partner: str
+    geant_s_sid: str | None
+    iptrunk_type: IptrunkType
+    iptrunk_description: str | None = None
+    iptrunk_speed: PhysicalPortCapacity
+    iptrunk_minimum_links: int
+    iptrunk_isis_metric: int
+    side_a_node_id: str
+    side_a_ae_iface: str
+    side_a_ae_geant_a_sid: str | None
+    side_a_ae_members: list[LAGMember]
+    side_b_node_id: str
+    side_b_ae_iface: str
+    side_b_ae_geant_a_sid: str | None
+    side_b_ae_members: list[LAGMember]
+
+    iptrunk_ipv4_network: ipaddress.IPv4Network
+    iptrunk_ipv6_network: ipaddress.IPv6Network
+
+    @classmethod
+    def _get_active_routers(cls) -> set[str]:
+        return {
+            str(router["subscription_id"]) for router in get_active_router_subscriptions(includes=["subscription_id"])
+        }
+
+    @field_validator("partner")
+    def check_if_partner_exists(cls, value: str) -> str:
+        """Validate that the partner exists."""
+        try:
+            get_partner_by_name(value)
+        except PartnerNotFoundError as e:
+            msg = f"partner {value} not found"
+            raise ValueError(msg) from e
+
+        return value
+
+    @field_validator("side_a_node_id", "side_b_node_id")
+    def check_if_router_side_is_available(cls, value: str) -> str:
+        """Both sides of the trunk must exist in :term:`GSO`."""
+        if value not in cls._get_active_routers():
+            msg = f"Router {value} not found"
+            raise ValueError(msg)
+
+        return value
+
+    @field_validator("side_a_ae_members", "side_b_ae_members")
+    def check_side_uniqueness(cls, value: list[str]) -> list[str]:
+        """:term:`LAG` members must be unique."""
+        if len(value) != len(set(value)):
+            msg = "Items must be unique"
+            raise ValueError(msg)
+
+        return value
+
+    @model_validator(mode="after")
+    def check_members(self) -> Self:
+        """Amount of :term:`LAG` members has to match on side A and B, and meet the minimum requirement."""
+        len_a = len(self.side_a_ae_members)
+        len_b = len(self.side_b_ae_members)
+
+        if len_a < self.iptrunk_minimum_links:
+            msg = f"Side A members should be at least {self.iptrunk_minimum_links} (iptrunk_minimum_links)"
+            raise ValueError(msg)
+
+        if len_a != len_b:
+            msg = "Mismatch between Side A and B members"
+            raise ValueError(msg)
+
+        return self
+
+
 T = TypeVar(
     "T", SiteImportModel, RouterImportModel, IptrunkImportModel, SuperPopSwitchImportModel, OfficeRouterImportModel
 )
@@ -40,10 +155,9 @@ common_filepath_option = typer.Option(
 )
 
 
-def read_data(filepath: str) -> dict:
+def _read_data(file_path: Path) -> dict:
     """Read data from a JSON or YAML file."""
-    typer.echo(f"Starting import from {filepath}")
-    file_path = Path(filepath)
+    typer.echo(f"Starting import from {file_path!s}")
     file_extension = file_path.suffix.lower()
 
     with file_path.open("r") as f:
@@ -58,95 +172,107 @@ def read_data(filepath: str) -> dict:
         raise typer.Exit(code=1)
 
 
-def generic_import_data(
-    filepath: str,
-    import_model: type[T],
-    import_function: callable,  # type: ignore[valid-type]
-    name_key: str,
+def _get_router_subscription_id(node_name: str) -> str | None:
+    """Get the subscription id for a router by its node name."""
+    subscriptions = get_active_subscriptions_by_field_and_value(
+        "router_fqdn",
+        node_name,
+    )
+    if subscriptions:
+        return str(subscriptions[0].subscription_id)
+    return None
+
+
+def _import_partners_from_csv(file_path: Path) -> list[dict]:
+    """Read partners from a CSV file."""
+    with Path.open(file_path, encoding="utf-8") as csv_file:
+        csv_reader = csv.DictReader(csv_file)
+        return list(csv_reader)
+
+
+def _generic_import_product(
+    file_path: Path, imported_product_type: ProductType, workflow_suffix: str, name_key: str, import_model: type[T]
 ) -> None:
-    """Import data from a JSON or YAML file."""
+    """Import subscriptions from a JSON or YAML file."""
     successfully_imported_data = []
-    data = read_data(filepath)
+    data = _read_data(file_path)
     for details in data:
         details["partner"] = "GEANT"
-        typer.echo(f"Importing {name_key}: {details[name_key]}")
+        typer.echo(f"Creating imported {name_key}: {details[name_key]}")
         try:
             initial_data = import_model(**details)
-            import_function(initial_data)  # type: ignore[misc]
+            start_process(f"create_imported_{workflow_suffix}", [initial_data.dict()])
             successfully_imported_data.append(getattr(initial_data, name_key))
             typer.echo(
-                f"Successfully imported {name_key}: {getattr(initial_data, name_key)}",
+                f"Successfully created {name_key}: {getattr(initial_data, name_key)}",
             )
         except ValidationError as e:
             typer.echo(f"Validation error: {e}")
 
+    typer.echo("Waiting for the dust to settle before moving on the importing new products...")
+    time.sleep(1)
+
+    #  Migrate new products from imported to "full" counterpart.
+    imported_products = get_subscriptions(
+        [imported_product_type], lifecycles=[SubscriptionLifecycle.ACTIVE], includes=["subscription_id"]
+    )
+    for subscription_id in imported_products:
+        typer.echo(f"Importing {subscription_id}")
+        start_process(f"import_{workflow_suffix}", [subscription_id])
+
     if successfully_imported_data:
-        typer.echo(f"Successfully imported {name_key}s:")
+        typer.echo(f"Successfully created imported {name_key}s:")
         for item in successfully_imported_data:
             typer.echo(f"- {item}")
+        typer.echo(f"Please validate no more imported {workflow_suffix} products exist anymore in the database.")
 
 
 @app.command()
 def import_sites(filepath: str = common_filepath_option) -> None:
     """Import sites into GSO."""
-    # Use the import_data function to handle common import logic
-    generic_import_data(filepath, SiteImportModel, import_site, "site_name")
+    _generic_import_product(Path(filepath), ProductType.IMPORTED_SITE, "site", "site_name", SiteImportModel)
 
 
 @app.command()
 def import_routers(filepath: str = common_filepath_option) -> None:
     """Import routers into GSO."""
-    # Use the import_data function to handle common import logic
-    generic_import_data(filepath, RouterImportModel, import_router, "hostname")
+    _generic_import_product(Path(filepath), ProductType.IMPORTED_ROUTER, "router", "hostname", RouterImportModel)
 
 
 @app.command()
 def import_super_pop_switches(filepath: str = common_filepath_option) -> None:
     """Import Super PoP Switches into GSO."""
-    # Use the import_data function to handle common import logic
-    generic_import_data(filepath, SuperPopSwitchImportModel, import_super_pop_switch, "hostname")
+    _generic_import_product(
+        Path(filepath),
+        ProductType.IMPORTED_SUPER_POP_SWITCH,
+        "super_pop_switch",
+        "hostname",
+        SuperPopSwitchImportModel,
+    )
 
 
 @app.command()
 def import_office_routers(filepath: str = common_filepath_option) -> None:
     """Import office routers into GSO."""
-    # Use the import_data function to handle common import logic
-    generic_import_data(filepath, OfficeRouterImportModel, import_office_router, "office_router_fqdn")
-
-
-def get_router_subscription_id(node_name: str) -> str | None:
-    """Get the subscription id for a router by its node name."""
-    subscriptions = get_active_subscriptions_by_field_and_value(
-        "router_fqdn",
-        node_name,
+    _generic_import_product(
+        Path(filepath),
+        ProductType.IMPORTED_OFFICE_ROUTER,
+        "office_router",
+        "office_router_fqdn",
+        OfficeRouterImportModel,
     )
-    if subscriptions:
-        return str(subscriptions[0].subscription_id)
-    return None
 
 
 @app.command()
 def import_iptrunks(filepath: str = common_filepath_option) -> None:
     """Import IP trunks into GSO."""
     successfully_imported_data = []
-    data = read_data(filepath)
+    data = _read_data(Path(filepath))
     for trunk in data:
-        ipv4_network_a = ipaddress.ip_network(
-            trunk["config"]["nodeA"]["ipv4_address"],
-            strict=False,
-        )
-        ipv4_network_b = ipaddress.ip_network(
-            trunk["config"]["nodeB"]["ipv4_address"],
-            strict=False,
-        )
-        ipv6_network_a = ipaddress.ip_network(
-            trunk["config"]["nodeA"]["ipv6_address"],
-            strict=False,
-        )
-        ipv6_network_b = ipaddress.ip_network(
-            trunk["config"]["nodeB"]["ipv6_address"],
-            strict=False,
-        )
+        ipv4_network_a = ipaddress.IPv4Network(trunk["config"]["nodeA"]["ipv4_address"], strict=False)
+        ipv4_network_b = ipaddress.IPv4Network(trunk["config"]["nodeB"]["ipv4_address"], strict=False)
+        ipv6_network_a = ipaddress.IPv6Network(trunk["config"]["nodeA"]["ipv6_address"], strict=False)
+        ipv6_network_b = ipaddress.IPv6Network(trunk["config"]["nodeB"]["ipv6_address"], strict=False)
         # Check if IPv4 networks are equal
         if ipv4_network_a == ipv4_network_b:
             iptrunk_ipv4_network = ipv4_network_a
@@ -175,48 +301,45 @@ def import_iptrunks(filepath: str = common_filepath_option) -> None:
                 iptrunk_speed=trunk["config"]["common"]["link_speed"],
                 iptrunk_minimum_links=trunk["config"]["common"]["minimum_links"],
                 iptrunk_isis_metric=trunk["config"]["common"]["isis_metric"],
-                side_a_node_id=get_router_subscription_id(
-                    trunk["config"]["nodeA"]["name"],
-                )
-                or "",
+                side_a_node_id=_get_router_subscription_id(trunk["config"]["nodeA"]["name"]) or "",
                 side_a_ae_iface=trunk["config"]["nodeA"]["ae_name"],
                 side_a_ae_geant_a_sid=trunk["config"]["nodeA"]["port_sid"],
                 side_a_ae_members=trunk["config"]["nodeA"]["members"],
-                side_b_node_id=get_router_subscription_id(
-                    trunk["config"]["nodeB"]["name"],
-                )
-                or "",
+                side_b_node_id=_get_router_subscription_id(trunk["config"]["nodeB"]["name"]) or "",
                 side_b_ae_iface=trunk["config"]["nodeB"]["ae_name"],
                 side_b_ae_geant_a_sid=trunk["config"]["nodeB"]["port_sid"],
                 side_b_ae_members=trunk["config"]["nodeB"]["members"],
-                iptrunk_ipv4_network=iptrunk_ipv4_network,  # type:ignore[arg-type]
-                iptrunk_ipv6_network=iptrunk_ipv6_network,  # type:ignore[arg-type]
+                iptrunk_ipv4_network=iptrunk_ipv4_network,
+                iptrunk_ipv6_network=iptrunk_ipv6_network,
             )
-            import_iptrunk(initial_data)
+            start_process("create_imported_iptrunk", [initial_data.dict()])
             successfully_imported_data.append(trunk["id"])
             typer.echo(f"Successfully imported IP Trunk: {trunk['id']}")
         except ValidationError as e:
             typer.echo(f"Validation error: {e}")
 
+    typer.echo("Waiting for the dust to settle before moving on the importing new products...")
+    time.sleep(1)
+
+    trunk_ids = get_subscriptions(
+        [ProductType.IMPORTED_IP_TRUNK], lifecycles=[SubscriptionLifecycle.ACTIVE], includes=["subscription_id"]
+    )
+    for subscription_id in trunk_ids:
+        typer.echo(f"Migrating iptrunk {subscription_id}")
+        start_process("import_iptrunk", [subscription_id])
+
     if successfully_imported_data:
         typer.echo("Successfully imported IP Trunks:")
         for item in successfully_imported_data:
             typer.echo(f"- {item}")
 
 
-def import_partners_from_csv(file_path: Path) -> list[dict]:
-    """Read partners from a CSV file."""
-    with Path.open(file_path, encoding="utf-8") as csv_file:
-        csv_reader = csv.DictReader(csv_file)
-        return list(csv_reader)
-
-
 @app.command()
 def import_partners(file_path: str = typer.Argument(..., help="Path to the CSV file containing partners")) -> None:
     """Import partners from a CSV file into the database."""
     typer.echo(f"Importing partners from {file_path} ...")
 
-    partners = import_partners_from_csv(Path(file_path))
+    partners = _import_partners_from_csv(Path(file_path))
 
     try:
         for partner in partners:
diff --git a/gso/graphql_api/__init__.py b/gso/graphql_api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..98799c0cc1d503fa0f569a163881223b5c33cf21
--- /dev/null
+++ b/gso/graphql_api/__init__.py
@@ -0,0 +1 @@
+"""graphql module."""
diff --git a/gso/graphql_api/types.py b/gso/graphql_api/types.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6fb920048f5080797fb1454a54bd01ee9f4288a
--- /dev/null
+++ b/gso/graphql_api/types.py
@@ -0,0 +1,27 @@
+"""Map some Orchestrator types to scalars."""
+
+from ipaddress import IPv4Network, IPv6Network
+from typing import Any, NewType
+
+import strawberry
+from orchestrator.graphql.types import serialize_to_string
+from strawberry.custom_scalar import ScalarDefinition, ScalarWrapper
+
+IPv4NetworkType = strawberry.scalar(
+    NewType("IPv4NetworkType", str),
+    description="Represent the Orchestrator IPv4Network data type",
+    serialize=serialize_to_string,
+    parse_value=lambda v: v,
+)
+
+IPv6NetworkType = strawberry.scalar(
+    NewType("IPv6NetworkType", str),
+    description="Represent the Orchestrator IPv6Network data type",
+    serialize=serialize_to_string,
+    parse_value=lambda v: v,
+)
+
+GSO_SCALAR_OVERRIDES: dict[object, Any | ScalarWrapper | ScalarDefinition] = {
+    IPv4Network: IPv4NetworkType,
+    IPv6Network: IPv6NetworkType,
+}
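+
+# These overrides are intended to be merged into orchestrator-core's GraphQL scalar overrides at
+# application start-up (the wiring lives outside this module), so that IPv4Network and IPv6Network
+# values on product blocks are exposed as plain strings in the GraphQL schema.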
diff --git a/gso/middlewares.py b/gso/middlewares.py
deleted file mode 100644
index 58106502b70a794cde29cfb714ce61101d056dbb..0000000000000000000000000000000000000000
--- a/gso/middlewares.py
+++ /dev/null
@@ -1,101 +0,0 @@
-"""Custom middlewares for the GSO API."""
-
-import json
-import re
-from collections.abc import Callable
-from typing import Any
-
-from fastapi import Request
-from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.responses import Response
-from starlette.status import HTTP_200_OK
-
-
-class ModifyProcessEndpointResponse(BaseHTTPMiddleware):
-    """Middleware to modify the response for Process details endpoint."""
-
-    async def dispatch(self, request: Request, call_next: Callable) -> Response:
-        """Middleware to modify the response for Process details endpoint.
-
-        :param request: The incoming HTTP request.
-        :type request: Request
-
-        :param call_next: The next middleware or endpoint in the stack.
-        :type call_next: Callable
-
-        :return: The modified HTTP response.
-        :rtype: Response
-        """
-        response = await call_next(request)
-        path_pattern = re.compile(
-            r"/api/processes/([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"
-        )
-
-        match = path_pattern.match(request.url.path)
-
-        if match and response.status_code == HTTP_200_OK:
-            # Modify the response body as needed
-            response_body = b""
-            async for chunk in response.body_iterator:
-                response_body += chunk
-            try:
-                json_content = json.loads(response_body)
-                await self._modify_response_body(json_content, request)
-                modified_response_body = json.dumps(json_content).encode()
-                headers = dict(response.headers)
-                headers["content-length"] = str(len(modified_response_body))
-                return Response(
-                    content=modified_response_body,
-                    status_code=response.status_code,
-                    headers=headers,
-                    media_type=response.media_type,
-                )
-
-            except json.JSONDecodeError:
-                pass
-
-        return response
-
-    @staticmethod
-    async def _get_token(request: Request) -> str:
-        """Get the token from the request headers.
-
-        :param request: The incoming HTTP request.
-        :type request: Request
-
-        :return: The token from the request headers in specific format.
-        :rtype: str
-        """
-        bearer_prefix = "Bearer "
-        authorization_header = request.headers.get("Authorization")
-        if authorization_header:
-            # Remove the "Bearer " prefix from the token
-            token = authorization_header.replace(bearer_prefix, "")
-            return f"?token={token}"
-        return ""
-
-    async def _modify_response_body(self, response_body: dict[str, Any], request: Request) -> None:
-        """Modify the response body as needed.
-
-        :param response_body: The response body in dictionary format.
-        :type response_body: dict[str, Any]
-        :param request: The incoming HTTP request.
-        :type request: Request
-
-        :return: None
-        """
-        max_output_length = 500
-        token = await self._get_token(request)
-        try:
-            for step in response_body["steps"]:
-                if step["state"].get("callback_result", None):
-                    callback_result = step["state"]["callback_result"]
-                    if callback_result and isinstance(callback_result, str):
-                        callback_result = json.loads(callback_result)
-                    if callback_result.get("output") and len(callback_result["output"]) > max_output_length:
-                        callback_result[
-                            "output"
-                        ] = f'{request.base_url}api/v1/processes/steps/{step["step_id"]}/callback-results{token}'
-                    step["state"]["callback_result"] = callback_result
-        except (AttributeError, KeyError, TypeError):
-            pass
diff --git a/gso/migrations/env.py b/gso/migrations/env.py
index 45dc109d4786205b3359743edf3681283ca58797..968abeb94a1145de0c923cdc8d27dd2030a55df7 100644
--- a/gso/migrations/env.py
+++ b/gso/migrations/env.py
@@ -15,7 +15,7 @@ config = context.config
 # This line sets up loggers basically.
 logger = logging.getLogger("alembic.env")
 
-config.set_main_option("sqlalchemy.url", app_settings.DATABASE_URI)
+config.set_main_option("sqlalchemy.url", str(app_settings.DATABASE_URI))
 
 target_metadata = BaseModel.metadata
 
diff --git a/gso/migrations/versions/2024-04-17_393acfa175c0_add_switch_products.py b/gso/migrations/versions/2024-04-17_393acfa175c0_add_switch_products.py
index 7444c84530978213d33a8fc8ec255f1b70c5c8c5..c154044ccedc0d9691cd4d2b46cb6ed7266768e4 100644
--- a/gso/migrations/versions/2024-04-17_393acfa175c0_add_switch_products.py
+++ b/gso/migrations/versions/2024-04-17_393acfa175c0_add_switch_products.py
@@ -10,9 +10,9 @@ from alembic import op
 
 # revision identifiers, used by Alembic.
 revision = '393acfa175c0'
-down_revision = None
-branch_labels = ('data',)
-depends_on = '4ec89ab289c0'
+down_revision = '4ec89ab289c0'
+branch_labels = None
+depends_on = None
 
 
 def upgrade() -> None:
diff --git a/gso/migrations/versions/2024-04-19_3b73ee683cec_add_imported_varieties_of_existing_.py b/gso/migrations/versions/2024-04-19_3b73ee683cec_add_imported_varieties_of_existing_.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e45a0c520b89c18365f249a0123b8b5bb610919
--- /dev/null
+++ b/gso/migrations/versions/2024-04-19_3b73ee683cec_add_imported_varieties_of_existing_.py
@@ -0,0 +1,83 @@
+"""Add imported varieties of existing products.
+
+Revision ID: 3b73ee683cec
+Revises: 393acfa175c0
+Create Date: 2024-04-19 15:57:38.082516
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '3b73ee683cec'
+down_revision = '393acfa175c0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported IP trunk', 'An IP trunk that already existed in the network, and is imported into GSO', 'ImportedIptrunk', 'IMP_IP_TRUNK', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported router', 'A router that already existed in the network, and is imported into GSO', 'ImportedRouter', 'IMP_RTR', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported site', 'A site that already existed in the network, and is imported into GSO', 'ImportedSite', 'IMP_SITE', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported super PoP switch', 'A super PoP switch that already existed, and is imported into GSO', 'ImportedSuperPopSwitch', 'IMP_SPOP_SWITCH', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Imported office router', 'An office router that already existed in the network, and is imported into GSO', 'ImportedOfficeRouter', 'IMP_OFFICE_RTR', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported IP trunk')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('IptrunkBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported router')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('RouterBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported site')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SuperPopSwitchBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Imported office router')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OfficeRouterBlock')))
+    """))
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported IP trunk')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('IptrunkBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported router')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('RouterBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported site')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SuperPopSwitchBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported office router')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OfficeRouterBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM processes WHERE processes.pid IN (SELECT processes_subscriptions.pid FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site'))))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instances WHERE subscription_instances.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM products WHERE products.name IN ('Imported super PoP switch', 'Imported router', 'Imported IP trunk', 'Imported office router', 'Imported site')
+    """))
diff --git a/gso/migrations/versions/2024-04-19_ab8d805d27b3_add_creation_workflows_for_imported_.py b/gso/migrations/versions/2024-04-19_ab8d805d27b3_add_creation_workflows_for_imported_.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a1aa31859c0122c30d846b6baded4e7ba82074c
--- /dev/null
+++ b/gso/migrations/versions/2024-04-19_ab8d805d27b3_add_creation_workflows_for_imported_.py
@@ -0,0 +1,63 @@
+"""Add creation workflows for imported products.
+
+Revision ID: ab8d805d27b3
+Revises: 3b73ee683cec
+Create Date: 2024-04-19 16:21:07.304696
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'ab8d805d27b3'
+down_revision = '3b73ee683cec'
+branch_labels = None
+depends_on = None
+
+
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+new_workflows = [
+    {
+        "name": "create_imported_site",
+        "target": "CREATE",
+        "description": "Import Site",
+        "product_type": "ImportedSite"
+    },
+    {
+        "name": "create_imported_router",
+        "target": "CREATE",
+        "description": "Import router",
+        "product_type": "ImportedRouter"
+    },
+    {
+        "name": "create_imported_iptrunk",
+        "target": "CREATE",
+        "description": "Import iptrunk",
+        "product_type": "ImportedIptrunk"
+    },
+    {
+        "name": "create_imported_super_pop_switch",
+        "target": "CREATE",
+        "description": "Import Super PoP switch",
+        "product_type": "ImportedSuperPopSwitch"
+    },
+    {
+        "name": "create_imported_office_router",
+        "target": "CREATE",
+        "description": "Import office router",
+        "product_type": "ImportedOfficeRouter"
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        create_workflow(conn, workflow)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        delete_workflow(conn, workflow["name"])
diff --git a/gso/migrations/versions/2024-04-23_c12ec1d9bd92_add_product_import_workflows.py b/gso/migrations/versions/2024-04-23_c12ec1d9bd92_add_product_import_workflows.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7ac0093505d7342a663989360d44bc2236af70a
--- /dev/null
+++ b/gso/migrations/versions/2024-04-23_c12ec1d9bd92_add_product_import_workflows.py
@@ -0,0 +1,63 @@
+"""Add product import workflows.
+
+Revision ID: c12ec1d9bd92
+Revises: ab8d805d27b3
+Create Date: 2024-04-23 12:57:51.227269
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'c12ec1d9bd92'
+down_revision = 'ab8d805d27b3'
+branch_labels = None
+depends_on = None
+
+
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+new_workflows = [
+    {
+        "name": "import_site",
+        "target": "MODIFY",
+        "description": "Import Site",
+        "product_type": "ImportedSite"
+    },
+    {
+        "name": "import_office_router",
+        "target": "MODIFY",
+        "description": "Import OfficeRouter",
+        "product_type": "ImportedOfficeRouter"
+    },
+    {
+        "name": "import_super_pop_switch",
+        "target": "MODIFY",
+        "description": "Import SuperPopSwitch",
+        "product_type": "ImportedSuperPopSwitch"
+    },
+    {
+        "name": "import_router",
+        "target": "MODIFY",
+        "description": "Import Router",
+        "product_type": "ImportedRouter"
+    },
+    {
+        "name": "import_iptrunk",
+        "target": "MODIFY",
+        "description": "Import Iptrunk",
+        "product_type": "ImportedIptrunk"
+    }
+]
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        create_workflow(conn, workflow)
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    for workflow in new_workflows:
+        delete_workflow(conn, workflow["name"])
diff --git a/gso/migrations/versions/2024-04-26_32cad119b7c4_add_opengear_product.py b/gso/migrations/versions/2024-04-26_32cad119b7c4_add_opengear_product.py
new file mode 100644
index 0000000000000000000000000000000000000000..a87d2da38a45ce9c0b5158383a33090a08c119e3
--- /dev/null
+++ b/gso/migrations/versions/2024-04-26_32cad119b7c4_add_opengear_product.py
@@ -0,0 +1,116 @@
+"""Add Opengear product..
+
+Revision ID: 32cad119b7c4
+Revises: c12ec1d9bd92
+Create Date: 2024-04-26 11:12:36.852353
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '32cad119b7c4'
+down_revision = 'c12ec1d9bd92'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    conn.execute(sa.text("""
+INSERT INTO products (name, description, product_type, tag, status) VALUES ('Opengear', 'An Opengear', 'Opengear', 'OPENGEAR', 'active') RETURNING products.product_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_blocks (name, description, tag, status) VALUES ('OpengearBlock', 'An OpengearBlock', 'OPENGEAR_BLOCK', 'active') RETURNING product_blocks.product_block_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_address', 'The WAN address of the Opengear device.') RETURNING resource_types.resource_type_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_netmask', 'The WAN netmask of the Opengear device.') RETURNING resource_types.resource_type_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO resource_types (resource_type, description) VALUES ('opengear_wan_gateway', 'The WAN gateway of the Opengear device.') RETURNING resource_types.resource_type_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO resource_types (resource_type, description) VALUES ('opengear_hostname', 'The hostname of the Opengear device.') RETURNING resource_types.resource_type_id
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_product_blocks (product_id, product_block_id) VALUES ((SELECT products.product_id FROM products WHERE products.name IN ('Opengear')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_block_relations (in_use_by_id, depends_on_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask')))
+    """))
+    conn.execute(sa.text("""
+INSERT INTO product_block_resource_types (product_block_id, resource_type_id) VALUES ((SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')), (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway')))
+    """))
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    conn.execute(sa.text("""
+DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_hostname'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_netmask'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_block_resource_types WHERE product_block_resource_types.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instance_values USING product_block_resource_types WHERE subscription_instance_values.subscription_instance_id IN (SELECT subscription_instances.subscription_instance_id FROM subscription_instances WHERE subscription_instances.subscription_instance_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))) AND product_block_resource_types.resource_type_id = (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_gateway'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instance_values WHERE subscription_instance_values.resource_type_id IN (SELECT resource_types.resource_type_id FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address', 'opengear_wan_netmask', 'opengear_wan_gateway', 'opengear_hostname'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM resource_types WHERE resource_types.resource_type IN ('opengear_wan_address', 'opengear_wan_netmask', 'opengear_wan_gateway', 'opengear_hostname')
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_product_blocks WHERE product_product_blocks.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')) AND product_product_blocks.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_block_relations WHERE product_block_relations.in_use_by_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')) AND product_block_relations.depends_on_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('SiteBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instances WHERE subscription_instances.product_block_id IN (SELECT product_blocks.product_block_id FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM product_blocks WHERE product_blocks.name IN ('OpengearBlock')
+    """))
+    conn.execute(sa.text("""
+DELETE FROM processes WHERE processes.pid IN (SELECT processes_subscriptions.pid FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear'))))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM processes_subscriptions WHERE processes_subscriptions.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscription_instances WHERE subscription_instances.subscription_id IN (SELECT subscriptions.subscription_id FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear')))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM subscriptions WHERE subscriptions.product_id IN (SELECT products.product_id FROM products WHERE products.name IN ('Opengear'))
+    """))
+    conn.execute(sa.text("""
+DELETE FROM products WHERE products.name IN ('Opengear')
+    """))
diff --git a/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py b/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f4609b65cc69454403ebe90c49859f5e8a06d35
--- /dev/null
+++ b/gso/migrations/versions/2024-04-30_1ec810b289c0_add_orchestrator_2_2_1_V2_migrations.py
@@ -0,0 +1,23 @@
+"""Add upstream migrations as a dependency.
+
+Revision ID: 1ec810b289c0
+Revises: 32cad119b7c4
+Create Date: 2024-04-02 10:21:08.539591
+
+"""
+from alembic import op
+from orchestrator.migrations.helpers import create_workflow, delete_workflow
+
+# revision identifiers, used by Alembic.
+revision = '1ec810b289c0'
+down_revision = '32cad119b7c4'
+branch_labels = None
+depends_on = '048219045729'  # In this revision, SURF added new columns to the workflow table (such as deleted_at), so we depend on the revision that introduced them.
+
+
+def upgrade() -> None:
+    pass
+
+
+def downgrade() -> None:
+    pass
diff --git a/gso/monkeypatches.py b/gso/monkeypatches.py
index 2e94f50bdd27288e4ce7d829036ffbc8f022ef20..1b71f634ac0677b741b9670c756b79bc6a929f4e 100644
--- a/gso/monkeypatches.py
+++ b/gso/monkeypatches.py
@@ -7,11 +7,12 @@ oauth2_lib package to meet specific requirements of the gso application.
 import oauth2_lib.fastapi
 import oauth2_lib.settings
 
-from gso.auth.oidc_policy_helper import HTTPX_SSL_CONTEXT, OIDCUser, OIDCUserModel, opa_decision
+from gso.auth.oidc_policy_helper import HTTPX_SSL_CONTEXT, OIDCUser, OIDCUserModel, _get_decision, opa_decision
 from gso.auth.settings import oauth2lib_settings
 
 oauth2_lib.fastapi.OIDCUser = OIDCUser  # type: ignore[assignment, misc]
 oauth2_lib.fastapi.OIDCUserModel = OIDCUserModel  # type: ignore[assignment, misc]
 oauth2_lib.fastapi.opa_decision = opa_decision  # type: ignore[assignment]
+oauth2_lib.fastapi._get_decision = _get_decision  # type: ignore[assignment] # noqa: SLF001
 oauth2_lib.fastapi.HTTPX_SSL_CONTEXT = HTTPX_SSL_CONTEXT
 oauth2_lib.settings.oauth2lib_settings = oauth2lib_settings  # type: ignore[assignment]
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index ff6eb33dc70bcaad8cf453e5d678483cadb79c5f..2a40269346ad446e4b8fbac6a4f9ed4dafc8a3ea 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -45,7 +45,7 @@
     },
     "LT_IAS": {
       "V4": {"containers": ["10.255.255.0/24"], "networks": [], "mask": 31},
-      "V6": {"containers": ["dead:beef:cc::/48"], "networks": [], "mask": 126},
+      "V6": {"containers": [ "2001:798:1::/48"], "networks": [], "mask": 126},
       "domain_name": ".geantip",
       "dns_view": "default",
       "network_view": "default"
@@ -93,6 +93,14 @@
     "smtp_password": "password"
   },
   "SHAREPOINT": {
-    "checklist_site_url": "https://example.sharepoint.com/sites/example-site"
+    "client_id": "UUID",
+    "tenant_id": "UUID",
+    "certificate_path": "/path/to/certificate",
+    "certificate_password": "P4$$w0rd",
+    "site_id": "UUID",
+    "list_ids": {
+      "p_router": "UUID"
+    },
+    "scopes": ["https://graph.microsoft.com/.default"]
   }
 }
diff --git a/gso/products/__init__.py b/gso/products/__init__.py
index cab8e801f207c7617c2e204cb9fdde0aa0d5a366..e81b36b8bd9d1777edac64aea4b4da77cba49b4c 100644
--- a/gso/products/__init__.py
+++ b/gso/products/__init__.py
@@ -8,13 +8,14 @@
 from orchestrator.domain import SUBSCRIPTION_MODEL_REGISTRY
 from pydantic_forms.types import strEnum
 
-from gso.products.product_types.iptrunk import Iptrunk
+from gso.products.product_types.iptrunk import ImportedIptrunk, Iptrunk
 from gso.products.product_types.lan_switch_interconnect import LanSwitchInterconnect
-from gso.products.product_types.office_router import OfficeRouter
+from gso.products.product_types.office_router import ImportedOfficeRouter, OfficeRouter
+from gso.products.product_types.opengear import Opengear
 from gso.products.product_types.pop_vlan import PopVlan
-from gso.products.product_types.router import Router
-from gso.products.product_types.site import Site
-from gso.products.product_types.super_pop_switch import SuperPopSwitch
+from gso.products.product_types.router import ImportedRouter, Router
+from gso.products.product_types.site import ImportedSite, Site
+from gso.products.product_types.super_pop_switch import ImportedSuperPopSwitch, SuperPopSwitch
 from gso.products.product_types.switch import Switch
 
 
@@ -29,6 +30,12 @@ class ProductName(strEnum):
     SWITCH = "Switch"
     LAN_SWITCH_INTERCONNECT = "LAN Switch Interconnect"
     POP_VLAN = "Pop VLAN"
+    IMPORTED_IP_TRUNK = "Imported IP trunk"
+    IMPORTED_ROUTER = "Imported router"
+    IMPORTED_SITE = "Imported site"
+    IMPORTED_SUPER_POP_SWITCH = "Imported super PoP switch"
+    IMPORTED_OFFICE_ROUTER = "Imported office router"
+    OPENGEAR = "Opengear"
 
 
 class ProductType(strEnum):
@@ -42,6 +49,12 @@ class ProductType(strEnum):
     SWITCH = Switch.__name__
     LAN_SWITCH_INTERCONNECT = LanSwitchInterconnect.__name__
     POP_VLAN = PopVlan.__name__
+    IMPORTED_IP_TRUNK = ImportedIptrunk.__name__
+    IMPORTED_ROUTER = ImportedRouter.__name__
+    IMPORTED_SITE = ImportedSite.__name__
+    IMPORTED_SUPER_POP_SWITCH = ImportedSuperPopSwitch.__name__
+    IMPORTED_OFFICE_ROUTER = ImportedOfficeRouter.__name__
+    OPENGEAR = Opengear.__name__
 
 
 SUBSCRIPTION_MODEL_REGISTRY.update(
@@ -54,5 +67,11 @@ SUBSCRIPTION_MODEL_REGISTRY.update(
         ProductName.SWITCH.value: Switch,
         ProductName.LAN_SWITCH_INTERCONNECT.value: LanSwitchInterconnect,
         ProductName.POP_VLAN.value: PopVlan,
+        ProductName.IMPORTED_IP_TRUNK.value: ImportedIptrunk,
+        ProductName.IMPORTED_ROUTER.value: ImportedRouter,
+        ProductName.IMPORTED_SITE.value: ImportedSite,
+        ProductName.IMPORTED_SUPER_POP_SWITCH.value: ImportedSuperPopSwitch,
+        ProductName.IMPORTED_OFFICE_ROUTER.value: ImportedOfficeRouter,
+        ProductName.OPENGEAR.value: Opengear,
     },
 )
diff --git a/gso/products/product_blocks/iptrunk.py b/gso/products/product_blocks/iptrunk.py
index cac186262f641fc52fdc62a4e4c556c7d924f22b..f2c6cea63786a206b4ed03ed74543622617a7f62 100644
--- a/gso/products/product_blocks/iptrunk.py
+++ b/gso/products/product_blocks/iptrunk.py
@@ -1,11 +1,14 @@
 """IP trunk product block that has all parameters of a subscription throughout its lifecycle."""
 
 import ipaddress
-from typing import TypeVar
+from typing import Annotated
 
-from orchestrator.domain.base import ProductBlockModel
-from orchestrator.forms.validators import UniqueConstrainedList
+from annotated_types import Len
+from orchestrator.domain.base import ProductBlockModel, T
 from orchestrator.types import SubscriptionLifecycle, strEnum
+from pydantic import AfterValidator
+from pydantic_forms.validators import validate_unique_list
+from typing_extensions import Doc
 
 from gso.products.product_blocks.router import (
     RouterBlock,
@@ -33,11 +36,15 @@ class IptrunkType(strEnum):
     LEASED = "Leased"
 
 
-T_co = TypeVar("T_co", covariant=True)
-
-
-class LAGMemberList(UniqueConstrainedList[T_co]):  # type: ignore[type-var]
-    """A list of :term:`LAG` member interfaces."""
+LAGMemberList = Annotated[
+    list[T], AfterValidator(validate_unique_list), Len(min_length=0), Doc("A list of :term:`LAG` member interfaces.")
+]
+IptrunkSides = Annotated[
+    list[T],
+    AfterValidator(validate_unique_list),
+    Len(min_length=2, max_length=2),
+    Doc("A list of IP trunk interfaces that make up one side of a link."),
+]
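+# Note: with these Annotated aliases, uniqueness and length constraints are enforced by pydantic at
+# validation time; an IptrunkSides list always contains exactly two entries, one per side of the trunk.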
 
 
 class IptrunkInterfaceBlockInactive(
@@ -65,13 +72,6 @@ class IptrunkInterfaceBlock(IptrunkInterfaceBlockProvisioning, lifecycle=[Subscr
     interface_description: str | None = None
 
 
-class IptrunkSides(UniqueConstrainedList[T_co]):  # type: ignore[type-var]
-    """A list of IP trunk interfaces that make up one side of a link."""
-
-    min_items = 2
-    max_items = 2
-
-
 class IptrunkSideBlockInactive(
     ProductBlockModel,
     lifecycle=[SubscriptionLifecycle.INITIAL],
@@ -91,7 +91,7 @@ class IptrunkSideBlockProvisioning(IptrunkSideBlockInactive, lifecycle=[Subscrip
     iptrunk_side_node: RouterBlockProvisioning
     iptrunk_side_ae_iface: str | None = None
     iptrunk_side_ae_geant_a_sid: str | None = None
-    iptrunk_side_ae_members: LAGMemberList[IptrunkInterfaceBlockProvisioning]
+    iptrunk_side_ae_members: LAGMemberList[IptrunkInterfaceBlockProvisioning]  # type: ignore[assignment]
 
 
 class IptrunkSideBlock(IptrunkSideBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
@@ -100,7 +100,7 @@ class IptrunkSideBlock(IptrunkSideBlockProvisioning, lifecycle=[SubscriptionLife
     iptrunk_side_node: RouterBlock
     iptrunk_side_ae_iface: str | None = None
     iptrunk_side_ae_geant_a_sid: str | None = None
-    iptrunk_side_ae_members: LAGMemberList[IptrunkInterfaceBlock]
+    iptrunk_side_ae_members: LAGMemberList[IptrunkInterfaceBlock]  # type: ignore[assignment]
 
 
 class IptrunkBlockInactive(
@@ -132,7 +132,7 @@ class IptrunkBlockProvisioning(IptrunkBlockInactive, lifecycle=[SubscriptionLife
     iptrunk_isis_metric: int | None = None
     iptrunk_ipv4_network: ipaddress.IPv4Network | None = None
     iptrunk_ipv6_network: ipaddress.IPv6Network | None = None
-    iptrunk_sides: IptrunkSides[IptrunkSideBlockProvisioning]
+    iptrunk_sides: IptrunkSides[IptrunkSideBlockProvisioning]  # type: ignore[assignment]
 
 
 class IptrunkBlock(IptrunkBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
@@ -141,7 +141,7 @@ class IptrunkBlock(IptrunkBlockProvisioning, lifecycle=[SubscriptionLifecycle.AC
     #:  GÉANT service ID associated with this trunk.
     geant_s_sid: str | None = None
     #:  A human-readable description of this trunk.
-    iptrunk_description: str
+    iptrunk_description: str | None = None
     #:  The type of trunk, can be either dark fibre or leased capacity.
     iptrunk_type: IptrunkType
     #:  The speed of the trunk, measured per interface associated with it.
@@ -155,4 +155,4 @@ class IptrunkBlock(IptrunkBlockProvisioning, lifecycle=[SubscriptionLifecycle.AC
     #:  The IPv6 network used for this trunk.
     iptrunk_ipv6_network: ipaddress.IPv6Network
     #:  The two sides that the trunk is connected to.
-    iptrunk_sides: IptrunkSides[IptrunkSideBlock]
+    iptrunk_sides: IptrunkSides[IptrunkSideBlock]  # type: ignore[assignment]
diff --git a/gso/products/product_blocks/lan_switch_interconnect.py b/gso/products/product_blocks/lan_switch_interconnect.py
index 55a7b97a3ca788dbe3e61f5ddd9c8a5344e5976f..a9b1c77366662e24499f936f9762d515b882b934 100644
--- a/gso/products/product_blocks/lan_switch_interconnect.py
+++ b/gso/products/product_blocks/lan_switch_interconnect.py
@@ -63,7 +63,7 @@ class LanSwitchInterconnectRouterSideBlockProvisioning(
 
     node: RouterBlockProvisioning
     ae_iface: str | None = None
-    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlockProvisioning]
+    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlockProvisioning]  # type: ignore[assignment]
 
 
 class LanSwitchInterconnectRouterSideBlock(
@@ -73,7 +73,7 @@ class LanSwitchInterconnectRouterSideBlock(
 
     node: RouterBlock
     ae_iface: str
-    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlock]
+    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlock]  # type: ignore[assignment]
 
 
 class LanSwitchInterconnectSwitchSideBlockInactive(
@@ -95,7 +95,7 @@ class LanSwitchInterconnectSwitchSideBlockProvisioning(
 
     node: SwitchBlockProvisioning
     ae_iface: str | None = None
-    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlockProvisioning]
+    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlockProvisioning]  # type: ignore[assignment]
 
 
 class LanSwitchInterconnectSwitchSideBlock(
@@ -105,7 +105,7 @@ class LanSwitchInterconnectSwitchSideBlock(
 
     node: SwitchBlock
     ae_iface: str
-    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlock]
+    ae_members: LAGMemberList[LanSwitchInterconnectInterfaceBlock]  # type: ignore[assignment]
 
 
 class LanSwitchInterconnectBlockInactive(
diff --git a/gso/products/product_blocks/office_router.py b/gso/products/product_blocks/office_router.py
index fec7ad8d16366baf12ec3528748f71aa2fa36d90..65eab0256a073c699f3ea2ef84d96e3352096722 100644
--- a/gso/products/product_blocks/office_router.py
+++ b/gso/products/product_blocks/office_router.py
@@ -1,7 +1,5 @@
 """Product block for :class:`office router` products."""
 
-import ipaddress
-
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle
 
@@ -10,7 +8,7 @@ from gso.products.product_blocks.site import (
     SiteBlockInactive,
     SiteBlockProvisioning,
 )
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 
 class OfficeRouterBlockInactive(
@@ -22,8 +20,8 @@ class OfficeRouterBlockInactive(
 
     office_router_fqdn: str | None = None
     office_router_ts_port: PortNumber | None = None
-    office_router_lo_ipv4_address: ipaddress.IPv4Address | None = None
-    office_router_lo_ipv6_address: ipaddress.IPv6Address | None = None
+    office_router_lo_ipv4_address: IPv4AddressType | None = None
+    office_router_lo_ipv6_address: IPv6AddressType | None = None
     office_router_site: SiteBlockInactive | None
     vendor: Vendor | None = None
 
@@ -33,8 +31,8 @@ class OfficeRouterBlockProvisioning(OfficeRouterBlockInactive, lifecycle=[Subscr
 
     office_router_fqdn: str | None = None
     office_router_ts_port: PortNumber | None = None
-    office_router_lo_ipv4_address: ipaddress.IPv4Address | None = None
-    office_router_lo_ipv6_address: ipaddress.IPv6Address | None = None
+    office_router_lo_ipv4_address: IPv4AddressType | None = None
+    office_router_lo_ipv6_address: IPv6AddressType | None = None
     office_router_site: SiteBlockProvisioning | None
     vendor: Vendor | None = None
 
@@ -47,9 +45,9 @@ class OfficeRouterBlock(OfficeRouterBlockProvisioning, lifecycle=[SubscriptionLi
     #:  The port of the terminal server that this office router is connected to. Used to offer out of band access.
     office_router_ts_port: PortNumber
     #:  The IPv4 loopback address of the office router.
-    office_router_lo_ipv4_address: ipaddress.IPv4Address
+    office_router_lo_ipv4_address: IPv4AddressType
     #:  The IPv6 loopback address of the office router.
-    office_router_lo_ipv6_address: ipaddress.IPv6Address
+    office_router_lo_ipv6_address: IPv6AddressType
     #:  The :class:`Site` that this office router resides in. Both physically and computationally.
     office_router_site: SiteBlock
     #:  The vendor of an office router. Defaults to Juniper.
diff --git a/gso/products/product_blocks/opengear.py b/gso/products/product_blocks/opengear.py
new file mode 100644
index 0000000000000000000000000000000000000000..84ed84b3b11d122f757b3e764202ce5052ca615f
--- /dev/null
+++ b/gso/products/product_blocks/opengear.py
@@ -0,0 +1,51 @@
+"""Product block for :class:`Opengear` products."""
+
+import ipaddress
+
+from orchestrator.domain.base import ProductBlockModel
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products.product_blocks.site import (
+    SiteBlock,
+    SiteBlockInactive,
+    SiteBlockProvisioning,
+)
+
+
+class OpengearBlockInactive(
+    ProductBlockModel,
+    lifecycle=[SubscriptionLifecycle.INITIAL],
+    product_block_name="OpengearBlock",
+):
+    """An Opengear that's being currently inactive. See :class:`OpengearBlock`."""
+
+    opengear_hostname: str | None = None
+    opengear_site: SiteBlockInactive | None = None
+    opengear_wan_address: ipaddress.IPv4Address | None = None
+    opengear_wan_netmask: ipaddress.IPv4Address | None = None
+    opengear_wan_gateway: ipaddress.IPv4Address | None = None
+
+
+class OpengearBlockProvisioning(OpengearBlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
+    """An Opengear that's being provisioned. See :class:`OpengearBlock`."""
+
+    opengear_hostname: str
+    opengear_site: SiteBlockProvisioning
+    opengear_wan_address: ipaddress.IPv4Address | None = None
+    opengear_wan_netmask: ipaddress.IPv4Address | None = None
+    opengear_wan_gateway: ipaddress.IPv4Address | None = None
+
+
+class OpengearBlock(OpengearBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+    """An Opengear that's currently deployed in the network."""
+
+    #: The hostname of the Opengear device.
+    opengear_hostname: str
+    #: The site where the Opengear device is located.
+    opengear_site: SiteBlock
+    #: The WAN address of the Opengear device.
+    opengear_wan_address: ipaddress.IPv4Address
+    #: The WAN netmask of the Opengear device.
+    opengear_wan_netmask: ipaddress.IPv4Address
+    #: The WAN gateway of the Opengear device.
+    opengear_wan_gateway: ipaddress.IPv4Address
diff --git a/gso/products/product_blocks/pop_vlan.py b/gso/products/product_blocks/pop_vlan.py
index 4935c2f69966874c686489c7e06e3061d5509365..191e7255951f24f9514a592989546774d17f2185 100644
--- a/gso/products/product_blocks/pop_vlan.py
+++ b/gso/products/product_blocks/pop_vlan.py
@@ -1,12 +1,14 @@
 """Pop VLAN product block that has all parameters of a subscription throughout its lifecycle."""
 
 from ipaddress import IPv4Network, IPv6Network
-from typing import TypeVar
+from typing import Annotated, TypeVar
 
 from orchestrator.domain.base import ProductBlockModel
-from orchestrator.forms.validators import UniqueConstrainedList
 from orchestrator.types import SubscriptionLifecycle
+from pydantic import AfterValidator
 from pydantic_forms.types import strEnum
+from pydantic_forms.validators import validate_unique_list
+from typing_extensions import Doc
 
 from gso.products.product_blocks.lan_switch_interconnect import (
     LanSwitchInterconnectBlock,
@@ -14,7 +16,7 @@ from gso.products.product_blocks.lan_switch_interconnect import (
     LanSwitchInterconnectBlockProvisioning,
 )
 
-T_co = TypeVar("T_co", covariant=True)
+T = TypeVar("T")
 
 
 class LayerPreference(strEnum):
@@ -24,8 +26,7 @@ class LayerPreference(strEnum):
     L3 = "L3"
 
 
-class PortList(UniqueConstrainedList[T_co]):  # type: ignore[type-var]
-    """A list of ports."""
+PortList = Annotated[list[T], AfterValidator(validate_unique_list), Doc("A list of unique ports.")]
 
 
 class PopVlanPortBlockInactive(
@@ -74,7 +75,7 @@ class PopVlanBlockProvisioning(PopVlanBlockInactive, lifecycle=[SubscriptionLife
     """A Pop VLAN that's currently being provisioned, see :class:`PopVlanBlock`."""
 
     vlan_id: int
-    pop_vlan_description: str | None
+    pop_vlan_description: str | None = None
     lan_switch_interconnect: LanSwitchInterconnectBlockProvisioning
     ports: PortList[PopVlanPortBlockProvisioning]
     layer_preference: LayerPreference
@@ -92,7 +93,7 @@ class PopVlanBlock(PopVlanBlockProvisioning, lifecycle=[SubscriptionLifecycle.AC
     #: The LAN Switch Interconnect that this Pop VLAN is connected to.
     lan_switch_interconnect: LanSwitchInterconnectBlock
     #: The ports of the Pop VLAN.
-    ports: PortList[PopVlanPortBlock]
+    ports: PortList[PopVlanPortBlock]  # type: ignore[assignment]
     #: The level of the layer preference for the Pop VLAN (L2 or L3).
     layer_preference: LayerPreference
     #: IPv4 network for the Pop VLAN if layer preference is L3.
diff --git a/gso/products/product_blocks/router.py b/gso/products/product_blocks/router.py
index f91bf1c70507a2f7814bfe69643c70489cb0c4c2..17deeccb1ac8a5ee9bcfaa14fa25f27360881e7c 100644
--- a/gso/products/product_blocks/router.py
+++ b/gso/products/product_blocks/router.py
@@ -1,7 +1,5 @@
 """Product block for :class:`Router` products."""
 
-import ipaddress
-
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle, strEnum
 
@@ -10,7 +8,7 @@ from gso.products.product_blocks.site import (
     SiteBlockInactive,
     SiteBlockProvisioning,
 )
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 
 class RouterRole(strEnum):
@@ -31,8 +29,8 @@ class RouterBlockInactive(
     router_fqdn: str | None = None
     router_ts_port: PortNumber | None = None
     router_access_via_ts: bool | None = None
-    router_lo_ipv4_address: ipaddress.IPv4Address | None = None
-    router_lo_ipv6_address: ipaddress.IPv6Address | None = None
+    router_lo_ipv4_address: IPv4AddressType | None = None
+    router_lo_ipv6_address: IPv6AddressType | None = None
     router_lo_iso_address: str | None = None
     router_role: RouterRole | None = None
     router_site: SiteBlockInactive | None
@@ -45,8 +43,8 @@ class RouterBlockProvisioning(RouterBlockInactive, lifecycle=[SubscriptionLifecy
     router_fqdn: str
     router_ts_port: PortNumber
     router_access_via_ts: bool
-    router_lo_ipv4_address: ipaddress.IPv4Address
-    router_lo_ipv6_address: ipaddress.IPv6Address
+    router_lo_ipv4_address: IPv4AddressType
+    router_lo_ipv6_address: IPv6AddressType
     router_lo_iso_address: str
     router_role: RouterRole
     router_site: SiteBlockProvisioning
@@ -63,9 +61,9 @@ class RouterBlock(RouterBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTI
     #:  Whether this router should be accessed through the terminal server, or through its loopback address.
     router_access_via_ts: bool
     #:  The IPv4 loopback address of the router.
-    router_lo_ipv4_address: ipaddress.IPv4Address
+    router_lo_ipv4_address: IPv4AddressType
     #:  The IPv6 loopback address of the router.
-    router_lo_ipv6_address: ipaddress.IPv6Address
+    router_lo_ipv6_address: IPv6AddressType
     #:  The :term:`ISO` :term:`NET` of the router, used for :term:`ISIS` support.
     router_lo_iso_address: str
     #:  The role of the router, which can be any of the values defined in :class:`RouterRole`.
diff --git a/gso/products/product_blocks/site.py b/gso/products/product_blocks/site.py
index 1852b24615076b2d76dde41db71a9e5d5fcc535f..be7d086adc021992ede2048b6ef4a843c1793755 100644
--- a/gso/products/product_blocks/site.py
+++ b/gso/products/product_blocks/site.py
@@ -1,10 +1,18 @@
 """The product block that describes a site subscription."""
 
 import re
+from typing import Annotated
 
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle, strEnum
-from pydantic import ConstrainedStr
+from pydantic import AfterValidator
+from typing_extensions import Doc
+
+MAX_LONGITUDE = 180
+MIN_LONGITUDE = -180
+
+MAX_LATITUDE = 90
+MIN_LATITUDE = -90
 
 
 class SiteTier(strEnum):
@@ -20,44 +28,54 @@ class SiteTier(strEnum):
     TIER4 = 4
 
 
-class LatitudeCoordinate(ConstrainedStr):
-    """A latitude coordinate, modeled as a constrained string.
-
-    The coordinate must match the format conforming to the latitude range of -90 to +90 degrees. It can be a
-    floating-point number or an integer.
-    Valid examples: 40.7128, -74.0060, 90, -90, 0
-    """
-
+def validate_latitude(v: str) -> str:
+    """Validate a latitude coordinate."""
+    msg = "Invalid latitude coordinate. Valid examples: '40.7128', '-74.0060', '90', '-90', '0'."
     regex = re.compile(r"^-?([1-8]?\d(\.\d+)?|90(\.0+)?)$")
+    if not regex.match(str(v)):
+        raise ValueError(msg)
 
-    @classmethod
-    def validate(cls, value: str) -> str:
-        """Validate that a latitude coordinate is valid."""
-        if not cls.regex.match(value):
-            msg = "Invalid latitude coordinate. Valid examples: '40.7128', '-74.0060', '90', '-90', '0'."
-            raise ValueError(msg)
+    float_v = float(v)
+    if float_v > MAX_LATITUDE or float_v < MIN_LATITUDE:
+        raise ValueError(msg)
 
-        return value
+    return v
 
 
-class LongitudeCoordinate(ConstrainedStr):
-    """A longitude coordinate, modeled as a constrained string.
-
-    The coordinate must match the format conforming to the longitude
-    range of -180 to +180 degrees. It can be a floating point number or an integer.
-    Valid examples: 40.7128, -74.0060, 180, -180, 0
-    """
-
+def validate_longitude(v: str) -> str:
+    """Validate a longitude coordinate."""
     regex = re.compile(r"^-?(180(\.0+)?|((1[0-7]\d)|([1-9]?\d))(\.\d+)?)$")
-
-    @classmethod
-    def validate(cls, value: str) -> str:
-        """Validate that a longitude coordinate is valid."""
-        if not cls.regex.match(value):
-            msg = "Invalid longitude coordinate. Valid examples: '40.7128', '-74.0060', '180', '-180'"
-            raise ValueError(msg)
-
-        return value
+    msg = "Invalid longitude coordinate. Valid examples: '40.7128', '-74.0060', '180', '-180', '0'."
+    if not regex.match(v):
+        raise ValueError(msg)
+
+    float_v = float(v)
+    if float_v > MAX_LONGITUDE or float_v < MIN_LONGITUDE:
+        raise ValueError(msg)
+
+    return v
+
+
+LatitudeCoordinate = Annotated[
+    str,
+    AfterValidator(validate_latitude),
+    Doc(
+        "A latitude coordinate, modeled as a string. "
+        "The coordinate must match the format conforming to the latitude range of -90 to +90 degrees. "
+        "It can be a floating-point number or an integer. Valid examples: 40.7128, -74.0060, 90, -90, 0."
+    ),
+]
+
+LongitudeCoordinate = Annotated[
+    str,
+    AfterValidator(validate_longitude),
+    Doc(
+        "A longitude coordinate, modeled as a string. "
+        "The coordinate must match the format conforming to the longitude "
+        "range of -180 to +180 degrees. It can be a floating-point number or an integer. "
+        "Valid examples: 40.7128, -74.0060, 180, -180, 0."
+    ),
+]
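+
+# Illustrative sanity check of the validators (not part of the product model):
+#
+#   >>> validate_latitude("52.3556")
+#   '52.3556'
+#   >>> validate_longitude("190")   # outside the -180..+180 range, so this raises ValueError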
 
 
 class SiteBlockInactive(
diff --git a/gso/products/product_blocks/super_pop_switch.py b/gso/products/product_blocks/super_pop_switch.py
index af2f2ba74c98cc41806842d9877e8b0168ec3748..3335b28cf90ee9d55abe59be528f404d44d905b8 100644
--- a/gso/products/product_blocks/super_pop_switch.py
+++ b/gso/products/product_blocks/super_pop_switch.py
@@ -1,7 +1,5 @@
 """Product block for :class:`Super PoP Switch` products."""
 
-import ipaddress
-
 from orchestrator.domain.base import ProductBlockModel
 from orchestrator.types import SubscriptionLifecycle
 
@@ -10,7 +8,7 @@ from gso.products.product_blocks.site import (
     SiteBlockInactive,
     SiteBlockProvisioning,
 )
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, PortNumber, Vendor
 
 
 class SuperPopSwitchBlockInactive(
@@ -22,7 +20,7 @@ class SuperPopSwitchBlockInactive(
 
     super_pop_switch_fqdn: str | None = None
     super_pop_switch_ts_port: PortNumber | None = None
-    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address | None = None
+    super_pop_switch_mgmt_ipv4_address: IPv4AddressType | None = None
     super_pop_switch_site: SiteBlockInactive | None
     vendor: Vendor | None = None
 
@@ -32,7 +30,7 @@ class SuperPopSwitchBlockProvisioning(SuperPopSwitchBlockInactive, lifecycle=[Su
 
     super_pop_switch_fqdn: str | None = None
     super_pop_switch_ts_port: PortNumber | None = None
-    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address | None = None
+    super_pop_switch_mgmt_ipv4_address: IPv4AddressType | None = None
     super_pop_switch_site: SiteBlockProvisioning | None
     vendor: Vendor | None = None
 
@@ -45,7 +43,7 @@ class SuperPopSwitchBlock(SuperPopSwitchBlockProvisioning, lifecycle=[Subscripti
     #:  The port of the terminal server that this Super PoP switch is connected to. Used to offer out of band access.
     super_pop_switch_ts_port: PortNumber
     #:  The IPv4 management address of the Super PoP switch.
-    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address
+    super_pop_switch_mgmt_ipv4_address: IPv4AddressType
     #:  The :class:`Site` that this Super PoP switch resides in. Both physically and computationally.
     super_pop_switch_site: SiteBlock
     #:  The vendor of a Super PoP switch. Defaults to Juniper.
diff --git a/gso/products/product_blocks/switch.py b/gso/products/product_blocks/switch.py
index 660cfd81320fc0739713a649a780b20265871d11..f0aa0414e11409b58341fc2648f3066fa91c5aee 100644
--- a/gso/products/product_blocks/switch.py
+++ b/gso/products/product_blocks/switch.py
@@ -27,8 +27,8 @@ class SwitchBlockInactive(
 
     switch_hostname: str | None = None
     switch_ts_port: PortNumber | None = None
-    switch_site: SiteBlockInactive | None
-    switch_vendor: Vendor | None
+    switch_site: SiteBlockInactive | None = None
+    switch_vendor: Vendor | None = None
     switch_model: SwitchModel | None = None
 
 
diff --git a/gso/products/product_types/iptrunk.py b/gso/products/product_types/iptrunk.py
index 70612d49216fa6f00fb47835bbbc1b772b818dc1..9d71188ea260077f2eeec344d8c4a3ac75d02155 100644
--- a/gso/products/product_types/iptrunk.py
+++ b/gso/products/product_types/iptrunk.py
@@ -26,3 +26,17 @@ class Iptrunk(IptrunkProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
     """An IP trunk that is active."""
 
     iptrunk: IptrunkBlock
+
+
+class ImportedIptrunkInactive(SubscriptionModel, is_base=True):
+    """An imported IP trunk that is inactive."""
+
+    iptrunk: IptrunkBlockInactive
+
+
+class ImportedIptrunk(
+    ImportedIptrunkInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE]
+):
+    """An imported IP trunk that is active."""
+
+    iptrunk: IptrunkBlock
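Note: every imported product type in this change set follows the same two-stage pattern: a create_imported_* workflow instantiates the Imported*Inactive model, and a follow-up import_* workflow later takes it to the regular product. A condensed sketch of the lifecycle handling for IP trunks, reusing calls that appear elsewhere in this change set (the real workflows also fill in the product block and persist the subscription):

    from orchestrator.types import SubscriptionLifecycle

    from gso.products import ProductName
    from gso.products.product_types.iptrunk import ImportedIptrunk, ImportedIptrunkInactive
    from gso.services import subscriptions
    from gso.services.partners import get_partner_by_name

    # Stage 1: create the imported subscription in its initial lifecycle state.
    partner_id = get_partner_by_name("GEANT")["partner_id"]
    product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_IP_TRUNK)
    subscription = ImportedIptrunkInactive.from_product_id(product_id, partner_id)

    # Stage 2: once its resources have been filled in, promote it to ACTIVE.
    subscription = ImportedIptrunk.from_other_lifecycle(subscription, SubscriptionLifecycle.ACTIVE)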
diff --git a/gso/products/product_types/office_router.py b/gso/products/product_types/office_router.py
index 6fff33e041edc35e1f333f66a01ac3d6f78fc286..cfb98cdbf1cc5e62468811bffdbdac317ff33fe8 100644
--- a/gso/products/product_types/office_router.py
+++ b/gso/products/product_types/office_router.py
@@ -26,3 +26,17 @@ class OfficeRouter(OfficeRouterProvisioning, lifecycle=[SubscriptionLifecycle.AC
     """An office router that is currently active."""
 
     office_router: OfficeRouterBlock
+
+
+class ImportedOfficeRouterInactive(SubscriptionModel, is_base=True):
+    """An imported, inactive office router."""
+
+    office_router: OfficeRouterBlockInactive
+
+
+class ImportedOfficeRouter(
+    ImportedOfficeRouterInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE]
+):
+    """An imported office router that is currently active."""
+
+    office_router: OfficeRouterBlock
diff --git a/gso/products/product_types/opengear.py b/gso/products/product_types/opengear.py
new file mode 100644
index 0000000000000000000000000000000000000000..a752f07e958748584b4642f62776b8e88ab1fa54
--- /dev/null
+++ b/gso/products/product_types/opengear.py
@@ -0,0 +1,24 @@
+"""An Opengear product type."""
+
+from orchestrator.domain.base import SubscriptionModel
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products.product_blocks.opengear import OpengearBlock, OpengearBlockInactive, OpengearBlockProvisioning
+
+
+class OpengearInactive(SubscriptionModel, is_base=True):
+    """An inactive Opengear."""
+
+    opengear: OpengearBlockInactive
+
+
+class OpengearProvisioning(OpengearInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
+    """An Opengear that is being provisioned."""
+
+    opengear: OpengearBlockProvisioning
+
+
+class Opengear(OpengearProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+    """An Opengear that is currently active."""
+
+    opengear: OpengearBlock
diff --git a/gso/products/product_types/router.py b/gso/products/product_types/router.py
index d6a59c12ccd14dea0dac8852a7748810359718b7..973abc90a692b78c349c7a7387343596d2acce12 100644
--- a/gso/products/product_types/router.py
+++ b/gso/products/product_types/router.py
@@ -26,3 +26,17 @@ class Router(RouterProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
     """A router that is currently active."""
 
     router: RouterBlock
+
+
+class ImportedRouterInactive(SubscriptionModel, is_base=True):
+    """An imported, inactive router."""
+
+    router: RouterBlockInactive
+
+
+class ImportedRouter(
+    ImportedRouterInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE]
+):
+    """An imported router that is currently active."""
+
+    router: RouterBlock
diff --git a/gso/products/product_types/site.py b/gso/products/product_types/site.py
index ec09962e33521d82bd5c05b6fa3bccd6dbb6e8bf..be5c90b0ccc3972f58786ba009267de401f4c9bc 100644
--- a/gso/products/product_types/site.py
+++ b/gso/products/product_types/site.py
@@ -26,3 +26,15 @@ class Site(SiteProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
     """A site that is currently active."""
 
     site: SiteBlock
+
+
+class ImportedSiteInactive(SubscriptionModel, is_base=True):
+    """An imported site that is inactive."""
+
+    site: SiteBlockInactive
+
+
+class ImportedSite(ImportedSiteInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE]):
+    """An imported site that is currently active."""
+
+    site: SiteBlock
diff --git a/gso/products/product_types/super_pop_switch.py b/gso/products/product_types/super_pop_switch.py
index e12b1a3d12793d372c47b5022a85307a7bf730ce..298627ae5c7d6949981fffb406685e19eb8b6f2e 100644
--- a/gso/products/product_types/super_pop_switch.py
+++ b/gso/products/product_types/super_pop_switch.py
@@ -26,3 +26,17 @@ class SuperPopSwitch(SuperPopSwitchProvisioning, lifecycle=[SubscriptionLifecycl
     """A Super PoP switch that is currently active."""
 
     super_pop_switch: SuperPopSwitchBlock
+
+
+class ImportedSuperPopSwitchInactive(SubscriptionModel, is_base=True):
+    """An imported, inactive Super PoP switch."""
+
+    super_pop_switch: SuperPopSwitchBlockInactive
+
+
+class ImportedSuperPopSwitch(
+    ImportedSuperPopSwitchInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE]
+):
+    """An imported Super PoP switch that is currently active."""
+
+    super_pop_switch: SuperPopSwitchBlock
diff --git a/gso/schedules/scheduling.py b/gso/schedules/scheduling.py
index 8525956cb7933facebd090d8c34938f66640bb56..b0e22a0916cb730c1058f661688111dd49d26737 100644
--- a/gso/schedules/scheduling.py
+++ b/gso/schedules/scheduling.py
@@ -7,16 +7,21 @@ from typing import Any
 
 from celery import current_app
 from celery.schedules import crontab
+from pydantic import BaseModel
 
 
-def scheduler(
-    name: str,
-    minute: str = "*",
-    hour: str = "*",
-    day_of_week: str = "*",
-    day_of_month: str = "*",
-    month_of_year: str = "*",
-) -> Callable[[Callable], Callable]:
+class CronScheduleConfig(BaseModel):
+    """Configuration for scheduling a task using crontab-like timing parameters."""
+
+    name: str
+    minute: str = "*"
+    hour: str = "*"
+    day_of_week: str = "*"
+    day_of_month: str = "*"
+    month_of_year: str = "*"
+
+
+def scheduler(cron_scheduler_config: CronScheduleConfig) -> Callable[[Callable], Callable]:
     """Schedule a Celery task using crontab-like timing.
 
     Examples
@@ -29,6 +34,7 @@ def scheduler(
 
     All time units can be specified with lists of numbers or crontab pattern strings for advanced scheduling.
     All specified time parts (minute, hour, day, etc.) must align for a task to run.
+
     """
 
     def decorator(task_func: Callable) -> Callable:
@@ -43,14 +49,14 @@ def scheduler(
 
         task_path = f"{module.__name__}.{task_func.__name__}"
         current_app.conf.beat_schedule[task_func.__name__] = {
-            "name": name,
+            "name": cron_scheduler_config.name,
             "task": task_path,
             "schedule": crontab(
-                minute=minute,
-                hour=hour,
-                day_of_month=day_of_month,
-                month_of_year=month_of_year,
-                day_of_week=day_of_week,
+                minute=cron_scheduler_config.minute,
+                hour=cron_scheduler_config.hour,
+                day_of_month=cron_scheduler_config.day_of_month,
+                month_of_year=cron_scheduler_config.month_of_year,
+                day_of_week=cron_scheduler_config.day_of_week,
             ),
         }
 
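Note: the decorator now takes a single validated CronScheduleConfig instead of loose keyword arguments, and crontab pattern strings keep working as before. A sketch of registering a hypothetical weekly task (the task name and function below are illustrative only, not part of this change):

    from gso.schedules.scheduling import CronScheduleConfig, scheduler
    from gso.worker import celery


    @celery.task
    @scheduler(CronScheduleConfig(name="Weekly report", minute="0", hour="6", day_of_week="mon"))
    def weekly_report() -> None:
        """Hypothetical task that would run every Monday at 06:00."""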
diff --git a/gso/schedules/task_vacuum.py b/gso/schedules/task_vacuum.py
index 98e05343dff0c701dd6e48f229d1e553e4149358..be04380f8caaf53152f77ca129ba74b9d63bd164 100644
--- a/gso/schedules/task_vacuum.py
+++ b/gso/schedules/task_vacuum.py
@@ -2,12 +2,12 @@
 
 from orchestrator.services.processes import start_process
 
-from gso.schedules.scheduling import scheduler
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
 from gso.worker import celery
 
 
 @celery.task
-@scheduler(name="Clean up tasks", hour="*/6")
+@scheduler(CronScheduleConfig(name="Clean up tasks", hour="*/6"))
 def vacuum_tasks() -> None:
     """Run all cleanup tasks every 6 hours."""
     start_process("task_clean_up_tasks")
diff --git a/gso/schedules/validate_products.py b/gso/schedules/validate_products.py
index cb9ecc675264835dd4f13dc0069be61f3472f742..580c7540db85e3565fd62c6a183f880490e9f593 100644
--- a/gso/schedules/validate_products.py
+++ b/gso/schedules/validate_products.py
@@ -2,13 +2,13 @@
 
 from orchestrator.services.processes import start_process
 
-from gso.schedules.scheduling import scheduler
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
 from gso.services.subscriptions import count_incomplete_validate_products
 from gso.worker import celery
 
 
 @celery.task
-@scheduler(name="Validate Products and inactive subscriptions", minute="30", hour="2")
+@scheduler(CronScheduleConfig(name="Validate Products and inactive subscriptions", minute="30", hour="2"))
 def validate_products() -> None:
     """Validate all products."""
     if count_incomplete_validate_products() > 0:
diff --git a/gso/schedules/validate_subscriptions.py b/gso/schedules/validate_subscriptions.py
index 7c4404356a656291c169cb665ac1a12e56ce0e38..9e79ec91cabda776762147bd0b86226fe8256879 100644
--- a/gso/schedules/validate_subscriptions.py
+++ b/gso/schedules/validate_subscriptions.py
@@ -5,7 +5,7 @@ from orchestrator.services.processes import get_execution_context
 from orchestrator.services.subscriptions import TARGET_DEFAULT_USABLE_MAP, WF_USABLE_MAP
 from orchestrator.targets import Target
 
-from gso.schedules.scheduling import scheduler
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
 from gso.services.subscriptions import get_insync_subscriptions
 from gso.worker import celery
 
@@ -13,7 +13,7 @@ logger = structlog.get_logger(__name__)
 
 
 @celery.task
-@scheduler(name="Subscriptions Validator", minute="10", hour="0")
+@scheduler(CronScheduleConfig(name="Subscriptions Validator", minute="10", hour="0"))
 def validate_subscriptions() -> None:
     """Validate all subscriptions using their corresponding validation workflow."""
     subscriptions = get_insync_subscriptions()
diff --git a/gso/schema/partner.py b/gso/schema/partner.py
index 890adcb9b20b08f6c244e8986ad20eaf4def83fc..b1c58c2cf91bf544501f6b2e316117b8b83a70c9 100644
--- a/gso/schema/partner.py
+++ b/gso/schema/partner.py
@@ -3,7 +3,7 @@
 from datetime import datetime
 from uuid import uuid4
 
-from pydantic import BaseModel, EmailStr, Field
+from pydantic import BaseModel, ConfigDict, EmailStr, Field
 
 from gso.db.models import PartnerType
 
@@ -14,7 +14,7 @@ class PartnerCreate(BaseModel):
     partner_id: str = Field(default_factory=lambda: str(uuid4()))
     name: str
     email: EmailStr | None = None
-    as_number: str | None = Field(None, unique=True)
+    as_number: str | None = None
     as_set: str | None = None
     route_set: str | None = None
     black_listed_as_sets: list[str] | None = None
@@ -23,8 +23,4 @@ class PartnerCreate(BaseModel):
     partner_type: PartnerType
     created_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
     updated_at: datetime = Field(default_factory=lambda: datetime.now().astimezone())
-
-    class Config:
-        """Pydantic model configuration."""
-
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)
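Note: from_attributes=True is the pydantic v2 spelling of the removed orm_mode; a model configured this way can be built from any attribute-bearing object (such as a SQLAlchemy row) via model_validate. A self-contained sketch with a toy model, so it does not depend on the partner table:

    from types import SimpleNamespace

    from pydantic import BaseModel, ConfigDict


    class PartnerSummary(BaseModel):
        """Toy model for illustration only."""

        model_config = ConfigDict(from_attributes=True)

        name: str
        email: str | None = None


    row = SimpleNamespace(name="GEANT", email=None)  # stands in for an ORM row object
    print(PartnerSummary.model_validate(row))        # name='GEANT' email=None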
diff --git a/gso/services/infoblox.py b/gso/services/infoblox.py
index 06ee5719a48010c2cb175200abc3b8276312303d..c414aae59d89b8347a53a75aa0f0d4310b6da798 100644
--- a/gso/services/infoblox.py
+++ b/gso/services/infoblox.py
@@ -10,6 +10,7 @@ from infoblox_client.exceptions import (
 )
 
 from gso.settings import IPAMParams, load_oss_params
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType
 
 logger = getLogger(__name__)
 NULL_MAC = "00:00:00:00:00:00"
@@ -36,7 +37,7 @@ def _setup_connection() -> tuple[connector.Connector, IPAMParams]:
     return connector.Connector(options), oss
 
 
-def _allocate_network(
+def _allocate_network(  # noqa: PLR0917
     conn: connector.Connector,
     dns_view: str,
     network_view: str,
@@ -233,8 +234,8 @@ def allocate_host(
 
 def create_host_by_ip(
     hostname: str,
-    ipv4_address: ipaddress.IPv4Address,
-    ipv6_address: ipaddress.IPv6Address,
+    ipv4_address: IPv4AddressType,
+    ipv6_address: IPv6AddressType,
     service_type: str,
     comment: str,
 ) -> None:
@@ -268,11 +269,11 @@ def create_host_by_ip(
     new_host.update()
 
 
-def find_host_by_ip(ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address) -> objects.HostRecord | None:
+def find_host_by_ip(ip_addr: IPv4AddressType | ipaddress.IPv6Address) -> objects.HostRecord | None:
     """Find a host record in Infoblox by its associated IP address.
 
     :param ip_addr: The IP address of a host that is searched for.
-    :type ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address
+    :type ip_addr: IPv4AddressType | ipaddress.IPv6Address
     """
     conn, _ = _setup_connection()
     if ip_addr.version == 4:  # noqa: PLR2004, the 4 in IPv4 is well-known and not a "magic value."
@@ -314,14 +315,14 @@ def find_v6_host_by_fqdn(fqdn: str) -> objects.HostRecordV6:
     )
 
 
-def delete_host_by_ip(ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address) -> None:
+def delete_host_by_ip(ip_addr: IPv4AddressType | ipaddress.IPv6Address) -> None:
     """Delete a host from Infoblox.
 
     Delete a host record in Infoblox, by providing the IP address that is associated with the record. Raises a
     :class:`DeletionError` if no record can be found in Infoblox.
 
     :param ip_addr: The IP address of the host record that should get deleted.
-    :type ip_addr: ipaddress.IPv4Address | ipaddress.IPv6Address
+    :type ip_addr: IPv4AddressType | ipaddress.IPv6Address
     """
     host = find_host_by_ip(ip_addr)
     if host:
diff --git a/gso/services/lso_client.py b/gso/services/lso_client.py
index 67e4b77cce1e4c03f1ba200b6633079cb2476437..92898940edfdb7a2981156a1fd5e0f5c5a44f786 100644
--- a/gso/services/lso_client.py
+++ b/gso/services/lso_client.py
@@ -13,9 +13,10 @@ from orchestrator.config.assignee import Assignee
 from orchestrator.types import State
 from orchestrator.utils.errors import ProcessFailureError
 from orchestrator.workflow import Step, StepList, begin, callback_step, inputstep
-from pydantic_forms.core import FormPage, ReadOnlyField
+from pydantic import ConfigDict
+from pydantic_forms.core import FormPage
 from pydantic_forms.types import FormGenerator
-from pydantic_forms.validators import Label, LongText
+from pydantic_forms.validators import Label, LongText, ReadOnlyField
 
 from gso import settings
 
@@ -125,13 +126,12 @@ def _show_results(state: State) -> FormGenerator:
         return state
 
     class ConfirmRunPage(FormPage):
-        class Config:
-            title: str = state["lso_result_title"]
+        model_config = ConfigDict(title=state["lso_result_title"])
 
         if "lso_result_extra_label" in state:
             extra_label: Label = state["lso_result_extra_label"]
-        run_status: str = ReadOnlyField(state["callback_result"]["status"])
-        run_results: LongText = ReadOnlyField(json.dumps(state["callback_result"], indent=4))
+        run_status: ReadOnlyField(state["callback_result"]["status"], default_type=str)  # type: ignore[valid-type]
+        run_results: ReadOnlyField(json.dumps(state["callback_result"], indent=4), default_type=LongText)  # type: ignore[valid-type]
 
     yield ConfirmRunPage
     [state.pop(key, None) for key in ["run_results", "lso_result_title", "lso_result_extra_label"]]
diff --git a/gso/services/sharepoint.py b/gso/services/sharepoint.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0efff0b05a75799ae05c23e0b25cdab0e5db0a4
--- /dev/null
+++ b/gso/services/sharepoint.py
@@ -0,0 +1,79 @@
+"""SharePoint service used for creating new list items."""
+
+import asyncio
+
+from azure.identity.aio import CertificateCredential
+from msgraph import GraphServiceClient
+from msgraph.generated.models.field_value_set import FieldValueSet
+from msgraph.generated.models.list_item import ListItem
+from msgraph.generated.models.list_item_collection_response import ListItemCollectionResponse
+from msgraph.generated.models.site import Site
+from msgraph.generated.sites.item.lists.item.items.items_request_builder import ItemsRequestBuilder
+
+from gso.settings import load_oss_params
+
+
+class SharePointClient:
+    """A client for interacting with SharePoint lists."""
+
+    def __init__(self) -> None:
+        """Initialise a new SharePoint client."""
+        sp_params = load_oss_params().SHAREPOINT
+        _credentials = CertificateCredential(
+            tenant_id=sp_params.tenant_id,
+            client_id=sp_params.client_id,
+            certificate_path=sp_params.certificate_path,
+            password=sp_params.certificate_password,
+        )
+        self.client = GraphServiceClient(_credentials, sp_params.scopes)
+        self.site_id = sp_params.site_id
+        self.list_ids = sp_params.list_ids
+
+    def get_site(self) -> Site | None:
+        """Get the SharePoint site that this orchestrator connects to."""
+
+        async def _get_site() -> Site | None:
+            return await self.client.sites.by_site_id(self.site_id).get()
+
+        return asyncio.run(_get_site())
+
+    def get_list_items(self, list_name: str) -> ListItemCollectionResponse | None:
+        """Get list items from a given list in SharePoint.
+
+        :param str list_name: The name of the list.
+        """
+
+        async def _get_list_items() -> ListItemCollectionResponse | None:
+            query_params = ItemsRequestBuilder.ItemsRequestBuilderGetQueryParameters(expand=["fields"])
+            request_configuration = ItemsRequestBuilder.ItemsRequestBuilderGetRequestConfiguration(
+                query_parameters=query_params
+            )
+            return (
+                await self.client.sites.by_site_id(self.site_id)
+                .lists.by_list_id(self.list_ids[list_name])
+                .items.get(request_configuration=request_configuration)
+            )
+
+        return asyncio.run(_get_list_items())
+
+    def add_list_item(self, list_name: str, fields: dict[str, str]) -> str:
+        """Add a new entry to a SharePoint list.
+
+        :param str list_name: The name of the list.
+        :param dict[str, str] fields: Any pre-filled fields in the list item. Can be left empty.
+
+        :return str: The URL of the list in which a new item has been created.
+        """
+
+        async def _new_list_item() -> str:
+            request_body = ListItem(fields=FieldValueSet(additional_data=fields))
+            new_item = (
+                await self.client.sites.by_site_id(self.site_id)
+                .lists.by_list_id(self.list_ids[list_name])
+                .items.post(request_body)
+            )
+
+            #  Strip the last part of the URL, since we want the link to the list, not the list item.
+            return new_item.web_url.rsplit("/", 1)[0]
+
+        return asyncio.run(_new_list_item())
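Note: each public method wraps its asynchronous msgraph call in asyncio.run(), so the client can be called from synchronous workflow steps without an event loop of their own. A usage sketch; the list name must be one of the keys configured under SHAREPOINT.list_ids, "ip_trunk" is the key the IP trunk creation workflow below relies on, and the field values here are placeholders:

    from gso.services.sharepoint import SharePointClient

    client = SharePointClient()
    checklist_url = client.add_list_item(
        "ip_trunk",
        {"Title": "LON - AMS trunk checklist", "TT_NUMBER": "TT#0000000000"},
    )
    print(checklist_url)  # link to the containing list, not to the individual item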
diff --git a/gso/settings.py b/gso/settings.py
index ced74ba5c01f59555976396dc43ad268bc786c0b..21f517bc5323003558bb0ef30c042b25b61386ca 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -9,8 +9,12 @@ import json
 import logging
 import os
 from pathlib import Path
+from typing import Annotated
 
-from pydantic import BaseSettings, NonNegativeInt
+from pydantic import Field
+from pydantic_forms.types import UUIDstr
+from pydantic_settings import BaseSettings
+from typing_extensions import Doc
 
 logger = logging.getLogger(__name__)
 
@@ -44,16 +48,8 @@ class InfoBloxParams(BaseSettings):
     password: str
 
 
-class V4Netmask(NonNegativeInt):
-    """A valid netmask for an IPv4 network or address."""
-
-    le = 32
-
-
-class V6Netmask(NonNegativeInt):
-    """A valid netmask for an IPv6 network or address."""
-
-    le = 128
+V4Netmask = Annotated[int, Field(ge=0, le=32), Doc("A valid netmask for an IPv4 network or address.")]
+V6Netmask = Annotated[int, Field(ge=0, le=128), Doc("A valid netmask for an IPv6 network or address.")]
 
 
 class V4NetworkParams(BaseSettings):
@@ -126,7 +122,7 @@ class SNMPParams(BaseSettings):
     v2c: MonitoringSNMPV2Params
     #: .. versionadded :: 2.0
     #:    Support for :term:`SNMP` v3 will get added in a later version of :term:`GSO`. Parameters are optional for now.
-    v3: MonitoringSNMPV3Params | None
+    v3: MonitoringSNMPV3Params | None = None
 
 
 class MonitoringParams(BaseSettings):
@@ -141,9 +137,6 @@ class ProvisioningProxyParams(BaseSettings):
 
     scheme: str
     api_base: str
-    #:  .. deprecated:: 0.1
-    #:     Not used anymore, may be left out from config file.
-    auth: str | None
     api_version: int
 
 
@@ -162,15 +155,20 @@ class EmailParams(BaseSettings):
     smtp_host: str
     smtp_port: int
     starttls_enabled: bool
-    smtp_username: str | None
-    smtp_password: str | None
+    smtp_username: str | None = None
+    smtp_password: str | None = None
 
 
 class SharepointParams(BaseSettings):
     """Settings for different Sharepoint sites."""
 
-    # TODO: Stricter typing after Pydantic 2.x upgrade
-    checklist_site_url: str
+    client_id: UUIDstr
+    tenant_id: UUIDstr
+    certificate_path: str
+    certificate_password: str
+    site_id: UUIDstr
+    list_ids: dict[str, UUIDstr]
+    scopes: list[str]
 
 
 class OSSParams(BaseSettings):
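Note: the new SharepointParams fields imply a matching SHAREPOINT block in the OSS parameters file read by load_oss_params. A hedged sketch of the expected shape, written here as a Python dict; every value is a placeholder, and the Graph scope shown is only the conventional client-credentials default:

    SHAREPOINT = {
        "client_id": "00000000-0000-0000-0000-000000000000",
        "tenant_id": "00000000-0000-0000-0000-000000000000",
        "certificate_path": "/path/to/certificate.pem",
        "certificate_password": "changeme",
        "site_id": "00000000-0000-0000-0000-000000000000",
        "list_ids": {"ip_trunk": "00000000-0000-0000-0000-000000000000"},
        "scopes": ["https://graph.microsoft.com/.default"],
    }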
diff --git a/gso/translations/en-GB.json b/gso/translations/en-GB.json
index 63b31e76b5ea23947a59fff034c4367f50e1f83d..9fdf1dfafa8ff3304e5e9cf6b0e36669f8412446 100644
--- a/gso/translations/en-GB.json
+++ b/gso/translations/en-GB.json
@@ -45,6 +45,16 @@
         "modify_site": "Modify site",
         "modify_trunk_interface": "Modify IP Trunk interface",
         "redeploy_base_config": "Redeploy base config",
-        "update_ibgp_mesh": "Update iBGP mesh"
+        "update_ibgp_mesh": "Update iBGP mesh",
+        "create_imported_site": "NOT FOR HUMANS -- Import existing site",
+        "create_imported_router": "NOT FOR HUMANS -- Import existing router",
+        "create_imported_iptrunk": "NOT FOR HUMANS -- Import existing IP trunk",
+        "create_imported_super_pop_switch": "NOT FOR HUMANS -- Import existing super PoP switch",
+        "create_imported_office_router": "NOT FOR HUMANS -- Import existing office router",
+        "import_site": "NOT FOR HUMANS -- Finalize import into a Site product",
+        "import_router": "NOT FOR HUMANS -- Finalize import into a Router product",
+        "import_iptrunk": "NOT FOR HUMANS -- Finalize import into an IP trunk product",
+        "import_office_router": "NOT FOR HUMANS -- Finalize import into an Office router product",
+        "import_super_pop_switch": "NOT FOR HUMANS -- Finalize import into a Super PoP switch"
     }
 }
diff --git a/gso/utils/helpers.py b/gso/utils/helpers.py
index 0241e836ef8c9db3bcc750b357e03c65d291914d..28269a3da83dc0f745ef31a46361e9c6952e7d2f 100644
--- a/gso/utils/helpers.py
+++ b/gso/utils/helpers.py
@@ -3,29 +3,28 @@
 import ipaddress
 import re
 from enum import StrEnum
-from ipaddress import IPv4Address
 from uuid import UUID
 
 import pycountry
 from orchestrator.types import UUIDstr
-from pydantic import BaseModel, validator
-from pydantic.fields import ModelField
+from pydantic import BaseModel, field_validator
+from pydantic_core.core_schema import ValidationInfo
 from pydantic_forms.validators import Choice
 
 from gso import settings
 from gso.products.product_blocks.iptrunk import IptrunkInterfaceBlock
-from gso.products.product_blocks.site import SiteTier
+from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate, SiteTier
 from gso.products.product_types.router import Router
 from gso.services.netbox_client import NetboxClient
 from gso.services.subscriptions import get_active_subscriptions_by_field_and_value
-from gso.utils.shared_enums import Vendor
+from gso.utils.shared_enums import IPv4AddressType, Vendor
 
 
 class LAGMember(BaseModel):
     """A :term:`LAG` member interface that consists of a name and description."""
 
     interface_name: str
-    interface_description: str | None
+    interface_description: str | None = None
 
     def __hash__(self) -> int:
         """Calculate the hash based on the interface name and description, so that uniqueness can be determined."""
@@ -107,7 +106,7 @@ def get_router_vendor(router_id: UUID) -> Vendor:
     return Router.from_subscription(router_id).router.vendor
 
 
-def iso_from_ipv4(ipv4_address: IPv4Address) -> str:
+def iso_from_ipv4(ipv4_address: IPv4AddressType) -> str:
     """Calculate an :term:`ISO` address, based on an IPv4 address.
 
     :param IPv4Address ipv4_address: The address that's to be converted
@@ -157,12 +156,11 @@ def validate_iptrunk_unique_interface(interfaces: list[LAGMember]) -> list[LAGMe
     return interfaces
 
 
-def validate_site_fields_is_unique(field_name: str, value: str | int) -> str | int:
+def validate_site_fields_is_unique(field_name: str, value: str | int) -> None:
     """Validate that a site field is unique."""
     if len(get_active_subscriptions_by_field_and_value(field_name, str(value))) > 0:
         msg = f"{field_name} must be unique"
         raise ValueError(msg)
-    return value
 
 
 def validate_ipv4_or_ipv6(value: str) -> str:
@@ -188,7 +186,7 @@ def validate_country_code(country_code: str) -> str:
     return country_code
 
 
-def validate_site_name(site_name: str) -> str:
+def validate_site_name(site_name: str) -> None:
     """Validate the site name.
 
     The site name must consist of three uppercase letters, optionally followed by a single digit.
@@ -200,7 +198,6 @@ def validate_site_name(site_name: str) -> str:
             f"digit (0-9). Received: {site_name}"
         )
         raise ValueError(msg)
-    return site_name
 
 
 class BaseSiteValidatorModel(BaseModel):
@@ -210,32 +207,38 @@ class BaseSiteValidatorModel(BaseModel):
     site_internal_id: int
     site_tier: SiteTier
     site_ts_address: str
-
-    @validator("site_ts_address", check_fields=False, allow_reuse=True)
+    site_country_code: str
+    site_name: str
+    site_city: str
+    site_country: str
+    site_latitude: LatitudeCoordinate
+    site_longitude: LongitudeCoordinate
+    partner: str
+
+    @field_validator("site_ts_address")
     def validate_ts_address(cls, site_ts_address: str) -> str:
         """Validate that a terminal server address is valid."""
         validate_ipv4_or_ipv6(site_ts_address)
         return site_ts_address
 
-    @validator("site_country_code", check_fields=False, allow_reuse=True)
+    @field_validator("site_country_code")
     def country_code_must_exist(cls, country_code: str) -> str:
         """Validate that the country code exists."""
         validate_country_code(country_code)
         return country_code
 
-    @validator(
-        "site_ts_address",
-        "site_internal_id",
-        "site_bgp_community_id",
-        "site_name",
-        check_fields=False,
-        allow_reuse=True,
-    )
-    def validate_unique_fields(cls, value: str, field: ModelField) -> str | int:
-        """Validate that the internal and :term:`BGP` community IDs are unique."""
-        return validate_site_fields_is_unique(field.name, value)
+    @field_validator("site_ts_address", "site_internal_id", "site_bgp_community_id", "site_name")
+    def field_must_be_unique(cls, value: str | int, info: ValidationInfo) -> str | int:
+        """Validate that a field is unique."""
+        if not info.field_name:
+            msg = "Field name must be provided."
+            raise ValueError(msg)
+
+        validate_site_fields_is_unique(info.field_name, value)
+
+        return value
 
-    @validator("site_name", check_fields=False, allow_reuse=True)
+    @field_validator("site_name")
     def site_name_must_be_valid(cls, site_name: str) -> str:
         """Validate the site name.
 
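Note: the pydantic v1 ModelField argument is gone; a validator shared across several fields now receives a ValidationInfo whose field_name attribute identifies the field being validated, which is exactly what field_must_be_unique uses above. A minimal, self-contained sketch of that mechanism:

    from pydantic import BaseModel, field_validator
    from pydantic_core.core_schema import ValidationInfo


    class Example(BaseModel):
        site_name: str
        site_city: str

        @field_validator("site_name", "site_city")
        def must_not_be_blank(cls, value: str, info: ValidationInfo) -> str:
            if not value.strip():
                msg = f"{info.field_name} must not be blank"
                raise ValueError(msg)
            return value


    Example(site_name="AMS", site_city="Amsterdam")  # passes; blank values raise a ValidationError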
diff --git a/gso/utils/shared_enums.py b/gso/utils/shared_enums.py
index c0116e1690d6384cabd9ce16cf1ee79201a0d6b8..94538f78463821bf266bdd15d379ec61b0371d4b 100644
--- a/gso/utils/shared_enums.py
+++ b/gso/utils/shared_enums.py
@@ -1,7 +1,16 @@
 """Shared choices for the different models."""
 
-from pydantic import ConstrainedInt
+import ipaddress
+from typing import Annotated, Any
+
+from pydantic import Field, PlainSerializer
 from pydantic_forms.types import strEnum
+from typing_extensions import Doc
+
+
+def convert_to_str(value: Any) -> str:
+    """Convert the value to a string."""
+    return str(value)
 
 
 class Vendor(strEnum):
@@ -11,14 +20,22 @@ class Vendor(strEnum):
     NOKIA = "nokia"
 
 
-class PortNumber(ConstrainedInt):
-    """Constrained integer for valid port numbers.
+PortNumber = Annotated[
+    int,
+    Field(
+        gt=0,
+        le=49151,
+    ),
+    Doc(
+        "Constrained integer for valid port numbers. The range from 49152 to 65535 is marked as ephemeral, "
+        "and can therefore not be selected for permanent allocation."
+    ),
+]
+
 
-    The range from 49152 to 65535 is marked as ephemeral, and can therefore not be selected for permanent allocation.
-    """
+IPv4AddressType = Annotated[ipaddress.IPv4Address, PlainSerializer(convert_to_str, return_type=str, when_used="always")]
 
-    gt = 0
-    le = 49151
+IPv6AddressType = Annotated[ipaddress.IPv6Address, PlainSerializer(convert_to_str, return_type=str, when_used="always")]
 
 
 class ConnectionStrategy(strEnum):
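Note: the PlainSerializer on the two address types keeps dumped subscription state JSON-friendly: the field is an ipaddress object while the model is in use, but serialises to a plain string. A small sketch, assuming pydantic v2:

    import ipaddress

    from pydantic import BaseModel, TypeAdapter

    from gso.utils.shared_enums import IPv4AddressType, PortNumber


    class Example(BaseModel):
        mgmt_address: IPv4AddressType
        port: PortNumber


    example = Example(mgmt_address="192.0.2.1", port=22)
    assert isinstance(example.mgmt_address, ipaddress.IPv4Address)
    print(example.model_dump())  # {'mgmt_address': '192.0.2.1', 'port': 22}

    TypeAdapter(PortNumber).validate_python(49151)  # highest non-ephemeral port, accepted
    # validate_python(55555) would raise instead: the ephemeral range is rejected by le=49151.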
diff --git a/gso/utils/workflow_steps.py b/gso/utils/workflow_steps.py
index 7b3b5cc91fe5899b1c8580501d85da3a638459d9..6ab00644bef668cecdf1c159e9502ec4879ff8a7 100644
--- a/gso/utils/workflow_steps.py
+++ b/gso/utils/workflow_steps.py
@@ -3,9 +3,14 @@
 import json
 from typing import Any
 
-from orchestrator import step
+from orchestrator import inputstep, step
+from orchestrator.config.assignee import Assignee
 from orchestrator.types import State, UUIDstr
 from orchestrator.utils.json import json_dumps
+from pydantic import ConfigDict
+from pydantic_forms.core import FormPage
+from pydantic_forms.types import FormGenerator
+from pydantic_forms.validators import Label
 
 from gso.products.product_types.iptrunk import Iptrunk
 from gso.services.lso_client import execute_playbook
@@ -101,3 +106,18 @@ def run_checks_after_base_config(subscription: dict[str, Any], callback_route: s
         inventory=subscription["router"]["router_fqdn"],
         extra_vars={"wfo_router_json": subscription},
     )
+
+
+@inputstep("Prompt for new SharePoint checklist", assignee=Assignee.SYSTEM)
+def prompt_sharepoint_checklist_url(checklist_url: str) -> FormGenerator:
+    """Prompt the operator with a link to the SharePoint checklist for approving a new subscription."""
+
+    class SharepointPrompt(FormPage):
+        model_config = ConfigDict(title="Complete new checklist")
+
+        info_label_1: Label = f"A new checklist has been created at: {checklist_url}"
+        info_label_2: Label = "Click proceed to finish the workflow."
+
+    yield SharepointPrompt
+
+    return {}
diff --git a/gso/workflows/__init__.py b/gso/workflows/__init__.py
index 1e89ec894bf9b3029484db458b6ea19e592c6c55..e10c8b7f415f22cfdc0348ea42b445ba198f3f35 100644
--- a/gso/workflows/__init__.py
+++ b/gso/workflows/__init__.py
@@ -10,20 +10,19 @@ ALL_ALIVE_STATES: list[str] = [
     SubscriptionLifecycle.ACTIVE,
 ]
 
-WF_USABLE_MAP.update(
-    {
-        "redeploy_base_config": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
-        "update_ibgp_mesh": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
-        "activate_router": [SubscriptionLifecycle.PROVISIONING],
-        "deploy_twamp": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
-        "modify_trunk_interface": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
-        "activate_iptrunk": [SubscriptionLifecycle.PROVISIONING],
-        "terminate_site": ALL_ALIVE_STATES,
-        "terminate_router": ALL_ALIVE_STATES,
-        "terminate_iptrunk": ALL_ALIVE_STATES,
-    }
-)
+WF_USABLE_MAP.update({
+    "redeploy_base_config": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
+    "update_ibgp_mesh": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
+    "activate_router": [SubscriptionLifecycle.PROVISIONING],
+    "deploy_twamp": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
+    "modify_trunk_interface": [SubscriptionLifecycle.PROVISIONING, SubscriptionLifecycle.ACTIVE],
+    "activate_iptrunk": [SubscriptionLifecycle.PROVISIONING],
+    "terminate_site": ALL_ALIVE_STATES,
+    "terminate_router": ALL_ALIVE_STATES,
+    "terminate_iptrunk": ALL_ALIVE_STATES,
+})
 
+#  IP trunk workflows
 LazyWorkflowInstance("gso.workflows.iptrunk.activate_iptrunk", "activate_iptrunk")
 LazyWorkflowInstance("gso.workflows.iptrunk.create_iptrunk", "create_iptrunk")
 LazyWorkflowInstance("gso.workflows.iptrunk.deploy_twamp", "deploy_twamp")
@@ -31,17 +30,32 @@ LazyWorkflowInstance("gso.workflows.iptrunk.modify_isis_metric", "modify_isis_me
 LazyWorkflowInstance("gso.workflows.iptrunk.modify_trunk_interface", "modify_trunk_interface")
 LazyWorkflowInstance("gso.workflows.iptrunk.migrate_iptrunk", "migrate_iptrunk")
 LazyWorkflowInstance("gso.workflows.iptrunk.terminate_iptrunk", "terminate_iptrunk")
+LazyWorkflowInstance("gso.workflows.iptrunk.create_imported_iptrunk", "create_imported_iptrunk")
+LazyWorkflowInstance("gso.workflows.iptrunk.import_iptrunk", "import_iptrunk")
+
+#  Router workflows
 LazyWorkflowInstance("gso.workflows.router.activate_router", "activate_router")
 LazyWorkflowInstance("gso.workflows.router.create_router", "create_router")
 LazyWorkflowInstance("gso.workflows.router.redeploy_base_config", "redeploy_base_config")
 LazyWorkflowInstance("gso.workflows.router.terminate_router", "terminate_router")
 LazyWorkflowInstance("gso.workflows.router.update_ibgp_mesh", "update_ibgp_mesh")
 LazyWorkflowInstance("gso.workflows.router.modify_connection_strategy", "modify_connection_strategy")
+LazyWorkflowInstance("gso.workflows.router.import_router", "import_router")
+LazyWorkflowInstance("gso.workflows.router.create_imported_router", "create_imported_router")
+
+#  Site workflows
 LazyWorkflowInstance("gso.workflows.site.create_site", "create_site")
 LazyWorkflowInstance("gso.workflows.site.modify_site", "modify_site")
 LazyWorkflowInstance("gso.workflows.site.terminate_site", "terminate_site")
-LazyWorkflowInstance("gso.workflows.tasks.import_site", "import_site")
-LazyWorkflowInstance("gso.workflows.tasks.import_router", "import_router")
-LazyWorkflowInstance("gso.workflows.tasks.import_iptrunk", "import_iptrunk")
-LazyWorkflowInstance("gso.workflows.tasks.import_super_pop_switch", "import_super_pop_switch")
-LazyWorkflowInstance("gso.workflows.tasks.import_office_router", "import_office_router")
+LazyWorkflowInstance("gso.workflows.site.create_imported_site", "create_imported_site")
+LazyWorkflowInstance("gso.workflows.site.import_site", "import_site")
+
+#  Super PoP switch workflows
+LazyWorkflowInstance("gso.workflows.super_pop_switch.import_super_pop_switch", "import_super_pop_switch")
+LazyWorkflowInstance(
+    "gso.workflows.super_pop_switch.create_imported_super_pop_switch", "create_imported_super_pop_switch"
+)
+
+#  Office router workflows
+LazyWorkflowInstance("gso.workflows.office_router.import_office_router", "import_office_router")
+LazyWorkflowInstance("gso.workflows.office_router.create_imported_office_router", "create_imported_office_router")
diff --git a/gso/workflows/iptrunk/activate_iptrunk.py b/gso/workflows/iptrunk/activate_iptrunk.py
index f686a8cb7e3c825dceffeb876c644a37342ce3d8..a98a0446c50a4de14b8f55502b633babb7d027af 100644
--- a/gso/workflows/iptrunk/activate_iptrunk.py
+++ b/gso/workflows/iptrunk/activate_iptrunk.py
@@ -16,7 +16,7 @@ def _initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
     trunk = Iptrunk.from_subscription(subscription_id)
 
     class ActivateTrunkForm(FormPage):
-        info_label: Label = "Start approval process for IP trunk activation."  # type:ignore[assignment]
+        info_label: Label = "Start approval process for IP trunk activation."
 
     user_input = yield ActivateTrunkForm
 
@@ -28,7 +28,7 @@ def verify_complete_checklist() -> FormGenerator:
     """Show a form for the operator to input a link to the completed checklist."""
 
     class VerifyCompleteForm(FormPage):
-        info_label: Label = "Verify that the checklist has been completed. Then continue this workflow."  # type: ignore[assignment]
+        info_label: Label = "Verify that the checklist has been completed. Then continue this workflow."
         checklist_url: str = ""
 
     user_input = yield VerifyCompleteForm
diff --git a/gso/workflows/tasks/import_iptrunk.py b/gso/workflows/iptrunk/create_imported_iptrunk.py
similarity index 81%
rename from gso/workflows/tasks/import_iptrunk.py
rename to gso/workflows/iptrunk/create_imported_iptrunk.py
index 9c6687b35943f7316a4cf4758009598270f80b77..bd2ea9876dfd943ea089257be71f41dd7985ec51 100644
--- a/gso/workflows/tasks/import_iptrunk.py
+++ b/gso/workflows/iptrunk/create_imported_iptrunk.py
@@ -1,19 +1,22 @@
 """A creation workflow for adding an existing IP trunk to the service database."""
 
 import ipaddress
+from typing import Annotated
 from uuid import uuid4
 
 from orchestrator import workflow
 from orchestrator.forms import FormPage
-from orchestrator.forms.validators import Choice, UniqueConstrainedList
+from orchestrator.forms.validators import Choice
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
 from orchestrator.workflow import StepList, done, init, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import AfterValidator, ConfigDict
+from pydantic_forms.validators import validate_unique_list
 
 from gso.products import ProductName
 from gso.products.product_blocks.iptrunk import IptrunkInterfaceBlockInactive, IptrunkType, PhysicalPortCapacity
-from gso.products.product_types.iptrunk import IptrunkInactive, IptrunkProvisioning
+from gso.products.product_types.iptrunk import ImportedIptrunkInactive
 from gso.products.product_types.router import Router
 from gso.services import subscriptions
 from gso.services.partners import get_partner_by_name
@@ -29,18 +32,20 @@ def _generate_routers() -> dict[str, str]:
     return routers
 
 
+LAGMemberList = Annotated[list[LAGMember], AfterValidator(validate_unique_list)]
+
+
 def initial_input_form_generator() -> FormGenerator:
     """Take all information passed to this workflow by the :term:`API` endpoint that was called."""
     routers = _generate_routers()
     router_enum = Choice("Select a router", zip(routers.keys(), routers.items(), strict=True))  # type: ignore[arg-type]
 
     class CreateIptrunkForm(FormPage):
-        class Config:
-            title = "Import Iptrunk"
+        model_config = ConfigDict(title="Import Iptrunk")
 
         partner: str
-        geant_s_sid: str | None
-        iptrunk_description: str
+        geant_s_sid: str | None = None
+        iptrunk_description: str | None = None
         iptrunk_type: IptrunkType
         iptrunk_speed: PhysicalPortCapacity
         iptrunk_minimum_links: int
@@ -48,13 +53,13 @@ def initial_input_form_generator() -> FormGenerator:
 
         side_a_node_id: router_enum  # type: ignore[valid-type]
         side_a_ae_iface: str
-        side_a_ae_geant_a_sid: str | None
-        side_a_ae_members: UniqueConstrainedList[LAGMember]
+        side_a_ae_geant_a_sid: str | None = None
+        side_a_ae_members: LAGMemberList
 
         side_b_node_id: router_enum  # type: ignore[valid-type]
         side_b_ae_iface: str
-        side_b_ae_geant_a_sid: str | None
-        side_b_ae_members: UniqueConstrainedList[LAGMember]
+        side_b_ae_geant_a_sid: str | None = None
+        side_b_ae_members: LAGMemberList
 
         iptrunk_ipv4_network: ipaddress.IPv4Network
         iptrunk_ipv6_network: ipaddress.IPv6Network
@@ -68,8 +73,8 @@ def initial_input_form_generator() -> FormGenerator:
 def create_subscription(partner: str) -> State:
     """Create a new subscription in the service database."""
     partner_id = get_partner_by_name(partner)["partner_id"]
-    product_id = subscriptions.get_product_id_by_name(ProductName.IP_TRUNK)
-    subscription = IptrunkInactive.from_product_id(product_id, partner_id)
+    product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_IP_TRUNK)
+    subscription = ImportedIptrunkInactive.from_product_id(product_id, partner_id)
 
     return {
         "subscription": subscription,
@@ -79,7 +84,7 @@ def create_subscription(partner: str) -> State:
 
 @step("Initialize subscription")
 def initialize_subscription(
-    subscription: IptrunkInactive,
+    subscription: ImportedIptrunkInactive,
     geant_s_sid: str | None,
     iptrunk_type: IptrunkType,
     iptrunk_description: str,
@@ -118,16 +123,17 @@ def initialize_subscription(
         subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members.append(
             IptrunkInterfaceBlockInactive.new(subscription_id=uuid4(), **member),
         )
-
-    subscription.description = f"IP trunk, geant_s_sid:{geant_s_sid}"
-    subscription = IptrunkProvisioning.from_other_lifecycle(subscription, SubscriptionLifecycle.PROVISIONING)
-
+    side_names = sorted([
+        subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
+        subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
+    ])
+    subscription.description = f"IP trunk {side_names[0]} {side_names[1]}, geant_s_sid:{geant_s_sid}"
     return {"subscription": subscription}
 
 
 @step("Update IPAM Stub for Subscription")
 def update_ipam_stub_for_subscription(
-    subscription: IptrunkProvisioning,
+    subscription: ImportedIptrunkInactive,
     iptrunk_ipv4_network: ipaddress.IPv4Network,
     iptrunk_ipv6_network: ipaddress.IPv6Network,
 ) -> State:
@@ -143,7 +149,7 @@ def update_ipam_stub_for_subscription(
     initial_input_form=initial_input_form_generator,
     target=Target.CREATE,
 )
-def import_iptrunk() -> StepList:
+def create_imported_iptrunk() -> StepList:
     """Import an IP trunk without provisioning it."""
     return (
         init
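Note: the LAGMemberList annotation introduced above combines a plain list with pydantic-forms' validate_unique_list, so duplicate members are rejected at validation time rather than silently kept. A hedged sketch, assuming validate_unique_list raises on duplicates as its use with AfterValidator implies:

    from typing import Annotated

    from pydantic import AfterValidator, TypeAdapter
    from pydantic_forms.validators import validate_unique_list

    from gso.utils.helpers import LAGMember

    LAGMemberList = Annotated[list[LAGMember], AfterValidator(validate_unique_list)]

    member = LAGMember(interface_name="et-0/0/0", interface_description="example uplink")
    TypeAdapter(LAGMemberList).validate_python([member])  # a single member is accepted
    # Passing [member, member] would fail validation, since both entries hash to the same value.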
diff --git a/gso/workflows/iptrunk/create_iptrunk.py b/gso/workflows/iptrunk/create_iptrunk.py
index fed3ab25b20b27a205481ca9fc685528670284d3..440bb8e2ac0eef37df5af277864e126d61245214 100644
--- a/gso/workflows/iptrunk/create_iptrunk.py
+++ b/gso/workflows/iptrunk/create_iptrunk.py
@@ -1,19 +1,20 @@
 """A creation workflow that deploys a new IP trunk service."""
 
 import json
+from typing import Annotated
 from uuid import uuid4
 
-from orchestrator.config.assignee import Assignee
+from annotated_types import Len
 from orchestrator.forms import FormPage
-from orchestrator.forms.validators import Choice, Label, UniqueConstrainedList
+from orchestrator.forms.validators import Choice, Label
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUIDstr
 from orchestrator.utils.json import json_dumps
-from orchestrator.workflow import StepList, conditional, done, init, inputstep, step, workflow
+from orchestrator.workflow import StepList, conditional, done, init, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
-from pydantic import validator
-from pydantic_forms.core import ReadOnlyField
+from pydantic import AfterValidator, ConfigDict, field_validator
+from pydantic_forms.validators import ReadOnlyField, validate_unique_list
 from pynetbox.models.dcim import Interfaces
 
 from gso.products.product_blocks.iptrunk import (
@@ -28,6 +29,7 @@ from gso.services import infoblox, subscriptions
 from gso.services.lso_client import execute_playbook, lso_interaction
 from gso.services.netbox_client import NetboxClient
 from gso.services.partners import get_partner_by_name
+from gso.services.sharepoint import SharePointClient
 from gso.settings import load_oss_params
 from gso.utils.helpers import (
     LAGMember,
@@ -40,6 +42,7 @@ from gso.utils.helpers import (
     validate_tt_number,
 )
 from gso.utils.shared_enums import Vendor
+from gso.utils.workflow_steps import prompt_sharepoint_checklist_url
 
 
 def initial_input_form_generator(product_name: str) -> FormGenerator:
@@ -52,18 +55,17 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
         routers[str(router["subscription_id"])] = router["description"]
 
     class CreateIptrunkForm(FormPage):
-        class Config:
-            title = product_name
+        model_config = ConfigDict(title=product_name)
 
         tt_number: str
-        partner: str = ReadOnlyField("GEANT")
-        geant_s_sid: str | None
-        iptrunk_description: str
+        partner: ReadOnlyField("GEANT", default_type=str)  # type: ignore[valid-type]
+        geant_s_sid: str | None = None
+        iptrunk_description: str | None = None
         iptrunk_type: IptrunkType
         iptrunk_speed: PhysicalPortCapacity
         iptrunk_number_of_members: int
 
-        @validator("tt_number", allow_reuse=True)
+        @field_validator("tt_number")
         def validate_tt_number(cls, tt_number: str) -> str:
             return validate_tt_number(tt_number)
 
@@ -71,20 +73,20 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
 
     class VerifyMinimumLinksForm(FormPage):
         info_label: Label = (
-            f"This is the calculated minimum-links for this LAG: " f"{initial_user_input.iptrunk_number_of_members - 1}"  # type: ignore[assignment]
+            f"This is the calculated minimum-links for this LAG: " f"{initial_user_input.iptrunk_number_of_members - 1}"
         )
-        info_label2: Label = "Please confirm or modify."  # type: ignore[assignment]
+        iptrunk_minimum_links: int = initial_user_input.iptrunk_number_of_members - 1
+        info_label2: Label = "Please confirm or modify."
 
-    yield VerifyMinimumLinksForm
+    verify_minimum_links = yield VerifyMinimumLinksForm
     router_enum_a = Choice("Select a router", zip(routers.keys(), routers.items(), strict=True))  # type: ignore[arg-type]
 
     class SelectRouterSideA(FormPage):
-        class Config:
-            title = "Select a router for side A of the trunk."
+        model_config = ConfigDict(title="Select a router for side A of the trunk.")
 
         side_a_node_id: router_enum_a  # type: ignore[valid-type]
 
-        @validator("side_a_node_id", allow_reuse=True)
+        @field_validator("side_a_node_id")
         def validate_device_exists_in_netbox(cls, side_a_node_id: UUIDstr) -> str | None:
             return validate_router_in_netbox(side_a_node_id)
 
@@ -92,9 +94,14 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     router_a = user_input_router_side_a.side_a_node_id.name
     router_a_fqdn = Router.from_subscription(router_a).router.router_fqdn
 
-    class JuniperAeMembers(UniqueConstrainedList[LAGMember]):
-        min_items = initial_user_input.iptrunk_number_of_members
-        max_items = initial_user_input.iptrunk_number_of_members
+    juniper_ae_members = Annotated[
+        list[LAGMember],
+        AfterValidator(validate_unique_list),
+        Len(
+            min_length=initial_user_input.iptrunk_number_of_members,
+            max_length=initial_user_input.iptrunk_number_of_members,
+        ),
+    ]
 
     if get_router_vendor(router_a) == Vendor.NOKIA:
 
@@ -104,23 +111,25 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
                 initial_user_input.iptrunk_speed,
             )
 
-        class NokiaAeMembersA(UniqueConstrainedList[NokiaLAGMemberA]):
-            min_items = initial_user_input.iptrunk_number_of_members
-            max_items = initial_user_input.iptrunk_number_of_members
-
-        ae_members_side_a = NokiaAeMembersA
+        ae_members_side_a_type = Annotated[
+            list[NokiaLAGMemberA],
+            AfterValidator(validate_unique_list),
+            Len(
+                min_length=initial_user_input.iptrunk_number_of_members,
+                max_length=initial_user_input.iptrunk_number_of_members,
+            ),
+        ]
     else:
-        ae_members_side_a = JuniperAeMembers  # type: ignore[assignment]
+        ae_members_side_a_type = juniper_ae_members  # type: ignore[assignment, misc]
 
     class CreateIptrunkSideAForm(FormPage):
-        class Config:
-            title = f"Provide subscription details for side A of the trunk.({router_a_fqdn})"
+        model_config = ConfigDict(title=f"Provide subscription details for side A of the trunk.({router_a_fqdn})")
 
         side_a_ae_iface: available_lags_choices(router_a) or str  # type: ignore[valid-type]
         side_a_ae_geant_a_sid: str | None
-        side_a_ae_members: ae_members_side_a  # type: ignore[valid-type]
+        side_a_ae_members: ae_members_side_a_type
 
-        @validator("side_a_ae_members", allow_reuse=True)
+        @field_validator("side_a_ae_members")
         def validate_side_a_ae_members(cls, side_a_ae_members: list[LAGMember]) -> list[LAGMember]:
             validate_iptrunk_unique_interface(side_a_ae_members)
             vendor = get_router_vendor(router_a)
@@ -133,12 +142,11 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     router_enum_b = Choice("Select a router", zip(routers.keys(), routers.items(), strict=True))  # type: ignore[arg-type]
 
     class SelectRouterSideB(FormPage):
-        class Config:
-            title = "Select a router for side B of the trunk."
+        model_config = ConfigDict(title="Select a router for side B of the trunk.")
 
         side_b_node_id: router_enum_b  # type: ignore[valid-type]
 
-        @validator("side_b_node_id", allow_reuse=True)
+        @field_validator("side_b_node_id")
         def validate_device_exists_in_netbox(cls, side_b_node_id: UUIDstr) -> str | None:
             return validate_router_in_netbox(side_b_node_id)
 
@@ -154,24 +162,24 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
                 initial_user_input.iptrunk_speed,
             )
 
-        class NokiaAeMembersB(UniqueConstrainedList):
-            min_items = len(user_input_side_a.side_a_ae_members)
-            max_items = len(user_input_side_a.side_a_ae_members)
-            item_type = NokiaLAGMemberB
-
-        ae_members_side_b = NokiaAeMembersB
+        ae_members_side_b = Annotated[
+            list[NokiaLAGMemberB],
+            AfterValidator(validate_unique_list),
+            Len(
+                min_length=len(user_input_side_a.side_a_ae_members), max_length=len(user_input_side_a.side_a_ae_members)
+            ),
+        ]
     else:
-        ae_members_side_b = JuniperAeMembers  # type: ignore[assignment]
+        ae_members_side_b = juniper_ae_members  # type: ignore[assignment, misc]
 
     class CreateIptrunkSideBForm(FormPage):
-        class Config:
-            title = f"Provide subscription details for side B of the trunk.({router_b_fqdn})"
+        model_config = ConfigDict(title=f"Provide subscription details for side B of the trunk.({router_b_fqdn})")
 
         side_b_ae_iface: available_lags_choices(router_b) or str  # type: ignore[valid-type]
         side_b_ae_geant_a_sid: str | None
-        side_b_ae_members: ae_members_side_b  # type: ignore[valid-type]
+        side_b_ae_members: ae_members_side_b
 
-        @validator("side_b_ae_members", allow_reuse=True)
+        @field_validator("side_b_ae_members")
         def validate_side_b_ae_members(cls, side_b_ae_members: list[LAGMember]) -> list[LAGMember]:
             validate_iptrunk_unique_interface(side_b_ae_members)
             vendor = get_router_vendor(router_b)
@@ -182,6 +190,7 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
 
     return (
         initial_user_input.dict()
+        | verify_minimum_links.dict()
         | user_input_router_side_a.dict()
         | user_input_side_a.dict()
         | user_input_router_side_b.dict()
@@ -220,9 +229,9 @@ def initialize_subscription(
     subscription: IptrunkInactive,
     geant_s_sid: str | None,
     iptrunk_type: IptrunkType,
-    iptrunk_description: str,
+    iptrunk_description: str | None,
     iptrunk_speed: PhysicalPortCapacity,
-    iptrunk_number_of_members: int,
+    iptrunk_minimum_links: int,
     side_a_node_id: str,
     side_a_ae_iface: str,
     side_a_ae_geant_a_sid: str | None,
@@ -241,7 +250,7 @@ def initialize_subscription(
     subscription.iptrunk.iptrunk_type = iptrunk_type
     subscription.iptrunk.iptrunk_speed = iptrunk_speed
     subscription.iptrunk.iptrunk_isis_metric = oss_params.GENERAL.isis_high_metric
-    subscription.iptrunk.iptrunk_minimum_links = iptrunk_number_of_members - 1
+    subscription.iptrunk.iptrunk_minimum_links = iptrunk_minimum_links
 
     subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node = side_a
     subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_iface = side_a_ae_iface
@@ -331,7 +340,7 @@ def check_ip_trunk_connectivity(
     execute_playbook(
         playbook_name="iptrunks_checks.yaml",
         callback_route=callback_route,
-        inventory=subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn,
+        inventory=subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn,  # type: ignore[arg-type]
         extra_vars=extra_vars,
     )
 
@@ -405,7 +414,7 @@ def check_ip_trunk_isis(
     execute_playbook(
         playbook_name="iptrunks_checks.yaml",
         callback_route=callback_route,
-        inventory=subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn,
+        inventory=subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn,  # type: ignore[arg-type]
         extra_vars=extra_vars,
     )
 
@@ -436,9 +445,9 @@ def reserve_interfaces_in_netbox(subscription: IptrunkInactive) -> State:
         if get_router_vendor(trunk_side.iptrunk_side_node.owner_subscription_id) == Vendor.NOKIA:
             # Create :term:`LAG` interfaces
             lag_interface: Interfaces = nbclient.create_interface(
-                iface_name=trunk_side.iptrunk_side_ae_iface,
+                iface_name=trunk_side.iptrunk_side_ae_iface,  # type: ignore[arg-type]
                 interface_type="lag",
-                device_name=trunk_side.iptrunk_side_node.router_fqdn,
+                device_name=trunk_side.iptrunk_side_node.router_fqdn,  # type: ignore[arg-type]
                 description=str(subscription.subscription_id),
                 enabled=True,
             )
@@ -447,14 +456,14 @@ def reserve_interfaces_in_netbox(subscription: IptrunkInactive) -> State:
             # Reserve interfaces
             for interface in trunk_side.iptrunk_side_ae_members:
                 nbclient.attach_interface_to_lag(
-                    device_name=trunk_side.iptrunk_side_node.router_fqdn,
+                    device_name=trunk_side.iptrunk_side_node.router_fqdn,  # type: ignore[arg-type]
                     lag_name=lag_interface.name,
-                    iface_name=interface.interface_name,
+                    iface_name=interface.interface_name,  # type: ignore[arg-type]
                     description=str(subscription.subscription_id),
                 )
                 nbclient.reserve_interface(
-                    device_name=trunk_side.iptrunk_side_node.router_fqdn,
-                    iface_name=interface.interface_name,
+                    device_name=trunk_side.iptrunk_side_node.router_fqdn,  # type: ignore[arg-type]
+                    iface_name=interface.interface_name,  # type: ignore[arg-type]
                 )
     return {
         "subscription": subscription,
@@ -484,25 +493,15 @@ def netbox_allocate_side_b_interfaces(subscription: IptrunkInactive) -> None:
     _allocate_interfaces_in_netbox(subscription.iptrunk.iptrunk_sides[1])
 
 
-@inputstep("Prompt for new Sharepoint checklist", assignee=Assignee.SYSTEM)
-def prompt_start_new_checklist(subscription: IptrunkProvisioning) -> FormGenerator:
-    """Prompt the operator to start a new checklist in Sharepoint for approving this new IP trunk."""
-    oss_params = load_oss_params()
-
-    class SharepointPrompt(FormPage):
-        class Config:
-            title = "Start new checklist"
-
-        info_label_1: Label = (
-            f"Visit {oss_params.SHAREPOINT.checklist_site_url} and start a new Sharepoint checklist for an IPtrunk "  # type: ignore[assignment]
-            f"from {subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn} to "
-            f"{subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn}."
-        )
-        info_label_2: Label = "Once this is done, click proceed to finish the workflow."  # type: ignore[assignment]
-
-    yield SharepointPrompt
+@step("Create a new SharePoint checklist item")
+def create_new_sharepoint_checklist(subscription: IptrunkProvisioning, tt_number: str) -> State:
+    """Create a new checklist item in SharePoint for approving this IPtrunk."""
+    new_list_item_url = SharePointClient().add_list_item(
+        "ip_trunk",
+        {"Title": f"{subscription.description} - {subscription.iptrunk.geant_s_sid}", "TT_NUMBER": tt_number},
+    )
 
-    return {}
+    return {"checklist_url": new_list_item_url}
 
 
 @workflow(
@@ -543,7 +542,8 @@ def create_iptrunk() -> StepList:
         >> side_a_is_nokia(netbox_allocate_side_a_interfaces)
         >> side_b_is_nokia(netbox_allocate_side_b_interfaces)
         >> set_status(SubscriptionLifecycle.PROVISIONING)
-        >> prompt_start_new_checklist
+        >> create_new_sharepoint_checklist
+        >> prompt_sharepoint_checklist_url
         >> resync
         >> done
     )
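
The interactive Sharepoint prompt above is replaced by an automated step that creates the checklist item and stores its URL in the workflow state, followed by prompt_sharepoint_checklist_url, which lives in gso.utils.workflow_steps and is not part of this diff. A minimal sketch of what such a prompt step could look like, assuming it only surfaces the stored checklist_url to the operator:

    from orchestrator.config.assignee import Assignee
    from orchestrator.forms import FormPage
    from orchestrator.types import FormGenerator
    from orchestrator.workflow import inputstep
    from pydantic_forms.validators import Label


    @inputstep("Prompt SharePoint checklist URL", assignee=Assignee.SYSTEM)
    def prompt_sharepoint_checklist_url(checklist_url: str) -> FormGenerator:
        """Show the operator the URL of the newly created checklist item."""

        class SharepointPrompt(FormPage):
            info_label_1: Label = f"A new checklist item has been created at {checklist_url}."
            info_label_2: Label = "Once the checklist has been started, click proceed to finish the workflow."

        yield SharepointPrompt

        return {}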
diff --git a/gso/workflows/iptrunk/deploy_twamp.py b/gso/workflows/iptrunk/deploy_twamp.py
index 64483e60b8cae8717ad553c3f03bb03651fa6299..c8342f9a44e996ede5a30de69b06faabf846bc14 100644
--- a/gso/workflows/iptrunk/deploy_twamp.py
+++ b/gso/workflows/iptrunk/deploy_twamp.py
@@ -10,7 +10,7 @@ from orchestrator.utils.json import json_dumps
 from orchestrator.workflow import StepList, done, init, step, workflow
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import validator
+from pydantic import field_validator
 
 from gso.products.product_types.iptrunk import Iptrunk
 from gso.services.lso_client import execute_playbook, lso_interaction
@@ -24,11 +24,11 @@ def _initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
         info_label: Label = (
             "Please confirm deployment of TWAMP on IP trunk from "
             f"{trunk.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn} to "
-            f"{trunk.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn}"  # type: ignore[assignment]
+            f"{trunk.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn}"
         )
         tt_number: str
 
-        @validator("tt_number", allow_reuse=True)
+        @field_validator("tt_number")
         def validate_tt_number(cls, tt_number: str) -> str:
             return validate_tt_number(tt_number)
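
The validator-to-field_validator swap seen here recurs throughout this change set; a standalone sketch of the pydantic v2 pattern, with an illustrative field name and check:

    from pydantic import BaseModel, field_validator


    class TicketForm(BaseModel):
        tt_number: str

        # pydantic v1: @validator("tt_number", allow_reuse=True)
        @field_validator("tt_number")
        @classmethod
        def tt_number_not_empty(cls, tt_number: str) -> str:
            if not tt_number.strip():
                msg = "TT number must not be empty."
                raise ValueError(msg)
            return tt_number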
 
diff --git a/gso/workflows/iptrunk/import_iptrunk.py b/gso/workflows/iptrunk/import_iptrunk.py
new file mode 100644
index 0000000000000000000000000000000000000000..36dce40de9daa4167b58f3e97458523a507b8203
--- /dev/null
+++ b/gso/workflows/iptrunk/import_iptrunk.py
@@ -0,0 +1,27 @@
+"""A modification workflow for migrating an ImportedIptrunk to an Iptrunk subscription."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+
+from gso.products import ProductName
+from gso.products.product_types.iptrunk import ImportedIptrunk, Iptrunk
+from gso.services.subscriptions import get_product_id_by_name
+
+
+@step("Create new IP trunk subscription")
+def import_iptrunk_subscription(subscription_id: UUIDstr) -> State:
+    """Take an ImportedIptrunk subscription, and turn it into an Iptrunk subscription."""
+    old_iptrunk = ImportedIptrunk.from_subscription(subscription_id)
+    new_subscription_id = get_product_id_by_name(ProductName.IP_TRUNK)
+    new_subscription = Iptrunk.from_other_product(old_iptrunk, new_subscription_id)  # type: ignore[arg-type]
+
+    return {"subscription": new_subscription}
+
+
+@workflow("Import Iptrunk", target=Target.MODIFY, initial_input_form=wrap_modify_initial_input_form(None))
+def import_iptrunk() -> StepList:
+    """Modify an ImportedIptrunk subscription into an Iptrunk subscription to complete the import."""
+    return init >> store_process_subscription(Target.MODIFY) >> unsync >> import_iptrunk_subscription >> resync >> done
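
For the orchestrator to pick up the new workflow it still needs to be registered; a minimal sketch of the usual orchestrator-core registration, assuming it is added to gso/workflows/__init__.py (that file is not shown in this diff):

    from orchestrator.workflows import LazyWorkflowInstance

    LazyWorkflowInstance("gso.workflows.iptrunk.import_iptrunk", "import_iptrunk")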
diff --git a/gso/workflows/iptrunk/migrate_iptrunk.py b/gso/workflows/iptrunk/migrate_iptrunk.py
index 96ee4abb3ea30356ae701c90c22f2db27669c949..6f99015c8bc3518d616dc919927b2ca673e3bd38 100644
--- a/gso/workflows/iptrunk/migrate_iptrunk.py
+++ b/gso/workflows/iptrunk/migrate_iptrunk.py
@@ -7,20 +7,22 @@ configured to run from A to C. B is then no longer associated with this IP trunk
 import copy
 import json
 import re
+from typing import Annotated
 from uuid import uuid4
 
+from annotated_types import Len
 from orchestrator import step, workflow
 from orchestrator.config.assignee import Assignee
 from orchestrator.forms import FormPage
-from orchestrator.forms.validators import Choice, Label, UniqueConstrainedList
+from orchestrator.forms.validators import Choice, Label
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, UUIDstr
 from orchestrator.utils.json import json_dumps
 from orchestrator.workflow import StepList, conditional, done, init, inputstep
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import validator
-from pydantic_forms.core import ReadOnlyField
+from pydantic import AfterValidator, ConfigDict, field_validator
+from pydantic_forms.validators import ReadOnlyField, validate_unique_list
 from pynetbox.models.dcim import Interfaces
 
 from gso.products.product_blocks.iptrunk import IptrunkInterfaceBlock
@@ -61,16 +63,15 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     )
 
     class IPTrunkMigrateForm(FormPage):
-        class Config:
-            title = form_title
+        model_config = ConfigDict(title=form_title)
 
         tt_number: str
         replace_side: replaced_side_enum  # type: ignore[valid-type]
-        warning_label: Label = "Are we moving to a different Site?"  # type: ignore[assignment]
+        warning_label: Label = "Are we moving to a different Site?"
         migrate_to_different_site: bool = False
         restore_isis_metric: bool = True
 
-        @validator("tt_number", allow_reuse=True, always=True)
+        @field_validator("tt_number", mode="before")
         def validate_tt_number(cls, tt_number: str) -> str:
             return validate_tt_number(tt_number)
 
@@ -98,8 +99,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     new_router_enum = Choice("Select a new router", zip(routers.keys(), routers.items(), strict=True))  # type: ignore[arg-type]
 
     class NewSideIPTrunkRouterForm(FormPage):
-        class Config:
-            title = form_title
+        model_config = ConfigDict(title=form_title)
 
         new_node: new_router_enum  # type: ignore[valid-type]
 
@@ -116,18 +116,23 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
                 subscription.iptrunk.iptrunk_speed,
             )
 
-        class NokiaAeMembers(UniqueConstrainedList[NokiaLAGMember]):
-            min_items = len(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_members)
-            max_items = len(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members)
-
-        ae_members = NokiaAeMembers
+        ae_members = Annotated[
+            list[NokiaLAGMember],
+            AfterValidator(validate_unique_list),
+            Len(
+                min_length=len(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_members),
+                max_length=len(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members),
+            ),
+        ]
     else:
-
-        class JuniperLagMember(UniqueConstrainedList[LAGMember]):
-            min_items = len(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_members)
-            max_items = len(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members)
-
-        ae_members = JuniperLagMember  # type: ignore[assignment]
+        ae_members = Annotated[  # type: ignore[assignment, misc]
+            list[LAGMember],
+            AfterValidator(validate_unique_list),
+            Len(
+                min_length=len(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_members),
+                max_length=len(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members),
+            ),
+        ]
 
     replace_index = (
         0
@@ -136,22 +141,21 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
         else 1
     )
     existing_lag_ae_members = [
-        {
-            "interface_name": iface.interface_name,
-            "interface_description": iface.interface_description,
-        }
+        LAGMember(
+            interface_name=iface.interface_name,
+            interface_description=iface.interface_description,
+        )
         for iface in subscription.iptrunk.iptrunk_sides[replace_index].iptrunk_side_ae_members
     ]
 
     class NewSideIPTrunkForm(FormPage):
-        class Config:
-            title = form_title
+        model_config = ConfigDict(title=form_title)
 
         new_lag_interface: side_a_ae_iface  # type: ignore[valid-type]
-        existing_lag_interface: list[LAGMember] = ReadOnlyField(existing_lag_ae_members)
-        new_lag_member_interfaces: ae_members  # type: ignore[valid-type]
+        existing_lag_interface: ReadOnlyField(existing_lag_ae_members, default_type=list[LAGMember])  # type: ignore[valid-type]
+        new_lag_member_interfaces: ae_members
 
-        @validator("new_lag_interface", allow_reuse=True, pre=True, always=True)
+        @field_validator("new_lag_interface")
         def lag_interface_proper_name(cls, new_lag_interface: str) -> str:
             if get_router_vendor(new_router) == Vendor.JUNIPER:
                 juniper_lag_re = re.compile("^ae\\d{1,2}$")
@@ -160,7 +164,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
                     raise ValueError(msg)
             return new_lag_interface
 
-        @validator("new_lag_member_interfaces", allow_reuse=True)
+        @field_validator("new_lag_member_interfaces")
         def is_interface_names_valid_juniper(cls, new_lag_member_interfaces: list[LAGMember]) -> list[LAGMember]:
             vendor = get_router_vendor(new_router)
             return validate_interface_name_list(new_lag_member_interfaces, vendor)
@@ -218,19 +222,44 @@ def calculate_old_side_data(subscription: Iptrunk, replace_index: int) -> State:
     return {"old_side_data": old_side_data}
 
 
-@step("Check Optical levels on the trunk endpoint")
-def check_ip_trunk_optical_levels(
+@step("Check Optical PRE levels on the trunk endpoint")
+def check_ip_trunk_optical_levels_pre(subscription: Iptrunk, callback_route: str) -> State:
+    """Check Optical PRE levels on the trunk."""
+    extra_vars = {"wfo_ip_trunk_json": json.loads(json_dumps(subscription)), "check": "optical_pre"}
+
+    execute_playbook(
+        playbook_name="iptrunks_checks.yaml",
+        callback_route=callback_route,
+        inventory=f"{subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn}\n"
+        f"{subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn}\n",
+        extra_vars=extra_vars,
+    )
+
+    return {"subscription": subscription}
+
+
+@step("Check Optical POST levels on the trunk endpoint")
+def check_ip_trunk_optical_levels_post(
     subscription: Iptrunk,
     callback_route: str,
+    new_node: Router,
+    new_lag_member_interfaces: list[dict],
     replace_index: int,
 ) -> State:
-    """Check Optical levels on the trunk."""
-    extra_vars = {"wfo_ip_trunk_json": json.loads(json_dumps(subscription)), "check": "optical"}
+    """Check Optical POST levels on the trunk."""
+    extra_vars = {
+        "wfo_ip_trunk_json": json.loads(json_dumps(subscription)),
+        "new_node": json.loads(json_dumps(new_node)),
+        "new_lag_member_interfaces": new_lag_member_interfaces,
+        "replace_index": replace_index,
+        "check": "optical_post",
+    }
 
     execute_playbook(
         playbook_name="iptrunks_checks.yaml",
         callback_route=callback_route,
-        inventory=subscription.iptrunk.iptrunk_sides[1 - replace_index].iptrunk_side_node.router_fqdn,
+        inventory=f"{subscription.iptrunk.iptrunk_sides[1 - replace_index].iptrunk_side_node.router_fqdn}\n"
+        f"{new_node.router.router_fqdn}\n",
         extra_vars=extra_vars,
     )
 
@@ -392,10 +421,9 @@ def confirm_continue_move_fiber() -> FormGenerator:
     """Wait for confirmation from an operator that the physical fiber has been moved."""
 
     class ProvisioningResultPage(FormPage):
-        class Config:
-            title = "Please confirm before continuing"
+        model_config = ConfigDict(title="Please confirm before continuing")
 
-        info_label: Label = "New trunk interface has been deployed, wait for the physical connection to be moved."  # type: ignore[assignment]
+        info_label: Label = "New trunk interface has been deployed, wait for the physical connection to be moved."
 
     yield ProvisioningResultPage
 
@@ -482,10 +510,9 @@ def confirm_continue_restore_isis() -> FormGenerator:
     """Wait for an operator to confirm that the old :term:`ISIS` metric should be restored."""
 
     class ProvisioningResultPage(FormPage):
-        class Config:
-            title = "Please confirm before continuing"
+        model_config = ConfigDict(title="Please confirm before continuing")
 
-        info_label: Label = "ISIS config has been deployed, confirm if you want to restore the old metric."  # type: ignore[assignment]
+        info_label: Label = "ISIS config has been deployed, confirm if you want to restore the old metric."
 
     yield ProvisioningResultPage
 
@@ -707,13 +734,13 @@ def migrate_iptrunk() -> StepList:
         >> new_side_is_nokia(netbox_reserve_interfaces)
         >> calculate_old_side_data
         >> lso_interaction(set_isis_to_max)
-        >> lso_interaction(check_ip_trunk_optical_levels)
+        >> lso_interaction(check_ip_trunk_optical_levels_pre)
         >> lso_interaction(disable_old_config_dry)
         >> lso_interaction(disable_old_config_real)
         >> lso_interaction(deploy_new_config_dry)
         >> lso_interaction(deploy_new_config_real)
         >> confirm_continue_move_fiber
-        >> lso_interaction(check_ip_trunk_optical_levels)
+        >> lso_interaction(check_ip_trunk_optical_levels_post)
         >> lso_interaction(check_ip_trunk_connectivity)
         >> lso_interaction(deploy_new_isis)
         >> lso_interaction(check_ip_trunk_isis)
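
The UniqueConstrainedList subclasses are replaced by Annotated list types that combine a uniqueness validator with a length constraint. A self-contained sketch of how such a type behaves under pydantic v2; the inline unique() function is a stand-in for pydantic_forms.validators.validate_unique_list:

    from typing import Annotated

    from annotated_types import Len
    from pydantic import AfterValidator, BaseModel


    def unique(items: list) -> list:
        """Stand-in for pydantic_forms.validators.validate_unique_list."""
        if len(items) != len(set(items)):
            msg = "Items must be unique."
            raise ValueError(msg)
        return items


    AeMembers = Annotated[list[str], AfterValidator(unique), Len(min_length=1, max_length=2)]


    class MigrateForm(BaseModel):
        ae_members: AeMembers


    MigrateForm(ae_members=["et-0/0/0", "et-0/0/1"])    # passes
    # MigrateForm(ae_members=["et-0/0/0", "et-0/0/0"])  # rejected: items must be unique
    # MigrateForm(ae_members=[])                        # rejected: too short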
diff --git a/gso/workflows/iptrunk/modify_trunk_interface.py b/gso/workflows/iptrunk/modify_trunk_interface.py
index 27111c62cbba9a1a92c173a035a3df163ac29d3f..92f71d6df5d7c8dde4f34fdb04925857f27e7512 100644
--- a/gso/workflows/iptrunk/modify_trunk_interface.py
+++ b/gso/workflows/iptrunk/modify_trunk_interface.py
@@ -1,19 +1,19 @@
 """A modification workflow that updates the :term:`LAG` interfaces that are part of an existing IP trunk."""
 
-import ipaddress
 import json
+from typing import Annotated, TypeVar
 from uuid import UUID, uuid4
 
-from orchestrator.forms import FormPage, ReadOnlyField
-from orchestrator.forms.validators import UniqueConstrainedList
+from annotated_types import Len
+from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, UUIDstr
 from orchestrator.utils.json import json_dumps
 from orchestrator.workflow import StepList, conditional, done, init, step, workflow
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import validator
-from pydantic_forms.validators import Label
+from pydantic import AfterValidator, ConfigDict, field_validator
+from pydantic_forms.validators import Label, ReadOnlyField, validate_unique_list
 
 from gso.products.product_blocks.iptrunk import (
     IptrunkInterfaceBlock,
@@ -33,10 +33,14 @@ from gso.utils.helpers import (
     validate_iptrunk_unique_interface,
     validate_tt_number,
 )
-from gso.utils.shared_enums import Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, Vendor
 
+T = TypeVar("T", bound=LAGMember)
 
-def initialize_ae_members(subscription: Iptrunk, initial_user_input: dict, side_index: int) -> type[LAGMember]:
+
+def initialize_ae_members(
+    subscription: Iptrunk, initial_user_input: dict, side_index: int
+) -> Annotated[list[LAGMember], ""]:
     """Initialize the list of AE members."""
     router = subscription.iptrunk.iptrunk_sides[side_index].iptrunk_side_node
     router_vendor = get_router_vendor(router.owner_subscription_id)
@@ -60,19 +64,17 @@ def initialize_ae_members(subscription: Iptrunk, initial_user_input: dict, side_
                 )
             )
 
-        class NokiaAeMembers(UniqueConstrainedList[NokiaLAGMember]):
-            min_items = iptrunk_number_of_members
-            max_items = iptrunk_number_of_members
-
-        ae_members = NokiaAeMembers
-    else:
+        return Annotated[
+            list[NokiaLAGMember],
+            AfterValidator(validate_unique_list),
+            Len(min_length=iptrunk_number_of_members, max_length=iptrunk_number_of_members),
+        ]  # type: ignore[return-value]
 
-        class JuniperAeMembers(UniqueConstrainedList[LAGMember]):
-            min_items = iptrunk_number_of_members
-            max_items = iptrunk_number_of_members
-
-        ae_members = JuniperAeMembers  # type: ignore[assignment]
-    return ae_members  # type: ignore[return-value]
+    return Annotated[
+        list[LAGMember],
+        AfterValidator(validate_unique_list),
+        Len(min_length=iptrunk_number_of_members, max_length=iptrunk_number_of_members),
+    ]  # type: ignore[return-value]
 
 
 def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
@@ -82,19 +84,23 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class ModifyIptrunkForm(FormPage):
         tt_number: str
         geant_s_sid: str | None = subscription.iptrunk.geant_s_sid
-        iptrunk_description: str = subscription.iptrunk.iptrunk_description
+        iptrunk_description: str | None = subscription.iptrunk.iptrunk_description
         iptrunk_type: IptrunkType = subscription.iptrunk.iptrunk_type
         warning_label: Label = (
             "Changing the PhyPortCapacity will result in the deletion of all AE members. "
-            "You will need to add the new AE members in the next steps."  # type: ignore[assignment]
+            "You will need to add the new AE members in the next steps."
         )
         iptrunk_speed: PhysicalPortCapacity = subscription.iptrunk.iptrunk_speed
         iptrunk_number_of_members: int = subscription.iptrunk.iptrunk_minimum_links + 1
-        iptrunk_isis_metric: int = ReadOnlyField(subscription.iptrunk.iptrunk_isis_metric)
-        iptrunk_ipv4_network: ipaddress.IPv4Network = ReadOnlyField(subscription.iptrunk.iptrunk_ipv4_network)
-        iptrunk_ipv6_network: ipaddress.IPv6Network = ReadOnlyField(subscription.iptrunk.iptrunk_ipv6_network)
+        iptrunk_isis_metric: ReadOnlyField(subscription.iptrunk.iptrunk_isis_metric, default_type=int)  # type: ignore[valid-type]
+        iptrunk_ipv4_network: ReadOnlyField(  # type: ignore[valid-type]
+            str(subscription.iptrunk.iptrunk_ipv4_network), default_type=IPv4AddressType
+        )
+        iptrunk_ipv6_network: ReadOnlyField(  # type: ignore[valid-type]
+            str(subscription.iptrunk.iptrunk_ipv6_network), default_type=IPv6AddressType
+        )
 
-        @validator("tt_number", allow_reuse=True)
+        @field_validator("tt_number")
         def validate_tt_number(cls, tt_number: str) -> str:
             return validate_tt_number(tt_number)
 
@@ -102,19 +108,21 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
 
     class VerifyMinimumLinksForm(FormPage):
         info_label: Label = (
-            f"This is the calculated minimum-links for this LAG: " f"{initial_user_input.iptrunk_number_of_members - 1}"  # type: ignore[assignment]
+            f"This is the calculated minimum-links for this LAG: " f"{initial_user_input.iptrunk_number_of_members - 1}"
         )
-        info_label2: Label = "Please confirm or modify."  # type: ignore[assignment]
+        iptrunk_minimum_links: int = initial_user_input.iptrunk_number_of_members - 1
+        info_label2: Label = "Please confirm or modify."
 
-    yield VerifyMinimumLinksForm
+    verify_minimum_links = yield VerifyMinimumLinksForm
     ae_members_side_a = initialize_ae_members(subscription, initial_user_input.dict(), 0)
 
     class ModifyIptrunkSideAForm(FormPage):
-        class Config:
-            title = "Provide subscription details for side A of the trunk."
+        model_config = ConfigDict(title="Provide subscription details for side A of the trunk.")
 
-        side_a_node: str = ReadOnlyField(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn)
-        side_a_ae_iface: str = ReadOnlyField(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_iface)
+        side_a_node: ReadOnlyField(  # type: ignore[valid-type]
+            subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_fqdn, default_type=str
+        )
+        side_a_ae_iface: ReadOnlyField(subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_iface, default_type=str)  # type: ignore[valid-type]
         side_a_ae_geant_a_sid: str | None = subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_geant_a_sid
         side_a_ae_members: ae_members_side_a = (  # type: ignore[valid-type]
             subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_members
@@ -122,11 +130,11 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             else []
         )
 
-        @validator("side_a_ae_members", allow_reuse=True)
+        @field_validator("side_a_ae_members")
         def validate_iptrunk_unique_interface_side_a(cls, side_a_ae_members: list[LAGMember]) -> list[LAGMember]:
             return validate_iptrunk_unique_interface(side_a_ae_members)
 
-        @validator("side_a_ae_members", allow_reuse=True)
+        @field_validator("side_a_ae_members")
         def validate_interface_name_members(cls, side_a_ae_members: list[LAGMember]) -> list[LAGMember]:
             vendor = subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.vendor
             return validate_interface_name_list(side_a_ae_members, vendor)
@@ -135,11 +143,12 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     ae_members_side_b = initialize_ae_members(subscription, initial_user_input.dict(), 1)
 
     class ModifyIptrunkSideBForm(FormPage):
-        class Config:
-            title = "Provide subscription details for side B of the trunk."
+        model_config = ConfigDict(title="Provide subscription details for side B of the trunk.")
 
-        side_b_node: str = ReadOnlyField(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn)
-        side_b_ae_iface: str = ReadOnlyField(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_iface)
+        side_b_node: ReadOnlyField(  # type: ignore[valid-type]
+            subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_fqdn, default_type=str
+        )
+        side_b_ae_iface: ReadOnlyField(subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_iface, default_type=str)  # type: ignore[valid-type]
         side_b_ae_geant_a_sid: str | None = subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_geant_a_sid
         side_b_ae_members: ae_members_side_b = (  # type: ignore[valid-type]
             subscription.iptrunk.iptrunk_sides[1].iptrunk_side_ae_members
@@ -147,18 +156,18 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
             else []
         )
 
-        @validator("side_b_ae_members", allow_reuse=True)
+        @field_validator("side_b_ae_members")
         def validate_iptrunk_unique_interface_side_b(cls, side_b_ae_members: list[LAGMember]) -> list[LAGMember]:
             return validate_iptrunk_unique_interface(side_b_ae_members)
 
-        @validator("side_b_ae_members", allow_reuse=True)
+        @field_validator("side_b_ae_members")
         def validate_interface_name_members(cls, side_b_ae_members: list[LAGMember]) -> list[LAGMember]:
             vendor = subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.vendor
             return validate_interface_name_list(side_b_ae_members, vendor)
 
     user_input_side_b = yield ModifyIptrunkSideBForm
 
-    return initial_user_input.dict() | user_input_side_a.dict() | user_input_side_b.dict()
+    return initial_user_input.dict() | user_input_side_a.dict() | user_input_side_b.dict() | verify_minimum_links.dict()
 
 
 @step("Update subscription")
@@ -166,9 +175,9 @@ def modify_iptrunk_subscription(
     subscription: Iptrunk,
     geant_s_sid: str | None,
     iptrunk_type: IptrunkType,
-    iptrunk_description: str,
+    iptrunk_description: str | None,
     iptrunk_speed: PhysicalPortCapacity,
-    iptrunk_number_of_members: int,
+    iptrunk_minimum_links: int,
     side_a_ae_geant_a_sid: str | None,
     side_a_ae_members: list[dict],
     side_b_ae_geant_a_sid: str | None,
@@ -197,7 +206,7 @@ def modify_iptrunk_subscription(
     subscription.iptrunk.iptrunk_description = iptrunk_description
     subscription.iptrunk.iptrunk_type = iptrunk_type
     subscription.iptrunk.iptrunk_speed = iptrunk_speed
-    subscription.iptrunk.iptrunk_minimum_links = iptrunk_number_of_members - 1
+    subscription.iptrunk.iptrunk_minimum_links = iptrunk_minimum_links
 
     subscription.iptrunk.iptrunk_sides[0].iptrunk_side_ae_geant_a_sid = side_a_ae_geant_a_sid
     #  Flush the old list of member interfaces
@@ -215,7 +224,11 @@ def modify_iptrunk_subscription(
             IptrunkInterfaceBlock.new(subscription_id=uuid4(), **member),
         )
 
-    subscription.description = f"IP trunk, geant_s_sid:{geant_s_sid}"
+    side_names = sorted([
+        subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
+        subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
+    ])
+    subscription.description = f"IP trunk {side_names[0]} {side_names[1]}, geant_s_sid:{geant_s_sid}"
 
     return {
         "subscription": subscription,
diff --git a/gso/workflows/iptrunk/terminate_iptrunk.py b/gso/workflows/iptrunk/terminate_iptrunk.py
index a2cb6727215a9f6184859c548dba7c3162127559..1ae61b80cd486d46f0f0fbea139868b01dc96d15 100644
--- a/gso/workflows/iptrunk/terminate_iptrunk.py
+++ b/gso/workflows/iptrunk/terminate_iptrunk.py
@@ -16,7 +16,7 @@ from orchestrator.workflows.steps import (
     unsync,
 )
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import validator
+from pydantic import field_validator
 
 from gso.products.product_blocks.iptrunk import IptrunkSideBlock
 from gso.products.product_types.iptrunk import Iptrunk
@@ -35,18 +35,18 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class TerminateForm(FormPage):
         if iptrunk.status == SubscriptionLifecycle.INITIAL:
             info_label_2: Label = (
-                "This will immediately mark the subscription as terminated, preventing any other workflows from "  # type:ignore[assignment]
+                "This will immediately mark the subscription as terminated, preventing any other workflows from "
                 "interacting with this product subscription."
             )
-            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."  # type:ignore[assignment]
+            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
         tt_number: str
         termination_label: Label = (
-            "Please confirm whether configuration should get removed from the A and B sides of the trunk."  # type: ignore[assignment]
+            "Please confirm whether configuration should get removed from the A and B sides of the trunk."
         )
         remove_configuration: bool = True
 
-        @validator("tt_number", allow_reuse=True)
+        @field_validator("tt_number")
         def validate_tt_number(cls, tt_number: str) -> str:
             return validate_tt_number(tt_number)
 
diff --git a/gso/workflows/office_router/__init__.py b/gso/workflows/office_router/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a947a5007a60e3629ce913aebbf4b80d22dda15
--- /dev/null
+++ b/gso/workflows/office_router/__init__.py
@@ -0,0 +1 @@
+"""Workflows for Office router products."""
diff --git a/gso/workflows/tasks/import_office_router.py b/gso/workflows/office_router/create_imported_office_router.py
similarity index 75%
rename from gso/workflows/tasks/import_office_router.py
rename to gso/workflows/office_router/create_imported_office_router.py
index 9168cdae0150a82a1893b6b3ceae450b5df542b5..e1cda4cdd924563c5eb3495997dce651fd1fd724 100644
--- a/gso/workflows/tasks/import_office_router.py
+++ b/gso/workflows/office_router/create_imported_office_router.py
@@ -1,29 +1,27 @@
 """A creation workflow that adds existing office routers to the coreDB."""
 
-import ipaddress
-
 from orchestrator import workflow
 from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
 from orchestrator.workflow import StepList, done, init, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import ConfigDict
 
 from gso.products import ProductName
-from gso.products.product_types import office_router
-from gso.products.product_types.office_router import OfficeRouterInactive
+from gso.products.product_types.office_router import ImportedOfficeRouterInactive
 from gso.services import subscriptions
 from gso.services.partners import get_partner_by_name
 from gso.services.subscriptions import get_site_by_name
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 
 @step("Create subscription")
 def create_subscription(partner: str) -> State:
     """Create a new subscription object."""
     partner_id = get_partner_by_name(partner)["partner_id"]
-    product_id = subscriptions.get_product_id_by_name(ProductName.OFFICE_ROUTER)
-    subscription = OfficeRouterInactive.from_product_id(product_id, partner_id)
+    product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_OFFICE_ROUTER)
+    subscription = ImportedOfficeRouterInactive.from_product_id(product_id, partner_id)
 
     return {
         "subscription": subscription,
@@ -35,15 +33,14 @@ def initial_input_form_generator() -> FormGenerator:
     """Generate a form that is filled in using information passed through the :term:`API` endpoint."""
 
     class ImportOfficeRouter(FormPage):
-        class Config:
-            title = "Import an office router"
+        model_config = ConfigDict(title="Import an office router")
 
         partner: str
         office_router_site: str
         office_router_fqdn: str
         office_router_ts_port: PortNumber
-        office_router_lo_ipv4_address: ipaddress.IPv4Address
-        office_router_lo_ipv6_address: ipaddress.IPv6Address
+        office_router_lo_ipv4_address: IPv4AddressType
+        office_router_lo_ipv6_address: IPv6AddressType
 
     user_input = yield ImportOfficeRouter
 
@@ -52,12 +49,12 @@ def initial_input_form_generator() -> FormGenerator:
 
 @step("Initialize subscription")
 def initialize_subscription(
-    subscription: OfficeRouterInactive,
+    subscription: ImportedOfficeRouterInactive,
     office_router_fqdn: str,
     office_router_ts_port: PortNumber,
     office_router_site: str,
-    office_router_lo_ipv4_address: ipaddress.IPv4Address | None = None,
-    office_router_lo_ipv6_address: ipaddress.IPv6Address | None = None,
+    office_router_lo_ipv4_address: IPv4AddressType | None = None,
+    office_router_lo_ipv6_address: IPv6AddressType | None = None,
 ) -> State:
     """Initialise the office router subscription using input data."""
     subscription.office_router.office_router_ts_port = office_router_ts_port
@@ -69,10 +66,6 @@ def initialize_subscription(
     subscription.office_router.office_router_lo_ipv6_address = office_router_lo_ipv6_address
     subscription.office_router.vendor = Vendor.JUNIPER
 
-    subscription = office_router.OfficeRouterProvisioning.from_other_lifecycle(
-        subscription, SubscriptionLifecycle.PROVISIONING
-    )
-
     return {"subscription": subscription}
 
 
@@ -81,7 +74,7 @@ def initialize_subscription(
     initial_input_form=initial_input_form_generator,
     target=Target.CREATE,
 )
-def import_office_router() -> StepList:
+def create_imported_office_router() -> StepList:
     """Import an office router without provisioning it."""
     return (
         init
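
The plain ipaddress annotations are swapped for the shared IPv4AddressType and IPv6AddressType. Their real definitions live in gso.utils.shared_enums and are not shown here; as a rough, assumed sketch, such pydantic-v2-friendly address types are often built like this:

    import ipaddress
    from typing import Annotated

    from pydantic import PlainSerializer

    # Validate as an IP address, serialize back to a plain string so the value
    # round-trips cleanly through JSON workflow state and forms.
    IPv4AddressType = Annotated[ipaddress.IPv4Address, PlainSerializer(lambda v: str(v), return_type=str)]
    IPv6AddressType = Annotated[ipaddress.IPv6Address, PlainSerializer(lambda v: str(v), return_type=str)]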
diff --git a/gso/workflows/office_router/import_office_router.py b/gso/workflows/office_router/import_office_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d1d67abdd28b7a67c68cec92f05d0268e52de1f
--- /dev/null
+++ b/gso/workflows/office_router/import_office_router.py
@@ -0,0 +1,34 @@
+"""A modification workflow for migrating an ImportedOfficeRouter to an OfficeRouter subscription."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+
+from gso.products import ProductName
+from gso.products.product_types.office_router import ImportedOfficeRouter, OfficeRouter
+from gso.services.subscriptions import get_product_id_by_name
+
+
+@step("Create new office_router subscription")
+def import_office_router_subscription(subscription_id: UUIDstr) -> State:
+    """Take an ImportedOfficeRouter subscription, and turn it into an OfficeRouter subscription."""
+    old_office_router = ImportedOfficeRouter.from_subscription(subscription_id)
+    new_subscription_id = get_product_id_by_name(ProductName.OFFICE_ROUTER)
+    new_subscription = OfficeRouter.from_other_product(old_office_router, new_subscription_id)  # type: ignore[arg-type]
+
+    return {"subscription": new_subscription}
+
+
+@workflow("Import OfficeRouter", target=Target.MODIFY, initial_input_form=wrap_modify_initial_input_form(None))
+def import_office_router() -> StepList:
+    """Modify an ImportedOfficeRouter subscription into an OfficeRouter subscription to complete the import."""
+    return (
+        init
+        >> store_process_subscription(Target.MODIFY)
+        >> unsync
+        >> import_office_router_subscription
+        >> resync
+        >> done
+    )
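
Together with create_imported_office_router above this forms the two-phase import: first an ImportedOfficeRouter subscription is created, then this workflow promotes it. A hedged sketch of how the second phase might be started programmatically, assuming orchestrator-core's start_process helper is used; the subscription id and the actual trigger mechanism are illustrative:

    from orchestrator.services.processes import start_process

    # Promote a previously imported office router (the subscription id is illustrative).
    start_process(
        "import_office_router",
        user_inputs=[{"subscription_id": "11111111-2222-3333-4444-555555555555"}],
    )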
diff --git a/gso/workflows/router/activate_router.py b/gso/workflows/router/activate_router.py
index 4de880f4e2b8a9cc13b7c1c80315fef634e577c9..b742c58cd5476685fc6ea8199fbcfa96cf865527 100644
--- a/gso/workflows/router/activate_router.py
+++ b/gso/workflows/router/activate_router.py
@@ -16,7 +16,7 @@ def _initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
     router = Router.from_subscription(subscription_id)
 
     class ActivateRouterForm(FormPage):
-        info_label: Label = "Start approval process for router activation."  # type:ignore[assignment]
+        info_label: Label = "Start approval process for router activation."
 
     user_input = yield ActivateRouterForm
 
@@ -28,7 +28,7 @@ def verify_complete_checklist() -> FormGenerator:
     """Show a form for the operator to input a link to the completed checklist."""
 
     class VerifyCompleteForm(FormPage):
-        info_label: Label = "Verify that the checklist has been completed. Then continue this workflow."  # type: ignore[assignment]
+        info_label: Label = "Verify that the checklist has been completed. Then continue this workflow."
         checklist_url: str = ""
 
     user_input = yield VerifyCompleteForm
diff --git a/gso/workflows/tasks/import_router.py b/gso/workflows/router/create_imported_router.py
similarity index 73%
rename from gso/workflows/tasks/import_router.py
rename to gso/workflows/router/create_imported_router.py
index c71ce26ee47a0e0929842d7261d7c0fd195d2e55..a375965d2f2d770a97b9b00accd7cece86173382 100644
--- a/gso/workflows/tasks/import_router.py
+++ b/gso/workflows/router/create_imported_router.py
@@ -1,32 +1,28 @@
 """A creation workflow that adds an existing router to the service database."""
 
-import ipaddress
-
 from orchestrator import workflow
 from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
 from orchestrator.workflow import StepList, done, init, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import ConfigDict
 
 from gso.products import ProductName
-from gso.products.product_blocks import router as router_pb
 from gso.products.product_blocks.router import RouterRole
-from gso.products.product_types import router
-from gso.products.product_types.router import RouterInactive
-from gso.services import subscriptions
+from gso.products.product_types.router import ImportedRouterInactive
 from gso.services.partners import get_partner_by_name
-from gso.services.subscriptions import get_site_by_name
+from gso.services.subscriptions import get_product_id_by_name, get_site_by_name
 from gso.utils.helpers import generate_fqdn
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, IPv6AddressType, PortNumber, Vendor
 
 
 @step("Create subscription")
 def create_subscription(partner: str) -> State:
     """Create a new subscription object."""
     partner_id = get_partner_by_name(partner)["partner_id"]
-    product_id = subscriptions.get_product_id_by_name(ProductName.ROUTER)
-    subscription = RouterInactive.from_product_id(product_id, partner_id)
+    product_id = get_product_id_by_name(ProductName.IMPORTED_ROUTER)
+    subscription = ImportedRouterInactive.from_product_id(product_id, partner_id)
 
     return {
         "subscription": subscription,
@@ -38,8 +34,7 @@ def initial_input_form_generator() -> FormGenerator:
     """Generate a form that is filled in using information passed through the :term:`API` endpoint."""
 
     class ImportRouter(FormPage):
-        class Config:
-            title = "Import Router"
+        model_config = ConfigDict(title="Import Router")
 
         partner: str
         router_site: str
@@ -47,8 +42,8 @@ def initial_input_form_generator() -> FormGenerator:
         ts_port: int
         router_vendor: Vendor
         router_role: RouterRole
-        router_lo_ipv4_address: ipaddress.IPv4Address
-        router_lo_ipv6_address: ipaddress.IPv6Address
+        router_lo_ipv4_address: IPv4AddressType
+        router_lo_ipv6_address: IPv6AddressType
         router_lo_iso_address: str
 
     user_input = yield ImportRouter
@@ -58,14 +53,14 @@ def initial_input_form_generator() -> FormGenerator:
 
 @step("Initialize subscription")
 def initialize_subscription(
-    subscription: RouterInactive,
+    subscription: ImportedRouterInactive,
     hostname: str,
     ts_port: PortNumber,
     router_site: str,
-    router_role: router_pb.RouterRole,
+    router_role: RouterRole,
     router_vendor: Vendor,
-    router_lo_ipv4_address: ipaddress.IPv4Address | None = None,
-    router_lo_ipv6_address: ipaddress.IPv6Address | None = None,
+    router_lo_ipv4_address: IPv4AddressType | None = None,
+    router_lo_ipv6_address: IPv6AddressType | None = None,
     router_lo_iso_address: str | None = None,
 ) -> State:
     """Initialise the router subscription using input data."""
@@ -82,8 +77,6 @@ def initialize_subscription(
     subscription.router.router_lo_iso_address = router_lo_iso_address
     subscription.router.vendor = router_vendor
 
-    subscription = router.RouterProvisioning.from_other_lifecycle(subscription, SubscriptionLifecycle.PROVISIONING)
-
     return {"subscription": subscription}
 
 
@@ -92,7 +85,7 @@ def initialize_subscription(
     initial_input_form=initial_input_form_generator,
     target=Target.CREATE,
 )
-def import_router() -> StepList:
+def create_imported_router() -> StepList:
     """Import a router without provisioning it."""
     return (
         init
diff --git a/gso/workflows/router/create_router.py b/gso/workflows/router/create_router.py
index 289f201bd45b3064ad352c733a7c81aa1b7b7462..233d982c0ea1a275b12b9540399c1bbd4d8b20dd 100644
--- a/gso/workflows/router/create_router.py
+++ b/gso/workflows/router/create_router.py
@@ -1,6 +1,6 @@
 """A creation workflow for adding a new router to the network."""
 
-from typing import Any
+from typing import Self
 
 from orchestrator.config.assignee import Assignee
 from orchestrator.forms import FormPage
@@ -10,8 +10,8 @@ from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUID
 from orchestrator.workflow import StepList, conditional, done, init, inputstep, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
-from pydantic import validator
-from pydantic_forms.core import ReadOnlyField
+from pydantic import ConfigDict, model_validator
+from pydantic_forms.validators import ReadOnlyField
 
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import RouterInactive, RouterProvisioning
@@ -20,10 +20,15 @@ from gso.services import infoblox, subscriptions
 from gso.services.lso_client import lso_interaction
 from gso.services.netbox_client import NetboxClient
 from gso.services.partners import get_partner_by_name
-from gso.settings import load_oss_params
+from gso.services.sharepoint import SharePointClient
 from gso.utils.helpers import generate_fqdn, iso_from_ipv4
 from gso.utils.shared_enums import PortNumber, Vendor
-from gso.utils.workflow_steps import deploy_base_config_dry, deploy_base_config_real, run_checks_after_base_config
+from gso.utils.workflow_steps import (
+    deploy_base_config_dry,
+    deploy_base_config_real,
+    prompt_sharepoint_checklist_url,
+    run_checks_after_base_config,
+)
 
 
 def _site_selector() -> Choice:
@@ -39,31 +44,30 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     """Gather information about the new router from the operator."""
 
     class CreateRouterForm(FormPage):
-        class Config:
-            title = product_name
+        model_config = ConfigDict(title=product_name)
 
         tt_number: str
-        partner: str = ReadOnlyField("GEANT")
+        partner: ReadOnlyField("GEANT", default_type=str)  # type: ignore[valid-type]
         vendor: Vendor
         router_site: _site_selector()  # type: ignore[valid-type]
         hostname: str
         ts_port: PortNumber
         router_role: RouterRole
 
-        @validator("hostname", allow_reuse=True)
-        def hostname_must_be_available(cls, hostname: str, **kwargs: dict[str, Any]) -> str:
-            router_site = kwargs["values"].get("router_site")
+        @model_validator(mode="after")
+        def hostname_must_be_available(self) -> Self:
+            router_site = self.router_site
             if not router_site:
                 msg = "Please select a site before setting the hostname."
                 raise ValueError(msg)
 
             selected_site = Site.from_subscription(router_site).site
-            input_fqdn = generate_fqdn(hostname, selected_site.site_name, selected_site.site_country_code)
+            input_fqdn = generate_fqdn(self.hostname, selected_site.site_name, selected_site.site_country_code)
             if not infoblox.hostname_available(f"lo0.{input_fqdn}"):
                 msg = f'FQDN "{input_fqdn}" is not available.'
                 raise ValueError(msg)
 
-            return hostname
+            return self
 
     user_input = yield CreateRouterForm
 
@@ -152,18 +156,17 @@ def prompt_reboot_router(subscription: RouterInactive) -> FormGenerator:
     """Wait for confirmation from an operator that the router has been rebooted."""
 
     class RebootPrompt(FormPage):
-        class Config:
-            title = "Please reboot before continuing"
+        model_config = ConfigDict(title="Please reboot before continuing")
 
         if subscription.router.router_site and subscription.router.router_site.site_ts_address:
             info_label_1: Label = (
-                f"Base config has been deployed. Please log in via the console using https://"  # type: ignore[assignment]
+                f"Base config has been deployed. Please log in via the console using https://"
                 f"{subscription.router.router_site.site_ts_address}."
             )
         else:
-            info_label_1 = "Base config has been deployed. Please log in via the console."  # type: ignore[assignment]
+            info_label_1 = "Base config has been deployed. Please log in via the console."
 
-        info_label_2: Label = "Reboot the router, and once it is up again, press submit to continue the workflow."  # type: ignore[assignment]
+        info_label_2: Label = "Reboot the router, and once it is up again, press submit to continue the workflow."
 
     yield RebootPrompt
 
@@ -175,13 +178,12 @@ def prompt_console_login() -> FormGenerator:
     """Wait for confirmation from an operator that the router can be logged into."""
 
     class ConsolePrompt(FormPage):
-        class Config:
-            title = "Verify local authentication"
+        model_config = ConfigDict(title="Verify local authentication")
 
         info_label_1: Label = (
-            "Verify that you are able to log in to the router via the console using the admin account."  # type: ignore[assignment]
+            "Verify that you are able to log in to the router via the console using the admin account."
         )
-        info_label_2: Label = "Once this is done, press submit to continue the workflow."  # type: ignore[assignment]
+        info_label_2: Label = "Once this is done, press submit to continue the workflow."
 
     yield ConsolePrompt
 
@@ -193,11 +195,10 @@ def prompt_insert_in_ims() -> FormGenerator:
     """Wait for confirmation from an operator that the router has been inserted in IMS."""
 
     class IMSPrompt(FormPage):
-        class Config:
-            title = "Update IMS mediation server"
+        model_config = ConfigDict(title="Update IMS mediation server")
 
-        info_label_1: Label = "Insert the router into IMS."  # type: ignore[assignment]
-        info_label_2: Label = "Once this is done, press submit to continue the workflow."  # type: ignore[assignment]
+        info_label_1: Label = "Insert the router into IMS."
+        info_label_2: Label = "Once this is done, press submit to continue the workflow."
 
     yield IMSPrompt
 
@@ -209,38 +210,27 @@ def prompt_insert_in_radius(subscription: RouterInactive) -> FormGenerator:
     """Wait for confirmation from an operator that the router has been inserted in RADIUS."""
 
     class RadiusPrompt(FormPage):
-        class Config:
-            title = "Update RADIUS clients"
+        model_config = ConfigDict(title="Update RADIUS clients")
 
         info_label_1: Label = (
-            f"Please go to https://kratos.geant.org/add_radius_client and add the {subscription.router.router_fqdn}"  # type: ignore[assignment]
+            f"Please go to https://kratos.geant.org/add_radius_client and add the {subscription.router.router_fqdn}"
             f" - {subscription.router.router_lo_ipv4_address} to radius authentication"
         )
-        info_label_2: Label = "This will be functionally checked later during verification work."  # type: ignore[assignment]
+        info_label_2: Label = "This will be functionally checked later during verification work."
 
     yield RadiusPrompt
 
     return {}
 
 
-@inputstep("Prompt for new Sharepoint checklist", assignee=Assignee.SYSTEM)
-def prompt_start_new_checklist(subscription: RouterProvisioning) -> FormGenerator:
-    """Prompt the operator to start a new checklist in Sharepoint for approving this new router."""
-    oss_params = load_oss_params()
-
-    class SharepointPrompt(FormPage):
-        class Config:
-            title = "Start new checklist"
-
-        info_label_1: Label = (
-            f"Visit {oss_params.SHAREPOINT.checklist_site_url} and start a new Sharepoint checklist for "
-            f"{subscription.router.router_fqdn}."  # type: ignore[assignment]
-        )
-        info_label_2: Label = "Once this is done, click proceed to finish the workflow."  # type: ignore[assignment]
-
-    yield SharepointPrompt
+@step("Create a new SharePoint checklist")
+def create_new_sharepoint_checklist(subscription: RouterProvisioning, tt_number: str) -> State:
+    """Create a new checklist in SharePoint for approving this router."""
+    new_list_item_url = SharePointClient().add_list_item(
+        "p_router", {"Title": subscription.router.router_fqdn, "TT_NUMBER": tt_number}
+    )
 
-    return {}
+    return {"checklist_url": new_list_item_url}
 
 
 @workflow(
@@ -275,7 +265,8 @@ def create_router() -> StepList:
         >> router_is_nokia(create_netbox_device)
         >> lso_interaction(run_checks_after_base_config)
         >> set_status(SubscriptionLifecycle.PROVISIONING)
-        >> prompt_start_new_checklist
+        >> create_new_sharepoint_checklist
+        >> prompt_sharepoint_checklist_url
         >> resync
         >> done
     )
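
The hostname check moves from a v1 field validator that read kwargs["values"] to a v2 model_validator(mode="after") that sees the whole model. A standalone sketch of that cross-field pattern, with illustrative class and field names:

    from typing import Self

    from pydantic import BaseModel, model_validator


    class HostnameForm(BaseModel):
        router_site: str
        hostname: str

        @model_validator(mode="after")
        def hostname_requires_site(self) -> Self:
            # The fully validated model is available as self, so cross-field checks
            # no longer rely on the v1 values dict.
            if not self.router_site:
                msg = "Please select a site before setting the hostname."
                raise ValueError(msg)
            return self


    HostnameForm(router_site="AMS", hostname="rt1")  # passes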
diff --git a/gso/workflows/router/import_router.py b/gso/workflows/router/import_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9d9c4e47a04fc525c259217a5c16704c8e77751
--- /dev/null
+++ b/gso/workflows/router/import_router.py
@@ -0,0 +1,27 @@
+"""A modification workflow for setting a new :term:`ISIS` metric for an IP trunk."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+
+from gso.products import ProductName
+from gso.products.product_types.router import ImportedRouter, Router
+from gso.services.subscriptions import get_product_id_by_name
+
+
+@step("Create new router subscription")
+def import_router_subscription(subscription_id: UUIDstr) -> State:
+    """Take an ImportedRouter subscription, and turn it into a Router subscription."""
+    old_router = ImportedRouter.from_subscription(subscription_id)
+    new_subscription_id = get_product_id_by_name(ProductName.ROUTER)
+    new_subscription = Router.from_other_product(old_router, new_subscription_id)  # type: ignore[arg-type]
+
+    return {"subscription": new_subscription}
+
+
+@workflow("Import Router", target=Target.MODIFY, initial_input_form=wrap_modify_initial_input_form(None))
+def import_router() -> StepList:
+    """Modify an ImportedRouter subscription into a Router subscription to complete the import."""
+    return init >> store_process_subscription(Target.MODIFY) >> unsync >> import_router_subscription >> resync >> done
diff --git a/gso/workflows/router/modify_connection_strategy.py b/gso/workflows/router/modify_connection_strategy.py
index a3f5b5ae2f1f8cd0aa58d1d407d2daf28662c8a0..bc6be3ecc4b4a08605de17151ec7a14e740daf01 100644
--- a/gso/workflows/router/modify_connection_strategy.py
+++ b/gso/workflows/router/modify_connection_strategy.py
@@ -6,6 +6,7 @@ from orchestrator.types import FormGenerator, State, UUIDstr
 from orchestrator.workflow import StepList, done, init, step, workflow
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
+from pydantic import ConfigDict
 
 from gso.products.product_types.router import Router
 from gso.utils.shared_enums import ConnectionStrategy
@@ -20,8 +21,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     )
 
     class ModifyConnectionStrategyForm(FormPage):
-        class Config:
-            title = f"Modify the connection strategy of {subscription.router.router_fqdn}."
+        model_config = ConfigDict(title=f"Modify the connection strategy of {subscription.router.router_fqdn}.")
 
         connection_strategy: ConnectionStrategy = current_connection_strategy
 
diff --git a/gso/workflows/router/redeploy_base_config.py b/gso/workflows/router/redeploy_base_config.py
index ffacc0ce7f1e383ac258971a78601bf39ac6af89..0393a377abe9c5351e55f0181ee2e6a8ab30f229 100644
--- a/gso/workflows/router/redeploy_base_config.py
+++ b/gso/workflows/router/redeploy_base_config.py
@@ -17,7 +17,7 @@ def _initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
     router = Router.from_subscription(subscription_id)
 
     class RedeployBaseConfigForm(FormPage):
-        info_label: Label = f"Redeploy base config on {router.router.router_fqdn}?"  # type: ignore[assignment]
+        info_label: Label = f"Redeploy base config on {router.router.router_fqdn}?"
         tt_number: str
 
     user_input = yield RedeployBaseConfigForm
diff --git a/gso/workflows/router/terminate_router.py b/gso/workflows/router/terminate_router.py
index 0d46f9abbd7a32e2f141894a09751659d24d52fa..781e75813843db8dd4330fa746cc8a5c45015206 100644
--- a/gso/workflows/router/terminate_router.py
+++ b/gso/workflows/router/terminate_router.py
@@ -34,13 +34,13 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class TerminateForm(FormPage):
         if router.status == SubscriptionLifecycle.INITIAL:
             info_label_2: Label = (
-                "This will immediately mark the subscription as terminated, preventing any other workflows from "  # type:ignore[assignment]
+                "This will immediately mark the subscription as terminated, preventing any other workflows from "
                 "interacting with this product subscription."
             )
-            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."  # type:ignore[assignment]
+            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
         tt_number: str
-        termination_label: Label = "Please confirm whether configuration should get removed from the router."  # type: ignore[assignment]
+        termination_label: Label = "Please confirm whether configuration should get removed from the router."
         remove_configuration: bool = True
 
     user_input = yield TerminateForm
diff --git a/gso/workflows/router/update_ibgp_mesh.py b/gso/workflows/router/update_ibgp_mesh.py
index efd4dd8370258b761a37d8437e16787b07e0a1a1..0aa61cb65a09b64c5d4821b92c4fd85c92a2ef0a 100644
--- a/gso/workflows/router/update_ibgp_mesh.py
+++ b/gso/workflows/router/update_ibgp_mesh.py
@@ -10,7 +10,7 @@ from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUID
 from orchestrator.workflow import StepList, done, init, inputstep, step, workflow
 from orchestrator.workflows.steps import resync, store_process_subscription, unsync
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import root_validator
+from pydantic import ConfigDict, model_validator
 
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_types.router import Router
@@ -29,13 +29,12 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     subscription = Router.from_subscription(subscription_id)
 
     class AddBGPSessionForm(FormPage):
-        class Config:
-            title = f"Add {subscription.router.router_fqdn} to the iBGP mesh?"
+        model_config = ConfigDict(title=f"Add {subscription.router.router_fqdn} to the iBGP mesh?")
 
         tt_number: str
 
-        @root_validator(allow_reuse=True)
-        def router_has_a_trunk(cls, values: dict[str, Any]) -> dict[str, Any]:
+        @model_validator(mode="before")
+        def router_has_a_trunk(cls, data: Any) -> Any:
             terminating_trunks = get_trunks_that_terminate_on_router(
                 subscription_id, SubscriptionLifecycle.PROVISIONING
             ) + get_trunks_that_terminate_on_router(subscription_id, SubscriptionLifecycle.ACTIVE)
@@ -43,7 +42,7 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
                 msg = "Selected router does not terminate any available IP trunks."
                 raise ValueError(msg)
 
-            return values
+            return data
 
     user_input = yield AddBGPSessionForm
 
@@ -203,10 +202,9 @@ def prompt_insert_in_radius() -> FormGenerator:
     """Wait for confirmation from an operator that the router has been inserted in RADIUS."""
 
     class RADIUSPrompt(FormPage):
-        class Config:
-            title = "Please update RADIUS before continuing"
+        model_config = ConfigDict(title="Please update RADIUS before continuing")
 
-        info_label: Label = "Insert the router into RADIUS, and continue the workflow once this has been completed."  # type: ignore[assignment]
+        info_label: Label = "Insert the router into RADIUS, and continue the workflow once this has been completed."
 
     yield RADIUSPrompt
 
@@ -218,10 +216,9 @@ def prompt_radius_login() -> FormGenerator:
     """Wait for confirmation from an operator that the router can be logged into using RADIUS."""
 
     class RADIUSPrompt(FormPage):
-        class Config:
-            title = "Please check RADIUS before continuing"
+        model_config = ConfigDict(title="Please check RADIUS before continuing")
 
-        info_label: Label = "Log in to the router using RADIUS, and continue the workflow when this was successful."  # type: ignore[assignment]
+        info_label: Label = "Log in to the router using RADIUS, and continue the workflow when this was successful."
 
     yield RADIUSPrompt
 
diff --git a/gso/workflows/site/create_imported_site.py b/gso/workflows/site/create_imported_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..01a0e6f6c83ca0ac73e59426dfad639d4ad11851
--- /dev/null
+++ b/gso/workflows/site/create_imported_site.py
@@ -0,0 +1,89 @@
+"""A creation workflow for importing an existing site."""
+
+from uuid import UUID
+
+from orchestrator.forms import FormPage
+from orchestrator.targets import Target
+from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import ConfigDict
+
+from gso.products import ProductName
+from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate, SiteTier
+from gso.products.product_types.site import ImportedSiteInactive
+from gso.services import subscriptions
+from gso.services.partners import get_partner_by_name
+from gso.utils.helpers import BaseSiteValidatorModel
+
+
+@step("Create subscription")
+def create_subscription(partner: str) -> State:
+    """Create a new subscription object in the service database."""
+    partner_id = get_partner_by_name(partner)["partner_id"]
+    product_id: UUID = subscriptions.get_product_id_by_name(ProductName.IMPORTED_SITE)
+    subscription = ImportedSiteInactive.from_product_id(product_id, partner_id)
+
+    return {
+        "subscription": subscription,
+        "subscription_id": subscription.subscription_id,
+    }
+
+
+def generate_initial_input_form() -> FormGenerator:
+    """Generate a form that is filled in using information passed through the :term:`API` endpoint."""
+
+    class ImportSite(FormPage, BaseSiteValidatorModel):
+        model_config = ConfigDict(title="Import Site")
+
+    user_input = yield ImportSite
+    return user_input.dict()
+
+
+@step("Initialize subscription")
+def initialize_subscription(
+    subscription: ImportedSiteInactive,
+    site_name: str,
+    site_city: str,
+    site_country: str,
+    site_country_code: str,
+    site_latitude: LatitudeCoordinate,
+    site_longitude: LongitudeCoordinate,
+    site_bgp_community_id: int,
+    site_internal_id: int,
+    site_ts_address: str,
+    site_tier: SiteTier,
+) -> State:
+    """Initialise the subscription object with all input."""
+    subscription.site.site_name = site_name
+    subscription.site.site_city = site_city
+    subscription.site.site_country = site_country
+    subscription.site.site_country_code = site_country_code
+    subscription.site.site_latitude = site_latitude
+    subscription.site.site_longitude = site_longitude
+    subscription.site.site_bgp_community_id = site_bgp_community_id
+    subscription.site.site_internal_id = site_internal_id
+    subscription.site.site_tier = site_tier
+    subscription.site.site_ts_address = site_ts_address
+
+    subscription.description = f"Site in {site_city}, {site_country}"
+
+    return {"subscription": subscription}
+
+
+@workflow(
+    "Import Site",
+    target=Target.CREATE,
+    initial_input_form=generate_initial_input_form,
+)
+def create_imported_site() -> StepList:
+    """Workflow to import a site without provisioning it."""
+    return (
+        init
+        >> create_subscription
+        >> store_process_subscription(Target.CREATE)
+        >> initialize_subscription
+        >> set_status(SubscriptionLifecycle.ACTIVE)
+        >> resync
+        >> done
+    )
diff --git a/gso/workflows/site/create_site.py b/gso/workflows/site/create_site.py
index be9aab537c7fec01550b7f8009925b799a8c1fd9..c4290d7072e517abd2276e7369663b3685af0e61 100644
--- a/gso/workflows/site/create_site.py
+++ b/gso/workflows/site/create_site.py
@@ -6,7 +6,8 @@ from orchestrator.types import FormGenerator, State, SubscriptionLifecycle, UUID
 from orchestrator.workflow import StepList, done, init, step, workflow
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
 from orchestrator.workflows.utils import wrap_create_initial_input_form
-from pydantic_forms.core import ReadOnlyField
+from pydantic import ConfigDict
+from pydantic_forms.validators import ReadOnlyField
 
 from gso.products.product_blocks import site as site_pb
 from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate
@@ -19,20 +20,8 @@ def initial_input_form_generator(product_name: str) -> FormGenerator:
     """Get input from the operator about the new site subscription."""
 
     class CreateSiteForm(FormPage, BaseSiteValidatorModel):
-        class Config:
-            title = product_name
-
-        partner: str = ReadOnlyField("GEANT")
-        site_name: str
-        site_city: str
-        site_country: str
-        site_country_code: str
-        site_latitude: LatitudeCoordinate
-        site_longitude: LongitudeCoordinate
-        site_bgp_community_id: int
-        site_internal_id: int
-        site_tier: site_pb.SiteTier
-        site_ts_address: str
+        model_config = ConfigDict(title=product_name)
+        partner: ReadOnlyField("GEANT", default_type=str)  # type: ignore[valid-type]
 
     user_input = yield CreateSiteForm
 
diff --git a/gso/workflows/site/import_site.py b/gso/workflows/site/import_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2130354c35095082dd1fcd2444f9ad924eaf719
--- /dev/null
+++ b/gso/workflows/site/import_site.py
@@ -0,0 +1,27 @@
+"""A modification workflow for migrating an ImportedSite to a Site subscription."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+
+from gso.products import ProductName
+from gso.products.product_types.site import ImportedSite, Site
+from gso.services.subscriptions import get_product_id_by_name
+
+
+@step("Create new site subscription")
+def import_site_subscription(subscription_id: UUIDstr) -> State:
+    """Take an ImportedSite subscription, and turn it into a Site subscription."""
+    old_site = ImportedSite.from_subscription(subscription_id)
+    new_subscription_id = get_product_id_by_name(ProductName.SITE)
+    new_subscription = Site.from_other_product(old_site, new_subscription_id)  # type: ignore[arg-type]
+
+    return {"subscription": new_subscription}
+
+
+@workflow("Import Site", target=Target.MODIFY, initial_input_form=wrap_modify_initial_input_form(None))
+def import_site() -> StepList:
+    """Modify an ImportedSite subscription into a Site subscription to complete the import."""
+    return init >> store_process_subscription(Target.MODIFY) >> unsync >> import_site_subscription >> resync >> done
diff --git a/gso/workflows/site/modify_site.py b/gso/workflows/site/modify_site.py
index 15b549dbbcf7f357b5aebc28b885a998a18d9daa..ee48784e9c47757e4282c6e39353f2252edf32bb 100644
--- a/gso/workflows/site/modify_site.py
+++ b/gso/workflows/site/modify_site.py
@@ -11,12 +11,11 @@ from orchestrator.workflows.steps import (
     unsync,
 )
 from orchestrator.workflows.utils import wrap_modify_initial_input_form
-from pydantic import validator
-from pydantic.fields import ModelField
-from pydantic_forms.core import ReadOnlyField
+from pydantic import ConfigDict, field_validator
+from pydantic_core.core_schema import ValidationInfo
+from pydantic_forms.validators import ReadOnlyField
 
-from gso.products.product_blocks import site as site_pb
-from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate
+from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate, SiteTier
 from gso.products.product_types.site import Site
 from gso.utils.helpers import validate_ipv4_or_ipv6, validate_site_fields_is_unique
 
@@ -26,32 +25,38 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     subscription = Site.from_subscription(subscription_id)
 
     class ModifySiteForm(FormPage):
-        class Config:
-            title = "Modify Site"
+        model_config = ConfigDict(title="Modify Site")
 
-        site_name: str = ReadOnlyField(subscription.site.site_name)
+        site_name: ReadOnlyField(subscription.site.site_name, default_type=str)  # type: ignore[valid-type]
         site_city: str = subscription.site.site_city
-        site_country: str = ReadOnlyField(subscription.site.site_country)
-        site_country_code: str = ReadOnlyField(subscription.site.site_country_code)
+        site_country: ReadOnlyField(subscription.site.site_country, default_type=str)  # type: ignore[valid-type]
+        site_country_code: ReadOnlyField(subscription.site.site_country_code, default_type=str)  # type: ignore[valid-type]
         site_latitude: LatitudeCoordinate = subscription.site.site_latitude
         site_longitude: LongitudeCoordinate = subscription.site.site_longitude
         site_bgp_community_id: int = subscription.site.site_bgp_community_id
         site_internal_id: int = subscription.site.site_internal_id
-        site_tier: site_pb.SiteTier = ReadOnlyField(subscription.site.site_tier)
+        site_tier: ReadOnlyField(subscription.site.site_tier, default_type=SiteTier)  # type: ignore[valid-type]
         site_ts_address: str | None = subscription.site.site_ts_address
 
-        @validator("site_ts_address", allow_reuse=True)
+        @field_validator("site_ts_address", "site_internal_id", "site_bgp_community_id")
+        def field_must_be_unique(cls, value: str | int, info: ValidationInfo) -> str | int:
+            if not info.field_name:
+                msg = "Field name must be provided."
+                raise ValueError(msg)
+
+            if value and value == getattr(subscription.site, info.field_name):
+                return value
+
+            validate_site_fields_is_unique(info.field_name, value)
+
+            return value
+
+        @field_validator("site_ts_address")
         def validate_ts_address(cls, site_ts_address: str) -> str:
             if site_ts_address and site_ts_address != subscription.site.site_ts_address:
-                validate_site_fields_is_unique("site_ts_address", site_ts_address)
                 validate_ipv4_or_ipv6(site_ts_address)
-            return site_ts_address
 
-        @validator("site_internal_id", "site_bgp_community_id", allow_reuse=True)
-        def validate_unique_fields(cls, value: str, field: ModelField) -> str | int:
-            if value == getattr(subscription.site, field.name):
-                return value
-            return validate_site_fields_is_unique(field.name, value)
+            return site_ts_address
 
     user_input = yield ModifySiteForm
 
diff --git a/gso/workflows/site/terminate_site.py b/gso/workflows/site/terminate_site.py
index 96e807b47ed9c72b101ad3c6303e9b1fbd1405bc..02be6f7c76f4f01ada85ddaf91e552575eb3c18a 100644
--- a/gso/workflows/site/terminate_site.py
+++ b/gso/workflows/site/terminate_site.py
@@ -23,12 +23,12 @@ def initial_input_form_generator(subscription_id: UUIDstr) -> FormGenerator:
     class TerminateForm(FormPage):
         if site.status == SubscriptionLifecycle.INITIAL:
             info_label_2: Label = (
-                "This will immediately mark the subscription as terminated, preventing any other workflows from "  # type:ignore[assignment]
+                "This will immediately mark the subscription as terminated, preventing any other workflows from "
                 "interacting with this product subscription."
             )
-            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."  # type:ignore[assignment]
+            info_label_3: Label = "ONLY EXECUTE THIS WORKFLOW WHEN YOU ARE ABSOLUTELY SURE WHAT YOU ARE DOING."
 
-        termination_label: Label = "Are you sure you want to delete this site?"  # type: ignore[assignment]
+        termination_label: Label = "Are you sure you want to delete this site?"
 
     user_input = yield TerminateForm
     return user_input.dict()
diff --git a/gso/workflows/super_pop_switch/__init__.py b/gso/workflows/super_pop_switch/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..caed99fa16523bd4a445bf09576817912c7e03de
--- /dev/null
+++ b/gso/workflows/super_pop_switch/__init__.py
@@ -0,0 +1 @@
+"""Workflows for super PoP switches."""
diff --git a/gso/workflows/tasks/import_super_pop_switch.py b/gso/workflows/super_pop_switch/create_imported_super_pop_switch.py
similarity index 78%
rename from gso/workflows/tasks/import_super_pop_switch.py
rename to gso/workflows/super_pop_switch/create_imported_super_pop_switch.py
index 5f2796c2c2325ad439a0570db5154f57a0b435f1..f44f38a502575814cd15abc30707f0ba64af92a2 100644
--- a/gso/workflows/tasks/import_super_pop_switch.py
+++ b/gso/workflows/super_pop_switch/create_imported_super_pop_switch.py
@@ -1,30 +1,28 @@
 """A creation workflow that adds existing Super PoP switches to the coreDB."""
 
-import ipaddress
-
 from orchestrator import workflow
 from orchestrator.forms import FormPage
 from orchestrator.targets import Target
 from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
 from orchestrator.workflow import StepList, done, init, step
 from orchestrator.workflows.steps import resync, set_status, store_process_subscription
+from pydantic import ConfigDict
 
 from gso.products import ProductName
-from gso.products.product_types import super_pop_switch
-from gso.products.product_types.super_pop_switch import SuperPopSwitchInactive
+from gso.products.product_types.super_pop_switch import ImportedSuperPopSwitchInactive
 from gso.services import subscriptions
 from gso.services.partners import get_partner_by_name
 from gso.services.subscriptions import get_site_by_name
 from gso.utils.helpers import generate_fqdn
-from gso.utils.shared_enums import PortNumber, Vendor
+from gso.utils.shared_enums import IPv4AddressType, PortNumber, Vendor
 
 
 @step("Create subscription")
 def create_subscription(partner: str) -> State:
     """Create a new subscription object."""
     partner_id = get_partner_by_name(partner)["partner_id"]
-    product_id = subscriptions.get_product_id_by_name(ProductName.SUPER_POP_SWITCH)
-    subscription = SuperPopSwitchInactive.from_product_id(product_id, partner_id)
+    product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_SUPER_POP_SWITCH)
+    subscription = ImportedSuperPopSwitchInactive.from_product_id(product_id, partner_id)
 
     return {
         "subscription": subscription,
@@ -36,14 +34,13 @@ def initial_input_form_generator() -> FormGenerator:
     """Generate a form that is filled in using information passed through the :term:`API` endpoint."""
 
     class ImportSuperPopSwitch(FormPage):
-        class Config:
-            title = "Import a Super PoP switch"
+        model_config = ConfigDict(title="Import a Super PoP switch")
 
         partner: str
         super_pop_switch_site: str
         hostname: str
         super_pop_switch_ts_port: PortNumber
-        super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address
+        super_pop_switch_mgmt_ipv4_address: IPv4AddressType
 
     user_input = yield ImportSuperPopSwitch
 
@@ -52,11 +49,11 @@ def initial_input_form_generator() -> FormGenerator:
 
 @step("Initialize subscription")
 def initialize_subscription(
-    subscription: SuperPopSwitchInactive,
+    subscription: ImportedSuperPopSwitchInactive,
     hostname: str,
     super_pop_switch_ts_port: PortNumber,
     super_pop_switch_site: str,
-    super_pop_switch_mgmt_ipv4_address: ipaddress.IPv4Address | None = None,
+    super_pop_switch_mgmt_ipv4_address: IPv4AddressType | None = None,
 ) -> State:
     """Initialise the Super PoP switch subscription using input data."""
     subscription.super_pop_switch.super_pop_switch_ts_port = super_pop_switch_ts_port
@@ -68,10 +65,6 @@ def initialize_subscription(
     subscription.super_pop_switch.super_pop_switch_mgmt_ipv4_address = super_pop_switch_mgmt_ipv4_address
     subscription.super_pop_switch.vendor = Vendor.JUNIPER
 
-    subscription = super_pop_switch.SuperPopSwitchProvisioning.from_other_lifecycle(
-        subscription, SubscriptionLifecycle.PROVISIONING
-    )
-
     return {"subscription": subscription}
 
 
@@ -80,7 +73,7 @@ def initialize_subscription(
     initial_input_form=initial_input_form_generator,
     target=Target.CREATE,
 )
-def import_super_pop_switch() -> StepList:
+def create_imported_super_pop_switch() -> StepList:
     """Import a Super PoP switch without provisioning it."""
     return (
         init
diff --git a/gso/workflows/super_pop_switch/import_super_pop_switch.py b/gso/workflows/super_pop_switch/import_super_pop_switch.py
new file mode 100644
index 0000000000000000000000000000000000000000..6372f69a70eb38da415703efdc73c1bcb6efe32e
--- /dev/null
+++ b/gso/workflows/super_pop_switch/import_super_pop_switch.py
@@ -0,0 +1,34 @@
+"""A modification workflow for migrating an ImportedSuperPopSwitch to a SuperPopSwitch subscription."""
+
+from orchestrator.targets import Target
+from orchestrator.types import State, UUIDstr
+from orchestrator.workflow import StepList, done, init, step, workflow
+from orchestrator.workflows.steps import resync, store_process_subscription, unsync
+from orchestrator.workflows.utils import wrap_modify_initial_input_form
+
+from gso.products import ProductName
+from gso.products.product_types.super_pop_switch import ImportedSuperPopSwitch, SuperPopSwitch
+from gso.services.subscriptions import get_product_id_by_name
+
+
+@step("Create new super_pop_switch subscription")
+def import_super_pop_switch_subscription(subscription_id: UUIDstr) -> State:
+    """Take an ImportedSuperPopSwitch subscription, and turn it into a SuperPopSwitch subscription."""
+    old_super_pop_switch = ImportedSuperPopSwitch.from_subscription(subscription_id)
+    new_subscription_id = get_product_id_by_name(ProductName.SUPER_POP_SWITCH)
+    new_subscription = SuperPopSwitch.from_other_product(old_super_pop_switch, new_subscription_id)  # type: ignore[arg-type]
+
+    return {"subscription": new_subscription}
+
+
+@workflow("Import SuperPopSwitch", target=Target.MODIFY, initial_input_form=wrap_modify_initial_input_form(None))
+def import_super_pop_switch() -> StepList:
+    """Modify an ImportedSuperPopSwitch subscription into a SuperPopSwitch subscription to complete the import."""
+    return (
+        init
+        >> store_process_subscription(Target.MODIFY)
+        >> unsync
+        >> import_super_pop_switch_subscription
+        >> resync
+        >> done
+    )
diff --git a/gso/workflows/tasks/__init__.py b/gso/workflows/tasks/__init__.py
deleted file mode 100644
index 3f8c21608af4db6181ae4cd184959737898977bf..0000000000000000000000000000000000000000
--- a/gso/workflows/tasks/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Task workflows that are either started by an :term:`API` endpoint, or by one of the set schedules."""
diff --git a/gso/workflows/tasks/import_site.py b/gso/workflows/tasks/import_site.py
deleted file mode 100644
index ff49808a5a86d1e73c6a741d74a75c4c7c233471..0000000000000000000000000000000000000000
--- a/gso/workflows/tasks/import_site.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""A creation workflow for importing an existing site."""
-
-from uuid import UUID
-
-from orchestrator.forms import FormPage
-from orchestrator.targets import Target
-from orchestrator.types import FormGenerator, State, SubscriptionLifecycle
-from orchestrator.workflow import StepList, done, init, step, workflow
-from orchestrator.workflows.steps import resync, set_status, store_process_subscription
-
-from gso.products import ProductName
-from gso.products.product_blocks.site import SiteTier
-from gso.products.product_types.site import SiteInactive
-from gso.services import subscriptions
-from gso.services.partners import get_partner_by_name
-from gso.workflows.site.create_site import initialize_subscription
-
-
-@step("Create subscription")
-def create_subscription(partner: str) -> State:
-    """Create a new subscription object in the service database.
-
-    FIXME: all attributes passed by the input form appear to be unused
-    """
-    partner_id = get_partner_by_name(partner)["partner_id"]
-    product_id: UUID = subscriptions.get_product_id_by_name(ProductName.SITE)
-    subscription = SiteInactive.from_product_id(product_id, partner_id)
-
-    return {
-        "subscription": subscription,
-        "subscription_id": subscription.subscription_id,
-    }
-
-
-def generate_initial_input_form() -> FormGenerator:
-    """Generate a form that is filled in using information passed through the :term:`API` endpoint."""
-
-    class ImportSite(FormPage):
-        class Config:
-            title = "Import Site"
-
-        site_name: str
-        site_city: str
-        site_country: str
-        site_country_code: str
-        site_latitude: float
-        site_longitude: float
-        site_bgp_community_id: int
-        site_internal_id: int
-        site_tier: SiteTier
-        site_ts_address: str
-        partner: str
-
-    user_input = yield ImportSite
-    return user_input.dict()
-
-
-@workflow(
-    "Import Site",
-    target=Target.CREATE,
-    initial_input_form=generate_initial_input_form,
-)
-def import_site() -> StepList:
-    """Workflow to import a site without provisioning it."""
-    return (
-        init
-        >> create_subscription
-        >> store_process_subscription(Target.CREATE)
-        >> initialize_subscription
-        >> set_status(SubscriptionLifecycle.ACTIVE)
-        >> resync
-        >> done
-    )
diff --git a/pyproject.toml b/pyproject.toml
index 5162b2ca226c9a3181ee6942ec7153a57d909b25..376371b7d41c286cd416a176fc760ce25cd1a663 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -104,3 +104,15 @@ ban-relative-imports = "all"
 [tool.ruff.isort]
 known-third-party = ["pydantic", "migrations"]
 known-first-party = ["test", "docs"]
+
+[tool.pytest.ini_options]
+markers = [
+    "noautofixt"
+]
+filterwarnings = [
+    "ignore",
+    "default:::gso",
+]
+[tool.ruff.lint.per-file-ignores]
+"test/*" = ["PLR0917", "S101", "D104", "D105", "D103", "D100", "ARG001", "D102", "PLR2004", "D101", "D106", "D107", "PLR0914", "PLC0415", "PLC2701"]
+"gso/workflows/*" = ["PLR0917", "PLR0914"]
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 73bb46ff0e83e703d78bca7c58a728f95e7fa696..3463f0c52bfebe389377947e0ae1d39065e0fdc0 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,22 +1,24 @@
-orchestrator-core==1.3.4
+orchestrator-core==2.2.1
 requests==2.31.0
 infoblox-client~=0.6.0
-pycountry==22.3.5
-pynetbox==7.2.0
-celery-redbeat==2.1.1
-celery==5.3.4
+pycountry==23.12.11
+pynetbox==7.3.3
+celery-redbeat==2.2.0
+celery==5.3.6
+azure-identity==1.16.0
+msgraph-sdk==1.2.0
 
 # Test and linting dependencies
 celery-stubs==0.1.3
-types-requests==2.31.0.1
-types-PyYAML==6.0.12.12
-pytest==7.4.3
-faker==19.13.0
-responses==0.24.0
-mypy==1.6.1
-ruff==0.1.5
+types-requests==2.31.0.20240406
+types-PyYAML==6.0.12.20240311
+pytest==8.1.1
+faker==24.8.0
+responses==0.25.0
+mypy==1.9.0
+ruff==0.3.5
 sphinx==7.2.6
-sphinx-rtd-theme==1.3.0
+sphinx-rtd-theme==2.0.0
 urllib3_mock==0.3.3
-pytest-asyncio==0.23.3
-pre-commit~=3.6.0
+pytest-asyncio==0.23.6
+pre-commit~=3.7.0
diff --git a/setup.py b/setup.py
index 2fcf9799777b7fc3a7bbd56c99307d3818fbfe7e..e856a7a87089e7b5618cf8d882bc3ac84cdbe30a 100644
--- a/setup.py
+++ b/setup.py
@@ -1,21 +1,25 @@
+"""Setup script for the GÉANT Service Orchestrator."""
+
 from setuptools import find_packages, setup
 
 setup(
     name="geant-service-orchestrator",
-    version="1.5",
+    version="2.0",
     author="GÉANT Orchestration and Automation Team",
     author_email="goat@geant.org",
     description="GÉANT Service Orchestrator",
     url="https://gitlab.software.geant.org/goat/gap/geant-service-orchestrator",
     packages=find_packages(),
     install_requires=[
-        "orchestrator-core==1.3.4",
+        "orchestrator-core==2.2.1",
         "requests==2.31.0",
         "infoblox-client~=0.6.0",
-        "pycountry==22.3.5",
-        "pynetbox==7.2.0",
-        "celery-redbeat==2.1.1",
-        "celery==5.3.4",
+        "pycountry==23.12.11",
+        "pynetbox==7.3.3",
+        "celery-redbeat==2.2.0",
+        "celery==5.3.6",
+        "azure-identity==1.16.0",
+        "msgraph-sdk==1.2.0",
     ],
     include_package_data=True,
 )
diff --git a/test/api/test_imports.py b/test/api/test_imports.py
index f7b58f723eff687b92337281e14af11d5688cd8b..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/test/api/test_imports.py
+++ b/test/api/test_imports.py
@@ -1,422 +0,0 @@
-from unittest.mock import patch
-from uuid import uuid4
-
-import pytest
-from orchestrator.db import SubscriptionTable
-from orchestrator.services import subscriptions
-
-from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
-from gso.products.product_blocks.router import RouterRole
-from gso.products.product_blocks.site import SiteTier
-from gso.utils.helpers import iso_from_ipv4
-from gso.utils.shared_enums import Vendor
-
-SITE_IMPORT_ENDPOINT = "/api/v1/imports/sites"
-ROUTER_IMPORT_ENDPOINT = "/api/v1/imports/routers"
-IPTRUNK_IMPORT_API_URL = "/api/v1/imports/iptrunks"
-SUPER_POP_SWITCH_IMPORT_API_URL = "/api/v1/imports/super-pop-switches"
-OFFICE_ROUTER_IMPORT_API_URL = "/api/v1/imports/office-routers"
-
-
-@pytest.fixture()
-def iptrunk_data(nokia_router_subscription_factory, faker):
-    router_side_a = nokia_router_subscription_factory()
-    router_side_b = nokia_router_subscription_factory()
-    return {
-        "partner": "GEANT",
-        "geant_s_sid": faker.geant_sid(),
-        "iptrunk_type": IptrunkType.DARK_FIBER,
-        "iptrunk_description": faker.sentence(),
-        "iptrunk_speed": PhysicalPortCapacity.HUNDRED_GIGABIT_PER_SECOND,
-        "iptrunk_minimum_links": 5,
-        "iptrunk_isis_metric": 500,
-        "side_a_node_id": router_side_a,
-        "side_a_ae_iface": faker.network_interface(),
-        "side_a_ae_geant_a_sid": faker.geant_sid(),
-        "side_a_ae_members": [
-            {
-                "interface_name": faker.network_interface(),
-                "interface_description": faker.sentence(),
-            }
-            for _ in range(5)
-        ],
-        "side_b_node_id": router_side_b,
-        "side_b_ae_iface": faker.network_interface(),
-        "side_b_ae_geant_a_sid": faker.geant_sid(),
-        "side_b_ae_members": [
-            {
-                "interface_name": faker.network_interface(),
-                "interface_description": faker.sentence(),
-            }
-            for _ in range(5)
-        ],
-        "iptrunk_ipv4_network": str(faker.ipv4(network=True)),
-        "iptrunk_ipv6_network": str(faker.ipv6(network=True)),
-    }
-
-
-@pytest.fixture()
-def mock_routers(iptrunk_data):
-    with patch("gso.services.subscriptions.get_active_router_subscriptions") as mock_get_active_router_subscriptions:
-
-        def _active_router_subscriptions(*args, **kwargs):
-            if kwargs["includes"] == ["subscription_id", "description"]:
-                return [
-                    {
-                        "subscription_id": iptrunk_data["side_a_node_id"],
-                        "description": "iptrunk_sideA_node_id description",
-                    },
-                    {
-                        "subscription_id": iptrunk_data["side_b_node_id"],
-                        "description": "iptrunk_sideB_node_id description",
-                    },
-                    {
-                        "subscription_id": str(uuid4()),
-                        "description": "random description",
-                    },
-                ]
-            return [
-                {"subscription_id": iptrunk_data["side_a_node_id"]},
-                {"subscription_id": iptrunk_data["side_b_node_id"]},
-                {"subscription_id": str(uuid4())},
-            ]
-
-        mock_get_active_router_subscriptions.side_effect = _active_router_subscriptions
-        yield mock_get_active_router_subscriptions
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_successful_with_mocked_process(mock_start_process, test_client, mock_routers, iptrunk_data):
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 201
-    assert response.json()["pid"] == "123e4567-e89b-12d3-a456-426655440000"
-
-
-@pytest.fixture()
-def site_data(faker):
-    return {
-        "site_name": faker.site_name(),
-        "site_city": faker.city(),
-        "site_country": faker.country(),
-        "site_country_code": faker.country_code(),
-        "site_latitude": float(faker.latitude()),
-        "site_longitude": float(faker.longitude()),
-        "site_bgp_community_id": faker.pyint(),
-        "site_internal_id": faker.pyint(),
-        "site_tier": SiteTier.TIER1,
-        "site_ts_address": faker.ipv4(),
-        "partner": "GEANT",
-    }
-
-
-@pytest.fixture()
-def router_data(faker, site_data):
-    mock_ipv4 = faker.ipv4()
-    return {
-        "hostname": "127.0.0.1",
-        "router_role": RouterRole.PE,
-        "router_vendor": Vendor.JUNIPER,
-        "router_site": site_data["site_name"],
-        "ts_port": 1234,
-        "partner": "GEANT",
-        "router_lo_ipv4_address": mock_ipv4,
-        "router_lo_ipv6_address": faker.ipv6(),
-        "router_lo_iso_address": iso_from_ipv4(mock_ipv4),
-    }
-
-
-@pytest.fixture()
-def super_pop_switch_data(faker, site_data):
-    mock_ipv4 = faker.ipv4()
-    return {
-        "hostname": "127.0.0.1",
-        "super_pop_switch_site": site_data["site_name"],
-        "super_pop_switch_ts_port": 1234,
-        "partner": "GEANT",
-        "super_pop_switch_mgmt_ipv4_address": mock_ipv4,
-    }
-
-
-@pytest.fixture()
-def office_router_data(faker, site_data):
-    return {
-        "office_router_fqdn": "127.0.0.1",
-        "office_router_site": site_data["site_name"],
-        "office_router_ts_port": 1234,
-        "partner": "GEANT",
-        "office_router_lo_ipv4_address": faker.ipv4(),
-        "office_router_lo_ipv6_address": faker.ipv6(),
-    }
-
-
-def test_import_site_endpoint(test_client, site_data):
-    assert SubscriptionTable.query.all() == []
-    # Post data to the endpoint
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert "detail" in response.json()
-    assert "pid" in response.json()
-    subscription = subscriptions.retrieve_subscription_by_subscription_instance_value(
-        resource_type="site_name",
-        value=site_data["site_name"],
-    )
-    assert subscription is not None
-
-
-def test_import_site_endpoint_with_existing_site(test_client, site_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert SubscriptionTable.query.count() == 1
-    assert response.status_code == 201
-
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 422
-    assert SubscriptionTable.query.count() == 1
-
-
-def test_import_site_endpoint_with_invalid_data(test_client, site_data):
-    # invalid data, missing site_latitude and invalid site_longitude
-    site_data.pop("site_latitude")
-    site_data["site_longitude"] = "invalid"
-    assert SubscriptionTable.query.count() == 0
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 422
-    assert SubscriptionTable.query.count() == 0
-    response = response.json()
-    assert response["detail"][0]["loc"] == ["body", "site_latitude"]
-    assert response["detail"][0]["msg"] == "field required"
-    assert response["detail"][1]["loc"] == ["body", "site_longitude"]
-    assert response["detail"][1]["msg"] == "value is not a valid float"
-
-
-def test_import_router_endpoint(test_client, site_data, router_data):
-    # Create a site first
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    response = test_client.post(ROUTER_IMPORT_ENDPOINT, json=router_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 2
-
-
-def test_import_router_endpoint_with_invalid_data(test_client, site_data, router_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    # invalid data, missing hostname and invalid router_lo_ipv6_address
-    router_data.pop("hostname")
-    router_data["router_lo_ipv6_address"] = "invalid"
-    response = test_client.post(ROUTER_IMPORT_ENDPOINT, json=router_data)
-    assert response.status_code == 422
-    assert SubscriptionTable.query.count() == 1
-    response = response.json()
-    assert response["detail"][0]["loc"] == ["body", "hostname"]
-    assert response["detail"][0]["msg"] == "field required"
-    assert response["detail"][1]["loc"] == ["body", "router_lo_ipv6_address"]
-    assert response["detail"][1]["msg"] == "value is not a valid IPv6 address"
-
-
-def test_import_iptrunk_successful_with_real_process(test_client, mock_routers, iptrunk_data):
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-    assert response.status_code == 201
-
-    response = response.json()
-    assert "detail" in response
-    assert "pid" in response
-
-    subscription = subscriptions.retrieve_subscription_by_subscription_instance_value(
-        resource_type="geant_s_sid",
-        value=iptrunk_data["geant_s_sid"],
-    )
-    assert subscription is not None
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_invalid_partner(mock_start_process, test_client, mock_routers, iptrunk_data):
-    iptrunk_data["partner"] = "not_existing_partner"
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 422
-    assert response.json() == {
-        "detail": [
-            {
-                "loc": ["body", "partner"],
-                "msg": "partner not_existing_partner not found",
-                "type": "value_error",
-            },
-        ],
-    }
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, test_client, iptrunk_data):
-    iptrunk_data["side_a_node_id"] = "NOT FOUND"
-    iptrunk_data["side_b_node_id"] = "NOT FOUND"
-
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 422
-    assert response.json() == {
-        "detail": [
-            {
-                "loc": ["body", "side_a_node_id"],
-                "msg": f"Router {iptrunk_data['side_a_node_id']} not found",
-                "type": "value_error",
-            },
-            {
-                "loc": ["body", "side_b_node_id"],
-                "msg": f"Router {iptrunk_data['side_b_node_id']} not found",
-                "type": "value_error",
-            },
-        ],
-    }
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_non_unique_members_side_a(mock_start_process, test_client, mock_routers, iptrunk_data, faker):
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-
-    repeat_interface_a = {
-        "interface_name": faker.network_interface(),
-        "interface_description": faker.sentence(),
-    }
-    repeat_interface_b = {
-        "interface_name": faker.network_interface(),
-        "interface_description": faker.sentence(),
-    }
-    iptrunk_data["side_a_ae_members"] = [repeat_interface_a for _ in range(5)]
-    iptrunk_data["side_b_ae_members"] = [repeat_interface_b for _ in range(5)]
-
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 422
-    assert response.json() == {
-        "detail": [
-            {
-                "loc": ["body", "side_a_ae_members"],
-                "msg": "Items must be unique",
-                "type": "value_error",
-            },
-            {
-                "loc": ["body", "side_b_ae_members"],
-                "msg": "Items must be unique",
-                "type": "value_error",
-            },
-            {
-                "loc": ["body", "__root__"],
-                "msg": "Side A members should be at least 5 (iptrunk_minimum_links)",
-                "type": "value_error",
-            },
-        ],
-    }
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_fails_on_side_a_member_count_mismatch(
-    mock_start_process,
-    test_client,
-    mock_routers,
-    iptrunk_data,
-):
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-
-    iptrunk_data["side_a_ae_members"].remove(iptrunk_data["side_a_ae_members"][0])
-
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 422
-    assert response.json() == {
-        "detail": [
-            {
-                "loc": ["body", "__root__"],
-                "msg": "Side A members should be at least 5 (iptrunk_minimum_links)",
-                "type": "value_error",
-            },
-        ],
-    }
-
-
-@patch("gso.api.v1.imports._start_process")
-def test_import_iptrunk_fails_on_side_a_and_b_members_mismatch(
-    mock_start_process,
-    test_client,
-    iptrunk_data,
-    mock_routers,
-):
-    mock_start_process.return_value = "123e4567-e89b-12d3-a456-426655440000"
-
-    iptrunk_data["side_b_ae_members"].remove(iptrunk_data["side_b_ae_members"][0])
-
-    response = test_client.post(IPTRUNK_IMPORT_API_URL, json=iptrunk_data)
-
-    assert response.status_code == 422
-    assert response.json() == {
-        "detail": [
-            {
-                "loc": ["body", "__root__"],
-                "msg": "Mismatch between Side A and B members",
-                "type": "value_error",
-            },
-        ],
-    }
-
-
-def test_import_super_pop_switch_endpoint(test_client, site_data, super_pop_switch_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    response = test_client.post(SUPER_POP_SWITCH_IMPORT_API_URL, json=super_pop_switch_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 2
-
-
-def test_import_super_pop_switch_endpoint_with_invalid_data(test_client, site_data, super_pop_switch_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    # invalid data, missing hostname and invalid mgmt_ipv4_address
-    super_pop_switch_data.pop("hostname")
-    super_pop_switch_data["super_pop_switch_mgmt_ipv4_address"] = "invalid"
-    response = test_client.post(SUPER_POP_SWITCH_IMPORT_API_URL, json=super_pop_switch_data)
-    assert response.status_code == 422
-    assert SubscriptionTable.query.count() == 1
-    response = response.json()
-    assert response["detail"][0]["loc"] == ["body", "hostname"]
-    assert response["detail"][0]["msg"] == "field required"
-    assert response["detail"][1]["loc"] == ["body", "super_pop_switch_mgmt_ipv4_address"]
-    assert response["detail"][1]["msg"] == "value is not a valid IPv4 address"
-
-
-def test_import_office_router_endpoint(test_client, site_data, office_router_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    response = test_client.post(OFFICE_ROUTER_IMPORT_API_URL, json=office_router_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 2
-
-
-def test_import_office_router_endpoint_with_invalid_data(test_client, site_data, office_router_data):
-    response = test_client.post(SITE_IMPORT_ENDPOINT, json=site_data)
-    assert response.status_code == 201
-    assert SubscriptionTable.query.count() == 1
-
-    # invalid data, missing FQDN and invalid lo_ipv6_address
-    office_router_data.pop("office_router_fqdn")
-    office_router_data["office_router_lo_ipv6_address"] = "invalid"
-    response = test_client.post(OFFICE_ROUTER_IMPORT_API_URL, json=office_router_data)
-    assert response.status_code == 422
-    assert SubscriptionTable.query.count() == 1
-    response = response.json()
-    assert response["detail"][0]["loc"] == ["body", "office_router_fqdn"]
-    assert response["detail"][0]["msg"] == "field required"
-    assert response["detail"][1]["loc"] == ["body", "office_router_lo_ipv6_address"]
-    assert response["detail"][1]["msg"] == "value is not a valid IPv6 address"
diff --git a/test/api/test_processes.py b/test/api/test_processes.py
index 671218400c022a96eaa1e119be60db4fa5ec0d7b..f56fe52640d587928531f5171712f98ca57f8f1e 100644
--- a/test/api/test_processes.py
+++ b/test/api/test_processes.py
@@ -11,9 +11,11 @@ from orchestrator.workflow import ProcessStatus
 
 
 @pytest.fixture()
-def create_process(faker, nokia_router_subscription_factory):
+def create_process(test_workflow, nokia_router_subscription_factory):
     process_id = uuid4()
-    process = ProcessTable(process_id=process_id, workflow_name=faker.sentence(), last_status=ProcessStatus.SUSPENDED)
+    process = ProcessTable(
+        process_id=process_id, workflow_id=test_workflow.workflow_id, last_status=ProcessStatus.SUSPENDED
+    )
     subscription = nokia_router_subscription_factory()
     process_subscription = ProcessSubscriptionTable(process_id=process_id, subscription_id=subscription)
 
diff --git a/test/auth/test_oidc_policy_helper.py b/test/auth/test_oidc_policy_helper.py
index 14af9f6b4ee55c5025aaef64414017f85a8f7513..46b934caad20f93ea55e1f66cd7fa3b9d6694d20 100644
--- a/test/auth/test_oidc_policy_helper.py
+++ b/test/auth/test_oidc_policy_helper.py
@@ -57,7 +57,7 @@ def oidc_user(mock_openid_config):
         resource_server_id="resource_server",
         resource_server_secret="secret",  # noqa: S106
     )
-    user.openid_config = OIDCConfig.parse_obj(mock_openid_config)
+    user.openid_config = OIDCConfig.model_validate(mock_openid_config)
     return user
 
 
@@ -266,9 +266,9 @@ async def test_oidc_user_call_no_token(oidc_user, mock_request):
         patch("httpx.AsyncClient.get", new_callable=MagicMock) as mock_get,
     ):
         mock_post.return_value = MagicMock(status_code=200, json=lambda: {"active": False})
-        mock_get.return_value = MagicMock(status_code=200, json=lambda: {})
+        mock_get.return_value = MagicMock(status_code=200, json=dict)
 
-        result = await oidc_user.__call__(mock_request)
+        result = await oidc_user.__call__(mock_request)  # noqa: PLC2801
 
     assert result is None
 
@@ -281,7 +281,7 @@ async def test_oidc_user_call_token_from_request(oidc_user, mock_request, mock_a
     oidc_user.introspect_token = AsyncMock(return_value={"active": True})
     oidc_user.userinfo = AsyncMock(return_value=OIDCUserModel({"sub": "123", "name": "John Doe"}))
 
-    result = await oidc_user.__call__(mock_request)
+    result = await oidc_user.__call__(mock_request)  # noqa: PLC2801
 
     assert isinstance(result, OIDCUserModel)
     assert result["sub"] == "123"
diff --git a/test/cli/__init__.py b/test/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/api/conftest.py b/test/cli/conftest.py
similarity index 100%
rename from test/api/conftest.py
rename to test/cli/conftest.py
diff --git a/test/cli/test_imports.py b/test/cli/test_imports.py
new file mode 100644
index 0000000000000000000000000000000000000000..3cd45db7d4715f046a0ab885b6b146f3b56f9db4
--- /dev/null
+++ b/test/cli/test_imports.py
@@ -0,0 +1,341 @@
+import json
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+
+from gso.cli.imports import (
+    import_iptrunks,
+    import_office_routers,
+    import_routers,
+    import_sites,
+    import_super_pop_switches,
+)
+from gso.products import Router, Site
+from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
+from gso.products.product_blocks.router import RouterRole
+from gso.products.product_blocks.site import SiteTier
+from gso.utils.helpers import iso_from_ipv4
+from gso.utils.shared_enums import Vendor
+
+
+##############
+#  FIXTURES  #
+##############
+@pytest.fixture()
+def temp_file(tmp_path) -> Path:
+    return tmp_path / "data.json"
+
+
+@pytest.fixture()
+def iptrunk_data(temp_file, nokia_router_subscription_factory, faker):
+    def _iptrunk_data(
+        *,
+        ipv4_network=None,
+        ipv6_network=None,
+        min_links=None,
+        isis_metric=None,
+        side_a_node=None,
+        side_b_node=None,
+        side_a_members=None,
+        side_b_members=None,
+        side_a_ae_name=None,
+        side_b_ae_name=None,
+    ):
+        router_side_a = nokia_router_subscription_factory()
+        router_side_b = nokia_router_subscription_factory()
+        ipv4_network = ipv4_network or str(faker.ipv4_network(max_subnet=31))
+        ipv6_network = ipv6_network or str(faker.ipv6_network(max_subnet=126))
+
+        iptrunk_data = {
+            "id": faker.geant_sid(),
+            "config": {
+                "common": {
+                    "link_speed": PhysicalPortCapacity.HUNDRED_GIGABIT_PER_SECOND,
+                    "minimum_links": min_links or 3,
+                    "isis_metric": isis_metric or 500,
+                    "type": IptrunkType.DARK_FIBER,
+                },
+                "nodeA": {
+                    "name": side_a_node or Router.from_subscription(router_side_a).router.router_fqdn,
+                    "ae_name": side_a_ae_name or faker.network_interface(),
+                    "port_sid": faker.geant_sid(),
+                    "members": side_a_members
+                    or [
+                        {
+                            "interface_name": faker.network_interface(),
+                            "interface_description": faker.sentence(),
+                        }
+                        for _ in range(5)
+                    ],
+                    "ipv4_address": ipv4_network,
+                    "ipv6_address": ipv6_network,
+                },
+                "nodeB": {
+                    "name": side_b_node or Router.from_subscription(router_side_b).router.router_fqdn,
+                    "ae_name": side_b_ae_name or faker.network_interface(),
+                    "port_sid": faker.geant_sid(),
+                    "members": side_b_members
+                    or [
+                        {
+                            "interface_name": faker.network_interface(),
+                            "interface_description": faker.sentence(),
+                        }
+                        for _ in range(5)
+                    ],
+                    "ipv4_address": ipv4_network,
+                    "ipv6_address": ipv6_network,
+                },
+            },
+        }
+
+        temp_file.write_text(json.dumps([iptrunk_data]))
+        return {"path": str(temp_file), "data": iptrunk_data}
+
+    return _iptrunk_data
+
+
+@pytest.fixture()
+def site_data(faker, temp_file):
+    def _site_data(**kwargs):
+        site_data = {
+            "site_name": faker.site_name(),
+            "site_city": faker.city(),
+            "site_country": faker.country(),
+            "site_country_code": faker.country_code(),
+            "site_latitude": str(faker.latitude()),
+            "site_longitude": str(faker.longitude()),
+            "site_bgp_community_id": faker.pyint(),
+            "site_internal_id": faker.pyint(),
+            "site_tier": SiteTier.TIER1,
+            "site_ts_address": faker.ipv4(),
+        }
+        site_data.update(**kwargs)
+
+        temp_file.write_text(json.dumps([site_data]))
+        return {"path": str(temp_file), "data": site_data}
+
+    return _site_data
+
+
+@pytest.fixture()
+def router_data(temp_file, faker, site_subscription_factory):
+    def _router_data(**kwargs):
+        mock_ipv4 = faker.ipv4()
+        router_data = {
+            "router_site": Site.from_subscription(site_subscription_factory()).site.site_name,
+            "hostname": str(faker.ipv4()),
+            "ts_port": faker.port_number(is_user=True),
+            "router_role": RouterRole.PE,
+            "router_vendor": Vendor.JUNIPER,
+            "router_lo_ipv4_address": mock_ipv4,
+            "router_lo_ipv6_address": str(faker.ipv6()),
+            "router_lo_iso_address": iso_from_ipv4(mock_ipv4),
+        }
+        router_data.update(**kwargs)
+
+        temp_file.write_text(json.dumps([router_data]))
+        return {"path": str(temp_file), "data": router_data}
+
+    return _router_data
+
+
+@pytest.fixture()
+def super_pop_switch_data(temp_file, faker, site_subscription_factory):
+    def _super_pop_switch_data(**kwargs):
+        super_pop_switch_data = {
+            "hostname": str(faker.ipv4()),
+            "super_pop_switch_site": Site.from_subscription(site_subscription_factory()).site.site_name,
+            "super_pop_switch_ts_port": faker.port_number(is_user=True),
+            "super_pop_switch_mgmt_ipv4_address": str(faker.ipv4()),
+        }
+        super_pop_switch_data.update(**kwargs)
+
+        temp_file.write_text(json.dumps([super_pop_switch_data]))
+        return {"path": str(temp_file), "data": super_pop_switch_data}
+
+    return _super_pop_switch_data
+
+
+@pytest.fixture()
+def office_router_data(temp_file, faker, site_subscription_factory):
+    def _office_router_data(**kwargs):
+        office_router_data = {
+            "office_router_fqdn": faker.domain_name(levels=4),
+            "office_router_site": Site.from_subscription(site_subscription_factory()).site.site_name,
+            "office_router_ts_port": faker.port_number(is_user=True),
+            "office_router_lo_ipv4_address": str(faker.ipv4()),
+            "office_router_lo_ipv6_address": str(faker.ipv6()),
+        }
+        office_router_data.update(**kwargs)
+
+        temp_file.write_text(json.dumps([office_router_data]))
+        return {"path": str(temp_file), "data": office_router_data}
+
+    return _office_router_data
+
+
+###########
+#  TESTS  #
+###########
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_iptrunk_success(mock_start_process, iptrunk_data):
+    import_iptrunks(iptrunk_data()["path"])
+    assert mock_start_process.call_count == 1
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_site_success(mock_start_process, site_data):
+    import_sites(site_data()["path"])
+    assert mock_start_process.call_count == 1
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_site_twice(mock_start_process, site_data, site_subscription_factory, capfd):
+    site_import_data = site_data()
+    #  Create an initial site
+    site_subscription_factory(
+        site_bgp_community_id=site_import_data["data"]["site_bgp_community_id"],
+        site_internal_id=site_import_data["data"]["site_internal_id"],
+        site_ts_address=site_import_data["data"]["site_ts_address"],
+        site_name=site_import_data["data"]["site_name"],
+    )
+
+    #  Second identical import should print ValidationError to stdout
+    import_sites(site_import_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+
+    assert "Validation error: 4 validation errors for SiteImportModel" in captured_output
+    assert "Value error, site_bgp_community_id must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_internal_id must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_ts_address must be unique [type=value_error, input_value=" in captured_output
+    assert "Value error, site_name must be unique [type=value_error, input_value=" in captured_output
+
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_site_with_invalid_data(mock_start_process, site_data, capfd):
+    # invalid data, missing site_latitude and invalid site_longitude
+    incorrect_site_data = site_data(site_latitude=None, site_longitude="broken")
+
+    import_sites(incorrect_site_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+    assert "Validation error: 2 validation errors for SiteImportModel" in captured_output
+    assert (
+        """site_latitude
+  Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]"""
+        in captured_output
+    )
+    assert (
+        """site_longitude
+  Value error, Invalid longitude coordinate. Valid examples: '40.7128', '-74.0060', '180', '-180', '0'. [type=value_e"""
+        in captured_output
+    )
+
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_router_success(mock_start_process, site_subscription_factory, router_data):
+    import_routers(router_data()["path"])
+    assert mock_start_process.call_count == 1
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_router_with_invalid_data(mock_start_process, router_data, capfd):
+    broken_data = router_data(hostname="", router_lo_ipv6_address="Not an IP address")
+    import_routers(broken_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+    #  The extra space at the end of the next line is required, and not dangling by accident.
+    assert (
+        """Validation error: 1 validation error for RouterImportModel
+router_lo_ipv6_address
+  Input is not a valid IPv6 address [type=ip_v6_address, input_value='Not an IP address', input_type=str]"""
+        in captured_output
+    )
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_iptrunk_successful(mock_start_process, iptrunk_data):
+    import_iptrunks(iptrunk_data()["path"])
+    assert mock_start_process.call_count == 1
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_iptrunk_invalid_router_id_side_a_and_b(mock_start_process, iptrunk_data, capfd):
+    broken_data = iptrunk_data(side_a_node="Doesn't exist", side_b_node="Also doesn't exist")
+    import_iptrunks(broken_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+    assert (
+        """Validation error: 2 validation errors for IptrunkImportModel
+side_a_node_id
+  Value error, Router  not found [type=value_error, input_value='', input_type=str]
+    For further information visit https://errors.pydantic.dev/2.5/v/value_error
+side_b_node_id
+  Value error, Router  not found [type=value_error, input_value='', input_type=str]"""
+        in captured_output
+    )
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_iptrunk_non_unique_members_side_a_and_b(mock_start_process, iptrunk_data, faker, capfd):
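+    #  Reuse one interface entry for every LAG member so the "Items must be unique" validator fails on both sides.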
+    duplicate_interface = {"interface_name": faker.network_interface(), "interface_description": faker.sentence()}
+    side_a_members = [duplicate_interface for _ in range(5)]
+    side_b_members = [duplicate_interface for _ in range(5)]
+    broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
+    import_iptrunks(broken_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+
+    assert "Validation error: 2 validation errors for IptrunkImportModel" in captured_output
+    assert (
+        """side_a_ae_members
+  Value error, Items must be unique [type=value_error, input_value=[{'interface_name':"""
+    ) in captured_output
+    assert (
+        """side_b_ae_members
+  Value error, Items must be unique [type=value_error, input_value=[{'interface_name':"""
+    ) in captured_output
+
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_iptrunk_side_a_member_count_mismatch(mock_start_process, iptrunk_data, faker, capfd):
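+    #  Give side A five members and side B six, so validation fails on the member count mismatch.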
+    side_a_members = [
+        {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(5)
+    ]
+    side_b_members = [
+        {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(6)
+    ]
+    broken_data = iptrunk_data(side_a_members=side_a_members, side_b_members=side_b_members)
+    import_iptrunks(broken_data["path"])
+
+    captured_output, _ = capfd.readouterr()
+    assert (
+        """Validation error: 1 validation error for IptrunkImportModel
+  Value error, Mismatch between Side A and B members [type=value_error, input_value={'partner': 'GEANT',"""
+        in captured_output
+    )
+    assert mock_start_process.call_count == 0
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_office_router_success(mock_start_process, office_router_data):
+    import_office_routers(office_router_data()["path"])
+    assert mock_start_process.call_count == 1
+
+
+@patch("gso.cli.imports.start_process")
+def test_import_super_pop_switch_success(mock_start_process, super_pop_switch_data):
+    import_super_pop_switches(super_pop_switch_data()["path"])
+    assert mock_start_process.call_count == 1
diff --git a/test/conftest.py b/test/conftest.py
index d0bfebfed7b8bf9e04e2d086fc5bd568550dc321..8fae41e74198cd47d0cc6543b60b3b459b6be273 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,4 +1,5 @@
 import contextlib
+import datetime
 import ipaddress
 import logging
 import os
@@ -11,10 +12,20 @@ from alembic.config import Config
 from faker import Faker
 from faker.providers import BaseProvider
 from orchestrator import app_settings
-from orchestrator.db import Database, db
+from orchestrator.db import (
+    Database,
+    ProductBlockTable,
+    ProductTable,
+    ResourceTypeTable,
+    SubscriptionMetadataTable,
+    WorkflowTable,
+    db,
+)
 from orchestrator.db.database import ENGINE_ARGUMENTS, SESSION_ARGUMENTS, BaseModel
-from orchestrator.types import strEnum
-from sqlalchemy import create_engine, text
+from orchestrator.domain import SUBSCRIPTION_MODEL_REGISTRY, SubscriptionModel
+from orchestrator.domain.base import ProductBlockModel
+from orchestrator.types import SubscriptionLifecycle, strEnum
+from sqlalchemy import create_engine, select, text
 from sqlalchemy.engine import make_url
 from sqlalchemy.orm import scoped_session, sessionmaker
 from starlette.testclient import TestClient
@@ -25,6 +36,16 @@ from gso.main import init_gso_app
 from gso.schema.partner import PartnerCreate
 from gso.services.partners import create_partner
 from gso.utils.helpers import LAGMember
+from test.fixtures import (  # noqa: F401
+    iptrunk_side_subscription_factory,
+    iptrunk_subscription_factory,
+    juniper_router_subscription_factory,
+    nokia_router_subscription_factory,
+    office_router_subscription_factory,
+    site_subscription_factory,
+    super_pop_switch_subscription_factory,
+    test_workflow,
+)
 
 logging.getLogger("faker.factory").setLevel(logging.WARNING)
 
@@ -36,7 +57,7 @@ def pytest_collection_modifyitems(config, items):
 
 
 class UseJuniperSide(strEnum):
-    """Define on tests on which side to use Juniper router"""
+    """Define on tests on which side to use Juniper router."""
 
     NONE = "none"
     SIDE_A = "side_a"
@@ -247,3 +268,250 @@ def test_client(fastapi_app):
 @pytest.fixture(scope="session")
 def geant_partner():
     return create_partner(PartnerCreate(name="GEANT-TEST", partner_type=PartnerType.GEANT, email="goat-test@geant.org"))
+
+
+@pytest.fixture()
+def generic_resource_type_1():
+    rt = ResourceTypeTable(description="Resource Type one", resource_type="rt_1")
+    db.session.add(rt)
+    db.session.commit()
+
+    return rt
+
+
+@pytest.fixture()
+def generic_resource_type_2():
+    rt = ResourceTypeTable(description="Resource Type two", resource_type="rt_2")
+    db.session.add(rt)
+    db.session.commit()
+    return rt
+
+
+@pytest.fixture()
+def generic_resource_type_3():
+    rt = ResourceTypeTable(description="Resource Type three", resource_type="rt_3")
+    db.session.add(rt)
+    db.session.commit()
+
+    return rt
+
+
+@pytest.fixture()
+def generic_product_block_1(generic_resource_type_1):
+    pb = ProductBlockTable(
+        name="PB_1",
+        description="Generic Product Block 1",
+        tag="PB1",
+        status="active",
+        resource_types=[generic_resource_type_1],
+        created_at=datetime.datetime.fromisoformat("2023-05-24T00:00:00+00:00"),
+    )
+    db.session.add(pb)
+    db.session.commit()
+    return pb
+
+
+@pytest.fixture()
+def generic_product_block_2(generic_resource_type_2, generic_resource_type_3):
+    pb = ProductBlockTable(
+        name="PB_2",
+        description="Generic Product Block 2",
+        tag="PB2",
+        status="active",
+        resource_types=[generic_resource_type_2, generic_resource_type_3],
+        created_at=datetime.datetime.fromisoformat("2023-05-24T00:00:00+00:00"),
+    )
+    db.session.add(pb)
+    db.session.commit()
+    return pb
+
+
+@pytest.fixture()
+def generic_product_block_3(generic_resource_type_2):
+    pb = ProductBlockTable(
+        name="PB_3",
+        description="Generic Product Block 3",
+        tag="PB3",
+        status="active",
+        resource_types=[generic_resource_type_2],
+        created_at=datetime.datetime.fromisoformat("2023-05-24T00:00:00+00:00"),
+    )
+    db.session.add(pb)
+    db.session.commit()
+    return pb
+
+
+@pytest.fixture()
+def generic_product_1(generic_product_block_1, generic_product_block_2):
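+    # Look up the existing "modify_note" workflow so the product is linked to at least one workflow.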
+    workflow = db.session.scalar(select(WorkflowTable).where(WorkflowTable.name == "modify_note"))
+    p = ProductTable(
+        name="Product 1",
+        description="Generic Product One",
+        product_type="Generic",
+        status="active",
+        tag="GEN1",
+        product_blocks=[generic_product_block_1, generic_product_block_2],
+        workflows=[workflow],
+    )
+    db.session.add(p)
+    db.session.commit()
+    return p
+
+
+@pytest.fixture()
+def generic_product_2(generic_product_block_3):
+    workflow = db.session.scalar(select(WorkflowTable).where(WorkflowTable.name == "modify_note"))
+
+    p = ProductTable(
+        name="Product 2",
+        description="Generic Product Two",
+        product_type="Generic",
+        status="active",
+        tag="GEN2",
+        product_blocks=[generic_product_block_3],
+        workflows=[workflow],
+    )
+    db.session.add(p)
+    db.session.commit()
+    return p
+
+
+@pytest.fixture()
+def generic_product_3(generic_product_block_2):
+    p = ProductTable(
+        name="Product 3",
+        description="Generic Product Three",
+        product_type="Generic",
+        status="active",
+        tag="GEN3",
+        product_blocks=[generic_product_block_2],
+    )
+    db.session.add(p)
+    db.session.commit()
+    return p
+
+
+@pytest.fixture()
+def generic_product_block_type_1(generic_product_block_1):
+    class GenericProductBlockOneInactive(ProductBlockModel, product_block_name="PB_1"):
+        rt_1: str | None = None
+
+    class GenericProductBlockOne(GenericProductBlockOneInactive, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+        rt_1: str
+
+    return GenericProductBlockOneInactive, GenericProductBlockOne
+
+
+@pytest.fixture()
+def generic_product_block_type_2(generic_product_block_2):
+    class GenericProductBlockTwoInactive(ProductBlockModel, product_block_name="PB_2"):
+        rt_2: int | None = None
+        rt_3: str | None = None
+
+    class GenericProductBlockTwo(GenericProductBlockTwoInactive, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+        rt_2: int
+        rt_3: str
+
+    return GenericProductBlockTwoInactive, GenericProductBlockTwo
+
+
+@pytest.fixture()
+def generic_product_block_type_3(generic_product_block_3):
+    class GenericProductBlockThreeInactive(ProductBlockModel, product_block_name="PB_3"):
+        rt_2: int | None = None
+
+    class GenericProductBlockThree(GenericProductBlockThreeInactive, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+        rt_2: int
+
+    return GenericProductBlockThreeInactive, GenericProductBlockThree
+
+
+@pytest.fixture()
+def generic_product_type_1(generic_product_1, generic_product_block_type_1, generic_product_block_type_2):
+    generic_product_block_one_inactive, generic_product_block_one = generic_product_block_type_1
+    generic_product_block_two_inactive, generic_product_block_two = generic_product_block_type_2
+
+    # Test Product domain models
+
+    class GenericProductOneInactive(SubscriptionModel, is_base=True):
+        pb_1: generic_product_block_one_inactive
+        pb_2: generic_product_block_two_inactive
+
+    class GenericProductOne(GenericProductOneInactive, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+        pb_1: generic_product_block_one
+        pb_2: generic_product_block_two
+
+    SUBSCRIPTION_MODEL_REGISTRY["Product 1"] = GenericProductOne
+
+    yield GenericProductOneInactive, GenericProductOne
+
+    del SUBSCRIPTION_MODEL_REGISTRY["Product 1"]
+
+
+@pytest.fixture()
+def generic_product_type_2(generic_product_2, generic_product_block_type_3):
+    generic_product_block_three_inactive, generic_product_block_three = generic_product_block_type_3
+
+    class GenericProductTwoInactive(SubscriptionModel, is_base=True):
+        pb_3: generic_product_block_three_inactive
+
+    class GenericProductTwo(GenericProductTwoInactive, lifecycle=[SubscriptionLifecycle.ACTIVE]):
+        pb_3: generic_product_block_three
+
+    SUBSCRIPTION_MODEL_REGISTRY["Product 2"] = GenericProductTwo
+
+    yield GenericProductTwoInactive, GenericProductTwo
+
+    del SUBSCRIPTION_MODEL_REGISTRY["Product 2"]
+
+
+@pytest.fixture()
+def product_type_1_subscription_factory(generic_product_1, generic_product_type_1, geant_partner):
+    def subscription_create(
+        description="Generic Subscription One",
+        start_date="2023-05-24T00:00:00+00:00",
+        rt_1="Value1",
+        rt_2=42,
+        rt_3="Value2",
+    ):
+        generic_product_one_inactive, _ = generic_product_type_1
+        gen_subscription = generic_product_one_inactive.from_product_id(
+            generic_product_1.product_id, customer_id=geant_partner["partner_id"], insync=True
+        )
+        gen_subscription.pb_1.rt_1 = rt_1
+        gen_subscription.pb_2.rt_2 = rt_2
+        gen_subscription.pb_2.rt_3 = rt_3
+        gen_subscription = SubscriptionModel.from_other_lifecycle(gen_subscription, SubscriptionLifecycle.ACTIVE)
+        gen_subscription.description = description
+        gen_subscription.start_date = start_date
+        gen_subscription.save()
+
+        gen_subscription_metadata = SubscriptionMetadataTable()
+        gen_subscription_metadata.subscription_id = gen_subscription.subscription_id
+        gen_subscription_metadata.metadata_ = {"description": "Some metadata description"}
+        db.session.add(gen_subscription_metadata)
+        db.session.commit()
+        return str(gen_subscription.subscription_id)
+
+    return subscription_create
+
+
+@pytest.fixture()
+def product_type_1_subscriptions_factory(product_type_1_subscription_factory):
+    def subscriptions_create(amount=1):
+        return [
+            product_type_1_subscription_factory(
+                description=f"Subscription {i}",
+                start_date=(
+                    datetime.datetime.fromisoformat("2023-05-24T00:00:00+00:00") + datetime.timedelta(days=i)
+                ).replace(tzinfo=datetime.UTC),
+            )
+            for i in range(amount)
+        ]
+
+    return subscriptions_create
+
+
+@pytest.fixture()
+def generic_subscription_1(product_type_1_subscription_factory):
+    return product_type_1_subscription_factory()
diff --git a/test/fixtures.py b/test/fixtures.py
index 2a7eab3dea34e4625beba4816741154db2d4f2a3..316a43c338f20601fa4235f0cac11ceacc2317f4 100644
--- a/test/fixtures.py
+++ b/test/fixtures.py
@@ -1,9 +1,18 @@
 import ipaddress
+from collections.abc import Generator
+from typing import Any
+from uuid import uuid4
 
 import pytest
+from orchestrator import step, workflow
+from orchestrator.config.assignee import Assignee
 from orchestrator.db import db
 from orchestrator.domain import SubscriptionModel
 from orchestrator.types import SubscriptionLifecycle, UUIDstr
+from orchestrator.workflow import done, init, inputstep
+from pydantic_forms.core import FormPage
+from pydantic_forms.types import FormGenerator
+from pydantic_forms.validators import Choice
 
 from gso.products import ProductName
 from gso.products.product_blocks.iptrunk import (
@@ -14,13 +23,14 @@ from gso.products.product_blocks.iptrunk import (
 )
 from gso.products.product_blocks.router import RouterRole
 from gso.products.product_blocks.site import SiteTier
-from gso.products.product_types.iptrunk import IptrunkInactive
-from gso.products.product_types.office_router import OfficeRouterInactive
-from gso.products.product_types.router import Router, RouterInactive
-from gso.products.product_types.site import Site, SiteInactive
-from gso.products.product_types.super_pop_switch import SuperPopSwitchInactive
+from gso.products.product_types.iptrunk import ImportedIptrunkInactive, IptrunkInactive
+from gso.products.product_types.office_router import ImportedOfficeRouterInactive, OfficeRouterInactive
+from gso.products.product_types.router import ImportedRouterInactive, Router, RouterInactive
+from gso.products.product_types.site import ImportedSiteInactive, Site, SiteInactive
+from gso.products.product_types.super_pop_switch import ImportedSuperPopSwitchInactive, SuperPopSwitchInactive
 from gso.services import subscriptions
 from gso.utils.shared_enums import Vendor
+from test.workflows import WorkflowInstanceForTests
 
 
 @pytest.fixture()
@@ -40,6 +50,8 @@ def site_subscription_factory(faker, geant_partner):
         site_ts_address=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
@@ -49,14 +61,21 @@ def site_subscription_factory(faker, geant_partner):
         site_city = site_city or faker.city()
         site_country = site_country or faker.country()
         site_country_code = site_country_code or faker.country_code()
-        site_latitude = site_latitude or float(faker.latitude())
-        site_longitude = site_longitude or float(faker.longitude())
+        site_latitude = site_latitude or str(faker.latitude())
+        site_longitude = site_longitude or str(faker.longitude())
         site_bgp_community_id = site_bgp_community_id or faker.pyint()
         site_internal_id = site_internal_id or faker.pyint()
         site_ts_address = site_ts_address or faker.ipv4()
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.SITE)
-        site_subscription = SiteInactive.from_product_id(product_id, customer_id=partner["partner_id"], insync=True)
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.SITE)
+            site_subscription = SiteInactive.from_product_id(product_id, customer_id=partner["partner_id"], insync=True)
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_SITE)
+            site_subscription = ImportedSiteInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+
         site_subscription.site.site_city = site_city
         site_subscription.site.site_name = site_name
         site_subscription.site.site_country = site_country
@@ -97,6 +116,8 @@ def nokia_router_subscription_factory(site_subscription_factory, faker, geant_pa
         router_site=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
@@ -110,8 +131,17 @@ def nokia_router_subscription_factory(site_subscription_factory, faker, geant_pa
         router_lo_iso_address = router_lo_iso_address or faker.word()
         router_site = router_site or site_subscription_factory()
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.ROUTER)
-        router_subscription = RouterInactive.from_product_id(product_id, customer_id=partner["partner_id"], insync=True)
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.ROUTER)
+            router_subscription = RouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_ROUTER)
+            router_subscription = ImportedRouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+
         router_subscription.router.router_fqdn = router_fqdn
         router_subscription.router.router_ts_port = router_ts_port
         router_subscription.router.router_access_via_ts = router_access_via_ts
@@ -152,6 +182,8 @@ def juniper_router_subscription_factory(site_subscription_factory, faker, geant_
         router_site=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
@@ -165,9 +197,17 @@ def juniper_router_subscription_factory(site_subscription_factory, faker, geant_
         router_lo_iso_address = router_lo_iso_address or faker.word()
         router_site = router_site or site_subscription_factory()
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.ROUTER)
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.ROUTER)
+            router_subscription = RouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_ROUTER)
+            router_subscription = ImportedRouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
 
-        router_subscription = RouterInactive.from_product_id(product_id, customer_id=partner["partner_id"], insync=True)
         router_subscription.router.router_fqdn = router_fqdn
         router_subscription.router.router_ts_port = router_ts_port
         router_subscription.router.router_access_via_ts = router_access_via_ts
@@ -246,13 +286,24 @@ def iptrunk_subscription_factory(iptrunk_side_subscription_factory, faker, geant
         iptrunk_sides=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.IP_TRUNK)
-        description = description or faker.sentence()
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IP_TRUNK)
+            iptrunk_subscription = IptrunkInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_IP_TRUNK)
+            iptrunk_subscription = ImportedIptrunkInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
 
+        description = description or faker.sentence()
         geant_s_sid = geant_s_sid or faker.geant_sid()
         iptrunk_description = iptrunk_description or faker.sentence()
         iptrunk_isis_metric = iptrunk_isis_metric or faker.pyint()
@@ -263,9 +314,6 @@ def iptrunk_subscription_factory(iptrunk_side_subscription_factory, faker, geant
         iptrunk_side_b = iptrunk_side_subscription_factory()
         iptrunk_sides = iptrunk_sides or [iptrunk_side_a, iptrunk_side_b]
 
-        iptrunk_subscription = IptrunkInactive.from_product_id(
-            product_id, customer_id=partner["partner_id"], insync=True
-        )
         iptrunk_subscription.iptrunk.geant_s_sid = geant_s_sid
         iptrunk_subscription.iptrunk.iptrunk_description = iptrunk_description
         iptrunk_subscription.iptrunk.iptrunk_type = iptrunk_type
@@ -306,6 +354,8 @@ def office_router_subscription_factory(site_subscription_factory, faker, geant_p
         office_router_site=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
@@ -317,10 +367,17 @@ def office_router_subscription_factory(site_subscription_factory, faker, geant_p
         office_router_lo_ipv6_address = office_router_lo_ipv6_address or ipaddress.IPv6Address(faker.ipv6())
         office_router_site = office_router_site or site_subscription_factory()
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.OFFICE_ROUTER)
-        office_router_subscription = OfficeRouterInactive.from_product_id(
-            product_id, customer_id=partner["partner_id"], insync=True
-        )
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.OFFICE_ROUTER)
+            office_router_subscription = OfficeRouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_OFFICE_ROUTER)
+            office_router_subscription = ImportedOfficeRouterInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+
         office_router_subscription.office_router.office_router_fqdn = office_router_fqdn
         office_router_subscription.office_router.office_router_ts_port = office_router_ts_port
         office_router_subscription.office_router.office_router_lo_ipv4_address = office_router_lo_ipv4_address
@@ -356,6 +413,8 @@ def super_pop_switch_subscription_factory(site_subscription_factory, faker, gean
         super_pop_switch_site=None,
         status: SubscriptionLifecycle | None = None,
         partner: dict | None = None,
+        *,
+        is_imported: bool | None = True,
     ) -> UUIDstr:
         if partner is None:
             partner = geant_partner
@@ -366,10 +425,17 @@ def super_pop_switch_subscription_factory(site_subscription_factory, faker, gean
         super_pop_switch_mgmt_ipv4_address = super_pop_switch_mgmt_ipv4_address or ipaddress.IPv4Address(faker.ipv4())
         super_pop_switch_site = super_pop_switch_site or site_subscription_factory()
 
-        product_id = subscriptions.get_product_id_by_name(ProductName.SUPER_POP_SWITCH)
-        super_pop_switch_subscription = SuperPopSwitchInactive.from_product_id(
-            product_id, customer_id=partner["partner_id"], insync=True
-        )
+        if is_imported:
+            product_id = subscriptions.get_product_id_by_name(ProductName.SUPER_POP_SWITCH)
+            super_pop_switch_subscription = SuperPopSwitchInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+        else:
+            product_id = subscriptions.get_product_id_by_name(ProductName.IMPORTED_SUPER_POP_SWITCH)
+            super_pop_switch_subscription = ImportedSuperPopSwitchInactive.from_product_id(
+                product_id, customer_id=partner["partner_id"], insync=True
+            )
+
         super_pop_switch_subscription.super_pop_switch.super_pop_switch_fqdn = super_pop_switch_fqdn
         super_pop_switch_subscription.super_pop_switch.super_pop_switch_ts_port = super_pop_switch_ts_port
         super_pop_switch_subscription.super_pop_switch.super_pop_switch_mgmt_ipv4_address = (
@@ -395,3 +461,40 @@ def super_pop_switch_subscription_factory(site_subscription_factory, faker, gean
         return str(super_pop_switch_subscription.subscription_id)
 
     return subscription_create
+
+
+@pytest.fixture()
+def test_workflow(generic_subscription_1: UUIDstr, generic_product_type_1) -> Generator:
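+    # Build a small workflow (insert state, check types, suspend on input, done) and register it for this test only.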
+    _, generic_product_one = generic_product_type_1
+
+    @step("Insert UUID in state")
+    def insert_object():
+        return {"subscription_id": str(uuid4()), "model": generic_product_one.from_subscription(generic_subscription_1)}
+
+    @step("Test that it is a string now")
+    def check_object(subscription_id: Any, model: dict) -> None:
+        # This is actually a test. It would be nicer to have this in a proper test, but that takes too much setup,
+        # which already happens here. So we hijack this fixture and run this test for all tests that use this fixture
+        # (which should not be an issue).
+        assert isinstance(subscription_id, str)
+        assert isinstance(model, dict)
+
+    @inputstep("Modify", assignee=Assignee.CHANGES)
+    def modify(subscription_id: UUIDstr) -> FormGenerator:
+        class TestChoice(Choice):
+            A = "A"
+            B = "B"
+            C = "C"
+
+        class TestForm(FormPage):
+            generic_select: TestChoice
+
+        user_input = yield TestForm
+        return user_input.model_dump()
+
+    @workflow("Workflow")
+    def workflow_for_testing_processes_py():
+        return init >> insert_object >> check_object >> modify >> done
+
+    with WorkflowInstanceForTests(workflow_for_testing_processes_py, "workflow_for_testing_processes_py") as wf:
+        yield wf
diff --git a/test/schedules/test_scheduling.py b/test/schedules/test_scheduling.py
index 5ed2ad01e14a00e9e0785e9ee9a31518325f4bea..17811174369913a4f47f532a8c6bbd115717a946 100644
--- a/test/schedules/test_scheduling.py
+++ b/test/schedules/test_scheduling.py
@@ -3,12 +3,12 @@ from unittest.mock import MagicMock, patch
 import pytest
 from orchestrator.targets import Target
 
-from gso.schedules.scheduling import scheduler
+from gso.schedules.scheduling import CronScheduleConfig, scheduler
 
 
 @pytest.fixture(scope="module")
 def validate_subscriptions():
-    from gso.schedules.validate_subscriptions import validate_subscriptions as vs  # noqa: PLC0415
+    from gso.schedules.validate_subscriptions import validate_subscriptions as vs
 
     return vs
 
@@ -42,12 +42,14 @@ def test_scheduler_updates_beat_schedule(mock_celery):
     mock_celery.conf.beat_schedule = {}
 
     @scheduler(
-        name="A cool task",
-        minute="0",
-        hour="0",
-        day_of_week="*",
-        day_of_month="*",
-        month_of_year="*",
+        CronScheduleConfig(
+            name="A cool task",
+            minute="0",
+            hour="0",
+            day_of_week="*",
+            day_of_month="*",
+            month_of_year="*",
+        )
     )
     def mock_task():
         return "task result"
@@ -64,12 +66,14 @@ def test_scheduled_task_still_works():
     """Ensure that the scheduler decorator does not change the behavior of the function it decorates."""
 
     @scheduler(
-        name="A cool task",
-        minute="0",
-        hour="0",
-        day_of_week="*",
-        day_of_month="*",
-        month_of_year="*",
+        CronScheduleConfig(
+            name="A cool task",
+            minute="0",
+            hour="0",
+            day_of_week="*",
+            day_of_month="*",
+            month_of_year="*",
+        )
     )
     def mock_task():
         return "task result"
diff --git a/test/schemas/test_types.py b/test/schemas/test_types.py
index 2e90123f3d96f3c0e5c86294780ba4539a9660c1..a968084f06e2674828b6a59df58484e1fd965851 100644
--- a/test/schemas/test_types.py
+++ b/test/schemas/test_types.py
@@ -1,8 +1,17 @@
 import pytest
+from pydantic import BaseModel, ValidationError
 
 from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordinate
 
 
+class LatitudeModel(BaseModel):
+    latitude: LatitudeCoordinate
+
+
+class LongitudeModel(BaseModel):
+    longitude: LongitudeCoordinate
+
+
 @pytest.mark.parametrize(
     ("input_value", "is_valid"),
     [
@@ -22,10 +31,10 @@ from gso.products.product_blocks.site import LatitudeCoordinate, LongitudeCoordi
 )
 def test_latitude(input_value, is_valid):
     if is_valid:
-        assert LatitudeCoordinate.validate(input_value) == input_value
+        assert LatitudeModel(latitude=input_value).latitude == input_value
     else:
-        with pytest.raises(ValueError, match="Invalid latitude coordinate"):
-            LatitudeCoordinate.validate(input_value)
+        with pytest.raises(ValidationError):
+            LatitudeModel(latitude=input_value)
 
 
 @pytest.mark.parametrize(
@@ -47,7 +56,7 @@ def test_latitude(input_value, is_valid):
 )
 def test_longitude(input_value, is_valid):
     if is_valid:
-        assert LongitudeCoordinate.validate(input_value) == input_value
+        assert LongitudeModel(longitude=input_value).longitude == input_value
     else:
-        with pytest.raises(ValueError, match="Invalid longitude coordinate"):
-            LongitudeCoordinate.validate(input_value)
+        with pytest.raises(ValidationError):
+            LongitudeModel(longitude=input_value)
diff --git a/test/services/conftest.py b/test/services/conftest.py
index 9fc3d191369b863e151721b7eed5ced2c4ee8d7c..9aee570a13f12985576708e9bac7740e5670b103 100644
--- a/test/services/conftest.py
+++ b/test/services/conftest.py
@@ -46,3 +46,14 @@ class MockedNetboxClient:
     @staticmethod
     def delete_interface():
         return None
+
+
+class MockedSharePointClient:
+    class BaseMockObject:
+        def __init__(self, **kwargs):
+            for key, value in kwargs.items():
+                setattr(self, key, value)
+
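+    # Mimics SharePointClient.add_list_item by returning a fake URL built from the list name and one of the fields.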
+    @staticmethod
+    def add_list_item(list_name: str, fields: dict[str, str]) -> str:
+        return f"http://{list_name}/{fields.popitem()}"
diff --git a/test/services/test_librenms_client.py b/test/services/test_librenms_client.py
index 55df5ce176329a66587b53c366988b325ecf49e0..d07da93a1be847be29c267c4c2188b5533a0af65 100644
--- a/test/services/test_librenms_client.py
+++ b/test/services/test_librenms_client.py
@@ -31,7 +31,7 @@ def mock_get_device_success(faker):
                     "cryptopass": None,
                     "cryptoalgo": None,
                     "snmpver": "v2c",
-                    "port": faker.port_number(),
+                    "port": faker.port_number(is_user=True),
                     "transport": "udp",
                     "timeout": None,
                     "retries": None,
@@ -118,7 +118,7 @@ def mock_get_device_misconfigured(faker):
                     "cryptopass": None,
                     "cryptoalgo": None,
                     "snmpver": "v2c",
-                    "port": faker.port_number(),
+                    "port": faker.port_number(is_user=True),
                     "transport": "udp",
                     "timeout": None,
                     "retries": None,
@@ -168,9 +168,12 @@ def mock_get_device_misconfigured(faker):
 
 @pytest.fixture()
 def mock_get_device_unauthenticated():
-    with patch("gso.services.librenms_client.requests.get") as mock_get_unauthorized, patch(
-        "gso.services.librenms_client.LibreNMSClient.get_device",
-    ) as mock_get_device:
+    with (
+        patch("gso.services.librenms_client.requests.get") as mock_get_unauthorized,
+        patch(
+            "gso.services.librenms_client.LibreNMSClient.get_device",
+        ) as mock_get_device,
+    ):
         mock_get_unauthorized().status_code = HTTPStatus.UNAUTHORIZED
         mock_get_unauthorized().json.return_value = {"message": "Unauthenticated."}
         mock_get_device.side_effect = HTTPError(
diff --git a/test/utils/test_helpers.py b/test/utils/test_helpers.py
index 5dee0aa8f7a5cb981771a63cdfec933986423991..e80e6f30049635fdcc4691c9c75f8e13dfbbfb67 100644
--- a/test/utils/test_helpers.py
+++ b/test/utils/test_helpers.py
@@ -23,7 +23,7 @@ def mock_netbox_client():
 
 @pytest.fixture()
 def generate_tt_numbers(faker, request):
-    """Generator for valid and invalid tt numbers."""
+    """Get a Generator for valid and invalid tt numbers."""
     valid_count = request.param.get("valid", 0)
     invalid_count = request.param.get("invalid", 0)
 
@@ -78,7 +78,7 @@ def test_nokia_router_with_interfaces_returns_choice(mock_router, mock_netbox_cl
 
 @pytest.mark.parametrize("generate_tt_numbers", [{"valid": 5, "invalid": 3}], indirect=True)
 def test_tt_number(generate_tt_numbers):
-    """Test different TT numbers"""
+    """Test different TT numbers."""
     for tt_number, is_valid in generate_tt_numbers:
         if is_valid:
             assert validate_tt_number(tt_number) == tt_number
diff --git a/test/workflows/__init__.py b/test/workflows/__init__.py
index a8144aacac102f807324458c14ad0cd04c69c892..b44bace935639aa7eac5716750a3204243e4bd21 100644
--- a/test/workflows/__init__.py
+++ b/test/workflows/__init__.py
@@ -2,16 +2,16 @@ import difflib
 import pprint
 from collections.abc import Callable
 from copy import deepcopy
-from itertools import chain, repeat
 from typing import cast
 from uuid import uuid4
 
 import structlog
-from orchestrator.db import ProcessTable
+from orchestrator.db import ProcessTable, WorkflowTable, db
 from orchestrator.services.processes import StateMerger, _db_create_process
-from orchestrator.types import FormGenerator, InputForm, State
+from orchestrator.types import State
 from orchestrator.utils.json import json_dumps, json_loads
 from orchestrator.workflow import Process, ProcessStat, Step, Success, Workflow, runwf
+from orchestrator.workflow import Process as WFProcess
 from orchestrator.workflows import ALL_WORKFLOWS, LazyWorkflowInstance, get_workflow
 from pydantic_forms.core import post_form
 
@@ -113,10 +113,22 @@ def extract_error(result):
     return extract_state(result).get("error")
 
 
+def store_workflow(wf: Workflow, name: str | None = None) -> WorkflowTable:
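+    # Persist a WorkflowTable row for the test workflow; WorkflowInstanceForTests deletes it again on exit.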
+    wf_table = WorkflowTable(name=name or wf.name, target=wf.target, description=wf.description)
+    db.session.add(wf_table)
+    db.session.commit()
+    return wf_table
+
+
+def delete_workflow(wf: WorkflowTable) -> None:
+    db.session.delete(wf)
+    db.session.commit()
+
+
 class WorkflowInstanceForTests(LazyWorkflowInstance):
     """Register Test workflows.
 
-    Similar to ``LazyWorkflowInstance`` but does not require an import during instantiate
+    Similar to `LazyWorkflowInstance`, but does not require an import at instantiation time.
     Used for creating test workflows
     """
 
@@ -125,14 +137,19 @@ class WorkflowInstanceForTests(LazyWorkflowInstance):
     is_callable: bool
 
     def __init__(self, workflow: Workflow, name: str) -> None:
+        super().__init__("orchestrator.test", name)
         self.workflow = workflow
         self.name = name
 
     def __enter__(self):
         ALL_WORKFLOWS[self.name] = self
+        self.workflow_instance = store_workflow(self.workflow, name=self.name)
+        return self.workflow_instance
 
     def __exit__(self, _exc_type, _exc_value, _traceback):
         del ALL_WORKFLOWS[self.name]
+        delete_workflow(self.workflow_instance)
+        del self.workflow_instance
 
     def instantiate(self) -> Workflow:
         """Import and instantiate a workflow and return it.
@@ -140,7 +157,10 @@ class WorkflowInstanceForTests(LazyWorkflowInstance):
         This can be as simple as merely importing a workflow function. However, if it concerns a workflow generating
         function, that function will be called with or without arguments as specified.
 
-        :return Workflow: A workflow function.
+        Returns
+        -------
+            A workflow function.
+
         """
         self.workflow.name = self.name
         return self.workflow
@@ -172,13 +192,23 @@ def _store_step(step_log: list[tuple[Step, Process]]) -> Callable[[ProcessStat,
     return __store_step
 
 
-def run_workflow(workflow_key: str, input_data: State | list[State]) -> tuple[Process, ProcessStat, list]:
-    # ATTENTION!! This code needs to be as similar as possible to ``server.services.processes.start_process``
+def _sanitize_input(input_data: State | list[State]) -> list[State]:
+    # For backwards compatibility, convert a single dict to a list
+    if not isinstance(input_data, list):
+        input_data = [input_data]
+
+    # We need a copy here, and we want to mimic the actual code, which returns a serialized version of the state
+    return cast(list[State], json_loads(json_dumps(input_data)))
+
+
+def run_workflow(workflow_key: str, input_data: State | list[State]) -> tuple[WFProcess, ProcessStat, list]:
+    # ATTENTION!! This code needs to be as similar as possible to `server.services.processes.start_process`
     # The main differences are: we use a different step log function, and we don't run in
     # a separate thread
+    user_data = _sanitize_input(input_data)
     user = "john.doe"
 
-    step_log: list[tuple[Step, Process]] = []
+    step_log: list[tuple[Step, WFProcess]] = []
 
     process_id = uuid4()
     workflow = get_workflow(workflow_key)
@@ -190,7 +220,7 @@ def run_workflow(workflow_key: str, input_data: State | list[State]) -> tuple[Pr
         "workflow_target": workflow.target,
     }
 
-    user_input = post_form(workflow.initial_input_form, initial_state, input_data)
+    user_input = post_form(workflow.initial_input_form, initial_state, user_data)
 
     pstat = ProcessStat(
         process_id,
@@ -244,72 +274,6 @@ def resume_workflow(
     return result, step_log
 
 
-def run_form_generator(
-    form_generator: FormGenerator,
-    extra_inputs: list[State] | None = None,
-) -> tuple[list[dict], State]:
-    """Run a form generator to get the resulting forms and result.
-
-    Warning! This does not run the actual pydantic validation on purpose. However, you should
-    make sure that anything in extra_inputs matched the values and types as if the pydantic validation has
-    been run.
-
-    :param FormGenerator form_generator: The form generator that will be run.
-    :param list[State] | None extra_inputs: list of user input dicts for each page in the generator.
-                                            If no input is given for a page, an empty dict is used.
-                                            The default value from the form is used as the default value for a field.
-
-    :return tuple[list[dict], State]: A list of generated forms and the result state for the whole generator.
-
-    Example:
-    -------
-        Given the following form generator:
-
-        >>> from pydantic_forms.core import FormPage
-        >>> def form_generator(state):
-        ...     class TestForm(FormPage):
-        ...         field: str = "foo"
-        ...     user_input = yield TestForm
-        ...     return {**user_input.dict(), "bar": 42}
-
-        You can run this without extra_inputs
-        >>> forms, result = run_form_generator(form_generator({"state_field": 1}))
-        >>> forms
-        [{'title': 'unknown', 'type': 'object', 'properties': {
-            'field': {'title': 'Field', 'default': 'foo', 'type': 'string'}}, 'additionalProperties': False}]
-        >>> result
-        {'field': 'foo', 'bar': 42}
-
-
-        Or with extra_inputs:
-        >>> forms, result = run_form_generator(form_generator({'state_field': 1}), [{'field':'baz'}])
-        >>> forms
-        [{'title': 'unknown', 'type': 'object', 'properties': {
-            'field': {'title': 'Field', 'default': 'foo', 'type': 'string'}}, 'additionalProperties': False}]
-        >>> result
-        {'field': 'baz', 'bar': 42}
-
-    """
-    forms: list[dict] = []
-    result: State = {"s": 3}
-    if extra_inputs is None:
-        extra_inputs = []
-
-    try:
-        form = cast(InputForm, next(form_generator))
-        forms.append(form.schema())
-        for extra_input in chain(extra_inputs, repeat(cast(State, {}))):
-            user_input_data = {field_name: field.default for field_name, field in form.__fields__.items()}
-            user_input_data.update(extra_input)
-            user_input = form.construct(**user_input_data)
-            form = form_generator.send(user_input)
-            forms.append(form.schema())
-    except StopIteration as stop:
-        result = stop.value
-
-    return forms, result
-
-
 def user_accept_and_assert_suspended(process_stat, step_log, extra_data=None):
     extra_data = extra_data or {}
     result, step_log = resume_workflow(process_stat, step_log, extra_data)
diff --git a/test/workflows/conftest.py b/test/workflows/conftest.py
index 0665829aee73ae9cd3b9d1129a2781a98c2e210d..9d298a779f3e4f190e009973caa321658eb2433b 100644
--- a/test/workflows/conftest.py
+++ b/test/workflows/conftest.py
@@ -1,14 +1,6 @@
 import pytest
 from urllib3_mock import Responses
 
-from test.fixtures import (  # noqa: F401
-    iptrunk_side_subscription_factory,
-    iptrunk_subscription_factory,
-    juniper_router_subscription_factory,
-    nokia_router_subscription_factory,
-    site_subscription_factory,
-)
-
 
 @pytest.fixture(autouse=True)
 def responses():
diff --git a/test/workflows/iptrunk/test_create_imported_iptrunk.py b/test/workflows/iptrunk/test_create_imported_iptrunk.py
new file mode 100644
index 0000000000000000000000000000000000000000..c08ddbe643a122e0ed710cadffdddea70d78add3
--- /dev/null
+++ b/test/workflows/iptrunk/test_create_imported_iptrunk.py
@@ -0,0 +1,49 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
+from gso.products.product_types.iptrunk import ImportedIptrunk
+from test.workflows import (
+    assert_complete,
+    extract_state,
+    run_workflow,
+)
+
+
+@pytest.fixture()
+def workflow_input_data(faker, nokia_router_subscription_factory):
+    return {
+        "partner": "GEANT",
+        "geant_s_sid": faker.geant_sid(),
+        "iptrunk_description": faker.sentence(),
+        "iptrunk_type": IptrunkType.DARK_FIBER,
+        "iptrunk_speed": PhysicalPortCapacity.FOUR_HUNDRED_GIGABIT_PER_SECOND,
+        "iptrunk_minimum_links": 2,
+        "iptrunk_isis_metric": 10000,
+        "side_a_node_id": nokia_router_subscription_factory(),
+        "side_a_ae_iface": faker.network_interface(),
+        "side_a_ae_geant_a_sid": faker.geant_sid(),
+        "side_a_ae_members": [
+            {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(3)
+        ],
+        "side_b_node_id": nokia_router_subscription_factory(),
+        "side_b_ae_iface": faker.network_interface(),
+        "side_b_ae_geant_a_sid": faker.geant_sid(),
+        "side_b_ae_members": [
+            {"interface_name": faker.network_interface(), "interface_description": faker.sentence()} for _ in range(3)
+        ],
+        "iptrunk_ipv4_network": faker.ipv4_network(max_subnet=31),
+        "iptrunk_ipv6_network": faker.ipv6_network(max_subnet=126),
+    }
+
+
+@pytest.mark.workflow()
+def test_create_imported_iptrunk_success(workflow_input_data):
+    result, _, _ = run_workflow("create_imported_iptrunk", [workflow_input_data])
+    state = extract_state(result)
+    imported_router = ImportedIptrunk.from_subscription(state["subscription_id"])
+
+    assert_complete(result)
+    assert imported_router.product.name == ProductName.IMPORTED_IP_TRUNK
+    assert imported_router.status == SubscriptionLifecycle.ACTIVE
diff --git a/test/workflows/iptrunk/test_create_iptrunk.py b/test/workflows/iptrunk/test_create_iptrunk.py
index 4163f6835166d368cbda5a007d24f7043bd8e7f4..ea82a5bf7254dd535201ecadf08a5b0e91af6091 100644
--- a/test/workflows/iptrunk/test_create_iptrunk.py
+++ b/test/workflows/iptrunk/test_create_iptrunk.py
@@ -6,10 +6,9 @@ import pytest
 from gso.products import Iptrunk, ProductName
 from gso.products.product_blocks.iptrunk import IptrunkType, PhysicalPortCapacity
 from gso.services.subscriptions import get_product_id_by_name
-from gso.utils.helpers import LAGMember
 from gso.utils.shared_enums import Vendor
 from test import USER_CONFIRM_EMPTY_FORM
-from test.services.conftest import MockedNetboxClient
+from test.services.conftest import MockedNetboxClient, MockedSharePointClient
 from test.workflows import (
     assert_complete,
     assert_lso_interaction_failure,
@@ -56,7 +55,7 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
     else:
         router_side_b = nokia_router_subscription_factory()
         side_b_members = [
-            LAGMember(interface_name=f"Interface{interface}", interface_description=faker.sentence())
+            {"interface_name": f"Interface{interface}", "interface_description": faker.sentence()}
             for interface in range(2)
         ]
 
@@ -68,16 +67,16 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
         "iptrunk_speed": PhysicalPortCapacity.HUNDRED_GIGABIT_PER_SECOND,
         "iptrunk_number_of_members": 2,
     }
-    create_ip_trunk_confirm_step = {}
+    create_ip_trunk_confirm_step = {"iptrunk_minimum_links": 1}
     create_ip_trunk_side_a_router_name = {"side_a_node_id": router_side_a}
     create_ip_trunk_side_a_step = {
         "side_a_ae_iface": "lag-1",
         "side_a_ae_geant_a_sid": None,
         "side_a_ae_members": [
-            LAGMember(
-                interface_name=f"Interface{interface}",
-                interface_description=faker.sentence(),
-            )
+            {
+                "interface_name": f"Interface{interface}",
+                "interface_description": faker.sentence(),
+            }
             for interface in range(2)
         ],
     }
@@ -103,7 +102,9 @@ def input_form_wizard_data(request, juniper_router_subscription_factory, nokia_r
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.create_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.SharePointClient")
 def test_successful_iptrunk_creation_with_standard_lso_result(
+    mock_sharepoint_client,
     mock_create_host,
     mock_allocate_v4_network,
     mock_allocate_v6_network,
@@ -118,6 +119,8 @@ def test_successful_iptrunk_creation_with_standard_lso_result(
     mock_create_host.return_value = None
     mock_allocate_v4_network.return_value = faker.ipv4_network(max_subnet=31)
     mock_allocate_v6_network.return_value = faker.ipv6_network(max_subnet=126)
+    mock_sharepoint_client.return_value = MockedSharePointClient
+
     product_id = get_product_id_by_name(ProductName.IP_TRUNK)
     initial_site_data = [{"product": product_id}, *input_form_wizard_data]
     result, process_stat, step_log = run_workflow("create_iptrunk", initial_site_data)
@@ -134,12 +137,10 @@ def test_successful_iptrunk_creation_with_standard_lso_result(
     subscription_id = state["subscription_id"]
     subscription = Iptrunk.from_subscription(subscription_id)
 
-    sorted_sides = sorted(
-        [
-            subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
-            subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
-        ]
-    )
+    sorted_sides = sorted([
+        subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
+        subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
+    ])
     assert subscription.status == "provisioning"
     assert subscription.description == (
         f"IP trunk {sorted_sides[0]} {sorted_sides[1]}, geant_s_sid:{input_form_wizard_data[0]['geant_s_sid']}"
@@ -182,7 +183,9 @@ def test_iptrunk_creation_fails_when_lso_return_code_is_one(
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v6_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.allocate_v4_network")
 @patch("gso.workflows.iptrunk.create_iptrunk.infoblox.create_host_by_ip")
+@patch("gso.workflows.iptrunk.create_iptrunk.SharePointClient")
 def test_successful_iptrunk_creation_with_juniper_interface_names(
+    mock_sharepoint_client,
     mock_create_host,
     mock_allocate_v4_network,
     mock_allocate_v6_network,
@@ -197,6 +200,7 @@ def test_successful_iptrunk_creation_with_juniper_interface_names(
     mock_create_host.return_value = None
     mock_allocate_v4_network.return_value = faker.ipv4_network(max_subnet=31)
     mock_allocate_v6_network.return_value = faker.ipv6_network(max_subnet=126)
+    mock_sharepoint_client.return_value = MockedSharePointClient
     product_id = get_product_id_by_name(ProductName.IP_TRUNK)
     initial_site_data = [{"product": product_id}, *input_form_wizard_data]
     result, process_stat, step_log = run_workflow("create_iptrunk", initial_site_data)
@@ -208,3 +212,4 @@ def test_successful_iptrunk_creation_with_juniper_interface_names(
     result, step_log = resume_workflow(process_stat, step_log, input_data=USER_CONFIRM_EMPTY_FORM)
 
     assert_complete(result)
+    assert mock_execute_playbook.call_count == 6
diff --git a/test/workflows/iptrunk/test_import_iptrunk.py b/test/workflows/iptrunk/test_import_iptrunk.py
new file mode 100644
index 0000000000000000000000000000000000000000..99cdbfd93fc1cd84c7ebf55e7375b1df1d420b97
--- /dev/null
+++ b/test/workflows/iptrunk/test_import_iptrunk.py
@@ -0,0 +1,18 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.iptrunk import Iptrunk
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_import_iptrunk_success(iptrunk_subscription_factory):
+    imported_iptrunk = iptrunk_subscription_factory(is_imported=False)
+    result, _, _ = run_workflow("import_iptrunk", [{"subscription_id": imported_iptrunk}])
+    subscription = Iptrunk.from_subscription(imported_iptrunk)
+
+    assert_complete(result)
+    assert subscription.product.name == ProductName.IP_TRUNK
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
+    assert subscription.insync
diff --git a/test/workflows/iptrunk/test_migrate_iptrunk.py b/test/workflows/iptrunk/test_migrate_iptrunk.py
index 5640cd646b75083f44d5bfbe37e21d1bfa9115a9..cd46d72100d3432cc868cdd8bb6cb710d0e74c44 100644
--- a/test/workflows/iptrunk/test_migrate_iptrunk.py
+++ b/test/workflows/iptrunk/test_migrate_iptrunk.py
@@ -29,6 +29,8 @@ def migrate_form_input(
     iptrunk_side_subscription_factory,
 ):
     use_juniper = getattr(request, "param", UseJuniperSide.NONE)
+    new_side_ae_members_nokia = faker.link_members_nokia()[0:2]
+    new_side_ae_members_juniper = faker.link_members_juniper()[0:2]
 
     if use_juniper == UseJuniperSide.SIDE_A:
         # Nokia -> Juniper
@@ -36,7 +38,7 @@ def migrate_form_input(
         old_subscription = Iptrunk.from_subscription(product_id)
         new_router = juniper_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
-        new_side_ae_members = faker.link_members_juniper()[0:2]
+        new_side_ae_members = new_side_ae_members_juniper
         lag_name = "ae1"
     elif use_juniper == UseJuniperSide.SIDE_B:
         # Juniper -> Nokia
@@ -48,7 +50,7 @@ def migrate_form_input(
         old_subscription = Iptrunk.from_subscription(product_id)
         new_router = nokia_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
-        new_side_ae_members = faker.link_members_nokia()[0:2]
+        new_side_ae_members = new_side_ae_members_nokia
         lag_name = "lag-1"
     elif use_juniper == UseJuniperSide.SIDE_BOTH:
         # Juniper -> Juniper
@@ -60,7 +62,7 @@ def migrate_form_input(
         old_subscription = Iptrunk.from_subscription(product_id)
         new_router = juniper_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
-        new_side_ae_members = faker.link_members_juniper()[0:2]
+        new_side_ae_members = new_side_ae_members_juniper
         lag_name = "ae1"
     else:
         # Nokia -> Nokia
@@ -68,7 +70,7 @@ def migrate_form_input(
         old_subscription = Iptrunk.from_subscription(product_id)
         new_router = nokia_router_subscription_factory()
         replace_side = str(old_subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.subscription.subscription_id)
-        new_side_ae_members = faker.link_members_nokia()[0:2]
+        new_side_ae_members = new_side_ae_members_nokia
         lag_name = "lag-1"
 
     return [
diff --git a/test/workflows/iptrunk/test_modify_trunk_interface.py b/test/workflows/iptrunk/test_modify_trunk_interface.py
index 18524cfd70ba8a61a12db319ff4cd1af921c64fe..2a95cc90fc6428dd19c7e9a2b0a0f77a58ef3a2b 100644
--- a/test/workflows/iptrunk/test_modify_trunk_interface.py
+++ b/test/workflows/iptrunk/test_modify_trunk_interface.py
@@ -158,7 +158,11 @@ def test_iptrunk_modify_trunk_interface_success(
     assert mocked_detach_interfaces_from_lag.call_count == num_lag_ifaces  # 1 time per nokia side
 
     # Assert all subscription properties have been updated correctly
-    assert subscription.description == f"IP trunk, geant_s_sid:{new_sid}"
+    side_names = sorted([
+        subscription.iptrunk.iptrunk_sides[0].iptrunk_side_node.router_site.site_name,
+        subscription.iptrunk.iptrunk_sides[1].iptrunk_side_node.router_site.site_name,
+    ])
+    assert subscription.description == f"IP trunk {side_names[0]} {side_names[1]}, geant_s_sid:{new_sid}"
     assert subscription.iptrunk.geant_s_sid == input_form_iptrunk_data[1]["geant_s_sid"]
     assert subscription.iptrunk.iptrunk_description == input_form_iptrunk_data[1]["iptrunk_description"]
     assert subscription.iptrunk.iptrunk_type == input_form_iptrunk_data[1]["iptrunk_type"]
diff --git a/test/workflows/office_router/__init__.py b/test/workflows/office_router/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/workflows/office_router/test_create_imported_office_router.py b/test/workflows/office_router/test_create_imported_office_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac132d36d4d0653bcce7600c4b83d4416d8af60a
--- /dev/null
+++ b/test/workflows/office_router/test_create_imported_office_router.py
@@ -0,0 +1,34 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.office_router import ImportedOfficeRouter
+from gso.products.product_types.site import Site
+from test.workflows import (
+    assert_complete,
+    extract_state,
+    run_workflow,
+)
+
+
+@pytest.fixture()
+def workflow_input_data(faker, site_subscription_factory):
+    return {
+        "partner": "GEANT",
+        "office_router_site": Site.from_subscription(site_subscription_factory()).site.site_name,
+        "office_router_fqdn": faker.domain_name(levels=4),
+        "office_router_ts_port": faker.port_number(is_user=True),
+        "office_router_lo_ipv4_address": faker.ipv4(),
+        "office_router_lo_ipv6_address": faker.ipv6(),
+    }
+
+
+@pytest.mark.workflow()
+def test_create_imported_office_router_success(workflow_input_data):
+    result, _, _ = run_workflow("create_imported_office_router", [workflow_input_data])
+    state = extract_state(result)
+    imported_office_router = ImportedOfficeRouter.from_subscription(state["subscription_id"])
+
+    assert_complete(result)
+    assert imported_office_router.product.name == ProductName.IMPORTED_OFFICE_ROUTER
+    assert imported_office_router.status == SubscriptionLifecycle.ACTIVE
diff --git a/test/workflows/office_router/test_import_office_router.py b/test/workflows/office_router/test_import_office_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9894e98eeb5abb69843f849e04c6209daff8983
--- /dev/null
+++ b/test/workflows/office_router/test_import_office_router.py
@@ -0,0 +1,18 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.office_router import OfficeRouter
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_import_office_router_success(office_router_subscription_factory):
+    imported_office_router = office_router_subscription_factory(is_imported=False)
+    result, _, _ = run_workflow("import_office_router", [{"subscription_id": imported_office_router}])
+    subscription = OfficeRouter.from_subscription(imported_office_router)
+
+    assert_complete(result)
+    assert subscription.product.name == ProductName.OFFICE_ROUTER
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
+    assert subscription.insync
diff --git a/test/workflows/router/test_create_imported_router.py b/test/workflows/router/test_create_imported_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..6831d0097c53957b3b069b6e541c3ff1a78aa689
--- /dev/null
+++ b/test/workflows/router/test_create_imported_router.py
@@ -0,0 +1,36 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products.product_blocks.router import RouterRole
+from gso.products.product_types.router import ImportedRouter
+from gso.products.product_types.site import Site
+from gso.utils.helpers import iso_from_ipv4
+from gso.utils.shared_enums import Vendor
+from test.workflows import assert_complete, extract_state, run_workflow
+
+
+@pytest.fixture()
+def imported_router_creation_input_form_data(site_subscription_factory, faker):
+    router_site = site_subscription_factory()
+    fake_v4 = faker.ipv4()
+
+    return {
+        "partner": "GEANT",
+        "router_site": Site.from_subscription(router_site).site.site_name,
+        "hostname": faker.pystr(),
+        "ts_port": faker.pyint(),
+        "router_role": faker.random_choices(elements=(RouterRole.P, RouterRole.PE, RouterRole.AMT), length=1)[0],
+        "router_vendor": faker.random_choices(elements=(Vendor.NOKIA, Vendor.JUNIPER), length=1)[0],
+        "router_lo_ipv4_address": fake_v4,
+        "router_lo_ipv6_address": faker.ipv6(),
+        "router_lo_iso_address": iso_from_ipv4(fake_v4),
+    }
+
+
+@pytest.mark.workflow()
+def test_create_imported_router_success(faker, imported_router_creation_input_form_data):
+    result, _, _ = run_workflow("create_imported_router", [imported_router_creation_input_form_data])
+    state = extract_state(result)
+    subscription = ImportedRouter.from_subscription(state["subscription_id"])
+    assert_complete(result)
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
diff --git a/test/workflows/router/test_create_router.py b/test/workflows/router/test_create_router.py
index 33244ae5bbc17feef0dbb7c4abbc72823b058526..5a96dbc702002b80890e7c059c4d32ef70b0b067 100644
--- a/test/workflows/router/test_create_router.py
+++ b/test/workflows/router/test_create_router.py
@@ -9,6 +9,7 @@ from gso.products.product_types.router import Router
 from gso.services.subscriptions import get_product_id_by_name
 from gso.utils.shared_enums import Vendor
 from test import USER_CONFIRM_EMPTY_FORM
+from test.services.conftest import MockedSharePointClient
 from test.workflows import (
     assert_complete,
     assert_lso_interaction_failure,
@@ -40,7 +41,9 @@ def router_creation_input_form_data(site_subscription_factory, faker):
 @patch("gso.workflows.router.create_router.infoblox.hostname_available")
 @patch("gso.workflows.router.create_router.infoblox.find_host_by_fqdn")
 @patch("gso.workflows.router.create_router.infoblox.allocate_host")
+@patch("gso.workflows.router.create_router.SharePointClient")
 def test_create_nokia_router_success(
+    mock_sharepoint_client,
     mock_allocate_host,
     mock_find_host_by_fqdn,
     mock_hostname_available,
@@ -61,6 +64,7 @@ def test_create_nokia_router_success(
     )
     mock_hostname_available.return_value = True
     mock_allocate_host.return_value = str(mock_v4), str(mock_v6)
+    mock_sharepoint_client.return_value = MockedSharePointClient
 
     #  Run workflow
     initial_router_data = [{"product": product_id}, router_creation_input_form_data]
@@ -108,6 +112,7 @@ def test_create_nokia_router_success(
     assert mock_provision_router.call_count == 3
     assert mock_netbox_create_device.call_count == 1
     assert mock_find_host_by_fqdn.call_count == 1
+    assert mock_sharepoint_client.call_count == 1
     assert "ipam_warning" not in state
 
 
diff --git a/test/workflows/router/test_import_router.py b/test/workflows/router/test_import_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..20938a7e9607fbc47bed95add3f08ccae1197ec4
--- /dev/null
+++ b/test/workflows/router/test_import_router.py
@@ -0,0 +1,18 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.router import Router
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_import_router_success(nokia_router_subscription_factory):
+    imported_router = nokia_router_subscription_factory(is_imported=False)
+    result, _, _ = run_workflow("import_router", [{"subscription_id": imported_router}])
+    subscription = Router.from_subscription(imported_router)
+
+    assert_complete(result)
+    assert subscription.product.name == ProductName.ROUTER
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
+    assert subscription.insync
diff --git a/test/workflows/site/test_create_imported_site.py b/test/workflows/site/test_create_imported_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e8b8e35cd6a167e02e1d352440688eb2fc517b5
--- /dev/null
+++ b/test/workflows/site/test_create_imported_site.py
@@ -0,0 +1,34 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products.product_blocks.site import SiteTier
+from gso.products.product_types.site import ImportedSite
+from test.workflows import assert_complete, extract_state, run_workflow
+
+
+@pytest.fixture()
+def workflow_input_data(faker):
+    return {
+        "site_name": faker.site_name(),
+        "site_city": faker.city(),
+        "site_country": faker.country(),
+        "site_country_code": faker.country_code(),
+        "site_latitude": "-74.0060",
+        "site_longitude": "40.7128",
+        "site_bgp_community_id": faker.pyint(),
+        "site_internal_id": faker.pyint(),
+        "site_tier": SiteTier.TIER1,
+        "site_ts_address": faker.ipv4(),
+        "partner": "GEANT",
+    }
+
+
+@pytest.mark.workflow()
+def test_create_imported_site_success(workflow_input_data):
+    result, _, _ = run_workflow("create_imported_site", [workflow_input_data])
+
+    assert_complete(result)
+    state = extract_state(result)
+    subscription_id = state["subscription_id"]
+    subscription = ImportedSite.from_subscription(subscription_id)
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
diff --git a/test/workflows/site/test_create_site.py b/test/workflows/site/test_create_site.py
index e31576152634045e9efe57b864a51785495a41d1..f6c196da217320506521e9d993b7e7609f8e05db 100644
--- a/test/workflows/site/test_create_site.py
+++ b/test/workflows/site/test_create_site.py
@@ -4,7 +4,6 @@ from pydantic_forms.exceptions import FormValidationError
 from gso.products import ProductName
 from gso.products.product_blocks.site import SiteTier
 from gso.products.product_types.site import Site
-from gso.services.partners import get_partner_by_name
 from gso.services.subscriptions import get_product_id_by_name
 from test.workflows import assert_complete, extract_state, run_workflow
 
@@ -65,7 +64,7 @@ def test_site_name_is_incorrect(responses, faker):
             "site_internal_id": faker.pyint(),
             "site_tier": SiteTier.TIER1,
             "site_ts_address": faker.ipv4(),
-            "partner": get_partner_by_name("GEANT")["partner_id"],
+            "partner": "GEANT",
         },
     ]
 
diff --git a/test/workflows/site/test_import_site.py b/test/workflows/site/test_import_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac476c107498f342e2e206e42d6457c966fd6732
--- /dev/null
+++ b/test/workflows/site/test_import_site.py
@@ -0,0 +1,18 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.site import Site
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_import_site_success(site_subscription_factory):
+    imported_site = site_subscription_factory(is_imported=False)
+    result, _, _ = run_workflow("import_site", [{"subscription_id": imported_site}])
+    subscription = Site.from_subscription(imported_site)
+
+    assert_complete(result)
+    assert subscription.product.name == ProductName.SITE
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
+    assert subscription.insync
diff --git a/test/workflows/super_pop_switch/__init__.py b/test/workflows/super_pop_switch/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/workflows/super_pop_switch/test_create_imported_super_pop_switch.py b/test/workflows/super_pop_switch/test_create_imported_super_pop_switch.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d0dcd17d9f641747c3223b9738c6ef42bcf58ca
--- /dev/null
+++ b/test/workflows/super_pop_switch/test_create_imported_super_pop_switch.py
@@ -0,0 +1,33 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.site import Site
+from gso.products.product_types.super_pop_switch import ImportedSuperPopSwitch
+from test.workflows import (
+    assert_complete,
+    extract_state,
+    run_workflow,
+)
+
+
+@pytest.fixture()
+def workflow_input_data(faker, site_subscription_factory):
+    return {
+        "partner": "GEANT",
+        "hostname": faker.domain_word(),
+        "super_pop_switch_site": Site.from_subscription(site_subscription_factory()).site.site_name,
+        "super_pop_switch_ts_port": faker.port_number(is_user=True),
+        "super_pop_switch_mgmt_ipv4_address": faker.ipv4(),
+    }
+
+
+@pytest.mark.workflow()
+def test_create_imported_super_pop_switch_success(workflow_input_data):
+    result, _, _ = run_workflow("create_imported_super_pop_switch", [workflow_input_data])
+    state = extract_state(result)
+    imported_super_pop_switch = ImportedSuperPopSwitch.from_subscription(state["subscription_id"])
+
+    assert_complete(result)
+    assert imported_super_pop_switch.product.name == ProductName.IMPORTED_SUPER_POP_SWITCH
+    assert imported_super_pop_switch.status == SubscriptionLifecycle.ACTIVE
diff --git a/test/workflows/super_pop_switch/test_import_super_pop_switch.py b/test/workflows/super_pop_switch/test_import_super_pop_switch.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d20774c7d91470f6210da9e19d15632794bbf7c
--- /dev/null
+++ b/test/workflows/super_pop_switch/test_import_super_pop_switch.py
@@ -0,0 +1,18 @@
+import pytest
+from orchestrator.types import SubscriptionLifecycle
+
+from gso.products import ProductName
+from gso.products.product_types.super_pop_switch import SuperPopSwitch
+from test.workflows import assert_complete, run_workflow
+
+
+@pytest.mark.workflow()
+def test_import_super_pop_switch_success(super_pop_switch_subscription_factory):
+    imported_super_pop_switch = super_pop_switch_subscription_factory(is_imported=False)
+    result, _, _ = run_workflow("import_super_pop_switch", [{"subscription_id": imported_super_pop_switch}])
+    subscription = SuperPopSwitch.from_subscription(imported_super_pop_switch)
+
+    assert_complete(result)
+    assert subscription.product.name == ProductName.SUPER_POP_SWITCH
+    assert subscription.status == SubscriptionLifecycle.ACTIVE
+    assert subscription.insync
diff --git a/tox.ini b/tox.ini
index de7627e2dd6ed3c6700daff2e5016d50e4128ba0..a2f334fcf4574f5463b87e4cc3efd75074565602 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,8 +1,9 @@
 [tox]
-envlist = py311
+envlist = py312
 
 [pytest]
-markers = workflow
+markers = "workflow,noautofixt"
+filterwarnings = "ignore,default:::gso"
 
 [testenv]
 passenv = DATABASE_URI_TEST,SKIP_ALL_TESTS,ENVIRONMENT_IGNORE_MUTATION_DISABLED