diff --git a/compendium_v2/db/presentation_models.py b/compendium_v2/db/presentation_models.py
index 52c6411ccc8a5c94df4ecf766e743d71d7e3ed81..7208c516808013de893a67328af2e83e2d35dacb 100644
--- a/compendium_v2/db/presentation_models.py
+++ b/compendium_v2/db/presentation_models.py
@@ -206,8 +206,8 @@ class Standards(db.Model):
     crisis_management_procedure: Mapped[Optional[bool]]
 
 
-class CrisisExcercises(db.Model):
-    __tablename__ = 'crisis_excercises'
+class CrisisExercises(db.Model):
+    __tablename__ = 'crisis_exercises'
     nren_id: Mapped[int_pk_fkNREN]
     nren: Mapped[NREN] = relationship(lazy='joined')
     year: Mapped[int_pk]
diff --git a/compendium_v2/migrations/versions/c7179d69907c_fix_typo_in_table_name.py b/compendium_v2/migrations/versions/c7179d69907c_fix_typo_in_table_name.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d822d1d626cd6145fae237ba89df2759a142175
--- /dev/null
+++ b/compendium_v2/migrations/versions/c7179d69907c_fix_typo_in_table_name.py
@@ -0,0 +1,44 @@
+"""fix typo in table name
+
+Revision ID: c7179d69907c
+Revises: 1fbc4582c0ab
+Create Date: 2023-09-22 15:33:04.699900
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = 'c7179d69907c'
+down_revision = '1fbc4582c0ab'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
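+    # note: rows in the misspelled 'crisis_excercises' table are dropped here, not copied over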
+    op.create_table(
+        'crisis_exercises',
+        sa.Column('nren_id', sa.Integer(), nullable=False),
+        sa.Column('year', sa.Integer(), nullable=False),
+        sa.Column('exercise_descriptions', sa.JSON(), nullable=False),
+        sa.ForeignKeyConstraint(['nren_id'], ['nren.id'], name=op.f('fk_crisis_exercises_nren_id_nren')),
+        sa.PrimaryKeyConstraint('nren_id', 'year', name=op.f('pk_crisis_exercises'))
+    )
+    op.drop_table('crisis_excercises')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        'crisis_excercises',
+        sa.Column('nren_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('year', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('exercise_descriptions', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=False),
+        sa.ForeignKeyConstraint(['nren_id'], ['nren.id'], name='fk_crisis_excercises_nren_id_nren'),
+        sa.PrimaryKeyConstraint('nren_id', 'year', name='pk_crisis_excercises')
+    )
+    op.drop_table('crisis_exercises')
+    # ### end Alembic commands ###
diff --git a/compendium_v2/publishers/survey_publisher.py b/compendium_v2/publishers/survey_publisher.py
index ccd4b0940343d79fa4d35d4ce97bb5aaebaee602..c22c8967a605ef07ca0bc0e09538e19f72028614 100644
--- a/compendium_v2/publishers/survey_publisher.py
+++ b/compendium_v2/publishers/survey_publisher.py
@@ -18,7 +18,7 @@ from compendium_v2.db import db
 from compendium_v2.db.presentation_models import BudgetEntry, ChargingStructure, ECProject, ExternalConnections, \
     InstitutionURLs, NrenStaff, ParentOrganization, Policy, SubOrganization, TrafficVolume, ExternalConnection, \
     FundingSource, CentralProcurement, ServiceManagement, ServiceUserTypes, EOSCListings, \
-    Standards, CrisisExcercises, SecurityControls, ConnectedProportion, ConnectivityLevel, \
+    Standards, CrisisExercises, SecurityControls, ConnectedProportion, ConnectivityLevel, \
     ConnectionCarrier, ConnectivityLoad, ConnectivityGrowth, CommercialConnectivity, \
     CommercialChargingLevel, RemoteCampuses, DarkFibreLease, DarkFibreInstalled, FibreLight, \
     NetworkMapUrls, MonitoringTools, PassiveMonitoring, TrafficStatistics, SiemVendors, \
@@ -59,7 +59,7 @@ def _map_2023(nren, answers) -> None:
     for table_class in [BudgetEntry, ChargingStructure, ECProject, ExternalConnections,
                         InstitutionURLs, NrenStaff, ParentOrganization, Policy, SubOrganization, TrafficVolume,
                         FundingSource, CentralProcurement, ServiceManagement, ServiceUserTypes, EOSCListings,
-                        Standards, CrisisExcercises, SecurityControls, ConnectedProportion, ConnectivityLevel,
+                        Standards, CrisisExercises, SecurityControls, ConnectedProportion, ConnectivityLevel,
                         ConnectionCarrier, ConnectivityLoad, ConnectivityGrowth, CommercialConnectivity,
                         CommercialChargingLevel, RemoteCampuses, DarkFibreLease, DarkFibreInstalled, FibreLight,
                         NetworkMapUrls, MonitoringTools, PassiveMonitoring, TrafficStatistics, SiemVendors,
@@ -210,7 +210,7 @@ def _map_2023(nren, answers) -> None:
 
     crisis_exercises = answers.get("crisis_exercises")
     if crisis_exercises:
-        db.session.add(CrisisExcercises(
+        db.session.add(CrisisExercises(
             nren_id=nren.id, nren=nren, year=year,
             exercise_descriptions=crisis_exercises
         ))
@@ -356,14 +356,13 @@ def _map_2023(nren, answers) -> None:
             light_description=fibre_light
         ))
 
-    network_map_urls = answers.get("network_map_urls")
-    if network_map_urls:
-        urls = [i.get("network_map_url", "") for i in network_map_urls if i.get("network_map_url", "") != ""]
-        if urls:
-            db.session.add(NetworkMapUrls(
-                nren_id=nren.id, nren=nren, year=year,
-                urls=urls
-            ))
+    network_map_urls = answers.get("network_map_urls", [])
+    urls = [i.get("network_map_url", "") for i in network_map_urls if i.get("network_map_url", "") != ""]
+    if urls:
+        db.session.add(NetworkMapUrls(
+            nren_id=nren.id, nren=nren, year=year,
+            urls=urls
+        ))
 
     monitoring_tools = answers.get("monitoring_tools", [])
     netflow_vendors = answers.get("netflow_vendors", "")
@@ -390,7 +389,7 @@ def _map_2023(nren, answers) -> None:
     traffic_statistics = answers.get("traffic_statistics")
     if traffic_statistics:
         traffic_statistics = traffic_statistics == "Yes"
-        urls = answers.get("traffic_statistics_urls")
+        urls = answers.get("traffic_statistics_urls", [])
         urls = [i.get("traffic_statistics_url", "") for i in urls if i.get("traffic_statistics_url")]
         db.session.add(TrafficStatistics(
             nren_id=nren.id, nren=nren, year=year,
diff --git a/compendium_v2/publishers/survey_publisher_old_db_2022.py b/compendium_v2/publishers/survey_publisher_old_db_2022.py
index f071d75d001076811d8320a06f5b06a63f6dc9e1..5c72d1c52e17e475bd42d2573a9fb023e39c6570 100644
--- a/compendium_v2/publishers/survey_publisher_old_db_2022.py
+++ b/compendium_v2/publishers/survey_publisher_old_db_2022.py
@@ -6,6 +6,7 @@ This module loads the survey data from 2022 from the old survey database into pr
 Registered as click cli command when installing compendium-v2.
 
 """
+from decimal import Decimal
 import logging
 import click
 import enum
@@ -17,7 +18,9 @@ from sqlalchemy import delete, text
 from collections import defaultdict
 
 import compendium_v2
-from compendium_v2.db.presentation_model_enums import FeeType
+from compendium_v2.conversion.mapping import CHARGING_LEVELS, CONNECTION, INTERCONNECTION, SERVICE_USER_TYPE_TO_CODE
+from compendium_v2.db.presentation_model_enums import CommercialCharges, CommercialConnectivityCoverage, \
+    ConnectionMethod, FeeType, ServiceCategory, UserCategory, YesNoPlanned
 from compendium_v2.environment import setup_logging
 from compendium_v2.config import load
 from compendium_v2.publishers.helpers import extract_urls
@@ -60,14 +63,14 @@ WHERE
 ORDER BY n.id, a.question_id, a.updated_at DESC
 """
 
-INSTITUTIONS_URLS_QUERY_UNTIL_2022 = """
+RECURSIVE_QUERY = """
     WITH RECURSIVE parent_questions AS (
         -- Base case
         SELECT q.id, q.equivalent_question_id, c.year, q.title
         FROM questions q
                  JOIN sections s ON q.section_id = s.id
                  JOIN compendia c ON s.compendium_id = c.id
-        WHERE q.id = 16507
+        WHERE q.id = {}
         UNION ALL
         -- Recursive case
         SELECT q.id, q.equivalent_question_id, c.year, q.title
@@ -147,9 +150,10 @@ def query_budget():
     return db.session.execute(text(BUDGET_QUERY), bind_arguments={'bind': db.engines[survey_model.SURVEY_DB_BIND]})
 
 
-def query_institutions_urls():
-    return db.session.execute(text(INSTITUTIONS_URLS_QUERY_UNTIL_2022),
-                              bind_arguments={'bind': db.engines[survey_model.SURVEY_DB_BIND]})
+def recursive_query(question_id_2022):
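+    # question_id_2022 is always a hard-coded question id from this module, not user input,
+    # so formatting it directly into the query string is acceptable here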
+    assert question_id_2022
+    query = RECURSIVE_QUERY.format(question_id_2022)
+    return db.session.execute(text(query), bind_arguments={'bind': db.engines[survey_model.SURVEY_DB_BIND]})
 
 
 def query_funding_sources():
@@ -167,6 +171,17 @@ def query_question_id(question_id: int, year: int = 2022):
     return db.session.execute(text(query), bind_arguments={'bind': db.engines[survey_model.SURVEY_DB_BIND]})
 
 
+def _parse_json_urls(value, nren_name):
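+    # some answers hold a bare URL string rather than a JSON list; wrap them in brackets
+    # so json.loads can parse either form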
+    if value and not value.startswith('['):
+        value = f'[{value}]'
+
+    try:
+        return [url.strip().strip('/') for url in json.loads(value) if url.strip()]
+    except json.decoder.JSONDecodeError:
+        logger.info(f'JSON decode error while parsing URLs for {nren_name}.')
+        return []
+
+
 def transfer_budget(nren_dict):
     rows = query_budget()
     for row in rows:
@@ -200,17 +215,7 @@ def transfer_budget(nren_dict):
 
 
 def transfer_institutions_urls(nren_dict):
-    def _parse_json(value):
-        if value and not value.startswith('['):
-            value = f'[{value}]'
-
-        try:
-            return [url.strip() for url in json.loads(value) if url.strip()]
-        except json.decoder.JSONDecodeError:
-            logger.info(f'JSON decode error for institution urls for {nren_name}.')
-            return []
-
-    rows = query_institutions_urls()
+    rows = recursive_query(16507)
 
     for row in rows:
         answer_id, nren_name, year, answer = row
@@ -219,7 +224,7 @@ def transfer_institutions_urls(nren_dict):
             continue
 
         urls = extract_urls(text=answer)
-        urls_json = _parse_json(answer)
+        urls_json = _parse_json_urls(answer, nren_name)
         if urls != urls_json:
             logger.info(f'Institution URLs for {nren_name} do not match between json and regex. {urls} != {urls_json}')
 
@@ -556,7 +561,613 @@ def transfer_policies(nren_dict):
     db.session.commit()
 
 
-def _cli(config, app):
+def transfer_central_procurement(nren_dict):
+    rows = recursive_query(16482)
+    amounts = recursive_query(16483)
+    amounts = {(nren_name, year): Decimal(answer.strip('"')) for answer_id, nren_name, year, answer in amounts}
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.CentralProcurement(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            central_procurement=answer == '"Yes"',
+            amount=amounts.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_service_management(nren_dict):
+    framework = recursive_query(16484)
+    framework = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in framework}
+    targets = recursive_query(16485)
+    targets = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in targets}
+
+    for nren_name, year in framework.keys() | targets.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.ServiceManagement(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            service_management_framework=framework.get((nren_name, year)),
+            service_level_targets=targets.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_service_user_types(nren_dict):
+    categories = [
+        (ServiceCategory.identity, 16488),
+        (ServiceCategory.network_services, 16489),
+        (ServiceCategory.collaboration, 16490),
+        (ServiceCategory.security, 16491),
+        (ServiceCategory.isp_support, 16492),
+        (ServiceCategory.storage_and_hosting, 16493),
+        (ServiceCategory.multimedia, 16494),
+        (ServiceCategory.professional_services, 16495)
+    ]
+    for service_category, question_id in categories:
+        rows = recursive_query(question_id)
+        for answer_id, nren_name, year, answer in rows:
+            if nren_name not in nren_dict:
+                logger.warning(f'{nren_name} unknown. Skipping.')
+                continue
+            for user_cat_db in json.loads(answer):
+                user_cat = UserCategory[SERVICE_USER_TYPE_TO_CODE[user_cat_db]]
+                new_entry = presentation_models.ServiceUserTypes(
+                    nren=nren_dict[nren_name],
+                    nren_id=nren_dict[nren_name].id,
+                    year=year,
+                    user_category=user_cat,
+                    service_category=service_category
+                )
+                db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_standards(nren_dict):
+    audits = recursive_query(16499)
+    audits = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in audits}
+    audit_specifics = recursive_query(16500)
+    audit_specifics = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in audit_specifics}
+    bcp = recursive_query(16501)
+    bcp = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in bcp}
+    bcp_specifics = recursive_query(16502)
+    bcp_specifics = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in bcp_specifics}
+    cmp = recursive_query(16762)
+    cmp = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in cmp}
+
+    for nren_name, year in audits.keys() | audit_specifics.keys() | bcp.keys() | bcp_specifics.keys() | cmp.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.Standards(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            audits=audits.get((nren_name, year)),
+            audit_specifics=audit_specifics.get((nren_name, year), ""),
+            business_continuity_plans=bcp.get((nren_name, year)),
+            business_continuity_plans_specifics=bcp_specifics.get((nren_name, year), ""),
+            crisis_management_procedure=cmp.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_crisis_exercises(nren_dict):
+    rows = recursive_query(16763)
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.CrisisExercises(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            exercise_descriptions=json.loads(answer)
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_security_controls(nren_dict):
+    sc = recursive_query(16503)
+    sc = {(nren_name, year): json.loads(answer) for answer_id, nren_name, year, answer in sc}
+    sc_other = recursive_query(16504)
+    sc_other = {(nren_name, year): json.loads(answer) for answer_id, nren_name, year, answer in sc_other}
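+    # the free-text "other" answer may decode to a single string instead of a list; normalise it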
+    for key, value in sc_other.items():
+        if not isinstance(value, list):
+            sc_other[key] = [value]
+
+    for nren_name, year in sc.keys() | sc_other.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        full_list = sc.get((nren_name, year), [])
+        other_entries = [e for e in sc_other.get((nren_name, year), []) if e and e.lower() not in ["n/a", "-"]]
+        other_entry = ", ".join(other_entries)
+        if other_entry:
+            full_list.append(other_entry)
+            if "Other" in full_list:
+                full_list.remove("Other")
+        if full_list:
+            new_entry = presentation_models.SecurityControls(
+                nren=nren_dict[nren_name],
+                nren_id=nren_dict[nren_name].id,
+                year=year,
+                security_control_descriptions=full_list
+            )
+            db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_eosc_listings(nren_dict):
+    rows = recursive_query(16497)
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.EOSCListings(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            service_names=[x for x in json.loads(answer) if x]
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_commercial_connectivity(nren_dict):
+
+    simple_connection = {
+        key.replace(" ", "").replace("-", "").replace("/", "").lower(): value for key, value in CONNECTION.items()
+    }
+
+    def get_coverage(db_string):
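+        # normalise the free-text answer (quotes, spaces, dashes, slashes, case) before
+        # looking it up in the CONNECTION mapping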
+        cleaned_str = db_string.strip('"').replace(" ", "").replace("-", "").replace("/", "").lower()
+        key = simple_connection[cleaned_str]
+        return CommercialConnectivityCoverage[key]
+
+    sp = recursive_query(16646)
+    sp = {(nren_name, year): get_coverage(answer) for answer_id, nren_name, year, answer in sp}
+    collab = recursive_query(16647)
+    collab = {(nren_name, year): get_coverage(answer) for answer_id, nren_name, year, answer in collab}
+    r_e = recursive_query(16648)
+    r_e = {(nren_name, year): get_coverage(answer) for answer_id, nren_name, year, answer in r_e}
+    general = recursive_query(16649)
+    general = {(nren_name, year): get_coverage(answer) for answer_id, nren_name, year, answer in general}
+    spin_off = recursive_query(16650)
+    spin_off = {(nren_name, year): get_coverage(answer) for answer_id, nren_name, year, answer in spin_off}
+
+    for nren_name, year in sp.keys() | collab.keys() | r_e.keys() | general.keys() | spin_off.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.CommercialConnectivity(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            commercial_r_and_e=r_e.get((nren_name, year)),
+            commercial_general=general.get((nren_name, year)),
+            commercial_collaboration=collab.get((nren_name, year)),
+            commercial_service_provider=sp.get((nren_name, year)),
+            university_spin_off=spin_off.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_commercial_charging_level(nren_dict):
+    simple_charging = {
+        key.replace(" ", "").replace("-", "").replace("/", "").lower(): value for key, value in CHARGING_LEVELS.items()
+    }
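+    # extra keys for answer variants in the old data that do not normalise onto CHARGING_LEVELS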
+    simple_charging["nochargesapplied"] = "no_charges_if_r_e_requested"
+    simple_charging['nochargesappliedifrequestedbyr+eusers\\"needed?'] = "no_charges_if_r_e_requested"
+
+    def get_charging(db_string):
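+        # some answers are JSON lists with a single element; unwrap those before mapping
+        # onto CommercialCharges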
+        if db_string[0] == '[':
+            db_string = json.loads(db_string)[0]
+        cleaned_str = db_string.strip('"').replace(" ", "").replace("-", "").replace("/", "").lower()
+        key = simple_charging[cleaned_str]
+        return CommercialCharges[key]
+
+    collab = recursive_query(16652)
+    collab = {(nren_name, year): get_charging(answer) for answer_id, nren_name, year, answer in collab}
+    services = recursive_query(16653)
+    services = {(nren_name, year): get_charging(answer) for answer_id, nren_name, year, answer in services}
+    peering = recursive_query(16654)
+    peering = {(nren_name, year): get_charging(answer) for answer_id, nren_name, year, answer in peering}
+
+    for nren_name, year in collab.keys() | services.keys() | peering.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.CommercialChargingLevel(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            collaboration=collab.get((nren_name, year)),
+            service_supplier=services.get((nren_name, year)),
+            direct_peering=peering.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_fibre_light(nren_dict):
+    fibre = recursive_query(16668)
+    fibre = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in fibre}
+    fibre_comment = recursive_query(16669)
+    fibre_comment = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in fibre_comment}
+
+    for nren_name, year in fibre.keys() | fibre_comment.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        description = fibre.get((nren_name, year))
+        comment = fibre_comment.get((nren_name, year))
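+        # the free-text comment replaces the description when the main answer is empty or
+        # "Other"; a comment alongside any other answer is only logged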
+        if description and description[0:5] != "Other":
+            if comment and comment.replace("-", "") != "":
+                logger.warning(
+                    f'fibre light comment while description is not "Other": {description} {comment} {nren_name}.'
+                )
+        else:
+            description = comment
+
+        if description:
+            new_entry = presentation_models.FibreLight(
+                nren=nren_dict[nren_name],
+                nren_id=nren_dict[nren_name].id,
+                year=year,
+                light_description=description
+            )
+            db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_network_map_urls(nren_dict):
+    rows = recursive_query(16670)
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        urls = extract_urls(text=answer)
+        urls_json = _parse_json_urls(answer, nren_name)
+        if urls != urls_json:
+            logger.info(f'Network map URLs for {nren_name} do not match between json and regex. {urls} != {urls_json}')
+
+        if not urls:
+            logger.info(f'{nren_name} has no urls for {year}. Skipping.')
+            continue
+
+        new_entry = presentation_models.NetworkMapUrls(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            urls=urls
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_traffic_statistics(nren_dict):
+    stats = recursive_query(16677)
+    stat_urls = recursive_query(16678)
+    stat_urls = {(nren_name, year): answer for answer_id, nren_name, year, answer in stat_urls}
+
+    for answer_id, nren_name, year, answer in stats:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        db_urls = stat_urls.get((nren_name, year))
+        if db_urls:
+            urls = extract_urls(text=db_urls)
+            urls_json = _parse_json_urls(db_urls, nren_name)
+            if urls != urls_json:
+                logger.info(
+                    f'Traffic stat URLs for {nren_name} do not match between json and regex. {urls} != {urls_json}'
+                )
+            db_urls = urls
+        else:
+            db_urls = []
+
+        new_entry = presentation_models.TrafficStatistics(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            traffic_statistics=answer == '"Yes"',
+            urls=db_urls
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_siem_vendors(nren_dict):
+    vendors = recursive_query(16679)
+    vendors = {(nren_name, year): json.loads(answer) for answer_id, nren_name, year, answer in vendors}
+    vendor_comment = recursive_query(16680)
+    vendor_comment = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in vendor_comment}
+
+    for nren_name, year in vendors.keys() | vendor_comment.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        vendor_names = vendors.get((nren_name, year), [])
+        comment = vendor_comment.get((nren_name, year))
+        if comment:
+            vendor_names.append(comment)
+            if "Other" in vendor_names:
+                vendor_names.remove("Other")
+
+        new_entry = presentation_models.SiemVendors(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            vendor_names=vendor_names
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_certificate_providers(nren_dict):
+    providers = recursive_query(16681)
+    providers = {(nren_name, year): json.loads(answer) for answer_id, nren_name, year, answer in providers}
+    prov_comment = recursive_query(16682)
+    prov_comment = {(nren_name, year): answer.strip('"') for answer_id, nren_name, year, answer in prov_comment}
+
+    for nren_name, year in providers.keys() | prov_comment.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        provider_names = providers.get((nren_name, year), [])
+        comment = prov_comment.get((nren_name, year))
+        if comment:
+            provider_names.append(comment)
+            if "Other" in provider_names:
+                provider_names.remove("Other")
+
+        new_entry = presentation_models.CertificateProviders(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            provider_names=provider_names
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_weather_map(nren_dict):
+    weather = recursive_query(16683)
+    urls = recursive_query(16684)
+    urls = {(nren_name, year): answer.strip('" ') for answer_id, nren_name, year, answer in urls}
+
+    for answer_id, nren_name, year, answer in weather:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        url = urls.get((nren_name, year), "")
+        if url:
+            found_urls = extract_urls(text=url)
+            if found_urls:
+                url = found_urls[0]
+            else:
+                url = ""
+
+        orig_url = urls.get((nren_name, year), "").strip("/")
+        if url != orig_url:
+            logger.info(f'Weather map URL for {nren_name} does not match the stored answer: {url} != {orig_url}')
+
+        new_entry = presentation_models.WeatherMap(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            weather_map=answer == '"Yes"',
+            url=url
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_pert_team(nren_dict):
+    rows = recursive_query(16685)
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        if answer == "null":
+            continue
+        pert = YesNoPlanned[answer.strip('"').lower()]
+        new_entry = presentation_models.PertTeam(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            pert_team=pert
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_alien_wave(nren_dict):
+    alien = recursive_query(16687)
+    alien = {
+        (nren_name, year): YesNoPlanned[answer.strip('"').lower()] for answer_id, nren_name, year, answer in alien
+    }
+    nr = recursive_query(16688)
+    nr = {(nren_name, year): int(answer.strip('"')) for answer_id, nren_name, year, answer in nr}
+    internal = recursive_query(16689)
+    internal = {(nren_name, year): answer == '"Yes"' for answer_id, nren_name, year, answer in internal}
+
+    for nren_name, year in alien.keys() | nr.keys() | internal.keys():
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        new_entry = presentation_models.AlienWave(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            alien_wave_third_pary=alien.get((nren_name, year)),
+            nr_of_alien_wave_third_party_services=nr.get((nren_name, year)),
+            alien_wave_internal=internal.get((nren_name, year))
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_external_connections(nren_dict):
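+    # each old-survey question id maps to (connection slot 0-9, field name); together they
+    # describe up to ten external connections per NREN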
+    question_nrs = {
+        16694: (5, "capacity"),
+        16695: (7, "capacity"),
+        16696: (6, "capacity"),
+        16697: (7, "from_organization"),
+        16698: (1, "to_organization"),
+        16699: (8, "to_organization"),
+        16700: (9, "to_organization"),
+        16701: (1, "from_organization"),
+        16702: (8, "capacity"),
+        16703: (5, "to_organization"),
+        16704: (0, "link_name"),
+        16705: (1, "link_name"),
+        16706: (9, "capacity"),
+        16707: (2, "link_name"),
+        16708: (0, "from_organization"),
+        16709: (4, "link_name"),
+        16710: (3, "link_name"),
+        16711: (9, "link_name"),
+        16712: (7, "link_name"),
+        16713: (8, "link_name"),
+        16714: (6, "link_name"),
+        16715: (5, "link_name"),
+        16716: (4, "from_organization"),
+        16717: (5, "from_organization"),
+        16718: (6, "from_organization"),
+        16719: (2, "to_organization"),
+        16720: (3, "to_organization"),
+        16721: (4, "to_organization"),
+        16722: (6, "to_organization"),
+        16723: (7, "to_organization"),
+        16724: (2, "interconnection_method"),
+        16725: (3, "interconnection_method"),
+        16726: (4, "interconnection_method"),
+        16727: (5, "interconnection_method"),
+        16728: (8, "from_organization"),
+        16729: (9, "from_organization"),
+        16730: (0, "to_organization"),
+        16731: (0, "capacity"),
+        16732: (1, "capacity"),
+        16733: (2, "capacity"),
+        16734: (3, "capacity"),
+        16735: (4, "capacity"),
+        16736: (3, "from_organization"),
+        16737: (2, "from_organization"),
+        16738: (1, "interconnection_method"),
+        16739: (7, "interconnection_method"),
+        16740: (8, "interconnection_method"),
+        16741: (0, "interconnection_method"),
+        16742: (9, "interconnection_method"),
+        16743: (6, "interconnection_method")
+    }
+
+    def empty_connection_dict():
+        return {'link_name': '', 'capacity': None, 'from_organization': '',
+                'to_organization': '', 'interconnection_method': None}
+
+    connection_dicts = {}
+    nren_year_set = set()
+    for question_id, (connection_nr, field) in question_nrs.items():
+        rows = recursive_query(question_id)
+        for answer_id, nren_name, year, answer in rows:
+            nren_year_set.add((nren_name, year))
+            conn_dict = connection_dicts.setdefault((nren_name, year, connection_nr), empty_connection_dict())
+            conn_dict[field] = answer.strip('" ')
+
+    int_simple = {key.replace(" ", "").lower(): value for key, value in INTERCONNECTION.items()}
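+    # extra key for an answer variant that does not normalise onto the INTERCONNECTION mapping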
+    int_simple['openexchangepoi'] = "open_exchange"
+
+    for (nren_name, year, connection_nr), conn_dict in connection_dicts.items():
+        if conn_dict['capacity']:
+            try:
+                conn_dict['capacity'] = str(Decimal(conn_dict['capacity'].split('G')[0].strip()))
+            except:  # noqa: E722
+                logger.warning(f'Capacity could not be converted for {nren_name}: {conn_dict["capacity"]}.')
+                conn_dict['capacity'] = None
+        if conn_dict['interconnection_method']:
+            int_conn = int_simple[conn_dict['interconnection_method'].replace(" ", "").lower()]
+            conn_dict['interconnection_method'] = ConnectionMethod[int_conn].value
+
+    for nren_name, year in nren_year_set:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        connections = []
+        for connection_nr in range(0, 10):
+            conn = connection_dicts.get((nren_name, year, connection_nr))
+            if conn:
+                connections.append(conn)
+
+        new_entry = presentation_models.ExternalConnections(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            connections=connections
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def transfer_network_automation(nren_dict):
+    rows = recursive_query(16757)
+    tasks = recursive_query(16758)
+    tasks = {(nren_name, year): json.loads(answer) for answer_id, nren_name, year, answer in tasks}
+
+    for answer_id, nren_name, year, answer in rows:
+        if nren_name not in nren_dict:
+            logger.warning(f'{nren_name} unknown. Skipping.')
+            continue
+
+        network_automation = YesNoPlanned[answer.strip('"').lower()]
+        specifics = tasks.get((nren_name, year), [])
+        specifics = [s for s in specifics if s]
+
+        new_entry = presentation_models.NetworkAutomation(
+            nren=nren_dict[nren_name],
+            nren_id=nren_dict[nren_name].id,
+            year=year,
+            network_automation=network_automation,
+            network_automation_specifics=specifics
+        )
+        db.session.merge(new_entry)
+    db.session.commit()
+
+
+def _cli(app):
     with app.app_context():
         nren_dict = helpers.get_uppercase_nren_dict()
         transfer_budget(nren_dict)
@@ -569,6 +1180,29 @@ def _cli(config, app):
         transfer_policies(nren_dict)
         transfer_institutions_urls(nren_dict)
 
+        transfer_central_procurement(nren_dict)
+        transfer_service_management(nren_dict)
+        transfer_service_user_types(nren_dict)
+        transfer_standards(nren_dict)
+        transfer_crisis_exercises(nren_dict)
+        transfer_security_controls(nren_dict)
+        transfer_eosc_listings(nren_dict)
+
+        transfer_commercial_connectivity(nren_dict)
+        transfer_commercial_charging_level(nren_dict)
+
+        transfer_fibre_light(nren_dict)
+        transfer_network_map_urls(nren_dict)
+        transfer_traffic_statistics(nren_dict)
+        transfer_siem_vendors(nren_dict)
+        transfer_certificate_providers(nren_dict)
+        transfer_weather_map(nren_dict)
+        transfer_pert_team(nren_dict)
+        transfer_alien_wave(nren_dict)
+        transfer_external_connections(nren_dict)
+        # traffic ratio was freeform text
+        transfer_network_automation(nren_dict)
+
 
 @click.command()
 @click.option('--config', type=click.STRING, default='config.json')
@@ -578,7 +1212,7 @@ def cli(config):
     app_config['SQLALCHEMY_BINDS'] = {survey_model.SURVEY_DB_BIND: app_config['SURVEY_DATABASE_URI']}
 
     app = compendium_v2._create_app_with_db(app_config)
-    _cli(app_config, app)
+    _cli(app)
 
 
 if __name__ == "__main__":
diff --git a/test/test_survey_publisher.py b/test/test_survey_publisher.py
index 2889eebbc26d928de2af57e3df5d346fd1aee742..140356e22e7db89e5e68a3dc0e3f7b9d6d24999a 100644
--- a/test/test_survey_publisher.py
+++ b/test/test_survey_publisher.py
@@ -128,7 +128,7 @@ def test_v2_publisher_full(app):
         assert standards.business_continuity_plans_specifics == "no"
         assert not standards.crisis_management_procedure
 
-        crisis_excercises = db.session.scalar(select(presentation_models.CrisisExcercises))
+        crisis_excercises = db.session.scalar(select(presentation_models.CrisisExercises))
         assert crisis_excercises.exercise_descriptions == [
             "geant_workshops", "national_excercises", "tabletop_exercises", "other_excercises",
             "none", "simulation_excercises", "real_crisis"
diff --git a/test/test_survey_publisher_old_db_2022.py b/test/test_survey_publisher_old_db_2022.py
index 1d0fbcd1ded07fd52578fc4fc4806293a9b6ee3a..7b2983f906d82a327a5647e65e6acddf265c766c 100644
--- a/test/test_survey_publisher_old_db_2022.py
+++ b/test/test_survey_publisher_old_db_2022.py
@@ -110,7 +110,7 @@ org_dataKTU,"NOC, administrative authority"
         ]
 
 
-def test_publisher(app_with_survey_db, mocker, dummy_config):
+def test_publisher(app_with_survey_db, mocker):
     global org_data
 
     def get_rows_as_tuples(*args, **kwargs):
@@ -194,18 +194,20 @@ def test_publisher(app_with_survey_db, mocker, dummy_config):
                 ('nren3', 'n.a. online'),
             ]
 
-    def institutions_urls_data():
-        return [
-            (87483, 'ANA', 2013, "http://www.rash.al/index.php/network/points-of-presence-pop"),
-            (163286, 'ANA', 2014, "http://www.rash.al/index.php/network/points-of-presence-pop"),
-        ]
+    def institutions_urls_data(question_id):
+        if question_id == 16507:
+            return [
+                (87483, 'ANA', 2013, "http://www.rash.al/index.php/network/points-of-presence-pop"),
+                (163286, 'ANA', 2014, "http://www.rash.al/index.php/network/points-of-presence-pop"),
+            ]
+        else:
+            return []
 
     mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.query_budget', get_rows_as_tuples)
     mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.query_funding_sources', funding_source_data)
     mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.query_question', question_data)
     mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.query_question_id', question_id_data)
-    mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.query_institutions_urls',
-                 institutions_urls_data)
+    mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.recursive_query', institutions_urls_data)
 
     nren_names = ['Nren1', 'Nren2', 'Nren3', 'Nren4', 'SURF', 'KIFU', 'University of Malta', 'ASNET-AM',
                   'SIKT', 'LAT', 'RASH', 'ANAS', 'GRNET', 'CSC']
@@ -213,7 +215,7 @@ def test_publisher(app_with_survey_db, mocker, dummy_config):
         db.session.add_all([presentation_models.NREN(name=nren_name, country='country') for nren_name in nren_names])
         db.session.commit()
 
-    _cli(dummy_config, app_with_survey_db)
+    _cli(app_with_survey_db)
 
     with app_with_survey_db.app_context():
         budgets = db.session.scalars(