diff --git a/gso/__init__.py b/gso/__init__.py
index beabac68199a877aecf491bab7d6325a34097714..bd869b6657e6253a2888ac6a60f7e27b52276a18 100644
--- a/gso/__init__.py
+++ b/gso/__init__.py
@@ -65,12 +65,12 @@ def init_gso_app() -> OrchestratorCore:
def init_cli_app() -> typer.Typer:
"""Initialise GSO as a CLI application."""
- from gso.cli import imports, netbox, prechecks, schedule # noqa: PLC0415
+ from gso.cli import imports, lso_calls, netbox, schedule # noqa: PLC0415
cli_app.add_typer(imports.app, name="import-cli")
cli_app.add_typer(netbox.app, name="netbox-cli")
cli_app.add_typer(schedule.app, name="schedule-cli")
- cli_app.add_typer(prechecks.app, name="precheck-cli")
+ cli_app.add_typer(lso_calls.app, name="lso-cli")
return cli_app()
diff --git a/gso/cli/lso_calls.py b/gso/cli/lso_calls.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4354f66552278cb2edda5767a063eb6876076eb
--- /dev/null
+++ b/gso/cli/lso_calls.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+"""CLI for GSO pre-check using LSO remote exec endpoint."""
+
+import json
+import logging
+
+import click
+import httpx
+import structlog
+import typer
+from orchestrator.db import db
+from orchestrator.db.database import transactional
+from pydantic import ValidationError
+
+from gso import settings
+from gso.db.models import BgpStatusPreCheckTable
+from gso.services.partners import filter_partners_by_name
+from gso.utils.types.precheck import ExecutableRunResponse
+
+logger = structlog.get_logger(__name__)
+app = typer.Typer()
+
+
+def _validate_partner(partner: str) -> None:
+ if not filter_partners_by_name(name=partner, case_sensitive=True):
+ typer.echo(f"Error: partner '{partner}' not found in database.", err=True)
+ raise typer.Exit(1)
+
+
+def _call_lso(host: str, partner: str) -> ExecutableRunResponse:
+ oss = settings.load_oss_params()
+ proxy = oss.PROVISIONING_PROXY
+ url = f"{proxy.scheme}://{proxy.api_base}/api/execute/"
+ payload = {
+ "executable_name": "bgp_status_pre_check.py",
+ "args": [host, partner],
+ "is_async": False,
+ }
+
+ try:
+ resp = httpx.post(url, json=payload, timeout=30)
+ resp.raise_for_status()
+ except Exception as e:
+ logger.exception("LSO call failed")
+ typer.echo(f"Error: failed to call LSO: {e}", err=True)
+ raise typer.Exit(1) from e
+
+ try:
+ return ExecutableRunResponse(**resp.json())
+ except ValidationError as e:
+ logger.exception("Invalid response from LSO")
+ typer.echo("Error: invalid JSON returned by LSO:", err=True)
+ typer.echo(str(e), err=True)
+ raise typer.Exit(1) from e
+
+
+def _print_full(exec_resp: ExecutableRunResponse) -> None:
+ full_json = exec_resp.model_dump(mode="json")
+ typer.echo(typer.style("\nFull LSO response:", fg=typer.colors.GREEN))
+ typer.echo(json.dumps(full_json, indent=2))
+
+
+def _print_parsed_output(exec_resp: ExecutableRunResponse) -> None:
+ output_str = exec_resp.result.output if exec_resp.result else ""
+ typer.echo(typer.style("\nParsed `result.output` as JSON:", fg=typer.colors.CYAN))
+
+ try:
+ parsed = json.loads(output_str)
+ rendered = json.dumps(parsed, indent=2)
+ max_lines = settings.load_oss_params().GENERAL.pre_check_cli_max_output_lines
+ if rendered.count("\n") > max_lines:
+ click.echo_via_pager(rendered)
+ else:
+ typer.echo(rendered)
+ except json.JSONDecodeError:
+ typer.echo("(not valid JSON, raw string below)")
+ typer.echo(output_str)
+
+
+def _maybe_save(host: str, partner: str, exec_resp: ExecutableRunResponse) -> None:
+ prompt = (
+ f"\nIf you are happy with the above output for router '{host}' "
+ f"(partner: {partner}), shall we save it to the database?"
+ )
+ if not typer.confirm(prompt, default=False):
+ typer.echo("Alright, not saving. You can re-run when ready.")
+ return
+
+ try:
+ with db.database_scope(), transactional(db, logger):
+ record = BgpStatusPreCheckTable(
+ router_fqdn=host,
+ partner=partner,
+ result=exec_resp.result.model_dump(mode="json") if exec_resp.result else {},
+ )
+ db.session.add(record)
+ except Exception as e:
+ logger.exception("Failed to save pre-check record")
+ typer.echo("Error: could not save pre-check to database.", err=True)
+ raise typer.Exit(2) from e
+
+ typer.echo("Pre-check result saved.")
+
+
+@app.command()
+def bgp_status_precheck(
+ host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
+ partner: str = typer.Argument(..., help="Partner name for import file path"),
+) -> None:
+    """Trigger the bgp_status_pre_check script on LSO, print results, and optionally save."""
+ _validate_partner(partner)
+ exec_resp = _call_lso(host, partner)
+ _print_full(exec_resp)
+ _print_parsed_output(exec_resp)
+ _maybe_save(host, partner, exec_resp)
+
+
+if __name__ == "__main__":
+ logging.basicConfig(level=logging.INFO)
+ app()
diff --git a/gso/cli/prechecks.py b/gso/cli/prechecks.py
deleted file mode 100644
index 55f5df698fde5462159ccedca9f8a41152d8ea48..0000000000000000000000000000000000000000
--- a/gso/cli/prechecks.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-"""CLI for GSO pre-check using LSO remote exec endpoint."""
-
-import json
-import logging
-
-import click
-import httpx
-import structlog
-import typer
-from orchestrator.db import db
-from orchestrator.db.database import transactional
-from pydantic import ValidationError
-
-from gso import settings
-from gso.db.models import BgpStatusPreCheckTable
-from gso.utils.types.precheck import ExecutableRunResponse
-
-logger = structlog.get_logger(__name__)
-app = typer.Typer()
-
-MAX_OUTPUT_LINES = 50 # Max lines to display before paging
-
-
-@app.command()
-def bgp_status(
- host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
- nren: str = typer.Argument(..., help="NREN name for import file path"),
-) -> None:
- """Trigger the bgp_status_pre-check script on LSO, wait for it to finish.
-
- pretty-print the JSON result inline,
- parse the `output` field as JSON-string and page it if large,
- and optionally save to the database.
- """
- oss = settings.load_oss_params()
- p = oss.PROVISIONING_PROXY
- payload = {
- "executable_name": "bgp_status_pre_check.py",
- "args": [host, nren],
- "is_async": False,
- }
- url = f"{p.scheme}://{p.api_base}/api/execute/"
-
- # 1) Call LSO
- try:
- resp = httpx.post(url, json=payload, timeout=30)
- resp.raise_for_status()
- except Exception as e:
- logger.exception("LSO call failed: %s")
- typer.echo(f"Error: failed to call LSO: {e}", err=True)
- raise typer.Exit(1) from e
-
- # 2) Validate response
- try:
- runner = ExecutableRunResponse(**resp.json())
- except ValidationError as e:
- logger.exception("Invalid response from LSO")
- typer.echo("Error: invalid JSON returned by LSO:", err=True)
- typer.echo(str(e), err=True)
- raise typer.Exit(1) from e
-
- # 3) Print full response inline
- full = runner.model_dump(mode="json")
- typer.echo(typer.style("\nFull LSO response:", fg=typer.colors.GREEN))
- typer.echo(json.dumps(full, indent=2))
-
- # 4) Parse and pretty-print the `output` field, with pagination if large
- output_str = runner.result.output if runner.result else ""
- typer.echo(typer.style("\nParsed `result.output` as JSON:", fg=typer.colors.CYAN))
- try:
- parsed = json.loads(output_str)
- parsed_text = json.dumps(parsed, indent=2)
- if parsed_text.count("\n") > MAX_OUTPUT_LINES:
- click.echo_via_pager(parsed_text)
- else:
- typer.echo(parsed_text)
- except json.JSONDecodeError:
- typer.echo("(not valid JSON, raw string below)")
- typer.echo(output_str)
-
- # 5) Save?
- confirm_msg = (
- f"\nIf you are happy with the above output for router '{host}' (NREN: {nren}), "
- "shall we save it to the database?"
- )
- if typer.confirm(confirm_msg, default=False):
- try:
- with db.database_scope(), transactional(db, logger):
- record = BgpStatusPreCheckTable(
- router_fqdn=host,
- nren=nren,
- result=runner.result.model_dump(mode="json") if runner.result else {},
- )
- db.session.add(record)
- except Exception as err:
- logger.exception("Failed to save pre-check record")
- typer.echo("Error: could not save pre-check to database.", err=True)
- raise typer.Exit(2) from err
- typer.echo("Pre-check result saved.")
- else:
- typer.echo("Alright, not saving. You can re-run when ready.")
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.INFO)
- app()
diff --git a/gso/db/models.py b/gso/db/models.py
index 5ac6a46afea88e5cc3ed96b2d9c8716ce7232834..314d9dd56d1e3d3ec670d75d05c5b806cd3809f5 100644
--- a/gso/db/models.py
+++ b/gso/db/models.py
@@ -44,10 +44,10 @@ class BgpStatusPreCheckTable(BaseModel):
index=True,
comment="The FQDN of the router under check",
)
- nren = mapped_column(
+ partner = mapped_column(
String,
nullable=False,
- comment="Name of the NREN (used in import file path)",
+ comment="Name of the partner (used in import file path)",
)
result = mapped_column(
JSON,
diff --git a/gso/migrations/versions/2025-06-27_27308f1dd850_add_bgp_satatus_pre_check_table_.py b/gso/migrations/versions/2025-06-24_e4437e8b46d5_add_bgp_status_pre_check_table.py
similarity index 84%
rename from gso/migrations/versions/2025-06-27_27308f1dd850_add_bgp_satatus_pre_check_table_.py
rename to gso/migrations/versions/2025-06-24_e4437e8b46d5_add_bgp_status_pre_check_table.py
index 91503c32a88d06a788efb9a18ec293b8bb29ee19..48a89d6275cebfec4d51629fd039e925ec5f0c50 100644
--- a/gso/migrations/versions/2025-06-27_27308f1dd850_add_bgp_satatus_pre_check_table_.py
+++ b/gso/migrations/versions/2025-06-24_e4437e8b46d5_add_bgp_status_pre_check_table.py
@@ -1,8 +1,8 @@
"""Add bgp_status_pre_checks table.
-Revision ID: 27308f1dd850
-Revises: 24858fd1d805
-Create Date: 2025-06-27 10:00:00.000000
+Revision ID: e4437e8b46d5
+Revises: 7c3094cd282a
+Create Date: 2025-06-24 13:36:23.866783
"""
import sqlalchemy as sa
@@ -11,8 +11,8 @@ from orchestrator.db import UtcTimestamp
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
-revision = '27308f1dd850'
-down_revision = '24858fd1d805'
+revision = 'e4437e8b46d5'
+down_revision = '7c3094cd282a'
branch_labels = None
depends_on = None
@@ -22,7 +22,7 @@ def upgrade() -> None:
'bgp_status_pre_checks',
sa.Column('pre_check_id', sa.String(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('router_fqdn', sa.String(), nullable=False),
- sa.Column('nren', sa.String(), nullable=False),
+ sa.Column('partner', sa.String(), nullable=False),
sa.Column('result', postgresql.JSON(), nullable=False), # type: ignore[no-untyped-call]
sa.Column('created_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'), nullable=False),
sa.Column('updated_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'),
@@ -37,4 +37,4 @@ def downgrade() -> None:
# drop indexes, then table
op.drop_index('ix_bgp_status_pre_checks_router_fqdn', table_name='bgp_status_pre_checks')
op.drop_index('ix_bgp_status_pre_checks_router_id', table_name='bgp_status_pre_checks')
     op.drop_table('bgp_status_pre_checks')
diff --git a/gso/oss-params-example.json b/gso/oss-params-example.json
index 98e41280d1f1d47480b64702d2d587452d846d08..decd0de5331c94fb604f09da978a2d5a62708c6c 100644
--- a/gso/oss-params-example.json
+++ b/gso/oss-params-example.json
@@ -3,7 +3,8 @@
"public_hostname": "https://gap.geant.org",
"internal_hostname": "http://gso-api:9000",
"isis_high_metric": 999999,
- "environment": "development"
+ "environment": "development",
+ "pre_check_cli_max_output_lines": 50
},
"NETBOX": {
"api": "https://127.0.0.1:8000",
diff --git a/gso/settings.py b/gso/settings.py
index 8c1b0ec8e2d00c52e89c133ee7474d78289face3..7bcb5549f5c41b0ed2029b91b2c44dd24c211747 100644
--- a/gso/settings.py
+++ b/gso/settings.py
@@ -41,13 +41,16 @@ class GeneralParams(BaseSettings):
"""The hostname of GSO that is for internal use, such as the provisioning proxy."""
isis_high_metric: int
environment: EnvironmentEnum
+ """The environment in which GSO is running, such as development, test, uat, or production."""
+ pre_check_cli_max_output_lines: int = 50
+ """The maximum number of lines to print when displaying the output of a bgp_status_precheck CLI command."""
class CelerySettings(BaseSettings):
"""Parameters for Celery."""
broker_url: str = "redis://localhost:6379/0"
- result_backend: str = "rpc://localhost:6379/0"
+ result_backend: str = "redis://localhost:6379/0"
result_expires: int = 3600
class Config:
diff --git a/test/cli/test_pre_checks.py b/test/cli/test_lso_calls.py
similarity index 82%
rename from test/cli/test_pre_checks.py
rename to test/cli/test_lso_calls.py
index a3d95f65fd4a6773bd6045a9f28e469ae89dd153..51bc2d6cf6c9783e60686ff9e4e3ab2639d3cd9b 100644
--- a/test/cli/test_pre_checks.py
+++ b/test/cli/test_lso_calls.py
@@ -6,7 +6,7 @@ import pytest
from orchestrator.db import db
from typer.testing import CliRunner
-from gso.cli.prechecks import app
+from gso.cli.lso_calls import app
from gso.db.models import BgpStatusPreCheckTable
runner = CliRunner()
@@ -66,8 +66,17 @@ def mock_http_bad_shape(monkeypatch):
return dummy_resp
-def test_no_save_leaves_table_empty(mock_http_success):
+def test_invalid_partner_name(mock_http_success):
+ """Step 0: unknown partner should abort before any HTTP call."""
+ result = runner.invoke(app, ["rt1.ams.geant.org", "UNKNOWN"], input="")
+ assert result.exit_code == 1
+ assert "partner 'unknown' not found" in result.stdout.lower()
+ assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_no_save_leaves_table_empty(mock_http_success, partner_factory):
"""If user declines save, table remains empty."""
+ partner_factory("SURF")
result = runner.invoke(app, ["rt1.example.com", "SURF"], input="n\n")
assert result.exit_code == 0
assert "not saving" in result.stdout.lower()
@@ -75,15 +84,17 @@ def test_no_save_leaves_table_empty(mock_http_success):
assert db.session.query(BgpStatusPreCheckTable).count() == 0
-def test_prompt_save_yes_persists_record(mock_http_success):
+def test_prompt_save_yes_persists_record(mock_http_success, partner_factory):
"""Typing 'y' at prompt should also persist."""
+ partner_factory("SURF")
result = runner.invoke(app, ["rt1.example.com", "SURF"], input="y\n")
assert result.exit_code == 0
assert db.session.query(BgpStatusPreCheckTable).count() == 1
-def test_http_failure_aborts(mock_http_error):
+def test_http_failure_aborts(mock_http_error, partner_factory):
"""Network/timeout errors should abort with exit code 1."""
+ partner_factory("SURF")
result = runner.invoke(app, ["rt1.example.com", "SURF"])
assert result.exit_code == 1
# Now stderr is separately captured:
@@ -93,16 +104,18 @@ def test_http_failure_aborts(mock_http_error):
assert db.session.query(BgpStatusPreCheckTable).count() == 0
-def test_invalid_shape_aborts(mock_http_bad_shape):
+def test_invalid_shape_aborts(mock_http_bad_shape, partner_factory):
"""Malformed top-level JSON shape should abort."""
+ partner_factory("SURF")
result = runner.invoke(app, ["rt1.example.com", "SURF"])
assert result.exit_code == 1
assert "invalid JSON returned by LSO" in result.stdout
assert db.session.query(BgpStatusPreCheckTable).count() == 0
-def test_parse_output_nonjson(mock_http_success):
+def test_parse_output_nonjson(mock_http_success, partner_factory):
"""If output is not valid JSON, we still complete without saving."""
+ partner_factory("SURF")
# Patch BASE_RESPONSE to use non-JSON output
bad = dict(BASE_RESPONSE)
bad["result"] = dict(bad["result"])
@@ -120,8 +133,9 @@ def test_parse_output_nonjson(mock_http_success):
_httpx.post = _orig
-def test_pagination_on_large_output(mock_http_success, monkeypatch):
+def test_pagination_on_large_output(mock_http_success, monkeypatch, partner_factory):
"""Parsed output >50 lines should trigger click.echo_via_pager."""
+ partner_factory("SURF")
# Build huge object
big = {"x": ["line"] * 100}
payload = dict(BASE_RESPONSE)