Commit fcc65209 authored by Mohammad Torkashvand

rename files, vars and add more validation

parent b245f8f3
1 merge request: !440 add pre-check command to get bgp status from LSO
@@ -65,12 +65,12 @@ def init_gso_app() -> OrchestratorCore:


 def init_cli_app() -> typer.Typer:
     """Initialise GSO as a CLI application."""
-    from gso.cli import imports, netbox, prechecks, schedule  # noqa: PLC0415
+    from gso.cli import imports, lso_calls, netbox, schedule  # noqa: PLC0415

     cli_app.add_typer(imports.app, name="import-cli")
     cli_app.add_typer(netbox.app, name="netbox-cli")
     cli_app.add_typer(schedule.app, name="schedule-cli")
-    cli_app.add_typer(prechecks.app, name="precheck-cli")
+    cli_app.add_typer(lso_calls.app, name="lso-cli")
     return cli_app()
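For a quick local smoke test of the renamed sub-app, the Typer test runner can invoke the module directly, the same way the test suite further down does. This is a minimal sketch, not part of the commit; it assumes an importable, configured GSO environment with a reachable database and LSO instance, and the router FQDN and partner name are placeholders.

# Minimal sketch: exercising the renamed CLI module via Typer's test runner.
from typer.testing import CliRunner

from gso.cli import lso_calls

runner = CliRunner()
# Decline the save prompt ("n") so nothing is written to the database.
result = runner.invoke(lso_calls.app, ["rt1.example.com", "SURF"], input="n\n")
print(result.exit_code)
print(result.stdout)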
@@ -14,92 +14,105 @@ from pydantic import ValidationError
 from gso import settings
 from gso.db.models import BgpStatusPreCheckTable
+from gso.services.partners import filter_partners_by_name
 from gso.utils.types.precheck import ExecutableRunResponse

 logger = structlog.get_logger(__name__)
 app = typer.Typer()

-MAX_OUTPUT_LINES = 50  # Max lines to display before paging
-
-
-@app.command()
-def bgp_status(
-    host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
-    nren: str = typer.Argument(..., help="NREN name for import file path"),
-) -> None:
-    """Trigger the bgp_status_pre-check script on LSO, wait for it to finish.
-
-    pretty-print the JSON result inline,
-    parse the `output` field as JSON-string and page it if large,
-    and optionally save to the database.
-    """
+
+def _validate_partner(partner: str) -> None:
+    if not filter_partners_by_name(name=partner, case_sensitive=True):
+        typer.echo(f"Error: partner '{partner}' not found in database.", err=True)
+        raise typer.Exit(1)
+
+
+def _call_lso(host: str, partner: str) -> ExecutableRunResponse:
     oss = settings.load_oss_params()
-    p = oss.PROVISIONING_PROXY
+    proxy = oss.PROVISIONING_PROXY
+    url = f"{proxy.scheme}://{proxy.api_base}/api/execute/"
     payload = {
         "executable_name": "bgp_status_pre_check.py",
-        "args": [host, nren],
+        "args": [host, partner],
         "is_async": False,
     }
-    url = f"{p.scheme}://{p.api_base}/api/execute/"
-
-    # 1) Call LSO
     try:
         resp = httpx.post(url, json=payload, timeout=30)
         resp.raise_for_status()
     except Exception as e:
-        logger.exception("LSO call failed: %s")
+        logger.exception("LSO call failed")
        typer.echo(f"Error: failed to call LSO: {e}", err=True)
         raise typer.Exit(1) from e
-
-    # 2) Validate response
     try:
-        runner = ExecutableRunResponse(**resp.json())
+        return ExecutableRunResponse(**resp.json())
     except ValidationError as e:
         logger.exception("Invalid response from LSO")
         typer.echo("Error: invalid JSON returned by LSO:", err=True)
         typer.echo(str(e), err=True)
         raise typer.Exit(1) from e

-    # 3) Print full response inline
-    full = runner.model_dump(mode="json")
+
+def _print_full(exec_resp: ExecutableRunResponse) -> None:
+    full_json = exec_resp.model_dump(mode="json")
     typer.echo(typer.style("\nFull LSO response:", fg=typer.colors.GREEN))
-    typer.echo(json.dumps(full, indent=2))
+    typer.echo(json.dumps(full_json, indent=2))

-    # 4) Parse and pretty-print the `output` field, with pagination if large
-    output_str = runner.result.output if runner.result else ""
+
+def _print_parsed_output(exec_resp: ExecutableRunResponse) -> None:
+    output_str = exec_resp.result.output if exec_resp.result else ""
     typer.echo(typer.style("\nParsed `result.output` as JSON:", fg=typer.colors.CYAN))
     try:
         parsed = json.loads(output_str)
-        parsed_text = json.dumps(parsed, indent=2)
-        if parsed_text.count("\n") > MAX_OUTPUT_LINES:
-            click.echo_via_pager(parsed_text)
+        rendered = json.dumps(parsed, indent=2)
+        max_lines = settings.load_oss_params().GENERAL.pre_check_cli_max_output_lines
+        if rendered.count("\n") > max_lines:
+            click.echo_via_pager(rendered)
         else:
-            typer.echo(parsed_text)
+            typer.echo(rendered)
     except json.JSONDecodeError:
         typer.echo("(not valid JSON, raw string below)")
         typer.echo(output_str)

-    # 5) Save?
-    confirm_msg = (
-        f"\nIf you are happy with the above output for router '{host}' (NREN: {nren}), "
-        "shall we save it to the database?"
+
+def _maybe_save(host: str, partner: str, exec_resp: ExecutableRunResponse) -> None:
+    prompt = (
+        f"\nIf you are happy with the above output for router '{host}' "
+        f"(partner: {partner}), shall we save it to the database?"
     )
-    if typer.confirm(confirm_msg, default=False):
-        try:
-            with db.database_scope(), transactional(db, logger):
-                record = BgpStatusPreCheckTable(
-                    router_fqdn=host,
-                    nren=nren,
-                    result=runner.result.model_dump(mode="json") if runner.result else {},
-                )
-                db.session.add(record)
-        except Exception as err:
-            logger.exception("Failed to save pre-check record")
-            typer.echo("Error: could not save pre-check to database.", err=True)
-            raise typer.Exit(2) from err
-        typer.echo("Pre-check result saved.")
-    else:
+    if not typer.confirm(prompt, default=False):
         typer.echo("Alright, not saving. You can re-run when ready.")
+        return
+    try:
+        with db.database_scope(), transactional(db, logger):
+            record = BgpStatusPreCheckTable(
+                router_fqdn=host,
+                partner=partner,
+                result=exec_resp.result.model_dump(mode="json") if exec_resp.result else {},
+            )
+            db.session.add(record)
+    except Exception as e:
+        logger.exception("Failed to save pre-check record")
+        typer.echo("Error: could not save pre-check to database.", err=True)
+        raise typer.Exit(2) from e
+    typer.echo("Pre-check result saved.")
+
+
+@app.command()
+def bgp_status_precheck(
+    host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
+    partner: str = typer.Argument(..., help="Partner name for import file path"),
+) -> None:
+    """Trigger the bgp_status_pre-check script on LSO, print results, and optionally save."""
+    _validate_partner(partner)
+    exec_resp = _call_lso(host, partner)
+    _print_full(exec_resp)
+    _print_parsed_output(exec_resp)
+    _maybe_save(host, partner, exec_resp)


 if __name__ == "__main__":
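The helpers above rely on only a small part of ExecutableRunResponse: a Pydantic model whose optional result carries a string output, and which supports model_dump(mode="json"). The following is a hedged sketch of that assumed shape; the real model in gso.utils.types.precheck is not shown in this commit and may carry additional fields such as a job id or status.

# Hedged sketch of the response shape assumed by _call_lso, _print_full and _print_parsed_output.
# Field names other than "result" and "output" are illustrative, not taken from the commit.
from pydantic import BaseModel


class ExecutableRunResult(BaseModel):
    """Subset of the LSO run result used by the CLI: the raw output of the executed script."""

    output: str = ""


class ExecutableRunResponse(BaseModel):
    """Top-level LSO response; `result` may be absent if the run produced no output."""

    result: ExecutableRunResult | None = None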
@@ -44,10 +44,10 @@ class BgpStatusPreCheckTable(BaseModel):
         index=True,
         comment="The FQDN of the router under check",
     )
-    nren = mapped_column(
+    partner = mapped_column(
         String,
         nullable=False,
-        comment="Name of the NREN (used in import file path)",
+        comment="Name of the partner (used in import file path)",
     )
     result = mapped_column(
         JSON,
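With the column renamed, stored pre-checks are naturally keyed by router and partner. A hedged sketch, not part of the commit, of reading back the most recent record, assuming the orchestrator session used by the CLI and the created_at column created by the migration below:

# Hedged sketch: fetch the latest saved pre-check for a router/partner pair.
from orchestrator.db import db

from gso.db.models import BgpStatusPreCheckTable

latest = (
    db.session.query(BgpStatusPreCheckTable)
    .filter(
        BgpStatusPreCheckTable.router_fqdn == "rt1.example.com",  # placeholder FQDN
        BgpStatusPreCheckTable.partner == "SURF",  # placeholder partner name
    )
    .order_by(BgpStatusPreCheckTable.created_at.desc())
    .first()
)
if latest is not None:
    print(latest.result)  # the JSON blob saved by the CLI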
"""Add bgp_status_pre_checks table. """Add bgp_status_pre_checks table.
Revision ID: 27308f1dd850 Revision ID: e4437e8b46d5
Revises: 24858fd1d805 Revises: 7c3094cd282a
Create Date: 2025-06-27 10:00:00.000000 Create Date: 2025-06-24 13:36:23.866783
""" """
import sqlalchemy as sa import sqlalchemy as sa
...@@ -11,8 +11,8 @@ from orchestrator.db import UtcTimestamp ...@@ -11,8 +11,8 @@ from orchestrator.db import UtcTimestamp
from sqlalchemy.dialects import postgresql from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '27308f1dd850' revision = 'e4437e8b46d5'
down_revision = '24858fd1d805' down_revision = '7c3094cd282a'
branch_labels = None branch_labels = None
depends_on = None depends_on = None
...@@ -22,7 +22,7 @@ def upgrade() -> None: ...@@ -22,7 +22,7 @@ def upgrade() -> None:
'bgp_status_pre_checks', 'bgp_status_pre_checks',
sa.Column('pre_check_id', sa.String(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('pre_check_id', sa.String(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('router_fqdn', sa.String(), nullable=False), sa.Column('router_fqdn', sa.String(), nullable=False),
sa.Column('nren', sa.String(), nullable=False), sa.Column('partner', sa.String(), nullable=False),
sa.Column('result', postgresql.JSON(), nullable=False), # type: ignore[no-untyped-call] sa.Column('result', postgresql.JSON(), nullable=False), # type: ignore[no-untyped-call]
sa.Column('created_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'), nullable=False), sa.Column('created_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'), nullable=False),
sa.Column('updated_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'), sa.Column('updated_at', UtcTimestamp(timezone=True), server_default=sa.text('current_timestamp'),
...@@ -37,4 +37,4 @@ def downgrade() -> None: ...@@ -37,4 +37,4 @@ def downgrade() -> None:
# drop indexes, then table # drop indexes, then table
op.drop_index('ix_bgp_status_pre_checks_router_fqdn', table_name='bgp_status_pre_checks') op.drop_index('ix_bgp_status_pre_checks_router_fqdn', table_name='bgp_status_pre_checks')
op.drop_index('ix_bgp_status_pre_checks_router_id', table_name='bgp_status_pre_checks') op.drop_index('ix_bgp_status_pre_checks_router_id', table_name='bgp_status_pre_checks')
op.drop_table('bgp_status_pre_checks') op.drop_table('bgp_status_pre_checks')
\ No newline at end of file
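For completeness, a hedged sketch of applying this revision through Alembic's Python API; the alembic.ini path is an assumption, and in practice the orchestrator's own migration tooling would normally be used instead.

# Hedged sketch: upgrade the schema to the renamed revision using Alembic's command API.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the project's Alembic configuration
command.upgrade(cfg, "e4437e8b46d5")  # new revision id introduced by this commit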
@@ -3,7 +3,8 @@
     "public_hostname": "https://gap.geant.org",
     "internal_hostname": "http://gso-api:9000",
     "isis_high_metric": 999999,
-    "environment": "development"
+    "environment": "development",
+    "pre_check_cli_max_output_lines": 50
   },
   "NETBOX": {
     "api": "https://127.0.0.1:8000",
@@ -41,13 +41,16 @@ class GeneralParams(BaseSettings):
     """The hostname of GSO that is for internal use, such as the provisioning proxy."""
     isis_high_metric: int
     environment: EnvironmentEnum
+    """The environment in which GSO is running, such as development, test, uat, or production."""
+    pre_check_cli_max_output_lines: int = 50
+    """The maximum number of lines to print when displaying the output of a bgp_status_precheck CLI command."""


 class CelerySettings(BaseSettings):
     """Parameters for Celery."""

     broker_url: str = "redis://localhost:6379/0"
-    result_backend: str = "rpc://localhost:6379/0"
+    result_backend: str = "redis://localhost:6379/0"
     result_expires: int = 3600

     class Config:
@@ -6,7 +6,7 @@ import pytest
 from orchestrator.db import db
 from typer.testing import CliRunner

-from gso.cli.prechecks import app
+from gso.cli.lso_calls import app
 from gso.db.models import BgpStatusPreCheckTable

 runner = CliRunner()
@@ -66,8 +66,17 @@ def mock_http_bad_shape(monkeypatch):
     return dummy_resp


-def test_no_save_leaves_table_empty(mock_http_success):
+def test_invalid_partner_name(mock_http_success):
+    """Step 0: unknown partner should abort before any HTTP call."""
+    result = runner.invoke(app, ["rt1.ams.geant.org", "UNKNOWN"], input="")
+    assert result.exit_code == 1
+    assert "partner 'unknown' not found" in result.stdout.lower()
+    assert db.session.query(BgpStatusPreCheckTable).count() == 0
+
+
+def test_no_save_leaves_table_empty(mock_http_success, partner_factory):
     """If user declines save, table remains empty."""
+    partner_factory("SURF")
     result = runner.invoke(app, ["rt1.example.com", "SURF"], input="n\n")
     assert result.exit_code == 0
     assert "not saving" in result.stdout.lower()
@@ -75,15 +84,17 @@ def test_no_save_leaves_table_empty(mock_http_success):
     assert db.session.query(BgpStatusPreCheckTable).count() == 0


-def test_prompt_save_yes_persists_record(mock_http_success):
+def test_prompt_save_yes_persists_record(mock_http_success, partner_factory):
     """Typing 'y' at prompt should also persist."""
+    partner_factory("SURF")
     result = runner.invoke(app, ["rt1.example.com", "SURF"], input="y\n")
     assert result.exit_code == 0
     assert db.session.query(BgpStatusPreCheckTable).count() == 1


-def test_http_failure_aborts(mock_http_error):
+def test_http_failure_aborts(mock_http_error, partner_factory):
     """Network/timeout errors should abort with exit code 1."""
+    partner_factory("SURF")
     result = runner.invoke(app, ["rt1.example.com", "SURF"])
     assert result.exit_code == 1
     # Now stderr is separately captured:
@@ -93,16 +104,18 @@ def test_http_failure_aborts(mock_http_error):
     assert db.session.query(BgpStatusPreCheckTable).count() == 0


-def test_invalid_shape_aborts(mock_http_bad_shape):
+def test_invalid_shape_aborts(mock_http_bad_shape, partner_factory):
     """Malformed top-level JSON shape should abort."""
+    partner_factory("SURF")
     result = runner.invoke(app, ["rt1.example.com", "SURF"])
     assert result.exit_code == 1
     assert "invalid JSON returned by LSO" in result.stdout
     assert db.session.query(BgpStatusPreCheckTable).count() == 0


-def test_parse_output_nonjson(mock_http_success):
+def test_parse_output_nonjson(mock_http_success, partner_factory):
     """If output is not valid JSON, we still complete without saving."""
+    partner_factory("SURF")
     # Patch BASE_RESPONSE to use non-JSON output
     bad = dict(BASE_RESPONSE)
     bad["result"] = dict(bad["result"])
@@ -120,8 +133,9 @@ def test_parse_output_nonjson(mock_http_success):
     _httpx.post = _orig


-def test_pagination_on_large_output(mock_http_success, monkeypatch):
+def test_pagination_on_large_output(mock_http_success, monkeypatch, partner_factory):
     """Parsed output >50 lines should trigger click.echo_via_pager."""
+    partner_factory("SURF")
     # Build huge object
     big = {"x": ["line"] * 100}
     payload = dict(BASE_RESPONSE)
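The updated tests lean on a partner_factory fixture that is not part of this diff. Below is a hedged sketch of what such a fixture could look like; the PartnerTable model name, its import location, and its columns are assumptions, and the real fixture in the suite's conftest may differ.

# Hedged sketch of the partner_factory fixture assumed by the tests above.
# PartnerTable and its fields are hypothetical stand-ins for the real partner model.
import pytest
from orchestrator.db import db


@pytest.fixture()
def partner_factory():
    def _make(name: str):
        from gso.db.models import PartnerTable  # hypothetical import location

        partner = PartnerTable(name=name)
        db.session.add(partner)
        db.session.commit()
        return partner

    return _make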