Commit 2cd67397 authored by Mohammad Torkashvand

refactored pre-check command, added import_file_path, and removed partner from DB

parent 78845042
Branches feature/NAT-1260-refactor-precheck-command
1 merge request !445: refactored pre-check command and add import_file_path and removed partner from DB
Pipeline #95215 passed
@@ -37,12 +37,12 @@ def _validate_partner(partner: str) -> None:
raise typer.Exit(1)
def _load_import_file(import_file_path: Path) -> str:
"""Read a JSON file from the given path, return it as a compact JSON string, Exits on error."""
def _load_import_file(import_file_path: Path) -> tuple[str, list[dict]]:
"""Read a JSON file from the given path; return it as a compact JSON string together with the parsed data. Exits on error."""
try:
with import_file_path.open("r", encoding="utf-8") as f:
data = json.load(f)
return json.dumps(data, separators=(",", ":"))
return json.dumps(data, separators=(",", ":")), data
except Exception as e:
logger.exception("Failed to read import file")
typer.echo(f"Error: could not read or parse '{import_file_path}': {e}")
@@ -51,19 +51,19 @@ def _load_import_file(import_file_path: Path) -> str:
def _call_lso(
host: str,
partner: str,
import_json_str: str,
) -> ExecutableRunResponse:
oss = settings.load_oss_params()
proxy = oss.PROVISIONING_PROXY
general_params = oss.GENERAL
url = f"{proxy.scheme}://{proxy.api_base}/api/execute/"
payload = {
"executable_name": "bgp_status_pre_check.py",
"args": [host, partner, import_json_str],
"args": [host, import_json_str],
"is_async": False,
}
try:
resp = httpx.post(url, json=payload, timeout=30)
resp = httpx.post(url, json=payload, timeout=general_params.pre_check_cli_http_timeout_sec)
resp.raise_for_status()
except Exception as e:
logger.exception("LSO call failed")
@@ -79,7 +79,7 @@ def _call_lso(
raise typer.Exit(1) from e
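
With partner dropped from the args list, the payload posted to LSO now looks like this (an illustrative sketch; the host and import JSON string are placeholders):

# The partner is no longer forwarded to LSO as a separate argument; the
# script reads it from the import file contents passed as the second arg.
payload = {
    "executable_name": "bgp_status_pre_check.py",
    "args": ["rt1.ams.geant.org", '[{"partner":"SURF"}]'],
    "is_async": False,
}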
def _print_full(exec_resp: ExecutableRunResponse) -> None:
def _print_full_output(exec_resp: ExecutableRunResponse) -> None:
full_json = exec_resp.model_dump(mode="json")
typer.echo(typer.style("\nFull LSO response:", fg=typer.colors.GREEN))
typer.echo(json.dumps(full_json, indent=2))
@@ -117,10 +117,13 @@ def _maybe_save(
try:
with db.database_scope(), transactional(db, logger):
result = exec_resp.result.model_dump(mode="json") if exec_resp.result else {}
output = json.loads(result.get("output", "{}"))
peers = output.get("peers", [])
record = BgpStatusPreCheckTable(
router_fqdn=host,
partner=partner,
result=exec_resp.result.model_dump(mode="json") if exec_resp.result else {},
peers=peers,
result=result,
)
db.session.add(record)
except Exception as e:
@@ -134,14 +137,14 @@ def _maybe_save(
@app.command()
def bgp_status_precheck(
host: str = typer.Argument(..., help="FQDN of the router to pre-check"),
partner: str = typer.Argument(..., help="Partner name for import file path"),
import_file_path: Path = _IMPORT_FILE_ARG,
) -> None:
"""Trigger the bgp_status_pre-check script on LSO, print results, and optionally save."""
import_json_str, data = _load_import_file(import_file_path)
partner = data[0].get("partner")
_validate_partner(partner)
import_json_str = _load_import_file(import_file_path)
exec_resp = _call_lso(host, partner, import_json_str)
_print_full(exec_resp)
exec_resp = _call_lso(host, import_json_str)
_print_full_output(exec_resp)
_print_parsed_output(exec_resp)
_maybe_save(host, partner, exec_resp)
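
The command now takes only the router FQDN and the import file path. A minimal invocation sketch via the test runner (assuming app is the Typer application under test; the paths are illustrative):

from typer.testing import CliRunner

runner = CliRunner()
# Partner is no longer a CLI argument; it is read from the import file.
result = runner.invoke(app, ["rt1.ams.geant.org", "/tmp/import.json"], input="n\n")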
@@ -44,17 +44,19 @@ class BgpStatusPreCheckTable(BaseModel):
index=True,
comment="The FQDN of the router under check",
)
partner = mapped_column(
String,
nullable=False,
comment="Name of the partner (used in import file path)",
)
result = mapped_column(
JSON,
nullable=False,
comment="Raw JSON blob returned by LSO bgp_status_pre_check script",
)
peers = mapped_column(
JSON,
nullable=True,
comment="The BGP peers address, if applicable",
default=list,
)
created_at = mapped_column(
UtcTimestamp,
server_default=text("current_timestamp"),
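
Once a pre-check run is saved, the new peers column can be read back directly. A hypothetical query sketch, assuming the same db session used elsewhere in the codebase:

# Hypothetical lookup of the stored peer list for a router.
record = (
    db.session.query(BgpStatusPreCheckTable)
    .filter_by(router_fqdn="rt1.ams.geant.org")
    .one_or_none()
)
peers = record.peers if record else []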
"""remove-partner-add-peers-pre-check.
Revision ID: 227ea7f4f951
Revises: 285954f5ec04
Create Date: 2025-06-26 13:02:33.828117
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '227ea7f4f951'
down_revision = '285954f5ec04'
branch_labels = None
depends_on = None
def upgrade() -> None:
# drop the old partner column
op.drop_column('bgp_status_pre_checks', 'partner')
# add the new peers column
op.add_column(
'bgp_status_pre_checks',
sa.Column(
'peers',
postgresql.JSON(), # type: ignore[no-untyped-call]
nullable=True,
server_default=sa.text("'[]'::json"),
comment="The BGP peers address, if applicable",
)
)
def downgrade() -> None:
# remove peers, re-add partner
op.drop_column('bgp_status_pre_checks', 'peers')
op.add_column(
'bgp_status_pre_checks',
# A server default is needed so NOT NULL can apply to rows created after the upgrade.
sa.Column('partner', sa.String(), nullable=False, server_default=sa.text("''")),
)
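
Applying the revision programmatically is a one-liner; a sketch assuming a standard alembic.ini at the project root (equivalent to running `alembic upgrade 227ea7f4f951` on the command line):

from alembic import command
from alembic.config import Config

# Upgrade the schema to this revision: drops partner, adds peers.
command.upgrade(Config("alembic.ini"), "227ea7f4f951")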
@@ -44,6 +44,8 @@ class GeneralParams(BaseSettings):
"""The environment in which GSO is running, such as development, test, uat, or production."""
pre_check_cli_max_output_lines: int = 50
"""The maximum number of lines to print when displaying the output of a bgp_status_precheck CLI command."""
pre_check_cli_http_timeout_sec: int = 300
"""The timeout in seconds for the HTTP request to the LSO pre-check endpoint."""
class CelerySettings(BaseSettings):
@@ -15,7 +15,34 @@ runner = CliRunner()
BASE_RESPONSE = {
"job_id": "2c19843b-c721-4662-8014-b1a7a22f1734",
"result": {
"output": json.dumps({"bgp": {"asn": 65001}, "neighbors": []}),
"output": json.dumps({
"peers": [
{
"host": "62.40.119.4",
"address": "62.40.125.178",
"status": "Established",
"peer-as": "13092",
"advertised_routes": "1230",
"received_routes": "0",
"active_routes": "0",
"bfd_oper_state": "up",
"bfd_admin_state": "enabled",
"send_default_route": False,
},
{
"host": "62.40.119.4",
"address": "2001:798:1b:10aa::2",
"status": "Established",
"peer-as": "13092",
"advertised_routes": "235",
"received_routes": "0",
"active_routes": "0",
"bfd_oper_state": "down",
"bfd_admin_state": "enabled",
"send_default_route": False,
},
],
}),
"return_code": 0,
"status": "successful",
},
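
The fixture's output field round-trips back into the peers list, which tests can assert on, e.g.:

# Decode the stringified output and check the two fixture peers.
peers = json.loads(BASE_RESPONSE["result"]["output"])["peers"]
assert len(peers) == 2
assert peers[0]["status"] == "Established"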
@@ -35,14 +62,6 @@ class DummyResponse:
raise httpx.HTTPStatusError("error", request=None, response=self) # noqa: RUF100,EM101
@pytest.fixture()
def import_file(tmp_path):
"""Create a temporary JSON import file."""
path = tmp_path / "import.json"
path.write_text(json.dumps({"foo": "bar"}))
return path
@pytest.fixture()
def mock_http_success(monkeypatch):
"""Return a successful dummy response for httpx.post."""
@@ -51,6 +70,12 @@ def mock_http_success(monkeypatch):
return dummy_resp
def _write_import_file(tmp_path, partner_name):
path = tmp_path / "import.json"
path.write_text(json.dumps([{"partner": partner_name}]))
return path
@pytest.fixture()
def mock_http_error(monkeypatch):
"""Simulate httpx.post throwing an exception."""
@@ -73,50 +98,39 @@ def mock_http_bad_shape(monkeypatch):
return dummy_resp
def test_invalid_partner_name(mock_http_success, import_file):
def test_invalid_partner_name(mock_http_success, tmp_path):
"""Step 0: unknown partner should abort before any HTTP call."""
result = runner.invoke(
app,
["rt1.ams.geant.org", "UNKNOWN", str(import_file)],
input="",
)
import_file = _write_import_file(tmp_path, "UNKNOWN")
result = runner.invoke(app, ["rt1.ams.geant.org", str(import_file)], input="")
assert result.exit_code == 1
assert "partner 'unknown' not found" in result.stdout.lower()
assert db.session.query(BgpStatusPreCheckTable).count() == 0
def test_no_save_leaves_table_empty(mock_http_success, partner_factory, import_file):
def test_no_save_leaves_table_empty(mock_http_success, partner_factory, tmp_path):
"""If user declines save, table remains empty."""
partner_factory("SURF")
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(import_file)],
input="n\n",
)
import_file = _write_import_file(tmp_path, "SURF")
result = runner.invoke(app, ["rt1.example.com", str(import_file)], input="n\n")
assert result.exit_code == 0
assert "not saving" in result.stdout.lower()
assert db.session.query(BgpStatusPreCheckTable).count() == 0
def test_prompt_save_yes_persists_record(mock_http_success, partner_factory, import_file):
def test_prompt_save_yes_persists_record(mock_http_success, partner_factory, tmp_path):
"""Typing 'y' at prompt should also persist."""
partner_factory("SURF")
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(import_file)],
input="y\n",
)
import_file = _write_import_file(tmp_path, "SURF")
result = runner.invoke(app, ["rt1.example.com", str(import_file)], input="y\n")
assert result.exit_code == 0
assert db.session.query(BgpStatusPreCheckTable).count() == 1
def test_http_failure_aborts(mock_http_error, partner_factory, import_file):
def test_http_failure_aborts(mock_http_error, partner_factory, tmp_path):
"""Network/timeout errors should abort with exit code 1."""
partner_factory("SURF")
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(import_file)],
)
import_file = _write_import_file(tmp_path, "SURF")
result = runner.invoke(app, ["rt1.example.com", str(import_file)])
assert result.exit_code == 1
# The error message is echoed to stdout:
assert "error: failed to call lso: timeout" in result.stdout.lower()
@@ -125,21 +139,20 @@ def test_http_failure_aborts(mock_http_error, partner_factory, import_file):
assert db.session.query(BgpStatusPreCheckTable).count() == 0
def test_invalid_shape_aborts(mock_http_bad_shape, partner_factory, import_file):
def test_invalid_shape_aborts(mock_http_bad_shape, partner_factory, tmp_path):
"""Malformed top-level JSON shape should abort."""
partner_factory("SURF")
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(import_file)],
)
import_file = _write_import_file(tmp_path, "SURF")
result = runner.invoke(app, ["rt1.example.com", str(import_file)])
assert result.exit_code == 1
assert "invalid JSON returned by LSO" in result.stdout
assert db.session.query(BgpStatusPreCheckTable).count() == 0
def test_parse_output_nonjson(monkeypatch, partner_factory, import_file):
def test_parse_output_nonjson(partner_factory, tmp_path):
"""If output is not valid JSON, we still complete without saving."""
partner_factory("SURF")
import_file = _write_import_file(tmp_path, "SURF")
# Patch BASE_RESPONSE to use non-JSON output
bad = dict(BASE_RESPONSE)
bad["result"] = dict(bad["result"])
@@ -150,16 +163,18 @@ def test_parse_output_nonjson(monkeypatch, partner_factory, import_file):
_orig = _httpx.post
_httpx.post = lambda *args, **kwargs: DummyResponse(bad) # noqa: ARG005
try:
result = runner.invoke(app, ["rt1.example.com", "SURF", str(import_file)], input="n\n")
result = runner.invoke(app, ["rt1.example.com", str(import_file)], input="n\n")
assert result.exit_code == 0
assert "(not valid JSON, raw string below)" in result.stdout
finally:
_httpx.post = _orig
def test_pagination_on_large_output(monkeypatch, partner_factory, import_file):
def test_pagination_on_large_output(mock_http_success, monkeypatch, partner_factory, tmp_path):
"""Parsed output >50 lines should trigger click.echo_via_pager."""
partner_factory("SURF")
import_file = _write_import_file(tmp_path, "SURF")
# Build a huge object so the parsed output exceeds the pager threshold
big = {"x": ["line"] * 100}
payload = dict(BASE_RESPONSE)
payload["result"] = dict(payload["result"])
@@ -173,11 +188,7 @@ def test_pagination_on_large_output(monkeypatch, partner_factory, import_file):
paged = True
monkeypatch.setattr(click, "echo_via_pager", fake_pager)
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(import_file)],
input="n\n",
)
result = runner.invoke(app, ["rt1.example.com", str(import_file)], input="n\n")
assert result.exit_code == 0
assert paged, "Expected parsed-output pager for large JSON"
@@ -187,12 +198,11 @@ def test_invalid_import_file(tmp_path, partner_factory):
# create invalid JSON file
bad_file = tmp_path / "bad_import.json"
bad_file.write_text("{invalid_json}")
partner_factory("SURF")
# Invoke with the malformed JSON file
result = runner.invoke(
app,
["rt1.example.com", "SURF", str(bad_file)],
["rt1.example.com", str(bad_file)],
)
# Expect exit code 2 from _load_import_file