Improve sources and runs history tables

This commit is contained in:
Abel Luck 2026-03-31 10:49:50 +02:00
parent df68aa95e9
commit 939cd9ea5d
7 changed files with 459 additions and 25 deletions

View file

@@ -254,3 +254,51 @@ def test_load_runs_view_running_row_targets_queued_follow_up_cancel(
assert running_row["cancel_post_path"] == (
f"/actions/queued-executions/{int(pending_execution.get_id())}/cancel"
)
def test_load_runs_view_paginates_completed_executions_after_20_rows(
    tmp_path: Path,
) -> None:
    """Completed executions paginate at 20 rows; totals are page-independent."""
    initialize_database(tmp_path / "jobs-completed-pagination.db")
    source = create_source(
        name="Completed source",
        slug="completed-source",
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=False,
        cron_minute="*/5",
        cron_hour="*",
        cron_day_of_month="*",
        cron_day_of_week="*",
        cron_month="*",
        feed_url="https://example.com/completed.xml",
    )
    job = Job.get(Job.source == source)
    base_time = datetime(2026, 3, 30, 12, 0, tzinfo=UTC)
    # Seed 21 finished executions: exactly one more than a full page.
    for offset in range(21):
        JobExecution.create(
            job=job,
            running_status=JobExecutionStatus.SUCCEEDED,
            ended_at=base_time - timedelta(minutes=offset),
        )
    pages = {
        page: load_runs_view(
            log_dir=tmp_path / "out" / "logs",
            now=base_time,
            completed_page=page,
        )
        for page in (1, 2)
    }
    # First page is full, second page holds the single overflow row.
    assert len(pages[1]["completed"]) == 20
    assert len(pages[2]["completed"]) == 1
    assert pages[1]["completed_page"] == 1
    assert pages[2]["completed_page"] == 2
    # Totals describe the whole result set regardless of the requested page.
    for view in pages.values():
        assert view["completed_total_pages"] == 2
        assert view["completed_total_count"] == 21

View file

@@ -22,6 +22,7 @@ from repub.model import (
save_setting,
)
from repub.pages.runs import runs_page
from repub.pages.sources import sources_page
from repub.web import (
create_app,
get_refresh_broker,
@@ -172,6 +173,66 @@ def test_runs_page_renders_combined_running_jobs_table() -> None:
assert "/actions/queued-executions/42/cancel" in body
def test_sources_page_removes_view_runs_action_and_last_run_caption() -> None:
    """Source rows keep Edit/Delete but omit the runs link and last-run text."""
    row = {
        "name": "Source one",
        "slug": "source-one",
        "source_type": "Feed",
        "upstream": "https://example.com/feed.xml",
        "schedule": "cron: */5 * * * *",
        "last_run": "Never run",
        "state": "Enabled",
        "state_tone": "scheduled",
    }
    body = str(sources_page(sources=(row,)))
    # Row actions that must remain.
    assert ">Edit<" in body
    assert ">Delete<" in body
    # Removed affordances: no per-source runs link, no last-run caption.
    assert "View runs" not in body
    assert "Never run" not in body
def test_runs_page_renders_clear_completed_button_and_pagination() -> None:
    """Page two of completed runs shows the clear button and pager links."""

    def _completed_row(index: int) -> dict:
        # One finished-run row shaped like the runs view supplies it.
        return {
            "source": f"Completed source {index}",
            "slug": f"completed-source-{index}",
            "job_id": 7,
            "execution_id": index,
            "ended_at": "2 hours ago",
            "ended_at_iso": "2026-01-15T10:00:00+00:00",
            "status": "Succeeded",
            "status_tone": "done",
            "stats": "1 requests • 1 items • 1 bytes",
            "summary": "Worker exited successfully",
            "log_href": f"/job/7/execution/{index}/logs",
        }

    body = str(
        runs_page(
            completed_executions=tuple(
                _completed_row(index) for index in range(1, 21)
            ),
            completed_page=2,
            completed_page_size=20,
            completed_total_count=21,
            completed_total_pages=2,
        )
    )
    expected_fragments = (
        "/actions/completed-executions/clear",
        ">Clear history<",
        "Showing",
        "21",
        'href="/runs?completed_page=1"',
        'href="/runs?completed_page=2"',
        'aria-current="page"',
    )
    for fragment in expected_fragments:
        assert fragment in body
def test_root_get_serves_datastar_shim() -> None:
async def run() -> None:
client = create_app().test_client()
@@ -1498,6 +1559,66 @@ def test_cancel_queued_execution_action_deletes_pending_row_without_touching_run
asyncio.run(run())
def test_clear_completed_executions_action_removes_history_and_log_artifacts(
    monkeypatch, tmp_path: Path
) -> None:
    """Clearing history deletes finished rows and their log files only.

    A running execution and its log must survive the clear action.
    """
    db_path = tmp_path / "clear-completed-action.db"
    log_dir = tmp_path / "out" / "logs"
    monkeypatch.setenv("REPUBLISHER_DB_PATH", str(db_path))
    log_suffixes = (".log", ".jsonl", ".pygea.log")

    async def run() -> None:
        app = create_app()
        app.config["REPUB_LOG_DIR"] = log_dir
        client = app.test_client()
        source = create_source(
            name="History source",
            slug="history-source",
            source_type="feed",
            notes="",
            spider_arguments="",
            enabled=True,
            cron_minute="*/5",
            cron_hour="*",
            cron_day_of_month="*",
            cron_day_of_week="*",
            cron_month="*",
            feed_url="https://example.com/history.xml",
        )
        job = Job.get(Job.source == source)
        finished = JobExecution.create(
            job=job,
            running_status=JobExecutionStatus.SUCCEEDED,
            ended_at=datetime(2026, 3, 30, 12, 0, tzinfo=UTC),
        )
        in_flight = JobExecution.create(
            job=job,
            running_status=JobExecutionStatus.RUNNING,
            started_at=datetime(2026, 3, 30, 12, 5, tzinfo=UTC),
        )
        log_dir.mkdir(parents=True, exist_ok=True)
        finished_prefix = (
            log_dir / f"job-{job.id}-execution-{int(finished.get_id())}"
        )
        in_flight_log = (
            log_dir / f"job-{job.id}-execution-{int(in_flight.get_id())}.log"
        )
        # Lay down every artifact variety for the finished run, plus the
        # live run's log, so deletion scope is observable.
        for suffix in log_suffixes:
            finished_prefix.with_suffix(suffix).write_text("history", encoding="utf-8")
        in_flight_log.write_text("running", encoding="utf-8")

        response = await client.post("/actions/completed-executions/clear")

        assert response.status_code == 204
        # Finished row and all of its log artifacts are gone...
        assert JobExecution.get_or_none(id=int(finished.get_id())) is None
        for suffix in log_suffixes:
            assert not finished_prefix.with_suffix(suffix).exists()
        # ...while the running execution and its log are untouched.
        assert JobExecution.get_or_none(id=int(in_flight.get_id())) is not None
        assert in_flight_log.exists()

    asyncio.run(run())
def test_move_queued_execution_action_reorders_queue(
monkeypatch, tmp_path: Path
) -> None: