Fix feed validation output

This commit is contained in:
Abel Luck 2026-03-31 12:14:47 +02:00
parent c834c3c254
commit db1d9b44b7
13 changed files with 477 additions and 54 deletions

View file

@@ -29,8 +29,13 @@ FIXTURE_FEED_PATH = (
 ).resolve()
 
 
+def initialize_runtime_database(db_path: Path) -> None:
+    initialize_database(db_path)
+    save_setting("feed_url", "http://localhost:8080")
+
+
 def test_job_runtime_syncs_enabled_jobs_into_apscheduler(tmp_path: Path) -> None:
-    initialize_database(tmp_path / "scheduler.db")
+    initialize_runtime_database(tmp_path / "scheduler.db")
     enabled_source = create_source(
         name="Enabled source",
         slug="enabled-source",
@@ -85,7 +90,7 @@ def test_job_runtime_syncs_enabled_jobs_into_apscheduler(tmp_path: Path) -> None
 def test_job_runtime_run_now_writes_log_and_stats_and_marks_success(
     tmp_path: Path,
 ) -> None:
-    initialize_database(tmp_path / "run-now.db")
+    initialize_runtime_database(tmp_path / "run-now.db")
     source = create_source(
         name="Manual source",
         slug="manual-source",
@@ -141,7 +146,7 @@ def test_job_runtime_run_now_writes_log_and_stats_and_marks_success(
 def test_job_runtime_respects_max_concurrent_jobs_setting(tmp_path: Path) -> None:
     db_path = tmp_path / "max-concurrency.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -216,7 +221,7 @@ def test_job_runtime_starts_queued_execution_after_capacity_opens(
 ) -> None:
     db_path = tmp_path / "drain-queue.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -277,7 +282,7 @@ def test_job_runtime_starts_queued_execution_after_capacity_opens(
 def test_job_runtime_deduplicates_manual_queue_requests(tmp_path: Path) -> None:
     db_path = tmp_path / "queue-dedup.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -344,7 +349,7 @@ def test_job_runtime_allows_one_running_and_one_pending_per_job(
 ) -> None:
     db_path = tmp_path / "running-plus-pending.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -400,7 +405,7 @@ def test_job_runtime_start_drains_pending_rows_created_before_start(
 ) -> None:
     db_path = tmp_path / "startup-drain.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     source = create_source(
         name="Queued source",
         slug="queued-source",
@@ -440,7 +445,7 @@ def test_job_runtime_scheduled_runs_use_the_persistent_queue(
 ) -> None:
     db_path = tmp_path / "scheduled-queue.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -496,7 +501,7 @@ def test_job_runtime_cancel_pending_follow_up_keeps_running_worker_alive(
 ) -> None:
     db_path = tmp_path / "cancel-pending.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     save_setting("max_concurrent_jobs", 1)
 
     with _slow_feed_server() as feed_url:
@@ -538,7 +543,7 @@ def test_job_runtime_cancel_pending_follow_up_keeps_running_worker_alive(
 
 
 def test_job_runtime_cancel_marks_execution_canceled(tmp_path: Path) -> None:
-    initialize_database(tmp_path / "cancel.db")
+    initialize_runtime_database(tmp_path / "cancel.db")
     with _slow_feed_server() as feed_url:
         source = create_source(
             name="Cancelable source",
@@ -582,7 +587,7 @@ def test_job_runtime_cancel_marks_execution_canceled(tmp_path: Path) -> None:
 
 
 def test_job_runtime_start_reconciles_stale_running_execution(tmp_path: Path) -> None:
-    initialize_database(tmp_path / "stale-running.db")
+    initialize_runtime_database(tmp_path / "stale-running.db")
     source = create_source(
         name="Stale source",
         slug="stale-source",
@@ -629,7 +634,7 @@ def test_job_runtime_start_reconciles_stale_running_execution(tmp_path: Path) ->
 
 
 def test_job_runtime_publishes_refresh_while_jobs_are_running(tmp_path: Path) -> None:
-    initialize_database(tmp_path / "runtime-refresh.db")
+    initialize_runtime_database(tmp_path / "runtime-refresh.db")
     source = create_source(
         name="Running source",
         slug="running-source",
@@ -667,7 +672,7 @@ def test_job_runtime_start_reattaches_live_worker_after_app_restart(
 ) -> None:
     db_path = tmp_path / "live-worker.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     with _slow_feed_server() as feed_url:
         source = create_source(
             name="Live worker source",
@@ -743,7 +748,7 @@ def test_job_runtime_start_restores_live_worker_marked_failed_by_restart_bug(
 ) -> None:
     db_path = tmp_path / "restore-live-worker.db"
     log_dir = tmp_path / "out" / "logs"
-    initialize_database(db_path)
+    initialize_runtime_database(db_path)
     with _slow_feed_server() as feed_url:
         source = create_source(
             name="Recovered worker source",
@@ -915,6 +920,7 @@ def test_render_runs_uses_database_backed_jobs_and_executions(
     app = create_app()
     app.config["REPUB_LOG_DIR"] = log_dir
+    save_setting("feed_url", "http://localhost:8080")
 
     source = create_source(
         name="Runs page source",