Add persistent job run queue
This commit is contained in:
parent
2bd0651478
commit
0b3b1b2731
8 changed files with 1047 additions and 27 deletions
|
|
@ -1,6 +1,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
from repub.jobs import load_runs_view
|
||||
|
|
@ -83,3 +83,160 @@ def test_load_runs_view_humanizes_running_execution_summary_bytes(
|
|||
)
|
||||
|
||||
assert view["running"][0]["stats"] == "14 requests • 11 items • 1.5 KiB"
|
||||
|
||||
|
||||
def test_load_runs_view_projects_queued_executions_in_fifo_order(
    tmp_path: Path,
) -> None:
    """Pending executions surface in the queued section oldest-first,
    with 1-based queue positions and humanized enqueue timestamps."""
    initialize_database(tmp_path / "jobs-queued.db")

    # Scheduling/flag fields shared by both sources; only identity differs.
    common_fields = dict(
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=True,
        cron_minute="*/5",
        cron_hour="*",
        cron_day_of_month="*",
        cron_day_of_week="*",
        cron_month="*",
    )
    source_a = create_source(
        name="First queued source",
        slug="first-queued-source",
        feed_url="https://example.com/first.xml",
        **common_fields,
    )
    source_b = create_source(
        name="Second queued source",
        slug="second-queued-source",
        feed_url="https://example.com/second.xml",
        **common_fields,
    )
    job_a = Job.get(Job.source == source_a)
    job_b = Job.get(Job.source == source_b)

    reference_time = datetime(2026, 3, 30, 12, 30, tzinfo=UTC)
    # Enqueue the first execution earlier (7 min ago) than the second (3 min ago).
    execution_a = JobExecution.create(
        job=job_a,
        created_at=reference_time - timedelta(minutes=7),
        running_status=JobExecutionStatus.PENDING,
    )
    execution_b = JobExecution.create(
        job=job_b,
        created_at=reference_time - timedelta(minutes=3),
        running_status=JobExecutionStatus.PENDING,
    )

    view = load_runs_view(
        log_dir=tmp_path / "out" / "logs",
        now=reference_time,
    )

    # FIFO projection: the oldest pending execution leads the queue.
    assert tuple(row["execution_id"] for row in view["queued"]) == (
        int(execution_a.get_id()),
        int(execution_b.get_id()),
    )
    assert tuple(row["queue_position"] for row in view["queued"]) == (1, 2)
    assert tuple(row["queued_at"] for row in view["queued"]) == (
        "7 minutes ago",
        "3 minutes ago",
    )
|
||||
|
||||
|
||||
def test_load_runs_view_separates_queued_jobs_from_scheduled_jobs(
    tmp_path: Path,
) -> None:
    """A job with a pending execution belongs only to the queued section,
    while a job without one remains an upcoming (scheduled) row."""
    initialize_database(tmp_path / "jobs-queue-separation.db")

    # Identical scheduling for both sources; only name/slug/feed differ.
    common_fields = dict(
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=True,
        cron_minute="*/5",
        cron_hour="*",
        cron_day_of_month="*",
        cron_day_of_week="*",
        cron_month="*",
    )
    queued_source = create_source(
        name="Queued source",
        slug="queued-source",
        feed_url="https://example.com/queued.xml",
        **common_fields,
    )
    scheduled_source = create_source(
        name="Scheduled source",
        slug="scheduled-source",
        feed_url="https://example.com/scheduled.xml",
        **common_fields,
    )
    queued_job = Job.get(Job.source == queued_source)
    # Sanity check only: a Job row must exist for the scheduled source too.
    Job.get(Job.source == scheduled_source)
    JobExecution.create(
        job=queued_job,
        running_status=JobExecutionStatus.PENDING,
    )

    view = load_runs_view(
        log_dir=tmp_path / "out" / "logs",
        now=datetime(2026, 3, 30, 12, 30, tzinfo=UTC),
    )

    # Queued and upcoming are disjoint: the pending job appears exactly once.
    assert tuple(row["slug"] for row in view["queued"]) == ("queued-source",)
    assert all(row["slug"] != "queued-source" for row in view["upcoming"])
    assert tuple(row["slug"] for row in view["upcoming"]) == ("scheduled-source",)
    assert view["upcoming"][0]["run_reason"] == "Ready"
    assert view["upcoming"][0]["run_disabled"] is False
||||
|
||||
def test_load_runs_view_running_row_targets_queued_follow_up_cancel(
    tmp_path: Path,
) -> None:
    """When a job is running and has a queued follow-up, the running row's
    cancel action posts to the pending (queued) execution, not the running one."""
    initialize_database(tmp_path / "jobs-running-cancel.db")
    source = create_source(
        name="Running source",
        slug="running-source",
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=True,
        cron_minute="*/5",
        cron_hour="*",
        cron_day_of_month="*",
        cron_day_of_week="*",
        cron_month="*",
        feed_url="https://example.com/running.xml",
    )
    job = Job.get(Job.source == source)

    # One execution currently running, plus a follow-up waiting in the queue.
    JobExecution.create(
        job=job,
        started_at=datetime(2026, 3, 30, 12, 0, tzinfo=UTC),
        running_status=JobExecutionStatus.RUNNING,
    )
    pending_execution = JobExecution.create(
        job=job,
        created_at=datetime(2026, 3, 30, 12, 5, tzinfo=UTC),
        running_status=JobExecutionStatus.PENDING,
    )

    view = load_runs_view(
        log_dir=tmp_path / "out" / "logs",
        now=datetime(2026, 3, 30, 12, 30, tzinfo=UTC),
    )

    top_running = view["running"][0]
    assert top_running["cancel_label"] == "Cancel"
    assert top_running["cancel_post_path"] == (
        f"/actions/queued-executions/{int(pending_execution.get_id())}/cancel"
    )
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue