Improve sources and runs history tables

Abel Luck 2026-03-31 10:49:50 +02:00
parent df68aa95e9
commit 939cd9ea5d
7 changed files with 459 additions and 25 deletions

View file

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+import math
 import os
 import signal
 import subprocess
@@ -10,7 +11,7 @@ import time
 from dataclasses import dataclass
 from datetime import UTC, datetime, timedelta
 from pathlib import Path
-from typing import Callable, TextIO, cast
+from typing import Callable, TextIO, TypedDict, cast
 
 from apscheduler.schedulers.background import BackgroundScheduler
 from apscheduler.triggers.cron import CronTrigger
@@ -30,6 +31,7 @@ from repub.model import (
 SCHEDULER_JOB_PREFIX = "job-"
 POLL_JOB_ID = "runtime-poll-workers"
 SYNC_JOB_ID = "runtime-sync-jobs"
+COMPLETED_EXECUTION_PAGE_SIZE = 20
 
 
 @dataclass(frozen=True)
@@ -102,6 +104,17 @@ class ExecutionLogView:
     error_message: str | None = None
 
 
+class RunsView(TypedDict):
+    running: tuple[dict[str, object], ...]
+    queued: tuple[dict[str, object], ...]
+    upcoming: tuple[dict[str, object], ...]
+    completed: tuple[dict[str, object], ...]
+    completed_page: int
+    completed_page_size: int
+    completed_total_count: int
+    completed_total_pages: int
+
+
 class JobRuntime:
     def __init__(
         self,
@@ -647,10 +660,15 @@ class JobRuntime:
 
 
 def load_runs_view(
-    *, log_dir: str | Path, now: datetime | None = None
-) -> dict[str, tuple[dict[str, object], ...]]:
+    *,
+    log_dir: str | Path,
+    now: datetime | None = None,
+    completed_page: int = 1,
+    completed_page_size: int = COMPLETED_EXECUTION_PAGE_SIZE,
+) -> RunsView:
     reference_time = now or datetime.now(UTC)
     resolved_log_dir = Path(log_dir)
+    sanitized_page_size = max(1, completed_page_size)
     with database.connection_context():
         execution_primary_key = getattr(JobExecution, "_meta").primary_key
         jobs = tuple(Job.select(Job, Source).join(Source).order_by(Source.name.asc()))
@@ -668,7 +686,7 @@ def load_runs_view(
             .where(JobExecution.running_status == JobExecutionStatus.RUNNING)
             .order_by(JobExecution.started_at.desc())
         )
-        completed_executions = tuple(
+        completed_query = (
             JobExecution.select(JobExecution, Job, Source)
             .join(Job)
             .join(Source)
@@ -682,7 +700,14 @@ def load_runs_view(
                 )
             )
             .order_by(JobExecution.ended_at.desc())
-            .limit(20)
+        )
+        completed_total_count = completed_query.count()
+        completed_total_pages = max(
+            1, math.ceil(completed_total_count / sanitized_page_size)
+        )
+        sanitized_completed_page = min(max(1, completed_page), completed_total_pages)
+        completed_executions = tuple(
+            completed_query.paginate(sanitized_completed_page, sanitized_page_size)
         )
 
         running_by_job = {
@@ -725,9 +750,49 @@ def load_runs_view(
             _project_completed_execution(execution, resolved_log_dir, reference_time)
             for execution in completed_executions
         ),
+        "completed_page": sanitized_completed_page,
+        "completed_page_size": sanitized_page_size,
+        "completed_total_count": completed_total_count,
+        "completed_total_pages": completed_total_pages,
     }
 
 
+def clear_completed_executions(*, log_dir: str | Path) -> int:
+    resolved_log_dir = Path(log_dir)
+    with database.connection_context():
+        execution_primary_key = getattr(JobExecution, "_meta").primary_key
+        completed_executions = tuple(
+            JobExecution.select(JobExecution, Job)
+            .join(Job)
+            .where(
+                JobExecution.running_status.in_(
+                    (
+                        JobExecutionStatus.SUCCEEDED,
+                        JobExecutionStatus.FAILED,
+                        JobExecutionStatus.CANCELED,
+                    )
+                )
+            )
+        )
+        if not completed_executions:
+            return 0
+        for execution in completed_executions:
+            job = cast(Job, execution.job)
+            prefix = f"job-{_job_id(job)}-execution-{_execution_id(execution)}"
+            for artifact_path in resolved_log_dir.glob(f"{prefix}.*"):
+                artifact_path.unlink(missing_ok=True)
+        execution_ids = tuple(
+            _execution_id(execution) for execution in completed_executions
+        )
+        return (
+            JobExecution.delete()
+            .where(execution_primary_key.in_(execution_ids))
+            .execute()
+        )
+
+
 def load_dashboard_view(
     *, log_dir: str | Path, now: datetime | None = None
 ) -> dict[str, object]:
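
The clamping added to load_runs_view is worth seeing in isolation: the page size is forced to at least 1, there is always at least one (possibly empty) page, and an out-of-range page snaps to the nearest valid one. A minimal standalone sketch of the same arithmetic (the clamp_pagination helper is invented here for illustration, not part of this commit):

import math


def clamp_pagination(
    page: int, page_size: int, total_count: int
) -> tuple[int, int, int]:
    # Mirrors load_runs_view: size >= 1, at least one page, page in range.
    sanitized_size = max(1, page_size)
    total_pages = max(1, math.ceil(total_count / sanitized_size))
    sanitized_page = min(max(1, page), total_pages)
    return sanitized_page, sanitized_size, total_pages


# 21 completed rows at 20 per page -> 2 pages; a request for page 5 snaps to 2.
assert clamp_pagination(5, 20, 21) == (2, 20, 2)
# An empty history still reports one (empty) page rather than zero.
assert clamp_pagination(1, 20, 0) == (1, 20, 1)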

View file

@@ -242,12 +242,129 @@ def _completed_row(execution: Mapping[str, object]) -> tuple[Node, ...]:
     )
 
 
+def _completed_page_href(page: int) -> str:
+    return f"/runs?completed_page={page}"
+
+
+def _completed_history_pagination(
+    *,
+    completed_page: int,
+    completed_page_size: int,
+    completed_total_count: int,
+    completed_total_pages: int,
+) -> Renderable | None:
+    if completed_total_count <= completed_page_size:
+        return None
+    start_result = ((completed_page - 1) * completed_page_size) + 1
+    end_result = min(completed_total_count, completed_page * completed_page_size)
+    link_class = (
+        "relative inline-flex items-center px-4 py-2 text-sm font-semibold text-slate-700 "
+        "ring-1 ring-inset ring-slate-200 hover:bg-stone-50"
+    )
+    return h.div(
+        class_="flex items-center justify-between border-t border-slate-200 bg-white px-4 py-3 sm:px-6"
+    )[
+        h.div(class_="flex flex-1 justify-between sm:hidden")[
+            h.a(
+                href=_completed_page_href(max(1, completed_page - 1)),
+                class_="relative inline-flex items-center rounded-xl border border-slate-200 bg-white px-4 py-2 text-sm font-medium text-slate-700 hover:bg-stone-50",
+            )["Previous"],
+            h.a(
+                href=_completed_page_href(
+                    min(completed_total_pages, completed_page + 1)
+                ),
+                class_="relative ml-3 inline-flex items-center rounded-xl border border-slate-200 bg-white px-4 py-2 text-sm font-medium text-slate-700 hover:bg-stone-50",
+            )["Next"],
+        ],
+        h.div(class_="hidden sm:flex sm:flex-1 sm:items-center sm:justify-between")[
+            h.p(class_="text-sm text-slate-600")[
+                "Showing ",
+                h.span(class_="font-medium text-slate-950")[str(start_result)],
+                " to ",
+                h.span(class_="font-medium text-slate-950")[str(end_result)],
+                " of ",
+                h.span(class_="font-medium text-slate-950")[str(completed_total_count)],
+                " results",
+            ],
+            h.nav(
+                aria_label="Completed execution pagination",
+                class_="isolate inline-flex -space-x-px rounded-xl shadow-xs",
+            )[
+                (
+                    h.a(
+                        href=_completed_page_href(page_number),
+                        aria_current=(
+                            "page" if page_number == completed_page else None
+                        ),
+                        class_=(
+                            "relative z-10 inline-flex items-center bg-amber-500 px-4 py-2 text-sm font-semibold text-slate-950"
+                            if page_number == completed_page
+                            else link_class
+                        ),
+                    )[str(page_number)]
+                    for page_number in range(1, completed_total_pages + 1)
+                )
+            ],
+        ],
+    ]
+
+
+def _completed_history_section(
+    *,
+    completed_rows: tuple[tuple[Node, ...], ...],
+    completed_page: int,
+    completed_page_size: int,
+    completed_total_count: int,
+    completed_total_pages: int,
+) -> Renderable:
+    pagination = _completed_history_pagination(
+        completed_page=completed_page,
+        completed_page_size=completed_page_size,
+        completed_total_count=completed_total_count,
+        completed_total_pages=completed_total_pages,
+    )
+    return h.section[
+        table_section(
+            eyebrow="History",
+            title="Completed job executions",
+            empty_message="No job executions have completed yet.",
+            headers=(
+                "#",
+                "Source",
+                "Ended",
+                "State",
+                "Summary",
+                "Log",
+            ),
+            rows=completed_rows,
+            first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
+            first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
+            actions=(
+                action_button(
+                    label="Clear history",
+                    tone="danger",
+                    post_path="/actions/completed-executions/clear",
+                )
+                if completed_total_count > 0
+                else None
+            ),
+        ),
+        pagination,
+    ]
+
+
 def runs_page(
     *,
     running_executions: tuple[Mapping[str, object], ...] | None = None,
     queued_executions: tuple[Mapping[str, object], ...] | None = None,
     upcoming_jobs: tuple[Mapping[str, object], ...] | None = None,
     completed_executions: tuple[Mapping[str, object], ...] | None = None,
+    completed_page: int = 1,
+    completed_page_size: int = 20,
+    completed_total_count: int | None = None,
+    completed_total_pages: int | None = None,
     source_count: int = 0,
 ) -> Renderable:
     running_items = running_executions or ()
@@ -262,6 +379,12 @@ def runs_page(
     )
     upcoming_rows = tuple(_upcoming_row(job) for job in upcoming_items)
     completed_rows = tuple(_completed_row(execution) for execution in completed_items)
+    resolved_completed_total_count = (
+        len(completed_items) if completed_total_count is None else completed_total_count
+    )
+    resolved_completed_total_pages = (
+        1 if completed_total_pages is None else completed_total_pages
+    )
 
     return page_shell(
         current_path="/runs",
@@ -302,21 +425,12 @@ def runs_page(
             ),
             rows=upcoming_rows,
         ),
-        table_section(
-            eyebrow="History",
-            title="Completed job executions",
-            empty_message="No job executions have completed yet.",
-            headers=(
-                "#",
-                "Source",
-                "Ended",
-                "State",
-                "Summary",
-                "Log",
-            ),
-            rows=completed_rows,
-            first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
-            first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
+        _completed_history_section(
+            completed_rows=completed_rows,
+            completed_page=completed_page,
+            completed_page_size=completed_page_size,
+            completed_total_count=resolved_completed_total_count,
+            completed_total_pages=resolved_completed_total_pages,
         ),
         h.script[
             """

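The numbered links above boil down to one htpy anchor per page, with aria-current marking the active page. A trimmed sketch of the same pattern with the Tailwind classes dropped, assuming htpy's usual underscore-to-dash attribute mapping and that None-valued attributes are omitted (the code above relies on both):

from htpy import a, nav


def page_links(current: int, total: int):
    # One anchor per page; only the active page carries aria-current="page".
    return nav(aria_label="Completed execution pagination")[
        (
            a(
                href=f"/runs?completed_page={n}",
                aria_current="page" if n == current else None,
            )[str(n)]
            for n in range(1, total + 1)
        )
    ]


print(page_links(2, 3))  # renders the <nav>; link 2 is marked aria-current="page"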
View file

@@ -75,13 +75,11 @@ def _source_row(source: Mapping[str, object]) -> tuple[Node, ...]:
                 label=str(source["state"]),
                 tone=str(source["state_tone"]),
             ),
-            h.p(class_="mt-2 text-xs text-slate-500")[str(source["last_run"])],
         ],
-        h.div(class_="flex flex-wrap items-center gap-2")[
+        h.div(class_="flex flex-nowrap items-center gap-3 whitespace-nowrap")[
             inline_link(
                 href=f"/sources/{source['slug']}/edit", label="Edit", tone="amber"
             ),
-            inline_link(href="/runs", label="View runs"),
             action_button(
                 label="Delete",
                 tone="danger",

View file

@@ -251,6 +251,12 @@
 .top-0 {
   top: calc(var(--spacing) * 0);
 }
+.isolate {
+  isolation: isolate;
+}
+.z-10 {
+  z-index: 10;
+}
 .container {
   width: 100%;
   @media (width >= 40rem) {
@@ -299,6 +305,9 @@
 .mb-3 {
   margin-bottom: calc(var(--spacing) * 3);
 }
+.ml-3 {
+  margin-left: calc(var(--spacing) * 3);
+}
 .block {
   display: block;
 }
@@ -393,6 +402,9 @@
 .min-w-\[70rem\] {
   min-width: 70rem;
 }
+.flex-1 {
+  flex: 1;
+}
 .shrink-0 {
   flex-shrink: 0;
 }
@@ -475,6 +487,13 @@
     margin-block-end: calc(calc(var(--spacing) * 6) * calc(1 - var(--tw-space-y-reverse)));
   }
 }
+.-space-x-px {
+  :where(& > :not(:last-child)) {
+    --tw-space-x-reverse: 0;
+    margin-inline-start: calc(-1px * var(--tw-space-x-reverse));
+    margin-inline-end: calc(-1px * calc(1 - var(--tw-space-x-reverse)));
+  }
+}
 .divide-y {
   :where(& > :not(:last-child)) {
     --tw-divide-y-reverse: 0;
@@ -912,6 +931,9 @@
   transition-timing-function: var(--tw-ease, var(--default-transition-timing-function));
   transition-duration: var(--tw-duration, var(--default-transition-duration));
 }
+.ring-inset {
+  --tw-ring-inset: inset;
+}
 .placeholder\:text-slate-400 {
   &::placeholder {
     color: var(--color-slate-400);
@@ -962,6 +984,13 @@
     }
   }
 }
+.hover\:bg-stone-50 {
+  &:hover {
+    @media (hover: hover) {
+      background-color: var(--color-stone-50);
+    }
+  }
+}
 .hover\:bg-stone-200 {
   &:hover {
     @media (hover: hover) {
@@ -1054,6 +1083,21 @@
     background-color: var(--color-slate-200);
   }
 }
+.sm\:flex {
+  @media (width >= 40rem) {
+    display: flex;
+  }
+}
+.sm\:hidden {
+  @media (width >= 40rem) {
+    display: none;
+  }
+}
+.sm\:flex-1 {
+  @media (width >= 40rem) {
+    flex: 1;
+  }
+}
 .sm\:grid-cols-2 {
   @media (width >= 40rem) {
     grid-template-columns: repeat(2, minmax(0, 1fr));
@@ -1064,6 +1108,11 @@
     flex-direction: row;
   }
 }
+.sm\:items-center {
+  @media (width >= 40rem) {
+    align-items: center;
+  }
+}
 .sm\:items-end {
   @media (width >= 40rem) {
     align-items: flex-end;
@@ -1084,6 +1133,11 @@
     padding-inline: calc(var(--spacing) * 4);
   }
 }
+.sm\:px-6 {
+  @media (width >= 40rem) {
+    padding-inline: calc(var(--spacing) * 6);
+  }
+}
 .sm\:pl-2\.5 {
   @media (width >= 40rem) {
     padding-left: calc(var(--spacing) * 2.5);
@@ -1188,6 +1242,11 @@
   inherits: false;
   initial-value: 0;
 }
+@property --tw-space-x-reverse {
+  syntax: "*";
+  inherits: false;
+  initial-value: 0;
+}
 @property --tw-divide-y-reverse {
   syntax: "*";
   inherits: false;
@@ -1418,6 +1477,7 @@
       --tw-translate-y: 0;
       --tw-translate-z: 0;
       --tw-space-y-reverse: 0;
+      --tw-space-x-reverse: 0;
       --tw-divide-y-reverse: 0;
       --tw-border-style: solid;
       --tw-gradient-position: initial;

View file

@@ -13,11 +13,20 @@ from datastar_py.quart import DatastarResponse, read_signals
 from datastar_py.sse import DatastarEvent
 from htpy import Renderable
 from peewee import IntegrityError
-from quart import Quart, Response, request, send_from_directory, url_for
+from quart import (
+    Quart,
+    Response,
+    has_request_context,
+    request,
+    send_from_directory,
+    url_for,
+)
 
 from repub.datastar import RefreshBroker, render_stream
 from repub.jobs import (
+    COMPLETED_EXECUTION_PAGE_SIZE,
     JobRuntime,
+    clear_completed_executions,
     load_dashboard_view,
     load_execution_log_view,
     load_runs_view,
@@ -329,6 +338,12 @@ def create_app(*, dev_mode: bool = False) -> Quart:
         get_job_runtime(app).move_queued_execution(execution_id, direction="down")
         return Response(status=204)
 
+    @app.post("/actions/completed-executions/clear")
+    async def clear_completed_executions_action() -> Response:
+        clear_completed_executions(log_dir=app.config["REPUB_LOG_DIR"])
+        trigger_refresh(app)
+        return Response(status=204)
+
     @app.post("/job/<int:job_id>/execution/<int:execution_id>/logs")
     async def logs_patch(job_id: int, execution_id: int) -> DatastarResponse:
         async def render() -> Renderable:
@@ -420,12 +435,25 @@ async def render_runs(app: Quart | None = None) -> Renderable:
     if app is None:
         return runs_page()
 
-    view = load_runs_view(log_dir=app.config["REPUB_LOG_DIR"])
+    completed_page = (
+        max(1, request.args.get("completed_page", 1, type=int) or 1)
+        if has_request_context()
+        else 1
+    )
+    view = load_runs_view(
+        log_dir=app.config["REPUB_LOG_DIR"],
+        completed_page=completed_page,
+        completed_page_size=COMPLETED_EXECUTION_PAGE_SIZE,
+    )
     return runs_page(
         running_executions=cast(tuple[dict[str, object], ...], view["running"]),
         queued_executions=cast(tuple[dict[str, object], ...], view["queued"]),
        upcoming_jobs=cast(tuple[dict[str, object], ...], view["upcoming"]),
        completed_executions=cast(tuple[dict[str, object], ...], view["completed"]),
+        completed_page=cast(int, view["completed_page"]),
+        completed_page_size=cast(int, view["completed_page_size"]),
+        completed_total_count=cast(int, view["completed_total_count"]),
+        completed_total_pages=cast(int, view["completed_total_pages"]),
         source_count=len(load_sources()),
     )
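
Taken together, the new surface is one query parameter on GET /runs and one POST action. A sketch of exercising both through Quart's test client, in the style of the web tests later in this commit; it assumes a database and log directory are configured the way those tests arrange them:

import asyncio

from repub.web import create_app


async def exercise_history_routes() -> None:
    # Assumes REPUBLISHER_DB_PATH and REPUB_LOG_DIR are set up as in the tests.
    client = create_app().test_client()
    # Out-of-range pages are clamped server-side, so any page renders.
    page = await client.get("/runs?completed_page=2")
    assert page.status_code == 200
    # Clearing history returns 204 and triggers a refresh for connected pages.
    cleared = await client.post("/actions/completed-executions/clear")
    assert cleared.status_code == 204


asyncio.run(exercise_history_routes())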

View file

@@ -254,3 +254,51 @@ def test_load_runs_view_running_row_targets_queued_follow_up_cancel(
     assert running_row["cancel_post_path"] == (
         f"/actions/queued-executions/{int(pending_execution.get_id())}/cancel"
     )
+
+
+def test_load_runs_view_paginates_completed_executions_after_20_rows(
+    tmp_path: Path,
+) -> None:
+    initialize_database(tmp_path / "jobs-completed-pagination.db")
+    source = create_source(
+        name="Completed source",
+        slug="completed-source",
+        source_type="feed",
+        notes="",
+        spider_arguments="",
+        enabled=False,
+        cron_minute="*/5",
+        cron_hour="*",
+        cron_day_of_month="*",
+        cron_day_of_week="*",
+        cron_month="*",
+        feed_url="https://example.com/completed.xml",
+    )
+    job = Job.get(Job.source == source)
+    base_time = datetime(2026, 3, 30, 12, 0, tzinfo=UTC)
+    for offset in range(21):
+        JobExecution.create(
+            job=job,
+            running_status=JobExecutionStatus.SUCCEEDED,
+            ended_at=base_time - timedelta(minutes=offset),
+        )
+
+    first_page = load_runs_view(
+        log_dir=tmp_path / "out" / "logs",
+        now=base_time,
+        completed_page=1,
+    )
+    second_page = load_runs_view(
+        log_dir=tmp_path / "out" / "logs",
+        now=base_time,
+        completed_page=2,
+    )
+
+    assert len(first_page["completed"]) == 20
+    assert len(second_page["completed"]) == 1
+    assert first_page["completed_page"] == 1
+    assert second_page["completed_page"] == 2
+    assert first_page["completed_total_pages"] == 2
+    assert second_page["completed_total_pages"] == 2
+    assert first_page["completed_total_count"] == 21
+    assert second_page["completed_total_count"] == 21
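
The expected page sizes rest on peewee's paginate(page, paginate_by), which is 1-indexed shorthand for an OFFSET/LIMIT pair. A self-contained equivalence check, with an Item model invented for illustration:

from peewee import IntegerField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Item(Model):
    value = IntegerField()

    class Meta:
        database = db


db.create_tables([Item])
Item.insert_many([{"value": n} for n in range(21)]).execute()

query = Item.select().order_by(Item.value)
# paginate(page, per_page) == .offset((page - 1) * per_page).limit(per_page)
assert [i.value for i in query.paginate(2, 20)] == [
    i.value for i in query.offset(20).limit(20)
]
assert len(query.paginate(1, 20)) == 20  # first page is full
assert len(query.paginate(2, 20)) == 1   # the 21st row spills onto page 2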

View file

@@ -22,6 +22,7 @@ from repub.model import (
     save_setting,
 )
 from repub.pages.runs import runs_page
+from repub.pages.sources import sources_page
 from repub.web import (
     create_app,
     get_refresh_broker,
@@ -172,6 +173,66 @@ def test_runs_page_renders_combined_running_jobs_table() -> None:
     assert "/actions/queued-executions/42/cancel" in body
 
 
+def test_sources_page_removes_view_runs_action_and_last_run_caption() -> None:
+    body = str(
+        sources_page(
+            sources=(
+                {
+                    "name": "Source one",
+                    "slug": "source-one",
+                    "source_type": "Feed",
+                    "upstream": "https://example.com/feed.xml",
+                    "schedule": "cron: */5 * * * *",
+                    "last_run": "Never run",
+                    "state": "Enabled",
+                    "state_tone": "scheduled",
+                },
+            )
+        )
+    )
+
+    assert ">Edit<" in body
+    assert ">Delete<" in body
+    assert "View runs" not in body
+    assert "Never run" not in body
+
+
+def test_runs_page_renders_clear_completed_button_and_pagination() -> None:
+    completed_executions = tuple(
+        {
+            "source": f"Completed source {index}",
+            "slug": f"completed-source-{index}",
+            "job_id": 7,
+            "execution_id": index,
+            "ended_at": "2 hours ago",
+            "ended_at_iso": "2026-01-15T10:00:00+00:00",
+            "status": "Succeeded",
+            "status_tone": "done",
+            "stats": "1 requests • 1 items • 1 bytes",
+            "summary": "Worker exited successfully",
+            "log_href": f"/job/7/execution/{index}/logs",
+        }
+        for index in range(1, 21)
+    )
+
+    body = str(
+        runs_page(
+            completed_executions=completed_executions,
+            completed_page=2,
+            completed_page_size=20,
+            completed_total_count=21,
+            completed_total_pages=2,
+        )
+    )
+
+    assert "/actions/completed-executions/clear" in body
+    assert ">Clear history<" in body
+    assert "Showing" in body
+    assert "21" in body
+    assert 'href="/runs?completed_page=1"' in body
+    assert 'href="/runs?completed_page=2"' in body
+    assert 'aria-current="page"' in body
+
+
 def test_root_get_serves_datastar_shim() -> None:
     async def run() -> None:
         client = create_app().test_client()
@@ -1498,6 +1559,66 @@ def test_cancel_queued_execution_action_deletes_pending_row_without_touching_run
     asyncio.run(run())
 
 
+def test_clear_completed_executions_action_removes_history_and_log_artifacts(
+    monkeypatch, tmp_path: Path
+) -> None:
+    db_path = tmp_path / "clear-completed-action.db"
+    log_dir = tmp_path / "out" / "logs"
+    monkeypatch.setenv("REPUBLISHER_DB_PATH", str(db_path))
+
+    async def run() -> None:
+        app = create_app()
+        app.config["REPUB_LOG_DIR"] = log_dir
+        client = app.test_client()
+        source = create_source(
+            name="History source",
+            slug="history-source",
+            source_type="feed",
+            notes="",
+            spider_arguments="",
+            enabled=True,
+            cron_minute="*/5",
+            cron_hour="*",
+            cron_day_of_month="*",
+            cron_day_of_week="*",
+            cron_month="*",
+            feed_url="https://example.com/history.xml",
+        )
+        job = Job.get(Job.source == source)
+        completed_execution = JobExecution.create(
+            job=job,
+            running_status=JobExecutionStatus.SUCCEEDED,
+            ended_at=datetime(2026, 3, 30, 12, 0, tzinfo=UTC),
+        )
+        running_execution = JobExecution.create(
+            job=job,
+            running_status=JobExecutionStatus.RUNNING,
+            started_at=datetime(2026, 3, 30, 12, 5, tzinfo=UTC),
+        )
+        log_dir.mkdir(parents=True, exist_ok=True)
+        completed_prefix = (
+            log_dir / f"job-{job.id}-execution-{int(completed_execution.get_id())}"
+        )
+        running_log_path = (
+            log_dir / f"job-{job.id}-execution-{int(running_execution.get_id())}.log"
+        )
+        for suffix in (".log", ".jsonl", ".pygea.log"):
+            completed_prefix.with_suffix(suffix).write_text("history", encoding="utf-8")
+        running_log_path.write_text("running", encoding="utf-8")
+
+        response = await client.post("/actions/completed-executions/clear")
+
+        assert response.status_code == 204
+        assert JobExecution.get_or_none(id=int(completed_execution.get_id())) is None
+        assert JobExecution.get_or_none(id=int(running_execution.get_id())) is not None
+        for suffix in (".log", ".jsonl", ".pygea.log"):
+            assert not completed_prefix.with_suffix(suffix).exists()
+        assert running_log_path.exists()
+
+    asyncio.run(run())
+
+
 def test_move_queued_execution_action_reorders_queue(
     monkeypatch, tmp_path: Path
 ) -> None: