runs queue order manipulation and whitespace tightening

This commit is contained in:
Abel Luck 2026-03-31 10:23:46 +02:00
parent a88eba7dd1
commit 99fd33f770
10 changed files with 478 additions and 121 deletions

View file

@ -1,17 +1,18 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Mapping
import htpy as h import htpy as h
from htpy import Node, Renderable from htpy import Node, Renderable
def _button_classes(*, tone: str, emphasis: str, disabled: bool = False) -> str: def _button_classes(*, tone: str, emphasis: str, disabled: bool = False) -> str:
base = ( base = "inline-flex shrink-0 items-center justify-center rounded-full font-semibold transition "
"inline-flex items-center justify-center rounded-full font-semibold transition "
)
emphasis_classes = { emphasis_classes = {
"compact": "px-3 py-1.5 text-sm", "compact": "px-3 py-1.5 text-sm",
"regular": "px-4 py-2.5 text-sm", "regular": "px-4 py-2.5 text-sm",
"soft": "px-3.5 py-2 text-sm", "soft": "px-3.5 py-2 text-sm",
"icon": "size-8 p-0",
} }
tone_classes = { tone_classes = {
"amber": "bg-amber-400 text-slate-950 hover:bg-amber-300", "amber": "bg-amber-400 text-slate-950 hover:bg-amber-300",
@ -168,20 +169,24 @@ def inline_link(*, href: str, label: str, tone: str = "default") -> Renderable:
def action_button( def action_button(
*, *,
label: str, label: Node,
tone: str = "default", tone: str = "default",
emphasis: str = "compact", emphasis: str = "compact",
disabled: bool = False, disabled: bool = False,
button_type: str = "button", button_type: str = "button",
post_path: str | None = None, post_path: str | None = None,
title: str | None = None,
) -> Renderable: ) -> Renderable:
attributes: dict[str, str] = {} attributes: dict[str, str] = {}
if post_path is not None and not disabled: if post_path is not None and not disabled:
attributes["data-on:pointerdown"] = f"@post('{post_path}')" attributes["data-on:pointerdown"] = f"@post('{post_path}')"
if title is not None:
attributes["aria-label"] = title
return h.button( return h.button(
attributes, attributes,
type=button_type, type=button_type,
disabled=disabled, disabled=disabled,
title=title,
class_=_button_classes(tone=tone, emphasis=emphasis, disabled=disabled), class_=_button_classes(tone=tone, emphasis=emphasis, disabled=disabled),
)[label] )[label]
@ -269,14 +274,24 @@ def table_section(
empty_message: str, empty_message: str,
headers: tuple[str, ...], headers: tuple[str, ...],
rows: tuple[tuple[Node, ...], ...], rows: tuple[tuple[Node, ...], ...],
row_attrs: tuple[Mapping[str, str], ...] | None = None,
first_header_class: str | None = None,
first_cell_class: str | None = None,
actions: Node | None = None, actions: Node | None = None,
) -> Renderable: ) -> Renderable:
def render_row(row: tuple[Node, ...]) -> Renderable: def render_row(
row: tuple[Node, ...], attrs: Mapping[str, str] | None = None
) -> Renderable:
first_cell, *other_cells = row first_cell, *other_cells = row
return h.tr(class_="align-top")[ row_attributes = dict(attrs or {})
h.td(class_="py-3 pr-5 pl-3 text-sm font-medium text-slate-950 sm:pl-4")[ row_attributes["class"] = f"align-top {row_attributes.get('class', '')}".strip()
first_cell return h.tr(row_attributes)[
], h.td(
class_=(
first_cell_class
or "py-3 pr-5 pl-3 text-sm font-medium text-slate-950 sm:pl-4"
)
)[first_cell],
( (
h.td( h.td(
class_="px-2.5 py-3 align-top text-sm whitespace-nowrap text-slate-600" class_="px-2.5 py-3 align-top text-sm whitespace-nowrap text-slate-600"
@ -287,7 +302,11 @@ def table_section(
body_rows: Node body_rows: Node
if rows: if rows:
body_rows = (render_row(row) for row in rows) row_attributes = row_attrs or tuple({} for _ in rows)
body_rows = (
render_row(row, attrs)
for row, attrs in zip(rows, row_attributes, strict=False)
)
else: else:
body_rows = h.tr[ body_rows = h.tr[
h.td( h.td(
@ -322,9 +341,13 @@ def table_section(
( (
h.th( h.th(
scope="col", scope="col",
class_="px-2.5 py-2.5 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 first:pl-3 sm:first:pl-4", class_=(
first_header_class
if index == 0 and first_header_class is not None
else "px-2.5 py-2.5 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 first:pl-3 sm:first:pl-4"
),
)[header] )[header]
for header in headers for index, header in enumerate(headers)
) )
] ]
], ],

View file

@ -47,13 +47,20 @@ def _publish_event(queue: asyncio.Queue[object], event: object) -> None:
async def render_sse_event( async def render_sse_event(
render: RenderFunction, *, last_event_id: str | None = None render: RenderFunction,
*,
last_event_id: str | None = None,
use_view_transition: bool = False,
) -> tuple[str | None, DatastarEvent | None]: ) -> tuple[str | None, DatastarEvent | None]:
html = _coerce_html(await render()) html = _coerce_html(await render())
event_id = _render_hash(html) event_id = _render_hash(html)
if event_id == last_event_id: if event_id == last_event_id:
return last_event_id, None return last_event_id, None
return event_id, SSE.patch_elements(html, event_id=event_id) return event_id, SSE.patch_elements(
html,
event_id=event_id,
use_view_transition=use_view_transition,
)
async def render_stream( async def render_stream(
@ -71,9 +78,11 @@ async def render_stream(
yield event yield event
while True: while True:
await queue.get() event_name = await queue.get()
last_event_id, event = await render_sse_event( last_event_id, event = await render_sse_event(
render, last_event_id=last_event_id render,
last_event_id=last_event_id,
use_view_transition=event_name == "queue-reordered",
) )
if event is not None: if event is not None:
yield event yield event

View file

@ -107,7 +107,7 @@ class JobRuntime:
self, self,
*, *,
log_dir: str | Path, log_dir: str | Path,
refresh_callback: Callable[[], None] | None = None, refresh_callback: Callable[[object], None] | None = None,
graceful_stop_seconds: float = 15.0, graceful_stop_seconds: float = 15.0,
) -> None: ) -> None:
self.log_dir = Path(log_dir) self.log_dir = Path(log_dir)
@ -117,6 +117,7 @@ class JobRuntime:
self._workers: dict[int, RunningWorker] = {} self._workers: dict[int, RunningWorker] = {}
self._run_lock = threading.Lock() self._run_lock = threading.Lock()
self._started = False self._started = False
self._last_runtime_refresh_at = 0.0
def start(self) -> None: def start(self) -> None:
if self._started: if self._started:
@ -353,7 +354,7 @@ class JobRuntime:
) )
worker.process.terminate() worker.process.terminate()
self._trigger_refresh() self._trigger_refresh("queue-reordered")
return True return True
def cancel_queued_execution(self, execution_id: int) -> bool: def cancel_queued_execution(self, execution_id: int) -> bool:
@ -375,6 +376,75 @@ class JobRuntime:
self._trigger_refresh() self._trigger_refresh()
return True return True
def move_queued_execution(self, execution_id: int, *, direction: str) -> bool:
offset = -1 if direction == "up" else 1
with self._run_lock:
with database.connection_context():
execution_primary_key = getattr(JobExecution, "_meta").primary_key
queued_executions = tuple(
JobExecution.select()
.where(JobExecution.running_status == JobExecutionStatus.PENDING)
.order_by(
JobExecution.created_at.asc(), execution_primary_key.asc()
)
)
current_index = next(
(
index
for index, execution in enumerate(queued_executions)
if _execution_id(execution) == execution_id
),
None,
)
if current_index is None:
return False
target_index = current_index + offset
if target_index < 0 or target_index >= len(queued_executions):
return False
current_execution = queued_executions[current_index]
target_execution = queued_executions[target_index]
current_created_at = _coerce_datetime(
cast(datetime | str, current_execution.created_at)
)
target_created_at = _coerce_datetime(
cast(datetime | str, target_execution.created_at)
)
with database.atomic():
if current_created_at == target_created_at:
adjusted_created_at = target_created_at + timedelta(
microseconds=-1 if offset < 0 else 1
)
(
JobExecution.update(created_at=adjusted_created_at)
.where(
execution_primary_key
== _execution_id(current_execution)
)
.execute()
)
else:
(
JobExecution.update(created_at=target_created_at)
.where(
execution_primary_key
== _execution_id(current_execution)
)
.execute()
)
(
JobExecution.update(created_at=current_created_at)
.where(
execution_primary_key == _execution_id(target_execution)
)
.execute()
)
self._trigger_refresh()
return True
def set_job_enabled(self, job_id: int, *, enabled: bool) -> bool: def set_job_enabled(self, job_id: int, *, enabled: bool) -> bool:
with database.connection_context(): with database.connection_context():
with database.atomic(): with database.atomic():
@ -428,6 +498,8 @@ class JobRuntime:
if any_finished: if any_finished:
self._start_queued_jobs() self._start_queued_jobs()
self._refresh_running_runtime()
def _apply_stats(self, worker: RunningWorker) -> None: def _apply_stats(self, worker: RunningWorker) -> None:
if not worker.artifacts.stats_path.exists(): if not worker.artifacts.stats_path.exists():
return return
@ -471,9 +543,27 @@ class JobRuntime:
): ):
worker.process.kill() worker.process.kill()
def _trigger_refresh(self) -> None: def _trigger_refresh(self, event: object = "refresh-event") -> None:
if self.refresh_callback is not None: if self.refresh_callback is not None:
self.refresh_callback() self.refresh_callback(event)
def _refresh_running_runtime(self) -> None:
if not self._has_running_executions():
return
current_time = time.monotonic()
if current_time - self._last_runtime_refresh_at < 1.0:
return
self._last_runtime_refresh_at = current_time
self._trigger_refresh()
def _has_running_executions(self) -> bool:
return (
JobExecution.select()
.where(JobExecution.running_status == JobExecutionStatus.RUNNING)
.exists()
)
def _reconcile_stale_executions(self) -> None: def _reconcile_stale_executions(self) -> None:
live_workers = _find_live_workers() live_workers = _find_live_workers()
@ -614,13 +704,22 @@ def load_runs_view(
for execution in running_executions for execution in running_executions
), ),
"queued": tuple( "queued": tuple(
_project_queued_execution(execution, reference_time, position=position) _project_queued_execution(
execution,
reference_time,
position=position,
total_count=len(queued_executions),
)
for position, execution in enumerate(queued_executions, start=1) for position, execution in enumerate(queued_executions, start=1)
), ),
"upcoming": tuple( "upcoming": tuple(
_project_upcoming_job(job, running_by_job.get(job.id), reference_time) _project_upcoming_job(
job,
running_by_job.get(job.id),
queued_by_job.get(job.id),
reference_time,
)
for job in jobs for job in jobs
if job.id not in queued_by_job
), ),
"completed": tuple( "completed": tuple(
_project_completed_execution(execution, resolved_log_dir, reference_time) _project_completed_execution(execution, resolved_log_dir, reference_time)
@ -772,15 +871,20 @@ def _project_running_execution(
def _project_queued_execution( def _project_queued_execution(
execution: JobExecution, reference_time: datetime, *, position: int execution: JobExecution,
reference_time: datetime,
*,
position: int,
total_count: int,
) -> dict[str, object]: ) -> dict[str, object]:
job = cast(Job, execution.job) job = cast(Job, execution.job)
queued_at = _coerce_datetime(cast(datetime | str, execution.created_at)) queued_at = _coerce_datetime(cast(datetime | str, execution.created_at))
execution_id = _execution_id(execution)
return { return {
"source": job.source.name, "source": job.source.name,
"slug": job.source.slug, "slug": job.source.slug,
"job_id": _job_id(job), "job_id": _job_id(job),
"execution_id": _execution_id(execution), "execution_id": execution_id,
"queued_at": _humanize_relative_time(reference_time, queued_at), "queued_at": _humanize_relative_time(reference_time, queued_at),
"queued_at_iso": queued_at.isoformat(), "queued_at_iso": queued_at.isoformat(),
"queue_position": position, "queue_position": position,
@ -789,14 +893,27 @@ def _project_queued_execution(
"run_label": "Queued", "run_label": "Queued",
"run_disabled": True, "run_disabled": True,
"run_post_path": f"/actions/jobs/{_job_id(job)}/run-now", "run_post_path": f"/actions/jobs/{_job_id(job)}/run-now",
"cancel_post_path": ( "cancel_post_path": (f"/actions/queued-executions/{execution_id}/cancel"),
f"/actions/queued-executions/{_execution_id(execution)}/cancel" "move_up_disabled": position == 1,
"move_up_post_path": (
None
if position == 1
else f"/actions/queued-executions/{execution_id}/move-up"
),
"move_down_disabled": position == total_count,
"move_down_post_path": (
None
if position == total_count
else f"/actions/queued-executions/{execution_id}/move-down"
), ),
} }
def _project_upcoming_job( def _project_upcoming_job(
job: Job, running_execution: JobExecution | None, reference_time: datetime job: Job,
running_execution: JobExecution | None,
queued_execution: JobExecution | None,
reference_time: datetime,
) -> dict[str, object]: ) -> dict[str, object]:
job_id = _job_id(job) job_id = _job_id(job)
trigger = _job_trigger(job) trigger = _job_trigger(job)
@ -805,6 +922,12 @@ def _project_upcoming_job(
if job.enabled and running_execution is None if job.enabled and running_execution is None
else None else None
) )
run_disabled = running_execution is not None or queued_execution is not None
run_reason = (
"Already running"
if running_execution is not None
else ("Queued" if queued_execution is not None else "Ready")
)
return { return {
"source": job.source.name, "source": job.source.name,
"slug": job.source.slug, "slug": job.source.slug,
@ -826,8 +949,8 @@ def _project_upcoming_job(
), ),
"enabled_label": "Enabled" if job.enabled else "Disabled", "enabled_label": "Enabled" if job.enabled else "Disabled",
"enabled_tone": "scheduled" if job.enabled else "idle", "enabled_tone": "scheduled" if job.enabled else "idle",
"run_disabled": running_execution is not None, "run_disabled": run_disabled,
"run_reason": "Already running" if running_execution is not None else "Ready", "run_reason": run_reason,
"toggle_label": "Disable" if job.enabled else "Enable", "toggle_label": "Disable" if job.enabled else "Enable",
"toggle_enabled": not job.enabled, "toggle_enabled": not job.enabled,
"run_post_path": f"/actions/jobs/{job_id}/run-now", "run_post_path": f"/actions/jobs/{job_id}/run-now",

View file

@ -31,19 +31,47 @@ def _flag(values: Mapping[str, object], key: str) -> bool:
return bool(values[key]) return bool(values[key])
def _queue_icon(direction: str) -> Renderable:
    """Render the inline SVG arrow used by the queue reorder buttons.

    Args:
        direction: ``"up"`` renders an upward arrow; any other value renders
            the downward arrow.
    """
    if direction == "up":
        path = "M4.5 10.5 12 3m0 0 7.5 7.5M12 3v18"
    else:
        path = "M19.5 13.5 12 21m0 0-7.5-7.5M12 21V3"
    icon = h.svg(
        xmlns="http://www.w3.org/2000/svg",
        fill="none",
        viewBox="0 0 24 24",
        stroke_width="1.5",
        stroke="currentColor",
        class_="size-4",
    )
    return icon[
        h.path(
            stroke_linecap="round",
            stroke_linejoin="round",
            d=path,
        )
    ]
def _queue_row_attrs(execution: Mapping[str, object]) -> dict[str, str]:
    """Build row attributes giving each live-work row a stable
    ``view-transition-name`` so queue reorders animate as moves."""
    execution_id = _text(execution, "execution_id")
    return {"style": f"view-transition-name: running-job-{execution_id};"}
def _running_row(execution: Mapping[str, object]) -> tuple[Node, ...]: def _running_row(execution: Mapping[str, object]) -> tuple[Node, ...]:
return ( return (
h.p(class_="w-px whitespace-nowrap font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
h.div[ h.div[
h.div(class_="font-semibold text-slate-950")[_text(execution, "source")], h.div(class_="font-semibold text-slate-950")[_text(execution, "source")],
h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[ h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[
_text(execution, "slug") _text(execution, "slug")
], ],
], ],
h.div[
h.p(class_="font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
],
h.div[ h.div[
h.p(class_="font-medium text-slate-900")[_text(execution, "started_at")], h.p(class_="font-medium text-slate-900")[_text(execution, "started_at")],
h.p(class_="mt-0.5 text-xs text-slate-500")[_text(execution, "runtime")], h.p(class_="mt-0.5 text-xs text-slate-500")[_text(execution, "runtime")],
@ -84,34 +112,43 @@ def _queued_row(execution: Mapping[str, object]) -> tuple[Node, ...]:
)[_text(execution, "queued_at")] )[_text(execution, "queued_at")]
return ( return (
h.p(class_="w-px whitespace-nowrap font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
h.div[ h.div[
h.div(class_="font-semibold text-slate-950")[_text(execution, "source")], h.div(class_="font-semibold text-slate-950")[_text(execution, "source")],
h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[ h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[
_text(execution, "slug") _text(execution, "slug")
], ],
], ],
h.div[
h.p(class_="font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
],
queued_label, queued_label,
h.div[ status_badge(label="Queued", tone="idle"),
h.div(class_="max-w-xs whitespace-normal")[
h.p(class_="font-medium text-slate-900")[ h.p(class_="font-medium text-slate-900")[
f"#{_text(execution, 'queue_position')}" f"Queue position #{_text(execution, 'queue_position')}"
], ],
h.p(class_="mt-0.5 text-xs text-slate-500")["waiting for capacity"],
], ],
action_button(
label=_text(execution, "run_label"),
disabled=_flag(execution, "run_disabled"),
post_path=_maybe_text(execution, "run_post_path"),
),
h.div(class_="flex flex-wrap items-center gap-2")[ h.div(class_="flex flex-wrap items-center gap-2")[
action_button(
label=_queue_icon("up"),
emphasis="icon",
title="Move up",
disabled=_flag(execution, "move_up_disabled"),
post_path=_maybe_text(execution, "move_up_post_path"),
),
action_button(
label=_queue_icon("down"),
emphasis="icon",
title="Move down",
disabled=_flag(execution, "move_down_disabled"),
post_path=_maybe_text(execution, "move_down_post_path"),
),
action_button( action_button(
label="Cancel", label="Cancel",
tone="danger", tone="danger",
post_path=_maybe_text(execution, "cancel_post_path"), post_path=_maybe_text(execution, "cancel_post_path"),
) ),
], ],
) )
@ -180,21 +217,16 @@ def _completed_row(execution: Mapping[str, object]) -> tuple[Node, ...]:
)[_text(execution, "ended_at")] )[_text(execution, "ended_at")]
return ( return (
h.p(class_="w-px whitespace-nowrap font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
h.div[ h.div[
h.div(class_="font-semibold text-slate-950")[_text(execution, "source")], h.div(class_="font-semibold text-slate-950")[_text(execution, "source")],
h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[ h.p(class_="mt-0.5 font-mono text-xs text-slate-500")[
_text(execution, "slug") _text(execution, "slug")
], ],
], ],
h.div[ h.div[ended_at_label,],
h.p(class_="font-medium text-slate-900")[
f"#{_text(execution, 'execution_id')}"
],
],
h.div[
ended_at_label,
h.p(class_="mt-0.5 text-xs text-slate-500")[_text(execution, "summary")],
],
status_badge( status_badge(
label=_text(execution, "status"), label=_text(execution, "status"),
tone=_text(execution, "status_tone"), tone=_text(execution, "status_tone"),
@ -224,6 +256,10 @@ def runs_page(
completed_items = completed_executions or () completed_items = completed_executions or ()
running_rows = tuple(_running_row(execution) for execution in running_items) running_rows = tuple(_running_row(execution) for execution in running_items)
queued_rows = tuple(_queued_row(execution) for execution in queued_items) queued_rows = tuple(_queued_row(execution) for execution in queued_items)
live_rows = running_rows + queued_rows
live_row_attrs = tuple(
_queue_row_attrs(execution) for execution in running_items + queued_items
)
upcoming_rows = tuple(_upcoming_row(job) for job in upcoming_items) upcoming_rows = tuple(_upcoming_row(job) for job in upcoming_items)
completed_rows = tuple(_completed_row(execution) for execution in completed_items) completed_rows = tuple(_completed_row(execution) for execution in completed_items)
@ -237,31 +273,20 @@ def runs_page(
content=( content=(
table_section( table_section(
eyebrow="Live work", eyebrow="Live work",
title="Running job executions", title="Running jobs",
empty_message="No job executions are running.", empty_message="No jobs are running or queued.",
headers=( headers=(
"#",
"Source", "Source",
"Execution", "Activity",
"Started", "State",
"Status", "Details",
"Stats",
"Actions", "Actions",
), ),
rows=running_rows, rows=live_rows,
), row_attrs=live_row_attrs,
table_section( first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
eyebrow="Queue", first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
title="Queued job executions",
empty_message="No queued executions are waiting.",
headers=(
"Source",
"Execution",
"Queued",
"Position",
"Run now",
"Actions",
),
rows=queued_rows,
), ),
table_section( table_section(
eyebrow="Schedule", eyebrow="Schedule",
@ -282,14 +307,16 @@ def runs_page(
title="Completed job executions", title="Completed job executions",
empty_message="No job executions have completed yet.", empty_message="No job executions have completed yet.",
headers=( headers=(
"#",
"Source", "Source",
"Execution",
"Ended", "Ended",
"Status", "State",
"Summary", "Summary",
"Log", "Log",
), ),
rows=completed_rows, rows=completed_rows,
first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
), ),
h.script[ h.script[
""" """

View file

@ -317,10 +317,18 @@
.table { .table {
display: table; display: table;
} }
.size-4 {
width: calc(var(--spacing) * 4);
height: calc(var(--spacing) * 4);
}
.size-5 { .size-5 {
width: calc(var(--spacing) * 5); width: calc(var(--spacing) * 5);
height: calc(var(--spacing) * 5); height: calc(var(--spacing) * 5);
} }
.size-8 {
width: calc(var(--spacing) * 8);
height: calc(var(--spacing) * 8);
}
.size-10 { .size-10 {
width: calc(var(--spacing) * 10); width: calc(var(--spacing) * 10);
height: calc(var(--spacing) * 10); height: calc(var(--spacing) * 10);
@ -349,6 +357,9 @@
.w-full { .w-full {
width: 100%; width: 100%;
} }
.w-px {
width: 1px;
}
.max-w-3xl { .max-w-3xl {
max-width: var(--container-3xl); max-width: var(--container-3xl);
} }
@ -457,13 +468,6 @@
margin-block-end: calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-y-reverse))); margin-block-end: calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-y-reverse)));
} }
} }
.space-y-5 {
:where(& > :not(:last-child)) {
--tw-space-y-reverse: 0;
margin-block-start: calc(calc(var(--spacing) * 5) * var(--tw-space-y-reverse));
margin-block-end: calc(calc(var(--spacing) * 5) * calc(1 - var(--tw-space-y-reverse)));
}
}
.space-y-6 { .space-y-6 {
:where(& > :not(:last-child)) { :where(& > :not(:last-child)) {
--tw-space-y-reverse: 0; --tw-space-y-reverse: 0;
@ -632,6 +636,9 @@
--tw-gradient-to: transparent; --tw-gradient-to: transparent;
--tw-gradient-stops: var(--tw-gradient-via-stops, var(--tw-gradient-position), var(--tw-gradient-from) var(--tw-gradient-from-position), var(--tw-gradient-to) var(--tw-gradient-to-position)); --tw-gradient-stops: var(--tw-gradient-via-stops, var(--tw-gradient-position), var(--tw-gradient-from) var(--tw-gradient-from-position), var(--tw-gradient-to) var(--tw-gradient-to-position));
} }
.p-0 {
padding: calc(var(--spacing) * 0);
}
.p-0\.5 { .p-0\.5 {
padding: calc(var(--spacing) * 0.5); padding: calc(var(--spacing) * 0.5);
} }
@ -692,12 +699,21 @@
.pt-6 { .pt-6 {
padding-top: calc(var(--spacing) * 6); padding-top: calc(var(--spacing) * 6);
} }
.pr-1 {
padding-right: calc(var(--spacing) * 1);
}
.pr-2 {
padding-right: calc(var(--spacing) * 2);
}
.pr-5 { .pr-5 {
padding-right: calc(var(--spacing) * 5); padding-right: calc(var(--spacing) * 5);
} }
.pr-6 { .pr-6 {
padding-right: calc(var(--spacing) * 6); padding-right: calc(var(--spacing) * 6);
} }
.pl-2 {
padding-left: calc(var(--spacing) * 2);
}
.pl-3 { .pl-3 {
padding-left: calc(var(--spacing) * 3); padding-left: calc(var(--spacing) * 3);
} }
@ -1068,9 +1084,14 @@
padding-inline: calc(var(--spacing) * 4); padding-inline: calc(var(--spacing) * 4);
} }
} }
.sm\:px-5 { .sm\:pl-2\.5 {
@media (width >= 40rem) { @media (width >= 40rem) {
padding-inline: calc(var(--spacing) * 5); padding-left: calc(var(--spacing) * 2.5);
}
}
.sm\:pl-3 {
@media (width >= 40rem) {
padding-left: calc(var(--spacing) * 3);
} }
} }
.sm\:pl-4 { .sm\:pl-4 {
@ -1115,31 +1136,16 @@
grid-template-columns: 14rem minmax(0,1fr); grid-template-columns: 14rem minmax(0,1fr);
} }
} }
.lg\:grid-cols-\[18rem_minmax\(0\,1fr\)\] {
@media (width >= 64rem) {
grid-template-columns: 18rem minmax(0,1fr);
}
}
.lg\:px-5 { .lg\:px-5 {
@media (width >= 64rem) { @media (width >= 64rem) {
padding-inline: calc(var(--spacing) * 5); padding-inline: calc(var(--spacing) * 5);
} }
} }
.lg\:px-6 {
@media (width >= 64rem) {
padding-inline: calc(var(--spacing) * 6);
}
}
.lg\:py-4 { .lg\:py-4 {
@media (width >= 64rem) { @media (width >= 64rem) {
padding-block: calc(var(--spacing) * 4); padding-block: calc(var(--spacing) * 4);
} }
} }
.lg\:py-5 {
@media (width >= 64rem) {
padding-block: calc(var(--spacing) * 5);
}
}
.xl\:grid-cols-4 { .xl\:grid-cols-4 {
@media (width >= 80rem) { @media (width >= 80rem) {
grid-template-columns: repeat(4, minmax(0, 1fr)); grid-template-columns: repeat(4, minmax(0, 1fr));
@ -1156,6 +1162,12 @@
} }
} }
} }
@layer base {
::view-transition-group(*) {
animation-duration: 180ms;
animation-timing-function: ease;
}
}
@property --tw-translate-x { @property --tw-translate-x {
syntax: "*"; syntax: "*";
inherits: false; inherits: false;

View file

@ -1,2 +1,9 @@
@import "tailwindcss" source("../"); @import "tailwindcss" source("../");
@source inline("bg-amber-500 translate-x-5"); @source inline("bg-amber-500 translate-x-5");
@layer base {
::view-transition-group(*) {
animation-duration: 180ms;
animation-timing-function: ease;
}
}

View file

@ -282,6 +282,16 @@ def create_app(*, dev_mode: bool = False) -> Quart:
trigger_refresh(app) trigger_refresh(app)
return Response(status=204) return Response(status=204)
@app.post("/actions/queued-executions/<int:execution_id>/move-up")
async def move_queued_execution_up_action(execution_id: int) -> Response:
    # Best-effort: move_queued_execution returns False when the execution is
    # not queued or already first, but the HTTP contract is always 204 —
    # presumably the SSE refresh re-renders the authoritative order; confirm.
    get_job_runtime(app).move_queued_execution(execution_id, direction="up")
    return Response(status=204)
@app.post("/actions/queued-executions/<int:execution_id>/move-down")
async def move_queued_execution_down_action(execution_id: int) -> Response:
    # Mirrors the move-up handler; the boolean result is likewise ignored.
    get_job_runtime(app).move_queued_execution(execution_id, direction="down")
    return Response(status=204)
@app.post("/job/<int:job_id>/execution/<int:execution_id>/logs") @app.post("/job/<int:job_id>/execution/<int:execution_id>/logs")
async def logs_patch(job_id: int, execution_id: int) -> DatastarResponse: async def logs_patch(job_id: int, execution_id: int) -> DatastarResponse:
async def render() -> Renderable: async def render() -> Renderable:
@ -311,7 +321,7 @@ def get_job_runtime(app: Quart) -> JobRuntime:
if runtime is None: if runtime is None:
runtime = JobRuntime( runtime = JobRuntime(
log_dir=app.config["REPUB_LOG_DIR"], log_dir=app.config["REPUB_LOG_DIR"],
refresh_callback=lambda: trigger_refresh(app), refresh_callback=lambda event="refresh-event": trigger_refresh(app, event),
) )
app.extensions[JOB_RUNTIME_KEY] = runtime app.extensions[JOB_RUNTIME_KEY] = runtime
return runtime return runtime

View file

@ -147,9 +147,19 @@ def test_load_runs_view_projects_queued_executions_in_fifo_order(
"7 minutes ago", "7 minutes ago",
"3 minutes ago", "3 minutes ago",
) )
assert view["queued"][0]["move_up_disabled"] is True
assert (
view["queued"][0]["move_down_post_path"]
== f"/actions/queued-executions/{int(first_execution.get_id())}/move-down"
)
assert (
view["queued"][1]["move_up_post_path"]
== f"/actions/queued-executions/{int(second_execution.get_id())}/move-up"
)
assert view["queued"][1]["move_down_disabled"] is True
def test_load_runs_view_separates_queued_jobs_from_scheduled_jobs( def test_load_runs_view_keeps_queued_jobs_in_scheduled_jobs(
tmp_path: Path, tmp_path: Path,
) -> None: ) -> None:
initialize_database(tmp_path / "jobs-queue-separation.db") initialize_database(tmp_path / "jobs-queue-separation.db")
@ -194,10 +204,14 @@ def test_load_runs_view_separates_queued_jobs_from_scheduled_jobs(
) )
assert tuple(row["slug"] for row in view["queued"]) == ("queued-source",) assert tuple(row["slug"] for row in view["queued"]) == ("queued-source",)
assert all(row["slug"] != "queued-source" for row in view["upcoming"]) assert tuple(row["slug"] for row in view["upcoming"]) == (
assert tuple(row["slug"] for row in view["upcoming"]) == ("scheduled-source",) "queued-source",
assert view["upcoming"][0]["run_reason"] == "Ready" "scheduled-source",
assert view["upcoming"][0]["run_disabled"] is False )
assert view["upcoming"][0]["run_reason"] == "Queued"
assert view["upcoming"][0]["run_disabled"] is True
assert view["upcoming"][1]["run_reason"] == "Ready"
assert view["upcoming"][1]["run_disabled"] is False
def test_load_runs_view_running_row_targets_queued_follow_up_cancel( def test_load_runs_view_running_row_targets_queued_follow_up_cancel(

View file

@ -628,6 +628,40 @@ def test_job_runtime_start_reconciles_stale_running_execution(tmp_path: Path) ->
runtime.shutdown() runtime.shutdown()
def test_job_runtime_publishes_refresh_while_jobs_are_running(tmp_path: Path) -> None:
    """A poll tick publishes a throttled refresh event while a job is running."""
    initialize_database(tmp_path / "runtime-refresh.db")
    running_source = create_source(
        name="Running source",
        slug="running-source",
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=False,
        cron_minute="*/5",
        cron_hour="*",
        cron_day_of_month="*",
        cron_day_of_week="*",
        cron_month="*",
        feed_url="https://example.com/running.xml",
    )
    running_job = Job.get(Job.source == running_source)
    JobExecution.create(
        job=running_job,
        started_at=datetime(2026, 3, 30, 12, 0, tzinfo=UTC),
        running_status=JobExecutionStatus.RUNNING,
    )
    captured_events: list[object] = []
    runtime = JobRuntime(
        log_dir=tmp_path / "out" / "logs",
        refresh_callback=captured_events.append,
    )
    # Backdate the throttle window so this poll is allowed to publish.
    runtime._last_runtime_refresh_at = time.monotonic() - 2.0
    runtime.poll_workers()
    assert "refresh-event" in captured_events
def test_job_runtime_start_reattaches_live_worker_after_app_restart( def test_job_runtime_start_reattaches_live_worker_after_app_restart(
tmp_path: Path, tmp_path: Path,
) -> None: ) -> None:
@ -907,7 +941,7 @@ def test_render_runs_uses_database_backed_jobs_and_executions(
body = str(await render_runs(app)) body = str(await render_runs(app))
assert "runs-page-source" in body assert "runs-page-source" in body
assert "Running job executions" in body assert "Running jobs" in body
assert "Scheduled jobs" in body assert "Scheduled jobs" in body
assert "Completed job executions" in body assert "Completed job executions" in body
assert f"/job/{job.id}/execution/{execution.get_id()}/logs" in body assert f"/job/{job.id}/execution/{execution.get_id()}/logs" in body

View file

@ -138,7 +138,7 @@ def test_runs_page_renders_completed_execution_end_time_as_relative_hoverable_ti
assert ">2 hours ago<" in body assert ">2 hours ago<" in body
def test_runs_page_renders_queued_execution_table() -> None: def test_runs_page_renders_combined_running_jobs_table() -> None:
body = str( body = str(
runs_page( runs_page(
queued_executions=( queued_executions=(
@ -156,12 +156,16 @@ def test_runs_page_renders_queued_execution_table() -> None:
"run_disabled": True, "run_disabled": True,
"run_post_path": "/actions/jobs/7/run-now", "run_post_path": "/actions/jobs/7/run-now",
"cancel_post_path": "/actions/queued-executions/42/cancel", "cancel_post_path": "/actions/queued-executions/42/cancel",
"move_up_disabled": True,
"move_up_post_path": None,
"move_down_disabled": True,
"move_down_post_path": None,
}, },
) )
) )
) )
assert "Queued job executions" in body assert "Running jobs" in body
assert "queued-source" in body assert "queued-source" in body
assert ">Queued<" in body assert ">Queued<" in body
assert "/actions/queued-executions/42/cancel" in body assert "/actions/queued-executions/42/cancel" in body
@ -296,6 +300,23 @@ def test_render_stream_yields_on_connect_and_refresh() -> None:
asyncio.run(run()) asyncio.run(run())
def test_render_stream_uses_view_transition_for_queue_reorders() -> None:
async def run() -> None:
queue = RefreshBroker().subscribe()
async def render() -> str:
return '<main id="morph">queue</main>'
stream = render_stream(queue, render, render_on_connect=False)
await queue.put("queue-reordered")
event = await anext(stream)
await stream.aclose()
assert "useViewTransition true" in str(event)
asyncio.run(run())
def test_render_dashboard_shows_dashboard_information_architecture( def test_render_dashboard_shows_dashboard_information_architecture(
monkeypatch, tmp_path: Path monkeypatch, tmp_path: Path
) -> None: ) -> None:
@ -1118,7 +1139,7 @@ def test_settings_action_rejects_non_positive_max_concurrent_jobs(
asyncio.run(run()) asyncio.run(run())
def test_render_runs_shows_running_upcoming_and_completed_tables( def test_render_runs_shows_running_scheduled_and_completed_tables(
monkeypatch, tmp_path: Path monkeypatch, tmp_path: Path
) -> None: ) -> None:
db_path = tmp_path / "runs-render.db" db_path = tmp_path / "runs-render.db"
@ -1149,15 +1170,13 @@ def test_render_runs_shows_running_upcoming_and_completed_tables(
body = str(await render_runs(app)) body = str(await render_runs(app))
assert "Running job executions" in body assert "Running jobs" in body
assert "Queued job executions" in body
assert "Scheduled jobs" in body assert "Scheduled jobs" in body
assert "Completed job executions" in body assert "Completed job executions" in body
assert "runs-render-source" in body assert "runs-render-source" in body
assert f"/job/{job.id}/execution/{execution.get_id()}/logs" in body assert f"/job/{job.id}/execution/{execution.get_id()}/logs" in body
assert "data-next-run-at" in body assert "data-next-run-at" in body
assert "in " in body assert "in " in body
assert "Already running" not in body
asyncio.run(run()) asyncio.run(run())
@ -1187,15 +1206,14 @@ def test_render_runs_shows_empty_state_rows(monkeypatch, tmp_path: Path) -> None
app = create_app() app = create_app()
body = str(await render_runs(app)) body = str(await render_runs(app))
assert body.count("No job executions are running.") == 1 assert body.count("No jobs are running or queued.") == 1
assert "No queued executions are waiting." in body
assert "No jobs are scheduled." in body assert "No jobs are scheduled." in body
assert "No job executions have completed yet." in body assert "No job executions have completed yet." in body
asyncio.run(run()) asyncio.run(run())
def test_render_runs_shows_queued_execution_separately_from_scheduled_jobs( def test_render_runs_keeps_queued_execution_in_scheduled_jobs_table(
monkeypatch, tmp_path: Path monkeypatch, tmp_path: Path
) -> None: ) -> None:
db_path = tmp_path / "runs-queued-render.db" db_path = tmp_path / "runs-queued-render.db"
@ -1241,14 +1259,16 @@ def test_render_runs_shows_queued_execution_separately_from_scheduled_jobs(
async def run() -> None: async def run() -> None:
body = str(await render_runs(app)) body = str(await render_runs(app))
assert "Queued job executions" in body assert "Running jobs" in body
assert "Scheduled jobs" in body assert "Scheduled jobs" in body
assert "queued-source" in body assert "queued-source" in body
assert "scheduled-source" in body assert "scheduled-source" in body
assert ">Queued<" in body
assert ( assert (
f"/actions/queued-executions/{int(queued_execution.get_id())}/cancel" f"/actions/queued-executions/{int(queued_execution.get_id())}/cancel"
in body in body
) )
assert "Ready" in body
asyncio.run(run()) asyncio.run(run())
@ -1296,6 +1316,7 @@ def test_render_runs_shows_cancel_button_for_running_row_with_queued_follow_up(
in body in body
) )
assert ">Cancel<" in body assert ">Cancel<" in body
assert "Running jobs" in body
asyncio.run(run()) asyncio.run(run())
@ -1336,6 +1357,10 @@ def test_render_runs_keeps_all_action_controls_visible_in_html_after_compaction(
"run_disabled": True, "run_disabled": True,
"run_post_path": "/actions/jobs/2/run-now", "run_post_path": "/actions/jobs/2/run-now",
"cancel_post_path": "/actions/queued-executions/22/cancel", "cancel_post_path": "/actions/queued-executions/22/cancel",
"move_up_disabled": True,
"move_up_post_path": None,
"move_down_disabled": True,
"move_down_post_path": None,
}, },
), ),
upcoming_jobs=( upcoming_jobs=(
@ -1374,6 +1399,7 @@ def test_render_runs_keeps_all_action_controls_visible_in_html_after_compaction(
) )
) )
assert "Running jobs" in body
assert ">Stop<" in body assert ">Stop<" in body
assert ">Cancel<" in body assert ">Cancel<" in body
assert ">Run now<" in body assert ">Run now<" in body
@ -1432,6 +1458,78 @@ def test_cancel_queued_execution_action_deletes_pending_row_without_touching_run
asyncio.run(run()) asyncio.run(run())
def test_move_queued_execution_action_reorders_queue(
monkeypatch, tmp_path: Path
) -> None:
db_path = tmp_path / "move-queued-action.db"
log_dir = tmp_path / "out" / "logs"
monkeypatch.setenv("REPUBLISHER_DB_PATH", str(db_path))
async def run() -> None:
app = create_app()
app.config["REPUB_LOG_DIR"] = log_dir
client = app.test_client()
first_source = create_source(
name="First queued source",
slug="first-queued-source",
source_type="feed",
notes="",
spider_arguments="",
enabled=True,
cron_minute="*/5",
cron_hour="*",
cron_day_of_month="*",
cron_day_of_week="*",
cron_month="*",
feed_url="https://example.com/first.xml",
)
second_source = create_source(
name="Second queued source",
slug="second-queued-source",
source_type="feed",
notes="",
spider_arguments="",
enabled=True,
cron_minute="*/5",
cron_hour="*",
cron_day_of_month="*",
cron_day_of_week="*",
cron_month="*",
feed_url="https://example.com/second.xml",
)
first_job = Job.get(Job.source == first_source)
second_job = Job.get(Job.source == second_source)
first_execution = JobExecution.create(
job=first_job,
created_at=datetime(2026, 3, 30, 12, 0, tzinfo=UTC),
running_status=JobExecutionStatus.PENDING,
)
second_execution = JobExecution.create(
job=second_job,
created_at=datetime(2026, 3, 30, 12, 5, tzinfo=UTC),
running_status=JobExecutionStatus.PENDING,
)
response = await client.post(
f"/actions/queued-executions/{int(second_execution.get_id())}/move-up"
)
assert response.status_code == 204
body = str(await render_runs(app))
assert body.index("second-queued-source") < body.index("first-queued-source")
assert (
f"/actions/queued-executions/{int(second_execution.get_id())}/move-down"
in body
)
assert (
f"/actions/queued-executions/{int(first_execution.get_id())}/move-up"
in body
)
asyncio.run(run())
def test_toggle_job_enabled_action_removes_queued_execution( def test_toggle_job_enabled_action_removes_queued_execution(
monkeypatch, tmp_path: Path monkeypatch, tmp_path: Path
) -> None: ) -> None: