Improve sources and runs history tables

This commit is contained in:
Abel Luck 2026-03-31 10:49:50 +02:00
parent df68aa95e9
commit 939cd9ea5d
7 changed files with 459 additions and 25 deletions

View file

@ -1,6 +1,7 @@
from __future__ import annotations
import json
import math
import os
import signal
import subprocess
@ -10,7 +11,7 @@ import time
from dataclasses import dataclass
from datetime import UTC, datetime, timedelta
from pathlib import Path
from typing import Callable, TextIO, cast
from typing import Callable, TextIO, TypedDict, cast
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
@ -30,6 +31,7 @@ from repub.model import (
SCHEDULER_JOB_PREFIX = "job-"
POLL_JOB_ID = "runtime-poll-workers"
SYNC_JOB_ID = "runtime-sync-jobs"
COMPLETED_EXECUTION_PAGE_SIZE = 20
@dataclass(frozen=True)
@ -102,6 +104,17 @@ class ExecutionLogView:
error_message: str | None = None
class RunsView(TypedDict):
    """Payload shape returned by ``load_runs_view``.

    Groups job executions by lifecycle stage and carries pagination
    metadata for the completed-history table.
    """

    # Executions currently running, projected as row dicts for the UI.
    running: tuple[dict[str, object], ...]
    # Executions waiting in the queue.
    queued: tuple[dict[str, object], ...]
    # Scheduled jobs that have not started yet.
    upcoming: tuple[dict[str, object], ...]
    # The current page of finished (succeeded/failed/canceled) executions.
    completed: tuple[dict[str, object], ...]
    # 1-based page number actually served (clamped to the valid range).
    completed_page: int
    # Number of completed rows per page.
    completed_page_size: int
    # Total completed executions across all pages.
    completed_total_count: int
    # Total page count (always >= 1, even when there are no results).
    completed_total_pages: int
class JobRuntime:
def __init__(
self,
@ -647,10 +660,15 @@ class JobRuntime:
def load_runs_view(
*, log_dir: str | Path, now: datetime | None = None
) -> dict[str, tuple[dict[str, object], ...]]:
*,
log_dir: str | Path,
now: datetime | None = None,
completed_page: int = 1,
completed_page_size: int = COMPLETED_EXECUTION_PAGE_SIZE,
) -> RunsView:
reference_time = now or datetime.now(UTC)
resolved_log_dir = Path(log_dir)
sanitized_page_size = max(1, completed_page_size)
with database.connection_context():
execution_primary_key = getattr(JobExecution, "_meta").primary_key
jobs = tuple(Job.select(Job, Source).join(Source).order_by(Source.name.asc()))
@ -668,7 +686,7 @@ def load_runs_view(
.where(JobExecution.running_status == JobExecutionStatus.RUNNING)
.order_by(JobExecution.started_at.desc())
)
completed_executions = tuple(
completed_query = (
JobExecution.select(JobExecution, Job, Source)
.join(Job)
.join(Source)
@ -682,7 +700,14 @@ def load_runs_view(
)
)
.order_by(JobExecution.ended_at.desc())
.limit(20)
)
completed_total_count = completed_query.count()
completed_total_pages = max(
1, math.ceil(completed_total_count / sanitized_page_size)
)
sanitized_completed_page = min(max(1, completed_page), completed_total_pages)
completed_executions = tuple(
completed_query.paginate(sanitized_completed_page, sanitized_page_size)
)
running_by_job = {
@ -725,9 +750,49 @@ def load_runs_view(
_project_completed_execution(execution, resolved_log_dir, reference_time)
for execution in completed_executions
),
"completed_page": sanitized_completed_page,
"completed_page_size": sanitized_page_size,
"completed_total_count": completed_total_count,
"completed_total_pages": completed_total_pages,
}
def clear_completed_executions(*, log_dir: str | Path) -> int:
    """Delete all completed job executions and their on-disk log artifacts.

    Selects every execution whose status is SUCCEEDED, FAILED, or CANCELED,
    unlinks any ``job-<job>-execution-<id>.*`` files for it under *log_dir*,
    then deletes the matching database rows in one statement.

    Returns:
        The number of execution rows deleted (0 when nothing was completed).
    """
    resolved_log_dir = Path(log_dir)
    with database.connection_context():
        # Access the primary-key field via _meta; used for the bulk delete below.
        execution_primary_key = getattr(JobExecution, "_meta").primary_key
        completed_executions = tuple(
            JobExecution.select(JobExecution, Job)
            .join(Job)
            .where(
                JobExecution.running_status.in_(
                    (
                        JobExecutionStatus.SUCCEEDED,
                        JobExecutionStatus.FAILED,
                        JobExecutionStatus.CANCELED,
                    )
                )
            )
        )
        if not completed_executions:
            return 0
        # Remove log artifacts first (best effort); missing files are ignored.
        # NOTE(review): if the row delete below fails, artifacts are already
        # gone while the rows remain — acceptable for cleanup, but one-way.
        for execution in completed_executions:
            job = cast(Job, execution.job)
            prefix = f"job-{_job_id(job)}-execution-{_execution_id(execution)}"
            for artifact_path in resolved_log_dir.glob(f"{prefix}.*"):
                artifact_path.unlink(missing_ok=True)
        execution_ids = tuple(
            _execution_id(execution) for execution in completed_executions
        )
        # Single bulk delete keyed on the primary keys gathered above.
        return (
            JobExecution.delete()
            .where(execution_primary_key.in_(execution_ids))
            .execute()
        )
def load_dashboard_view(
*, log_dir: str | Path, now: datetime | None = None
) -> dict[str, object]:

View file

@ -242,12 +242,129 @@ def _completed_row(execution: Mapping[str, object]) -> tuple[Node, ...]:
)
def _completed_page_href(page: int) -> str:
return f"/runs?completed_page={page}"
def _completed_history_pagination(
    *,
    completed_page: int,
    completed_page_size: int,
    completed_total_count: int,
    completed_total_pages: int,
) -> Renderable | None:
    """Render pagination controls for the completed-executions table.

    Returns ``None`` when every result fits on a single page, so the caller
    can omit the control entirely. Emits a mobile Previous/Next pair and a
    desktop result summary with one numbered link per page.

    NOTE(review): one link is rendered per page with no truncation, so a
    very large history produces a very wide pager — confirm acceptable.
    """
    # All results fit on one page: no pagination UI needed.
    if completed_total_count <= completed_page_size:
        return None
    # 1-based range of results shown on this page, e.g. "Showing 21 to 40".
    start_result = ((completed_page - 1) * completed_page_size) + 1
    end_result = min(completed_total_count, completed_page * completed_page_size)
    # Shared styling for non-current page links.
    link_class = (
        "relative inline-flex items-center px-4 py-2 text-sm font-semibold text-slate-700 "
        "ring-1 ring-inset ring-slate-200 hover:bg-stone-50"
    )
    return h.div(
        class_="flex items-center justify-between border-t border-slate-200 bg-white px-4 py-3 sm:px-6"
    )[
        # Mobile layout: Previous/Next links clamped to the first/last page
        # (on page 1 "Previous" links to page 1 itself rather than disabling).
        h.div(class_="flex flex-1 justify-between sm:hidden")[
            h.a(
                href=_completed_page_href(max(1, completed_page - 1)),
                class_="relative inline-flex items-center rounded-xl border border-slate-200 bg-white px-4 py-2 text-sm font-medium text-slate-700 hover:bg-stone-50",
            )["Previous"],
            h.a(
                href=_completed_page_href(
                    min(completed_total_pages, completed_page + 1)
                ),
                class_="relative ml-3 inline-flex items-center rounded-xl border border-slate-200 bg-white px-4 py-2 text-sm font-medium text-slate-700 hover:bg-stone-50",
            )["Next"],
        ],
        # Desktop layout: "Showing X to Y of Z results" plus numbered links.
        h.div(class_="hidden sm:flex sm:flex-1 sm:items-center sm:justify-between")[
            h.p(class_="text-sm text-slate-600")[
                "Showing ",
                h.span(class_="font-medium text-slate-950")[str(start_result)],
                " to ",
                h.span(class_="font-medium text-slate-950")[str(end_result)],
                " of ",
                h.span(class_="font-medium text-slate-950")[str(completed_total_count)],
                " results",
            ],
            h.nav(
                aria_label="Completed execution pagination",
                class_="isolate inline-flex -space-x-px rounded-xl shadow-xs",
            )[
                (
                    h.a(
                        href=_completed_page_href(page_number),
                        # aria-current marks the active page for screen readers.
                        aria_current=(
                            "page" if page_number == completed_page else None
                        ),
                        # Active page gets the amber highlight; others the ring style.
                        class_=(
                            "relative z-10 inline-flex items-center bg-amber-500 px-4 py-2 text-sm font-semibold text-slate-950"
                            if page_number == completed_page
                            else link_class
                        ),
                    )[str(page_number)]
                    for page_number in range(1, completed_total_pages + 1)
                )
            ],
        ],
    ]
def _completed_history_section(
    *,
    completed_rows: tuple[tuple[Node, ...], ...],
    completed_page: int,
    completed_page_size: int,
    completed_total_count: int,
    completed_total_pages: int,
) -> Renderable:
    """Render the completed-executions history table with optional pagination.

    Wraps ``table_section`` in a ``<section>``; the pagination control is
    appended after the table and is ``None`` (omitted) when everything fits
    on one page. The "Clear history" action is shown only when at least one
    completed execution exists.
    """
    pagination = _completed_history_pagination(
        completed_page=completed_page,
        completed_page_size=completed_page_size,
        completed_total_count=completed_total_count,
        completed_total_pages=completed_total_pages,
    )
    return h.section[
        table_section(
            eyebrow="History",
            title="Completed job executions",
            empty_message="No job executions have completed yet.",
            headers=(
                "#",
                "Source",
                "Ended",
                "State",
                "Summary",
                "Log",
            ),
            rows=completed_rows,
            first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
            first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
            # Only offer "Clear history" when there is something to clear.
            actions=(
                action_button(
                    label="Clear history",
                    tone="danger",
                    post_path="/actions/completed-executions/clear",
                )
                if completed_total_count > 0
                else None
            ),
        ),
        pagination,
    ]
def runs_page(
*,
running_executions: tuple[Mapping[str, object], ...] | None = None,
queued_executions: tuple[Mapping[str, object], ...] | None = None,
upcoming_jobs: tuple[Mapping[str, object], ...] | None = None,
completed_executions: tuple[Mapping[str, object], ...] | None = None,
completed_page: int = 1,
completed_page_size: int = 20,
completed_total_count: int | None = None,
completed_total_pages: int | None = None,
source_count: int = 0,
) -> Renderable:
running_items = running_executions or ()
@ -262,6 +379,12 @@ def runs_page(
)
upcoming_rows = tuple(_upcoming_row(job) for job in upcoming_items)
completed_rows = tuple(_completed_row(execution) for execution in completed_items)
resolved_completed_total_count = (
len(completed_items) if completed_total_count is None else completed_total_count
)
resolved_completed_total_pages = (
1 if completed_total_pages is None else completed_total_pages
)
return page_shell(
current_path="/runs",
@ -302,21 +425,12 @@ def runs_page(
),
rows=upcoming_rows,
),
table_section(
eyebrow="History",
title="Completed job executions",
empty_message="No job executions have completed yet.",
headers=(
"#",
"Source",
"Ended",
"State",
"Summary",
"Log",
),
rows=completed_rows,
first_header_class="w-px py-2.5 pr-2 pl-3 text-left text-xs font-semibold uppercase tracking-[0.18em] whitespace-nowrap text-slate-500 sm:pl-3",
first_cell_class="w-px py-3 pr-2 pl-3 text-sm font-medium text-slate-950 sm:pl-3",
_completed_history_section(
completed_rows=completed_rows,
completed_page=completed_page,
completed_page_size=completed_page_size,
completed_total_count=resolved_completed_total_count,
completed_total_pages=resolved_completed_total_pages,
),
h.script[
"""

View file

@ -75,13 +75,11 @@ def _source_row(source: Mapping[str, object]) -> tuple[Node, ...]:
label=str(source["state"]),
tone=str(source["state_tone"]),
),
h.p(class_="mt-2 text-xs text-slate-500")[str(source["last_run"])],
],
h.div(class_="flex flex-wrap items-center gap-2")[
h.div(class_="flex flex-nowrap items-center gap-3 whitespace-nowrap")[
inline_link(
href=f"/sources/{source['slug']}/edit", label="Edit", tone="amber"
),
inline_link(href="/runs", label="View runs"),
action_button(
label="Delete",
tone="danger",

View file

@ -251,6 +251,12 @@
.top-0 {
top: calc(var(--spacing) * 0);
}
.isolate {
isolation: isolate;
}
.z-10 {
z-index: 10;
}
.container {
width: 100%;
@media (width >= 40rem) {
@ -299,6 +305,9 @@
.mb-3 {
margin-bottom: calc(var(--spacing) * 3);
}
.ml-3 {
margin-left: calc(var(--spacing) * 3);
}
.block {
display: block;
}
@ -393,6 +402,9 @@
.min-w-\[70rem\] {
min-width: 70rem;
}
.flex-1 {
flex: 1;
}
.shrink-0 {
flex-shrink: 0;
}
@ -475,6 +487,13 @@
margin-block-end: calc(calc(var(--spacing) * 6) * calc(1 - var(--tw-space-y-reverse)));
}
}
.-space-x-px {
:where(& > :not(:last-child)) {
--tw-space-x-reverse: 0;
margin-inline-start: calc(-1px * var(--tw-space-x-reverse));
margin-inline-end: calc(-1px * calc(1 - var(--tw-space-x-reverse)));
}
}
.divide-y {
:where(& > :not(:last-child)) {
--tw-divide-y-reverse: 0;
@ -912,6 +931,9 @@
transition-timing-function: var(--tw-ease, var(--default-transition-timing-function));
transition-duration: var(--tw-duration, var(--default-transition-duration));
}
.ring-inset {
--tw-ring-inset: inset;
}
.placeholder\:text-slate-400 {
&::placeholder {
color: var(--color-slate-400);
@ -962,6 +984,13 @@
}
}
}
.hover\:bg-stone-50 {
&:hover {
@media (hover: hover) {
background-color: var(--color-stone-50);
}
}
}
.hover\:bg-stone-200 {
&:hover {
@media (hover: hover) {
@ -1054,6 +1083,21 @@
background-color: var(--color-slate-200);
}
}
.sm\:flex {
@media (width >= 40rem) {
display: flex;
}
}
.sm\:hidden {
@media (width >= 40rem) {
display: none;
}
}
.sm\:flex-1 {
@media (width >= 40rem) {
flex: 1;
}
}
.sm\:grid-cols-2 {
@media (width >= 40rem) {
grid-template-columns: repeat(2, minmax(0, 1fr));
@ -1064,6 +1108,11 @@
flex-direction: row;
}
}
.sm\:items-center {
@media (width >= 40rem) {
align-items: center;
}
}
.sm\:items-end {
@media (width >= 40rem) {
align-items: flex-end;
@ -1084,6 +1133,11 @@
padding-inline: calc(var(--spacing) * 4);
}
}
.sm\:px-6 {
@media (width >= 40rem) {
padding-inline: calc(var(--spacing) * 6);
}
}
.sm\:pl-2\.5 {
@media (width >= 40rem) {
padding-left: calc(var(--spacing) * 2.5);
@ -1188,6 +1242,11 @@
inherits: false;
initial-value: 0;
}
@property --tw-space-x-reverse {
syntax: "*";
inherits: false;
initial-value: 0;
}
@property --tw-divide-y-reverse {
syntax: "*";
inherits: false;
@ -1418,6 +1477,7 @@
--tw-translate-y: 0;
--tw-translate-z: 0;
--tw-space-y-reverse: 0;
--tw-space-x-reverse: 0;
--tw-divide-y-reverse: 0;
--tw-border-style: solid;
--tw-gradient-position: initial;

View file

@ -13,11 +13,20 @@ from datastar_py.quart import DatastarResponse, read_signals
from datastar_py.sse import DatastarEvent
from htpy import Renderable
from peewee import IntegrityError
from quart import Quart, Response, request, send_from_directory, url_for
from quart import (
Quart,
Response,
has_request_context,
request,
send_from_directory,
url_for,
)
from repub.datastar import RefreshBroker, render_stream
from repub.jobs import (
COMPLETED_EXECUTION_PAGE_SIZE,
JobRuntime,
clear_completed_executions,
load_dashboard_view,
load_execution_log_view,
load_runs_view,
@ -329,6 +338,12 @@ def create_app(*, dev_mode: bool = False) -> Quart:
get_job_runtime(app).move_queued_execution(execution_id, direction="down")
return Response(status=204)
@app.post("/actions/completed-executions/clear")
async def clear_completed_executions_action() -> Response:
    """Delete all completed executions and their log files, then refresh clients.

    Returns 204 No Content; the UI update arrives via the refresh broadcast.
    """
    clear_completed_executions(log_dir=app.config["REPUB_LOG_DIR"])
    # Broadcast a refresh so open pages re-render the now-empty history table.
    trigger_refresh(app)
    return Response(status=204)
@app.post("/job/<int:job_id>/execution/<int:execution_id>/logs")
async def logs_patch(job_id: int, execution_id: int) -> DatastarResponse:
async def render() -> Renderable:
@ -420,12 +435,25 @@ async def render_runs(app: Quart | None = None) -> Renderable:
if app is None:
return runs_page()
view = load_runs_view(log_dir=app.config["REPUB_LOG_DIR"])
completed_page = (
max(1, request.args.get("completed_page", 1, type=int) or 1)
if has_request_context()
else 1
)
view = load_runs_view(
log_dir=app.config["REPUB_LOG_DIR"],
completed_page=completed_page,
completed_page_size=COMPLETED_EXECUTION_PAGE_SIZE,
)
return runs_page(
running_executions=cast(tuple[dict[str, object], ...], view["running"]),
queued_executions=cast(tuple[dict[str, object], ...], view["queued"]),
upcoming_jobs=cast(tuple[dict[str, object], ...], view["upcoming"]),
completed_executions=cast(tuple[dict[str, object], ...], view["completed"]),
completed_page=cast(int, view["completed_page"]),
completed_page_size=cast(int, view["completed_page_size"]),
completed_total_count=cast(int, view["completed_total_count"]),
completed_total_pages=cast(int, view["completed_total_pages"]),
source_count=len(load_sources()),
)