Enhance dashboard published feeds controls

This commit is contained in:
Abel Luck 2026-03-31 12:54:21 +02:00
parent e796e09d14
commit ca3d34053f
5 changed files with 177 additions and 29 deletions

View file

@@ -503,6 +503,7 @@ def status_badge(*, label: str, tone: str) -> Renderable:
tones = { tones = {
"running": "bg-emerald-100 text-emerald-800", "running": "bg-emerald-100 text-emerald-800",
"scheduled": "bg-sky-100 text-sky-800", "scheduled": "bg-sky-100 text-sky-800",
"queued": "bg-amber-200 text-amber-950",
"idle": "bg-slate-200 text-slate-700", "idle": "bg-slate-200 text-slate-700",
"failed": "bg-rose-100 text-rose-800", "failed": "bg-rose-100 text-rose-800",
"done": "bg-emerald-100 text-emerald-800", "done": "bg-emerald-100 text-emerald-800",

View file

@@ -799,8 +799,19 @@ def load_dashboard_view(
reference_time = now or datetime.now(UTC) reference_time = now or datetime.now(UTC)
runs_view = load_runs_view(log_dir=log_dir, now=reference_time) runs_view = load_runs_view(log_dir=log_dir, now=reference_time)
output_dir = Path(log_dir).parent output_dir = Path(log_dir).parent
running_by_job_id = {
int(cast(int, execution["job_id"])): execution
for execution in runs_view["running"]
}
queued_by_job_id = {
int(cast(int, execution["job_id"])): execution
for execution in runs_view["queued"]
}
upcoming_by_job_id = {
int(cast(int, job["job_id"])): job for job in runs_view["upcoming"]
}
with database.connection_context(): with database.connection_context():
sources = tuple(Source.select().order_by(Source.name.asc())) jobs = tuple(Job.select(Job, Source).join(Source).order_by(Source.name.asc()))
failed_last_day = ( failed_last_day = (
JobExecution.select() JobExecution.select()
.where( .where(
@@ -818,8 +829,15 @@ def load_dashboard_view(
"running": runs_view["running"], "running": runs_view["running"],
"queued": runs_view["queued"], "queued": runs_view["queued"],
"source_feeds": tuple( "source_feeds": tuple(
_project_source_feed(source, output_dir, reference_time) _project_source_feed(
for source in sources cast(Job, job),
output_dir,
reference_time,
running_execution=running_by_job_id.get(_job_id(cast(Job, job))),
queued_execution=queued_by_job_id.get(_job_id(cast(Job, job))),
upcoming_job=upcoming_by_job_id.get(_job_id(cast(Job, job))),
)
for job in jobs
), ),
"snapshot": { "snapshot": {
"running_now": str(len(runs_view["running"])), "running_now": str(len(runs_view["running"])),
@@ -1076,8 +1094,15 @@ def _project_completed_execution(
def _project_source_feed( def _project_source_feed(
source: Source, output_dir: Path, reference_time: datetime job: Job,
output_dir: Path,
reference_time: datetime,
*,
running_execution: dict[str, object] | None = None,
queued_execution: dict[str, object] | None = None,
upcoming_job: dict[str, object] | None = None,
) -> dict[str, object]: ) -> dict[str, object]:
source = cast(Source, job.source)
source_slug = str(source.slug) source_slug = str(source.slug)
source_dir = feed_output_dir(out_dir=output_dir, feed_slug=source_slug) source_dir = feed_output_dir(out_dir=output_dir, feed_slug=source_slug)
feed_path = feed_output_path(out_dir=output_dir, feed_slug=source_slug) feed_path = feed_output_path(out_dir=output_dir, feed_slug=source_slug)
@@ -1087,12 +1112,22 @@ def _project_source_feed(
if feed_exists if feed_exists
else None else None
) )
if running_execution is not None:
feed_status_label = str(running_execution["status"])
feed_status_tone = "scheduled"
elif queued_execution is not None:
feed_status_label = "Queued"
feed_status_tone = "queued"
else:
feed_status_label = "Available" if feed_exists else "Missing"
feed_status_tone = "done" if feed_exists else "failed"
return { return {
"source": source.name, "source": source.name,
"slug": source_slug, "slug": source_slug,
"feed_href": f"/feeds/{source_slug}/feed.rss", "feed_href": f"/feeds/{source_slug}/feed.rss",
"feed_status_label": "Available" if feed_exists else "Missing", "feed_status_label": feed_status_label,
"feed_status_tone": "done" if feed_exists else "failed", "feed_status_tone": feed_status_tone,
"feed_exists": feed_exists, "feed_exists": feed_exists,
"last_updated": ( "last_updated": (
_humanize_relative_time(reference_time, updated_at) _humanize_relative_time(reference_time, updated_at)
@@ -1100,6 +1135,24 @@ def _project_source_feed(
else "Never published" else "Never published"
), ),
"last_updated_iso": updated_at.isoformat() if updated_at is not None else None, "last_updated_iso": updated_at.isoformat() if updated_at is not None else None,
"next_run": (
str(upcoming_job["next_run"])
if upcoming_job is not None
else "Not scheduled"
),
"next_run_at": (
cast(str | None, upcoming_job["next_run_at"])
if upcoming_job is not None
else None
),
"run_disabled": (
bool(upcoming_job["run_disabled"]) if upcoming_job is not None else False
),
"run_post_path": (
str(upcoming_job["run_post_path"])
if upcoming_job is not None
else f"/actions/jobs/{_job_id(job)}/run-now"
),
"artifact_footprint": _format_bytes(_directory_size(source_dir)), "artifact_footprint": _format_bytes(_directory_size(source_dir)),
} }

View file

@@ -1,11 +1,13 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Mapping from collections.abc import Mapping
from typing import cast
import htpy as h import htpy as h
from htpy import Node, Renderable from htpy import Node, Renderable
from repub.components import ( from repub.components import (
action_button,
app_shell, app_shell,
header_action_link, header_action_link,
inline_link, inline_link,
@@ -89,6 +91,19 @@ def _source_feed_row(source_feed: Mapping[str, object]) -> tuple[Node, ...]:
if last_updated_iso is not None if last_updated_iso is not None
else h.p(class_="font-medium text-slate-900")[str(source_feed["last_updated"])] else h.p(class_="font-medium text-slate-900")[str(source_feed["last_updated"])]
) )
next_run_iso = source_feed.get("next_run_at")
next_run = (
h.time(
{
"data-next-run-at": str(next_run_iso),
"title": str(next_run_iso),
},
datetime=str(next_run_iso),
class_="font-medium text-slate-900",
)[str(source_feed["next_run"])]
if next_run_iso is not None
else h.p(class_="font-medium text-slate-900")[str(source_feed["next_run"])]
)
return ( return (
h.div[ h.div[
h.div(class_="font-semibold text-slate-950")[str(source_feed["source"])], h.div(class_="font-semibold text-slate-950")[str(source_feed["source"])],
@@ -108,9 +123,15 @@ def _source_feed_row(source_feed: Mapping[str, object]) -> tuple[Node, ...]:
tone=str(source_feed["feed_status_tone"]), tone=str(source_feed["feed_status_tone"]),
), ),
last_updated, last_updated,
next_run,
h.p(class_="font-medium text-slate-900")[ h.p(class_="font-medium text-slate-900")[
str(source_feed["artifact_footprint"]) str(source_feed["artifact_footprint"])
], ],
action_button(
label="Run now",
disabled=bool(source_feed["run_disabled"]),
post_path=cast(str | None, source_feed.get("run_post_path")),
),
) )
@@ -122,7 +143,15 @@ def published_feeds_table(
eyebrow="Published feeds", eyebrow="Published feeds",
title="Published feeds", title="Published feeds",
empty_message="No feeds have been published yet.", empty_message="No feeds have been published yet.",
headers=("Source", "Feed URL", "Status", "Last updated", "Disk usage"), headers=(
"Source",
"Feed URL",
"Status",
"Last updated",
"Next run",
"Disk usage",
"Actions",
),
rows=rows, rows=rows,
actions=muted_action_link(href="/sources", label="Manage sources"), actions=muted_action_link(href="/sources", label="Manage sources"),
) )

View file

@@ -394,9 +394,6 @@
.min-w-32 { .min-w-32 {
min-width: calc(var(--spacing) * 32); min-width: calc(var(--spacing) * 32);
} }
.min-w-56 {
min-width: calc(var(--spacing) * 56);
}
.min-w-64 { .min-w-64 {
min-width: calc(var(--spacing) * 64); min-width: calc(var(--spacing) * 64);
} }
@@ -406,9 +403,6 @@
.min-w-\[64rem\] { .min-w-\[64rem\] {
min-width: 64rem; min-width: 64rem;
} }
.min-w-\[70rem\] {
min-width: 70rem;
}
.flex-1 { .flex-1 {
flex: 1; flex: 1;
} }
@@ -741,15 +735,9 @@
.pr-5 { .pr-5 {
padding-right: calc(var(--spacing) * 5); padding-right: calc(var(--spacing) * 5);
} }
.pr-6 {
padding-right: calc(var(--spacing) * 6);
}
.pl-3 { .pl-3 {
padding-left: calc(var(--spacing) * 3); padding-left: calc(var(--spacing) * 3);
} }
.pl-4 {
padding-left: calc(var(--spacing) * 4);
}
.text-center { .text-center {
text-align: center; text-align: center;
} }
@@ -955,11 +943,6 @@
padding-left: calc(var(--spacing) * 3); padding-left: calc(var(--spacing) * 3);
} }
} }
.first\:pl-4 {
&:first-child {
padding-left: calc(var(--spacing) * 4);
}
}
.hover\:bg-amber-300 { .hover\:bg-amber-300 {
&:hover { &:hover {
@media (hover: hover) { @media (hover: hover) {

View file

@@ -719,7 +719,7 @@ def test_load_dashboard_view_lists_source_feed_artifacts(
app.config["REPUB_LOG_DIR"] = log_dir app.config["REPUB_LOG_DIR"] = log_dir
log_dir.mkdir(parents=True) log_dir.mkdir(parents=True)
create_source( available_source = create_source(
name="Available source", name="Available source",
slug="available-source", slug="available-source",
source_type="feed", source_type="feed",
@@ -733,7 +733,7 @@ def test_load_dashboard_view_lists_source_feed_artifacts(
cron_month="*", cron_month="*",
feed_url="https://example.com/available.xml", feed_url="https://example.com/available.xml",
) )
create_source( missing_source = create_source(
name="Missing source", name="Missing source",
slug="missing-source", slug="missing-source",
source_type="feed", source_type="feed",
@@ -757,6 +757,8 @@ def test_load_dashboard_view_lists_source_feed_artifacts(
updated_at = reference_time - timedelta(minutes=32) updated_at = reference_time - timedelta(minutes=32)
updated_at_epoch = updated_at.timestamp() updated_at_epoch = updated_at.timestamp()
os.utime(feed_path, (updated_at_epoch, updated_at_epoch)) os.utime(feed_path, (updated_at_epoch, updated_at_epoch))
available_job = Job.get(Job.source == available_source)
missing_job = Job.get(Job.source == missing_source)
source_feeds = cast( source_feeds = cast(
tuple[dict[str, object], ...], tuple[dict[str, object], ...],
@@ -773,6 +775,10 @@ def test_load_dashboard_view_lists_source_feed_artifacts(
"feed_exists": True, "feed_exists": True,
"last_updated": "32 minutes ago", "last_updated": "32 minutes ago",
"last_updated_iso": updated_at.isoformat(), "last_updated_iso": updated_at.isoformat(),
"next_run": "Not scheduled",
"next_run_at": None,
"run_disabled": False,
"run_post_path": f"/actions/jobs/{available_job.id}/run-now",
"artifact_footprint": "3.0 KB", "artifact_footprint": "3.0 KB",
}, },
{ {
@@ -784,11 +790,80 @@ def test_load_dashboard_view_lists_source_feed_artifacts(
"feed_exists": False, "feed_exists": False,
"last_updated": "Never published", "last_updated": "Never published",
"last_updated_iso": None, "last_updated_iso": None,
"next_run": "Not scheduled",
"next_run_at": None,
"run_disabled": False,
"run_post_path": f"/actions/jobs/{missing_job.id}/run-now",
"artifact_footprint": "0 B", "artifact_footprint": "0 B",
}, },
) )
def test_load_dashboard_view_projects_feed_status_from_job_runtime(
    monkeypatch, tmp_path: Path
) -> None:
    """Feed rows reflect live job state: an active (RUNNING) execution and a
    pending (queued) execution override the default Available/Missing status
    projected for each source feed."""
    db_path = tmp_path / "dashboard-feed-status.db"
    monkeypatch.setenv("REPUBLISHER_DB_PATH", str(db_path))
    create_app()
    log_dir = tmp_path / "out" / "logs"
    log_dir.mkdir(parents=True)
    reference_time = datetime(2026, 3, 30, 12, 30, tzinfo=UTC)
    # Two enabled sources with identical cron schedules; only their
    # execution state (created below) distinguishes them.
    running_source = create_source(
        name="Running source",
        slug="running-source",
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=True,
        cron_minute="35",
        cron_hour="12",
        cron_day_of_month="30",
        cron_day_of_week="*",
        cron_month="3",
        feed_url="https://example.com/running.xml",
    )
    queued_source = create_source(
        name="Queued source",
        slug="queued-source",
        source_type="feed",
        notes="",
        spider_arguments="",
        enabled=True,
        cron_minute="35",
        cron_hour="12",
        cron_day_of_month="30",
        cron_day_of_week="*",
        cron_month="3",
        feed_url="https://example.com/queued.xml",
    )
    running_job = Job.get(Job.source == running_source)
    queued_job = Job.get(Job.source == queued_source)
    # In-flight execution: started shortly before the reference time.
    JobExecution.create(
        job=running_job,
        running_status=JobExecutionStatus.RUNNING,
        started_at=reference_time - timedelta(minutes=2),
    )
    # Queued execution: PENDING status with no started_at.
    JobExecution.create(
        job=queued_job,
        running_status=JobExecutionStatus.PENDING,
    )

    source_feeds = cast(
        tuple[dict[str, object], ...],
        load_dashboard_view(log_dir=log_dir, now=reference_time)["source_feeds"],
    )

    # Feed rows are ordered by source name ascending, so "Queued source"
    # (index 0) precedes "Running source" (index 1).
    assert source_feeds[0]["feed_status_label"] == "Queued"
    assert source_feeds[0]["feed_status_tone"] == "queued"
    # NOTE(review): run_disabled is expected True whenever an execution is
    # already active/queued for the job — confirm against the view logic.
    assert source_feeds[0]["run_disabled"] is True
    assert source_feeds[1]["feed_status_label"] == "Running"
    assert source_feeds[1]["feed_status_tone"] == "scheduled"
    assert source_feeds[1]["next_run"] == "Running now"
    assert source_feeds[1]["run_disabled"] is True
def test_render_dashboard_shows_source_feed_links_and_statuses( def test_render_dashboard_shows_source_feed_links_and_statuses(
monkeypatch, tmp_path: Path monkeypatch, tmp_path: Path
) -> None: ) -> None:
@@ -797,13 +872,13 @@ def test_render_dashboard_shows_source_feed_links_and_statuses(
app = create_app() app = create_app()
app.config["REPUB_LOG_DIR"] = tmp_path / "out" / "logs" app.config["REPUB_LOG_DIR"] = tmp_path / "out" / "logs"
create_source( published_source = create_source(
name="Published source", name="Published source",
slug="published-source", slug="published-source",
source_type="feed", source_type="feed",
notes="", notes="",
spider_arguments="", spider_arguments="",
enabled=False, enabled=True,
cron_minute="*/5", cron_minute="*/5",
cron_hour="*", cron_hour="*",
cron_day_of_month="*", cron_day_of_month="*",
@@ -811,7 +886,7 @@ def test_render_dashboard_shows_source_feed_links_and_statuses(
cron_month="*", cron_month="*",
feed_url="https://example.com/published.xml", feed_url="https://example.com/published.xml",
) )
create_source( missing_source = create_source(
name="Missing source", name="Missing source",
slug="missing-source", slug="missing-source",
source_type="feed", source_type="feed",
@@ -830,6 +905,8 @@ def test_render_dashboard_shows_source_feed_links_and_statuses(
published_feed = tmp_path / "out" / "feeds" / "published-source" / "feed.rss" published_feed = tmp_path / "out" / "feeds" / "published-source" / "feed.rss"
published_feed.parent.mkdir(parents=True) published_feed.parent.mkdir(parents=True)
published_feed.write_text("<rss/>\n", encoding="utf-8") published_feed.write_text("<rss/>\n", encoding="utf-8")
published_job = Job.get(Job.source == published_source)
missing_job = Job.get(Job.source == missing_source)
body = str(await render_dashboard(app)) body = str(await render_dashboard(app))
@@ -839,6 +916,11 @@ def test_render_dashboard_shows_source_feed_links_and_statuses(
assert "Available" in body assert "Available" in body
assert "Missing" in body assert "Missing" in body
assert "Never published" in body assert "Never published" in body
assert "Next run" in body
assert ">Run now<" in body
assert f"/actions/jobs/{published_job.id}/run-now" in body
assert f"/actions/jobs/{missing_job.id}/run-now" in body
assert "data-next-run-at" in body
asyncio.run(run()) asyncio.run(run())