Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
101 changes: 77 additions & 24 deletions ax_cli/commands/alerts.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,12 +148,20 @@ def _build_alert_metadata(
triggered_by_agent: str | None,
title: str | None,
state: str = "triggered",
task_snapshot: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Build the ``metadata`` block the frontend's AlertCardBody reads.

Shape mirrors the dogfood message 1942cc2c but with the compact
reminder fields ChatGPT flagged (source_task_id, due_at, remind_at,
state) and no task-board widget hydration.

When ``task_snapshot`` is provided (task-aware reminders per task
``e55be7c8``), embed a trimmed task block at ``alert.task`` +
``card_payload.task`` so the frontend can render task title / priority /
status / assignee without a second round-trip. Keys follow the backend
task shape: ``id``, ``title``, ``priority``, ``status``, ``assignee_id``,
``assignee_name``, ``creator_id``, ``deadline``.
"""
card_title = title or (f"Reminder: {reason[:80]}" if kind == "reminder" else f"Alert: {reason[:80]}")
fired_at = _iso_utc_now()
Expand Down Expand Up @@ -185,6 +193,8 @@ def _build_alert_metadata(
alert["context_key"] = evidence
if triggered_by_agent:
alert["triggered_by_agent_name"] = triggered_by_agent
if task_snapshot:
alert["task"] = task_snapshot

card_payload: dict[str, Any] = {
"title": card_title,
Expand All @@ -196,6 +206,8 @@ def _build_alert_metadata(
if source_task_id:
card_payload["source_task_id"] = source_task_id
card_payload["resource_uri"] = f"ui://tasks/{source_task_id}"
if task_snapshot:
card_payload["task"] = task_snapshot

return {
"alert": alert,
Expand All @@ -212,6 +224,59 @@ def _build_alert_metadata(
}


_TASK_SNAPSHOT_KEYS = ("id", "title", "priority", "status", "assignee_id", "creator_id", "deadline")


def _fetch_task_snapshot(client: Any, task_id: str) -> dict[str, Any] | None:
"""Fetch a compact task snapshot for embedding in reminder/alert metadata.

Returns a dict with the task's human-readable fields plus ``assignee_name``
resolved via the agent roster (best-effort). Returns ``None`` on any
failure so callers can fall back to the source_task_id link alone.
"""
try:
r = client._http.get(
f"/api/v1/tasks/{task_id}",
headers=client._with_agent(None),
)
r.raise_for_status()
wrapper = client._parse_json(r)
except Exception:
return None

task = wrapper.get("task", wrapper) if isinstance(wrapper, dict) else {}
if not isinstance(task, dict):
return None

snapshot: dict[str, Any] = {k: task[k] for k in _TASK_SNAPSHOT_KEYS if task.get(k) is not None}
if not snapshot.get("id"):
snapshot["id"] = task_id

assignee_id = snapshot.get("assignee_id")
if assignee_id:
name = _agent_name_for(client, str(assignee_id))
if name:
snapshot["assignee_name"] = name

return snapshot


def _agent_name_for(client: Any, agent_id: str) -> str | None:
"""Best-effort resolution of agent_id → handle via the agent roster."""
try:
rr = client._http.get(
f"/api/v1/agents/{agent_id}",
headers=client._with_agent(None),
)
rr.raise_for_status()
agent_wrapper = client._parse_json(rr)
except Exception:
return None
agent = agent_wrapper.get("agent", agent_wrapper) if isinstance(agent_wrapper, dict) else {}
name = agent.get("name") or agent.get("username") or agent.get("handle")
return name.strip().lstrip("@") if isinstance(name, str) else None


def _resolve_target_from_task(client: Any, task_id: str) -> tuple[str | None, str | None]:
"""Fetch a task and return (target_name, resolved_from).

Expand All @@ -230,31 +295,19 @@ def _resolve_target_from_task(client: Any, task_id: str) -> tuple[str | None, st
return None, None

task = wrapper.get("task", wrapper) if isinstance(wrapper, dict) else {}
if not isinstance(task, dict):
return None, None

# The backend returns ids, not names. Try to resolve via the agent
# roster — best-effort, skip if unreachable.
def _name_for(agent_id: str | None) -> str | None:
if not agent_id:
return None
try:
rr = client._http.get(
f"/api/v1/agents/{agent_id}",
headers=client._with_agent(None),
)
rr.raise_for_status()
agent_wrapper = client._parse_json(rr)
agent = agent_wrapper.get("agent", agent_wrapper) if isinstance(agent_wrapper, dict) else {}
name = agent.get("name") or agent.get("username") or agent.get("handle")
return name.strip().lstrip("@") if isinstance(name, str) else None
except Exception:
return None

assignee_name = _name_for(task.get("assignee_id"))
if assignee_name:
return assignee_name, "assignee"
creator_name = _name_for(task.get("creator_id"))
if creator_name:
return creator_name, "creator"
assignee_id = task.get("assignee_id")
if assignee_id:
assignee_name = _agent_name_for(client, str(assignee_id))
if assignee_name:
return assignee_name, "assignee"
creator_id = task.get("creator_id")
if creator_id:
creator_name = _agent_name_for(client, str(creator_id))
if creator_name:
return creator_name, "creator"
return None, None


Expand Down
4 changes: 4 additions & 0 deletions ax_cli/commands/reminders.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from ..output import JSON_OPTION, console, print_json, print_table
from .alerts import (
_build_alert_metadata,
_fetch_task_snapshot,
_format_mention_content,
_normalize_severity,
_resolve_target_from_task,
Expand Down Expand Up @@ -257,6 +258,8 @@ def _fire_policy(client: Any, policy: dict[str, Any], *, now: _dt.datetime) -> d
except Exception:
triggered_by = None

task_snapshot = _fetch_task_snapshot(client, source_task) if source_task else None

fired_at = _iso(now)
metadata = _build_alert_metadata(
kind="reminder",
Expand All @@ -271,6 +274,7 @@ def _fire_policy(client: Any, policy: dict[str, Any], *, now: _dt.datetime) -> d
evidence=policy.get("evidence"),
triggered_by_agent=triggered_by,
title=policy.get("title"),
task_snapshot=task_snapshot,
)
metadata["reminder_policy"] = {
"policy_id": policy.get("id"),
Expand Down
134 changes: 134 additions & 0 deletions tests/test_reminders_commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,3 +177,137 @@ def test_run_once_skips_future_policy(monkeypatch, tmp_path):
stored = _load(policy_file)["policies"][0]
assert stored["enabled"] is True
assert stored["fired_count"] == 0


def test_run_once_enriches_alert_with_task_snapshot(monkeypatch, tmp_path):
    """Task e55be7c8: task reminder alerts should carry a task snapshot
    (title/priority/status/assignee) so the frontend renders task context
    without a second round-trip."""

    # Canned backend payloads the stubbed HTTP layer serves by route suffix.
    task_payload = {
        "task": {
            "id": "task-snap",
            "title": "Ship delivery receipts",
            "priority": "urgent",
            "status": "in_progress",
            "assignee_id": "agent-orion",
            "creator_id": "agent-chatgpt",
            "deadline": "2026-04-17T00:00:00Z",
        }
    }
    agent_payload = {"agent": {"id": "agent-orion", "name": "orion"}}

    class _StubResponse:
        def __init__(self, data):
            self._data = data

        def raise_for_status(self):
            return None

        def json(self):
            return self._data

    class _RoutedHttp:
        def get(self, path: str, *, headers: dict) -> Any:
            if path.endswith("/tasks/task-snap"):
                return _StubResponse(task_payload)
            if path.endswith("/agents/agent-orion"):
                return _StubResponse(agent_payload)
            return _StubResponse({})

    fake = _FakeClient()
    fake._http = _RoutedHttp()  # type: ignore[attr-defined]
    fake._with_agent = lambda _: {}  # type: ignore[attr-defined]
    fake._parse_json = lambda r: r.json()  # type: ignore[attr-defined]
    _install_fake_runtime(monkeypatch, fake)

    # One already-due policy pointing at the stubbed task.
    policy = {
        "id": "rem-snap",
        "enabled": True,
        "space_id": "space-abc",
        "source_task_id": "task-snap",
        "reason": "review delivery receipts",
        "target": "orion",
        "severity": "info",
        "cadence_seconds": 300,
        "next_fire_at": "2026-04-16T00:00:00Z",
        "max_fires": 1,
        "fired_count": 0,
        "fired_keys": [],
    }
    policy_file = tmp_path / "reminders.json"
    policy_file.write_text(json.dumps({"version": 1, "policies": [policy]}))

    result = runner.invoke(app, ["reminders", "run", "--once", "--file", str(policy_file), "--json"])

    assert result.exit_code == 0, result.output
    assert len(fake.sent) == 1
    metadata = fake.sent[0]["metadata"]

    # The snapshot must be embedded at alert.task with the backend task shape.
    task = metadata["alert"].get("task")
    assert task is not None, "alert.task should be embedded when source_task resolves"
    assert task["id"] == "task-snap"
    assert task["title"] == "Ship delivery receipts"
    assert task["priority"] == "urgent"
    assert task["status"] == "in_progress"
    assert task["assignee_id"] == "agent-orion"
    assert task["assignee_name"] == "orion"
    assert task["deadline"] == "2026-04-17T00:00:00Z"

    # The card payload mirrors the snapshot and keeps the task deep-link.
    card_payload = metadata["ui"]["cards"][0]["payload"]
    assert card_payload.get("task") == task, "card_payload.task should mirror alert.task"
    assert card_payload.get("resource_uri") == "ui://tasks/task-snap"


def test_run_once_without_task_snapshot_still_fires(monkeypatch, tmp_path):
    """If the task fetch fails (404, network), the reminder still fires
    without a task snapshot — the existing source_task_id link is the fallback."""

    class _AlwaysDownHttp:
        # Every GET blows up, simulating an unreachable backend.
        def get(self, path: str, *, headers: dict) -> Any:
            raise RuntimeError("simulated network failure")

    fake = _FakeClient()
    fake._http = _AlwaysDownHttp()  # type: ignore[attr-defined]
    fake._with_agent = lambda _: {}  # type: ignore[attr-defined]
    fake._parse_json = lambda r: r.json()  # type: ignore[attr-defined]
    _install_fake_runtime(monkeypatch, fake)

    policy = {
        "id": "rem-fail",
        "enabled": True,
        "space_id": "space-abc",
        "source_task_id": "task-nope",
        "reason": "fallback path",
        "target": "orion",
        "cadence_seconds": 300,
        "next_fire_at": "2026-04-16T00:00:00Z",
        "max_fires": 1,
        "fired_count": 0,
        "fired_keys": [],
    }
    policy_file = tmp_path / "reminders.json"
    policy_file.write_text(json.dumps({"version": 1, "policies": [policy]}))

    result = runner.invoke(app, ["reminders", "run", "--once", "--file", str(policy_file), "--json"])

    # The reminder must still fire; only the snapshot enrichment is skipped.
    assert result.exit_code == 0, result.output
    assert len(fake.sent) == 1
    metadata = fake.sent[0]["metadata"]
    assert "task" not in metadata["alert"], "fallback: no task snapshot embedded on failure"
    assert metadata["alert"]["source_task_id"] == "task-nope", "source_task_id link still present"