Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/)

## [Unreleased]

## [0.4.90] - 2026-03-16

### Added
- **Sidebar unread badges** - Left-rail navigation items for Messages, Channels, and Social Feed now show aggregate unread counts as compact pill badges that update via periodic polling and on window focus. Zero-state badges are hidden; counts cap visually at `99+`.
- **Durable feed-view acknowledgement** - Opening the Social Feed records a per-user acknowledgement timestamp so the feed unread badge reflects genuinely new activity since the last visit. Own-authored posts are excluded from the unread count.
- **Notification deep-link to exact messages** - Bell notification clicks for channel messages now navigate to the exact target message via a server-side focused context window, even when the message is older than the recent page. DM bell clicks include a `#message-<id>` anchor for exact-message scrolling.
- **Container-aware focus scrolling** - Channel message focus now uses measured offsets within `#messages-container` instead of `scrollIntoView()`, and retries shortly after render to absorb layout shifts from async hydration.

### Fixed
- **Bell duplicate counting for mention-bearing messages** - The notification bell now deduplicates by semantic activity key so a `channel_message` event and a `mention` event for the same source message increment the unread badge only once, with the higher-priority event winning the display slot.

## [0.4.89] - 2026-03-15

### Added
Expand Down
2 changes: 1 addition & 1 deletion canopy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
Development: AI-assisted implementation (Claude, Codex, GitHub Copilot, Cursor IDE, Ollama)
"""

__version__ = "0.4.89"
__version__ = "0.4.90"
__protocol_version__ = 1
__author__ = "Canopy Contributors"
__license__ = "Apache-2.0"
Expand Down
352 changes: 202 additions & 150 deletions canopy/core/channels.py

Large diffs are not rendered by default.

116 changes: 116 additions & 0 deletions canopy/core/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,6 +244,7 @@ def _initialize_database(self) -> None:
CREATE TABLE IF NOT EXISTS user_feed_preferences (
user_id TEXT PRIMARY KEY,
algorithm_json TEXT NOT NULL DEFAULT '{}',
last_viewed_at TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
);
Expand All @@ -263,7 +264,10 @@ def _initialize_database(self) -> None:
status TEXT DEFAULT 'pending',
priority TEXT DEFAULT 'normal',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
seen_at TIMESTAMP,
handled_at TIMESTAMP,
completed_at TIMESTAMP,
completion_ref_json TEXT,
expires_at TIMESTAMP,
triggered_by_inbox_id TEXT,
depth INTEGER DEFAULT 0,
Expand Down Expand Up @@ -324,6 +328,35 @@ def _initialize_database(self) -> None:
CREATE INDEX IF NOT EXISTS idx_agent_presence_checkin
ON agent_presence(last_checkin_at);

CREATE TABLE IF NOT EXISTS agent_runtime_state (
user_id TEXT PRIMARY KEY,
last_event_fetch_at TIMESTAMP,
last_event_cursor_seen INTEGER,
last_inbox_fetch_at TIMESTAMP,
updated_at TIMESTAMP NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_agent_runtime_event_fetch
ON agent_runtime_state(last_event_fetch_at);
CREATE INDEX IF NOT EXISTS idx_agent_runtime_inbox_fetch
ON agent_runtime_state(last_inbox_fetch_at);

CREATE TABLE IF NOT EXISTS agent_event_subscription_state (
user_id TEXT PRIMARY KEY,
custom_enabled INTEGER NOT NULL DEFAULT 0,
updated_at TIMESTAMP NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_agent_event_subscription_state_enabled
ON agent_event_subscription_state(custom_enabled, updated_at);

CREATE TABLE IF NOT EXISTS agent_event_subscriptions (
user_id TEXT NOT NULL,
event_type TEXT NOT NULL,
updated_at TIMESTAMP NOT NULL,
PRIMARY KEY (user_id, event_type)
);
CREATE INDEX IF NOT EXISTS idx_agent_event_subscriptions_user
ON agent_event_subscriptions(user_id, updated_at);

-- Local workspace event journal (additive read/delivery model)
CREATE TABLE IF NOT EXISTS workspace_events (
seq INTEGER PRIMARY KEY AUTOINCREMENT,
Expand Down Expand Up @@ -631,6 +664,13 @@ def _run_migrations(self, conn: sqlite3.Connection) -> None:
if 'status' in feed_columns:
conn.execute("CREATE INDEX IF NOT EXISTS idx_feed_posts_status ON feed_posts(status)")
conn.execute("CREATE INDEX IF NOT EXISTS idx_post_permissions_user ON post_permissions(user_id)")

cursor = conn.execute("PRAGMA table_info(user_feed_preferences)")
user_feed_pref_columns = [row[1] for row in cursor.fetchall()]
if 'last_viewed_at' not in user_feed_pref_columns:
logger.info("Migration: Adding last_viewed_at column to user_feed_preferences")
conn.execute("ALTER TABLE user_feed_preferences ADD COLUMN last_viewed_at TIMESTAMP")

# channel_messages is created by ChannelManager — only add index if table exists
cm_exists = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='channel_messages'"
Expand Down Expand Up @@ -704,6 +744,79 @@ def _run_migrations(self, conn: sqlite3.Connection) -> None:
ON agent_presence(last_checkin_at);
""")

ars_exists = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='agent_runtime_state'"
).fetchone()
if not ars_exists:
logger.info("Migration: Creating agent_runtime_state table")
conn.executescript("""
CREATE TABLE IF NOT EXISTS agent_runtime_state (
user_id TEXT PRIMARY KEY,
last_event_fetch_at TIMESTAMP,
last_event_cursor_seen INTEGER,
last_inbox_fetch_at TIMESTAMP,
updated_at TIMESTAMP NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_agent_runtime_event_fetch
ON agent_runtime_state(last_event_fetch_at);
CREATE INDEX IF NOT EXISTS idx_agent_runtime_inbox_fetch
ON agent_runtime_state(last_inbox_fetch_at);
""")

aess_exists = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='agent_event_subscription_state'"
).fetchone()
if not aess_exists:
logger.info("Migration: Creating agent_event_subscription_state table")
conn.executescript("""
CREATE TABLE IF NOT EXISTS agent_event_subscription_state (
user_id TEXT PRIMARY KEY,
custom_enabled INTEGER NOT NULL DEFAULT 0,
updated_at TIMESTAMP NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_agent_event_subscription_state_enabled
ON agent_event_subscription_state(custom_enabled, updated_at);
""")

aes_exists = conn.execute(
"SELECT name FROM sqlite_master WHERE type='table' AND name='agent_event_subscriptions'"
).fetchone()
if not aes_exists:
logger.info("Migration: Creating agent_event_subscriptions table")
conn.executescript("""
CREATE TABLE IF NOT EXISTS agent_event_subscriptions (
user_id TEXT NOT NULL,
event_type TEXT NOT NULL,
updated_at TIMESTAMP NOT NULL,
PRIMARY KEY (user_id, event_type)
);
CREATE INDEX IF NOT EXISTS idx_agent_event_subscriptions_user
ON agent_event_subscriptions(user_id, updated_at);
""")

cursor = conn.execute("PRAGMA table_info(agent_inbox)")
inbox_columns = {row[1] for row in cursor.fetchall()}
if "seen_at" not in inbox_columns:
logger.info("Migration: Adding seen_at column to agent_inbox")
conn.execute("ALTER TABLE agent_inbox ADD COLUMN seen_at TIMESTAMP")
if "completed_at" not in inbox_columns:
logger.info("Migration: Adding completed_at column to agent_inbox")
conn.execute("ALTER TABLE agent_inbox ADD COLUMN completed_at TIMESTAMP")
if "completion_ref_json" not in inbox_columns:
logger.info("Migration: Adding completion_ref_json column to agent_inbox")
conn.execute("ALTER TABLE agent_inbox ADD COLUMN completion_ref_json TEXT")

logger.info("Migration: Normalizing legacy handled inbox rows")
conn.execute(
"""
UPDATE agent_inbox
SET status = 'completed',
seen_at = COALESCE(seen_at, handled_at, created_at),
completed_at = COALESCE(completed_at, handled_at)
WHERE status = 'handled'
"""
)

# Migration: content_contexts table for best-effort extracted text context
conn.executescript("""
CREATE TABLE IF NOT EXISTS content_contexts (
Expand Down Expand Up @@ -1331,6 +1444,9 @@ def _exec_optional(sql: str, params: tuple[Any, ...]) -> None:
_exec_optional("DELETE FROM channel_member_sync_deliveries WHERE target_user_id = ?", (user_id,))
_exec_optional("DELETE FROM likes WHERE user_id = ?", (user_id,))
_exec_optional("DELETE FROM agent_presence WHERE user_id = ?", (user_id,))
_exec_optional("DELETE FROM agent_runtime_state WHERE user_id = ?", (user_id,))
_exec_optional("DELETE FROM agent_event_subscription_state WHERE user_id = ?", (user_id,))
_exec_optional("DELETE FROM agent_event_subscriptions WHERE user_id = ?", (user_id,))

# Channel messages (table in channels.py, same DB): likes then parent refs then messages
try:
Expand Down
73 changes: 73 additions & 0 deletions canopy/core/feed.py
Original file line number Diff line number Diff line change
Expand Up @@ -1001,6 +1001,79 @@ def save_feed_algorithm(self, user_id: str, algo: FeedAlgorithm) -> bool:
logger.error(f"Failed to save feed algorithm for {user_id}: {e}")
return False

def get_feed_last_viewed_at(self, user_id: str) -> Optional[datetime]:
    """Return the timestamp of the user's last acknowledged feed view.

    Returns None when no preference row exists for the user, when the
    stored value is absent, or when the lookup fails for any reason
    (failures are logged as warnings rather than raised).
    """
    query = "SELECT last_viewed_at FROM user_feed_preferences WHERE user_id = ?"
    try:
        with self.db.get_connection() as conn:
            record = conn.execute(query, (user_id,)).fetchone()
            # No row means the user has never acknowledged the feed.
            return self._parse_datetime(record['last_viewed_at']) if record else None
    except Exception as e:
        logger.warning(f"Failed to load feed last_viewed_at for {user_id}: {e}")
        return None

def mark_feed_viewed(self, user_id: str, viewed_at: Optional[datetime] = None) -> bool:
    """Persist an acknowledgement that the user has intentionally viewed the feed.

    Args:
        user_id: Owner of the acknowledgement; a falsy id is rejected.
        viewed_at: Explicit acknowledgement time; defaults to the current
            UTC time when omitted.

    Returns:
        True when the timestamp was stored, False on bad input or DB error.
    """
    if not user_id:
        return False

    # Normalize to the DB's canonical timestamp format before writing.
    stamp = self._format_db_timestamp(viewed_at or datetime.now(timezone.utc))
    try:
        with self.db.get_connection() as conn:
            # Upsert: create the preferences row if the user has never saved
            # one, otherwise just bump the acknowledgement watermark.
            conn.execute("""
                INSERT INTO user_feed_preferences (user_id, algorithm_json, last_viewed_at, updated_at)
                VALUES (?, '{}', ?, CURRENT_TIMESTAMP)
                ON CONFLICT(user_id) DO UPDATE SET
                    last_viewed_at = excluded.last_viewed_at,
                    updated_at = CURRENT_TIMESTAMP
            """, (user_id, stamp))
            conn.commit()
    except Exception as e:
        logger.error(f"Failed to mark feed viewed for {user_id}: {e}")
        return False
    return True

def count_unread_posts(self, user_id: str, *, exclude_own_posts: bool = True) -> int:
    """Count visible feed posts whose latest activity postdates the user's last feed view.

    Visibility mirrors the main feed query: public and network posts,
    custom posts granted via post_permissions, and the user's own posts;
    expired posts never count. When ``exclude_own_posts`` is True (the
    default), posts authored by the user are excluded from the unread
    tally even though they are visible. A user with no recorded feed
    acknowledgement sees every visible post as unread.

    Returns:
        The unread count, or 0 for a falsy user_id or on DB error (logged).
    """
    if not user_id:
        return 0

    # Optional filter fragments and their bind parameters are appended in
    # the same order the placeholders appear in the SQL template below.
    bind_values: List[Any] = [user_id, user_id]

    own_clause = ""
    if exclude_own_posts:
        own_clause = " AND p.author_id != ?"
        bind_values.append(user_id)

    watermark = self.get_feed_last_viewed_at(user_id)
    since_clause = ""
    if watermark is not None:
        since_clause = " AND COALESCE(p.last_activity_at, p.created_at) > ?"
        bind_values.append(self._format_db_timestamp(watermark))

    try:
        with self.db.get_connection() as conn:
            # DISTINCT guards against fan-out from the permissions join.
            row = conn.execute(f"""
                SELECT COUNT(DISTINCT p.id) AS unread_count
                FROM feed_posts p
                LEFT JOIN post_permissions pp ON p.id = pp.post_id
                WHERE (
                    p.visibility = 'public' OR
                    p.visibility = 'network' OR
                    (p.visibility = 'custom' AND pp.user_id = ?) OR
                    p.author_id = ?
                )
                AND (p.expires_at IS NULL OR p.expires_at > CURRENT_TIMESTAMP)
                {own_clause}
                {since_clause}
            """, bind_values).fetchone()
            raw = row['unread_count'] if row else 0
            return max(0, int(raw or 0))
    except Exception as e:
        logger.error(f"Failed to count unread feed posts for {user_id}: {e}")
        return 0

def get_available_tags(self, limit: int = 50) -> List[Dict[str, Any]]:
"""Get popular tags across all posts for the tag picker UI."""
try:
Expand Down
Loading
Loading