From e27cf5d317a3f6f4a7e6ecc7715468c72dfb7ec5 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 11:41:06 -0600 Subject: [PATCH 01/14] refactor: adopt getErrorMessage() utility across 78 files Replace 214 inline `error instanceof Error ? error.message : String(error)` patterns with the existing `getErrorMessage()` utility from `src/common/utils/errors.ts`. This eliminates the most pervasive DRY violation in the codebase (172+ occurrences across 30+ files). Heaviest adopters: - workspaceService.ts: 32 replacements - projectService.ts: 13 replacements - orpc/router.ts: 8 replacements - historyService.ts: 8 replacements --- .../ChatInput/useCreationWorkspace.ts | 3 +- .../components/hooks/useGitBranchDetails.ts | 5 +- .../hooks/useMuxGatewayAccountStatus.ts | 3 +- src/browser/hooks/useVoiceInput.ts | 5 +- src/browser/hooks/useWorkspaceName.ts | 3 +- src/browser/terminal/TerminalSessionRouter.ts | 3 +- src/cli/run.ts | 5 +- src/desktop/main.ts | 5 +- src/node/git.ts | 7 +- src/node/orpc/router.ts | 17 ++--- src/node/orpc/server.ts | 3 +- src/node/runtime/CoderSSHRuntime.ts | 15 +++-- src/node/runtime/DevcontainerRuntime.ts | 2 +- src/node/runtime/LocalBaseRuntime.ts | 9 +-- src/node/runtime/RemoteRuntime.ts | 3 +- src/node/runtime/ptySpawn.ts | 3 +- src/node/runtime/runtimeFactory.ts | 3 +- src/node/runtime/sshConfigParser.ts | 3 +- src/node/runtime/transports/SSH2Transport.ts | 2 +- .../agentDefinitionsService.ts | 3 +- .../parseAgentDefinitionMarkdown.ts | 3 +- .../resolveAgentInheritanceChain.ts | 3 +- src/node/services/agentResolution.ts | 5 +- src/node/services/agentSession.ts | 11 ++-- .../agentSkills/agentSkillsService.ts | 3 +- .../agentSkills/parseSkillMarkdown.ts | 3 +- src/node/services/aiService.ts | 11 ++-- .../services/backgroundProcessExecutor.ts | 3 +- src/node/services/backgroundProcessManager.ts | 2 +- src/node/services/coderService.test.ts | 13 ++-- src/node/services/coderService.ts | 7 +- src/node/services/codexOauthService.ts | 13 ++-- 
src/node/services/compactionHandler.ts | 3 +- src/node/services/copilotOauthService.ts | 5 +- src/node/services/editorService.ts | 3 +- src/node/services/experimentsService.ts | 3 +- src/node/services/gitPatchArtifactService.ts | 7 +- src/node/services/historyService.ts | 17 ++--- src/node/services/hooks.ts | 7 +- src/node/services/initStateManager.ts | 3 +- src/node/services/mcpConfigService.ts | 9 +-- src/node/services/mcpOauthService.ts | 11 ++-- src/node/services/mcpServerManager.ts | 9 +-- src/node/services/mock/mockAiStreamPlayer.ts | 3 +- src/node/services/muxGatewayOauthService.ts | 5 +- src/node/services/muxGovernorOauthService.ts | 11 ++-- src/node/services/partialService.ts | 7 +- src/node/services/policyService.ts | 9 +-- src/node/services/projectService.ts | 27 ++++---- src/node/services/providerModelFactory.ts | 3 +- src/node/services/providerService.ts | 7 +- src/node/services/ptc/quickjsRuntime.ts | 5 +- src/node/services/signingService.ts | 13 ++-- src/node/services/streamContextBuilder.ts | 5 +- src/node/services/streamManager.ts | 11 ++-- src/node/services/system1ToolWrapper.ts | 13 ++-- src/node/services/taskService.ts | 9 +-- src/node/services/terminalService.ts | 3 +- src/node/services/tools/agent_skill_read.ts | 3 +- .../services/tools/agent_skill_read_file.ts | 3 +- src/node/services/tools/bash.ts | 3 +- src/node/services/tools/file_edit_insert.ts | 3 +- .../services/tools/file_edit_operation.ts | 3 +- src/node/services/tools/file_read.ts | 3 +- .../services/tools/mux_global_agents_read.ts | 3 +- .../services/tools/mux_global_agents_write.ts | 3 +- src/node/services/tools/notify.ts | 3 +- src/node/services/tools/task.ts | 3 +- src/node/services/tools/task_await.ts | 5 +- src/node/services/tools/web_fetch.ts | 3 +- src/node/services/voiceService.ts | 3 +- src/node/services/workspaceLifecycleHooks.ts | 3 +- .../services/workspaceMcpOverridesService.ts | 3 +- src/node/services/workspaceService.ts | 65 ++++++++++--------- 
src/node/services/workspaceTitleGenerator.ts | 3 +- src/node/utils/main/bashPath.ts | 3 +- src/node/utils/main/tokenizer.worker.ts | 3 +- src/node/utils/sessionFile.ts | 5 +- 78 files changed, 289 insertions(+), 224 deletions(-) diff --git a/src/browser/components/ChatInput/useCreationWorkspace.ts b/src/browser/components/ChatInput/useCreationWorkspace.ts index 9a1f328466..b8a9fea5d4 100644 --- a/src/browser/components/ChatInput/useCreationWorkspace.ts +++ b/src/browser/components/ChatInput/useCreationWorkspace.ts @@ -45,6 +45,7 @@ import { KNOWN_MODELS } from "@/common/constants/knownModels"; import { getModelCapabilities } from "@/common/utils/ai/modelCapabilities"; import { normalizeModelInput } from "@/browser/utils/models/normalizeModelInput"; import { resolveDevcontainerSelection } from "@/browser/utils/devcontainerSelection"; +import { getErrorMessage } from "@/common/utils/errors"; export type CreationSendResult = { success: true } | { success: false; error?: SendMessageError }; @@ -545,7 +546,7 @@ export function useCreationWorkspace({ return { success: true }; } catch (err) { - const errorMessage = err instanceof Error ? 
err.message : String(err); + const errorMessage = getErrorMessage(err); setToast({ id: Date.now().toString(), type: "error", diff --git a/src/browser/components/hooks/useGitBranchDetails.ts b/src/browser/components/hooks/useGitBranchDetails.ts index 681cd250a5..147e5f2a49 100644 --- a/src/browser/components/hooks/useGitBranchDetails.ts +++ b/src/browser/components/hooks/useGitBranchDetails.ts @@ -7,6 +7,7 @@ import { type GitBranchHeader, } from "@/common/utils/git/parseGitLog"; import { useAPI } from "@/browser/contexts/API"; +import { getErrorMessage } from "@/common/utils/errors"; const GitBranchDataSchema = z.object({ showBranch: z.string(), @@ -274,9 +275,7 @@ printf '__MUX_BRANCH_DATA__BEGIN_DIRTY_FILES__\\n%s\\n__MUX_BRANCH_DATA__END_DIR timestamp: Date.now(), }; } catch (error) { - setErrorMessage( - `Failed to fetch branch info: ${error instanceof Error ? error.message : String(error)}` - ); + setErrorMessage(`Failed to fetch branch info: ${getErrorMessage(error)}`); setCommits(null); } finally { setIsLoading(false); diff --git a/src/browser/hooks/useMuxGatewayAccountStatus.ts b/src/browser/hooks/useMuxGatewayAccountStatus.ts index d724aad95e..216d481eef 100644 --- a/src/browser/hooks/useMuxGatewayAccountStatus.ts +++ b/src/browser/hooks/useMuxGatewayAccountStatus.ts @@ -5,6 +5,7 @@ import { CUSTOM_EVENTS, createCustomEvent } from "@/common/constants/events"; import { GATEWAY_CONFIGURED_KEY } from "@/common/constants/storage"; import { MUX_GATEWAY_SESSION_EXPIRED_MESSAGE } from "@/common/constants/muxGatewayOAuth"; import { formatCostWithDollar } from "@/common/utils/tokens/usageAggregator"; +import { getErrorMessage } from "@/common/utils/errors"; export interface MuxGatewayAccountStatus { remaining_microdollars: number; @@ -51,7 +52,7 @@ export function useMuxGatewayAccountStatus() { setError(result.error); } catch (err) { - const message = err instanceof Error ? 
err.message : String(err); + const message = getErrorMessage(err); setError(message); } finally { setIsLoading(false); diff --git a/src/browser/hooks/useVoiceInput.ts b/src/browser/hooks/useVoiceInput.ts index 612627d248..1b81fceda8 100644 --- a/src/browser/hooks/useVoiceInput.ts +++ b/src/browser/hooks/useVoiceInput.ts @@ -11,6 +11,7 @@ import { matchesKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds"; import { stopKeyboardPropagation } from "@/browser/utils/events"; import type { APIClient } from "@/browser/contexts/API"; import { trackVoiceTranscription } from "@/common/telemetry"; +import { getErrorMessage } from "@/common/utils/errors"; export type VoiceInputState = "idle" | "requesting" | "recording" | "transcribing"; @@ -187,7 +188,7 @@ export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResul setTimeout(() => callbacksRef.current.onSend?.(), 0); } } catch (err) { - const msg = err instanceof Error ? err.message : String(err); + const msg = getErrorMessage(err); callbacksRef.current.onError?.(`Transcription failed: ${msg}`); trackVoiceTranscription(audioDurationSecs, false); } finally { @@ -265,7 +266,7 @@ export function useVoiceInput(options: UseVoiceInputOptions): UseVoiceInputResul recordingStartTimeRef.current = Date.now(); setState("recording"); } catch (err) { - const msg = err instanceof Error ? 
err.message : String(err); + const msg = getErrorMessage(err); const isPermissionDenied = msg.includes("Permission denied") || msg.includes("NotAllowed"); callbacksRef.current.onError?.( diff --git a/src/browser/hooks/useWorkspaceName.ts b/src/browser/hooks/useWorkspaceName.ts index 7f107b995c..a5c62e4a8b 100644 --- a/src/browser/hooks/useWorkspaceName.ts +++ b/src/browser/hooks/useWorkspaceName.ts @@ -6,6 +6,7 @@ import { getWorkspaceNameStateKey } from "@/common/constants/storage"; import { useGateway, formatAsGatewayModel } from "./useGatewayModels"; import { getKnownModel } from "@/common/constants/knownModels"; import { validateWorkspaceName } from "@/common/utils/validation/workspaceValidation"; +import { getErrorMessage } from "@/common/utils/errors"; /** Small/fast models preferred for name generation */ const PREFERRED_MODELS = [getKnownModel("HAIKU").id, getKnownModel("GPT_MINI").id]; @@ -272,7 +273,7 @@ export function useWorkspaceName(options: UseWorkspaceNameOptions): UseWorkspace if (requestId !== requestIdRef.current) { return null; } - const errorMsg = err instanceof Error ? err.message : String(err); + const errorMsg = getErrorMessage(err); setError(errorMsg); safeResolve(null); return null; diff --git a/src/browser/terminal/TerminalSessionRouter.ts b/src/browser/terminal/TerminalSessionRouter.ts index 9498e26e50..0da0c45bba 100644 --- a/src/browser/terminal/TerminalSessionRouter.ts +++ b/src/browser/terminal/TerminalSessionRouter.ts @@ -22,6 +22,7 @@ import type { RouterClient } from "@orpc/server"; import type { AppRouter } from "@/node/orpc/router"; +import { getErrorMessage } from "@/common/utils/errors"; type APIClient = RouterClient; @@ -349,7 +350,7 @@ export class TerminalSessionRouter { } catch (err) { if (!signal.aborted) { // Ignore "session not found" errors for exit stream - const errMsg = err instanceof Error ? 
err.message : String(err); + const errMsg = getErrorMessage(err); if (!errMsg.includes("isOpen") && !errMsg.includes("undefined")) { console.error(`[TerminalRouter] Exit stream error for ${sessionId}:`, err); } diff --git a/src/cli/run.ts b/src/cli/run.ts index 8ba27ac322..2090c79be2 100644 --- a/src/cli/run.ts +++ b/src/cli/run.ts @@ -71,6 +71,7 @@ import { runFullInit } from "@/node/runtime/runtimeFactory"; import { execSync } from "child_process"; import { getParseOptions } from "./argv"; import { EXPERIMENT_IDS } from "@/common/constants/experiments"; +import { getErrorMessage } from "@/common/utils/errors"; // Display labels for CLI help (OFF, LOW, MED, HIGH, MAX) const THINKING_LABELS_LIST = Object.values(THINKING_DISPLAY_LABELS).join(", "); @@ -530,7 +531,7 @@ async function main(): Promise { initLogger, }); } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); + const errorMessage = getErrorMessage(error); initLogger.logStderr(`Initialization failed: ${errorMessage}`); initLogger.logComplete(-1); initResult = { success: false, error: errorMessage }; @@ -1111,6 +1112,6 @@ main() }) .catch((error) => { clearInterval(keepAliveInterval); - console.error(`Error: ${error instanceof Error ? error.message : String(error)}`); + console.error(`Error: ${getErrorMessage(error)}`); process.exit(1); }); diff --git a/src/desktop/main.ts b/src/desktop/main.ts index c971292d4b..1fdee81201 100644 --- a/src/desktop/main.ts +++ b/src/desktop/main.ts @@ -1,5 +1,6 @@ // Enable source map support for better error stack traces in production import "source-map-support/register"; +import { getErrorMessage } from "@/common/utils/errors"; // Fix PATH on macOS when launched from Finder (not terminal). // GUI apps inherit minimal PATH from launchd, missing Homebrew tools like git-lfs. 
@@ -109,7 +110,7 @@ if (process.env.MUX_DEBUG_START_TIME === "1") { process.on("uncaughtException", (error: unknown) => { console.error("Uncaught Exception:", error); - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); const stack = error instanceof Error ? error.stack : undefined; console.error("Stack:", stack); @@ -128,7 +129,7 @@ process.on("unhandledRejection", (reason, promise) => { console.error("Reason:", reason); if (app.isPackaged) { - const message = reason instanceof Error ? reason.message : String(reason); + const message = getErrorMessage(reason); const stack = reason instanceof Error ? reason.stack : undefined; dialog.showErrorBox( "Unhandled Promise Rejection", diff --git a/src/node/git.ts b/src/node/git.ts index 73a34f47cd..00b6ab1d9b 100644 --- a/src/node/git.ts +++ b/src/node/git.ts @@ -5,6 +5,7 @@ import type { RuntimeConfig } from "@/common/types/runtime"; import { execAsync } from "@/node/utils/disposableExec"; import { createRuntime } from "./runtime/runtimeFactory"; import { log } from "./services/log"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Remove stale .git/index.lock file if it exists and is old. @@ -187,7 +188,7 @@ export async function createWorktree( return { success: true, path: workspacePath }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } } @@ -233,7 +234,7 @@ export async function removeWorktree( await proc.result; return { success: true }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } } @@ -247,7 +248,7 @@ export async function pruneWorktrees(projectPath: string): Promise 100 ? "..." 
: "") ); @@ -184,7 +185,7 @@ async function readPartialJsonBestEffort(partialPath: string): Promise { }, }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Mux Gateway balance request failed: ${message}`); } @@ -1094,7 +1095,7 @@ export const router = (authToken?: string) => { try { json = await response.json(); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Mux Gateway balance response was not valid JSON: ${message}`); } @@ -1315,7 +1316,7 @@ export const router = (authToken?: string) => { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(message); } }), @@ -2549,7 +2550,7 @@ export const router = (authToken?: string) => { log.warn("workspace.getSubagentTranscript: descendant check failed", { requestingWorkspaceId, taskId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -3248,7 +3249,7 @@ export const router = (authToken?: string) => { await context.sessionTimingService.clearTimingFile(input.workspaceId); return { success: true, data: undefined }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } }), @@ -3288,7 +3289,7 @@ export const router = (authToken?: string) => { ); return { success: true, data: undefined }; } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } }), diff --git a/src/node/orpc/server.ts b/src/node/orpc/server.ts index 2ef7fd3c12..753d012cec 100644 --- a/src/node/orpc/server.ts +++ b/src/node/orpc/server.ts @@ -24,6 +24,7 @@ import { VERSION } from "@/version"; import { formatOrpcError } from "@/node/orpc/formatOrpcError"; import { log } from "@/node/services/log"; import { attachStreamErrorHandler, isIgnorableStreamError } from "@/node/utils/streamErrors"; +import { getErrorMessage } from "@/common/utils/errors"; type AliveWebSocket = WebSocket & { isAlive?: boolean }; @@ -755,7 +756,7 @@ export async function createOrpcServer({ return; } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); const code = error && typeof error === "object" && "code" in error && typeof error.code === "string" ? error.code diff --git a/src/node/runtime/CoderSSHRuntime.ts b/src/node/runtime/CoderSSHRuntime.ts index a8eb37e142..151f5f7087 100644 --- a/src/node/runtime/CoderSSHRuntime.ts +++ b/src/node/runtime/CoderSSHRuntime.ts @@ -31,6 +31,7 @@ import { log } from "@/node/services/log"; import { execBuffered } from "@/node/utils/runtime/helpers"; import { expandTildeForSSH } from "./tildeExpansion"; import * as path from "path"; +import { getErrorMessage } from "@/common/utils/errors"; export interface CoderSSHRuntimeConfig extends SSHRuntimeConfig { /** Coder-specific configuration */ @@ -333,7 +334,7 @@ export class CoderSSHRuntime extends SSHRuntime { emitStatus("ready"); return { ready: true }; } catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error); + const errorMsg = getErrorMessage(error); emitStatus("error"); @@ -445,7 +446,7 @@ export class CoderSSHRuntime extends SSHRuntime { if (!coder.existingWorkspace) { await this.coderService.disposeProvisioningSession(workspaceName); } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err( `Failed to read Coder deployment SSH config. ` + `Make sure you're logged in with the Coder CLI. ` + @@ -571,7 +572,7 @@ export class CoderSSHRuntime extends SSHRuntime { deletedPath: this.getWorkspacePath(projectPath, workspaceName), }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); log.error("Failed to delete stopped Coder workspace", { coderWorkspaceName, error: message, @@ -603,7 +604,7 @@ export class CoderSSHRuntime extends SSHRuntime { log.debug(`Deleting Coder workspace "${coderWorkspaceName}"`); await this.coderService.deleteWorkspace(coderWorkspaceName); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); log.error("Failed to delete Coder workspace", { coderWorkspaceName, error: message, @@ -700,7 +701,7 @@ export class CoderSSHRuntime extends SSHRuntime { initLogger.logStdout(line); } } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); + const errorMsg = getErrorMessage(error); log.error("Failed to create Coder workspace", { error, config: this.coderConfig }); initLogger.logStderr(`Failed to create Coder workspace: ${errorMsg}`); throw new Error(`Failed to create Coder workspace: ${errorMsg}`); @@ -746,7 +747,7 @@ export class CoderSSHRuntime extends SSHRuntime { initLogger.logStdout(line); } } catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error); + const errorMsg = getErrorMessage(error); log.error("Failed waiting for Coder workspace", { error, config: this.coderConfig }); initLogger.logStderr(`Failed connecting to Coder workspace: ${errorMsg}`); throw new Error(`Failed connecting to Coder workspace: ${errorMsg}`); @@ -758,7 +759,7 @@ export class CoderSSHRuntime extends SSHRuntime { try { await this.coderService.ensureSSHConfig(); } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); + const errorMsg = getErrorMessage(error); log.error("Failed to configure SSH for Coder", { error }); initLogger.logStderr(`Failed to configure SSH: ${errorMsg}`); throw new Error(`Failed to configure SSH for Coder: ${errorMsg}`); diff --git a/src/node/runtime/DevcontainerRuntime.ts b/src/node/runtime/DevcontainerRuntime.ts index 27f2c1f07f..f37381f534 100644 --- a/src/node/runtime/DevcontainerRuntime.ts +++ b/src/node/runtime/DevcontainerRuntime.ts @@ -254,7 +254,7 @@ export class DevcontainerRuntime extends LocalBaseRuntime { } else { controller.error( new RuntimeError( - `Failed to read file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to read file ${filePath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? err : undefined ) diff --git a/src/node/runtime/LocalBaseRuntime.ts b/src/node/runtime/LocalBaseRuntime.ts index 6cce15a2c0..2079f41bc6 100644 --- a/src/node/runtime/LocalBaseRuntime.ts +++ b/src/node/runtime/LocalBaseRuntime.ts @@ -26,6 +26,7 @@ import { EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT } from "@/common/constants/exitCod import { DisposableProcess, killProcessTree } from "@/node/utils/disposableExec"; import { expandTilde } from "./tildeExpansion"; import { getInitHookPath, createLineBufferedLoggers } from "./initHook"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Abstract base class for local runtimes (both WorktreeRuntime and LocalRuntime). 
@@ -215,7 +216,7 @@ export abstract class LocalBaseRuntime implements Runtime { } catch (err) { controller.error( new RuntimeErrorClass( - `Failed to read file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to read file ${filePath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? err : undefined ) @@ -272,7 +273,7 @@ export abstract class LocalBaseRuntime implements Runtime { await fsPromises.rename(tempPath, resolvedPath); } catch (err) { throw new RuntimeErrorClass( - `Failed to write file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to write file ${filePath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? err : undefined ); @@ -307,7 +308,7 @@ export abstract class LocalBaseRuntime implements Runtime { }; } catch (err) { throw new RuntimeErrorClass( - `Failed to stat ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to stat ${filePath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? err : undefined ); @@ -320,7 +321,7 @@ export abstract class LocalBaseRuntime implements Runtime { await fsPromises.mkdir(expandedPath, { recursive: true }); } catch (err) { throw new RuntimeErrorClass( - `Failed to create directory ${dirPath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to create directory ${dirPath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? 
err : undefined ); diff --git a/src/node/runtime/RemoteRuntime.ts b/src/node/runtime/RemoteRuntime.ts index cf4fce98f3..aa5162cf97 100644 --- a/src/node/runtime/RemoteRuntime.ts +++ b/src/node/runtime/RemoteRuntime.ts @@ -36,6 +36,7 @@ import { attachStreamErrorHandler } from "@/node/utils/streamErrors"; import { NON_INTERACTIVE_ENV_VARS } from "@/common/constants/env"; import { DisposableProcess } from "@/node/utils/disposableExec"; import { streamToString, shescape } from "./streamUtils"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Result from spawning a remote process. @@ -324,7 +325,7 @@ export abstract class RemoteRuntime implements Runtime { } else { controller.error( new RuntimeError( - `Failed to read file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + `Failed to read file ${filePath}: ${getErrorMessage(err)}`, "file_io", err instanceof Error ? err : undefined ) diff --git a/src/node/runtime/ptySpawn.ts b/src/node/runtime/ptySpawn.ts index f13bb1b66c..8c425c9042 100644 --- a/src/node/runtime/ptySpawn.ts +++ b/src/node/runtime/ptySpawn.ts @@ -1,5 +1,6 @@ import type { IPty } from "node-pty"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; interface PtySpawnRequest { runtimeLabel: string; @@ -80,7 +81,7 @@ export function spawnPtyProcess(request: PtySpawnRequest): IPty { const printableArgs = request.args.length > 0 ? ` ${request.args.join(" ")}` : ""; const cmd = `${request.command}${printableArgs}`; const details = `cmd="${cmd}", cwd="${request.cwd}", platform="${process.platform}"`; - const errMessage = err instanceof Error ? 
err.message : String(err); + const errMessage = getErrorMessage(err); if (request.logLocalEnv) { log.error(`Local PTY spawn config: ${cmd} (cwd: ${request.cwd})`); diff --git a/src/node/runtime/runtimeFactory.ts b/src/node/runtime/runtimeFactory.ts index 02e7ed5e2f..71eba8e670 100644 --- a/src/node/runtime/runtimeFactory.ts +++ b/src/node/runtime/runtimeFactory.ts @@ -16,6 +16,7 @@ import type { CoderService } from "@/node/services/coderService"; import { Config } from "@/node/config"; import { checkDevcontainerCliVersion } from "./devcontainerCli"; import { buildDevcontainerConfigInfo, scanDevcontainerConfigs } from "./devcontainerConfigs"; +import { getErrorMessage } from "@/common/utils/errors"; // Re-export for backward compatibility with existing imports export { isIncompatibleRuntimeConfig }; @@ -62,7 +63,7 @@ export function runBackgroundInit( try { await runFullInit(runtime, params); } catch (error: unknown) { - const errorMsg = error instanceof Error ? error.message : String(error); + const errorMsg = getErrorMessage(error); logger?.error(`Workspace init failed for ${workspaceId}:`, { error }); params.initLogger.logStderr(`Initialization failed: ${errorMsg}`); params.initLogger.logComplete(-1); diff --git a/src/node/runtime/sshConfigParser.ts b/src/node/runtime/sshConfigParser.ts index 8b9f415602..5b97ff5db4 100644 --- a/src/node/runtime/sshConfigParser.ts +++ b/src/node/runtime/sshConfigParser.ts @@ -8,6 +8,7 @@ import * as os from "os"; import * as path from "path"; import SSHConfig, { glob } from "ssh-config"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; interface ParsedValueToken { val: string; @@ -259,7 +260,7 @@ async function loadSSHConfig(): Promise { if ((error as NodeJS.ErrnoException | undefined)?.code !== "ENOENT") { log.debug("Failed to read SSH config", { configPath, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } return null; diff --git a/src/node/runtime/transports/SSH2Transport.ts b/src/node/runtime/transports/SSH2Transport.ts index ec51e302c4..c2b100490f 100644 --- a/src/node/runtime/transports/SSH2Transport.ts +++ b/src/node/runtime/transports/SSH2Transport.ts @@ -168,7 +168,7 @@ class SSH2Pty implements PtyHandle { return; } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); const code = error && typeof error === "object" && "code" in error && typeof error.code === "string" ? error.code diff --git a/src/node/services/agentDefinitions/agentDefinitionsService.ts b/src/node/services/agentDefinitions/agentDefinitionsService.ts index abb5b71b99..f3bd1ca6d1 100644 --- a/src/node/services/agentDefinitions/agentDefinitionsService.ts +++ b/src/node/services/agentDefinitions/agentDefinitionsService.ts @@ -26,6 +26,7 @@ import { AgentDefinitionParseError, parseAgentDefinitionMarkdown, } from "./parseAgentDefinitionMarkdown"; +import { getErrorMessage } from "@/common/utils/errors"; export const MAX_INHERITANCE_DEPTH = 10; @@ -104,7 +105,7 @@ export function getDefaultAgentDefinitionsRoots( } function formatError(error: unknown): string { - return error instanceof Error ? 
error.message : String(error); + return getErrorMessage(error); } async function listAgentFilesFromLocalFs(root: string): Promise { diff --git a/src/node/services/agentDefinitions/parseAgentDefinitionMarkdown.ts b/src/node/services/agentDefinitions/parseAgentDefinitionMarkdown.ts index a4686ed557..668d4b290f 100644 --- a/src/node/services/agentDefinitions/parseAgentDefinitionMarkdown.ts +++ b/src/node/services/agentDefinitions/parseAgentDefinitionMarkdown.ts @@ -2,6 +2,7 @@ import { AgentDefinitionFrontmatterSchema } from "@/common/orpc/schemas"; import type { AgentDefinitionFrontmatter } from "@/common/types/agentDefinition"; import { MAX_FILE_SIZE } from "@/node/services/tools/fileCommon"; import YAML from "yaml"; +import { getErrorMessage } from "@/common/utils/errors"; export class AgentDefinitionParseError extends Error { constructor(message: string) { @@ -89,7 +90,7 @@ export function parseAgentDefinitionMarkdown(input: { try { raw = YAML.parse(yamlText); } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); throw new AgentDefinitionParseError(`Failed to parse YAML frontmatter: ${message}`); } diff --git a/src/node/services/agentDefinitions/resolveAgentInheritanceChain.ts b/src/node/services/agentDefinitions/resolveAgentInheritanceChain.ts index 683b9e4d6d..618468446f 100644 --- a/src/node/services/agentDefinitions/resolveAgentInheritanceChain.ts +++ b/src/node/services/agentDefinitions/resolveAgentInheritanceChain.ts @@ -9,6 +9,7 @@ import { MAX_INHERITANCE_DEPTH, readAgentDefinition, } from "./agentDefinitionsService"; +import { getErrorMessage } from "@/common/utils/errors"; export interface AgentForInheritance { id: AgentId; @@ -84,7 +85,7 @@ export async function resolveAgentInheritanceChain( workspaceId, agentId, baseId, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); break; } diff --git a/src/node/services/agentResolution.ts b/src/node/services/agentResolution.ts index 31d540a583..466fd55d44 100644 --- a/src/node/services/agentResolution.ts +++ b/src/node/services/agentResolution.ts @@ -39,6 +39,7 @@ import { createAssistantMessageId } from "./utils/messageIds"; import { createErrorEvent } from "./utils/sendMessageError"; import { getTaskDepthFromConfig } from "./taskUtils"; import { log } from "./log"; +import { getErrorMessage } from "@/common/utils/errors"; /** Options for agent resolution. */ export interface ResolveAgentOptions { @@ -141,7 +142,7 @@ export async function resolveAgentForStream( effectiveAgentId, agentDiscoveryPath, disableWorkspaceAgents, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); agentDefinition = await readAgentDefinition(runtime, agentDiscoveryPath, "exec"); } @@ -193,7 +194,7 @@ export async function resolveAgentForStream( // Best-effort only — do not fail a stream due to disablement resolution. workspaceLog.debug("Failed to resolve agent enablement; continuing", { agentId: agentDefinition.id, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index 6dd4954882..6a9b9ce806 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -74,6 +74,7 @@ import { getModelCapabilities } from "@/common/utils/ai/modelCapabilities"; import { normalizeGatewayModel, isValidModelFormat } from "@/common/utils/ai/models"; import { readAgentSkill } from "@/node/services/agentSkills/agentSkillsService"; import { materializeFileAtMentions } from "@/node/services/fileAtMentions"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Tracked file state for detecting external edits. 
@@ -719,9 +720,7 @@ export class AgentSession { options?.disableWorkspaceAgents ); } catch (error) { - return Err( - createUnknownSendMessageError(error instanceof Error ? error.message : String(error)) - ); + return Err(createUnknownSendMessageError(getErrorMessage(error))); } // Persist snapshots (if any) BEFORE the user message so they precede it in the prompt. @@ -1236,7 +1235,7 @@ export class AgentSession { } catch (error) { log.warn("Failed to discard pending post-compaction state", { workspaceId: this.workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -1431,7 +1430,7 @@ export class AgentSession { } catch (error) { log.warn("Failed to discard pending post-compaction state before hard restart", { workspaceId: this.workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -1691,7 +1690,7 @@ export class AgentSession { } catch (error) { log.warn("Failed to ack pending post-compaction state", { workspaceId: this.workspaceId, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } this.onPostCompactionStateChange?.(); diff --git a/src/node/services/agentSkills/agentSkillsService.ts b/src/node/services/agentSkills/agentSkillsService.ts index 346df7c11c..fc7e802453 100644 --- a/src/node/services/agentSkills/agentSkillsService.ts +++ b/src/node/services/agentSkills/agentSkillsService.ts @@ -22,6 +22,7 @@ import { log } from "@/node/services/log"; import { validateFileSize } from "@/node/services/tools/fileCommon"; import { AgentSkillParseError, parseSkillMarkdown } from "./parseSkillMarkdown"; import { getBuiltInSkillByName, getBuiltInSkillDescriptors } from "./builtInSkillDefinitions"; +import { getErrorMessage } from "@/common/utils/errors"; const GLOBAL_SKILLS_ROOT = "~/.mux/skills"; @@ -45,7 +46,7 @@ export function getDefaultAgentSkillsRoots( } function formatError(error: unknown): string { - return error instanceof Error ? error.message : String(error); + return getErrorMessage(error); } async function listSkillDirectoriesFromLocalFs(root: string): Promise { diff --git a/src/node/services/agentSkills/parseSkillMarkdown.ts b/src/node/services/agentSkills/parseSkillMarkdown.ts index bab6357883..d2a6f3b00a 100644 --- a/src/node/services/agentSkills/parseSkillMarkdown.ts +++ b/src/node/services/agentSkills/parseSkillMarkdown.ts @@ -2,6 +2,7 @@ import { AgentSkillFrontmatterSchema } from "@/common/orpc/schemas"; import type { AgentSkillFrontmatter, SkillName } from "@/common/types/agentSkill"; import { MAX_FILE_SIZE } from "@/node/services/tools/fileCommon"; import YAML from "yaml"; +import { getErrorMessage } from "@/common/utils/errors"; export class AgentSkillParseError extends Error { constructor(message: string) { @@ -85,7 +86,7 @@ export function parseSkillMarkdown(input: { try { raw = YAML.parse(yamlText); } catch (err) { - const message = err instanceof Error ? 
err.message : String(err); + const message = getErrorMessage(err); throw new AgentSkillParseError(`Failed to parse SKILL.md YAML frontmatter: ${message}`); } diff --git a/src/node/services/aiService.ts b/src/node/services/aiService.ts index 6cfbaaa5b7..05322b7bcf 100644 --- a/src/node/services/aiService.ts +++ b/src/node/services/aiService.ts @@ -67,6 +67,7 @@ import { type SimulationContext, } from "./streamSimulation"; import { applyToolPolicyAndExperiments, captureMcpToolTelemetry } from "./toolAssembly"; +import { getErrorMessage } from "@/common/utils/errors"; // --------------------------------------------------------------------------- // streamMessage options @@ -234,7 +235,7 @@ export class AIService extends EventEmitter { } } } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); + const errMsg = getErrorMessage(error); log.warn("Failed to capture debug LLM response snapshot", { error: errMsg }); } @@ -303,7 +304,7 @@ export class AIService extends EventEmitter { return Ok(metadata); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to read workspace metadata: ${message}`); } } @@ -886,7 +887,7 @@ export class AIService extends EventEmitter { try { this.lastLlmRequestByWorkspace.set(workspaceId, safeClone(snapshot)); } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); + const errMsg = getErrorMessage(error); workspaceLog.warn("Failed to capture debug LLM request snapshot", { error: errMsg }); } const toolsForStream = @@ -956,7 +957,7 @@ export class AIService extends EventEmitter { // No need for event listener here return Ok(undefined); } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error); + const errorMessage = getErrorMessage(error); log.error("Stream message error:", error); // Return as unknown error type return Err({ type: "unknown", raw: `Failed to stream message: ${errorMessage}` }); @@ -1104,7 +1105,7 @@ export class AIService extends EventEmitter { await fs.rm(workspaceDir, { recursive: true, force: true }); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to delete workspace: ${message}`); } } diff --git a/src/node/services/backgroundProcessExecutor.ts b/src/node/services/backgroundProcessExecutor.ts index 9262617087..1d83cf85db 100644 --- a/src/node/services/backgroundProcessExecutor.ts +++ b/src/node/services/backgroundProcessExecutor.ts @@ -27,6 +27,7 @@ import { import { execBuffered, writeFileString } from "@/node/utils/runtime/helpers"; import { NON_INTERACTIVE_ENV_VARS } from "@/common/constants/env"; import { toPosixPath } from "@/node/utils/paths"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Quote a path for shell commands. @@ -47,7 +48,7 @@ const FALLBACK_CWD = process.platform === "win32" ? (process.env.TEMP ?? "C:\\") /** Helper to extract error message for logging */ function errorMsg(error: unknown): string { - return error instanceof Error ? error.message : String(error); + return getErrorMessage(error); } /** Subdirectory under temp for background process output */ diff --git a/src/node/services/backgroundProcessManager.ts b/src/node/services/backgroundProcessManager.ts index ec4a2118e1..fdf4c5556c 100644 --- a/src/node/services/backgroundProcessManager.ts +++ b/src/node/services/backgroundProcessManager.ts @@ -883,7 +883,7 @@ export class BackgroundProcessManager extends EventEmitter {}; @@ -662,9 +663,7 @@ describe("CoderService", () => { } expect(thrown).toBeTruthy(); - expect(thrown instanceof Error ? 
thrown.message : String(thrown)).toBe( - "coder ssh --wait failed (exit 1): Connection refused" - ); + expect(getErrorMessage(thrown)).toBe("coder ssh --wait failed (exit 1): Connection refused"); }); }); @@ -1013,9 +1012,7 @@ describe("CoderService", () => { } expect(thrown).toBeTruthy(); - expect(thrown instanceof Error ? thrown.message : String(thrown)).toContain( - "coder create failed (exit 42)" - ); + expect(getErrorMessage(thrown)).toContain("coder create failed (exit 42)"); }); it("aborts before spawn when already aborted", async () => { @@ -1037,7 +1034,7 @@ describe("CoderService", () => { } expect(thrown).toBeTruthy(); - expect(thrown instanceof Error ? thrown.message : String(thrown)).toContain("aborted"); + expect(getErrorMessage(thrown)).toContain("aborted"); }); it("throws when required param has no default and is not covered by preset", async () => { @@ -1054,7 +1051,7 @@ describe("CoderService", () => { } expect(thrown).toBeTruthy(); - expect(thrown instanceof Error ? thrown.message : String(thrown)).toContain("required-param"); + expect(getErrorMessage(thrown)).toContain("required-param"); }); }); }); diff --git a/src/node/services/coderService.ts b/src/node/services/coderService.ts index 21adda798e..5743f7e2b1 100644 --- a/src/node/services/coderService.ts +++ b/src/node/services/coderService.ts @@ -20,6 +20,7 @@ import { type CoderWorkspace, type CoderWorkspaceStatus, } from "@/common/orpc/schemas/coder"; +import { getErrorMessage } from "@/common/utils/errors"; // Re-export types for consumers that import from this module @@ -1125,7 +1126,7 @@ export class CoderService { return { kind: "ok", status: parsed.data }; } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); log.debug("Failed to get Coder workspace status", { workspaceName, error: message }); return { kind: "error", error: message }; } @@ -1155,7 +1156,7 @@ export class CoderService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(message); } } @@ -1184,7 +1185,7 @@ export class CoderService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(message); } } diff --git a/src/node/services/codexOauthService.ts b/src/node/services/codexOauthService.ts index a0e2fcebc3..98e623e614 100644 --- a/src/node/services/codexOauthService.ts +++ b/src/node/services/codexOauthService.ts @@ -24,6 +24,7 @@ import { parseCodexOauthAuth, type CodexOauthAuth, } from "@/node/utils/codexOauthAuth"; +import { getErrorMessage } from "@/common/utils/errors"; const DEFAULT_DESKTOP_TIMEOUT_MS = 5 * 60 * 1000; const DEFAULT_DEVICE_TIMEOUT_MS = 15 * 60 * 1000; @@ -208,7 +209,7 @@ export class CodexOauthService { server.listen(1455, "localhost", () => resolve()); }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to start OAuth callback listener: ${message}`); } @@ -346,7 +347,7 @@ export class CodexOauthService { this.pollDeviceFlow(flowId).catch((error) => { // The polling loop is responsible for resolving the flow; if we reach // here something unexpected happened. - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); log.warn(`[Codex OAuth] Device polling crashed (flowId=${flowId}): ${message}`); void this.finishDeviceFlow(flowId, Err(`Device polling crashed: ${message}`)); }); @@ -613,7 +614,7 @@ export class CodexOauthService { accountId, }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Codex OAuth exchange failed: ${message}`); } } @@ -681,7 +682,7 @@ export class CodexOauthService { return Ok(next); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Codex OAuth refresh failed: ${message}`); } } @@ -732,7 +733,7 @@ export class CodexOauthService { return Ok({ deviceAuthId, userCode, intervalSeconds, expiresAtMs }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Codex OAuth device auth request failed: ${message}`); } } @@ -840,7 +841,7 @@ export class CodexOauthService { return { kind: "fatal", message: "OAuth flow cancelled" }; } - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { kind: "fatal", message: `Device authorization failed: ${message}` }; } } diff --git a/src/node/services/compactionHandler.ts b/src/node/services/compactionHandler.ts index 39e3c2dbde..099330e3d7 100644 --- a/src/node/services/compactionHandler.ts +++ b/src/node/services/compactionHandler.ts @@ -29,6 +29,7 @@ import { extractEditedFileDiffs, type FileEditDiff, } from "@/common/utils/messages/extractEditedFiles"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Check if a string is just a raw JSON object, which suggests the model @@ -289,7 +290,7 @@ export class CompactionHandler { } catch (error) { log.warn("Failed to persist post-compaction state", { workspaceId: this.workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } diff --git a/src/node/services/copilotOauthService.ts b/src/node/services/copilotOauthService.ts index 1b95da5817..b7ae136649 100644 --- a/src/node/services/copilotOauthService.ts +++ b/src/node/services/copilotOauthService.ts @@ -4,6 +4,7 @@ import { Err, Ok } from "@/common/types/result"; import type { ProviderService } from "@/node/services/providerService"; import type { WindowService } from "@/node/services/windowService"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; const GITHUB_COPILOT_CLIENT_ID = "Ov23liCVKFN3jOo9R7HS"; const SCOPE = "read:user"; @@ -101,7 +102,7 @@ export class CopilotOauthService { userCode: data.user_code, }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to start device flow: ${message}`); } } @@ -247,7 +248,7 @@ export class CopilotOauthService { } } catch (error) { if (flow.cancelled) return; - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); log.warn(`Copilot OAuth polling error (will retry): ${message}`); // Transient errors — fall through to sleep, then retry } diff --git a/src/node/services/editorService.ts b/src/node/services/editorService.ts index e73c16ec20..ffb95fe989 100644 --- a/src/node/services/editorService.ts +++ b/src/node/services/editorService.ts @@ -3,6 +3,7 @@ import * as fsPromises from "fs/promises"; import type { Config } from "@/node/config"; import { isDockerRuntime, isSSHRuntime, isDevcontainerRuntime } from "@/common/types/runtime"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Quote a string for safe use in shell commands. @@ -140,7 +141,7 @@ export class EditorService { return { success: true, data: undefined }; } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); log.error(`Failed to open in editor: ${message}`); return { success: false, error: message }; } diff --git a/src/node/services/experimentsService.ts b/src/node/services/experimentsService.ts index 9e52b7c116..15b03352fd 100644 --- a/src/node/services/experimentsService.ts +++ b/src/node/services/experimentsService.ts @@ -8,6 +8,7 @@ import type { TelemetryService } from "@/node/services/telemetryService"; import * as fs from "fs/promises"; import writeFileAtomic from "write-file-atomic"; import * as path from "path"; +import { getErrorMessage } from "@/common/utils/errors"; export type { ExperimentValue }; @@ -209,7 +210,7 @@ export class ExperimentsService { } catch (error) { log.debug("Failed to refresh experiment from PostHog", { experimentId, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } } diff --git a/src/node/services/gitPatchArtifactService.ts b/src/node/services/gitPatchArtifactService.ts index c55620995e..96560dba79 100644 --- a/src/node/services/gitPatchArtifactService.ts +++ b/src/node/services/gitPatchArtifactService.ts @@ -21,6 +21,7 @@ import { } from "@/node/services/subagentGitPatchArtifacts"; import { shellQuote } from "@/common/utils/shell"; import { streamToString } from "@/node/runtime/streamUtils"; +import { getErrorMessage } from "@/common/utils/errors"; /** Callback invoked after patch generation completes (success or failure). */ export type OnPatchGenerationComplete = (childWorkspaceId: string) => Promise; @@ -221,7 +222,7 @@ export class GitPatchArtifactService { createdAtMs: existing?.createdAtMs ?? failedAtMs, updatedAtMs: failedAtMs, status: "failed", - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }; }, }); @@ -255,7 +256,7 @@ export class GitPatchArtifactService { createdAtMs: existing?.createdAtMs ?? failedAtMs, updatedAtMs: failedAtMs, status: "failed", - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }; }, }); @@ -519,7 +520,7 @@ export class GitPatchArtifactService { createdAtMs: existing?.createdAtMs ?? nowMs, updatedAtMs: Date.now(), status: "failed", - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), })); } finally { // Unblock auto-cleanup once the patch generation attempt has finished. 
diff --git a/src/node/services/historyService.ts b/src/node/services/historyService.ts index 02ff68e7ef..94e002e0dc 100644 --- a/src/node/services/historyService.ts +++ b/src/node/services/historyService.ts @@ -11,6 +11,7 @@ import { getTokenizerForModel } from "@/node/utils/main/tokenizer"; import { KNOWN_MODELS } from "@/common/constants/knownModels"; import { safeStringifyForCounting } from "@/common/utils/tokens/safeStringifyForCounting"; import { normalizeLegacyMuxMetadata } from "@/node/utils/messages/legacy"; +import { getErrorMessage } from "@/common/utils/errors"; /** * HistoryService - Manages chat history persistence and sequence numbering @@ -57,7 +58,7 @@ export class HistoryService { // Skip malformed lines but log error for debugging log.warn( `Skipping malformed JSON at line ${i + 1} in ${workspaceId}/chat.jsonl:`, - parseError instanceof Error ? parseError.message : String(parseError), + getErrorMessage(parseError), "\nLine content:", lines[i].substring(0, 100) + (lines[i].length > 100 ? "..." : "") ); @@ -80,7 +81,7 @@ export class HistoryService { const messages = await this.readChatHistory(workspaceId); return Ok(messages); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to read history: ${message}`); } } @@ -178,7 +179,7 @@ export class HistoryService { await fs.appendFile(historyPath, JSON.stringify(historyEntry) + "\n"); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to append to history: ${message}`); } } @@ -241,7 +242,7 @@ export class HistoryService { await writeFileAtomic(historyPath, historyEntries); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to update history: ${message}`); } }); @@ -290,7 +291,7 @@ export class HistoryService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to delete message: ${message}`); } }); @@ -338,7 +339,7 @@ export class HistoryService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to truncate history: ${message}`); } }); @@ -466,7 +467,7 @@ export class HistoryService { return Ok(deletedSequences); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to truncate history: ${message}`); } }); @@ -523,7 +524,7 @@ export class HistoryService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to migrate workspace ID: ${message}`); } }); diff --git a/src/node/services/hooks.ts b/src/node/services/hooks.ts index b17910ff10..c3cc13154b 100644 --- a/src/node/services/hooks.ts +++ b/src/node/services/hooks.ts @@ -31,6 +31,7 @@ import { flattenToolHookValueToEnv } from "@/common/utils/tools/toolHookEnv"; import type { Runtime } from "@/node/runtime/Runtime"; import { log } from "@/node/services/log"; import { execBuffered, writeFileString } from "@/node/utils/runtime/helpers"; +import { getErrorMessage } from "@/common/utils/errors"; const HOOK_FILENAME = "tool_hook"; const PRE_HOOK_FILENAME = "tool_pre"; @@ -380,7 +381,7 @@ export async function runWithHook( success: false, stdoutBeforeExec: "", stdout: "", - stderr: `Failed to execute hook: ${err instanceof Error ? 
err.message : String(err)}`, + stderr: `Failed to execute hook: ${getErrorMessage(err)}`, exitCode: -1, toolExecuted: false, }, @@ -683,7 +684,7 @@ export async function runPreHook( log.error("[hooks] Pre-hook execution failed", { hookPath, error: err }); return { allowed: false, - output: `Pre-hook failed: ${err instanceof Error ? err.message : String(err)}`, + output: `Pre-hook failed: ${getErrorMessage(err)}`, exitCode: -1, }; } finally { @@ -796,7 +797,7 @@ export async function runPostHook( log.error("[hooks] Post-hook execution failed", { hookPath, error: err }); return { success: false, - output: `Post-hook failed: ${err instanceof Error ? err.message : String(err)}`, + output: `Post-hook failed: ${getErrorMessage(err)}`, exitCode: -1, }; } finally { diff --git a/src/node/services/initStateManager.ts b/src/node/services/initStateManager.ts index 65110177ae..dd3ef8b883 100644 --- a/src/node/services/initStateManager.ts +++ b/src/node/services/initStateManager.ts @@ -4,6 +4,7 @@ import { EventStore } from "@/node/utils/eventStore"; import type { WorkspaceInitEvent } from "@/common/orpc/types"; import { log } from "@/node/services/log"; import { INIT_HOOK_MAX_LINES } from "@/common/constants/toolLimits"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Output line with timestamp for replay timing. @@ -493,7 +494,7 @@ export class InitStateManager extends EventEmitter { } catch (error) { // Init promise was rejected (e.g., workspace deleted) // Log and proceed anyway - let the tool fail with its own error if needed - const errorMsg = error instanceof Error ? 
error.message : String(error); + const errorMsg = getErrorMessage(error); log.error(`Init wait interrupted for ${workspaceId}: ${errorMsg} - proceeding anyway`); } finally { // Clean up timeout to prevent spurious error logs diff --git a/src/node/services/mcpConfigService.ts b/src/node/services/mcpConfigService.ts index 2e9cd47722..872e85f67d 100644 --- a/src/node/services/mcpConfigService.ts +++ b/src/node/services/mcpConfigService.ts @@ -13,6 +13,7 @@ import type { Result } from "@/common/types/result"; import assert from "@/common/utils/assert"; import type { Config } from "@/node/config"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; export class MCPConfigService { private readonly config: Config; @@ -288,7 +289,7 @@ export class MCPConfigService { return Ok(undefined); } catch (error) { log.error("Failed to save MCP server", { name, error }); - return Err(error instanceof Error ? error.message : String(error)); + return Err(getErrorMessage(error)); } } @@ -304,7 +305,7 @@ export class MCPConfigService { return Ok(undefined); } catch (error) { log.error("Failed to update MCP server enabled state", { name, error }); - return Err(error instanceof Error ? error.message : String(error)); + return Err(getErrorMessage(error)); } } @@ -319,7 +320,7 @@ export class MCPConfigService { return Ok(undefined); } catch (error) { log.error("Failed to remove MCP server", { name, error }); - return Err(error instanceof Error ? error.message : String(error)); + return Err(getErrorMessage(error)); } } @@ -341,7 +342,7 @@ export class MCPConfigService { return Ok(undefined); } catch (error) { log.error("Failed to update MCP server tool allowlist", { name, error }); - return Err(error instanceof Error ? 
error.message : String(error)); + return Err(getErrorMessage(error)); } } } diff --git a/src/node/services/mcpOauthService.ts b/src/node/services/mcpOauthService.ts index e7105f23a3..c8e656b6be 100644 --- a/src/node/services/mcpOauthService.ts +++ b/src/node/services/mcpOauthService.ts @@ -21,6 +21,7 @@ import type { } from "@/common/types/mcpOauth"; import { stripTrailingSlashes } from "@/node/utils/pathUtils"; import { MutexMap } from "@/node/utils/concurrency/mutexMap"; +import { getErrorMessage } from "@/common/utils/errors"; const DEFAULT_DESKTOP_TIMEOUT_MS = 5 * 60 * 1000; const DEFAULT_SERVER_TIMEOUT_MS = 10 * 60 * 1000; @@ -495,7 +496,7 @@ export class McpOauthService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(message); } } @@ -665,7 +666,7 @@ export class McpOauthService { serverListener.listen(0, "127.0.0.1", () => resolve()); }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to start OAuth callback listener: ${message}`); } @@ -741,7 +742,7 @@ export class McpOauthService { return Ok({ flowId, authorizeUrl: flow.authorizeUrl, redirectUri }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); await this.finishDesktopFlow(flowId, Err(message)); return Err(message); } @@ -879,7 +880,7 @@ export class McpOauthService { return Ok({ flowId, authorizeUrl: flow.authorizeUrl, redirectUri: flow.redirectUri }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); await this.finishServerFlow(flowId, Err(message)); return Err(message); } @@ -1192,7 +1193,7 @@ export class McpOauthService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(message); } } diff --git a/src/node/services/mcpServerManager.ts b/src/node/services/mcpServerManager.ts index 622dbd4c0c..8cce6929fb 100644 --- a/src/node/services/mcpServerManager.ts +++ b/src/node/services/mcpServerManager.ts @@ -18,6 +18,7 @@ import { parseBearerWwwAuthenticate, type McpOauthService } from "@/node/service import { createRuntime } from "@/node/runtime/runtimeFactory"; import { transformMCPResult, type MCPCallToolResult } from "@/node/services/mcpResultTransform"; import { buildMcpToolName } from "@/common/utils/tools/mcpToolName"; +import { getErrorMessage } from "@/common/utils/errors"; const TEST_TIMEOUT_MS = 10_000; const IDLE_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes @@ -373,7 +374,7 @@ async function runServerTest( log.info(`[MCP] ${logContext} test successful`, { toolCount: toolNames.length }); return { success: true, tools: toolNames }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); log.warn(`[MCP] ${logContext} test failed`, { error: message }); if (client) { @@ -1006,7 +1007,7 @@ export class MCPServerManager { `server "${trimmedName}"` ); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } } @@ -1049,7 +1050,7 @@ export class MCPServerManager { trimmedName ? `server "${trimmedName}" (url)` : "url" ); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: message }; } } @@ -1161,7 +1162,7 @@ export class MCPServerManager { result.set(name, instance); } } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); log.error("Failed to start MCP server", { name, error: message }); } } diff --git a/src/node/services/mock/mockAiStreamPlayer.ts b/src/node/services/mock/mockAiStreamPlayer.ts index bee1462940..d9ee41369e 100644 --- a/src/node/services/mock/mockAiStreamPlayer.ts +++ b/src/node/services/mock/mockAiStreamPlayer.ts @@ -25,6 +25,7 @@ import type { ToolCallStartEvent, ToolCallEndEvent } from "@/common/types/stream import type { ReasoningDeltaEvent } from "@/common/types/stream"; import { getTokenizerForModel } from "@/node/utils/main/tokenizer"; import { KNOWN_MODELS } from "@/common/constants/knownModels"; +import { getErrorMessage } from "@/common/utils/errors"; const MOCK_TOKENIZER_MODEL = KNOWN_MODELS.GPT.id; const TOKENIZE_TIMEOUT_MS = 150; @@ -71,7 +72,7 @@ async function tokenizeWithMockModel(text: string, context: string): Promise resolve()); }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to start OAuth callback listener: ${message}`); } @@ -402,7 +403,7 @@ export class MuxGatewayOauthService { return Ok(token); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Mux Gateway exchange failed: ${message}`); } } diff --git a/src/node/services/muxGovernorOauthService.ts b/src/node/services/muxGovernorOauthService.ts index 95282cd47b..2ff72342f6 100644 --- a/src/node/services/muxGovernorOauthService.ts +++ b/src/node/services/muxGovernorOauthService.ts @@ -20,6 +20,7 @@ import type { Config } from "@/node/config"; import type { PolicyService } from "@/node/services/policyService"; import type { WindowService } from "@/node/services/windowService"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; const DEFAULT_DESKTOP_TIMEOUT_MS = 5 * 60 * 1000; const DEFAULT_SERVER_TIMEOUT_MS = 10 * 60 * 1000; @@ -76,7 +77,7 @@ export class MuxGovernorOauthService { try { governorOrigin = normalizeGovernorUrl(input.governorOrigin); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Invalid Governor URL: ${message}`); } @@ -123,7 +124,7 @@ export class MuxGovernorOauthService { server.listen(0, "127.0.0.1", () => resolve()); }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to start OAuth callback listener: ${message}`); } @@ -212,7 +213,7 @@ export class MuxGovernorOauthService { try { governorOrigin = normalizeGovernorUrl(input.governorOrigin); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Invalid Governor URL: ${message}`); } @@ -397,7 +398,7 @@ export class MuxGovernorOauthService { muxGovernorToken: tokenResult.data, })); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to save Governor credentials: ${message}`); } @@ -443,7 +444,7 @@ export class MuxGovernorOauthService { return Ok(token); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Mux Governor exchange failed: ${message}`); } } diff --git a/src/node/services/partialService.ts b/src/node/services/partialService.ts index 4e595364e4..43d77d02de 100644 --- a/src/node/services/partialService.ts +++ b/src/node/services/partialService.ts @@ -9,6 +9,7 @@ import type { HistoryService } from "./historyService"; import { workspaceFileLocks } from "@/node/utils/concurrency/workspaceFileLocks"; import { normalizeLegacyMuxMetadata } from "@/node/utils/messages/legacy"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; /** * PartialService - Manages partial message persistence for interrupted streams @@ -87,7 +88,7 @@ export class PartialService { await writeFileAtomic(partialPath, JSON.stringify(partialMessage, null, 2)); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to write partial: ${message}`); } }); @@ -106,7 +107,7 @@ export class PartialService { if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") { return Ok(undefined); // Already deleted } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to delete partial: ${message}`); } }); @@ -205,7 +206,7 @@ export class PartialService { // Delete partial.json after successful commit (or if already finalized) return await this.deletePartial(workspaceId); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to commit partial: ${message}`); } } diff --git a/src/node/services/policyService.ts b/src/node/services/policyService.ts index cde063da90..b0d0e67d2d 100644 --- a/src/node/services/policyService.ts +++ b/src/node/services/policyService.ts @@ -16,6 +16,7 @@ import type { MCPServerTransport } from "@/common/types/mcp"; import { compareVersions } from "@/node/services/coderService"; import packageJson from "../../../package.json"; +import { getErrorMessage } from "@/common/utils/errors"; const POLICY_FETCH_TIMEOUT_MS = 10 * 1000; const POLICY_MAX_BYTES = 1024 * 1024; @@ -89,7 +90,7 @@ async function loadPolicyText(source: string): Promise { try { return await readFile(source, "utf8"); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); throw new Error(`Failed to read policy file: ${message}`); } } @@ -117,7 +118,7 @@ async function loadPolicyText(source: string): Promise { return text; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); throw new Error(`Failed to fetch policy URL (${formatPolicySourceForLog(source)}): ${message}`); } finally { clearTimeout(timeout); @@ -154,7 +155,7 @@ async function loadGovernorPolicyText(input: { return text; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); throw new Error( `Failed to fetch Governor policy (${formatPolicySourceForLog(policyUrl)}): ${message}` ); @@ -459,7 +460,7 @@ export class PolicyService { this.updateState({ source: schemaSource, status: { state: "enforced" }, policy: effective }); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); // Fail closed on startup, or if there's no existing enforced policy (e.g., first fetch // after enrollment). This ensures enrollment can't silently bypass policy on a bad first fetch. diff --git a/src/node/services/projectService.ts b/src/node/services/projectService.ts index b9f7eea30d..7d3347e766 100644 --- a/src/node/services/projectService.ts +++ b/src/node/services/projectService.ts @@ -24,6 +24,7 @@ import type { FileTreeNode } from "@/common/utils/git/numstatParser"; import * as path from "path"; import { getMuxProjectsDir } from "@/common/constants/paths"; import { expandTilde } from "@/node/runtime/tildeExpansion"; +import { getErrorMessage } from "@/common/utils/errors"; /** * List directory contents for the DirectoryPickerModal. @@ -146,7 +147,7 @@ export class ProjectService { return Ok({ projectConfig, normalizedPath }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to create project: ${message}`); } } @@ -177,7 +178,7 @@ export class ProjectService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to remove project: ${message}`); } } @@ -269,7 +270,7 @@ export class ProjectService { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); log.error("Failed to initialize git repository:", error); return Err(`Failed to initialize git repository: ${message}`); } @@ -326,7 +327,7 @@ export class ProjectService { } catch (error) { log.debug("getFileCompletions: failed to list files", { projectPath: normalizedPath, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } finally { cacheEntry.fetchedAt = Date.now(); @@ -358,7 +359,7 @@ export class ProjectService { } catch (error) { return { success: false as const, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }; } } @@ -377,7 +378,7 @@ export class ProjectService { await fsPromises.mkdir(normalizedPath, { recursive: true }); return Ok({ normalizedPath }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to create directory: ${message}`); } } @@ -387,7 +388,7 @@ export class ProjectService { await this.config.updateProjectSecrets(projectPath, secrets); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to update project secrets: ${message}`); } } @@ -424,7 +425,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to set idle compaction hours: ${message}`); } } @@ -484,7 +485,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(section); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to create section: ${message}`); } } @@ -519,7 +520,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to update section: ${message}`); } } @@ -567,7 +568,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to remove section: ${message}`); } } @@ -603,7 +604,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to reorder sections: ${message}`); } } @@ -642,7 +643,7 @@ export class ProjectService { await this.config.saveConfig(config); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to assign workspace to section: ${message}`); } } diff --git a/src/node/services/providerModelFactory.ts b/src/node/services/providerModelFactory.ts index 4d0c001b0f..38c5885357 100644 --- a/src/node/services/providerModelFactory.ts +++ b/src/node/services/providerModelFactory.ts @@ -30,6 +30,7 @@ import { normalizeGatewayGenerateResult, } from "@/node/utils/gatewayStreamNormalization"; import { EnvHttpProxyAgent, type Dispatcher } from "undici"; +import { getErrorMessage } from "@/common/utils/errors"; // --------------------------------------------------------------------------- // Undici agent with unlimited timeouts for AI streaming requests. @@ -1127,7 +1128,7 @@ export class ProviderModelFactory { provider: providerName, }); } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error); + const errorMessage = getErrorMessage(error); return Err({ type: "unknown", raw: `Failed to create model: ${errorMessage}` }); } } diff --git a/src/node/services/providerService.ts b/src/node/services/providerService.ts index 33538f35af..8169d3f9b1 100644 --- a/src/node/services/providerService.ts +++ b/src/node/services/providerService.ts @@ -11,6 +11,7 @@ import { log } from "@/node/services/log"; import { checkProviderConfigured } from "@/node/utils/providerRequirements"; import { parseCodexOauthAuth } from "@/node/utils/codexOauthAuth"; import type { PolicyService } from "@/node/services/policyService"; +import { getErrorMessage } from "@/common/utils/errors"; // Re-export types for backward compatibility export type { AWSCredentialStatus, ProviderConfigInfo, ProvidersConfigMap }; @@ -195,7 +196,7 @@ export class ProviderService { return { success: true, data: undefined }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to set models: ${message}` }; } } @@ -253,7 +254,7 @@ export class ProviderService { return { success: true, data: undefined }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to set provider config: ${message}` }; } } @@ -327,7 +328,7 @@ export class ProviderService { return { success: true, data: undefined }; } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to set provider config: ${message}` }; } } diff --git a/src/node/services/ptc/quickjsRuntime.ts b/src/node/services/ptc/quickjsRuntime.ts index 0e5a981fa2..e44e8ae14c 100644 --- a/src/node/services/ptc/quickjsRuntime.ts +++ b/src/node/services/ptc/quickjsRuntime.ts @@ -15,6 +15,7 @@ import crypto from "crypto"; import type { IJSRuntime, IJSRuntimeFactory, RuntimeLimits } from "./runtime"; import type { PTCEvent, PTCExecutionResult, PTCToolCallRecord, PTCConsoleRecord } from "./types"; import { UNAVAILABLE_IDENTIFIERS } from "./staticAnalysis"; +import { getErrorMessage } from "@/common/utils/errors"; // Default limits const DEFAULT_MEMORY_BYTES = 64 * 1024 * 1024; // 64MB @@ -126,7 +127,7 @@ export class QuickJSRuntime implements IJSRuntime { } catch (error) { const endTime = Date.now(); const duration_ms = endTime - startTime; - const errorStr = error instanceof Error ? error.message : String(error); + const errorStr = getErrorMessage(error); // Record failed tool call this.toolCalls.push({ @@ -208,7 +209,7 @@ export class QuickJSRuntime implements IJSRuntime { } catch (error) { const endTime = Date.now(); const duration_ms = endTime - startTime; - const errorStr = error instanceof Error ? 
error.message : String(error); + const errorStr = getErrorMessage(error); this.toolCalls.push({ toolName: methodName, diff --git a/src/node/services/signingService.ts b/src/node/services/signingService.ts index 93886d0ef3..59192a2812 100644 --- a/src/node/services/signingService.ts +++ b/src/node/services/signingService.ts @@ -25,6 +25,7 @@ import { OpenSSHAgent, type KnownPublicKeys, type ParsedKey, type PublicKeyEntry import { getMuxHome } from "@/common/constants/paths"; import { execAsync } from "@/node/utils/disposableExec"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; interface KeyPair { privateKey: sshpk.PrivateKey; @@ -208,7 +209,7 @@ export class SigningService { log.info("[SigningService] Public key:", publicKeyOpenSSH.slice(0, 50) + "..."); return keyPair; } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); // Check for encrypted key if (message.includes("encrypted") || message.includes("passphrase")) { log.info( @@ -265,7 +266,7 @@ export class SigningService { muxKeyType: parsed.type, }); } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); log.debug("[SigningService] Skipping unsupported SSH agent key:", message); } } @@ -331,7 +332,7 @@ export class SigningService { try { candidates = await this.listSshAgentKeyCandidates(sshAuthSock); } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); log.info("[SigningService] Failed to query SSH agent:", message); if (hasOverride) { return { @@ -426,7 +427,7 @@ export class SigningService { this.signingKey = loaded; return loaded; } catch (err) { - const message = err instanceof Error ? 
err.message : String(err); + const message = getErrorMessage(err); log.warn("[SigningService] Unexpected key load error:", message); this.signingKey = null; this.keyLoadError = "Failed to load signing key"; @@ -543,7 +544,7 @@ export class SigningService { error = "Not logged in to GitHub CLI (run: gh auth login)"; } } catch (err) { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); if (message.includes("command not found") || message.includes("ENOENT")) { log.info("[SigningService] gh CLI not installed"); error = "GitHub CLI not installed (brew install gh)"; @@ -623,7 +624,7 @@ export class SigningService { // eslint-disable-next-line no-restricted-syntax -- not circular-dep hiding; startup resilience const sshAgentModule = await import("@coder/mux-md-client/ssh-agent").catch((err: unknown) => { - const message = err instanceof Error ? err.message : String(err); + const message = getErrorMessage(err); log.error("[SigningService] Failed to load ssh-agent signing module:", message); throw new Error( "SSH agent signing is unavailable — the @coder/mux-md-client/ssh-agent module failed to load. 
" + diff --git a/src/node/services/streamContextBuilder.ts b/src/node/services/streamContextBuilder.ts index 4f212c5b28..7f6528b70c 100644 --- a/src/node/services/streamContextBuilder.ts +++ b/src/node/services/streamContextBuilder.ts @@ -37,6 +37,7 @@ import { discoverAgentSkills } from "@/node/services/agentSkills/agentSkillsServ import { buildSystemMessage } from "./systemMessage"; import { getTokenizerForModel } from "@/node/utils/main/tokenizer"; import { log } from "./log"; +import { getErrorMessage } from "@/common/utils/errors"; // --------------------------------------------------------------------------- // Plan & Instructions Assembly @@ -179,7 +180,7 @@ export async function buildPlanInstructions( } catch (error) { workspaceLog.warn("Failed to resolve last agent definition for plan handoff", { lastAgentId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -277,7 +278,7 @@ export async function buildStreamSystemContext( } catch (error: unknown) { workspaceLog.debug("Failed to resolve agent frontmatter for subagent append_prompt", { agentId: agentDefinition.id, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } } diff --git a/src/node/services/streamManager.ts b/src/node/services/streamManager.ts index 525c7f7f6b..73e3e732e3 100644 --- a/src/node/services/streamManager.ts +++ b/src/node/services/streamManager.ts @@ -54,6 +54,7 @@ import { extractToolMediaAsUserMessagesFromModelMessages } from "@/node/utils/me import { normalizeGatewayModel } from "@/common/utils/ai/models"; import { MUX_GATEWAY_SESSION_EXPIRED_MESSAGE } from "@/common/constants/muxGatewayOAuth"; import { getModelStats } from "@/common/utils/tokens/modelStats"; +import { getErrorMessage } from "@/common/utils/errors"; // Disable AI SDK warning logging (e.g., "setting `toolChoice` to `none` is not supported") globalThis.AI_SDK_LOG_WARNINGS = false; @@ -426,7 +427,7 @@ export class StreamManager extends EventEmitter { try { await runtime.ensureDir(resolvedPath); } catch (err) { - const msg = err instanceof Error ? err.message : String(err); + const msg = getErrorMessage(err); throw new Error(`Failed to create temp directory ${resolvedPath}: ${msg}`); } @@ -1810,9 +1811,7 @@ export class StreamManager extends EventEmitter { ): StreamErrorPayload & { errorType: StreamErrorType } { // Extract error message (errors thrown from 'error' parts already have the correct message) // Apply prefix stripping to remove noisy "undefined: " prefixes from provider errors - let errorMessage: string = stripNoisyErrorPrefix( - error instanceof Error ? error.message : String(error) - ); + let errorMessage: string = stripNoisyErrorPrefix(getErrorMessage(error)); let actualError: unknown = error; // For categorization, use the cause if available (preserves the original error structure) @@ -2128,7 +2127,7 @@ export class StreamManager extends EventEmitter { // } // Fallback for unknown errors - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { type: "unknown", raw: message }; } @@ -2599,7 +2598,7 @@ export class StreamManager extends EventEmitter { } return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to stop stream: ${message}`); } } diff --git a/src/node/services/system1ToolWrapper.ts b/src/node/services/system1ToolWrapper.ts index d4c477e62c..9142382b41 100644 --- a/src/node/services/system1ToolWrapper.ts +++ b/src/node/services/system1ToolWrapper.ts @@ -34,6 +34,7 @@ import type { Result } from "@/common/types/result"; import type { SendMessageError } from "@/common/types/errors"; import { cloneToolPreservingDescriptors } from "@/common/utils/tools/cloneToolPreservingDescriptors"; import { log } from "./log"; +import { getErrorMessage } from "@/common/utils/errors"; // --------------------------------------------------------------------------- // Public interface @@ -333,7 +334,7 @@ async function maybeFilterBashOutput( } catch (error) { log.debug("[system1] Failed to save full bash output to temp file", { workspaceId: opts.workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); fullOutputPath = undefined; } @@ -402,7 +403,7 @@ async function maybeFilterBashOutput( } } catch (error) { lastErrorName = error instanceof Error ? error.name : undefined; - lastErrorMessage = error instanceof Error ? error.message : String(error); + lastErrorMessage = getErrorMessage(error); } if (!applied || applied.keptLines === 0) { @@ -480,7 +481,7 @@ async function maybeFilterBashOutput( return { filteredOutput: applied.filteredOutput, notice }; } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); + const errorMessage = getErrorMessage(error); const errorName = error instanceof Error ? 
error.name : undefined; const upstreamAborted = filterParams.abortSignal?.aborted ?? false; const isAbortError = errorName === "AbortError"; @@ -573,7 +574,7 @@ function wrapBashTool( } catch (error) { log.debug("[system1] Failed to filter bash tool output", { workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); return result; } @@ -608,7 +609,7 @@ function wrapBashOutputTool( } catch (error) { log.debug("[system1] Failed to filter bash_output tool output", { workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); return result; } @@ -696,7 +697,7 @@ function wrapTaskAwaitTool( } catch (error) { log.debug("[system1] Failed to filter task_await tool output", { workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); return result; } diff --git a/src/node/services/taskService.ts b/src/node/services/taskService.ts index 63dab737e6..b2476296ff 100644 --- a/src/node/services/taskService.ts +++ b/src/node/services/taskService.ts @@ -57,6 +57,7 @@ import { upsertSubagentReportArtifact, } from "@/node/services/subagentReportArtifacts"; import { secretsToRecord } from "@/common/types/secrets"; +import { getErrorMessage } from "@/common/utils/errors"; export type TaskKind = "agent"; @@ -950,7 +951,7 @@ export class TaskService { } catch (error: unknown) { log.error("Task.create rollback: failed to remove workspace from config", { taskId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -967,7 +968,7 @@ export class TaskService { } catch (error: unknown) { log.error("Task.create rollback: runtime.deleteWorkspace threw", { taskId, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); } @@ -977,7 +978,7 @@ export class TaskService { } catch (error: unknown) { log.error("Task.create rollback: failed to remove session directory", { taskId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -1886,7 +1887,7 @@ export class TaskService { log.debug("Queued task: failed to read agent definition for skip_init_hook", { taskId, agentId: parsedAgentId.data, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } diff --git a/src/node/services/terminalService.ts b/src/node/services/terminalService.ts index f4d7f8a4fe..b49ab15140 100644 --- a/src/node/services/terminalService.ts +++ b/src/node/services/terminalService.ts @@ -15,6 +15,7 @@ import { log } from "@/node/services/log"; import { isCommandAvailable, findAvailableCommand } from "@/node/utils/commandDiscovery"; import { Terminal } from "@xterm/headless"; import { SerializeAddon } from "@xterm/addon-serialize"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Configuration for opening a native terminal @@ -322,7 +323,7 @@ export class TerminalService { }); } } catch (err) { - const message = err instanceof Error ? 
err.message : String(err); + const message = getErrorMessage(err); log.error(`Failed to open native terminal: ${message}`); throw err; } diff --git a/src/node/services/tools/agent_skill_read.ts b/src/node/services/tools/agent_skill_read.ts index 909babc3a5..4dbe2f8d22 100644 --- a/src/node/services/tools/agent_skill_read.ts +++ b/src/node/services/tools/agent_skill_read.ts @@ -8,9 +8,10 @@ import type { ToolConfiguration, ToolFactory } from "@/common/utils/tools/tools" import { TOOL_DEFINITIONS } from "@/common/utils/tools/toolDefinitions"; import { SkillNameSchema } from "@/common/orpc/schemas"; import { readAgentSkill } from "@/node/services/agentSkills/agentSkillsService"; +import { getErrorMessage } from "@/common/utils/errors"; function formatError(error: unknown): string { - return error instanceof Error ? error.message : String(error); + return getErrorMessage(error); } /** diff --git a/src/node/services/tools/agent_skill_read_file.ts b/src/node/services/tools/agent_skill_read_file.ts index 8c81a93cc1..cfab56c17d 100644 --- a/src/node/services/tools/agent_skill_read_file.ts +++ b/src/node/services/tools/agent_skill_read_file.ts @@ -15,6 +15,7 @@ import { MAX_FILE_SIZE, validateFileSize } from "@/node/services/tools/fileCommo import { readBuiltInSkillFile } from "@/node/services/agentSkills/builtInSkillDefinitions"; import { RuntimeError } from "@/node/runtime/Runtime"; import { readFileString } from "@/node/utils/runtime/helpers"; +import { getErrorMessage } from "@/common/utils/errors"; function readContentWithFileReadLimits(input: { fullContent: string; @@ -92,7 +93,7 @@ function readContentWithFileReadLimits(input: { }; } function formatError(error: unknown): string { - return error instanceof Error ? 
error.message : String(error); + return getErrorMessage(error); } /** diff --git a/src/node/services/tools/bash.ts b/src/node/services/tools/bash.ts index c32025a92b..b51b70c5ad 100644 --- a/src/node/services/tools/bash.ts +++ b/src/node/services/tools/bash.ts @@ -22,6 +22,7 @@ import { toBashTaskId } from "./taskId"; import { migrateToBackground } from "@/node/services/backgroundProcessExecutor"; import { LocalBaseRuntime } from "@/node/runtime/LocalBaseRuntime"; import { getToolEnvPath } from "@/node/services/hooks"; +import { getErrorMessage } from "@/common/utils/errors"; const CAT_FILE_READ_NOTICE = "[IMPORTANT]\n\nDO NOT use `cat`, `rg`, or `grep` to read files. Use the `file_read` tool instead (supports offset/limit paging). Bash output may be truncated or auto-filtered, which can hide parts of the file."; @@ -1379,7 +1380,7 @@ ${scriptWithEnv}`; } return withNotice({ success: false, - error: `Failed to execute command: ${err instanceof Error ? err.message : String(err)}`, + error: `Failed to execute command: ${getErrorMessage(err)}`, exitCode: -1, wall_duration_ms: Math.round(performance.now() - startTime), }); diff --git a/src/node/services/tools/file_edit_insert.ts b/src/node/services/tools/file_edit_insert.ts index 292a4babcb..4b6c063777 100644 --- a/src/node/services/tools/file_edit_insert.ts +++ b/src/node/services/tools/file_edit_insert.ts @@ -13,6 +13,7 @@ import { convertNewlines, detectFileEol } from "./eol"; import { fileExists } from "@/node/utils/runtime/fileExists"; import { writeFileString } from "@/node/utils/runtime/helpers"; import { RuntimeError } from "@/node/runtime/Runtime"; +import { getErrorMessage } from "@/common/utils/errors"; const READ_AND_RETRY_NOTE = `${EDIT_FAILED_NOTE_PREFIX} ${NOTE_READ_FILE_RETRY}`; @@ -127,7 +128,7 @@ export const createFileEditInsertTool: ToolFactory = (config: ToolConfiguration) }; } - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to insert content: ${message}`, diff --git a/src/node/services/tools/file_edit_operation.ts b/src/node/services/tools/file_edit_operation.ts index 5142487b22..50a334c029 100644 --- a/src/node/services/tools/file_edit_operation.ts +++ b/src/node/services/tools/file_edit_operation.ts @@ -12,6 +12,7 @@ import { } from "./fileCommon"; import { RuntimeError } from "@/node/runtime/Runtime"; import { readFileString, writeFileString } from "@/node/utils/runtime/helpers"; +import { getErrorMessage } from "@/common/utils/errors"; type FileEditOperationResult = | { @@ -174,7 +175,7 @@ export async function executeFileEditOperation({ } } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to edit file: ${message}`, diff --git a/src/node/services/tools/file_read.ts b/src/node/services/tools/file_read.ts index 463ddabac5..420b72b086 100644 --- a/src/node/services/tools/file_read.ts +++ b/src/node/services/tools/file_read.ts @@ -5,6 +5,7 @@ import { TOOL_DEFINITIONS } from "@/common/utils/tools/toolDefinitions"; import { validateFileSize, validateAndCorrectPath } from "./fileCommon"; import { RuntimeError } from "@/node/runtime/Runtime"; import { readFileString } from "@/node/utils/runtime/helpers"; +import { getErrorMessage } from "@/common/utils/errors"; /** * File read tool factory for AI assistant @@ -178,7 +179,7 @@ export const createFileReadTool: ToolFactory = (config: ToolConfiguration) => { } // Generic error - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to read file: ${message}`, diff --git a/src/node/services/tools/mux_global_agents_read.ts b/src/node/services/tools/mux_global_agents_read.ts index 9c110236f4..8a04678ad1 100644 --- a/src/node/services/tools/mux_global_agents_read.ts +++ b/src/node/services/tools/mux_global_agents_read.ts @@ -5,6 +5,7 @@ import { tool } from "ai"; import type { ToolConfiguration, ToolFactory } from "@/common/utils/tools/tools"; import { TOOL_DEFINITIONS } from "@/common/utils/tools/toolDefinitions"; import { MUX_HELP_CHAT_WORKSPACE_ID } from "@/common/constants/muxChat"; +import { getErrorMessage } from "@/common/utils/errors"; function getMuxHomeFromWorkspaceSessionDir(config: ToolConfiguration): string { if (!config.workspaceSessionDir) { @@ -68,7 +69,7 @@ export const createMuxGlobalAgentsReadTool: ToolFactory = (config: ToolConfigura throw error; } } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to read global AGENTS.md: ${message}`, diff --git a/src/node/services/tools/mux_global_agents_write.ts b/src/node/services/tools/mux_global_agents_write.ts index a8eebd1943..f05bda0862 100644 --- a/src/node/services/tools/mux_global_agents_write.ts +++ b/src/node/services/tools/mux_global_agents_write.ts @@ -7,6 +7,7 @@ import { TOOL_DEFINITIONS } from "@/common/utils/tools/toolDefinitions"; import { MUX_HELP_CHAT_WORKSPACE_ID } from "@/common/constants/muxChat"; import { FILE_EDIT_DIFF_OMITTED_MESSAGE } from "@/common/types/tools"; import { generateDiff } from "./fileCommon"; +import { getErrorMessage } from "@/common/utils/errors"; function getMuxHomeFromWorkspaceSessionDir(config: ToolConfiguration): string { if (!config.workspaceSessionDir) { @@ -113,7 +114,7 @@ export const createMuxGlobalAgentsWriteTool: ToolFactory = (config: ToolConfigur }, }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to write global AGENTS.md: ${message}`, diff --git a/src/node/services/tools/notify.ts b/src/node/services/tools/notify.ts index 222421c614..4fa7603ce8 100644 --- a/src/node/services/tools/notify.ts +++ b/src/node/services/tools/notify.ts @@ -14,6 +14,7 @@ import { tool } from "ai"; import type { ToolFactory } from "@/common/utils/tools/tools"; import { TOOL_DEFINITIONS } from "@/common/utils/tools/toolDefinitions"; import type { NotifyToolResult } from "@/common/types/tools"; +import { getErrorMessage } from "@/common/utils/errors"; /** Maximum notification body length (macOS limit is 256 bytes) */ const MAX_NOTIFICATION_BODY_LENGTH = 200; @@ -94,7 +95,7 @@ async function sendElectronNotification( return { success: true }; } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Failed to send notification: ${message}`, diff --git a/src/node/services/tools/task.ts b/src/node/services/tools/task.ts index 5129e53c31..abe9236192 100644 --- a/src/node/services/tools/task.ts +++ b/src/node/services/tools/task.ts @@ -7,6 +7,7 @@ import type { TaskCreatedEvent } from "@/common/types/stream"; import { log } from "@/node/services/log"; import { parseToolResult, requireTaskService, requireWorkspaceId } from "./toolUtils"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Build dynamic task tool description with available sub-agents. @@ -145,7 +146,7 @@ export const createTaskTool: ToolFactory = (config: ToolConfiguration) => { throw new Error("Interrupted"); } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); if (message === "Timed out waiting for agent_report") { const currentStatus = taskService.getAgentTaskStatus(taskId) ?? created.data.status; const normalizedStatus = currentStatus === "queued" ? "queued" : "running"; diff --git a/src/node/services/tools/task_await.ts b/src/node/services/tools/task_await.ts index 852c502c39..f02f8703c9 100644 --- a/src/node/services/tools/task_await.ts +++ b/src/node/services/tools/task_await.ts @@ -12,6 +12,7 @@ import { requireTaskService, requireWorkspaceId, } from "./toolUtils"; +import { getErrorMessage } from "@/common/utils/errors"; function coerceTimeoutMs(timeoutSecs: unknown): number | undefined { if (typeof timeoutSecs !== "number" || !Number.isFinite(timeoutSecs)) return undefined; @@ -186,7 +187,7 @@ export const createTaskAwaitTool: ToolFactory = (config: ToolConfiguration) => { ...(gitFormatPatch ? { artifacts: { gitFormatPatch } } : {}), }; } catch (error: unknown) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); if (/not found/i.test(message)) { return { status: "not_found" as const, taskId }; } @@ -214,7 +215,7 @@ export const createTaskAwaitTool: ToolFactory = (config: ToolConfiguration) => { return { status: "error" as const, taskId, error: "Interrupted" }; } - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); if (/not found/i.test(message)) { return { status: "not_found" as const, taskId }; } diff --git a/src/node/services/tools/web_fetch.ts b/src/node/services/tools/web_fetch.ts index 1637992dc1..58fca86c7b 100644 --- a/src/node/services/tools/web_fetch.ts +++ b/src/node/services/tools/web_fetch.ts @@ -17,6 +17,7 @@ import { isMuxMdUrl, parseMuxMdUrl, } from "@/common/lib/muxMd"; +import { getErrorMessage } from "@/common/utils/errors"; const USER_AGENT = "Mux/1.0 (https://github.com/coder/mux; web-fetch tool)"; @@ -282,7 +283,7 @@ export const createWebFetchTool: ToolFactory = (config: ToolConfiguration) => { length: content.length, }; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `web_fetch error: ${message}`, diff --git a/src/node/services/voiceService.ts b/src/node/services/voiceService.ts index 52df775750..8569197ce9 100644 --- a/src/node/services/voiceService.ts +++ b/src/node/services/voiceService.ts @@ -1,5 +1,6 @@ import type { Config } from "@/node/config"; import type { Result } from "@/common/types/result"; +import { getErrorMessage } from "@/common/utils/errors"; /** * Voice input service using OpenAI's Whisper API for transcription. @@ -69,7 +70,7 @@ export class VoiceService { const text = await response.text(); return { success: true, data: text }; } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return { success: false, error: `Transcription failed: ${message}` }; } } diff --git a/src/node/services/workspaceLifecycleHooks.ts b/src/node/services/workspaceLifecycleHooks.ts index 6de71a3b3b..51c59efbae 100644 --- a/src/node/services/workspaceLifecycleHooks.ts +++ b/src/node/services/workspaceLifecycleHooks.ts @@ -2,6 +2,7 @@ import type { WorkspaceMetadata } from "@/common/types/workspace"; import type { Result } from "@/common/types/result"; import { Ok, Err } from "@/common/types/result"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; export interface BeforeArchiveHookArgs { workspaceId: string; @@ -18,7 +19,7 @@ export interface AfterUnarchiveHookArgs { export type AfterUnarchiveHook = (args: AfterUnarchiveHookArgs) => Promise>; function sanitizeErrorMessage(error: unknown): string { - const raw = error instanceof Error ? error.message : String(error); + const raw = getErrorMessage(error); // Keep single-line, capped error messages to avoid leaking stack traces or long CLI output. const singleLine = raw.split("\n")[0]?.trim() ?? 
""; return singleLine.slice(0, 200) || "Unknown error"; diff --git a/src/node/services/workspaceMcpOverridesService.ts b/src/node/services/workspaceMcpOverridesService.ts index cca79b5d90..de5df5a7d9 100644 --- a/src/node/services/workspaceMcpOverridesService.ts +++ b/src/node/services/workspaceMcpOverridesService.ts @@ -9,6 +9,7 @@ import { type createRuntime } from "@/node/runtime/runtimeFactory"; import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; import { execBuffered, readFileString, writeFileString } from "@/node/utils/runtime/helpers"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; const MCP_OVERRIDES_DIR = ".mux"; const MCP_OVERRIDES_JSONC = "mcp.local.jsonc"; @@ -236,7 +237,7 @@ export class WorkspaceMcpOverridesService { try { await runtime.ensureDir(overridesDirPath); } catch (err) { - const msg = err instanceof Error ? err.message : String(err); + const msg = getErrorMessage(err); throw new Error(`Failed to create ${MCP_OVERRIDES_DIR} directory: ${msg}`); } } diff --git a/src/node/services/workspaceService.ts b/src/node/services/workspaceService.ts index 64ff723081..80e0c0561b 100644 --- a/src/node/services/workspaceService.ts +++ b/src/node/services/workspaceService.ts @@ -109,6 +109,7 @@ import { updateSubagentTranscriptArtifactsFile, upsertSubagentTranscriptArtifactIndexEntry, } from "@/node/services/subagentTranscriptArtifacts"; +import { getErrorMessage } from "@/common/utils/errors"; /** Maximum number of retry attempts when workspace name collides */ const MAX_WORKSPACE_NAME_COLLISION_RETRIES = 3; @@ -176,7 +177,7 @@ async function copyFileBestEffort(params: { ...params.logContext, srcPath: params.srcPath, destPath: params.destPath, - error: error instanceof Error ? 
error.message : String(error), + error: getErrorMessage(error), }); return false; } @@ -211,7 +212,7 @@ async function copyDirIfMissingBestEffort(params: { ...params.logContext, srcDir: params.srcDir, destDir: params.destDir, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -343,7 +344,7 @@ async function archiveChildSessionArtifactsIntoParentSessionDir(params: { log.error("Failed to archive child transcript into parent session dir", { parentWorkspaceId: params.parentWorkspaceId, childWorkspaceId: params.childWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -423,7 +424,7 @@ async function archiveChildSessionArtifactsIntoParentSessionDir(params: { log.error("Failed to roll up subagent patch artifacts into parent", { parentWorkspaceId: params.parentWorkspaceId, childWorkspaceId: params.childWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -504,7 +505,7 @@ async function archiveChildSessionArtifactsIntoParentSessionDir(params: { log.error("Failed to roll up subagent report artifacts into parent", { parentWorkspaceId: params.parentWorkspaceId, childWorkspaceId: params.childWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } @@ -586,7 +587,7 @@ async function archiveChildSessionArtifactsIntoParentSessionDir(params: { log.error("Failed to roll up subagent transcript artifacts into parent", { parentWorkspaceId: params.parentWorkspaceId, childWorkspaceId: params.childWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -1147,7 +1148,7 @@ export class WorkspaceService extends EventEmitter { ); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to set exclusion: ${message}`); } } @@ -1212,7 +1213,7 @@ export class WorkspaceService extends EventEmitter { } } } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); + const errorMsg = getErrorMessage(error); return Err(errorMsg); } @@ -1353,7 +1354,7 @@ export class WorkspaceService extends EventEmitter { return Ok({ metadata: completeMetadata }); } catch (error) { initLogger.logComplete(-1); - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to create workspace: ${message}`); } } @@ -1491,7 +1492,7 @@ export class WorkspaceService extends EventEmitter { log.error("Failed to roll up child session timing into parent", { workspaceId, parentWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -1523,7 +1524,7 @@ export class WorkspaceService extends EventEmitter { log.error("Failed to roll up child session usage into parent", { workspaceId, parentWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -1555,7 +1556,7 @@ export class WorkspaceService extends EventEmitter { log.error("Failed to roll up child session artifacts into parent", { workspaceId, parentWorkspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -1583,7 +1584,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to remove workspace: ${message}`); } finally { this.removingWorkspaces.delete(workspaceId); @@ -1758,7 +1759,7 @@ export class WorkspaceService extends EventEmitter { return Ok({ newWorkspaceId: workspaceId }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to rename workspace: ${message}`); } finally { // Always clear renaming flag, even on error @@ -1805,7 +1806,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to update workspace title: ${message}`); } } @@ -1890,7 +1891,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to archive workspace: ${message}`); } finally { this.archivingWorkspaces.delete(workspaceId); @@ -1981,7 +1982,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to unarchive workspace: ${message}`); } } @@ -2050,7 +2051,7 @@ export class WorkspaceService extends EventEmitter { try { parsed = JSON.parse(output); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); errors.push({ workspaceId, error: `Failed to parse gh output: ${message}` }); return; } @@ -2074,7 +2075,7 @@ export class WorkspaceService extends EventEmitter { skippedWorkspaceIds.push(workspaceId); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); errors.push({ workspaceId, error: message }); } }); @@ -2099,7 +2100,7 @@ export class WorkspaceService extends EventEmitter { errors, }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to archive merged workspaces: ${message}`); } } @@ -2298,7 +2299,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to update workspace AI settings: ${message}`); } } @@ -2435,7 +2436,7 @@ export class WorkspaceService extends EventEmitter { log.error(`Failed to clean up session dir ${newSessionDir}:`, cleanupError); } initLogger.logComplete(-1); - const message = copyError instanceof Error ? copyError.message : String(copyError); + const message = getErrorMessage(copyError); return Err(`Failed to copy chat history: ${message}`); } @@ -2481,7 +2482,7 @@ export class WorkspaceService extends EventEmitter { return Ok({ metadata, projectPath: foundProjectPath }); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to fork workspace: ${message}`); } } @@ -2628,7 +2629,7 @@ export class WorkspaceService extends EventEmitter { log.debug("Failed to cancel pending ask_user_question", { workspaceId, toolCallId: pendingAskUserQuestion.toolCallId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); } } @@ -2745,7 +2746,7 @@ export class WorkspaceService extends EventEmitter { } return result; } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error); + const errorMessage = getErrorMessage(error); log.error("Unexpected error in resumeStream handler:", error); // Handle incompatible workspace errors from downgraded configs @@ -2796,7 +2797,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); + const errorMessage = getErrorMessage(error); log.error("Unexpected error in interruptStream handler:", error); return Err(`Failed to interrupt stream: ${errorMessage}`); } @@ -2937,7 +2938,7 @@ export class WorkspaceService extends EventEmitter { } if (!best) { - const errorMessage = error instanceof Error ? error.message : String(error); + const errorMessage = getErrorMessage(error); return Err(`Failed to answer ask_user_question: ${errorMessage}`); } @@ -2979,7 +2980,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (innerError) { - const errorMessage = innerError instanceof Error ? innerError.message : String(innerError); + const errorMessage = getErrorMessage(innerError); return Err(errorMessage); } } @@ -2991,7 +2992,7 @@ export class WorkspaceService extends EventEmitter { session.clearQueue(); return Ok(undefined); } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); + const errorMessage = getErrorMessage(error); log.error("Unexpected error in clearQueue handler:", error); return Err(`Failed to clear queue: ${errorMessage}`); } @@ -3168,7 +3169,7 @@ export class WorkspaceService extends EventEmitter { return Ok(undefined); } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to replace history: ${message}`); } } @@ -3250,7 +3251,7 @@ export class WorkspaceService extends EventEmitter { } catch (error) { log.debug("getFileCompletions: failed to list files", { workspaceId, - error: error instanceof Error ? error.message : String(error), + error: getErrorMessage(error), }); // Keep any previously indexed data, but avoid retrying in a tight loop. @@ -3370,7 +3371,7 @@ export class WorkspaceService extends EventEmitter { } catch (error) { // bashTool.execute returns error results instead of throwing, so this only catches // failures from setup code (getWorkspaceMetadata, findWorkspace, createRuntime, etc.) - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to execute bash command: ${message}`); } } diff --git a/src/node/services/workspaceTitleGenerator.ts b/src/node/services/workspaceTitleGenerator.ts index 709e4fdd55..73d79ff40c 100644 --- a/src/node/services/workspaceTitleGenerator.ts +++ b/src/node/services/workspaceTitleGenerator.ts @@ -6,6 +6,7 @@ import type { Result } from "@/common/types/result"; import { Ok, Err } from "@/common/types/result"; import type { SendMessageError } from "@/common/types/errors"; import crypto from "crypto"; +import { getErrorMessage } from "@/common/utils/errors"; /** Schema for AI-generated workspace identity (area name + descriptive title) */ const workspaceIdentitySchema = z.object({ @@ -140,7 +141,7 @@ Requirements: } // API error (invalid key, quota, network, etc.) - try next candidate - lastApiError = error instanceof Error ? 
error.message : String(error); + lastApiError = getErrorMessage(error); log.warn(`Name generation failed with ${modelString}, trying next candidate`, { error: lastApiError, }); diff --git a/src/node/utils/main/bashPath.ts b/src/node/utils/main/bashPath.ts index e2ac77139a..f7fafcc88d 100644 --- a/src/node/utils/main/bashPath.ts +++ b/src/node/utils/main/bashPath.ts @@ -8,6 +8,7 @@ import { execSync, type ExecSyncOptionsWithStringEncoding } from "child_process"; import { existsSync } from "fs"; import path from "path"; +import { getErrorMessage } from "@/common/utils/errors"; const WIN_PATH = path.win32; @@ -267,7 +268,7 @@ export function getBashPath( cachedBashPathError = null; return cachedBashPath; } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); cachedBashPathError = { message, lastCheckedMs: now }; throw error; } diff --git a/src/node/utils/main/tokenizer.worker.ts b/src/node/utils/main/tokenizer.worker.ts index 968c754238..69e7181075 100644 --- a/src/node/utils/main/tokenizer.worker.ts +++ b/src/node/utils/main/tokenizer.worker.ts @@ -3,6 +3,7 @@ import { parentPort } from "node:worker_threads"; import { Tokenizer, models } from "ai-tokenizer"; import type { ModelName } from "ai-tokenizer"; import * as encoding from "ai-tokenizer/encoding"; +import { getErrorMessage } from "@/common/utils/errors"; export interface CountTokensInput { modelName: ModelName; @@ -65,7 +66,7 @@ if (parentPort) { parentPort!.postMessage({ messageId: message.messageId, error: { - message: error instanceof Error ? error.message : String(error), + message: getErrorMessage(error), stack: error instanceof Error ? 
error.stack : undefined, }, }); diff --git a/src/node/utils/sessionFile.ts b/src/node/utils/sessionFile.ts index 31b2b6b5b2..98bdf714b3 100644 --- a/src/node/utils/sessionFile.ts +++ b/src/node/utils/sessionFile.ts @@ -6,6 +6,7 @@ import { Ok, Err } from "@/common/types/result"; import type { Config } from "@/node/config"; import { workspaceFileLocks } from "@/node/utils/concurrency/workspaceFileLocks"; import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; export interface SessionFileWriteOptions { /** @@ -82,7 +83,7 @@ export class SessionFileManager { await writeFileAtomic(filePath, JSON.stringify(data, null, 2)); return Ok(undefined); } catch (error) { - const message = error instanceof Error ? error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to write ${this.fileName}: ${message}`); } }); @@ -102,7 +103,7 @@ export class SessionFileManager { if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") { return Ok(undefined); // Already deleted } - const message = error instanceof Error ? 
error.message : String(error); + const message = getErrorMessage(error); return Err(`Failed to delete ${this.fileName}: ${message}`); } }); From 4bfea32a89a4242af17287c77bf638eaa0946dde Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 11:45:37 -0600 Subject: [PATCH 02/14] refactor: remove 6 dead exports --- src/node/services/agentPresets.ts | 106 ------------------ .../agentSkills/builtInSkillDefinitions.ts | 5 - src/node/services/mock/mockAiRouter.ts | 5 - src/node/services/providerModelFactory.ts | 13 --- src/node/services/tools/code_execution.ts | 10 -- src/node/services/tools/taskId.ts | 4 - tests/ipc/queuedMessages.starting.test.ts | 4 +- tests/ipc/setup.ts | 10 +- 8 files changed, 9 insertions(+), 148 deletions(-) delete mode 100644 src/node/services/agentPresets.ts diff --git a/src/node/services/agentPresets.ts b/src/node/services/agentPresets.ts deleted file mode 100644 index 24282c0241..0000000000 --- a/src/node/services/agentPresets.ts +++ /dev/null @@ -1,106 +0,0 @@ -import type { ToolPolicy } from "@/common/utils/tools/toolPolicy"; - -export interface AgentPreset { - /** Normalized agentType key (e.g., "explore" or "exec") */ - agentType: string; - toolPolicy: ToolPolicy; - systemPrompt: string; -} - -const REPORTING_TOOL_NAMES = ["agent_report"] as const; - -function enableOnly(...toolNames: readonly string[]): ToolPolicy { - return [ - { regex_match: ".*", action: "disable" }, - ...toolNames.map((toolName) => ({ regex_match: toolName, action: "enable" as const })), - ]; -} - -const REPORTING_PROMPT_LINES = [ - "Reporting:", - "- When you have a final answer, call agent_report exactly once.", - "- Do not call agent_report until you have completed the assigned task and integrated all relevant findings.", -] as const; - -function buildSystemPrompt(args: { - agentLabel: string; - goals: string[]; - rules: string[]; - delegation?: string[]; -}): string { - return [ - `You are a ${args.agentLabel} sub-agent running inside a child workspace.`, - 
"", - "Goals:", - ...args.goals, - "", - "Rules:", - ...args.rules, - "", - ...(args.delegation && args.delegation.length > 0 - ? ["Delegation:", ...args.delegation, ""] - : []), - ...REPORTING_PROMPT_LINES, - ].join("\n"); -} - -const EXEC_PRESET: AgentPreset = { - agentType: "exec", - toolPolicy: [ - // Only the main plan-mode session should call propose_plan. - { regex_match: "propose_plan", action: "disable" }, - ], - systemPrompt: buildSystemPrompt({ - agentLabel: "Exec", - goals: [ - "- Complete the assigned coding task end-to-end in this child workspace.", - "- Make minimal, correct changes that match existing codebase patterns.", - ], - rules: [ - "- You may spawn additional sub-agent tasks when the task tools are available (nesting is limited by maxTaskNestingDepth).", - "- Do not call propose_plan.", - "- Prefer small, reviewable diffs and run targeted checks when feasible.", - ], - }), -}; - -const EXPLORE_PRESET: AgentPreset = { - agentType: "explore", - toolPolicy: enableOnly( - "file_read", - "bash", - "task_await", - "task_list", - "task_terminate", - "web_fetch", - "web_search", - "google_search", - ...REPORTING_TOOL_NAMES - ), - systemPrompt: buildSystemPrompt({ - agentLabel: "Explore", - goals: [ - "- Explore the repository to answer the prompt using read-only investigation.", - "- Return concise, actionable findings (paths, symbols, callsites, and facts).", - ], - rules: [ - "=== CRITICAL: READ-ONLY MODE - NO FILE MODIFICATIONS ===", - "- You MUST NOT create, edit, delete, move, or copy files.", - "- You MUST NOT create temporary files anywhere (including /tmp).", - "- You MUST NOT use redirect operators (>, >>, |) or heredocs to write to files.", - "- You MUST NOT run commands that change system state (rm, mv, cp, mkdir, touch, git add/commit, installs, etc.).", - "- Use bash only for read-only operations (rg, ls, cat, git diff/show/log, etc.).", - "- You MUST NOT spawn additional sub-agent tasks.", - ], - }), -}; - -const PRESETS_BY_AGENT_TYPE: 
Record = { - explore: EXPLORE_PRESET, - exec: EXEC_PRESET, -}; - -export function getAgentPreset(agentType: string | undefined): AgentPreset | null { - const normalized = (agentType ?? "").trim().toLowerCase(); - return PRESETS_BY_AGENT_TYPE[normalized] ?? null; -} diff --git a/src/node/services/agentSkills/builtInSkillDefinitions.ts b/src/node/services/agentSkills/builtInSkillDefinitions.ts index 5aceab8a7a..3f14274f80 100644 --- a/src/node/services/agentSkills/builtInSkillDefinitions.ts +++ b/src/node/services/agentSkills/builtInSkillDefinitions.ts @@ -120,8 +120,3 @@ export function readBuiltInSkillFile( return { resolvedPath, content }; } - -/** Exposed for testing - clears cached parsed packages */ -export function clearBuiltInSkillCache(): void { - cachedPackages = null; -} diff --git a/src/node/services/mock/mockAiRouter.ts b/src/node/services/mock/mockAiRouter.ts index e7fb4c7c1b..49c186ca9b 100644 --- a/src/node/services/mock/mockAiRouter.ts +++ b/src/node/services/mock/mockAiRouter.ts @@ -51,11 +51,6 @@ const MOCK_MARKER_PREFIX = "[mock:"; /** Marker to trigger the mock stream-start gate (holds stream until released). */ export const MOCK_STREAM_START_GATE_MARKER = "[mock:wait-start]"; -/** Build a message that triggers the mock stream-start gate. */ -export function buildMockStreamStartGateMessage(text: string): string { - return `${MOCK_STREAM_START_GATE_MARKER} ${text}`.trim(); -} - function normalizeText(text: string): string { return text.trim().toLowerCase(); } diff --git a/src/node/services/providerModelFactory.ts b/src/node/services/providerModelFactory.ts index 38c5885357..d85e23ebe8 100644 --- a/src/node/services/providerModelFactory.ts +++ b/src/node/services/providerModelFactory.ts @@ -277,19 +277,6 @@ export function buildAppAttributionHeaders( return headers; } -/** - * Preload AI SDK provider modules to avoid race conditions in concurrent test environments. 
- * This function loads @ai-sdk/anthropic, @ai-sdk/openai, and ollama-ai-provider-v2 eagerly - * so that subsequent dynamic imports in createModel() hit the module cache instead of racing. - * - * In production, providers are lazy-loaded on first use to optimize startup time. - * In tests, we preload them once during setup to ensure reliable concurrent execution. - */ -export async function preloadAISDKProviders(): Promise { - // Preload providers to ensure they're in the module cache before concurrent tests run - await Promise.all(Object.values(PROVIDER_REGISTRY).map((importFn) => importFn())); -} - /** * Parse provider and model ID from model string. * Handles model IDs with colons (e.g., "ollama:gpt-oss:20b"). diff --git a/src/node/services/tools/code_execution.ts b/src/node/services/tools/code_execution.ts index 569f1739ae..92168714e9 100644 --- a/src/node/services/tools/code_execution.ts +++ b/src/node/services/tools/code_execution.ts @@ -28,16 +28,6 @@ export function clearTypeCaches(): void { clearTypeCache(); } -/** - * Pre-generate type definitions for the given tools. - * Call during workspace initialization to avoid first-call latency. - * Integration with workspace initialization is handled in Phase 6. - */ -export async function preGenerateMuxTypes(tools: Record): Promise { - const toolBridge = new ToolBridge(tools); - await getCachedMuxTypes(toolBridge.getBridgeableTools()); -} - /** PTC event with parentToolCallId attached by code_execution */ export type PTCEventWithParent = PTCEvent & { parentToolCallId: string }; diff --git a/src/node/services/tools/taskId.ts b/src/node/services/tools/taskId.ts index 4466e1e3ff..7bdf0e5fb6 100644 --- a/src/node/services/tools/taskId.ts +++ b/src/node/services/tools/taskId.ts @@ -18,7 +18,3 @@ export function fromBashTaskId(taskId: string): string | null { const processId = taskId.slice(BASH_TASK_ID_PREFIX.length).trim(); return processId.length > 0 ? 
processId : null; } - -export function isBashTaskId(taskId: string): boolean { - return fromBashTaskId(taskId) !== null; -} diff --git a/tests/ipc/queuedMessages.starting.test.ts b/tests/ipc/queuedMessages.starting.test.ts index 59a0d7bff9..b67f2b29a6 100644 --- a/tests/ipc/queuedMessages.starting.test.ts +++ b/tests/ipc/queuedMessages.starting.test.ts @@ -10,7 +10,7 @@ import { createStreamCollector, } from "./helpers"; import { isMuxMessage } from "@/common/orpc/types"; -import { buildMockStreamStartGateMessage } from "@/node/services/mock/mockAiRouter"; +import { MOCK_STREAM_START_GATE_MARKER } from "@/node/services/mock/mockAiRouter"; describe("Queued messages during stream start", () => { let env: TestEnvironment | null = null; @@ -52,7 +52,7 @@ describe("Queued messages during stream start", () => { try { await collector.waitForSubscription(5000); - const gatedMessage = buildMockStreamStartGateMessage("First message"); + const gatedMessage = `${MOCK_STREAM_START_GATE_MARKER} First message`; const aiService = env.services.aiService; const session = env.services.workspaceService.getOrCreateSession(workspaceId); const firstSendPromise = sendMessageWithModel(env, workspaceId, gatedMessage, HAIKU_MODEL); diff --git a/tests/ipc/setup.ts b/tests/ipc/setup.ts index 31c5ae7dd7..28e7be0969 100644 --- a/tests/ipc/setup.ts +++ b/tests/ipc/setup.ts @@ -187,11 +187,15 @@ export { shouldRunIntegrationTests, validateApiKeys, getApiKey }; * Call this in beforeAll hooks to prevent Jest sandbox race conditions. 
*/ export async function preloadTestModules(): Promise { - const [{ loadTokenizerModules }, { preloadAISDKProviders }] = await Promise.all([ + const [{ loadTokenizerModules }, { PROVIDER_REGISTRY }] = await Promise.all([ import("../../src/node/utils/main/tokenizer"), - import("../../src/node/services/providerModelFactory"), + import("../../src/common/constants/providers"), + ]); + await Promise.all([ + loadTokenizerModules(), + // Preload providers to ensure they're in the module cache before concurrent tests run + ...Object.values(PROVIDER_REGISTRY).map((importFn) => importFn()), ]); - await Promise.all([loadTokenizerModules(), preloadAISDKProviders()]); } /** From 99b8f73d5858a6a83d3e16895c1c4c50dd900941 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 11:47:28 -0600 Subject: [PATCH 03/14] refactor: replace stale manual spread in tests/ipc/setup.ts Replace ~30-line manual service field spread with services.toORPCContext(), which was added specifically to centralize this mapping. Remove the now-unused ORPCContext type import. 
--- tests/ipc/setup.ts | 35 +---------------------------------- 1 file changed, 1 insertion(+), 34 deletions(-) diff --git a/tests/ipc/setup.ts b/tests/ipc/setup.ts index 28e7be0969..33e05d81f7 100644 --- a/tests/ipc/setup.ts +++ b/tests/ipc/setup.ts @@ -13,7 +13,6 @@ import { cleanupTempGitRepo, } from "./helpers"; import type { OrpcSource } from "./helpers"; -import type { ORPCContext } from "../../src/node/orpc/context"; import type { RuntimeConfig } from "../../src/common/types/runtime"; import { createOrpcTestClient, type OrpcTestClient } from "./orpcTestClient"; import { shouldRunIntegrationTests, validateApiKeys, getApiKey } from "../testUtils"; @@ -81,39 +80,7 @@ export async function createTestEnvironment(): Promise { // Note: Events are consumed via ORPC subscriptions (StreamCollector), not windowService.send() services.windowService.setMainWindow(mockWindow); - const orpcContext: ORPCContext = { - config: services.config, - aiService: services.aiService, - projectService: services.projectService, - workspaceService: services.workspaceService, - muxGatewayOauthService: services.muxGatewayOauthService, - muxGovernorOauthService: services.muxGovernorOauthService, - codexOauthService: services.codexOauthService, - copilotOauthService: services.copilotOauthService, - taskService: services.taskService, - providerService: services.providerService, - terminalService: services.terminalService, - editorService: services.editorService, - windowService: services.windowService, - updateService: services.updateService, - tokenizerService: services.tokenizerService, - serverService: services.serverService, - featureFlagService: services.featureFlagService, - workspaceMcpOverridesService: services.workspaceMcpOverridesService, - sessionTimingService: services.sessionTimingService, - mcpConfigService: services.mcpConfigService, - mcpOauthService: services.mcpOauthService, - mcpServerManager: services.mcpServerManager, - menuEventService: services.menuEventService, - 
voiceService: services.voiceService, - experimentsService: services.experimentsService, - telemetryService: services.telemetryService, - sessionUsageService: services.sessionUsageService, - signingService: services.signingService, - coderService: services.coderService, - policyService: services.policyService, - }; - const orpc = createOrpcTestClient(orpcContext); + const orpc = createOrpcTestClient(services.toORPCContext()); return { config, From 13ed905bede7037abf2c801b1c21e1bfe98fc71f Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 11:51:22 -0600 Subject: [PATCH 04/14] refactor: extract subagent artifact archival to dedicated module Move archiveChildSessionArtifactsIntoParentSessionDir and its supporting helper functions (isErrnoWithCode, isPathInsideDir, copyFileBestEffort, copyDirIfMissingBestEffort, coerceUpdatedAtMs, rollUpAncestorWorkspaceIds) from workspaceService.ts to a new subagentArtifactArchival.ts module. This reduces workspaceService.ts by ~450 lines with no logic changes. The class method that calls archiveChildSessionArtifactsIntoParentSessionDir now imports it from the new module. 
--- src/node/services/subagentArtifactArchival.ts | 467 ++++++++++++++++++ src/node/services/workspaceService.ts | 463 +---------------- 2 files changed, 468 insertions(+), 462 deletions(-) create mode 100644 src/node/services/subagentArtifactArchival.ts diff --git a/src/node/services/subagentArtifactArchival.ts b/src/node/services/subagentArtifactArchival.ts new file mode 100644 index 0000000000..305f510acf --- /dev/null +++ b/src/node/services/subagentArtifactArchival.ts @@ -0,0 +1,467 @@ +import * as path from "path"; +import * as fsPromises from "fs/promises"; +import { log } from "@/node/services/log"; +import { getErrorMessage } from "@/common/utils/errors"; +import { coerceThinkingLevel, type ThinkingLevel } from "@/common/types/thinking"; +import { + getSubagentGitPatchMboxPath, + readSubagentGitPatchArtifactsFile, + updateSubagentGitPatchArtifactsFile, +} from "@/node/services/subagentGitPatchArtifacts"; +import { + getSubagentReportArtifactPath, + readSubagentReportArtifactsFile, + updateSubagentReportArtifactsFile, +} from "@/node/services/subagentReportArtifacts"; +import { + getSubagentTranscriptChatPath, + getSubagentTranscriptPartialPath, + readSubagentTranscriptArtifactsFile, + updateSubagentTranscriptArtifactsFile, + upsertSubagentTranscriptArtifactIndexEntry, +} from "@/node/services/subagentTranscriptArtifacts"; + +function isErrnoWithCode(error: unknown, code: string): boolean { + return Boolean(error && typeof error === "object" && "code" in error && error.code === code); +} + +function isPathInsideDir(dirPath: string, filePath: string): boolean { + const resolvedDir = path.resolve(dirPath); + const resolvedFile = path.resolve(filePath); + const relative = path.relative(resolvedDir, resolvedFile); + + return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative)); +} + +async function copyFileBestEffort(params: { + srcPath: string; + destPath: string; + logContext: Record; +}): Promise { + try { + await 
fsPromises.mkdir(path.dirname(params.destPath), { recursive: true }); + await fsPromises.copyFile(params.srcPath, params.destPath); + return true; + } catch (error: unknown) { + if (isErrnoWithCode(error, "ENOENT")) { + return false; + } + + log.error("Failed to copy session artifact file", { + ...params.logContext, + srcPath: params.srcPath, + destPath: params.destPath, + error: getErrorMessage(error), + }); + return false; + } +} + +async function copyDirIfMissingBestEffort(params: { + srcDir: string; + destDir: string; + logContext: Record; +}): Promise { + try { + try { + const stat = await fsPromises.stat(params.destDir); + if (stat.isDirectory()) { + return; + } + // If it's a file, fall through and try to copy (will likely fail). + } catch (error: unknown) { + if (!isErrnoWithCode(error, "ENOENT")) { + throw error; + } + } + + await fsPromises.mkdir(path.dirname(params.destDir), { recursive: true }); + await fsPromises.cp(params.srcDir, params.destDir, { recursive: true }); + } catch (error: unknown) { + if (isErrnoWithCode(error, "ENOENT")) { + return; + } + + log.error("Failed to copy session artifact directory", { + ...params.logContext, + srcDir: params.srcDir, + destDir: params.destDir, + error: getErrorMessage(error), + }); + } +} + +function coerceUpdatedAtMs(entry: { createdAtMs?: number; updatedAtMs?: number }): number { + if (typeof entry.updatedAtMs === "number" && Number.isFinite(entry.updatedAtMs)) { + return entry.updatedAtMs; + } + + if (typeof entry.createdAtMs === "number" && Number.isFinite(entry.createdAtMs)) { + return entry.createdAtMs; + } + + return 0; +} + +function rollUpAncestorWorkspaceIds(params: { + ancestorWorkspaceIds: string[]; + removedWorkspaceId: string; + newParentWorkspaceId: string; +}): string[] { + const filtered = params.ancestorWorkspaceIds.filter((id) => id !== params.removedWorkspaceId); + + // Ensure the roll-up target is first (parent-first ordering). 
+ if (filtered[0] === params.newParentWorkspaceId) { + return filtered; + } + + return [ + params.newParentWorkspaceId, + ...filtered.filter((id) => id !== params.newParentWorkspaceId), + ]; +} + +export async function archiveChildSessionArtifactsIntoParentSessionDir(params: { + parentWorkspaceId: string; + parentSessionDir: string; + childWorkspaceId: string; + childSessionDir: string; + /** Task-level model string for the child workspace (optional; persists into transcript artifacts). */ + childTaskModelString?: string; + /** Task-level thinking/reasoning level for the child workspace (optional; persists into transcript artifacts). */ + childTaskThinkingLevel?: ThinkingLevel; +}): Promise { + if (params.parentWorkspaceId.length === 0) { + return; + } + + if (params.childWorkspaceId.length === 0) { + return; + } + + if (params.parentSessionDir.length === 0 || params.childSessionDir.length === 0) { + return; + } + + // 1) Archive the child session transcript (chat.jsonl + partial.json) into the parent session dir + // BEFORE deleting ~/.mux/sessions/. + try { + const childChatPath = path.join(params.childSessionDir, "chat.jsonl"); + const childPartialPath = path.join(params.childSessionDir, "partial.json"); + + const archivedChatPath = getSubagentTranscriptChatPath( + params.parentSessionDir, + params.childWorkspaceId + ); + const archivedPartialPath = getSubagentTranscriptPartialPath( + params.parentSessionDir, + params.childWorkspaceId + ); + + // Defensive: avoid path traversal in workspace IDs. 
+ if (!isPathInsideDir(params.parentSessionDir, archivedChatPath)) { + log.error("Refusing to archive session transcript outside parent session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + parentSessionDir: params.parentSessionDir, + archivedChatPath, + }); + } else { + const didCopyChat = await copyFileBestEffort({ + srcPath: childChatPath, + destPath: archivedChatPath, + logContext: { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + artifact: "chat.jsonl", + }, + }); + + const didCopyPartial = await copyFileBestEffort({ + srcPath: childPartialPath, + destPath: archivedPartialPath, + logContext: { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + artifact: "partial.json", + }, + }); + + if (didCopyChat || didCopyPartial) { + const nowMs = Date.now(); + + const model = + typeof params.childTaskModelString === "string" && + params.childTaskModelString.trim().length > 0 + ? params.childTaskModelString.trim() + : undefined; + const thinkingLevel = coerceThinkingLevel(params.childTaskThinkingLevel); + + await upsertSubagentTranscriptArtifactIndexEntry({ + workspaceId: params.parentWorkspaceId, + workspaceSessionDir: params.parentSessionDir, + childTaskId: params.childWorkspaceId, + updater: (existing) => ({ + childTaskId: params.childWorkspaceId, + parentWorkspaceId: params.parentWorkspaceId, + createdAtMs: existing?.createdAtMs ?? nowMs, + updatedAtMs: nowMs, + model: model ?? existing?.model, + thinkingLevel: thinkingLevel ?? existing?.thinkingLevel, + chatPath: didCopyChat ? archivedChatPath : existing?.chatPath, + partialPath: didCopyPartial ? 
archivedPartialPath : existing?.partialPath, + }), + }); + } + } + } catch (error: unknown) { + log.error("Failed to archive child transcript into parent session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + error: getErrorMessage(error), + }); + } + + // 2) Roll up nested subagent artifacts from the child session dir into the parent session dir. + // This preserves grandchild artifacts when intermediate subagent workspaces are cleaned up. + + // --- subagent-patches.json + subagent-patches//... + try { + const childArtifacts = await readSubagentGitPatchArtifactsFile(params.childSessionDir); + const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); + + for (const [taskId] of childEntries) { + if (!taskId) continue; + + const srcDir = path.dirname(getSubagentGitPatchMboxPath(params.childSessionDir, taskId)); + const destDir = path.dirname(getSubagentGitPatchMboxPath(params.parentSessionDir, taskId)); + + if (!isPathInsideDir(params.childSessionDir, srcDir)) { + log.error("Refusing to roll up patch artifact outside child session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + childSessionDir: params.childSessionDir, + srcDir, + }); + continue; + } + + if (!isPathInsideDir(params.parentSessionDir, destDir)) { + log.error("Refusing to roll up patch artifact outside parent session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + parentSessionDir: params.parentSessionDir, + destDir, + }); + continue; + } + + await copyDirIfMissingBestEffort({ + srcDir, + destDir, + logContext: { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + artifact: "subagent-patches", + taskId, + }, + }); + } + + if (childEntries.length > 0) { + await updateSubagentGitPatchArtifactsFile({ + workspaceId: params.parentWorkspaceId, + workspaceSessionDir: 
params.parentSessionDir, + update: (parentFile) => { + for (const [taskId, childEntry] of childEntries) { + if (!taskId) continue; + const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; + + const childUpdated = coerceUpdatedAtMs(childEntry); + const existingUpdated = existing ? coerceUpdatedAtMs(existing) : -1; + + if (!existing || childUpdated > existingUpdated) { + parentFile.artifactsByChildTaskId[taskId] = { + ...childEntry, + childTaskId: taskId, + parentWorkspaceId: params.parentWorkspaceId, + mboxPath: getSubagentGitPatchMboxPath(params.parentSessionDir, taskId), + }; + } + } + }, + }); + } + } catch (error: unknown) { + log.error("Failed to roll up subagent patch artifacts into parent", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + error: getErrorMessage(error), + }); + } + + // --- subagent-reports.json + subagent-reports//... + try { + const childArtifacts = await readSubagentReportArtifactsFile(params.childSessionDir); + const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); + + for (const [taskId] of childEntries) { + if (!taskId) continue; + + const srcDir = path.dirname(getSubagentReportArtifactPath(params.childSessionDir, taskId)); + const destDir = path.dirname(getSubagentReportArtifactPath(params.parentSessionDir, taskId)); + + if (!isPathInsideDir(params.childSessionDir, srcDir)) { + log.error("Refusing to roll up report artifact outside child session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + childSessionDir: params.childSessionDir, + srcDir, + }); + continue; + } + + if (!isPathInsideDir(params.parentSessionDir, destDir)) { + log.error("Refusing to roll up report artifact outside parent session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + parentSessionDir: params.parentSessionDir, + destDir, + }); + continue; + } + + await 
copyDirIfMissingBestEffort({ + srcDir, + destDir, + logContext: { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + artifact: "subagent-reports", + taskId, + }, + }); + } + + if (childEntries.length > 0) { + await updateSubagentReportArtifactsFile({ + workspaceId: params.parentWorkspaceId, + workspaceSessionDir: params.parentSessionDir, + update: (parentFile) => { + for (const [taskId, childEntry] of childEntries) { + if (!taskId) continue; + + const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; + const childUpdated = coerceUpdatedAtMs(childEntry); + const existingUpdated = existing ? coerceUpdatedAtMs(existing) : -1; + + if (!existing || childUpdated > existingUpdated) { + parentFile.artifactsByChildTaskId[taskId] = { + ...childEntry, + childTaskId: taskId, + parentWorkspaceId: params.parentWorkspaceId, + ancestorWorkspaceIds: rollUpAncestorWorkspaceIds({ + ancestorWorkspaceIds: childEntry.ancestorWorkspaceIds, + removedWorkspaceId: params.childWorkspaceId, + newParentWorkspaceId: params.parentWorkspaceId, + }), + }; + } + } + }, + }); + } + } catch (error: unknown) { + log.error("Failed to roll up subagent report artifacts into parent", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + error: getErrorMessage(error), + }); + } + + // --- subagent-transcripts.json + subagent-transcripts//... 
+ try { + const childArtifacts = await readSubagentTranscriptArtifactsFile(params.childSessionDir); + const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); + + for (const [taskId] of childEntries) { + if (!taskId) continue; + + const srcDir = path.dirname(getSubagentTranscriptChatPath(params.childSessionDir, taskId)); + const destDir = path.dirname(getSubagentTranscriptChatPath(params.parentSessionDir, taskId)); + + if (!isPathInsideDir(params.childSessionDir, srcDir)) { + log.error("Refusing to roll up transcript artifact outside child session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + childSessionDir: params.childSessionDir, + srcDir, + }); + continue; + } + + if (!isPathInsideDir(params.parentSessionDir, destDir)) { + log.error("Refusing to roll up transcript artifact outside parent session dir", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + taskId, + parentSessionDir: params.parentSessionDir, + destDir, + }); + continue; + } + + await copyDirIfMissingBestEffort({ + srcDir, + destDir, + logContext: { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + artifact: "subagent-transcripts", + taskId, + }, + }); + } + + if (childEntries.length > 0) { + await updateSubagentTranscriptArtifactsFile({ + workspaceId: params.parentWorkspaceId, + workspaceSessionDir: params.parentSessionDir, + update: (parentFile) => { + for (const [taskId, childEntry] of childEntries) { + if (!taskId) continue; + + const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; + const childUpdated = coerceUpdatedAtMs(childEntry); + const existingUpdated = existing ? 
coerceUpdatedAtMs(existing) : -1; + + if (!existing || childUpdated > existingUpdated) { + parentFile.artifactsByChildTaskId[taskId] = { + ...childEntry, + childTaskId: taskId, + parentWorkspaceId: params.parentWorkspaceId, + chatPath: childEntry.chatPath + ? getSubagentTranscriptChatPath(params.parentSessionDir, taskId) + : undefined, + partialPath: childEntry.partialPath + ? getSubagentTranscriptPartialPath(params.parentSessionDir, taskId) + : undefined, + }; + } + } + }, + }); + } + } catch (error: unknown) { + log.error("Failed to roll up subagent transcript artifacts into parent", { + parentWorkspaceId: params.parentWorkspaceId, + childWorkspaceId: params.childWorkspaceId, + error: getErrorMessage(error), + }); + } +} diff --git a/src/node/services/workspaceService.ts b/src/node/services/workspaceService.ts index 80e0c0561b..e8ae57652b 100644 --- a/src/node/services/workspaceService.ts +++ b/src/node/services/workspaceService.ts @@ -92,23 +92,7 @@ import { type FileCompletionsIndex, } from "@/node/services/fileCompletionsIndex"; import { taskQueueDebug } from "@/node/services/taskQueueDebug"; -import { - getSubagentGitPatchMboxPath, - readSubagentGitPatchArtifactsFile, - updateSubagentGitPatchArtifactsFile, -} from "@/node/services/subagentGitPatchArtifacts"; -import { - getSubagentReportArtifactPath, - readSubagentReportArtifactsFile, - updateSubagentReportArtifactsFile, -} from "@/node/services/subagentReportArtifacts"; -import { - getSubagentTranscriptChatPath, - getSubagentTranscriptPartialPath, - readSubagentTranscriptArtifactsFile, - updateSubagentTranscriptArtifactsFile, - upsertSubagentTranscriptArtifactIndexEntry, -} from "@/node/services/subagentTranscriptArtifacts"; +import { archiveChildSessionArtifactsIntoParentSessionDir } from "@/node/services/subagentArtifactArchival"; import { getErrorMessage } from "@/common/utils/errors"; /** Maximum number of retry attempts when workspace name collides */ @@ -147,451 +131,6 @@ function 
appendCollisionSuffix(baseName: string): string { return `${baseName}-${suffix}`; } -function isErrnoWithCode(error: unknown, code: string): boolean { - return Boolean(error && typeof error === "object" && "code" in error && error.code === code); -} - -function isPathInsideDir(dirPath: string, filePath: string): boolean { - const resolvedDir = path.resolve(dirPath); - const resolvedFile = path.resolve(filePath); - const relative = path.relative(resolvedDir, resolvedFile); - - return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative)); -} - -async function copyFileBestEffort(params: { - srcPath: string; - destPath: string; - logContext: Record; -}): Promise { - try { - await fsPromises.mkdir(path.dirname(params.destPath), { recursive: true }); - await fsPromises.copyFile(params.srcPath, params.destPath); - return true; - } catch (error: unknown) { - if (isErrnoWithCode(error, "ENOENT")) { - return false; - } - - log.error("Failed to copy session artifact file", { - ...params.logContext, - srcPath: params.srcPath, - destPath: params.destPath, - error: getErrorMessage(error), - }); - return false; - } -} - -async function copyDirIfMissingBestEffort(params: { - srcDir: string; - destDir: string; - logContext: Record; -}): Promise { - try { - try { - const stat = await fsPromises.stat(params.destDir); - if (stat.isDirectory()) { - return; - } - // If it's a file, fall through and try to copy (will likely fail). 
- } catch (error: unknown) { - if (!isErrnoWithCode(error, "ENOENT")) { - throw error; - } - } - - await fsPromises.mkdir(path.dirname(params.destDir), { recursive: true }); - await fsPromises.cp(params.srcDir, params.destDir, { recursive: true }); - } catch (error: unknown) { - if (isErrnoWithCode(error, "ENOENT")) { - return; - } - - log.error("Failed to copy session artifact directory", { - ...params.logContext, - srcDir: params.srcDir, - destDir: params.destDir, - error: getErrorMessage(error), - }); - } -} - -function coerceUpdatedAtMs(entry: { createdAtMs?: number; updatedAtMs?: number }): number { - if (typeof entry.updatedAtMs === "number" && Number.isFinite(entry.updatedAtMs)) { - return entry.updatedAtMs; - } - - if (typeof entry.createdAtMs === "number" && Number.isFinite(entry.createdAtMs)) { - return entry.createdAtMs; - } - - return 0; -} - -function rollUpAncestorWorkspaceIds(params: { - ancestorWorkspaceIds: string[]; - removedWorkspaceId: string; - newParentWorkspaceId: string; -}): string[] { - const filtered = params.ancestorWorkspaceIds.filter((id) => id !== params.removedWorkspaceId); - - // Ensure the roll-up target is first (parent-first ordering). - if (filtered[0] === params.newParentWorkspaceId) { - return filtered; - } - - return [ - params.newParentWorkspaceId, - ...filtered.filter((id) => id !== params.newParentWorkspaceId), - ]; -} - -async function archiveChildSessionArtifactsIntoParentSessionDir(params: { - parentWorkspaceId: string; - parentSessionDir: string; - childWorkspaceId: string; - childSessionDir: string; - /** Task-level model string for the child workspace (optional; persists into transcript artifacts). */ - childTaskModelString?: string; - /** Task-level thinking/reasoning level for the child workspace (optional; persists into transcript artifacts). 
*/ - childTaskThinkingLevel?: ThinkingLevel; -}): Promise { - if (params.parentWorkspaceId.length === 0) { - return; - } - - if (params.childWorkspaceId.length === 0) { - return; - } - - if (params.parentSessionDir.length === 0 || params.childSessionDir.length === 0) { - return; - } - - // 1) Archive the child session transcript (chat.jsonl + partial.json) into the parent session dir - // BEFORE deleting ~/.mux/sessions/. - try { - const childChatPath = path.join(params.childSessionDir, "chat.jsonl"); - const childPartialPath = path.join(params.childSessionDir, "partial.json"); - - const archivedChatPath = getSubagentTranscriptChatPath( - params.parentSessionDir, - params.childWorkspaceId - ); - const archivedPartialPath = getSubagentTranscriptPartialPath( - params.parentSessionDir, - params.childWorkspaceId - ); - - // Defensive: avoid path traversal in workspace IDs. - if (!isPathInsideDir(params.parentSessionDir, archivedChatPath)) { - log.error("Refusing to archive session transcript outside parent session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - parentSessionDir: params.parentSessionDir, - archivedChatPath, - }); - } else { - const didCopyChat = await copyFileBestEffort({ - srcPath: childChatPath, - destPath: archivedChatPath, - logContext: { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - artifact: "chat.jsonl", - }, - }); - - const didCopyPartial = await copyFileBestEffort({ - srcPath: childPartialPath, - destPath: archivedPartialPath, - logContext: { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - artifact: "partial.json", - }, - }); - - if (didCopyChat || didCopyPartial) { - const nowMs = Date.now(); - - const model = - typeof params.childTaskModelString === "string" && - params.childTaskModelString.trim().length > 0 - ? 
params.childTaskModelString.trim() - : undefined; - const thinkingLevel = coerceThinkingLevel(params.childTaskThinkingLevel); - - await upsertSubagentTranscriptArtifactIndexEntry({ - workspaceId: params.parentWorkspaceId, - workspaceSessionDir: params.parentSessionDir, - childTaskId: params.childWorkspaceId, - updater: (existing) => ({ - childTaskId: params.childWorkspaceId, - parentWorkspaceId: params.parentWorkspaceId, - createdAtMs: existing?.createdAtMs ?? nowMs, - updatedAtMs: nowMs, - model: model ?? existing?.model, - thinkingLevel: thinkingLevel ?? existing?.thinkingLevel, - chatPath: didCopyChat ? archivedChatPath : existing?.chatPath, - partialPath: didCopyPartial ? archivedPartialPath : existing?.partialPath, - }), - }); - } - } - } catch (error: unknown) { - log.error("Failed to archive child transcript into parent session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - error: getErrorMessage(error), - }); - } - - // 2) Roll up nested subagent artifacts from the child session dir into the parent session dir. - // This preserves grandchild artifacts when intermediate subagent workspaces are cleaned up. - - // --- subagent-patches.json + subagent-patches//... 
- try { - const childArtifacts = await readSubagentGitPatchArtifactsFile(params.childSessionDir); - const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); - - for (const [taskId] of childEntries) { - if (!taskId) continue; - - const srcDir = path.dirname(getSubagentGitPatchMboxPath(params.childSessionDir, taskId)); - const destDir = path.dirname(getSubagentGitPatchMboxPath(params.parentSessionDir, taskId)); - - if (!isPathInsideDir(params.childSessionDir, srcDir)) { - log.error("Refusing to roll up patch artifact outside child session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - childSessionDir: params.childSessionDir, - srcDir, - }); - continue; - } - - if (!isPathInsideDir(params.parentSessionDir, destDir)) { - log.error("Refusing to roll up patch artifact outside parent session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - parentSessionDir: params.parentSessionDir, - destDir, - }); - continue; - } - - await copyDirIfMissingBestEffort({ - srcDir, - destDir, - logContext: { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - artifact: "subagent-patches", - taskId, - }, - }); - } - - if (childEntries.length > 0) { - await updateSubagentGitPatchArtifactsFile({ - workspaceId: params.parentWorkspaceId, - workspaceSessionDir: params.parentSessionDir, - update: (parentFile) => { - for (const [taskId, childEntry] of childEntries) { - if (!taskId) continue; - const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; - - const childUpdated = coerceUpdatedAtMs(childEntry); - const existingUpdated = existing ? 
coerceUpdatedAtMs(existing) : -1; - - if (!existing || childUpdated > existingUpdated) { - parentFile.artifactsByChildTaskId[taskId] = { - ...childEntry, - childTaskId: taskId, - parentWorkspaceId: params.parentWorkspaceId, - mboxPath: getSubagentGitPatchMboxPath(params.parentSessionDir, taskId), - }; - } - } - }, - }); - } - } catch (error: unknown) { - log.error("Failed to roll up subagent patch artifacts into parent", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - error: getErrorMessage(error), - }); - } - - // --- subagent-reports.json + subagent-reports//... - try { - const childArtifacts = await readSubagentReportArtifactsFile(params.childSessionDir); - const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); - - for (const [taskId] of childEntries) { - if (!taskId) continue; - - const srcDir = path.dirname(getSubagentReportArtifactPath(params.childSessionDir, taskId)); - const destDir = path.dirname(getSubagentReportArtifactPath(params.parentSessionDir, taskId)); - - if (!isPathInsideDir(params.childSessionDir, srcDir)) { - log.error("Refusing to roll up report artifact outside child session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - childSessionDir: params.childSessionDir, - srcDir, - }); - continue; - } - - if (!isPathInsideDir(params.parentSessionDir, destDir)) { - log.error("Refusing to roll up report artifact outside parent session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - parentSessionDir: params.parentSessionDir, - destDir, - }); - continue; - } - - await copyDirIfMissingBestEffort({ - srcDir, - destDir, - logContext: { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - artifact: "subagent-reports", - taskId, - }, - }); - } - - if (childEntries.length > 0) { - await updateSubagentReportArtifactsFile({ - 
workspaceId: params.parentWorkspaceId, - workspaceSessionDir: params.parentSessionDir, - update: (parentFile) => { - for (const [taskId, childEntry] of childEntries) { - if (!taskId) continue; - - const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; - const childUpdated = coerceUpdatedAtMs(childEntry); - const existingUpdated = existing ? coerceUpdatedAtMs(existing) : -1; - - if (!existing || childUpdated > existingUpdated) { - parentFile.artifactsByChildTaskId[taskId] = { - ...childEntry, - childTaskId: taskId, - parentWorkspaceId: params.parentWorkspaceId, - ancestorWorkspaceIds: rollUpAncestorWorkspaceIds({ - ancestorWorkspaceIds: childEntry.ancestorWorkspaceIds, - removedWorkspaceId: params.childWorkspaceId, - newParentWorkspaceId: params.parentWorkspaceId, - }), - }; - } - } - }, - }); - } - } catch (error: unknown) { - log.error("Failed to roll up subagent report artifacts into parent", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - error: getErrorMessage(error), - }); - } - - // --- subagent-transcripts.json + subagent-transcripts//... 
- try { - const childArtifacts = await readSubagentTranscriptArtifactsFile(params.childSessionDir); - const childEntries = Object.entries(childArtifacts.artifactsByChildTaskId); - - for (const [taskId] of childEntries) { - if (!taskId) continue; - - const srcDir = path.dirname(getSubagentTranscriptChatPath(params.childSessionDir, taskId)); - const destDir = path.dirname(getSubagentTranscriptChatPath(params.parentSessionDir, taskId)); - - if (!isPathInsideDir(params.childSessionDir, srcDir)) { - log.error("Refusing to roll up transcript artifact outside child session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - childSessionDir: params.childSessionDir, - srcDir, - }); - continue; - } - - if (!isPathInsideDir(params.parentSessionDir, destDir)) { - log.error("Refusing to roll up transcript artifact outside parent session dir", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - taskId, - parentSessionDir: params.parentSessionDir, - destDir, - }); - continue; - } - - await copyDirIfMissingBestEffort({ - srcDir, - destDir, - logContext: { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - artifact: "subagent-transcripts", - taskId, - }, - }); - } - - if (childEntries.length > 0) { - await updateSubagentTranscriptArtifactsFile({ - workspaceId: params.parentWorkspaceId, - workspaceSessionDir: params.parentSessionDir, - update: (parentFile) => { - for (const [taskId, childEntry] of childEntries) { - if (!taskId) continue; - - const existing = parentFile.artifactsByChildTaskId[taskId] ?? null; - const childUpdated = coerceUpdatedAtMs(childEntry); - const existingUpdated = existing ? 
coerceUpdatedAtMs(existing) : -1; - - if (!existing || childUpdated > existingUpdated) { - parentFile.artifactsByChildTaskId[taskId] = { - ...childEntry, - childTaskId: taskId, - parentWorkspaceId: params.parentWorkspaceId, - chatPath: childEntry.chatPath - ? getSubagentTranscriptChatPath(params.parentSessionDir, taskId) - : undefined, - partialPath: childEntry.partialPath - ? getSubagentTranscriptPartialPath(params.parentSessionDir, taskId) - : undefined, - }; - } - } - }, - }); - } - } catch (error: unknown) { - log.error("Failed to roll up subagent transcript artifacts into parent", { - parentWorkspaceId: params.parentWorkspaceId, - childWorkspaceId: params.childWorkspaceId, - error: getErrorMessage(error), - }); - } -} - async function forEachWithConcurrencyLimit( items: readonly T[], limit: number, From b7df920959bcd7afc6d187989ffabdb194f301b4 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 11:58:01 -0600 Subject: [PATCH 05/14] refactor: extract workspace AI settings to dedicated module MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extract 7 workspace AI settings methods from WorkspaceService into standalone functions in src/node/services/workspaceAISettings.ts: - normalizeWorkspaceAISettings: validate + normalize model/thinking - normalizeSendMessageAgentId: normalize agentId in options - extractWorkspaceAISettingsFromSendOptions: extract AI settings from send opts - maybePersistAISettingsFromOptions: best-effort settings persistence - persistWorkspaceAISettingsForAgent: persist settings to config + emit - updateAgentAISettings: validate + persist AI settings WorkspaceService retains the same public API (updateModeAISettings, updateAgentAISettings) and private helpers, all delegating to the new module. An EmitMetadataFn callback bridges the session/event emission. Zero logic changes — pure extraction. 
--- src/node/services/workspaceAISettings.ts | 231 +++++++++++++++++++++++ src/node/services/workspaceService.ts | 198 +++---------------- 2 files changed, 259 insertions(+), 170 deletions(-) create mode 100644 src/node/services/workspaceAISettings.ts diff --git a/src/node/services/workspaceAISettings.ts b/src/node/services/workspaceAISettings.ts new file mode 100644 index 0000000000..b83be82382 --- /dev/null +++ b/src/node/services/workspaceAISettings.ts @@ -0,0 +1,231 @@ +import type { z } from "zod"; +import type { Config } from "@/node/config"; +import type { Result } from "@/common/types/result"; +import { Ok, Err } from "@/common/types/result"; +import { log } from "@/node/services/log"; +import { isValidModelFormat, normalizeGatewayModel } from "@/common/utils/ai/models"; +import { WORKSPACE_DEFAULTS } from "@/constants/workspaceDefaults"; +import { getErrorMessage } from "@/common/utils/errors"; +import type { SendMessageOptions } from "@/common/orpc/types"; +import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; +import type { WorkspaceAISettingsSchema } from "@/common/orpc/schemas"; + +type WorkspaceAISettings = z.infer; + +/** + * Validate + normalize model/thinking settings. + */ +export function normalizeWorkspaceAISettings( + aiSettings: WorkspaceAISettings +): Result { + const rawModel = aiSettings.model; + const model = normalizeGatewayModel(rawModel).trim(); + if (!model) { + return Err("Model is required"); + } + if (!isValidModelFormat(model)) { + return Err(`Invalid model format: ${rawModel}`); + } + + return Ok({ + model, + thinkingLevel: aiSettings.thinkingLevel, + }); +} + +/** + * Normalize agentId in send message options. + */ +export function normalizeSendMessageAgentId(options: SendMessageOptions): SendMessageOptions { + // agentId is required by the schema, so this just normalizes the value. 
+ const rawAgentId = options.agentId; + const normalizedAgentId = + typeof rawAgentId === "string" && rawAgentId.trim().length > 0 + ? rawAgentId.trim().toLowerCase() + : WORKSPACE_DEFAULTS.agentId; + + if (normalizedAgentId === options.agentId) { + return options; + } + + return { + ...options, + agentId: normalizedAgentId, + }; +} + +/** + * Extract AI settings from send message options, returning null if + * the options don't contain valid model + thinking level. + */ +export function extractWorkspaceAISettingsFromSendOptions( + options: SendMessageOptions | undefined +): WorkspaceAISettings | null { + const rawModel = options?.model; + if (typeof rawModel !== "string" || rawModel.trim().length === 0) { + return null; + } + + const model = normalizeGatewayModel(rawModel).trim(); + if (!isValidModelFormat(model)) { + return null; + } + + const requestedThinking = options?.thinkingLevel; + // Be defensive: if a (very) old client doesn't send thinkingLevel, don't overwrite + // any existing workspace-scoped value. + if (requestedThinking === undefined) { + return null; + } + + const thinkingLevel = requestedThinking; + + return { model, thinkingLevel }; +} + +/** Callback to emit updated workspace metadata after persistence. */ +export type EmitMetadataFn = ( + workspaceId: string, + metadata: FrontendWorkspaceMetadata | null +) => void; + +/** + * Persist AI settings for a specific agent within a workspace's config. + * Pure config mutation — uses the provided Config instance and emitMetadata callback. 
+ */ +export async function persistWorkspaceAISettingsForAgent( + config: Config, + emitMetadata: EmitMetadataFn, + workspaceId: string, + agentId: string, + aiSettings: WorkspaceAISettings, + options?: { emitMetadata?: boolean } +): Promise> { + const found = config.findWorkspace(workspaceId); + if (!found) { + return Err("Workspace not found"); + } + + const { projectPath, workspacePath } = found; + + const projectsConfig = config.loadConfigOrDefault(); + const projectConfig = projectsConfig.projects.get(projectPath); + if (!projectConfig) { + return Err(`Project not found: ${projectPath}`); + } + + const workspaceEntry = projectConfig.workspaces.find((w) => w.id === workspaceId); + const workspaceEntryWithFallback = + workspaceEntry ?? projectConfig.workspaces.find((w) => w.path === workspacePath); + if (!workspaceEntryWithFallback) { + return Err("Workspace not found"); + } + + const normalizedAgentId = agentId.trim().toLowerCase(); + if (!normalizedAgentId) { + return Err("Agent ID is required"); + } + + const prev = workspaceEntryWithFallback.aiSettingsByAgent?.[normalizedAgentId]; + const changed = + prev?.model !== aiSettings.model || prev?.thinkingLevel !== aiSettings.thinkingLevel; + if (!changed) { + return Ok(false); + } + + workspaceEntryWithFallback.aiSettingsByAgent = { + ...(workspaceEntryWithFallback.aiSettingsByAgent ?? {}), + [normalizedAgentId]: aiSettings, + }; + + await config.saveConfig(projectsConfig); + + if (options?.emitMetadata !== false) { + const allMetadata = await config.getAllWorkspaceMetadata(); + const updatedMetadata = allMetadata.find((m) => m.id === workspaceId) ?? null; + emitMetadata(workspaceId, updatedMetadata); + } + + return Ok(true); +} + +/** + * Best-effort persist AI settings from send/resume options. + * Skips requests explicitly marked to avoid persistence. 
+ */ +export async function maybePersistAISettingsFromOptions( + config: Config, + emitMetadata: EmitMetadataFn, + workspaceId: string, + options: SendMessageOptions | undefined, + context: "send" | "resume" +): Promise { + if (options?.skipAiSettingsPersistence) { + // One-shot/compaction sends shouldn't overwrite workspace defaults. + return; + } + + const extractedSettings = extractWorkspaceAISettingsFromSendOptions(options); + if (!extractedSettings) return; + + const rawAgentId = options?.agentId; + const agentId = + typeof rawAgentId === "string" && rawAgentId.trim().length > 0 + ? rawAgentId.trim().toLowerCase() + : WORKSPACE_DEFAULTS.agentId; + + const persistResult = await persistWorkspaceAISettingsForAgent( + config, + emitMetadata, + workspaceId, + agentId, + extractedSettings, + { + emitMetadata: false, + } + ); + if (!persistResult.success) { + log.debug(`Failed to persist workspace AI settings from ${context} options`, { + workspaceId, + error: persistResult.error, + }); + } +} + +/** + * Validate, normalize, and persist AI settings for an agent. + * Wraps normalizeWorkspaceAISettings + persistWorkspaceAISettingsForAgent. 
+ */ +export async function updateAgentAISettings( + config: Config, + emitMetadata: EmitMetadataFn, + workspaceId: string, + agentId: string, + aiSettings: WorkspaceAISettings +): Promise> { + try { + const normalized = normalizeWorkspaceAISettings(aiSettings); + if (!normalized.success) { + return Err(normalized.error); + } + + const persistResult = await persistWorkspaceAISettingsForAgent( + config, + emitMetadata, + workspaceId, + agentId, + normalized.data, + { + emitMetadata: true, + } + ); + if (!persistResult.success) { + return Err(persistResult.error); + } + + return Ok(undefined); + } catch (error) { + const message = getErrorMessage(error); + return Err(`Failed to update workspace AI settings: ${message}`); + } +} diff --git a/src/node/services/workspaceService.ts b/src/node/services/workspaceService.ts index e8ae57652b..437a9baa66 100644 --- a/src/node/services/workspaceService.ts +++ b/src/node/services/workspaceService.ts @@ -67,7 +67,7 @@ import { isSSHRuntime, isDockerRuntime, } from "@/common/types/runtime"; -import { isValidModelFormat, normalizeGatewayModel } from "@/common/utils/ai/models"; + import { coerceThinkingLevel, type ThinkingLevel } from "@/common/types/thinking"; import { WORKSPACE_DEFAULTS } from "@/constants/workspaceDefaults"; import type { StreamEndEvent, StreamAbortEvent } from "@/common/types/stream"; @@ -78,6 +78,12 @@ import type { SessionUsageService } from "@/node/services/sessionUsageService"; import type { BackgroundProcessManager } from "@/node/services/backgroundProcessManager"; import type { WorkspaceLifecycleHooks } from "@/node/services/workspaceLifecycleHooks"; import type { TaskService } from "@/node/services/taskService"; +import { + normalizeSendMessageAgentId as normalizeAgentId, + maybePersistAISettingsFromOptions as maybePersistAISettings, + updateAgentAISettings as updateAgentAISettingsFn, + type EmitMetadataFn, +} from "@/node/services/workspaceAISettings"; import { DisposableTempDir } from 
"@/node/services/tempDir"; import { createBashTool } from "@/node/services/tools/bash"; @@ -1644,164 +1650,32 @@ export class WorkspaceService extends EventEmitter { } } - private normalizeWorkspaceAISettings( - aiSettings: WorkspaceAISettings - ): Result { - const rawModel = aiSettings.model; - const model = normalizeGatewayModel(rawModel).trim(); - if (!model) { - return Err("Model is required"); - } - if (!isValidModelFormat(model)) { - return Err(`Invalid model format: ${rawModel}`); + /** Callback that routes metadata emission through active sessions or fallback event. */ + private readonly emitWorkspaceMetadata: EmitMetadataFn = (workspaceId, metadata) => { + const session = this.sessions.get(workspaceId); + if (session) { + session.emitMetadata(metadata); + } else { + this.emit("metadata", { workspaceId, metadata }); } - - return Ok({ - model, - thinkingLevel: aiSettings.thinkingLevel, - }); - } + }; private normalizeSendMessageAgentId(options: SendMessageOptions): SendMessageOptions { - // agentId is required by the schema, so this just normalizes the value. - const rawAgentId = options.agentId; - const normalizedAgentId = - typeof rawAgentId === "string" && rawAgentId.trim().length > 0 - ? 
rawAgentId.trim().toLowerCase() - : WORKSPACE_DEFAULTS.agentId; - - if (normalizedAgentId === options.agentId) { - return options; - } - - return { - ...options, - agentId: normalizedAgentId, - }; + return normalizeAgentId(options); } - private extractWorkspaceAISettingsFromSendOptions( - options: SendMessageOptions | undefined - ): WorkspaceAISettings | null { - const rawModel = options?.model; - if (typeof rawModel !== "string" || rawModel.trim().length === 0) { - return null; - } - - const model = normalizeGatewayModel(rawModel).trim(); - if (!isValidModelFormat(model)) { - return null; - } - - const requestedThinking = options?.thinkingLevel; - // Be defensive: if a (very) old client doesn't send thinkingLevel, don't overwrite - // any existing workspace-scoped value. - if (requestedThinking === undefined) { - return null; - } - - const thinkingLevel = requestedThinking; - - return { model, thinkingLevel }; - } - - /** - * Best-effort persist AI settings from send/resume options. - * Skips requests explicitly marked to avoid persistence. - */ private async maybePersistAISettingsFromOptions( workspaceId: string, options: SendMessageOptions | undefined, context: "send" | "resume" ): Promise { - if (options?.skipAiSettingsPersistence) { - // One-shot/compaction sends shouldn't overwrite workspace defaults. - return; - } - - const extractedSettings = this.extractWorkspaceAISettingsFromSendOptions(options); - if (!extractedSettings) return; - - const rawAgentId = options?.agentId; - const agentId = - typeof rawAgentId === "string" && rawAgentId.trim().length > 0 - ? 
rawAgentId.trim().toLowerCase() - : WORKSPACE_DEFAULTS.agentId; - - const persistResult = await this.persistWorkspaceAISettingsForAgent( + return maybePersistAISettings( + this.config, + this.emitWorkspaceMetadata, workspaceId, - agentId, - extractedSettings, - { - emitMetadata: false, - } + options, + context ); - if (!persistResult.success) { - log.debug(`Failed to persist workspace AI settings from ${context} options`, { - workspaceId, - error: persistResult.error, - }); - } - } - - private async persistWorkspaceAISettingsForAgent( - workspaceId: string, - agentId: string, - aiSettings: WorkspaceAISettings, - options?: { emitMetadata?: boolean } - ): Promise> { - const found = this.config.findWorkspace(workspaceId); - if (!found) { - return Err("Workspace not found"); - } - - const { projectPath, workspacePath } = found; - - const config = this.config.loadConfigOrDefault(); - const projectConfig = config.projects.get(projectPath); - if (!projectConfig) { - return Err(`Project not found: ${projectPath}`); - } - - const workspaceEntry = projectConfig.workspaces.find((w) => w.id === workspaceId); - const workspaceEntryWithFallback = - workspaceEntry ?? projectConfig.workspaces.find((w) => w.path === workspacePath); - if (!workspaceEntryWithFallback) { - return Err("Workspace not found"); - } - - const normalizedAgentId = agentId.trim().toLowerCase(); - if (!normalizedAgentId) { - return Err("Agent ID is required"); - } - - const prev = workspaceEntryWithFallback.aiSettingsByAgent?.[normalizedAgentId]; - const changed = - prev?.model !== aiSettings.model || prev?.thinkingLevel !== aiSettings.thinkingLevel; - if (!changed) { - return Ok(false); - } - - workspaceEntryWithFallback.aiSettingsByAgent = { - ...(workspaceEntryWithFallback.aiSettingsByAgent ?? 
{}), - [normalizedAgentId]: aiSettings, - }; - - await this.config.saveConfig(config); - - if (options?.emitMetadata !== false) { - const allMetadata = await this.config.getAllWorkspaceMetadata(); - const updatedMetadata = allMetadata.find((m) => m.id === workspaceId) ?? null; - - const session = this.sessions.get(workspaceId); - if (session) { - session.emitMetadata(updatedMetadata); - } else { - this.emit("metadata", { workspaceId, metadata: updatedMetadata }); - } - } - - return Ok(true); } async updateModeAISettings( @@ -1818,29 +1692,13 @@ export class WorkspaceService extends EventEmitter { agentId: string, aiSettings: WorkspaceAISettings ): Promise> { - try { - const normalized = this.normalizeWorkspaceAISettings(aiSettings); - if (!normalized.success) { - return Err(normalized.error); - } - - const persistResult = await this.persistWorkspaceAISettingsForAgent( - workspaceId, - agentId, - normalized.data, - { - emitMetadata: true, - } - ); - if (!persistResult.success) { - return Err(persistResult.error); - } - - return Ok(undefined); - } catch (error) { - const message = getErrorMessage(error); - return Err(`Failed to update workspace AI settings: ${message}`); - } + return updateAgentAISettingsFn( + this.config, + this.emitWorkspaceMetadata, + workspaceId, + agentId, + aiSettings + ); } async fork( From ff5102a0bbd7c0842a9441b3d604dddb5ee68d7d Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 12:02:02 -0600 Subject: [PATCH 06/14] refactor: extract TaskHierarchyIndex from TaskService MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extract all task hierarchy/indexing query methods from taskService.ts into a new taskHierarchyIndex.ts module. These are pure functions operating on config data, making them the cleanest extraction target with zero circular dependency risk. 
Extracted functions: - buildAgentTaskIndex, listAgentTaskWorkspaces (index building) - listDescendantAgentTaskIdsFromIndex, isDescendantAgentTaskUsingParentById, listAncestorWorkspaceIdsUsingParentById (tree traversal) - getTaskDepth, getTaskDepthFromParentById (depth helpers) - countActiveAgentTasks, hasActiveDescendantAgentTasks, listActiveDescendantAgentTaskIds, listDescendantAgentTasks (active queries) - getAgentTaskStatus (status query) TaskService's public API remains unchanged — methods now delegate to the standalone functions. countActiveAgentTasks takes isForegroundAwaiting and isStreaming callbacks to avoid service-level dependencies. --- src/node/services/taskHierarchyIndex.ts | 322 ++++++++++++++++++++ src/node/services/taskService.ts | 378 ++++-------------------- 2 files changed, 384 insertions(+), 316 deletions(-) create mode 100644 src/node/services/taskHierarchyIndex.ts diff --git a/src/node/services/taskHierarchyIndex.ts b/src/node/services/taskHierarchyIndex.ts new file mode 100644 index 0000000000..c5a2287366 --- /dev/null +++ b/src/node/services/taskHierarchyIndex.ts @@ -0,0 +1,322 @@ +import assert from "node:assert/strict"; + +import type { Workspace as WorkspaceConfigEntry } from "@/node/config"; +import type { ProjectsConfig } from "@/common/types/project"; +import type { AgentTaskStatus, DescendantAgentTaskInfo } from "@/node/services/taskService"; +import { findWorkspaceEntry } from "@/node/services/taskUtils"; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export type AgentTaskWorkspaceEntry = WorkspaceConfigEntry & { projectPath: string }; + +export interface AgentTaskIndex { + byId: Map; + childrenByParent: Map; + parentById: Map; +} + +// --------------------------------------------------------------------------- +// Pure index-building helpers +// 
--------------------------------------------------------------------------- + +export function listAgentTaskWorkspaces(config: ProjectsConfig): AgentTaskWorkspaceEntry[] { + const tasks: AgentTaskWorkspaceEntry[] = []; + for (const [projectPath, project] of config.projects) { + for (const workspace of project.workspaces) { + if (!workspace.id) continue; + if (!workspace.parentWorkspaceId) continue; + tasks.push({ ...workspace, projectPath }); + } + } + return tasks; +} + +export function buildAgentTaskIndex(config: ProjectsConfig): AgentTaskIndex { + const byId = new Map(); + const childrenByParent = new Map(); + const parentById = new Map(); + + for (const task of listAgentTaskWorkspaces(config)) { + const taskId = task.id!; + byId.set(taskId, task); + + const parent = task.parentWorkspaceId; + if (!parent) continue; + + parentById.set(taskId, parent); + const list = childrenByParent.get(parent) ?? []; + list.push(taskId); + childrenByParent.set(parent, list); + } + + return { byId, childrenByParent, parentById }; +} + +// --------------------------------------------------------------------------- +// Tree-traversal queries +// --------------------------------------------------------------------------- + +export function listDescendantAgentTaskIdsFromIndex( + index: AgentTaskIndex, + workspaceId: string +): string[] { + assert( + workspaceId.length > 0, + "listDescendantAgentTaskIdsFromIndex: workspaceId must be non-empty" + ); + + const result: string[] = []; + const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? 
[])]; + while (stack.length > 0) { + const next = stack.pop()!; + result.push(next); + const children = index.childrenByParent.get(next); + if (children) { + for (const child of children) { + stack.push(child); + } + } + } + return result; +} + +export function isDescendantAgentTaskUsingParentById( + parentById: Map, + ancestorWorkspaceId: string, + taskId: string +): boolean { + let current = taskId; + for (let i = 0; i < 32; i++) { + const parent = parentById.get(current); + if (!parent) return false; + if (parent === ancestorWorkspaceId) return true; + current = parent; + } + + throw new Error( + `isDescendantAgentTaskUsingParentById: possible parentWorkspaceId cycle starting at ${taskId}` + ); +} + +export function listAncestorWorkspaceIdsUsingParentById( + parentById: Map, + taskId: string +): string[] { + const ancestors: string[] = []; + + let current = taskId; + for (let i = 0; i < 32; i++) { + const parent = parentById.get(current); + if (!parent) return ancestors; + ancestors.push(parent); + current = parent; + } + + throw new Error( + `listAncestorWorkspaceIdsUsingParentById: possible parentWorkspaceId cycle starting at ${taskId}` + ); +} + +// --------------------------------------------------------------------------- +// Depth helpers +// --------------------------------------------------------------------------- + +export function getTaskDepthFromParentById( + parentById: Map, + workspaceId: string +): number { + let depth = 0; + let current = workspaceId; + for (let i = 0; i < 32; i++) { + const parent = parentById.get(current); + if (!parent) break; + depth += 1; + current = parent; + } + + if (depth >= 32) { + throw new Error( + `getTaskDepthFromParentById: possible parentWorkspaceId cycle starting at ${workspaceId}` + ); + } + + return depth; +} + +export function getTaskDepth(config: ProjectsConfig, workspaceId: string): number { + assert(workspaceId.length > 0, "getTaskDepth: workspaceId must be non-empty"); + + return 
getTaskDepthFromParentById(buildAgentTaskIndex(config).parentById, workspaceId); +} + +// --------------------------------------------------------------------------- +// Active-task queries +// --------------------------------------------------------------------------- + +/** + * Count currently active agent tasks. + * + * @param isForegroundAwaiting - returns true when a task workspace is blocked in a foreground wait + * @param isStreaming - returns true when a task workspace is still streaming (defensive check) + */ +export function countActiveAgentTasks( + config: ProjectsConfig, + isForegroundAwaiting: (workspaceId: string) => boolean, + isStreaming: (workspaceId: string) => boolean +): number { + let activeCount = 0; + for (const task of listAgentTaskWorkspaces(config)) { + const status: AgentTaskStatus = task.taskStatus ?? "running"; + // If this task workspace is blocked in a foreground wait, do not count it towards parallelism. + // This prevents deadlocks where a task spawns a nested task in the foreground while + // maxParallelAgentTasks is low (e.g. 1). + // Note: StreamManager can still report isStreaming() while a tool call is executing, so + // isStreaming is not a reliable signal for "actively doing work" here. + if (status === "running" && task.id && isForegroundAwaiting(task.id)) { + continue; + } + if (status === "running" || status === "awaiting_report") { + activeCount += 1; + continue; + } + + // Defensive: a task may still be streaming even after it transitioned to another status + // (e.g. tool-call-end happened but the stream hasn't ended yet). Count it as active so we + // never exceed the configured parallel limit. 
+ if (task.id && isStreaming(task.id)) { + activeCount += 1; + } + } + + return activeCount; +} + +export function hasActiveDescendantAgentTasks( + config: ProjectsConfig, + workspaceId: string +): boolean { + assert(workspaceId.length > 0, "hasActiveDescendantAgentTasks: workspaceId must be non-empty"); + + const index = buildAgentTaskIndex(config); + + const activeStatuses = new Set(["queued", "running", "awaiting_report"]); + const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? [])]; + while (stack.length > 0) { + const next = stack.pop()!; + const status = index.byId.get(next)?.taskStatus; + if (status && activeStatuses.has(status)) { + return true; + } + const children = index.childrenByParent.get(next); + if (children) { + for (const child of children) { + stack.push(child); + } + } + } + + return false; +} + +export function listActiveDescendantAgentTaskIds( + config: ProjectsConfig, + workspaceId: string +): string[] { + assert(workspaceId.length > 0, "listActiveDescendantAgentTaskIds: workspaceId must be non-empty"); + + const index = buildAgentTaskIndex(config); + + const activeStatuses = new Set(["queued", "running", "awaiting_report"]); + const result: string[] = []; + const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? [])]; + while (stack.length > 0) { + const next = stack.pop()!; + const status = index.byId.get(next)?.taskStatus; + if (status && activeStatuses.has(status)) { + result.push(next); + } + const children = index.childrenByParent.get(next); + if (children) { + for (const child of children) { + stack.push(child); + } + } + } + return result; +} + +export function listDescendantAgentTasks( + config: ProjectsConfig, + workspaceId: string, + options?: { statuses?: AgentTaskStatus[] } +): DescendantAgentTaskInfo[] { + assert(workspaceId.length > 0, "listDescendantAgentTasks: workspaceId must be non-empty"); + + const statuses = options?.statuses; + const statusFilter = statuses && statuses.length > 0 ? 
new Set(statuses) : null; + + const index = buildAgentTaskIndex(config); + + const result: DescendantAgentTaskInfo[] = []; + + const stack: Array<{ taskId: string; depth: number }> = []; + for (const childTaskId of index.childrenByParent.get(workspaceId) ?? []) { + stack.push({ taskId: childTaskId, depth: 1 }); + } + + while (stack.length > 0) { + const next = stack.pop()!; + const entry = index.byId.get(next.taskId); + if (!entry) continue; + + assert( + entry.parentWorkspaceId, + `listDescendantAgentTasks: task ${next.taskId} is missing parentWorkspaceId` + ); + + const status: AgentTaskStatus = entry.taskStatus ?? "running"; + if (!statusFilter || statusFilter.has(status)) { + result.push({ + taskId: next.taskId, + status, + parentWorkspaceId: entry.parentWorkspaceId, + agentType: entry.agentType, + workspaceName: entry.name, + title: entry.title, + createdAt: entry.createdAt, + modelString: entry.aiSettings?.model, + thinkingLevel: entry.aiSettings?.thinkingLevel, + depth: next.depth, + }); + } + + for (const childTaskId of index.childrenByParent.get(next.taskId) ?? []) { + stack.push({ taskId: childTaskId, depth: next.depth + 1 }); + } + } + + // Stable ordering: oldest first, then depth (ties by taskId for determinism). + result.sort((a, b) => { + const aTime = a.createdAt ? Date.parse(a.createdAt) : 0; + const bTime = b.createdAt ? 
Date.parse(b.createdAt) : 0; + if (aTime !== bTime) return aTime - bTime; + if (a.depth !== b.depth) return a.depth - b.depth; + return a.taskId.localeCompare(b.taskId); + }); + + return result; +} + +// --------------------------------------------------------------------------- +// Status query +// --------------------------------------------------------------------------- + +export function getAgentTaskStatus(config: ProjectsConfig, taskId: string): AgentTaskStatus | null { + assert(taskId.length > 0, "getAgentTaskStatus: taskId must be non-empty"); + + const entry = findWorkspaceEntry(config, taskId); + const status = entry?.workspace.taskStatus; + return status ?? null; +} diff --git a/src/node/services/taskService.ts b/src/node/services/taskService.ts index b2476296ff..8337bff8c2 100644 --- a/src/node/services/taskService.ts +++ b/src/node/services/taskService.ts @@ -27,6 +27,20 @@ import { tryReadGitHeadCommitSha, findWorkspaceEntry, } from "@/node/services/taskUtils"; +import { + buildAgentTaskIndex, + listAgentTaskWorkspaces, + countActiveAgentTasks, + hasActiveDescendantAgentTasks, + listActiveDescendantAgentTaskIds as listActiveDescendantAgentTaskIdsFn, + listDescendantAgentTasks as listDescendantAgentTasksFn, + listDescendantAgentTaskIdsFromIndex, + isDescendantAgentTaskUsingParentById, + listAncestorWorkspaceIdsUsingParentById, + getTaskDepth, + getTaskDepthFromParentById, + getAgentTaskStatus as getAgentTaskStatusFn, +} from "@/node/services/taskHierarchyIndex"; import { validateWorkspaceName } from "@/common/utils/validation/workspaceValidation"; import { Ok, Err, type Result } from "@/common/types/result"; import type { TaskSettings } from "@/common/types/tasks"; @@ -107,19 +121,11 @@ export interface DescendantAgentTaskInfo { depth: number; } -type AgentTaskWorkspaceEntry = WorkspaceConfigEntry & { projectPath: string }; - const COMPLETED_REPORT_CACHE_MAX_ENTRIES = 128; /** Maximum consecutive auto-resumes before stopping. 
Prevents infinite loops when descendants are stuck. */ const MAX_CONSECUTIVE_PARENT_AUTO_RESUMES = 3; -interface AgentTaskIndex { - byId: Map; - childrenByParent: Map; - parentById: Map; -} - interface PendingTaskWaiter { createdAt: number; resolve: (report: { reportMarkdown: string; title?: string }) => void; @@ -323,18 +329,16 @@ export class TaskService { await this.maybeStartQueuedTasks(); const config = this.config.loadConfigOrDefault(); - const awaitingReportTasks = this.listAgentTaskWorkspaces(config).filter( + const awaitingReportTasks = listAgentTaskWorkspaces(config).filter( (t) => t.taskStatus === "awaiting_report" ); - const runningTasks = this.listAgentTaskWorkspaces(config).filter( - (t) => t.taskStatus === "running" - ); + const runningTasks = listAgentTaskWorkspaces(config).filter((t) => t.taskStatus === "running"); for (const task of awaitingReportTasks) { if (!task.id) continue; // Avoid resuming a task while it still has active descendants (it shouldn't report yet). - const hasActiveDescendants = this.hasActiveDescendantAgentTasks(config, task.id); + const hasActiveDescendants = hasActiveDescendantAgentTasks(config, task.id); if (hasActiveDescendants) { continue; } @@ -371,7 +375,7 @@ export class TaskService { if (!task.id) continue; // Best-effort: if mux restarted mid-stream, nudge the agent to continue and report. // Only do this when the task has no running descendants, to avoid duplicate spawns. - const hasActiveDescendants = this.hasActiveDescendantAgentTasks(config, task.id); + const hasActiveDescendants = hasActiveDescendantAgentTasks(config, task.id); if (hasActiveDescendants) { continue; } @@ -394,7 +398,7 @@ export class TaskService { // Restart-safety for git patch artifacts: // - If mux crashed mid-generation, patch artifacts can be left "pending". // - Reported tasks are auto-deleted once they're leaves; defer deletion while patches are pending. 
- const reportedTasks = this.listAgentTaskWorkspaces(config).filter( + const reportedTasks = listAgentTaskWorkspaces(config).filter( (t) => t.taskStatus === "reported" && typeof t.id === "string" && t.id.length > 0 ); @@ -482,7 +486,7 @@ export class TaskService { return Err("Task.create: cannot spawn new tasks after agent_report"); } - const requestedDepth = this.getTaskDepth(cfg, parentWorkspaceId) + 1; + const requestedDepth = getTaskDepth(cfg, parentWorkspaceId) + 1; if (requestedDepth > taskSettings.maxTaskNestingDepth) { return Err( `Task.create: maxTaskNestingDepth exceeded (requestedDepth=${requestedDepth}, max=${taskSettings.maxTaskNestingDepth})` @@ -490,7 +494,11 @@ export class TaskService { } // Enforce parallelism (global). - const activeCount = this.countActiveAgentTasks(cfg); + const activeCount = countActiveAgentTasks( + cfg, + (id) => this.isForegroundAwaiting(id), + (id) => this.aiService.isStreaming(id) + ); const shouldQueue = activeCount >= taskSettings.maxParallelAgentTasks; const taskId = this.config.generateStableId(); @@ -889,22 +897,20 @@ export class TaskService { return Err("Task not found"); } - const index = this.buildAgentTaskIndex(cfg); - if ( - !this.isDescendantAgentTaskUsingParentById(index.parentById, ancestorWorkspaceId, taskId) - ) { + const index = buildAgentTaskIndex(cfg); + if (!isDescendantAgentTaskUsingParentById(index.parentById, ancestorWorkspaceId, taskId)) { return Err("Task is not a descendant of this workspace"); } // Terminate the entire subtree to avoid orphaned descendant tasks. - const descendants = this.listDescendantAgentTaskIdsFromIndex(index, taskId); + const descendants = listDescendantAgentTaskIdsFromIndex(index, taskId); const toTerminate = Array.from(new Set([taskId, ...descendants])); // Delete leaves first to avoid leaving children with missing parents. 
const parentById = index.parentById; const depthById = new Map(); for (const id of toTerminate) { - depthById.set(id, this.getTaskDepthFromParentById(parentById, id)); + depthById.set(id, getTaskDepthFromParentById(parentById, id)); } toTerminate.sort((a, b) => (depthById.get(b) ?? 0) - (depthById.get(a) ?? 0)); @@ -1225,112 +1231,22 @@ export class TaskService { } getAgentTaskStatus(taskId: string): AgentTaskStatus | null { - assert(taskId.length > 0, "getAgentTaskStatus: taskId must be non-empty"); - - const cfg = this.config.loadConfigOrDefault(); - const entry = findWorkspaceEntry(cfg, taskId); - const status = entry?.workspace.taskStatus; - return status ?? null; + return getAgentTaskStatusFn(this.config.loadConfigOrDefault(), taskId); } hasActiveDescendantAgentTasksForWorkspace(workspaceId: string): boolean { - assert( - workspaceId.length > 0, - "hasActiveDescendantAgentTasksForWorkspace: workspaceId must be non-empty" - ); - - const cfg = this.config.loadConfigOrDefault(); - return this.hasActiveDescendantAgentTasks(cfg, workspaceId); + return hasActiveDescendantAgentTasks(this.config.loadConfigOrDefault(), workspaceId); } listActiveDescendantAgentTaskIds(workspaceId: string): string[] { - assert( - workspaceId.length > 0, - "listActiveDescendantAgentTaskIds: workspaceId must be non-empty" - ); - - const cfg = this.config.loadConfigOrDefault(); - const index = this.buildAgentTaskIndex(cfg); - - const activeStatuses = new Set(["queued", "running", "awaiting_report"]); - const result: string[] = []; - const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? 
[])]; - while (stack.length > 0) { - const next = stack.pop()!; - const status = index.byId.get(next)?.taskStatus; - if (status && activeStatuses.has(status)) { - result.push(next); - } - const children = index.childrenByParent.get(next); - if (children) { - for (const child of children) { - stack.push(child); - } - } - } - return result; + return listActiveDescendantAgentTaskIdsFn(this.config.loadConfigOrDefault(), workspaceId); } listDescendantAgentTasks( workspaceId: string, options?: { statuses?: AgentTaskStatus[] } ): DescendantAgentTaskInfo[] { - assert(workspaceId.length > 0, "listDescendantAgentTasks: workspaceId must be non-empty"); - - const statuses = options?.statuses; - const statusFilter = statuses && statuses.length > 0 ? new Set(statuses) : null; - - const cfg = this.config.loadConfigOrDefault(); - const index = this.buildAgentTaskIndex(cfg); - - const result: DescendantAgentTaskInfo[] = []; - - const stack: Array<{ taskId: string; depth: number }> = []; - for (const childTaskId of index.childrenByParent.get(workspaceId) ?? []) { - stack.push({ taskId: childTaskId, depth: 1 }); - } - - while (stack.length > 0) { - const next = stack.pop()!; - const entry = index.byId.get(next.taskId); - if (!entry) continue; - - assert( - entry.parentWorkspaceId, - `listDescendantAgentTasks: task ${next.taskId} is missing parentWorkspaceId` - ); - - const status: AgentTaskStatus = entry.taskStatus ?? "running"; - if (!statusFilter || statusFilter.has(status)) { - result.push({ - taskId: next.taskId, - status, - parentWorkspaceId: entry.parentWorkspaceId, - agentType: entry.agentType, - workspaceName: entry.name, - title: entry.title, - createdAt: entry.createdAt, - modelString: entry.aiSettings?.model, - thinkingLevel: entry.aiSettings?.thinkingLevel, - depth: next.depth, - }); - } - - for (const childTaskId of index.childrenByParent.get(next.taskId) ?? 
[]) { - stack.push({ taskId: childTaskId, depth: next.depth + 1 }); - } - } - - // Stable ordering: oldest first, then depth (ties by taskId for determinism). - result.sort((a, b) => { - const aTime = a.createdAt ? Date.parse(a.createdAt) : 0; - const bTime = b.createdAt ? Date.parse(b.createdAt) : 0; - if (aTime !== bTime) return aTime - bTime; - if (a.depth !== b.depth) return a.depth - b.depth; - return a.taskId.localeCompare(b.taskId); - }); - - return result; + return listDescendantAgentTasksFn(this.config.loadConfigOrDefault(), workspaceId, options); } async filterDescendantAgentTaskIds( @@ -1344,7 +1260,7 @@ export class TaskService { assert(Array.isArray(taskIds), "filterDescendantAgentTaskIds: taskIds must be an array"); const cfg = this.config.loadConfigOrDefault(); - const parentById = this.buildAgentTaskIndex(cfg).parentById; + const parentById = buildAgentTaskIndex(cfg).parentById; const result: string[] = []; const maybePersisted: string[] = []; @@ -1352,7 +1268,7 @@ export class TaskService { for (const taskId of taskIds) { if (typeof taskId !== "string" || taskId.length === 0) continue; - if (this.isDescendantAgentTaskUsingParentById(parentById, ancestorWorkspaceId, taskId)) { + if (isDescendantAgentTaskUsingParentById(parentById, ancestorWorkspaceId, taskId)) { result.push(taskId); continue; } @@ -1383,37 +1299,13 @@ export class TaskService { return result; } - private listDescendantAgentTaskIdsFromIndex( - index: AgentTaskIndex, - workspaceId: string - ): string[] { - assert( - workspaceId.length > 0, - "listDescendantAgentTaskIdsFromIndex: workspaceId must be non-empty" - ); - - const result: string[] = []; - const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? 
[])]; - while (stack.length > 0) { - const next = stack.pop()!; - result.push(next); - const children = index.childrenByParent.get(next); - if (children) { - for (const child of children) { - stack.push(child); - } - } - } - return result; - } - async isDescendantAgentTask(ancestorWorkspaceId: string, taskId: string): Promise { assert(ancestorWorkspaceId.length > 0, "isDescendantAgentTask: ancestorWorkspaceId required"); assert(taskId.length > 0, "isDescendantAgentTask: taskId required"); const cfg = this.config.loadConfigOrDefault(); - const parentById = this.buildAgentTaskIndex(cfg).parentById; - if (this.isDescendantAgentTaskUsingParentById(parentById, ancestorWorkspaceId, taskId)) { + const parentById = buildAgentTaskIndex(cfg).parentById; + if (isDescendantAgentTaskUsingParentById(parentById, ancestorWorkspaceId, taskId)) { return true; } @@ -1430,173 +1322,19 @@ export class TaskService { return hasAncestorWorkspaceId(entry, ancestorWorkspaceId); } - private isDescendantAgentTaskUsingParentById( - parentById: Map, - ancestorWorkspaceId: string, - taskId: string - ): boolean { - let current = taskId; - for (let i = 0; i < 32; i++) { - const parent = parentById.get(current); - if (!parent) return false; - if (parent === ancestorWorkspaceId) return true; - current = parent; - } - - throw new Error( - `isDescendantAgentTaskUsingParentById: possible parentWorkspaceId cycle starting at ${taskId}` - ); - } - // --- Internal orchestration --- - private listAncestorWorkspaceIdsUsingParentById( - parentById: Map, - taskId: string - ): string[] { - const ancestors: string[] = []; - - let current = taskId; - for (let i = 0; i < 32; i++) { - const parent = parentById.get(current); - if (!parent) return ancestors; - ancestors.push(parent); - current = parent; - } - - throw new Error( - `listAncestorWorkspaceIdsUsingParentById: possible parentWorkspaceId cycle starting at ${taskId}` - ); - } - - private listAgentTaskWorkspaces( - config: ReturnType - ): 
AgentTaskWorkspaceEntry[] { - const tasks: AgentTaskWorkspaceEntry[] = []; - for (const [projectPath, project] of config.projects) { - for (const workspace of project.workspaces) { - if (!workspace.id) continue; - if (!workspace.parentWorkspaceId) continue; - tasks.push({ ...workspace, projectPath }); - } - } - return tasks; - } - - private buildAgentTaskIndex(config: ReturnType): AgentTaskIndex { - const byId = new Map(); - const childrenByParent = new Map(); - const parentById = new Map(); - - for (const task of this.listAgentTaskWorkspaces(config)) { - const taskId = task.id!; - byId.set(taskId, task); - - const parent = task.parentWorkspaceId; - if (!parent) continue; - - parentById.set(taskId, parent); - const list = childrenByParent.get(parent) ?? []; - list.push(taskId); - childrenByParent.set(parent, list); - } - - return { byId, childrenByParent, parentById }; - } - - private countActiveAgentTasks(config: ReturnType): number { - let activeCount = 0; - for (const task of this.listAgentTaskWorkspaces(config)) { - const status: AgentTaskStatus = task.taskStatus ?? "running"; - // If this task workspace is blocked in a foreground wait, do not count it towards parallelism. - // This prevents deadlocks where a task spawns a nested task in the foreground while - // maxParallelAgentTasks is low (e.g. 1). - // Note: StreamManager can still report isStreaming() while a tool call is executing, so - // isStreaming is not a reliable signal for "actively doing work" here. - if (status === "running" && task.id && this.isForegroundAwaiting(task.id)) { - continue; - } - if (status === "running" || status === "awaiting_report") { - activeCount += 1; - continue; - } - - // Defensive: a task may still be streaming even after it transitioned to another status - // (e.g. tool-call-end happened but the stream hasn't ended yet). Count it as active so we - // never exceed the configured parallel limit. 
- if (task.id && this.aiService.isStreaming(task.id)) { - activeCount += 1; - } - } - - return activeCount; - } - - private hasActiveDescendantAgentTasks( - config: ReturnType, - workspaceId: string - ): boolean { - assert(workspaceId.length > 0, "hasActiveDescendantAgentTasks: workspaceId must be non-empty"); - - const index = this.buildAgentTaskIndex(config); - - const activeStatuses = new Set(["queued", "running", "awaiting_report"]); - const stack: string[] = [...(index.childrenByParent.get(workspaceId) ?? [])]; - while (stack.length > 0) { - const next = stack.pop()!; - const status = index.byId.get(next)?.taskStatus; - if (status && activeStatuses.has(status)) { - return true; - } - const children = index.childrenByParent.get(next); - if (children) { - for (const child of children) { - stack.push(child); - } - } - } - - return false; - } - - private getTaskDepth( - config: ReturnType, - workspaceId: string - ): number { - assert(workspaceId.length > 0, "getTaskDepth: workspaceId must be non-empty"); - - return this.getTaskDepthFromParentById( - this.buildAgentTaskIndex(config).parentById, - workspaceId - ); - } - - private getTaskDepthFromParentById(parentById: Map, workspaceId: string): number { - let depth = 0; - let current = workspaceId; - for (let i = 0; i < 32; i++) { - const parent = parentById.get(current); - if (!parent) break; - depth += 1; - current = parent; - } - - if (depth >= 32) { - throw new Error( - `getTaskDepthFromParentById: possible parentWorkspaceId cycle starting at ${workspaceId}` - ); - } - - return depth; - } - async maybeStartQueuedTasks(): Promise { await using _lock = await this.mutex.acquire(); const configAtStart = this.config.loadConfigOrDefault(); const taskSettingsAtStart: TaskSettings = configAtStart.taskSettings ?? 
DEFAULT_TASK_SETTINGS; - const activeCount = this.countActiveAgentTasks(configAtStart); + const activeCount = countActiveAgentTasks( + configAtStart, + (id) => this.isForegroundAwaiting(id), + (id) => this.aiService.isStreaming(id) + ); const availableSlots = Math.max(0, taskSettingsAtStart.maxParallelAgentTasks - activeCount); taskQueueDebug("TaskService.maybeStartQueuedTasks summary", { activeCount, @@ -1605,7 +1343,7 @@ export class TaskService { }); if (availableSlots === 0) return; - const queuedTaskIds = this.listAgentTaskWorkspaces(configAtStart) + const queuedTaskIds = listAgentTaskWorkspaces(configAtStart) .filter((t) => t.taskStatus === "queued" && typeof t.id === "string") .sort((a, b) => { const aTime = a.createdAt ? Date.parse(a.createdAt) : 0; @@ -1628,7 +1366,11 @@ export class TaskService { "TaskService.maybeStartQueuedTasks: maxParallelAgentTasks must be a positive number" ); - const activeCount = this.countActiveAgentTasks(config); + const activeCount = countActiveAgentTasks( + config, + (id) => this.isForegroundAwaiting(id), + (id) => this.aiService.isStreaming(id) + ); if (activeCount >= taskSettings.maxParallelAgentTasks) { break; } @@ -1707,7 +1449,11 @@ export class TaskService { // another task became active in the meantime. const latestConfig = this.config.loadConfigOrDefault(); const latestTaskSettings: TaskSettings = latestConfig.taskSettings ?? DEFAULT_TASK_SETTINGS; - const latestActiveCount = this.countActiveAgentTasks(latestConfig); + const latestActiveCount = countActiveAgentTasks( + latestConfig, + (id) => this.isForegroundAwaiting(id), + (id) => this.aiService.isStreaming(id) + ); if (latestActiveCount >= latestTaskSettings.maxParallelAgentTasks) { taskQueueDebug("TaskService.maybeStartQueuedTasks became full mid-loop", { taskId, @@ -2007,7 +1753,7 @@ export class TaskService { // Parent workspaces must not end while they have active background tasks. 
// Enforce by auto-resuming the stream with a directive to await outstanding tasks. if (!entry.workspace.parentWorkspaceId) { - const hasActiveDescendants = this.hasActiveDescendantAgentTasks(cfg, workspaceId); + const hasActiveDescendants = hasActiveDescendantAgentTasks(cfg, workspaceId); if (!hasActiveDescendants) { return; } @@ -2064,7 +1810,7 @@ export class TaskService { // Never allow a task to finish/report while it still has active descendant tasks. // We'll auto-resume this task once the last descendant reports. - const hasActiveDescendants = this.hasActiveDescendantAgentTasks(cfg, workspaceId); + const hasActiveDescendants = hasActiveDescendantAgentTasks(cfg, workspaceId); if (hasActiveDescendants) { if (status === "awaiting_report") { await this.setTaskStatus(workspaceId, "running"); @@ -2182,7 +1928,7 @@ export class TaskService { return; } - if (this.hasActiveDescendantAgentTasks(cfgBeforeReport, childWorkspaceId)) { + if (hasActiveDescendantAgentTasks(cfgBeforeReport, childWorkspaceId)) { log.error("agent_report called while task has active descendants; ignoring", { childWorkspaceId, }); @@ -2296,8 +2042,8 @@ export class TaskService { return; } - const parentById = this.buildAgentTaskIndex(cfgAfterReport).parentById; - const ancestorWorkspaceIds = this.listAncestorWorkspaceIdsUsingParentById( + const parentById = buildAgentTaskIndex(cfgAfterReport).parentById; + const ancestorWorkspaceIds = listAncestorWorkspaceIdsUsingParentById( parentById, childWorkspaceId ); @@ -2372,7 +2118,7 @@ export class TaskService { // Parent may have been cleaned up (e.g. it already reported and this was its last descendant). 
return; } - const hasActiveDescendants = this.hasActiveDescendantAgentTasks(postCfg, parentWorkspaceId); + const hasActiveDescendants = hasActiveDescendantAgentTasks(postCfg, parentWorkspaceId); if (!hasActiveDescendants) { this.consecutiveAutoResumes.delete(parentWorkspaceId); } @@ -2406,8 +2152,8 @@ export class TaskService { private resolveWaiters(taskId: string, report: { reportMarkdown: string; title?: string }): void { const cfg = this.config.loadConfigOrDefault(); - const parentById = this.buildAgentTaskIndex(cfg).parentById; - const ancestorWorkspaceIds = this.listAncestorWorkspaceIdsUsingParentById(parentById, taskId); + const parentById = buildAgentTaskIndex(cfg).parentById; + const ancestorWorkspaceIds = listAncestorWorkspaceIdsUsingParentById(parentById, taskId); this.completedReportsByTaskId.set(taskId, { reportMarkdown: report.reportMarkdown, @@ -2686,7 +2432,7 @@ export class TaskService { if (!parentWorkspaceId) return; if (ws.taskStatus !== "reported") return; - const hasChildren = this.listAgentTaskWorkspaces(config).some( + const hasChildren = listAgentTaskWorkspaces(config).some( (t) => t.parentWorkspaceId === currentWorkspaceId ); const parentSessionDir = this.config.getSessionDir(parentWorkspaceId); From 03ee7cf0acbfb4a9cefd8d6d4184e298608e583a Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 12:16:10 -0600 Subject: [PATCH 07/14] refactor: extract PostCompactionService from WorkspaceService Move 7 post-compaction state management methods into dedicated PostCompactionService class: - getPostCompactionState, getPostCompactionExclusions, setPostCompactionExclusion - getPersistedPostCompactionDiffPaths, deletePlanFilesForWorkspace - schedulePostCompactionMetadataRefresh, emitPostCompactionMetadata WorkspaceService delegates to the new service via composition. ~230 lines moved to focused module. 
--- src/node/services/postCompactionService.ts | 325 +++++++++++++++++++++ src/node/services/workspaceService.ts | 292 ++---------------- 2 files changed, 344 insertions(+), 273 deletions(-) create mode 100644 src/node/services/postCompactionService.ts diff --git a/src/node/services/postCompactionService.ts b/src/node/services/postCompactionService.ts new file mode 100644 index 0000000000..c115d85287 --- /dev/null +++ b/src/node/services/postCompactionService.ts @@ -0,0 +1,325 @@ +import assert from "assert"; +import * as path from "path"; +import fsPromises from "fs/promises"; +import type { Result } from "@/common/types/result"; +import { Ok, Err } from "@/common/types/result"; +import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import { getPlanFilePath, getLegacyPlanFilePath } from "@/common/utils/planStorage"; +import { shellQuote } from "@/node/runtime/backgroundCommands"; +import { extractEditedFilePaths } from "@/common/utils/messages/extractEditedFiles"; +import { fileExists } from "@/node/utils/runtime/fileExists"; +import { expandTilde, expandTildeForSSH } from "@/node/runtime/tildeExpansion"; +import { isSSHRuntime, isDockerRuntime } from "@/common/types/runtime"; +import type { PostCompactionExclusions } from "@/common/types/attachment"; +import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; +import type { Config } from "@/node/config"; +import type { HistoryService } from "@/node/services/historyService"; +import type { AgentSession } from "@/node/services/agentSession"; +import { getErrorMessage } from "@/common/utils/errors"; +import { log } from "@/node/services/log"; + +const POST_COMPACTION_METADATA_REFRESH_DEBOUNCE_MS = 100; + +/** + * Manages post-compaction state: plan files, tracked file paths, exclusions, + * and debounced metadata emission. 
+ */ +export class PostCompactionService { + private readonly postCompactionRefreshTimers = new Map>(); + + constructor( + private readonly config: Config, + private readonly historyService: HistoryService, + private readonly sessions: Map, + private readonly getInfo: ( + workspaceId: string + ) => Promise + ) {} + + schedulePostCompactionMetadataRefresh(workspaceId: string): void { + assert(typeof workspaceId === "string", "workspaceId must be a string"); + const trimmed = workspaceId.trim(); + assert(trimmed.length > 0, "workspaceId must not be empty"); + + const existing = this.postCompactionRefreshTimers.get(trimmed); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(() => { + this.postCompactionRefreshTimers.delete(trimmed); + void this.emitPostCompactionMetadata(trimmed); + }, POST_COMPACTION_METADATA_REFRESH_DEBOUNCE_MS); + + this.postCompactionRefreshTimers.set(trimmed, timer); + } + + cancelPendingRefresh(workspaceId: string): void { + const trimmed = workspaceId.trim(); + const refreshTimer = this.postCompactionRefreshTimers.get(trimmed); + if (refreshTimer) { + clearTimeout(refreshTimer); + this.postCompactionRefreshTimers.delete(trimmed); + } + } + + private async emitPostCompactionMetadata(workspaceId: string): Promise { + try { + const session = this.sessions.get(workspaceId); + if (!session) { + return; + } + + const metadata = await this.getInfo(workspaceId); + if (!metadata) { + return; + } + + const postCompaction = await this.getPostCompactionState(workspaceId); + const enrichedMetadata = { ...metadata, postCompaction }; + session.emitMetadata(enrichedMetadata); + } catch (error) { + // Workspace runtime unavailable (e.g., SSH unreachable) - skip emitting post-compaction state. 
+ log.debug("Failed to emit post-compaction metadata", { workspaceId, error }); + } + } + + private async getPersistedPostCompactionDiffPaths(workspaceId: string): Promise { + const postCompactionPath = path.join( + this.config.getSessionDir(workspaceId), + "post-compaction.json" + ); + + try { + const raw = await fsPromises.readFile(postCompactionPath, "utf-8"); + const parsed: unknown = JSON.parse(raw); + const diffsRaw = (parsed as { diffs?: unknown }).diffs; + if (!Array.isArray(diffsRaw)) { + return null; + } + + const result: string[] = []; + for (const diff of diffsRaw) { + if (!diff || typeof diff !== "object") continue; + const p = (diff as { path?: unknown }).path; + if (typeof p !== "string") continue; + const trimmed = p.trim(); + if (trimmed.length === 0) continue; + result.push(trimmed); + } + + return result; + } catch { + return null; + } + } + + /** + * Get post-compaction context state for a workspace. + * Returns info about what will be injected after compaction. + * Prefers cached paths from pending compaction, falls back to history extraction. + */ + async getPostCompactionState(workspaceId: string): Promise<{ + planPath: string | null; + trackedFilePaths: string[]; + excludedItems: string[]; + }> { + // Get workspace metadata to create runtime for plan file check + const metadata = await this.getInfo(workspaceId); + if (!metadata) { + // Can't get metadata, return empty state + const exclusions = await this.getPostCompactionExclusions(workspaceId); + return { planPath: null, trackedFilePaths: [], excludedItems: exclusions.excludedItems }; + } + + const runtime = createRuntimeForWorkspace(metadata); + const muxHome = runtime.getMuxHome(); + const planPath = getPlanFilePath(metadata.name, metadata.projectName, muxHome); + // For local/SSH: expand tilde for comparison with message history paths + // For Docker: paths are already absolute (/var/mux/...), no expansion needed + const expandedPlanPath = muxHome.startsWith("~") ? 
expandTilde(planPath) : planPath; + // Legacy plan path (stored by workspace ID) for filtering + const legacyPlanPath = getLegacyPlanFilePath(workspaceId); + const expandedLegacyPlanPath = expandTilde(legacyPlanPath); + + // Check both new and legacy plan paths, prefer new path + const newPlanExists = await fileExists(runtime, planPath); + const legacyPlanExists = !newPlanExists && (await fileExists(runtime, legacyPlanPath)); + // Resolve plan path via runtime to get correct absolute path for deep links. + // Local: expands ~ to local home. SSH: expands ~ on remote host. + const activePlanPath = newPlanExists + ? await runtime.resolvePath(planPath) + : legacyPlanExists + ? await runtime.resolvePath(legacyPlanPath) + : null; + + // Load exclusions + const exclusions = await this.getPostCompactionExclusions(workspaceId); + + // Helper to check if a path is a plan file (new or legacy format) + const isPlanPath = (p: string) => + p === planPath || + p === expandedPlanPath || + p === legacyPlanPath || + p === expandedLegacyPlanPath; + + // If session has pending compaction attachments, use cached paths + // (history is cleared after compaction, but cache survives) + const session = this.sessions.get(workspaceId); + const pendingPaths = session?.getPendingTrackedFilePaths(); + if (pendingPaths) { + // Filter out both new and legacy plan file paths + const trackedFilePaths = pendingPaths.filter((p) => !isPlanPath(p)); + return { + planPath: activePlanPath, + trackedFilePaths, + excludedItems: exclusions.excludedItems, + }; + } + + // Fallback (crash-safe): if a post-compaction snapshot exists on disk, use it. 
+ const persistedPaths = await this.getPersistedPostCompactionDiffPaths(workspaceId); + if (persistedPaths !== null) { + const trackedFilePaths = persistedPaths.filter((p) => !isPlanPath(p)); + return { + planPath: activePlanPath, + trackedFilePaths, + excludedItems: exclusions.excludedItems, + }; + } + + // Fallback: compute tracked files from message history (survives reloads) + const historyResult = await this.historyService.getHistory(workspaceId); + const messages = historyResult.success ? historyResult.data : []; + const allPaths = extractEditedFilePaths(messages); + + // Exclude plan file from tracked files since it has its own section + // Filter out both new and legacy plan file paths + const trackedFilePaths = allPaths.filter((p) => !isPlanPath(p)); + return { + planPath: activePlanPath, + trackedFilePaths, + excludedItems: exclusions.excludedItems, + }; + } + + /** + * Get post-compaction exclusions for a workspace. + * Returns empty exclusions if file doesn't exist. + */ + async getPostCompactionExclusions(workspaceId: string): Promise { + const exclusionsPath = path.join(this.config.getSessionDir(workspaceId), "exclusions.json"); + try { + const data = await fsPromises.readFile(exclusionsPath, "utf-8"); + return JSON.parse(data) as PostCompactionExclusions; + } catch { + return { excludedItems: [] }; + } + } + + /** + * Set whether an item is excluded from post-compaction context. + * Item IDs: "plan" for plan file, "file:" for tracked files. 
+ */ + async setPostCompactionExclusion( + workspaceId: string, + itemId: string, + excluded: boolean + ): Promise> { + try { + const exclusions = await this.getPostCompactionExclusions(workspaceId); + const set = new Set(exclusions.excludedItems); + + if (excluded) { + set.add(itemId); + } else { + set.delete(itemId); + } + + const sessionDir = this.config.getSessionDir(workspaceId); + await fsPromises.mkdir(sessionDir, { recursive: true }); + const exclusionsPath = path.join(sessionDir, "exclusions.json"); + await fsPromises.writeFile( + exclusionsPath, + JSON.stringify({ excludedItems: [...set] }, null, 2) + ); + return Ok(undefined); + } catch (error) { + const message = getErrorMessage(error); + return Err(`Failed to set exclusion: ${message}`); + } + } + + /** + * Delete plan files for a workspace (both new and legacy paths). + */ + async deletePlanFilesForWorkspace( + workspaceId: string, + metadata: FrontendWorkspaceMetadata + ): Promise { + // Create runtime to get correct muxHome (Docker uses /var/mux, others use ~/.mux) + const runtime = createRuntimeForWorkspace(metadata); + const muxHome = runtime.getMuxHome(); + const planPath = getPlanFilePath(metadata.name, metadata.projectName, muxHome); + const legacyPlanPath = getLegacyPlanFilePath(workspaceId); + + const isDocker = isDockerRuntime(metadata.runtimeConfig); + const isSSH = isSSHRuntime(metadata.runtimeConfig); + + // For Docker: paths are already absolute (/var/mux/...), just quote + // For SSH: use $HOME expansion so the runtime shell resolves to the runtime home directory + // For local: expand tilde locally since shellQuote prevents shell expansion + const quotedPlanPath = isDocker + ? shellQuote(planPath) + : isSSH + ? expandTildeForSSH(planPath) + : shellQuote(expandTilde(planPath)); + // For legacy path: SSH/Docker use $HOME expansion, local expands tilde + const quotedLegacyPlanPath = + isDocker || isSSH + ? 
expandTildeForSSH(legacyPlanPath) + : shellQuote(expandTilde(legacyPlanPath)); + + if (isDocker || isSSH) { + try { + // Use exec to delete files since runtime doesn't have a deleteFile method. + // Use runtime workspace path (not host projectPath) for Docker containers. + const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); + const execStream = await runtime.exec(`rm -f ${quotedPlanPath} ${quotedLegacyPlanPath}`, { + cwd: workspacePath, + timeout: 10, + }); + + try { + await execStream.stdin.close(); + } catch { + // Ignore stdin-close errors (e.g. already closed). + } + + await execStream.exitCode.catch(() => { + // Best-effort: ignore failures. + }); + } catch { + // Plan files don't exist or can't be deleted - ignore + } + + return; + } + + // Local runtimes: delete directly on the local filesystem. + const planPathAbs = expandTilde(planPath); + const legacyPlanPathAbs = expandTilde(legacyPlanPath); + + await Promise.allSettled([ + fsPromises.rm(planPathAbs, { force: true }), + fsPromises.rm(legacyPlanPathAbs, { force: true }), + ]); + } + + dispose(): void { + for (const timer of this.postCompactionRefreshTimers.values()) { + clearTimeout(timer); + } + this.postCompactionRefreshTimers.clear(); + } +} diff --git a/src/node/services/workspaceService.ts b/src/node/services/workspaceService.ts index 437a9baa66..36a2c815b2 100644 --- a/src/node/services/workspaceService.ts +++ b/src/node/services/workspaceService.ts @@ -27,17 +27,13 @@ import { runBackgroundInit, } from "@/node/runtime/runtimeFactory"; import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import { PostCompactionService } from "@/node/services/postCompactionService"; import { validateWorkspaceName } from "@/common/utils/validation/workspaceValidation"; -import { getPlanFilePath, getLegacyPlanFilePath } from "@/common/utils/planStorage"; -import { shellQuote } from "@/node/runtime/backgroundCommands"; -import { extractEditedFilePaths } from 
"@/common/utils/messages/extractEditedFiles"; -import { fileExists } from "@/node/utils/runtime/fileExists"; + import { applyForkRuntimeUpdates } from "@/node/services/utils/forkRuntimeUpdates"; import type { DevcontainerRuntime } from "@/node/runtime/DevcontainerRuntime"; import { getDevcontainerContainerName } from "@/node/runtime/devcontainerCli"; -import { expandTilde, expandTildeForSSH } from "@/node/runtime/tildeExpansion"; -import type { PostCompactionExclusions } from "@/common/types/attachment"; import type { SendMessageOptions, DeleteMessage, @@ -61,12 +57,7 @@ import { import type { UIMode } from "@/common/types/mode"; import type { MuxMessage } from "@/common/types/message"; import type { RuntimeConfig } from "@/common/types/runtime"; -import { - hasSrcBaseDir, - getSrcBaseDir, - isSSHRuntime, - isDockerRuntime, -} from "@/common/types/runtime"; +import { hasSrcBaseDir, getSrcBaseDir, isDockerRuntime } from "@/common/types/runtime"; import { coerceThinkingLevel, type ThinkingLevel } from "@/common/types/thinking"; import { WORKSPACE_DEFAULTS } from "@/constants/workspaceDefaults"; @@ -108,7 +99,6 @@ const MAX_WORKSPACE_NAME_COLLISION_RETRIES = 3; // Shared type for workspace-scoped AI settings (model + thinking) type WorkspaceAISettings = z.infer; -const POST_COMPACTION_METADATA_REFRESH_DEBOUNCE_MS = 100; interface FileCompletionsCacheEntry { index: FileCompletionsIndex; @@ -183,8 +173,7 @@ export class WorkspaceService extends EventEmitter { { chat: () => void; metadata: () => void } >(); - // Debounce post-compaction metadata refreshes (file_edit_* can fire rapidly) - private readonly postCompactionRefreshTimers = new Map>(); + public readonly postCompactionService: PostCompactionService; // Tracks workspaces currently being renamed to prevent streaming during rename private readonly renamingWorkspaces = new Set(); @@ -221,6 +210,12 @@ export class WorkspaceService extends EventEmitter { this.telemetryService = telemetryService; 
this.experimentsService = experimentsService; this.sessionTimingService = sessionTimingService; + this.postCompactionService = new PostCompactionService( + config, + historyService, + this.sessions, + (wsId) => this.getInfo(wsId) + ); this.setupMetadataListeners(); } @@ -393,42 +388,7 @@ export class WorkspaceService extends EventEmitter { } private schedulePostCompactionMetadataRefresh(workspaceId: string): void { - assert(typeof workspaceId === "string", "workspaceId must be a string"); - const trimmed = workspaceId.trim(); - assert(trimmed.length > 0, "workspaceId must not be empty"); - - const existing = this.postCompactionRefreshTimers.get(trimmed); - if (existing) { - clearTimeout(existing); - } - - const timer = setTimeout(() => { - this.postCompactionRefreshTimers.delete(trimmed); - void this.emitPostCompactionMetadata(trimmed); - }, POST_COMPACTION_METADATA_REFRESH_DEBOUNCE_MS); - - this.postCompactionRefreshTimers.set(trimmed, timer); - } - - private async emitPostCompactionMetadata(workspaceId: string): Promise { - try { - const session = this.sessions.get(workspaceId); - if (!session) { - return; - } - - const metadata = await this.getInfo(workspaceId); - if (!metadata) { - return; - } - - const postCompaction = await this.getPostCompactionState(workspaceId); - const enrichedMetadata = { ...metadata, postCompaction }; - session.emitMetadata(enrichedMetadata); - } catch (error) { - // Workspace runtime unavailable (e.g., SSH unreachable) - skip emitting post-compaction state. 
- log.debug("Failed to emit post-compaction metadata", { workspaceId, error }); - } + this.postCompactionService.schedulePostCompactionMetadataRefresh(workspaceId); } public getOrCreateSession(workspaceId: string): AgentSession { @@ -511,11 +471,7 @@ export class WorkspaceService extends EventEmitter { public disposeSession(workspaceId: string): void { const trimmed = workspaceId.trim(); const session = this.sessions.get(trimmed); - const refreshTimer = this.postCompactionRefreshTimers.get(trimmed); - if (refreshTimer) { - clearTimeout(refreshTimer); - this.postCompactionRefreshTimers.delete(trimmed); - } + this.postCompactionService.cancelPendingRefresh(trimmed); if (!session) { return; @@ -532,170 +488,16 @@ export class WorkspaceService extends EventEmitter { this.sessions.delete(trimmed); } - private async getPersistedPostCompactionDiffPaths(workspaceId: string): Promise { - const postCompactionPath = path.join( - this.config.getSessionDir(workspaceId), - "post-compaction.json" - ); - - try { - const raw = await fsPromises.readFile(postCompactionPath, "utf-8"); - const parsed: unknown = JSON.parse(raw); - const diffsRaw = (parsed as { diffs?: unknown }).diffs; - if (!Array.isArray(diffsRaw)) { - return null; - } - - const result: string[] = []; - for (const diff of diffsRaw) { - if (!diff || typeof diff !== "object") continue; - const p = (diff as { path?: unknown }).path; - if (typeof p !== "string") continue; - const trimmed = p.trim(); - if (trimmed.length === 0) continue; - result.push(trimmed); - } - - return result; - } catch { - return null; - } + public async getPostCompactionState(workspaceId: string) { + return this.postCompactionService.getPostCompactionState(workspaceId); } - /** - * Get post-compaction context state for a workspace. - * Returns info about what will be injected after compaction. - * Prefers cached paths from pending compaction, falls back to history extraction. 
- */ - public async getPostCompactionState(workspaceId: string): Promise<{ - planPath: string | null; - trackedFilePaths: string[]; - excludedItems: string[]; - }> { - // Get workspace metadata to create runtime for plan file check - const metadata = await this.getInfo(workspaceId); - if (!metadata) { - // Can't get metadata, return empty state - const exclusions = await this.getPostCompactionExclusions(workspaceId); - return { planPath: null, trackedFilePaths: [], excludedItems: exclusions.excludedItems }; - } - - const runtime = createRuntimeForWorkspace(metadata); - const muxHome = runtime.getMuxHome(); - const planPath = getPlanFilePath(metadata.name, metadata.projectName, muxHome); - // For local/SSH: expand tilde for comparison with message history paths - // For Docker: paths are already absolute (/var/mux/...), no expansion needed - const expandedPlanPath = muxHome.startsWith("~") ? expandTilde(planPath) : planPath; - // Legacy plan path (stored by workspace ID) for filtering - const legacyPlanPath = getLegacyPlanFilePath(workspaceId); - const expandedLegacyPlanPath = expandTilde(legacyPlanPath); - - // Check both new and legacy plan paths, prefer new path - const newPlanExists = await fileExists(runtime, planPath); - const legacyPlanExists = !newPlanExists && (await fileExists(runtime, legacyPlanPath)); - // Resolve plan path via runtime to get correct absolute path for deep links. - // Local: expands ~ to local home. SSH: expands ~ on remote host. - const activePlanPath = newPlanExists - ? await runtime.resolvePath(planPath) - : legacyPlanExists - ? 
await runtime.resolvePath(legacyPlanPath) - : null; - - // Load exclusions - const exclusions = await this.getPostCompactionExclusions(workspaceId); - - // Helper to check if a path is a plan file (new or legacy format) - const isPlanPath = (p: string) => - p === planPath || - p === expandedPlanPath || - p === legacyPlanPath || - p === expandedLegacyPlanPath; - - // If session has pending compaction attachments, use cached paths - // (history is cleared after compaction, but cache survives) - const session = this.sessions.get(workspaceId); - const pendingPaths = session?.getPendingTrackedFilePaths(); - if (pendingPaths) { - // Filter out both new and legacy plan file paths - const trackedFilePaths = pendingPaths.filter((p) => !isPlanPath(p)); - return { - planPath: activePlanPath, - trackedFilePaths, - excludedItems: exclusions.excludedItems, - }; - } - - // Fallback (crash-safe): if a post-compaction snapshot exists on disk, use it. - const persistedPaths = await this.getPersistedPostCompactionDiffPaths(workspaceId); - if (persistedPaths !== null) { - const trackedFilePaths = persistedPaths.filter((p) => !isPlanPath(p)); - return { - planPath: activePlanPath, - trackedFilePaths, - excludedItems: exclusions.excludedItems, - }; - } - - // Fallback: compute tracked files from message history (survives reloads) - const historyResult = await this.historyService.getHistory(workspaceId); - const messages = historyResult.success ? 
historyResult.data : []; - const allPaths = extractEditedFilePaths(messages); - - // Exclude plan file from tracked files since it has its own section - // Filter out both new and legacy plan file paths - const trackedFilePaths = allPaths.filter((p) => !isPlanPath(p)); - return { - planPath: activePlanPath, - trackedFilePaths, - excludedItems: exclusions.excludedItems, - }; + public async getPostCompactionExclusions(workspaceId: string) { + return this.postCompactionService.getPostCompactionExclusions(workspaceId); } - /** - * Get post-compaction exclusions for a workspace. - * Returns empty exclusions if file doesn't exist. - */ - public async getPostCompactionExclusions(workspaceId: string): Promise { - const exclusionsPath = path.join(this.config.getSessionDir(workspaceId), "exclusions.json"); - try { - const data = await fsPromises.readFile(exclusionsPath, "utf-8"); - return JSON.parse(data) as PostCompactionExclusions; - } catch { - return { excludedItems: [] }; - } - } - - /** - * Set whether an item is excluded from post-compaction context. - * Item IDs: "plan" for plan file, "file:" for tracked files. 
- */ - public async setPostCompactionExclusion( - workspaceId: string, - itemId: string, - excluded: boolean - ): Promise> { - try { - const exclusions = await this.getPostCompactionExclusions(workspaceId); - const set = new Set(exclusions.excludedItems); - - if (excluded) { - set.add(itemId); - } else { - set.delete(itemId); - } - - const sessionDir = this.config.getSessionDir(workspaceId); - await fsPromises.mkdir(sessionDir, { recursive: true }); - const exclusionsPath = path.join(sessionDir, "exclusions.json"); - await fsPromises.writeFile( - exclusionsPath, - JSON.stringify({ excludedItems: [...set] }, null, 2) - ); - return Ok(undefined); - } catch (error) { - const message = getErrorMessage(error); - return Err(`Failed to set exclusion: ${message}`); - } + public async setPostCompactionExclusion(workspaceId: string, itemId: string, excluded: boolean) { + return this.postCompactionService.setPostCompactionExclusion(workspaceId, itemId, excluded); } async create( @@ -2404,63 +2206,7 @@ export class WorkspaceService extends EventEmitter { workspaceId: string, metadata: FrontendWorkspaceMetadata ): Promise { - // Create runtime to get correct muxHome (Docker uses /var/mux, others use ~/.mux) - const runtime = createRuntimeForWorkspace(metadata); - const muxHome = runtime.getMuxHome(); - const planPath = getPlanFilePath(metadata.name, metadata.projectName, muxHome); - const legacyPlanPath = getLegacyPlanFilePath(workspaceId); - - const isDocker = isDockerRuntime(metadata.runtimeConfig); - const isSSH = isSSHRuntime(metadata.runtimeConfig); - - // For Docker: paths are already absolute (/var/mux/...), just quote - // For SSH: use $HOME expansion so the runtime shell resolves to the runtime home directory - // For local: expand tilde locally since shellQuote prevents shell expansion - const quotedPlanPath = isDocker - ? shellQuote(planPath) - : isSSH - ? 
expandTildeForSSH(planPath) - : shellQuote(expandTilde(planPath)); - // For legacy path: SSH/Docker use $HOME expansion, local expands tilde - const quotedLegacyPlanPath = - isDocker || isSSH - ? expandTildeForSSH(legacyPlanPath) - : shellQuote(expandTilde(legacyPlanPath)); - - if (isDocker || isSSH) { - try { - // Use exec to delete files since runtime doesn't have a deleteFile method. - // Use runtime workspace path (not host projectPath) for Docker containers. - const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); - const execStream = await runtime.exec(`rm -f ${quotedPlanPath} ${quotedLegacyPlanPath}`, { - cwd: workspacePath, - timeout: 10, - }); - - try { - await execStream.stdin.close(); - } catch { - // Ignore stdin-close errors (e.g. already closed). - } - - await execStream.exitCode.catch(() => { - // Best-effort: ignore failures. - }); - } catch { - // Plan files don't exist or can't be deleted - ignore - } - - return; - } - - // Local runtimes: delete directly on the local filesystem. - const planPathAbs = expandTilde(planPath); - const legacyPlanPathAbs = expandTilde(legacyPlanPath); - - await Promise.allSettled([ - fsPromises.rm(planPathAbs, { force: true }), - fsPromises.rm(legacyPlanPathAbs, { force: true }), - ]); + return this.postCompactionService.deletePlanFilesForWorkspace(workspaceId, metadata); } async truncateHistory(workspaceId: string, percentage?: number): Promise> { From 0f4b92844eda39c37525f72b36a89599c73c131b Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 12:17:20 -0600 Subject: [PATCH 08/14] refactor: extract ContextExceededRetryHandler from AgentSession Extract all context-exceeded retry logic (~600 lines + 8 state fields) from AgentSession into a dedicated ContextExceededRetryHandler class. 
The handler owns: - 8 state fields (retry attempt sets, active stream context/flags) - 11 methods: handleStreamError, three retry strategies (compaction, post-compaction, hard restart exec subagent), and supporting helpers AgentSession interacts with the handler via: - initStreamState/markStreamHadDelta/etc. for state updates - handleStreamError as the error-handling entry point - Callbacks (emitChatEvent, clearQueue, streamWithHistory, etc.) for handler->session communication Zero logic changes - pure extraction with preserved test compatibility. All 31 agentSession tests pass including postCompactionRetry suite. --- src/node/services/agentSession.ts | 757 ++------------------ src/node/services/contextExceededRetry.ts | 806 ++++++++++++++++++++++ 2 files changed, 857 insertions(+), 706 deletions(-) create mode 100644 src/node/services/contextExceededRetry.ts diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index 6a9b9ce806..bdcb829007 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -16,21 +16,18 @@ import type { FrontendWorkspaceMetadata } from "@/common/types/workspace"; import type { RuntimeConfig } from "@/common/types/runtime"; import { DEFAULT_RUNTIME_CONFIG } from "@/common/constants/workspace"; import { DEFAULT_MODEL } from "@/common/constants/knownModels"; -import type { - WorkspaceChatMessage, - SendMessageOptions, - FilePart, - DeleteMessage, -} from "@/common/orpc/types"; -import { WORKSPACE_DEFAULTS } from "@/constants/workspaceDefaults"; +import type { WorkspaceChatMessage, SendMessageOptions, FilePart } from "@/common/orpc/types"; import type { SendMessageError } from "@/common/types/errors"; -import { AgentIdSchema, SkillNameSchema } from "@/common/orpc/schemas"; +import { SkillNameSchema } from "@/common/orpc/schemas"; import { buildStreamErrorEventData, - createStreamErrorMessage, createUnknownSendMessageError, - type StreamErrorPayload, } from 
"@/node/services/utils/sendMessageError"; +import { + ContextExceededRetryHandler, + isCompactionRequestMetadata, + type StreamErrorPayload, +} from "./contextExceededRetry"; import { createUserMessageId, createFileSnapshotMessageId, @@ -48,17 +45,12 @@ import { createMuxMessage, isCompactionSummaryMetadata, prepareUserMessageForSend, - type CompactionFollowUpRequest, type MuxFrontendMetadata, type MuxFilePart, type MuxMessage, - type ReviewNoteDataForDisplay, } from "@/common/types/message"; import { createRuntime } from "@/node/runtime/runtimeFactory"; import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; -import { isExecLikeEditingCapableInResolvedChain } from "@/common/utils/agentTools"; -import { readAgentDefinition } from "@/node/services/agentDefinitions/agentDefinitionsService"; -import { resolveAgentInheritanceChain } from "@/node/services/agentDefinitions/resolveAgentInheritanceChain"; import { MessageQueue } from "./messageQueue"; import type { StreamEndEvent } from "@/common/types/stream"; import { CompactionHandler } from "./compactionHandler"; @@ -83,25 +75,6 @@ import { getErrorMessage } from "@/common/utils/errors"; // Re-export types from FileChangeTracker for backward compatibility export type { FileState, EditedFileAttachment } from "@/node/services/utils/fileChangeTracker"; -// Type guard for compaction request metadata -// Supports both new `followUpContent` and legacy `continueMessage` for backwards compatibility -interface CompactionRequestMetadata { - type: "compaction-request"; - parsed: { - followUpContent?: CompactionFollowUpRequest; - // Legacy field - older persisted requests may use this instead of followUpContent - continueMessage?: { - text?: string; - imageParts?: FilePart[]; - reviews?: ReviewNoteDataForDisplay[]; - muxMetadata?: MuxFrontendMetadata; - model?: string; - agentId?: string; - mode?: "exec" | "plan"; // Legacy: older versions stored mode instead of agentId - }; - }; -} - const PDF_MEDIA_TYPE = 
"application/pdf"; function normalizeMediaType(mediaType: string): string { @@ -121,14 +94,6 @@ function estimateBase64DataUrlBytes(dataUrl: string): number | null { const padding = base64.endsWith("==") ? 2 : base64.endsWith("=") ? 1 : 0; return Math.floor((base64.length * 3) / 4) - padding; } -function isCompactionRequestMetadata(meta: unknown): meta is CompactionRequestMetadata { - if (typeof meta !== "object" || meta === null) return false; - const obj = meta as Record; - if (obj.type !== "compaction-request") return false; - if (typeof obj.parsed !== "object" || obj.parsed === null) return false; - return true; -} - const MAX_AGENT_SKILL_SNAPSHOT_CHARS = 50_000; export interface AgentSessionChatEvent { @@ -200,43 +165,9 @@ export class AgentSession { * This is intentionally delayed until stream-end so a crash mid-stream doesn't lose the diffs. */ private ackPendingPostCompactionStateOnStreamEnd = false; - /** - * Cache the last-known experiment state so we don't spam metadata refresh - * when post-compaction context is disabled. - */ - /** Track compaction requests that already retried with truncation. */ - private readonly compactionRetryAttempts = new Set(); - /** - * Active compaction request metadata for retry decisions (cleared on stream end/abort). - */ - - /** Tracks the user message id that initiated the currently active stream (for retry guards). */ - private activeStreamUserMessageId?: string; - - /** Track user message ids that already retried without post-compaction injection. */ - private readonly postCompactionRetryAttempts = new Set(); - - /** Track user message ids that already hard-restarted for exec-like subagents. */ - private readonly execSubagentHardRestartAttempts = new Set(); - - /** True once we see any model/tool output for the current stream (retry guard). */ - private activeStreamHadAnyDelta = false; - /** Tracks whether the current stream included post-compaction attachments. 
*/ - private activeStreamHadPostCompactionInjection = false; - - /** Context needed to retry the current stream (cleared on stream end/abort/error). */ - private activeStreamContext?: { - modelString: string; - options?: SendMessageOptions; - openaiTruncationModeOverride?: "auto" | "disabled"; - }; - - private activeCompactionRequest?: { - id: string; - modelString: string; - options?: SendMessageOptions; - }; + /** Handles all context-exceeded retry strategies (compaction, post-compaction, hard restart). */ + private readonly retryHandler: ContextExceededRetryHandler; constructor(options: AgentSessionOptions) { assert(options, "AgentSession requires options"); @@ -279,6 +210,26 @@ export class AgentSession { onCompactionComplete, }); + this.retryHandler = new ContextExceededRetryHandler( + { + workspaceId: this.workspaceId, + historyService: this.historyService, + partialService: this.partialService, + aiService: this.aiService, + compactionHandler: this.compactionHandler, + onPostCompactionStateChange, + }, + { + emitChatEvent: (msg) => this.emitChatEvent(msg), + clearQueue: () => this.clearQueue(), + streamWithHistory: (m, o, t, d) => this.streamWithHistory(m, o, t, d), + isDisposed: () => this.disposed, + setStreamStarting: (v) => { + this.streamStarting = v; + }, + } + ); + this.attachAiListeners(); this.attachInitListeners(); } @@ -892,14 +843,7 @@ export class AgentSession { // Reset per-stream flags (used for retries / crash-safe bookkeeping). 
this.ackPendingPostCompactionStateOnStreamEnd = false; - this.activeStreamHadAnyDelta = false; - this.activeStreamHadPostCompactionInjection = false; - this.activeStreamContext = { - modelString, - options, - openaiTruncationModeOverride, - }; - this.activeStreamUserMessageId = undefined; + this.retryHandler.initStreamState({ modelString, options, openaiTruncationModeOverride }); const commitResult = await this.partialService.commitToHistory(this.workspaceId); if (!commitResult.success) { @@ -943,13 +887,9 @@ export class AgentSession { // Capture the current user message id so retries are stable across assistant message ids. const lastUserMessage = [...historyResult.data].reverse().find((m) => m.role === "user"); - this.activeStreamUserMessageId = lastUserMessage?.id; + this.retryHandler.setActiveStreamUserMessageId(lastUserMessage?.id); - this.activeCompactionRequest = this.resolveCompactionRequest( - historyResult.data, - modelString, - options - ); + this.retryHandler.resolveAndSetCompactionRequest(historyResult.data, modelString, options); // Check for external file edits (timestamp-based polling) const changedFileAttachments = await this.fileChangeTracker.getChangedAttachments(); @@ -959,8 +899,9 @@ export class AgentSession { disablePostCompactionAttachments === true ? 
null : await this.getPostCompactionAttachmentsIfNeeded(); - this.activeStreamHadPostCompactionInjection = - postCompactionAttachments !== null && postCompactionAttachments.length > 0; + this.retryHandler.setPostCompactionInjection( + postCompactionAttachments !== null && postCompactionAttachments.length > 0 + ); // Enforce thinking policy for the specified model (single source of truth) // This ensures model-specific requirements are met regardless of where the request originates @@ -994,7 +935,7 @@ export class AgentSession { }); if (!streamResult.success) { - this.activeCompactionRequest = undefined; + this.retryHandler.clearActiveCompactionRequest(); // If stream startup failed before any stream events were emitted (e.g., missing API key), // emit a synthetic stream-error so the UI can surface the failure immediately. @@ -1003,611 +944,13 @@ export class AgentSession { streamResult.error.type !== "runtime_start_failed" ) { const streamError = buildStreamErrorEventData(streamResult.error); - await this.handleStreamError(streamError); + await this.retryHandler.handleStreamError(streamError); } } return streamResult; } - private resolveCompactionRequest( - history: MuxMessage[], - modelString: string, - options?: SendMessageOptions - ): { id: string; modelString: string; options?: SendMessageOptions } | undefined { - for (let index = history.length - 1; index >= 0; index -= 1) { - const message = history[index]; - if (message.role !== "user") { - continue; - } - if (!isCompactionRequestMetadata(message.metadata?.muxMetadata)) { - return undefined; - } - return { - id: message.id, - modelString, - options, - }; - } - return undefined; - } - - private async clearFailedAssistantMessage(messageId: string, reason: string): Promise { - const [partialResult, deleteMessageResult] = await Promise.all([ - this.partialService.deletePartial(this.workspaceId), - this.historyService.deleteMessage(this.workspaceId, messageId), - ]); - - if (!partialResult.success) { - 
log.warn("Failed to clear partial before retry", { - workspaceId: this.workspaceId, - reason, - error: partialResult.error, - }); - } - - if ( - !deleteMessageResult.success && - !( - typeof deleteMessageResult.error === "string" && - deleteMessageResult.error.includes("not found in history") - ) - ) { - log.warn("Failed to delete failed assistant placeholder", { - workspaceId: this.workspaceId, - reason, - error: deleteMessageResult.error, - }); - } - } - - private async finalizeCompactionRetry(messageId: string): Promise { - this.activeCompactionRequest = undefined; - this.resetActiveStreamState(); - this.emitChatEvent({ - type: "stream-abort", - workspaceId: this.workspaceId, - messageId, - }); - await this.clearFailedAssistantMessage(messageId, "compaction-retry"); - } - - private supports1MContextRetry(modelString: string): boolean { - const normalized = normalizeGatewayModel(modelString); - const [provider, modelName] = normalized.split(":", 2); - const lower = modelName?.toLowerCase() ?? ""; - return ( - provider === "anthropic" && - (lower.startsWith("claude-sonnet-4-5") || lower.startsWith("claude-opus-4-6")) - ); - } - - private withAnthropic1MContext( - modelString: string, - options: SendMessageOptions | undefined - ): SendMessageOptions { - if (options) { - const existingModels = options.providerOptions?.anthropic?.use1MContextModels ?? []; - return { - ...options, - providerOptions: { - ...options.providerOptions, - anthropic: { - ...options.providerOptions?.anthropic, - use1MContext: true, - use1MContextModels: existingModels.includes(modelString) - ? 
existingModels - : [...existingModels, modelString], - }, - }, - }; - } - - return { - model: modelString, - agentId: WORKSPACE_DEFAULTS.agentId, - providerOptions: { - anthropic: { - use1MContext: true, - use1MContextModels: [modelString], - }, - }, - }; - } - - private isGptClassModel(modelString: string): boolean { - const normalized = normalizeGatewayModel(modelString); - const [provider, modelName] = normalized.split(":", 2); - return provider === "openai" && modelName?.toLowerCase().startsWith("gpt-"); - } - - private async maybeRetryCompactionOnContextExceeded(data: { - messageId: string; - errorType?: string; - }): Promise { - if (data.errorType !== "context_exceeded") { - return false; - } - - const context = this.activeCompactionRequest; - if (!context) { - return false; - } - - const isGptClass = this.isGptClassModel(context.modelString); - const is1MCapable = this.supports1MContextRetry(context.modelString); - - if (!isGptClass && !is1MCapable) { - return false; - } - - if (is1MCapable) { - // Skip retry if 1M context is already enabled (via legacy global flag or per-model list) - const anthropicOpts = context.options?.providerOptions?.anthropic; - const already1M = - anthropicOpts?.use1MContext === true || - (anthropicOpts?.use1MContextModels?.includes(context.modelString) ?? false); - if (already1M) { - return false; - } - } - - if (this.compactionRetryAttempts.has(context.id)) { - return false; - } - - this.compactionRetryAttempts.add(context.id); - - const retryLabel = is1MCapable ? "Anthropic 1M context" : "OpenAI truncation"; - log.info(`Compaction hit context limit; retrying once with ${retryLabel}`, { - workspaceId: this.workspaceId, - model: context.modelString, - compactionRequestId: context.id, - }); - - await this.finalizeCompactionRetry(data.messageId); - - const retryOptions = is1MCapable - ? 
this.withAnthropic1MContext(context.modelString, context.options) - : context.options; - this.streamStarting = true; - let retryResult: Result; - try { - retryResult = await this.streamWithHistory( - context.modelString, - retryOptions, - isGptClass ? "auto" : undefined - ); - } finally { - this.streamStarting = false; - } - if (!retryResult.success) { - log.error("Compaction retry failed to start", { - workspaceId: this.workspaceId, - error: retryResult.error, - }); - return false; - } - - return true; - } - - private async maybeRetryWithoutPostCompactionOnContextExceeded(data: { - messageId: string; - errorType?: string; - }): Promise { - if (data.errorType !== "context_exceeded") { - return false; - } - - // Only retry if we actually injected post-compaction context. - if (!this.activeStreamHadPostCompactionInjection) { - return false; - } - - // Guardrail: don't retry if we've already emitted any meaningful output. - if (this.activeStreamHadAnyDelta) { - return false; - } - - const requestId = this.activeStreamUserMessageId; - const context = this.activeStreamContext; - if (!requestId || !context) { - return false; - } - - if (this.postCompactionRetryAttempts.has(requestId)) { - return false; - } - - this.postCompactionRetryAttempts.add(requestId); - - log.info("Post-compaction context hit context limit; retrying once without it", { - workspaceId: this.workspaceId, - requestId, - model: context.modelString, - }); - - // The post-compaction diffs are likely the culprit; discard them so we don't loop. - try { - await this.compactionHandler.discardPendingDiffs("context_exceeded"); - this.onPostCompactionStateChange?.(); - } catch (error) { - log.warn("Failed to discard pending post-compaction state", { - workspaceId: this.workspaceId, - error: getErrorMessage(error), - }); - } - - // Abort the failed assistant placeholder and clean up persisted partial/history state. 
- this.resetActiveStreamState(); - this.emitChatEvent({ - type: "stream-abort", - workspaceId: this.workspaceId, - messageId: data.messageId, - }); - await this.clearFailedAssistantMessage(data.messageId, "post-compaction-retry"); - - // Retry the same request, but without post-compaction injection. - this.streamStarting = true; - let retryResult: Result; - try { - retryResult = await this.streamWithHistory( - context.modelString, - context.options, - context.openaiTruncationModeOverride, - true - ); - } finally { - this.streamStarting = false; - } - - if (!retryResult.success) { - log.error("Post-compaction retry failed to start", { - workspaceId: this.workspaceId, - error: retryResult.error, - }); - return false; - } - - return true; - } - - private async maybeHardRestartExecSubagentOnContextExceeded(data: { - messageId: string; - errorType?: string; - }): Promise { - if (data.errorType !== "context_exceeded") { - return false; - } - - // Only enabled via experiment (and only when we still have a valid retry context). - const context = this.activeStreamContext; - const requestId = this.activeStreamUserMessageId; - const experimentEnabled = context?.options?.experiments?.execSubagentHardRestart === true; - if (!experimentEnabled || !context || !requestId) { - return false; - } - - // Guardrail: don't hard-restart after any meaningful output. - // This is intended to recover from "prompt too long" cases before the model starts streaming. - if (this.activeStreamHadAnyDelta) { - return false; - } - - if (this.execSubagentHardRestartAttempts.has(requestId)) { - return false; - } - - // Guard for test mocks that may not implement getWorkspaceMetadata. 
- if (typeof this.aiService.getWorkspaceMetadata !== "function") { - return false; - } - - const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); - if (!metadataResult.success) { - return false; - } - - const metadata = metadataResult.data; - if (!metadata.parentWorkspaceId) { - return false; - } - - const agentIdRaw = (metadata.agentId ?? metadata.agentType ?? WORKSPACE_DEFAULTS.agentId) - .trim() - .toLowerCase(); - const parsedAgentId = AgentIdSchema.safeParse(agentIdRaw); - const agentId = parsedAgentId.success ? parsedAgentId.data : ("exec" as const); - - // Prefer resolving agent inheritance from the parent workspace: project agents may be untracked - // (and therefore absent from child worktrees), but they are always present in the parent that - // spawned the task. - const metadataCandidates: Array = [metadata]; - - try { - const parentMetadataResult = await this.aiService.getWorkspaceMetadata( - metadata.parentWorkspaceId - ); - if (parentMetadataResult.success) { - metadataCandidates.unshift(parentMetadataResult.data); - } - } catch { - // ignore - fall back to child metadata - } - - let chain: Awaited> | undefined; - for (const agentMetadata of metadataCandidates) { - try { - const runtime = createRuntimeForWorkspace(agentMetadata); - - // In-place workspaces (CLI/benchmarks) have projectPath === name. - // Use path directly instead of reconstructing via getWorkspacePath. - const isInPlace = agentMetadata.projectPath === agentMetadata.name; - const workspacePath = isInPlace - ? agentMetadata.projectPath - : runtime.getWorkspacePath(agentMetadata.projectPath, agentMetadata.name); - - const agentDiscoveryPath = - context.options?.disableWorkspaceAgents === true - ? 
agentMetadata.projectPath - : workspacePath; - - const agentDefinition = await readAgentDefinition(runtime, agentDiscoveryPath, agentId); - chain = await resolveAgentInheritanceChain({ - runtime, - workspacePath: agentDiscoveryPath, - agentId, - agentDefinition, - workspaceId: this.workspaceId, - }); - break; - } catch { - // ignore - try next candidate - } - } - - if (!chain) { - // If we fail to resolve tool policy/inheritance, treat as non-exec-like. - return false; - } - - if (!isExecLikeEditingCapableInResolvedChain(chain)) { - return false; - } - - this.execSubagentHardRestartAttempts.add(requestId); - - const continuationNotice = - "Context limit reached. Mux restarted this agent's chat history and will replay your original prompt below. " + - "Continue using only the current workspace state (files, git history, command output); " + - "re-inspect the repo as needed."; - - log.info("Exec-like subagent hit context limit; hard-restarting history and retrying", { - workspaceId: this.workspaceId, - requestId, - model: context.modelString, - agentId, - }); - - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return false; - } - - const messages = historyResult.data; - - const firstPromptIndex = messages.findIndex( - (msg) => msg.role === "user" && msg.metadata?.synthetic !== true - ); - if (firstPromptIndex === -1) { - return false; - } - - // Include any synthetic snapshots that were persisted immediately before the task prompt. - let seedStartIndex = firstPromptIndex; - for (let i = firstPromptIndex - 1; i >= 0; i -= 1) { - const msg = messages[i]; - const isSnapshot = - msg.role === "user" && - msg.metadata?.synthetic === true && - (msg.metadata?.fileAtMentionSnapshot ?? 
msg.metadata?.agentSkillSnapshot); - if (!isSnapshot) { - break; - } - seedStartIndex = i; - } - - const seedMessages = messages.slice(seedStartIndex, firstPromptIndex + 1); - if (seedMessages.length === 0) { - return false; - } - - // Best-effort: discard pending post-compaction state so we don't immediately re-inject it. - try { - await this.compactionHandler.discardPendingDiffs("execSubagentHardRestart"); - this.onPostCompactionStateChange?.(); - } catch (error) { - log.warn("Failed to discard pending post-compaction state before hard restart", { - workspaceId: this.workspaceId, - error: getErrorMessage(error), - }); - } - - // Abort the failed assistant placeholder and clean up partial/history state. - this.activeCompactionRequest = undefined; - this.resetActiveStreamState(); - if (!this.disposed) { - this.clearQueue(); - } - - this.emitChatEvent({ - type: "stream-abort", - workspaceId: this.workspaceId, - messageId: data.messageId, - }); - - const partialDeleteResult = await this.partialService.deletePartial(this.workspaceId); - if (!partialDeleteResult.success) { - log.warn("Failed to delete partial before exec subagent hard restart", { - workspaceId: this.workspaceId, - error: partialDeleteResult.error, - }); - } - - const clearResult = await this.historyService.clearHistory(this.workspaceId); - if (!clearResult.success) { - log.warn("Failed to clear history for exec subagent hard restart", { - workspaceId: this.workspaceId, - error: clearResult.error, - }); - return false; - } - - const deletedSequences = clearResult.data; - if (deletedSequences.length > 0) { - const deleteMessage: DeleteMessage = { - type: "delete", - historySequences: deletedSequences, - }; - this.emitChatEvent(deleteMessage); - } - - const cloneForAppend = (msg: MuxMessage): MuxMessage => { - const metadataCopy = msg.metadata ? 
{ ...msg.metadata } : undefined; - if (metadataCopy) { - metadataCopy.historySequence = undefined; - metadataCopy.partial = undefined; - metadataCopy.error = undefined; - metadataCopy.errorType = undefined; - } - - return { - ...msg, - metadata: metadataCopy, - parts: [...msg.parts], - }; - }; - - const continuationMessage = createMuxMessage( - createUserMessageId(), - "user", - continuationNotice, - { - timestamp: Date.now(), - synthetic: true, - uiVisible: true, - } - ); - - const messagesToAppend = [continuationMessage, ...seedMessages.map(cloneForAppend)]; - for (const message of messagesToAppend) { - const appendResult = await this.historyService.appendToHistory(this.workspaceId, message); - if (!appendResult.success) { - log.error("Failed to append message during exec subagent hard restart", { - workspaceId: this.workspaceId, - messageId: message.id, - error: appendResult.error, - }); - return false; - } - - // Add type: "message" for discriminated union (MuxMessage doesn't have it) - this.emitChatEvent({ - ...message, - type: "message" as const, - }); - } - - const existingInstructions = context.options?.additionalSystemInstructions; - const mergedAdditionalSystemInstructions = existingInstructions - ? `${continuationNotice}\n\n${existingInstructions}` - : continuationNotice; - - const retryOptions: SendMessageOptions | undefined = context.options - ? 
{ - ...context.options, - additionalSystemInstructions: mergedAdditionalSystemInstructions, - } - : { - model: context.modelString, - agentId: WORKSPACE_DEFAULTS.agentId, - additionalSystemInstructions: mergedAdditionalSystemInstructions, - experiments: { - execSubagentHardRestart: true, - }, - }; - - this.streamStarting = true; - let retryResult: Result; - try { - retryResult = await this.streamWithHistory( - context.modelString, - retryOptions, - context.openaiTruncationModeOverride - ); - } finally { - this.streamStarting = false; - } - - if (!retryResult.success) { - log.error("Exec subagent hard restart retry failed to start", { - workspaceId: this.workspaceId, - error: retryResult.error, - }); - return false; - } - - return true; - } - - private resetActiveStreamState(): void { - this.activeStreamContext = undefined; - this.activeStreamUserMessageId = undefined; - this.activeStreamHadPostCompactionInjection = false; - this.activeStreamHadAnyDelta = false; - this.ackPendingPostCompactionStateOnStreamEnd = false; - } - - private async handleStreamError(data: StreamErrorPayload): Promise { - const hadCompactionRequest = this.activeCompactionRequest !== undefined; - if ( - await this.maybeRetryCompactionOnContextExceeded({ - messageId: data.messageId, - errorType: data.errorType, - }) - ) { - return; - } - - if ( - await this.maybeRetryWithoutPostCompactionOnContextExceeded({ - messageId: data.messageId, - errorType: data.errorType, - }) - ) { - return; - } - - if ( - await this.maybeHardRestartExecSubagentOnContextExceeded({ - messageId: data.messageId, - errorType: data.errorType, - }) - ) { - return; - } - - this.activeCompactionRequest = undefined; - this.resetActiveStreamState(); - - if (hadCompactionRequest && !this.disposed) { - this.clearQueue(); - } - - this.emitChatEvent(createStreamErrorMessage(data)); - } - private attachAiListeners(): void { const forward = ( event: string, @@ -1631,23 +974,23 @@ export class AgentSession { forward("stream-start", 
(payload) => this.emitChatEvent(payload)); forward("stream-delta", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); }); forward("tool-call-start", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); }); forward("bash-output", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); }); forward("tool-call-delta", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); }); forward("tool-call-end", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); // Post-compaction context state depends on plan writes + tracked file diffs. @@ -1660,15 +1003,16 @@ export class AgentSession { } }); forward("reasoning-delta", (payload) => { - this.activeStreamHadAnyDelta = true; + this.retryHandler.markStreamHadDelta(); this.emitChatEvent(payload); }); forward("reasoning-end", (payload) => this.emitChatEvent(payload)); forward("usage-delta", (payload) => this.emitChatEvent(payload)); forward("stream-abort", (payload) => { - const hadCompactionRequest = this.activeCompactionRequest !== undefined; - this.activeCompactionRequest = undefined; - this.resetActiveStreamState(); + const hadCompactionRequest = this.retryHandler.hasActiveCompactionRequest(); + this.retryHandler.clearActiveCompactionRequest(); + this.retryHandler.resetActiveStreamState(); + this.ackPendingPostCompactionStateOnStreamEnd = false; if (hadCompactionRequest && !this.disposed) { this.clearQueue(); } @@ -1677,7 +1021,7 @@ export class AgentSession { forward("runtime-status", (payload) => this.emitChatEvent(payload)); forward("stream-end", async (payload) => { - this.activeCompactionRequest = undefined; + this.retryHandler.clearActiveCompactionRequest(); const handled 
= await this.compactionHandler.handleCompletion(payload as StreamEndEvent); if (!handled) { @@ -1712,7 +1056,8 @@ export class AgentSession { await this.dispatchPendingFollowUp(); } - this.resetActiveStreamState(); + this.retryHandler.resetActiveStreamState(); + this.ackPendingPostCompactionStateOnStreamEnd = false; // Stream end: auto-send queued messages (for user messages typed during streaming) this.sendQueuedMessages(); @@ -1729,7 +1074,7 @@ export class AgentSession { return; } const data = raw as StreamErrorPayload & { workspaceId: string }; - void this.handleStreamError({ + void this.retryHandler.handleStreamError({ messageId: data.messageId, error: data.error, errorType: data.errorType, diff --git a/src/node/services/contextExceededRetry.ts b/src/node/services/contextExceededRetry.ts new file mode 100644 index 0000000000..534ddeb05b --- /dev/null +++ b/src/node/services/contextExceededRetry.ts @@ -0,0 +1,806 @@ +/** + * Handles all context-exceeded retry strategies for AgentSession. + * + * Owns the retry-related state (attempt tracking, active stream context) and + * implements three progressive strategies: + * 1. Compaction retry (OpenAI truncation / Anthropic 1M context) + * 2. Post-compaction retry (strip post-compaction attachments) + * 3. 
Hard restart exec subagent (clear history, replay seed prompt) + */ +import { log } from "@/node/services/log"; +import type { HistoryService } from "@/node/services/historyService"; +import type { PartialService } from "@/node/services/partialService"; +import type { AIService } from "@/node/services/aiService"; +import type { + WorkspaceChatMessage, + SendMessageOptions, + FilePart, + DeleteMessage, +} from "@/common/orpc/types"; +import type { SendMessageError } from "@/common/types/errors"; +import { + createStreamErrorMessage, + type StreamErrorPayload, +} from "@/node/services/utils/sendMessageError"; +import { + createMuxMessage, + type CompactionFollowUpRequest, + type MuxFrontendMetadata, + type MuxMessage, + type ReviewNoteDataForDisplay, +} from "@/common/types/message"; +import { createUserMessageId } from "@/node/services/utils/messageIds"; +import { WORKSPACE_DEFAULTS } from "@/constants/workspaceDefaults"; +import { normalizeGatewayModel } from "@/common/utils/ai/models"; +import { getErrorMessage } from "@/common/utils/errors"; +import type { Result } from "@/common/types/result"; +import { AgentIdSchema } from "@/common/orpc/schemas"; +import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import { isExecLikeEditingCapableInResolvedChain } from "@/common/utils/agentTools"; +import { readAgentDefinition } from "@/node/services/agentDefinitions/agentDefinitionsService"; +import { resolveAgentInheritanceChain } from "@/node/services/agentDefinitions/resolveAgentInheritanceChain"; +import type { CompactionHandler } from "./compactionHandler"; + +// Type guard for compaction request metadata +// Supports both new `followUpContent` and legacy `continueMessage` for backwards compatibility +export interface CompactionRequestMetadata { + type: "compaction-request"; + parsed: { + followUpContent?: CompactionFollowUpRequest; + // Legacy field - older persisted requests may use this instead of followUpContent + continueMessage?: { + text?: 
string; + imageParts?: FilePart[]; + reviews?: ReviewNoteDataForDisplay[]; + muxMetadata?: MuxFrontendMetadata; + model?: string; + agentId?: string; + mode?: "exec" | "plan"; // Legacy: older versions stored mode instead of agentId + }; + }; +} + +export function isCompactionRequestMetadata(meta: unknown): meta is CompactionRequestMetadata { + if (typeof meta !== "object" || meta === null) return false; + const obj = meta as Record; + if (obj.type !== "compaction-request") return false; + if (typeof obj.parsed !== "object" || obj.parsed === null) return false; + return true; +} + +export { type StreamErrorPayload } from "@/node/services/utils/sendMessageError"; + +export interface ContextExceededRetryHandlerOptions { + workspaceId: string; + historyService: HistoryService; + partialService: PartialService; + aiService: AIService; + compactionHandler: CompactionHandler; + onPostCompactionStateChange?: () => void; +} + +export interface ContextExceededRetryCallbacks { + emitChatEvent: (message: WorkspaceChatMessage) => void; + clearQueue: () => void; + streamWithHistory: ( + modelString: string, + options?: SendMessageOptions, + openaiTruncationModeOverride?: "auto" | "disabled", + disablePostCompactionAttachments?: boolean + ) => Promise>; + isDisposed: () => boolean; + setStreamStarting: (value: boolean) => void; +} + +export class ContextExceededRetryHandler { + private readonly workspaceId: string; + private readonly historyService: HistoryService; + private readonly partialService: PartialService; + private readonly aiService: AIService; + private readonly compactionHandler: CompactionHandler; + private readonly onPostCompactionStateChange?: () => void; + private readonly callbacks: ContextExceededRetryCallbacks; + + /** Track compaction requests that already retried with truncation. */ + private readonly compactionRetryAttempts = new Set(); + + /** Track user message ids that already retried without post-compaction injection. 
*/ + private readonly postCompactionRetryAttempts = new Set(); + + /** Track user message ids that already hard-restarted for exec-like subagents. */ + private readonly execSubagentHardRestartAttempts = new Set(); + + /** Tracks the user message id that initiated the currently active stream (for retry guards). */ + private activeStreamUserMessageId?: string; + + /** True once we see any model/tool output for the current stream (retry guard). */ + private activeStreamHadAnyDelta = false; + + /** Tracks whether the current stream included post-compaction attachments. */ + private activeStreamHadPostCompactionInjection = false; + + /** Context needed to retry the current stream (cleared on stream end/abort/error). */ + private activeStreamContext?: { + modelString: string; + options?: SendMessageOptions; + openaiTruncationModeOverride?: "auto" | "disabled"; + }; + + /** + * Active compaction request metadata for retry decisions (cleared on stream end/abort). + */ + private activeCompactionRequest?: { + id: string; + modelString: string; + options?: SendMessageOptions; + }; + + constructor( + options: ContextExceededRetryHandlerOptions, + callbacks: ContextExceededRetryCallbacks + ) { + this.workspaceId = options.workspaceId; + this.historyService = options.historyService; + this.partialService = options.partialService; + this.aiService = options.aiService; + this.compactionHandler = options.compactionHandler; + this.onPostCompactionStateChange = options.onPostCompactionStateChange; + this.callbacks = callbacks; + } + + // ── Public API for AgentSession state management ── + + /** + * Initialize per-stream state. Called at the start of each streamWithHistory. 
+ */ + initStreamState(params: { + modelString: string; + options?: SendMessageOptions; + openaiTruncationModeOverride?: "auto" | "disabled"; + }): void { + this.activeStreamHadAnyDelta = false; + this.activeStreamHadPostCompactionInjection = false; + this.activeStreamContext = { + modelString: params.modelString, + options: params.options, + openaiTruncationModeOverride: params.openaiTruncationModeOverride, + }; + this.activeStreamUserMessageId = undefined; + } + + /** Set the user message ID that initiated the current stream. */ + setActiveStreamUserMessageId(id: string | undefined): void { + this.activeStreamUserMessageId = id; + } + + /** Resolve and set the active compaction request from history. */ + resolveAndSetCompactionRequest( + history: MuxMessage[], + modelString: string, + options?: SendMessageOptions + ): void { + this.activeCompactionRequest = this.resolveCompactionRequest(history, modelString, options); + } + + /** Set whether the current stream included post-compaction attachments. */ + setPostCompactionInjection(had: boolean): void { + this.activeStreamHadPostCompactionInjection = had; + } + + /** Mark that the current stream has received meaningful output. */ + markStreamHadDelta(): void { + this.activeStreamHadAnyDelta = true; + } + + /** Clear the active compaction request (e.g., on stream end/abort/failure). */ + clearActiveCompactionRequest(): void { + this.activeCompactionRequest = undefined; + } + + /** Whether a compaction request is currently active. */ + hasActiveCompactionRequest(): boolean { + return this.activeCompactionRequest !== undefined; + } + + /** Reset all per-stream state fields owned by this handler. 
*/ + resetActiveStreamState(): void { + this.activeStreamContext = undefined; + this.activeStreamUserMessageId = undefined; + this.activeStreamHadPostCompactionInjection = false; + this.activeStreamHadAnyDelta = false; + } + + // ── Error handling entry point ── + + async handleStreamError(data: StreamErrorPayload): Promise { + const hadCompactionRequest = this.activeCompactionRequest !== undefined; + if ( + await this.maybeRetryCompactionOnContextExceeded({ + messageId: data.messageId, + errorType: data.errorType, + }) + ) { + return; + } + + if ( + await this.maybeRetryWithoutPostCompactionOnContextExceeded({ + messageId: data.messageId, + errorType: data.errorType, + }) + ) { + return; + } + + if ( + await this.maybeHardRestartExecSubagentOnContextExceeded({ + messageId: data.messageId, + errorType: data.errorType, + }) + ) { + return; + } + + this.activeCompactionRequest = undefined; + this.resetActiveStreamState(); + + if (hadCompactionRequest && !this.callbacks.isDisposed()) { + this.callbacks.clearQueue(); + } + + this.callbacks.emitChatEvent(createStreamErrorMessage(data)); + } + + // ── Private retry strategies ── + + private resolveCompactionRequest( + history: MuxMessage[], + modelString: string, + options?: SendMessageOptions + ): { id: string; modelString: string; options?: SendMessageOptions } | undefined { + for (let index = history.length - 1; index >= 0; index -= 1) { + const message = history[index]; + if (message.role !== "user") { + continue; + } + if (!isCompactionRequestMetadata(message.metadata?.muxMetadata)) { + return undefined; + } + return { + id: message.id, + modelString, + options, + }; + } + return undefined; + } + + private async clearFailedAssistantMessage(messageId: string, reason: string): Promise { + const [partialResult, deleteMessageResult] = await Promise.all([ + this.partialService.deletePartial(this.workspaceId), + this.historyService.deleteMessage(this.workspaceId, messageId), + ]); + + if (!partialResult.success) { + 
log.warn("Failed to clear partial before retry", { + workspaceId: this.workspaceId, + reason, + error: partialResult.error, + }); + } + + if ( + !deleteMessageResult.success && + !( + typeof deleteMessageResult.error === "string" && + deleteMessageResult.error.includes("not found in history") + ) + ) { + log.warn("Failed to delete failed assistant placeholder", { + workspaceId: this.workspaceId, + reason, + error: deleteMessageResult.error, + }); + } + } + + private async finalizeCompactionRetry(messageId: string): Promise { + this.activeCompactionRequest = undefined; + this.resetActiveStreamState(); + this.callbacks.emitChatEvent({ + type: "stream-abort", + workspaceId: this.workspaceId, + messageId, + }); + await this.clearFailedAssistantMessage(messageId, "compaction-retry"); + } + + private supports1MContextRetry(modelString: string): boolean { + const normalized = normalizeGatewayModel(modelString); + const [provider, modelName] = normalized.split(":", 2); + const lower = modelName?.toLowerCase() ?? ""; + return ( + provider === "anthropic" && + (lower.startsWith("claude-sonnet-4-5") || lower.startsWith("claude-opus-4-6")) + ); + } + + private withAnthropic1MContext( + modelString: string, + options: SendMessageOptions | undefined + ): SendMessageOptions { + if (options) { + const existingModels = options.providerOptions?.anthropic?.use1MContextModels ?? []; + return { + ...options, + providerOptions: { + ...options.providerOptions, + anthropic: { + ...options.providerOptions?.anthropic, + use1MContext: true, + use1MContextModels: existingModels.includes(modelString) + ? 
existingModels + : [...existingModels, modelString], + }, + }, + }; + } + + return { + model: modelString, + agentId: WORKSPACE_DEFAULTS.agentId, + providerOptions: { + anthropic: { + use1MContext: true, + use1MContextModels: [modelString], + }, + }, + }; + } + + private isGptClassModel(modelString: string): boolean { + const normalized = normalizeGatewayModel(modelString); + const [provider, modelName] = normalized.split(":", 2); + return provider === "openai" && modelName?.toLowerCase().startsWith("gpt-"); + } + + private async maybeRetryCompactionOnContextExceeded(data: { + messageId: string; + errorType?: string; + }): Promise { + if (data.errorType !== "context_exceeded") { + return false; + } + + const context = this.activeCompactionRequest; + if (!context) { + return false; + } + + const isGptClass = this.isGptClassModel(context.modelString); + const is1MCapable = this.supports1MContextRetry(context.modelString); + + if (!isGptClass && !is1MCapable) { + return false; + } + + if (is1MCapable) { + // Skip retry if 1M context is already enabled (via legacy global flag or per-model list) + const anthropicOpts = context.options?.providerOptions?.anthropic; + const already1M = + anthropicOpts?.use1MContext === true || + (anthropicOpts?.use1MContextModels?.includes(context.modelString) ?? false); + if (already1M) { + return false; + } + } + + if (this.compactionRetryAttempts.has(context.id)) { + return false; + } + + this.compactionRetryAttempts.add(context.id); + + const retryLabel = is1MCapable ? "Anthropic 1M context" : "OpenAI truncation"; + log.info(`Compaction hit context limit; retrying once with ${retryLabel}`, { + workspaceId: this.workspaceId, + model: context.modelString, + compactionRequestId: context.id, + }); + + await this.finalizeCompactionRetry(data.messageId); + + const retryOptions = is1MCapable + ? 
this.withAnthropic1MContext(context.modelString, context.options) + : context.options; + this.callbacks.setStreamStarting(true); + let retryResult: Result; + try { + retryResult = await this.callbacks.streamWithHistory( + context.modelString, + retryOptions, + isGptClass ? "auto" : undefined + ); + } finally { + this.callbacks.setStreamStarting(false); + } + if (!retryResult.success) { + log.error("Compaction retry failed to start", { + workspaceId: this.workspaceId, + error: retryResult.error, + }); + return false; + } + + return true; + } + + private async maybeRetryWithoutPostCompactionOnContextExceeded(data: { + messageId: string; + errorType?: string; + }): Promise { + if (data.errorType !== "context_exceeded") { + return false; + } + + // Only retry if we actually injected post-compaction context. + if (!this.activeStreamHadPostCompactionInjection) { + return false; + } + + // Guardrail: don't retry if we've already emitted any meaningful output. + if (this.activeStreamHadAnyDelta) { + return false; + } + + const requestId = this.activeStreamUserMessageId; + const context = this.activeStreamContext; + if (!requestId || !context) { + return false; + } + + if (this.postCompactionRetryAttempts.has(requestId)) { + return false; + } + + this.postCompactionRetryAttempts.add(requestId); + + log.info("Post-compaction context hit context limit; retrying once without it", { + workspaceId: this.workspaceId, + requestId, + model: context.modelString, + }); + + // The post-compaction diffs are likely the culprit; discard them so we don't loop. + try { + await this.compactionHandler.discardPendingDiffs("context_exceeded"); + this.onPostCompactionStateChange?.(); + } catch (error) { + log.warn("Failed to discard pending post-compaction state", { + workspaceId: this.workspaceId, + error: getErrorMessage(error), + }); + } + + // Abort the failed assistant placeholder and clean up persisted partial/history state. 
+ this.resetActiveStreamState(); + this.callbacks.emitChatEvent({ + type: "stream-abort", + workspaceId: this.workspaceId, + messageId: data.messageId, + }); + await this.clearFailedAssistantMessage(data.messageId, "post-compaction-retry"); + + // Retry the same request, but without post-compaction injection. + this.callbacks.setStreamStarting(true); + let retryResult: Result; + try { + retryResult = await this.callbacks.streamWithHistory( + context.modelString, + context.options, + context.openaiTruncationModeOverride, + true + ); + } finally { + this.callbacks.setStreamStarting(false); + } + + if (!retryResult.success) { + log.error("Post-compaction retry failed to start", { + workspaceId: this.workspaceId, + error: retryResult.error, + }); + return false; + } + + return true; + } + + private async maybeHardRestartExecSubagentOnContextExceeded(data: { + messageId: string; + errorType?: string; + }): Promise { + if (data.errorType !== "context_exceeded") { + return false; + } + + // Only enabled via experiment (and only when we still have a valid retry context). + const context = this.activeStreamContext; + const requestId = this.activeStreamUserMessageId; + const experimentEnabled = context?.options?.experiments?.execSubagentHardRestart === true; + if (!experimentEnabled || !context || !requestId) { + return false; + } + + // Guardrail: don't hard-restart after any meaningful output. + // This is intended to recover from "prompt too long" cases before the model starts streaming. + if (this.activeStreamHadAnyDelta) { + return false; + } + + if (this.execSubagentHardRestartAttempts.has(requestId)) { + return false; + } + + // Guard for test mocks that may not implement getWorkspaceMetadata. 
+ if (typeof this.aiService.getWorkspaceMetadata !== "function") { + return false; + } + + const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); + if (!metadataResult.success) { + return false; + } + + const metadata = metadataResult.data; + if (!metadata.parentWorkspaceId) { + return false; + } + + const agentIdRaw = (metadata.agentId ?? metadata.agentType ?? WORKSPACE_DEFAULTS.agentId) + .trim() + .toLowerCase(); + const parsedAgentId = AgentIdSchema.safeParse(agentIdRaw); + const agentId = parsedAgentId.success ? parsedAgentId.data : ("exec" as const); + + // Prefer resolving agent inheritance from the parent workspace: project agents may be untracked + // (and therefore absent from child worktrees), but they are always present in the parent that + // spawned the task. + const metadataCandidates: Array = [metadata]; + + try { + const parentMetadataResult = await this.aiService.getWorkspaceMetadata( + metadata.parentWorkspaceId + ); + if (parentMetadataResult.success) { + metadataCandidates.unshift(parentMetadataResult.data); + } + } catch { + // ignore - fall back to child metadata + } + + let chain: Awaited> | undefined; + for (const agentMetadata of metadataCandidates) { + try { + const runtime = createRuntimeForWorkspace(agentMetadata); + + // In-place workspaces (CLI/benchmarks) have projectPath === name. + // Use path directly instead of reconstructing via getWorkspacePath. + const isInPlace = agentMetadata.projectPath === agentMetadata.name; + const workspacePath = isInPlace + ? agentMetadata.projectPath + : runtime.getWorkspacePath(agentMetadata.projectPath, agentMetadata.name); + + const agentDiscoveryPath = + context.options?.disableWorkspaceAgents === true + ? 
agentMetadata.projectPath + : workspacePath; + + const agentDefinition = await readAgentDefinition(runtime, agentDiscoveryPath, agentId); + chain = await resolveAgentInheritanceChain({ + runtime, + workspacePath: agentDiscoveryPath, + agentId, + agentDefinition, + workspaceId: this.workspaceId, + }); + break; + } catch { + // ignore - try next candidate + } + } + + if (!chain) { + // If we fail to resolve tool policy/inheritance, treat as non-exec-like. + return false; + } + + if (!isExecLikeEditingCapableInResolvedChain(chain)) { + return false; + } + + this.execSubagentHardRestartAttempts.add(requestId); + + const continuationNotice = + "Context limit reached. Mux restarted this agent's chat history and will replay your original prompt below. " + + "Continue using only the current workspace state (files, git history, command output); " + + "re-inspect the repo as needed."; + + log.info("Exec-like subagent hit context limit; hard-restarting history and retrying", { + workspaceId: this.workspaceId, + requestId, + model: context.modelString, + agentId, + }); + + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (!historyResult.success) { + return false; + } + + const messages = historyResult.data; + + const firstPromptIndex = messages.findIndex( + (msg) => msg.role === "user" && msg.metadata?.synthetic !== true + ); + if (firstPromptIndex === -1) { + return false; + } + + // Include any synthetic snapshots that were persisted immediately before the task prompt. + let seedStartIndex = firstPromptIndex; + for (let i = firstPromptIndex - 1; i >= 0; i -= 1) { + const msg = messages[i]; + const isSnapshot = + msg.role === "user" && + msg.metadata?.synthetic === true && + (msg.metadata?.fileAtMentionSnapshot ?? 
msg.metadata?.agentSkillSnapshot); + if (!isSnapshot) { + break; + } + seedStartIndex = i; + } + + const seedMessages = messages.slice(seedStartIndex, firstPromptIndex + 1); + if (seedMessages.length === 0) { + return false; + } + + // Best-effort: discard pending post-compaction state so we don't immediately re-inject it. + try { + await this.compactionHandler.discardPendingDiffs("execSubagentHardRestart"); + this.onPostCompactionStateChange?.(); + } catch (error) { + log.warn("Failed to discard pending post-compaction state before hard restart", { + workspaceId: this.workspaceId, + error: getErrorMessage(error), + }); + } + + // Abort the failed assistant placeholder and clean up partial/history state. + this.activeCompactionRequest = undefined; + this.resetActiveStreamState(); + if (!this.callbacks.isDisposed()) { + this.callbacks.clearQueue(); + } + + this.callbacks.emitChatEvent({ + type: "stream-abort", + workspaceId: this.workspaceId, + messageId: data.messageId, + }); + + const partialDeleteResult = await this.partialService.deletePartial(this.workspaceId); + if (!partialDeleteResult.success) { + log.warn("Failed to delete partial before exec subagent hard restart", { + workspaceId: this.workspaceId, + error: partialDeleteResult.error, + }); + } + + const clearResult = await this.historyService.clearHistory(this.workspaceId); + if (!clearResult.success) { + log.warn("Failed to clear history for exec subagent hard restart", { + workspaceId: this.workspaceId, + error: clearResult.error, + }); + return false; + } + + const deletedSequences = clearResult.data; + if (deletedSequences.length > 0) { + const deleteMessage: DeleteMessage = { + type: "delete", + historySequences: deletedSequences, + }; + this.callbacks.emitChatEvent(deleteMessage); + } + + const cloneForAppend = (msg: MuxMessage): MuxMessage => { + const metadataCopy = msg.metadata ? 
{ ...msg.metadata } : undefined; + if (metadataCopy) { + metadataCopy.historySequence = undefined; + metadataCopy.partial = undefined; + metadataCopy.error = undefined; + metadataCopy.errorType = undefined; + } + + return { + ...msg, + metadata: metadataCopy, + parts: [...msg.parts], + }; + }; + + const continuationMessage = createMuxMessage( + createUserMessageId(), + "user", + continuationNotice, + { + timestamp: Date.now(), + synthetic: true, + uiVisible: true, + } + ); + + const messagesToAppend = [continuationMessage, ...seedMessages.map(cloneForAppend)]; + for (const message of messagesToAppend) { + const appendResult = await this.historyService.appendToHistory(this.workspaceId, message); + if (!appendResult.success) { + log.error("Failed to append message during exec subagent hard restart", { + workspaceId: this.workspaceId, + messageId: message.id, + error: appendResult.error, + }); + return false; + } + + // Add type: "message" for discriminated union (MuxMessage doesn't have it) + this.callbacks.emitChatEvent({ + ...message, + type: "message" as const, + }); + } + + const existingInstructions = context.options?.additionalSystemInstructions; + const mergedAdditionalSystemInstructions = existingInstructions + ? `${continuationNotice}\n\n${existingInstructions}` + : continuationNotice; + + const retryOptions: SendMessageOptions | undefined = context.options + ? 
{ + ...context.options, + additionalSystemInstructions: mergedAdditionalSystemInstructions, + } + : { + model: context.modelString, + agentId: WORKSPACE_DEFAULTS.agentId, + additionalSystemInstructions: mergedAdditionalSystemInstructions, + experiments: { + execSubagentHardRestart: true, + }, + }; + + this.callbacks.setStreamStarting(true); + let retryResult: Result; + try { + retryResult = await this.callbacks.streamWithHistory( + context.modelString, + retryOptions, + context.openaiTruncationModeOverride + ); + } finally { + this.callbacks.setStreamStarting(false); + } + + if (!retryResult.success) { + log.error("Exec subagent hard restart retry failed to start", { + workspaceId: this.workspaceId, + error: retryResult.error, + }); + return false; + } + + return true; + } +} From 9d5620aa08a829bfc49639e08df8203a17e09c80 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 12:23:59 -0600 Subject: [PATCH 09/14] test: update WorkspaceService tests for extracted PostCompaction + AISettings - Debounce test patches PostCompactionService directly instead of WorkspaceService delegation wrappers - AI settings tests use saveConfig mock (the real side effect) instead of monkey-patching the now-standalone persistWorkspaceAISettingsForAgent --- src/node/services/workspaceService.test.ts | 49 +++++++++++++++------- 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/src/node/services/workspaceService.test.ts b/src/node/services/workspaceService.test.ts index d20410609b..34254b5db6 100644 --- a/src/node/services/workspaceService.test.ts +++ b/src/node/services/workspaceService.test.ts @@ -389,12 +389,22 @@ describe("WorkspaceService post-compaction metadata refresh", () => { () => Promise.resolve(postCompactionState) ); - svc.getInfo = getInfoMock; - svc.getPostCompactionState = getPostCompactionStateMock; + // Patch the PostCompactionService's internal methods, not WorkspaceService's delegation wrappers + const pcs = ( + workspaceService as unknown as { + 
postCompactionService: { + getInfo: unknown; + getPostCompactionState: unknown; + schedulePostCompactionMetadataRefresh: (wsId: string) => void; + }; + } + ).postCompactionService; + (pcs as unknown as Record).getInfo = getInfoMock; + pcs.getPostCompactionState = getPostCompactionStateMock; - svc.schedulePostCompactionMetadataRefresh(workspaceId); - svc.schedulePostCompactionMetadataRefresh(workspaceId); - svc.schedulePostCompactionMetadataRefresh(workspaceId); + pcs.schedulePostCompactionMetadataRefresh(workspaceId); + pcs.schedulePostCompactionMetadataRefresh(workspaceId); + pcs.schedulePostCompactionMetadataRefresh(workspaceId); // Debounce is short, but use a safe buffer. await new Promise((resolve) => setTimeout(resolve, 150)); @@ -430,11 +440,20 @@ describe("WorkspaceService maybePersistAISettingsFromOptions", () => { appendToHistory: mock(() => Promise.resolve({ success: true as const, data: undefined })), }; + // Provide enough config mocking for persistWorkspaceAISettingsForAgent to + // reach its editConfig/saveConfig call (needs findWorkspace + loadConfigOrDefault) + const wsEntry = { id: "ws", path: "/tmp/test/src/ws", name: "ws" }; + const projectConfig = { workspaces: [wsEntry] }; + const projectsConfig = { projects: new Map([["/tmp/test", projectConfig]]) }; + const mockConfig: Partial = { srcDir: "/tmp/test", getSessionDir: mock(() => "/tmp/test/sessions"), generateStableId: mock(() => "test-id"), - findWorkspace: mock(() => null), + findWorkspace: mock(() => ({ projectPath: "/tmp/test", workspacePath: "/tmp/test/src/ws" })), + loadConfigOrDefault: mock(() => projectsConfig), + saveConfig: mock(() => Promise.resolve()), + getAllWorkspaceMetadata: mock(() => Promise.resolve([])), }; const mockPartialService: Partial = { @@ -459,19 +478,19 @@ describe("WorkspaceService maybePersistAISettingsFromOptions", () => { }); test("persists agent AI settings for custom agent", async () => { - const persistSpy = mock(() => Promise.resolve({ success: true as 
const, data: true })); - + // After extraction, maybePersistAISettingsFromOptions calls the standalone + // function directly. Verify it calls config.saveConfig (the real side effect). interface WorkspaceServiceTestAccess { maybePersistAISettingsFromOptions: ( workspaceId: string, options: unknown, context: "send" | "resume" ) => Promise; - persistWorkspaceAISettingsForAgent: (...args: unknown[]) => unknown; } const svc = workspaceService as unknown as WorkspaceServiceTestAccess; - svc.persistWorkspaceAISettingsForAgent = persistSpy; + const cfg = (workspaceService as unknown as { config: { saveConfig: ReturnType } }) + .config; await svc.maybePersistAISettingsFromOptions( "ws", @@ -483,23 +502,21 @@ describe("WorkspaceService maybePersistAISettingsFromOptions", () => { "send" ); - expect(persistSpy).toHaveBeenCalledTimes(1); + expect(cfg.saveConfig).toHaveBeenCalledTimes(1); }); test("persists agent AI settings when agentId matches", async () => { - const persistSpy = mock(() => Promise.resolve({ success: true as const, data: true })); - interface WorkspaceServiceTestAccess { maybePersistAISettingsFromOptions: ( workspaceId: string, options: unknown, context: "send" | "resume" ) => Promise; - persistWorkspaceAISettingsForAgent: (...args: unknown[]) => unknown; } const svc = workspaceService as unknown as WorkspaceServiceTestAccess; - svc.persistWorkspaceAISettingsForAgent = persistSpy; + const cfg = (workspaceService as unknown as { config: { saveConfig: ReturnType } }) + .config; await svc.maybePersistAISettingsFromOptions( "ws", @@ -511,7 +528,7 @@ describe("WorkspaceService maybePersistAISettingsFromOptions", () => { "send" ); - expect(persistSpy).toHaveBeenCalledTimes(1); + expect(cfg.saveConfig).toHaveBeenCalledTimes(1); }); }); describe("WorkspaceService remove timing rollup", () => { From 2a3fcda231e0e8a0ee77b149bbc19a0401a55efb Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 19:46:03 -0600 Subject: [PATCH 10/14] refactor: extract 
guardStreamingAllowed helper in WorkspaceService Deduplicates ~50 lines of identical guard checks between sendMessage and resumeStream (renaming/removing/missing/queued state checks). --- src/node/services/workspaceService.ts | 159 +++++++++----------------- 1 file changed, 57 insertions(+), 102 deletions(-) diff --git a/src/node/services/workspaceService.ts b/src/node/services/workspaceService.ts index 36a2c815b2..e93d21dc28 100644 --- a/src/node/services/workspaceService.ts +++ b/src/node/services/workspaceService.ts @@ -1686,6 +1686,59 @@ export class WorkspaceService extends EventEmitter { } } + /** + * Shared guard for sendMessage/resumeStream — blocks streaming when the + * workspace is in an incompatible state (renaming, removing, deleted, queued). + * Returns an error result if blocked, or null if streaming is allowed. + */ + private guardStreamingAllowed( + workspaceId: string, + caller: "sendMessage" | "resumeStream", + internal?: { allowQueuedAgentTask?: boolean } + ): Result | null { + if (this.renamingWorkspaces.has(workspaceId)) { + log.debug(`${caller} blocked: workspace is being renamed`, { workspaceId }); + return Err({ + type: "unknown", + raw: "Workspace is being renamed. Please wait and try again.", + }); + } + if (this.removingWorkspaces.has(workspaceId)) { + log.debug(`${caller} blocked: workspace is being removed`, { workspaceId }); + return Err({ + type: "unknown", + raw: "Workspace is being deleted. Please wait and try again.", + }); + } + if (!this.config.findWorkspace(workspaceId)) { + return Err({ type: "unknown", raw: "Workspace not found. It may have been deleted." 
}); + } + if (!internal?.allowQueuedAgentTask) { + const config = this.config.loadConfigOrDefault(); + for (const [_projectPath, project] of config.projects) { + const ws = project.workspaces.find((w) => w.id === workspaceId); + if (!ws) continue; + if (ws.parentWorkspaceId && ws.taskStatus === "queued") { + taskQueueDebug(`WorkspaceService.${caller} blocked (queued task)`, { + workspaceId, + stack: new Error(`${caller} blocked`).stack, + }); + return Err({ + type: "unknown", + raw: "This agent task is queued and cannot start yet. Wait for a slot to free.", + }); + } + break; + } + } else { + taskQueueDebug(`WorkspaceService.${caller} allowed (internal dequeue)`, { + workspaceId, + stack: new Error(`${caller} internal`).stack, + }); + } + return null; + } + async sendMessage( workspaceId: string, message: string, @@ -1706,57 +1759,8 @@ export class WorkspaceService extends EventEmitter { }); try { - // Block streaming while workspace is being renamed to prevent path conflicts - if (this.renamingWorkspaces.has(workspaceId)) { - log.debug("sendMessage blocked: workspace is being renamed", { workspaceId }); - return Err({ - type: "unknown", - raw: "Workspace is being renamed. Please wait and try again.", - }); - } - - // Block streaming while workspace is being removed to prevent races with config/session deletion. - if (this.removingWorkspaces.has(workspaceId)) { - log.debug("sendMessage blocked: workspace is being removed", { workspaceId }); - return Err({ - type: "unknown", - raw: "Workspace is being deleted. Please wait and try again.", - }); - } - - // Guard: avoid creating sessions for workspaces that don't exist anymore. - if (!this.config.findWorkspace(workspaceId)) { - return Err({ - type: "unknown", - raw: "Workspace not found. It may have been deleted.", - }); - } - - // Guard: queued agent tasks must not start streaming via generic sendMessage calls. - // They should only be started by TaskService once a parallel slot is available. 
- if (!internal?.allowQueuedAgentTask) { - const config = this.config.loadConfigOrDefault(); - for (const [_projectPath, project] of config.projects) { - const ws = project.workspaces.find((w) => w.id === workspaceId); - if (!ws) continue; - if (ws.parentWorkspaceId && ws.taskStatus === "queued") { - taskQueueDebug("WorkspaceService.sendMessage blocked (queued task)", { - workspaceId, - stack: new Error("sendMessage blocked").stack, - }); - return Err({ - type: "unknown", - raw: "This agent task is queued and cannot start yet. Wait for a slot to free.", - }); - } - break; - } - } else { - taskQueueDebug("WorkspaceService.sendMessage allowed (internal dequeue)", { - workspaceId, - stack: new Error("sendMessage internal").stack, - }); - } + const guardErr = this.guardStreamingAllowed(workspaceId, "sendMessage", internal); + if (guardErr) return guardErr; const session = this.getOrCreateSession(workspaceId); @@ -1877,57 +1881,8 @@ export class WorkspaceService extends EventEmitter { internal?: { allowQueuedAgentTask?: boolean } ): Promise> { try { - // Block streaming while workspace is being renamed to prevent path conflicts - if (this.renamingWorkspaces.has(workspaceId)) { - log.debug("resumeStream blocked: workspace is being renamed", { workspaceId }); - return Err({ - type: "unknown", - raw: "Workspace is being renamed. Please wait and try again.", - }); - } - - // Block streaming while workspace is being removed to prevent races with config/session deletion. - if (this.removingWorkspaces.has(workspaceId)) { - log.debug("resumeStream blocked: workspace is being removed", { workspaceId }); - return Err({ - type: "unknown", - raw: "Workspace is being deleted. Please wait and try again.", - }); - } - - // Guard: avoid creating sessions for workspaces that don't exist anymore. - if (!this.config.findWorkspace(workspaceId)) { - return Err({ - type: "unknown", - raw: "Workspace not found. 
It may have been deleted.", - }); - } - - // Guard: queued agent tasks must not be resumed by generic UI/API calls. - // TaskService is responsible for dequeuing and starting them. - if (!internal?.allowQueuedAgentTask) { - const config = this.config.loadConfigOrDefault(); - for (const [_projectPath, project] of config.projects) { - const ws = project.workspaces.find((w) => w.id === workspaceId); - if (!ws) continue; - if (ws.parentWorkspaceId && ws.taskStatus === "queued") { - taskQueueDebug("WorkspaceService.resumeStream blocked (queued task)", { - workspaceId, - stack: new Error("resumeStream blocked").stack, - }); - return Err({ - type: "unknown", - raw: "This agent task is queued and cannot start yet. Wait for a slot to free.", - }); - } - break; - } - } else { - taskQueueDebug("WorkspaceService.resumeStream allowed (internal dequeue)", { - workspaceId, - stack: new Error("resumeStream internal").stack, - }); - } + const guardErr = this.guardStreamingAllowed(workspaceId, "resumeStream", internal); + if (guardErr) return guardErr; const session = this.getOrCreateSession(workspaceId); From 95f8591644d4908a86f73b115abbd7f19391f0a2 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 19:49:17 -0600 Subject: [PATCH 11/14] refactor: extract SnapshotMaterializer from AgentSession MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move materializeFileAtMentionsSnapshot and materializeAgentSkillSnapshot to src/node/services/snapshotMaterializer.ts as stateless free functions. AgentSession delegates with explicit dependency parameters. 
agentSession.ts: 1680 → 1527 LoC (-153) --- src/node/services/agentSession.ts | 187 ++------------------ src/node/services/snapshotMaterializer.ts | 199 ++++++++++++++++++++++ 2 files changed, 216 insertions(+), 170 deletions(-) create mode 100644 src/node/services/snapshotMaterializer.ts diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index bdcb829007..1c5a195daf 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -1,9 +1,7 @@ import assert from "@/common/utils/assert"; import { EventEmitter } from "events"; import * as path from "path"; -import { createHash } from "crypto"; import { readFile } from "fs/promises"; -import YAML from "yaml"; import { PlatformPaths } from "@/common/utils/paths"; import { log } from "@/node/services/log"; import type { Config } from "@/node/config"; @@ -18,7 +16,6 @@ import { DEFAULT_RUNTIME_CONFIG } from "@/common/constants/workspace"; import { DEFAULT_MODEL } from "@/common/constants/knownModels"; import type { WorkspaceChatMessage, SendMessageOptions, FilePart } from "@/common/orpc/types"; import type { SendMessageError } from "@/common/types/errors"; -import { SkillNameSchema } from "@/common/orpc/schemas"; import { buildStreamErrorEventData, createUnknownSendMessageError, @@ -28,11 +25,7 @@ import { isCompactionRequestMetadata, type StreamErrorPayload, } from "./contextExceededRetry"; -import { - createUserMessageId, - createFileSnapshotMessageId, - createAgentSkillSnapshotMessageId, -} from "@/node/services/utils/messageIds"; +import { createUserMessageId } from "@/node/services/utils/messageIds"; import { FileChangeTracker, type FileState, @@ -51,6 +44,10 @@ import { } from "@/common/types/message"; import { createRuntime } from "@/node/runtime/runtimeFactory"; import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import { + materializeFileAtMentionsSnapshot, + materializeAgentSkillSnapshot, +} from 
"@/node/services/snapshotMaterializer"; import { MessageQueue } from "./messageQueue"; import type { StreamEndEvent } from "@/common/types/stream"; import { CompactionHandler } from "./compactionHandler"; @@ -64,8 +61,7 @@ import { TURNS_BETWEEN_ATTACHMENTS } from "@/common/constants/attachments"; import { extractEditedFileDiffs } from "@/common/utils/messages/extractEditedFiles"; import { getModelCapabilities } from "@/common/utils/ai/modelCapabilities"; import { normalizeGatewayModel, isValidModelFormat } from "@/common/utils/ai/models"; -import { readAgentSkill } from "@/node/services/agentSkills/agentSkillsService"; -import { materializeFileAtMentions } from "@/node/services/fileAtMentions"; + import { getErrorMessage } from "@/common/utils/errors"; /** @@ -94,8 +90,6 @@ function estimateBase64DataUrlBytes(dataUrl: string): number | null { const padding = base64.endsWith("==") ? 2 : base64.endsWith("=") ? 1 : 0; return Math.floor((base64.length * 3) / 4) - padding; } -const MAX_AGENT_SKILL_SNAPSHOT_CHARS = 50_000; - export interface AgentSessionChatEvent { workspaceId: string; message: WorkspaceChatMessage; @@ -663,12 +657,20 @@ export class AgentSession { // This ensures prompt-cache stability: we read files once and persist the content, // so subsequent turns don't re-read (which would change the prompt prefix if files changed). // File changes after this point are surfaced via diffs instead. 
- const snapshotResult = await this.materializeFileAtMentionsSnapshot(trimmedMessage); + const snapshotResult = await materializeFileAtMentionsSnapshot( + trimmedMessage, + this.workspaceId, + this.aiService, + this.fileChangeTracker.record.bind(this.fileChangeTracker) + ); let skillSnapshotResult: { snapshotMessage: MuxMessage } | null = null; try { - skillSnapshotResult = await this.materializeAgentSkillSnapshot( + skillSnapshotResult = await materializeAgentSkillSnapshot( typedMuxMetadata, - options?.disableWorkspaceAgents + options?.disableWorkspaceAgents, + this.workspaceId, + this.aiService, + this.historyService ); } catch (error) { return Err(createUnknownSendMessageError(getErrorMessage(error))); @@ -1440,161 +1442,6 @@ export class AgentSession { return attachments; } - /** - * Materialize @file mentions from a user message into a persisted snapshot message. - * - * This reads the referenced files once and creates a synthetic message containing - * their content. The snapshot is persisted to history so subsequent sends don't - * re-read the files (which would bust prompt cache if files changed). - * - * Also registers file state for change detection via diffs. 
- * - * @returns The snapshot message and list of materialized mentions, or null if no mentions found - */ - private async materializeFileAtMentionsSnapshot( - messageText: string - ): Promise<{ snapshotMessage: MuxMessage; materializedTokens: string[] } | null> { - // Guard for test mocks that may not implement getWorkspaceMetadata - if (typeof this.aiService.getWorkspaceMetadata !== "function") { - return null; - } - - const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); - if (!metadataResult.success) { - log.debug("Cannot materialize @file mentions: workspace metadata not found", { - workspaceId: this.workspaceId, - }); - return null; - } - - const metadata = metadataResult.data; - const runtime = createRuntimeForWorkspace(metadata); - const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); - - const materialized = await materializeFileAtMentions(messageText, { - runtime, - workspacePath, - }); - - if (materialized.length === 0) { - return null; - } - - // Register file state for each successfully read file (for change detection) - for (const mention of materialized) { - if ( - mention.content !== undefined && - mention.modifiedTimeMs !== undefined && - mention.resolvedPath - ) { - this.recordFileState(mention.resolvedPath, { - content: mention.content, - timestamp: mention.modifiedTimeMs, - }); - } - } - - // Create a synthetic snapshot message (not persisted here - caller handles persistence) - const tokens = materialized.map((m) => m.token); - const blocks = materialized.map((m) => m.block).join("\n\n"); - - const snapshotId = createFileSnapshotMessageId(); - const snapshotMessage = createMuxMessage(snapshotId, "user", blocks, { - timestamp: Date.now(), - synthetic: true, - fileAtMentionSnapshot: tokens, - }); - - return { snapshotMessage, materializedTokens: tokens }; - } - - private async materializeAgentSkillSnapshot( - muxMetadata: MuxFrontendMetadata | undefined, - disableWorkspaceAgents: 
boolean | undefined - ): Promise<{ snapshotMessage: MuxMessage } | null> { - if (!muxMetadata || muxMetadata.type !== "agent-skill") { - return null; - } - - // Guard for test mocks that may not implement getWorkspaceMetadata. - if (typeof this.aiService.getWorkspaceMetadata !== "function") { - return null; - } - - const parsedName = SkillNameSchema.safeParse(muxMetadata.skillName); - if (!parsedName.success) { - throw new Error(`Invalid agent skill name: ${muxMetadata.skillName}`); - } - - const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); - if (!metadataResult.success) { - throw new Error("Cannot materialize agent skill: workspace metadata not found"); - } - - const metadata = metadataResult.data; - const runtime = createRuntime(metadata.runtimeConfig, { - projectPath: metadata.projectPath, - workspaceName: metadata.name, - }); - - // In-place workspaces (CLI/benchmarks) have projectPath === name. - // Use the path directly instead of reconstructing via getWorkspacePath. - const isInPlace = metadata.projectPath === metadata.name; - const workspacePath = isInPlace - ? metadata.projectPath - : runtime.getWorkspacePath(metadata.projectPath, metadata.name); - - // When workspace agents are disabled, resolve skills from the project path instead of - // the worktree so skill invocation uses the same precedence/discovery root as the UI. - const skillDiscoveryPath = disableWorkspaceAgents ? metadata.projectPath : workspacePath; - - const resolved = await readAgentSkill(runtime, skillDiscoveryPath, parsedName.data); - const skill = resolved.package; - - const frontmatterYaml = YAML.stringify(skill.frontmatter).trimEnd(); - - const body = - skill.body.length > MAX_AGENT_SKILL_SNAPSHOT_CHARS - ? 
`${skill.body.slice(0, MAX_AGENT_SKILL_SNAPSHOT_CHARS)}\n\n[Skill body truncated to ${MAX_AGENT_SKILL_SNAPSHOT_CHARS} characters]` - : skill.body; - - const snapshotText = `\n${body}\n`; - - // Include the parsed YAML frontmatter in the hash so frontmatter-only edits (e.g. description) - // generate a new snapshot and keep the UI hover preview in sync. - const sha256 = createHash("sha256") - .update(JSON.stringify({ snapshotText, frontmatterYaml })) - .digest("hex"); - - // Dedupe: if we recently persisted the same snapshot, avoid inserting again. - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (historyResult.success) { - const recentMessages = historyResult.data.slice(Math.max(0, historyResult.data.length - 5)); - const recentSnapshot = [...recentMessages] - .reverse() - .find((msg) => msg.metadata?.synthetic && msg.metadata?.agentSkillSnapshot); - const recentMeta = recentSnapshot?.metadata?.agentSkillSnapshot; - - if (recentMeta?.skillName === skill.frontmatter.name && recentMeta.sha256 === sha256) { - return null; - } - } - - const snapshotId = createAgentSkillSnapshotMessageId(); - const snapshotMessage = createMuxMessage(snapshotId, "user", snapshotText, { - timestamp: Date.now(), - synthetic: true, - agentSkillSnapshot: { - skillName: skill.frontmatter.name, - scope: skill.scope, - sha256, - frontmatterYaml, - }, - }); - - return { snapshotMessage }; - } - /** * Load excluded items from the exclusions file. * Returns empty set if file doesn't exist or can't be read. diff --git a/src/node/services/snapshotMaterializer.ts b/src/node/services/snapshotMaterializer.ts new file mode 100644 index 0000000000..69b5c84b7e --- /dev/null +++ b/src/node/services/snapshotMaterializer.ts @@ -0,0 +1,199 @@ +/** + * Snapshot materializers for @file mentions and agent skills. 
+ * + * These stateless functions read external content (files, skills) and produce + * synthetic MuxMessages that are persisted to history for prompt-cache stability. + * Extracted from AgentSession to keep that class focused on stream orchestration. + */ +import { createHash } from "crypto"; +import YAML from "yaml"; +import { SkillNameSchema } from "@/common/orpc/schemas"; +import { + createMuxMessage, + type MuxFrontendMetadata, + type MuxMessage, +} from "@/common/types/message"; +import { createRuntime } from "@/node/runtime/runtimeFactory"; +import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import { readAgentSkill } from "@/node/services/agentSkills/agentSkillsService"; +import { materializeFileAtMentions } from "@/node/services/fileAtMentions"; +import { + createFileSnapshotMessageId, + createAgentSkillSnapshotMessageId, +} from "@/node/services/utils/messageIds"; +import { log } from "@/node/services/log"; +import type { FileState } from "@/node/services/utils/fileChangeTracker"; +import type { AIService } from "@/node/services/aiService"; +import type { HistoryService } from "@/node/services/historyService"; + +const MAX_AGENT_SKILL_SNAPSHOT_CHARS = 50_000; + +/** + * Materialize @file mentions from a user message into a persisted snapshot message. + * + * Reads the referenced files once and creates a synthetic message containing + * their content. The snapshot is persisted to history so subsequent sends don't + * re-read the files (which would bust prompt cache if files changed). + * + * Also registers file state for change detection via diffs. 
+ * + * @returns The snapshot message and list of materialized mentions, or null if no mentions found + */ +export async function materializeFileAtMentionsSnapshot( + messageText: string, + workspaceId: string, + aiService: AIService, + recordFileState: (filePath: string, state: FileState) => void +): Promise<{ snapshotMessage: MuxMessage; materializedTokens: string[] } | null> { + // Guard for test mocks that may not implement getWorkspaceMetadata + if (typeof aiService.getWorkspaceMetadata !== "function") { + return null; + } + + const metadataResult = await aiService.getWorkspaceMetadata(workspaceId); + if (!metadataResult.success) { + log.debug("Cannot materialize @file mentions: workspace metadata not found", { + workspaceId, + }); + return null; + } + + const metadata = metadataResult.data; + const runtime = createRuntimeForWorkspace(metadata); + const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); + + const materialized = await materializeFileAtMentions(messageText, { + runtime, + workspacePath, + }); + + if (materialized.length === 0) { + return null; + } + + // Register file state for each successfully read file (for change detection) + for (const mention of materialized) { + if ( + mention.content !== undefined && + mention.modifiedTimeMs !== undefined && + mention.resolvedPath + ) { + recordFileState(mention.resolvedPath, { + content: mention.content, + timestamp: mention.modifiedTimeMs, + }); + } + } + + // Create a synthetic snapshot message (not persisted here — caller handles persistence) + const tokens = materialized.map((m) => m.token); + const blocks = materialized.map((m) => m.block).join("\n\n"); + + const snapshotId = createFileSnapshotMessageId(); + const snapshotMessage = createMuxMessage(snapshotId, "user", blocks, { + timestamp: Date.now(), + synthetic: true, + fileAtMentionSnapshot: tokens, + }); + + return { snapshotMessage, materializedTokens: tokens }; +} + +/** + * Materialize an agent skill reference 
into a persisted snapshot message. + * + * Reads the skill YAML + body, creates a synthetic message containing the skill + * content, and deduplicates against the last 5 history messages to avoid + * inserting identical snapshots on consecutive sends. + * + * @returns The snapshot message, or null if no skill reference or duplicate detected + */ +export async function materializeAgentSkillSnapshot( + muxMetadata: MuxFrontendMetadata | undefined, + disableWorkspaceAgents: boolean | undefined, + workspaceId: string, + aiService: AIService, + historyService: HistoryService +): Promise<{ snapshotMessage: MuxMessage } | null> { + if (!muxMetadata || muxMetadata.type !== "agent-skill") { + return null; + } + + // Guard for test mocks that may not implement getWorkspaceMetadata. + if (typeof aiService.getWorkspaceMetadata !== "function") { + return null; + } + + const parsedName = SkillNameSchema.safeParse(muxMetadata.skillName); + if (!parsedName.success) { + throw new Error(`Invalid agent skill name: ${muxMetadata.skillName}`); + } + + const metadataResult = await aiService.getWorkspaceMetadata(workspaceId); + if (!metadataResult.success) { + throw new Error("Cannot materialize agent skill: workspace metadata not found"); + } + + const metadata = metadataResult.data; + const runtime = createRuntime(metadata.runtimeConfig, { + projectPath: metadata.projectPath, + workspaceName: metadata.name, + }); + + // In-place workspaces (CLI/benchmarks) have projectPath === name. + // Use the path directly instead of reconstructing via getWorkspacePath. + const isInPlace = metadata.projectPath === metadata.name; + const workspacePath = isInPlace + ? metadata.projectPath + : runtime.getWorkspacePath(metadata.projectPath, metadata.name); + + // When workspace agents are disabled, resolve skills from the project path instead of + // the worktree so skill invocation uses the same precedence/discovery root as the UI. + const skillDiscoveryPath = disableWorkspaceAgents ? 
metadata.projectPath : workspacePath; + + const resolved = await readAgentSkill(runtime, skillDiscoveryPath, parsedName.data); + const skill = resolved.package; + + const frontmatterYaml = YAML.stringify(skill.frontmatter).trimEnd(); + + const body = + skill.body.length > MAX_AGENT_SKILL_SNAPSHOT_CHARS + ? `${skill.body.slice(0, MAX_AGENT_SKILL_SNAPSHOT_CHARS)}\n\n[Skill body truncated to ${MAX_AGENT_SKILL_SNAPSHOT_CHARS} characters]` + : skill.body; + + const snapshotText = `\n${body}\n`; + + // Include the parsed YAML frontmatter in the hash so frontmatter-only edits (e.g. description) + // generate a new snapshot and keep the UI hover preview in sync. + const sha256 = createHash("sha256") + .update(JSON.stringify({ snapshotText, frontmatterYaml })) + .digest("hex"); + + // Dedupe: if we recently persisted the same snapshot, avoid inserting again. + const historyResult = await historyService.getHistory(workspaceId); + if (historyResult.success) { + const recentMessages = historyResult.data.slice(Math.max(0, historyResult.data.length - 5)); + const recentSnapshot = [...recentMessages] + .reverse() + .find((msg) => msg.metadata?.synthetic && msg.metadata?.agentSkillSnapshot); + const recentMeta = recentSnapshot?.metadata?.agentSkillSnapshot; + + if (recentMeta?.skillName === skill.frontmatter.name && recentMeta.sha256 === sha256) { + return null; + } + } + + const snapshotId = createAgentSkillSnapshotMessageId(); + const snapshotMessage = createMuxMessage(snapshotId, "user", snapshotText, { + timestamp: Date.now(), + synthetic: true, + agentSkillSnapshot: { + skillName: skill.frontmatter.name, + scope: skill.scope, + sha256, + frontmatterYaml, + }, + }); + + return { snapshotMessage }; +} From 0cdd9510211d69f7708c8fc3881f8a40b8a00ad6 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 19:54:20 -0600 Subject: [PATCH 12/14] refactor: extract PostCompactionAttachmentBuilder from AgentSession MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Move post-compaction attachment assembly (plan refs, TODOs, edited-file diffs, turn-counter state) to src/node/services/postCompactionAttachments.ts. Also deduplicates two near-identical 50-line attachment assembly blocks into a shared buildAttachments() method. agentSession.ts: 1527 → 1323 LoC (-204) --- src/node/services/agentSession.ts | 240 ++---------------- .../services/postCompactionAttachments.ts | 207 +++++++++++++++ 2 files changed, 225 insertions(+), 222 deletions(-) create mode 100644 src/node/services/postCompactionAttachments.ts diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index 1c5a195daf..2807853489 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -1,7 +1,6 @@ import assert from "@/common/utils/assert"; import { EventEmitter } from "events"; import * as path from "path"; -import { readFile } from "fs/promises"; import { PlatformPaths } from "@/common/utils/paths"; import { log } from "@/node/services/log"; import type { Config } from "@/node/config"; @@ -43,7 +42,6 @@ import { type MuxMessage, } from "@/common/types/message"; import { createRuntime } from "@/node/runtime/runtimeFactory"; -import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; import { materializeFileAtMentionsSnapshot, materializeAgentSkillSnapshot, @@ -54,11 +52,7 @@ import { CompactionHandler } from "./compactionHandler"; import type { TelemetryService } from "./telemetryService"; import type { BackgroundProcessManager } from "./backgroundProcessManager"; -import { AttachmentService } from "./attachmentService"; -import type { TodoItem } from "@/common/types/tools"; -import type { PostCompactionAttachment, PostCompactionExclusions } from "@/common/types/attachment"; -import { TURNS_BETWEEN_ATTACHMENTS } from "@/common/constants/attachments"; -import { extractEditedFileDiffs } from "@/common/utils/messages/extractEditedFiles"; +import { 
PostCompactionAttachmentBuilder } from "./postCompactionAttachments"; import { getModelCapabilities } from "@/common/utils/ai/modelCapabilities"; import { normalizeGatewayModel, isValidModelFormat } from "@/common/utils/ai/models"; @@ -141,24 +135,8 @@ export class AgentSession { /** Tracks file state for detecting external edits. */ private readonly fileChangeTracker = new FileChangeTracker(); - /** - * Track turns since last post-compaction attachment injection. - * Start at max to trigger immediate injection on first turn after compaction. - */ - private turnsSinceLastAttachment = TURNS_BETWEEN_ATTACHMENTS; - - /** - * Flag indicating compaction has occurred in this session. - * Used to enable the cooldown-based attachment injection. - */ - private compactionOccurred = false; - - /** - * When true, clear any persisted post-compaction state after the next successful non-compaction stream. - * - * This is intentionally delayed until stream-end so a crash mid-stream doesn't lose the diffs. - */ - private ackPendingPostCompactionStateOnStreamEnd = false; + /** Builds post-compaction context (plan refs, TODOs, edited-file diffs) for prompt injection. */ + private readonly attachmentBuilder: PostCompactionAttachmentBuilder; /** Handles all context-exceeded retry strategies (compaction, post-compaction, hard restart). */ private readonly retryHandler: ContextExceededRetryHandler; @@ -204,6 +182,15 @@ export class AgentSession { onCompactionComplete, }); + this.attachmentBuilder = new PostCompactionAttachmentBuilder( + this.workspaceId, + this.config, + this.aiService, + this.historyService, + this.compactionHandler, + this.fileChangeTracker + ); + this.retryHandler = new ContextExceededRetryHandler( { workspaceId: this.workspaceId, @@ -844,7 +831,7 @@ export class AgentSession { } // Reset per-stream flags (used for retries / crash-safe bookkeeping). 
- this.ackPendingPostCompactionStateOnStreamEnd = false; + this.attachmentBuilder.ackPendingOnStreamEnd = false; this.retryHandler.initStreamState({ modelString, options, openaiTruncationModeOverride }); const commitResult = await this.partialService.commitToHistory(this.workspaceId); @@ -900,7 +887,7 @@ export class AgentSession { const postCompactionAttachments = disablePostCompactionAttachments === true ? null - : await this.getPostCompactionAttachmentsIfNeeded(); + : await this.attachmentBuilder.getAttachmentsIfNeeded(); this.retryHandler.setPostCompactionInjection( postCompactionAttachments !== null && postCompactionAttachments.length > 0 ); @@ -1014,7 +1001,7 @@ export class AgentSession { const hadCompactionRequest = this.retryHandler.hasActiveCompactionRequest(); this.retryHandler.clearActiveCompactionRequest(); this.retryHandler.resetActiveStreamState(); - this.ackPendingPostCompactionStateOnStreamEnd = false; + this.attachmentBuilder.ackPendingOnStreamEnd = false; if (hadCompactionRequest && !this.disposed) { this.clearQueue(); } @@ -1029,8 +1016,8 @@ export class AgentSession { if (!handled) { this.emitChatEvent(payload); - if (this.ackPendingPostCompactionStateOnStreamEnd) { - this.ackPendingPostCompactionStateOnStreamEnd = false; + if (this.attachmentBuilder.ackPendingOnStreamEnd) { + this.attachmentBuilder.ackPendingOnStreamEnd = false; try { await this.compactionHandler.ackPendingDiffsConsumed(); } catch (error) { @@ -1059,7 +1046,7 @@ export class AgentSession { } this.retryHandler.resetActiveStreamState(); - this.ackPendingPostCompactionStateOnStreamEnd = false; + this.attachmentBuilder.ackPendingOnStreamEnd = false; // Stream end: auto-send queued messages (for user messages typed during streaming) this.sendQueuedMessages(); @@ -1317,197 +1304,6 @@ export class AgentSession { this.fileChangeTracker.clear(); } - /** - * Get post-compaction attachments if they should be injected this turn. 
- * - * Logic: - * - On first turn after compaction: inject immediately, clear file state cache - * - Subsequent turns: inject every TURNS_BETWEEN_ATTACHMENTS turns - * - * @returns Attachments to inject, or null if none needed - */ - private async getPostCompactionAttachmentsIfNeeded(): Promise { - // Check if compaction just occurred (immediate injection with cached diffs) - const pendingDiffs = await this.compactionHandler.peekPendingDiffs(); - if (pendingDiffs !== null) { - this.ackPendingPostCompactionStateOnStreamEnd = true; - this.compactionOccurred = true; - this.turnsSinceLastAttachment = 0; - // Clear file state cache since history context is gone - this.fileChangeTracker.clear(); - - // Load exclusions and persistent TODO state (local workspace session data) - const excludedItems = await this.loadExcludedItems(); - const todoAttachment = await this.loadTodoListAttachment(excludedItems); - - // Get runtime for reading plan file - const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); - if (!metadataResult.success) { - // Can't get metadata, skip plan reference but still include other attachments - const attachments: PostCompactionAttachment[] = []; - - if (todoAttachment) { - attachments.push(todoAttachment); - } - - const editedFilesRef = AttachmentService.generateEditedFilesAttachment(pendingDiffs); - if (editedFilesRef) { - attachments.push(editedFilesRef); - } - - return attachments; - } - const runtime = createRuntimeForWorkspace(metadataResult.data); - - const attachments = await AttachmentService.generatePostCompactionAttachments( - metadataResult.data.name, - metadataResult.data.projectName, - this.workspaceId, - pendingDiffs, - runtime, - excludedItems - ); - - if (todoAttachment) { - // Insert TODO after plan (if present), otherwise first. - const planIndex = attachments.findIndex((att) => att.type === "plan_file_reference"); - const insertIndex = planIndex === -1 ? 
0 : planIndex + 1; - attachments.splice(insertIndex, 0, todoAttachment); - } - - return attachments; - } - - // Increment turn counter - this.turnsSinceLastAttachment++; - - // Check cooldown for subsequent injections (re-read from current history) - if (this.compactionOccurred && this.turnsSinceLastAttachment >= TURNS_BETWEEN_ATTACHMENTS) { - this.turnsSinceLastAttachment = 0; - return this.generatePostCompactionAttachments(); - } - - return null; - } - - /** - * Generate post-compaction attachments by extracting diffs from message history. - */ - private async generatePostCompactionAttachments(): Promise { - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (!historyResult.success) { - return []; - } - const fileDiffs = extractEditedFileDiffs(historyResult.data); - - // Load exclusions and persistent TODO state (local workspace session data) - const excludedItems = await this.loadExcludedItems(); - const todoAttachment = await this.loadTodoListAttachment(excludedItems); - - // Get runtime for reading plan file - const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId); - if (!metadataResult.success) { - // Can't get metadata, skip plan reference but still include other attachments - const attachments: PostCompactionAttachment[] = []; - - if (todoAttachment) { - attachments.push(todoAttachment); - } - - const editedFilesRef = AttachmentService.generateEditedFilesAttachment(fileDiffs); - if (editedFilesRef) { - attachments.push(editedFilesRef); - } - - return attachments; - } - const runtime = createRuntimeForWorkspace(metadataResult.data); - - const attachments = await AttachmentService.generatePostCompactionAttachments( - metadataResult.data.name, - metadataResult.data.projectName, - this.workspaceId, - fileDiffs, - runtime, - excludedItems - ); - - if (todoAttachment) { - // Insert TODO after plan (if present), otherwise first. 
- const planIndex = attachments.findIndex((att) => att.type === "plan_file_reference"); - const insertIndex = planIndex === -1 ? 0 : planIndex + 1; - attachments.splice(insertIndex, 0, todoAttachment); - } - - return attachments; - } - - /** - * Load excluded items from the exclusions file. - * Returns empty set if file doesn't exist or can't be read. - */ - private async loadExcludedItems(): Promise> { - const exclusionsPath = path.join( - this.config.getSessionDir(this.workspaceId), - "exclusions.json" - ); - try { - const data = await readFile(exclusionsPath, "utf-8"); - const exclusions = JSON.parse(data) as PostCompactionExclusions; - return new Set(exclusions.excludedItems); - } catch { - return new Set(); - } - } - - private coerceTodoItems(value: unknown): TodoItem[] { - if (!Array.isArray(value)) { - return []; - } - - const result: TodoItem[] = []; - for (const item of value) { - if (!item || typeof item !== "object") continue; - - const content = (item as { content?: unknown }).content; - const status = (item as { status?: unknown }).status; - - if (typeof content !== "string") continue; - if (status !== "pending" && status !== "in_progress" && status !== "completed") continue; - - result.push({ content, status }); - } - - return result; - } - - private async loadTodoListAttachment( - excludedItems: Set - ): Promise { - if (excludedItems.has("todo")) { - return null; - } - - const todoPath = path.join(this.config.getSessionDir(this.workspaceId), "todos.json"); - - try { - const data = await readFile(todoPath, "utf-8"); - const parsed: unknown = JSON.parse(data); - const todos = this.coerceTodoItems(parsed); - if (todos.length === 0) { - return null; - } - - return { - type: "todo_list", - todos, - }; - } catch { - // File missing or unreadable - return null; - } - } - /** Delegate to FileChangeTracker for external file change detection. 
*/ async getChangedFileAttachments(): Promise { return this.fileChangeTracker.getChangedAttachments(); diff --git a/src/node/services/postCompactionAttachments.ts b/src/node/services/postCompactionAttachments.ts new file mode 100644 index 0000000000..cce64f1114 --- /dev/null +++ b/src/node/services/postCompactionAttachments.ts @@ -0,0 +1,207 @@ +/** + * PostCompactionAttachmentBuilder — assembles post-compaction context + * (plan references, TODO lists, edited-file diffs) that gets injected + * into the prompt after a compaction event. + * + * Extracted from AgentSession to keep that class focused on stream + * orchestration. Owns the turn-counter and compaction-occurred state + * that controls injection cadence. + */ +import * as path from "path"; +import { readFile } from "fs/promises"; +import type { PostCompactionAttachment, PostCompactionExclusions } from "@/common/types/attachment"; +import { TURNS_BETWEEN_ATTACHMENTS } from "@/common/constants/attachments"; +import type { TodoItem } from "@/common/types/tools"; +import { AttachmentService } from "@/node/services/attachmentService"; +import { + extractEditedFileDiffs, + type FileEditDiff, +} from "@/common/utils/messages/extractEditedFiles"; +import { createRuntimeForWorkspace } from "@/node/runtime/runtimeHelpers"; +import type { Config } from "@/node/config"; +import type { AIService } from "@/node/services/aiService"; +import type { HistoryService } from "@/node/services/historyService"; +import type { CompactionHandler } from "@/node/services/compactionHandler"; +import type { FileChangeTracker } from "@/node/services/utils/fileChangeTracker"; + +export class PostCompactionAttachmentBuilder { + /** + * Flag indicating the stream-end handler should acknowledge that pending + * post-compaction diffs were consumed. Set inside `getAttachmentsIfNeeded` + * when diffs are present; read and cleared by AgentSession's stream + * lifecycle handlers. 
+   */
+  ackPendingOnStreamEnd = false;
+
+  private turnsSinceLastAttachment = TURNS_BETWEEN_ATTACHMENTS;
+  private compactionOccurred = false;
+
+  constructor(
+    private readonly workspaceId: string,
+    private readonly config: Config,
+    private readonly aiService: AIService,
+    private readonly historyService: HistoryService,
+    private readonly compactionHandler: CompactionHandler,
+    private readonly fileChangeTracker: FileChangeTracker
+  ) {}
+
+  /**
+   * Get post-compaction attachments if they should be injected this turn.
+   *
+   * Logic:
+   * - On first turn after compaction: inject immediately, clear file state cache
+   * - Subsequent turns: inject every TURNS_BETWEEN_ATTACHMENTS turns
+   *
+   * @returns Attachments to inject, or null if none needed
+   */
+  async getAttachmentsIfNeeded(): Promise<PostCompactionAttachment[] | null> {
+    // Check if compaction just occurred (immediate injection with cached diffs)
+    const pendingDiffs = await this.compactionHandler.peekPendingDiffs();
+    if (pendingDiffs !== null) {
+      this.ackPendingOnStreamEnd = true;
+      this.compactionOccurred = true;
+      this.turnsSinceLastAttachment = 0;
+      // Clear file state cache since history context is gone
+      this.fileChangeTracker.clear();
+
+      return this.buildAttachments(pendingDiffs);
+    }
+
+    // Increment turn counter
+    this.turnsSinceLastAttachment++;
+
+    // Check cooldown for subsequent injections (re-read from current history)
+    if (this.compactionOccurred && this.turnsSinceLastAttachment >= TURNS_BETWEEN_ATTACHMENTS) {
+      this.turnsSinceLastAttachment = 0;
+      return this.generateFromHistory();
+    }
+
+    return null;
+  }
+
+  /**
+   * Generate post-compaction attachments by extracting diffs from message history.
+   */
+  private async generateFromHistory(): Promise<PostCompactionAttachment[]> {
+    const historyResult = await this.historyService.getHistory(this.workspaceId);
+    if (!historyResult.success) {
+      return [];
+    }
+    const fileDiffs = extractEditedFileDiffs(historyResult.data);
+
+    return this.buildAttachments(fileDiffs);
+  }
+
+  /**
+   * Shared assembly: load exclusions + TODO, then build the full attachment list
+   * (plan reference, TODO, edited-files diff).
+   */
+  private async buildAttachments(fileDiffs: FileEditDiff[]): Promise<PostCompactionAttachment[]> {
+    const excludedItems = await this.loadExcludedItems();
+    const todoAttachment = await this.loadTodoListAttachment(excludedItems);
+
+    // Get runtime for reading plan file
+    const metadataResult = await this.aiService.getWorkspaceMetadata(this.workspaceId);
+    if (!metadataResult.success) {
+      // Can't get metadata, skip plan reference but still include other attachments
+      const attachments: PostCompactionAttachment[] = [];
+
+      if (todoAttachment) {
+        attachments.push(todoAttachment);
+      }
+
+      const editedFilesRef = AttachmentService.generateEditedFilesAttachment(fileDiffs);
+      if (editedFilesRef) {
+        attachments.push(editedFilesRef);
+      }
+
+      return attachments;
+    }
+    const runtime = createRuntimeForWorkspace(metadataResult.data);
+
+    const attachments = await AttachmentService.generatePostCompactionAttachments(
+      metadataResult.data.name,
+      metadataResult.data.projectName,
+      this.workspaceId,
+      fileDiffs,
+      runtime,
+      excludedItems
+    );
+
+    if (todoAttachment) {
+      // Insert TODO after plan (if present), otherwise first.
+      const planIndex = attachments.findIndex((att) => att.type === "plan_file_reference");
+      const insertIndex = planIndex === -1 ? 0 : planIndex + 1;
+      attachments.splice(insertIndex, 0, todoAttachment);
+    }
+
+    return attachments;
+  }
+
+  /**
+   * Load excluded items from the exclusions file.
+   * Returns empty set if file doesn't exist or can't be read.
+   */
+  private async loadExcludedItems(): Promise<Set<string>> {
+    const exclusionsPath = path.join(
+      this.config.getSessionDir(this.workspaceId),
+      "exclusions.json"
+    );
+    try {
+      const data = await readFile(exclusionsPath, "utf-8");
+      const exclusions = JSON.parse(data) as PostCompactionExclusions;
+      return new Set(exclusions.excludedItems);
+    } catch {
+      return new Set();
+    }
+  }
+
+  private async loadTodoListAttachment(
+    excludedItems: Set<string>
+  ): Promise<PostCompactionAttachment | null> {
+    if (excludedItems.has("todo")) {
+      return null;
+    }
+
+    const todoPath = path.join(this.config.getSessionDir(this.workspaceId), "todos.json");
+
+    try {
+      const data = await readFile(todoPath, "utf-8");
+      const parsed: unknown = JSON.parse(data);
+      const todos = coerceTodoItems(parsed);
+      if (todos.length === 0) {
+        return null;
+      }
+
+      return {
+        type: "todo_list",
+        todos,
+      };
+    } catch {
+      // File missing or unreadable
+      return null;
+    }
+  }
+}
+
+/** Safely coerce unknown JSON into a TodoItem array. */
+export function coerceTodoItems(value: unknown): TodoItem[] {
+  if (!Array.isArray(value)) {
+    return [];
+  }
+
+  const result: TodoItem[] = [];
+  for (const item of value) {
+    if (!item || typeof item !== "object") continue;
+
+    const content = (item as { content?: unknown }).content;
+    const status = (item as { status?: unknown }).status;
+
+    if (typeof content !== "string") continue;
+    if (status !== "pending" && status !== "in_progress" && status !== "completed") continue;
+
+    result.push({ content, status });
+  }
+
+  return result;
+}

From 5cd4ac24af349058086c2425d78657341a765f25 Mon Sep 17 00:00:00 2001
From: Ammar
Date: Sun, 8 Feb 2026 19:57:17 -0600
Subject: [PATCH 13/14] refactor: decompose sendMessage in AgentSession

Extract handleEditTruncation (~80 LoC) and validateModelAndFiles (~45 LoC)
as private methods, reducing sendMessage from 310 to ~219 lines. The method
now reads as a high-level orchestrator with clearly named sub-steps.
--- src/node/services/agentSession.ts | 251 +++++++++++++++++------------- 1 file changed, 142 insertions(+), 109 deletions(-) diff --git a/src/node/services/agentSession.ts b/src/node/services/agentSession.ts index 2807853489..3d68a1a155 100644 --- a/src/node/services/agentSession.ts +++ b/src/node/services/agentSession.ts @@ -422,6 +422,140 @@ export class AgentSession { this.emitMetadata(metadata); } + /** + * Handle edit-specific logic: preserve file parts from the original message, + * interrupt any active stream, walk back over preceding snapshots, and + * truncate history at the edit target. + * + * @returns preserved file parts (if the frontend omitted them), or an error + */ + private async handleEditTruncation( + editMessageId: string, + fileParts: FilePart[] | undefined + ): Promise> { + let preservedFileParts: MuxFilePart[] | undefined; + + // If the frontend omits fileParts, preserve the original message's attachments. + if (fileParts === undefined) { + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (historyResult.success) { + const targetMessage = historyResult.data.find((msg) => msg.id === editMessageId); + const parts = targetMessage?.parts.filter( + (part): part is MuxFilePart => part.type === "file" + ); + if (parts && parts.length > 0) { + preservedFileParts = parts; + } + } + } + + // Interrupt an existing stream or compaction, if active + if (this.aiService.isStreaming(this.workspaceId)) { + // MUST use abandonPartial=true to prevent handleAbort from performing partial compaction + // with mismatched history (since we're about to truncate it) + const stopResult = await this.interruptStream({ abandonPartial: true }); + if (!stopResult.success) { + return Err(createUnknownSendMessageError(stopResult.error)); + } + } + + // Find the truncation target: the edited message or any immediately-preceding snapshots. 
+ // (snapshots are persisted immediately before their corresponding user message) + let truncateTargetId = editMessageId; + const historyResult = await this.historyService.getHistory(this.workspaceId); + if (historyResult.success) { + const messages = historyResult.data; + const editIndex = messages.findIndex((m) => m.id === editMessageId); + if (editIndex > 0) { + // Walk backwards over contiguous synthetic snapshots so we don't orphan them. + for (let i = editIndex - 1; i >= 0; i--) { + const msg = messages[i]; + const isSnapshot = + msg.metadata?.synthetic && + (msg.metadata?.fileAtMentionSnapshot ?? msg.metadata?.agentSkillSnapshot); + if (!isSnapshot) break; + truncateTargetId = msg.id; + } + } + } + + const truncateResult = await this.historyService.truncateAfterMessage( + this.workspaceId, + truncateTargetId + ); + if (!truncateResult.success) { + const isMissingEditTarget = + truncateResult.error.includes("Message with ID") && + truncateResult.error.includes("not found in history"); + if (isMissingEditTarget) { + // This can happen if the frontend is briefly out-of-sync with persisted history + // (e.g., compaction/truncation completed and removed the message while the UI still + // shows it as editable). Treat as a no-op truncation so the user can recover. + log.warn("editMessageId not found in history; proceeding without truncation", { + workspaceId: this.workspaceId, + editMessageId, + error: truncateResult.error, + }); + } else { + return Err(createUnknownSendMessageError(truncateResult.error)); + } + } + + return Ok({ preservedFileParts }); + } + + /** + * Validate model string and file parts against model capabilities. + * Returns normalized options or an error. + */ + private validateModelAndFiles( + options: SendMessageOptions, + effectiveFileParts: Array<{ url: string; mediaType: string; filename?: string }> | undefined + ): Result { + // Defense-in-depth: reject PDFs for models we know don't support them. 
+ if (effectiveFileParts && effectiveFileParts.length > 0) { + const pdfParts = effectiveFileParts.filter( + (part) => normalizeMediaType(part.mediaType) === PDF_MEDIA_TYPE + ); + + if (pdfParts.length > 0) { + const caps = getModelCapabilities(options.model); + + if (caps && !caps.supportsPdfInput) { + return Err( + createUnknownSendMessageError(`Model ${options.model} does not support PDF input.`) + ); + } + + if (caps?.maxPdfSizeMb !== undefined) { + const maxBytes = caps.maxPdfSizeMb * 1024 * 1024; + for (const part of pdfParts) { + const bytes = estimateBase64DataUrlBytes(part.url); + if (bytes !== null && bytes > maxBytes) { + const actualMb = (bytes / (1024 * 1024)).toFixed(1); + const label = part.filename ?? "PDF"; + return Err( + createUnknownSendMessageError( + `${label} is ${actualMb}MB, but ${options.model} allows up to ${caps.maxPdfSizeMb}MB per PDF.` + ) + ); + } + } + } + } + } + + // Validate model string format (must be "provider:model-id") + if (!isValidModelFormat(options.model)) { + return Err({ + type: "invalid_model_string", + message: `Invalid model string format: "${options.model}". Expected "provider:model-id"`, + }); + } + + return Ok(undefined); + } + async sendMessage( message: string, options?: SendMessageOptions & { fileParts?: FilePart[] }, @@ -434,22 +568,12 @@ export class AgentSession { const fileParts = options?.fileParts; const editMessageId = options?.editMessageId; - // Edits are implemented as truncate+replace. If the frontend omits fileParts, - // preserve the original message's attachments. + // Handle edit: preserve file parts, interrupt stream, truncate history at edit target. 
let preservedEditFileParts: MuxFilePart[] | undefined; - if (editMessageId && fileParts === undefined) { - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (historyResult.success) { - const targetMessage: MuxMessage | undefined = historyResult.data.find( - (msg) => msg.id === editMessageId - ); - const fileParts = targetMessage?.parts.filter( - (part): part is MuxFilePart => part.type === "file" - ); - if (fileParts && fileParts.length > 0) { - preservedEditFileParts = fileParts; - } - } + if (editMessageId) { + const editResult = await this.handleEditTruncation(editMessageId, fileParts); + if (!editResult.success) return Err(editResult.error); + preservedEditFileParts = editResult.data.preservedFileParts; } const hasFiles = (fileParts?.length ?? 0) > 0 || (preservedEditFileParts?.length ?? 0) > 0; @@ -462,60 +586,6 @@ export class AgentSession { ); } - if (editMessageId) { - // Interrupt an existing stream or compaction, if active - if (this.aiService.isStreaming(this.workspaceId)) { - // MUST use abandonPartial=true to prevent handleAbort from performing partial compaction - // with mismatched history (since we're about to truncate it) - const stopResult = await this.interruptStream({ abandonPartial: true }); - if (!stopResult.success) { - return Err(createUnknownSendMessageError(stopResult.error)); - } - } - - // Find the truncation target: the edited message or any immediately-preceding snapshots. - // (snapshots are persisted immediately before their corresponding user message) - let truncateTargetId = editMessageId; - const historyResult = await this.historyService.getHistory(this.workspaceId); - if (historyResult.success) { - const messages = historyResult.data; - const editIndex = messages.findIndex((m) => m.id === editMessageId); - if (editIndex > 0) { - // Walk backwards over contiguous synthetic snapshots so we don't orphan them. 
- for (let i = editIndex - 1; i >= 0; i--) { - const msg = messages[i]; - const isSnapshot = - msg.metadata?.synthetic && - (msg.metadata?.fileAtMentionSnapshot ?? msg.metadata?.agentSkillSnapshot); - if (!isSnapshot) break; - truncateTargetId = msg.id; - } - } - } - - const truncateResult = await this.historyService.truncateAfterMessage( - this.workspaceId, - truncateTargetId - ); - if (!truncateResult.success) { - const isMissingEditTarget = - truncateResult.error.includes("Message with ID") && - truncateResult.error.includes("not found in history"); - if (isMissingEditTarget) { - // This can happen if the frontend is briefly out-of-sync with persisted history - // (e.g., compaction/truncation completed and removed the message while the UI still - // shows it as editable). Treat as a no-op truncation so the user can recover. - log.warn("editMessageId not found in history; proceeding without truncation", { - workspaceId: this.workspaceId, - editMessageId, - error: truncateResult.error, - }); - } else { - return Err(createUnknownSendMessageError(truncateResult.error)); - } - } - } - const messageId = createUserMessageId(); const additionalParts = preservedEditFileParts && preservedEditFileParts.length > 0 @@ -576,8 +646,7 @@ export class AgentSession { ? { ...options, system1Model: rawSystem1Model } : options; - // Defense-in-depth: reject PDFs for models we know don't support them. - // (Frontend should also block this, but it's easy to bypass via IPC / older clients.) + // Validate model capabilities (PDF support, size limits) and model string format. const effectiveFileParts = preservedEditFileParts && preservedEditFileParts.length > 0 ? 
preservedEditFileParts.map((part) => ({ @@ -587,44 +656,8 @@ export class AgentSession { })) : fileParts; - if (effectiveFileParts && effectiveFileParts.length > 0) { - const pdfParts = effectiveFileParts.filter( - (part) => normalizeMediaType(part.mediaType) === PDF_MEDIA_TYPE - ); - - if (pdfParts.length > 0) { - const caps = getModelCapabilities(options.model); - - if (caps && !caps.supportsPdfInput) { - return Err( - createUnknownSendMessageError(`Model ${options.model} does not support PDF input.`) - ); - } - - if (caps?.maxPdfSizeMb !== undefined) { - const maxBytes = caps.maxPdfSizeMb * 1024 * 1024; - for (const part of pdfParts) { - const bytes = estimateBase64DataUrlBytes(part.url); - if (bytes !== null && bytes > maxBytes) { - const actualMb = (bytes / (1024 * 1024)).toFixed(1); - const label = part.filename ?? "PDF"; - return Err( - createUnknownSendMessageError( - `${label} is ${actualMb}MB, but ${options.model} allows up to ${caps.maxPdfSizeMb}MB per PDF.` - ) - ); - } - } - } - } - } - // Validate model string format (must be "provider:model-id") - if (!isValidModelFormat(options.model)) { - return Err({ - type: "invalid_model_string", - message: `Invalid model string format: "${options.model}". Expected "provider:model-id"`, - }); - } + const validationResult = this.validateModelAndFiles(options, effectiveFileParts); + if (!validationResult.success) return Err(validationResult.error); const userMessage = createMuxMessage( messageId, From 4d29253f949924a872387d336b779b3aa21343b3 Mon Sep 17 00:00:00 2001 From: Ammar Date: Sun, 8 Feb 2026 20:21:41 -0600 Subject: [PATCH 14/14] fix: convert ToolBridge import to type-only after removing preGenerateMuxTypes Removing preGenerateMuxTypes (dead export) left ToolBridge with only type-level usage, triggering consistent-type-imports lint rule. 
--- src/node/services/tools/code_execution.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/node/services/tools/code_execution.ts b/src/node/services/tools/code_execution.ts index 92168714e9..b37d4ac6d9 100644 --- a/src/node/services/tools/code_execution.ts +++ b/src/node/services/tools/code_execution.ts @@ -6,10 +6,9 @@ * inference instead of multiple round-trips. */ -import { tool } from "ai"; +import { tool, type Tool } from "ai"; import { z } from "zod"; -import type { Tool } from "ai"; -import { ToolBridge } from "@/node/services/ptc/toolBridge"; +import type { ToolBridge } from "@/node/services/ptc/toolBridge"; import type { IJSRuntimeFactory } from "@/node/services/ptc/runtime"; import type { PTCEvent, PTCExecutionResult } from "@/node/services/ptc/types";