diff --git a/Sources/AnyLanguageModel/Models/OpenAILanguageModel.swift b/Sources/AnyLanguageModel/Models/OpenAILanguageModel.swift
index bd3d8c1..f028f97 100644
--- a/Sources/AnyLanguageModel/Models/OpenAILanguageModel.swift
+++ b/Sources/AnyLanguageModel/Models/OpenAILanguageModel.swift
@@ -514,9 +514,19 @@ public struct OpenAILanguageModel: LanguageModel {
             for invocation in invocations {
                 let output = invocation.output
                 entries.append(.toolOutput(output))
-                let toolSegments: [Transcript.Segment] = output.segments
-                let blocks = convertSegmentsToOpenAIBlocks(toolSegments)
-                messages.append(OpenAIMessage(role: .tool(id: invocation.call.id), content: .blocks(blocks)))
+                // Convert tool output segments to plain text for DeepSeek API compatibility
+                let toolContent = output.segments.map { segment -> String in
+                    switch segment {
+                    case .text(let textSegment): return textSegment.content
+                    case .structure(let structuredSegment):
+                        switch structuredSegment.content.kind {
+                        case .string(let text): return text
+                        default: return structuredSegment.content.jsonString
+                        }
+                    case .image: return ""
+                    }
+                }.joined()
+                messages.append(OpenAIMessage(role: .tool(id: invocation.call.id), content: .text(toolContent)))
             }
             continue
         }
@@ -579,9 +589,19 @@ public struct OpenAILanguageModel: LanguageModel {
             for invocation in invocations {
                 let output = invocation.output
                 entries.append(.toolOutput(output))
-                let toolSegments: [Transcript.Segment] = output.segments
-                let blocks = convertSegmentsToOpenAIBlocks(toolSegments)
-                messages.append(OpenAIMessage(role: .tool(id: invocation.call.id), content: .blocks(blocks)))
+                // Convert tool output segments to plain text for DeepSeek API compatibility
+                let toolContent = output.segments.map { segment -> String in
+                    switch segment {
+                    case .text(let textSegment): return textSegment.content
+                    case .structure(let structuredSegment):
+                        switch structuredSegment.content.kind {
+                        case .string(let text): return text
+                        default: return structuredSegment.content.jsonString
+                        }
+                    case .image: return ""
+                    }
+                }.joined()
+                messages.append(OpenAIMessage(role: .tool(id: invocation.call.id), content: .text(toolContent)))
             }
             continue
         }
@@ -1165,10 +1185,22 @@ extension Transcript {
                     )
                 )
             case .toolOutput(let toolOutput):
+                // Convert tool output segments to plain text for DeepSeek API compatibility
+                let toolContent = toolOutput.segments.map { segment -> String in
+                    switch segment {
+                    case .text(let textSegment): return textSegment.content
+                    case .structure(let structuredSegment):
+                        switch structuredSegment.content.kind {
+                        case .string(let text): return text
+                        default: return structuredSegment.content.jsonString
+                        }
+                    case .image: return ""
+                    }
+                }.joined()
                 messages.append(
                     .init(
                         role: .tool(id: toolOutput.id),
-                        content: .blocks(convertSegmentsToOpenAIBlocks(toolOutput.segments))
+                        content: .text(toolContent)
                     )
                 )
             }
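
Note: the segment-to-text conversion is duplicated verbatim across all three hunks. A follow-up refactor could hoist it into a shared helper. A minimal sketch, assuming the `Transcript.Segment` cases and structured-content API (`kind`, `jsonString`) exactly as they appear in the diff above; the helper name `plainTextContent(from:)` is hypothetical:

```swift
// Hypothetical helper; relies on the Transcript.Segment cases and
// structured-content accessors (`content.kind`, `content.jsonString`)
// shown in the diff above.
extension Transcript {
    /// Flattens tool-output segments into a single plain-text string,
    /// matching the conversion duplicated in the three hunks above.
    static func plainTextContent(from segments: [Transcript.Segment]) -> String {
        segments.map { segment -> String in
            switch segment {
            case .text(let textSegment):
                return textSegment.content
            case .structure(let structuredSegment):
                switch structuredSegment.content.kind {
                case .string(let text): return text
                default: return structuredSegment.content.jsonString
                }
            case .image:
                // Images cannot be represented as plain text; drop them.
                return ""
            }
        }.joined()
    }
}
```

Each call site would then reduce to a one-liner, e.g.:

```swift
messages.append(OpenAIMessage(
    role: .tool(id: invocation.call.id),
    content: .text(Transcript.plainTextContent(from: output.segments))
))
```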