Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -316,7 +316,7 @@ OpenAI Chat Completions. Required fields: `model`, `messages`. Optional: `stream
OpenAI Responses API. Required fields: `model`, `input`. Optional: `instructions`, `stream`, `max_output_tokens`.

### POST /v1/messages
Anthropic Messages API. Required fields: `model`, `messages`. Optional: `system`, `max_tokens`, `stream`.
Anthropic Messages API. Required fields: `model`, `messages`. Optional: `system` (string or array of `{type: "text", text: string}` content blocks), `max_tokens`, `stream`.

Errors are returned in Anthropic format: `{ "type": "error", "error": { "type": "...", "message": "..." } }`.

Expand Down
28 changes: 23 additions & 5 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -592,6 +592,22 @@ export function normalizeAnthropicMessages(messages) {
.filter((message) => message.content.length > 0)
}

/**
 * Normalize the Anthropic Messages API top-level `system` field.
 *
 * The API accepts either a plain string or an array of
 * `{type: "text", text: string}` content blocks. Either form is reduced
 * to a single trimmed string; text blocks are joined with a blank line.
 *
 * @param {string|Array|*} system - Raw `system` value from the request body.
 * @returns {string|null} The normalized system text, or null when the
 *   value is absent, malformed, or contains no non-whitespace text.
 */
export function normalizeAnthropicSystem(system) {
  if (typeof system === "string") {
    const value = system.trim()
    return value === "" ? null : value
  }
  if (!Array.isArray(system)) return null
  // Collect trimmed text from well-formed text blocks only; anything
  // else (null entries, image blocks, non-string text) is ignored.
  const pieces = []
  for (const block of system) {
    if (!block || block.type !== "text" || typeof block.text !== "string") continue
    const piece = block.text.trim()
    if (piece !== "") pieces.push(piece)
  }
  return pieces.length > 0 ? pieces.join("\n\n") : null
}

export function mapFinishReasonToAnthropic(finish) {
if (!finish) return "end_turn"
if (finish.includes("length")) return "max_tokens"
Expand Down Expand Up @@ -1040,11 +1056,13 @@ export function createProxyFetchHandler(client) {
return anthropicBadRequest("No text content was found in the supplied messages.", 400, request)
}

// Prepend Anthropic top-level system string as a system message so buildSystemPrompt picks it up.
const allMessages =
typeof body.system === "string" && body.system.trim()
? [{ role: "system", content: body.system.trim() }, ...messages]
: messages
// Prepend Anthropic top-level `system` (string or array-of-content-blocks,
// per the Messages API spec) as a system message so buildSystemPrompt
// picks it up.
const systemText = normalizeAnthropicSystem(body.system)
const allMessages = systemText
? [{ role: "system", content: systemText }, ...messages]
: messages

const system = buildSystemPrompt(allMessages, {
temperature: body.temperature,
Expand Down
109 changes: 109 additions & 0 deletions index.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import {
resolveModel,
normalizeAnthropicMessages,
mapFinishReasonToAnthropic,
normalizeAnthropicSystem,
normalizeGeminiContents,
extractGeminiSystemInstruction,
mapFinishReasonToGemini,
Expand Down Expand Up @@ -1388,6 +1389,114 @@ test("POST /v1/messages system string is included in prompt", async () => {
assert.ok(capturedSystem?.includes("You are a pirate."))
})

test("POST /v1/messages system as content-block array is included in prompt", async () => {
  // Capture the system prompt the proxy forwards to session.prompt so we
  // can assert the top-level `system` content block made it through.
  let promptSystem = null

  const providerList = {
    providers: [{ id: "anthropic", models: { "claude-3-5-sonnet": { id: "claude-3-5-sonnet" } } }],
  }
  const promptResult = {
    data: {
      parts: [{ type: "text", text: "ok" }],
      info: { tokens: { input: 1, output: 1, reasoning: 0, cache: { read: 0, write: 0 } }, finish: "end_turn" },
    },
  }
  const stubClient = {
    app: { log: async () => {} },
    tool: { ids: async () => ({ data: [] }) },
    config: { providers: async () => ({ data: providerList }) },
    session: {
      create: async () => ({ data: { id: "sess-ant-sys-arr" } }),
      prompt: async ({ body }) => {
        promptSystem = body.system
        return promptResult
      },
    },
  }

  const payload = {
    model: "anthropic/claude-3-5-sonnet",
    system: [{ type: "text", text: "You are a pirate." }],
    messages: [{ role: "user", content: "Hello." }],
  }
  const request = new Request("http://127.0.0.1:4010/v1/messages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(payload),
  })

  await createProxyFetchHandler(stubClient)(request)
  assert.ok(promptSystem?.includes("You are a pirate."))
})

test("POST /v1/messages system as multi-block array concatenates text", async () => {
  // Verify that every text block of a multi-entry `system` array is
  // present in the system prompt handed to session.prompt.
  let promptSystem = null

  const providerList = {
    providers: [{ id: "anthropic", models: { "claude-3-5-sonnet": { id: "claude-3-5-sonnet" } } }],
  }
  const promptResult = {
    data: {
      parts: [{ type: "text", text: "ok" }],
      info: { tokens: { input: 1, output: 1, reasoning: 0, cache: { read: 0, write: 0 } }, finish: "end_turn" },
    },
  }
  const stubClient = {
    app: { log: async () => {} },
    tool: { ids: async () => ({ data: [] }) },
    config: { providers: async () => ({ data: providerList }) },
    session: {
      create: async () => ({ data: { id: "sess-ant-sys-multi" } }),
      prompt: async ({ body }) => {
        promptSystem = body.system
        return promptResult
      },
    },
  }

  const payload = {
    model: "anthropic/claude-3-5-sonnet",
    system: [
      { type: "text", text: "Line one." },
      { type: "text", text: "Line two." },
    ],
    messages: [{ role: "user", content: "Hello." }],
  }
  const request = new Request("http://127.0.0.1:4010/v1/messages", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(payload),
  })

  await createProxyFetchHandler(stubClient)(request)
  assert.ok(promptSystem?.includes("Line one."))
  assert.ok(promptSystem?.includes("Line two."))
})

test("normalizeAnthropicSystem handles string, array, and edge cases", () => {
  // Table-driven: each [input, expected] pair covers one shape the
  // Messages API `system` field can take (string, block array, junk).
  const cases = [
    ["hello", "hello"],
    [" hi ", "hi"],
    ["", null],
    [" ", null],
    [[{ type: "text", text: "a" }], "a"],
    [
      [
        { type: "text", text: "a" },
        { type: "text", text: "b" },
      ],
      "a\n\nb",
    ],
    [[{ type: "image", source: {} }], null],
    [[], null],
    [[{ type: "text", text: "" }], null],
    [undefined, null],
    [null, null],
    [42, null],
    [[null, { type: "text", text: "x" }], "x"],
  ]
  for (const [input, expected] of cases) {
    assert.equal(normalizeAnthropicSystem(input), expected)
  }
})

test("POST /v1/messages missing model returns Anthropic error format", async () => {
const handler = createProxyFetchHandler(createAnthropicClient())
const request = new Request("http://127.0.0.1:4010/v1/messages", {
Expand Down