This commit is contained in:
Frank
2025-10-27 17:00:49 -04:00
parent 7216a8c86d
commit 71abca9571
12 changed files with 2108 additions and 215 deletions

View File

@@ -0,0 +1,618 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
// Usage payload shape emitted by the Anthropic Messages API (appears on
// non-streaming responses and on streaming message_start / message_delta events).
type Usage = {
  // Tokens written to the prompt cache, broken down by cache TTL.
  cache_creation?: {
    ephemeral_5m_input_tokens?: number
    ephemeral_1h_input_tokens?: number
  }
  // Aggregate cache-write counter (older field; superseded by cache_creation).
  cache_creation_input_tokens?: number
  // Tokens served from the prompt cache.
  cache_read_input_tokens?: number
  input_tokens?: number
  output_tokens?: number
  // Server-side tool usage counters.
  server_tool_use?: {
    web_search_requests?: number
  }
}
/** Provider glue for the Anthropic Messages API (URL, auth, usage accounting). */
export const anthropicHelper = {
  format: "anthropic",
  // Messages endpoint lives at <api>/messages.
  modifyUrl: (providerApi: string) => providerApi + "/messages",
  modifyHeaders: (headers: Headers, apiKey: string) => {
    headers.set("x-api-key", apiKey)
    // Preserve a caller-supplied version header; otherwise pin the stable one.
    headers.set("anthropic-version", headers.get("anthropic-version") ?? "2023-06-01")
  },
  modifyBody: (body: Record<string, any>) => {
    return {
      ...body,
      service_tier: "standard_only",
    }
  },
  // Stateful SSE parser that accumulates usage across streaming events.
  createUsageParser: () => {
    let usage: Usage
    return {
      parse: (chunk: string) => {
        // Anthropic SSE parts look like "event: <type>\ndata: <json>".
        // BUG FIX: the original read split("\n")[1] unconditionally and threw
        // a TypeError on frames without a second line (e.g. ping/keep-alive).
        // Locate the data line instead.
        const data = chunk.split("\n").find((l) => l.startsWith("data: "))
        if (!data) return
        let json
        try {
          json = JSON.parse(data.slice(6))
        } catch (e) {
          return
        }
        // usage appears at the top level (message_delta) or nested under
        // message (message_start).
        const usageUpdate = json.usage ?? json.message?.usage
        if (!usageUpdate) return
        // Merge incrementally: later events may carry only changed fields.
        usage = {
          ...usage,
          ...usageUpdate,
          cache_creation: {
            ...usage?.cache_creation,
            ...usageUpdate.cache_creation,
          },
          server_tool_use: {
            ...usage?.server_tool_use,
            ...usageUpdate.server_tool_use,
          },
        }
      },
      retrieve: () => usage,
    }
  },
  // Map Anthropic usage fields onto the provider-agnostic shape.
  normalizeUsage: (usage: Usage) => ({
    inputTokens: usage.input_tokens ?? 0,
    outputTokens: usage.output_tokens ?? 0,
    reasoningTokens: undefined,
    cacheReadTokens: usage.cache_read_input_tokens ?? undefined,
    cacheWrite5mTokens: usage.cache_creation?.ephemeral_5m_input_tokens ?? undefined,
    cacheWrite1hTokens: usage.cache_creation?.ephemeral_1h_input_tokens ?? undefined,
  }),
} satisfies ProviderHelper
/**
 * Convert an Anthropic Messages API request into the common
 * (OpenAI-chat-style) request shape.
 *
 * - `system` (a plain string or an array of text blocks) becomes leading
 *   system messages
 * - user `tool_result` parts become standalone `role: "tool"` messages
 * - assistant `tool_use` parts become OpenAI-style `tool_calls`
 */
export function fromAnthropicRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  const msgs: any[] = []
  // Anthropic accepts `system` either as a plain string or as an array of
  // text blocks; the original handled only the array form and silently
  // dropped string systems.
  if (typeof body.system === "string" && body.system.length > 0) {
    msgs.push({ role: "system", content: body.system })
  }
  const sys = Array.isArray(body.system) ? body.system : undefined
  if (sys && sys.length > 0) {
    for (const s of sys) {
      if (!s) continue
      if ((s as any).type !== "text") continue
      if (typeof (s as any).text !== "string") continue
      if ((s as any).text.length === 0) continue
      msgs.push({ role: "system", content: (s as any).text })
    }
  }
  // Convert an Anthropic image source into an OpenAI image_url part.
  const toImg = (src: any) => {
    if (!src || typeof src !== "object") return undefined
    if ((src as any).type === "url" && typeof (src as any).url === "string")
      return { type: "image_url", image_url: { url: (src as any).url } }
    if (
      (src as any).type === "base64" &&
      typeof (src as any).media_type === "string" &&
      typeof (src as any).data === "string"
    )
      return { type: "image_url", image_url: { url: `data:${(src as any).media_type};base64,${(src as any).data}` } }
    return undefined
  }
  const inMsgs = Array.isArray(body.messages) ? body.messages : []
  for (const m of inMsgs) {
    if (!m || !(m as any).role) continue
    if ((m as any).role === "user") {
      const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
      const partsOut: any[] = []
      for (const p of partsIn) {
        if (!p || !(p as any).type) continue
        if ((p as any).type === "text" && typeof (p as any).text === "string")
          partsOut.push({ type: "text", text: (p as any).text })
        if ((p as any).type === "image") {
          const ip = toImg((p as any).source)
          if (ip) partsOut.push(ip)
        }
        if ((p as any).type === "tool_result") {
          // Tool results leave the user message and become their own
          // role:"tool" message, matching the OpenAI layout.
          const id = (p as any).tool_use_id
          const content =
            typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
          msgs.push({ role: "tool", tool_call_id: id, content })
        }
      }
      if (partsOut.length > 0) {
        // A lone text part collapses to a plain string.
        if (partsOut.length === 1 && partsOut[0].type === "text") msgs.push({ role: "user", content: partsOut[0].text })
        else msgs.push({ role: "user", content: partsOut })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
      const texts: string[] = []
      const tcs: any[] = []
      for (const p of partsIn) {
        if (!p || !(p as any).type) continue
        if ((p as any).type === "text" && typeof (p as any).text === "string") texts.push((p as any).text)
        if ((p as any).type === "tool_use") {
          const name = (p as any).name
          const id = (p as any).id
          const inp = (p as any).input
          // OpenAI tool calls carry arguments as a JSON string.
          const input = (() => {
            if (typeof inp === "string") return inp
            try {
              return JSON.stringify(inp ?? {})
            } catch {
              return String(inp ?? "")
            }
          })()
          tcs.push({ id, type: "function", function: { name, arguments: input } })
        }
      }
      const out: any = { role: "assistant", content: texts.join("") }
      if (tcs.length > 0) out.tool_calls = tcs
      msgs.push(out)
      continue
    }
  }
  // Tools: Anthropic { name, description, input_schema } -> OpenAI
  // { type: "function", function: { name, description, parameters } }.
  const tools = Array.isArray(body.tools)
    ? body.tools
        .filter((t: any) => t && typeof t === "object" && "input_schema" in t)
        .map((t: any) => ({
          type: "function",
          function: { name: (t as any).name, description: (t as any).description, parameters: (t as any).input_schema },
        }))
    : undefined
  // tool_choice: auto -> "auto", any -> "required", tool -> named function.
  const tcin = body.tool_choice
  const tc = (() => {
    if (!tcin) return undefined
    if ((tcin as any).type === "auto") return "auto"
    if ((tcin as any).type === "any") return "required"
    if ((tcin as any).type === "tool" && typeof (tcin as any).name === "string")
      return { type: "function" as const, function: { name: (tcin as any).name } }
    return undefined
  })()
  // A single stop sequence collapses to a plain string.
  const stop = (() => {
    const v = body.stop_sequences
    if (!v) return undefined
    if (Array.isArray(v)) return v.length === 1 ? v[0] : v
    if (typeof v === "string") return v
    return undefined
  })()
  return {
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop,
    messages: msgs,
    stream: !!body.stream,
    tools,
    tool_choice: tc,
  }
}
/**
 * Convert a common (OpenAI-chat-style) request into an Anthropic Messages
 * API request.
 *
 * - system messages are hoisted into top-level `system` text blocks
 * - `role: "tool"` messages become user messages carrying a tool_result block
 * - up to 4 cache_control breakpoints are attached, in build order
 * - max_tokens defaults to 32_000 when the caller gave none
 */
export function toAnthropicRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const sysIn = Array.isArray(body.messages) ? body.messages.filter((m: any) => m && m.role === "system") : []
  // cc() hands out one ephemeral cache_control marker per call until the
  // 4-breakpoint budget is exhausted; call order therefore matters
  // (system blocks first, then message parts, then tools).
  let ccCount = 0
  const cc = () => {
    ccCount++
    return ccCount <= 4 ? { cache_control: { type: "ephemeral" } } : {}
  }
  const system = sysIn
    .filter((m: any) => typeof m.content === "string" && m.content.length > 0)
    .map((m: any) => ({ type: "text", text: m.content, ...cc() }))
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const msgsOut: any[] = []
  // Convert an OpenAI image_url part into an Anthropic image source:
  // data: URLs become base64 sources, other URLs become url sources.
  const toSrc = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "image_url" && (p as any).image_url) {
      const u = (p as any).image_url.url ?? (p as any).image_url
      if (typeof u === "string" && u.startsWith("data:")) {
        const m = u.match(/^data:([^;]+);base64,(.*)$/)
        if (m) return { type: "base64", media_type: m[1], data: m[2] }
      }
      if (typeof u === "string") return { type: "url", url: u }
    }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !(m as any).role) continue
    // system messages were hoisted above; they match no branch here.
    if ((m as any).role === "user") {
      if (typeof (m as any).content === "string") {
        msgsOut.push({
          role: "user",
          content: [{ type: "text", text: (m as any).content, ...cc() }],
        })
      } else if (Array.isArray((m as any).content)) {
        const parts: any[] = []
        for (const p of (m as any).content) {
          if (!p || !(p as any).type) continue
          if ((p as any).type === "text" && typeof (p as any).text === "string")
            parts.push({ type: "text", text: (p as any).text, ...cc() })
          if ((p as any).type === "image_url") {
            const s = toSrc(p)
            if (s) parts.push({ type: "image", source: s, ...cc() })
          }
        }
        if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const out: any = { role: "assistant", content: [] as any[] }
      if (typeof (m as any).content === "string" && (m as any).content.length > 0) {
        ;(out.content as any[]).push({ type: "text", text: (m as any).content, ...cc() })
      }
      if (Array.isArray((m as any).tool_calls)) {
        for (const tc of (m as any).tool_calls) {
          if ((tc as any).type === "function" && (tc as any).function) {
            // Anthropic wants structured input; parse the arguments string,
            // falling back to the raw string when it is not valid JSON.
            let input: any
            const a = (tc as any).function.arguments
            if (typeof a === "string") {
              try {
                input = JSON.parse(a)
              } catch {
                input = a
              }
            } else input = a
            // Synthesize an id when the caller did not provide one.
            const id = (tc as any).id || `toolu_${Math.random().toString(36).slice(2)}`
            ;(out.content as any[]).push({
              type: "tool_use",
              id,
              name: (tc as any).function.name,
              input,
              ...cc(),
            })
          }
        }
      }
      // Assistant messages with neither text nor tool calls are dropped.
      if ((out.content as any[]).length > 0) msgsOut.push(out)
      continue
    }
    if ((m as any).role === "tool") {
      // Tool results travel as user messages in the Anthropic format.
      msgsOut.push({
        role: "user",
        content: [
          {
            type: "tool_result",
            tool_use_id: (m as any).tool_call_id,
            content: (m as any).content,
            ...cc(),
          },
        ],
      })
      continue
    }
  }
  // Tools: OpenAI { type, function: {...} } -> Anthropic flat shape.
  const tools = Array.isArray(body.tools)
    ? body.tools
        .filter((t: any) => t && typeof t === "object" && (t as any).type === "function")
        .map((t: any) => ({
          name: (t as any).function.name,
          description: (t as any).function.description,
          input_schema: (t as any).function.parameters,
          ...cc(),
        }))
    : undefined
  // Map the OpenAI tool_choice vocabulary onto Anthropic's.
  const tcIn = body.tool_choice
  const tool_choice = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return { type: "auto" }
    if (tcIn === "required") return { type: "any" }
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "tool", name: (tcIn as any).function.name }
    return undefined
  })()
  // Anthropic always takes an array of stop sequences.
  const stop_sequences = (() => {
    const v = body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v
    if (typeof v === "string") return [v]
    return undefined
  })()
  return {
    max_tokens: body.max_tokens ?? 32_000, // required field in the Messages API
    temperature: body.temperature,
    top_p: body.top_p,
    system: system.length > 0 ? system : undefined,
    messages: msgsOut,
    stream: !!body.stream,
    tools,
    tool_choice,
    stop_sequences,
  }
}
/**
 * Convert an Anthropic "message" response into an OpenAI-style
 * chat.completion response. Anything that is not a plain Anthropic message
 * payload passes through untouched.
 */
export function fromAnthropicResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  if (Array.isArray((resp as any).choices)) return resp
  if ((resp as any).type !== "message") return resp

  const randomSuffix = () => Math.random().toString(36).slice(2)
  const rawId = (resp as any).id
  const id = typeof rawId === "string" ? rawId.replace(/^msg_/, "chatcmpl_") : `chatcmpl_${randomSuffix()}`

  const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []

  // Concatenate all text blocks into a single assistant message body.
  const text = blocks.reduce(
    (acc: string, b: any) => (b && b.type === "text" && typeof b.text === "string" ? acc + b.text : acc),
    "",
  )

  // Serialize a tool_use input into an OpenAI-style arguments string.
  const stringifyInput = (input: any): string => {
    if (typeof input === "string") return input
    try {
      return JSON.stringify(input ?? {})
    } catch {
      return String(input ?? "")
    }
  }

  const tool_calls = blocks
    .filter((b: any) => b && b.type === "tool_use")
    .map((b: any) => ({
      id: typeof b.id === "string" && b.id.length > 0 ? b.id : `toolu_${randomSuffix()}`,
      type: "function" as const,
      function: { name: b.name, arguments: stringifyInput(b.input) },
    }))

  // Anthropic stop_reason -> OpenAI finish_reason; unknown values map to null.
  const finishFor: Record<string, string> = {
    end_turn: "stop",
    tool_use: "tool_calls",
    max_tokens: "length",
    content_filter: "content_filter",
  }
  const finish_reason = finishFor[(resp as any).stop_reason ?? ""] ?? null

  const u = (resp as any).usage
  const prompt_tokens = typeof u?.input_tokens === "number" ? u.input_tokens : undefined
  const completion_tokens = typeof u?.output_tokens === "number" ? u.output_tokens : undefined
  const cached = typeof u?.cache_read_input_tokens === "number" ? u.cache_read_input_tokens : undefined
  const usage = u
    ? {
        prompt_tokens,
        completion_tokens,
        total_tokens:
          prompt_tokens != null && completion_tokens != null ? prompt_tokens + completion_tokens : undefined,
        ...(cached != null ? { prompt_tokens_details: { cached_tokens: cached } } : {}),
      }
    : undefined

  return {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model: (resp as any).model,
    choices: [
      {
        index: 0,
        message: {
          role: "assistant",
          ...(text.length > 0 ? { content: text } : {}),
          ...(tool_calls.length > 0 ? { tool_calls } : {}),
        },
        finish_reason,
      },
    ],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Convert an OpenAI-style chat.completion response into an Anthropic
 * "message" response. Payloads without a usable first choice pass through.
 */
export function toAnthropicResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const firstChoice = (resp as any).choices[0]
  if (!firstChoice) return resp
  const msg = firstChoice.message
  if (!msg) return resp

  const blocks: any[] = []
  if (typeof msg.content === "string" && msg.content.length > 0) blocks.push({ type: "text", text: msg.content })
  const calls = Array.isArray(msg.tool_calls) ? msg.tool_calls : []
  for (const call of calls) {
    if ((call as any).type !== "function" || !(call as any).function) continue
    // Anthropic wants structured input; fall back to the raw arguments
    // string when it is not valid JSON.
    const rawArgs = (call as any).function.arguments
    let input: any
    try {
      input = JSON.parse(rawArgs)
    } catch {
      input = rawArgs
    }
    blocks.push({ type: "tool_use", id: (call as any).id, name: (call as any).function.name, input })
  }

  // OpenAI finish_reason -> Anthropic stop_reason; unknown values map to null.
  const reasonMap: Record<string, string> = {
    stop: "end_turn",
    tool_calls: "tool_use",
    length: "max_tokens",
    content_filter: "content_filter",
  }
  const stop_reason = reasonMap[firstChoice.finish_reason] ?? null

  const rawUsage = (resp as any).usage
  const usage = rawUsage
    ? {
        input_tokens: rawUsage.prompt_tokens,
        output_tokens: rawUsage.completion_tokens,
        cache_read_input_tokens: rawUsage.prompt_tokens_details?.cached_tokens,
      }
    : undefined

  return {
    id: (resp as any).id,
    type: "message",
    role: "assistant",
    // Anthropic content is never empty; emit one empty text block if needed.
    content: blocks.length > 0 ? blocks : [{ type: "text", text: "" }],
    model: (resp as any).model,
    stop_reason,
    usage,
  }
}
/**
 * Translate one Anthropic streaming SSE part ("event: <type>\ndata: <json>")
 * into a common chat.completion.chunk. Parts without a parsable data line
 * are returned unchanged as raw strings.
 */
export function fromAnthropicChunk(chunk: string): CommonChunk | string {
  const dataLine = chunk.split("\n").find((line) => line.startsWith("data: "))
  if (!dataLine) return chunk
  let payload: any
  try {
    payload = JSON.parse(dataLine.slice(6))
  } catch {
    return chunk
  }
  const result: CommonChunk = {
    id: payload.id ?? payload.message?.id ?? "",
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: payload.model ?? payload.message?.model ?? "",
    choices: [],
  }
  const idx = payload.index ?? 0
  switch (payload.type) {
    case "content_block_start": {
      const block = payload.content_block
      if (block?.type === "text") {
        // Opening text block: emit the assistant role with empty content.
        result.choices.push({ index: idx, delta: { role: "assistant", content: "" }, finish_reason: null })
      } else if (block?.type === "tool_use") {
        // Opening tool call: name and id arrive now, arguments stream later.
        result.choices.push({
          index: idx,
          delta: {
            tool_calls: [
              { index: idx, id: block.id, type: "function", function: { name: block.name, arguments: "" } },
            ],
          },
          finish_reason: null,
        })
      }
      break
    }
    case "content_block_delta": {
      const delta = payload.delta
      if (delta?.type === "text_delta") {
        result.choices.push({ index: idx, delta: { content: delta.text }, finish_reason: null })
      } else if (delta?.type === "input_json_delta") {
        result.choices.push({
          index: idx,
          delta: { tool_calls: [{ index: idx, function: { arguments: delta.partial_json } }] },
          finish_reason: null,
        })
      }
      break
    }
    case "message_delta": {
      // Terminal event: map the stop reason onto OpenAI vocabulary.
      const reasonMap: Record<string, string> = {
        end_turn: "stop",
        tool_use: "tool_calls",
        max_tokens: "length",
        content_filter: "content_filter",
      }
      const finish = reasonMap[payload.delta?.stop_reason] ?? null
      result.choices.push({ index: 0, delta: {}, finish_reason: finish })
      break
    }
  }
  if (payload.usage) {
    const u = payload.usage
    result.usage = {
      prompt_tokens: u.input_tokens,
      completion_tokens: u.output_tokens,
      total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
      ...(u.cache_read_input_tokens ? { prompt_tokens_details: { cached_tokens: u.cache_read_input_tokens } } : {}),
    }
  }
  return result
}
/**
 * Serialize one common streaming chunk as the JSON body of a single
 * Anthropic streaming event. Chunks without choices or delta become "{}".
 */
export function toAnthropicChunk(chunk: CommonChunk): string {
  const choices = Array.isArray(chunk.choices) ? chunk.choices : []
  if (!chunk.choices || choices.length === 0) return JSON.stringify({})
  const [first] = choices
  const delta = first.delta
  if (!delta) return JSON.stringify({})
  const event: any = {}
  if (delta.content) {
    event.type = "content_block_delta"
    event.index = 0
    event.delta = { type: "text_delta", text: delta.content }
  }
  if (delta.tool_calls) {
    for (const call of delta.tool_calls) {
      if (call.function?.name) {
        // New tool call: open a content block with an empty input.
        event.type = "content_block_start"
        event.index = call.index ?? 0
        event.content_block = { type: "tool_use", id: call.id, name: call.function.name, input: {} }
      } else if (call.function?.arguments) {
        // Argument streaming: incremental JSON for the open tool call.
        event.type = "content_block_delta"
        event.index = call.index ?? 0
        event.delta = { type: "input_json_delta", partial_json: call.function.arguments }
      }
    }
  }
  if (first.finish_reason) {
    // Terminal chunk: map the finish reason onto Anthropic vocabulary.
    const reasonMap: Record<string, string> = {
      stop: "end_turn",
      tool_calls: "tool_use",
      length: "max_tokens",
      content_filter: "content_filter",
    }
    event.type = "message_delta"
    event.delta = { stop_reason: reasonMap[first.finish_reason] ?? null, stop_sequence: null }
  }
  if (chunk.usage) {
    const u = chunk.usage
    event.usage = {
      input_tokens: u.prompt_tokens,
      output_tokens: u.completion_tokens,
      cache_read_input_tokens: u.prompt_tokens_details?.cached_tokens,
    }
  }
  return JSON.stringify(event)
}

View File

@@ -0,0 +1,541 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
// Usage payload shape returned by OpenAI-compatible chat.completions
// providers, including provider-specific extensions.
type Usage = {
  prompt_tokens?: number
  completion_tokens?: number
  total_tokens?: number
  // used by moonshot: cached prompt tokens reported at the top level
  cached_tokens?: number
  // used by xai: detailed prompt-token breakdown
  prompt_tokens_details?: {
    text_tokens?: number
    audio_tokens?: number
    image_tokens?: number
    cached_tokens?: number
  }
  completion_tokens_details?: {
    reasoning_tokens?: number
    audio_tokens?: number
    accepted_prediction_tokens?: number
    rejected_prediction_tokens?: number
  }
}
/** Provider glue for OpenAI-compatible chat.completions endpoints. */
export const oaCompatHelper = {
  format: "oa-compat",
  // Chat Completions endpoint lives at <api>/chat/completions.
  modifyUrl: (providerApi: string) => providerApi + "/chat/completions",
  modifyHeaders: (headers: Headers, apiKey: string) => {
    headers.set("authorization", `Bearer ${apiKey}`)
  },
  modifyBody: (body: Record<string, any>) => {
    // When streaming, ask the provider to attach usage to the final chunk.
    const extra = body.stream ? { stream_options: { include_usage: true } } : {}
    return { ...body, ...extra }
  },
  // Stateful SSE parser; only "data: <json>" lines carrying usage matter.
  createUsageParser: () => {
    let usage: Usage
    return {
      parse: (chunk: string) => {
        if (!chunk.startsWith("data: ")) return
        let parsed: { usage?: Usage }
        try {
          parsed = JSON.parse(chunk.slice(6)) as { usage?: Usage }
        } catch (e) {
          return
        }
        // Later chunks overwrite earlier ones; usage arrives on the last chunk.
        if (parsed.usage) usage = parsed.usage
      },
      retrieve: () => usage,
    }
  },
  normalizeUsage: (usage: Usage) => {
    const promptTokens = usage.prompt_tokens ?? 0
    const completionTokens = usage.completion_tokens ?? 0
    const reasoningTokens = usage.completion_tokens_details?.reasoning_tokens ?? undefined
    // moonshot reports cached_tokens at the top level, xai nests it.
    const cacheReadTokens = usage.cached_tokens ?? usage.prompt_tokens_details?.cached_tokens ?? undefined
    return {
      // prompt_tokens includes cached tokens; report only the uncached part.
      inputTokens: promptTokens - (cacheReadTokens ?? 0),
      outputTokens: completionTokens,
      reasoningTokens,
      cacheReadTokens,
      cacheWrite5mTokens: undefined,
      cacheWrite1hTokens: undefined,
    }
  },
} satisfies ProviderHelper
/**
 * Normalize an OA-compatible chat.completions request into the common
 * request shape. Messages are validated/normalized; unknown roles and
 * unknown content part types are dropped.
 */
export function fromOaCompatibleRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  const source = Array.isArray(body.messages) ? body.messages : []
  const messages: any[] = []
  for (const msg of source) {
    if (!msg || !msg.role) continue
    switch (msg.role) {
      case "system":
        if (typeof msg.content === "string" && msg.content.length > 0)
          messages.push({ role: "system", content: msg.content })
        break
      case "user":
        if (typeof msg.content === "string") {
          messages.push({ role: "user", content: msg.content })
        } else if (Array.isArray(msg.content)) {
          const parts: any[] = []
          for (const part of msg.content) {
            if (!part || !part.type) continue
            if (part.type === "text" && typeof part.text === "string") parts.push({ type: "text", text: part.text })
            if (part.type === "image_url") parts.push({ type: "image_url", image_url: part.image_url })
          }
          // A lone text part collapses down to a plain string.
          if (parts.length === 1 && parts[0].type === "text") messages.push({ role: "user", content: parts[0].text })
          else if (parts.length > 0) messages.push({ role: "user", content: parts })
        }
        break
      case "assistant": {
        const entry: any = { role: "assistant" }
        if (typeof msg.content === "string") entry.content = msg.content
        if (Array.isArray(msg.tool_calls)) entry.tool_calls = msg.tool_calls
        messages.push(entry)
        break
      }
      case "tool":
        messages.push({ role: "tool", tool_call_id: msg.tool_call_id, content: msg.content })
        break
    }
  }
  return {
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop: body.stop,
    messages,
    stream: !!body.stream,
    tools: Array.isArray(body.tools) ? body.tools : undefined,
    tool_choice: body.tool_choice,
  }
}
/**
 * Convert a common (OpenAI-chat-style) request into an OA-compatible
 * chat.completions request body. Anthropic-style image `source` parts are
 * converted to `image_url` parts; tools are re-emitted in the OpenAI
 * function-tool shape.
 */
export function toOaCompatibleRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const msgsOut: any[] = []
  // Accept either an OpenAI image_url part or an Anthropic source part and
  // emit an OpenAI image_url part.
  const toImg = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if (p.type === "image_url" && p.image_url) return { type: "image_url", image_url: p.image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if (s.type === "url" && typeof s.url === "string") return { type: "image_url", image_url: { url: s.url } }
    if (s.type === "base64" && typeof s.media_type === "string" && typeof s.data === "string")
      return { type: "image_url", image_url: { url: `data:${s.media_type};base64,${s.data}` } }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !m.role) continue
    if (m.role === "system") {
      if (typeof m.content === "string" && m.content.length > 0) msgsOut.push({ role: "system", content: m.content })
      continue
    }
    if (m.role === "user") {
      if (typeof m.content === "string") {
        msgsOut.push({ role: "user", content: m.content })
        continue
      }
      if (Array.isArray(m.content)) {
        const parts: any[] = []
        for (const p of m.content) {
          if (!p || !p.type) continue
          if (p.type === "text" && typeof p.text === "string") parts.push({ type: "text", text: p.text })
          const ip = toImg(p)
          if (ip) parts.push(ip)
        }
        // A lone text part collapses to a plain string.
        if (parts.length === 1 && parts[0].type === "text") msgsOut.push({ role: "user", content: parts[0].text })
        else if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
      }
      continue
    }
    if (m.role === "assistant") {
      const out: any = { role: "assistant" }
      if (typeof m.content === "string") out.content = m.content
      if (Array.isArray(m.tool_calls)) out.tool_calls = m.tool_calls
      msgsOut.push(out)
      continue
    }
    if (m.role === "tool") {
      msgsOut.push({ role: "tool", tool_call_id: m.tool_call_id, content: m.content })
      continue
    }
  }
  const tools = Array.isArray(body.tools)
    ? body.tools.map((tool: any) => {
        // BUG FIX: common-format tools are OpenAI-shaped
        // ({ type: "function", function: { name, description, parameters } },
        // as produced by fromOaCompatibleRequest / fromAnthropicRequest), but
        // the original read name/description/parameters off the top level and
        // emitted tools with undefined names. Read from `function` when
        // present; fall back to flat fields for backward compatibility.
        const fn = tool && typeof tool === "object" && tool.function ? tool.function : tool
        return {
          type: "function",
          function: {
            name: fn?.name,
            description: fn?.description,
            parameters: fn?.parameters,
          },
        }
      })
    : undefined
  return {
    model: body.model,
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop: body.stop,
    messages: msgsOut,
    stream: !!body.stream,
    tools,
    tool_choice: body.tool_choice,
    response_format: (body as any).response_format,
  }
}
/**
 * Normalize an OA-compatible chat.completion response into the common
 * response shape. Non-object or choice-less payloads pass through untouched.
 *
 * The first choice's message is exploded into content blocks (text /
 * tool_use) and then reassembled, so tool-call arguments that are valid
 * JSON strings get re-serialized (whitespace/key order normalized).
 */
export function fromOaCompatibleResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const choice = (resp as any).choices[0]
  if (!choice) return resp
  const message = choice.message
  if (!message) return resp
  const content: any[] = []
  if (typeof message.content === "string" && message.content.length > 0) {
    content.push({ type: "text", text: message.content })
  }
  if (Array.isArray(message.tool_calls)) {
    for (const toolCall of message.tool_calls) {
      if (toolCall.type === "function" && toolCall.function) {
        // Prefer structured input; fall back to the raw arguments string
        // when it is not valid JSON.
        let input
        try {
          input = JSON.parse(toolCall.function.arguments)
        } catch {
          input = toolCall.function.arguments
        }
        content.push({
          type: "tool_use",
          id: toolCall.id,
          name: toolCall.function.name,
          input,
        })
      }
    }
  }
  // finish_reason values are shared by both formats; unknown values map to null.
  const stopReason = (() => {
    const reason = choice.finish_reason
    if (reason === "stop") return "stop"
    if (reason === "tool_calls") return "tool_calls"
    if (reason === "length") return "length"
    if (reason === "content_filter") return "content_filter"
    return null
  })()
  const usage = (() => {
    const u = (resp as any).usage
    if (!u) return undefined
    return {
      prompt_tokens: u.prompt_tokens,
      completion_tokens: u.completion_tokens,
      total_tokens: u.total_tokens,
      // NOTE(review): a cached_tokens value of 0 is dropped by this
      // truthiness check — presumably intentional, but verify.
      ...(u.prompt_tokens_details?.cached_tokens
        ? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
        : {}),
    }
  })()
  return {
    id: (resp as any).id,
    object: "chat.completion" as const,
    created: Math.floor(Date.now() / 1000),
    model: (resp as any).model,
    choices: [
      {
        index: 0,
        message: {
          role: "assistant" as const,
          // Re-join the text blocks (omit `content` entirely when empty).
          ...(content.length > 0 && content.some((c) => c.type === "text")
            ? {
                content: content
                  .filter((c) => c.type === "text")
                  .map((c: any) => c.text)
                  .join(""),
              }
            : {}),
          // Re-emit tool calls with stringified arguments.
          ...(content.length > 0 && content.some((c) => c.type === "tool_use")
            ? {
                tool_calls: content
                  .filter((c) => c.type === "tool_use")
                  .map((c: any) => ({
                    id: c.id,
                    type: "function" as const,
                    function: {
                      name: c.name,
                      arguments: typeof c.input === "string" ? c.input : JSON.stringify(c.input),
                    },
                  })),
              }
            : {}),
        },
        finish_reason: stopReason,
      },
    ],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Convert an Anthropic "message" response into an OA-compatible
 * chat.completion response. Payloads that already have choices, or are not
 * Anthropic messages, pass through untouched.
 */
export function toOaCompatibleResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (Array.isArray((resp as any).choices)) return resp
  if ((resp as any).type !== "message") return resp

  const sourceId = (resp as any).id
  const id =
    typeof sourceId === "string"
      ? sourceId.replace(/^msg_/, "chatcmpl_")
      : `chatcmpl_${Math.random().toString(36).slice(2)}`

  const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []
  let text = ""
  const toolCalls: any[] = []
  for (const block of blocks) {
    if (!block) continue
    if (block.type === "text" && typeof block.text === "string") {
      text += block.text
    } else if (block.type === "tool_use") {
      // OpenAI tool calls carry arguments as a JSON string.
      let args: string
      if (typeof block.input === "string") {
        args = block.input
      } else {
        try {
          args = JSON.stringify(block.input ?? {})
        } catch {
          args = String(block.input ?? "")
        }
      }
      // Synthesize an id when the provider gave none.
      const callId =
        typeof block.id === "string" && block.id.length > 0 ? block.id : `toolu_${Math.random().toString(36).slice(2)}`
      toolCalls.push({ id: callId, type: "function" as const, function: { name: block.name, arguments: args } })
    }
  }

  // Anthropic stop_reason -> OpenAI finish_reason; unknown values map to null.
  const reasonMap: Record<string, string> = {
    end_turn: "stop",
    tool_use: "tool_calls",
    max_tokens: "length",
    content_filter: "content_filter",
  }
  const finishReason = reasonMap[(resp as any).stop_reason ?? ""] ?? null

  const rawUsage = (resp as any).usage
  let usage: any
  if (rawUsage) {
    const promptTokens = typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : undefined
    const completionTokens = typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : undefined
    usage = {
      prompt_tokens: promptTokens,
      completion_tokens: completionTokens,
      total_tokens: promptTokens != null && completionTokens != null ? promptTokens + completionTokens : undefined,
    }
    if (typeof rawUsage.cache_read_input_tokens === "number") {
      usage.prompt_tokens_details = { cached_tokens: rawUsage.cache_read_input_tokens }
    }
  }

  const message: any = { role: "assistant" }
  if (text.length > 0) message.content = text
  if (toolCalls.length > 0) message.tool_calls = toolCalls
  return {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model: (resp as any).model,
    choices: [{ index: 0, message, finish_reason: finishReason }],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Translate one OA-compatible SSE line ("data: <json>") into a common
 * chat.completion.chunk. Non-data lines and unparsable or choice-less
 * payloads (including "data: [DONE]") pass through unchanged.
 */
export function fromOaCompatibleChunk(chunk: string): CommonChunk | string {
  if (!chunk.startsWith("data: ")) return chunk
  let payload: any
  try {
    payload = JSON.parse(chunk.slice(6))
  } catch {
    return chunk
  }
  if (!payload.choices || !Array.isArray(payload.choices) || payload.choices.length === 0) return chunk
  const first = payload.choices[0]
  const delta = first.delta
  if (!delta) return chunk
  const index = first.index ?? 0
  const out: CommonChunk = {
    id: payload.id ?? "",
    object: "chat.completion.chunk",
    created: payload.created ?? Math.floor(Date.now() / 1000),
    model: payload.model ?? "",
    choices: [],
  }
  if (delta.content) out.choices.push({ index, delta: { content: delta.content }, finish_reason: null })
  if (delta.tool_calls) {
    // Each streamed tool-call fragment becomes its own choice entry.
    for (const call of delta.tool_calls) {
      out.choices.push({
        index,
        delta: {
          tool_calls: [{ index: call.index ?? 0, id: call.id, type: call.type ?? "function", function: call.function }],
        },
        finish_reason: null,
      })
    }
  }
  if (first.finish_reason) out.choices.push({ index, delta: {}, finish_reason: first.finish_reason })
  const u = payload.usage
  if (u) {
    out.usage = {
      prompt_tokens: u.prompt_tokens,
      completion_tokens: u.completion_tokens,
      total_tokens: u.total_tokens,
      ...(u.prompt_tokens_details?.cached_tokens
        ? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
        : {}),
    }
  }
  return out
}
/**
 * Serialize a common streaming chunk back into an OA-compatible SSE
 * "data:" line. Each delta facet (role / content / tool_calls / finish)
 * becomes its own choice entry.
 */
export function toOaCompatibleChunk(chunk: CommonChunk): string {
  const payload: any = {
    id: chunk.id,
    object: "chat.completion.chunk",
    created: chunk.created,
    model: chunk.model,
    choices: [],
  }
  const emit = () => `data: ${JSON.stringify(payload)}`
  // Chunks without choices serialize with an empty choices array.
  if (!chunk.choices || chunk.choices.length === 0) return emit()
  const [first] = chunk.choices
  const delta = first.delta
  if (delta?.role) payload.choices.push({ index: first.index, delta: { role: delta.role }, finish_reason: null })
  if (delta?.content) payload.choices.push({ index: first.index, delta: { content: delta.content }, finish_reason: null })
  if (delta?.tool_calls) {
    for (const call of delta.tool_calls) {
      payload.choices.push({
        index: first.index,
        delta: { tool_calls: [{ index: call.index, id: call.id, type: call.type, function: call.function }] },
        finish_reason: null,
      })
    }
  }
  if (first.finish_reason) payload.choices.push({ index: first.index, delta: {}, finish_reason: first.finish_reason })
  const u = chunk.usage
  if (u) {
    payload.usage = {
      prompt_tokens: u.prompt_tokens,
      completion_tokens: u.completion_tokens,
      total_tokens: u.total_tokens,
      ...(u.prompt_tokens_details?.cached_tokens
        ? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
        : {}),
    }
  }
  return emit()
}

View File

@@ -0,0 +1,600 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
// Usage payload shape returned by the OpenAI Responses API.
type Usage = {
  input_tokens?: number
  input_tokens_details?: {
    // Cached prompt tokens (counted within input_tokens; see normalizeUsage).
    cached_tokens?: number
  }
  output_tokens?: number
  output_tokens_details?: {
    // Reasoning tokens (counted within output_tokens; see normalizeUsage).
    reasoning_tokens?: number
  }
  total_tokens?: number
}
/** Provider glue for the OpenAI Responses API (URL, auth, usage accounting). */
export const openaiHelper = {
  format: "openai",
  // Responses endpoint lives at <api>/responses.
  modifyUrl: (providerApi: string) => providerApi + "/responses",
  modifyHeaders: (headers: Headers, apiKey: string) => {
    headers.set("authorization", `Bearer ${apiKey}`)
  },
  modifyBody: (body: Record<string, any>) => {
    return body
  },
  // Stateful SSE parser; usage arrives only on the terminal
  // response.completed event ("event: <type>\ndata: <json>").
  createUsageParser: () => {
    let usage: Usage
    return {
      parse: (chunk: string) => {
        const [event, data] = chunk.split("\n")
        if (event !== "event: response.completed") return
        // BUG FIX: the original called data.startsWith unconditionally and
        // threw a TypeError when the event line had no following data line.
        if (!data?.startsWith("data: ")) return
        let json
        try {
          json = JSON.parse(data.slice(6)) as { response?: { usage?: Usage } }
        } catch (e) {
          return
        }
        if (!json.response?.usage) return
        usage = json.response.usage
      },
      retrieve: () => usage,
    }
  },
  normalizeUsage: (usage: Usage) => {
    const inputTokens = usage.input_tokens ?? 0
    const outputTokens = usage.output_tokens ?? 0
    const reasoningTokens = usage.output_tokens_details?.reasoning_tokens ?? undefined
    const cacheReadTokens = usage.input_tokens_details?.cached_tokens ?? undefined
    return {
      // input_tokens includes cached tokens and output_tokens includes
      // reasoning tokens; report the disjoint parts.
      inputTokens: inputTokens - (cacheReadTokens ?? 0),
      outputTokens: outputTokens - (reasoningTokens ?? 0),
      reasoningTokens,
      cacheReadTokens,
      cacheWrite5mTokens: undefined,
      cacheWrite1hTokens: undefined,
    }
  },
} satisfies ProviderHelper
/**
 * Converts an OpenAI request body — Responses API (`input` items) or Chat
 * Completions (`messages`) — into the internal CommonRequest shape.
 *
 * Handles role-less `function_call` / `function_call_output` items,
 * system/developer text, user text+image parts (inline `tool_result` parts
 * are emitted as separate tool messages), assistant text and tool_calls,
 * and plain tool messages.
 */
export function fromOpenaiRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  // Normalizes any supported image part shape (image_url / input_image /
  // source with url or base64 data) to { type: "image_url", image_url: { url } }.
  const toImg = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "image_url" && (p as any).image_url)
      return { type: "image_url", image_url: (p as any).image_url }
    if ((p as any).type === "input_image" && (p as any).image_url)
      return { type: "image_url", image_url: (p as any).image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if ((s as any).type === "url" && typeof (s as any).url === "string")
      return { type: "image_url", image_url: { url: (s as any).url } }
    if (
      (s as any).type === "base64" &&
      typeof (s as any).media_type === "string" &&
      typeof (s as any).data === "string"
    )
      return { type: "image_url", image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` } }
    return undefined
  }
  const msgs: any[] = []
  const inMsgs = Array.isArray(body.input) ? body.input : Array.isArray(body.messages) ? body.messages : []
  for (const m of inMsgs) {
    if (!m) continue
    // Responses API items without role:
    if (!(m as any).role && (m as any).type) {
      if ((m as any).type === "function_call") {
        const name = (m as any).name
        const a = (m as any).arguments
        const args = typeof a === "string" ? a : JSON.stringify(a ?? {})
        // Prefer call_id over the item id: the matching function_call_output
        // item carries only call_id (see toOpenaiRequest), so using the item
        // id here would break the pairing between tool call and tool result.
        const id = (m as any).call_id ?? (m as any).id
        msgs.push({
          role: "assistant",
          tool_calls: [{ id, type: "function", function: { name, arguments: args } }],
        })
      }
      if ((m as any).type === "function_call_output") {
        const id = (m as any).call_id
        const out = (m as any).output
        const content = typeof out === "string" ? out : JSON.stringify(out)
        msgs.push({ role: "tool", tool_call_id: id, content })
      }
      continue
    }
    // System/developer prompts collapse to a system message; for array
    // content only the first text part is kept.
    if ((m as any).role === "system" || (m as any).role === "developer") {
      const c = (m as any).content
      if (typeof c === "string" && c.length > 0) msgs.push({ role: "system", content: c })
      if (Array.isArray(c)) {
        const t = c.find((p: any) => p && typeof p.text === "string")
        if (t && typeof t.text === "string" && t.text.length > 0) msgs.push({ role: "system", content: t.text })
      }
      continue
    }
    if ((m as any).role === "user") {
      const c = (m as any).content
      if (typeof c === "string") {
        msgs.push({ role: "user", content: c })
      } else if (Array.isArray(c)) {
        const parts: any[] = []
        for (const p of c) {
          if (!p || !(p as any).type) continue
          if (((p as any).type === "text" || (p as any).type === "input_text") && typeof (p as any).text === "string")
            parts.push({ type: "text", text: (p as any).text })
          const ip = toImg(p)
          if (ip) parts.push(ip)
          // Inline tool results become standalone tool messages, emitted
          // before the surrounding user message.
          if ((p as any).type === "tool_result") {
            const id = (p as any).tool_call_id
            const content =
              typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
            msgs.push({ role: "tool", tool_call_id: id, content })
          }
        }
        // A single text part collapses back to plain string content.
        if (parts.length === 1 && parts[0].type === "text") msgs.push({ role: "user", content: parts[0].text })
        else if (parts.length > 0) msgs.push({ role: "user", content: parts })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const c = (m as any).content
      const out: any = { role: "assistant" }
      if (typeof c === "string" && c.length > 0) out.content = c
      if (Array.isArray((m as any).tool_calls)) out.tool_calls = (m as any).tool_calls
      msgs.push(out)
      continue
    }
    if ((m as any).role === "tool") {
      msgs.push({ role: "tool", tool_call_id: (m as any).tool_call_id, content: (m as any).content })
      continue
    }
  }
  // Only "auto", "required" and a specific function choice survive the
  // conversion; anything else (e.g. "none") is dropped.
  const tcIn = body.tool_choice
  const tc = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return "auto"
    if (tcIn === "required") return "required"
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "function" as const, function: { name: (tcIn as any).function.name } }
    return undefined
  })()
  // Accept either stop_sequences or stop; a one-element array collapses
  // to a plain string.
  const stop = (() => {
    const v = body.stop_sequences ?? body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v.length === 1 ? v[0] : v
    if (typeof v === "string") return v
    return undefined
  })()
  return {
    max_tokens: body.max_output_tokens ?? body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop,
    messages: msgs,
    stream: !!body.stream,
    tools: Array.isArray(body.tools) ? body.tools : undefined,
    tool_choice: tc,
  }
}
/**
 * Converts a CommonRequest into an OpenAI Responses API request body:
 * messages become `input` items, assistant tool calls become
 * `function_call` items, and tool messages become `function_call_output`
 * items keyed by call_id.
 */
export function toOpenaiRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const input: any[] = []
  // Maps a common content part (text / image_url / source) to the
  // Responses API input part shape.
  const toPart = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "text" && typeof (p as any).text === "string")
      return { type: "input_text", text: (p as any).text }
    if ((p as any).type === "image_url" && (p as any).image_url)
      return { type: "input_image", image_url: (p as any).image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if ((s as any).type === "url" && typeof (s as any).url === "string")
      return { type: "input_image", image_url: { url: (s as any).url } }
    if (
      (s as any).type === "base64" &&
      typeof (s as any).media_type === "string" &&
      typeof (s as any).data === "string"
    )
      return { type: "input_image", image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` } }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !(m as any).role) continue
    if ((m as any).role === "system") {
      const c = (m as any).content
      if (typeof c === "string") input.push({ role: "system", content: c })
      continue
    }
    if ((m as any).role === "user") {
      const c = (m as any).content
      if (typeof c === "string") {
        input.push({ role: "user", content: [{ type: "input_text", text: c }] })
      } else if (Array.isArray(c)) {
        const parts: any[] = []
        for (const p of c) {
          const op = toPart(p)
          if (op) parts.push(op)
        }
        if (parts.length > 0) input.push({ role: "user", content: parts })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const c = (m as any).content
      if (typeof c === "string" && c.length > 0) {
        input.push({ role: "assistant", content: [{ type: "output_text", text: c }] })
      }
      if (Array.isArray((m as any).tool_calls)) {
        for (const tc of (m as any).tool_calls) {
          if ((tc as any).type === "function" && (tc as any).function) {
            const name = (tc as any).function.name
            const a = (tc as any).function.arguments
            const args = typeof a === "string" ? a : JSON.stringify(a)
            input.push({ type: "function_call", call_id: (tc as any).id, name, arguments: args })
          }
        }
      }
      continue
    }
    if ((m as any).role === "tool") {
      const out = typeof (m as any).content === "string" ? (m as any).content : JSON.stringify((m as any).content)
      input.push({ type: "function_call_output", call_id: (m as any).tool_call_id, output: out })
      continue
    }
  }
  const stop_sequences = (() => {
    const v = body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v
    if (typeof v === "string") return [v]
    return undefined
  })()
  const tcIn = body.tool_choice
  const tool_choice = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return "auto"
    if (tcIn === "required") return "required"
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "function", function: { name: (tcIn as any).function.name } }
    return undefined
  })()
  // The Responses API flattens function tools: name/description/parameters
  // sit at the top level of the tool object.
  const tools = (() => {
    if (!Array.isArray(body.tools)) return undefined
    return body.tools.map((tool: any) => {
      if (tool.type === "function") {
        return {
          type: "function",
          name: tool.function?.name,
          description: tool.function?.description,
          parameters: tool.function?.parameters,
          strict: tool.function?.strict,
        }
      }
      return tool
    })
  })()
  return {
    model: body.model,
    input,
    max_output_tokens: body.max_tokens,
    // temperature was previously dropped in this direction even though
    // CommonRequest carries it and the Responses API accepts it.
    temperature: body.temperature,
    top_p: body.top_p,
    stop_sequences,
    stream: !!body.stream,
    tools,
    tool_choice,
    include: Array.isArray((body as any).include) ? (body as any).include : undefined,
    truncation: (body as any).truncation,
    metadata: (body as any).metadata,
    store: (body as any).store,
    user: (body as any).user,
    // NOTE(review): verbosity/effort are hardcoded defaults for every
    // proxied request — confirm this is intentional.
    text: { verbosity: "low" },
    reasoning: { effort: "medium" },
  }
}
/**
 * Converts an OpenAI Responses API result into a CommonResponse
 * (chat.completion shape). Already-converted bodies (those with a
 * `choices` array) and non-objects pass through unchanged.
 */
export function fromOpenaiResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  if (Array.isArray((resp as any).choices)) return resp
  const inner = (resp as any).response ?? resp
  if (!inner || typeof inner !== "object") return resp

  const rawId = (inner as any).id
  const id =
    typeof rawId === "string" ? rawId.replace(/^resp_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
  const model = (inner as any).model ?? (resp as any).model

  // Walk the output items once, concatenating every output_text part and
  // collecting function_call items as tool calls.
  const items: any[] = Array.isArray((inner as any).output) ? (inner as any).output : []
  let text = ""
  const toolCalls: any[] = []
  for (const item of items) {
    if (!item) continue
    if (item.type === "message" && Array.isArray(item.content)) {
      for (const part of item.content) {
        if (part && part.type === "output_text" && typeof part.text === "string") text += part.text
      }
    } else if (item.type === "function_call") {
      const rawArgs = item.arguments
      const callId =
        typeof item.id === "string" && item.id.length > 0 ? item.id : `toolu_${Math.random().toString(36).slice(2)}`
      toolCalls.push({
        id: callId,
        type: "function" as const,
        function: {
          name: item.name,
          arguments: typeof rawArgs === "string" ? rawArgs : JSON.stringify(rawArgs ?? {}),
        },
      })
    }
  }

  // Responses stop reasons → chat.completion finish reasons.
  const mapFinish = (reason: string | null) => {
    switch (reason) {
      case "stop":
        return "stop"
      case "tool_call":
      case "tool_calls":
        return "tool_calls"
      case "length":
      case "max_output_tokens":
        return "length"
      case "content_filter":
        return "content_filter"
      default:
        return null
    }
  }

  const rawUsage = (inner as any).usage ?? (resp as any).usage
  let usage: any
  if (rawUsage) {
    const promptTokens = typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : undefined
    const completionTokens = typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : undefined
    usage = {
      prompt_tokens: promptTokens,
      completion_tokens: completionTokens,
      total_tokens: promptTokens != null && completionTokens != null ? promptTokens + completionTokens : undefined,
    }
    const cachedTokens = rawUsage.input_tokens_details?.cached_tokens
    if (typeof cachedTokens === "number") usage.prompt_tokens_details = { cached_tokens: cachedTokens }
  }

  const message: any = { role: "assistant" }
  if (text.length > 0) message.content = text
  if (toolCalls.length > 0) message.tool_calls = toolCalls

  const result: any = {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [{ index: 0, message, finish_reason: mapFinish((inner as any).stop_reason ?? null) }],
  }
  if (usage) result.usage = usage
  return result
}
/**
 * Converts a CommonResponse (chat.completion shape) into an OpenAI
 * Responses API result. Bodies without a first choice/message pass
 * through unchanged.
 */
export function toOpenaiResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const first = (resp as any).choices[0]
  const message = first?.message
  if (!first || !message) return resp

  const output: any[] = []
  // Assistant text becomes a single completed message item.
  if (typeof message.content === "string" && message.content.length > 0) {
    output.push({
      id: `msg_${Math.random().toString(36).slice(2)}`,
      type: "message",
      status: "completed",
      role: "assistant",
      content: [{ type: "output_text", text: message.content, annotations: [], logprobs: [] }],
    })
  }
  // Each function tool call becomes its own function_call item.
  for (const call of Array.isArray(message.tool_calls) ? message.tool_calls : []) {
    if ((call as any).type !== "function" || !(call as any).function) continue
    output.push({
      id: (call as any).id,
      type: "function_call",
      name: (call as any).function.name,
      call_id: (call as any).id,
      arguments: (call as any).function.arguments,
    })
  }

  // chat.completion finish reasons → Responses stop reasons.
  let stop_reason: string | null
  switch (first.finish_reason) {
    case "stop":
      stop_reason = "stop"
      break
    case "tool_calls":
      stop_reason = "tool_call"
      break
    case "length":
      stop_reason = "max_output_tokens"
      break
    case "content_filter":
      stop_reason = "content_filter"
      break
    default:
      stop_reason = null
  }

  const u = (resp as any).usage
  let usage: any
  if (u) {
    usage = { input_tokens: u.prompt_tokens, output_tokens: u.completion_tokens, total_tokens: u.total_tokens }
    // Truthiness check mirrors the original: a cached_tokens of 0 is omitted.
    if (u.prompt_tokens_details?.cached_tokens) {
      usage.input_tokens_details = { cached_tokens: u.prompt_tokens_details.cached_tokens }
    }
  }

  return {
    id: (resp as any).id?.replace(/^chatcmpl_/, "resp_") ?? `resp_${Math.random().toString(36).slice(2)}`,
    object: "response",
    model: (resp as any).model,
    output,
    stop_reason,
    usage,
  }
}
/**
 * Converts a single OpenAI Responses SSE frame ("event: …\ndata: {…}")
 * into a CommonChunk. Frames that cannot be parsed are returned as the
 * original string; unrecognized events yield a chunk with no choices.
 */
export function fromOpenaiChunk(chunk: string): CommonChunk | string {
  const [eventLine, dataLine] = chunk.split("\n")
  if (!eventLine || !dataLine || !dataLine.startsWith("data: ")) return chunk
  let payload: any
  try {
    payload = JSON.parse(dataLine.slice(6))
  } catch {
    return chunk
  }
  const response = payload.response ?? {}
  const result: CommonChunk = {
    id: response.id ?? payload.id ?? "",
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: response.model ?? payload.model ?? "",
    choices: [],
  }
  const eventName = eventLine.replace("event: ", "").trim()
  switch (eventName) {
    case "response.output_text.delta": {
      // Incremental assistant text.
      const text = payload.delta ?? payload.text ?? payload.output_text_delta
      if (typeof text === "string" && text.length > 0) {
        result.choices.push({ index: 0, delta: { content: text }, finish_reason: null })
      }
      break
    }
    case "response.output_item.added": {
      // A new function_call item opens a tool call with empty arguments.
      const item = payload.item
      if (item?.type === "function_call" && typeof item?.name === "string" && item.name.length > 0) {
        result.choices.push({
          index: 0,
          delta: {
            tool_calls: [{ index: 0, id: item.id, type: "function", function: { name: item.name, arguments: "" } }],
          },
          finish_reason: null,
        })
      }
      break
    }
    case "response.function_call_arguments.delta": {
      // Incremental tool-call argument text.
      const argsDelta = payload.delta ?? payload.arguments_delta
      if (typeof argsDelta === "string" && argsDelta.length > 0) {
        result.choices.push({
          index: 0,
          delta: { tool_calls: [{ index: 0, function: { arguments: argsDelta } }] },
          finish_reason: null,
        })
      }
      break
    }
    case "response.completed": {
      // Terminal frame: map the stop reason and attach usage if present.
      const stopReason = response.stop_reason ?? payload.stop_reason
      let finishReason: "stop" | "tool_calls" | "length" | "content_filter" | null = null
      if (stopReason === "stop") finishReason = "stop"
      else if (stopReason === "tool_call" || stopReason === "tool_calls") finishReason = "tool_calls"
      else if (stopReason === "length" || stopReason === "max_output_tokens") finishReason = "length"
      else if (stopReason === "content_filter") finishReason = "content_filter"
      result.choices.push({ index: 0, delta: {}, finish_reason: finishReason })
      const u = response.usage ?? payload.response?.usage
      if (u) {
        result.usage = {
          prompt_tokens: u.input_tokens,
          completion_tokens: u.output_tokens,
          total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
        }
        if (u.input_tokens_details?.cached_tokens) {
          result.usage.prompt_tokens_details = { cached_tokens: u.input_tokens_details.cached_tokens }
        }
      }
      break
    }
  }
  return result
}
/**
 * Serializes a CommonChunk back into an OpenAI Responses SSE frame.
 * Returns the empty string when the chunk carries nothing emittable.
 */
export function toOpenaiChunk(chunk: CommonChunk): string {
  const choices = chunk.choices
  if (!Array.isArray(choices) || choices.length === 0) return ""
  const first = choices[0]
  const delta = first.delta
  if (!delta) return ""
  const { id, model } = chunk

  // Text delta → response.output_text.delta frame.
  if (delta.content) {
    const payload = { id, type: "response.output_text.delta", delta: delta.content, response: { id, model } }
    return `event: response.output_text.delta\ndata: ${JSON.stringify(payload)}`
  }

  // Tool-call deltas: a named call opens a new function_call item, an
  // arguments-only delta streams into the current one. Only the first
  // emittable entry produces a frame.
  if (delta.tool_calls) {
    for (const call of delta.tool_calls) {
      if (call.function?.name) {
        const payload = {
          type: "response.output_item.added",
          output_index: 0,
          item: { id: call.id, type: "function_call", name: call.function.name, call_id: call.id, arguments: "" },
        }
        return `event: response.output_item.added\ndata: ${JSON.stringify(payload)}`
      }
      if (call.function?.arguments) {
        const payload = {
          type: "response.function_call_arguments.delta",
          output_index: 0,
          delta: call.function.arguments,
        }
        return `event: response.function_call_arguments.delta\ndata: ${JSON.stringify(payload)}`
      }
    }
  }

  // Terminal frame, optionally carrying usage.
  if (first.finish_reason) {
    const u = chunk.usage
    let usage: any
    if (u) {
      usage = { input_tokens: u.prompt_tokens, output_tokens: u.completion_tokens, total_tokens: u.total_tokens }
      // Truthiness check mirrors the original: a cached_tokens of 0 is omitted.
      if (u.prompt_tokens_details?.cached_tokens) {
        usage.input_tokens_details = { cached_tokens: u.prompt_tokens_details.cached_tokens }
      }
    }
    const payload: any = { id, type: "response.completed", response: { id, model, ...(usage ? { usage } : {}) } }
    return `event: response.completed\ndata: ${JSON.stringify(payload)}`
  }
  return ""
}

View File

@@ -0,0 +1,207 @@
import { Format } from "../format"
import {
fromAnthropicChunk,
fromAnthropicRequest,
fromAnthropicResponse,
toAnthropicChunk,
toAnthropicRequest,
toAnthropicResponse,
} from "./anthropic"
import {
fromOpenaiChunk,
fromOpenaiRequest,
fromOpenaiResponse,
toOpenaiChunk,
toOpenaiRequest,
toOpenaiResponse,
} from "./openai"
import {
fromOaCompatibleChunk,
fromOaCompatibleRequest,
fromOaCompatibleResponse,
toOaCompatibleChunk,
toOaCompatibleRequest,
toOaCompatibleResponse,
} from "./openai-compatible"
/**
 * Per-provider adapter: URL/header/body mutation hooks plus SSE usage
 * accounting for a single upstream API format.
 */
export type ProviderHelper = {
  /** Wire format this helper speaks. */
  format: Format
  /** Appends the provider-specific endpoint path to the API base URL. */
  modifyUrl: (providerApi: string) => string
  /** Sets authentication (and related) headers in place. */
  modifyHeaders: (headers: Headers, apiKey: string) => void
  /** Applies provider-specific tweaks to the outgoing request body. */
  modifyBody: (body: Record<string, any>) => Record<string, any>
  /** Creates a stateful parser that accumulates usage from SSE chunks. */
  createUsageParser: () => {
    parse: (chunk: string) => void
    retrieve: () => any
  }
  /** Maps the provider's raw usage object onto common token counts. */
  normalizeUsage: (usage: any) => {
    inputTokens: number
    outputTokens: number
    reasoningTokens?: number
    cacheReadTokens?: number
    cacheWrite5mTokens?: number
    cacheWrite1hTokens?: number
  }
}
/** Provider-agnostic chat message (Chat Completions shape). */
export interface CommonMessage {
  role: "system" | "user" | "assistant" | "tool"
  /** Plain text or a list of text/image parts; tool messages carry the tool output here. */
  content?: string | Array<CommonContentPart>
  /** For role "tool": id of the assistant tool call being answered. */
  tool_call_id?: string
  /** For role "assistant": tool calls requested by the model. */
  tool_calls?: CommonToolCall[]
}
/** One part of a multi-part message body: text or an image reference. */
export interface CommonContentPart {
  type: "text" | "image_url"
  /** Present when type is "text". */
  text?: string
  /** Present when type is "image_url"; url may be a data: URI. */
  image_url?: { url: string }
}
/** A single function tool call requested by the assistant. */
export interface CommonToolCall {
  id: string
  type: "function"
  function: {
    name: string
    /** JSON-encoded argument object — always kept as a string. */
    arguments: string
  }
}
/** A function tool definition offered to the model. */
export interface CommonTool {
  type: "function"
  function: {
    name: string
    description?: string
    /** JSON-Schema-style description of the function's arguments. */
    parameters?: Record<string, any>
  }
}
/**
 * Union of the usage counters reported by the supported providers:
 * Anthropic-style (input/output/cache_*), Responses-style (*_details) and
 * Chat-Completions-style (prompt/completion) fields live side by side.
 */
export interface CommonUsage {
  input_tokens?: number
  output_tokens?: number
  total_tokens?: number
  prompt_tokens?: number
  completion_tokens?: number
  cache_read_input_tokens?: number
  cache_creation?: {
    ephemeral_5m_input_tokens?: number
    ephemeral_1h_input_tokens?: number
  }
  input_tokens_details?: {
    cached_tokens?: number
  }
  output_tokens_details?: {
    reasoning_tokens?: number
  }
}
/** Provider-agnostic request, modeled on the Chat Completions body. */
export interface CommonRequest {
  model?: string
  max_tokens?: number
  temperature?: number
  top_p?: number
  /** One or more stop sequences. */
  stop?: string | string[]
  messages: CommonMessage[]
  stream?: boolean
  tools?: CommonTool[]
  tool_choice?: "auto" | "required" | { type: "function"; function: { name: string } }
}
/** Provider-agnostic non-streaming response (chat.completion shape). */
export interface CommonResponse {
  id: string
  object: "chat.completion"
  /** Unix timestamp in seconds. */
  created: number
  model: string
  choices: Array<{
    index: number
    message: {
      role: "assistant"
      content?: string
      tool_calls?: CommonToolCall[]
    }
    finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
  }>
  usage?: {
    prompt_tokens?: number
    completion_tokens?: number
    total_tokens?: number
    prompt_tokens_details?: { cached_tokens?: number }
  }
}
/** Provider-agnostic streaming delta (chat.completion.chunk shape). */
export interface CommonChunk {
  id: string
  object: "chat.completion.chunk"
  /** Unix timestamp in seconds. */
  created: number
  model: string
  choices: Array<{
    index: number
    delta: {
      role?: "assistant"
      /** Incremental assistant text. */
      content?: string
      /** Incremental tool-call data; name opens a call, arguments streams into it. */
      tool_calls?: Array<{
        index: number
        id?: string
        type?: "function"
        function?: {
          name?: string
          arguments?: string
        }
      }>
    }
    finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
  }>
  /** Usually only present on the terminal chunk. */
  usage?: {
    prompt_tokens?: number
    completion_tokens?: number
    total_tokens?: number
    prompt_tokens_details?: { cached_tokens?: number }
  }
}
/**
 * Builds a request-body converter between two wire formats. Bodies pass
 * through untouched when source and target formats match; otherwise the
 * body is lifted into CommonRequest and re-emitted in the target format.
 */
export function createBodyConverter(from: Format, to: Format) {
  return (body: any): any => {
    if (from === to) return body
    let raw: CommonRequest
    if (from === "anthropic") raw = fromAnthropicRequest(body)
    else if (from === "openai") raw = fromOpenaiRequest(body)
    else raw = fromOaCompatibleRequest(body)
    if (to === "anthropic") return toAnthropicRequest(raw)
    if (to === "openai") return toOpenaiRequest(raw)
    // Remaining format is oa-compat; returning unconditionally keeps the
    // converter total (no implicit undefined return path).
    return toOaCompatibleRequest(raw)
  }
}
/**
 * Builds a stream-chunk converter between two wire formats. Chunks pass
 * through untouched when the formats match; chunks the from-* converters
 * cannot parse come back as strings and are forwarded verbatim.
 */
export function createStreamPartConverter(from: Format, to: Format) {
  return (part: any): any => {
    if (from === to) return part
    let raw: CommonChunk | string
    if (from === "anthropic") raw = fromAnthropicChunk(part)
    else if (from === "openai") raw = fromOpenaiChunk(part)
    else raw = fromOaCompatibleChunk(part)
    // If result is a string (error case), pass it through
    if (typeof raw === "string") return raw
    if (to === "anthropic") return toAnthropicChunk(raw)
    if (to === "openai") return toOpenaiChunk(raw)
    // Remaining format is oa-compat; returning unconditionally keeps the
    // converter total (no implicit undefined return path).
    return toOaCompatibleChunk(raw)
  }
}
/**
 * Builds a non-streaming response converter between two wire formats.
 * Responses pass through untouched when source and target formats match.
 */
export function createResponseConverter(from: Format, to: Format) {
  return (response: any): any => {
    if (from === to) return response
    let raw: CommonResponse
    if (from === "anthropic") raw = fromAnthropicResponse(response)
    else if (from === "openai") raw = fromOpenaiResponse(response)
    else raw = fromOaCompatibleResponse(response)
    if (to === "anthropic") return toAnthropicResponse(raw)
    if (to === "openai") return toOpenaiResponse(raw)
    // Remaining format is oa-compat; returning unconditionally keeps the
    // converter total (no implicit undefined return path).
    return toOaCompatibleResponse(raw)
  }
}