fix: strip OpenAI item IDs from request input correctly (#9006)

This commit is contained in:
Aiden Cline
2026-01-16 20:09:36 -08:00
committed by GitHub
parent e8357a87b0
commit 7c3eeeb0fa
3 changed files with 87 additions and 79 deletions

View File

@@ -999,6 +999,24 @@ export namespace Provider {
opts.signal = combined
}
// Strip openai itemId metadata following what codex does
// Codex uses #[serde(skip_serializing)] on id fields for all item types:
// Message, Reasoning, FunctionCall, LocalShellCall, CustomToolCall, WebSearchCall
// IDs are only re-attached for Azure with store=true
if (model.api.npm === "@ai-sdk/openai" && opts.body && opts.method === "POST") {
const body = JSON.parse(opts.body as string)
const isAzure = model.providerID.includes("azure")
const keepIds = isAzure && body.store === true
if (!keepIds && Array.isArray(body.input)) {
for (const item of body.input) {
if ("id" in item) {
delete item.id
}
}
opts.body = JSON.stringify(body)
}
}
return fetchFn(input, {
...opts,
// @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682

View File

@@ -16,34 +16,33 @@ function mimeToModality(mime: string): Modality | undefined {
}
export namespace ProviderTransform {
// Maps npm package to the key the AI SDK expects for providerOptions
// Resolves the providerOptions key the AI SDK expects for a given npm package.
// Returns undefined for packages with no known mapping so callers can fall
// back to the model's providerID.
function sdkKey(npm: string): string | undefined {
  const KEY_BY_PACKAGE: Record<string, string> = {
    "@ai-sdk/github-copilot": "openai",
    "@ai-sdk/openai": "openai",
    "@ai-sdk/azure": "openai",
    "@ai-sdk/amazon-bedrock": "bedrock",
    "@ai-sdk/anthropic": "anthropic",
    "@ai-sdk/google-vertex": "google",
    "@ai-sdk/google": "google",
    "@ai-sdk/gateway": "gateway",
    "@openrouter/ai-sdk-provider": "openrouter",
  }
  return KEY_BY_PACKAGE[npm]
}
function normalizeMessages(
msgs: ModelMessage[],
model: Provider.Model,
options: Record<string, unknown>,
): ModelMessage[] {
// Strip openai itemId metadata following what codex does
if (model.api.npm === "@ai-sdk/openai" || options.store === false) {
msgs = msgs.map((msg) => {
if (msg.providerOptions) {
for (const options of Object.values(msg.providerOptions)) {
delete options["itemId"]
}
}
if (!Array.isArray(msg.content)) {
return msg
}
const content = msg.content.map((part) => {
if (part.providerOptions) {
for (const options of Object.values(part.providerOptions)) {
delete options["itemId"]
}
}
return part
})
return { ...msg, content } as typeof msg
})
}
// Anthropic rejects messages with empty content - filter out empty string messages
// and remove empty text/reasoning parts from array content
if (model.api.npm === "@ai-sdk/anthropic") {
@@ -257,6 +256,28 @@ export namespace ProviderTransform {
msgs = applyCaching(msgs, model.providerID)
}
// Remap providerOptions keys from stored providerID to expected SDK key
const key = sdkKey(model.api.npm)
if (key && key !== model.providerID) {
const remap = (opts: Record<string, any> | undefined) => {
if (!opts) return opts
if (!(model.providerID in opts)) return opts
const result = { ...opts }
result[key] = result[model.providerID]
delete result[model.providerID]
return result
}
msgs = msgs.map((msg) => {
if (!Array.isArray(msg.content)) return { ...msg, providerOptions: remap(msg.providerOptions) }
return {
...msg,
providerOptions: remap(msg.providerOptions),
content: msg.content.map((part) => ({ ...part, providerOptions: remap(part.providerOptions) })),
} as typeof msg
})
}
return msgs
}
@@ -574,39 +595,8 @@ export namespace ProviderTransform {
}
/**
 * Wraps raw provider options under the key the AI SDK expects for the
 * model's npm package (e.g. "@ai-sdk/azure" → "openai"), falling back to
 * the model's providerID when the package has no known mapping.
 *
 * NOTE(review): the diff render left the superseded per-package switch
 * (every branch returning) ahead of the new sdkKey-based return, making
 * the replacement unreachable dead code — collapsed to the single
 * sdkKey-based implementation, which is behaviorally identical to the
 * old switch for every mapped package.
 */
export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
  const key = sdkKey(model.api.npm) ?? model.providerID
  return { [key]: options }
}
export function maxOutputTokens(

View File

@@ -649,7 +649,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
headers: {},
} as any
test("strips itemId and reasoningEncryptedContent when store=false", () => {
test("preserves itemId and reasoningEncryptedContent when store=false", () => {
const msgs = [
{
role: "assistant",
@@ -680,11 +680,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
expect(result).toHaveLength(1)
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
})
test("strips itemId and reasoningEncryptedContent when store=false even when not openai", () => {
test("preserves itemId and reasoningEncryptedContent when store=false even when not openai", () => {
const zenModel = {
...openaiModel,
providerID: "zen",
@@ -719,11 +719,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
const result = ProviderTransform.message(msgs, zenModel, { store: false }) as any[]
expect(result).toHaveLength(1)
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[1].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("rs_123")
expect(result[0].content[1].providerOptions?.openai?.itemId).toBe("msg_456")
})
test("preserves other openai options when stripping itemId", () => {
test("preserves other openai options including itemId", () => {
const msgs = [
{
role: "assistant",
@@ -744,11 +744,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
const result = ProviderTransform.message(msgs, openaiModel, { store: false }) as any[]
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
expect(result[0].content[0].providerOptions?.openai?.otherOption).toBe("value")
})
test("strips metadata for openai package even when store is true", () => {
test("preserves metadata for openai package when store is true", () => {
const msgs = [
{
role: "assistant",
@@ -766,13 +766,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
// openai package always strips itemId regardless of store value
// openai package preserves itemId regardless of store value
const result = ProviderTransform.message(msgs, openaiModel, { store: true }) as any[]
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
})
test("strips metadata for non-openai packages when store is false", () => {
test("preserves metadata for non-openai packages when store is false", () => {
const anthropicModel = {
...openaiModel,
providerID: "anthropic",
@@ -799,13 +799,13 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
},
] as any[]
// store=false triggers stripping even for non-openai packages
// store=false preserves metadata for non-openai packages
const result = ProviderTransform.message(msgs, anthropicModel, { store: false }) as any[]
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_123")
})
test("strips metadata using providerID key when store is false", () => {
test("preserves metadata using providerID key when store is false", () => {
const opencodeModel = {
...openaiModel,
providerID: "opencode",
@@ -835,11 +835,11 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_123")
expect(result[0].content[0].providerOptions?.opencode?.otherOption).toBe("value")
})
test("strips itemId across all providerOptions keys", () => {
test("preserves itemId across all providerOptions keys", () => {
const opencodeModel = {
...openaiModel,
providerID: "opencode",
@@ -873,12 +873,12 @@ describe("ProviderTransform.message - strip openai metadata when store=false", (
const result = ProviderTransform.message(msgs, opencodeModel, { store: false }) as any[]
expect(result[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].providerOptions?.opencode?.itemId).toBeUndefined()
expect(result[0].providerOptions?.extra?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.openai?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBeUndefined()
expect(result[0].content[0].providerOptions?.extra?.itemId).toBeUndefined()
expect(result[0].providerOptions?.openai?.itemId).toBe("msg_root")
expect(result[0].providerOptions?.opencode?.itemId).toBe("msg_opencode")
expect(result[0].providerOptions?.extra?.itemId).toBe("msg_extra")
expect(result[0].content[0].providerOptions?.openai?.itemId).toBe("msg_openai_part")
expect(result[0].content[0].providerOptions?.opencode?.itemId).toBe("msg_opencode_part")
expect(result[0].content[0].providerOptions?.extra?.itemId).toBe("msg_extra_part")
})
test("does not strip metadata for non-openai packages when store is not false", () => {