Skip to content

Commit 0f6bbc6

Browse files
gr2m authored and claude committed
fix(providers): reconstruct auth headers after workflow deserialization
When a language model crosses a workflow step boundary, it is serialized via WORKFLOW_SERIALIZE which strips function-valued config (headers, url, fetch). On deserialization, WORKFLOW_DESERIALIZE was passing the stripped config directly, resulting in missing auth headers and "x-api-key header is required" errors. Fix each provider's WORKFLOW_DESERIALIZE to reconstruct the auth headers from environment variables when they are missing from the deserialized config. For OpenAI models, also reconstruct the url function. Providers fixed: Anthropic, OpenAI (chat, completion, responses), Google, Mistral. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent eeb8bfe commit 0f6bbc6

File tree

6 files changed

+125
-6
lines changed

6 files changed

+125
-6
lines changed

packages/anthropic/src/anthropic-messages-language-model.ts

Lines changed: 17 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -23,6 +23,7 @@ import {
2323
FetchFunction,
2424
generateId,
2525
InferSchema,
26+
loadApiKey,
2627
isCustomReasoning,
2728
mapReasoningToProviderBudget,
2829
mapReasoningToProviderEffort,
@@ -161,7 +162,22 @@ export class AnthropicMessagesLanguageModel implements LanguageModelV4 {
161162
modelId: AnthropicMessagesModelId;
162163
config: AnthropicMessagesConfig;
163164
}) {
164-
return new AnthropicMessagesLanguageModel(options.modelId, options.config);
165+
const config = { ...options.config };
166+
167+
// After workflow serialization, function-valued config (like headers)
168+
// is stripped. Re-create auth headers from environment variables.
169+
if (!config.headers) {
170+
config.headers = () => ({
171+
'anthropic-version': '2023-06-01',
172+
'x-api-key': loadApiKey({
173+
apiKey: undefined,
174+
environmentVariableName: 'ANTHROPIC_API_KEY',
175+
description: 'Anthropic',
176+
}),
177+
});
178+
}
179+
180+
return new AnthropicMessagesLanguageModel(options.modelId, config);
165181
}
166182

167183
constructor(

packages/google/src/google-generative-ai-language-model.ts

Lines changed: 14 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -27,6 +27,7 @@ import {
2727
postJsonToApi,
2828
Resolvable,
2929
resolve,
30+
loadApiKey,
3031
serializeModel,
3132
WORKFLOW_SERIALIZE,
3233
WORKFLOW_DESERIALIZE,
@@ -81,7 +82,19 @@ export class GoogleGenerativeAILanguageModel implements LanguageModelV4 {
8182
modelId: string;
8283
config: GoogleGenerativeAIConfig;
8384
}) {
84-
return new GoogleGenerativeAILanguageModel(options.modelId, options.config);
85+
const config = { ...options.config };
86+
87+
if (!config.headers) {
88+
config.headers = () => ({
89+
'x-goog-api-key': loadApiKey({
90+
apiKey: undefined,
91+
environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',
92+
description: 'Google Generative AI',
93+
}),
94+
});
95+
}
96+
97+
return new GoogleGenerativeAILanguageModel(options.modelId, config);
8598
}
8699

87100
constructor(

packages/mistral/src/mistral-chat-language-model.ts

Lines changed: 14 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -16,6 +16,7 @@ import {
1616
generateId,
1717
injectJsonInstructionIntoMessages,
1818
isCustomReasoning,
19+
loadApiKey,
1920
mapReasoningToProviderEffort,
2021
parseProviderOptions,
2122
ParseResult,
@@ -60,7 +61,19 @@ export class MistralChatLanguageModel implements LanguageModelV4 {
6061
modelId: MistralChatModelId;
6162
config: MistralChatConfig;
6263
}) {
63-
return new MistralChatLanguageModel(options.modelId, options.config);
64+
const config = { ...options.config };
65+
66+
if (!config.headers) {
67+
config.headers = () => ({
68+
Authorization: `Bearer ${loadApiKey({
69+
apiKey: undefined,
70+
environmentVariableName: 'MISTRAL_API_KEY',
71+
description: 'Mistral',
72+
})}`,
73+
});
74+
}
75+
76+
return new MistralChatLanguageModel(options.modelId, config);
6477
}
6578

6679
constructor(modelId: MistralChatModelId, config: MistralChatConfig) {

packages/openai/src/chat/openai-chat-language-model.ts

Lines changed: 28 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -19,9 +19,12 @@ import {
1919
generateId,
2020
isCustomReasoning,
2121
isParsableJson,
22+
loadApiKey,
23+
loadOptionalSetting,
2224
parseProviderOptions,
2325
postJsonToApi,
2426
serializeModel,
27+
withoutTrailingSlash,
2528
WORKFLOW_DESERIALIZE,
2629
WORKFLOW_SERIALIZE,
2730
} from '@ai-sdk/provider-utils';
@@ -71,7 +74,31 @@ export class OpenAIChatLanguageModel implements LanguageModelV4 {
7174
modelId: OpenAIChatModelId;
7275
config: OpenAIChatConfig;
7376
}) {
74-
return new OpenAIChatLanguageModel(options.modelId, options.config);
77+
const config = { ...options.config };
78+
79+
// After workflow serialization, function-valued config (like headers, url)
80+
// is stripped. Re-create from environment variables.
81+
if (!config.headers) {
82+
config.headers = () => ({
83+
Authorization: `Bearer ${loadApiKey({
84+
apiKey: undefined,
85+
environmentVariableName: 'OPENAI_API_KEY',
86+
description: 'OpenAI',
87+
})}`,
88+
});
89+
}
90+
if (!config.url) {
91+
const baseURL =
92+
withoutTrailingSlash(
93+
loadOptionalSetting({
94+
settingValue: undefined,
95+
environmentVariableName: 'OPENAI_BASE_URL',
96+
}),
97+
) ?? 'https://api.openai.com/v1';
98+
config.url = ({ path }) => `${baseURL}${path}`;
99+
}
100+
101+
return new OpenAIChatLanguageModel(options.modelId, config);
75102
}
76103

77104
constructor(modelId: OpenAIChatModelId, config: OpenAIChatConfig) {

packages/openai/src/completion/openai-completion-language-model.ts

Lines changed: 26 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -13,10 +13,13 @@ import {
1313
createEventSourceResponseHandler,
1414
createJsonResponseHandler,
1515
FetchFunction,
16+
loadApiKey,
17+
loadOptionalSetting,
1618
parseProviderOptions,
1719
ParseResult,
1820
postJsonToApi,
1921
serializeModel,
22+
withoutTrailingSlash,
2023
WORKFLOW_DESERIALIZE,
2124
WORKFLOW_SERIALIZE,
2225
} from '@ai-sdk/provider-utils';
@@ -64,7 +67,29 @@ export class OpenAICompletionLanguageModel implements LanguageModelV4 {
6467
modelId: OpenAICompletionModelId;
6568
config: OpenAICompletionConfig;
6669
}) {
67-
return new OpenAICompletionLanguageModel(options.modelId, options.config);
70+
const config = { ...options.config };
71+
72+
if (!config.headers) {
73+
config.headers = () => ({
74+
Authorization: `Bearer ${loadApiKey({
75+
apiKey: undefined,
76+
environmentVariableName: 'OPENAI_API_KEY',
77+
description: 'OpenAI',
78+
})}`,
79+
});
80+
}
81+
if (!config.url) {
82+
const baseURL =
83+
withoutTrailingSlash(
84+
loadOptionalSetting({
85+
settingValue: undefined,
86+
environmentVariableName: 'OPENAI_BASE_URL',
87+
}),
88+
) ?? 'https://api.openai.com/v1';
89+
config.url = ({ path }) => `${baseURL}${path}`;
90+
}
91+
92+
return new OpenAICompletionLanguageModel(options.modelId, config);
6893
}
6994

7095
constructor(

packages/openai/src/responses/openai-responses-language-model.ts

Lines changed: 26 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -22,10 +22,13 @@ import {
2222
generateId,
2323
InferSchema,
2424
isCustomReasoning,
25+
loadApiKey,
26+
loadOptionalSetting,
2527
parseProviderOptions,
2628
ParseResult,
2729
postJsonToApi,
2830
serializeModel,
31+
withoutTrailingSlash,
2932
WORKFLOW_DESERIALIZE,
3033
WORKFLOW_SERIALIZE,
3134
} from '@ai-sdk/provider-utils';
@@ -118,7 +121,29 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV4 {
118121
modelId: OpenAIResponsesModelId;
119122
config: OpenAIConfig;
120123
}) {
121-
return new OpenAIResponsesLanguageModel(options.modelId, options.config);
124+
const config = { ...options.config };
125+
126+
if (!config.headers) {
127+
config.headers = () => ({
128+
Authorization: `Bearer ${loadApiKey({
129+
apiKey: undefined,
130+
environmentVariableName: 'OPENAI_API_KEY',
131+
description: 'OpenAI',
132+
})}`,
133+
});
134+
}
135+
if (!config.url) {
136+
const baseURL =
137+
withoutTrailingSlash(
138+
loadOptionalSetting({
139+
settingValue: undefined,
140+
environmentVariableName: 'OPENAI_BASE_URL',
141+
}),
142+
) ?? 'https://api.openai.com/v1';
143+
config.url = ({ path }) => `${baseURL}${path}`;
144+
}
145+
146+
return new OpenAIResponsesLanguageModel(options.modelId, config);
122147
}
123148

124149
constructor(modelId: OpenAIResponsesModelId, config: OpenAIConfig) {

0 commit comments

Comments (0)