Skip to content

Commit cf8280e

Browse files
samdenty and lgrammel authored
fix(providers/xai): return actual usage when streaming instead of NaN (#5873) (#5891)
Co-authored-by: Lars Grammel <lars.grammel@gmail.com>
1 parent a13f993 commit cf8280e

7 files changed

+88
-8
lines changed

.changeset/fast-students-turn.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
'@ai-sdk/openai-compatible': patch
3+
'@ai-sdk/xai': patch
4+
---
5+
6+
fix(providers/xai): return actual usage when streaming instead of NaN

packages/openai-compatible/src/openai-compatible-chat-language-model.test.ts

Lines changed: 47 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -655,6 +655,27 @@ describe('doGenerate', () => {
655655
expect(warnings).toEqual([]);
656656
});
657657

658+
it('should respect the includeUsage option', async () => {
659+
prepareJsonResponse({ content: '{"value":"test"}' });
660+
661+
const model = new OpenAICompatibleChatLanguageModel('gpt-4o-2024-08-06', {
662+
provider: 'test-provider',
663+
url: () => 'https://my.api.com/v1/chat/completions',
664+
headers: () => ({}),
665+
includeUsage: true,
666+
});
667+
668+
await model.doStream({
669+
inputFormat: 'prompt',
670+
prompt: TEST_PROMPT,
671+
});
672+
673+
const body = await server.calls[0].requestBody;
674+
675+
expect(body.stream).toBe(true);
676+
expect(body.stream_options).toStrictEqual({ include_usage: true });
677+
});
678+
658679
it('should use json_schema & strict with responseFormat json when structuredOutputs are enabled', async () => {
659680
prepareJsonResponse({ content: '{"value":"Spark"}' });
660681

@@ -1708,9 +1729,32 @@ describe('doStream', () => {
17081729
prompt: TEST_PROMPT,
17091730
});
17101731

1711-
expect(request).toStrictEqual({
1712-
body: '{"model":"grok-beta","messages":[{"role":"user","content":"Hello"}],"stream":true}',
1713-
});
1732+
expect(request).toMatchInlineSnapshot(`
1733+
{
1734+
"body": {
1735+
"frequency_penalty": undefined,
1736+
"max_tokens": undefined,
1737+
"messages": [
1738+
{
1739+
"content": "Hello",
1740+
"role": "user",
1741+
},
1742+
],
1743+
"model": "grok-beta",
1744+
"presence_penalty": undefined,
1745+
"response_format": undefined,
1746+
"seed": undefined,
1747+
"stop": undefined,
1748+
"stream": true,
1749+
"stream_options": undefined,
1750+
"temperature": undefined,
1751+
"tool_choice": undefined,
1752+
"tools": undefined,
1753+
"top_p": undefined,
1754+
"user": undefined,
1755+
},
1756+
}
1757+
`);
17141758
});
17151759

17161760
describe('usage details in streaming', () => {

packages/openai-compatible/src/openai-compatible-chat-language-model.ts

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ export type OpenAICompatibleChatConfig = {
4141
headers: () => Record<string, string | undefined>;
4242
url: (options: { modelId: string; path: string }) => string;
4343
fetch?: FetchFunction;
44+
includeUsage?: boolean;
4445
errorStructure?: ProviderErrorStructure<any>;
4546
metadataExtractor?: MetadataExtractor;
4647

@@ -304,7 +305,16 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
304305
): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {
305306
const { args, warnings } = this.getArgs({ ...options });
306307

307-
const body = JSON.stringify({ ...args, stream: true });
308+
const body = {
309+
...args,
310+
stream: true,
311+
312+
// only include stream_options when in strict compatibility mode:
313+
stream_options: this.config.includeUsage
314+
? { include_usage: true }
315+
: undefined,
316+
};
317+
308318
const metadataExtractor =
309319
this.config.metadataExtractor?.createStreamExtractor();
310320

@@ -314,10 +324,7 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
314324
modelId: this.modelId,
315325
}),
316326
headers: combineHeaders(this.config.headers(), options.headers),
317-
body: {
318-
...args,
319-
stream: true,
320-
},
327+
body,
321328
failedResponseHandler: this.failedResponseHandler,
322329
successfulResponseHandler: createEventSourceResponseHandler(
323330
this.chunkSchema,

packages/openai-compatible/src/openai-compatible-completion-language-model.test.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -543,6 +543,7 @@ describe('doStream', () => {
543543
"seed": undefined,
544544
"stop": undefined,
545545
"stream": true,
546+
"stream_options": undefined,
546547
"suffix": undefined,
547548
"temperature": undefined,
548549
"top_p": undefined,

packages/openai-compatible/src/openai-compatible-completion-language-model.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import {
3333

3434
type OpenAICompatibleCompletionConfig = {
3535
provider: string;
36+
includeUsage?: boolean;
3637
headers: () => Record<string, string | undefined>;
3738
url: (options: { modelId: string; path: string }) => string;
3839
fetch?: FetchFunction;
@@ -220,6 +221,11 @@ export class OpenAICompatibleCompletionLanguageModel
220221
const body = {
221222
...args,
222223
stream: true,
224+
225+
// only include stream_options when in strict compatibility mode:
226+
stream_options: this.config.includeUsage
227+
? { include_usage: true }
228+
: undefined,
223229
};
224230

225231
const { responseHeaders, value: response } = await postJsonToApi({

packages/xai/src/xai-provider.test.ts

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,21 @@ describe('xAIProvider', () => {
8686

8787
expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
8888
});
89+
90+
it('should pass the includeUsage option to the chat model, to make sure usage is reported while streaming', () => {
91+
const provider = createXai();
92+
const modelId = 'xai-chat-model';
93+
94+
const model = provider.chat(modelId);
95+
96+
expect(model).toBeInstanceOf(OpenAICompatibleChatLanguageModel);
97+
98+
const constructorCall =
99+
OpenAICompatibleChatLanguageModelMock.mock.calls[0];
100+
101+
expect(constructorCall[0]).toBe(modelId);
102+
expect(constructorCall[1].includeUsage).toBe(true);
103+
});
89104
});
90105

91106
describe('imageModel', () => {

packages/xai/src/xai-provider.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,7 @@ export function createXai(options: XaiProviderSettings = {}): XaiProvider {
9898
fetch: options.fetch,
9999
errorStructure: xaiErrorStructure,
100100
supportsStructuredOutputs: structuredOutputs,
101+
includeUsage: true,
101102
});
102103
};
103104

0 commit comments

Comments (0)