From b5761328e739b6c962245b26308860d40f939718 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Tue, 11 Feb 2025 12:21:13 +0000
Subject: [PATCH] test: add generateObject mock implementation

Co-Authored-By: sha.zhou@jina.ai
---
 src/__tests__/server.test.ts | 34 ++++++++++++++++++++++++++++++++--
 1 file changed, 32 insertions(+), 2 deletions(-)

diff --git a/src/__tests__/server.test.ts b/src/__tests__/server.test.ts
index e4d816a..14bfced 100644
--- a/src/__tests__/server.test.ts
+++ b/src/__tests__/server.test.ts
@@ -26,14 +26,44 @@ jest.mock('@ai-sdk/openai', () => {
           }
         }
       });
+    }),
+    generateObject: jest.fn().mockImplementation((options) => {
+      const responseText = 'This is a test response';
+      const promptTokens = Math.ceil(Buffer.byteLength(JSON.stringify(options), 'utf-8') / 4);
+      const completionTokens = Math.ceil(Buffer.byteLength(responseText, 'utf-8') / 4);
+      return Promise.resolve({
+        object: {
+          type: 'answer',
+          think: 'Thinking about the response',
+          answer: responseText,
+          references: []
+        },
+        usage: {
+          prompt_tokens: promptTokens,
+          completion_tokens: completionTokens,
+          total_tokens: promptTokens + completionTokens,
+          completion_tokens_details: {
+            reasoning_tokens: Math.ceil(completionTokens * 0.25),
+            accepted_prediction_tokens: Math.ceil(completionTokens * 0.5),
+            rejected_prediction_tokens: Math.ceil(completionTokens * 0.25)
+          }
+        }
+      });
     })
   };
   return {
     createOpenAI: jest.fn().mockImplementation(() => {
-      const model = () => mockModel;
+      const model = () => ({
+        ...mockModel,
+        defaultObjectGenerationMode: 'object'
+      });
+      model.defaultObjectGenerationMode = 'object';
       return model;
     }),
-    OpenAIChatLanguageModel: jest.fn().mockImplementation(() => mockModel)
+    OpenAIChatLanguageModel: jest.fn().mockImplementation(() => ({
+      ...mockModel,
+      defaultObjectGenerationMode: 'object'
+    }))
   };
 });
 const TEST_SECRET = 'test-secret';
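
A minimal usage sketch (not part of the patch): with the jest.mock block above in effect in the same test file, a test could exercise the mocked generateObject roughly as follows. The model id 'gpt-4o', the prompt payload, and the as-any cast are illustrative assumptions; the real server.test.ts may obtain and call the model differently.

import { createOpenAI } from '@ai-sdk/openai';

it('resolves a structured answer with derived token usage', async () => {
  // createOpenAI is mocked above to return a model factory function
  const openai = createOpenAI({ apiKey: 'test-key' }) as any;
  const model = openai('gpt-4o'); // spreads mockModel plus defaultObjectGenerationMode
  const result = await model.generateObject({ prompt: 'hello' }); // hypothetical options payload

  expect(result.object.type).toBe('answer');
  expect(result.object.answer).toBe('This is a test response');
  // the mock derives token counts from byte lengths, so the totals stay consistent
  expect(result.usage.total_tokens).toBe(
    result.usage.prompt_tokens + result.usage.completion_tokens
  );
});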