From dc2c0cae5f0757858ed9a2c87a6a246ae5f76a45 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Wed, 5 Feb 2025 10:33:28 +0000
Subject: [PATCH] fix: handle undefined openai client in getModel

Co-Authored-By: Han Xiao

---
 src/utils/llm-client.ts | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/utils/llm-client.ts b/src/utils/llm-client.ts
index 2178c88..566c0a1 100644
--- a/src/utils/llm-client.ts
+++ b/src/utils/llm-client.ts
@@ -55,13 +55,14 @@ export class LLMClient {
     if (llmConfig.provider === 'gemini') {
       return this.geminiClient.getGenerativeModel(options);
     } else if (this.openaiClient) {
-      if (!this.openaiClient) {
+      const client = this.openaiClient;
+      if (!client) {
         throw new Error('OpenAI client not initialized. Set OPENAI_API_KEY and provider="openai" to use OpenAI.');
       }
       return {
-        ...this.openaiClient.chat.completions,
+        ...client.chat.completions,
         temperature: options.temperature,
-        generateContent: (prompt: string) => this.generateContent(this.openaiClient.chat.completions, prompt)
+        generateContent: (prompt: string) => this.generateContent(client.chat.completions, prompt)
       };
     }
     throw new Error('OpenAI client not initialized. Set OPENAI_API_KEY and provider="openai" to use OpenAI.');
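
For context, here is a minimal self-contained sketch of the TypeScript narrowing pattern this patch applies. The class and property names are illustrative stand-ins, not the project's actual types: the point is that a truthiness check on a mutable property like `this.openaiClient` does not carry into a closure created afterwards, whereas copying the property into a local `const` first makes the narrowing stick.

```typescript
// Illustrative sketch only — `Example` and its members are hypothetical.
class Example {
  private openaiClient?: { chat: { completions: object } };

  getModelBroken() {
    if (!this.openaiClient) throw new Error('not initialized');
    // TypeScript discards the narrowing of a mutable property inside a
    // deferred callback: by the time the arrow function runs, the property
    // could have been reassigned, so it is `possibly undefined` again here.
    return () => this.openaiClient!.chat.completions; // needs `!` to compile
  }

  getModelFixed() {
    const client = this.openaiClient; // snapshot into an immutable binding
    if (!client) throw new Error('not initialized');
    // `client` is a `const`, so the check above narrows it permanently,
    // and the narrowed type survives into the closure.
    return () => client.chat.completions; // no assertion needed
  }
}
```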