diff --git a/.gitignore b/.gitignore
index 764581c..f0caab4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,3 +39,4 @@ yarn-error.log*
 # Misc
 .DS_Store
 *.pem
+bun.lockb
\ No newline at end of file
diff --git a/src/ai/providers.ts b/src/ai/providers.ts
index b47414a..3f7c6fe 100644
--- a/src/ai/providers.ts
+++ b/src/ai/providers.ts
@@ -1,26 +1,44 @@
-import { createOpenAI } from '@ai-sdk/openai';
+import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai';
 import { getEncoding } from 'js-tiktoken';
 
 import { RecursiveCharacterTextSplitter } from './text-splitter';
 
-// Providers
+interface CustomOpenAIProviderSettings extends OpenAIProviderSettings {
+  baseURL?: string;
+}
+// Providers
 const openai = createOpenAI({
   apiKey: process.env.OPENAI_KEY!,
-});
+  baseURL: process.env.OPENAI_ENDPOINT || 'https://api.openai.com/v1',
+} as CustomOpenAIProviderSettings);
+
+const isCustomEndpoint =
+  process.env.OPENAI_ENDPOINT &&
+  process.env.OPENAI_ENDPOINT !== 'https://api.openai.com/v1';
+const customModel = process.env.OPENAI_MODEL;
 
 // Models
-export const gpt4Model = openai('gpt-4o', {
-  structuredOutputs: true,
-});
-export const gpt4MiniModel = openai('gpt-4o-mini', {
-  structuredOutputs: true,
-});
-export const o3MiniModel = openai('o3-mini', {
-  reasoningEffort: 'medium',
-  structuredOutputs: true,
-});
+export const gpt4Model = openai(
+  isCustomEndpoint && customModel ? customModel : 'gpt-4o',
+  {
+    structuredOutputs: true,
+  },
+);
+export const gpt4MiniModel = openai(
+  isCustomEndpoint && customModel ? customModel : 'gpt-4o-mini',
+  {
+    structuredOutputs: true,
+  },
+);
+export const o3MiniModel = openai(
+  isCustomEndpoint && customModel ? customModel : 'o3-mini',
+  {
+    reasoningEffort: 'medium',
+    structuredOutputs: true,
+  },
+);
 
 const MinChunkSize = 140;
 
 const encoder = getEncoding('o200k_base');