Merge pull request #29 from xuanmiss/fix_custom_openai

Fix: `custom_llm_provider` needs to be passed when using a self-hosted OpenAI-compatible model
This commit is contained in:
Dongle
2024-07-15 11:22:38 +08:00
committed by GitHub

View File

@@ -84,6 +84,10 @@ export function Setting(){
<Button
onClick={async () => {
const hide = message.loading('检测中...', 0);
const llmConfig = {
...form.getFieldValue("llm"),
custom_llm_provider: form.getFieldValue("format")
};
const res = await fetch(`${localServerBaseUrl}/llm`,
{
method: 'POST',
@@ -92,7 +96,7 @@ export function Setting(){
},
body: JSON.stringify(
{ "messages": [{ "role": "user", "content": "hello" }],
"llm_config": JSON.stringify(form.getFieldValue("llm"))}),
"llm_config": JSON.stringify(llmConfig)}),
}
)
hide();