From 7252ffd5ed3da1be18866af724813979d14da844 Mon Sep 17 00:00:00 2001 From: yrk <12787191+yrk15994109427@user.noreply.gitee.com> Date: Tue, 13 May 2025 17:41:33 +0800 Subject: [PATCH 1/4] Add support for ModelScope --- src/utils/config.py | 22 +++++++++++++++++++++- src/utils/llm_provider.py | 15 +++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/src/utils/config.py b/src/utils/config.py index b3d55fe..3695af4 100644 --- a/src/utils/config.py +++ b/src/utils/config.py @@ -59,5 +59,25 @@ model_names = { "Pro/THUDM/glm-4-9b-chat", ], "ibm": ["ibm/granite-vision-3.1-2b-preview", "meta-llama/llama-4-maverick-17b-128e-instruct-fp8", - "meta-llama/llama-3-2-90b-vision-instruct"] + "meta-llama/llama-3-2-90b-vision-instruct"], + "modelscope":[ + "Qwen/Qwen2.5-Coder-32B-Instruct", + "Qwen/Qwen2.5-Coder-14B-Instruct", + "Qwen/Qwen2.5-Coder-7B-Instruct", + "Qwen/Qwen2.5-72B-Instruct", + "Qwen/Qwen2.5-32B-Instruct", + "Qwen/Qwen2.5-14B-Instruct", + "Qwen/Qwen2.5-7B-Instruct", + "Qwen/QwQ-32B-Preview", + "Qwen/Qwen2.5-VL-3B-Instruct", + "Qwen/Qwen2.5-VL-7B-Instruct", + "Qwen/Qwen2.5-VL-32B-Instruct", + "Qwen/Qwen2.5-VL-72B-Instruct", + "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B", + "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B", + "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B", + "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", + "deepseek-ai/DeepSeek-R1", + "deepseek-ai/DeepSeek-V3", + ], } diff --git a/src/utils/llm_provider.py b/src/utils/llm_provider.py index c285e36..beadb1f 100644 --- a/src/utils/llm_provider.py +++ b/src/utils/llm_provider.py @@ -323,5 +323,20 @@ def get_llm_model(provider: str, **kwargs): model_name=kwargs.get("model_name", "Qwen/QwQ-32B"), temperature=kwargs.get("temperature", 0.0), ) + elif provider == "modelscope": + if not kwargs.get("api_key", ""): + api_key = os.getenv("MODELSCOPE_API_KEY", "") + else: + api_key = kwargs.get("api_key") + if not kwargs.get("base_url", ""): + base_url = os.getenv("MODELSCOPE_ENDPOINT", "") + else: + 
base_url = kwargs.get("base_url") + return ChatOpenAI( + api_key=api_key, + base_url=base_url, + model_name=kwargs.get("model_name", "Qwen/QwQ-32B"), + temperature=kwargs.get("temperature", 0.0), + ) else: raise ValueError(f"Unsupported provider: {provider}") From 760073d0ca2fb563476453ee3e3149ffb67ddb27 Mon Sep 17 00:00:00 2001 From: yrk <12787191+yrk15994109427@user.noreply.gitee.com> Date: Wed, 14 May 2025 10:45:15 +0800 Subject: [PATCH 2/4] Add Qwen3 series models --- src/utils/config.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/utils/config.py b/src/utils/config.py index 3695af4..509bc82 100644 --- a/src/utils/config.py +++ b/src/utils/config.py @@ -79,5 +79,12 @@ model_names = { "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B", "deepseek-ai/DeepSeek-R1", "deepseek-ai/DeepSeek-V3", + "Qwen/Qwen3-1.7B", + "Qwen/Qwen3-4B", + "Qwen/Qwen3-8B", + "Qwen/Qwen3-14B", + "Qwen/Qwen3-30B-A3B", + "Qwen/Qwen3-32B", + "Qwen/Qwen3-235B-A22B", ], } From cc9c2e2299949bf413ea302349565afa67161716 Mon Sep 17 00:00:00 2001 From: Tayyab Akmal <62791376+tayyabakmal1@users.noreply.github.com> Date: Fri, 16 May 2025 21:14:30 +0500 Subject: [PATCH 3/4] 0.1.48 Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f562733..f705524 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -browser-use==0.1.47 +browser-use==0.1.48 pyperclip==1.9.0 gradio==5.27.0 json-repair From 2a03d7f7056524bc7ceb5ae50d7dddedc67833ab Mon Sep 17 00:00:00 2001 From: knowlet Date: Sat, 17 May 2025 09:00:20 +0800 Subject: [PATCH 4/4] fix: yields provider when agent settings change Yields the provider when the agent settings change in order to run the callback. Also adds a short sleep to wait for Gradio UI callback. 
Fix Planner LLM Model Name not loaded correctly when Load Config #589 Signed-off-by: knowlet --- src/webui/webui_manager.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/webui/webui_manager.py b/src/webui/webui_manager.py index 542d387..0a9d5e1 100644 --- a/src/webui/webui_manager.py +++ b/src/webui/webui_manager.py @@ -7,6 +7,7 @@ from datetime import datetime from typing import Optional, Dict, List import uuid import asyncio +import time from gradio.components import Component from browser_use.browser.browser import Browser @@ -108,6 +109,9 @@ class WebuiManager: update_components[comp] = comp.__class__(value=comp_val, type="messages") else: update_components[comp] = comp.__class__(value=comp_val) + if comp_id == "agent_settings.planner_llm_provider": + yield update_components # yield provider, let callback run + time.sleep(0.1) # wait for Gradio UI callback config_status = self.id_to_component["load_save_config.config_status"] update_components.update(