mirror of
https://github.com/browser-use/web-ui.git
synced 2026-03-22 11:17:17 +08:00
Unbound Integration
+ remove my defaults & dotenv version squash with model list minor fixes
This commit is contained in:
@@ -24,6 +24,9 @@ ALIBABA_API_KEY=
|
||||
MOONSHOT_ENDPOINT=https://api.moonshot.cn/v1
|
||||
MOONSHOT_API_KEY=
|
||||
|
||||
UNBOUND_ENDPOINT=https://api.getunbound.ai
|
||||
UNBOUND_API_KEY=
|
||||
|
||||
# Set to false to disable anonymized telemetry
|
||||
ANONYMIZED_TELEMETRY=false
|
||||
|
||||
|
||||
@@ -4,4 +4,4 @@ gradio==5.10.0
|
||||
json-repair
|
||||
langchain-mistralai==0.2.4
|
||||
langchain-google-genai==2.0.8
|
||||
MainContentExtractor==0.0.4
|
||||
MainContentExtractor==0.0.4
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from openai import OpenAI
|
||||
import pdb
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
from langchain_openai import ChatOpenAI
|
||||
from langchain_core.globals import get_llm_cache
|
||||
from langchain_core.language_models.base import (
|
||||
@@ -29,6 +31,9 @@ from langchain_ollama import ChatOllama
|
||||
from langchain_core.output_parsers.base import OutputParserLike
|
||||
from langchain_core.runnables import Runnable, RunnableConfig
|
||||
from langchain_core.tools import BaseTool
|
||||
from pydantic import Field, PrivateAttr
|
||||
import requests
|
||||
import urllib3
|
||||
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
@@ -103,6 +108,72 @@ class DeepSeekR1ChatOpenAI(ChatOpenAI):
|
||||
return AIMessage(content=content, reasoning_content=reasoning_content)
|
||||
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
class UnboundChatOpenAI(ChatOpenAI):
    """Chat model that routes completions through Unbound's OpenAI-compatible API.

    Endpoint and credentials come from the ``base_url``/``api_key`` kwargs,
    falling back to the ``UNBOUND_ENDPOINT`` / ``UNBOUND_API_KEY`` environment
    variables. Requests are sent with a dedicated ``requests.Session`` rather
    than the inherited langchain transport.
    """

    # Dedicated HTTP session used by invoke(); TLS verification is disabled
    # on it (see __init__) -- NOTE(review): confirm this is intentional.
    _session: requests.Session = PrivateAttr()

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Resolve endpoint/key (kwargs win over env vars) and build the client.

        Raises:
            ValueError: if no API key is supplied via kwargs or environment.
        """
        # Use `or` rather than dict.get's default so an explicitly passed
        # falsy value (None, "") still falls back to the environment.
        kwargs["base_url"] = kwargs.get("base_url") or os.getenv("UNBOUND_ENDPOINT", "https://api.getunbound.ai")
        kwargs["api_key"] = kwargs.get("api_key") or os.getenv("UNBOUND_API_KEY")
        if not kwargs["api_key"]:
            raise ValueError("UNBOUND_API_KEY environment variable is not set")
        super().__init__(*args, **kwargs)

        self.client = OpenAI(
            base_url=kwargs["base_url"],
            api_key=kwargs["api_key"],
        )

        # SECURITY NOTE: certificate verification is disabled for every
        # request this model makes, and the matching urllib3 warning is
        # suppressed. This exposes traffic to MITM -- keep only if the
        # Unbound endpoint genuinely requires it.
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        self._session = requests.Session()
        self._session.verify = False

    def invoke(
        self,
        input: LanguageModelInput,
        config: Optional[RunnableConfig] = None,
        *,
        stop: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> AIMessage:
        """Send the conversation to Unbound's chat-completions endpoint.

        Args:
            input: Either a plain prompt string or a sequence of langchain
                messages (System/AI messages keep their roles; anything else
                is sent as a user message).
            config: Unused; kept for Runnable interface compatibility.
            stop: Optional stop sequences forwarded to the API.

        Returns:
            AIMessage containing the first choice's content.

        Raises:
            requests.HTTPError: on a non-2xx API response.
        """
        message_history = self._to_message_dicts(input)

        payload: dict[str, Any] = {
            "model": self.model_name or "gpt-4o-mini",
            "messages": message_history,
        }
        if stop:
            payload["stop"] = stop

        # The OpenAI client normalizes base_url with a trailing "/";
        # strip it so the joined URL has no double slash.
        base = str(self.client.base_url).rstrip("/")
        response = self._session.post(
            f"{base}/v1/chat/completions",
            headers={"Authorization": f"Bearer {self.client.api_key}", "Content-Type": "application/json"},
            json=payload,
        )
        response.raise_for_status()
        data = response.json()
        content = data["choices"][0]["message"]["content"]
        return AIMessage(content=content)

    @staticmethod
    def _to_message_dicts(input: LanguageModelInput) -> list[dict[str, Any]]:
        """Convert a prompt string or message sequence to OpenAI message dicts."""
        if isinstance(input, str):
            # A bare string would otherwise be iterated character by character.
            return [{"role": "user", "content": input}]
        message_history: list[dict[str, Any]] = []
        for input_ in input:
            if isinstance(input_, SystemMessage):
                role = "system"
            elif isinstance(input_, AIMessage):
                role = "assistant"
            else:
                role = "user"
            message_history.append({"role": role, "content": input_.content})
        return message_history

    async def ainvoke(
        self,
        input: LanguageModelInput,
        config: Optional[RunnableConfig] = None,
        *,
        stop: Optional[list[str]] = None,
        **kwargs: Any,
    ) -> AIMessage:
        """Async wrapper around invoke().

        The blocking HTTP call is pushed to a worker thread so it does not
        stall the running event loop.
        """
        import asyncio

        return await asyncio.to_thread(self.invoke, input, config, stop=stop, **kwargs)
|
||||
|
||||
|
||||
class DeepSeekR1ChatOllama(ChatOllama):
|
||||
|
||||
async def ainvoke(
|
||||
|
||||
@@ -12,7 +12,7 @@ from langchain_ollama import ChatOllama
|
||||
from langchain_openai import AzureChatOpenAI, ChatOpenAI
|
||||
import gradio as gr
|
||||
|
||||
from .llm import DeepSeekR1ChatOpenAI, DeepSeekR1ChatOllama
|
||||
from .llm import DeepSeekR1ChatOpenAI, DeepSeekR1ChatOllama, UnboundChatOpenAI
|
||||
|
||||
PROVIDER_DISPLAY_NAMES = {
|
||||
"openai": "OpenAI",
|
||||
@@ -21,7 +21,8 @@ PROVIDER_DISPLAY_NAMES = {
|
||||
"deepseek": "DeepSeek",
|
||||
"google": "Google",
|
||||
"alibaba": "Alibaba",
|
||||
"moonshot": "MoonShot"
|
||||
"moonshot": "MoonShot",
|
||||
"unbound": "Unbound AI"
|
||||
}
|
||||
|
||||
|
||||
@@ -151,7 +152,6 @@ def get_llm_model(provider: str, **kwargs):
|
||||
base_url=base_url,
|
||||
api_key=api_key,
|
||||
)
|
||||
|
||||
elif provider == "moonshot":
|
||||
return ChatOpenAI(
|
||||
model=kwargs.get("model_name", "moonshot-v1-32k-vision-preview"),
|
||||
@@ -159,6 +159,18 @@ def get_llm_model(provider: str, **kwargs):
|
||||
base_url=os.getenv("MOONSHOT_ENDPOINT"),
|
||||
api_key=os.getenv("MOONSHOT_API_KEY"),
|
||||
)
|
||||
elif provider == "unbound":
|
||||
if not kwargs.get("base_url", ""):
|
||||
base_url = os.getenv("UNBOUND_ENDPOINT", "https://api.getunbound.ai")
|
||||
else:
|
||||
base_url = kwargs.get("base_url")
|
||||
|
||||
return UnboundChatOpenAI(
|
||||
model=kwargs.get("model_name", "gpt-4o-mini"),
|
||||
temperature=kwargs.get("temperature", 0.0),
|
||||
base_url=base_url,
|
||||
api_key=api_key,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unsupported provider: {provider}")
|
||||
|
||||
@@ -176,6 +188,7 @@ model_names = {
|
||||
"mistral": ["mixtral-large-latest", "mistral-large-latest", "mistral-small-latest", "ministral-8b-latest"],
|
||||
"alibaba": ["qwen-plus", "qwen-max", "qwen-turbo", "qwen-long"],
|
||||
"moonshot": ["moonshot-v1-32k-vision-preview", "moonshot-v1-8k-vision-preview"],
|
||||
"unbound": ["gemini-2.0-flash","gpt-4o-mini", "gpt-4o", "gpt-4.5-preview"]
|
||||
}
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user