Merge branch 'main' into main

This commit is contained in:
Richardson Gunde
2025-01-05 23:18:56 +05:30
committed by GitHub
5 changed files with 22 additions and 6 deletions

View File

@@ -6,7 +6,7 @@ This project builds upon the foundation of the [browser-use](https://github.com/
1. **A Brand New WebUI:** We offer a comprehensive web interface that supports a wide range of `browser-use` functionalities. This UI is designed to be user-friendly and enables easy interaction with the browser agent.
2. **Expanded LLM Support:** We've integrated support for various Large Language Models (LLMs), including: Gemini, OpenAI, Azure OpenAI, Anthropic, DeepSeek etc. And we plan to add support for even more models in the future.
2. **Expanded LLM Support:** We've integrated support for various Large Language Models (LLMs), including: Gemini, OpenAI, Azure OpenAI, Anthropic, DeepSeek, Ollama, etc. And we plan to add support for even more models in the future.
3. **Custom Browser Support:** You can use your own browser with our tool, eliminating the need to re-login to sites or deal with other authentication challenges. This feature also supports high-definition screen recording.

View File

@@ -3,3 +3,4 @@ langchain-google-genai
pyperclip
gradio
langchain-ollama

View File

@@ -105,9 +105,15 @@ async def test_browser_use_custom():
# api_key=os.getenv("GOOGLE_API_KEY", "")
# )
# llm = utils.get_llm_model(
# provider="deepseek",
# model_name="deepseek-chat",
# temperature=0.8
# )
llm = utils.get_llm_model(
provider="deepseek",
model_name="deepseek-chat",
provider="ollama",
model_name="qwen2.5:7b",
temperature=0.8
)

View File

@@ -106,7 +106,6 @@ def test_deepseek_model():
base_url=os.getenv("DEEPSEEK_ENDPOINT", ""),
api_key=os.getenv("DEEPSEEK_API_KEY", "")
)
pdb.set_trace()
message = HumanMessage(
content=[
{"type": "text", "text": "who are you?"}
@@ -116,8 +115,17 @@ def test_deepseek_model():
print(ai_msg.content)
def test_ollama_model():
from langchain_ollama import ChatOllama
llm = ChatOllama(model="qwen2.5:7b")
ai_msg = llm.invoke("Sing a ballad of LangChain.")
print(ai_msg.content)
if __name__ == '__main__':
# test_openai_model()
# test_gemini_model()
# test_azure_openai_model()
test_deepseek_model()
# test_deepseek_model()
test_ollama_model()

View File

@@ -260,6 +260,7 @@ def create_ui(theme_name="Ocean"):
with gr.Blocks(title="Browser Use WebUI", theme=theme_map[theme_name], css=css) as demo:
with gr.Row():
gr.Markdown(
"""
# 🌐 Browser Use WebUI
@@ -303,7 +304,7 @@ def create_ui(theme_name="Ocean"):
with gr.TabItem("🔧 LLM Configuration", id=2):
with gr.Group():
llm_provider = gr.Dropdown(
["anthropic", "openai", "gemini", "azure_openai", "deepseek"],
["anthropic", "openai", "gemini", "azure_openai", "deepseek", "ollama"],
label="LLM Provider",
value="gemini",
info="Select your preferred language model provider"