add ollama

This commit is contained in:
warmshao
2025-01-04 10:58:48 +08:00
parent 2c29029ff8
commit e54c1fda0b
5 changed files with 30 additions and 8 deletions

View File

@@ -1,4 +1,5 @@
browser-use
langchain-google-genai
pyperclip
gradio
gradio
langchain-ollama

View File

@@ -11,6 +11,7 @@ import os
from langchain_openai import ChatOpenAI, AzureChatOpenAI
from langchain_anthropic import ChatAnthropic
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_ollama import ChatOllama
def get_llm_model(provider: str, **kwargs):
@@ -39,7 +40,7 @@ def get_llm_model(provider: str, **kwargs):
)
elif provider == 'openai':
if not kwargs.get("base_url", ""):
base_url = "https://api.openai.com/v1"
base_url = os.getenv("OPENAI_ENDPOINT", "https://api.openai.com/v1")
else:
base_url = kwargs.get("base_url")
@@ -66,7 +67,7 @@ def get_llm_model(provider: str, **kwargs):
api_key = kwargs.get("api_key")
return ChatOpenAI(
model=kwargs.get("model_name", 'gpt-4o'),
model=kwargs.get("model_name", 'deepseek-chat'),
temperature=kwargs.get("temperature", 0.0),
base_url=base_url,
api_key=api_key
@@ -81,6 +82,11 @@ def get_llm_model(provider: str, **kwargs):
temperature=kwargs.get("temperature", 0.0),
google_api_key=api_key,
)
elif provider == 'ollama':
return ChatOllama(
model=kwargs.get("model_name", 'qwen2.5:7b'),
temperature=kwargs.get("temperature", 0.0),
)
elif provider == "azure_openai":
if not kwargs.get("base_url", ""):
base_url = os.getenv("AZURE_OPENAI_ENDPOINT", "")

View File

@@ -105,9 +105,15 @@ async def test_browser_use_custom():
# api_key=os.getenv("GOOGLE_API_KEY", "")
# )
# llm = utils.get_llm_model(
# provider="deepseek",
# model_name="deepseek-chat",
# temperature=0.8
# )
llm = utils.get_llm_model(
provider="deepseek",
model_name="deepseek-chat",
provider="ollama",
model_name="qwen2.5:7b",
temperature=0.8
)

View File

@@ -106,7 +106,6 @@ def test_deepseek_model():
base_url=os.getenv("DEEPSEEK_ENDPOINT", ""),
api_key=os.getenv("DEEPSEEK_API_KEY", "")
)
pdb.set_trace()
message = HumanMessage(
content=[
{"type": "text", "text": "who are you?"}
@@ -116,8 +115,17 @@ def test_deepseek_model():
print(ai_msg.content)
# Smoke-test: send one prompt to a locally served Ollama model and print the reply.
# NOTE(review): requires a running Ollama daemon with the qwen2.5:7b model pulled —
# this is live I/O, not an assertable unit test.
def test_ollama_model():
# Imported inside the function so the module still loads when langchain-ollama
# is not installed (it is an optional provider added by this commit).
from langchain_ollama import ChatOllama
# ChatOllama defaults to the local Ollama endpoint (http://localhost:11434).
llm = ChatOllama(model="qwen2.5:7b")
ai_msg = llm.invoke("Sing a ballad of LangChain.")
print(ai_msg.content)
if __name__ == '__main__':
# test_openai_model()
# test_gemini_model()
# test_azure_openai_model()
test_deepseek_model()
# test_deepseek_model()
test_ollama_model()

View File

@@ -255,7 +255,8 @@ def main():
use_vision = gr.Checkbox(label="use vision", value=True)
with gr.Row():
llm_provider = gr.Dropdown(
["anthropic", "openai", "gemini", "azure_openai", "deepseek"], label="LLM Provider", value="gemini"
["anthropic", "openai", "gemini", "azure_openai", "deepseek", "ollama"], label="LLM Provider",
value="gemini"
)
llm_model_name = gr.Textbox(label="LLM Model Name", value="gemini-2.0-flash-exp")
llm_temperature = gr.Number(label="LLM Temperature", value=1.0)