minor update

Wendong 2025-03-20 16:19:27 +08:00
parent 63c3057bf4
commit 42961908e2
16 changed files with 99 additions and 47 deletions

View File

@@ -384,7 +384,7 @@ You can run OWL agent with your own task by modifying the `examples/run.py` script
```python
# Define your own task
question = "Task description here."
task = "Task description here."
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
@@ -396,7 +396,7 @@ For uploading files, simply provide the file path along with your question:
```python
# Task with a local file (e.g., file path: `tmp/example.docx`)
question = "What is in the given DOCX file? Here is the file path: tmp/example.docx"
task = "What is in the given DOCX file? Here is the file path: tmp/example.docx"
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
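# With this commit the example scripts also read the task from the first
# command-line argument, so it can be passed without editing the file, e.g.:
#   python examples/run.py "What is in the given DOCX file? Here is the file path: tmp/example.docx"
# A minimal sketch of the pattern the scripts share (standard library only;
# afterwards the society is built and run exactly as above):
import sys

default_task = "Task description here."
task = sys.argv[1] if len(sys.argv) > 1 else default_task  # the CLI value wins when given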

View File

@@ -377,7 +377,7 @@ python examples/run_ollama.py
```python
# Define your own task
question = "Task description here."
task = "Task description here."
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
@@ -389,7 +389,7 @@ print(f"\033[94mAnswer: {answer}\033[0m")
```python
# 处理本地文件(例如,文件路径为 `tmp/example.docx`
question = "给定的 DOCX 文件中有什么内容文件路径如下tmp/example.docx"
task = "给定的 DOCX 文件中有什么内容文件路径如下tmp/example.docx"
society = construct_society(task)
answer, chat_history, token_count = run_society(society)

View File

@@ -11,6 +11,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import sys
import pathlib
from dotenv import load_dotenv
from camel.models import ModelFactory
from camel.toolkits import (
@@ -29,8 +31,6 @@ from camel.societies import RolePlaying
from owl.utils import run_society, DocumentProcessingToolkit
import pathlib
base_dir = pathlib.Path(__file__).parent.parent
env_path = base_dir / "owl" / ".env"
load_dotenv(dotenv_path=str(env_path))
@@ -60,7 +60,7 @@ def construct_society(question: str) -> RolePlaying:
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
@@ -130,11 +130,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Default research question
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result
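Every example script now shares this `sys.argv[1]` override, which means a multi-word task has to be passed as a single quoted argument. A minimal sketch of an alternative that joins all arguments instead (illustrative only; the commit itself uses just `sys.argv[1]`):

```python
import sys

# Commit behaviour: python examples/run.py "Find one product on Amazon.com for coders."
# Illustrative variant: join every argument so the quotes become optional.
default_task = (
    "Navigate to Amazon.com and identify one product that is attractive to coders."
)
task = " ".join(sys.argv[1:]) if len(sys.argv) > 1 else default_task
print(f"Running task: {task}")
```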

View File

@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import os
import sys
from dotenv import load_dotenv
from camel.configs import ChatGPTConfig
from camel.models import ModelFactory
@@ -58,7 +59,7 @@ def construct_society(question: str) -> OwlRolePlaying:
models = {
"user": ModelFactory.create(**base_model_config),
"assistant": ModelFactory.create(**base_model_config),
"web": ModelFactory.create(**base_model_config),
"browsing": ModelFactory.create(**base_model_config),
"planning": ModelFactory.create(**base_model_config),
"image": ModelFactory.create(**base_model_config),
}
@@ -104,10 +105,14 @@ def construct_society(question: str) -> OwlRolePlaying:
def main():
r"""Main function to run the OWL system with Azure OpenAI."""
# Example question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -110,7 +110,7 @@ def construct_society() -> RolePlaying:
model_type=selected_model_type,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=selected_model_platform,
model_type=selected_model_type,
model_config_dict={"temperature": 0},
@@ -143,7 +143,6 @@ def construct_society() -> RolePlaying:
headless=False,
web_agent_model=models["browsing"],
planning_agent_model=models["planning"],
output_language="Chinese",
).get_tools(),
*VideoAnalysisToolkit(model=models["video"]).get_tools(),
*CodeExecutionToolkit(sandbox="subprocess", verbose=True).get_tools(),

View File

@@ -17,7 +17,7 @@
# You can obtain your API key from DeepSeek platform: https://platform.deepseek.com/api_keys
# Set it as DEEPSEEK_API_KEY="your-api-key" in your .env file or add it to your environment variables
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
@@ -102,10 +102,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "搜索OWL项目最近的新闻并生成一篇报告最后保存到本地。"
default_task = "搜索OWL项目最近的新闻并生成一篇报告最后保存到本地。"
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -71,7 +71,7 @@ def main():
model_type=ModelType.GPT_4O,
model_config_dict=ChatGPTConfig(temperature=0, top_p=1).as_dict(),
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O,
model_config_dict=ChatGPTConfig(temperature=0, top_p=1).as_dict(),

View File

@@ -26,6 +26,7 @@ To use this module:
3. Run with: python -m examples.run_groq
"""
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
from camel.toolkits import (
@@ -70,7 +71,7 @@ def construct_society(question: str) -> OwlRolePlaying:
model_type=ModelType.GROQ_LLAMA_3_3_70B, # Main assistant needs tool capability
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.GROQ,
model_type=ModelType.GROQ_LLAMA_3_3_70B, # Web browsing requires tool usage
model_config_dict={"temperature": 0},
@@ -141,13 +142,18 @@ def construct_society(question: str) -> OwlRolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Construct and run the society
# Note: This configuration uses GROQ_LLAMA_3_3_70B for tool-intensive roles (assistant, web, planning, video, image)
# and GROQ_MIXTRAL_8_7B for document processing. GROQ_LLAMA_3_1_8B is used only for the user role
# which doesn't require tool usage capabilities.
society = construct_society(question)
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -73,6 +73,7 @@ Note:
"""
import asyncio
import sys
from pathlib import Path
from typing import List
@@ -146,15 +147,19 @@ async def main():
try:
await mcp_toolkit.connect()
question = (
# Default task
default_task = (
"I'd like a academic report about Andrew Ng, including "
"his research direction, published papers (At least 3),"
" institutions, etc. "
)
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Connect to all MCP toolkits
tools = [*mcp_toolkit.get_tools()]
society = await construct_society(question, tools)
society = await construct_society(task, tools)
answer, chat_history, token_count = await arun_society(society)
print(f"\033[94mAnswer: {answer}\033[0m")

View File

@@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
@@ -58,7 +59,7 @@ def construct_society(question: str) -> RolePlaying:
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
@@ -106,11 +107,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Default research question
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -13,6 +13,7 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# run_ollama.py by tj-scripts (https://github.com/tj-scripts)
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
from camel.toolkits import (
@@ -64,7 +65,7 @@ def construct_society(question: str) -> RolePlaying:
url="http://localhost:11434/v1",
model_config_dict={"temperature": 0.2, "max_tokens": 1000000},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OLLAMA,
model_type="llava:latest",
url="http://localhost:11434/v1",
@@ -124,11 +125,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Default research question
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import os
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
@@ -64,7 +65,7 @@ def construct_society(question: str) -> RolePlaying:
url="https://dashscope.aliyuncs.com/compatible-mode/v1",
model_config_dict={"temperature": 0.4, "max_tokens": 4096},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
model_type="qwen-vl-max",
api_key=os.getenv("QWEN_API_KEY"),
@@ -128,10 +129,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
default_task = "Navigate to Amazon.com and identify one product that is attractive to coders. Please provide me with the product name and price. No need to verify your answer."
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -17,7 +17,7 @@
# Set it as QWEN_API_KEY="your-api-key" in your .env file or add it to your environment variables
from dotenv import load_dotenv
import sys
from camel.models import ModelFactory
from camel.toolkits import BrowserToolkit, SearchToolkit, FileWriteToolkit
from camel.types import ModelPlatformType, ModelType
@@ -101,9 +101,14 @@ def construct_society(question: str) -> RolePlaying:
# Example case
question = "浏览亚马逊并找出一款对程序员有吸引力的产品。请提供产品名称和价格"
default_task = "浏览亚马逊并找出一款对程序员有吸引力的产品。请提供产品名称和价格"
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
print(f"\033[94mAnswer: {answer}\033[0m")

View File

@@ -16,6 +16,7 @@
# You can obtain your API key from Bailian platform: bailian.console.aliyun.com
# Set it as QWEN_API_KEY="your-api-key" in your .env file or add it to your environment variables
import sys
from dotenv import load_dotenv
from camel.models import ModelFactory
from camel.toolkits import (
@@ -67,7 +68,7 @@ def construct_society(question: str) -> RolePlaying:
model_type=ModelType.QWEN_MAX,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.QWEN,
model_type=ModelType.QWEN_VL_MAX,
model_config_dict={"temperature": 0},
@@ -140,10 +141,13 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = "浏览亚马逊并找出一款对程序员有吸引力的产品。请提供产品名称和价格"
default_task = "浏览亚马逊并找出一款对程序员有吸引力的产品。请提供产品名称和价格"
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result

View File

@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from dotenv import load_dotenv
import sys
import os
from camel.models import ModelFactory
from camel.toolkits import (
@@ -58,7 +59,7 @@ def construct_society(question: str) -> RolePlaying:
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
@@ -108,13 +109,16 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = f"""Open Google Search, summarize the number of GitHub stars, forks, etc., of the camel framework of camel-ai,
default_task = f"""Open Google Search, summarize the number of GitHub stars, forks, etc., of the camel framework of camel-ai,
and write the numbers into a Python file using the plot package,
save it to "+{os.path.join(base_dir, 'final_output')}+",
and execute the Python file with the local terminal to display the graph for me."""
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result
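The default task here embeds literal `"+` and `+"` characters around the output path inside an f-string, which read like leftovers from string concatenation. A minimal sketch of an equivalent task string without them (illustrative, assuming the intent is simply to interpolate the path):

```python
import os

base_dir = os.path.dirname(os.path.abspath(__file__))  # stand-in for the script's base_dir
output_dir = os.path.join(base_dir, "final_output")

default_task = (
    "Open Google Search, summarize the number of GitHub stars, forks, etc., of the "
    "camel framework of camel-ai, write the numbers into a Python file using the plot "
    f"package, save it to {output_dir}, and execute the Python file with the local "
    "terminal to display the graph for me."
)
print(default_task)
```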

View File

@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from dotenv import load_dotenv
import sys
import os
from camel.models import ModelFactory
from camel.toolkits import (
@@ -58,7 +59,7 @@ def construct_society(question: str) -> RolePlaying:
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
),
"web": ModelFactory.create(
"browsing": ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_4O,
model_config_dict={"temperature": 0},
@@ -108,11 +109,14 @@ def construct_society(question: str) -> RolePlaying:
def main():
r"""Main function to run the OWL system with an example question."""
# Example research question
question = f"""打开百度搜索总结一下camel-ai的camel框架的github star、fork数目等并把数字用plot包写成python文件保存到"+{os.path.join
default_task = f"""打开百度搜索总结一下camel-ai的camel框架的github star、fork数目等并把数字用plot包写成python文件保存到"+{os.path.join
(base_dir, 'final_output')}+"用本地终端执行python文件显示图出来给我"""
# Override default task if command line argument is provided
task = sys.argv[1] if len(sys.argv) > 1 else default_task
# Construct and run the society
society = construct_society(question)
society = construct_society(task)
answer, chat_history, token_count = run_society(society)
# Output the result