Update default model to sonnet 3.7 in all applicable places (#8489)

Co-authored-by: openhands <openhands@all-hands.dev>
mamoodi 2025-05-14 10:55:34 -04:00 committed by GitHub
parent 1b57fd4d1e
commit 08a790c4ca
7 changed files with 27 additions and 22 deletions

View File

@@ -1,8 +1,8 @@
# Development Guide
This guide is for people working on OpenHands and editing the source code.
-If you wish to contribute your changes, check out the [CONTRIBUTING.md](https://github.com/All-Hands-AI/OpenHands/blob/main/CONTRIBUTING.md) on how to clone and setup the project initially before moving on.
-Otherwise, you can clone the OpenHands project directly.
+If you wish to contribute your changes, check out the [CONTRIBUTING.md](https://github.com/All-Hands-AI/OpenHands/blob/main/CONTRIBUTING.md) on how to clone and setup the project
+initially before moving on. Otherwise, you can clone the OpenHands project directly.
## Start the Server for Development
@@ -21,7 +21,8 @@ Make sure you have all these dependencies installed before moving on to `make bu
#### Develop without sudo access
-If you want to develop without system admin/sudo access to upgrade/install `Python` and/or `NodeJs`, you can use `conda` or `mamba` to manage the packages for you:
+If you want to develop without system admin/sudo access to upgrade/install `Python` and/or `NodeJs`, you can use
+`conda` or `mamba` to manage the packages for you:
```bash
# Download and install Mamba (a faster version of conda)
@@ -36,7 +37,8 @@ mamba install conda-forge::poetry
### 2. Build and Setup The Environment
-Begin by building the project which includes setting up the environment and installing dependencies. This step ensures that OpenHands is ready to run on your system:
+Begin by building the project which includes setting up the environment and installing dependencies. This step ensures
+that OpenHands is ready to run on your system:
```bash
make build
@@ -45,8 +47,6 @@ make build
### 3. Configuring the Language Model
OpenHands supports a diverse array of Language Models (LMs) through the powerful [litellm](https://docs.litellm.ai) library.
-By default, we've chosen Claude Sonnet 3.5 as our go-to model, but the world is your oyster! You can unleash the
-potential of any other LM that piques your interest.
To configure the LM of your choice, run:
@@ -54,9 +54,12 @@ To configure the LM of your choice, run:
make setup-config
```
-This command will prompt you to enter the LLM API key, model name, and other variables ensuring that OpenHands is tailored to your specific needs. Note that the model name will apply only when you run headless. If you use the UI, please set the model in the UI.
+This command will prompt you to enter the LLM API key, model name, and other variables ensuring that OpenHands is
+tailored to your specific needs. Note that the model name will apply only when you run headless. If you use the UI,
+please set the model in the UI.
-Note: If you have previously run OpenHands using the docker command, you may have already set some environmental variables in your terminal. The final configurations are set from highest to lowest priority:
+Note: If you have previously run OpenHands using the docker command, you may have already set some environmental
+variables in your terminal. The final configurations are set from highest to lowest priority:
Environment variables > config.toml variables > default variables
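To make that priority order concrete, here is a minimal sketch; the `[llm]` section keys shown for config.toml are an assumption based on the config template and are not part of this diff:

```bash
# Highest priority: environment variables set in the shell.
export LLM_MODEL="anthropic/claude-3-7-sonnet-20250219"
export LLM_API_KEY="your-llm-api-key"

# Next: values from config.toml, e.g. (assumed key names):
#   [llm]
#   model = "anthropic/claude-3-7-sonnet-20250219"
#   api_key = "your-llm-api-key"

# Last: if neither is set, OpenHands falls back to its built-in defaults.
```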
**Note on Alternative Models:**
@@ -74,13 +77,15 @@ make run
#### Option B: Individual Server Startup
-- **Start the Backend Server:** If you prefer, you can start the backend server independently to focus on backend-related tasks or configurations.
+- **Start the Backend Server:** If you prefer, you can start the backend server independently to focus on
+backend-related tasks or configurations.
```bash
make start-backend
```
-- **Start the Frontend Server:** Similarly, you can start the frontend server on its own to work on frontend-related components or interface enhancements.
+- **Start the Frontend Server:** Similarly, you can start the frontend server on its own to work on frontend-related
+components or interface enhancements.
```bash
make start-frontend
```
@@ -115,8 +120,8 @@ poetry run pytest ./tests/unit/test_*.py
### 9. Use existing Docker image
-To reduce build time (e.g., if no changes were made to the client-runtime component), you can use an existing Docker container image by
-setting the SANDBOX_RUNTIME_CONTAINER_IMAGE environment variable to the desired Docker image.
+To reduce build time (e.g., if no changes were made to the client-runtime component), you can use an existing Docker
+container image by setting the SANDBOX_RUNTIME_CONTAINER_IMAGE environment variable to the desired Docker image.
Example: `export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/all-hands-ai/runtime:0.38-nikolaik`
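Putting the pieces together, a short sketch of reusing a prebuilt runtime image and then starting OpenHands, assuming the image tag above is still current:

```bash
# Reuse a prebuilt runtime image instead of rebuilding the client runtime locally.
export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/all-hands-ai/runtime:0.38-nikolaik

# Then start the full application as usual.
make run
```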

View File

@@ -48,7 +48,7 @@ describe("Content", () => {
await waitFor(() => {
expect(provider).toHaveValue("Anthropic");
expect(model).toHaveValue("claude-3-5-sonnet-20241022");
expect(model).toHaveValue("claude-3-7-sonnet-20250219");
expect(apiKey).toHaveValue("");
expect(apiKey).toHaveProperty("placeholder", "");
@@ -135,7 +135,7 @@ describe("Content", () => {
);
const condensor = screen.getByTestId("enable-memory-condenser-switch");
expect(model).toHaveValue("anthropic/claude-3-5-sonnet-20241022");
expect(model).toHaveValue("anthropic/claude-3-7-sonnet-20250219");
expect(baseUrl).toHaveValue("");
expect(apiKey).toHaveValue("");
expect(apiKey).toHaveProperty("placeholder", "");
@@ -542,7 +542,7 @@ describe("Form submission", () => {
// select model
await userEvent.click(model);
-const modelOption = screen.getByText("claude-3-5-sonnet-20241022");
+const modelOption = screen.getByText("claude-3-7-sonnet-20250219");
await userEvent.click(modelOption);
const submitButton = screen.getByTestId("submit-button");
@@ -550,7 +550,7 @@
expect(saveSettingsSpy).toHaveBeenCalledWith(
expect.objectContaining({
llm_model: "anthropic/claude-3-5-sonnet-20241022",
llm_model: "anthropic/claude-3-7-sonnet-20250219",
llm_base_url: "",
confirmation_mode: false,
}),

View File

@@ -92,7 +92,7 @@ const openHandsHandlers = [
"gpt-4o",
"gpt-4o-mini",
"anthropic/claude-3.5",
"anthropic/claude-3-5-sonnet-20241022",
"anthropic/claude-3-7-sonnet-20250219",
]),
),

View File

@@ -304,9 +304,9 @@ function LlmSettingsScreen() {
name="llm-custom-model-input"
label={t(I18nKey.SETTINGS$CUSTOM_MODEL)}
defaultValue={
settings.LLM_MODEL || "anthropic/claude-3-5-sonnet-20241022"
settings.LLM_MODEL || "anthropic/claude-3-7-sonnet-20250219"
}
placeholder="anthropic/claude-3-5-sonnet-20241022"
placeholder="anthropic/claude-3-7-sonnet-20250219"
type="text"
className="w-[680px]"
onChange={handleCustomModelIsDirty}

View File

@@ -3,7 +3,7 @@ import { Settings } from "#/types/settings";
export const LATEST_SETTINGS_VERSION = 5;
export const DEFAULT_SETTINGS: Settings = {
LLM_MODEL: "anthropic/claude-3-5-sonnet-20241022",
LLM_MODEL: "anthropic/claude-3-7-sonnet-20250219",
LLM_BASE_URL: "",
AGENT: "CodeActAgent",
LANGUAGE: "en",

View File

@@ -109,7 +109,7 @@ export GIT_USERNAME="your-gitlab-username" # Optional, defaults to token owner
# LLM configuration
export LLM_MODEL="anthropic/claude-3-5-sonnet-20241022" # Recommended
export LLM_MODEL="anthropic/claude-3-7-sonnet-20250219" # Recommended
export LLM_API_KEY="your-llm-api-key"
export LLM_BASE_URL="your-api-url" # Optional, for API proxies
```

View File

@@ -24,7 +24,7 @@ jobs:
macro: ${{ vars.OPENHANDS_MACRO || '@openhands-agent' }}
max_iterations: ${{ fromJson(vars.OPENHANDS_MAX_ITER || 50) }}
base_container_image: ${{ vars.OPENHANDS_BASE_CONTAINER_IMAGE || '' }}
-LLM_MODEL: ${{ vars.LLM_MODEL || 'anthropic/claude-3-5-sonnet-20241022' }}
+LLM_MODEL: ${{ vars.LLM_MODEL || 'anthropic/claude-3-7-sonnet-20250219' }}
target_branch: ${{ vars.TARGET_BRANCH || 'main' }}
runner: ${{ vars.TARGET_RUNNER }}
secrets:
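Because the workflow reads the model from `vars.LLM_MODEL` and only falls back to the new default, repositories can override it without editing the file. A minimal sketch using the GitHub CLI, assuming `gh` is installed and authenticated against the target repository:

```bash
# Set a repository variable that the workflow picks up as ${{ vars.LLM_MODEL }},
# overriding the hard-coded fallback model.
gh variable set LLM_MODEL --body "anthropic/claude-3-7-sonnet-20250219"
```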