Add claude-sonnet-4-5 model support (#11179)

Co-authored-by: openhands <openhands@all-hands.dev>
Xingyao Wang authored on 2025-09-29 23:27:19 -04:00; committed by GitHub
parent c3da6c20bd
commit e19b3dd1f0
12 changed files with 32 additions and 8 deletions

View File

@@ -100,7 +100,7 @@ docker run -it --rm --pull=always \
### Getting Started
When you open the application, you'll be asked to choose an LLM provider and add an API key.
[Anthropic's Claude Sonnet 4](https://www.anthropic.com/api) (`anthropic/claude-sonnet-4-20250514`)
[Anthropic's Claude Sonnet 4.5](https://www.anthropic.com/api) (`anthropic/claude-sonnet-4-5-20250929`)
works best, but you have [many options](https://docs.all-hands.dev/usage/llms).
See the [Running OpenHands](https://docs.all-hands.dev/usage/installation) guide for
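
Since OpenHands routes model calls through litellm (see the `llm.py` hunks further down), the identifier quoted above can be exercised directly. A minimal sketch, assuming the key is supplied via `ANTHROPIC_API_KEY`; the prompt and key handling are illustrative, not part of this change:

```python
import os

import litellm

# Call the newly supported model through litellm, which OpenHands uses
# under the hood. The env var and prompt here are assumptions.
response = litellm.completion(
    model='anthropic/claude-sonnet-4-5-20250929',
    messages=[{'role': 'user', 'content': 'Say hello in one word.'}],
    api_key=os.environ['ANTHROPIC_API_KEY'],
)
print(response.choices[0].message.content)
```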

View File

@@ -105,7 +105,7 @@ The conversation history will be saved in `~/.openhands/sessions`.
1. Set the following environment variables in your terminal:
- `SANDBOX_VOLUMES` to specify the directory you want OpenHands to access ([See using SANDBOX_VOLUMES for more info](../runtimes/docker#using-sandbox_volumes))
- `LLM_MODEL` - the LLM model to use (e.g. `export LLM_MODEL="anthropic/claude-sonnet-4-20250514"`)
- `LLM_MODEL` - the LLM model to use (e.g. `export LLM_MODEL="anthropic/claude-sonnet-4-20250514"` or `export LLM_MODEL="anthropic/claude-sonnet-4-5-20250929"`)
- `LLM_API_KEY` - your API key (e.g. `export LLM_API_KEY="sk_test_12345"`)
2. Run the following command:

View File

@@ -53,7 +53,7 @@ Set environment variables and run the Docker command:
```bash
# Set required environment variables
export SANDBOX_VOLUMES="/path/to/workspace:/workspace:rw" # Format: host_path:container_path:mode
export LLM_MODEL="anthropic/claude-sonnet-4-20250514"
export LLM_MODEL="anthropic/claude-sonnet-4-20250514" # or "anthropic/claude-sonnet-4-5-20250929"
export LLM_API_KEY="your-api-key"
export SANDBOX_SELECTED_REPO="owner/repo-name" # Optional: requires GITHUB_TOKEN
export GITHUB_TOKEN="your-token" # Required for repository operations

View File

@@ -18,6 +18,7 @@ Based on these findings and community feedback, these are the latest models that
### Cloud / API-Based Models
- [anthropic/claude-sonnet-4-20250514](https://www.anthropic.com/api) (recommended)
- [anthropic/claude-sonnet-4-5-20250929](https://www.anthropic.com/api) (recommended)
- [openai/gpt-5-2025-08-07](https://openai.com/api/) (recommended)
- [gemini/gemini-2.5-pro](https://blog.google/technology/google-deepmind/gemini-model-thinking-updates-march-2025/)
- [deepseek/deepseek-chat](https://api-docs.deepseek.com/)

View File

@@ -15,7 +15,7 @@ description: OpenHands LLM provider with access to state-of-the-art (SOTA) agent
When running OpenHands, you'll need to set the following in the OpenHands UI through the Settings under the `LLM` tab:
- `LLM Provider` to `OpenHands`
- `LLM Model` to the model you will be using (e.g. claude-sonnet-4-20250514)
- `LLM Model` to the model you will be using (e.g. claude-sonnet-4-20250514 or claude-sonnet-4-5-20250929)
- `API Key` to your OpenHands LLM API key copied from above
## Using OpenHands LLM Provider in the CLI
@@ -36,6 +36,7 @@ Pricing follows official API provider rates. Below are the current pricing detai
|-------|----------------------------|-----------------------------------|------------------------------|------------------|-------------------|
| claude-opus-4-20250514 | $15.00 | $1.50 | $75.00 | 200,000 | 32,000 |
| claude-sonnet-4-20250514 | $3.00 | $0.30 | $15.00 | 200,000 | 64,000 |
| claude-sonnet-4-5-20250929 | $3.00 | $0.30 | $15.00 | 200,000 | 64,000 |
| devstral-medium-2507 | $0.40 | N/A | $2.00 | 128,000 | 128,000 |
| devstral-small-2505 | $0.10 | N/A | $0.30 | 128,000 | 128,000 |
| devstral-small-2507 | $0.10 | N/A | $0.30 | 128,000 | 128,000 |
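
To make the pricing rows concrete: assuming the dollar columns are, left to right, input, cached-input, and output prices per million tokens (the header row sits outside this hunk), a quick cost estimate for the new model looks like this. The token counts below are made up for illustration:

```python
# Per-million-token rates for claude-sonnet-4-5-20250929, read off the table above.
INPUT_PER_MTOK = 3.00
CACHED_INPUT_PER_MTOK = 0.30
OUTPUT_PER_MTOK = 15.00


def estimate_cost(input_tokens: int, cached_tokens: int, output_tokens: int) -> float:
    """Approximate request cost in dollars."""
    return (
        input_tokens * INPUT_PER_MTOK
        + cached_tokens * CACHED_INPUT_PER_MTOK
        + output_tokens * OUTPUT_PER_MTOK
    ) / 1_000_000


# Example: 20k fresh input tokens, 80k cache reads, 4k output tokens ≈ $0.144.
print(f'${estimate_cost(20_000, 80_000, 4_000):.3f}')
```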

View File

@@ -115,7 +115,9 @@ const openHandsHandlers = [
"gpt-4o-mini",
"anthropic/claude-3.5",
"anthropic/claude-sonnet-4-20250514",
"anthropic/claude-sonnet-4-5-20250929",
"openhands/claude-sonnet-4-20250514",
"openhands/claude-sonnet-4-5-20250929",
"sambanova/Meta-Llama-3.1-8B-Instruct",
]),
),

View File

@@ -13,6 +13,7 @@ export const VERIFIED_MODELS = [
"claude-3-5-sonnet-20241022",
"claude-3-7-sonnet-20250219",
"claude-sonnet-4-20250514",
"claude-sonnet-4-5-20250929",
"claude-opus-4-20250514",
"claude-opus-4-1-20250805",
"gemini-2.5-pro",
@@ -51,6 +52,7 @@ export const VERIFIED_ANTHROPIC_MODELS = [
"claude-3-5-haiku-20241022",
"claude-3-7-sonnet-20250219",
"claude-sonnet-4-20250514",
"claude-sonnet-4-5-20250929",
"claude-opus-4-20250514",
"claude-opus-4-1-20250805",
];
@@ -67,6 +69,7 @@ export const VERIFIED_MISTRAL_MODELS = [
// (e.g., they return `claude-sonnet-4-20250514` instead of `openhands/claude-sonnet-4-20250514`)
export const VERIFIED_OPENHANDS_MODELS = [
"claude-sonnet-4-20250514",
"claude-sonnet-4-5-20250929",
"gpt-5-2025-08-07",
"gpt-5-mini-2025-08-07",
"claude-opus-4-20250514",

View File

@@ -165,6 +165,7 @@ VERIFIED_OPENAI_MODELS = [
VERIFIED_ANTHROPIC_MODELS = [
'claude-sonnet-4-20250514',
'claude-sonnet-4-5-20250929',
'claude-opus-4-20250514',
'claude-opus-4-1-20250805',
'claude-3-7-sonnet-20250219',
@@ -186,6 +187,7 @@ VERIFIED_MISTRAL_MODELS = [
VERIFIED_OPENHANDS_MODELS = [
'claude-sonnet-4-20250514',
'claude-sonnet-4-5-20250929',
'gpt-5-2025-08-07',
'gpt-5-mini-2025-08-07',
'claude-opus-4-20250514',
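
The comment preserved in the frontend hunk further up notes that some endpoints return bare model IDs (e.g. `claude-sonnet-4-20250514`) rather than provider-prefixed ones (`openhands/claude-sonnet-4-20250514`). One plausible way these Python lists get consulted is to strip an optional provider prefix before the membership check; the helper below is a hypothetical sketch, not code from this diff:

```python
VERIFIED_ANTHROPIC_MODELS = [
    'claude-sonnet-4-20250514',
    'claude-sonnet-4-5-20250929',
    'claude-opus-4-20250514',
    'claude-opus-4-1-20250805',
    'claude-3-7-sonnet-20250219',
]


def is_verified_anthropic(model: str) -> bool:
    """Hypothetical helper: accept both 'anthropic/claude-sonnet-4-5-20250929'
    and the bare 'claude-sonnet-4-5-20250929' form."""
    bare = model.split('/')[-1]
    return bare in VERIFIED_ANTHROPIC_MODELS


assert is_verified_anthropic('anthropic/claude-sonnet-4-5-20250929')
assert is_verified_anthropic('claude-sonnet-4-5-20250929')
```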

View File

@@ -148,7 +148,10 @@ class LLM(RetryMixin, DebugMixin):
logger.debug(
f'Gemini model {self.config.model} with reasoning_effort {self.config.reasoning_effort} mapped to thinking {kwargs.get("thinking")}'
)
elif 'claude-sonnet-4-5' in self.config.model:
kwargs.pop(
'reasoning_effort', None
) # don't send reasoning_effort to Claude Sonnet 4.5
else:
kwargs['reasoning_effort'] = self.config.reasoning_effort
kwargs.pop(
@@ -507,6 +510,7 @@ class LLM(RetryMixin, DebugMixin):
'claude-3-7-sonnet',
'claude-3.7-sonnet',
'claude-sonnet-4',
'claude-sonnet-4-5-20250929',
]
if any(model in self.config.model for model in sonnet_models):
self.config.max_output_tokens = 64000 # litellm set max to 128k, but that requires a header to be set
@@ -817,6 +821,8 @@ class LLM(RetryMixin, DebugMixin):
message.force_string_serializer = True
if 'openrouter/anthropic/claude-sonnet-4' in self.config.model:
message.force_string_serializer = True
if 'openrouter/anthropic/claude-sonnet-4-5-20250929' in self.config.model:
message.force_string_serializer = True
# let pydantic handle the serialization
return [message.model_dump() for message in messages]
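
The `llm.py` changes above all hinge on plain substring matching against the configured model name: drop `reasoning_effort` for Claude Sonnet 4.5, cap `max_output_tokens` for the Sonnet family, and force the string serializer for the OpenRouter variant. A condensed, self-contained sketch of the first two checks; the config class and function are illustrative stand-ins, not the real `LLM` class:

```python
from dataclasses import dataclass


@dataclass
class FakeLLMConfig:
    """Stand-in for the relevant fields of the real LLM config."""
    model: str
    reasoning_effort: str | None = 'high'
    max_output_tokens: int | None = None


def apply_model_quirks(config: FakeLLMConfig, kwargs: dict) -> None:
    # Claude Sonnet 4.5 does not take reasoning_effort, so drop it from kwargs.
    if 'claude-sonnet-4-5' in config.model:
        kwargs.pop('reasoning_effort', None)
    else:
        kwargs['reasoning_effort'] = config.reasoning_effort

    # Sonnet-family models get a 64k output cap (litellm allows 128k only
    # with an extra header).
    sonnet_models = [
        'claude-3-7-sonnet',
        'claude-3.7-sonnet',
        'claude-sonnet-4',
        'claude-sonnet-4-5-20250929',
    ]
    if any(name in config.model for name in sonnet_models):
        config.max_output_tokens = 64000


kwargs: dict = {}
cfg = FakeLLMConfig(model='anthropic/claude-sonnet-4-5-20250929')
apply_model_quirks(cfg, kwargs)
assert 'reasoning_effort' not in kwargs and cfg.max_output_tokens == 64000
```

Note that `'claude-sonnet-4'` already matches the 4.5 date-stamped name as a substring, so the new `sonnet_models` entry is belt-and-braces rather than strictly required.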

View File

@@ -103,6 +103,7 @@ REASONING_EFFORT_PATTERNS: list[str] = [
'gpt-5*',
# DeepSeek reasoning family
'deepseek-r1-0528*',
'claude-sonnet-4-5*',
]
PROMPT_CACHE_PATTERNS: list[str] = [
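
The `REASONING_EFFORT_PATTERNS` entries are glob-style patterns; the code that consumes them sits outside this hunk, so the matcher below is an assumption. A minimal sketch using `fnmatch` to show how `claude-sonnet-4-5*` catches the date-stamped release name while leaving Sonnet 4 untouched:

```python
from fnmatch import fnmatch

# Entries visible in the hunk above; earlier entries are truncated out of view.
REASONING_EFFORT_PATTERNS: list[str] = [
    'gpt-5*',
    'deepseek-r1-0528*',
    'claude-sonnet-4-5*',
]


def matches_reasoning_effort_pattern(model: str) -> bool:
    """Assumed matcher: strip any provider prefix, then glob-match."""
    bare = model.split('/')[-1]
    return any(fnmatch(bare, pattern) for pattern in REASONING_EFFORT_PATTERNS)


assert matches_reasoning_effort_pattern('anthropic/claude-sonnet-4-5-20250929')
assert not matches_reasoning_effort_pattern('claude-sonnet-4-20250514')
```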

View File

@@ -56,6 +56,7 @@ def get_supported_llm_models(config: OpenHandsConfig) -> list[str]:
# Add OpenHands provider models
openhands_models = [
'openhands/claude-sonnet-4-20250514',
'openhands/claude-sonnet-4-5-20250929',
'openhands/gpt-5-2025-08-07',
'openhands/gpt-5-mini-2025-08-07',
'openhands/claude-opus-4-20250514',

View File

@@ -615,7 +615,12 @@ class TestModifyLLMSettingsBasic:
)
@patch(
'openhands.cli.settings.VERIFIED_OPENHANDS_MODELS',
['claude-sonnet-4-20250514', 'claude-opus-4-20250514', 'o3'],
[
'claude-sonnet-4-20250514',
'claude-sonnet-4-5-20250929',
'claude-opus-4-20250514',
'o3',
],
)
@patch('openhands.cli.settings.get_supported_llm_models')
@patch('openhands.cli.settings.organize_models_and_providers')
@@ -638,6 +643,7 @@ class TestModifyLLMSettingsBasic:
# Setup mocks
mock_get_models.return_value = [
'openhands/claude-sonnet-4-20250514',
'openhands/claude-sonnet-4-5-20250929',
'openhands/claude-opus-4-20250514',
'openhands/o3',
]
@@ -645,6 +651,7 @@ class TestModifyLLMSettingsBasic:
'openhands': {
'models': [
'claude-sonnet-4-20250514',
'claude-sonnet-4-5-20250929',
'claude-opus-4-20250514',
'o3',
],
@@ -668,7 +675,7 @@ class TestModifyLLMSettingsBasic:
# Change provider and model
mock_confirm.side_effect = [
0, # Select openhands (index 0 in ['openhands', 'anthropic'])
2, # Select o3 (index 2 in ['claude-sonnet-4-20250514', 'claude-opus-4-20250514', 'o3'])
3, # Select o3 (index 3 in ['claude-sonnet-4-20250514', 'claude-sonnet-4-5-20250929', 'claude-opus-4-20250514', 'o3'])
0, # Save settings
]
@@ -702,7 +709,7 @@ class TestModifyLLMSettingsBasic:
)
@patch(
'openhands.cli.settings.VERIFIED_ANTHROPIC_MODELS',
['claude-sonnet-4-20250514', 'claude-3-opus'],
['claude-sonnet-4-20250514', 'claude-sonnet-4-5-20250929', 'claude-3-opus'],
)
@patch('openhands.cli.settings.get_supported_llm_models')
@patch('openhands.cli.settings.organize_models_and_providers')