mirror of
https://github.com/OpenHands/OpenHands.git
synced 2026-03-22 13:47:19 +08:00
Refactor: Add LLM provider utilities and improve API base URL detection (#12766)
Co-authored-by: openhands <openhands@all-hands.dev>
This commit is contained in:
@@ -24,6 +24,7 @@ from openhands.core.config.openhands_config import OpenHandsConfig
|
||||
from openhands.server.settings import Settings
|
||||
from openhands.storage.settings.settings_store import SettingsStore
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
from openhands.utils.llm import is_openhands_model
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -163,7 +164,7 @@ class SaasSettingsStore(SettingsStore):
|
||||
# Check if we need to generate an LLM key.
|
||||
if item.llm_base_url == LITE_LLM_API_URL:
|
||||
await self._ensure_api_key(
|
||||
item, str(org_id), openhands_type=self._is_openhands_provider(item)
|
||||
item, str(org_id), openhands_type=is_openhands_model(item.llm_model)
|
||||
)
|
||||
|
||||
kwargs = item.model_dump(context={'expose_secrets': True})
|
||||
@@ -229,10 +230,6 @@ class SaasSettingsStore(SettingsStore):
|
||||
fernet_key = b64encode(hashlib.sha256(jwt_secret.encode()).digest())
|
||||
return Fernet(fernet_key)
|
||||
|
||||
def _is_openhands_provider(self, item: Settings) -> bool:
    """Return True when *item* is configured with an OpenHands-provider model.

    A model belongs to the OpenHands provider exactly when its name is
    non-empty and carries the ``openhands/`` prefix.
    """
    model = item.llm_model
    if not model:
        return False
    return model.startswith('openhands/')
|
||||
|
||||
async def _ensure_api_key(
|
||||
self, item: Settings, org_id: str, openhands_type: bool = False
|
||||
) -> None:
|
||||
|
||||
@@ -31,6 +31,7 @@ from openhands.server.user_auth import (
|
||||
from openhands.storage.data_models.settings import Settings
|
||||
from openhands.storage.secrets.secrets_store import SecretsStore
|
||||
from openhands.storage.settings.settings_store import SettingsStore
|
||||
from openhands.utils.llm import get_provider_api_base, is_openhands_model
|
||||
|
||||
LITE_LLM_API_URL = os.environ.get(
|
||||
'LITE_LLM_API_URL', 'https://llm-proxy.app.all-hands.dev'
|
||||
@@ -84,6 +85,17 @@ async def load_settings(
|
||||
and bool(settings.search_api_key),
|
||||
provider_tokens_set=provider_tokens_set,
|
||||
)
|
||||
|
||||
# If the base url matches the default for the provider, we don't send it
|
||||
# So that the frontend can display basic mode
|
||||
if is_openhands_model(settings.llm_model):
|
||||
if settings.llm_base_url == LITE_LLM_API_URL:
|
||||
settings_with_token_data.llm_base_url = None
|
||||
elif settings.llm_model and settings.llm_base_url == get_provider_api_base(
|
||||
settings.llm_model
|
||||
):
|
||||
settings_with_token_data.llm_base_url = None
|
||||
|
||||
settings_with_token_data.llm_api_key = None
|
||||
settings_with_token_data.search_api_key = None
|
||||
settings_with_token_data.sandbox_api_key = None
|
||||
@@ -129,9 +141,25 @@ async def store_llm_settings(
|
||||
settings.llm_api_key = existing_settings.llm_api_key
|
||||
if settings.llm_model is None:
|
||||
settings.llm_model = existing_settings.llm_model
|
||||
# if llm_base_url is missing or empty, set to default as this only happens for "basic" settings
|
||||
# if llm_base_url is missing or empty, try to determine appropriate URL
|
||||
if not settings.llm_base_url:
|
||||
settings.llm_base_url = LITE_LLM_API_URL
|
||||
if is_openhands_model(settings.llm_model):
|
||||
# OpenHands models use the LiteLLM proxy
|
||||
settings.llm_base_url = LITE_LLM_API_URL
|
||||
elif settings.llm_model:
|
||||
# For non-openhands models, try to get URL from litellm
|
||||
try:
|
||||
api_base = get_provider_api_base(settings.llm_model)
|
||||
if api_base:
|
||||
settings.llm_base_url = api_base
|
||||
else:
|
||||
logger.debug(
|
||||
f'No api_base found in litellm for model: {settings.llm_model}'
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f'Failed to get api_base from litellm for model {settings.llm_model}: {e}'
|
||||
)
|
||||
# Keep search API key if missing or empty
|
||||
if not settings.search_api_key:
|
||||
settings.search_api_key = existing_settings.search_api_key
|
||||
|
||||
@@ -5,12 +5,68 @@ import httpx
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter('ignore')
|
||||
import litellm
|
||||
from litellm import LlmProviders, ProviderConfigManager, get_llm_provider
|
||||
|
||||
from openhands.core.config import LLMConfig, OpenHandsConfig
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.llm import bedrock
|
||||
|
||||
|
||||
def is_openhands_model(model: str | None) -> bool:
|
||||
"""Check if the model uses the OpenHands provider.
|
||||
|
||||
Args:
|
||||
model: The model name to check.
|
||||
|
||||
Returns:
|
||||
True if the model starts with 'openhands/', False otherwise.
|
||||
"""
|
||||
return bool(model and model.startswith('openhands/'))
|
||||
|
||||
|
||||
def get_provider_api_base(model: str) -> str | None:
    """Resolve the default API base URL for *model* using litellm.

    Two resolution strategies are attempted in order:

    1. ``litellm.get_api_base()``, which knows the URL patterns for
       providers such as OpenAI, Gemini, and Mistral.
    2. ``ProviderConfigManager.get_provider_model_info()``, which covers
       providers like Anthropic whose ModelInfo classes expose a
       ``get_api_base()`` method.

    Args:
        model: The model name (e.g. 'gpt-4',
            'anthropic/claude-sonnet-4-5-20250929').

    Returns:
        The API base URL if one could be determined, otherwise None.
        This function never raises.
    """
    # Strategy 1: direct lookup (OpenAI, Gemini, and similar providers).
    try:
        resolved = litellm.get_api_base(model, {})
    except Exception:
        resolved = None
    if resolved:
        return resolved

    # Strategy 2: provider ModelInfo lookup (Anthropic and similar providers).
    try:
        _, provider_name, _, _ = get_llm_provider(model)
        if provider_name:
            try:
                provider_enum = LlmProviders(provider_name)
                info = ProviderConfigManager.get_provider_model_info(
                    model, provider_enum
                )
                if info and hasattr(info, 'get_api_base'):
                    return info.get_api_base()
            except ValueError:
                # Provider name is not a member of the LlmProviders enum.
                pass
    except Exception:
        pass

    return None
|
||||
|
||||
|
||||
def get_supported_llm_models(config: OpenHandsConfig) -> list[str]:
|
||||
"""Get all models supported by LiteLLM.
|
||||
|
||||
|
||||
@@ -188,12 +188,12 @@ async def test_store_llm_settings_update_existing():
|
||||
async def test_store_llm_settings_partial_update():
|
||||
"""Test store_llm_settings with partial update.
|
||||
|
||||
Note: When llm_base_url is not provided in the update, it gets set to the default
|
||||
LiteLLM proxy URL. This is intentional behavior for "basic" settings where users
|
||||
don't specify a custom base URL.
|
||||
Note: When llm_base_url is not provided in the update and the model is NOT an
|
||||
openhands model, we attempt to get the URL from litellm.get_api_base().
|
||||
For OpenAI models, this returns https://api.openai.com.
|
||||
"""
|
||||
settings = Settings(
|
||||
llm_model='gpt-4' # Only updating model
|
||||
llm_model='gpt-4' # Only updating model (not an openhands model)
|
||||
)
|
||||
|
||||
# Create existing settings
|
||||
@@ -209,9 +209,84 @@ async def test_store_llm_settings_partial_update():
|
||||
assert result.llm_model == 'gpt-4'
|
||||
# For SecretStr objects, we need to compare the secret value
|
||||
assert result.llm_api_key.get_secret_value() == 'existing-api-key'
|
||||
# When llm_base_url is not provided, it defaults to the LiteLLM proxy URL
|
||||
# OpenAI models: litellm.get_api_base() returns https://api.openai.com
|
||||
assert result.llm_base_url == 'https://api.openai.com'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_store_llm_settings_anthropic_model_gets_api_base():
    """Updating to an Anthropic model resolves the Anthropic API base URL.

    For Anthropic models, get_provider_api_base() obtains the URL through
    ProviderConfigManager.get_provider_model_info().
    """
    existing_settings = Settings(
        llm_model='gpt-3.5',
        llm_api_key=SecretStr('existing-api-key'),
    )
    incoming = Settings(llm_model='anthropic/claude-sonnet-4-5-20250929')

    updated = await store_llm_settings(incoming, existing_settings)

    # The model is switched while the API key is carried over.
    assert updated.llm_model == 'anthropic/claude-sonnet-4-5-20250929'
    assert updated.llm_api_key.get_secret_value() == 'existing-api-key'
    # The base URL is resolved to the Anthropic default via ProviderConfigManager.
    assert updated.llm_base_url == 'https://api.anthropic.com'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_store_llm_settings_litellm_error_logged():
    """A failed litellm api_base lookup must be logged rather than raised."""
    from unittest.mock import patch

    existing_settings = Settings(
        llm_model='gpt-3.5',
        llm_api_key=SecretStr('existing-api-key'),
    )
    # A model name that litellm cannot map to any provider.
    incoming = Settings(llm_model='unknown-model-xyz')

    # store_llm_settings must complete even when litellm fails internally.
    with patch('openhands.server.routes.settings.logger') as mock_logger:
        result = await store_llm_settings(incoming, existing_settings)

    # No URL could be determined for the unknown model.
    assert result.llm_base_url is None
    # The failure path emitted at least an error or a debug record.
    assert mock_logger.error.called or mock_logger.debug.called
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_store_llm_settings_openhands_model_gets_default_url():
|
||||
"""Test store_llm_settings with openhands model gets LiteLLM proxy URL.
|
||||
|
||||
When llm_base_url is not provided and the model is an openhands model,
|
||||
it gets set to the default LiteLLM proxy URL.
|
||||
"""
|
||||
import os
|
||||
|
||||
settings = Settings(
|
||||
llm_model='openhands/claude-sonnet-4-5-20250929' # openhands model
|
||||
)
|
||||
|
||||
# Create existing settings
|
||||
existing_settings = Settings(
|
||||
llm_model='gpt-3.5',
|
||||
llm_api_key=SecretStr('existing-api-key'),
|
||||
)
|
||||
|
||||
result = await store_llm_settings(settings, existing_settings)
|
||||
|
||||
# Should return settings with updated model
|
||||
assert result.llm_model == 'openhands/claude-sonnet-4-5-20250929'
|
||||
# For SecretStr objects, we need to compare the secret value
|
||||
assert result.llm_api_key.get_secret_value() == 'existing-api-key'
|
||||
# openhands models get the LiteLLM proxy URL
|
||||
expected_base_url = os.environ.get(
|
||||
'LITE_LLM_API_URL', 'https://llm-proxy.app.all-hands.dev'
|
||||
)
|
||||
|
||||
74
tests/unit/utils/test_llm_utils.py
Normal file
74
tests/unit/utils/test_llm_utils.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Tests for openhands.utils.llm module."""
|
||||
|
||||
from openhands.utils.llm import get_provider_api_base, is_openhands_model
|
||||
|
||||
|
||||
class TestIsOpenhandsModel:
    """Unit tests covering is_openhands_model."""

    def test_openhands_model_returns_true(self):
        """Names carrying the 'openhands/' prefix are recognized."""
        for model in (
            'openhands/claude-sonnet-4-5-20250929',
            'openhands/gpt-5-2025-08-07',
            'openhands/gemini-2.5-pro',
        ):
            assert is_openhands_model(model) is True

    def test_non_openhands_model_returns_false(self):
        """Names from other providers are rejected."""
        for model in (
            'gpt-4',
            'claude-3-opus-20240229',
            'anthropic/claude-3-opus-20240229',
            'openai/gpt-4',
        ):
            assert is_openhands_model(model) is False

    def test_none_model_returns_false(self):
        """None is treated as 'no model configured' and rejected."""
        assert is_openhands_model(None) is False

    def test_empty_string_returns_false(self):
        """An empty model name is rejected."""
        assert is_openhands_model('') is False

    def test_similar_prefix_not_matched(self):
        """Near-miss prefixes must not be accepted."""
        assert is_openhands_model('openhands') is False  # slash missing
        assert is_openhands_model('openhandsx/model') is False  # trailing char
        assert is_openhands_model('OPENHANDS/model') is False  # case-sensitive
|
||||
|
||||
|
||||
class TestGetProviderApiBase:
    """Unit tests covering get_provider_api_base."""

    def test_openai_model_returns_openai_api_base(self):
        """OpenAI models resolve to the OpenAI API base URL."""
        for model in ('gpt-4', 'openai/gpt-4'):
            assert get_provider_api_base(model) == 'https://api.openai.com'

    def test_anthropic_model_returns_anthropic_api_base(self):
        """Anthropic models resolve to the Anthropic API base URL."""
        for model in (
            'anthropic/claude-sonnet-4-5-20250929',
            'claude-sonnet-4-5-20250929',
        ):
            assert get_provider_api_base(model) == 'https://api.anthropic.com'

    def test_gemini_model_returns_google_api_base(self):
        """Gemini models resolve to a Google Generative Language URL."""
        url = get_provider_api_base('gemini/gemini-pro')
        assert url is not None
        assert 'generativelanguage.googleapis.com' in url

    def test_mistral_model_returns_mistral_api_base(self):
        """Mistral models resolve to the Mistral API base URL."""
        url = get_provider_api_base('mistral/mistral-large-latest')
        assert url == 'https://api.mistral.ai/v1'

    def test_unknown_model_returns_none(self):
        """Unknown models never raise; the result is None or a URL string."""
        outcome = get_provider_api_base('unknown-provider/unknown-model')
        # litellm may or may not supply a fallback URL depending on version;
        # the contract under test is only that no exception escapes.
        assert outcome is None or isinstance(outcome, str)
|
||||
Reference in New Issue
Block a user