Refactor: Add LLM provider utilities and improve API base URL detection (#12766)

Co-authored-by: openhands <openhands@all-hands.dev>
This commit is contained in:
Tim O'Farrell
2026-02-05 14:22:32 -07:00
committed by GitHub
parent b23ab33a01
commit 545257f870
5 changed files with 242 additions and 12 deletions

View File

@@ -31,6 +31,7 @@ from openhands.server.user_auth import (
from openhands.storage.data_models.settings import Settings
from openhands.storage.secrets.secrets_store import SecretsStore
from openhands.storage.settings.settings_store import SettingsStore
from openhands.utils.llm import get_provider_api_base, is_openhands_model
LITE_LLM_API_URL = os.environ.get(
'LITE_LLM_API_URL', 'https://llm-proxy.app.all-hands.dev'
@@ -84,6 +85,17 @@ async def load_settings(
and bool(settings.search_api_key),
provider_tokens_set=provider_tokens_set,
)
# If the base url matches the default for the provider, we don't send it
# So that the frontend can display basic mode
if is_openhands_model(settings.llm_model):
if settings.llm_base_url == LITE_LLM_API_URL:
settings_with_token_data.llm_base_url = None
elif settings.llm_model and settings.llm_base_url == get_provider_api_base(
settings.llm_model
):
settings_with_token_data.llm_base_url = None
settings_with_token_data.llm_api_key = None
settings_with_token_data.search_api_key = None
settings_with_token_data.sandbox_api_key = None
@@ -129,9 +141,25 @@ async def store_llm_settings(
settings.llm_api_key = existing_settings.llm_api_key
if settings.llm_model is None:
settings.llm_model = existing_settings.llm_model
# if llm_base_url is missing or empty, set to default as this only happens for "basic" settings
# if llm_base_url is missing or empty, try to determine appropriate URL
if not settings.llm_base_url:
settings.llm_base_url = LITE_LLM_API_URL
if is_openhands_model(settings.llm_model):
# OpenHands models use the LiteLLM proxy
settings.llm_base_url = LITE_LLM_API_URL
elif settings.llm_model:
# For non-openhands models, try to get URL from litellm
try:
api_base = get_provider_api_base(settings.llm_model)
if api_base:
settings.llm_base_url = api_base
else:
logger.debug(
f'No api_base found in litellm for model: {settings.llm_model}'
)
except Exception as e:
logger.error(
f'Failed to get api_base from litellm for model {settings.llm_model}: {e}'
)
# Keep search API key if missing or empty
if not settings.search_api_key:
settings.search_api_key = existing_settings.search_api_key

View File

@@ -5,12 +5,68 @@ import httpx
with warnings.catch_warnings():
warnings.simplefilter('ignore')
import litellm
from litellm import LlmProviders, ProviderConfigManager, get_llm_provider
from openhands.core.config import LLMConfig, OpenHandsConfig
from openhands.core.logger import openhands_logger as logger
from openhands.llm import bedrock
def is_openhands_model(model: str | None) -> bool:
"""Check if the model uses the OpenHands provider.
Args:
model: The model name to check.
Returns:
True if the model starts with 'openhands/', False otherwise.
"""
return bool(model and model.startswith('openhands/'))
def get_provider_api_base(model: str) -> str | None:
    """Resolve the default API base URL for *model* via litellm.

    Two lookup strategies are attempted in order:
    1. ``litellm.get_api_base()``, which covers providers with explicit
       URL patterns (OpenAI, Gemini, Mistral, ...).
    2. ``ProviderConfigManager.get_provider_model_info()``, which covers
       providers such as Anthropic whose ModelInfo classes expose a
       ``get_api_base()`` method.

    Both lookups are best-effort: any failure falls through to the next
    strategy or to the final ``None``.

    Args:
        model: The model name (e.g. 'gpt-4',
            'anthropic/claude-sonnet-4-5-20250929').

    Returns:
        The API base URL if one could be determined, otherwise None.
    """
    # Strategy 1: direct URL lookup (OpenAI, Gemini, etc.).
    try:
        base = litellm.get_api_base(model, {})
    except Exception:
        base = None
    if base:
        return base

    # Strategy 2: provider model-info lookup (covers e.g. Anthropic).
    try:
        _, provider, _, _ = get_llm_provider(model)
        if provider:
            try:
                provider_member = LlmProviders(provider)
            except ValueError:
                # Provider name is not a member of the LlmProviders enum.
                return None
            info = ProviderConfigManager.get_provider_model_info(
                model, provider_member
            )
            if info and hasattr(info, 'get_api_base'):
                return info.get_api_base()
    except Exception:
        pass
    return None
def get_supported_llm_models(config: OpenHandsConfig) -> list[str]:
"""Get all models supported by LiteLLM.