add claude 4 to prompt caching and fn call list; do not add icl for devstral (#8642)

Xingyao Wang 2025-05-23 03:10:00 +08:00 committed by GitHub
parent 3980ba53c9
commit 926b425e12

@@ -56,6 +56,8 @@ CACHE_PROMPT_SUPPORTED_MODELS = [
     'claude-3-5-haiku-20241022',
     'claude-3-haiku-20240307',
     'claude-3-opus-20240229',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
 ]
 # function calling supporting models
@@ -67,6 +69,8 @@ FUNCTION_CALLING_SUPPORTED_MODELS = [
     'claude-3-5-sonnet-20241022',
     'claude-3.5-haiku',
     'claude-3-5-haiku-20241022',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
     'gpt-4o-mini',
     'gpt-4o',
     'o1-2024-12-17',
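
The two hunks above register the Claude 4 model ids in the prompt-caching and function-calling capability lists. The lookup code itself is not part of this diff; as a rough sketch only (the helper name and the provider-prefix handling below are assumptions, not taken from the repository), membership might be checked like this:

def supports_prompt_caching(model: str) -> bool:
    # Strip an optional 'provider/' prefix, e.g. 'anthropic/claude-opus-4-20250514',
    # then test membership in the list extended above.
    bare = model.split('/')[-1]
    return bare in CACHE_PROMPT_SUPPORTED_MODELS

supports_prompt_caching('anthropic/claude-sonnet-4-20250514')  # True after this change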
@@ -235,12 +239,17 @@ class LLM(RetryMixin, DebugMixin):
             mock_fncall_tools = None
             # if the agent or caller has defined tools, and we mock via prompting, convert the messages
             if mock_function_calling and 'tools' in kwargs:
+                add_in_context_learning_example = True
+                if (
+                    'openhands-lm' in self.config.model
+                    or 'devstral' in self.config.model
+                ):
+                    add_in_context_learning_example = False
                 messages = convert_fncall_messages_to_non_fncall_messages(
                     messages,
                     kwargs['tools'],
-                    add_in_context_learning_example=bool(
-                        'openhands-lm' not in self.config.model
-                    ),
+                    add_in_context_learning_example=add_in_context_learning_example,
                 )
                 kwargs['messages'] = messages
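
The last hunk replaces the inline bool(...) expression with an explicit flag so that the in-context learning example is skipped for both openhands-lm and devstral models. A standalone sketch of that decision outside the LLM class (the function name and the model strings below are illustrative, not from this diff):

def should_add_icl_example(model: str) -> bool:
    # Mirrors the new condition above: in-context learning examples are added
    # only for models that are neither openhands-lm nor devstral variants.
    return not ('openhands-lm' in model or 'devstral' in model)

for m in ('claude-sonnet-4-20250514', 'devstral-small', 'openhands-lm-32b'):
    print(m, should_add_icl_example(m))
# claude-sonnet-4-20250514 True
# devstral-small False
# openhands-lm-32b False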