mirror of
https://github.com/OpenHands/OpenHands.git
synced 2026-03-22 13:47:19 +08:00
fix(llm): remove default reasoning_effort; fix Gemini special case (#11567)
This commit is contained in:
@@ -62,8 +62,11 @@ class AsyncLLM(LLM):
         elif 'messages' in kwargs:
             messages = kwargs['messages']

-        # Set reasoning effort for models that support it
-        if get_features(self.config.model).supports_reasoning_effort:
+        # Set reasoning effort for models that support it, only if explicitly provided
+        if (
+            get_features(self.config.model).supports_reasoning_effort
+            and self.config.reasoning_effort is not None
+        ):
             kwargs['reasoning_effort'] = self.config.reasoning_effort

         # ensure we work with a list of messages
@@ -155,7 +155,8 @@ class LLM(RetryMixin, DebugMixin):
                 # don't send reasoning_effort to specific Claude Sonnet/Haiku 4.5 variants
                 kwargs.pop('reasoning_effort', None)
             else:
-                kwargs['reasoning_effort'] = self.config.reasoning_effort
+                if self.config.reasoning_effort is not None:
+                    kwargs['reasoning_effort'] = self.config.reasoning_effort
             kwargs.pop(
                 'temperature'
             )  # temperature is not supported for reasoning models
@@ -64,8 +64,11 @@ class StreamingLLM(AsyncLLM):
                 'The messages list is empty. At least one message is required.'
             )

-        # Set reasoning effort for models that support it
-        if get_features(self.config.model).supports_reasoning_effort:
+        # Set reasoning effort for models that support it, only if explicitly provided
+        if (
+            get_features(self.config.model).supports_reasoning_effort
+            and self.config.reasoning_effort is not None
+        ):
             kwargs['reasoning_effort'] = self.config.reasoning_effort

         self.log_prompt(messages)
Reference in New Issue
Block a user