From 90aab29bc0aa9617293d452b05288819c23248e6 Mon Sep 17 00:00:00 2001
From: Polly <26716201@qq.com>
Date: Sat, 10 May 2025 16:29:39 +0800
Subject: [PATCH] Fix Issue #8413 max_output_tokens in
 openrouter/anthropic/claude-3.7-sonnet doesn't work correctly (#8415)

---
 frontend/src/i18n/declaration.ts | 1 -
 openhands/llm/llm.py             | 5 ++++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts
index e98a103b37..daa67ae7a7 100644
--- a/frontend/src/i18n/declaration.ts
+++ b/frontend/src/i18n/declaration.ts
@@ -459,7 +459,6 @@ export enum I18nKey {
   CONVERSATION$DOWNLOAD_ERROR = "CONVERSATION$DOWNLOAD_ERROR",
   CONVERSATION$UPDATED = "CONVERSATION$UPDATED",
   CONVERSATION$TOTAL_COST = "CONVERSATION$TOTAL_COST",
-  CONVERSATION$TOKENS_USED = "CONVERSATION$TOKENS_USED",
   CONVERSATION$INPUT = "CONVERSATION$INPUT",
   CONVERSATION$OUTPUT = "CONVERSATION$OUTPUT",
   CONVERSATION$TOTAL = "CONVERSATION$TOTAL",
diff --git a/openhands/llm/llm.py b/openhands/llm/llm.py
index 46a59d9d55..61c686884d 100644
--- a/openhands/llm/llm.py
+++ b/openhands/llm/llm.py
@@ -473,7 +473,10 @@ class LLM(RetryMixin, DebugMixin):
                     self.model_info['max_tokens'], int
                 ):
                     self.config.max_output_tokens = self.model_info['max_tokens']
-            if 'claude-3-7-sonnet' in self.config.model:
+            if any(
+                model in self.config.model
+                for model in ['claude-3-7-sonnet', 'claude-3.7-sonnet']
+            ):
                 self.config.max_output_tokens = 64000  # litellm set max to 128k, but that requires a header to be set
 
         # Initialize function calling capability
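
Context for the llm.py change: Anthropic's native model ids use the hyphenated form ("claude-3-7-sonnet-..."), while OpenRouter exposes the dotted form ("openrouter/anthropic/claude-3.7-sonnet"), so the old single-substring check never matched the OpenRouter id and the 64000-token override was skipped. Below is a minimal standalone sketch of the new matching logic; the constant and helper names (SONNET_37_IDS, resolve_max_output_tokens) are illustrative only and are not part of the OpenHands codebase.

    # Minimal sketch of the substring check, outside the LLM class.
    # SONNET_37_IDS and resolve_max_output_tokens are illustrative names.

    SONNET_37_IDS = ['claude-3-7-sonnet', 'claude-3.7-sonnet']


    def resolve_max_output_tokens(model: str, default: int = 4096) -> int:
        """Return 64000 for any Claude 3.7 Sonnet id spelling, else the default."""
        if any(variant in model for variant in SONNET_37_IDS):
            return 64000  # litellm allows 128k, but only with an extra header
        return default


    assert resolve_max_output_tokens('openrouter/anthropic/claude-3.7-sonnet') == 64000
    assert resolve_max_output_tokens('claude-3-7-sonnet-20250219') == 64000
    assert resolve_max_output_tokens('gpt-4o') == 4096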