diff --git a/openhands/agenthub/codeact_agent/codeact_agent.py b/openhands/agenthub/codeact_agent/codeact_agent.py index e2e80ecc2d..7a2e0fc62b 100644 --- a/openhands/agenthub/codeact_agent/codeact_agent.py +++ b/openhands/agenthub/codeact_agent/codeact_agent.py @@ -101,7 +101,7 @@ class CodeActAgent(Agent): codeact_enable_llm_editor=self.config.codeact_enable_llm_editor, ) logger.debug( - f'TOOLS loaded for CodeActAgent: {json.dumps(self.tools, indent=2)}' + 'TOOLS loaded for CodeActAgent: ' + json.dumps(self.tools, indent=2, ensure_ascii=False).replace('\\n', '\n') ) self.prompt_manager = PromptManager( microagent_dir=os.path.join(os.path.dirname(__file__), 'micro') diff --git a/openhands/llm/llm.py b/openhands/llm/llm.py index 423301f190..b5e6ac8241 100644 --- a/openhands/llm/llm.py +++ b/openhands/llm/llm.py @@ -359,7 +359,9 @@ class LLM(RetryMixin, DebugMixin): # noinspection PyBroadException except Exception: pass - logger.debug(f'Model info: {self.model_info}') + from openhands.core.utils import json + + logger.debug(f'Model info: {json.dumps(self.model_info, indent=2)}') if self.config.model.startswith('huggingface'): # HF doesn't support the OpenAI default value for top_p (1)