Add support for passing list of Message into LLM completion (#10671)

This commit is contained in:
Ryan H. Tran 2025-08-28 20:22:28 +07:00 committed by GitHub
parent 9709431874
commit 81829289ab
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 18 additions and 8 deletions

View File

@@ -217,7 +217,7 @@ class BrowsingAgent(Agent):
messages.append(Message(role='user', content=[TextContent(text=prompt)]))
response = self.llm.completion(
messages=self.llm.format_messages_for_llm(messages),
messages=messages,
stop=[')```', ')\n```'],
)
return self.response_parser.parse(response)

View File

@@ -204,7 +204,7 @@ class CodeActAgent(Agent):
initial_user_message = self._get_initial_user_message(state.history)
messages = self._get_messages(condensed_history, initial_user_message)
params: dict = {
'messages': self.llm.format_messages_for_llm(messages),
'messages': messages,
}
params['tools'] = check_tools(self.tools, self.llm.config)
params['extra_body'] = {

View File

@@ -301,10 +301,8 @@ You are an agent trying to solve a web task based on the content of the page and
messages.append(Message(role='system', content=[TextContent(text=system_msg)]))
messages.append(Message(role='user', content=human_prompt))
flat_messages = self.llm.format_messages_for_llm(messages)
response = self.llm.completion(
messages=flat_messages,
messages=messages,
temperature=0.0,
stop=[')```', ')\n```'],
)

View File

@@ -3,7 +3,7 @@ import os
import time
import warnings
from functools import partial
from typing import Any, Callable
from typing import Any, Callable, cast
import httpx
@@ -220,7 +220,9 @@ class LLM(RetryMixin, DebugMixin):
"""Wrapper for the litellm completion function. Logs the input and output of the completion function."""
from openhands.io import json
messages_kwarg: list[dict[str, Any]] | dict[str, Any] = []
messages_kwarg: (
dict[str, Any] | Message | list[dict[str, Any]] | list[Message]
) = []
mock_function_calling = not self.is_function_calling_active()
# some callers might send the model and messages directly
@@ -239,9 +241,19 @@
messages_kwarg = kwargs['messages']
# ensure we work with a list of messages
messages: list[dict[str, Any]] = (
messages_list = (
messages_kwarg if isinstance(messages_kwarg, list) else [messages_kwarg]
)
# Format Message objects to dict format if needed
messages: list[dict] = []
if messages_list and isinstance(messages_list[0], Message):
messages = self.format_messages_for_llm(
cast(list[Message], messages_list)
)
else:
messages = cast(list[dict[str, Any]], messages_list)
kwargs['messages'] = messages
# handle conversion of to non-function calling messages if needed
original_fncall_messages = copy.deepcopy(messages)