Add support for the groq hosted kimi-k2-instruct model in the function calling supported models list (#9759)

This commit is contained in:
Regis David Souza Mesquita 2025-07-21 14:14:09 +01:00 committed by GitHub
parent 95bda09cd9
commit 0daaf21607
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -87,6 +87,7 @@ FUNCTION_CALLING_SUPPORTED_MODELS = [
'gemini-2.5-pro',
'gpt-4.1',
'kimi-k2-0711-preview',
'kimi-k2-instruct',
]
REASONING_EFFORT_SUPPORTED_MODELS = [
@@ -810,6 +811,8 @@ class LLM(RetryMixin, DebugMixin):
message.function_calling_enabled = self.is_function_calling_active()
if 'deepseek' in self.config.model:
message.force_string_serializer = True
if 'kimi-k2-instruct' in self.config.model and 'groq' in self.config.model:
message.force_string_serializer = True
# let pydantic handle the serialization
return [message.model_dump() for message in messages]