fix: Conversation creation accessing secret without unwrapping (#6335)

Co-authored-by: Calvin Smith <calvin@all-hands.dev>
Authored by Calvin Smith on 2025-01-17 15:16:57 -07:00, committed by GitHub
parent b1fa6301f0
commit f07ec7a09c
2 changed files with 10 additions and 7 deletions

@@ -60,12 +60,6 @@ class RemoteRuntime(ActionExecutionClient):
         )
         self.session.headers.update({'X-API-Key': self.config.sandbox.api_key})
-        if self.config.workspace_base is not None:
-            self.log(
-                'debug',
-                'Setting workspace_base is not supported in the remote runtime.',
-            )
         self.runtime_builder = RemoteRuntimeBuilder(
             self.config.sandbox.remote_runtime_api_url,
             self.config.sandbox.api_key,
@@ -76,6 +70,12 @@ class RemoteRuntime(ActionExecutionClient):
         self.available_hosts: dict[str, int] = {}
         self._runtime_initialized: bool = False
+        if self.config.workspace_base is not None:
+            self.log(
+                'debug',
+                'Setting workspace_base is not supported in the remote runtime.',
+            )
     def log(self, level: str, message: str) -> None:
         message = f'[runtime session_id={self.sid} runtime_id={self.runtime_id or "unknown"}] {message}'
         getattr(logger, level)(message, stacklevel=2)
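
For context on the unchanged log helper shown above: it dispatches to the logger method named by level via getattr, and passes stacklevel=2 so the emitted record is attributed to log()'s caller rather than to log() itself. A minimal standalone sketch, assuming the stdlib logging module; the logger name, session id, and start_runtime caller below are illustrative only:

import logging

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(funcName)s: %(message)s')
logger = logging.getLogger('openhands.runtime')  # hypothetical logger name

def log(level: str, message: str) -> None:
    # getattr(logger, 'debug') is the same bound method as logger.debug;
    # stacklevel=2 makes funcName/lineno point at log()'s caller.
    message = f'[runtime session_id=abc runtime_id=unknown] {message}'
    getattr(logger, level)(message, stacklevel=2)

def start_runtime() -> None:
    log('debug', 'Setting workspace_base is not supported in the remote runtime.')

start_runtime()  # logged funcName is start_runtime, not log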

@@ -51,7 +51,10 @@ async def _create_new_conversation(
     session_init_args = {**settings.__dict__, **session_init_args}
     # We could use litellm.check_valid_key for a more accurate check,
     # but that would run a tiny inference.
-    if not settings.llm_api_key or settings.llm_api_key.isspace():
+    if (
+        not settings.llm_api_key
+        or settings.llm_api_key.get_secret_value().isspace()
+    ):
         logger.warn(f'Missing api key for model {settings.llm_model}')
         raise LLMAuthenticationError(
             'Error authenticating with the LLM provider. Please check your API key'
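
The root cause of the fix above: settings.llm_api_key appears to be stored as a pydantic SecretStr (the new code calls get_secret_value(), which is pydantic's unwrapping API), and the wrapper does not expose str methods such as isspace(). A minimal sketch of the behavior, assuming pydantic's SecretStr; the variable name is illustrative:

from pydantic import SecretStr

api_key = SecretStr('   ')

# Calling a str method on the wrapper fails: SecretStr is not a str subclass.
try:
    api_key.isspace()
except AttributeError as exc:
    print(f'direct access fails: {exc}')

# Unwrapping first yields the real string, so whitespace-only keys are caught.
print(api_key.get_secret_value().isspace())  # True -> treated as a missing key

# str()/repr() never leak the secret, which is why the wrapper exists.
print(api_key)  # **********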