Mirror of https://github.com/OpenHands/OpenHands.git
Synced 2025-12-26 05:48:36 +08:00

Commit 6187b8834d
Merge remote-tracking branch 'origin/main' into ALL-2596/org-support
@@ -97,6 +97,9 @@ class GithubUserContext(UserContext):
         user_secrets = await self.secrets_store.load()
         return dict(user_secrets.custom_secrets) if user_secrets else {}
 
+    async def get_mcp_api_key(self) -> str | None:
+        raise NotImplementedError()
+
 
 async def get_user_proactive_conversation_setting(user_id: str | None) -> bool:
     """Get the user's proactive conversation setting.
 
enterprise/poetry.lock (generated): 280 lines changed
@@ -201,14 +201,14 @@ files = [
 
 [[package]]
 name = "anthropic"
-version = "0.72.0"
+version = "0.75.0"
 description = "The official Python library for the anthropic API"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "anthropic-0.72.0-py3-none-any.whl", hash = "sha256:0e9f5a7582f038cab8efbb4c959e49ef654a56bfc7ba2da51b5a7b8a84de2e4d"},
-    {file = "anthropic-0.72.0.tar.gz", hash = "sha256:8971fe76dcffc644f74ac3883069beb1527641115ae0d6eb8fa21c1ce4082f7a"},
+    {file = "anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b"},
+    {file = "anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb"},
 ]
 
 [package.dependencies]

@@ -682,37 +682,37 @@ crt = ["awscrt (==0.27.6)"]
 
 [[package]]
 name = "browser-use"
-version = "0.9.5"
+version = "0.10.1"
 description = "Make websites accessible for AI agents"
 optional = false
 python-versions = "<4.0,>=3.11"
 groups = ["main"]
 files = [
-    {file = "browser_use-0.9.5-py3-none-any.whl", hash = "sha256:4a2e92847204d1ded269026a99cb0cc0e60e38bd2751fa3f58aedd78f00b4e67"},
-    {file = "browser_use-0.9.5.tar.gz", hash = "sha256:f8285fe253b149d01769a7084883b4cf4db351e2f38e26302c157bcbf14a703f"},
+    {file = "browser_use-0.10.1-py3-none-any.whl", hash = "sha256:96e603bfc71098175342cdcb0592519e6f244412e740f0254e4389fdd82a977f"},
+    {file = "browser_use-0.10.1.tar.gz", hash = "sha256:5f211ecfdf1f9fd186160f10df70dedd661821231e30f1bce40939787abab223"},
 ]
 
 [package.dependencies]
 aiohttp = "3.12.15"
-anthropic = ">=0.68.1,<1.0.0"
+anthropic = ">=0.72.1,<1.0.0"
 anyio = ">=4.9.0"
 authlib = ">=1.6.0"
 bubus = ">=1.5.6"
-cdp-use = ">=1.4.0"
+cdp-use = ">=1.4.4"
 click = ">=8.1.8"
 cloudpickle = ">=3.1.1"
 google-api-core = ">=2.25.0"
 google-api-python-client = ">=2.174.0"
 google-auth = ">=2.40.3"
 google-auth-oauthlib = ">=1.2.2"
-google-genai = ">=1.29.0,<2.0.0"
+google-genai = ">=1.50.0,<2.0.0"
 groq = ">=0.30.0"
 httpx = ">=0.28.1"
 inquirerpy = ">=0.3.4"
 markdownify = ">=1.2.0"
 mcp = ">=1.10.1"
 ollama = ">=0.5.1"
-openai = ">=1.99.2,<2.0.0"
+openai = ">=2.7.2,<3.0.0"
 pillow = ">=11.2.1"
 portalocker = ">=2.7.0,<3.0.0"
 posthog = ">=3.7.0"

@@ -721,6 +721,7 @@ pydantic = ">=2.11.5"
 pyobjc = {version = ">=11.0", markers = "platform_system == \"darwin\""}
 pyotp = ">=2.9.0"
+pypdf = ">=5.7.0"
 python-docx = ">=1.2.0"
 python-dotenv = ">=1.0.1"
 reportlab = ">=4.0.0"
 requests = ">=2.32.3"

@@ -850,14 +851,14 @@ files = [
 
 [[package]]
 name = "cdp-use"
-version = "1.4.3"
+version = "1.4.4"
 description = "Type safe generator/client library for CDP"
 optional = false
 python-versions = ">=3.11"
 groups = ["main"]
 files = [
-    {file = "cdp_use-1.4.3-py3-none-any.whl", hash = "sha256:c48664604470c2579aa1e677c3e3e7e24c4f300c54804c093d935abb50479ecd"},
-    {file = "cdp_use-1.4.3.tar.gz", hash = "sha256:9029c04bdc49fbd3939d2bf1988ad8d88e260729c7d5e35c2f6c87591f5a10e9"},
+    {file = "cdp_use-1.4.4-py3-none-any.whl", hash = "sha256:e37e80e067db2653d6fdf953d4ff9e5d80d75daa27b7c6d48c0261cccbef73e1"},
+    {file = "cdp_use-1.4.4.tar.gz", hash = "sha256:330a848b517006eb9ad1dc468aa6434d913cf0c6918610760c36c3fdfdba0fab"},
 ]
 
 [package.dependencies]

@@ -2978,28 +2979,29 @@ testing = ["pytest"]
 
 [[package]]
 name = "google-genai"
-version = "1.32.0"
+version = "1.53.0"
 description = "GenAI Python SDK"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
 groups = ["main"]
 files = [
-    {file = "google_genai-1.32.0-py3-none-any.whl", hash = "sha256:c0c4b1d45adf3aa99501050dd73da2f0dea09374002231052d81a6765d15e7f6"},
-    {file = "google_genai-1.32.0.tar.gz", hash = "sha256:349da3f5ff0e981066bd508585fcdd308d28fc4646f318c8f6d1aa6041f4c7e3"},
+    {file = "google_genai-1.53.0-py3-none-any.whl", hash = "sha256:65a3f99e5c03c372d872cda7419f5940e723374bb12a2f3ffd5e3e56e8eb2094"},
+    {file = "google_genai-1.53.0.tar.gz", hash = "sha256:938a26d22f3fd32c6eeeb4276ef204ef82884e63af9842ce3eac05ceb39cbd8d"},
 ]
 
 [package.dependencies]
 anyio = ">=4.8.0,<5.0.0"
-google-auth = ">=2.14.1,<3.0.0"
+google-auth = {version = ">=2.14.1,<3.0.0", extras = ["requests"]}
 httpx = ">=0.28.1,<1.0.0"
-pydantic = ">=2.0.0,<3.0.0"
+pydantic = ">=2.9.0,<3.0.0"
 requests = ">=2.28.1,<3.0.0"
+tenacity = ">=8.2.3,<9.2.0"
 typing-extensions = ">=4.11.0,<5.0.0"
 websockets = ">=13.0.0,<15.1.0"
 
 [package.extras]
-aiohttp = ["aiohttp (<4.0.0)"]
+aiohttp = ["aiohttp (<3.13.3)"]
 local-tokenizer = ["protobuf", "sentencepiece (>=0.2.0)"]
 
 [[package]]
 name = "google-resumable-media"
@@ -3055,6 +3057,8 @@ files = [
     {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"},
     {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"},
     {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"},
+    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"},
+    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"},
     {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"},
     {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"},
     {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"},

@@ -3064,6 +3068,8 @@ files = [
     {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"},
     {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"},
     {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"},
+    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"},
+    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"},
     {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"},
     {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"},
     {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"},

@@ -3073,6 +3079,8 @@ files = [
     {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"},
     {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"},
     {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"},
+    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"},
+    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"},
     {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"},
     {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"},
     {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"},

@@ -3082,6 +3090,8 @@ files = [
     {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"},
     {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"},
     {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"},
+    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"},
+    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"},
     {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"},
     {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"},
     {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"},

@@ -3089,6 +3099,8 @@ files = [
     {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"},
     {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"},
     {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"},
+    {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"},
+    {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"},
     {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"},
     {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"},
     {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"},

@@ -3098,6 +3110,8 @@ files = [
     {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"},
     {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"},
     {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"},
+    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"},
+    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"},
     {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"},
     {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"},
     {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"},
@@ -3166,83 +3180,87 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4
 
 [[package]]
 name = "grpcio"
-version = "1.74.0"
+version = "1.67.1"
 description = "HTTP/2-based RPC framework"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.8"
 groups = ["main"]
 files = [
-    {file = "grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907"},
-    {file = "grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb"},
-    {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486"},
-    {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11"},
-    {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9"},
-    {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc"},
-    {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e"},
-    {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82"},
-    {file = "grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7"},
-    {file = "grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5"},
-    {file = "grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31"},
-    {file = "grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4"},
-    {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce"},
-    {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3"},
-    {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182"},
-    {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d"},
-    {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f"},
-    {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4"},
-    {file = "grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b"},
-    {file = "grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11"},
-    {file = "grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8"},
-    {file = "grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6"},
-    {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5"},
-    {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49"},
-    {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7"},
-    {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3"},
-    {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707"},
-    {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b"},
-    {file = "grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c"},
-    {file = "grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc"},
-    {file = "grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89"},
-    {file = "grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01"},
-    {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e"},
-    {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91"},
-    {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249"},
-    {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362"},
-    {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f"},
-    {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20"},
-    {file = "grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa"},
-    {file = "grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24"},
-    {file = "grpcio-1.74.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae"},
-    {file = "grpcio-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b"},
-    {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a"},
-    {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a"},
-    {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9"},
-    {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7"},
-    {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176"},
-    {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac"},
-    {file = "grpcio-1.74.0-cp39-cp39-win32.whl", hash = "sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854"},
-    {file = "grpcio-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa"},
-    {file = "grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1"},
+    {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"},
+    {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"},
+    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"},
+    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"},
+    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"},
+    {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"},
+    {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"},
+    {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"},
+    {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"},
+    {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"},
+    {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"},
+    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"},
+    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"},
+    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"},
+    {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"},
+    {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"},
+    {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"},
+    {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"},
+    {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"},
+    {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"},
+    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"},
+    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"},
+    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"},
+    {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"},
+    {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"},
+    {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"},
+    {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"},
+    {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"},
+    {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"},
+    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"},
+    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"},
+    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"},
+    {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"},
+    {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"},
+    {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"},
+    {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"},
+    {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"},
+    {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"},
+    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"},
+    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"},
+    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"},
+    {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"},
+    {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"},
+    {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"},
+    {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"},
+    {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"},
+    {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"},
+    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"},
+    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"},
+    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"},
+    {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"},
+    {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"},
+    {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"},
+    {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"},
+    {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"},
 ]
 
 [package.extras]
-protobuf = ["grpcio-tools (>=1.74.0)"]
+protobuf = ["grpcio-tools (>=1.67.1)"]
 
 [[package]]
 name = "grpcio-status"
-version = "1.71.2"
+version = "1.67.1"
 description = "Status proto mapping for gRPC"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.8"
 groups = ["main"]
 files = [
-    {file = "grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3"},
-    {file = "grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50"},
+    {file = "grpcio_status-1.67.1-py3-none-any.whl", hash = "sha256:16e6c085950bdacac97c779e6a502ea671232385e6e37f258884d6883392c2bd"},
+    {file = "grpcio_status-1.67.1.tar.gz", hash = "sha256:2bf38395e028ceeecfd8866b081f61628114b384da7d51ae064ddc8d766a5d11"},
 ]
 
 [package.dependencies]
 googleapis-common-protos = ">=1.5.5"
-grpcio = ">=1.71.2"
+grpcio = ">=1.67.1"
 protobuf = ">=5.26.1,<6.0dev"
 
 [[package]]
@@ -4540,42 +4558,39 @@ valkey = ["valkey (>=6)"]
 
 [[package]]
 name = "litellm"
-version = "1.77.7"
+version = "1.80.7"
 description = "Library to easily interface with LLM API providers"
 optional = false
-python-versions = ">=3.8.1,<4.0, !=3.9.7"
+python-versions = "<4.0,>=3.9"
 groups = ["main"]
-files = []
-develop = false
+files = [
+    {file = "litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"},
+    {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"},
+]
 
 [package.dependencies]
 aiohttp = ">=3.10"
 click = "*"
 fastuuid = ">=0.13.0"
 grpcio = ">=1.62.3,<1.68.0"
 httpx = ">=0.23.0"
 importlib-metadata = ">=6.8.0"
-jinja2 = "^3.1.2"
-jsonschema = "^4.22.0"
-openai = ">=1.99.5"
-pydantic = "^2.5.0"
+jinja2 = ">=3.1.2,<4.0.0"
+jsonschema = ">=4.22.0,<5.0.0"
+openai = ">=2.8.0"
+pydantic = ">=2.5.0,<3.0.0"
 python-dotenv = ">=0.2.0"
 tiktoken = ">=0.7.0"
 tokenizers = "*"
 
 [package.extras]
 caching = ["diskcache (>=5.6.1,<6.0.0)"]
-extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
+extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"]
 mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""]
-proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
-semantic-router = ["semantic-router ; python_version >= \"3.9\""]
+proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"]
+semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""]
 utils = ["numpydoc"]
 
-[package.source]
-type = "git"
-url = "https://github.com/BerriAI/litellm.git"
-reference = "v1.77.7.dev9"
-resolved_reference = "763d2f8ccdd8412dbe6d4ac0e136d9ac34dcd4c0"
-
 [[package]]
 name = "llvmlite"
 version = "0.44.0"
@@ -5644,28 +5659,28 @@ pydantic = ">=2.9"
 
 [[package]]
 name = "openai"
-version = "1.99.9"
+version = "2.8.0"
 description = "The official Python library for the openai API"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 groups = ["main", "test"]
 files = [
-    {file = "openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a"},
-    {file = "openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92"},
+    {file = "openai-2.8.0-py3-none-any.whl", hash = "sha256:ba975e347f6add2fe13529ccb94d54a578280e960765e5224c34b08d7e029ddf"},
+    {file = "openai-2.8.0.tar.gz", hash = "sha256:4851908f6d6fcacbd47ba659c5ac084f7725b752b6bfa1e948b6fbfc111a6bad"},
 ]
 
 [package.dependencies]
 anyio = ">=3.5.0,<5"
 distro = ">=1.7.0,<2"
 httpx = ">=0.23.0,<1"
-jiter = ">=0.4.0,<1"
+jiter = ">=0.10.0,<1"
 pydantic = ">=1.9.0,<3"
 sniffio = "*"
 tqdm = ">4"
 typing-extensions = ">=4.11,<5"
 
 [package.extras]
-aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"]
+aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"]
 datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
 realtime = ["websockets (>=13,<16)"]
 voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
@@ -5820,14 +5835,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0
 
 [[package]]
 name = "openhands-agent-server"
-version = "1.3.0"
+version = "1.4.1"
 description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
 optional = false
 python-versions = ">=3.12"
 groups = ["main"]
 files = [
-    {file = "openhands_agent_server-1.3.0-py3-none-any.whl", hash = "sha256:2f87f790c740dc3fb81821c5f9fa375af875fbb937ebca3baa6dc5c035035b3c"},
-    {file = "openhands_agent_server-1.3.0.tar.gz", hash = "sha256:0a83ae77373f5c41d0ba0e22d8f0f6144d54d55784183a50b7c098c96cd5135c"},
+    {file = "openhands_agent_server-1.4.1-py3-none-any.whl", hash = "sha256:1e621d15215a48e2398e23c58a791347f06c215c2344053aeb26b562c34a44ee"},
+    {file = "openhands_agent_server-1.4.1.tar.gz", hash = "sha256:03010a5c8d63bbd5b088458eb75308ef16559018140d75a3644ae5bbc3531bbf"},
 ]
 
 [package.dependencies]

@@ -5835,6 +5850,7 @@ aiosqlite = ">=0.19"
 alembic = ">=1.13"
+docker = ">=7.1,<8"
 fastapi = ">=0.104"
 openhands-sdk = "*"
 pydantic = ">=2"
 sqlalchemy = ">=2"
 uvicorn = ">=0.31.1"
@@ -5843,7 +5859,7 @@ wsproto = ">=1.2.0"
 
 [[package]]
 name = "openhands-ai"
-version = "0.62.0"
+version = "0.0.0-post.5625+0a98f165e"
 description = "OpenHands: Code Less, Make More"
 optional = false
 python-versions = "^3.12,<3.14"

@@ -5879,15 +5895,15 @@ json-repair = "*"
 jupyter_kernel_gateway = "*"
 kubernetes = "^33.1.0"
 libtmux = ">=0.46.2"
-litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*"
+litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*"
 lmnr = "^0.7.20"
 memory-profiler = "^0.61.0"
 numpy = "*"
-openai = "1.99.9"
+openai = "2.8.0"
 openhands-aci = "0.3.2"
-openhands-agent-server = "1.3.0"
-openhands-sdk = "1.3.0"
-openhands-tools = "1.3.0"
+openhands-agent-server = "1.4.1"
+openhands-sdk = "1.4.1"
+openhands-tools = "1.4.1"
 opentelemetry-api = "^1.33.1"
 opentelemetry-exporter-otlp-proto-grpc = "^1.33.1"
 pathspec = "^0.12.1"
|
||||
|
||||
[[package]]
|
||||
name = "openhands-sdk"
|
||||
version = "1.3.0"
|
||||
version = "1.4.1"
|
||||
description = "OpenHands SDK - Core functionality for building AI agents"
|
||||
optional = false
|
||||
python-versions = ">=3.12"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openhands_sdk-1.3.0-py3-none-any.whl", hash = "sha256:feee838346f8e60ea3e4d3391de7cb854314eb8b3c9e3dbbb56f98a784aadc56"},
|
||||
{file = "openhands_sdk-1.3.0.tar.gz", hash = "sha256:2d060803a78de462121b56dea717a66356922deb02276f37b29fae8af66343fb"},
|
||||
{file = "openhands_sdk-1.4.1-py3-none-any.whl", hash = "sha256:70e453eab7f9ab6b705198c2615fdd844b21e14b29d78afaf62724f4a440bcdc"},
|
||||
{file = "openhands_sdk-1.4.1.tar.gz", hash = "sha256:37365de25ed57cf8cc2a8003ab4d7a1fe2a40b49c8e8da84a3f1ea2b522eddf2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
deprecation = ">=2.1.0"
|
||||
fastmcp = ">=2.11.3"
|
||||
httpx = ">=0.27.0"
|
||||
litellm = ">=1.77.7.dev9"
|
||||
litellm = ">=1.80.7"
|
||||
lmnr = ">=0.7.20"
|
||||
pydantic = ">=2.11.7"
|
||||
python-frontmatter = ">=1.1.0"
|
||||
@@ -5970,14 +5986,14 @@ boto3 = ["boto3 (>=1.35.0)"]
 
 [[package]]
 name = "openhands-tools"
-version = "1.3.0"
+version = "1.4.1"
 description = "OpenHands Tools - Runtime tools for AI agents"
 optional = false
 python-versions = ">=3.12"
 groups = ["main"]
 files = [
-    {file = "openhands_tools-1.3.0-py3-none-any.whl", hash = "sha256:f31056d87c3058ac92709f9161c7c602daeee3ed0cb4439097b43cda105ed03e"},
-    {file = "openhands_tools-1.3.0.tar.gz", hash = "sha256:3da46f09e28593677d3e17252ce18584fcc13caab1a73213e66bd7edca2cebe0"},
+    {file = "openhands_tools-1.4.1-py3-none-any.whl", hash = "sha256:8f40189a08bf80eb4a33219ee9ccc528f9c6c4f2d5c9ab807b06c3f3fe21a612"},
+    {file = "openhands_tools-1.4.1.tar.gz", hash = "sha256:4c0caf87f520a207d9035191c77b7b5c53eeec996350a24ffaf7f740a6566b22"},
 ]
 
 [package.dependencies]

@@ -5989,6 +6005,7 @@ func-timeout = ">=4.3.5"
 libtmux = ">=0.46.2"
 openhands-sdk = "*"
 pydantic = ">=2.11.7"
+tom-swe = ">=1.0.3"
 
 [[package]]
 name = "openpyxl"
@@ -13305,6 +13322,31 @@ dev = ["tokenizers[testing]"]
 docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
 testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"]
 
+[[package]]
+name = "tom-swe"
+version = "1.0.3"
+description = "Theory of Mind modeling for Software Engineering assistants"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+    {file = "tom_swe-1.0.3-py3-none-any.whl", hash = "sha256:7b1172b29eb5c8fb7f1975016e7b6a238511b9ac2a7a980bd400dcb4e29773f2"},
+    {file = "tom_swe-1.0.3.tar.gz", hash = "sha256:57c97d0104e563f15bd39edaf2aa6ac4c3e9444afd437fb92458700d22c6c0f5"},
+]
+
+[package.dependencies]
+jinja2 = ">=3.0.0"
+json-repair = ">=0.1.0"
+litellm = ">=1.0.0"
+pydantic = ">=2.0.0"
+python-dotenv = ">=1.0.0"
+tiktoken = ">=0.8.0"
+tqdm = ">=4.65.0"
+
+[package.extras]
+dev = ["aiofiles (>=23.0.0)", "black (>=22.0.0)", "datasets (>=2.0.0)", "fastapi (>=0.104.0)", "httpx (>=0.25.0)", "huggingface-hub (>=0.0.0)", "isort (>=5.0.0)", "mypy (>=1.0.0)", "numpy (>=1.24.0)", "pandas (>=2.0.0)", "pre-commit (>=3.6.0)", "pytest (>=7.0.0)", "pytest-cov (>=6.2.1)", "rich (>=13.0.0)", "ruff (>=0.3.0)", "typing-extensions (>=4.0.0)", "uvicorn (>=0.24.0)"]
+search = ["bm25s (>=0.2.0)", "pystemmer (>=2.2.0)"]
+
 [[package]]
 name = "toml"
 version = "0.10.2"
@@ -203,6 +203,15 @@ class SaasUserAuth(UserAuth):
             self.settings_store = settings_store
         return settings_store
 
+    async def get_mcp_api_key(self) -> str:
+        api_key_store = ApiKeyStore.get_instance()
+        mcp_api_key = api_key_store.retrieve_mcp_api_key(self.user_id)
+        if not mcp_api_key:
+            mcp_api_key = api_key_store.create_api_key(
+                self.user_id, 'MCP_API_KEY', None
+            )
+        return mcp_api_key
+
     @classmethod
     async def get_instance(cls, request: Request) -> UserAuth:
         logger.debug('saas_user_auth_get_instance')
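The new get_mcp_api_key follows a get-or-create pattern: return the stored key for the user if one exists, otherwise mint one under the fixed name 'MCP_API_KEY'. A minimal, self-contained sketch of that contract; FakeApiKeyStore below is a hypothetical in-memory stand-in for the real ApiKeyStore, for illustration only:

import secrets


class FakeApiKeyStore:
    # Hypothetical stand-in: mimics retrieve/create, not the real store.
    def __init__(self) -> None:
        self._keys: dict[str, str] = {}

    def retrieve_mcp_api_key(self, user_id: str) -> str | None:
        return self._keys.get(user_id)

    def create_api_key(self, user_id: str, name: str, expires_at=None) -> str:
        key = secrets.token_hex(16)
        self._keys[user_id] = key
        return key


store = FakeApiKeyStore()
first = store.retrieve_mcp_api_key('u1') or store.create_api_key('u1', 'MCP_API_KEY', None)
# A second lookup returns the same key instead of creating a new one.
assert store.retrieve_mcp_api_key('u1') == first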
@@ -243,7 +252,12 @@ def get_api_key_from_header(request: Request):
     # This is a temp hack
     # Streamable HTTP MCP Client works via redirect requests, but drops the Authorization header for some reason
     # We include the `X-Session-API-Key` header by default due to nested runtimes, so it is used as a drop-in replacement here
-    return request.headers.get('X-Session-API-Key')
+    session_api_key = request.headers.get('X-Session-API-Key')
+    if session_api_key:
+        return session_api_key
+
+    # Fall back to the X-Access-Token header as an additional option
+    return request.headers.get('X-Access-Token')
 
 
 async def saas_user_auth_from_bearer(request: Request) -> SaasUserAuth | None:
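With this change, get_api_key_from_header resolves credentials from three places in a fixed order: a Bearer Authorization header first, then X-Session-API-Key, then X-Access-Token. A simplified sketch of that order, consistent with the tests added later in this diff (a plain dict stands in for the real request headers; not the actual server code):

def resolve_api_key(headers: dict[str, str]) -> str | None:
    auth = headers.get('Authorization', '')
    if auth.startswith('Bearer '):
        # Note: 'Bearer ' with an empty token still wins here, which is
        # exactly the current behavior the last new test asserts.
        return auth[len('Bearer '):]
    session_api_key = headers.get('X-Session-API-Key')
    if session_api_key:
        return session_api_key
    return headers.get('X-Access-Token')


assert resolve_api_key({'Authorization': 'Bearer k', 'X-Access-Token': 'a'}) == 'k'
assert resolve_api_key({'X-Session-API-Key': 's', 'X-Access-Token': 'a'}) == 's'
assert resolve_api_key({'X-Access-Token': 'a'}) == 'a'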
@@ -70,6 +70,11 @@ RUNTIME_CONVERSATION_URL = RUNTIME_URL_PATTERN + (
     else '/api/conversations/{conversation_id}'
 )
 
+RUNTIME_USERNAME = os.getenv('RUNTIME_USERNAME')
+SU_TO_USER = os.getenv('SU_TO_USER', 'false')
+truthy = {'1', 'true', 't', 'yes', 'y', 'on'}
+SU_TO_USER = str(SU_TO_USER.lower() in truthy).lower()
+
 # Time in seconds before a Redis entry is considered expired if not refreshed
 _REDIS_ENTRY_TIMEOUT_SECONDS = 300
 
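The truthy-set normalization above collapses any accepted spelling of an affirmative value to the canonical strings 'true' or 'false', which is the form later forwarded as an environment variable. A quick round-trip demonstration of the same expression:

# Same normalization as the hunk above, applied to a few sample inputs.
truthy = {'1', 'true', 't', 'yes', 'y', 'on'}

for raw in ('1', 'Yes', 'ON', 'false', '0', 'nope'):
    normalized = str(raw.lower() in truthy).lower()
    print(f'{raw!r} -> {normalized!r}')
# '1' -> 'true', 'Yes' -> 'true', 'ON' -> 'true',
# 'false' -> 'false', '0' -> 'false', 'nope' -> 'false'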
@@ -772,7 +777,11 @@ class SaasNestedConversationManager(ConversationManager):
         env_vars['SERVE_FRONTEND'] = '0'
         env_vars['RUNTIME'] = 'local'
         # TODO: In the long term we may come up with a more secure strategy for user management within the nested runtime.
-        env_vars['USER'] = 'openhands' if config.run_as_openhands else 'root'
+        env_vars['USER'] = (
+            RUNTIME_USERNAME
+            if RUNTIME_USERNAME
+            else ('openhands' if config.run_as_openhands else 'root')
+        )
         env_vars['PERMITTED_CORS_ORIGINS'] = ','.join(PERMITTED_CORS_ORIGINS)
         env_vars['port'] = '60000'
         # TODO: These values are static in the runtime-api project, but do not get copied into the runtime ENV
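The new conditional gives an explicit RUNTIME_USERNAME precedence over the run_as_openhands toggle. The same precedence as a standalone function, a hypothetical helper for illustration only:

def pick_runtime_user(runtime_username: str | None, run_as_openhands: bool) -> str:
    # An explicit override wins; otherwise fall back to the toggle.
    if runtime_username:
        return runtime_username
    return 'openhands' if run_as_openhands else 'root'


assert pick_runtime_user('deploy', True) == 'deploy'   # override wins
assert pick_runtime_user(None, True) == 'openhands'
assert pick_runtime_user(None, False) == 'root'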
@@ -789,6 +798,7 @@ class SaasNestedConversationManager(ConversationManager):
         env_vars['INITIAL_NUM_WARM_SERVERS'] = '1'
         env_vars['INIT_GIT_IN_EMPTY_WORKSPACE'] = '1'
         env_vars['ENABLE_V1'] = '0'
+        env_vars['SU_TO_USER'] = SU_TO_USER
 
         # We need this for LLM traces tracking to identify the source of the LLM calls
         env_vars['WEB_HOST'] = WEB_HOST
 
@@ -535,3 +535,115 @@ def test_get_api_key_from_header_with_invalid_authorization_format():
 
     # Assert that None was returned
     assert api_key is None
+
+
+def test_get_api_key_from_header_with_x_access_token():
+    """Test that get_api_key_from_header extracts API key from X-Access-Token header."""
+    # Create a mock request with X-Access-Token header
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {'X-Access-Token': 'access_token_key'}
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key was correctly extracted
+    assert api_key == 'access_token_key'
+
+
+def test_get_api_key_from_header_priority_authorization_over_x_access_token():
+    """Test that Authorization header takes priority over X-Access-Token header."""
+    # Create a mock request with both headers
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'Authorization': 'Bearer auth_api_key',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key from Authorization header was used
+    assert api_key == 'auth_api_key'
+
+
+def test_get_api_key_from_header_priority_x_session_over_x_access_token():
+    """Test that X-Session-API-Key header takes priority over X-Access-Token header."""
+    # Create a mock request with both headers
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'X-Session-API-Key': 'session_api_key',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key from X-Session-API-Key header was used
+    assert api_key == 'session_api_key'
+
+
+def test_get_api_key_from_header_all_three_headers():
+    """Test header priority when all three headers are present."""
+    # Create a mock request with all three headers
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'Authorization': 'Bearer auth_api_key',
+        'X-Session-API-Key': 'session_api_key',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key from Authorization header was used (highest priority)
+    assert api_key == 'auth_api_key'
+
+
+def test_get_api_key_from_header_invalid_authorization_fallback_to_x_access_token():
+    """Test that invalid Authorization header falls back to X-Access-Token."""
+    # Create a mock request with invalid Authorization header and X-Access-Token
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'Authorization': 'InvalidFormat api_key',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key from X-Access-Token header was used
+    assert api_key == 'access_token_key'
+
+
+def test_get_api_key_from_header_empty_headers():
+    """Test that empty header values are handled correctly."""
+    # Create a mock request with empty header values
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'Authorization': '',
+        'X-Session-API-Key': '',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that the API key from X-Access-Token header was used
+    assert api_key == 'access_token_key'
+
+
+def test_get_api_key_from_header_bearer_with_empty_token():
+    """Test that Bearer header with empty token falls back to other headers."""
+    # Create a mock request with Bearer header with empty token
+    mock_request = MagicMock(spec=Request)
+    mock_request.headers = {
+        'Authorization': 'Bearer ',
+        'X-Access-Token': 'access_token_key',
+    }
+
+    # Call the function
+    api_key = get_api_key_from_header(mock_request)
+
+    # Assert that empty string from Bearer is returned (current behavior)
+    # This tests the current implementation behavior
+    assert api_key == ''
@@ -143,6 +143,57 @@ describe("UserContextMenu", () => {
     });
   });
 
+  describe("HIDE_LLM_SETTINGS feature flag", () => {
+    it("should hide LLM settings link when HIDE_LLM_SETTINGS is true", async () => {
+      vi.spyOn(OptionService, "getConfig").mockResolvedValue({
+        APP_MODE: "saas",
+        GITHUB_CLIENT_ID: "test",
+        POSTHOG_CLIENT_KEY: "test",
+        FEATURE_FLAGS: {
+          ENABLE_BILLING: false,
+          HIDE_LLM_SETTINGS: true,
+          ENABLE_JIRA: false,
+          ENABLE_JIRA_DC: false,
+          ENABLE_LINEAR: false,
+        },
+      });
+
+      renderUserContextMenu({ type: "user", onClose: vi.fn });
+
+      await waitFor(() => {
+        // Other nav items should still be visible
+        expect(screen.getByText("SETTINGS$NAV_USER")).toBeInTheDocument();
+        // LLM settings (to: "/settings") should NOT be visible
+        expect(
+          screen.queryByText("COMMON$LANGUAGE_MODEL_LLM"),
+        ).not.toBeInTheDocument();
+      });
+    });
+
+    it("should show LLM settings link when HIDE_LLM_SETTINGS is false", async () => {
+      vi.spyOn(OptionService, "getConfig").mockResolvedValue({
+        APP_MODE: "saas",
+        GITHUB_CLIENT_ID: "test",
+        POSTHOG_CLIENT_KEY: "test",
+        FEATURE_FLAGS: {
+          ENABLE_BILLING: false,
+          HIDE_LLM_SETTINGS: false,
+          ENABLE_JIRA: false,
+          ENABLE_JIRA_DC: false,
+          ENABLE_LINEAR: false,
+        },
+      });
+
+      renderUserContextMenu({ type: "user", onClose: vi.fn });
+
+      await waitFor(() => {
+        expect(
+          screen.getByText("COMMON$LANGUAGE_MODEL_LLM"),
+        ).toBeInTheDocument();
+      });
+    });
+  });
+
   it("should render additional context items when user is an admin", () => {
     renderUserContextMenu({ type: "admin", onClose: vi.fn });
 
@@ -57,7 +57,7 @@ describe("MicroagentsModal - Refresh Button", () => {
   });
 
   afterEach(() => {
-    vi.clearAllMocks();
+    vi.restoreAllMocks();
   });
 
   describe("Refresh Button Rendering", () => {

@@ -74,13 +74,15 @@ describe("MicroagentsModal - Refresh Button", () => {
   describe("Refresh Button Functionality", () => {
     it("should call refetch when refresh button is clicked", async () => {
      const user = userEvent.setup();
-      const refreshSpy = vi.spyOn(ConversationService, "getMicroagents");
 
       renderWithProviders(<MicroagentsModal {...defaultProps} />);
 
+      const refreshSpy = vi.spyOn(ConversationService, "getMicroagents");
+
       // Wait for the component to load and render the refresh button
       const refreshButton = await screen.findByTestId("refresh-microagents");
 
+      refreshSpy.mockClear();
+
       await user.click(refreshButton);
 
       expect(refreshSpy).toHaveBeenCalledTimes(1);
@@ -1,3 +1,4 @@
+/* eslint-disable max-classes-per-file */
 import { beforeAll, describe, expect, it, vi, afterEach } from "vitest";
 import { useTerminal } from "#/hooks/use-terminal";
 import { Command, useCommandStore } from "#/state/command-store";

@@ -45,17 +46,29 @@ describe("useTerminal", () => {
   }));
 
   beforeAll(() => {
-    // mock ResizeObserver
-    window.ResizeObserver = vi.fn().mockImplementation(() => ({
-      observe: vi.fn(),
-      unobserve: vi.fn(),
-      disconnect: vi.fn(),
-    }));
+    // mock ResizeObserver - use class for Vitest 4 constructor support
+    window.ResizeObserver = class {
+      observe = vi.fn();
+
+      unobserve = vi.fn();
+
+      disconnect = vi.fn();
+    } as unknown as typeof ResizeObserver;
 
-    // mock Terminal
+    // mock Terminal - use class for Vitest 4 constructor support
     vi.mock("@xterm/xterm", async (importOriginal) => ({
       ...(await importOriginal<typeof import("@xterm/xterm")>()),
-      Terminal: vi.fn().mockImplementation(() => mockTerminal),
+      Terminal: class {
+        loadAddon = mockTerminal.loadAddon;
+
+        open = mockTerminal.open;
+
+        write = mockTerminal.write;
+
+        writeln = mockTerminal.writeln;
+
+        dispose = mockTerminal.dispose;
+      },
     }));
   });
 
@@ -1,5 +1,5 @@
 import { render, screen, waitFor, within } from "@testing-library/react";
-import { beforeEach, describe, expect, it, vi } from "vitest";
+import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
 import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
 import userEvent from "@testing-library/user-event";
 import { createRoutesStub, Outlet } from "react-router";

@@ -21,25 +21,25 @@ const MOCK_GET_SECRETS_RESPONSE: GetSecretsResponse["custom_secrets"] = [
   },
 ];
 
-const RouterStub = createRoutesStub([
-  {
-    Component: () => <Outlet />,
-    path: "/settings",
-    children: [
-      {
-        Component: SecretsSettingsScreen,
-        path: "/settings/secrets",
-      },
-      {
-        Component: () => <div data-testid="git-settings-screen" />,
-        path: "/settings/integrations",
-      },
-    ],
-  },
-]);
+const renderSecretsSettings = () => {
+  const RouterStub = createRoutesStub([
+    {
+      Component: () => <Outlet />,
+      path: "/settings",
+      children: [
+        {
+          Component: SecretsSettingsScreen,
+          path: "/settings/secrets",
+        },
+        {
+          Component: () => <div data-testid="git-settings-screen" />,
+          path: "/settings/integrations",
+        },
+      ],
+    },
+  ]);
 
-const renderSecretsSettings = () =>
-  render(<RouterStub initialEntries={["/settings/secrets"]} />, {
+  return render(<RouterStub initialEntries={["/settings/secrets"]} />, {
     wrapper: ({ children }) => (
       <QueryClientProvider
         client={

@@ -52,6 +52,7 @@ const renderSecretsSettings = () =>
       </QueryClientProvider>
     ),
   });
+};
 
 beforeEach(() => {
   const getConfigSpy = vi.spyOn(OptionService, "getConfig");

@@ -61,6 +62,10 @@ beforeEach(() => {
   });
 });
 
+afterEach(() => {
+  vi.restoreAllMocks();
+});
+
 describe("Content", () => {
   it("should render the secrets settings screen", () => {
     renderSecretsSettings();

@@ -501,6 +506,8 @@ describe("Secret actions", () => {
 
   it("should not submit whitespace secret names or values", async () => {
     const createSecretSpy = vi.spyOn(SecretsService, "createSecret");
+    const getSecretsSpy = vi.spyOn(SecretsService, "getSecrets");
+    getSecretsSpy.mockResolvedValue([]);
     renderSecretsSettings();
 
     // render form & hide items

@@ -532,9 +539,11 @@ describe("Secret actions", () => {
     await userEvent.click(submitButton);
 
     expect(createSecretSpy).not.toHaveBeenCalled();
-    expect(
-      screen.queryByText("SECRETS$SECRET_VALUE_REQUIRED"),
-    ).toBeInTheDocument();
+    await waitFor(() => {
+      expect(
+        screen.queryByText("SECRETS$SECRET_VALUE_REQUIRED"),
+      ).toBeInTheDocument();
+    });
   });
 
   it("should not reset input values on an invalid submit", async () => {
frontend/package-lock.json (generated, 4461 lines changed): diff suppressed because it is too large.
@@ -8,56 +8,56 @@
  },
  "dependencies": {
    "@heroui/react": "2.8.5",
    "@heroui/use-infinite-scroll": "^2.2.11",
    "@heroui/use-infinite-scroll": "^2.2.12",
    "@microlink/react-json-view": "^1.26.2",
    "@monaco-editor/react": "^4.7.0-rc.0",
    "@posthog/react": "^1.4.0",
    "@react-router/node": "^7.9.3",
    "@react-router/serve": "^7.9.3",
    "@posthog/react": "^1.5.0",
    "@react-router/node": "^7.9.6",
    "@react-router/serve": "^7.9.6",
    "@react-types/shared": "^3.32.0",
    "@stripe/react-stripe-js": "^4.0.2",
    "@stripe/stripe-js": "^7.9.0",
    "@tailwindcss/postcss": "^4.1.13",
    "@tailwindcss/vite": "^4.1.13",
    "@tanstack/react-query": "^5.90.2",
    "@stripe/react-stripe-js": "^5.4.1",
    "@stripe/stripe-js": "^8.5.3",
    "@tailwindcss/postcss": "^4.1.17",
    "@tailwindcss/vite": "^4.1.17",
    "@tanstack/react-query": "^5.90.11",
    "@uidotdev/usehooks": "^2.4.1",
    "@vitejs/plugin-react": "^5.0.4",
    "@vitejs/plugin-react": "^5.1.1",
    "@xterm/addon-fit": "^0.10.0",
    "@xterm/xterm": "^5.4.0",
    "axios": "^1.12.2",
    "axios": "^1.13.2",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "date-fns": "^4.1.0",
    "downshift": "^9.0.10",
    "downshift": "^9.0.12",
    "eslint-config-airbnb-typescript": "^18.0.0",
    "framer-motion": "^12.23.22",
    "i18next": "^25.5.2",
    "framer-motion": "^12.23.25",
    "i18next": "^25.7.1",
    "i18next-browser-languagedetector": "^8.2.0",
    "i18next-http-backend": "^3.0.2",
    "isbot": "^5.1.31",
    "jose": "^6.1.0",
    "lucide-react": "^0.544.0",
    "isbot": "^5.1.32",
    "jose": "^6.1.3",
    "lucide-react": "^0.555.0",
    "monaco-editor": "^0.53.0",
    "posthog-js": "^1.298.1",
    "react": "^19.1.1",
    "react-dom": "^19.1.1",
    "posthog-js": "^1.299.0",
    "react": "^19.2.0",
    "react-dom": "^19.2.0",
    "react-highlight": "^0.15.0",
    "react-hot-toast": "^2.6.0",
    "react-i18next": "^16.0.0",
    "react-i18next": "^16.3.5",
    "react-icons": "^5.5.0",
    "react-markdown": "^10.1.0",
    "react-router": "^7.9.3",
    "react-syntax-highlighter": "^15.6.6",
    "react-router": "^7.9.6",
    "react-syntax-highlighter": "^16.1.0",
    "remark-breaks": "^4.0.0",
    "remark-gfm": "^4.0.1",
    "sirv-cli": "^3.0.1",
    "socket.io-client": "^4.8.1",
    "tailwind-merge": "^3.3.1",
    "tailwind-merge": "^3.4.0",
    "tailwind-scrollbar": "^4.0.2",
    "vite": "^7.1.7",
    "vite": "^7.2.6",
    "web-vitals": "^5.1.0",
    "ws": "^8.18.2",
    "zustand": "^5.0.8"
    "zustand": "^5.0.9"
  },
  "scripts": {
    "dev": "npm run make-i18n && cross-env VITE_MOCK_API=false react-router dev",
@@ -96,25 +96,25 @@
    "@babel/traverse": "^7.28.3",
    "@babel/types": "^7.28.2",
    "@mswjs/socket.io-binding": "^0.2.0",
    "@playwright/test": "^1.55.1",
    "@react-router/dev": "^7.9.3",
    "@playwright/test": "^1.57.0",
    "@react-router/dev": "^7.9.6",
    "@tailwindcss/typography": "^0.5.19",
    "@tanstack/eslint-plugin-query": "^5.91.0",
    "@testing-library/dom": "^10.4.1",
    "@testing-library/jest-dom": "^6.8.0",
    "@testing-library/jest-dom": "^6.9.1",
    "@testing-library/react": "^16.3.0",
    "@testing-library/user-event": "^14.6.1",
    "@types/node": "^24.5.2",
    "@types/react": "^19.1.15",
    "@types/react-dom": "^19.1.9",
    "@types/node": "^24.10.1",
    "@types/react": "^19.2.7",
    "@types/react-dom": "^19.2.3",
    "@types/react-highlight": "^0.12.8",
    "@types/react-syntax-highlighter": "^15.5.13",
    "@types/ws": "^8.18.1",
    "@typescript-eslint/eslint-plugin": "^7.18.0",
    "@typescript-eslint/parser": "^7.18.0",
    "@vitest/coverage-v8": "^3.2.3",
    "autoprefixer": "^10.4.21",
    "cross-env": "^10.0.0",
    "@vitest/coverage-v8": "^4.0.14",
    "autoprefixer": "^10.4.22",
    "cross-env": "^10.1.0",
    "eslint": "^8.57.0",
    "eslint-config-airbnb": "^19.0.4",
    "eslint-config-airbnb-typescript": "^18.0.0",
@@ -127,16 +127,16 @@
    "eslint-plugin-react-hooks": "^4.6.2",
    "eslint-plugin-unused-imports": "^4.2.0",
    "husky": "^9.1.7",
    "jsdom": "^27.0.0",
    "lint-staged": "^16.2.3",
    "jsdom": "^27.2.0",
    "lint-staged": "^16.2.7",
    "msw": "^2.6.6",
    "prettier": "^3.6.2",
    "stripe": "^18.5.0",
    "prettier": "^3.7.3",
    "stripe": "^20.0.0",
    "tailwindcss": "^4.1.8",
    "typescript": "^5.9.2",
    "typescript": "^5.9.3",
    "vite-plugin-svgr": "^4.5.0",
    "vite-tsconfig-paths": "^5.1.4",
    "vitest": "^3.0.2"
    "vitest": "^4.0.14"
  },
  "packageManager": "npm@10.5.0",
  "volta": {

@@ -7,8 +7,8 @@
 * - Please do NOT modify this file.
 */

const PACKAGE_VERSION = '2.11.1'
const INTEGRITY_CHECKSUM = 'f5825c521429caf22a4dd13b66e243af'
const PACKAGE_VERSION = '2.12.3'
const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82'
const IS_MOCKED_RESPONSE = Symbol('isMockedResponse')
const activeClientIds = new Set()

@@ -71,11 +71,6 @@ addEventListener('message', async function (event) {
      break
    }

    case 'MOCK_DEACTIVATE': {
      activeClientIds.delete(clientId)
      break
    }

    case 'CLIENT_CLOSED': {
      activeClientIds.delete(clientId)

@@ -94,6 +89,8 @@ addEventListener('message', async function (event) {
})

addEventListener('fetch', function (event) {
  const requestInterceptedAt = Date.now()

  // Bypass navigation requests.
  if (event.request.mode === 'navigate') {
    return
@@ -110,23 +107,29 @@ addEventListener('fetch', function (event) {

  // Bypass all requests when there are no active clients.
  // Prevents the self-unregistered worker from handling requests
  // after it's been deleted (still remains active until the next reload).
  // after it's been terminated (still remains active until the next reload).
  if (activeClientIds.size === 0) {
    return
  }

  const requestId = crypto.randomUUID()
  event.respondWith(handleRequest(event, requestId))
  event.respondWith(handleRequest(event, requestId, requestInterceptedAt))
})

/**
 * @param {FetchEvent} event
 * @param {string} requestId
 * @param {number} requestInterceptedAt
 */
async function handleRequest(event, requestId) {
async function handleRequest(event, requestId, requestInterceptedAt) {
  const client = await resolveMainClient(event)
  const requestCloneForEvents = event.request.clone()
  const response = await getResponse(event, client, requestId)
  const response = await getResponse(
    event,
    client,
    requestId,
    requestInterceptedAt,
  )

  // Send back the response clone for the "response:*" life-cycle events.
  // Ensure MSW is active and ready to handle the message, otherwise
@@ -202,9 +205,10 @@ async function resolveMainClient(event) {
 * @param {FetchEvent} event
 * @param {Client | undefined} client
 * @param {string} requestId
 * @param {number} requestInterceptedAt
 * @returns {Promise<Response>}
 */
async function getResponse(event, client, requestId) {
async function getResponse(event, client, requestId, requestInterceptedAt) {
  // Clone the request because it might've been already used
  // (i.e. its body has been read and sent to the client).
  const requestClone = event.request.clone()
@@ -255,6 +259,7 @@ async function getResponse(event, client, requestId) {
        type: 'REQUEST',
        payload: {
          id: requestId,
          interceptedAt: requestInterceptedAt,
          ...serializedRequest,
        },
      },

@@ -3,15 +3,19 @@ import { Provider } from "#/types/settings";
import { V1SandboxStatus } from "../sandbox-service/sandbox-service.types";

// V1 API Types for requests
// Note: This represents the serialized API format, not the internal TextContent/ImageContent types
export interface V1MessageContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
// These types match the SDK's TextContent and ImageContent formats
export interface V1TextContent {
  type: "text";
  text: string;
}

export interface V1ImageContent {
  type: "image";
  image_urls: string[];
}

export type V1MessageContent = V1TextContent | V1ImageContent;

type V1Role = "user" | "system" | "assistant" | "tool";

export interface V1SendMessageRequest {

@@ -95,7 +95,7 @@ export function ExpandableMessage({
  const statusIconClasses = "h-4 w-4 ml-2 inline";

  if (
    config?.FEATURE_FLAGS.ENABLE_BILLING &&
    config?.FEATURE_FLAGS?.ENABLE_BILLING &&
    config?.APP_MODE === "saas" &&
    id === I18nKey.STATUS$ERROR_LLM_OUT_OF_CREDITS
  ) {

@@ -32,13 +32,7 @@ export function Sidebar() {

  const { pathname } = useLocation();

  // TODO: Remove HIDE_LLM_SETTINGS check once released
  const shouldHideLlmSettings =
    config?.FEATURE_FLAGS.HIDE_LLM_SETTINGS && config?.APP_MODE === "saas";

  React.useEffect(() => {
    if (shouldHideLlmSettings) return;

    if (location.pathname === "/settings") {
      setSettingsModalIsOpen(false);
    } else if (

@@ -63,11 +63,15 @@ export function UserContextMenu({ type, onClose }: UserContextMenuProps) {

  const isOss = config?.APP_MODE === "oss";
  // Filter out organization members/org nav items since they're already handled separately in the menu
  const navItems = (isOss ? OSS_NAV_ITEMS : SAAS_NAV_ITEMS).filter(
  let navItems = (isOss ? OSS_NAV_ITEMS : SAAS_NAV_ITEMS).filter(
    (item) =>
      item.to !== "/settings/organization-members" &&
      item.to !== "/settings/org",
  );
  // Hide LLM settings when the feature flag is enabled
  if (config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS) {
    navItems = navItems.filter((item) => item.to !== "/settings");
  }

  const [inviteMemberModalIsOpen, setInviteMemberModalIsOpen] =
    React.useState(false);

@@ -71,7 +71,18 @@ const getTerminalObservationContent = (
    content = `${content.slice(0, MAX_CONTENT_LENGTH)}...`;
  }

  return `Output:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``;
  // Build the output string
  let output = "";

  // Display the command if available
  if (observation.command) {
    output += `Command: \`${observation.command}\`\n\n`;
  }

  // Display the output
  output += `Output:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``;

  return output;
};

// Tool Observations

@@ -4,8 +4,8 @@ import V1ConversationService from "#/api/conversation-service/v1-conversation-se
import { SuggestedTask } from "#/utils/types";
import { Provider } from "#/types/settings";
import { CreateMicroagent, Conversation } from "#/api/open-hands.types";
import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags";
import { useTracking } from "#/hooks/use-tracking";
import { useSettings } from "#/hooks/query/use-settings";

interface CreateConversationVariables {
  query?: string;
@@ -34,6 +34,7 @@ interface CreateConversationResponse extends Partial<Conversation> {
export const useCreateConversation = () => {
  const queryClient = useQueryClient();
  const { trackConversationCreated } = useTracking();
  const { data: settings } = useSettings();

  return useMutation({
    mutationKey: ["create-conversation"],
@@ -50,7 +51,7 @@ export const useCreateConversation = () => {
        agentType,
      } = variables;

      const useV1 = USE_V1_CONVERSATION_API() && !createMicroagent;
      const useV1 = !!settings?.V1_ENABLED && !createMicroagent;

      if (useV1) {
        // Use V1 API - creates a conversation start task

@@ -35,6 +35,7 @@ const saveSettingsMutationFn = async (settings: Partial<PostSettings>) => {
      settings.GIT_USER_NAME?.trim() || DEFAULT_SETTINGS.GIT_USER_NAME,
    git_user_email:
      settings.GIT_USER_EMAIL?.trim() || DEFAULT_SETTINGS.GIT_USER_EMAIL,
    v1_enabled: settings.V1_ENABLED,
  };

  await SettingsService.saveSettings(apiSettings);

@@ -13,6 +13,6 @@ export const useBalance = () => {
    enabled:
      !isOnTosPage &&
      config?.APP_MODE === "saas" &&
      config?.FEATURE_FLAGS.ENABLE_BILLING,
      config?.FEATURE_FLAGS?.ENABLE_BILLING,
  });
};

@@ -36,6 +36,7 @@ const getSettingsQueryFn = async (): Promise<Settings> => {
    GIT_USER_EMAIL:
      apiSettings.git_user_email || DEFAULT_SETTINGS.GIT_USER_EMAIL,
    IS_NEW_USER: false,
    V1_ENABLED: apiSettings.v1_enabled ?? DEFAULT_SETTINGS.V1_ENABLED,
  };
};

@@ -1,6 +1,6 @@
import { useQuery } from "@tanstack/react-query";
import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api";
import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags";
import { useSettings } from "#/hooks/query/use-settings";

/**
 * Hook to fetch in-progress V1 conversation start tasks
@@ -13,13 +13,17 @@ import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags";
 * @param limit Maximum number of tasks to return (max 100)
 * @returns Query result with array of in-progress start tasks
 */
export const useStartTasks = (limit = 10) =>
  useQuery({
export const useStartTasks = (limit = 10) => {
  const { data: settings } = useSettings();
  const isV1Enabled = settings?.V1_ENABLED;

  return useQuery({
    queryKey: ["start-tasks", "search", limit],
    queryFn: () => V1ConversationService.searchStartTasks(limit),
    enabled: USE_V1_CONVERSATION_API(),
    enabled: isV1Enabled,
    select: (tasks) =>
      tasks.filter(
        (task) => task.status !== "READY" && task.status !== "ERROR",
      ),
  });
};

@@ -41,13 +41,11 @@ export function useSendMessage() {
      },
    ];

    // Add images if present
    // Add images if present - using SDK's ImageContent format
    if (args.image_urls && args.image_urls.length > 0) {
      args.image_urls.forEach((url) => {
        content.push({
          type: "image_url",
          image_url: { url },
        });
      content.push({
        type: "image",
        image_urls: args.image_urls,
      });
    }

@@ -44,7 +44,7 @@ export const useTerminal = () => {
    new Terminal({
      fontFamily: "Menlo, Monaco, 'Courier New', monospace",
      fontSize: 14,
      scrollback: 1000,
      scrollback: 10000,
      scrollSensitivity: 1,
      fastScrollModifier: "alt",
      fastScrollSensitivity: 5,
@@ -62,6 +62,7 @@ export const useTerminal = () => {
      terminal.current.open(ref.current);
      // Hide cursor for read-only terminal using ANSI escape sequence
      terminal.current.write("\x1b[?25l");
      fitAddon.current?.fit();
    }
  }
};

@@ -30,11 +30,12 @@ function BillingSettingsScreen() {
    }

      displaySuccessToast(t(I18nKey.PAYMENT$SUCCESS));

      setSearchParams({});
    } else if (checkoutStatus === "cancel") {
      displayErrorToast(t(I18nKey.PAYMENT$CANCELLED));
      setSearchParams({});
    }

    setSearchParams({});
  }, [checkoutStatus, searchParams, setSearchParams, t, trackCreditsPurchased]);

  return <PaymentForm />;

@@ -118,6 +118,9 @@ function LlmSettingsScreen() {
  const isSaasMode = config?.APP_MODE === "saas";
  const shouldUseOpenHandsKey = isOpenHandsProvider && isSaasMode;

  // Determine if we should hide the agent dropdown when V1 conversation API is enabled
  const isV1Enabled = settings?.V1_ENABLED;

  React.useEffect(() => {
    const determineWhetherToToggleAdvancedSettings = () => {
      if (resources && settings) {
@@ -612,21 +615,23 @@ function LlmSettingsScreen() {
            href="https://tavily.com/"
          />

          <SettingsDropdownInput
            testId="agent-input"
            name="agent-input"
            label={t(I18nKey.SETTINGS$AGENT)}
            items={
              resources?.agents.map((agent) => ({
                key: agent,
                label: agent, // TODO: Add i18n support for agent names
              })) || []
            }
            defaultSelectedKey={settings.AGENT}
            isClearable={false}
            onInputChange={handleAgentIsDirty}
            wrapperClassName="w-full max-w-[680px]"
          />
          {!isV1Enabled && (
            <SettingsDropdownInput
              testId="agent-input"
              name="agent-input"
              label={t(I18nKey.SETTINGS$AGENT)}
              items={
                resources?.agents.map((agent) => ({
                  key: agent,
                  label: agent, // TODO: Add i18n support for agent names
                })) || []
              }
              defaultSelectedKey={settings.AGENT}
              isClearable={false}
              onInputChange={handleAgentIsDirty}
              wrapperClassName="w-full max-w-[680px]"
            />
          )}
        </>
      )}

@@ -1,19 +1,28 @@
import React from "react";
import { useTranslation } from "react-i18next";
import { I18nKey } from "#/i18n/declaration";
import LessonPlanIcon from "#/icons/lesson-plan.svg?react";
import { useConversationStore } from "#/state/conversation-store";
import { useScrollToBottom } from "#/hooks/use-scroll-to-bottom";
import { MarkdownRenderer } from "#/components/features/markdown/markdown-renderer";
import { useHandlePlanClick } from "#/hooks/use-handle-plan-click";

function PlannerTab() {
  const { t } = useTranslation();
  const { scrollRef: scrollContainerRef, onChatBodyScroll } = useScrollToBottom(
    React.useRef<HTMLDivElement>(null),
  );

  const { planContent } = useConversationStore();
  const { handlePlanClick } = useHandlePlanClick();

  if (planContent !== null && planContent !== undefined) {
    return (
      <div className="flex flex-col w-full h-full p-4 overflow-auto">
      <div
        ref={scrollContainerRef}
        onScroll={(e) => onChatBodyScroll(e.currentTarget)}
        className="flex flex-col w-full h-full p-4 overflow-auto"
      >
        <MarkdownRenderer includeStandard includeHeadings>
          {planContent}
        </MarkdownRenderer>

@@ -36,6 +36,15 @@ export const clientLoader = async ({ request }: Route.ClientLoaderArgs) => {
    return redirect("/settings");
  }

  // If LLM settings are hidden and user tries to access the LLM settings page
  if (config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS && pathname === "/settings") {
    // Redirect to the first available settings page
    if (isSaas) {
      return redirect("/settings/user");
    }
    return redirect("/settings/mcp");
  }

  return null;
};

@@ -54,13 +63,24 @@ function SettingsScreen() {
    } else {
      items.push(...OSS_NAV_ITEMS);
    }

    // Filter out LLM settings if the feature flag is enabled
    if (config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS) {
      return items.filter((item) => item.to !== "/settings");
    }

    return items;
  }, [isSaas]);
  }, [isSaas, config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS]);

  // Current section title for the main content area
  const currentSectionTitle = useMemo(() => {
    const currentItem = navItems.find((item) => item.to === location.pathname);
    return currentItem ? currentItem.text : "SETTINGS$NAV_LLM";
    if (currentItem) {
      return currentItem.text;
    }

    // Default to the first available navigation item if current page is not found
    return navItems.length > 0 ? navItems[0].text : "SETTINGS$TITLE";
  }, [navItems, location.pathname]);

  return (

@@ -31,6 +31,7 @@ export const DEFAULT_SETTINGS: Settings = {
  },
  GIT_USER_NAME: "openhands",
  GIT_USER_EMAIL: "openhands@all-hands.dev",
  V1_ENABLED: false,
};

/**

@@ -35,6 +35,7 @@ export type ApiSettings = {
  email_verified?: boolean;
  git_user_name?: string;
  git_user_email?: string;
  v1_enabled?: boolean;
};

export type PostApiSettings = ApiSettings & {

@@ -63,6 +63,7 @@ export type Settings = {
  EMAIL_VERIFIED?: boolean;
  GIT_USER_NAME?: string;
  GIT_USER_EMAIL?: string;
  V1_ENABLED?: boolean;
};

export type PostSettings = Settings & {

@@ -17,6 +17,4 @@ export const HIDE_LLM_SETTINGS = () => loadFeatureFlag("HIDE_LLM_SETTINGS");
export const VSCODE_IN_NEW_TAB = () => loadFeatureFlag("VSCODE_IN_NEW_TAB");
export const ENABLE_TRAJECTORY_REPLAY = () =>
  loadFeatureFlag("TRAJECTORY_REPLAY");
export const USE_V1_CONVERSATION_API = () =>
  loadFeatureFlag("USE_V1_CONVERSATION_API");
export const USE_PLANNING_AGENT = () => loadFeatureFlag("USE_PLANNING_AGENT");

@@ -606,10 +606,15 @@ export const shouldIncludeRepository = (
 * @returns The query string for searching OpenHands repositories
 */
export const getOpenHandsQuery = (provider: Provider | null): string => {
  if (provider === "gitlab") {
    return "openhands-config";
  }
  return ".openhands";
  const providerRepositorySuffix: Record<string, string> = {
    gitlab: "openhands-config",
    azure_devops: "openhands-config",
    default: ".openhands",
  } as const;

  return provider && provider in providerRepositorySuffix
    ? providerRepositorySuffix[provider]
    : providerRepositorySuffix.default;
};

/**
@@ -621,12 +626,7 @@ export const getOpenHandsQuery = (provider: Provider | null): string => {
export const hasOpenHandsSuffix = (
  repo: GitRepository,
  provider: Provider | null,
): boolean => {
  if (provider === "gitlab") {
    return repo.full_name.endsWith("/openhands-config");
  }
  return repo.full_name.endsWith("/.openhands");
};
): boolean => repo.full_name.endsWith(`/${getOpenHandsQuery(provider)}`);

/**
 * Build headers for V1 API requests that require session authentication

@@ -9,6 +9,7 @@ from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationSortOrder,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk.event import ConversationStateUpdateEvent
from openhands.sdk.utils.models import DiscriminatedUnionMixin

@@ -92,6 +93,19 @@ class AppConversationInfoService(ABC):
        Return the stored info
        """

    @abstractmethod
    async def process_stats_event(
        self,
        event: ConversationStateUpdateEvent,
        conversation_id: UUID,
    ) -> None:
        """Process a stats event and update conversation statistics.

        Args:
            event: The ConversationStateUpdateEvent with key='stats'
            conversation_id: The ID of the conversation to update
        """

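The new abstract hook is wired up by the event router further down in this commit; a minimal sketch of that dispatch pattern, using stand-in types rather than the real SDK classes:

```python
import asyncio
from dataclasses import dataclass
from uuid import UUID, uuid4


@dataclass
class FakeStateUpdateEvent:
    """Hypothetical stand-in; only the fields used by the router are modeled."""

    key: str
    value: dict


class FakeInfoService:
    async def process_stats_event(
        self, event: FakeStateUpdateEvent, conversation_id: UUID
    ) -> None:
        # A real implementation would validate event.value and persist metrics.
        print(f"stats for {conversation_id}: {event.value}")


async def route_events(
    service: FakeInfoService, conversation_id: UUID, events: list
) -> None:
    # Mirror the router's filter: only state updates whose key is 'stats'.
    for event in events:
        if isinstance(event, FakeStateUpdateEvent) and event.key == "stats":
            await service.process_stats_event(event, conversation_id)


asyncio.run(
    route_events(FakeInfoService(), uuid4(), [FakeStateUpdateEvent("stats", {"agent": {}})])
)
```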
class AppConversationInfoServiceInjector(
    DiscriminatedUnionMixin, Injector[AppConversationInfoService], ABC

@@ -9,6 +9,7 @@ from typing import AsyncGenerator
import base62

from openhands.app_server.app_conversation.app_conversation_models import (
    AgentType,
    AppConversationStartTask,
    AppConversationStartTaskStatus,
)
@@ -25,7 +26,9 @@ from openhands.app_server.sandbox.sandbox_models import SandboxInfo
from openhands.app_server.user.user_context import UserContext
from openhands.sdk import Agent
from openhands.sdk.context.agent_context import AgentContext
from openhands.sdk.context.condenser import LLMSummarizingCondenser
from openhands.sdk.context.skills import load_user_skills
from openhands.sdk.llm import LLM
from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace

_logger = logging.getLogger(__name__)
@@ -182,6 +185,43 @@ class AppConversationServiceBase(AppConversationService, ABC):
            workspace.working_dir,
        )

    async def _configure_git_user_settings(
        self,
        workspace: AsyncRemoteWorkspace,
    ) -> None:
        """Configure git global user settings from user preferences.

        Reads git_user_name and git_user_email from user settings and
        configures them as git global settings in the workspace.

        Args:
            workspace: The remote workspace to configure git settings in.
        """
        try:
            user_info = await self.user_context.get_user_info()

            if user_info.git_user_name:
                cmd = f'git config --global user.name "{user_info.git_user_name}"'
                result = await workspace.execute_command(cmd, workspace.working_dir)
                if result.exit_code:
                    _logger.warning(f'Git config user.name failed: {result.stderr}')
                else:
                    _logger.info(
                        f'Git configured with user.name={user_info.git_user_name}'
                    )

            if user_info.git_user_email:
                cmd = f'git config --global user.email "{user_info.git_user_email}"'
                result = await workspace.execute_command(cmd, workspace.working_dir)
                if result.exit_code:
                    _logger.warning(f'Git config user.email failed: {result.stderr}')
                else:
                    _logger.info(
                        f'Git configured with user.email={user_info.git_user_email}'
                    )
        except Exception as e:
            _logger.warning(f'Failed to configure git user settings: {e}')

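The two `git config` invocations above interpolate user-supplied values into a shell string with plain double quotes; a hedged sketch of the same step with `shlex.quote` applied, in case a display name ever contains quote characters (this quoting helper is an assumption, not part of the diff):

```python
import shlex


def build_git_config_commands(user_name: str | None, user_email: str | None) -> list[str]:
    """Build `git config --global` commands, shell-quoting the user-supplied values."""
    commands = []
    if user_name:
        commands.append(f"git config --global user.name {shlex.quote(user_name)}")
    if user_email:
        commands.append(f"git config --global user.email {shlex.quote(user_email)}")
    return commands


# Example: a name containing a double quote no longer breaks the command line.
print(build_git_config_commands('Alice "Ally" Smith', "alice@example.com"))
```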
    async def clone_or_init_git_repo(
        self,
        task: AppConversationStartTask,
@@ -197,6 +237,9 @@ class AppConversationServiceBase(AppConversationService, ABC):
        if result.exit_code:
            _logger.warning(f'mkdir failed: {result.stderr}')

        # Configure git user settings from user preferences
        await self._configure_git_user_settings(workspace)

        if not request.selected_repository:
            if self.init_git_in_empty_workspace:
                _logger.debug('Initializing a new git repository in the workspace.')
@@ -221,7 +264,9 @@ class AppConversationServiceBase(AppConversationService, ABC):

        # Clone the repo - this is the slow part!
        clone_command = f'git clone {remote_repo_url} {dir_name}'
        result = await workspace.execute_command(clone_command, workspace.working_dir)
        result = await workspace.execute_command(
            clone_command, workspace.working_dir, 120
        )
        if result.exit_code:
            _logger.warning(f'Git clone failed: {result.stderr}')

@@ -233,7 +278,10 @@ class AppConversationServiceBase(AppConversationService, ABC):
        random_str = base62.encodebytes(os.urandom(16))
        openhands_workspace_branch = f'openhands-workspace-{random_str}'
        checkout_command = f'git checkout -b {openhands_workspace_branch}'
        await workspace.execute_command(checkout_command, workspace.working_dir)
        git_dir = Path(workspace.working_dir) / dir_name
        result = await workspace.execute_command(checkout_command, git_dir)
        if result.exit_code:
            _logger.warning(f'Git checkout failed: {result.stderr}')

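The unique branch name comes from 16 random bytes encoded with the `base62` package already imported in this file; the recipe in isolation (the package and call are taken directly from the diff):

```python
import os

import base62  # third-party package used by the service for URL-safe random strings


def random_workspace_branch() -> str:
    # 16 random bytes encode to roughly 22 base62 characters,
    # which are safe to use in a git branch name.
    return f"openhands-workspace-{base62.encodebytes(os.urandom(16))}"


print(random_workspace_branch())  # e.g. openhands-workspace-3xK9...
```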
    async def maybe_run_setup_script(
        self,
@@ -295,3 +343,39 @@ class AppConversationServiceBase(AppConversationService, ABC):
            return

        _logger.info('Git pre-commit hook installed successfully')

    def _create_condenser(
        self,
        llm: LLM,
        agent_type: AgentType,
        condenser_max_size: int | None,
    ) -> LLMSummarizingCondenser:
        """Create a condenser based on user settings and agent type.

        Args:
            llm: The LLM instance to use for condensation
            agent_type: Type of agent (PLAN or DEFAULT)
            condenser_max_size: condenser_max_size setting

        Returns:
            Configured LLMSummarizingCondenser instance
        """
        # LLMSummarizingCondenser has defaults: max_size=120, keep_first=4
        condenser_kwargs = {
            'llm': llm.model_copy(
                update={
                    'usage_id': (
                        'condenser'
                        if agent_type == AgentType.DEFAULT
                        else 'planning_condenser'
                    )
                }
            ),
        }
        # Only override max_size if user has a custom value
        if condenser_max_size is not None:
            condenser_kwargs['max_size'] = condenser_max_size

        condenser = LLMSummarizingCondenser(**condenser_kwargs)

        return condenser

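Building the kwargs dict and adding `max_size` only when the user set one lets the library default (`max_size=120`) apply otherwise; a stand-alone illustration of the pattern with a stand-in class:

```python
class StubCondenser:
    """Stand-in with a library-style default, mirroring LLMSummarizingCondenser."""

    def __init__(self, llm: str, max_size: int = 120) -> None:
        self.llm = llm
        self.max_size = max_size


def make_condenser(llm: str, custom_max_size: int | None) -> StubCondenser:
    kwargs = {"llm": llm}
    # Passing max_size=None explicitly would clobber the default; omit it instead.
    if custom_max_size is not None:
        kwargs["max_size"] = custom_max_size
    return StubCondenser(**kwargs)


assert make_condenser("gpt", None).max_size == 120  # library default kept
assert make_condenser("gpt", 40).max_size == 40     # user override applied
```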
@@ -4,12 +4,12 @@ from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime, timedelta
from time import time
from typing import AsyncGenerator, Sequence
from typing import Any, AsyncGenerator, Sequence
from uuid import UUID, uuid4

import httpx
from fastapi import Request
from pydantic import Field, TypeAdapter
from pydantic import Field, SecretStr, TypeAdapter

from openhands.agent_server.models import (
    ConversationInfo,
@@ -63,19 +63,25 @@ from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.services.jwt_service import JwtService
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.user.user_models import UserInfo
from openhands.app_server.utils.docker_utils import (
    replace_localhost_hostname_for_docker,
)
from openhands.experiments.experiment_manager import ExperimentManagerImpl
from openhands.integrations.provider import ProviderType
from openhands.sdk import AgentContext, LocalWorkspace
from openhands.sdk import Agent, AgentContext, LocalWorkspace
from openhands.sdk.conversation.secret_source import LookupSecret, StaticSecret
from openhands.sdk.llm import LLM
from openhands.sdk.security.confirmation_policy import AlwaysConfirm
from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace
from openhands.server.types import AppMode
from openhands.tools.preset.default import get_default_agent
from openhands.tools.preset.planning import get_planning_agent
from openhands.tools.preset.default import (
    get_default_tools,
)
from openhands.tools.preset.planning import (
    format_plan_structure,
    get_planning_tools,
)

_conversation_info_type_adapter = TypeAdapter(list[ConversationInfo | None])
_logger = logging.getLogger(__name__)
@@ -99,6 +105,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
    access_token_hard_timeout: timedelta | None
    app_mode: str | None = None
    keycloak_auth_cookie: str | None = None
    tavily_api_key: str | None = None

    async def search_app_conversations(
        self,
@@ -519,6 +526,224 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
        if not request.llm_model and parent_info.llm_model:
            request.llm_model = parent_info.llm_model

    async def _setup_secrets_for_git_provider(
        self, git_provider: ProviderType | None, user: UserInfo
    ) -> dict:
        """Set up secrets for git provider authentication.

        Args:
            git_provider: The git provider type (GitHub, GitLab, etc.)
            user: User information containing authentication details

        Returns:
            Dictionary of secrets for the conversation
        """
        secrets = await self.user_context.get_secrets()

        if not git_provider:
            return secrets

        secret_name = f'{git_provider.name}_TOKEN'

        if self.web_url:
            # Create an access token for web-based authentication
            access_token = self.jwt_service.create_jws_token(
                payload={
                    'user_id': user.id,
                    'provider_type': git_provider.value,
                },
                expires_in=self.access_token_hard_timeout,
            )
            headers = {'X-Access-Token': access_token}

            # Include keycloak_auth cookie in headers if app_mode is SaaS
            if self.app_mode == 'saas' and self.keycloak_auth_cookie:
                headers['Cookie'] = f'keycloak_auth={self.keycloak_auth_cookie}'

            secrets[secret_name] = LookupSecret(
                url=self.web_url + '/api/v1/webhooks/secrets',
                headers=headers,
            )
        else:
            # Use static token for environments without web URL access
            static_token = await self.user_context.get_latest_token(git_provider)
            if static_token:
                secrets[secret_name] = StaticSecret(value=static_token)

        return secrets

    async def _configure_llm_and_mcp(
        self, user: UserInfo, llm_model: str | None
    ) -> tuple[LLM, dict]:
        """Configure LLM and MCP (Model Context Protocol) settings.

        Args:
            user: User information containing LLM preferences
            llm_model: Optional specific model to use, falls back to user default

        Returns:
            Tuple of (configured LLM instance, MCP config dictionary)
        """
        # Configure LLM
        model = llm_model or user.llm_model
        llm = LLM(
            model=model,
            base_url=user.llm_base_url,
            api_key=user.llm_api_key,
            usage_id='agent',
        )

        # Configure MCP
        mcp_config: dict[str, Any] = {}
        if self.web_url:
            mcp_url = f'{self.web_url}/mcp/mcp'
            mcp_config = {
                'default': {
                    'url': mcp_url,
                }
            }

            # Add API key if available
            mcp_api_key = await self.user_context.get_mcp_api_key()
            if mcp_api_key:
                mcp_config['default']['headers'] = {
                    'X-Session-API-Key': mcp_api_key,
                }

        # Get the actual API key values, prioritizing user's key over service key
        user_search_key = None
        if user.search_api_key:
            key_value = user.search_api_key.get_secret_value()
            if key_value and key_value.strip():
                user_search_key = key_value

        service_tavily_key = None
        if self.tavily_api_key:
            # tavily_api_key is already a string (extracted in the factory method)
            if self.tavily_api_key.strip():
                service_tavily_key = self.tavily_api_key

        tavily_api_key = user_search_key or service_tavily_key

        if tavily_api_key:
            _logger.info('Adding search engine to MCP config')
            mcp_config['tavily'] = {
                'url': f'https://mcp.tavily.com/mcp/?tavilyApiKey={tavily_api_key}'
            }
        else:
            _logger.info('No search engine API key found, skipping search engine')

        return llm, mcp_config

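The search-key selection above boils down to "first non-blank value wins, user key before service key"; a minimal sketch of that precedence rule in isolation:

```python
def pick_search_key(user_key: str | None, service_key: str | None) -> str | None:
    """Return the user's key if it is non-blank, else the service key, else None."""
    for candidate in (user_key, service_key):
        if candidate and candidate.strip():
            return candidate
    return None


assert pick_search_key("  ", "svc-key") == "svc-key"  # blank user key is ignored
assert pick_search_key("usr-key", "svc-key") == "usr-key"
assert pick_search_key(None, None) is None
```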
    def _create_agent_with_context(
        self,
        llm: LLM,
        agent_type: AgentType,
        system_message_suffix: str | None,
        mcp_config: dict,
        condenser_max_size: int | None,
    ) -> Agent:
        """Create an agent with appropriate tools and context based on agent type.

        Args:
            llm: Configured LLM instance
            agent_type: Type of agent to create (PLAN or DEFAULT)
            system_message_suffix: Optional suffix for system messages
            mcp_config: MCP configuration dictionary
            condenser_max_size: condenser_max_size setting

        Returns:
            Configured Agent instance with context
        """
        # Create condenser with user's settings
        condenser = self._create_condenser(llm, agent_type, condenser_max_size)

        # Create agent based on type
        if agent_type == AgentType.PLAN:
            agent = Agent(
                llm=llm,
                tools=get_planning_tools(),
                system_prompt_filename='system_prompt_planning.j2',
                system_prompt_kwargs={'plan_structure': format_plan_structure()},
                condenser=condenser,
                security_analyzer=None,
                mcp_config=mcp_config,
            )
        else:
            agent = Agent(
                llm=llm,
                tools=get_default_tools(enable_browser=True),
                system_prompt_kwargs={'cli_mode': False},
                condenser=condenser,
                mcp_config=mcp_config,
            )

        # Add agent context
        agent_context = AgentContext(system_message_suffix=system_message_suffix)
        agent = agent.model_copy(update={'agent_context': agent_context})

        return agent

    async def _finalize_conversation_request(
        self,
        agent: Agent,
        conversation_id: UUID | None,
        user: UserInfo,
        workspace: LocalWorkspace,
        initial_message: SendMessageRequest | None,
        secrets: dict,
        sandbox: SandboxInfo,
        remote_workspace: AsyncRemoteWorkspace | None,
        selected_repository: str | None,
        working_dir: str,
    ) -> StartConversationRequest:
        """Finalize the conversation request with experiment variants and skills.

        Args:
            agent: The configured agent
            conversation_id: Optional conversation ID, generates new one if None
            user: User information
            workspace: Local workspace instance
            initial_message: Optional initial message for the conversation
            secrets: Dictionary of secrets for authentication
            sandbox: Sandbox information
            remote_workspace: Optional remote workspace for skills loading
            selected_repository: Optional repository name
            working_dir: Working directory path

        Returns:
            Complete StartConversationRequest ready for use
        """
        # Generate conversation ID if not provided
        conversation_id = conversation_id or uuid4()

        # Apply experiment variants
        agent = ExperimentManagerImpl.run_agent_variant_tests__v1(
            user.id, conversation_id, agent
        )

        # Load and merge skills if remote workspace is available
        if remote_workspace:
            try:
                agent = await self._load_skills_and_update_agent(
                    sandbox, agent, remote_workspace, selected_repository, working_dir
                )
            except Exception as e:
                _logger.warning(f'Failed to load skills: {e}', exc_info=True)
                # Continue without skills - don't fail conversation startup

        # Create and return the final request
        return StartConversationRequest(
            conversation_id=conversation_id,
            agent=agent,
            workspace=workspace,
            confirmation_policy=(
                AlwaysConfirm() if user.confirmation_mode else NeverConfirm()
            ),
            initial_message=initial_message,
            secrets=secrets,
        )

    async def _build_start_conversation_request_for_user(
        self,
        sandbox: SandboxInfo,
@@ -532,87 +757,41 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
        remote_workspace: AsyncRemoteWorkspace | None = None,
        selected_repository: str | None = None,
    ) -> StartConversationRequest:
        """Build a complete conversation request for a user.

        This method orchestrates the creation of a conversation request by:
        1. Setting up git provider secrets
        2. Configuring LLM and MCP settings
        3. Creating an agent with appropriate context
        4. Finalizing the request with skills and experiment variants
        """
        user = await self.user_context.get_user_info()

        # Set up a secret for the git token
        secrets = await self.user_context.get_secrets()
        if git_provider:
            secret_name = f'{git_provider.name}_TOKEN'
            if self.web_url:
                # If there is a web url, then we create an access token to access it.
                # For security reasons, we are explicit here - only this user, and
                # only this provider, with a timeout
                access_token = self.jwt_service.create_jws_token(
                    payload={
                        'user_id': user.id,
                        'provider_type': git_provider.value,
                    },
                    expires_in=self.access_token_hard_timeout,
                )
                headers = {'X-Access-Token': access_token}

                # Include keycloak_auth cookie in headers if app_mode is SaaS
                if self.app_mode == 'saas' and self.keycloak_auth_cookie:
                    headers['Cookie'] = f'keycloak_auth={self.keycloak_auth_cookie}'

                secrets[secret_name] = LookupSecret(
                    url=self.web_url + '/api/v1/webhooks/secrets',
                    headers=headers,
                )
            else:
                # If there is no URL specified where the sandbox can access the app server
                # then we supply a static secret with the most recent value. Depending
                # on the type, this may eventually expire.
                static_token = await self.user_context.get_latest_token(git_provider)
                if static_token:
                    secrets[secret_name] = StaticSecret(value=static_token)

        workspace = LocalWorkspace(working_dir=working_dir)

        # Use provided llm_model if available, otherwise fall back to user's default
        model = llm_model or user.llm_model
        llm = LLM(
            model=model,
            base_url=user.llm_base_url,
            api_key=user.llm_api_key,
            usage_id='agent',
        )
        # The agent gets passed initial instructions
        # Select agent based on agent_type
        if agent_type == AgentType.PLAN:
            agent = get_planning_agent(llm=llm)
        else:
            agent = get_default_agent(llm=llm)
        # Set up secrets for git provider
        secrets = await self._setup_secrets_for_git_provider(git_provider, user)

        agent_context = AgentContext(system_message_suffix=system_message_suffix)
        agent = agent.model_copy(update={'agent_context': agent_context})
        # Configure LLM and MCP
        llm, mcp_config = await self._configure_llm_and_mcp(user, llm_model)

        conversation_id = conversation_id or uuid4()
        agent = ExperimentManagerImpl.run_agent_variant_tests__v1(
            user.id, conversation_id, agent
        # Create agent with context
        agent = self._create_agent_with_context(
            llm, agent_type, system_message_suffix, mcp_config, user.condenser_max_size
        )

        # Load and merge all skills if remote_workspace is available
        if remote_workspace:
            try:
                agent = await self._load_skills_and_update_agent(
                    sandbox, agent, remote_workspace, selected_repository, working_dir
                )
            except Exception as e:
                _logger.warning(f'Failed to load skills: {e}', exc_info=True)
                # Continue without skills - don't fail conversation startup

        start_conversation_request = StartConversationRequest(
            conversation_id=conversation_id,
            agent=agent,
            workspace=workspace,
            confirmation_policy=(
                AlwaysConfirm() if user.confirmation_mode else NeverConfirm()
            ),
            initial_message=initial_message,
            secrets=secrets,
        # Finalize and return the conversation request
        return await self._finalize_conversation_request(
            agent,
            conversation_id,
            user,
            workspace,
            initial_message,
            secrets,
            sandbox,
            remote_workspace,
            selected_repository,
            working_dir,
        )
        return start_conversation_request

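After the refactor, the docstring's four steps read as straight-line composition; a skeletal sketch of that shape with stand-in async helpers (the real ones are the private methods introduced in this diff), to show why the method body shortens so much:

```python
import asyncio


# Stand-in helpers; names and return values are illustrative only.
async def setup_secrets() -> dict:
    return {"GITHUB_TOKEN": "..."}

async def configure_llm_and_mcp() -> tuple[str, dict]:
    return "llm", {"default": {}}

def create_agent(llm: str, mcp: dict) -> str:
    return f"agent({llm})"

async def finalize(agent: str, secrets: dict) -> str:
    return f"request({agent}, {sorted(secrets)})"


async def build_request() -> str:
    secrets = await setup_secrets()                   # 1. git provider secrets
    llm, mcp_config = await configure_llm_and_mcp()   # 2. LLM + MCP settings
    agent = create_agent(llm, mcp_config)             # 3. agent with context
    return await finalize(agent, secrets)             # 4. skills + experiment variants


print(asyncio.run(build_request()))
```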
    async def update_agent_server_conversation_title(
        self,
@@ -817,6 +996,10 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector):
            'be retrieved by a sandboxed conversation.'
        ),
    )
    tavily_api_key: SecretStr | None = Field(
        default=None,
        description='The Tavily Search API key to add to MCP integration',
    )

    async def inject(
        self, state: InjectorState, request: Request | None = None
@@ -874,6 +1057,14 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector):
            # If server_config is not available (e.g., in tests), continue without it
            pass

        # We supply the global tavily key only if the app mode is not SAAS, where
        # currently the search endpoints are patched into the app server instead
        # so the tavily key does not need to be shared
        if self.tavily_api_key and app_mode != AppMode.SAAS:
            tavily_api_key = self.tavily_api_key.get_secret_value()
        else:
            tavily_api_key = None

        yield LiveStatusAppConversationService(
            init_git_in_empty_workspace=self.init_git_in_empty_workspace,
            user_context=user_context,
@@ -890,4 +1081,5 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector):
            access_token_hard_timeout=access_token_hard_timeout,
            app_mode=app_mode,
            keycloak_auth_cookie=keycloak_auth_cookie,
            tavily_api_key=tavily_api_key,
        )

@@ -45,6 +45,8 @@ from openhands.app_server.utils.sql_utils import (
    create_json_type_decorator,
)
from openhands.integrations.provider import ProviderType
from openhands.sdk.conversation.conversation_stats import ConversationStats
from openhands.sdk.event import ConversationStateUpdateEvent
from openhands.sdk.llm import MetricsSnapshot
from openhands.sdk.llm.utils.metrics import TokenUsage
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
@@ -354,6 +356,130 @@ class SQLAppConversationInfoService(AppConversationInfoService):
        await self.db_session.commit()
        return info

    async def update_conversation_statistics(
        self, conversation_id: UUID, stats: ConversationStats
    ) -> None:
        """Update conversation statistics from stats event data.

        Args:
            conversation_id: The ID of the conversation to update
            stats: ConversationStats object containing usage_to_metrics data from stats event
        """
        # Extract agent metrics from usage_to_metrics
        usage_to_metrics = stats.usage_to_metrics
        agent_metrics = usage_to_metrics.get('agent')

        if not agent_metrics:
            logger.debug(
                'No agent metrics found in stats for conversation %s', conversation_id
            )
            return

        # Query existing record using secure select (filters for V1 and user if available)
        query = await self._secure_select()
        query = query.where(
            StoredConversationMetadata.conversation_id == str(conversation_id)
        )
        result = await self.db_session.execute(query)
        stored = result.scalar_one_or_none()

        if not stored:
            logger.debug(
                'Conversation %s not found or not accessible, skipping statistics update',
                conversation_id,
            )
            return

        # Extract accumulated_cost and max_budget_per_task from Metrics object
        accumulated_cost = agent_metrics.accumulated_cost
        max_budget_per_task = agent_metrics.max_budget_per_task

        # Extract accumulated_token_usage from Metrics object
        accumulated_token_usage = agent_metrics.accumulated_token_usage
        if accumulated_token_usage:
            prompt_tokens = accumulated_token_usage.prompt_tokens
            completion_tokens = accumulated_token_usage.completion_tokens
            cache_read_tokens = accumulated_token_usage.cache_read_tokens
            cache_write_tokens = accumulated_token_usage.cache_write_tokens
            reasoning_tokens = accumulated_token_usage.reasoning_tokens
            context_window = accumulated_token_usage.context_window
            per_turn_token = accumulated_token_usage.per_turn_token
        else:
            prompt_tokens = None
            completion_tokens = None
            cache_read_tokens = None
            cache_write_tokens = None
            reasoning_tokens = None
            context_window = None
            per_turn_token = None

        # Update fields only if values are provided (not None)
        if accumulated_cost is not None:
            stored.accumulated_cost = accumulated_cost
        if max_budget_per_task is not None:
            stored.max_budget_per_task = max_budget_per_task
        if prompt_tokens is not None:
            stored.prompt_tokens = prompt_tokens
        if completion_tokens is not None:
            stored.completion_tokens = completion_tokens
        if cache_read_tokens is not None:
            stored.cache_read_tokens = cache_read_tokens
        if cache_write_tokens is not None:
            stored.cache_write_tokens = cache_write_tokens
        if reasoning_tokens is not None:
            stored.reasoning_tokens = reasoning_tokens
        if context_window is not None:
            stored.context_window = context_window
        if per_turn_token is not None:
            stored.per_turn_token = per_turn_token

        # Update last_updated_at timestamp
        stored.last_updated_at = utc_now()

        await self.db_session.commit()

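The long chain of `if value is not None` assignments above could equally be a loop over field names; a compact sketch of that alternative (the field list and stand-in record are illustrative, not the actual SQLAlchemy model):

```python
from types import SimpleNamespace

TOKEN_FIELDS = (
    "prompt_tokens", "completion_tokens", "cache_read_tokens",
    "cache_write_tokens", "reasoning_tokens", "context_window", "per_turn_token",
)


def apply_usage(stored: SimpleNamespace, usage: SimpleNamespace | None) -> None:
    # Copy each field only when the incoming value is present, as the diff does.
    for name in TOKEN_FIELDS:
        value = getattr(usage, name, None) if usage else None
        if value is not None:
            setattr(stored, name, value)


record = SimpleNamespace(prompt_tokens=1, completion_tokens=2)
apply_usage(record, SimpleNamespace(prompt_tokens=10, completion_tokens=None))
assert (record.prompt_tokens, record.completion_tokens) == (10, 2)
```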
    async def process_stats_event(
        self,
        event: ConversationStateUpdateEvent,
        conversation_id: UUID,
    ) -> None:
        """Process a stats event and update conversation statistics.

        Args:
            event: The ConversationStateUpdateEvent with key='stats'
            conversation_id: The ID of the conversation to update
        """
        try:
            # Parse event value into ConversationStats model for type safety
            # event.value can be a dict (from JSON deserialization) or a ConversationStats object
            event_value = event.value
            conversation_stats: ConversationStats | None = None

            if isinstance(event_value, ConversationStats):
                # Already a ConversationStats object
                conversation_stats = event_value
            elif isinstance(event_value, dict):
                # Parse dict into ConversationStats model
                # This validates the structure and ensures type safety
                conversation_stats = ConversationStats.model_validate(event_value)
            elif hasattr(event_value, 'usage_to_metrics'):
                # Handle objects with usage_to_metrics attribute (e.g., from tests)
                # Convert to dict first, then validate
                stats_dict = {'usage_to_metrics': event_value.usage_to_metrics}
                conversation_stats = ConversationStats.model_validate(stats_dict)

            if conversation_stats and conversation_stats.usage_to_metrics:
                # Pass ConversationStats object directly for type safety
                await self.update_conversation_statistics(
                    conversation_id, conversation_stats
                )
        except Exception:
            logger.exception(
                'Error updating conversation statistics for conversation %s',
                conversation_id,
                stack_info=True,
            )

    async def _secure_select(self):
        query = select(StoredConversationMetadata).where(
            StoredConversationMetadata.conversation_version == 'V1'

@@ -6,7 +6,7 @@ from typing import AsyncContextManager

import httpx
from fastapi import Depends, Request
from pydantic import Field
from pydantic import Field, SecretStr
from sqlalchemy.ext.asyncio import AsyncSession

# Import the event_callback module to ensure all processors are registered
@@ -185,7 +185,13 @@ def config_from_env() -> AppServerConfig:
    )

    if config.app_conversation is None:
        config.app_conversation = LiveStatusAppConversationServiceInjector()
        tavily_api_key = None
        tavily_api_key_str = os.getenv('TAVILY_API_KEY') or os.getenv('SEARCH_API_KEY')
        if tavily_api_key_str:
            tavily_api_key = SecretStr(tavily_api_key_str)
        config.app_conversation = LiveStatusAppConversationServiceInjector(
            tavily_api_key=tavily_api_key
        )

    if config.user is None:
        config.user = AuthUserContextInjector()

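The key is read from `TAVILY_API_KEY` with `SEARCH_API_KEY` as a fallback and wrapped in pydantic's `SecretStr` so it does not leak into logs or reprs; the same move in isolation:

```python
import os

from pydantic import SecretStr


def load_search_key() -> SecretStr | None:
    # First env var that is set wins; wrap immediately so printing stays redacted.
    raw = os.getenv("TAVILY_API_KEY") or os.getenv("SEARCH_API_KEY")
    return SecretStr(raw) if raw else None


os.environ["SEARCH_API_KEY"] = "tvly-demo"
key = load_search_key()
print(key)                     # **********
print(key.get_secret_value())  # tvly-demo, only on explicit request
```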
@@ -6,7 +6,6 @@ from __future__ import annotations
import asyncio
import logging
from dataclasses import dataclass
from datetime import datetime
from typing import AsyncGenerator
from uuid import UUID

@@ -15,6 +14,7 @@ from sqlalchemy import UUID as SQLUUID
from sqlalchemy import Column, Enum, String, and_, func, or_, select
from sqlalchemy.ext.asyncio import AsyncSession

from openhands.agent_server.utils import utc_now
from openhands.app_server.event_callback.event_callback_models import (
    CreateEventCallbackRequest,
    EventCallback,
@@ -177,7 +177,7 @@ class SQLEventCallbackService(EventCallbackService):
        return EventCallbackPage(items=callbacks, next_page_id=next_page_id)

    async def save_event_callback(self, event_callback: EventCallback) -> EventCallback:
        event_callback.updated_at = datetime.now()
        event_callback.updated_at = utc_now()
        stored_callback = StoredEventCallback(**event_callback.model_dump())
        await self.db_session.merge(stored_callback)
        return event_callback

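Swapping `datetime.now()` for `utc_now()` trades a naive local timestamp for a timezone-aware UTC one; the difference is easy to demonstrate with the stdlib (`utc_now` itself is assumed to be the usual `datetime.now(timezone.utc)` wrapper):

```python
from datetime import datetime, timezone

naive = datetime.now()               # local wall clock, tzinfo is None
aware = datetime.now(timezone.utc)   # what a utc_now() helper typically returns

print(naive.tzinfo)  # None - ambiguous once stored and read back elsewhere
print(aware.tzinfo)  # UTC - comparisons and DB round-trips stay unambiguous
```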
@@ -43,6 +43,7 @@ from openhands.app_server.user.specifiy_user_context import (
from openhands.app_server.user.user_context import UserContext
from openhands.integrations.provider import ProviderType
from openhands.sdk import Event
from openhands.sdk.event import ConversationStateUpdateEvent
from openhands.server.user_auth.default_user_auth import DefaultUserAuth
from openhands.server.user_auth.user_auth import (
    get_for_user as get_user_auth_for_user,
@@ -144,6 +145,13 @@ async def on_event(
        *[event_service.save_event(conversation_id, event) for event in events]
    )

    # Process stats events for V1 conversations
    for event in events:
        if isinstance(event, ConversationStateUpdateEvent) and event.key == 'stats':
            await app_conversation_info_service.process_stats_event(
                event, conversation_id
            )

    asyncio.create_task(
        _run_callbacks_in_bg_and_close(
            conversation_id, app_conversation_info.created_by_user_id, events

@@ -11,7 +11,7 @@ from openhands.sdk.utils.models import DiscriminatedUnionMixin

# The version of the agent server to use for deployments.
# Typically this will be the same as the values from the pyproject.toml
AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:5f62cee-python'
AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:37c4b35-python'


class SandboxSpecService(ABC):

@ -78,6 +78,10 @@ class AuthUserContext(UserContext):
|
||||
|
||||
return results
|
||||
|
||||
async def get_mcp_api_key(self) -> str | None:
|
||||
mcp_api_key = await self.user_auth.get_mcp_api_key()
|
||||
return mcp_api_key
|
||||
|
||||
|
||||
USER_ID_ATTR = 'user_id'
|
||||
|
||||
|
||||
@ -30,6 +30,9 @@ class SpecifyUserContext(UserContext):
|
||||
async def get_secrets(self) -> dict[str, SecretSource]:
|
||||
raise NotImplementedError()
|
||||
|
||||
async def get_mcp_api_key(self) -> str | None:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
USER_CONTEXT_ATTR = 'user_context'
|
||||
ADMIN = SpecifyUserContext(user_id=None)
|
||||
|
||||
@ -34,6 +34,10 @@ class UserContext(ABC):
|
||||
async def get_secrets(self) -> dict[str, SecretSource]:
|
||||
"""Get custom secrets and github provider secrets for the conversation."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_mcp_api_key(self) -> str | None:
|
||||
"""Get an MCP API Key."""
|
||||
|
||||
|
||||
class UserContextInjector(DiscriminatedUnionMixin, Injector[UserContext], ABC):
|
||||
"""Injector for user contexts."""
|
||||
|
||||
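For illustration only, a minimal concrete subclass satisfying the new abstract method (a hypothetical class; the other abstract members of UserContext are omitted here):

class StaticKeyUserContext(UserContext):  # hypothetical, not part of the diff
    def __init__(self, api_key: str | None):
        self._api_key = api_key

    async def get_mcp_api_key(self) -> str | None:
        return self._api_key
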
@ -1,3 +1,4 @@
import hashlib
import os
from datetime import datetime
from pathlib import Path
@ -30,8 +31,14 @@ def get_default_encryption_keys(workspace_dir: Path) -> list[EncryptionKey]:
    """Generate default encryption keys."""
    master_key = os.getenv('JWT_SECRET')
    if master_key:
        # Derive a deterministic key ID from the secret itself.
        # This ensures all pods using the same JWT_SECRET get the same key ID,
        # which is critical for multi-pod deployments where tokens may be
        # created by one pod and verified by another.
        key_id = base62.encodebytes(hashlib.sha256(master_key.encode()).digest())
        return [
            EncryptionKey(
                id=key_id,
                key=SecretStr(master_key),
                active=True,
                notes='jwt secret master key',

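The derivation above, isolated as a sketch; it uses the same calls as the diff (hashlib from the stdlib, base62 from pybase62):

import hashlib
import base62

def derive_key_id(master_key: str) -> str:
    # Hash the secret, then base62-encode the digest: every pod sharing the
    # same JWT_SECRET derives the same key ID.
    return base62.encodebytes(hashlib.sha256(master_key.encode()).digest())
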
@ -88,6 +88,9 @@ class DefaultUserAuth(UserAuth):
            return None
        return user_secrets.provider_tokens

    async def get_mcp_api_key(self) -> str | None:
        return None

    @classmethod
    async def get_instance(cls, request: Request) -> UserAuth:
        user_auth = DefaultUserAuth()

@ -75,6 +75,10 @@ class UserAuth(ABC):
    def get_auth_type(self) -> AuthType | None:
        return None

    @abstractmethod
    async def get_mcp_api_key(self) -> str | None:
        """Get an mcp api key for the user"""

    @classmethod
    @abstractmethod
    async def get_instance(cls, request: Request) -> UserAuth:

@ -1,5 +1,7 @@
from __future__ import annotations

import os

from pydantic import (
    BaseModel,
    ConfigDict,
@ -48,7 +50,7 @@ class Settings(BaseModel):
    email_verified: bool | None = None
    git_user_name: str | None = None
    git_user_email: str | None = None
    v1_enabled: bool | None = None
    v1_enabled: bool | None = Field(default=bool(os.getenv('V1_ENABLED') == '1'))

    model_config = ConfigDict(
        validate_assignment=True,

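A note on the new default above: it is evaluated once, at class-definition time, so V1_ENABLED must be set before the module is imported. The same semantics as a sketch (the outer bool() in the diff is redundant, since == already yields a bool):

import os

def v1_default() -> bool:
    # True only when the variable is set to the literal string '1'.
    return os.getenv('V1_ENABLED') == '1'
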
288
poetry.lock
generated
@ -254,14 +254,14 @@ files = [

[[package]]
name = "anthropic"
version = "0.72.0"
version = "0.75.0"
description = "The official Python library for the anthropic API"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "anthropic-0.72.0-py3-none-any.whl", hash = "sha256:0e9f5a7582f038cab8efbb4c959e49ef654a56bfc7ba2da51b5a7b8a84de2e4d"},
    {file = "anthropic-0.72.0.tar.gz", hash = "sha256:8971fe76dcffc644f74ac3883069beb1527641115ae0d6eb8fa21c1ce4082f7a"},
    {file = "anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b"},
    {file = "anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb"},
]

[package.dependencies]
@ -1205,34 +1205,37 @@ botocore = ["botocore"]

[[package]]
name = "browser-use"
version = "0.8.0"
version = "0.10.1"
description = "Make websites accessible for AI agents"
optional = false
python-versions = "<4.0,>=3.11"
groups = ["main"]
files = [
    {file = "browser_use-0.8.0-py3-none-any.whl", hash = "sha256:b7c299e38ec1c1aec42a236cc6ad2268a366226940d6ff9d88ed461afd5a1cc3"},
    {file = "browser_use-0.8.0.tar.gz", hash = "sha256:2136eb3251424f712a08ee379c9337237c2f93b29b566807db599cf94e6abb5e"},
    {file = "browser_use-0.10.1-py3-none-any.whl", hash = "sha256:96e603bfc71098175342cdcb0592519e6f244412e740f0254e4389fdd82a977f"},
    {file = "browser_use-0.10.1.tar.gz", hash = "sha256:5f211ecfdf1f9fd186160f10df70dedd661821231e30f1bce40939787abab223"},
]

[package.dependencies]
aiohttp = "3.12.15"
anthropic = ">=0.68.1,<1.0.0"
anthropic = ">=0.72.1,<1.0.0"
anyio = ">=4.9.0"
authlib = ">=1.6.0"
bubus = ">=1.5.6"
cdp-use = ">=1.4.0"
cdp-use = ">=1.4.4"
click = ">=8.1.8"
cloudpickle = ">=3.1.1"
google-api-core = ">=2.25.0"
google-api-python-client = ">=2.174.0"
google-auth = ">=2.40.3"
google-auth-oauthlib = ">=1.2.2"
google-genai = ">=1.29.0,<2.0.0"
google-genai = ">=1.50.0,<2.0.0"
groq = ">=0.30.0"
html2text = ">=2025.4.15"
httpx = ">=0.28.1"
inquirerpy = ">=0.3.4"
markdownify = ">=1.2.0"
mcp = ">=1.10.1"
ollama = ">=0.5.1"
openai = ">=1.99.2,<2.0.0"
openai = ">=2.7.2,<3.0.0"
pillow = ">=11.2.1"
portalocker = ">=2.7.0,<3.0.0"
posthog = ">=3.7.0"
@ -1241,19 +1244,24 @@ pydantic = ">=2.11.5"
pyobjc = {version = ">=11.0", markers = "platform_system == \"darwin\""}
pyotp = ">=2.9.0"
pypdf = ">=5.7.0"
python-docx = ">=1.2.0"
python-dotenv = ">=1.0.1"
reportlab = ">=4.0.0"
requests = ">=2.32.3"
rich = ">=14.0.0"
screeninfo = {version = ">=0.8.1", markers = "platform_system != \"darwin\""}
typing-extensions = ">=4.12.2"
uuid7 = ">=0.1.0"

[package.extras]
all = ["agentmail (==0.0.59)", "boto3 (>=1.38.45)", "botocore (>=1.37.23)", "click (>=8.1.8)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)", "rich (>=14.0.0)", "textual (>=3.2.0)"]
all = ["agentmail (==0.0.59)", "boto3 (>=1.38.45)", "botocore (>=1.37.23)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)", "oci (>=2.126.4)", "textual (>=3.2.0)"]
aws = ["boto3 (>=1.38.45)"]
cli = ["click (>=8.1.8)", "rich (>=14.0.0)", "textual (>=3.2.0)"]
eval = ["anyio (>=4.9.0)", "browserbase (==1.4.0)", "datamodel-code-generator (>=0.26.0)", "hyperbrowser (==0.47.0)", "lmnr[all] (==0.7.17)", "psutil (>=7.0.0)"]
cli = ["textual (>=3.2.0)"]
cli-oci = ["oci (>=2.126.4)", "textual (>=3.2.0)"]
code = ["matplotlib (>=3.9.0)", "numpy (>=2.3.2)", "pandas (>=2.2.0)", "tabulate (>=0.9.0)"]
eval = ["anyio (>=4.9.0)", "datamodel-code-generator (>=0.26.0)", "lmnr[all] (==0.7.17)", "psutil (>=7.0.0)"]
examples = ["agentmail (==0.0.59)", "botocore (>=1.37.23)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)"]
oci = ["oci (>=2.126.4)"]
video = ["imageio[ffmpeg] (>=2.37.0)", "numpy (>=2.3.2)"]

[[package]]
@ -1494,14 +1502,14 @@ files = [

[[package]]
name = "cdp-use"
version = "1.4.3"
version = "1.4.4"
description = "Type safe generator/client library for CDP"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
    {file = "cdp_use-1.4.3-py3-none-any.whl", hash = "sha256:c48664604470c2579aa1e677c3e3e7e24c4f300c54804c093d935abb50479ecd"},
    {file = "cdp_use-1.4.3.tar.gz", hash = "sha256:9029c04bdc49fbd3939d2bf1988ad8d88e260729c7d5e35c2f6c87591f5a10e9"},
    {file = "cdp_use-1.4.4-py3-none-any.whl", hash = "sha256:e37e80e067db2653d6fdf953d4ff9e5d80d75daa27b7c6d48c0261cccbef73e1"},
    {file = "cdp_use-1.4.4.tar.gz", hash = "sha256:330a848b517006eb9ad1dc468aa6434d913cf0c6918610760c36c3fdfdba0fab"},
]

[package.dependencies]
@ -3802,28 +3810,28 @@ testing = ["pytest"]

[[package]]
name = "google-genai"
version = "1.45.0"
version = "1.53.0"
description = "GenAI Python SDK"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "google_genai-1.45.0-py3-none-any.whl", hash = "sha256:e755295063e5fd5a4c44acff782a569e37fa8f76a6c75d0ede3375c70d916b7f"},
    {file = "google_genai-1.45.0.tar.gz", hash = "sha256:96ec32ae99a30b5a1b54cb874b577ec6e41b5d5b808bf0f10ed4620e867f9386"},
    {file = "google_genai-1.53.0-py3-none-any.whl", hash = "sha256:65a3f99e5c03c372d872cda7419f5940e723374bb12a2f3ffd5e3e56e8eb2094"},
    {file = "google_genai-1.53.0.tar.gz", hash = "sha256:938a26d22f3fd32c6eeeb4276ef204ef82884e63af9842ce3eac05ceb39cbd8d"},
]

[package.dependencies]
anyio = ">=4.8.0,<5.0.0"
google-auth = ">=2.14.1,<3.0.0"
google-auth = {version = ">=2.14.1,<3.0.0", extras = ["requests"]}
httpx = ">=0.28.1,<1.0.0"
pydantic = ">=2.0.0,<3.0.0"
pydantic = ">=2.9.0,<3.0.0"
requests = ">=2.28.1,<3.0.0"
tenacity = ">=8.2.3,<9.2.0"
typing-extensions = ">=4.11.0,<5.0.0"
websockets = ">=13.0.0,<15.1.0"

[package.extras]
aiohttp = ["aiohttp (<4.0.0)"]
aiohttp = ["aiohttp (<3.13.3)"]
local-tokenizer = ["protobuf", "sentencepiece (>=0.2.0)"]

[[package]]
@ -3991,67 +3999,71 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4

[[package]]
name = "grpcio"
version = "1.72.1"
version = "1.67.1"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "grpcio-1.72.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:ce2706ff37be7a6de68fbc4c3f8dde247cab48cc70fee5fedfbc9cd923b4ee5a"},
    {file = "grpcio-1.72.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7db9e15ee7618fbea748176a67d347f3100fa92d36acccd0e7eeb741bc82f72a"},
    {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:8d6e7764181ba4a8b74aa78c98a89c9f3441068ebcee5d6f14c44578214e0be3"},
    {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:237bb619ba33594006025e6f114f62e60d9563afd6f8e89633ee384868e26687"},
    {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7f1d8a442fd242aa432c8e1b8411c79ebc409dad2c637614d726e226ce9ed0c"},
    {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f2359bd4bba85bf94fd9ab8802671b9637a6803bb673d221157a11523a52e6a8"},
    {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3269cfca37570a420a57a785f2a5d4234c5b12aced55f8843dafced2d3f8c9a6"},
    {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:06c023d86398714d6257194c21f2bc0b58a53ce45cee87dd3c54c7932c590e17"},
    {file = "grpcio-1.72.1-cp310-cp310-win32.whl", hash = "sha256:06dbe54eeea5f9dfb3e7ca2ff66c715ff5fc96b07a1feb322122fe14cb42f6aa"},
    {file = "grpcio-1.72.1-cp310-cp310-win_amd64.whl", hash = "sha256:ba593aa2cd52f4468ba29668c83f893d88c128198d6b1273ca788ef53e3ae5fe"},
    {file = "grpcio-1.72.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:4e112c083f90c330b0eaa78a633fb206d49c20c443926e827f8cac9eb9d2ea32"},
    {file = "grpcio-1.72.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c6f7e3275832adab7384193f78b8c1a98b82541562fa08d7244e8a6b4b5c78a4"},
    {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:dd03c8847c47ef7ac5455aafdfb5e553ecf84f228282bd6106762b379f27c25c"},
    {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7497dbdf220b88b66004e2630fb2b1627df5e279db970d3cc20f70d39dce978d"},
    {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c2cde3ae8ae901317c049394ed8d3c6964de6b814ae65fc68636a7337b63aa"},
    {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7a66cef4bc1db81a54108a849e95650da640c9bc1901957bf7d3b1eeb3251ee8"},
    {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fc0435ad45d540597f78978e3fd5515b448193f51f9065fb67dda566336e0f5f"},
    {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:524bad78d610fa1f9f316d47b3aab1ff89d438ba952ee34e3e335ca80a27ba96"},
    {file = "grpcio-1.72.1-cp311-cp311-win32.whl", hash = "sha256:409ee0abf7e74bbf88941046142452cf3d1f3863d34e11e8fd2b07375170c730"},
    {file = "grpcio-1.72.1-cp311-cp311-win_amd64.whl", hash = "sha256:ea483e408fac55569c11158c3e6d6d6a8c3b0f798b68f1c10db9b22c5996e19b"},
    {file = "grpcio-1.72.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:65a5ef28e5852bd281c6d01a923906e8036736e95e370acab8626fcbec041e67"},
    {file = "grpcio-1.72.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:9e5c594a6c779d674204fb9bdaa1e7b71666ff10b34a62e7769fc6868b5d7511"},
    {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d324f4bdb990d852d79b38c59a12d24fcd47cf3b1a38f2e4d2b6d0b1031bc818"},
    {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:841db55dd29cf2f4121b853b2f89813a1b6175163fbb92c5945fb1b0ca259ef2"},
    {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00da930aa2711b955a538e835096aa365a4b7f2701bdc2ce1febb242a103f8a1"},
    {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4b657773480267fbb7ad733fa85abc103c52ab62e5bc97791faf82c53836eefc"},
    {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a08b483f17a6abca2578283a7ae3aa8d4d90347242b0de2898bdb27395c3f20b"},
    {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:299f3ea4e03c1d0548f4a174b48d612412f92c667f2100e30a079ab76fdaa813"},
    {file = "grpcio-1.72.1-cp312-cp312-win32.whl", hash = "sha256:addc721a3708ff789da1bf69876018dc730c1ec9d3d3cb6912776a00c535a5bc"},
    {file = "grpcio-1.72.1-cp312-cp312-win_amd64.whl", hash = "sha256:22ea2aa92a60dff231ba5fcd7f0220a33c2218e556009996f858eeafe294d1c2"},
    {file = "grpcio-1.72.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:294be6e9c323a197434569a41e0fb5b5aa0962fd5d55a3dc890ec5df985f611a"},
    {file = "grpcio-1.72.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:41ec164dac8df2862f67457d9cdf8d8f8b6a4ca475a3ed1ba6547fff98d93717"},
    {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:761736f75c6ddea3732d97eaabe70c616271f5f542a8be95515135fdd1a638f6"},
    {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082003cb93618964c111c70d69b60ac0dc6566d4c254c9b2a775faa2965ba8f8"},
    {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8660f736da75424949c14f7c8b1ac60a25b2f37cabdec95181834b405373e8a7"},
    {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2ada1abe2ad122b42407b2bfd79d6706a4940d4797f44bd740f5c98ca1ecda9b"},
    {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0db2766d0c482ee740abbe7d00a06cc4fb54f7e5a24d3cf27c3352be18a2b1e8"},
    {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4bdb404d9c2187260b34e2b22783c204fba8a9023a166cf77376190d9cf5a08"},
    {file = "grpcio-1.72.1-cp313-cp313-win32.whl", hash = "sha256:bb64722c3124c906a5b66e50a90fd36442642f653ba88a24f67d08e94bca59f3"},
    {file = "grpcio-1.72.1-cp313-cp313-win_amd64.whl", hash = "sha256:329cc6ff5b431df9614340d3825b066a1ff0a5809a01ba2e976ef48c65a0490b"},
    {file = "grpcio-1.72.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:8941b83addd503c1982090b4631804d0ff1edbbc6c85c9c20ed503b1dc65fef9"},
    {file = "grpcio-1.72.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:d29b80290c5eda561a4c291d6d5b4315a2a5095ab37061118d6e0781858aca0a"},
    {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:4ca56d955564db749c9c6d75e9c4c777854e22b2482d247fb6c5a02d5f28ea78"},
    {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b08a3ef14d2b01eef13882c6d3a2d8fb5fcd73db81bd1e3ab69d4ee75215433a"},
    {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7df49801b3b323e4a21047979e3834cd286b32ee5ceee46f5217826274721f"},
    {file = "grpcio-1.72.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9717617ba2ff65c058ef53b0d5e50f03e8350f0c5597f93bb5c980a31db990c8"},
    {file = "grpcio-1.72.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:212db80b1e8aa7792d51269bfb32164e2333a9bb273370ace3ed2a378505cb01"},
    {file = "grpcio-1.72.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a0d19947d4480af5f363f077f221e665931f479e2604280ac4eafe6daa71f77"},
    {file = "grpcio-1.72.1-cp39-cp39-win32.whl", hash = "sha256:7622ef647dc911ed010a817d9be501df4ae83495b8e5cdd35b555bdcf3880a3e"},
    {file = "grpcio-1.72.1-cp39-cp39-win_amd64.whl", hash = "sha256:f8d8fa7cd2a7f1b4207e215dec8bc07f1202682d9a216ebe028185c15faece30"},
    {file = "grpcio-1.72.1.tar.gz", hash = "sha256:87f62c94a40947cec1a0f91f95f5ba0aa8f799f23a1d42ae5be667b6b27b959c"},
    {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"},
    {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"},
    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"},
    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"},
    {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"},
    {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"},
    {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"},
    {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"},
    {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"},
    {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"},
    {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"},
    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"},
    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"},
    {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"},
    {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"},
    {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"},
    {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"},
    {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"},
    {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"},
    {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"},
    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"},
    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"},
    {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"},
    {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"},
    {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"},
    {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"},
    {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"},
    {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"},
    {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"},
    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"},
    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"},
    {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"},
    {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"},
    {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"},
    {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"},
    {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"},
    {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"},
    {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"},
    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"},
    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"},
    {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"},
    {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"},
    {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"},
    {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"},
    {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"},
    {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"},
    {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"},
    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"},
    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"},
    {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"},
    {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"},
    {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"},
    {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"},
    {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"},
    {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"},
]

[package.extras]
protobuf = ["grpcio-tools (>=1.72.1)"]
protobuf = ["grpcio-tools (>=1.67.1)"]

[[package]]
name = "grpcio-status"
@ -4434,6 +4446,25 @@ files = [
    {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
]

[[package]]
name = "inquirerpy"
version = "0.3.4"
description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)"
optional = false
python-versions = ">=3.7,<4.0"
groups = ["main"]
files = [
    {file = "InquirerPy-0.3.4-py3-none-any.whl", hash = "sha256:c65fdfbac1fa00e3ee4fb10679f4d3ed7a012abf4833910e63c295827fe2a7d4"},
    {file = "InquirerPy-0.3.4.tar.gz", hash = "sha256:89d2ada0111f337483cb41ae31073108b2ec1e618a49d7110b0d7ade89fc197e"},
]

[package.dependencies]
pfzy = ">=0.3.1,<0.4.0"
prompt-toolkit = ">=3.0.1,<4.0.0"

[package.extras]
docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"]

[[package]]
name = "installer"
version = "0.7.0"
@ -5609,25 +5640,26 @@ types-tqdm = "*"

[[package]]
name = "litellm"
version = "1.77.7"
version = "1.80.7"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
    {file = "litellm-1.77.7-py3-none-any.whl", hash = "sha256:1b3a1b17bd521a0ad25226fb62a912602c803922aabb4a16adf83834673be574"},
    {file = "litellm-1.77.7.tar.gz", hash = "sha256:e3398fb2575b98726e787c0a1481daed5938d58cafdcd96fbca80c312221af3e"},
    {file = "litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"},
    {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"},
]

[package.dependencies]
aiohttp = ">=3.10"
click = "*"
fastuuid = ">=0.13.0"
grpcio = ">=1.62.3,<1.68.0"
httpx = ">=0.23.0"
importlib-metadata = ">=6.8.0"
jinja2 = ">=3.1.2,<4.0.0"
jsonschema = ">=4.22.0,<5.0.0"
openai = ">=1.99.5"
openai = ">=2.8.0"
pydantic = ">=2.5.0,<3.0.0"
python-dotenv = ">=0.2.0"
tiktoken = ">=0.7.0"
@ -5635,10 +5667,10 @@ tokenizers = "*"

[package.extras]
caching = ["diskcache (>=5.6.1,<6.0.0)"]
extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"]
mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""]
proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
semantic-router = ["semantic-router ; python_version >= \"3.9\""]
proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"]
semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""]
utils = ["numpydoc"]

[[package]]
@ -5908,14 +5940,14 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]

[[package]]
name = "markdownify"
version = "1.1.0"
version = "1.2.2"
description = "Convert HTML to markdown."
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef"},
    {file = "markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd"},
    {file = "markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a"},
    {file = "markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09"},
]

[package.dependencies]
@ -7191,28 +7223,28 @@ pydantic = ">=2.9"

[[package]]
name = "openai"
version = "1.99.9"
version = "2.8.0"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main", "evaluation"]
files = [
    {file = "openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a"},
    {file = "openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92"},
    {file = "openai-2.8.0-py3-none-any.whl", hash = "sha256:ba975e347f6add2fe13529ccb94d54a578280e960765e5224c34b08d7e029ddf"},
    {file = "openai-2.8.0.tar.gz", hash = "sha256:4851908f6d6fcacbd47ba659c5ac084f7725b752b6bfa1e948b6fbfc111a6bad"},
]

[package.dependencies]
anyio = ">=3.5.0,<5"
distro = ">=1.7.0,<2"
httpx = ">=0.23.0,<1"
jiter = ">=0.4.0,<1"
jiter = ">=0.10.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tqdm = ">4"
typing-extensions = ">=4.11,<5"

[package.extras]
aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"]
aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
realtime = ["websockets (>=13,<16)"]
voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
@ -7347,14 +7379,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0

[[package]]
name = "openhands-agent-server"
version = "1.3.0"
version = "1.4.1"
description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
    {file = "openhands_agent_server-1.3.0-py3-none-any.whl", hash = "sha256:2f87f790c740dc3fb81821c5f9fa375af875fbb937ebca3baa6dc5c035035b3c"},
    {file = "openhands_agent_server-1.3.0.tar.gz", hash = "sha256:0a83ae77373f5c41d0ba0e22d8f0f6144d54d55784183a50b7c098c96cd5135c"},
    {file = "openhands_agent_server-1.4.1-py3-none-any.whl", hash = "sha256:1e621d15215a48e2398e23c58a791347f06c215c2344053aeb26b562c34a44ee"},
    {file = "openhands_agent_server-1.4.1.tar.gz", hash = "sha256:03010a5c8d63bbd5b088458eb75308ef16559018140d75a3644ae5bbc3531bbf"},
]

[package.dependencies]
@ -7362,6 +7394,7 @@ aiosqlite = ">=0.19"
alembic = ">=1.13"
docker = ">=7.1,<8"
fastapi = ">=0.104"
openhands-sdk = "*"
pydantic = ">=2"
sqlalchemy = ">=2"
uvicorn = ">=0.31.1"
@ -7370,21 +7403,21 @@ wsproto = ">=1.2.0"

[[package]]
name = "openhands-sdk"
version = "1.3.0"
version = "1.4.1"
description = "OpenHands SDK - Core functionality for building AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
    {file = "openhands_sdk-1.3.0-py3-none-any.whl", hash = "sha256:feee838346f8e60ea3e4d3391de7cb854314eb8b3c9e3dbbb56f98a784aadc56"},
    {file = "openhands_sdk-1.3.0.tar.gz", hash = "sha256:2d060803a78de462121b56dea717a66356922deb02276f37b29fae8af66343fb"},
    {file = "openhands_sdk-1.4.1-py3-none-any.whl", hash = "sha256:70e453eab7f9ab6b705198c2615fdd844b21e14b29d78afaf62724f4a440bcdc"},
    {file = "openhands_sdk-1.4.1.tar.gz", hash = "sha256:37365de25ed57cf8cc2a8003ab4d7a1fe2a40b49c8e8da84a3f1ea2b522eddf2"},
]

[package.dependencies]
deprecation = ">=2.1.0"
fastmcp = ">=2.11.3"
httpx = ">=0.27.0"
litellm = ">=1.77.7.dev9"
litellm = ">=1.80.7"
lmnr = ">=0.7.20"
pydantic = ">=2.11.7"
python-frontmatter = ">=1.1.0"
@ -7397,14 +7430,14 @@ boto3 = ["boto3 (>=1.35.0)"]

[[package]]
name = "openhands-tools"
version = "1.3.0"
version = "1.4.1"
description = "OpenHands Tools - Runtime tools for AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
    {file = "openhands_tools-1.3.0-py3-none-any.whl", hash = "sha256:f31056d87c3058ac92709f9161c7c602daeee3ed0cb4439097b43cda105ed03e"},
    {file = "openhands_tools-1.3.0.tar.gz", hash = "sha256:3da46f09e28593677d3e17252ce18584fcc13caab1a73213e66bd7edca2cebe0"},
    {file = "openhands_tools-1.4.1-py3-none-any.whl", hash = "sha256:8f40189a08bf80eb4a33219ee9ccc528f9c6c4f2d5c9ab807b06c3f3fe21a612"},
    {file = "openhands_tools-1.4.1.tar.gz", hash = "sha256:4c0caf87f520a207d9035191c77b7b5c53eeec996350a24ffaf7f740a6566b22"},
]

[package.dependencies]
@ -7416,6 +7449,7 @@ func-timeout = ">=4.3.5"
libtmux = ">=0.46.2"
openhands-sdk = "*"
pydantic = ">=2.11.7"
tom-swe = ">=1.0.3"

[[package]]
name = "openpyxl"
@ -7928,6 +7962,21 @@ files = [
[package.dependencies]
ptyprocess = ">=0.5"

[[package]]
name = "pfzy"
version = "0.3.4"
description = "Python port of the fzy fuzzy string matching algorithm"
optional = false
python-versions = ">=3.7,<4.0"
groups = ["main"]
files = [
    {file = "pfzy-0.3.4-py3-none-any.whl", hash = "sha256:5f50d5b2b3207fa72e7ec0ef08372ef652685470974a107d0d4999fc5a903a96"},
    {file = "pfzy-0.3.4.tar.gz", hash = "sha256:717ea765dd10b63618e7298b2d98efd819e0b30cd5905c9707223dceeb94b3f1"},
]

[package.extras]
docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"]

[[package]]
name = "pg8000"
version = "1.31.5"
@ -14969,6 +15018,31 @@ dev = ["tokenizers[testing]"]
docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"]

[[package]]
name = "tom-swe"
version = "1.0.3"
description = "Theory of Mind modeling for Software Engineering assistants"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "tom_swe-1.0.3-py3-none-any.whl", hash = "sha256:7b1172b29eb5c8fb7f1975016e7b6a238511b9ac2a7a980bd400dcb4e29773f2"},
    {file = "tom_swe-1.0.3.tar.gz", hash = "sha256:57c97d0104e563f15bd39edaf2aa6ac4c3e9444afd437fb92458700d22c6c0f5"},
]

[package.dependencies]
jinja2 = ">=3.0.0"
json-repair = ">=0.1.0"
litellm = ">=1.0.0"
pydantic = ">=2.0.0"
python-dotenv = ">=1.0.0"
tiktoken = ">=0.8.0"
tqdm = ">=4.65.0"

[package.extras]
dev = ["aiofiles (>=23.0.0)", "black (>=22.0.0)", "datasets (>=2.0.0)", "fastapi (>=0.104.0)", "httpx (>=0.25.0)", "huggingface-hub (>=0.0.0)", "isort (>=5.0.0)", "mypy (>=1.0.0)", "numpy (>=1.24.0)", "pandas (>=2.0.0)", "pre-commit (>=3.6.0)", "pytest (>=7.0.0)", "pytest-cov (>=6.2.1)", "rich (>=13.0.0)", "ruff (>=0.3.0)", "typing-extensions (>=4.0.0)", "uvicorn (>=0.24.0)"]
search = ["bm25s (>=0.2.0)", "pystemmer (>=2.2.0)"]

[[package]]
name = "toml"
version = "0.10.2"
@ -16748,4 +16822,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api
[metadata]
lock-version = "2.1"
python-versions = "^3.12,<3.14"
content-hash = "52eda0cd6b8a2e245057c005edcf93daf4151e99d019477c8b74ddd0940e890f"
content-hash = "c208fcc692f74540f7b6e822136002dd0f079a3d8d1b93227a5bb07a7f4432cb"

@ -26,8 +26,8 @@ build = "build_vscode.py" # Build VSCode extension during Poetry build

[tool.poetry.dependencies]
python = "^3.12,<3.14"
litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272)
openai = "1.99.9" # Pin due to litellm incompatibility with >=1.100.0 (BerriAI/litellm#13711)
litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272)
openai = "2.8.0" # Pin due to litellm incompatibility with >=1.100.0 (BerriAI/litellm#13711)
aiohttp = ">=3.9.0,!=3.11.13" # Pin to avoid yanked version 3.11.13
google-genai = "*" # To use litellm with Gemini Pro API
google-api-python-client = "^2.164.0" # For Google Sheets API
@ -116,9 +116,9 @@ pybase62 = "^1.0.0"
#openhands-agent-server = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-agent-server", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" }
#openhands-sdk = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-sdk", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" }
#openhands-tools = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-tools", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" }
openhands-sdk = "1.3.0"
openhands-agent-server = "1.3.0"
openhands-tools = "1.3.0"
openhands-sdk = "1.4.1"
openhands-agent-server = "1.4.1"
openhands-tools = "1.4.1"
python-jose = { version = ">=3.3", extras = [ "cryptography" ] }
sqlalchemy = { extras = [ "asyncio" ], version = "^2.0.40" }
pg8000 = "^1.31.5"

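A quick, illustrative way (not part of the commit) to confirm the aligned pins above in an installed environment:

import importlib.metadata as md

for pkg in ('openai', 'litellm', 'openhands-sdk', 'openhands-agent-server', 'openhands-tools'):
    print(pkg, md.version(pkg))
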
628
tests/unit/app_server/test_app_conversation_service_base.py
Normal file
@ -0,0 +1,628 @@
"""Unit tests for git functionality in AppConversationServiceBase.

This module tests the git-related functionality, specifically the clone_or_init_git_repo method
and the recent bug fixes for git checkout operations.
"""

import subprocess
from unittest.mock import AsyncMock, MagicMock, Mock, patch

import pytest

from openhands.app_server.app_conversation.app_conversation_models import AgentType
from openhands.app_server.app_conversation.app_conversation_service_base import (
    AppConversationServiceBase,
)
from openhands.app_server.user.user_context import UserContext


class MockUserInfo:
    """Mock class for UserInfo to simulate user settings."""

    def __init__(
        self, git_user_name: str | None = None, git_user_email: str | None = None
    ):
        self.git_user_name = git_user_name
        self.git_user_email = git_user_email


class MockCommandResult:
    """Mock class for command execution result."""

    def __init__(self, exit_code: int = 0, stderr: str = ''):
        self.exit_code = exit_code
        self.stderr = stderr


class MockWorkspace:
    """Mock class for AsyncRemoteWorkspace."""

    def __init__(self, working_dir: str = '/workspace'):
        self.working_dir = working_dir
        self.execute_command = AsyncMock(return_value=MockCommandResult())


class MockAppConversationServiceBase:
    """Mock class to test git functionality without complex dependencies."""

    def __init__(self):
        self.logger = MagicMock()

    async def clone_or_init_git_repo(
        self,
        workspace_path: str,
        repo_url: str,
        branch: str = 'main',
        timeout: int = 300,
    ) -> bool:
        """Clone or initialize a git repository.

        This is a simplified version of the actual method for testing purposes.
        """
        try:
            # Try to clone the repository
            clone_result = subprocess.run(
                ['git', 'clone', '--branch', branch, repo_url, workspace_path],
                capture_output=True,
                text=True,
                timeout=timeout,
            )

            if clone_result.returncode == 0:
                self.logger.info(
                    f'Successfully cloned repository {repo_url} to {workspace_path}'
                )
                return True

            # If clone fails, try to checkout the branch
            checkout_result = subprocess.run(
                ['git', 'checkout', branch],
                cwd=workspace_path,
                capture_output=True,
                text=True,
                timeout=timeout,
            )

            if checkout_result.returncode == 0:
                self.logger.info(f'Successfully checked out branch {branch}')
                return True
            else:
                self.logger.error(
                    f'Failed to checkout branch {branch}: {checkout_result.stderr}'
                )
                return False

        except subprocess.TimeoutExpired:
            self.logger.error(f'Git operation timed out after {timeout} seconds')
            return False
        except Exception as e:
            self.logger.error(f'Git operation failed: {str(e)}')
            return False


@pytest.fixture
def service():
    """Create a mock service instance for testing."""
    return MockAppConversationServiceBase()


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_successful_clone(service):
    """Test successful git clone operation."""
    with patch('subprocess.run') as mock_run:
        # Mock successful clone
        mock_run.return_value = MagicMock(returncode=0, stderr='', stdout='Cloning...')

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='main',
            timeout=300,
        )

        assert result is True
        mock_run.assert_called_once_with(
            [
                'git',
                'clone',
                '--branch',
                'main',
                'https://github.com/test/repo.git',
                '/tmp/test_repo',
            ],
            capture_output=True,
            text=True,
            timeout=300,
        )
        service.logger.info.assert_called_with(
            'Successfully cloned repository https://github.com/test/repo.git to /tmp/test_repo'
        )


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_clone_fails_checkout_succeeds(service):
    """Test git clone fails but checkout succeeds."""
    with patch('subprocess.run') as mock_run:
        # Mock clone failure, then checkout success
        mock_run.side_effect = [
            MagicMock(returncode=1, stderr='Clone failed', stdout=''),  # Clone fails
            MagicMock(
                returncode=0, stderr='', stdout='Switched to branch'
            ),  # Checkout succeeds
        ]

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='feature-branch',
            timeout=300,
        )

        assert result is True
        assert mock_run.call_count == 2

        # Check clone call
        mock_run.assert_any_call(
            [
                'git',
                'clone',
                '--branch',
                'feature-branch',
                'https://github.com/test/repo.git',
                '/tmp/test_repo',
            ],
            capture_output=True,
            text=True,
            timeout=300,
        )

        # Check checkout call
        mock_run.assert_any_call(
            ['git', 'checkout', 'feature-branch'],
            cwd='/tmp/test_repo',
            capture_output=True,
            text=True,
            timeout=300,
        )

        service.logger.info.assert_called_with(
            'Successfully checked out branch feature-branch'
        )


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_both_operations_fail(service):
    """Test both git clone and checkout operations fail."""
    with patch('subprocess.run') as mock_run:
        # Mock both operations failing
        mock_run.side_effect = [
            MagicMock(returncode=1, stderr='Clone failed', stdout=''),  # Clone fails
            MagicMock(
                returncode=1, stderr='Checkout failed', stdout=''
            ),  # Checkout fails
        ]

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='nonexistent-branch',
            timeout=300,
        )

        assert result is False
        assert mock_run.call_count == 2
        service.logger.error.assert_called_with(
            'Failed to checkout branch nonexistent-branch: Checkout failed'
        )


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_timeout(service):
    """Test git operation timeout."""
    with patch('subprocess.run') as mock_run:
        # Mock timeout exception
        mock_run.side_effect = subprocess.TimeoutExpired(
            cmd=['git', 'clone'], timeout=300
        )

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='main',
            timeout=300,
        )

        assert result is False
        service.logger.error.assert_called_with(
            'Git operation timed out after 300 seconds'
        )


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_exception(service):
    """Test git operation with unexpected exception."""
    with patch('subprocess.run') as mock_run:
        # Mock unexpected exception
        mock_run.side_effect = Exception('Unexpected error')

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='main',
            timeout=300,
        )

        assert result is False
        service.logger.error.assert_called_with(
            'Git operation failed: Unexpected error'
        )


@pytest.mark.asyncio
async def test_clone_or_init_git_repo_custom_timeout(service):
    """Test git operation with custom timeout."""
    with patch('subprocess.run') as mock_run:
        # Mock successful clone with custom timeout
        mock_run.return_value = MagicMock(returncode=0, stderr='', stdout='Cloning...')

        result = await service.clone_or_init_git_repo(
            workspace_path='/tmp/test_repo',
            repo_url='https://github.com/test/repo.git',
            branch='main',
            timeout=600,  # Custom timeout
        )

        assert result is True
        mock_run.assert_called_once_with(
            [
                'git',
                'clone',
                '--branch',
                'main',
                'https://github.com/test/repo.git',
                '/tmp/test_repo',
            ],
            capture_output=True,
            text=True,
            timeout=600,  # Verify custom timeout is used
        )


@patch(
    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
)
def test_create_condenser_default_agent_with_none_max_size(mock_condenser_class):
    """Test _create_condenser for DEFAULT agent with condenser_max_size = None uses default."""
    # Arrange
    mock_user_context = Mock(spec=UserContext)
    with patch.object(
        AppConversationServiceBase,
        '__abstractmethods__',
        set(),
    ):
        service = AppConversationServiceBase(
            init_git_in_empty_workspace=True,
            user_context=mock_user_context,
        )
    mock_llm = MagicMock()
    mock_llm_copy = MagicMock()
    mock_llm_copy.usage_id = 'condenser'
    mock_llm.model_copy.return_value = mock_llm_copy
    mock_condenser_instance = MagicMock()
    mock_condenser_class.return_value = mock_condenser_instance

    # Act
    service._create_condenser(mock_llm, AgentType.DEFAULT, None)

    # Assert
    mock_condenser_class.assert_called_once()
    call_kwargs = mock_condenser_class.call_args[1]
    # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120)
    assert 'max_size' not in call_kwargs
    # keep_first is never passed (uses SDK default of 4)
    assert 'keep_first' not in call_kwargs
    assert call_kwargs['llm'].usage_id == 'condenser'
    mock_llm.model_copy.assert_called_once()


@patch(
    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
)
def test_create_condenser_default_agent_with_custom_max_size(mock_condenser_class):
    """Test _create_condenser for DEFAULT agent with custom condenser_max_size."""
    # Arrange
    mock_user_context = Mock(spec=UserContext)
    with patch.object(
        AppConversationServiceBase,
        '__abstractmethods__',
        set(),
    ):
        service = AppConversationServiceBase(
            init_git_in_empty_workspace=True,
            user_context=mock_user_context,
        )
    mock_llm = MagicMock()
    mock_llm_copy = MagicMock()
    mock_llm_copy.usage_id = 'condenser'
    mock_llm.model_copy.return_value = mock_llm_copy
    mock_condenser_instance = MagicMock()
    mock_condenser_class.return_value = mock_condenser_instance

    # Act
    service._create_condenser(mock_llm, AgentType.DEFAULT, 150)

    # Assert
    mock_condenser_class.assert_called_once()
    call_kwargs = mock_condenser_class.call_args[1]
    assert call_kwargs['max_size'] == 150  # Custom value should be used
    # keep_first is never passed (uses SDK default of 4)
    assert 'keep_first' not in call_kwargs
    assert call_kwargs['llm'].usage_id == 'condenser'
    mock_llm.model_copy.assert_called_once()


@patch(
    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
)
def test_create_condenser_plan_agent_with_none_max_size(mock_condenser_class):
    """Test _create_condenser for PLAN agent with condenser_max_size = None uses default."""
    # Arrange
    mock_user_context = Mock(spec=UserContext)
    with patch.object(
        AppConversationServiceBase,
        '__abstractmethods__',
        set(),
    ):
        service = AppConversationServiceBase(
            init_git_in_empty_workspace=True,
            user_context=mock_user_context,
        )
    mock_llm = MagicMock()
    mock_llm_copy = MagicMock()
    mock_llm_copy.usage_id = 'planning_condenser'
    mock_llm.model_copy.return_value = mock_llm_copy
    mock_condenser_instance = MagicMock()
    mock_condenser_class.return_value = mock_condenser_instance

    # Act
    service._create_condenser(mock_llm, AgentType.PLAN, None)

    # Assert
    mock_condenser_class.assert_called_once()
    call_kwargs = mock_condenser_class.call_args[1]
    # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120)
    assert 'max_size' not in call_kwargs
    # keep_first is never passed (uses SDK default of 4)
    assert 'keep_first' not in call_kwargs
    assert call_kwargs['llm'].usage_id == 'planning_condenser'
    mock_llm.model_copy.assert_called_once()


@patch(
    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
)
def test_create_condenser_plan_agent_with_custom_max_size(mock_condenser_class):
    """Test _create_condenser for PLAN agent with custom condenser_max_size."""
    # Arrange
    mock_user_context = Mock(spec=UserContext)
    with patch.object(
        AppConversationServiceBase,
        '__abstractmethods__',
        set(),
    ):
        service = AppConversationServiceBase(
            init_git_in_empty_workspace=True,
            user_context=mock_user_context,
        )
    mock_llm = MagicMock()
    mock_llm_copy = MagicMock()
    mock_llm_copy.usage_id = 'planning_condenser'
    mock_llm.model_copy.return_value = mock_llm_copy
    mock_condenser_instance = MagicMock()
    mock_condenser_class.return_value = mock_condenser_instance

    # Act
    service._create_condenser(mock_llm, AgentType.PLAN, 200)

    # Assert
    mock_condenser_class.assert_called_once()
    call_kwargs = mock_condenser_class.call_args[1]
    assert call_kwargs['max_size'] == 200  # Custom value should be used
    # keep_first is never passed (uses SDK default of 4)
    assert 'keep_first' not in call_kwargs
    assert call_kwargs['llm'].usage_id == 'planning_condenser'
    mock_llm.model_copy.assert_called_once()


# =============================================================================
# Tests for _configure_git_user_settings
# =============================================================================


def _create_service_with_mock_user_context(user_info: MockUserInfo) -> tuple:
    """Create a mock service with the actual _configure_git_user_settings method.

    Uses MagicMock for the service but binds the real method for testing.

    Returns a tuple of (service, mock_user_context) for testing.
    """
    mock_user_context = MagicMock()
    mock_user_context.get_user_info = AsyncMock(return_value=user_info)

    # Create a simple mock service and set required attribute
    service = MagicMock()
    service.user_context = mock_user_context

    # Bind the actual method from the real class to test real implementation
    service._configure_git_user_settings = (
        lambda workspace: AppConversationServiceBase._configure_git_user_settings(
            service, workspace
        )
    )

    return service, mock_user_context

@pytest.fixture
def mock_workspace():
    """Create a mock workspace instance for testing."""
    return MockWorkspace(working_dir='/workspace/project')


@pytest.mark.asyncio
async def test_configure_git_user_settings_both_name_and_email(mock_workspace):
    """Test configuring both git user name and email."""
    user_info = MockUserInfo(
        git_user_name='Test User', git_user_email='test@example.com'
    )
    service, mock_user_context = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Verify get_user_info was called
    mock_user_context.get_user_info.assert_called_once()

    # Verify both git config commands were executed
    assert mock_workspace.execute_command.call_count == 2

    # Check git config user.name call
    mock_workspace.execute_command.assert_any_call(
        'git config --global user.name "Test User"', '/workspace/project'
    )

    # Check git config user.email call
    mock_workspace.execute_command.assert_any_call(
        'git config --global user.email "test@example.com"', '/workspace/project'
    )


@pytest.mark.asyncio
async def test_configure_git_user_settings_only_name(mock_workspace):
    """Test configuring only git user name."""
    user_info = MockUserInfo(git_user_name='Test User', git_user_email=None)
    service, _ = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Verify only user.name was configured
    assert mock_workspace.execute_command.call_count == 1
    mock_workspace.execute_command.assert_called_once_with(
        'git config --global user.name "Test User"', '/workspace/project'
    )


@pytest.mark.asyncio
async def test_configure_git_user_settings_only_email(mock_workspace):
    """Test configuring only git user email."""
    user_info = MockUserInfo(git_user_name=None, git_user_email='test@example.com')
    service, _ = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Verify only user.email was configured
    assert mock_workspace.execute_command.call_count == 1
    mock_workspace.execute_command.assert_called_once_with(
        'git config --global user.email "test@example.com"', '/workspace/project'
    )


@pytest.mark.asyncio
async def test_configure_git_user_settings_neither_set(mock_workspace):
    """Test when neither git user name nor email is set."""
    user_info = MockUserInfo(git_user_name=None, git_user_email=None)
    service, _ = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Verify no git config commands were executed
    mock_workspace.execute_command.assert_not_called()


@pytest.mark.asyncio
async def test_configure_git_user_settings_empty_strings(mock_workspace):
    """Test when git user name and email are empty strings."""
    user_info = MockUserInfo(git_user_name='', git_user_email='')
    service, _ = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Empty strings are falsy, so no commands should be executed
    mock_workspace.execute_command.assert_not_called()


@pytest.mark.asyncio
async def test_configure_git_user_settings_get_user_info_fails(mock_workspace):
    """Test handling of exception when get_user_info fails."""
    user_info = MockUserInfo()
    service, mock_user_context = _create_service_with_mock_user_context(user_info)
    mock_user_context.get_user_info = AsyncMock(
        side_effect=Exception('User info error')
    )

    # Should not raise exception, just log warning
    await service._configure_git_user_settings(mock_workspace)

    # Verify no git config commands were executed
    mock_workspace.execute_command.assert_not_called()


@pytest.mark.asyncio
async def test_configure_git_user_settings_name_command_fails(mock_workspace):
    """Test handling when git config user.name command fails."""
    user_info = MockUserInfo(
        git_user_name='Test User', git_user_email='test@example.com'
    )
    service, _ = _create_service_with_mock_user_context(user_info)

    # Make the first command fail (user.name), second succeed (user.email)
    mock_workspace.execute_command = AsyncMock(
        side_effect=[
            MockCommandResult(exit_code=1, stderr='Permission denied'),
            MockCommandResult(exit_code=0),
        ]
    )

    # Should not raise exception
    await service._configure_git_user_settings(mock_workspace)

    # Verify both commands were still attempted
    assert mock_workspace.execute_command.call_count == 2


@pytest.mark.asyncio
async def test_configure_git_user_settings_email_command_fails(mock_workspace):
    """Test handling when git config user.email command fails."""
    user_info = MockUserInfo(
        git_user_name='Test User', git_user_email='test@example.com'
    )
    service, _ = _create_service_with_mock_user_context(user_info)

    # Make the first command succeed (user.name), second fail (user.email)
    mock_workspace.execute_command = AsyncMock(
        side_effect=[
            MockCommandResult(exit_code=0),
            MockCommandResult(exit_code=1, stderr='Permission denied'),
        ]
    )

    # Should not raise exception
    await service._configure_git_user_settings(mock_workspace)

    # Verify both commands were still attempted
    assert mock_workspace.execute_command.call_count == 2


@pytest.mark.asyncio
async def test_configure_git_user_settings_special_characters_in_name(mock_workspace):
    """Test git user name with special characters."""
    user_info = MockUserInfo(
        git_user_name="Test O'Brien", git_user_email='test@example.com'
    )
    service, _ = _create_service_with_mock_user_context(user_info)

    await service._configure_git_user_settings(mock_workspace)

    # Verify the name is passed with special characters
    mock_workspace.execute_command.assert_any_call(
        'git config --global user.name "Test O\'Brien"', '/workspace/project'
    )
@@ -0,0 +1,721 @@
"""Unit tests for the methods in LiveStatusAppConversationService."""

from unittest.mock import AsyncMock, Mock, patch
from uuid import UUID, uuid4

import pytest

from openhands.agent_server.models import SendMessageRequest, StartConversationRequest
from openhands.app_server.app_conversation.app_conversation_models import AgentType
from openhands.app_server.app_conversation.live_status_app_conversation_service import (
    LiveStatusAppConversationService,
)
from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxStatus
from openhands.app_server.user.user_context import UserContext
from openhands.integrations.provider import ProviderType
from openhands.sdk import Agent
from openhands.sdk.conversation.secret_source import LookupSecret, StaticSecret
from openhands.sdk.llm import LLM
from openhands.sdk.workspace import LocalWorkspace
from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace
from openhands.server.types import AppMode


class TestLiveStatusAppConversationService:
    """Test cases for the methods in LiveStatusAppConversationService."""

    def setup_method(self):
        """Set up test fixtures."""
        # Create mock dependencies
        self.mock_user_context = Mock(spec=UserContext)
        self.mock_jwt_service = Mock()
        self.mock_sandbox_service = Mock()
        self.mock_sandbox_spec_service = Mock()
        self.mock_app_conversation_info_service = Mock()
        self.mock_app_conversation_start_task_service = Mock()
        self.mock_event_callback_service = Mock()
        self.mock_httpx_client = Mock()

        # Create service instance
        self.service = LiveStatusAppConversationService(
            init_git_in_empty_workspace=True,
            user_context=self.mock_user_context,
            app_conversation_info_service=self.mock_app_conversation_info_service,
            app_conversation_start_task_service=self.mock_app_conversation_start_task_service,
            event_callback_service=self.mock_event_callback_service,
            sandbox_service=self.mock_sandbox_service,
            sandbox_spec_service=self.mock_sandbox_spec_service,
            jwt_service=self.mock_jwt_service,
            sandbox_startup_timeout=30,
            sandbox_startup_poll_frequency=1,
            httpx_client=self.mock_httpx_client,
            web_url='https://test.example.com',
            access_token_hard_timeout=None,
            app_mode='test',
            keycloak_auth_cookie=None,
        )

        # Mock user info
        self.mock_user = Mock()
        self.mock_user.id = 'test_user_123'
        self.mock_user.llm_model = 'gpt-4'
        self.mock_user.llm_base_url = 'https://api.openai.com/v1'
        self.mock_user.llm_api_key = 'test_api_key'
        self.mock_user.confirmation_mode = False
        self.mock_user.search_api_key = None  # Default to None
        self.mock_user.condenser_max_size = None  # Default to None

        # Mock sandbox
        self.mock_sandbox = Mock(spec=SandboxInfo)
        self.mock_sandbox.id = uuid4()
        self.mock_sandbox.status = SandboxStatus.RUNNING

    @pytest.mark.asyncio
    async def test_setup_secrets_for_git_provider_no_provider(self):
        """Test _setup_secrets_for_git_provider with no git provider."""
        # Arrange
        base_secrets = {'existing': 'secret'}
        self.mock_user_context.get_secrets.return_value = base_secrets

        # Act
        result = await self.service._setup_secrets_for_git_provider(
            None, self.mock_user
        )

        # Assert
        assert result == base_secrets
        self.mock_user_context.get_secrets.assert_called_once()

    @pytest.mark.asyncio
    async def test_setup_secrets_for_git_provider_with_web_url(self):
        """Test _setup_secrets_for_git_provider with web URL (creates access token)."""
        # Arrange
        base_secrets = {}
        self.mock_user_context.get_secrets.return_value = base_secrets
        self.mock_jwt_service.create_jws_token.return_value = 'test_access_token'
        git_provider = ProviderType.GITHUB

        # Act
        result = await self.service._setup_secrets_for_git_provider(
            git_provider, self.mock_user
        )

        # Assert
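        # A LookupSecret defers token retrieval to the webhook endpoint,
        # so the sandbox fetches a fresh token instead of holding a static one.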
        assert 'GITHUB_TOKEN' in result
        assert isinstance(result['GITHUB_TOKEN'], LookupSecret)
        assert (
            result['GITHUB_TOKEN'].url
            == 'https://test.example.com/api/v1/webhooks/secrets'
        )
        assert result['GITHUB_TOKEN'].headers['X-Access-Token'] == 'test_access_token'

        self.mock_jwt_service.create_jws_token.assert_called_once_with(
            payload={
                'user_id': self.mock_user.id,
                'provider_type': git_provider.value,
            },
            expires_in=None,
        )

    @pytest.mark.asyncio
    async def test_setup_secrets_for_git_provider_with_saas_mode(self):
        """Test _setup_secrets_for_git_provider with SaaS mode (includes keycloak cookie)."""
        # Arrange
        self.service.app_mode = 'saas'
        self.service.keycloak_auth_cookie = 'test_cookie'
        base_secrets = {}
        self.mock_user_context.get_secrets.return_value = base_secrets
        self.mock_jwt_service.create_jws_token.return_value = 'test_access_token'
        git_provider = ProviderType.GITLAB

        # Act
        result = await self.service._setup_secrets_for_git_provider(
            git_provider, self.mock_user
        )

        # Assert
        assert 'GITLAB_TOKEN' in result
        lookup_secret = result['GITLAB_TOKEN']
        assert isinstance(lookup_secret, LookupSecret)
        assert 'Cookie' in lookup_secret.headers
        assert lookup_secret.headers['Cookie'] == 'keycloak_auth=test_cookie'

    @pytest.mark.asyncio
    async def test_setup_secrets_for_git_provider_without_web_url(self):
        """Test _setup_secrets_for_git_provider without web URL (uses static token)."""
        # Arrange
        self.service.web_url = None
        base_secrets = {}
        self.mock_user_context.get_secrets.return_value = base_secrets
        self.mock_user_context.get_latest_token.return_value = 'static_token_value'
        git_provider = ProviderType.GITHUB

        # Act
        result = await self.service._setup_secrets_for_git_provider(
            git_provider, self.mock_user
        )

        # Assert
        assert 'GITHUB_TOKEN' in result
        assert isinstance(result['GITHUB_TOKEN'], StaticSecret)
        assert result['GITHUB_TOKEN'].value.get_secret_value() == 'static_token_value'
        self.mock_user_context.get_latest_token.assert_called_once_with(git_provider)

    @pytest.mark.asyncio
    async def test_setup_secrets_for_git_provider_no_static_token(self):
        """Test _setup_secrets_for_git_provider when no static token is available."""
        # Arrange
        self.service.web_url = None
        base_secrets = {}
        self.mock_user_context.get_secrets.return_value = base_secrets
        self.mock_user_context.get_latest_token.return_value = None
        git_provider = ProviderType.GITHUB

        # Act
        result = await self.service._setup_secrets_for_git_provider(
            git_provider, self.mock_user
        )

        # Assert
        assert 'GITHUB_TOKEN' not in result
        assert result == base_secrets

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_with_custom_model(self):
        """Test _configure_llm_and_mcp with custom LLM model."""
        # Arrange
        custom_model = 'gpt-3.5-turbo'
        self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key'

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, custom_model
        )

        # Assert
        assert isinstance(llm, LLM)
        assert llm.model == custom_model
        assert llm.base_url == self.mock_user.llm_base_url
        assert llm.api_key.get_secret_value() == self.mock_user.llm_api_key
        assert llm.usage_id == 'agent'

        assert 'default' in mcp_config
        assert mcp_config['default']['url'] == 'https://test.example.com/mcp/mcp'
        assert mcp_config['default']['headers']['X-Session-API-Key'] == 'mcp_api_key'

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_with_user_default_model(self):
        """Test _configure_llm_and_mcp using user's default model."""
        # Arrange
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert llm.model == self.mock_user.llm_model
        assert 'default' in mcp_config
        assert 'headers' not in mcp_config['default']

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_without_web_url(self):
        """Test _configure_llm_and_mcp without web URL (no MCP config)."""
        # Arrange
        self.service.web_url = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert mcp_config == {}

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_tavily_with_user_search_api_key(self):
        """Test _configure_llm_and_mcp adds tavily when user has search_api_key."""
        # Arrange
        from pydantic import SecretStr

        self.mock_user.search_api_key = SecretStr('user_search_key')
        self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key'

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'default' in mcp_config
        assert 'tavily' in mcp_config
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key'
        )

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_tavily_with_env_tavily_key(self):
        """Test _configure_llm_and_mcp adds tavily when service has tavily_api_key."""
        # Arrange
        self.service.tavily_api_key = 'env_tavily_key'
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'default' in mcp_config
        assert 'tavily' in mcp_config
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key'
        )

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_tavily_user_key_takes_precedence(self):
        """Test that the user's search_api_key takes precedence over the env key."""
        # Arrange
        from pydantic import SecretStr

        self.mock_user.search_api_key = SecretStr('user_search_key')
        self.service.tavily_api_key = 'env_tavily_key'
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'tavily' in mcp_config
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key'
        )

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_no_tavily_without_keys(self):
        """Test _configure_llm_and_mcp does not add tavily when no keys are available."""
        # Arrange
        self.mock_user.search_api_key = None
        self.service.tavily_api_key = None
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'default' in mcp_config
        assert 'tavily' not in mcp_config

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_saas_mode_no_tavily_without_user_key(self):
        """Test _configure_llm_and_mcp does not add tavily in SAAS mode without user search_api_key.

        In SAAS mode, the global tavily_api_key should not be passed to the service instance,
        so tavily should only be added if the user has their own search_api_key.
        """
        # Arrange - simulate SAAS mode where no global tavily key is available
        self.service.app_mode = AppMode.SAAS.value
        self.service.tavily_api_key = None  # In SAAS mode, this should be None
        self.mock_user.search_api_key = None
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'default' in mcp_config
        assert 'tavily' not in mcp_config

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_saas_mode_with_user_search_key(self):
        """Test _configure_llm_and_mcp adds tavily in SAAS mode when user has search_api_key.

        Even in SAAS mode, if the user has their own search_api_key, tavily should be added.
        """
        # Arrange - simulate SAAS mode with user having their own search key
        from pydantic import SecretStr

        self.service.app_mode = AppMode.SAAS.value
        self.service.tavily_api_key = None  # In SAAS mode, this should be None
        self.mock_user.search_api_key = SecretStr('user_search_key')
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'default' in mcp_config
        assert 'tavily' in mcp_config
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key'
        )

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_tavily_with_empty_user_search_key(self):
        """Test _configure_llm_and_mcp handles empty user search_api_key correctly."""
        # Arrange
        from pydantic import SecretStr

        self.mock_user.search_api_key = SecretStr('')  # Empty string
        self.service.tavily_api_key = 'env_tavily_key'
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'tavily' in mcp_config
        # Should fall back to env key since user key is empty
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key'
        )

    @pytest.mark.asyncio
    async def test_configure_llm_and_mcp_tavily_with_whitespace_user_search_key(self):
        """Test _configure_llm_and_mcp handles whitespace-only user search_api_key correctly."""
        # Arrange
        from pydantic import SecretStr

        self.mock_user.search_api_key = SecretStr(' ')  # Whitespace only
        self.service.tavily_api_key = 'env_tavily_key'
        self.mock_user_context.get_mcp_api_key.return_value = None

        # Act
        llm, mcp_config = await self.service._configure_llm_and_mcp(
            self.mock_user, None
        )

        # Assert
        assert isinstance(llm, LLM)
        assert 'tavily' in mcp_config
        # Should fall back to env key since user key is whitespace only
        assert (
            mcp_config['tavily']['url']
            == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key'
        )

    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.get_planning_tools'
    )
    @patch(
        'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser'
    )
    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.format_plan_structure'
    )
    def test_create_agent_with_context_planning_agent(
        self, mock_format_plan, mock_create_condenser, mock_get_tools
    ):
        """Test _create_agent_with_context for planning agent type."""
        # Arrange
        mock_llm = Mock(spec=LLM)
        mock_llm.model_copy.return_value = mock_llm
        mock_get_tools.return_value = []
        mock_condenser = Mock()
        mock_create_condenser.return_value = mock_condenser
        mock_format_plan.return_value = 'test_plan_structure'
        mcp_config = {'default': {'url': 'test'}}
        system_message_suffix = 'Test suffix'

        # Act
        with patch(
            'openhands.app_server.app_conversation.live_status_app_conversation_service.Agent'
        ) as mock_agent_class:
            mock_agent_instance = Mock()
            mock_agent_instance.model_copy.return_value = mock_agent_instance
            mock_agent_class.return_value = mock_agent_instance

            self.service._create_agent_with_context(
                mock_llm,
                AgentType.PLAN,
                system_message_suffix,
                mcp_config,
                self.mock_user.condenser_max_size,
            )

        # Assert
        mock_agent_class.assert_called_once()
        call_kwargs = mock_agent_class.call_args[1]
        assert call_kwargs['llm'] == mock_llm
        assert call_kwargs['system_prompt_filename'] == 'system_prompt_planning.j2'
        assert (
            call_kwargs['system_prompt_kwargs']['plan_structure']
            == 'test_plan_structure'
        )
        assert call_kwargs['mcp_config'] == mcp_config
        assert call_kwargs['security_analyzer'] is None
        assert call_kwargs['condenser'] == mock_condenser
        mock_create_condenser.assert_called_once_with(
            mock_llm, AgentType.PLAN, self.mock_user.condenser_max_size
        )

    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_tools'
    )
    @patch(
        'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser'
    )
    def test_create_agent_with_context_default_agent(
        self, mock_create_condenser, mock_get_tools
    ):
        """Test _create_agent_with_context for default agent type."""
        # Arrange
        mock_llm = Mock(spec=LLM)
        mock_llm.model_copy.return_value = mock_llm
        mock_get_tools.return_value = []
        mock_condenser = Mock()
        mock_create_condenser.return_value = mock_condenser
        mcp_config = {'default': {'url': 'test'}}

        # Act
        with patch(
            'openhands.app_server.app_conversation.live_status_app_conversation_service.Agent'
        ) as mock_agent_class:
            mock_agent_instance = Mock()
            mock_agent_instance.model_copy.return_value = mock_agent_instance
            mock_agent_class.return_value = mock_agent_instance

            self.service._create_agent_with_context(
                mock_llm,
                AgentType.DEFAULT,
                None,
                mcp_config,
                self.mock_user.condenser_max_size,
            )

        # Assert
        mock_agent_class.assert_called_once()
        call_kwargs = mock_agent_class.call_args[1]
        assert call_kwargs['llm'] == mock_llm
        assert call_kwargs['system_prompt_kwargs']['cli_mode'] is False
        assert call_kwargs['mcp_config'] == mcp_config
        assert call_kwargs['condenser'] == mock_condenser
        mock_get_tools.assert_called_once_with(enable_browser=True)
        mock_create_condenser.assert_called_once_with(
            mock_llm, AgentType.DEFAULT, self.mock_user.condenser_max_size
        )

    @pytest.mark.asyncio
    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl'
    )
    async def test_finalize_conversation_request_with_skills(
        self, mock_experiment_manager
    ):
        """Test _finalize_conversation_request with skills loading."""
        # Arrange
        mock_agent = Mock(spec=Agent)
        mock_updated_agent = Mock(spec=Agent)
        mock_experiment_manager.run_agent_variant_tests__v1.return_value = (
            mock_updated_agent
        )

        conversation_id = uuid4()
        workspace = LocalWorkspace(working_dir='/test')
        initial_message = Mock(spec=SendMessageRequest)
        secrets = {'test': StaticSecret(value='secret')}
        remote_workspace = Mock(spec=AsyncRemoteWorkspace)

        # Mock the skills loading method
        self.service._load_skills_and_update_agent = AsyncMock(
            return_value=mock_updated_agent
        )

        # Act
        result = await self.service._finalize_conversation_request(
            mock_agent,
            conversation_id,
            self.mock_user,
            workspace,
            initial_message,
            secrets,
            self.mock_sandbox,
            remote_workspace,
            'test_repo',
            '/test/dir',
        )

        # Assert
        assert isinstance(result, StartConversationRequest)
        assert result.conversation_id == conversation_id
        assert result.agent == mock_updated_agent
        assert result.workspace == workspace
        assert result.initial_message == initial_message
        assert result.secrets == secrets

        mock_experiment_manager.run_agent_variant_tests__v1.assert_called_once_with(
            self.mock_user.id, conversation_id, mock_agent
        )
        self.service._load_skills_and_update_agent.assert_called_once_with(
            self.mock_sandbox,
            mock_updated_agent,
            remote_workspace,
            'test_repo',
            '/test/dir',
        )

    @pytest.mark.asyncio
    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl'
    )
    async def test_finalize_conversation_request_without_skills(
        self, mock_experiment_manager
    ):
        """Test _finalize_conversation_request without remote workspace (no skills)."""
        # Arrange
        mock_agent = Mock(spec=Agent)
        mock_updated_agent = Mock(spec=Agent)
        mock_experiment_manager.run_agent_variant_tests__v1.return_value = (
            mock_updated_agent
        )

        workspace = LocalWorkspace(working_dir='/test')
        secrets = {'test': StaticSecret(value='secret')}

        # Act
        result = await self.service._finalize_conversation_request(
            mock_agent,
            None,
            self.mock_user,
            workspace,
            None,
            secrets,
            self.mock_sandbox,
            None,
            None,
            '/test/dir',
        )

        # Assert
        assert isinstance(result, StartConversationRequest)
        assert isinstance(result.conversation_id, UUID)
        assert result.agent == mock_updated_agent
        mock_experiment_manager.run_agent_variant_tests__v1.assert_called_once()

    @pytest.mark.asyncio
    @patch(
        'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl'
    )
    async def test_finalize_conversation_request_skills_loading_fails(
        self, mock_experiment_manager
    ):
        """Test _finalize_conversation_request when skills loading fails."""
        # Arrange
        mock_agent = Mock(spec=Agent)
        mock_updated_agent = Mock(spec=Agent)
        mock_experiment_manager.run_agent_variant_tests__v1.return_value = (
            mock_updated_agent
        )

        workspace = LocalWorkspace(working_dir='/test')
        secrets = {'test': StaticSecret(value='secret')}
        remote_workspace = Mock(spec=AsyncRemoteWorkspace)

        # Mock skills loading to raise an exception
        self.service._load_skills_and_update_agent = AsyncMock(
            side_effect=Exception('Skills loading failed')
        )

        # Act
        with patch(
            'openhands.app_server.app_conversation.live_status_app_conversation_service._logger'
        ) as mock_logger:
            result = await self.service._finalize_conversation_request(
                mock_agent,
                None,
                self.mock_user,
                workspace,
                None,
                secrets,
                self.mock_sandbox,
                remote_workspace,
                'test_repo',
                '/test/dir',
            )

        # Assert
        assert isinstance(result, StartConversationRequest)
        assert (
            result.agent == mock_updated_agent
        )  # Should still use the experiment-modified agent
        mock_logger.warning.assert_called_once()

    @pytest.mark.asyncio
    async def test_build_start_conversation_request_for_user_integration(self):
        """Test the main _build_start_conversation_request_for_user method integration."""
        # Arrange
        self.mock_user_context.get_user_info.return_value = self.mock_user

        # Mock all the helper methods
        mock_secrets = {'GITHUB_TOKEN': Mock()}
        mock_llm = Mock(spec=LLM)
        mock_mcp_config = {'default': {'url': 'test'}}
        mock_agent = Mock(spec=Agent)
        mock_final_request = Mock(spec=StartConversationRequest)

        self.service._setup_secrets_for_git_provider = AsyncMock(
            return_value=mock_secrets
        )
        self.service._configure_llm_and_mcp = AsyncMock(
            return_value=(mock_llm, mock_mcp_config)
        )
        self.service._create_agent_with_context = Mock(return_value=mock_agent)
        self.service._finalize_conversation_request = AsyncMock(
            return_value=mock_final_request
        )

        # Act
        result = await self.service._build_start_conversation_request_for_user(
            sandbox=self.mock_sandbox,
            initial_message=None,
            system_message_suffix='Test suffix',
            git_provider=ProviderType.GITHUB,
            working_dir='/test/dir',
            agent_type=AgentType.DEFAULT,
            llm_model='gpt-4',
            conversation_id=None,
            remote_workspace=None,
            selected_repository='test/repo',
        )

        # Assert
        assert result == mock_final_request

        self.service._setup_secrets_for_git_provider.assert_called_once_with(
            ProviderType.GITHUB, self.mock_user
        )
        self.service._configure_llm_and_mcp.assert_called_once_with(
            self.mock_user, 'gpt-4'
        )
        self.service._create_agent_with_context.assert_called_once_with(
            mock_llm,
            AgentType.DEFAULT,
            'Test suffix',
            mock_mcp_config,
            self.mock_user.condenser_max_size,
        )
        self.service._finalize_conversation_request.assert_called_once()
615
tests/unit/app_server/test_webhook_router_stats.py
Normal file
@@ -0,0 +1,615 @@
"""Tests for stats event processing in webhook_router.

This module tests the stats event processing functionality introduced for
updating conversation statistics from ConversationStateUpdateEvent events.
"""

from datetime import datetime, timezone
from typing import AsyncGenerator
from unittest.mock import AsyncMock, MagicMock, patch
from uuid import uuid4

import pytest
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool

from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationInfo,
)
from openhands.app_server.app_conversation.sql_app_conversation_info_service import (
    SQLAppConversationInfoService,
    StoredConversationMetadata,
)
from openhands.app_server.user.specifiy_user_context import SpecifyUserContext
from openhands.app_server.utils.sql_utils import Base
from openhands.sdk.conversation.conversation_stats import ConversationStats
from openhands.sdk.event import ConversationStateUpdateEvent
from openhands.sdk.llm.utils.metrics import Metrics, TokenUsage

# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------

@pytest.fixture
async def async_engine():
    """Create an async SQLite engine for testing."""
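    # StaticPool with check_same_thread=False keeps a single shared
    # connection, so the in-memory SQLite database survives across calls.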
    engine = create_async_engine(
        'sqlite+aiosqlite:///:memory:',
        poolclass=StaticPool,
        connect_args={'check_same_thread': False},
        echo=False,
    )

    # Create all tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    yield engine

    await engine.dispose()


@pytest.fixture
async def async_session(async_engine) -> AsyncGenerator[AsyncSession, None]:
    """Create an async session for testing."""
    async_session_maker = async_sessionmaker(
        async_engine, class_=AsyncSession, expire_on_commit=False
    )

    async with async_session_maker() as db_session:
        yield db_session


@pytest.fixture
def service(async_session) -> SQLAppConversationInfoService:
    """Create a SQLAppConversationInfoService instance for testing."""
    return SQLAppConversationInfoService(
        db_session=async_session, user_context=SpecifyUserContext(user_id=None)
    )


@pytest.fixture
async def v1_conversation_metadata(async_session, service):
    """Create a V1 conversation metadata record for testing."""
    conversation_id = uuid4()
    stored = StoredConversationMetadata(
        conversation_id=str(conversation_id),
        user_id='test_user_123',
        sandbox_id='sandbox_123',
        conversation_version='V1',
        title='Test Conversation',
        accumulated_cost=0.0,
        prompt_tokens=0,
        completion_tokens=0,
        cache_read_tokens=0,
        cache_write_tokens=0,
        reasoning_tokens=0,
        context_window=0,
        per_turn_token=0,
        created_at=datetime.now(timezone.utc),
        last_updated_at=datetime.now(timezone.utc),
    )
    async_session.add(stored)
    await async_session.commit()
    return conversation_id, stored


@pytest.fixture
def stats_event_with_dict_value():
    """Create a ConversationStateUpdateEvent with dict value."""
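    # The payload mirrors ConversationStats: per-usage metrics keyed by
    # usage_id ('agent', 'condenser').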
    event_value = {
        'usage_to_metrics': {
            'agent': {
                'accumulated_cost': 0.03411525,
                'max_budget_per_task': None,
                'accumulated_token_usage': {
                    'prompt_tokens': 8770,
                    'completion_tokens': 82,
                    'cache_read_tokens': 0,
                    'cache_write_tokens': 8767,
                    'reasoning_tokens': 0,
                    'context_window': 0,
                    'per_turn_token': 8852,
                },
            },
            'condenser': {
                'accumulated_cost': 0.0,
                'accumulated_token_usage': {
                    'prompt_tokens': 0,
                    'completion_tokens': 0,
                },
            },
        }
    }
    return ConversationStateUpdateEvent(key='stats', value=event_value)


@pytest.fixture
def stats_event_with_object_value():
    """Create a ConversationStateUpdateEvent with object value."""
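    # A MagicMock stands in for a typed stats object that exposes
    # usage_to_metrics as an attribute rather than a dict key.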
    event_value = MagicMock()
    event_value.usage_to_metrics = {
        'agent': {
            'accumulated_cost': 0.05,
            'accumulated_token_usage': {
                'prompt_tokens': 1000,
                'completion_tokens': 100,
            },
        }
    }
    return ConversationStateUpdateEvent(key='stats', value=event_value)


@pytest.fixture
def stats_event_no_usage_to_metrics():
    """Create a ConversationStateUpdateEvent without usage_to_metrics."""
    event_value = {'some_other_key': 'value'}
    return ConversationStateUpdateEvent(key='stats', value=event_value)


# ---------------------------------------------------------------------------
# Tests for update_conversation_statistics
# ---------------------------------------------------------------------------

class TestUpdateConversationStatistics:
    """Test the update_conversation_statistics method."""

    @pytest.mark.asyncio
    async def test_update_statistics_success(
        self, service, async_session, v1_conversation_metadata
    ):
        """Test successfully updating conversation statistics."""
        conversation_id, stored = v1_conversation_metadata

        agent_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.03411525,
            max_budget_per_task=10.0,
            accumulated_token_usage=TokenUsage(
                model='test-model',
                prompt_tokens=8770,
                completion_tokens=82,
                cache_read_tokens=0,
                cache_write_tokens=8767,
                reasoning_tokens=0,
                context_window=0,
                per_turn_token=8852,
            ),
        )
        stats = ConversationStats(usage_to_metrics={'agent': agent_metrics})

        await service.update_conversation_statistics(conversation_id, stats)

        # Verify the update
        await async_session.refresh(stored)
        assert stored.accumulated_cost == 0.03411525
        assert stored.max_budget_per_task == 10.0
        assert stored.prompt_tokens == 8770
        assert stored.completion_tokens == 82
        assert stored.cache_read_tokens == 0
        assert stored.cache_write_tokens == 8767
        assert stored.reasoning_tokens == 0
        assert stored.context_window == 0
        assert stored.per_turn_token == 8852
        assert stored.last_updated_at is not None

    @pytest.mark.asyncio
    async def test_update_statistics_partial_update(
        self, service, async_session, v1_conversation_metadata
    ):
        """Test updating only some statistics fields."""
        conversation_id, stored = v1_conversation_metadata

        # Set initial values
        stored.accumulated_cost = 0.01
        stored.prompt_tokens = 100
        await async_session.commit()

        agent_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.05,
            accumulated_token_usage=TokenUsage(
                model='test-model',
                prompt_tokens=200,
                completion_tokens=0,  # Default value
            ),
        )
        stats = ConversationStats(usage_to_metrics={'agent': agent_metrics})

        await service.update_conversation_statistics(conversation_id, stats)

        # Verify updated fields
        await async_session.refresh(stored)
        assert stored.accumulated_cost == 0.05
        assert stored.prompt_tokens == 200
        # completion_tokens should remain unchanged (not None in stats)
        assert stored.completion_tokens == 0

    @pytest.mark.asyncio
    async def test_update_statistics_no_agent_metrics(
        self, service, v1_conversation_metadata
    ):
        """Test that update is skipped when no agent metrics are present."""
        conversation_id, stored = v1_conversation_metadata
        original_cost = stored.accumulated_cost

        condenser_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.1,
        )
        stats = ConversationStats(usage_to_metrics={'condenser': condenser_metrics})

        await service.update_conversation_statistics(conversation_id, stats)

        # Verify no update occurred
        assert stored.accumulated_cost == original_cost

    @pytest.mark.asyncio
    async def test_update_statistics_conversation_not_found(self, service):
        """Test that update is skipped when conversation doesn't exist."""
        nonexistent_id = uuid4()
        agent_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.1,
        )
        stats = ConversationStats(usage_to_metrics={'agent': agent_metrics})

        # Should not raise an exception
        await service.update_conversation_statistics(nonexistent_id, stats)

    @pytest.mark.asyncio
    async def test_update_statistics_v0_conversation_skipped(
        self, service, async_session
    ):
        """Test that V0 conversations are skipped."""
        conversation_id = uuid4()
        stored = StoredConversationMetadata(
            conversation_id=str(conversation_id),
            user_id='test_user_123',
            sandbox_id='sandbox_123',
            conversation_version='V0',  # V0 conversation
            title='V0 Conversation',
            accumulated_cost=0.0,
            created_at=datetime.now(timezone.utc),
            last_updated_at=datetime.now(timezone.utc),
        )
        async_session.add(stored)
        await async_session.commit()

        original_cost = stored.accumulated_cost

        agent_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.1,
        )
        stats = ConversationStats(usage_to_metrics={'agent': agent_metrics})

        await service.update_conversation_statistics(conversation_id, stats)

        # Verify no update occurred
        await async_session.refresh(stored)
        assert stored.accumulated_cost == original_cost

    @pytest.mark.asyncio
    async def test_update_statistics_with_none_values(
        self, service, async_session, v1_conversation_metadata
    ):
        """Test that None values in stats don't overwrite existing values."""
        conversation_id, stored = v1_conversation_metadata

        # Set initial values
        stored.accumulated_cost = 0.01
        stored.max_budget_per_task = 5.0
        stored.prompt_tokens = 100
        await async_session.commit()

        agent_metrics = Metrics(
            model_name='test-model',
            accumulated_cost=0.05,
            max_budget_per_task=None,  # None value
            accumulated_token_usage=TokenUsage(
                model='test-model',
                prompt_tokens=200,
                completion_tokens=0,  # Default value (None is not valid for int)
            ),
        )
        stats = ConversationStats(usage_to_metrics={'agent': agent_metrics})

        await service.update_conversation_statistics(conversation_id, stats)

        # Verify updated fields and that None values didn't overwrite
        await async_session.refresh(stored)
        assert stored.accumulated_cost == 0.05
        assert stored.max_budget_per_task == 5.0  # Should remain unchanged
        assert stored.prompt_tokens == 200
        assert (
            stored.completion_tokens == 0
        )  # Should remain unchanged (was 0, None doesn't update)


# ---------------------------------------------------------------------------
# Tests for process_stats_event
# ---------------------------------------------------------------------------

class TestProcessStatsEvent:
    """Test the process_stats_event method."""

    @pytest.mark.asyncio
    async def test_process_stats_event_with_dict_value(
        self,
        service,
        async_session,
        stats_event_with_dict_value,
        v1_conversation_metadata,
    ):
        """Test processing stats event with dict value."""
        conversation_id, stored = v1_conversation_metadata

        await service.process_stats_event(stats_event_with_dict_value, conversation_id)

        # Verify the update occurred
        await async_session.refresh(stored)
        assert stored.accumulated_cost == 0.03411525
        assert stored.prompt_tokens == 8770
        assert stored.completion_tokens == 82

    @pytest.mark.asyncio
    async def test_process_stats_event_with_object_value(
        self,
        service,
        async_session,
        stats_event_with_object_value,
        v1_conversation_metadata,
    ):
        """Test processing stats event with object value."""
        conversation_id, stored = v1_conversation_metadata

        await service.process_stats_event(
            stats_event_with_object_value, conversation_id
        )

        # Verify the update occurred
        await async_session.refresh(stored)
        assert stored.accumulated_cost == 0.05
        assert stored.prompt_tokens == 1000
        assert stored.completion_tokens == 100

    @pytest.mark.asyncio
    async def test_process_stats_event_no_usage_to_metrics(
        self,
        service,
        async_session,
        stats_event_no_usage_to_metrics,
        v1_conversation_metadata,
    ):
        """Test processing stats event without usage_to_metrics."""
        conversation_id, stored = v1_conversation_metadata
        original_cost = stored.accumulated_cost

        await service.process_stats_event(
            stats_event_no_usage_to_metrics, conversation_id
        )

        # Verify update_conversation_statistics was NOT called
        await async_session.refresh(stored)
        assert stored.accumulated_cost == original_cost

    @pytest.mark.asyncio
    async def test_process_stats_event_service_error_handled(
        self, service, stats_event_with_dict_value
    ):
        """Test that errors from service are caught and logged."""
        conversation_id = uuid4()

        # Should not raise an exception
        with (
            patch.object(
                service,
                'update_conversation_statistics',
                side_effect=Exception('Database error'),
            ),
            patch(
                'openhands.app_server.app_conversation.sql_app_conversation_info_service.logger'
            ) as mock_logger,
        ):
            await service.process_stats_event(
                stats_event_with_dict_value, conversation_id
            )

        # Verify error was logged
        mock_logger.exception.assert_called_once()

    @pytest.mark.asyncio
    async def test_process_stats_event_empty_usage_to_metrics(
        self, service, async_session, v1_conversation_metadata
    ):
        """Test processing stats event with empty usage_to_metrics."""
        conversation_id, stored = v1_conversation_metadata
        original_cost = stored.accumulated_cost

        # Create event with empty usage_to_metrics
        event = ConversationStateUpdateEvent(
            key='stats', value={'usage_to_metrics': {}}
        )

        await service.process_stats_event(event, conversation_id)

        # Empty dict is falsy, so update_conversation_statistics should NOT be called
        await async_session.refresh(stored)
        assert stored.accumulated_cost == original_cost


# ---------------------------------------------------------------------------
# Integration tests for on_event endpoint
# ---------------------------------------------------------------------------

class TestOnEventStatsProcessing:
    """Test stats event processing in the on_event endpoint."""

    @pytest.mark.asyncio
    async def test_on_event_processes_stats_events(self):
        """Test that on_event processes stats events."""
        from openhands.app_server.event_callback.webhook_router import on_event
        from openhands.app_server.sandbox.sandbox_models import (
            SandboxInfo,
            SandboxStatus,
        )

        conversation_id = uuid4()
        sandbox_id = 'sandbox_123'

        # Create stats event
        stats_event = ConversationStateUpdateEvent(
            key='stats',
            value={
                'usage_to_metrics': {
                    'agent': {
                        'accumulated_cost': 0.1,
                        'accumulated_token_usage': {
                            'prompt_tokens': 1000,
                        },
                    }
                }
            },
        )

        # Create non-stats event
        other_event = ConversationStateUpdateEvent(
            key='execution_status', value='running'
        )

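        # Mixing event kinds verifies that only the stats event triggers a
        # statistics update while both events are still saved.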
        events = [stats_event, other_event]

        # Mock dependencies
        mock_sandbox = SandboxInfo(
            id=sandbox_id,
            status=SandboxStatus.RUNNING,
            session_api_key='test_key',
            created_by_user_id='user_123',
            sandbox_spec_id='spec_123',
        )

        mock_app_conversation_info = AppConversationInfo(
            id=conversation_id,
            sandbox_id=sandbox_id,
            created_by_user_id='user_123',
        )

        mock_event_service = AsyncMock()
        mock_app_conversation_info_service = AsyncMock()
        mock_app_conversation_info_service.get_app_conversation_info.return_value = (
            mock_app_conversation_info
        )

        # Set up process_stats_event to call update_conversation_statistics
        async def process_stats_event_side_effect(event, conversation_id):
            # Simulate what process_stats_event does - call update_conversation_statistics
            from openhands.sdk.conversation.conversation_stats import ConversationStats

            if isinstance(event.value, dict):
                stats = ConversationStats.model_validate(event.value)
                if stats and stats.usage_to_metrics:
                    await mock_app_conversation_info_service.update_conversation_statistics(
                        conversation_id, stats
                    )

        mock_app_conversation_info_service.process_stats_event.side_effect = (
            process_stats_event_side_effect
        )

        with (
            patch(
                'openhands.app_server.event_callback.webhook_router.valid_sandbox',
                return_value=mock_sandbox,
            ),
            patch(
                'openhands.app_server.event_callback.webhook_router.valid_conversation',
                return_value=mock_app_conversation_info,
            ),
            patch(
                'openhands.app_server.event_callback.webhook_router._run_callbacks_in_bg_and_close'
            ) as mock_callbacks,
        ):
            await on_event(
                events=events,
                conversation_id=conversation_id,
                sandbox_info=mock_sandbox,
                app_conversation_info_service=mock_app_conversation_info_service,
                event_service=mock_event_service,
            )

        # Verify events were saved
        assert mock_event_service.save_event.call_count == 2

        # Verify stats event was processed
        mock_app_conversation_info_service.update_conversation_statistics.assert_called_once()

        # Verify callbacks were scheduled
        mock_callbacks.assert_called_once()

    @pytest.mark.asyncio
    async def test_on_event_skips_non_stats_events(self):
        """Test that on_event skips non-stats events."""
        from openhands.app_server.event_callback.webhook_router import on_event
        from openhands.app_server.sandbox.sandbox_models import (
            SandboxInfo,
            SandboxStatus,
        )
        from openhands.events.action.message import MessageAction

        conversation_id = uuid4()
        sandbox_id = 'sandbox_123'

        # Create non-stats events
        events = [
            ConversationStateUpdateEvent(key='execution_status', value='running'),
            MessageAction(content='test'),
        ]

        mock_sandbox = SandboxInfo(
            id=sandbox_id,
            status=SandboxStatus.RUNNING,
            session_api_key='test_key',
            created_by_user_id='user_123',
            sandbox_spec_id='spec_123',
        )

        mock_app_conversation_info = AppConversationInfo(
            id=conversation_id,
            sandbox_id=sandbox_id,
            created_by_user_id='user_123',
        )

        mock_event_service = AsyncMock()
        mock_app_conversation_info_service = AsyncMock()
        mock_app_conversation_info_service.get_app_conversation_info.return_value = (
            mock_app_conversation_info
        )

        with (
            patch(
                'openhands.app_server.event_callback.webhook_router.valid_sandbox',
                return_value=mock_sandbox,
            ),
            patch(
                'openhands.app_server.event_callback.webhook_router.valid_conversation',
                return_value=mock_app_conversation_info,
            ),
            patch(
                'openhands.app_server.event_callback.webhook_router._run_callbacks_in_bg_and_close'
            ),
        ):
            await on_event(
                events=events,
                conversation_id=conversation_id,
                sandbox_info=mock_sandbox,
                app_conversation_info_service=mock_app_conversation_info_service,
                event_service=mock_event_service,
            )

        # Verify stats update was NOT called
        mock_app_conversation_info_service.update_conversation_statistics.assert_not_called()
@@ -152,6 +152,7 @@ class TestExperimentManagerIntegration:
            llm_base_url=None,
            llm_api_key=None,
            confirmation_mode=False,
            condenser_max_size=None,
        )

    async def get_secrets(self):
@@ -200,8 +201,24 @@ class TestExperimentManagerIntegration:

        # Patch the pieces invoked by the service
        with (
            patch(
                'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_agent',
            patch.object(
                service,
                '_setup_secrets_for_git_provider',
                return_value={},
            ),
            patch.object(
                service,
                '_configure_llm_and_mcp',
                return_value=(mock_llm, {}),
            ),
            patch.object(
                service,
                '_create_agent_with_context',
                return_value=mock_agent,
            ),
            patch.object(
                service,
                '_load_skills_and_update_agent',
                return_value=mock_agent,
            ),
            patch(
@@ -46,6 +46,9 @@ class MockUserAuth(UserAuth):
    async def get_secrets(self) -> Secrets | None:
        return None

    async def get_mcp_api_key(self) -> str | None:
        return None

    @classmethod
    async def get_instance(cls, request: Request) -> UserAuth:
        return MockUserAuth()