V1 Integration (#11183)
Co-authored-by: openhands <openhands@all-hands.dev>
Co-authored-by: sp.wack <83104063+amanape@users.noreply.github.com>
Co-authored-by: Engel Nyst <enyst@users.noreply.github.com>

Parent: 5076f21e86
Commit: f292f3a84d

.github/workflows/ghcr-build.yml (vendored, 2 lines changed)
@@ -126,7 +126,7 @@ jobs:
      - name: Install Python dependencies using Poetry
        run: make install-python-dependencies POETRY_GROUP=main INSTALL_PLAYWRIGHT=0
      - name: Create source distribution and Dockerfile
-        run: poetry run python3 openhands/runtime/utils/runtime_build.py --base_image ${{ matrix.base_image.image }} --build_folder containers/runtime --force_rebuild
+        run: poetry run python3 -m openhands.runtime.utils.runtime_build --base_image ${{ matrix.base_image.image }} --build_folder containers/runtime --force_rebuild
      - name: Lowercase Repository Owner
        run: |
          echo REPO_OWNER=$(echo ${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]') >> $GITHUB_ENV
@@ -6,7 +6,7 @@ that depends on the `base_image` **AND** a [Python source distribution](https://
 The following command will generate a `Dockerfile` file for `nikolaik/python-nodejs:python3.12-nodejs22` (the default base image), an updated `config.sh` and the runtime source distribution files/folders into `containers/runtime`:

 ```bash
-poetry run python3 openhands/runtime/utils/runtime_build.py \
+poetry run python3 -m openhands.runtime.utils.runtime_build \
     --base_image nikolaik/python-nodejs:python3.12-nodejs22 \
     --build_folder containers/runtime
 ```
enterprise/migrations/versions/076_add_v1_tables.py (new file, 259 lines)
@@ -0,0 +1,259 @@
|
||||
"""Sync DB with Models
|
||||
|
||||
Revision ID: 076
|
||||
Revises: 075
|
||||
Create Date: 2025-10-05 11:28:41.772294
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_result_models import (
|
||||
EventCallbackResultStatus,
|
||||
)
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '076'
|
||||
down_revision: Union[str, Sequence[str], None] = '075'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('max_budget_per_task', sa.Float(), nullable=True),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('cache_read_tokens', sa.Integer(), server_default='0'),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('cache_write_tokens', sa.Integer(), server_default='0'),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('reasoning_tokens', sa.Integer(), server_default='0'),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('context_window', sa.Integer(), server_default='0'),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column('per_turn_token', sa.Integer(), server_default='0'),
|
||||
)
|
||||
op.add_column(
|
||||
'conversation_metadata',
|
||||
sa.Column(
|
||||
'conversation_version', sa.String(), nullable=False, server_default='V0'
|
||||
),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_conversation_metadata_conversation_version'),
|
||||
'conversation_metadata',
|
||||
['conversation_version'],
|
||||
unique=False,
|
||||
)
|
||||
op.add_column('conversation_metadata', sa.Column('sandbox_id', sa.String()))
|
||||
op.create_index(
|
||||
op.f('ix_conversation_metadata_sandbox_id'),
|
||||
'conversation_metadata',
|
||||
['sandbox_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'app_conversation_start_task',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('created_by_user_id', sa.String(), nullable=True),
|
||||
sa.Column('status', sa.Enum(AppConversationStartTaskStatus), nullable=True),
|
||||
sa.Column('detail', sa.String(), nullable=True),
|
||||
sa.Column('app_conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('sandbox_id', sa.String(), nullable=True),
|
||||
sa.Column('agent_server_url', sa.String(), nullable=True),
|
||||
sa.Column('request', sa.JSON(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_created_at'),
|
||||
'app_conversation_start_task',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_created_by_user_id'),
|
||||
'app_conversation_start_task',
|
||||
['created_by_user_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_updated_at'),
|
||||
'app_conversation_start_task',
|
||||
['updated_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'event_callback',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('processor', sa.JSON(), nullable=True),
|
||||
sa.Column('event_kind', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_created_at'),
|
||||
'event_callback',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'event_callback_result',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('status', sa.Enum(EventCallbackResultStatus), nullable=True),
|
||||
sa.Column('event_callback_id', sa.UUID(), nullable=True),
|
||||
sa.Column('event_id', sa.UUID(), nullable=True),
|
||||
sa.Column('conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('detail', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_conversation_id'),
|
||||
'event_callback_result',
|
||||
['conversation_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_created_at'),
|
||||
'event_callback_result',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_event_callback_id'),
|
||||
'event_callback_result',
|
||||
['event_callback_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_event_id'),
|
||||
'event_callback_result',
|
||||
['event_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'v1_remote_sandbox',
|
||||
sa.Column('id', sa.String(), nullable=False),
|
||||
sa.Column('created_by_user_id', sa.String(), nullable=True),
|
||||
sa.Column('sandbox_spec_id', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_created_at'),
|
||||
'v1_remote_sandbox',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_created_by_user_id'),
|
||||
'v1_remote_sandbox',
|
||||
['created_by_user_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_sandbox_spec_id'),
|
||||
'v1_remote_sandbox',
|
||||
['sandbox_spec_id'],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_sandbox_spec_id'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_created_by_user_id'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_created_at'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_table('v1_remote_sandbox')
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_event_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_event_callback_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_created_at'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_conversation_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_table('event_callback_result')
|
||||
op.drop_index(op.f('ix_event_callback_created_at'), table_name='event_callback')
|
||||
op.drop_table('event_callback')
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_updated_at'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_created_by_user_id'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_created_at'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_table('app_conversation_start_task')
|
||||
op.drop_column('conversation_metadata', 'sandbox_id')
|
||||
op.drop_column('conversation_metadata', 'conversation_version')
|
||||
op.drop_column('conversation_metadata', 'per_turn_token')
|
||||
op.drop_column('conversation_metadata', 'context_window')
|
||||
op.drop_column('conversation_metadata', 'reasoning_tokens')
|
||||
op.drop_column('conversation_metadata', 'cache_write_tokens')
|
||||
op.drop_column('conversation_metadata', 'cache_read_tokens')
|
||||
op.drop_column('conversation_metadata', 'max_budget_per_task')
|
||||
op.execute('DROP TYPE appconversationstarttaskstatus')
|
||||
op.execute('DROP TYPE eventcallbackresultstatus')
|
||||
# ### end Alembic commands ###
|
||||
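The migration above can be exercised end to end before deployment. Below is a minimal sketch that applies revision 076 and rolls it back using Alembic's command API; the path to the Alembic config file inside `enterprise/` is an assumption, not something this diff specifies.

```python
from alembic import command
from alembic.config import Config

# Hypothetical config path; point this at wherever enterprise/alembic.ini actually lives.
cfg = Config('enterprise/alembic.ini')

command.upgrade(cfg, '076')    # adds the V1 columns, indexes, and tables
command.downgrade(cfg, '075')  # drops them again and removes the enum types
```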
enterprise/poetry.lock (generated, 280 lines changed)
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.

[[package]]
|
||||
name = "aiofiles"
|
||||
@ -148,6 +148,25 @@ files = [
|
||||
frozenlist = ">=1.1.0"
|
||||
typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}
|
||||
|
||||
[[package]]
|
||||
name = "aiosqlite"
|
||||
version = "0.21.0"
|
||||
description = "asyncio bridge to the standard sqlite3 module"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"},
|
||||
{file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing_extensions = ">=4.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["attribution (==1.7.1)", "black (==24.3.0)", "build (>=1.2)", "coverage[toml] (==7.6.10)", "flake8 (==7.0.0)", "flake8-bugbear (==24.12.12)", "flit (==3.10.1)", "mypy (==1.14.1)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"]
|
||||
docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "alembic"
|
||||
version = "1.16.5"
|
||||
@ -1061,7 +1080,7 @@ files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\" or os_name == \"nt\"", dev = "os_name == \"nt\"", test = "platform_system == \"Windows\" or sys_platform == \"win32\""}
|
||||
markers = {main = "platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\"", dev = "os_name == \"nt\"", test = "platform_system == \"Windows\" or sys_platform == \"win32\""}
|
||||
|
||||
[[package]]
|
||||
name = "comm"
|
||||
@ -1797,6 +1816,25 @@ files = [
|
||||
{file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
version = "0.19.1"
|
||||
description = "ECDSA cryptographic signature library (pure python)"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"},
|
||||
{file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.9.0"
|
||||
|
||||
[package.extras]
|
||||
gmpy = ["gmpy"]
|
||||
gmpy2 = ["gmpy2"]
|
||||
|
||||
[[package]]
|
||||
name = "email-validator"
|
||||
version = "2.3.0"
|
||||
@ -1968,38 +2006,79 @@ websockets = ["websockets (>=15.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "fastuuid"
|
||||
version = "0.12.0"
|
||||
version = "0.13.5"
|
||||
description = "Python bindings to Rust's UUID library."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "fastuuid-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a900ef0956aacf862b460e20541fdae2d7c340594fe1bd6fdcb10d5f0791a9"},
|
||||
{file = "fastuuid-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0302f5acf54dc75de30103025c5a95db06d6c2be36829043a0aa16fc170076bc"},
|
||||
{file = "fastuuid-0.12.0-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:7946b4a310cfc2d597dcba658019d72a2851612a2cebb949d809c0e2474cf0a6"},
|
||||
{file = "fastuuid-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1b6764dd42bf0c46c858fb5ade7b7a3d93b7a27485a7a5c184909026694cd88"},
|
||||
{file = "fastuuid-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2bced35269315d16fe0c41003f8c9d63f2ee16a59295d90922cad5e6a67d0418"},
|
||||
{file = "fastuuid-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82106e4b0a24f4f2f73c88f89dadbc1533bb808900740ca5db9bbb17d3b0c824"},
|
||||
{file = "fastuuid-0.12.0-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:4db1bc7b8caa1d7412e1bea29b016d23a8d219131cff825b933eb3428f044dca"},
|
||||
{file = "fastuuid-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:07afc8e674e67ac3d35a608c68f6809da5fab470fb4ef4469094fdb32ba36c51"},
|
||||
{file = "fastuuid-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:328694a573fe9dce556b0b70c9d03776786801e028d82f0b6d9db1cb0521b4d1"},
|
||||
{file = "fastuuid-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02acaea2c955bb2035a7d8e7b3fba8bd623b03746ae278e5fa932ef54c702f9f"},
|
||||
{file = "fastuuid-0.12.0-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:ed9f449cba8cf16cced252521aee06e633d50ec48c807683f21cc1d89e193eb0"},
|
||||
{file = "fastuuid-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:0df2ea4c9db96fd8f4fa38d0e88e309b3e56f8fd03675a2f6958a5b082a0c1e4"},
|
||||
{file = "fastuuid-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7fe2407316a04ee8f06d3dbc7eae396d0a86591d92bafe2ca32fce23b1145786"},
|
||||
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b31dd488d0778c36f8279b306dc92a42f16904cba54acca71e107d65b60b0c"},
|
||||
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:b19361ee649365eefc717ec08005972d3d1eb9ee39908022d98e3bfa9da59e37"},
|
||||
{file = "fastuuid-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:8fc66b11423e6f3e1937385f655bedd67aebe56a3dcec0cb835351cfe7d358c9"},
|
||||
{file = "fastuuid-0.12.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2925f67b88d47cb16aa3eb1ab20fdcf21b94d74490e0818c91ea41434b987493"},
|
||||
{file = "fastuuid-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7b15c54d300279ab20a9cc0579ada9c9f80d1bc92997fc61fb7bf3103d7cb26b"},
|
||||
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458f1bc3ebbd76fdb89ad83e6b81ccd3b2a99fa6707cd3650b27606745cfb170"},
|
||||
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:a8f0f83fbba6dc44271a11b22e15838641b8c45612cdf541b4822a5930f6893c"},
|
||||
{file = "fastuuid-0.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:7cfd2092253d3441f6a8c66feff3c3c009da25a5b3da82bc73737558543632be"},
|
||||
{file = "fastuuid-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9303617e887429c193d036d47d0b32b774ed3618431123e9106f610d601eb57e"},
|
||||
{file = "fastuuid-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8790221325b376e1122e95f865753ebf456a9fb8faf0dca4f9bf7a3ff620e413"},
|
||||
{file = "fastuuid-0.12.0-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:e4b12d3e23515e29773fa61644daa660ceb7725e05397a986c2109f512579a48"},
|
||||
{file = "fastuuid-0.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:e41656457c34b5dcb784729537ea64c7d9bbaf7047b480c6c6a64c53379f455a"},
|
||||
{file = "fastuuid-0.12.0.tar.gz", hash = "sha256:d0bd4e5b35aad2826403f4411937c89e7c88857b1513fe10f696544c03e9bd8e"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b9edf8ee30718aee787cdd2e9e1ff3d4a3ec6ddb32fba0a23fa04956df69ab07"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f67ea1e25c5e782f7fb5aaa5208f157d950401dd9321ce56bcc6d4dc3d72ed60"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ff3fc87e1f19603dd53c38f42c2ea8d5d5462554deab69e9cf1800574e4756c"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6e5337fa7698dc52bc724da7e9239e93c5b24a09f6904b8660dfb8c41ce3dee"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9db596023c10dabb12489a88c51b75297c3a2478cb2be645e06905934e7b9fc"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:191ff6192fe53c5fc9d4d241ee1156b30a7ed6f1677b1cc2423e7ecdbc26222b"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:348ce9f296dda701ba46d8dceeff309f90dbc75dd85080bbed2b299aa908890a"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:46954fb644995d7fc8bbd710fbd4c65cedaa48c921c86fdbafef0229168a8c96"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22da0f66041e1c10c7d465b495cc6cd8e17e080dda34b4bd5ff5240b860fbb82"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-win32.whl", hash = "sha256:3e6b548f06c1ed7bad951a17a09eef69d6f24eb2b874cb4833e26b886d82990f"},
|
||||
{file = "fastuuid-0.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:c82838e52189d16b1307631179cb2cd37778dd8f4ddc00e9ce3c26f920b3b2f7"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c122558ca4b5487e2bd0863467e4ccfe636afd1274803741487d48f2e32ea0e1"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d7abd42a03a17a681abddd19aa4d44ca2747138cf8a48373b395cf1341a10de2"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2705cf7c2d6f7c03053404b75a4c44f872a73f6f9d5ea34f1dc6bba400c4a97c"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d220a056fcbad25932c1f25304261198612f271f4d150b2a84e81adb877daf7"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f29f93b5a0c5f5579f97f77d5319e9bfefd61d8678ec59d850201544faf33bf"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:399d86623fb806151b1feb9fdd818ebfc1d50387199a35f7264f98dfc1540af5"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:689e8795a1edd573b2c9a455024e4edf605a9690339bba29709857f7180894ea"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:25e82c4a1734da168b36f7308e397afbe9c9b353799a9c69563a605f11dd4641"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f62299e3cca69aad6a6fb37e26e45055587954d498ad98903fea24382377ea0e"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-win32.whl", hash = "sha256:68227f2230381b89fb1ad362ca6e433de85c6c11c36312b41757cad47b8a8e32"},
|
||||
{file = "fastuuid-0.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:4a32306982bd031cb20d5d1a726b7b958a55babebd2300ce6c8e352d3496e931"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85"},
|
||||
{file = "fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ea17dfd35e0e91920a35d91e65e5f9c9d1985db55ac4ff2f1667a0f61189cefa"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:be6ad91e5fefbcc2a4b478858a2715e386d405834ea3ae337c3b6b95cc0e47d6"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ea6df13a306aab3e0439d58c312ff1e6f4f07f09f667579679239b4a6121f64a"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2354c1996d3cf12dc2ba3752e2c4d6edc46e1a38c63893146777b1939f3062d4"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6cf9b7469fc26d1f9b1c43ac4b192e219e85b88fdf81d71aa755a6c08c8a817"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92ba539170097b9047551375f1ca09d8d2b4aefcc79eeae3e1c43fe49b42072e"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:dbb81d05617bc2970765c1ad82db7e8716f6a2b7a361a14b83de5b9240ade448"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:d973bd6bf9d754d3cca874714ac0a6b22a47f239fb3d3c8687569db05aac3471"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e725ceef79486423f05ee657634d4b4c1ca5fb2c8a94e0708f5d6356a83f2a83"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-win32.whl", hash = "sha256:a1c430a332ead0b2674f1ef71b17f43b8139ec5a4201182766a21f131a31e021"},
|
||||
{file = "fastuuid-0.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:241fdd362fd96e6b337db62a65dd7cb3dfac20adf854573247a47510e192db6f"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e353c0a0d978a5ecd97171ac4fb7f55a6bd6cbae90f1ec4e828e5317f11b995e"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:904ac3eb37f4742e23f6a51be0d0451d1d3aceb50df8dac7afc6bf5209793650"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6b070e0dc1965d53b9e07c291537095ececf7d7e36e60aed9b22400fa6c5c7f"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0225f8bc78071a191cb458b3b0e23b04a7f03013575b8a3083da2a84c450e200"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f3f8f10b962cf2e11d3affc0cf2697ac5c9accc0d282dce981ed555a44ce15"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1ae87968614fe6d3029a8198671b5893341aac9459289e93d201027be9ea7e8"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6ca01f5e614530a1a858bf185dd5556805a4c11b6eba0a2536890b68ed954922"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6d3b6b10d78b9f7056445ac377612443980349da7221a3dd3e3f382f7c437be3"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ae95e4dcf94775d948ebb843f4443d33cd224bb31174030e106ee3cab66527c"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-win32.whl", hash = "sha256:5d753bc9ba8de6dd9caa8bbac045578c2fbe1c6ae40c2026b614676776fbe9dc"},
|
||||
{file = "fastuuid-0.13.5-cp38-cp38-win_amd64.whl", hash = "sha256:f9530f1328b05b80c6fa111e7f2a5d55fa30fbbd72d708326d0c7b55b67ed772"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ed78153c589e5efb34faaa216836a5bf8a0b9d34e82183203166011238d9ed13"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a67e18c2d7fba8be6ea4aed8ca5a20fcf273f003efa01c1f33a096b72537e69e"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e362a3d66874d3d11a1ee9a8e717e32c2817cdb5d7a4e913290bf6e0f2a7fd8"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9baa9c33848ec0926231e7ecfef9e02faa0f6d24265b64108ea41f7a0bb3f48"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec6dab282162c19ec2172f33bafd467cffe26b92345789278adcbec19428d1"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2424a90688dbc44f119686fa452ff21aa106c9da258214f577816462ad606d5"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8f2cc6972941ab030f3776961ed8454772c3acad88781fc262d71514df89973"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a41257ea172b5de199c3cfa71cc6c574dcf22367fe51e26cba0d359107f11f30"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cade5f3b8023dbba5a006e5685a7baf0d7a30c43cea17113768aa9ef9582d799"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-win32.whl", hash = "sha256:880f0d03ad2518b96757ca422cba6ff76cea5464db2b3ad75c32acf1890e058f"},
|
||||
{file = "fastuuid-0.13.5-cp39-cp39-win_amd64.whl", hash = "sha256:ebe95b730f81808eabc90247ac3d412b96d9fae1c406760b163bb9f134b7af69"},
|
||||
{file = "fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -4187,14 +4266,14 @@ dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[tom
|
||||
|
||||
[[package]]
|
||||
name = "libtmux"
|
||||
version = "0.39.0"
|
||||
version = "0.46.2"
|
||||
description = "Typed library that provides an ORM wrapper for tmux, a terminal multiplexer."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "libtmux-0.39.0-py3-none-any.whl", hash = "sha256:6b6e338be2727f67aa6b7eb67fa134368fa3c3eac5df27565396467692891c1e"},
|
||||
{file = "libtmux-0.39.0.tar.gz", hash = "sha256:59346aeef3c0d6017f3bc5e23248d43cdf50f32b775b9cb5d9ff5e2e5f3059f4"},
|
||||
{file = "libtmux-0.46.2-py3-none-any.whl", hash = "sha256:6c32dbf22bde8e5e33b2714a4295f6e838dc640f337cd4c085a044f6828c7793"},
|
||||
{file = "libtmux-0.46.2.tar.gz", hash = "sha256:9a398fec5d714129c8344555d466e1a903dfc0f741ba07aabe75a8ceb25c5dda"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -4228,26 +4307,24 @@ valkey = ["valkey (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "litellm"
|
||||
version = "1.76.1"
|
||||
version = "1.77.7"
|
||||
description = "Library to easily interface with LLM API providers"
|
||||
optional = false
|
||||
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
|
||||
python-versions = ">=3.8.1,<4.0, !=3.9.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "litellm-1.76.1-py3-none-any.whl", hash = "sha256:938f05075372f26098211ea9b3cb0a6bb7b46111330226b70d42d40bd307812f"},
|
||||
{file = "litellm-1.76.1.tar.gz", hash = "sha256:d5a3a3efda04999b60ec0d1c29c1eaaa12f89a7b29db4bda691c7fb55b4fa6ad"},
|
||||
]
|
||||
files = []
|
||||
develop = false
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.10"
|
||||
click = "*"
|
||||
fastuuid = ">=0.12.0"
|
||||
fastuuid = ">=0.13.0"
|
||||
httpx = ">=0.23.0"
|
||||
importlib-metadata = ">=6.8.0"
|
||||
jinja2 = ">=3.1.2,<4.0.0"
|
||||
jsonschema = ">=4.22.0,<5.0.0"
|
||||
jinja2 = "^3.1.2"
|
||||
jsonschema = "^4.22.0"
|
||||
openai = ">=1.99.5"
|
||||
pydantic = ">=2.5.0,<3.0.0"
|
||||
pydantic = "^2.5.0"
|
||||
python-dotenv = ">=0.2.0"
|
||||
tiktoken = ">=0.7.0"
|
||||
tokenizers = "*"
|
||||
@ -4256,10 +4333,16 @@ tokenizers = "*"
|
||||
caching = ["diskcache (>=5.6.1,<6.0.0)"]
|
||||
extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
|
||||
mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""]
|
||||
proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography (>=43.0.1,<44.0.0)", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.19)", "litellm-proxy-extras (==0.2.18)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
|
||||
proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
|
||||
semantic-router = ["semantic-router ; python_version >= \"3.9\""]
|
||||
utils = ["numpydoc"]
|
||||
|
||||
[package.source]
|
||||
type = "git"
|
||||
url = "https://github.com/BerriAI/litellm.git"
|
||||
reference = "v1.77.7.dev9"
|
||||
resolved_reference = "763d2f8ccdd8412dbe6d4ac0e136d9ac34dcd4c0"
|
||||
|
||||
[[package]]
|
||||
name = "llvmlite"
|
||||
version = "0.44.0"
|
||||
@ -5430,9 +5513,36 @@ youtube-transcript-api = ">=0.6.2"
|
||||
[package.extras]
|
||||
llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0)", "llama-index-retrievers-bm25 (>=0.5.2,<0.6.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "openhands-agent-server"
|
||||
version = "1.0.0"
|
||||
description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
|
||||
optional = false
|
||||
python-versions = ">=3.12"
|
||||
groups = ["main"]
|
||||
files = []
|
||||
develop = false
|
||||
|
||||
[package.dependencies]
|
||||
aiosqlite = ">=0.19"
|
||||
alembic = ">=1.13"
|
||||
docker = ">=7.1,<8"
|
||||
fastapi = ">=0.104"
|
||||
pydantic = ">=2"
|
||||
sqlalchemy = ">=2"
|
||||
uvicorn = ">=0.31.1"
|
||||
websockets = ">=12"
|
||||
|
||||
[package.source]
|
||||
type = "git"
|
||||
url = "https://github.com/All-Hands-AI/agent-sdk.git"
|
||||
reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
|
||||
resolved_reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
|
||||
subdirectory = "openhands/agent_server"
|
||||
|
||||
[[package]]
|
||||
name = "openhands-ai"
|
||||
version = "0.57.0"
|
||||
version = "0.59.0"
|
||||
description = "OpenHands: Code Less, Make More"
|
||||
optional = false
|
||||
python-versions = "^3.12,<3.14"
|
||||
@ -5444,6 +5554,7 @@ develop = true
|
||||
aiohttp = ">=3.9.0,!=3.11.13"
|
||||
anthropic = {version = "*", extras = ["vertex"]}
|
||||
anyio = "4.9.0"
|
||||
asyncpg = "^0.30.0"
|
||||
bashlex = "^0.18"
|
||||
boto3 = "*"
|
||||
browsergym-core = "0.13.3"
|
||||
@ -5465,21 +5576,26 @@ joblib = "*"
|
||||
json-repair = "*"
|
||||
jupyter_kernel_gateway = "*"
|
||||
kubernetes = "^33.1.0"
|
||||
libtmux = ">=0.37,<0.40"
|
||||
litellm = ">=1.74.3, <1.77.2, !=1.64.4, !=1.67.*"
|
||||
libtmux = ">=0.46.2"
|
||||
litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*"
|
||||
memory-profiler = "^0.61.0"
|
||||
numpy = "*"
|
||||
openai = "1.99.9"
|
||||
openhands-aci = "0.3.2"
|
||||
openhands-agent-server = {git = "https://github.com/All-Hands-AI/agent-sdk.git", rev = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc", subdirectory = "openhands/agent_server"}
|
||||
openhands-sdk = {git = "https://github.com/All-Hands-AI/agent-sdk.git", rev = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc", subdirectory = "openhands/sdk"}
|
||||
opentelemetry-api = "^1.33.1"
|
||||
opentelemetry-exporter-otlp-proto-grpc = "^1.33.1"
|
||||
pathspec = "^0.12.1"
|
||||
pexpect = "*"
|
||||
pg8000 = "^1.31.5"
|
||||
pillow = "^11.3.0"
|
||||
playwright = "^1.55.0"
|
||||
poetry = "^2.1.2"
|
||||
prompt-toolkit = "^3.0.50"
|
||||
protobuf = "^5.0.0,<6.0.0"
|
||||
psutil = "*"
|
||||
pybase62 = "^1.0.0"
|
||||
pygithub = "^2.5.0"
|
||||
pyjwt = "^2.9.0"
|
||||
pylatexenc = "*"
|
||||
@ -5488,6 +5604,7 @@ PyPDF2 = "*"
|
||||
python-docx = "*"
|
||||
python-dotenv = "*"
|
||||
python-frontmatter = "^1.1.0"
|
||||
python-jose = {version = ">=3.3", extras = ["cryptography"]}
|
||||
python-json-logger = "^3.2.1"
|
||||
python-multipart = "*"
|
||||
python-pptx = "*"
|
||||
@ -5500,6 +5617,7 @@ redis = ">=5.2,<7.0"
|
||||
requests = "^2.32.5"
|
||||
setuptools = ">=78.1.1"
|
||||
shellingham = "^1.5.4"
|
||||
sqlalchemy = {version = "^2.0.40", extras = ["asyncio"]}
|
||||
sse-starlette = "^3.0.2"
|
||||
starlette = "^0.48.0"
|
||||
tenacity = ">=8.5,<10.0"
|
||||
@ -5519,6 +5637,35 @@ third-party-runtimes = ["daytona (==0.24.2)", "e2b-code-interpreter (>=2.0.0,<3.
|
||||
type = "directory"
|
||||
url = ".."
|
||||
|
||||
[[package]]
|
||||
name = "openhands-sdk"
|
||||
version = "1.0.0"
|
||||
description = "OpenHands SDK - Core functionality for building AI agents"
|
||||
optional = false
|
||||
python-versions = ">=3.12"
|
||||
groups = ["main"]
|
||||
files = []
|
||||
develop = false
|
||||
|
||||
[package.dependencies]
|
||||
fastmcp = ">=2.11.3"
|
||||
litellm = {git = "https://github.com/BerriAI/litellm.git", rev = "v1.77.7.dev9"}
|
||||
pydantic = ">=2.11.7"
|
||||
python-frontmatter = ">=1.1.0"
|
||||
python-json-logger = ">=3.3.0"
|
||||
tenacity = ">=9.1.2"
|
||||
websockets = ">=12"
|
||||
|
||||
[package.extras]
|
||||
boto3 = ["boto3 (>=1.35.0)"]
|
||||
|
||||
[package.source]
|
||||
type = "git"
|
||||
url = "https://github.com/All-Hands-AI/agent-sdk.git"
|
||||
reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
|
||||
resolved_reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
|
||||
subdirectory = "openhands/sdk"
|
||||
|
||||
[[package]]
|
||||
name = "openpyxl"
|
||||
version = "3.1.5"
|
||||
@ -5855,14 +6002,14 @@ ptyprocess = ">=0.5"
|
||||
|
||||
[[package]]
|
||||
name = "pg8000"
|
||||
version = "1.31.4"
|
||||
version = "1.31.5"
|
||||
description = "PostgreSQL interface library"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pg8000-1.31.4-py3-none-any.whl", hash = "sha256:d14fb2054642ee80f9a216721892e99e19db60a005358460ffa48872351423d4"},
|
||||
{file = "pg8000-1.31.4.tar.gz", hash = "sha256:e7ecce4339891f27b0b22e2f79eb9efe44118bd384207359fc18350f788ace00"},
|
||||
{file = "pg8000-1.31.5-py3-none-any.whl", hash = "sha256:0af2c1926b153307639868d2ee5cef6cd3a7d07448e12736989b10e1d491e201"},
|
||||
{file = "pg8000-1.31.5.tar.gz", hash = "sha256:46ebb03be52b7a77c03c725c79da2ca281d6e8f59577ca66b17c9009618cae78"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -6528,6 +6675,17 @@ files = [
|
||||
[package.dependencies]
|
||||
pyasn1 = ">=0.6.1,<0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pybase62"
|
||||
version = "1.0.0"
|
||||
description = "Python module for base62 encoding"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pybase62-1.0.0-py3-none-any.whl", hash = "sha256:60539ad956ec9e9de091bc7ae88c9550bc2fa17f503050cf34d021b75e73cb27"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycodestyle"
|
||||
version = "2.14.0"
|
||||
@ -7122,6 +7280,30 @@ PyYAML = "*"
|
||||
docs = ["sphinx"]
|
||||
test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"]
|
||||
|
||||
[[package]]
|
||||
name = "python-jose"
|
||||
version = "3.5.0"
|
||||
description = "JOSE implementation in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"},
|
||||
{file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""}
|
||||
ecdsa = "!=0.15"
|
||||
pyasn1 = ">=0.5.0"
|
||||
rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0"
|
||||
|
||||
[package.extras]
|
||||
cryptography = ["cryptography (>=3.4.0)"]
|
||||
pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"]
|
||||
pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "python-json-logger"
|
||||
version = "3.3.0"
|
||||
|
||||
@@ -224,6 +224,16 @@ class SaasUserAuth(UserAuth):
         await rate_limiter.hit('auth_uid', user_id)
         return instance

+    @classmethod
+    async def get_for_user(cls, user_id: str) -> UserAuth:
+        offline_token = await token_manager.load_offline_token(user_id)
+        assert offline_token is not None
+        return SaasUserAuth(
+            user_id=user_id,
+            refresh_token=SecretStr(offline_token),
+            auth_type=AuthType.BEARER,
+        )
+

 def get_api_key_from_header(request: Request):
     auth_header = request.headers.get('Authorization')
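For background jobs that have a user id but no request context, the new classmethod can be called directly. A hedged usage sketch: the helper name and user id are illustrative, and it presumes an offline token has already been stored for that user.

```python
async def auth_for_background_job(user_id: str) -> UserAuth:
    user_auth = await SaasUserAuth.get_for_user(user_id)
    assert user_auth.auth_type == AuthType.BEARER  # refresh token seeded from the offline token
    return user_auth
```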
@@ -424,7 +424,7 @@ async def refresh_tokens(
     provider_handler = ProviderHandler(
         create_provider_tokens_object([provider]), external_auth_id=user_id
     )
-    service = provider_handler._get_service(provider)
+    service = provider_handler.get_service(provider)
     token = await service.get_latest_token()
     if not token:
         raise HTTPException(
@@ -2,6 +2,6 @@
 Unified SQLAlchemy declarative base for all models.
 """

-from sqlalchemy.orm import declarative_base
+from openhands.app_server.utils.sql_utils import Base

-Base = declarative_base()
+__all__ = ['Base']
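With this re-export in place, enterprise models keep importing `Base` from `storage.base` while actually sharing the app server's declarative base (and therefore a single `MetaData`). A small illustrative sketch; the table below is hypothetical:

```python
from sqlalchemy import Column, String
from storage.base import Base  # now the app-server Base under the hood

class ExampleRecord(Base):  # hypothetical model, for illustration only
    __tablename__ = 'example_record'
    id = Column(String, primary_key=True)
```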
@@ -1,7 +1,6 @@
 import asyncio
 import os

-from google.cloud.sql.connector import Connector
 from sqlalchemy import create_engine
 from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
 from sqlalchemy.orm import sessionmaker
@@ -26,6 +25,8 @@ def _get_db_engine():
     if GCP_DB_INSTANCE:  # GCP environments

         def get_db_connection():
+            from google.cloud.sql.connector import Connector
+
             connector = Connector()
             instance_string = f'{GCP_PROJECT}:{GCP_REGION}:{GCP_DB_INSTANCE}'
             return connector.connect(
@@ -52,6 +53,8 @@ def _get_db_engine():


         async def async_creator():
+            from google.cloud.sql.connector import Connector
+
             loop = asyncio.get_running_loop()
             async with Connector(loop=loop) as connector:
                 conn = await connector.connect_async(
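The lazy import keeps the Cloud SQL connector out of the import path for non-GCP deployments. A minimal sketch of the resulting async engine wiring, mirroring the pattern above; the instance string, credentials, and IAM-auth flag are placeholders and assumptions, not values from this diff:

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

async def async_creator():
    from google.cloud.sql.connector import Connector  # imported lazily, as in the diff

    loop = asyncio.get_running_loop()
    async with Connector(loop=loop) as connector:
        return await connector.connect_async(
            'my-project:my-region:my-instance',  # placeholder instance string
            'asyncpg',
            user='openhands',
            db='openhands',
            enable_iam_auth=True,  # assumption; password auth works too
        )

engine = create_async_engine('postgresql+asyncpg://', async_creator=async_creator)
```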
@@ -52,6 +52,14 @@ class SaasConversationStore(ConversationStore):
             # Convert string to ProviderType enum
             kwargs['git_provider'] = ProviderType(kwargs['git_provider'])

+        # Remove V1 attributes
+        kwargs.pop('max_budget_per_task', None)
+        kwargs.pop('cache_read_tokens', None)
+        kwargs.pop('cache_write_tokens', None)
+        kwargs.pop('reasoning_tokens', None)
+        kwargs.pop('context_window', None)
+        kwargs.pop('per_turn_token', None)
+
         return ConversationMetadata(**kwargs)

     async def save_metadata(self, metadata: ConversationMetadata):
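The same filtering can be written as a single comprehension, which makes the intent explicit: drop every V1-only column before building the legacy `ConversationMetadata`. A sketch of that alternative; the helper name is illustrative:

```python
V1_ONLY_FIELDS = frozenset({
    'max_budget_per_task', 'cache_read_tokens', 'cache_write_tokens',
    'reasoning_tokens', 'context_window', 'per_turn_token',
})

def strip_v1_fields(kwargs: dict) -> dict:
    """Drop V1-only columns before building the legacy ConversationMetadata."""
    return {k: v for k, v in kwargs.items() if k not in V1_ONLY_FIELDS}
```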
@@ -1,41 +1,8 @@
-import uuid
-from datetime import UTC, datetime
-
-from sqlalchemy import JSON, Column, DateTime, Float, Integer, String
-from storage.base import Base
-
-
-class StoredConversationMetadata(Base):  # type: ignore
-    __tablename__ = 'conversation_metadata'
-    conversation_id = Column(
-        String, primary_key=True, default=lambda: str(uuid.uuid4())
-    )
-    github_user_id = Column(String, nullable=True)  # The GitHub user ID
-    user_id = Column(String, nullable=False)  # The Keycloak User ID
-    selected_repository = Column(String, nullable=True)
-    selected_branch = Column(String, nullable=True)
-    git_provider = Column(
-        String, nullable=True
-    )  # The git provider (GitHub, GitLab, etc.)
-    title = Column(String, nullable=True)
-    last_updated_at = Column(
-        DateTime(timezone=True),
-        default=lambda: datetime.now(UTC),  # type: ignore[attr-defined]
-    )
-    created_at = Column(
-        DateTime(timezone=True),
-        default=lambda: datetime.now(UTC),  # type: ignore[attr-defined]
-    )
-    trigger = Column(String, nullable=True)
-    pr_number = Column(
-        JSON, nullable=True
-    )  # List of PR numbers associated with the conversation
-
-    # Cost and token metrics
-    accumulated_cost = Column(Float, default=0.0)
-    prompt_tokens = Column(Integer, default=0)
-    completion_tokens = Column(Integer, default=0)
-    total_tokens = Column(Integer, default=0)
-
-    # LLM model used for the conversation
-    llm_model = Column(String, nullable=True)
+from openhands.app_server.app_conversation.sql_app_conversation_info_service import (
+    StoredConversationMetadata as _StoredConversationMetadata,
+)
+
+StoredConversationMetadata = _StoredConversationMetadata
+
+__all__ = ['StoredConversationMetadata']
@@ -1,44 +1,9 @@
-import os
-from pathlib import Path
-
-__package_name__ = 'openhands_ai'
-
-
-def get_version():
-    # Try getting the version from pyproject.toml
-    try:
-        root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-        candidate_paths = [
-            Path(root_dir) / 'pyproject.toml',
-            Path(root_dir) / 'openhands' / 'pyproject.toml',
-        ]
-        for file_path in candidate_paths:
-            if file_path.is_file():
-                with open(file_path, 'r') as f:
-                    for line in f:
-                        if line.strip().startswith('version ='):
-                            return line.split('=', 1)[1].strip().strip('"').strip("'")
-    except FileNotFoundError:
-        pass
-
-    try:
-        from importlib.metadata import PackageNotFoundError, version
-
-        return version(__package_name__)
-    except (ImportError, PackageNotFoundError):
-        pass
-
-    try:
-        from pkg_resources import DistributionNotFound, get_distribution  # type: ignore
-
-        return get_distribution(__package_name__).version
-    except (ImportError, DistributionNotFound):
-        pass
-
-    return 'unknown'
-
-
-try:
-    __version__ = get_version()
-except Exception:
-    __version__ = 'unknown'
+# This is a namespace package - extend the path to include installed packages
+# (We need to do this to support the dependencies openhands-sdk, openhands-tools and
+# openhands-agent-server, which all have a top-level `openhands` package.)
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+
+# Import version information for backward compatibility
+from openhands.version import __version__, get_version
+
+__all__ = ['__version__', 'get_version']
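The namespace declaration is what lets several installed distributions contribute to the same top-level `openhands` package. A small sketch of the effect; the SDK import assumes the agent-sdk distribution pinned in the lockfile above is installed:

```python
import openhands

# One path entry per contributing distribution (openhands-ai, openhands-sdk, ...).
print(openhands.__path__)

from openhands.version import __version__      # provided by this repository
from openhands.sdk.llm import MetricsSnapshot  # provided by the agent-sdk distribution
```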
openhands/app_server/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
# OpenHands App Server

FastAPI-based application server that provides REST API endpoints for OpenHands V1 integration.

## Overview

As of 2025-09-29, much of the code in the OpenHands repository can be regarded as legacy, having been superseded by the agent SDK. This package provides endpoints that interface with the new SDK and bridge the gap with the existing OpenHands project.

## Architecture

The app server is organized into several key modules:

- **conversation/**: Manages sandboxed conversations and their lifecycle
- **event/**: Handles event storage, retrieval, and streaming
- **event_callback/**: Manages webhooks and event callbacks
- **sandbox/**: Manages sandbox environments for agent execution
- **user/**: User management and authentication
- **services/**: Core services like JWT authentication
- **utils/**: Utility functions for common operations
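A hedged sketch of how such a FastAPI server is typically served in development; the app wiring, router include, and port are assumptions rather than something this diff specifies:

```python
import uvicorn
from fastapi import FastAPI

app = FastAPI(title='OpenHands App Server')
# app.include_router(app_conversation_router)  # assumed wiring; see the router module below

if __name__ == '__main__':
    uvicorn.run(app, host='0.0.0.0', port=3000)
```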
openhands/app_server/__init__.py (new file, 0 lines)

openhands/app_server/app_conversation/README.md (new file, 20 lines)
@@ -0,0 +1,20 @@
# Conversation Management

Manages app conversations and their lifecycle within the OpenHands app server.

## Overview

This module provides services and models for managing conversations that run within sandboxed environments. It handles conversation creation, retrieval, status tracking, and lifecycle management.

## Key Components

- **AppConversationService**: Abstract service for conversation CRUD operations
- **LiveStatusAppConversationService**: Real-time conversation status tracking
- **AppConversationRouter**: FastAPI router for conversation endpoints

## Features

- Conversation search and filtering by title, dates, and status
- Real-time conversation status updates
- Pagination support for large conversation lists (see the paging sketch below)
- Integration with sandbox environments
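Pagination is cursor-based: each page carries a `next_page_id` that is fed back as `page_id` on the next call. A sketch against the service interface defined further down; the helper function is illustrative and `service` is any concrete implementation:

```python
async def print_all_conversations(service: AppConversationInfoService) -> None:
    page_id = None
    while True:
        page = await service.search_app_conversation_info(page_id=page_id, limit=50)
        for info in page.items:
            print(info.id, info.title)
        if page.next_page_id is None:
            return
        page_id = page.next_page_id
```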
openhands/app_server/app_conversation/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# App conversation module
@@ -0,0 +1,75 @@
import asyncio
from abc import ABC, abstractmethod
from datetime import datetime
from uuid import UUID

from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationInfo,
    AppConversationInfoPage,
    AppConversationSortOrder,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk.utils.models import DiscriminatedUnionMixin


class AppConversationInfoService(ABC):
    """Service for accessing info on conversations without their current status."""

    @abstractmethod
    async def search_app_conversation_info(
        self,
        title__contains: str | None = None,
        created_at__gte: datetime | None = None,
        created_at__lt: datetime | None = None,
        updated_at__gte: datetime | None = None,
        updated_at__lt: datetime | None = None,
        sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
        page_id: str | None = None,
        limit: int = 100,
    ) -> AppConversationInfoPage:
        """Search for sandboxed conversations."""

    @abstractmethod
    async def count_app_conversation_info(
        self,
        title__contains: str | None = None,
        created_at__gte: datetime | None = None,
        created_at__lt: datetime | None = None,
        updated_at__gte: datetime | None = None,
        updated_at__lt: datetime | None = None,
    ) -> int:
        """Count sandboxed conversations."""

    @abstractmethod
    async def get_app_conversation_info(
        self, conversation_id: UUID
    ) -> AppConversationInfo | None:
        """Get a single conversation info, returning None if missing."""

    async def batch_get_app_conversation_info(
        self, conversation_ids: list[UUID]
    ) -> list[AppConversationInfo | None]:
        """Get a batch of conversation info, return None for any missing."""
        return await asyncio.gather(
            *[
                self.get_app_conversation_info(conversation_id)
                for conversation_id in conversation_ids
            ]
        )

    # Mutators

    @abstractmethod
    async def save_app_conversation_info(
        self, info: AppConversationInfo
    ) -> AppConversationInfo:
        """Store the sandboxed conversation info object given.

        Return the stored info
        """


class AppConversationInfoServiceInjector(
    DiscriminatedUnionMixin, Injector[AppConversationInfoService], ABC
):
    pass
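A minimal in-memory implementation sketch of the abstract service above, the kind of stub that is handy in unit tests. Filtering and sort order are deliberately ignored here, and the class name is hypothetical:

```python
from uuid import UUID


class InMemoryAppConversationInfoService(AppConversationInfoService):
    def __init__(self) -> None:
        self._items: dict[UUID, AppConversationInfo] = {}

    async def search_app_conversation_info(self, **kwargs) -> AppConversationInfoPage:
        return AppConversationInfoPage(items=list(self._items.values()))

    async def count_app_conversation_info(self, **kwargs) -> int:
        return len(self._items)

    async def get_app_conversation_info(
        self, conversation_id: UUID
    ) -> AppConversationInfo | None:
        return self._items.get(conversation_id)

    async def save_app_conversation_info(
        self, info: AppConversationInfo
    ) -> AppConversationInfo:
        self._items[info.id] = info
        return info
```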
148
openhands/app_server/app_conversation/app_conversation_models.py
Normal file
148
openhands/app_server/app_conversation/app_conversation_models.py
Normal file
@ -0,0 +1,148 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from openhands.agent_server.models import SendMessageRequest
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.event_callback.event_callback_models import (
|
||||
EventCallbackProcessor,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_models import SandboxStatus
|
||||
from openhands.integrations.service_types import ProviderType
|
||||
from openhands.sdk.conversation.state import AgentExecutionStatus
|
||||
from openhands.sdk.llm import MetricsSnapshot
|
||||
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
|
||||
|
||||
|
||||
class AppConversationInfo(BaseModel):
|
||||
"""Conversation info which does not contain status."""
|
||||
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
|
||||
created_by_user_id: str | None
|
||||
sandbox_id: str
|
||||
|
||||
selected_repository: str | None = None
|
||||
selected_branch: str | None = None
|
||||
git_provider: ProviderType | None = None
|
||||
title: str | None = None
|
||||
trigger: ConversationTrigger | None = None
|
||||
pr_number: list[int] = Field(default_factory=list)
|
||||
llm_model: str | None = None
|
||||
|
||||
metrics: MetricsSnapshot | None = None
|
||||
|
||||
created_at: datetime = Field(default_factory=utc_now)
|
||||
updated_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class AppConversationSortOrder(Enum):
|
||||
CREATED_AT = 'CREATED_AT'
|
||||
CREATED_AT_DESC = 'CREATED_AT_DESC'
|
||||
UPDATED_AT = 'UPDATED_AT'
|
||||
UPDATED_AT_DESC = 'UPDATED_AT_DESC'
|
||||
TITLE = 'TITLE'
|
||||
TITLE_DESC = 'TITLE_DESC'
|
||||
|
||||
|
||||
class AppConversationInfoPage(BaseModel):
|
||||
items: list[AppConversationInfo]
|
||||
next_page_id: str | None = None
|
||||
|
||||
|
||||
class AppConversation(AppConversationInfo): # type: ignore
|
||||
sandbox_status: SandboxStatus = Field(
|
||||
default=SandboxStatus.MISSING,
|
||||
description='Current sandbox status. Will be MISSING if the sandbox does not exist.',
|
||||
)
|
||||
agent_status: AgentExecutionStatus | None = Field(
|
||||
default=None,
|
||||
description='Current agent status. Will be None if the sandbox_status is not RUNNING',
|
||||
)
|
||||
conversation_url: str | None = Field(
|
||||
default=None, description='The URL where the conversation may be accessed'
|
||||
)
|
||||
session_api_key: str | None = Field(
|
||||
default=None, description='The Session Api Key for REST operations.'
|
||||
)
|
||||
|
||||
# JSON fields for complex data types
|
||||
pr_number: list[int] = Field(default_factory=list)
|
||||
metrics: MetricsSnapshot | None = Field(default=None)
|
||||
|
||||
|
||||
class AppConversationPage(BaseModel):
|
||||
items: list[AppConversation]
|
||||
next_page_id: str | None = None
|
||||
|
||||
|
||||
class AppConversationStartRequest(BaseModel):
|
||||
"""Start conversation request object.
|
||||
|
||||
Although a user can go directly to the sandbox and start conversations, they
|
||||
would need to manually supply required startup parameters such as LLM key. Starting
|
||||
from the app server copies these from the user info.
|
||||
"""
|
||||
|
||||
sandbox_id: str | None = Field(default=None)
|
||||
initial_message: SendMessageRequest | None = None
|
||||
processors: list[EventCallbackProcessor] = Field(default_factory=list)
|
||||
llm_model: str | None = None
|
||||
|
||||
# Git parameters
|
||||
selected_repository: str | None = None
|
||||
selected_branch: str | None = None
|
||||
git_provider: ProviderType | None = None
|
||||
title: str | None = None
|
||||
trigger: ConversationTrigger | None = None
|
||||
pr_number: list[int] = Field(default_factory=list)
|
||||
|
||||
|
||||
class AppConversationStartTaskStatus(Enum):
|
||||
WORKING = 'WORKING'
|
||||
WAITING_FOR_SANDBOX = 'WAITING_FOR_SANDBOX'
|
||||
PREPARING_REPOSITORY = 'PREPARING_REPOSITORY'
|
||||
RUNNING_SETUP_SCRIPT = 'RUNNING_SETUP_SCRIPT'
|
||||
SETTING_UP_GIT_HOOKS = 'SETTING_UP_GIT_HOOKS'
|
||||
STARTING_CONVERSATION = 'STARTING_CONVERSATION'
|
||||
READY = 'READY'
|
||||
ERROR = 'ERROR'
|
||||
|
||||
|
||||
class AppConversationStartTaskSortOrder(Enum):
|
||||
CREATED_AT = 'CREATED_AT'
|
||||
CREATED_AT_DESC = 'CREATED_AT_DESC'
|
||||
UPDATED_AT = 'UPDATED_AT'
|
||||
UPDATED_AT_DESC = 'UPDATED_AT_DESC'
|
||||
|
||||
|
||||
class AppConversationStartTask(BaseModel):
|
||||
"""Object describing the start process for an app conversation.
|
||||
|
||||
Because starting an app conversation can be slow (and can involve starting a sandbox),
|
||||
we kick off a background task for it. Once the conversation is started, the app_conversation_id
|
||||
is populated."""
|
||||
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
created_by_user_id: str | None
|
||||
status: AppConversationStartTaskStatus = AppConversationStartTaskStatus.WORKING
|
||||
detail: str | None = None
|
||||
app_conversation_id: UUID | None = Field(
|
||||
default=None, description='The id of the app_conversation, if READY'
|
||||
)
|
||||
sandbox_id: str | None = Field(
|
||||
default=None, description='The id of the sandbox, if READY'
|
||||
)
|
||||
agent_server_url: str | None = Field(
|
||||
default=None, description='The agent server url, if READY'
|
||||
)
|
||||
request: AppConversationStartRequest
|
||||
created_at: datetime = Field(default_factory=utc_now)
|
||||
updated_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class AppConversationStartTaskPage(BaseModel):
|
||||
items: list[AppConversationStartTask]
|
||||
next_page_id: str | None = None
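For orientation, here is a minimal sketch of how a caller might exercise the models above, assuming the module path added in this commit; the repository name and user id are illustrative placeholders, not values from the diff.

```python
# Illustrative sketch only; assumes the models added in this commit are importable.
from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationStartRequest,
    AppConversationStartTask,
    AppConversationStartTaskStatus,
)

# Build a start request that clones a repository (values are placeholders).
request = AppConversationStartRequest(
    selected_repository='octocat/hello-world',
    selected_branch='main',
)

# Service implementations yield tasks like this one as startup progresses.
task = AppConversationStartTask(created_by_user_id='user-123', request=request)
assert task.status == AppConversationStartTaskStatus.WORKING
assert task.app_conversation_id is None  # populated only once the task is READY

# Tasks serialize cleanly for the REST and streaming endpoints.
print(task.model_dump_json(indent=2))
```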
307
openhands/app_server/app_conversation/app_conversation_router.py
Normal file
@ -0,0 +1,307 @@
|
||||
"""Sandboxed Conversation router for OpenHands Server."""
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from typing import Annotated, AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
|
||||
from openhands.app_server.services.db_session_injector import set_db_session_keep_open
|
||||
from openhands.app_server.services.httpx_client_injector import (
|
||||
set_httpx_client_keep_open,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
|
||||
# Handle anext compatibility for Python < 3.10
|
||||
if sys.version_info >= (3, 10):
|
||||
from builtins import anext
|
||||
else:
|
||||
|
||||
async def anext(async_iterator):
|
||||
"""Compatibility function for anext in Python < 3.10"""
|
||||
return await async_iterator.__anext__()
|
||||
|
||||
|
||||
from fastapi import APIRouter, Query, Request
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversation,
|
||||
AppConversationPage,
|
||||
AppConversationStartRequest,
|
||||
AppConversationStartTask,
|
||||
AppConversationStartTaskPage,
|
||||
AppConversationStartTaskSortOrder,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_service import (
|
||||
AppConversationService,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
|
||||
AppConversationStartTaskService,
|
||||
)
|
||||
from openhands.app_server.config import (
|
||||
depends_app_conversation_service,
|
||||
depends_app_conversation_start_task_service,
|
||||
depends_db_session,
|
||||
depends_httpx_client,
|
||||
depends_user_context,
|
||||
get_app_conversation_service,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix='/app-conversations', tags=['Conversations'])
|
||||
app_conversation_service_dependency = depends_app_conversation_service()
|
||||
app_conversation_start_task_service_dependency = (
|
||||
depends_app_conversation_start_task_service()
|
||||
)
|
||||
user_context_dependency = depends_user_context()
|
||||
db_session_dependency = depends_db_session()
|
||||
httpx_client_dependency = depends_httpx_client()
|
||||
|
||||
# Read methods
|
||||
|
||||
|
||||
@router.get('/search')
|
||||
async def search_app_conversations(
|
||||
title__contains: Annotated[
|
||||
str | None,
|
||||
Query(title='Filter by title containing this string'),
|
||||
] = None,
|
||||
created_at__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by created_at greater than or equal to this datetime'),
|
||||
] = None,
|
||||
created_at__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by created_at less than this datetime'),
|
||||
] = None,
|
||||
updated_at__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by updated_at greater than or equal to this datetime'),
|
||||
] = None,
|
||||
updated_at__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by updated_at less than this datetime'),
|
||||
] = None,
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int,
|
||||
Query(
|
||||
title='The max number of results in the page',
|
||||
gt=0,
|
||||
lte=100,
|
||||
),
|
||||
] = 100,
|
||||
app_conversation_service: AppConversationService = (
|
||||
app_conversation_service_dependency
|
||||
),
|
||||
) -> AppConversationPage:
|
||||
"""Search / List sandboxed conversations."""
|
||||
assert limit > 0
|
||||
assert limit <= 100
|
||||
return await app_conversation_service.search_app_conversations(
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
page_id=page_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
|
||||
@router.get('/count')
|
||||
async def count_app_conversations(
|
||||
title__contains: Annotated[
|
||||
str | None,
|
||||
Query(title='Filter by title containing this string'),
|
||||
] = None,
|
||||
created_at__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by created_at greater than or equal to this datetime'),
|
||||
] = None,
|
||||
created_at__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by created_at less than this datetime'),
|
||||
] = None,
|
||||
updated_at__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by updated_at greater than or equal to this datetime'),
|
||||
] = None,
|
||||
updated_at__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Filter by updated_at less than this datetime'),
|
||||
] = None,
|
||||
app_conversation_service: AppConversationService = (
|
||||
app_conversation_service_dependency
|
||||
),
|
||||
) -> int:
|
||||
"""Count sandboxed conversations matching the given filters."""
|
||||
return await app_conversation_service.count_app_conversations(
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
)
|
||||
|
||||
|
||||
@router.get('')
|
||||
async def batch_get_app_conversations(
|
||||
ids: Annotated[list[UUID], Query()],
|
||||
app_conversation_service: AppConversationService = (
|
||||
app_conversation_service_dependency
|
||||
),
|
||||
) -> list[AppConversation | None]:
|
||||
"""Get a batch of sandboxed conversations given their ids. Return None for any missing."""
|
||||
assert len(ids) < 100
|
||||
app_conversations = await app_conversation_service.batch_get_app_conversations(ids)
|
||||
return app_conversations
|
||||
|
||||
|
||||
@router.post('')
|
||||
async def start_app_conversation(
|
||||
request: Request,
|
||||
start_request: AppConversationStartRequest,
|
||||
db_session: AsyncSession = db_session_dependency,
|
||||
httpx_client: httpx.AsyncClient = httpx_client_dependency,
|
||||
app_conversation_service: AppConversationService = (
|
||||
app_conversation_service_dependency
|
||||
),
|
||||
) -> AppConversationStartTask:
|
||||
"""Start an app conversation start task and return it."""
# Because we are processing after the request finishes, keep the db connection open
set_db_session_keep_open(request.state, True)
set_httpx_client_keep_open(request.state, True)

async_iter = app_conversation_service.start_app_conversation(start_request)
|
||||
result = await anext(async_iter)
|
||||
asyncio.create_task(_consume_remaining(async_iter, db_session, httpx_client))
|
||||
return result
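A hedged client-side sketch of this flow with httpx: POST the start request, then poll the start-task batch endpoint until it reports READY or ERROR. The base URL, payload, and polling interval are assumptions for illustration, not values defined in this diff.

```python
# Illustrative client sketch; base URL, payload and timing are assumptions.
import time

import httpx

BASE_URL = 'http://localhost:3000/app-conversations'  # hypothetical deployment URL

with httpx.Client() as client:
    # Kick off the background start task.
    resp = client.post(BASE_URL, json={'selected_repository': 'octocat/hello-world'})
    resp.raise_for_status()
    task = resp.json()

    # Poll the batch endpoint until the task leaves its in-progress states.
    while task['status'] not in ('READY', 'ERROR'):
        time.sleep(2)
        resp = client.get(f'{BASE_URL}/start-tasks', params={'ids': [task['id']]})
        resp.raise_for_status()
        task = resp.json()[0]

    if task['status'] == 'READY':
        print('conversation id:', task['app_conversation_id'])
    else:
        print('start failed:', task['detail'])
```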
|
||||
|
||||
@router.post('/stream-start')
|
||||
async def stream_app_conversation_start(
|
||||
request: AppConversationStartRequest,
|
||||
user_context: UserContext = user_context_dependency,
|
||||
) -> list[AppConversationStartTask]:
|
||||
"""Start an app conversation start task and stream updates from it.
|
||||
Leaves the connection open until either the conversation starts or an error occurs."""
|
||||
response = StreamingResponse(
|
||||
_stream_app_conversation_start(request, user_context),
|
||||
media_type='application/json',
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
@router.get('/start-tasks/search')
|
||||
async def search_app_conversation_start_tasks(
|
||||
conversation_id__eq: Annotated[
|
||||
UUID | None,
|
||||
Query(title='Filter by conversation ID equal to this value'),
|
||||
] = None,
|
||||
sort_order: Annotated[
|
||||
AppConversationStartTaskSortOrder,
|
||||
Query(title='Sort order for the results'),
|
||||
] = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int,
|
||||
Query(
|
||||
title='The max number of results in the page',
|
||||
gt=0,
|
||||
lte=100,
|
||||
),
|
||||
] = 100,
|
||||
app_conversation_start_task_service: AppConversationStartTaskService = (
|
||||
app_conversation_start_task_service_dependency
|
||||
),
|
||||
) -> AppConversationStartTaskPage:
|
||||
"""Search / List conversation start tasks."""
|
||||
assert limit > 0
|
||||
assert limit <= 100
|
||||
return (
|
||||
await app_conversation_start_task_service.search_app_conversation_start_tasks(
|
||||
conversation_id__eq=conversation_id__eq,
|
||||
sort_order=sort_order,
|
||||
page_id=page_id,
|
||||
limit=limit,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@router.get('/start-tasks/count')
|
||||
async def count_app_conversation_start_tasks(
|
||||
conversation_id__eq: Annotated[
|
||||
UUID | None,
|
||||
Query(title='Filter by conversation ID equal to this value'),
|
||||
] = None,
|
||||
app_conversation_start_task_service: AppConversationStartTaskService = (
|
||||
app_conversation_start_task_service_dependency
|
||||
),
|
||||
) -> int:
|
||||
"""Count conversation start tasks matching the given filters."""
|
||||
return await app_conversation_start_task_service.count_app_conversation_start_tasks(
|
||||
conversation_id__eq=conversation_id__eq,
|
||||
)
|
||||
|
||||
|
||||
@router.get('/start-tasks')
|
||||
async def batch_get_app_conversation_start_tasks(
|
||||
ids: Annotated[list[UUID], Query()],
|
||||
app_conversation_start_task_service: AppConversationStartTaskService = (
|
||||
app_conversation_start_task_service_dependency
|
||||
),
|
||||
) -> list[AppConversationStartTask | None]:
|
||||
"""Get a batch of start app conversation tasks given their ids. Return None for any missing."""
|
||||
assert len(ids) < 100
|
||||
start_tasks = await app_conversation_start_task_service.batch_get_app_conversation_start_tasks(
|
||||
ids
|
||||
)
|
||||
return start_tasks
|
||||
|
||||
|
||||
async def _consume_remaining(
|
||||
async_iter, db_session: AsyncSession, httpx_client: httpx.AsyncClient
|
||||
):
|
||||
"""Consume the remaining items from an async iterator"""
|
||||
try:
|
||||
while True:
|
||||
await anext(async_iter)
|
||||
except StopAsyncIteration:
|
||||
return
|
||||
finally:
|
||||
await db_session.close()
|
||||
await httpx_client.aclose()
|
||||
|
||||
|
||||
async def _stream_app_conversation_start(
|
||||
request: AppConversationStartRequest,
|
||||
user_context: UserContext,
|
||||
) -> AsyncGenerator[str, None]:
|
||||
"""Stream a json list, item by item."""
|
||||
|
||||
# Because the original dependencies are closed after the method returns, we need
|
||||
# a new dependency context which will continue until the stream finishes.
|
||||
state = InjectorState()
|
||||
setattr(state, USER_CONTEXT_ATTR, user_context)
|
||||
async with get_app_conversation_service(state) as app_conversation_service:
|
||||
yield '[\n'
|
||||
comma = False
|
||||
async for task in app_conversation_service.start_app_conversation(request):
|
||||
chunk = task.model_dump_json()
|
||||
if comma:
|
||||
chunk = ',\n' + chunk
|
||||
comma = True
|
||||
yield chunk
|
||||
yield ']'
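The streaming variant above writes a JSON array one element at a time, so a client can show progress as each task update arrives. A minimal consumer sketch with httpx streaming, reusing the hypothetical base URL from the previous example.

```python
# Illustrative consumer for the streaming endpoint; URL and payload are assumptions.
import httpx

URL = 'http://localhost:3000/app-conversations/stream-start'  # hypothetical

with httpx.Client(timeout=None) as client:
    with client.stream(
        'POST', URL, json={'selected_repository': 'octocat/hello-world'}
    ) as response:
        response.raise_for_status()
        # The body is a JSON array streamed item by item ('[\n', tasks separated
        # by ',\n', then ']'); print chunks as they arrive to show progress.
        for chunk in response.iter_text():
            print(chunk, end='', flush=True)
```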
@ -0,0 +1,100 @@
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversation,
|
||||
AppConversationPage,
|
||||
AppConversationSortOrder,
|
||||
AppConversationStartRequest,
|
||||
AppConversationStartTask,
|
||||
)
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk import Workspace
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
|
||||
class AppConversationService(ABC):
|
||||
"""Service for managing conversations running in sandboxes."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_app_conversations(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> AppConversationPage:
|
||||
"""Search for sandboxed conversations."""
|
||||
|
||||
@abstractmethod
|
||||
async def count_app_conversations(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
) -> int:
|
||||
"""Count sandboxed conversations."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_app_conversation(
|
||||
self, conversation_id: UUID
|
||||
) -> AppConversation | None:
|
||||
"""Get a single sandboxed conversation info. Return None if missing."""
|
||||
|
||||
async def batch_get_app_conversations(
|
||||
self, conversation_ids: list[UUID]
|
||||
) -> list[AppConversation | None]:
|
||||
"""Get a batch of sandboxed conversations, returning None for any missing."""
|
||||
return await asyncio.gather(
|
||||
*[
|
||||
self.get_app_conversation(conversation_id)
|
||||
for conversation_id in conversation_ids
|
||||
]
|
||||
)
|
||||
|
||||
@abstractmethod
|
||||
async def start_app_conversation(
|
||||
self, request: AppConversationStartRequest
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
"""Start a conversation, optionally specifying a sandbox in which to start.
|
||||
|
||||
If no sandbox is specified a default may be used or started. This is a convenience
|
||||
method - the same effect should be achievable by creating / getting a sandbox
|
||||
id, starting a conversation, attaching a callback, and then running the
|
||||
conversation.
|
||||
|
||||
Yields an instance of AppConversationStartTask as updates occur, which can be used to determine
|
||||
the progress of the task.
|
||||
"""
|
||||
# This is an abstract method - concrete implementations should provide real values
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartRequest,
|
||||
)
|
||||
|
||||
dummy_request = AppConversationStartRequest()
|
||||
yield AppConversationStartTask(
|
||||
created_by_user_id='dummy',
|
||||
request=dummy_request,
|
||||
)
|
||||
|
||||
@abstractmethod
|
||||
async def run_setup_scripts(
|
||||
self, task: AppConversationStartTask, workspace: Workspace
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
"""Run the setup scripts for the project and yield status updates"""
|
||||
yield task
|
||||
|
||||
|
||||
class AppConversationServiceInjector(
|
||||
DiscriminatedUnionMixin, Injector[AppConversationService], ABC
|
||||
):
|
||||
pass
|
||||
@ -0,0 +1,63 @@
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
from uuid import UUID
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartTask,
|
||||
AppConversationStartTaskPage,
|
||||
AppConversationStartTaskSortOrder,
|
||||
)
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
|
||||
class AppConversationStartTaskService(ABC):
|
||||
"""Service for accessing start tasks for conversations."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_app_conversation_start_tasks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
sort_order: AppConversationStartTaskSortOrder = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> AppConversationStartTaskPage:
|
||||
"""Search for conversation start tasks."""
|
||||
|
||||
@abstractmethod
|
||||
async def count_app_conversation_start_tasks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
) -> int:
|
||||
"""Count conversation start tasks."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_app_conversation_start_task(
|
||||
self, task_id: UUID
|
||||
) -> AppConversationStartTask | None:
|
||||
"""Get a single start task, returning None if missing."""
|
||||
|
||||
async def batch_get_app_conversation_start_tasks(
|
||||
self, task_ids: list[UUID]
|
||||
) -> list[AppConversationStartTask | None]:
|
||||
"""Get a batch of start tasks, return None for any missing."""
|
||||
return await asyncio.gather(
|
||||
*[self.get_app_conversation_start_task(task_id) for task_id in task_ids]
|
||||
)
|
||||
|
||||
# Mutators
|
||||
|
||||
@abstractmethod
|
||||
async def save_app_conversation_start_task(
|
||||
self, info: AppConversationStartTask
|
||||
) -> AppConversationStartTask:
|
||||
"""Store the start task object given.
|
||||
|
||||
Return the stored task
|
||||
"""
|
||||
|
||||
|
||||
class AppConversationStartTaskServiceInjector(
|
||||
DiscriminatedUnionMixin, Injector[AppConversationStartTaskService], ABC
|
||||
):
|
||||
pass
|
||||
1
openhands/app_server/app_conversation/git/README.md
Normal file
@ -0,0 +1 @@
|
||||
This directory contains files used in git configuration.
|
||||
11
openhands/app_server/app_conversation/git/pre-commit.sh
Normal file
@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
# This hook was installed by OpenHands
|
||||
# It calls the pre-commit script in the .openhands directory
|
||||
|
||||
if [ -x ".openhands/pre-commit.sh" ]; then
|
||||
source ".openhands/pre-commit.sh"
|
||||
exit $?
|
||||
else
|
||||
echo "Warning: .openhands/pre-commit.sh not found or not executable"
|
||||
exit 0
|
||||
fi
|
||||
@ -0,0 +1,151 @@
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import base62
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartTask,
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_service import (
|
||||
AppConversationService,
|
||||
)
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.app_server.utils.async_remote_workspace import AsyncRemoteWorkspace
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
PRE_COMMIT_HOOK = '.git/hooks/pre-commit'
|
||||
PRE_COMMIT_LOCAL = '.git/hooks/pre-commit.local'
|
||||
|
||||
|
||||
@dataclass
|
||||
class GitAppConversationService(AppConversationService, ABC):
|
||||
"""App Conversation service which adds git specific functionality.
|
||||
|
||||
Sets up repositories and installs hooks"""
|
||||
|
||||
init_git_in_empty_workspace: bool
|
||||
user_context: UserContext
|
||||
|
||||
async def run_setup_scripts(
|
||||
self,
|
||||
task: AppConversationStartTask,
|
||||
workspace: AsyncRemoteWorkspace,
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
task.status = AppConversationStartTaskStatus.PREPARING_REPOSITORY
|
||||
yield task
|
||||
await self.clone_or_init_git_repo(task, workspace)
|
||||
|
||||
task.status = AppConversationStartTaskStatus.RUNNING_SETUP_SCRIPT
|
||||
yield task
|
||||
await self.maybe_run_setup_script(workspace)
|
||||
|
||||
task.status = AppConversationStartTaskStatus.SETTING_UP_GIT_HOOKS
|
||||
yield task
|
||||
await self.maybe_setup_git_hooks(workspace)
|
||||
|
||||
async def clone_or_init_git_repo(
|
||||
self,
|
||||
task: AppConversationStartTask,
|
||||
workspace: AsyncRemoteWorkspace,
|
||||
):
|
||||
request = task.request
|
||||
|
||||
if not request.selected_repository:
|
||||
if self.init_git_in_empty_workspace:
|
||||
_logger.debug('Initializing a new git repository in the workspace.')
|
||||
await workspace.execute_command(
|
||||
'git init && git config --global --add safe.directory '
|
||||
+ workspace.working_dir
|
||||
)
|
||||
else:
|
||||
_logger.info('Not initializing a new git repository.')
|
||||
return
|
||||
|
||||
remote_repo_url: str = await self.user_context.get_authenticated_git_url(
|
||||
request.selected_repository
|
||||
)
|
||||
if not remote_repo_url:
|
||||
raise ValueError('Missing either Git token or valid repository')
|
||||
|
||||
dir_name = request.selected_repository.split('/')[-1]
|
||||
|
||||
# Clone the repo - this is the slow part!
|
||||
clone_command = f'git clone {remote_repo_url} {dir_name}'
|
||||
await workspace.execute_command(clone_command, workspace.working_dir)
|
||||
|
||||
# Checkout the appropriate branch
|
||||
if request.selected_branch:
|
||||
checkout_command = f'git checkout {request.selected_branch}'
|
||||
else:
|
||||
# Generate a random branch name to avoid conflicts
|
||||
random_str = base62.encodebytes(os.urandom(16))
|
||||
openhands_workspace_branch = f'openhands-workspace-{random_str}'
|
||||
checkout_command = f'git checkout -b {openhands_workspace_branch}'
|
||||
await workspace.execute_command(checkout_command, workspace.working_dir)
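When no branch is requested, the fallback branch name is built from 16 random bytes encoded with base62 (the same `base62.encodebytes` call used above). A tiny illustration of what that yields; the output differs on every run.

```python
import os

import base62  # the same third-party package used in clone_or_init_git_repo

random_str = base62.encodebytes(os.urandom(16))
print(f'openhands-workspace-{random_str}')
# e.g. 'openhands-workspace-<roughly 22 base62 characters>', unique per run
```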
|
||||
async def maybe_run_setup_script(
|
||||
self,
|
||||
workspace: AsyncRemoteWorkspace,
|
||||
):
|
||||
"""Run .openhands/setup.sh if it exists in the workspace or repository."""
|
||||
setup_script = workspace.working_dir + '/.openhands/setup.sh'
|
||||
|
||||
await workspace.execute_command(
|
||||
f'chmod +x {setup_script} && source {setup_script}', timeout=600
|
||||
)
|
||||
|
||||
# TODO: Does this need to be done?
|
||||
# Add the action to the event stream as an ENVIRONMENT event
|
||||
# source = EventSource.ENVIRONMENT
|
||||
# self.event_stream.add_event(action, source)
|
||||
|
||||
async def maybe_setup_git_hooks(
|
||||
self,
|
||||
workspace: AsyncRemoteWorkspace,
|
||||
):
|
||||
"""Set up git hooks if .openhands/pre-commit.sh exists in the workspace or repository."""
|
||||
command = 'mkdir -p .git/hooks && chmod +x .openhands/pre-commit.sh'
|
||||
result = await workspace.execute_command(command, workspace.working_dir)
|
||||
if result.exit_code:
|
||||
return
|
||||
|
||||
# Check if there's an existing pre-commit hook
|
||||
with tempfile.TemporaryFile(mode='w+t') as temp_file:
|
||||
result = workspace.file_download(PRE_COMMIT_HOOK, str(temp_file))
|
||||
if result.get('success'):
|
||||
_logger.info('Preserving existing pre-commit hook')
|
||||
# an existing pre-commit hook exists
|
||||
if 'This hook was installed by OpenHands' not in temp_file.read():
|
||||
# Move the existing hook to pre-commit.local
|
||||
command = (
|
||||
f'mv {PRE_COMMIT_HOOK} {PRE_COMMIT_LOCAL} &&'
|
||||
f'chmod +x {PRE_COMMIT_LOCAL}'
|
||||
)
|
||||
result = await workspace.execute_command(
|
||||
command, workspace.working_dir
|
||||
)
|
||||
if result.exit_code != 0:
|
||||
_logger.error(
|
||||
f'Failed to preserve existing pre-commit hook: {result.stderr}',
|
||||
)
|
||||
return
|
||||
|
||||
# write the pre-commit hook
|
||||
await workspace.file_upload(
|
||||
source_path=Path(__file__).parent / 'git' / 'pre-commit.sh',
|
||||
destination_path=PRE_COMMIT_HOOK,
|
||||
)
|
||||
|
||||
# Make the pre-commit hook executable
|
||||
result = await workspace.execute_command(f'chmod +x {PRE_COMMIT_HOOK}')
|
||||
if result.exit_code:
|
||||
_logger.error(f'Failed to make pre-commit hook executable: {result.stderr}')
|
||||
return
|
||||
|
||||
_logger.info('Git pre-commit hook installed successfully')
|
||||
@ -0,0 +1,548 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from time import time
|
||||
from typing import AsyncGenerator, Sequence
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
from fastapi import Request
|
||||
from pydantic import Field, SecretStr, TypeAdapter
|
||||
|
||||
from openhands.agent_server.models import (
|
||||
ConversationInfo,
|
||||
NeverConfirm,
|
||||
SendMessageRequest,
|
||||
StartConversationRequest,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversation,
|
||||
AppConversationInfo,
|
||||
AppConversationPage,
|
||||
AppConversationSortOrder,
|
||||
AppConversationStartRequest,
|
||||
AppConversationStartTask,
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_service import (
|
||||
AppConversationService,
|
||||
AppConversationServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
|
||||
AppConversationStartTaskService,
|
||||
)
|
||||
from openhands.app_server.app_conversation.git_app_conversation_service import (
|
||||
GitAppConversationService,
|
||||
)
|
||||
from openhands.app_server.errors import SandboxError
|
||||
from openhands.app_server.sandbox.docker_sandbox_service import DockerSandboxService
|
||||
from openhands.app_server.sandbox.sandbox_models import (
|
||||
AGENT_SERVER,
|
||||
SandboxInfo,
|
||||
SandboxStatus,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_service import SandboxService
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.services.jwt_service import JwtService
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.app_server.utils.async_remote_workspace import AsyncRemoteWorkspace
|
||||
from openhands.integrations.provider import ProviderType
|
||||
from openhands.sdk import LocalWorkspace
|
||||
from openhands.sdk.conversation.secret_source import LookupSecret, StaticSecret
|
||||
from openhands.sdk.llm import LLM
|
||||
from openhands.sdk.security.confirmation_policy import AlwaysConfirm
|
||||
from openhands.tools.preset.default import get_default_agent
|
||||
|
||||
_conversation_info_type_adapter = TypeAdapter(list[ConversationInfo | None])
|
||||
_logger = logging.getLogger(__name__)
|
||||
GIT_TOKEN = 'GIT_TOKEN'
|
||||
|
||||
|
||||
@dataclass
|
||||
class LiveStatusAppConversationService(GitAppConversationService):
|
||||
"""AppConversationService which combines live status info from the sandbox with stored data."""
|
||||
|
||||
user_context: UserContext
|
||||
app_conversation_info_service: AppConversationInfoService
|
||||
app_conversation_start_task_service: AppConversationStartTaskService
|
||||
sandbox_service: SandboxService
|
||||
sandbox_spec_service: SandboxSpecService
|
||||
jwt_service: JwtService
|
||||
sandbox_startup_timeout: int
|
||||
sandbox_startup_poll_frequency: int
|
||||
httpx_client: httpx.AsyncClient
|
||||
web_url: str | None
|
||||
access_token_hard_timeout: timedelta | None
|
||||
|
||||
async def search_app_conversations(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
|
||||
page_id: str | None = None,
|
||||
limit: int = 20,
|
||||
) -> AppConversationPage:
|
||||
"""Search for sandboxed conversations."""
|
||||
page = await self.app_conversation_info_service.search_app_conversation_info(
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
sort_order=sort_order,
|
||||
page_id=page_id,
|
||||
limit=limit,
|
||||
)
|
||||
conversations: list[AppConversation] = await self._build_app_conversations(
|
||||
page.items
|
||||
) # type: ignore
|
||||
return AppConversationPage(items=conversations, next_page_id=page.next_page_id)
|
||||
|
||||
async def count_app_conversations(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
) -> int:
|
||||
return await self.app_conversation_info_service.count_app_conversation_info(
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
)
|
||||
|
||||
async def get_app_conversation(
|
||||
self, conversation_id: UUID
|
||||
) -> AppConversation | None:
|
||||
info = await self.app_conversation_info_service.get_app_conversation_info(
|
||||
conversation_id
|
||||
)
|
||||
result = await self._build_app_conversations([info])
|
||||
return result[0]
|
||||
|
||||
async def batch_get_app_conversations(
|
||||
self, conversation_ids: list[UUID]
|
||||
) -> list[AppConversation | None]:
|
||||
info = await self.app_conversation_info_service.batch_get_app_conversation_info(
|
||||
conversation_ids
|
||||
)
|
||||
conversations = await self._build_app_conversations(info)
|
||||
return conversations
|
||||
|
||||
async def start_app_conversation(
|
||||
self, request: AppConversationStartRequest
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
async for task in self._start_app_conversation(request):
|
||||
await self.app_conversation_start_task_service.save_app_conversation_start_task(
|
||||
task
|
||||
)
|
||||
yield task
|
||||
|
||||
async def _start_app_conversation(
|
||||
self, request: AppConversationStartRequest
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
# Create and yield the start task
|
||||
user_id = await self.user_context.get_user_id()
|
||||
task = AppConversationStartTask(
|
||||
created_by_user_id=user_id,
|
||||
request=request,
|
||||
)
|
||||
yield task
|
||||
|
||||
try:
|
||||
async for updated_task in self._wait_for_sandbox_start(task):
|
||||
yield updated_task
|
||||
|
||||
# Get the sandbox
|
||||
sandbox_id = task.sandbox_id
|
||||
assert sandbox_id is not None
|
||||
sandbox = await self.sandbox_service.get_sandbox(sandbox_id)
|
||||
assert sandbox is not None
|
||||
agent_server_url = self._get_agent_server_url(sandbox)
|
||||
|
||||
# Get the working dir
|
||||
sandbox_spec = await self.sandbox_spec_service.get_sandbox_spec(
|
||||
sandbox.sandbox_spec_id
|
||||
)
|
||||
assert sandbox_spec is not None
|
||||
|
||||
# Run setup scripts
|
||||
workspace = AsyncRemoteWorkspace(
|
||||
working_dir=sandbox_spec.working_dir,
|
||||
server_url=agent_server_url,
|
||||
session_api_key=sandbox.session_api_key,
|
||||
)
|
||||
async for updated_task in self.run_setup_scripts(task, workspace):
|
||||
yield updated_task
|
||||
|
||||
# Build the start request
|
||||
start_conversation_request = (
|
||||
await self._build_start_conversation_request_for_user(
|
||||
request.initial_message,
|
||||
request.git_provider,
|
||||
sandbox_spec.working_dir,
|
||||
)
|
||||
)
|
||||
|
||||
# update status
|
||||
task.status = AppConversationStartTaskStatus.STARTING_CONVERSATION
|
||||
task.agent_server_url = agent_server_url
|
||||
yield task
|
||||
|
||||
# Start conversation...
|
||||
response = await self.httpx_client.post(
|
||||
f'{agent_server_url}/api/conversations',
|
||||
json=start_conversation_request.model_dump(
|
||||
context={'expose_secrets': True}
|
||||
),
|
||||
headers={'X-Session-API-Key': sandbox.session_api_key},
|
||||
timeout=self.sandbox_startup_timeout,
|
||||
)
|
||||
response.raise_for_status()
|
||||
info = ConversationInfo.model_validate(response.json())
|
||||
|
||||
# Store info...
|
||||
user_id = await self.user_context.get_user_id()
|
||||
app_conversation_info = AppConversationInfo(
|
||||
id=info.id,
|
||||
# TODO: As of writing, StartConversationRequest from AgentServer does not have a title
|
||||
title=f'Conversation {info.id}',
|
||||
sandbox_id=sandbox.id,
|
||||
created_by_user_id=user_id,
|
||||
llm_model=start_conversation_request.agent.llm.model,
|
||||
# Git parameters
|
||||
selected_repository=request.selected_repository,
|
||||
selected_branch=request.selected_branch,
|
||||
git_provider=request.git_provider,
|
||||
trigger=request.trigger,
|
||||
pr_number=request.pr_number,
|
||||
)
|
||||
await self.app_conversation_info_service.save_app_conversation_info(
|
||||
app_conversation_info
|
||||
)
|
||||
|
||||
# Update the start task
|
||||
task.status = AppConversationStartTaskStatus.READY
|
||||
task.app_conversation_id = info.id
|
||||
yield task
|
||||
|
||||
except Exception as exc:
|
||||
_logger.exception('Error starting conversation', stack_info=True)
|
||||
task.status = AppConversationStartTaskStatus.ERROR
|
||||
task.detail = str(exc)
|
||||
yield task
|
||||
|
||||
async def _build_app_conversations(
|
||||
self, app_conversation_infos: Sequence[AppConversationInfo | None]
|
||||
) -> list[AppConversation | None]:
|
||||
sandbox_id_to_conversation_ids = self._get_sandbox_id_to_conversation_ids(
|
||||
app_conversation_infos
|
||||
)
|
||||
|
||||
# Get referenced sandboxes in a single batch operation...
|
||||
sandboxes = await self.sandbox_service.batch_get_sandboxes(
|
||||
list(sandbox_id_to_conversation_ids)
|
||||
)
|
||||
sandboxes_by_id = {sandbox.id: sandbox for sandbox in sandboxes if sandbox}
|
||||
|
||||
# Gather the running conversations
|
||||
tasks = [
|
||||
self._get_live_conversation_info(
|
||||
sandbox, sandbox_id_to_conversation_ids.get(sandbox.id)
|
||||
)
|
||||
for sandbox in sandboxes
|
||||
if sandbox and sandbox.status == SandboxStatus.RUNNING
|
||||
]
|
||||
if tasks:
|
||||
sandbox_conversation_infos = await asyncio.gather(*tasks)
|
||||
else:
|
||||
sandbox_conversation_infos = []
|
||||
|
||||
# Collect the results into a single dictionary
|
||||
conversation_info_by_id = {}
|
||||
for conversation_infos in sandbox_conversation_infos:
|
||||
for conversation_info in conversation_infos:
|
||||
conversation_info_by_id[conversation_info.id] = conversation_info
|
||||
|
||||
# Build app_conversation from info
|
||||
result = [
|
||||
self._build_conversation(
|
||||
app_conversation_info,
|
||||
sandboxes_by_id.get(app_conversation_info.sandbox_id),
|
||||
conversation_info_by_id.get(app_conversation_info.id),
|
||||
)
|
||||
if app_conversation_info
|
||||
else None
|
||||
for app_conversation_info in app_conversation_infos
|
||||
]
|
||||
|
||||
return result
|
||||
|
||||
async def _get_live_conversation_info(
|
||||
self,
|
||||
sandbox: SandboxInfo,
|
||||
conversation_ids: list[str],
|
||||
) -> list[ConversationInfo]:
|
||||
"""Get agent status for multiple conversations from the Agent Server."""
|
||||
try:
|
||||
# Build the URL with query parameters
|
||||
agent_server_url = self._get_agent_server_url(sandbox)
|
||||
url = f'{agent_server_url.rstrip("/")}/api/conversations'
|
||||
params = {'ids': conversation_ids}
|
||||
|
||||
# Set up headers
|
||||
headers = {}
|
||||
if sandbox.session_api_key:
|
||||
headers['X-Session-API-Key'] = sandbox.session_api_key
|
||||
|
||||
response = await self.httpx_client.get(url, params=params, headers=headers)
|
||||
response.raise_for_status()
|
||||
|
||||
data = response.json()
|
||||
conversation_info = _conversation_info_type_adapter.validate_python(data)
|
||||
conversation_info = [c for c in conversation_info if c]
|
||||
return conversation_info
|
||||
except Exception:
|
||||
# Not getting a status is not a fatal error - we just mark the conversation as stopped
|
||||
_logger.exception(
|
||||
f'Error getting conversation status from sandbox {sandbox.id}',
|
||||
stack_info=True,
|
||||
)
|
||||
return []
|
||||
|
||||
def _build_conversation(
|
||||
self,
|
||||
app_conversation_info: AppConversationInfo | None,
|
||||
sandbox: SandboxInfo | None,
|
||||
conversation_info: ConversationInfo | None,
|
||||
) -> AppConversation | None:
|
||||
if app_conversation_info is None:
|
||||
return None
|
||||
sandbox_status = sandbox.status if sandbox else SandboxStatus.MISSING
|
||||
agent_status = conversation_info.agent_status if conversation_info else None
|
||||
conversation_url = None
|
||||
session_api_key = None
|
||||
if sandbox and sandbox.exposed_urls:
|
||||
conversation_url = next(
|
||||
(
|
||||
exposed_url.url
|
||||
for exposed_url in sandbox.exposed_urls
|
||||
if exposed_url.name == AGENT_SERVER
|
||||
),
|
||||
None,
|
||||
)
|
||||
if conversation_url:
|
||||
conversation_url += f'/api/conversations/{app_conversation_info.id.hex}'
|
||||
session_api_key = sandbox.session_api_key
|
||||
|
||||
return AppConversation(
|
||||
**app_conversation_info.model_dump(),
|
||||
sandbox_status=sandbox_status,
|
||||
agent_status=agent_status,
|
||||
conversation_url=conversation_url,
|
||||
session_api_key=session_api_key,
|
||||
)
|
||||
|
||||
def _get_sandbox_id_to_conversation_ids(
|
||||
self, stored_conversations: Sequence[AppConversationInfo | None]
|
||||
):
|
||||
result = defaultdict(list)
|
||||
for stored_conversation in stored_conversations:
|
||||
if stored_conversation:
|
||||
result[stored_conversation.sandbox_id].append(stored_conversation.id)
|
||||
return result
|
||||
|
||||
async def _wait_for_sandbox_start(
|
||||
self, task: AppConversationStartTask
|
||||
) -> AsyncGenerator[AppConversationStartTask, None]:
|
||||
"""Wait for sandbox to start and return info."""
|
||||
|
||||
# Get the sandbox
|
||||
if not task.request.sandbox_id:
|
||||
sandbox = await self.sandbox_service.start_sandbox()
|
||||
task.sandbox_id = sandbox.id
|
||||
else:
|
||||
sandbox_info = await self.sandbox_service.get_sandbox(
|
||||
task.request.sandbox_id
|
||||
)
|
||||
if sandbox_info is None:
|
||||
raise SandboxError(f'Sandbox not found: {task.request.sandbox_id}')
|
||||
sandbox = sandbox_info
|
||||
|
||||
# Update the listener
|
||||
task.status = AppConversationStartTaskStatus.WAITING_FOR_SANDBOX
|
||||
task.sandbox_id = sandbox.id
|
||||
yield task
|
||||
|
||||
if sandbox.status == SandboxStatus.PAUSED:
|
||||
await self.sandbox_service.resume_sandbox(sandbox.id)
|
||||
if sandbox.status in (None, SandboxStatus.ERROR):
|
||||
raise SandboxError(f'Sandbox status: {sandbox.status}')
|
||||
if sandbox.status == SandboxStatus.RUNNING:
|
||||
return
|
||||
if sandbox.status != SandboxStatus.STARTING:
|
||||
raise SandboxError(f'Sandbox not startable: {sandbox.id}')
|
||||
|
||||
start = time()
|
||||
while time() - start <= self.sandbox_startup_timeout:
|
||||
await asyncio.sleep(self.sandbox_startup_poll_frequency)
|
||||
sandbox_info = await self.sandbox_service.get_sandbox(sandbox.id)
|
||||
if sandbox_info is None:
|
||||
raise SandboxError(f'Sandbox not found: {sandbox.id}')
|
||||
if sandbox_info.status not in (SandboxStatus.STARTING, SandboxStatus.RUNNING):
|
||||
raise SandboxError(f'Sandbox not startable: {sandbox.id}')
|
||||
if sandbox_info.status == SandboxStatus.RUNNING:
|
||||
return
|
||||
raise SandboxError(f'Sandbox failed to start: {sandbox.id}')
|
||||
|
||||
def _get_agent_server_url(self, sandbox: SandboxInfo) -> str:
|
||||
"""Get agent server url for running sandbox."""
|
||||
exposed_urls = sandbox.exposed_urls
|
||||
assert exposed_urls is not None
|
||||
agent_server_url = next(
|
||||
exposed_url.url
|
||||
for exposed_url in exposed_urls
|
||||
if exposed_url.name == AGENT_SERVER
|
||||
)
|
||||
return agent_server_url
|
||||
|
||||
async def _build_start_conversation_request_for_user(
|
||||
self,
|
||||
initial_message: SendMessageRequest | None,
|
||||
git_provider: ProviderType | None,
|
||||
working_dir: str,
|
||||
) -> StartConversationRequest:
|
||||
user = await self.user_context.get_user_info()
|
||||
|
||||
# Set up a secret for the git token
|
||||
secrets = await self.user_context.get_secrets()
|
||||
if git_provider:
|
||||
if self.web_url:
|
||||
# If there is a web url, then we create an access token to access it.
|
||||
# For security reasons, we are explicit here - only this user, and
|
||||
# only this provider, with a timeout
|
||||
access_token = self.jwt_service.create_jws_token(
|
||||
payload={
|
||||
'user_id': user.id,
|
||||
'provider_type': git_provider.value,
|
||||
},
|
||||
expires_in=self.access_token_hard_timeout,
|
||||
)
|
||||
secrets[GIT_TOKEN] = LookupSecret(
|
||||
url=self.web_url + '/ap/v1/webhooks/secrets',
|
||||
headers={'X-Access-Token': access_token},
|
||||
)
|
||||
else:
|
||||
# If there is no URL specified where the sandbox can access the app server
|
||||
# then we supply a static secret with the most recent value. Depending
|
||||
# on the type, this may eventually expire.
|
||||
static_token = await self.user_context.get_latest_token(git_provider)
|
||||
if static_token:
|
||||
secrets[GIT_TOKEN] = StaticSecret(value=SecretStr(static_token))
|
||||
|
||||
workspace = LocalWorkspace(working_dir=working_dir)
|
||||
|
||||
llm = LLM(
|
||||
model=user.llm_model,
|
||||
base_url=user.llm_base_url,
|
||||
api_key=user.llm_api_key,
|
||||
service_id='agent',
|
||||
)
|
||||
agent = get_default_agent(llm=llm)
|
||||
start_conversation_request = StartConversationRequest(
|
||||
agent=agent,
|
||||
workspace=workspace,
|
||||
confirmation_policy=AlwaysConfirm()
|
||||
if user.confirmation_mode
|
||||
else NeverConfirm(),
|
||||
initial_message=initial_message,
|
||||
secrets=secrets,
|
||||
)
|
||||
return start_conversation_request
|
||||
|
||||
|
||||
class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector):
|
||||
sandbox_startup_timeout: int = Field(
|
||||
default=120, description='The max timeout time for sandbox startup'
|
||||
)
|
||||
sandbox_startup_poll_frequency: int = Field(
|
||||
default=2, description='The frequency to poll for sandbox readiness'
|
||||
)
|
||||
init_git_in_empty_workspace: bool = Field(
|
||||
default=True,
|
||||
description='Whether to initialize a git repo when the workspace is empty',
|
||||
)
|
||||
access_token_hard_timeout: int | None = Field(
|
||||
default=14 * 86400,
|
||||
description=(
|
||||
'A security measure - the time after which git tokens may no longer '
|
||||
'be retrieved by a sandboxed conversation.'
|
||||
),
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[AppConversationService, None]:
|
||||
from openhands.app_server.config import (
|
||||
get_app_conversation_info_service,
|
||||
get_app_conversation_start_task_service,
|
||||
get_global_config,
|
||||
get_httpx_client,
|
||||
get_jwt_service,
|
||||
get_sandbox_service,
|
||||
get_sandbox_spec_service,
|
||||
get_user_context,
|
||||
)
|
||||
|
||||
async with (
|
||||
get_user_context(state, request) as user_context,
|
||||
get_sandbox_service(state, request) as sandbox_service,
|
||||
get_sandbox_spec_service(state, request) as sandbox_spec_service,
|
||||
get_app_conversation_info_service(
|
||||
state, request
|
||||
) as app_conversation_info_service,
|
||||
get_app_conversation_start_task_service(
|
||||
state, request
|
||||
) as app_conversation_start_task_service,
|
||||
get_jwt_service(state, request) as jwt_service,
|
||||
get_httpx_client(state, request) as httpx_client,
|
||||
):
|
||||
access_token_hard_timeout = None
|
||||
if self.access_token_hard_timeout:
|
||||
access_token_hard_timeout = timedelta(
|
||||
seconds=float(self.access_token_hard_timeout)
|
||||
)
|
||||
config = get_global_config()
|
||||
|
||||
# If no web url has been set and we are using docker, we can use host.docker.internal
|
||||
web_url = config.web_url
|
||||
if web_url is None:
|
||||
if isinstance(sandbox_service, DockerSandboxService):
|
||||
web_url = f'http://host.docker.internal:{sandbox_service.host_port}'
|
||||
|
||||
yield LiveStatusAppConversationService(
|
||||
init_git_in_empty_workspace=self.init_git_in_empty_workspace,
|
||||
user_context=user_context,
|
||||
sandbox_service=sandbox_service,
|
||||
sandbox_spec_service=sandbox_spec_service,
|
||||
app_conversation_info_service=app_conversation_info_service,
|
||||
app_conversation_start_task_service=app_conversation_start_task_service,
|
||||
jwt_service=jwt_service,
|
||||
sandbox_startup_timeout=self.sandbox_startup_timeout,
|
||||
sandbox_startup_poll_frequency=self.sandbox_startup_poll_frequency,
|
||||
httpx_client=httpx_client,
|
||||
web_url=web_url,
|
||||
access_token_hard_timeout=access_token_hard_timeout,
|
||||
)
|
||||
@ -0,0 +1,398 @@
|
||||
"""SQL implementation of AppConversationService.
|
||||
|
||||
This implementation provides CRUD operations for sandboxed conversations focused purely
|
||||
on SQL operations:
|
||||
- Direct database access without permission checks
|
||||
- Batch operations for efficient data retrieval
|
||||
- Integration with SandboxService for sandbox information
|
||||
- HTTP client integration for agent status retrieval
|
||||
- Full async/await support using SQL async db_sessions
|
||||
|
||||
Security and permission checks are handled by wrapper services.
|
||||
|
||||
Key components:
|
||||
- SQLAppConversationInfoService: Main service class implementing all operations
|
||||
- SQLAppConversationInfoServiceInjector: Dependency injection resolver for FastAPI
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request
|
||||
from sqlalchemy import Column, DateTime, Float, Integer, Select, String, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
AppConversationInfoServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationInfo,
|
||||
AppConversationInfoPage,
|
||||
AppConversationSortOrder,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.app_server.utils.sql_utils import (
|
||||
Base,
|
||||
create_json_type_decorator,
|
||||
)
|
||||
from openhands.integrations.provider import ProviderType
|
||||
from openhands.sdk.llm import MetricsSnapshot
|
||||
from openhands.sdk.llm.utils.metrics import TokenUsage
|
||||
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StoredConversationMetadata(Base): # type: ignore
|
||||
__tablename__ = 'conversation_metadata'
|
||||
conversation_id = Column(
|
||||
String, primary_key=True, default=lambda: str(uuid.uuid4())
|
||||
)
|
||||
github_user_id = Column(String, nullable=True) # The GitHub user ID
|
||||
user_id = Column(String, nullable=False) # The Keycloak User ID
|
||||
selected_repository = Column(String, nullable=True)
|
||||
selected_branch = Column(String, nullable=True)
|
||||
git_provider = Column(
|
||||
String, nullable=True
|
||||
) # The git provider (GitHub, GitLab, etc.)
|
||||
title = Column(String, nullable=True)
|
||||
last_updated_at = Column(DateTime(timezone=True), default=utc_now) # type: ignore[attr-defined]
|
||||
created_at = Column(DateTime(timezone=True), default=utc_now) # type: ignore[attr-defined]
|
||||
|
||||
trigger = Column(String, nullable=True)
|
||||
pr_number = Column(create_json_type_decorator(list[int]))
|
||||
|
||||
# Cost and token metrics
|
||||
accumulated_cost = Column(Float, default=0.0)
|
||||
prompt_tokens = Column(Integer, default=0)
|
||||
completion_tokens = Column(Integer, default=0)
|
||||
total_tokens = Column(Integer, default=0)
|
||||
max_budget_per_task = Column(Float, nullable=True)
|
||||
cache_read_tokens = Column(Integer, default=0)
|
||||
cache_write_tokens = Column(Integer, default=0)
|
||||
reasoning_tokens = Column(Integer, default=0)
|
||||
context_window = Column(Integer, default=0)
|
||||
per_turn_token = Column(Integer, default=0)
|
||||
|
||||
# LLM model used for the conversation
|
||||
llm_model = Column(String, nullable=True)
|
||||
|
||||
conversation_version = Column(String, nullable=False, default='V0', index=True)
|
||||
sandbox_id = Column(String, nullable=True, index=True)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SQLAppConversationInfoService(AppConversationInfoService):
|
||||
"""SQL implementation of AppConversationInfoService focused on db operations.
|
||||
|
||||
This allows storing a record of a conversation even after its sandbox ceases to exist
|
||||
"""
|
||||
|
||||
db_session: AsyncSession
|
||||
user_context: UserContext
|
||||
|
||||
async def search_app_conversation_info(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> AppConversationInfoPage:
|
||||
"""Search for sandboxed conversations without permission checks."""
|
||||
query = await self._secure_select()
|
||||
|
||||
query = self._apply_filters(
|
||||
query=query,
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
)
|
||||
|
||||
# Add sort order
|
||||
if sort_order == AppConversationSortOrder.CREATED_AT:
|
||||
query = query.order_by(StoredConversationMetadata.created_at)
|
||||
elif sort_order == AppConversationSortOrder.CREATED_AT_DESC:
|
||||
query = query.order_by(StoredConversationMetadata.created_at.desc())
|
||||
elif sort_order == AppConversationSortOrder.UPDATED_AT:
|
||||
query = query.order_by(StoredConversationMetadata.last_updated_at)
|
||||
elif sort_order == AppConversationSortOrder.UPDATED_AT_DESC:
|
||||
query = query.order_by(StoredConversationMetadata.last_updated_at.desc())
|
||||
elif sort_order == AppConversationSortOrder.TITLE:
|
||||
query = query.order_by(StoredConversationMetadata.title)
|
||||
elif sort_order == AppConversationSortOrder.TITLE_DESC:
|
||||
query = query.order_by(StoredConversationMetadata.title.desc())
|
||||
|
||||
# Apply pagination
|
||||
if page_id is not None:
|
||||
try:
|
||||
offset = int(page_id)
|
||||
query = query.offset(offset)
|
||||
except ValueError:
|
||||
# If page_id is not a valid integer, start from beginning
|
||||
offset = 0
|
||||
else:
|
||||
offset = 0
|
||||
|
||||
# Apply limit and get one extra to check if there are more results
|
||||
query = query.limit(limit + 1)
|
||||
|
||||
result = await self.db_session.execute(query)
|
||||
rows = result.scalars().all()
|
||||
|
||||
# Check if there are more results
|
||||
has_more = len(rows) > limit
|
||||
if has_more:
|
||||
rows = rows[:limit]
|
||||
|
||||
items = [self._to_info(row) for row in rows]
|
||||
|
||||
# Calculate next page ID
|
||||
next_page_id = None
|
||||
if has_more:
|
||||
next_page_id = str(offset + limit)
|
||||
|
||||
return AppConversationInfoPage(items=items, next_page_id=next_page_id)
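Pagination here is a plain string-encoded offset: next_page_id is str(offset + limit) while more rows remain. A short sketch of walking every page, assuming an already-injected service instance named `service`; the helper function is hypothetical.

```python
# Sketch only: `service` is an already-injected SQLAppConversationInfoService.
async def iter_all_conversation_info(service, limit: int = 100):
    """Yield every AppConversationInfo visible to the current user, page by page."""
    page_id = None
    while True:
        page = await service.search_app_conversation_info(page_id=page_id, limit=limit)
        for info in page.items:
            yield info
        if page.next_page_id is None:  # next_page_id is the stringified offset
            return
        page_id = page.next_page_id
```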
|
||||
async def count_app_conversation_info(
|
||||
self,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
) -> int:
|
||||
"""Count sandboxed conversations matching the given filters."""
|
||||
query = select(func.count(StoredConversationMetadata.conversation_id))
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if user_id:
|
||||
query = query.where(
|
||||
StoredConversationMetadata.user_id == user_id
|
||||
)
|
||||
|
||||
query = self._apply_filters(
|
||||
query=query,
|
||||
title__contains=title__contains,
|
||||
created_at__gte=created_at__gte,
|
||||
created_at__lt=created_at__lt,
|
||||
updated_at__gte=updated_at__gte,
|
||||
updated_at__lt=updated_at__lt,
|
||||
)
|
||||
|
||||
result = await self.db_session.execute(query)
|
||||
count = result.scalar()
|
||||
return count or 0
|
||||
|
||||
def _apply_filters(
|
||||
self,
|
||||
query: Select,
|
||||
title__contains: str | None = None,
|
||||
created_at__gte: datetime | None = None,
|
||||
created_at__lt: datetime | None = None,
|
||||
updated_at__gte: datetime | None = None,
|
||||
updated_at__lt: datetime | None = None,
|
||||
) -> Select:
|
||||
# Apply the same filters as search_app_conversations
|
||||
conditions = []
|
||||
if title__contains is not None:
|
||||
conditions.append(
|
||||
StoredConversationMetadata.title.like(f'%{title__contains}%')
|
||||
)
|
||||
|
||||
if created_at__gte is not None:
|
||||
conditions.append(StoredConversationMetadata.created_at >= created_at__gte)
|
||||
|
||||
if created_at__lt is not None:
|
||||
conditions.append(StoredConversationMetadata.created_at < created_at__lt)
|
||||
|
||||
if updated_at__gte is not None:
|
||||
conditions.append(
|
||||
StoredConversationMetadata.last_updated_at >= updated_at__gte
|
||||
)
|
||||
|
||||
if updated_at__lt is not None:
|
||||
conditions.append(
|
||||
StoredConversationMetadata.last_updated_at < updated_at__lt
|
||||
)
|
||||
|
||||
if conditions:
|
||||
query = query.where(*conditions)
|
||||
return query
|
||||
|
||||
async def get_app_conversation_info(
|
||||
self, conversation_id: UUID
|
||||
) -> AppConversationInfo | None:
|
||||
query = await self._secure_select()
|
||||
query = query.where(
|
||||
StoredConversationMetadata.conversation_id == str(conversation_id)
|
||||
)
|
||||
result_set = await self.db_session.execute(query)
|
||||
result = result_set.scalar_one_or_none()
|
||||
if result:
|
||||
return self._to_info(result)
|
||||
return None
|
||||
|
||||
async def batch_get_app_conversation_info(
|
||||
self, conversation_ids: list[UUID]
|
||||
) -> list[AppConversationInfo | None]:
|
||||
conversation_id_strs = [
|
||||
str(conversation_id) for conversation_id in conversation_ids
|
||||
]
|
||||
query = await self._secure_select()
|
||||
query = query.where(
|
||||
StoredConversationMetadata.conversation_id.in_(conversation_id_strs)
|
||||
)
|
||||
result = await self.db_session.execute(query)
|
||||
rows = result.scalars().all()
|
||||
info_by_id = {info.conversation_id: info for info in rows if info}
|
||||
results: list[AppConversationInfo | None] = []
|
||||
for conversation_id in conversation_id_strs:
|
||||
info = info_by_id.get(conversation_id)
|
||||
if info:
|
||||
results.append(self._to_info(info))
|
||||
else:
|
||||
results.append(None)
|
||||
|
||||
return results
|
||||
|
||||
async def save_app_conversation_info(
|
||||
self, info: AppConversationInfo
|
||||
) -> AppConversationInfo:
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if user_id:
|
||||
query = select(StoredConversationMetadata).where(
|
||||
StoredConversationMetadata.conversation_id == info.id
|
||||
)
|
||||
result = await self.db_session.execute(query)
|
||||
existing = result.scalar_one_or_none()
|
||||
assert existing is None or existing.created_by_user_id == user_id
|
||||
|
||||
metrics = info.metrics or MetricsSnapshot()
|
||||
usage = metrics.accumulated_token_usage or TokenUsage()
|
||||
|
||||
stored = StoredConversationMetadata(
|
||||
conversation_id=str(info.id),
|
||||
github_user_id=None, # TODO: Should we add this to the conversation info?
|
||||
user_id=info.created_by_user_id or '',
|
||||
selected_repository=info.selected_repository,
|
||||
selected_branch=info.selected_branch,
|
||||
git_provider=info.git_provider.value if info.git_provider else None,
|
||||
title=info.title,
|
||||
last_updated_at=info.updated_at,
|
||||
created_at=info.created_at,
|
||||
trigger=info.trigger.value if info.trigger else None,
|
||||
pr_number=info.pr_number,
|
||||
# Cost and token metrics
|
||||
accumulated_cost=metrics.accumulated_cost,
|
||||
prompt_tokens=usage.prompt_tokens,
|
||||
completion_tokens=usage.completion_tokens,
|
||||
total_tokens=0,
|
||||
max_budget_per_task=metrics.max_budget_per_task,
|
||||
cache_read_tokens=usage.cache_read_tokens,
|
||||
cache_write_tokens=usage.cache_write_tokens,
|
||||
context_window=usage.context_window,
|
||||
per_turn_token=usage.per_turn_token,
|
||||
llm_model=info.llm_model,
|
||||
conversation_version='V1',
|
||||
sandbox_id=info.sandbox_id,
|
||||
)
|
||||
|
||||
await self.db_session.merge(stored)
|
||||
await self.db_session.commit()
|
||||
return info
|
||||
|
||||
async def _secure_select(self):
|
||||
query = select(StoredConversationMetadata).where(
|
||||
StoredConversationMetadata.conversation_version == 'V1'
|
||||
)
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if user_id:
|
||||
query = query.where(
|
||||
StoredConversationMetadata.user_id == user_id,
|
||||
)
|
||||
return query
|
||||
|
||||
def _to_info(self, stored: StoredConversationMetadata) -> AppConversationInfo:
|
||||
# V1 conversations should always have a sandbox_id
|
||||
sandbox_id = stored.sandbox_id
|
||||
assert sandbox_id is not None
|
||||
|
||||
# Rebuild token usage
|
||||
token_usage = TokenUsage(
|
||||
prompt_tokens=stored.prompt_tokens,
|
||||
completion_tokens=stored.completion_tokens,
|
||||
cache_read_tokens=stored.cache_read_tokens,
|
||||
cache_write_tokens=stored.cache_write_tokens,
|
||||
context_window=stored.context_window,
|
||||
per_turn_token=stored.per_turn_token,
|
||||
)
|
||||
|
||||
# Rebuild metrics object
|
||||
metrics = MetricsSnapshot(
|
||||
accumulated_cost=stored.accumulated_cost,
|
||||
max_budget_per_task=stored.max_budget_per_task,
|
||||
accumulated_token_usage=token_usage,
|
||||
)
|
||||
|
||||
# Get timestamps
|
||||
created_at = self._fix_timezone(stored.created_at)
|
||||
updated_at = self._fix_timezone(stored.last_updated_at)
|
||||
|
||||
return AppConversationInfo(
|
||||
id=UUID(stored.conversation_id),
|
||||
created_by_user_id=stored.user_id if stored.user_id else None,
|
||||
sandbox_id=stored.sandbox_id,
|
||||
selected_repository=stored.selected_repository,
|
||||
selected_branch=stored.selected_branch,
|
||||
git_provider=ProviderType(stored.git_provider)
|
||||
if stored.git_provider
|
||||
else None,
|
||||
title=stored.title,
|
||||
trigger=ConversationTrigger(stored.trigger) if stored.trigger else None,
|
||||
pr_number=stored.pr_number,
|
||||
llm_model=stored.llm_model,
|
||||
metrics=metrics,
|
||||
created_at=created_at,
|
||||
updated_at=updated_at,
|
||||
)
|
||||
|
||||
def _fix_timezone(self, value: datetime) -> datetime:
|
||||
"""Sqlite does not stpre timezones - and since we can't update the existing models
|
||||
we assume UTC if the timezone is missing."""
|
||||
if not value.tzinfo:
|
||||
value = value.replace(tzinfo=UTC)
|
||||
return value
|
||||
|
||||
|
||||
class SQLAppConversationInfoServiceInjector(AppConversationInfoServiceInjector):
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[AppConversationInfoService, None]:
|
||||
# Define inline to prevent circular lookup
|
||||
from openhands.app_server.config import (
|
||||
get_db_session,
|
||||
get_user_context,
|
||||
)
|
||||
|
||||
async with (
|
||||
get_user_context(state, request) as user_context,
|
||||
get_db_session(state, request) as db_session,
|
||||
):
|
||||
service = SQLAppConversationInfoService(
|
||||
db_session=db_session, user_context=user_context
|
||||
)
|
||||
yield service
|
||||
@ -0,0 +1,243 @@
|
||||
# pyright: reportArgumentType=false, reportAttributeAccessIssue=false, reportOptionalMemberAccess=false
|
||||
"""SQL implementation of AppConversationStartTaskService.
|
||||
|
||||
This implementation provides CRUD operations for conversation start tasks focused purely
|
||||
on SQL operations:
|
||||
- Direct database access without permission checks
|
||||
- Batch operations for efficient data retrieval
|
||||
- Full async/await support using SQL async sessions
|
||||
|
||||
Security and permission checks are handled by wrapper services.
|
||||
|
||||
Key components:
|
||||
- SQLAppConversationStartTaskService: Main service class implementing all operations
|
||||
- SQLAppConversationStartTaskServiceInjector: Dependency injection resolver for FastAPI
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request
|
||||
from sqlalchemy import UUID as SQLUUID
|
||||
from sqlalchemy import Column, Enum, String, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.agent_server.models import utc_now
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartRequest,
|
||||
AppConversationStartTask,
|
||||
AppConversationStartTaskPage,
|
||||
AppConversationStartTaskSortOrder,
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
|
||||
AppConversationStartTaskService,
|
||||
AppConversationStartTaskServiceInjector,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.utils.sql_utils import (
|
||||
Base,
|
||||
UtcDateTime,
|
||||
create_json_type_decorator,
|
||||
row2dict,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StoredAppConversationStartTask(Base): # type: ignore
|
||||
__tablename__ = 'app_conversation_start_task'
|
||||
id = Column(SQLUUID, primary_key=True)
|
||||
created_by_user_id = Column(String, index=True)
|
||||
status = Column(Enum(AppConversationStartTaskStatus), nullable=True)
|
||||
detail = Column(String, nullable=True)
|
||||
app_conversation_id = Column(SQLUUID, nullable=True)
|
||||
sandbox_id = Column(String, nullable=True)
|
||||
agent_server_url = Column(String, nullable=True)
|
||||
request = Column(create_json_type_decorator(AppConversationStartRequest))
|
||||
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
|
||||
updated_at = Column(UtcDateTime, onupdate=func.now(), index=True)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SQLAppConversationStartTaskService(AppConversationStartTaskService):
|
||||
"""SQL implementation of AppConversationStartTaskService focused on db operations.
|
||||
|
||||
This allows storing and retrieving conversation start tasks from the database."""
|
||||
|
||||
session: AsyncSession
|
||||
user_id: str | None = None
|
||||
|
||||
async def search_app_conversation_start_tasks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
sort_order: AppConversationStartTaskSortOrder = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> AppConversationStartTaskPage:
|
||||
"""Search for conversation start tasks."""
|
||||
query = select(StoredAppConversationStartTask)
|
||||
|
||||
# Apply user filter if user_id is set
|
||||
if self.user_id:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.created_by_user_id == self.user_id
|
||||
)
|
||||
|
||||
# Apply conversation_id filter
|
||||
if conversation_id__eq is not None:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.app_conversation_id
|
||||
== conversation_id__eq
|
||||
)
|
||||
|
||||
# Add sort order
|
||||
if sort_order == AppConversationStartTaskSortOrder.CREATED_AT:
|
||||
query = query.order_by(StoredAppConversationStartTask.created_at)
|
||||
elif sort_order == AppConversationStartTaskSortOrder.CREATED_AT_DESC:
|
||||
query = query.order_by(StoredAppConversationStartTask.created_at.desc())
|
||||
elif sort_order == AppConversationStartTaskSortOrder.UPDATED_AT:
|
||||
query = query.order_by(StoredAppConversationStartTask.updated_at)
|
||||
elif sort_order == AppConversationStartTaskSortOrder.UPDATED_AT_DESC:
|
||||
query = query.order_by(StoredAppConversationStartTask.updated_at.desc())
|
||||
|
||||
# Apply pagination
|
||||
if page_id is not None:
|
||||
try:
|
||||
offset = int(page_id)
|
||||
query = query.offset(offset)
|
||||
except ValueError:
|
||||
# If page_id is not a valid integer, start from beginning
|
||||
offset = 0
|
||||
else:
|
||||
offset = 0
|
||||
|
||||
# Apply limit and get one extra to check if there are more results
|
||||
query = query.limit(limit + 1)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
rows = result.scalars().all()
|
||||
|
||||
# Check if there are more results
|
||||
has_more = len(rows) > limit
|
||||
if has_more:
|
||||
rows = rows[:limit]
|
||||
|
||||
items = [AppConversationStartTask(**row2dict(row)) for row in rows]
|
||||
|
||||
# Calculate next page ID
|
||||
next_page_id = None
|
||||
if has_more:
|
||||
next_page_id = str(offset + limit)
|
||||
|
||||
return AppConversationStartTaskPage(items=items, next_page_id=next_page_id)
|
||||
|
||||
async def count_app_conversation_start_tasks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
) -> int:
|
||||
"""Count conversation start tasks."""
|
||||
query = select(func.count(StoredAppConversationStartTask.id))
|
||||
|
||||
# Apply user filter if user_id is set
|
||||
if self.user_id:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.created_by_user_id == self.user_id
|
||||
)
|
||||
|
||||
# Apply conversation_id filter
|
||||
if conversation_id__eq is not None:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.app_conversation_id
|
||||
== conversation_id__eq
|
||||
)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
count = result.scalar()
|
||||
return count or 0
|
||||
|
||||
async def batch_get_app_conversation_start_tasks(
|
||||
self, task_ids: list[UUID]
|
||||
) -> list[AppConversationStartTask | None]:
|
||||
"""Get a batch of start tasks, return None for any missing."""
|
||||
if not task_ids:
|
||||
return []
|
||||
|
||||
query = select(StoredAppConversationStartTask).where(
|
||||
StoredAppConversationStartTask.id.in_(task_ids)
|
||||
)
|
||||
if self.user_id:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.created_by_user_id == self.user_id
|
||||
)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
tasks_by_id = {task.id: task for task in result.scalars().all()}
|
||||
|
||||
# Return tasks in the same order as requested, with None for missing ones
|
||||
return [
|
||||
AppConversationStartTask(**row2dict(tasks_by_id[task_id]))
|
||||
if task_id in tasks_by_id
|
||||
else None
|
||||
for task_id in task_ids
|
||||
]
|
||||
|
||||
async def get_app_conversation_start_task(
|
||||
self, task_id: UUID
|
||||
) -> AppConversationStartTask | None:
|
||||
"""Get a single start task, returning None if missing."""
|
||||
query = select(StoredAppConversationStartTask).where(
|
||||
StoredAppConversationStartTask.id == task_id
|
||||
)
|
||||
if self.user_id:
|
||||
query = query.where(
|
||||
StoredAppConversationStartTask.created_by_user_id == self.user_id
|
||||
)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
stored_task = result.scalar_one_or_none()
|
||||
if stored_task:
|
||||
return AppConversationStartTask(**row2dict(stored_task))
|
||||
return None
|
||||
|
||||
async def save_app_conversation_start_task(
|
||||
self, task: AppConversationStartTask
|
||||
) -> AppConversationStartTask:
|
||||
if self.user_id:
|
||||
query = select(StoredAppConversationStartTask).where(
|
||||
StoredAppConversationStartTask.id == task.id
|
||||
)
|
||||
result = await self.session.execute(query)
|
||||
existing = result.scalar_one_or_none()
|
||||
assert existing is None or existing.created_by_user_id == self.user_id
|
||||
task.updated_at = utc_now()
|
||||
await self.session.merge(StoredAppConversationStartTask(**task.model_dump()))
|
||||
await self.session.commit()
|
||||
return task
|
||||
|
||||
|
||||
class SQLAppConversationStartTaskServiceInjector(
|
||||
AppConversationStartTaskServiceInjector
|
||||
):
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[AppConversationStartTaskService, None]:
|
||||
# Define inline to prevent circular lookup
|
||||
from openhands.app_server.config import (
|
||||
get_db_session,
|
||||
get_user_context,
|
||||
)
|
||||
|
||||
async with (
|
||||
get_user_context(state, request) as user_context,
|
||||
get_db_session(state, request) as db_session,
|
||||
):
|
||||
user_id = await user_context.get_user_id()
|
||||
service = SQLAppConversationStartTaskService(
|
||||
session=db_session, user_id=user_id
|
||||
)
|
||||
yield service
|
||||
149
openhands/app_server/app_lifespan/alembic.ini
Normal file
@ -0,0 +1,149 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts.
|
||||
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||
# format, relative to the token %(here)s which refers to the location of this
|
||||
# ini file
|
||||
script_location = %(here)s/alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory. for multiple paths, the path separator
|
||||
# is defined by "path_separator" below.
|
||||
prepend_sys_path = .
|
||||
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
|
||||
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to <script_location>/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "path_separator"
|
||||
# below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||
|
||||
# path_separator; This indicates what character is used to split lists of file
|
||||
# paths, including version_locations and prepend_sys_path within configparser
|
||||
# files such as alembic.ini.
|
||||
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||
# to provide os-dependent path splitting.
|
||||
#
|
||||
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||
# take place if path_separator is not present in alembic.ini. If this
|
||||
# option is omitted entirely, fallback logic is as follows:
|
||||
#
|
||||
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||
# behavior of splitting on spaces and/or commas.
|
||||
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||
# behavior of splitting on spaces, commas, or colons.
|
||||
#
|
||||
# Valid values for path_separator are:
|
||||
#
|
||||
# path_separator = :
|
||||
# path_separator = ;
|
||||
# path_separator = space
|
||||
# path_separator = newline
|
||||
#
|
||||
# Use os.pathsep. Default configuration used for new projects.
|
||||
path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# database URL. This is consumed by the user-maintained env.py script only.
|
||||
# other means of configuring database URLs may be customized within the env.py
|
||||
# file.
|
||||
# Note: The database URL is now configured dynamically in env.py using the DbSessionInjector
|
||||
# from get_global_config(), so this placeholder is not used.
|
||||
# sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
||||
# hooks = ruff
|
||||
# ruff.type = module
|
||||
# ruff.module = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Alternatively, use the exec runner to execute a binary found on your PATH
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration. This is also consumed by the user-maintained
|
||||
# env.py script only.
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
15
openhands/app_server/app_lifespan/alembic/README
Normal file
@ -0,0 +1,15 @@
# OpenHands App Server Alembic Integration

This Alembic integration keeps the SQLite database up to date in single-user deployments by managing schema migrations for the app_server models. Migrations are applied automatically on startup.

## Configuration

Uses `DbSessionInjector` from `get_global_config()` for database connectivity and auto-detects models from the declarative base in `openhands.app_server.utils.sql_utils.Base`.

## Key Commands

Generate migration from model changes:
```bash
cd openhands/app_server/app_lifespan
alembic revision --autogenerate -m 'Sync DB with Models'
```
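Applying the generated migrations by hand can help when debugging schema issues; normally this happens automatically when the app server starts. A minimal sketch, assuming the same working directory as the command above:

```bash
cd openhands/app_server/app_lifespan
alembic upgrade head
```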
115
openhands/app_server/app_lifespan/alembic/env.py
Normal file
@ -0,0 +1,115 @@
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import context
|
||||
|
||||
# Add the project root to the Python path so we can import OpenHands modules
|
||||
# From alembic/env.py, we need to go up 5 levels to reach the OpenHands project root
|
||||
project_root = Path(__file__).absolute().parent.parent.parent.parent.parent
|
||||
sys.path.insert(0, str(project_root))
|
||||
|
||||
# Import the Base metadata for autogenerate support
|
||||
# Import all models to ensure they are registered with the metadata
|
||||
# This is necessary for alembic autogenerate to detect all tables
|
||||
from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( # noqa: E402
|
||||
StoredConversationMetadata, # noqa: F401
|
||||
)
|
||||
from openhands.app_server.app_conversation.sql_app_conversation_start_task_service import ( # noqa: E402
|
||||
StoredAppConversationStartTask, # noqa: F401
|
||||
)
|
||||
from openhands.app_server.config import get_global_config # noqa: E402
|
||||
from openhands.app_server.event_callback.sql_event_callback_service import ( # noqa: E402
|
||||
StoredEventCallback, # noqa: F401
|
||||
)
|
||||
from openhands.app_server.sandbox.remote_sandbox_service import ( # noqa: E402
|
||||
StoredRemoteSandbox, # noqa: F401
|
||||
)
|
||||
from openhands.app_server.utils.sql_utils import Base # noqa: E402
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
import os
|
||||
|
||||
if os.path.exists(config.config_file_name):
|
||||
fileConfig(config.config_file_name)
|
||||
else:
|
||||
# Use basic logging configuration if config file doesn't exist
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
target_metadata = Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
# Get database URL from DbSessionInjector
|
||||
global_config = get_global_config()
|
||||
db_session = global_config.db_session
|
||||
|
||||
# Get the database URL from the DbSessionInjector
|
||||
if db_session.host:
|
||||
password_value = (
|
||||
db_session.password.get_secret_value() if db_session.password else ''
|
||||
)
|
||||
url = f'postgresql://{db_session.user}:{password_value}@{db_session.host}:{db_session.port}/{db_session.name}'
|
||||
else:
|
||||
url = f'sqlite:///{db_session.persistence_dir}/openhands.db'
|
||||
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={'paramstyle': 'named'},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
# Use the DbSessionInjector engine instead of creating a new one
|
||||
global_config = get_global_config()
|
||||
db_session = global_config.db_session
|
||||
connectable = db_session.get_db_engine()
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
28
openhands/app_server/app_lifespan/alembic/script.py.mako
Normal file
@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
268
openhands/app_server/app_lifespan/alembic/versions/001.py
Normal file
@ -0,0 +1,268 @@
|
||||
"""Sync DB with Models
|
||||
|
||||
Revision ID: 001
|
||||
Revises:
|
||||
Create Date: 2025-10-05 11:28:41.772294
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_result_models import (
|
||||
EventCallbackResultStatus,
|
||||
)
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '001'
|
||||
down_revision: Union[str, Sequence[str], None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
op.create_table(
|
||||
'app_conversation_start_task',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('created_by_user_id', sa.String(), nullable=True),
|
||||
sa.Column('status', sa.Enum(AppConversationStartTaskStatus), nullable=True),
|
||||
sa.Column('detail', sa.String(), nullable=True),
|
||||
sa.Column('app_conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('sandbox_id', sa.String(), nullable=True),
|
||||
sa.Column('agent_server_url', sa.String(), nullable=True),
|
||||
sa.Column('request', sa.JSON(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_created_at'),
|
||||
'app_conversation_start_task',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_created_by_user_id'),
|
||||
'app_conversation_start_task',
|
||||
['created_by_user_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_app_conversation_start_task_updated_at'),
|
||||
'app_conversation_start_task',
|
||||
['updated_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'event_callback',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('processor', sa.JSON(), nullable=True),
|
||||
sa.Column('event_kind', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_created_at'),
|
||||
'event_callback',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'event_callback_result',
|
||||
sa.Column('id', sa.UUID(), nullable=False),
|
||||
sa.Column('status', sa.Enum(EventCallbackResultStatus), nullable=True),
|
||||
sa.Column('event_callback_id', sa.UUID(), nullable=True),
|
||||
sa.Column('event_id', sa.UUID(), nullable=True),
|
||||
sa.Column('conversation_id', sa.UUID(), nullable=True),
|
||||
sa.Column('detail', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_conversation_id'),
|
||||
'event_callback_result',
|
||||
['conversation_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_created_at'),
|
||||
'event_callback_result',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_event_callback_id'),
|
||||
'event_callback_result',
|
||||
['event_callback_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_event_callback_result_event_id'),
|
||||
'event_callback_result',
|
||||
['event_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'v1_remote_sandbox',
|
||||
sa.Column('id', sa.String(), nullable=False),
|
||||
sa.Column('created_by_user_id', sa.String(), nullable=True),
|
||||
sa.Column('sandbox_spec_id', sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text('(CURRENT_TIMESTAMP)'),
|
||||
nullable=True,
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_created_at'),
|
||||
'v1_remote_sandbox',
|
||||
['created_at'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_created_by_user_id'),
|
||||
'v1_remote_sandbox',
|
||||
['created_by_user_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_v1_remote_sandbox_sandbox_spec_id'),
|
||||
'v1_remote_sandbox',
|
||||
['sandbox_spec_id'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
'conversation_metadata',
|
||||
sa.Column('conversation_id', sa.String(), nullable=False),
|
||||
sa.Column('github_user_id', sa.String(), nullable=True),
|
||||
sa.Column('user_id', sa.String(), nullable=False),
|
||||
sa.Column('selected_repository', sa.String(), nullable=True),
|
||||
sa.Column('selected_branch', sa.String(), nullable=True),
|
||||
sa.Column('git_provider', sa.String(), nullable=True),
|
||||
sa.Column('title', sa.String(), nullable=True),
|
||||
sa.Column('last_updated_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column('trigger', sa.String(), nullable=True),
|
||||
sa.Column('pr_number', sa.JSON(), nullable=True),
|
||||
sa.Column('accumulated_cost', sa.Float(), nullable=True),
|
||||
sa.Column('prompt_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('completion_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('total_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('max_budget_per_task', sa.Float(), nullable=True),
|
||||
sa.Column('cache_read_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('cache_write_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('reasoning_tokens', sa.Integer(), nullable=True),
|
||||
sa.Column('context_window', sa.Integer(), nullable=True),
|
||||
sa.Column('per_turn_token', sa.Integer(), nullable=True),
|
||||
sa.Column('llm_model', sa.String(), nullable=True),
|
||||
sa.Column('conversation_version', sa.String(), nullable=False),
|
||||
sa.Column('sandbox_id', sa.String(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('conversation_id'),
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_conversation_metadata_conversation_version'),
|
||||
'conversation_metadata',
|
||||
['conversation_version'],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_conversation_metadata_sandbox_id'),
|
||||
'conversation_metadata',
|
||||
['sandbox_id'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
op.drop_index(
|
||||
op.f('ix_conversation_metadata_sandbox_id'), table_name='conversation_metadata'
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_conversation_metadata_conversation_version'),
|
||||
table_name='conversation_metadata',
|
||||
)
|
||||
op.drop_table('conversation_metadata')
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_sandbox_spec_id'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_created_by_user_id'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_v1_remote_sandbox_created_at'), table_name='v1_remote_sandbox'
|
||||
)
|
||||
op.drop_table('v1_remote_sandbox')
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_event_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_event_callback_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_created_at'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_event_callback_result_conversation_id'),
|
||||
table_name='event_callback_result',
|
||||
)
|
||||
op.drop_table('event_callback_result')
|
||||
op.drop_index(op.f('ix_event_callback_created_at'), table_name='event_callback')
|
||||
op.drop_table('event_callback')
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_updated_at'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_created_by_user_id'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_start_task_created_at'),
|
||||
table_name='app_conversation_start_task',
|
||||
)
|
||||
op.drop_table('app_conversation_start_task')
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_info_updated_at'),
|
||||
table_name='app_conversation_info',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_info_sandbox_id'),
|
||||
table_name='app_conversation_info',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_info_created_by_user_id'),
|
||||
table_name='app_conversation_info',
|
||||
)
|
||||
op.drop_index(
|
||||
op.f('ix_app_conversation_info_created_at'),
|
||||
table_name='app_conversation_info',
|
||||
)
|
||||
op.drop_table('app_conversation_info')
|
||||
21
openhands/app_server/app_lifespan/app_lifespan_service.py
Normal file
@ -0,0 +1,21 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
|
||||
class AppLifespanService(DiscriminatedUnionMixin, ABC):
|
||||
def lifespan(self, api: FastAPI):
|
||||
"""Return lifespan wrapper."""
|
||||
return self
|
||||
|
||||
@abstractmethod
|
||||
async def __aenter__(self):
|
||||
"""Open lifespan."""
|
||||
|
||||
@abstractmethod
|
||||
async def __aexit__(self, exc_type, exc_value, traceback):
|
||||
"""Close lifespan."""
|
||||
@ -0,0 +1,38 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
|
||||
from openhands.app_server.app_lifespan.app_lifespan_service import AppLifespanService
|
||||
|
||||
|
||||
class OssAppLifespanService(AppLifespanService):
|
||||
run_alembic_on_startup: bool = True
|
||||
|
||||
async def __aenter__(self):
|
||||
if self.run_alembic_on_startup:
|
||||
self.run_alembic()
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_value, traceback):
|
||||
pass
|
||||
|
||||
def run_alembic(self):
|
||||
# Run alembic upgrade head to ensure database is up to date
|
||||
alembic_dir = Path(__file__).parent / 'alembic'
|
||||
alembic_ini = alembic_dir / 'alembic.ini'
|
||||
|
||||
# Create alembic config with absolute paths
|
||||
alembic_cfg = Config(str(alembic_ini))
|
||||
alembic_cfg.set_main_option('script_location', str(alembic_dir))
|
||||
|
||||
# Change to alembic directory for the command execution
|
||||
original_cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir(str(alembic_dir.parent))
|
||||
command.upgrade(alembic_cfg, 'head')
|
||||
finally:
|
||||
os.chdir(original_cwd)
|
||||
348
openhands/app_server/config.py
Normal file
@ -0,0 +1,348 @@
|
||||
"""Configuration for the OpenHands App Server."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import AsyncContextManager
|
||||
|
||||
import httpx
|
||||
from fastapi import Depends, Request
|
||||
from pydantic import Field
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.agent_server.env_parser import from_env
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
AppConversationInfoServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_service import (
|
||||
AppConversationService,
|
||||
AppConversationServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
|
||||
AppConversationStartTaskService,
|
||||
AppConversationStartTaskServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_lifespan.app_lifespan_service import AppLifespanService
|
||||
from openhands.app_server.app_lifespan.oss_app_lifespan_service import (
|
||||
OssAppLifespanService,
|
||||
)
|
||||
from openhands.app_server.event.event_service import EventService, EventServiceInjector
|
||||
from openhands.app_server.event_callback.event_callback_service import (
|
||||
EventCallbackService,
|
||||
EventCallbackServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_service import (
|
||||
SandboxService,
|
||||
SandboxServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import (
|
||||
SandboxSpecService,
|
||||
SandboxSpecServiceInjector,
|
||||
)
|
||||
from openhands.app_server.services.db_session_injector import (
|
||||
DbSessionInjector,
|
||||
)
|
||||
from openhands.app_server.services.httpx_client_injector import HttpxClientInjector
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.services.jwt_service import JwtService, JwtServiceInjector
|
||||
from openhands.app_server.user.user_context import UserContext, UserContextInjector
|
||||
from openhands.sdk.utils.models import OpenHandsModel
|
||||
|
||||
|
||||
def get_default_persistence_dir() -> Path:
|
||||
# Recheck env because this function is also used to generate other defaults
|
||||
persistence_dir = os.getenv('OH_PERSISTENCE_DIR')
|
||||
|
||||
if persistence_dir:
|
||||
result = Path(persistence_dir)
|
||||
else:
|
||||
result = Path.home() / '.openhands'
|
||||
|
||||
result.mkdir(parents=True, exist_ok=True)
|
||||
return result
|
||||
|
||||
|
||||
def get_default_web_url() -> str | None:
|
||||
"""Get legacy web host parameter.
|
||||
|
||||
If present, we assume we are running under https."""
|
||||
web_host = os.getenv('WEB_HOST')
|
||||
if not web_host:
|
||||
return None
|
||||
return f'https://{web_host}'
|
||||
|
||||
|
||||
def _get_default_lifespan():
|
||||
# Check legacy parameters for saas mode. If we are in SAAS mode do not apply
|
||||
# OSS alembic migrations
|
||||
if 'saas' in (os.getenv('OPENHANDS_CONFIG_CLS') or '').lower():
|
||||
return None
|
||||
return OssAppLifespanService()
|
||||
|
||||
|
||||
class AppServerConfig(OpenHandsModel):
|
||||
persistence_dir: Path = Field(default_factory=get_default_persistence_dir)
|
||||
web_url: str | None = Field(
|
||||
default_factory=get_default_web_url,
|
||||
description='The URL where OpenHands is running (e.g., http://localhost:3000)',
|
||||
)
|
||||
# Dependency Injection Injectors
|
||||
event: EventServiceInjector | None = None
|
||||
event_callback: EventCallbackServiceInjector | None = None
|
||||
sandbox: SandboxServiceInjector | None = None
|
||||
sandbox_spec: SandboxSpecServiceInjector | None = None
|
||||
app_conversation_info: AppConversationInfoServiceInjector | None = None
|
||||
app_conversation_start_task: AppConversationStartTaskServiceInjector | None = None
|
||||
app_conversation: AppConversationServiceInjector | None = None
|
||||
user: UserContextInjector | None = None
|
||||
jwt: JwtServiceInjector | None = None
|
||||
httpx: HttpxClientInjector = Field(default_factory=HttpxClientInjector)
|
||||
db_session: DbSessionInjector = Field(
|
||||
default_factory=lambda: DbSessionInjector(
|
||||
persistence_dir=get_default_persistence_dir()
|
||||
)
|
||||
)
|
||||
|
||||
# Services
|
||||
lifespan: AppLifespanService = Field(default_factory=_get_default_lifespan)
|
||||
|
||||
|
||||
def config_from_env() -> AppServerConfig:
|
||||
# Import defaults...
|
||||
from openhands.app_server.app_conversation.live_status_app_conversation_service import ( # noqa: E501
|
||||
LiveStatusAppConversationServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( # noqa: E501
|
||||
SQLAppConversationInfoServiceInjector,
|
||||
)
|
||||
from openhands.app_server.app_conversation.sql_app_conversation_start_task_service import ( # noqa: E501
|
||||
SQLAppConversationStartTaskServiceInjector,
|
||||
)
|
||||
from openhands.app_server.event.filesystem_event_service import (
|
||||
FilesystemEventServiceInjector,
|
||||
)
|
||||
from openhands.app_server.event_callback.sql_event_callback_service import (
|
||||
SQLEventCallbackServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.docker_sandbox_service import (
|
||||
DockerSandboxServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.docker_sandbox_spec_service import (
|
||||
DockerSandboxSpecServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.remote_sandbox_service import (
|
||||
RemoteSandboxServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.remote_sandbox_spec_service import (
|
||||
RemoteSandboxSpecServiceInjector,
|
||||
)
|
||||
from openhands.app_server.user.auth_user_context import (
|
||||
AuthUserContextInjector,
|
||||
)
|
||||
|
||||
config: AppServerConfig = from_env(AppServerConfig, 'OH') # type: ignore
|
||||
|
||||
if config.event is None:
|
||||
config.event = FilesystemEventServiceInjector()
|
||||
|
||||
if config.event_callback is None:
|
||||
config.event_callback = SQLEventCallbackServiceInjector()
|
||||
|
||||
if config.sandbox is None:
|
||||
# Legacy fallback
|
||||
if os.getenv('RUNTIME') == 'remote':
|
||||
config.sandbox = RemoteSandboxServiceInjector(
|
||||
api_key=os.environ['SANDBOX_API_KEY'],
|
||||
api_url=os.environ['SANDBOX_REMOTE_RUNTIME_API_URL'],
|
||||
)
|
||||
else:
|
||||
config.sandbox = DockerSandboxServiceInjector()
|
||||
|
||||
if config.sandbox_spec is None:
|
||||
if os.getenv('RUNTIME') == 'remote':
|
||||
config.sandbox_spec = RemoteSandboxSpecServiceInjector()
|
||||
else:
|
||||
config.sandbox_spec = DockerSandboxSpecServiceInjector()
|
||||
|
||||
if config.app_conversation_info is None:
|
||||
config.app_conversation_info = SQLAppConversationInfoServiceInjector()
|
||||
|
||||
if config.app_conversation_start_task is None:
|
||||
config.app_conversation_start_task = (
|
||||
SQLAppConversationStartTaskServiceInjector()
|
||||
)
|
||||
|
||||
if config.app_conversation is None:
|
||||
config.app_conversation = LiveStatusAppConversationServiceInjector()
|
||||
|
||||
if config.user is None:
|
||||
config.user = AuthUserContextInjector()
|
||||
|
||||
if config.jwt is None:
|
||||
config.jwt = JwtServiceInjector(persistence_dir=config.persistence_dir)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
_global_config: AppServerConfig | None = None
|
||||
|
||||
|
||||
def get_global_config() -> AppServerConfig:
|
||||
"""Get the default local server config shared across the server."""
|
||||
global _global_config
|
||||
if _global_config is None:
|
||||
# Load configuration from environment...
|
||||
_global_config = config_from_env()
|
||||
|
||||
return _global_config # type: ignore
|
||||
|
||||
|
||||
def get_event_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[EventService]:
|
||||
injector = get_global_config().event
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_event_callback_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[EventCallbackService]:
|
||||
injector = get_global_config().event_callback
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_sandbox_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[SandboxService]:
|
||||
injector = get_global_config().sandbox
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_sandbox_spec_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[SandboxSpecService]:
|
||||
injector = get_global_config().sandbox_spec
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_app_conversation_info_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[AppConversationInfoService]:
|
||||
injector = get_global_config().app_conversation_info
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_app_conversation_start_task_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[AppConversationStartTaskService]:
|
||||
injector = get_global_config().app_conversation_start_task
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_app_conversation_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[AppConversationService]:
|
||||
injector = get_global_config().app_conversation
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_user_context(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[UserContext]:
|
||||
injector = get_global_config().user
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_httpx_client(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[httpx.AsyncClient]:
|
||||
return get_global_config().httpx.context(state, request)
|
||||
|
||||
|
||||
def get_jwt_service(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[JwtService]:
|
||||
injector = get_global_config().jwt
|
||||
assert injector is not None
|
||||
return injector.context(state, request)
|
||||
|
||||
|
||||
def get_db_session(
|
||||
state: InjectorState, request: Request | None = None
|
||||
) -> AsyncContextManager[AsyncSession]:
|
||||
return get_global_config().db_session.context(state, request)
|
||||
|
||||
|
||||
def get_app_lifespan_service() -> AppLifespanService:
|
||||
config = get_global_config()
|
||||
return config.lifespan
|
||||
|
||||
|
||||
def depends_event_service():
|
||||
injector = get_global_config().event
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_event_callback_service():
|
||||
injector = get_global_config().event_callback
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_sandbox_service():
|
||||
injector = get_global_config().sandbox
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_sandbox_spec_service():
|
||||
injector = get_global_config().sandbox_spec
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_app_conversation_info_service():
|
||||
injector = get_global_config().app_conversation_info
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_app_conversation_start_task_service():
|
||||
injector = get_global_config().app_conversation_start_task
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_app_conversation_service():
|
||||
injector = get_global_config().app_conversation
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_user_context():
|
||||
injector = get_global_config().user
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_httpx_client():
|
||||
return Depends(get_global_config().httpx.depends)
|
||||
|
||||
|
||||
def depends_jwt_service():
|
||||
injector = get_global_config().jwt
|
||||
assert injector is not None
|
||||
return Depends(injector.depends)
|
||||
|
||||
|
||||
def depends_db_session():
|
||||
return Depends(get_global_config().db_session.depends)
|
||||
43
openhands/app_server/errors.py
Normal file
@ -0,0 +1,43 @@
|
||||
from typing import Any
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
|
||||
class OpenHandsError(HTTPException):
|
||||
"""General Error"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
detail: Any = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
):
|
||||
super().__init__(status_code=status_code, detail=detail, headers=headers)
|
||||
|
||||
|
||||
class AuthError(OpenHandsError):
|
||||
"""Error in authentication."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
detail: Any = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
status_code: int = status.HTTP_401_UNAUTHORIZED,
|
||||
):
|
||||
super().__init__(status_code=status_code, detail=detail, headers=headers)
|
||||
|
||||
|
||||
class PermissionsError(OpenHandsError):
|
||||
"""Error in permissions."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
detail: Any = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
status_code: int = status.HTTP_403_FORBIDDEN,
|
||||
):
|
||||
super().__init__(status_code=status_code, detail=detail, headers=headers)
|
||||
|
||||
|
||||
class SandboxError(OpenHandsError):
|
||||
"""Error in Sandbox."""
|
||||
21
openhands/app_server/event/README.md
Normal file
@ -0,0 +1,21 @@
# Event Management

Handles event storage, retrieval, and streaming for the OpenHands app server.

## Overview

This module provides services for managing events within conversations, including event persistence, querying, and real-time streaming capabilities.

## Key Components

- **EventService**: Abstract service for event CRUD operations
- **FilesystemEventService**: File-based event storage implementation
- **EventRouter**: FastAPI router for event-related endpoints

## Features

- Event storage and retrieval by conversation ID
- Event filtering by kind, timestamp, and other criteria
- Sorting support and pagination for large event sets
- Real-time event streaming capabilities
- Multiple storage backend support (filesystem, database)
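As a rough illustration of the search and filtering features listed above, a request against the event router might look like the sketch below; the host, port, and any mount prefix are assumptions about your deployment, and `<conversation-uuid>` is a placeholder:

```bash
# Fetch up to 10 events belonging to one conversation, using the router's default sort order
curl "http://localhost:3000/events/search?conversation_id__eq=<conversation-uuid>&limit=10"
```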
110
openhands/app_server/event/event_router.py
Normal file
@ -0,0 +1,110 @@
|
||||
"""Event router for OpenHands Server."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
from openhands.agent_server.models import EventPage, EventSortOrder
|
||||
from openhands.app_server.config import depends_event_service
|
||||
from openhands.app_server.event.event_service import EventService
|
||||
from openhands.app_server.event_callback.event_callback_models import EventKind
|
||||
from openhands.sdk import Event
|
||||
|
||||
router = APIRouter(prefix='/events', tags=['Events'])
|
||||
event_service_dependency = depends_event_service()
|
||||
|
||||
|
||||
# Read methods
|
||||
|
||||
|
||||
@router.get('/search')
|
||||
async def search_events(
|
||||
conversation_id__eq: Annotated[
|
||||
UUID | None,
|
||||
Query(title='Optional filter by conversation ID'),
|
||||
] = None,
|
||||
kind__eq: Annotated[
|
||||
EventKind | None,
|
||||
Query(title='Optional filter by event kind'),
|
||||
] = None,
|
||||
timestamp__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Optional filter by timestamp greater than or equal to'),
|
||||
] = None,
|
||||
timestamp__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Optional filter by timestamp less than'),
|
||||
] = None,
|
||||
sort_order: Annotated[
|
||||
EventSortOrder,
|
||||
Query(title='Sort order for results'),
|
||||
] = EventSortOrder.TIMESTAMP,
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int,
|
||||
Query(title='The max number of results in the page', gt=0, lte=100),
|
||||
] = 100,
|
||||
event_service: EventService = event_service_dependency,
|
||||
) -> EventPage:
|
||||
"""Search / List events."""
|
||||
assert limit > 0
|
||||
assert limit <= 100
|
||||
return await event_service.search_events(
|
||||
conversation_id__eq=conversation_id__eq,
|
||||
kind__eq=kind__eq,
|
||||
timestamp__gte=timestamp__gte,
|
||||
timestamp__lt=timestamp__lt,
|
||||
sort_order=sort_order,
|
||||
page_id=page_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
|
||||
@router.get('/count')
|
||||
async def count_events(
|
||||
conversation_id__eq: Annotated[
|
||||
UUID | None,
|
||||
Query(title='Optional filter by conversation ID'),
|
||||
] = None,
|
||||
kind__eq: Annotated[
|
||||
EventKind | None,
|
||||
Query(title='Optional filter by event kind'),
|
||||
] = None,
|
||||
timestamp__gte: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Optional filter by timestamp greater than or equal to'),
|
||||
] = None,
|
||||
timestamp__lt: Annotated[
|
||||
datetime | None,
|
||||
Query(title='Optional filter by timestamp less than'),
|
||||
] = None,
|
||||
sort_order: Annotated[
|
||||
EventSortOrder,
|
||||
Query(title='Sort order for results'),
|
||||
] = EventSortOrder.TIMESTAMP,
|
||||
event_service: EventService = event_service_dependency,
|
||||
) -> int:
|
||||
"""Count events matching the given filters."""
|
||||
return await event_service.count_events(
|
||||
conversation_id__eq=conversation_id__eq,
|
||||
kind__eq=kind__eq,
|
||||
timestamp__gte=timestamp__gte,
|
||||
timestamp__lt=timestamp__lt,
|
||||
sort_order=sort_order,
|
||||
)
|
||||
|
||||
|
||||
@router.get('')
|
||||
async def batch_get_events(
|
||||
id: Annotated[list[str], Query()],
|
||||
event_service: EventService = event_service_dependency,
|
||||
) -> list[Event | None]:
|
||||
"""Get a batch of events given their ids, returning null for any missing event."""
|
||||
assert len(id) <= 100
|
||||
events = await event_service.batch_get_events(id)
|
||||
return events
|
||||
59
openhands/app_server/event/event_service.py
Normal file
@ -0,0 +1,59 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from openhands.agent_server.models import EventPage, EventSortOrder
|
||||
from openhands.app_server.event_callback.event_callback_models import EventKind
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk import Event
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EventService(ABC):
|
||||
"""Event Service for getting events."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_event(self, event_id: str) -> Event | None:
|
||||
"""Given an id, retrieve an event."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> EventPage:
|
||||
"""Search events matching the given filters."""
|
||||
|
||||
@abstractmethod
|
||||
async def count_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
|
||||
) -> int:
|
||||
"""Count events matching the given filters."""
|
||||
|
||||
@abstractmethod
|
||||
async def save_event(self, conversation_id: UUID, event: Event):
|
||||
"""Save an event. Internal method intended not be part of the REST api."""
|
||||
|
||||
async def batch_get_events(self, event_ids: list[str]) -> list[Event | None]:
|
||||
"""Given a list of ids, get events (Or none for any which were not found)."""
|
||||
return await asyncio.gather(
|
||||
*[self.get_event(event_id) for event_id in event_ids]
|
||||
)
|
||||
|
||||
|
||||
class EventServiceInjector(DiscriminatedUnionMixin, Injector[EventService], ABC):
|
||||
pass
|
||||
318
openhands/app_server/event/filesystem_event_service.py
Normal file
@ -0,0 +1,318 @@
|
||||
"""Filesystem-based EventService implementation."""
|
||||
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
from openhands.agent_server.models import EventPage, EventSortOrder
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
)
|
||||
from openhands.app_server.errors import OpenHandsError
|
||||
from openhands.app_server.event.event_service import EventService, EventServiceInjector
|
||||
from openhands.app_server.event_callback.event_callback_models import EventKind
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.sdk import Event
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FilesystemEventService(EventService):
|
||||
"""Filesystem-based implementation of EventService.
|
||||
|
||||
Events are stored in files with the naming format:
|
||||
{conversation_id}/{YYYYMMDDHHMMSS}_{kind}_{id.hex}
|
||||
|
||||
Uses an AppConversationInfoService to look up conversations.
|
||||
"""
|
||||
|
||||
app_conversation_info_service: AppConversationInfoService
|
||||
events_dir: Path
|
||||
|
||||
def _ensure_events_dir(self, conversation_id: UUID | None = None) -> Path:
|
||||
"""Ensure the events directory exists."""
|
||||
if conversation_id:
|
||||
events_path = self.events_dir / str(conversation_id)
|
||||
else:
|
||||
events_path = self.events_dir
|
||||
events_path.mkdir(parents=True, exist_ok=True)
|
||||
return events_path
|
||||
|
||||
def _timestamp_to_str(self, timestamp: datetime | str) -> str:
|
||||
"""Convert timestamp to YYYYMMDDHHMMSS format."""
|
||||
if isinstance(timestamp, str):
|
||||
# Parse ISO format timestamp string
|
||||
dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
|
||||
return dt.strftime('%Y%m%d%H%M%S')
|
||||
return timestamp.strftime('%Y%m%d%H%M%S')
|
||||
|
||||
def _get_event_filename(self, conversation_id: UUID, event: Event) -> str:
|
||||
"""Generate filename using YYYYMMDDHHMMSS_kind_id.hex format."""
|
||||
timestamp_str = self._timestamp_to_str(event.timestamp)
|
||||
kind = event.__class__.__name__
|
||||
# Handle both UUID objects and string UUIDs
|
||||
if isinstance(event.id, str):
|
||||
id_hex = event.id.replace('-', '')
|
||||
else:
|
||||
id_hex = event.id.hex
|
||||
return f'{timestamp_str}_{kind}_{id_hex}'
|
||||
|
||||
def _save_event_to_file(self, conversation_id: UUID, event: Event) -> None:
|
||||
"""Save an event to a file."""
|
||||
events_path = self._ensure_events_dir(conversation_id)
|
||||
filename = self._get_event_filename(conversation_id, event)
|
||||
filepath = events_path / filename
|
||||
|
||||
with open(filepath, 'w') as f:
|
||||
# Use model_dump with mode='json' to handle UUID serialization
|
||||
data = event.model_dump(mode='json')
|
||||
f.write(json.dumps(data, indent=2))
|
||||
|
||||
def _load_event_from_file(self, filepath: Path) -> Event | None:
|
||||
"""Load an event from a file."""
|
||||
try:
|
||||
json_data = filepath.read_text()
|
||||
return Event.model_validate_json(json_data)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _get_event_files_by_pattern(
|
||||
self, pattern: str, conversation_id: UUID | None = None
|
||||
) -> list[Path]:
|
||||
"""Get event files matching a glob pattern, sorted by timestamp."""
|
||||
if conversation_id:
|
||||
search_path = self.events_dir / str(conversation_id) / pattern
|
||||
else:
|
||||
search_path = self.events_dir / '*' / pattern
|
||||
|
||||
files = glob.glob(str(search_path))
|
||||
return sorted([Path(f) for f in files])
|
||||
|
||||
def _parse_filename(self, filename: str) -> dict[str, str] | None:
|
||||
"""Parse filename to extract timestamp, kind, and event_id."""
|
||||
try:
|
||||
parts = filename.split('_')
|
||||
if len(parts) >= 3:
|
||||
timestamp_str = parts[0]
|
||||
kind = '_'.join(parts[1:-1]) # Handle kinds with underscores
|
||||
event_id = parts[-1]
|
||||
return {'timestamp': timestamp_str, 'kind': kind, 'event_id': event_id}
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _get_conversation_id(self, file: Path) -> UUID | None:
|
||||
try:
|
||||
return UUID(file.parent.name)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _get_conversation_ids(self, files: list[Path]) -> set[UUID]:
|
||||
result = set()
|
||||
for file in files:
|
||||
conversation_id = self._get_conversation_id(file)
|
||||
if conversation_id:
|
||||
result.add(conversation_id)
|
||||
return result
|
||||
|
||||
async def _filter_files_by_conversation(self, files: list[Path]) -> list[Path]:
|
||||
conversation_ids = list(self._get_conversation_ids(files))
|
||||
conversations = (
|
||||
await self.app_conversation_info_service.batch_get_app_conversation_info(
|
||||
conversation_ids
|
||||
)
|
||||
)
|
||||
permitted_conversation_ids = set()
|
||||
for conversation in conversations:
|
||||
if conversation:
|
||||
permitted_conversation_ids.add(conversation.id)
|
||||
result = [
|
||||
file
|
||||
for file in files
|
||||
if self._get_conversation_id(file) in permitted_conversation_ids
|
||||
]
|
||||
return result
|
||||
|
||||
def _filter_files_by_criteria(
|
||||
self,
|
||||
files: list[Path],
|
||||
conversation_id__eq: UUID | None = None,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
) -> list[Path]:
|
||||
"""Filter files based on search criteria."""
|
||||
filtered_files = []
|
||||
|
||||
for file_path in files:
|
||||
# Check conversation_id filter
|
||||
if conversation_id__eq:
|
||||
if str(conversation_id__eq) not in str(file_path):
|
||||
continue
|
||||
|
||||
# Parse filename for additional filtering
|
||||
filename_info = self._parse_filename(file_path.name)
|
||||
if not filename_info:
|
||||
continue
|
||||
|
||||
# Check kind filter
|
||||
if kind__eq and filename_info['kind'] != kind__eq:
|
||||
continue
|
||||
|
||||
# Check timestamp filters
|
||||
if timestamp__gte or timestamp__lt:
|
||||
try:
|
||||
file_timestamp = datetime.strptime(
|
||||
filename_info['timestamp'], '%Y%m%d%H%M%S'
|
||||
)
|
||||
if timestamp__gte and file_timestamp < timestamp__gte:
|
||||
continue
|
||||
if timestamp__lt and file_timestamp >= timestamp__lt:
|
||||
continue
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
filtered_files.append(file_path)
|
||||
|
||||
return filtered_files
|
||||
|
||||
async def get_event(self, event_id: str) -> Event | None:
|
||||
"""Get the event with the given id, or None if not found."""
|
||||
# Convert event_id to hex format (remove dashes) for filename matching
|
||||
if isinstance(event_id, str) and '-' in event_id:
|
||||
id_hex = event_id.replace('-', '')
|
||||
else:
|
||||
id_hex = event_id
|
||||
|
||||
# Use glob pattern to find files ending with the event_id
|
||||
pattern = f'*_{id_hex}'
|
||||
files = self._get_event_files_by_pattern(pattern)
|
||||
|
||||
if not files:
|
||||
return None
|
||||
|
||||
# If there is no access to the conversation do not return the event
|
||||
file = files[0]
|
||||
conversation_id = self._get_conversation_id(file)
|
||||
if not conversation_id:
|
||||
return None
|
||||
conversation = (
|
||||
await self.app_conversation_info_service.get_app_conversation_info(
|
||||
conversation_id
|
||||
)
|
||||
)
|
||||
if not conversation:
|
||||
return None
|
||||
|
||||
# Load and return the first matching event
|
||||
return self._load_event_from_file(file)
|
||||
|
||||
async def search_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> EventPage:
|
||||
"""Search for events matching the given filters."""
|
||||
# Build the search pattern
|
||||
pattern = '*'
|
||||
files = self._get_event_files_by_pattern(pattern, conversation_id__eq)
|
||||
|
||||
files = await self._filter_files_by_conversation(files)
|
||||
|
||||
files = self._filter_files_by_criteria(
|
||||
files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt
|
||||
)
|
||||
|
||||
files.sort(
|
||||
key=lambda f: f.name,
|
||||
reverse=(sort_order == EventSortOrder.TIMESTAMP_DESC),
|
||||
)
|
||||
|
||||
# Handle pagination
|
||||
start_index = 0
|
||||
if page_id:
|
||||
for i, file_path in enumerate(files):
|
||||
if file_path.name == page_id:
|
||||
start_index = i + 1
|
||||
break
|
||||
|
||||
# Collect items for this page
|
||||
page_files = files[start_index : start_index + limit]
|
||||
next_page_id = None
|
||||
if start_index + limit < len(files):
|
||||
next_page_id = files[start_index + limit].name
|
||||
|
||||
# Load all events from files
|
||||
page_events = []
|
||||
for file_path in page_files:
|
||||
event = self._load_event_from_file(file_path)
|
||||
if event is not None:
|
||||
page_events.append(event)
|
||||
|
||||
return EventPage(items=page_events, next_page_id=next_page_id)
|
||||
|
||||
async def count_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
|
||||
) -> int:
|
||||
"""Count events matching the given filters."""
|
||||
# Build the search pattern
|
||||
pattern = '*'
|
||||
files = self._get_event_files_by_pattern(pattern, conversation_id__eq)
|
||||
|
||||
files = await self._filter_files_by_conversation(files)
|
||||
|
||||
files = self._filter_files_by_criteria(
|
||||
files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt
|
||||
)
|
||||
|
||||
return len(files)
|
||||
|
||||
async def save_event(self, conversation_id: UUID, event: Event):
|
||||
"""Save an event. Internal method intended not be part of the REST api."""
|
||||
conversation = (
|
||||
await self.app_conversation_info_service.get_app_conversation_info(
|
||||
conversation_id
|
||||
)
|
||||
)
|
||||
if not conversation:
|
||||
# This is either an illegal state or somebody is trying to hack
|
||||
raise OpenHandsError(f'No such conversation: {conversation_id}')
|
||||
self._save_event_to_file(conversation_id, event)
|
||||
|
||||
|
||||
class FilesystemEventServiceInjector(EventServiceInjector):
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[EventService, None]:
|
||||
from openhands.app_server.config import (
|
||||
get_app_conversation_info_service,
|
||||
get_global_config,
|
||||
)
|
||||
|
||||
async with get_app_conversation_info_service(
|
||||
state, request
|
||||
) as app_conversation_info_service:
|
||||
persistence_dir = get_global_config().persistence_dir
|
||||
|
||||
yield FilesystemEventService(
|
||||
app_conversation_info_service=app_conversation_info_service,
|
||||
events_dir=persistence_dir / 'v1' / 'events',
|
||||
)
|
||||
21
openhands/app_server/event_callback/README.md
Normal file
@ -0,0 +1,21 @@
|
||||
# Event Callbacks
|
||||
|
||||
Manages webhooks and event callbacks for external system integration.
|
||||
|
||||
## Overview
|
||||
|
||||
This module provides webhook and callback functionality, allowing external systems to receive notifications when specific events occur within OpenHands conversations.
|
||||
|
||||
## Key Components
|
||||
|
||||
- **EventCallbackService**: Abstract service for callback CRUD operations
|
||||
- **SqlEventCallbackService**: SQL-based callback storage implementation
|
||||
- **EventWebhookRouter**: FastAPI router for webhook endpoints
|
||||
|
||||
## Features
|
||||
|
||||
- Webhook registration and management
|
||||
- Event filtering by type and conversation
|
||||
- Callback result tracking and status monitoring
|
||||
- Retry logic for failed webhook deliveries
|
||||
- Secure webhook authentication
|
||||
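The snippet below is a minimal sketch of registering a callback programmatically; obtaining an `EventCallbackService` instance (normally supplied by the app's dependency injection) is assumed:

```python
# Minimal sketch: register a LoggingCallbackProcessor for one conversation.
# How the EventCallbackService instance is obtained is an assumption here.
from uuid import UUID

from openhands.app_server.event_callback.event_callback_models import (
    CreateEventCallbackRequest,
    LoggingCallbackProcessor,
)
from openhands.app_server.event_callback.event_callback_service import (
    EventCallbackService,
)


async def register_logging_callback(
    service: EventCallbackService, conversation_id: UUID
) -> UUID:
    # No event_kind filter: the callback fires for every event kind
    # emitted within the given conversation.
    request = CreateEventCallbackRequest(
        conversation_id=conversation_id,
        processor=LoggingCallbackProcessor(),
    )
    callback = await service.create_event_callback(request)
    return callback.id
```

Once registered, `execute_callbacks` invokes every matching callback as new events arrive and records an `EventCallbackResult` for each invocation.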
83
openhands/app_server/event_callback/event_callback_models.py
Normal file
@ -0,0 +1,83 @@
|
||||
# pyright: reportIncompatibleMethodOverride=false
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Literal
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.event_callback.event_callback_result_models import (
|
||||
EventCallbackResult,
|
||||
EventCallbackResultStatus,
|
||||
)
|
||||
from openhands.sdk import Event
|
||||
from openhands.sdk.utils.models import (
|
||||
DiscriminatedUnionMixin,
|
||||
OpenHandsModel,
|
||||
get_known_concrete_subclasses,
|
||||
)
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
EventKind = str
|
||||
else:
|
||||
EventKind = Literal[tuple(c.__name__ for c in get_known_concrete_subclasses(Event))]
|
||||
|
||||
|
||||
class EventCallbackProcessor(DiscriminatedUnionMixin, ABC):
|
||||
@abstractmethod
|
||||
async def __call__(
|
||||
self,
|
||||
conversation_id: UUID,
|
||||
callback: EventCallback,
|
||||
event: Event,
|
||||
) -> EventCallbackResult:
|
||||
"""Process an event."""
|
||||
|
||||
|
||||
class LoggingCallbackProcessor(EventCallbackProcessor):
|
||||
"""Example implementation which logs callbacks."""
|
||||
|
||||
async def __call__(
|
||||
self,
|
||||
conversation_id: UUID,
|
||||
callback: EventCallback,
|
||||
event: Event,
|
||||
) -> EventCallbackResult:
|
||||
_logger.info(f'Callback {callback.id} Invoked for event {event}')
|
||||
return EventCallbackResult(
|
||||
status=EventCallbackResultStatus.SUCCESS,
|
||||
event_callback_id=callback.id,
|
||||
event_id=event.id,
|
||||
conversation_id=conversation_id,
|
||||
)
|
||||
|
||||
|
||||
class CreateEventCallbackRequest(OpenHandsModel):
|
||||
conversation_id: UUID | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
'Optional filter on the conversation to which this callback applies'
|
||||
),
|
||||
)
|
||||
processor: EventCallbackProcessor
|
||||
event_kind: EventKind | None = Field(
|
||||
default=None,
|
||||
description=(
|
||||
'Optional filter on the type of events to which this callback applies'
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class EventCallback(CreateEventCallbackRequest):
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class EventCallbackPage(OpenHandsModel):
|
||||
items: list[EventCallback]
|
||||
next_page_id: str | None = None
|
||||
35
openhands/app_server/event_callback/event_callback_result_models.py
Normal file
@ -0,0 +1,35 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.sdk.event.types import EventID
|
||||
|
||||
|
||||
class EventCallbackResultStatus(Enum):
|
||||
SUCCESS = 'SUCCESS'
|
||||
ERROR = 'ERROR'
|
||||
|
||||
|
||||
class EventCallbackResultSortOrder(Enum):
|
||||
CREATED_AT = 'CREATED_AT'
|
||||
CREATED_AT_DESC = 'CREATED_AT_DESC'
|
||||
|
||||
|
||||
class EventCallbackResult(BaseModel):
|
||||
"""Object representing the result of an event callback."""
|
||||
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
status: EventCallbackResultStatus
|
||||
event_callback_id: UUID
|
||||
event_id: EventID
|
||||
conversation_id: UUID
|
||||
detail: str | None = None
|
||||
created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class EventCallbackResultPage(BaseModel):
|
||||
items: list[EventCallbackResult]
|
||||
next_page_id: str | None = None
|
||||
64
openhands/app_server/event_callback/event_callback_service.py
Normal file
@ -0,0 +1,64 @@
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
from uuid import UUID
|
||||
|
||||
from openhands.app_server.event_callback.event_callback_models import (
|
||||
CreateEventCallbackRequest,
|
||||
EventCallback,
|
||||
EventCallbackPage,
|
||||
EventKind,
|
||||
)
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk import Event
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
|
||||
class EventCallbackService(ABC):
|
||||
"""CRUD service for managing event callbacks."""
|
||||
|
||||
@abstractmethod
|
||||
async def create_event_callback(
|
||||
self, request: CreateEventCallbackRequest
|
||||
) -> EventCallback:
|
||||
"""Create a new event callback."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_event_callback(self, id: UUID) -> EventCallback | None:
|
||||
"""Get a single event callback, returning None if not found."""
|
||||
|
||||
@abstractmethod
|
||||
async def delete_event_callback(self, id: UUID) -> bool:
|
||||
"""Delete a event callback, returning True if deleted, False if not found."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_event_callbacks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
event_kind__eq: EventKind | None = None,
|
||||
event_id__eq: UUID | None = None,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> EventCallbackPage:
|
||||
"""Search for event callbacks, optionally filtered by event_id."""
|
||||
|
||||
async def batch_get_event_callbacks(
|
||||
self, event_callback_ids: list[UUID]
|
||||
) -> list[EventCallback | None]:
|
||||
"""Get a batch of event callbacks, returning None for any not found."""
|
||||
results = await asyncio.gather(
|
||||
*[
|
||||
self.get_event_callback(event_callback_id)
|
||||
for event_callback_id in event_callback_ids
|
||||
]
|
||||
)
|
||||
return results
|
||||
|
||||
@abstractmethod
|
||||
async def execute_callbacks(self, conversation_id: UUID, event: Event) -> None:
|
||||
"""Execute any applicable callbacks for the event and store the results."""
|
||||
|
||||
|
||||
class EventCallbackServiceInjector(
|
||||
DiscriminatedUnionMixin, Injector[EventCallbackService], ABC
|
||||
):
|
||||
pass
|
||||
@ -0,0 +1,230 @@
|
||||
# pyright: reportArgumentType=false
|
||||
"""SQL implementation of EventCallbackService."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request
|
||||
from sqlalchemy import UUID as SQLUUID
|
||||
from sqlalchemy import Column, Enum, String, and_, func, or_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.app_server.event_callback.event_callback_models import (
|
||||
CreateEventCallbackRequest,
|
||||
EventCallback,
|
||||
EventCallbackPage,
|
||||
EventCallbackProcessor,
|
||||
EventKind,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_result_models import (
|
||||
EventCallbackResultStatus,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_service import (
|
||||
EventCallbackService,
|
||||
EventCallbackServiceInjector,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.utils.sql_utils import (
|
||||
Base,
|
||||
UtcDateTime,
|
||||
create_json_type_decorator,
|
||||
row2dict,
|
||||
)
|
||||
from openhands.sdk import Event
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# TODO: Add user level filtering to this class
|
||||
|
||||
|
||||
class StoredEventCallback(Base): # type: ignore
|
||||
__tablename__ = 'event_callback'
|
||||
id = Column(SQLUUID, primary_key=True)
|
||||
conversation_id = Column(SQLUUID, nullable=True)
|
||||
processor = Column(create_json_type_decorator(EventCallbackProcessor))
|
||||
event_kind = Column(String, nullable=True)
|
||||
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
|
||||
|
||||
|
||||
class StoredEventCallbackResult(Base): # type: ignore
|
||||
__tablename__ = 'event_callback_result'
|
||||
id = Column(SQLUUID, primary_key=True)
|
||||
status = Column(Enum(EventCallbackResultStatus), nullable=True)
|
||||
event_callback_id = Column(SQLUUID, index=True)
|
||||
event_id = Column(SQLUUID, index=True)
|
||||
conversation_id = Column(SQLUUID, index=True)
|
||||
detail = Column(String, nullable=True)
|
||||
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SQLEventCallbackService(EventCallbackService):
|
||||
"""SQL implementation of EventCallbackService."""
|
||||
|
||||
db_session: AsyncSession
|
||||
|
||||
async def create_event_callback(
|
||||
self, request: CreateEventCallbackRequest
|
||||
) -> EventCallback:
|
||||
"""Create a new event callback."""
|
||||
# Create EventCallback from request
|
||||
event_callback = EventCallback(
|
||||
conversation_id=request.conversation_id,
|
||||
processor=request.processor,
|
||||
event_kind=request.event_kind,
|
||||
)
|
||||
|
||||
# Create stored version and add to db_session
|
||||
stored_callback = StoredEventCallback(**event_callback.model_dump())
|
||||
self.db_session.add(stored_callback)
|
||||
await self.db_session.commit()
|
||||
await self.db_session.refresh(stored_callback)
|
||||
return EventCallback(**row2dict(stored_callback))
|
||||
|
||||
async def get_event_callback(self, id: UUID) -> EventCallback | None:
|
||||
"""Get a single event callback, returning None if not found."""
|
||||
stmt = select(StoredEventCallback).where(StoredEventCallback.id == id)
|
||||
result = await self.db_session.execute(stmt)
|
||||
stored_callback = result.scalar_one_or_none()
|
||||
if stored_callback:
|
||||
return EventCallback(**row2dict(stored_callback))
|
||||
return None
|
||||
|
||||
async def delete_event_callback(self, id: UUID) -> bool:
|
||||
"""Delete an event callback, returning True if deleted, False if not found."""
|
||||
stmt = select(StoredEventCallback).where(StoredEventCallback.id == id)
|
||||
result = await self.db_session.execute(stmt)
|
||||
stored_callback = result.scalar_one_or_none()
|
||||
|
||||
if stored_callback is None:
|
||||
return False
|
||||
|
||||
await self.db_session.delete(stored_callback)
|
||||
await self.db_session.commit()
|
||||
return True
|
||||
|
||||
async def search_event_callbacks(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
event_kind__eq: EventKind | None = None,
|
||||
event_id__eq: UUID | None = None,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> EventCallbackPage:
|
||||
"""Search for event callbacks, optionally filtered by parameters."""
|
||||
# Build the query with filters
|
||||
conditions = []
|
||||
|
||||
if conversation_id__eq is not None:
|
||||
conditions.append(
|
||||
StoredEventCallback.conversation_id == conversation_id__eq
|
||||
)
|
||||
|
||||
if event_kind__eq is not None:
|
||||
conditions.append(StoredEventCallback.event_kind == event_kind__eq)
|
||||
|
||||
# Note: event_id__eq is not stored in the event_callback table
|
||||
# This parameter might be used for filtering results after retrieval
|
||||
# or might be intended for a different use case
|
||||
|
||||
# Build the base query
|
||||
stmt = select(StoredEventCallback)
|
||||
|
||||
if conditions:
|
||||
stmt = stmt.where(and_(*conditions))
|
||||
|
||||
# Handle pagination
|
||||
if page_id is not None:
|
||||
# Parse page_id to get offset or cursor
|
||||
try:
|
||||
offset = int(page_id)
|
||||
stmt = stmt.offset(offset)
|
||||
except ValueError:
|
||||
# If page_id is not a valid integer, start from beginning
|
||||
offset = 0
|
||||
else:
|
||||
offset = 0
|
||||
|
||||
# Apply limit and get one extra to check if there are more results
|
||||
stmt = stmt.limit(limit + 1).order_by(StoredEventCallback.created_at.desc())
|
||||
|
||||
result = await self.db_session.execute(stmt)
|
||||
stored_callbacks = result.scalars().all()
|
||||
|
||||
# Check if there are more results
|
||||
has_more = len(stored_callbacks) > limit
|
||||
if has_more:
|
||||
stored_callbacks = stored_callbacks[:limit]
|
||||
|
||||
# Calculate next page ID
|
||||
next_page_id = None
|
||||
if has_more:
|
||||
next_page_id = str(offset + limit)
|
||||
|
||||
# Convert stored callbacks to domain models
|
||||
callbacks = [EventCallback(**row2dict(cb)) for cb in stored_callbacks]
|
||||
return EventCallbackPage(items=callbacks, next_page_id=next_page_id)
|
||||
|
||||
async def execute_callbacks(self, conversation_id: UUID, event: Event) -> None:
|
||||
query = (
|
||||
select(StoredEventCallback)
|
||||
.where(
|
||||
or_(
|
||||
StoredEventCallback.event_kind == event.kind,
|
||||
StoredEventCallback.event_kind.is_(None),
|
||||
)
|
||||
)
|
||||
.where(
|
||||
or_(
|
||||
StoredEventCallback.conversation_id == conversation_id,
|
||||
StoredEventCallback.conversation_id.is_(None),
|
||||
)
|
||||
)
|
||||
)
|
||||
result = await self.db_session.execute(query)
|
||||
stored_callbacks = result.scalars().all()
|
||||
if stored_callbacks:
|
||||
callbacks = [EventCallback(**row2dict(cb)) for cb in stored_callbacks]
|
||||
await asyncio.gather(
|
||||
*[
|
||||
self.execute_callback(conversation_id, callback, event)
|
||||
for callback in callbacks
|
||||
]
|
||||
)
|
||||
await self.db_session.commit()
|
||||
|
||||
async def execute_callback(
|
||||
self, conversation_id: UUID, callback: EventCallback, event: Event
|
||||
):
|
||||
try:
|
||||
result = await callback.processor(conversation_id, callback, event)
|
||||
stored_result = StoredEventCallbackResult(**row2dict(result))
|
||||
except Exception as exc:
|
||||
_logger.exception(f'Exception in callback {callback.id}', stack_info=True)
|
||||
stored_result = StoredEventCallbackResult(
|
||||
status=EventCallbackResultStatus.ERROR,
|
||||
event_callback_id=callback.id,
|
||||
event_id=event.id,
|
||||
conversation_id=conversation_id,
|
||||
detail=str(exc),
|
||||
)
|
||||
self.db_session.add(stored_result)
|
||||
|
||||
async def __aexit__(self, exc_type, exc_value, traceback):
|
||||
"""Stop using this event callback service."""
|
||||
pass
|
||||
|
||||
|
||||
class SQLEventCallbackServiceInjector(EventCallbackServiceInjector):
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[EventCallbackService, None]:
|
||||
from openhands.app_server.config import get_db_session
|
||||
|
||||
async with get_db_session(state) as db_session:
|
||||
yield SQLEventCallbackService(db_session=db_session)
|
||||
188
openhands/app_server/event_callback/webhook_router.py
Normal file
@ -0,0 +1,188 @@
|
||||
"""Event Callback router for OpenHands Server."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import APIKeyHeader
|
||||
from jwt import InvalidTokenError
|
||||
|
||||
from openhands.agent_server.models import ConversationInfo, Success
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationInfo,
|
||||
)
|
||||
from openhands.app_server.config import (
|
||||
depends_app_conversation_info_service,
|
||||
depends_db_session,
|
||||
depends_event_service,
|
||||
depends_jwt_service,
|
||||
depends_sandbox_service,
|
||||
get_event_callback_service,
|
||||
get_global_config,
|
||||
)
|
||||
from openhands.app_server.errors import AuthError
|
||||
from openhands.app_server.event.event_service import EventService
|
||||
from openhands.app_server.sandbox.sandbox_models import SandboxInfo
|
||||
from openhands.app_server.sandbox.sandbox_service import SandboxService
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.services.jwt_service import JwtService
|
||||
from openhands.app_server.user.specifiy_user_context import (
|
||||
USER_CONTEXT_ATTR,
|
||||
SpecifyUserContext,
|
||||
as_admin,
|
||||
)
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.integrations.provider import ProviderType
|
||||
from openhands.sdk import Event
|
||||
|
||||
router = APIRouter(prefix='/webhooks', tags=['Webhooks'])
|
||||
sandbox_service_dependency = depends_sandbox_service()
|
||||
event_service_dependency = depends_event_service()
|
||||
app_conversation_info_service_dependency = depends_app_conversation_info_service()
|
||||
jwt_dependency = depends_jwt_service()
|
||||
config = get_global_config()
|
||||
db_session_dependency = depends_db_session()
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def valid_sandbox(
|
||||
sandbox_id: str,
|
||||
user_context: UserContext = Depends(as_admin),
|
||||
session_api_key: str = Depends(
|
||||
APIKeyHeader(name='X-Session-API-Key', auto_error=False)
|
||||
),
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> SandboxInfo:
|
||||
sandbox_info = await sandbox_service.get_sandbox(sandbox_id)
|
||||
if sandbox_info is None or sandbox_info.session_api_key != session_api_key:
|
||||
raise HTTPException(status.HTTP_401_UNAUTHORIZED)
|
||||
return sandbox_info
|
||||
|
||||
|
||||
async def valid_conversation(
|
||||
conversation_id: UUID,
|
||||
sandbox_info: SandboxInfo,
|
||||
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
|
||||
) -> AppConversationInfo:
|
||||
app_conversation_info = (
|
||||
await app_conversation_info_service.get_app_conversation_info(conversation_id)
|
||||
)
|
||||
if not app_conversation_info:
|
||||
# Conversation does not yet exist - create a stub
|
||||
return AppConversationInfo(
|
||||
id=conversation_id,
|
||||
sandbox_id=sandbox_info.id,
|
||||
created_by_user_id=sandbox_info.created_by_user_id,
|
||||
)
|
||||
if app_conversation_info.created_by_user_id != sandbox_info.created_by_user_id:
|
||||
# Make sure that the conversation and sandbox were created by the same user
|
||||
raise AuthError()
|
||||
return app_conversation_info
|
||||
|
||||
|
||||
@router.post('/{sandbox_id}/conversations')
|
||||
async def on_conversation_update(
|
||||
conversation_info: ConversationInfo,
|
||||
sandbox_info: SandboxInfo = Depends(valid_sandbox),
|
||||
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
|
||||
) -> Success:
|
||||
"""Webhook callback for when a conversation starts, pauses, resumes, or deletes."""
|
||||
existing = await valid_conversation(
|
||||
conversation_info.id, sandbox_info, app_conversation_info_service
|
||||
)
|
||||
|
||||
app_conversation_info = AppConversationInfo(
|
||||
id=conversation_info.id,
|
||||
# TODO: As of writing, ConversationInfo from AgentServer does not have a title
|
||||
title=existing.title or f'Conversation {conversation_info.id}',
|
||||
sandbox_id=sandbox_info.id,
|
||||
created_by_user_id=sandbox_info.created_by_user_id,
|
||||
llm_model=conversation_info.agent.llm.model,
|
||||
# Git parameters
|
||||
selected_repository=existing.selected_repository,
|
||||
selected_branch=existing.selected_branch,
|
||||
git_provider=existing.git_provider,
|
||||
trigger=existing.trigger,
|
||||
pr_number=existing.pr_number,
|
||||
)
|
||||
await app_conversation_info_service.save_app_conversation_info(
|
||||
app_conversation_info
|
||||
)
|
||||
|
||||
return Success()
|
||||
|
||||
|
||||
@router.post('/{sandbox_id}/events/{conversation_id}')
|
||||
async def on_event(
|
||||
events: list[Event],
|
||||
conversation_id: UUID,
|
||||
sandbox_info: SandboxInfo = Depends(valid_sandbox),
|
||||
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
|
||||
event_service: EventService = event_service_dependency,
|
||||
) -> Success:
|
||||
"""Webhook callback for when event stream events occur."""
|
||||
|
||||
app_conversation_info = await valid_conversation(
|
||||
conversation_id, sandbox_info, app_conversation_info_service
|
||||
)
|
||||
|
||||
try:
|
||||
# Save events...
|
||||
await asyncio.gather(
|
||||
*[event_service.save_event(conversation_id, event) for event in events]
|
||||
)
|
||||
|
||||
asyncio.create_task(
|
||||
_run_callbacks_in_bg_and_close(
|
||||
conversation_id, app_conversation_info.created_by_user_id, events
|
||||
)
|
||||
)
|
||||
|
||||
except Exception:
|
||||
_logger.exception('Error in webhook', stack_info=True)
|
||||
|
||||
return Success()
|
||||
|
||||
|
||||
@router.get('/secrets')
|
||||
async def get_secret(
|
||||
access_token: str = Depends(APIKeyHeader(name='X-Access-Token', auto_error=False)),
|
||||
jwt_service: JwtService = jwt_dependency,
|
||||
) -> str:
|
||||
"""Given an access token, retrieve a user secret. The access token
|
||||
is limited by user and provider type, and may include a timeout, limiting
|
||||
the damage in the event that a token is ever leaked."""
|
||||
try:
|
||||
payload = jwt_service.verify_jws_token(access_token)
|
||||
user_id = payload['user_id']
|
||||
provider_type = ProviderType[payload['provider_type']]
|
||||
user_injector = config.user
|
||||
assert user_injector is not None
|
||||
user_context = await user_injector.get_for_user(user_id)
|
||||
secret = None
|
||||
if user_context:
|
||||
secret = await user_context.get_latest_token(provider_type)
|
||||
if secret is None:
|
||||
raise HTTPException(404, 'No such provider')
|
||||
return secret
|
||||
except InvalidTokenError:
|
||||
raise HTTPException(status.HTTP_401_UNAUTHORIZED)
|
||||
|
||||
|
||||
async def _run_callbacks_in_bg_and_close(
|
||||
conversation_id: UUID,
|
||||
user_id: str | None,
|
||||
events: list[Event],
|
||||
):
|
||||
"""Run all callbacks and close the session"""
|
||||
state = InjectorState()
|
||||
setattr(state, USER_CONTEXT_ATTR, SpecifyUserContext(user_id=user_id))
|
||||
|
||||
async with get_event_callback_service(state) as event_callback_service:
|
||||
# We don't use asyncio.gather here because callbacks must be run in sequence.
|
||||
for event in events:
|
||||
await event_callback_service.execute_callbacks(conversation_id, event)
|
||||
21
openhands/app_server/sandbox/README.md
Normal file
@ -0,0 +1,21 @@
|
||||
# Sandbox Management
|
||||
|
||||
Manages sandbox environments for secure agent execution within OpenHands.
|
||||
|
||||
## Overview
|
||||
|
||||
Since agents can do things that may harm your system, they are typically run inside a sandbox (like a Docker container). This module provides services for creating, managing, and monitoring these sandbox environments.
|
||||
|
||||
## Key Components
|
||||
|
||||
- **SandboxService**: Abstract service for sandbox lifecycle management
|
||||
- **DockerSandboxService**: Docker-based sandbox implementation
|
||||
- **SandboxSpecService**: Manages sandbox specifications and templates
|
||||
- **SandboxRouter**: FastAPI router for sandbox endpoints
|
||||
|
||||
## Features
|
||||
|
||||
- Secure containerized execution environments
|
||||
- Sandbox lifecycle management (create, start, stop, destroy)
|
||||
- Multiple sandbox backend support (Docker, Remote, Local)
|
||||
- User-scoped sandbox access control
|
||||
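A rough lifecycle sketch using the `SandboxService` interface is shown below; obtaining the service instance (for example a `DockerSandboxService`) is assumed to happen via the app's dependency injection:

```python
# Illustrative sandbox lifecycle sketch. Obtaining the service instance is an
# assumption; the method names mirror the SandboxService interface.
from openhands.app_server.sandbox.sandbox_service import SandboxService


async def run_then_clean_up(sandbox_service: SandboxService) -> None:
    # Start a sandbox from the default sandbox spec.
    info = await sandbox_service.start_sandbox()
    print(f"Started sandbox {info.id} with status {info.status}")

    # Pause and resume; both return False if the sandbox is not found.
    await sandbox_service.pause_sandbox(info.id)
    await sandbox_service.resume_sandbox(info.id)

    # Stop the container and remove it along with any associated volume.
    deleted = await sandbox_service.delete_sandbox(info.id)
    print(f"Cleaned up: {deleted}")
```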
429
openhands/app_server/sandbox/docker_sandbox_service.py
Normal file
@ -0,0 +1,429 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import base62
|
||||
import docker
|
||||
import httpx
|
||||
from docker.errors import APIError, NotFound
|
||||
from fastapi import Request
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.errors import SandboxError
|
||||
from openhands.app_server.sandbox.docker_sandbox_spec_service import get_docker_client
|
||||
from openhands.app_server.sandbox.sandbox_models import (
|
||||
AGENT_SERVER,
|
||||
VSCODE,
|
||||
ExposedUrl,
|
||||
SandboxInfo,
|
||||
SandboxPage,
|
||||
SandboxStatus,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_service import (
|
||||
SandboxService,
|
||||
SandboxServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
SESSION_API_KEY_VARIABLE = 'OH_SESSION_API_KEYS_0'
|
||||
WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL'
|
||||
|
||||
|
||||
class VolumeMount(BaseModel):
|
||||
"""Mounted volume within the container."""
|
||||
|
||||
host_path: str
|
||||
container_path: str
|
||||
mode: str = 'rw'
|
||||
|
||||
model_config = ConfigDict(frozen=True)
|
||||
|
||||
|
||||
class ExposedPort(BaseModel):
|
||||
"""Exposed port within container to be matched to a free port on the host."""
|
||||
|
||||
name: str
|
||||
description: str
|
||||
container_port: int = 8000
|
||||
|
||||
model_config = ConfigDict(frozen=True)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DockerSandboxService(SandboxService):
|
||||
"""Sandbox service built on docker.
|
||||
|
||||
The Docker API does not currently support async operations, so some of these operations will block.
|
||||
Given that the docker API is intended for local use on a single machine, this is probably acceptable.
|
||||
"""
|
||||
|
||||
sandbox_spec_service: SandboxSpecService
|
||||
container_name_prefix: str
|
||||
host_port: int
|
||||
container_url_pattern: str
|
||||
mounts: list[VolumeMount]
|
||||
exposed_ports: list[ExposedPort]
|
||||
health_check_path: str | None
|
||||
httpx_client: httpx.AsyncClient
|
||||
docker_client: docker.DockerClient = field(default_factory=get_docker_client)
|
||||
|
||||
def _find_unused_port(self) -> int:
|
||||
"""Find an unused port on the host machine."""
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||
s.bind(('', 0))
|
||||
s.listen(1)
|
||||
port = s.getsockname()[1]
|
||||
return port
|
||||
|
||||
def _docker_status_to_sandbox_status(self, docker_status: str) -> SandboxStatus:
|
||||
"""Convert Docker container status to SandboxStatus."""
|
||||
status_mapping = {
|
||||
'running': SandboxStatus.RUNNING,
|
||||
'paused': SandboxStatus.PAUSED,
|
||||
'exited': SandboxStatus.MISSING,
|
||||
'created': SandboxStatus.STARTING,
|
||||
'restarting': SandboxStatus.STARTING,
|
||||
'removing': SandboxStatus.MISSING,
|
||||
'dead': SandboxStatus.ERROR,
|
||||
}
|
||||
return status_mapping.get(docker_status.lower(), SandboxStatus.ERROR)
|
||||
|
||||
def _get_container_env_vars(self, container) -> dict[str, str | None]:
|
||||
env_vars_list = container.attrs['Config']['Env']
|
||||
result = {}
|
||||
for env_var in env_vars_list:
|
||||
if '=' in env_var:
|
||||
key, value = env_var.split('=', 1)
|
||||
result[key] = value
|
||||
else:
|
||||
# Handle cases where an environment variable might not have a value
|
||||
result[env_var] = None
|
||||
return result
|
||||
|
||||
async def _container_to_sandbox_info(self, container) -> SandboxInfo | None:
|
||||
"""Convert Docker container to SandboxInfo."""
|
||||
# Convert Docker status to runtime status
|
||||
status = self._docker_status_to_sandbox_status(container.status)
|
||||
|
||||
# Parse creation time
|
||||
created_str = container.attrs.get('Created', '')
|
||||
try:
|
||||
created_at = datetime.fromisoformat(created_str.replace('Z', '+00:00'))
|
||||
except (ValueError, AttributeError):
|
||||
created_at = utc_now()
|
||||
|
||||
# Get URL and session key for running containers
|
||||
exposed_urls = None
|
||||
session_api_key = None
|
||||
|
||||
if status == SandboxStatus.RUNNING:
|
||||
# Get the first exposed port mapping
|
||||
exposed_urls = []
|
||||
port_bindings = container.attrs.get('NetworkSettings', {}).get('Ports', {})
|
||||
if port_bindings:
|
||||
for container_port, host_bindings in port_bindings.items():
|
||||
if host_bindings:
|
||||
host_port = host_bindings[0]['HostPort']
|
||||
exposed_port = next(
|
||||
(
|
||||
exposed_port
|
||||
for exposed_port in self.exposed_ports
|
||||
if container_port
|
||||
== f'{exposed_port.container_port}/tcp'
|
||||
),
|
||||
None,
|
||||
)
|
||||
if exposed_port:
|
||||
exposed_urls.append(
|
||||
ExposedUrl(
|
||||
name=exposed_port.name,
|
||||
url=self.container_url_pattern.format(
|
||||
port=host_port
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
# Get session API key
|
||||
env = self._get_container_env_vars(container)
|
||||
session_api_key = env[SESSION_API_KEY_VARIABLE]
|
||||
|
||||
return SandboxInfo(
|
||||
id=container.name,
|
||||
created_by_user_id=None,
|
||||
sandbox_spec_id=container.image.tags[0],
|
||||
status=status,
|
||||
session_api_key=session_api_key,
|
||||
exposed_urls=exposed_urls,
|
||||
created_at=created_at,
|
||||
)
|
||||
|
||||
async def _container_to_checked_sandbox_info(self, container) -> SandboxInfo | None:
|
||||
sandbox_info = await self._container_to_sandbox_info(container)
|
||||
if (
|
||||
sandbox_info
|
||||
and self.health_check_path is not None
|
||||
and sandbox_info.exposed_urls
|
||||
):
|
||||
app_server_url = next(
|
||||
exposed_url.url
|
||||
for exposed_url in sandbox_info.exposed_urls
|
||||
if exposed_url.name == AGENT_SERVER
|
||||
)
|
||||
try:
|
||||
response = await self.httpx_client.get(
|
||||
f'{app_server_url}{self.health_check_path}'
|
||||
)
|
||||
response.raise_for_status()
|
||||
except asyncio.CancelledError:
|
||||
raise
|
||||
except Exception as exc:
|
||||
_logger.info(f'Sandbox server not running: {exc}')
|
||||
sandbox_info.status = SandboxStatus.ERROR
|
||||
sandbox_info.exposed_urls = None
|
||||
sandbox_info.session_api_key = None
|
||||
return sandbox_info
|
||||
|
||||
async def search_sandboxes(
|
||||
self,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> SandboxPage:
|
||||
"""Search for sandboxes."""
|
||||
try:
|
||||
# Get all containers with our prefix
|
||||
all_containers = self.docker_client.containers.list(all=True)
|
||||
sandboxes = []
|
||||
|
||||
for container in all_containers:
|
||||
if container.name.startswith(self.container_name_prefix):
|
||||
sandbox_info = await self._container_to_checked_sandbox_info(
|
||||
container
|
||||
)
|
||||
if sandbox_info:
|
||||
sandboxes.append(sandbox_info)
|
||||
|
||||
# Sort by creation time (newest first)
|
||||
sandboxes.sort(key=lambda x: x.created_at, reverse=True)
|
||||
|
||||
# Apply pagination
|
||||
start_idx = 0
|
||||
if page_id:
|
||||
try:
|
||||
start_idx = int(page_id)
|
||||
except ValueError:
|
||||
start_idx = 0
|
||||
|
||||
end_idx = start_idx + limit
|
||||
paginated_containers = sandboxes[start_idx:end_idx]
|
||||
|
||||
# Determine next page ID
|
||||
next_page_id = None
|
||||
if end_idx < len(sandboxes):
|
||||
next_page_id = str(end_idx)
|
||||
|
||||
return SandboxPage(items=paginated_containers, next_page_id=next_page_id)
|
||||
|
||||
except APIError:
|
||||
return SandboxPage(items=[], next_page_id=None)
|
||||
|
||||
async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None:
|
||||
"""Get a single sandbox info."""
|
||||
try:
|
||||
if not sandbox_id.startswith(self.container_name_prefix):
|
||||
return None
|
||||
container = self.docker_client.containers.get(sandbox_id)
|
||||
return await self._container_to_checked_sandbox_info(container)
|
||||
except (NotFound, APIError):
|
||||
return None
|
||||
|
||||
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
|
||||
"""Start a new sandbox."""
|
||||
if sandbox_spec_id is None:
|
||||
sandbox_spec = await self.sandbox_spec_service.get_default_sandbox_spec()
|
||||
else:
|
||||
sandbox_spec_maybe = await self.sandbox_spec_service.get_sandbox_spec(
|
||||
sandbox_spec_id
|
||||
)
|
||||
if sandbox_spec_maybe is None:
|
||||
raise ValueError('Sandbox Spec not found')
|
||||
sandbox_spec = sandbox_spec_maybe
|
||||
|
||||
# Generate container ID and session api key
|
||||
container_name = (
|
||||
f'{self.container_name_prefix}{base62.encodebytes(os.urandom(16))}'
|
||||
)
|
||||
session_api_key = base62.encodebytes(os.urandom(32))
|
||||
|
||||
# Prepare environment variables
|
||||
env_vars = sandbox_spec.initial_env.copy()
|
||||
env_vars[SESSION_API_KEY_VARIABLE] = session_api_key
|
||||
env_vars[WEBHOOK_CALLBACK_VARIABLE] = (
|
||||
f'http://host.docker.internal:{self.host_port}'
|
||||
f'/api/v1/webhooks/{container_name}'
|
||||
)
|
||||
|
||||
# Prepare port mappings and add port environment variables
|
||||
port_mappings = {}
|
||||
for exposed_port in self.exposed_ports:
|
||||
host_port = self._find_unused_port()
|
||||
port_mappings[exposed_port.container_port] = host_port
|
||||
# Add port as environment variable
|
||||
env_vars[exposed_port.name] = str(host_port)
|
||||
|
||||
# Prepare labels
|
||||
labels = {
|
||||
'sandbox_spec_id': sandbox_spec.id,
|
||||
}
|
||||
|
||||
# Prepare volumes
|
||||
volumes = {
|
||||
mount.host_path: {
|
||||
'bind': mount.container_path,
|
||||
'mode': mount.mode,
|
||||
}
|
||||
for mount in self.mounts
|
||||
}
|
||||
|
||||
try:
|
||||
# Create and start the container
|
||||
container = self.docker_client.containers.run( # type: ignore[call-overload]
|
||||
image=sandbox_spec.id,
|
||||
command=sandbox_spec.command, # Use default command from image
|
||||
remove=False,
|
||||
name=container_name,
|
||||
environment=env_vars,
|
||||
ports=port_mappings,
|
||||
volumes=volumes,
|
||||
working_dir=sandbox_spec.working_dir,
|
||||
labels=labels,
|
||||
detach=True,
|
||||
)
|
||||
|
||||
sandbox_info = await self._container_to_sandbox_info(container)
|
||||
assert sandbox_info is not None
|
||||
return sandbox_info
|
||||
|
||||
except APIError as e:
|
||||
raise SandboxError(f'Failed to start container: {e}')
|
||||
|
||||
async def resume_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Resume a paused sandbox."""
|
||||
try:
|
||||
if not sandbox_id.startswith(self.container_name_prefix):
|
||||
return False
|
||||
container = self.docker_client.containers.get(sandbox_id)
|
||||
|
||||
if container.status == 'paused':
|
||||
container.unpause()
|
||||
elif container.status == 'exited':
|
||||
container.start()
|
||||
|
||||
return True
|
||||
except (NotFound, APIError):
|
||||
return False
|
||||
|
||||
async def pause_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Pause a running sandbox."""
|
||||
try:
|
||||
if not sandbox_id.startswith(self.container_name_prefix):
|
||||
return False
|
||||
container = self.docker_client.containers.get(sandbox_id)
|
||||
|
||||
if container.status == 'running':
|
||||
container.pause()
|
||||
|
||||
return True
|
||||
except (NotFound, APIError):
|
||||
return False
|
||||
|
||||
async def delete_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Delete a sandbox."""
|
||||
try:
|
||||
if not sandbox_id.startswith(self.container_name_prefix):
|
||||
return False
|
||||
container = self.docker_client.containers.get(sandbox_id)
|
||||
|
||||
# Stop the container if it's running
|
||||
if container.status in ['running', 'paused']:
|
||||
container.stop(timeout=10)
|
||||
|
||||
# Remove the container
|
||||
container.remove()
|
||||
|
||||
# Remove associated volume
|
||||
try:
|
||||
volume_name = f'openhands-workspace-{sandbox_id}'
|
||||
volume = self.docker_client.volumes.get(volume_name)
|
||||
volume.remove()
|
||||
except (NotFound, APIError):
|
||||
# Volume might not exist or already removed
|
||||
pass
|
||||
|
||||
return True
|
||||
except (NotFound, APIError):
|
||||
return False
|
||||
|
||||
|
||||
class DockerSandboxServiceInjector(SandboxServiceInjector):
|
||||
"""Dependency injector for docker sandbox services."""
|
||||
|
||||
container_url_pattern: str = 'http://localhost:{port}'
|
||||
host_port: int = 3000
|
||||
container_name_prefix: str = 'oh-agent-server-'
|
||||
mounts: list[VolumeMount] = Field(default_factory=list)
|
||||
exposed_ports: list[ExposedPort] = Field(
|
||||
default_factory=lambda: [
|
||||
ExposedPort(
|
||||
name=AGENT_SERVER,
|
||||
description=(
|
||||
'The port on which the agent server runs within the container'
|
||||
),
|
||||
container_port=8000,
|
||||
),
|
||||
ExposedPort(
|
||||
name=VSCODE,
|
||||
description=(
|
||||
'The port on which the VSCode server runs within the container'
|
||||
),
|
||||
container_port=8001,
|
||||
),
|
||||
]
|
||||
)
|
||||
health_check_path: str | None = Field(
|
||||
default='/health',
|
||||
description=(
|
||||
'The url path in the sandbox agent server to check to '
|
||||
'determine whether the server is running'
|
||||
),
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[SandboxService, None]:
|
||||
# Define inline to prevent circular lookup
|
||||
from openhands.app_server.config import (
|
||||
get_httpx_client,
|
||||
get_sandbox_spec_service,
|
||||
)
|
||||
|
||||
async with (
|
||||
get_httpx_client(state) as httpx_client,
|
||||
get_sandbox_spec_service(state) as sandbox_spec_service,
|
||||
):
|
||||
yield DockerSandboxService(
|
||||
sandbox_spec_service=sandbox_spec_service,
|
||||
container_name_prefix=self.container_name_prefix,
|
||||
host_port=self.host_port,
|
||||
container_url_pattern=self.container_url_pattern,
|
||||
mounts=self.mounts,
|
||||
exposed_ports=self.exposed_ports,
|
||||
health_check_path=self.health_check_path,
|
||||
httpx_client=httpx_client,
|
||||
)
|
||||
90
openhands/app_server/sandbox/docker_sandbox_spec_service.py
Normal file
@ -0,0 +1,90 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import docker
|
||||
from fastapi import Request
|
||||
from pydantic import Field
|
||||
|
||||
from openhands.app_server.errors import SandboxError
|
||||
from openhands.app_server.sandbox.preset_sandbox_spec_service import (
|
||||
PresetSandboxSpecService,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import (
|
||||
SandboxSpecInfo,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import (
|
||||
AGENT_SERVER_VERSION,
|
||||
SandboxSpecService,
|
||||
SandboxSpecServiceInjector,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
|
||||
_global_docker_client: docker.DockerClient | None = None
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_docker_client() -> docker.DockerClient:
|
||||
global _global_docker_client
|
||||
if _global_docker_client is None:
|
||||
_global_docker_client = docker.from_env()
|
||||
return _global_docker_client
|
||||
|
||||
|
||||
def get_default_sandbox_specs():
|
||||
return [
|
||||
SandboxSpecInfo(
|
||||
id=f'ghcr.io/all-hands-ai/agent-server:{AGENT_SERVER_VERSION[:7]}-python',
|
||||
command=['--port', '8000'],
|
||||
initial_env={
|
||||
'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server',
|
||||
'OH_ENABLE_VNC': '0',
|
||||
'LOG_JSON': 'true',
|
||||
'OH_CONVERSATIONS_PATH': '/home/openhands/conversations',
|
||||
'OH_BASH_EVENTS_DIR': '/home/openhands/bash_events',
|
||||
},
|
||||
working_dir='/home/openhands/workspace',
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class DockerSandboxSpecServiceInjector(SandboxSpecServiceInjector):
|
||||
specs: list[SandboxSpecInfo] = Field(
|
||||
default_factory=get_default_sandbox_specs,
|
||||
description='Preset list of sandbox specs',
|
||||
)
|
||||
pull_if_missing: bool = Field(
|
||||
default=True,
|
||||
description=(
|
||||
'Flag indicating that any missing specs should be pulled from '
|
||||
'remote repositories.'
|
||||
),
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[SandboxSpecService, None]:
|
||||
if self.pull_if_missing:
|
||||
await self.pull_missing_specs()
|
||||
# Prevent repeated checks - more efficient but it does mean if you
|
||||
# delete a docker image outside the app you need to restart
|
||||
self.pull_if_missing = False
|
||||
yield PresetSandboxSpecService(specs=self.specs)
|
||||
|
||||
async def pull_missing_specs(self):
|
||||
await asyncio.gather(*[self.pull_spec_if_missing(spec) for spec in self.specs])
|
||||
|
||||
async def pull_spec_if_missing(self, spec: SandboxSpecInfo):
|
||||
_logger.debug(f'Checking Docker Image: {spec.id}')
|
||||
try:
|
||||
docker_client = get_docker_client()
|
||||
try:
|
||||
docker_client.images.get(spec.id)
|
||||
except docker.errors.ImageNotFound:
|
||||
_logger.info(f'⬇️ Pulling Docker Image: {spec.id}')
|
||||
# Pull in a background thread to prevent locking up the main runloop
|
||||
loop = asyncio.get_running_loop()
|
||||
await loop.run_in_executor(None, docker_client.images.pull, spec.id)
|
||||
_logger.info(f'⬇️ Finished Pulling Docker Image: {spec.id}')
|
||||
except docker.errors.APIError as exc:
|
||||
raise SandboxError(f'Error Getting Docker Image: {spec.id}') from exc
|
||||
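A brief aside on the pull-in-executor pattern used above: the blocking docker-py calls are handed off to a worker thread so the event loop stays responsive while large images download. A minimal, self-contained sketch of the same idea (the image tag below is only an illustrative placeholder):

```python
import asyncio

import docker


async def ensure_image(tag: str) -> None:
    """Pull a Docker image only if it is not already present locally."""
    client = docker.from_env()
    loop = asyncio.get_running_loop()
    try:
        # images.get is a blocking call, so run it in a worker thread
        await loop.run_in_executor(None, client.images.get, tag)
    except docker.errors.ImageNotFound:
        # Pulls can take minutes, so keep them off the main runloop as well
        await loop.run_in_executor(None, client.images.pull, tag)


# Example usage with a placeholder tag:
# asyncio.run(ensure_image('python:3.12-slim'))
```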
48
openhands/app_server/sandbox/preset_sandbox_spec_service.py
Normal file
@ -0,0 +1,48 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import (
|
||||
SandboxSpecInfo,
|
||||
SandboxSpecInfoPage,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import (
|
||||
SandboxSpecService,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PresetSandboxSpecService(SandboxSpecService):
|
||||
"""Service which uses a preset set of sandbox specs."""
|
||||
|
||||
specs: list[SandboxSpecInfo]
|
||||
|
||||
async def search_sandbox_specs(
|
||||
self, page_id: str | None = None, limit: int = 100
|
||||
) -> SandboxSpecInfoPage:
|
||||
"""Search for sandbox specs with pagination support."""
|
||||
# Apply pagination
|
||||
start_idx = 0
|
||||
if page_id:
|
||||
try:
|
||||
start_idx = int(page_id)
|
||||
except ValueError:
|
||||
start_idx = 0
|
||||
|
||||
end_idx = start_idx + limit
|
||||
paginated_specs = self.specs[start_idx:end_idx]
|
||||
|
||||
# Determine next page ID
|
||||
next_page_id = None
|
||||
if end_idx < len(self.specs):
|
||||
next_page_id = str(end_idx)
|
||||
|
||||
return SandboxSpecInfoPage(items=paginated_specs, next_page_id=next_page_id)
|
||||
|
||||
async def get_sandbox_spec(self, sandbox_spec_id: str) -> SandboxSpecInfo | None:
|
||||
"""Get a single sandbox spec by ID, returning None if not found."""
|
||||
for spec in self.specs:
|
||||
if spec.id == sandbox_spec_id:
|
||||
return spec
|
||||
return None
|
||||
|
||||
async def get_default_sandbox_spec(self) -> SandboxSpecInfo:
|
||||
return self.specs[0]
|
||||
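For readers skimming the pagination logic above: `next_page_id` is simply the stringified offset of the next item, so walking every page is a small loop. A sketch under that reading (the two specs and the tiny limit are made up for illustration):

```python
import asyncio

from openhands.app_server.sandbox.preset_sandbox_spec_service import (
    PresetSandboxSpecService,
)
from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo


async def list_all_spec_ids() -> list[str]:
    # Hypothetical specs purely for illustration
    service = PresetSandboxSpecService(
        specs=[
            SandboxSpecInfo(id='example-image:a', command=None),
            SandboxSpecInfo(id='example-image:b', command=None),
        ]
    )
    ids: list[str] = []
    page_id: str | None = None
    while True:
        page = await service.search_sandbox_specs(page_id=page_id, limit=1)
        ids.extend(spec.id for spec in page.items)
        page_id = page.next_page_id
        if page_id is None:
            return ids


# asyncio.run(list_all_spec_ids()) -> ['example-image:a', 'example-image:b']
```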
615
openhands/app_server/sandbox/remote_sandbox_service.py
Normal file
@ -0,0 +1,615 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, AsyncGenerator, Union
|
||||
|
||||
import base62
|
||||
import httpx
|
||||
from fastapi import Request
|
||||
from pydantic import Field
|
||||
from sqlalchemy import Column, String, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from openhands.agent_server.models import ConversationInfo, EventPage
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.app_conversation.app_conversation_info_service import (
|
||||
AppConversationInfoService,
|
||||
)
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationInfo,
|
||||
)
|
||||
from openhands.app_server.errors import SandboxError
|
||||
from openhands.app_server.event.event_service import EventService
|
||||
from openhands.app_server.event_callback.event_callback_service import (
|
||||
EventCallbackService,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_models import (
|
||||
AGENT_SERVER,
|
||||
ExposedUrl,
|
||||
SandboxInfo,
|
||||
SandboxPage,
|
||||
SandboxStatus,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_service import (
|
||||
SandboxService,
|
||||
SandboxServiceInjector,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.specifiy_user_context import ADMIN, USER_CONTEXT_ATTR
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.app_server.utils.sql_utils import Base, UtcDateTime
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL'
|
||||
polling_task: asyncio.Task | None = None
|
||||
POD_STATUS_MAPPING = {
|
||||
'ready': SandboxStatus.RUNNING,
|
||||
'pending': SandboxStatus.STARTING,
|
||||
'running': SandboxStatus.STARTING,
|
||||
'failed': SandboxStatus.ERROR,
|
||||
'unknown': SandboxStatus.ERROR,
|
||||
'crashloopbackoff': SandboxStatus.ERROR,
|
||||
}
|
||||
STATUS_MAPPING = {
|
||||
'running': SandboxStatus.RUNNING,
|
||||
'paused': SandboxStatus.PAUSED,
|
||||
'stopped': SandboxStatus.MISSING,
|
||||
'starting': SandboxStatus.STARTING,
|
||||
'error': SandboxStatus.ERROR,
|
||||
}
|
||||
|
||||
|
||||
class StoredRemoteSandbox(Base): # type: ignore
|
||||
"""Local storage for remote sandbox info.
|
||||
|
||||
The remote runtime API does not return some variables we need, and does not
|
||||
return stopped runtimes in list operations, so we need a local copy. We use
|
||||
the remote API as the source of truth on what is currently running, not what was
|
||||
run historically."""
|
||||
|
||||
__tablename__ = 'v1_remote_sandbox'
|
||||
id = Column(String, primary_key=True)
|
||||
created_by_user_id = Column(String, nullable=True, index=True)
|
||||
sandbox_spec_id = Column(String, index=True) # shadows runtime['image']
|
||||
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RemoteSandboxService(SandboxService):
|
||||
"""Sandbox service that uses HTTP to communicate with a remote runtime API.
|
||||
|
||||
This service adapts the legacy RemoteRuntime HTTP protocol to work with
|
||||
the new Sandbox interface.
|
||||
"""
|
||||
|
||||
sandbox_spec_service: SandboxSpecService
|
||||
api_url: str
|
||||
api_key: str
|
||||
web_url: str | None
|
||||
resource_factor: int
|
||||
runtime_class: str | None
|
||||
start_sandbox_timeout: int
|
||||
user_context: UserContext
|
||||
httpx_client: httpx.AsyncClient
|
||||
db_session: AsyncSession
|
||||
|
||||
async def _send_runtime_api_request(
|
||||
self, method: str, path: str, **kwargs: Any
|
||||
) -> httpx.Response:
|
||||
"""Send a request to the remote runtime API."""
|
||||
try:
|
||||
url = self.api_url + path
|
||||
return await self.httpx_client.request(
|
||||
method, url, headers={'X-API-Key': self.api_key}, **kwargs
|
||||
)
|
||||
except httpx.TimeoutException:
|
||||
_logger.error(f'No response received within timeout for URL: {url}')
|
||||
raise
|
||||
except httpx.HTTPError as e:
|
||||
_logger.error(f'HTTP error for URL {url}: {e}')
|
||||
raise
|
||||
|
||||
async def _to_sandbox_info(
|
||||
self, stored: StoredRemoteSandbox, runtime: dict[str, Any] | None = None
|
||||
) -> SandboxInfo:
|
||||
# If we were not passed runtime data, load it now
|
||||
if runtime is None:
|
||||
try:
|
||||
runtime = await self._get_runtime(stored.id)
|
||||
except Exception:
|
||||
_logger.exception(f'Error getting runtime: {stored.id}', stack_info=True)
|
||||
|
||||
if runtime:
|
||||
# Translate status
|
||||
status = None
|
||||
pod_status = runtime['pod_status'].lower()
|
||||
if pod_status:
|
||||
status = POD_STATUS_MAPPING.get(pod_status, None)
|
||||
|
||||
# If we failed to get the status from the pod status, fall back to status
|
||||
if status is None:
|
||||
runtime_status = runtime.get('status')
|
||||
if runtime_status:
|
||||
status = STATUS_MAPPING.get(runtime_status.lower(), None)
|
||||
|
||||
if status is None:
|
||||
status = SandboxStatus.MISSING
|
||||
|
||||
session_api_key = runtime['session_api_key']
|
||||
if status == SandboxStatus.RUNNING:
|
||||
exposed_urls = []
|
||||
url = runtime.get('url', None)
|
||||
if url:
|
||||
exposed_urls.append(ExposedUrl(name=AGENT_SERVER, url=url))
|
||||
else:
|
||||
exposed_urls = None
|
||||
else:
|
||||
session_api_key = None
|
||||
status = SandboxStatus.MISSING
|
||||
exposed_urls = None
|
||||
|
||||
sandbox_spec_id = stored.sandbox_spec_id
|
||||
return SandboxInfo(
|
||||
id=stored.id,
|
||||
created_by_user_id=stored.created_by_user_id,
|
||||
sandbox_spec_id=sandbox_spec_id,
|
||||
status=status,
|
||||
session_api_key=session_api_key,
|
||||
exposed_urls=exposed_urls,
|
||||
created_at=stored.created_at,
|
||||
)
|
||||
|
||||
async def _secure_select(self):
|
||||
query = select(StoredRemoteSandbox)
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if user_id:
|
||||
query = query.where(StoredRemoteSandbox.created_by_user_id == user_id)
|
||||
return query
|
||||
|
||||
async def _get_stored_sandbox(self, sandbox_id: str) -> StoredRemoteSandbox | None:
|
||||
stmt = await self._secure_select()
|
||||
stmt = stmt.where(StoredRemoteSandbox.id == sandbox_id)
|
||||
result = await self.db_session.execute(stmt)
|
||||
stored_sandbox = result.scalar_one_or_none()
|
||||
return stored_sandbox
|
||||
|
||||
async def _get_runtime(self, sandbox_id: str) -> dict[str, Any]:
|
||||
response = await self._send_runtime_api_request(
|
||||
'GET',
|
||||
f'/sessions/{sandbox_id}',
|
||||
)
|
||||
response.raise_for_status()
|
||||
runtime_data = response.json()
|
||||
return runtime_data
|
||||
|
||||
async def _init_environment(
|
||||
self, sandbox_spec: SandboxSpecInfo, sandbox_id: str
|
||||
) -> dict[str, str]:
|
||||
"""Initialize the environment variables for the sandbox."""
|
||||
environment = sandbox_spec.initial_env.copy()
|
||||
|
||||
# If a public facing url is defined, add a callback to the agent server environment.
|
||||
if self.web_url:
|
||||
environment[WEBHOOK_CALLBACK_VARIABLE] = (
|
||||
f'{self.web_url}/api/v1/webhooks/{sandbox_id}'
|
||||
)
|
||||
|
||||
return environment
|
||||
|
||||
async def search_sandboxes(
|
||||
self,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> SandboxPage:
|
||||
stmt = await self._secure_select()
|
||||
|
||||
# Handle pagination
|
||||
if page_id is not None:
|
||||
# Parse page_id to get offset or cursor
|
||||
try:
|
||||
offset = int(page_id)
|
||||
stmt = stmt.offset(offset)
|
||||
except ValueError:
|
||||
# If page_id is not a valid integer, start from beginning
|
||||
offset = 0
|
||||
else:
|
||||
offset = 0
|
||||
|
||||
# Apply limit and get one extra to check if there are more results
|
||||
stmt = stmt.limit(limit + 1).order_by(StoredRemoteSandbox.created_at.desc())
|
||||
|
||||
result = await self.db_session.execute(stmt)
|
||||
stored_sandboxes = result.scalars().all()
|
||||
|
||||
# Check if there are more results
|
||||
has_more = len(stored_sandboxes) > limit
|
||||
if has_more:
|
||||
stored_sandboxes = stored_sandboxes[:limit]
|
||||
|
||||
# Calculate next page ID
|
||||
next_page_id = None
|
||||
if has_more:
|
||||
next_page_id = str(offset + limit)
|
||||
|
||||
# Convert stored callbacks to domain models
|
||||
items = await asyncio.gather(
|
||||
*[
|
||||
self._to_sandbox_info(stored_sandbox)
|
||||
for stored_sandbox in stored_sandboxes
|
||||
]
|
||||
)
|
||||
|
||||
return SandboxPage(items=items, next_page_id=next_page_id)
|
||||
|
||||
async def get_sandbox(self, sandbox_id: str) -> Union[SandboxInfo, None]:
|
||||
"""Get a single sandbox by checking its corresponding runtime."""
|
||||
stored_sandbox = await self._get_stored_sandbox(sandbox_id)
|
||||
if stored_sandbox is None:
|
||||
return None
|
||||
return await self._to_sandbox_info(stored_sandbox)
|
||||
|
||||
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
|
||||
"""Start a new sandbox by creating a remote runtime."""
|
||||
try:
|
||||
# Get sandbox spec
|
||||
if sandbox_spec_id is None:
|
||||
sandbox_spec = (
|
||||
await self.sandbox_spec_service.get_default_sandbox_spec()
|
||||
)
|
||||
else:
|
||||
sandbox_spec_maybe = await self.sandbox_spec_service.get_sandbox_spec(
|
||||
sandbox_spec_id
|
||||
)
|
||||
if sandbox_spec_maybe is None:
|
||||
raise ValueError('Sandbox Spec not found')
|
||||
sandbox_spec = sandbox_spec_maybe
|
||||
|
||||
# Create a unique id
|
||||
sandbox_id = base62.encodebytes(os.urandom(16))
|
||||
|
||||
# get user id
|
||||
user_id = await self.user_context.get_user_id()
|
||||
|
||||
# Store the sandbox
|
||||
stored_sandbox = StoredRemoteSandbox(
|
||||
id=sandbox_id,
|
||||
created_by_user_id=user_id,
|
||||
sandbox_spec_id=sandbox_spec.id,
|
||||
created_at=utc_now(),
|
||||
)
|
||||
self.db_session.add(stored_sandbox)
|
||||
await self.db_session.commit()
|
||||
|
||||
# Prepare environment variables
|
||||
environment = await self._init_environment(sandbox_spec, sandbox_id)
|
||||
|
||||
# Prepare start request
|
||||
start_request: dict[str, Any] = {
|
||||
'image': sandbox_spec.id, # Use sandbox_spec.id as the container image
|
||||
'command': sandbox_spec.command,
|
||||
#'command': ['python', '-c', 'import time; time.sleep(300)'],
|
||||
'working_dir': sandbox_spec.working_dir,
|
||||
'environment': environment,
|
||||
'session_id': sandbox_id, # Use sandbox_id as session_id
|
||||
'resource_factor': self.resource_factor,
|
||||
'run_as_user': 1000,
|
||||
'run_as_group': 1000,
|
||||
'fs_group': 1000,
|
||||
}
|
||||
|
||||
# Add runtime class if specified
|
||||
if self.runtime_class == 'sysbox':
|
||||
start_request['runtime_class'] = 'sysbox-runc'
|
||||
|
||||
# Start the runtime
|
||||
response = await self._send_runtime_api_request(
|
||||
'POST',
|
||||
'/start',
|
||||
json=start_request,
|
||||
)
|
||||
response.raise_for_status()
|
||||
runtime_data = response.json()
|
||||
|
||||
# Hack - result doesn't contain this
|
||||
runtime_data['pod_status'] = 'pending'
|
||||
|
||||
return await self._to_sandbox_info(stored_sandbox, runtime_data)
|
||||
|
||||
except httpx.HTTPError as e:
|
||||
_logger.error(f'Failed to start sandbox: {e}')
|
||||
raise SandboxError(f'Failed to start sandbox: {e}')
|
||||
|
||||
async def resume_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Resume a paused sandbox."""
|
||||
try:
|
||||
if not await self._get_stored_sandbox(sandbox_id):
|
||||
return False
|
||||
runtime_data = await self._get_runtime(sandbox_id)
|
||||
response = await self._send_runtime_api_request(
|
||||
'POST',
|
||||
'/resume',
|
||||
json={'runtime_id': runtime_data['runtime_id']},
|
||||
)
|
||||
if response.status_code == 404:
|
||||
return False
|
||||
response.raise_for_status()
|
||||
return True
|
||||
except httpx.HTTPError as e:
|
||||
_logger.error(f'Error resuming sandbox {sandbox_id}: {e}')
|
||||
return False
|
||||
|
||||
async def pause_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Pause a running sandbox."""
|
||||
try:
|
||||
if not await self._get_stored_sandbox(sandbox_id):
|
||||
return False
|
||||
runtime_data = await self._get_runtime(sandbox_id)
|
||||
response = await self._send_runtime_api_request(
|
||||
'POST',
|
||||
'/pause',
|
||||
json={'runtime_id': runtime_data['runtime_id']},
|
||||
)
|
||||
if response.status_code == 404:
|
||||
return False
|
||||
response.raise_for_status()
|
||||
return True
|
||||
|
||||
except httpx.HTTPError as e:
|
||||
_logger.error(f'Error pausing sandbox {sandbox_id}: {e}')
|
||||
return False
|
||||
|
||||
async def delete_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Delete a sandbox by stopping its runtime."""
|
||||
try:
|
||||
stored_sandbox = await self._get_stored_sandbox(sandbox_id)
|
||||
if not stored_sandbox:
|
||||
return False
|
||||
await self.db_session.delete(stored_sandbox)
|
||||
await self.db_session.commit()
|
||||
runtime_data = await self._get_runtime(sandbox_id)
|
||||
response = await self._send_runtime_api_request(
|
||||
'POST',
|
||||
'/stop',
|
||||
json={'runtime_id': runtime_data['runtime_id']},
|
||||
)
|
||||
if response.status_code != 404:
|
||||
response.raise_for_status()
|
||||
return True
|
||||
except httpx.HTTPError as e:
|
||||
_logger.error(f'Error deleting sandbox {sandbox_id}: {e}')
|
||||
return False
|
||||
|
||||
|
||||
async def poll_agent_servers(api_url: str, api_key: str, sleep_interval: int):
|
||||
"""When the app server does not have a public facing url, we poll the agent
|
||||
servers for the most recent data.
|
||||
|
||||
This is because webhook callbacks cannot be invoked."""
|
||||
from openhands.app_server.config import (
|
||||
get_app_conversation_info_service,
|
||||
get_event_callback_service,
|
||||
get_event_service,
|
||||
get_httpx_client,
|
||||
)
|
||||
|
||||
while True:
|
||||
try:
|
||||
# Refresh the conversations associated with those sandboxes.
|
||||
state = InjectorState()
|
||||
|
||||
try:
|
||||
# Get the list of running sandboxes using the runtime api /list endpoint.
|
||||
# (This will not return runtimes that have been stopped for a while)
|
||||
async with get_httpx_client(state) as httpx_client:
|
||||
response = await httpx_client.get(
|
||||
f'{api_url}/list', headers={'X-API-Key': api_key}
|
||||
)
|
||||
response.raise_for_status()
|
||||
runtimes = response.json()['runtimes']
|
||||
runtimes_by_sandbox_id = {
|
||||
runtime['session_id']: runtime
|
||||
for runtime in runtimes
|
||||
# The runtime API currently reports a running status when
|
||||
# pods are still starting. Resync can tolerate this.
|
||||
if runtime['status'] == 'running'
|
||||
}
|
||||
|
||||
# We allow access to all items here
|
||||
setattr(state, USER_CONTEXT_ATTR, ADMIN)
|
||||
async with (
|
||||
get_app_conversation_info_service(
|
||||
state
|
||||
) as app_conversation_info_service,
|
||||
get_event_service(state) as event_service,
|
||||
get_event_callback_service(state) as event_callback_service,
|
||||
get_httpx_client(state) as httpx_client,
|
||||
):
|
||||
page_id = None
|
||||
matches = 0
|
||||
while True:
|
||||
page = await app_conversation_info_service.search_app_conversation_info(
|
||||
page_id=page_id
|
||||
)
|
||||
for app_conversation_info in page.items:
|
||||
runtime = runtimes_by_sandbox_id.get(
|
||||
app_conversation_info.sandbox_id
|
||||
)
|
||||
if runtime:
|
||||
matches += 1
|
||||
await refresh_conversation(
|
||||
app_conversation_info_service=app_conversation_info_service,
|
||||
event_service=event_service,
|
||||
event_callback_service=event_callback_service,
|
||||
app_conversation_info=app_conversation_info,
|
||||
runtime=runtime,
|
||||
httpx_client=httpx_client,
|
||||
)
|
||||
page_id = page.next_page_id
|
||||
if page_id is None:
|
||||
_logger.debug(
|
||||
f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.'
|
||||
)
|
||||
break
|
||||
|
||||
except Exception as exc:
|
||||
_logger.exception(
|
||||
f'Error when polling agent servers: {exc}', stack_info=True
|
||||
)
|
||||
|
||||
# Sleep between retries
|
||||
await asyncio.sleep(sleep_interval)
|
||||
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
|
||||
|
||||
async def refresh_conversation(
|
||||
app_conversation_info_service: AppConversationInfoService,
|
||||
event_service: EventService,
|
||||
event_callback_service: EventCallbackService,
|
||||
app_conversation_info: AppConversationInfo,
|
||||
runtime: dict[str, Any],
|
||||
httpx_client: httpx.AsyncClient,
|
||||
):
|
||||
"""Refresh a conversation.
|
||||
|
||||
Grab ConversationInfo and all events from the agent server and make sure they
|
||||
exist in the app server."""
|
||||
_logger.debug(f'Started Refreshing Conversation {app_conversation_info.id}')
|
||||
try:
|
||||
url = runtime['url']
|
||||
|
||||
# TODO: Maybe we can use RemoteConversation here?
|
||||
|
||||
# First get conversation...
|
||||
conversation_url = f'{url}/api/conversations/{app_conversation_info.id.hex}'
|
||||
response = await httpx_client.get(
|
||||
conversation_url, headers={'X-Session-API-Key': runtime['session_api_key']}
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
updated_conversation_info = ConversationInfo.model_validate(response.json())
|
||||
|
||||
# TODO: As of writing, ConversationInfo from AgentServer does not have a title to update...
|
||||
app_conversation_info.updated_at = updated_conversation_info.updated_at
|
||||
# TODO: Update other appropriate attributes...
|
||||
|
||||
await app_conversation_info_service.save_app_conversation_info(
|
||||
app_conversation_info
|
||||
)
|
||||
|
||||
# TODO: It would be nice to have an updated_at__gte filter parameter in the
|
||||
# agent server so that we don't pull the full event list each time
|
||||
event_url = (
|
||||
f'{url}/api/conversations/{app_conversation_info.id.hex}/events/search'
|
||||
)
|
||||
page_id = None
|
||||
while True:
|
||||
params: dict[str, str] = {}
|
||||
if page_id:
|
||||
params['page_id'] = page_id # type: ignore[unreachable]
|
||||
response = await httpx_client.get(
|
||||
event_url,
|
||||
params=params,
|
||||
headers={'X-Session-API-Key': runtime['session_api_key']},
|
||||
)
|
||||
response.raise_for_status()
|
||||
page = EventPage.model_validate(response.json())
|
||||
|
||||
to_process = []
|
||||
for event in page.items:
|
||||
existing = await event_service.get_event(event.id)
|
||||
if existing is None:
|
||||
await event_service.save_event(app_conversation_info.id, event)
|
||||
to_process.append(event)
|
||||
|
||||
for event in to_process:
|
||||
await event_callback_service.execute_callbacks(
|
||||
app_conversation_info.id, event
|
||||
)
|
||||
|
||||
page_id = page.next_page_id
|
||||
if page_id is None:
|
||||
_logger.debug(
|
||||
f'Finished Refreshing Conversation {app_conversation_info.id}'
|
||||
)
|
||||
break
|
||||
|
||||
except Exception as exc:
|
||||
_logger.exception(f'Error Refreshing Conversation: {exc}', stack_info=True)
|
||||
|
||||
|
||||
class RemoteSandboxServiceInjector(SandboxServiceInjector):
|
||||
"""Dependency injector for remote sandbox services."""
|
||||
|
||||
api_url: str = Field(description='The API URL for remote runtimes')
|
||||
api_key: str = Field(description='The API Key for remote runtimes')
|
||||
polling_interval: int = Field(
|
||||
default=15,
|
||||
description=(
|
||||
'The sleep time between poll operations against agent servers when there is '
|
||||
'no public facing web_url'
|
||||
),
|
||||
)
|
||||
resource_factor: int = Field(
|
||||
default=1,
|
||||
description='Factor by which to scale resources in sandbox: 1, 2, 4, or 8',
|
||||
)
|
||||
runtime_class: str = Field(
|
||||
default='gvisor',
|
||||
description='can be "gvisor" or "sysbox" (support docker inside runtime + more stable)',
|
||||
)
|
||||
start_sandbox_timeout: int = Field(
|
||||
default=120,
|
||||
description=(
|
||||
'The max time to wait for a sandbox to start before considering it to '
|
||||
'be in an error state.'
|
||||
),
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[SandboxService, None]:
|
||||
# Define inline to prevent circular lookup
|
||||
from openhands.app_server.config import (
|
||||
get_db_session,
|
||||
get_global_config,
|
||||
get_httpx_client,
|
||||
get_sandbox_spec_service,
|
||||
get_user_context,
|
||||
)
|
||||
|
||||
# If no public facing web url is defined, poll for changes as callbacks will be unavailable.
|
||||
config = get_global_config()
|
||||
web_url = config.web_url
|
||||
if web_url is None:
|
||||
global polling_task
|
||||
if polling_task is None:
|
||||
polling_task = asyncio.create_task(
|
||||
poll_agent_servers(
|
||||
api_url=self.api_url,
|
||||
api_key=self.api_key,
|
||||
sleep_interval=self.polling_interval,
|
||||
)
|
||||
)
|
||||
async with (
|
||||
get_user_context(state, request) as user_context,
|
||||
get_sandbox_spec_service(state, request) as sandbox_spec_service,
|
||||
get_httpx_client(state, request) as httpx_client,
|
||||
get_db_session(state, request) as db_session,
|
||||
):
|
||||
yield RemoteSandboxService(
|
||||
sandbox_spec_service=sandbox_spec_service,
|
||||
api_url=self.api_url,
|
||||
api_key=self.api_key,
|
||||
web_url=web_url,
|
||||
resource_factor=self.resource_factor,
|
||||
runtime_class=self.runtime_class,
|
||||
start_sandbox_timeout=self.start_sandbox_timeout,
|
||||
user_context=user_context,
|
||||
httpx_client=httpx_client,
|
||||
db_session=db_session,
|
||||
)
|
||||
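The status translation inside `_to_sandbox_info` above is easy to miss: the pod status takes precedence, the coarser runtime status is only a fallback, and anything unrecognised collapses to MISSING. A condensed sketch of that decision order, reusing the mapping tables defined in this file:

```python
from typing import Any

from openhands.app_server.sandbox.remote_sandbox_service import (
    POD_STATUS_MAPPING,
    STATUS_MAPPING,
)
from openhands.app_server.sandbox.sandbox_models import SandboxStatus


def translate_status(runtime: dict[str, Any]) -> SandboxStatus:
    """Mirror the fallback order used when converting runtime data."""
    # 1. Prefer the fine-grained pod status reported by the runtime API
    status = POD_STATUS_MAPPING.get(runtime.get('pod_status', '').lower())
    if status is None:
        # 2. Fall back to the coarser runtime status, if present
        runtime_status = runtime.get('status')
        if runtime_status:
            status = STATUS_MAPPING.get(runtime_status.lower())
    # 3. Anything else is treated as missing (possibly deleted)
    return status or SandboxStatus.MISSING


# Example: a pending pod is reported as STARTING
# translate_status({'pod_status': 'pending'}) -> SandboxStatus.STARTING
```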
46
openhands/app_server/sandbox/remote_sandbox_spec_service.py
Normal file
@ -0,0 +1,46 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from fastapi import Request
|
||||
from pydantic import Field
|
||||
|
||||
from openhands.app_server.sandbox.preset_sandbox_spec_service import (
|
||||
PresetSandboxSpecService,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import (
|
||||
SandboxSpecInfo,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import (
|
||||
AGENT_SERVER_VERSION,
|
||||
SandboxSpecService,
|
||||
SandboxSpecServiceInjector,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
|
||||
|
||||
def get_default_sandbox_specs():
|
||||
return [
|
||||
SandboxSpecInfo(
|
||||
id=f'ghcr.io/all-hands-ai/agent-server:{AGENT_SERVER_VERSION[:7]}-python',
|
||||
command=['/usr/local/bin/openhands-agent-server', '--port', '60000'],
|
||||
initial_env={
|
||||
'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server',
|
||||
'LOG_JSON': 'true',
|
||||
'OH_ENABLE_VNC': '0',
|
||||
'OH_CONVERSATIONS_PATH': '/workspace/conversations',
|
||||
'OH_BASH_EVENTS_DIR': '/workspace/bash_events',
|
||||
},
|
||||
working_dir='/workspace/projects',
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class RemoteSandboxSpecServiceInjector(SandboxSpecServiceInjector):
|
||||
specs: list[SandboxSpecInfo] = Field(
|
||||
default_factory=get_default_sandbox_specs,
|
||||
description='Preset list of sandbox specs',
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[SandboxSpecService, None]:
|
||||
yield PresetSandboxSpecService(self.specs)
|
||||
58
openhands/app_server/sandbox/sandbox_models.py
Normal file
@ -0,0 +1,58 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
|
||||
|
||||
class SandboxStatus(Enum):
|
||||
STARTING = 'STARTING'
|
||||
RUNNING = 'RUNNING'
|
||||
PAUSED = 'PAUSED'
|
||||
ERROR = 'ERROR'
|
||||
MISSING = 'MISSING'
|
||||
"""Missing - possibly deleted"""
|
||||
|
||||
|
||||
class ExposedUrl(BaseModel):
|
||||
"""URL to access some named service within the container."""
|
||||
|
||||
name: str
|
||||
url: str
|
||||
|
||||
|
||||
# Standard names
|
||||
AGENT_SERVER = 'AGENT_SERVER'
|
||||
VSCODE = 'VSCODE'
|
||||
|
||||
|
||||
class SandboxInfo(BaseModel):
|
||||
"""Information about a sandbox."""
|
||||
|
||||
id: str
|
||||
created_by_user_id: str | None
|
||||
sandbox_spec_id: str
|
||||
status: SandboxStatus
|
||||
session_api_key: str | None = Field(
|
||||
description=(
|
||||
'Key to access sandbox, to be added as an `X-Session-API-Key` header '
|
||||
'in each request. In cases where the sandbox status is STARTING or '
|
||||
'PAUSED, or the current user does not have full access, '
|
||||
'the session_api_key will be None.'
|
||||
)
|
||||
)
|
||||
exposed_urls: list[ExposedUrl] | None = Field(
|
||||
default_factory=lambda: [],
|
||||
description=(
|
||||
'URLs exposed by the sandbox (App server, VSCode, etc...). '
|
||||
'Sandboxes with a status STARTING / PAUSED / ERROR may '
|
||||
'not return urls.'
|
||||
),
|
||||
)
|
||||
created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class SandboxPage(BaseModel):
|
||||
items: list[SandboxInfo]
|
||||
next_page_id: str | None = None
|
||||
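To make the field semantics above concrete, a RUNNING sandbox serialises roughly as follows (all values are illustrative placeholders; only RUNNING sandboxes are expected to carry a session key and exposed URLs):

```python
from openhands.app_server.sandbox.sandbox_models import (
    AGENT_SERVER,
    ExposedUrl,
    SandboxInfo,
    SandboxStatus,
)

# Placeholder values; STARTING / PAUSED / MISSING sandboxes may instead have
# session_api_key and exposed_urls set to None.
info = SandboxInfo(
    id='example-sandbox-id',
    created_by_user_id='example-user',
    sandbox_spec_id='example-image:tag',
    status=SandboxStatus.RUNNING,
    session_api_key='example-session-key',
    exposed_urls=[ExposedUrl(name=AGENT_SERVER, url='https://example.invalid')],
)
print(info.model_dump_json(indent=2))
```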
91
openhands/app_server/sandbox/sandbox_router.py
Normal file
@ -0,0 +1,91 @@
|
||||
"""Runtime Containers router for OpenHands Server."""
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query, status
|
||||
|
||||
from openhands.agent_server.models import Success
|
||||
from openhands.app_server.config import depends_sandbox_service
|
||||
from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxPage
|
||||
from openhands.app_server.sandbox.sandbox_service import (
|
||||
SandboxService,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix='/sandboxes', tags=['Sandbox'])
|
||||
sandbox_service_dependency = depends_sandbox_service()
|
||||
|
||||
# Read methods
|
||||
|
||||
|
||||
@router.get('/search')
|
||||
async def search_sandboxes(
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int,
|
||||
Query(title='The max number of results in the page', gt=0, lte=100),
|
||||
] = 100,
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> SandboxPage:
|
||||
"""Search / list sandboxes owned by the current user."""
|
||||
assert limit > 0
|
||||
assert limit <= 100
|
||||
return await sandbox_service.search_sandboxes(page_id=page_id, limit=limit)
|
||||
|
||||
|
||||
@router.get('')
|
||||
async def batch_get_sandboxes(
|
||||
id: Annotated[list[str], Query()],
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> list[SandboxInfo | None]:
|
||||
"""Get a batch of sandboxes given their ids, returning null for any missing."""
|
||||
assert len(id) < 100
|
||||
sandboxes = await sandbox_service.batch_get_sandboxes(id)
|
||||
return sandboxes
|
||||
|
||||
|
||||
# Write Methods
|
||||
|
||||
|
||||
@router.post('')
|
||||
async def start_sandbox(
|
||||
sandbox_spec_id: str | None = None,
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> SandboxInfo:
|
||||
info = await sandbox_service.start_sandbox(sandbox_spec_id)
|
||||
return info
|
||||
|
||||
|
||||
@router.post('/{sandbox_id}/pause', responses={404: {'description': 'Item not found'}})
|
||||
async def pause_sandbox(
|
||||
sandbox_id: str,
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> Success:
|
||||
exists = await sandbox_service.pause_sandbox(sandbox_id)
|
||||
if not exists:
|
||||
raise HTTPException(status.HTTP_404_NOT_FOUND)
|
||||
return Success()
|
||||
|
||||
|
||||
@router.post('/{sandbox_id}/resume', responses={404: {'description': 'Item not found'}})
|
||||
async def resume_sandbox(
|
||||
sandbox_id: str,
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> Success:
|
||||
exists = await sandbox_service.resume_sandbox(sandbox_id)
|
||||
if not exists:
|
||||
raise HTTPException(status.HTTP_404_NOT_FOUND)
|
||||
return Success()
|
||||
|
||||
|
||||
@router.delete('/{sandbox_id}', responses={404: {'description': 'Item not found'}})
|
||||
async def delete_sandbox(
|
||||
sandbox_id: str,
|
||||
sandbox_service: SandboxService = sandbox_service_dependency,
|
||||
) -> Success:
|
||||
exists = await sandbox_service.delete_sandbox(sandbox_id)
|
||||
if not exists:
|
||||
raise HTTPException(status.HTTP_404_NOT_FOUND)
|
||||
return Success()
|
||||
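As a rough client-side illustration of the routes above (the base URL, the prefix the app mounts this router under, and any authentication headers are assumptions here, not part of this diff):

```python
import asyncio

import httpx

# Assumed host/port for a locally running app server
BASE_URL = 'http://localhost:3000'


async def demo() -> None:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # Start a sandbox using the default spec
        started = await client.post('/sandboxes')
        started.raise_for_status()
        sandbox = started.json()

        # Page through sandboxes owned by the current user
        page = await client.get('/sandboxes/search', params={'limit': 10})
        page.raise_for_status()

        # Batch lookup; ids that are not found come back as null entries
        batch = await client.get('/sandboxes', params={'id': [sandbox['id']]})
        batch.raise_for_status()


# asyncio.run(demo())
```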
65
openhands/app_server/sandbox/sandbox_service.py
Normal file
@ -0,0 +1,65 @@
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxPage
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
|
||||
class SandboxService(ABC):
|
||||
"""Service for accessing sandboxes in which conversations may be run."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_sandboxes(
|
||||
self,
|
||||
page_id: str | None = None,
|
||||
limit: int = 100,
|
||||
) -> SandboxPage:
|
||||
"""Search for sandboxes."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None:
|
||||
"""Get a single sandbox. Return None if the sandbox was not found."""
|
||||
|
||||
async def batch_get_sandboxes(
|
||||
self, sandbox_ids: list[str]
|
||||
) -> list[SandboxInfo | None]:
|
||||
"""Get a batch of sandboxes, returning None for any which were not found."""
|
||||
results = await asyncio.gather(
|
||||
*[self.get_sandbox(sandbox_id) for sandbox_id in sandbox_ids]
|
||||
)
|
||||
return results
|
||||
|
||||
@abstractmethod
|
||||
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
|
||||
"""Begin the process of starting a sandbox.
|
||||
|
||||
Return the info on the new sandbox. If no spec is selected, use the default.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def resume_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Begin the process of resuming a sandbox.
|
||||
|
||||
Return True if the sandbox exists and is being resumed or is already running.
|
||||
Return False if the sandbox did not exist.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def pause_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Begin the process of pausing a sandbox.
|
||||
|
||||
Return True if the sandbox exists and is being paused or is already paused.
|
||||
Return False if the sandbox did not exist.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def delete_sandbox(self, sandbox_id: str) -> bool:
|
||||
"""Begin the process of deleting a sandbox (which may involve stopping it).
|
||||
|
||||
Return False if the sandbox did not exist.
|
||||
"""
|
||||
|
||||
|
||||
class SandboxServiceInjector(DiscriminatedUnionMixin, Injector[SandboxService], ABC):
|
||||
pass
|
||||
22
openhands/app_server/sandbox/sandbox_spec_models.py
Normal file
@ -0,0 +1,22 @@
from datetime import datetime

from pydantic import BaseModel, Field

from openhands.agent_server.utils import utc_now


class SandboxSpecInfo(BaseModel):
    """A template for creating a Sandbox (e.g: A Docker Image vs Container)."""

    id: str
    command: list[str] | None
    created_at: datetime = Field(default_factory=utc_now)
    initial_env: dict[str, str] = Field(
        default_factory=dict, description='Initial Environment Variables'
    )
    working_dir: str = '/home/openhands/workspace'


class SandboxSpecInfoPage(BaseModel):
    items: list[SandboxSpecInfo]
    next_page_id: str | None = None
|
||||
49
openhands/app_server/sandbox/sandbox_spec_router.py
Normal file
@ -0,0 +1,49 @@
|
||||
"""Runtime Images router for OpenHands Server."""
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
from openhands.app_server.config import depends_sandbox_spec_service
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import (
|
||||
SandboxSpecInfo,
|
||||
SandboxSpecInfoPage,
|
||||
)
|
||||
from openhands.app_server.sandbox.sandbox_spec_service import (
|
||||
SandboxSpecService,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix='/sandbox-specs', tags=['Sandbox'])
|
||||
sandbox_spec_service_dependency = depends_sandbox_spec_service()
|
||||
|
||||
|
||||
# Read methods
|
||||
|
||||
|
||||
@router.get('/search')
|
||||
async def search_sandbox_specs(
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int,
|
||||
Query(title='The max number of results in the page', gt=0, lte=100),
|
||||
] = 100,
|
||||
sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency,
|
||||
) -> SandboxSpecInfoPage:
|
||||
"""Search / List sandbox specs."""
|
||||
assert limit > 0
|
||||
assert limit <= 100
|
||||
return await sandbox_spec_service.search_sandbox_specs(page_id=page_id, limit=limit)
|
||||
|
||||
|
||||
@router.get('')
|
||||
async def batch_get_sandbox_specs(
|
||||
id: Annotated[list[str], Query()],
|
||||
sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency,
|
||||
) -> list[SandboxSpecInfo | None]:
|
||||
"""Get a batch of sandbox specs given their ids, returning null for any missing."""
|
||||
assert len(id) <= 100
|
||||
sandbox_specs = await sandbox_spec_service.batch_get_sandbox_specs(id)
|
||||
return sandbox_specs
|
||||
59
openhands/app_server/sandbox/sandbox_spec_service.py
Normal file
@ -0,0 +1,59 @@
|
||||
import asyncio
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from openhands.app_server.errors import SandboxError
|
||||
from openhands.app_server.sandbox.sandbox_spec_models import (
|
||||
SandboxSpecInfo,
|
||||
SandboxSpecInfoPage,
|
||||
)
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
# The version of the agent server to use for deployments.
|
||||
# Typically this will be the same as the version in pyproject.toml
|
||||
AGENT_SERVER_VERSION = 'f8ca02c4a3b847bfc50b3c5e579ce126c511fefc'
|
||||
|
||||
|
||||
class SandboxSpecService(ABC):
|
||||
"""Service for managing Sandbox specs.
|
||||
|
||||
At present this is read only. The plan is that later this class will allow building
|
||||
and deleting sandbox specs and limiting access by user and group. It would also be
|
||||
nice to be able to set the desired number of warm sandboxes for a spec and scale
|
||||
this up and down.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def search_sandbox_specs(
|
||||
self, page_id: str | None = None, limit: int = 100
|
||||
) -> SandboxSpecInfoPage:
|
||||
"""Search for sandbox specs."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_sandbox_spec(self, sandbox_spec_id: str) -> SandboxSpecInfo | None:
|
||||
"""Get a single sandbox spec, returning None if not found."""
|
||||
|
||||
async def get_default_sandbox_spec(self) -> SandboxSpecInfo:
|
||||
"""Get the default sandbox spec."""
|
||||
page = await self.search_sandbox_specs()
|
||||
if not page.items:
|
||||
raise SandboxError('No sandbox specs available!')
|
||||
return page.items[0]
|
||||
|
||||
async def batch_get_sandbox_specs(
|
||||
self, sandbox_spec_ids: list[str]
|
||||
) -> list[SandboxSpecInfo | None]:
|
||||
"""Get a batch of sandbox specs, returning None for any not found."""
|
||||
results = await asyncio.gather(
|
||||
*[
|
||||
self.get_sandbox_spec(sandbox_spec_id)
|
||||
for sandbox_spec_id in sandbox_spec_ids
|
||||
]
|
||||
)
|
||||
return results
|
||||
|
||||
|
||||
class SandboxSpecServiceInjector(
|
||||
DiscriminatedUnionMixin, Injector[SandboxSpecService], ABC
|
||||
):
|
||||
pass
|
||||
19
openhands/app_server/services/README.md
Normal file
@ -0,0 +1,19 @@
# Core Services

Provides essential services for authentication, security, and system operations.

## Overview

This module contains core services that support the OpenHands app server infrastructure, including authentication, token management, and security operations.

## Key Components

- **JwtService**: JSON Web Token signing, verification, and encryption

## JWT Service Features

- Token signing and verification for authentication
- JWE (JSON Web Encryption) support for sensitive data
- Multi-key support with key rotation capabilities
- Configurable algorithms (RS256, HS256, etc.)
- Secure token handling and validation
|
||||
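A rough sketch of how the JwtService described above might be exercised. The EncryptionKey construction below is an assumption, with field names inferred from how jwt_service.py reads them (id, key, active, created_at); the secret material is a placeholder:

```python
from datetime import timedelta

from pydantic import SecretStr

from openhands.agent_server.utils import utc_now
from openhands.app_server.services.jwt_service import JwtService
from openhands.app_server.utils.encryption_key import EncryptionKey

# Assumed constructor, inferred from the attributes JwtService accesses
key = EncryptionKey(
    id='key-1',
    key=SecretStr('example-secret-material'),
    active=True,
    created_at=utc_now(),
)
service = JwtService(keys=[key])

# Signed (JWS) token: readable by the bearer, tamper-evident
token = service.create_jws_token({'sub': 'user-123'}, expires_in=timedelta(minutes=5))
claims = service.verify_jws_token(token)

# Encrypted (JWE) token: payload is hidden from the bearer
secret_token = service.create_jwe_token({'sub': 'user-123'})
```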
300
openhands/app_server/services/db_session_injector.py
Normal file
@ -0,0 +1,300 @@
|
||||
"""Database configuration and session management for OpenHands Server."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from fastapi import Request
|
||||
from pydantic import BaseModel, PrivateAttr, SecretStr, model_validator
|
||||
from sqlalchemy import Engine, create_engine
|
||||
from sqlalchemy.engine import URL
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||
from sqlalchemy.ext.asyncio.engine import AsyncEngine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.pool import NullPool
|
||||
from sqlalchemy.util import await_only
|
||||
|
||||
from openhands.app_server.services.injector import Injector, InjectorState
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
DB_SESSION_ATTR = 'db_session'
|
||||
DB_SESSION_KEEP_OPEN_ATTR = 'db_session_keep_open'
|
||||
|
||||
|
||||
class DbSessionInjector(BaseModel, Injector[async_sessionmaker]):
|
||||
persistence_dir: Path
|
||||
host: str | None = None
|
||||
port: int | None = None
|
||||
name: str | None = None
|
||||
user: str | None = None
|
||||
password: SecretStr | None = None
|
||||
echo: bool = False
|
||||
pool_size: int = 25
|
||||
max_overflow: int = 10
|
||||
gcp_db_instance: str | None = None
|
||||
gcp_project: str | None = None
|
||||
gcp_region: str | None = None
|
||||
|
||||
# Private attrs
|
||||
_engine: Engine | None = PrivateAttr(default=None)
|
||||
_async_engine: AsyncEngine | None = PrivateAttr(default=None)
|
||||
_session_maker: sessionmaker | None = PrivateAttr(default=None)
|
||||
_async_session_maker: async_sessionmaker | None = PrivateAttr(default=None)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def fill_empty_fields(self):
|
||||
"""Override any defaults with values from legacy enviroment variables"""
|
||||
if self.host is None:
|
||||
self.host = os.getenv('DB_HOST')
|
||||
if self.port is None:
|
||||
self.port = int(os.getenv('DB_PORT', '5432'))
|
||||
if self.name is None:
|
||||
self.name = os.getenv('DB_NAME', 'openhands')
|
||||
if self.user is None:
|
||||
self.user = os.getenv('DB_USER', 'postgres')
|
||||
if self.password is None:
|
||||
self.password = SecretStr(os.getenv('DB_PASS', 'postgres'))
|
||||
if self.gcp_db_instance is None:
|
||||
self.gcp_db_instance = os.getenv('GCP_DB_INSTANCE')
|
||||
if self.gcp_project is None:
|
||||
self.gcp_project = os.getenv('GCP_PROJECT')
|
||||
if self.gcp_region is None:
|
||||
self.gcp_region = os.getenv('GCP_REGION')
|
||||
return self
|
||||
|
||||
def _create_gcp_db_connection(self):
|
||||
# Lazy import because the lib does not import if the user does not have postgres installed
|
||||
from google.cloud.sql.connector import Connector
|
||||
|
||||
connector = Connector()
|
||||
instance_string = f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}'
|
||||
password = self.password
|
||||
assert password is not None
|
||||
return connector.connect(
|
||||
instance_string,
|
||||
'pg8000',
|
||||
user=self.user,
|
||||
password=password.get_secret_value(),
|
||||
db=self.name,
|
||||
)
|
||||
|
||||
async def _create_async_gcp_db_connection(self):
|
||||
# Lazy import because the lib does not import if the user does not have postgres installed
|
||||
from google.cloud.sql.connector import Connector
|
||||
|
||||
loop = asyncio.get_running_loop()
|
||||
async with Connector(loop=loop) as connector:
|
||||
password = self.password
|
||||
assert password is not None
|
||||
conn = await connector.connect_async(
|
||||
f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}',
|
||||
'asyncpg',
|
||||
user=self.user,
|
||||
password=password.get_secret_value(),
|
||||
db=self.name,
|
||||
)
|
||||
return conn
|
||||
|
||||
def _create_gcp_engine(self):
|
||||
engine = create_engine(
|
||||
'postgresql+pg8000://',
|
||||
creator=self._create_gcp_db_connection,
|
||||
pool_size=self.pool_size,
|
||||
max_overflow=self.max_overflow,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
return engine
|
||||
|
||||
async def _create_async_gcp_creator(self):
|
||||
from sqlalchemy.dialects.postgresql.asyncpg import (
|
||||
AsyncAdapt_asyncpg_connection,
|
||||
)
|
||||
|
||||
engine = self._create_gcp_engine()
|
||||
|
||||
return AsyncAdapt_asyncpg_connection(
|
||||
engine.dialect.dbapi,
|
||||
await self._create_async_gcp_db_connection(),
|
||||
prepared_statement_cache_size=100,
|
||||
)
|
||||
|
||||
async def _create_async_gcp_engine(self):
|
||||
from sqlalchemy.dialects.postgresql.asyncpg import (
|
||||
AsyncAdapt_asyncpg_connection,
|
||||
)
|
||||
|
||||
base_engine = self._create_gcp_engine()
|
||||
dbapi = base_engine.dialect.dbapi
|
||||
|
||||
def adapted_creator():
|
||||
return AsyncAdapt_asyncpg_connection(
|
||||
dbapi,
|
||||
await_only(self._create_async_gcp_db_connection()),
|
||||
prepared_statement_cache_size=100,
|
||||
)
|
||||
|
||||
return create_async_engine(
|
||||
'postgresql+asyncpg://',
|
||||
creator=adapted_creator,
|
||||
pool_size=self.pool_size,
|
||||
max_overflow=self.max_overflow,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
|
||||
async def get_async_db_engine(self) -> AsyncEngine:
|
||||
async_engine = self._async_engine
|
||||
if async_engine:
|
||||
return async_engine
|
||||
if self.gcp_db_instance: # GCP environments
|
||||
async_engine = await self._create_async_gcp_engine()
|
||||
else:
|
||||
if self.host:
|
||||
try:
|
||||
import asyncpg # noqa: F401
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"PostgreSQL driver 'asyncpg' is required for async connections but is not installed."
|
||||
) from e
|
||||
password = self.password.get_secret_value() if self.password else None
|
||||
url = URL.create(
|
||||
'postgresql+asyncpg',
|
||||
username=self.user or '',
|
||||
password=password,
|
||||
host=self.host,
|
||||
port=self.port,
|
||||
database=self.name,
|
||||
)
|
||||
else:
|
||||
url = f'sqlite+aiosqlite:///{str(self.persistence_dir)}/openhands.db'
|
||||
|
||||
if self.host:
|
||||
async_engine = create_async_engine(
|
||||
url,
|
||||
pool_size=self.pool_size,
|
||||
max_overflow=self.max_overflow,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
else:
|
||||
async_engine = create_async_engine(
|
||||
url,
|
||||
poolclass=NullPool,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
self._async_engine = async_engine
|
||||
return async_engine
|
||||
|
||||
def get_db_engine(self) -> Engine:
|
||||
engine = self._engine
|
||||
if engine:
|
||||
return engine
|
||||
if self.gcp_db_instance: # GCP environments
|
||||
engine = self._create_gcp_engine()
|
||||
else:
|
||||
if self.host:
|
||||
try:
|
||||
import pg8000 # noqa: F401
|
||||
except Exception as e:
|
||||
raise RuntimeError(
|
||||
"PostgreSQL driver 'pg8000' is required for sync connections but is not installed."
|
||||
) from e
|
||||
password = self.password.get_secret_value() if self.password else None
|
||||
url = URL.create(
|
||||
'postgresql+pg8000',
|
||||
username=self.user or '',
|
||||
password=password,
|
||||
host=self.host,
|
||||
port=self.port,
|
||||
database=self.name,
|
||||
)
|
||||
else:
|
||||
url = f'sqlite:///{self.persistence_dir}/openhands.db'
|
||||
engine = create_engine(
|
||||
url,
|
||||
pool_size=self.pool_size,
|
||||
max_overflow=self.max_overflow,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
self._engine = engine
|
||||
return engine
|
||||
|
||||
def get_session_maker(self) -> sessionmaker:
|
||||
session_maker = self._session_maker
|
||||
if session_maker is None:
|
||||
session_maker = sessionmaker(bind=self.get_db_engine())
|
||||
self._session_maker = session_maker
|
||||
return session_maker
|
||||
|
||||
async def get_async_session_maker(self) -> async_sessionmaker:
|
||||
async_session_maker = self._async_session_maker
|
||||
if async_session_maker is None:
|
||||
db_engine = await self.get_async_db_engine()
|
||||
async_session_maker = async_sessionmaker(
|
||||
db_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
self._async_session_maker = async_session_maker
|
||||
return async_session_maker
|
||||
|
||||
async def async_session(self) -> AsyncGenerator[AsyncSession, None]:
|
||||
"""Dependency function that yields database sessions.
|
||||
|
||||
This function creates a new database session for each request
|
||||
and ensures it's properly closed after use.
|
||||
|
||||
Yields:
|
||||
AsyncSession: An async SQL session
|
||||
"""
|
||||
session_maker = await self.get_async_session_maker()
|
||||
async with session_maker() as session:
|
||||
try:
|
||||
yield session
|
||||
finally:
|
||||
await session.close()
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[AsyncSession, None]:
|
||||
"""Dependency function that manages database sessions through request state.
|
||||
|
||||
This function stores the database session in the request state to enable
|
||||
session reuse across multiple dependencies within the same request.
|
||||
If a session already exists in the request state, it returns that session.
|
||||
Otherwise, it creates a new session and stores it in the request state.
|
||||
|
||||
Args:
|
||||
request: The FastAPI request object
|
||||
|
||||
Yields:
|
||||
AsyncSession: An async SQL session stored in request state
|
||||
"""
|
||||
db_session = getattr(state, DB_SESSION_ATTR, None)
|
||||
if db_session:
|
||||
yield db_session
|
||||
else:
|
||||
# Create a new session and store it in request state
|
||||
session_maker = await self.get_async_session_maker()
|
||||
db_session = session_maker()
|
||||
try:
|
||||
setattr(state, DB_SESSION_ATTR, db_session)
|
||||
yield db_session
|
||||
if not getattr(state, DB_SESSION_KEEP_OPEN_ATTR, False):
|
||||
await db_session.commit()
|
||||
except Exception:
|
||||
_logger.exception('Rolling back SQL due to error', stack_info=True)
|
||||
await db_session.rollback()
|
||||
raise
|
||||
finally:
|
||||
# If instructed, do not close the db session at the end of the request.
|
||||
if not getattr(state, DB_SESSION_KEEP_OPEN_ATTR, False):
|
||||
# Clean up the session from request state
|
||||
if hasattr(state, DB_SESSION_ATTR):
|
||||
delattr(state, DB_SESSION_ATTR)
|
||||
await db_session.close()
|
||||
|
||||
|
||||
def set_db_session_keep_open(state: InjectorState, keep_open: bool):
|
||||
"""Set whether the connection should be kept open after the request terminates."""
|
||||
setattr(state, DB_SESSION_KEEP_OPEN_ATTR, keep_open)
|
||||
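To tie the pieces above together, a minimal sketch of obtaining a session outside of FastAPI via the injector's context manager. It assumes the DB_HOST / GCP_* environment variables are unset so the SQLite fallback is used, that aiosqlite is installed, and the persistence directory is just an example:

```python
import asyncio
from pathlib import Path

from sqlalchemy import text
from starlette.datastructures import State

from openhands.app_server.services.db_session_injector import DbSessionInjector


async def main() -> None:
    # With no host configured, the injector falls back to a local SQLite file
    # under persistence_dir (requires the aiosqlite driver).
    injector = DbSessionInjector(persistence_dir=Path('/tmp'))
    state = State()
    async with injector.context(state) as session:
        result = await session.execute(text('SELECT 1'))
        print(result.scalar_one())
    # On exit the injector commits (unless keep-open is set) and closes the session.


# asyncio.run(main())
```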
42
openhands/app_server/services/httpx_client_injector.py
Normal file
@ -0,0 +1,42 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import httpx
|
||||
from fastapi import Request
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from openhands.app_server.services.injector import Injector, InjectorState
|
||||
|
||||
HTTPX_CLIENT_ATTR = 'httpx_client'
|
||||
HTTPX_CLIENT_KEEP_OPEN_ATTR = 'httpx_client_keep_open'
|
||||
|
||||
|
||||
class HttpxClientInjector(BaseModel, Injector[httpx.AsyncClient]):
|
||||
"""Injector for a httpx client. By keeping a single httpx client alive in the
|
||||
context of server requests, handshakes are minimized while connection pool leaks
|
||||
are prevented."""
|
||||
|
||||
timeout: int = Field(default=15, description='Default timeout on all http requests')
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[httpx.AsyncClient, None]:
|
||||
httpx_client = getattr(state, HTTPX_CLIENT_ATTR, None)
|
||||
if httpx_client:
|
||||
yield httpx_client
|
||||
return
|
||||
httpx_client = httpx.AsyncClient(timeout=self.timeout)
|
||||
try:
|
||||
setattr(state, HTTPX_CLIENT_ATTR, httpx_client)
|
||||
yield httpx_client
|
||||
finally:
|
||||
# If instructed, do not close the httpx client at the end of the request.
|
||||
if not getattr(state, HTTPX_CLIENT_KEEP_OPEN_ATTR, False):
|
||||
# Clean up the httpx client from request state
|
||||
if hasattr(state, HTTPX_CLIENT_ATTR):
|
||||
delattr(state, HTTPX_CLIENT_ATTR)
|
||||
await httpx_client.aclose()
|
||||
|
||||
|
||||
def set_httpx_client_keep_open(state: InjectorState, keep_open: bool):
|
||||
"""Set whether the connection should be kept open after the request terminates."""
|
||||
setattr(state, HTTPX_CLIENT_KEEP_OPEN_ATTR, keep_open)
|
||||
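The point of stashing the client on the injector state is that nested `inject` calls within one request reuse a single connection pool, and only the outermost injection closes it. A small sketch of that behaviour (the URL is a placeholder):

```python
import asyncio

from starlette.datastructures import State

from openhands.app_server.services.httpx_client_injector import HttpxClientInjector


async def main() -> None:
    injector = HttpxClientInjector(timeout=10)
    state = State()
    async with injector.context(state) as outer:
        # A nested injection against the same state yields the same client,
        # so no extra connection pool is created and nothing is closed early.
        async with injector.context(state) as inner:
            assert inner is outer
        response = await outer.get('https://example.com')  # placeholder URL
        print(response.status_code)
    # The outermost context closes the client here (unless keep-open was set).


# asyncio.run(main())
```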
34
openhands/app_server/services/injector.py
Normal file
@ -0,0 +1,34 @@
import contextlib
from abc import ABC, abstractmethod
from typing import AsyncGenerator, Generic, TypeAlias, TypeVar

from fastapi import Request
from starlette.datastructures import State

T = TypeVar('T')
InjectorState: TypeAlias = State


class Injector(Generic[T], ABC):
    """Object designed to facilitate dependency injection"""

    @abstractmethod
    async def inject(
        self, state: InjectorState, request: Request | None = None
    ) -> AsyncGenerator[T, None]:
        """Inject an object. The state object may be used to store variables for
        reuse by other injectors, as injection operations may be nested."""
        yield None  # type: ignore

    @contextlib.asynccontextmanager
    async def context(
        self, state: InjectorState, request: Request | None = None
    ) -> AsyncGenerator[T, None]:
        """Context function suitable for use in async with clauses"""
        async for result in self.inject(state, request):
            yield result

    async def depends(self, request: Request) -> AsyncGenerator[T, None]:
        """Depends function suitable for use with FastAPI dependency injection."""
        async for result in self.inject(request.state, request):
            yield result
|
||||
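For contributors adding new services, the contract above boils down to a single async generator. A toy implementation under that reading (the Clock service is made up purely for illustration):

```python
from datetime import datetime, timezone
from typing import AsyncGenerator

from fastapi import Request

from openhands.app_server.services.injector import Injector, InjectorState


class Clock:
    """Made-up service used only to illustrate the Injector contract."""

    def now(self) -> datetime:
        return datetime.now(timezone.utc)


class ClockInjector(Injector[Clock]):
    async def inject(
        self, state: InjectorState, request: Request | None = None
    ) -> AsyncGenerator[Clock, None]:
        # Reuse one instance per request/state, mirroring the other injectors
        clock = getattr(state, 'clock', None)
        if clock is None:
            clock = Clock()
            setattr(state, 'clock', clock)
        yield clock


# FastAPI wiring would look roughly like:
#   clock_dependency = Depends(ClockInjector().depends)
```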
248
openhands/app_server/services/jwt_service.py
Normal file
@ -0,0 +1,248 @@
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, AsyncGenerator
|
||||
|
||||
import jwt
|
||||
from fastapi import Request
|
||||
from jose import jwe
|
||||
from jose.constants import ALGORITHMS
|
||||
from pydantic import BaseModel, PrivateAttr
|
||||
|
||||
from openhands.agent_server.utils import utc_now
|
||||
from openhands.app_server.services.injector import Injector, InjectorState
|
||||
from openhands.app_server.utils.encryption_key import (
|
||||
EncryptionKey,
|
||||
get_default_encryption_keys,
|
||||
)
|
||||
|
||||
|
||||
class JwtService:
|
||||
"""Service for signing/verifying JWS tokens and encrypting/decrypting JWE tokens."""
|
||||
|
||||
def __init__(self, keys: list[EncryptionKey]):
|
||||
"""Initialize the JWT service with a list of keys.
|
||||
|
||||
Args:
|
||||
keys: List of EncryptionKey objects; at least one must be active.
|
||||
|
||||
Raises:
|
||||
ValueError: If no active keys are provided
|
||||
"""
|
||||
active_keys = [key for key in keys if key.active]
|
||||
if not active_keys:
|
||||
raise ValueError('At least one active key is required')
|
||||
|
||||
# Store keys by ID for quick lookup
|
||||
self._keys = {key.id: key for key in keys}
|
||||
|
||||
# Find the newest key as default
|
||||
newest_key = max(active_keys, key=lambda k: k.created_at)
|
||||
self._default_key_id = newest_key.id
|
||||
|
||||
@property
|
||||
def default_key_id(self) -> str:
|
||||
"""Get the default key ID."""
|
||||
return self._default_key_id
|
||||
|
||||
def create_jws_token(
|
||||
self,
|
||||
payload: dict[str, Any],
|
||||
key_id: str | None = None,
|
||||
expires_in: timedelta | None = None,
|
||||
) -> str:
|
||||
"""Create a JWS (JSON Web Signature) token.
|
||||
|
||||
Args:
|
||||
payload: The JWT payload
|
||||
key_id: The key ID to use for signing. If None, uses the newest key.
|
||||
expires_in: Token expiration time. If None, defaults to 1 hour.
|
||||
|
||||
Returns:
|
||||
The signed JWS token
|
||||
|
||||
Raises:
|
||||
ValueError: If key_id is invalid
|
||||
"""
|
||||
if key_id is None:
|
||||
key_id = self._default_key_id
|
||||
|
||||
if key_id not in self._keys:
|
||||
raise ValueError(f"Key ID '{key_id}' not found")
|
||||
|
||||
# Add standard JWT claims
|
||||
now = utc_now()
|
||||
if expires_in is None:
|
||||
expires_in = timedelta(hours=1)
|
||||
|
||||
jwt_payload = {
|
||||
**payload,
|
||||
'iat': int(now.timestamp()),
|
||||
'exp': int((now + expires_in).timestamp()),
|
||||
}
|
||||
|
||||
# Use the raw key for JWT signing with key_id in header
|
||||
secret_key = self._keys[key_id].key.get_secret_value()
|
||||
|
||||
return jwt.encode(
|
||||
jwt_payload, secret_key, algorithm='HS256', headers={'kid': key_id}
|
||||
)
|
||||
|
||||
def verify_jws_token(self, token: str, key_id: str | None = None) -> dict[str, Any]:
|
||||
"""Verify and decode a JWS token.
|
||||
|
||||
Args:
|
||||
token: The JWS token to verify
|
||||
key_id: The key ID to use for verification. If None, extracts from
|
||||
token's kid header.
|
||||
|
||||
Returns:
|
||||
The decoded JWT payload
|
||||
|
||||
Raises:
|
||||
ValueError: If token is invalid or key_id is not found
|
||||
jwt.InvalidTokenError: If token verification fails
|
||||
"""
|
||||
if key_id is None:
|
||||
# Try to extract key_id from the token's kid header
|
||||
try:
|
||||
unverified_header = jwt.get_unverified_header(token)
|
||||
key_id = unverified_header.get('kid')
|
||||
if not key_id:
|
||||
raise ValueError("Token does not contain 'kid' header with key ID")
|
||||
except jwt.DecodeError:
|
||||
raise ValueError('Invalid JWT token format')
|
||||
|
||||
if key_id not in self._keys:
|
||||
raise ValueError(f"Key ID '{key_id}' not found")
|
||||
|
||||
# Use the raw key for JWT verification
|
||||
secret_key = self._keys[key_id].key.get_secret_value()
|
||||
|
||||
try:
|
||||
payload = jwt.decode(token, secret_key, algorithms=['HS256'])
|
||||
return payload
|
||||
except jwt.InvalidTokenError as e:
|
||||
raise jwt.InvalidTokenError(f'Token verification failed: {str(e)}')
|
||||
|
||||
def create_jwe_token(
|
||||
self,
|
||||
payload: dict[str, Any],
|
||||
key_id: str | None = None,
|
||||
expires_in: timedelta | None = None,
|
||||
) -> str:
|
||||
"""Create a JWE (JSON Web Encryption) token.
|
||||
|
||||
Args:
|
||||
payload: The JWT payload to encrypt
|
||||
key_id: The key ID to use for encryption. If None, uses the newest key.
|
||||
expires_in: Token expiration time. If None, defaults to 1 hour.
|
||||
|
||||
Returns:
|
||||
The encrypted JWE token
|
||||
|
||||
Raises:
|
||||
ValueError: If key_id is invalid
|
||||
"""
|
||||
if key_id is None:
|
||||
key_id = self._default_key_id
|
||||
|
||||
if key_id not in self._keys:
|
||||
raise ValueError(f"Key ID '{key_id}' not found")
|
||||
|
||||
# Add standard JWT claims
|
||||
now = utc_now()
|
||||
if expires_in is None:
|
||||
expires_in = timedelta(hours=1)
|
||||
|
||||
jwt_payload = {
|
||||
**payload,
|
||||
'iat': int(now.timestamp()),
|
||||
'exp': int((now + expires_in).timestamp()),
|
||||
}
|
||||
|
||||
# Get the raw key for JWE encryption and derive a 256-bit key
|
||||
secret_key = self._keys[key_id].key.get_secret_value()
|
||||
key_bytes = secret_key.encode() if isinstance(secret_key, str) else secret_key
|
||||
# Derive a 256-bit key using SHA256
|
||||
key_256 = hashlib.sha256(key_bytes).digest()
|
||||
|
||||
# Encrypt the payload (convert to JSON string first)
|
||||
payload_json = json.dumps(jwt_payload)
|
||||
encrypted_token = jwe.encrypt(
|
||||
payload_json,
|
||||
key_256,
|
||||
algorithm=ALGORITHMS.DIR,
|
||||
encryption=ALGORITHMS.A256GCM,
|
||||
kid=key_id,
|
||||
)
|
||||
# Ensure we return a string
|
||||
return (
|
||||
encrypted_token.decode('utf-8')
|
||||
if isinstance(encrypted_token, bytes)
|
||||
else encrypted_token
|
||||
)
|
||||
|
||||
def decrypt_jwe_token(
|
||||
self, token: str, key_id: str | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Decrypt and decode a JWE token.
|
||||
|
||||
Args:
|
||||
token: The JWE token to decrypt
|
||||
key_id: The key ID to use for decryption. If None, extracts
|
||||
from token header.
|
||||
|
||||
Returns:
|
||||
The decrypted JWT payload
|
||||
|
||||
Raises:
|
||||
ValueError: If token is invalid or key_id is not found
|
||||
Exception: If token decryption fails
|
||||
"""
|
||||
if key_id is None:
|
||||
# Try to extract key_id from the token's header
|
||||
try:
|
||||
header = jwe.get_unverified_header(token)
|
||||
key_id = header.get('kid')
|
||||
if not key_id:
|
||||
raise ValueError("Token does not contain 'kid' header with key ID")
|
||||
except Exception:
|
||||
raise ValueError('Invalid JWE token format')
|
||||
|
||||
if key_id not in self._keys:
|
||||
raise ValueError(f"Key ID '{key_id}' not found")
|
||||
|
||||
# Get the raw key for JWE decryption and derive a 256-bit key
|
||||
secret_key = self._keys[key_id].key.get_secret_value()
|
||||
key_bytes = secret_key.encode() if isinstance(secret_key, str) else secret_key
|
||||
# Derive a 256-bit key using SHA256
|
||||
key_256 = hashlib.sha256(key_bytes).digest()
|
||||
|
||||
try:
|
||||
payload_json = jwe.decrypt(token, key_256)
|
||||
assert payload_json is not None
|
||||
# Parse the JSON string back to dictionary
|
||||
payload = json.loads(payload_json)
|
||||
return payload
|
||||
except Exception as e:
|
||||
raise Exception(f'Token decryption failed: {str(e)}')
|
||||
|
||||
|
||||
class JwtServiceInjector(BaseModel, Injector[JwtService]):
|
||||
persistence_dir: Path
|
||||
_jwt_service: JwtService | None = PrivateAttr(default=None)
|
||||
|
||||
def get_jwt_service(self) -> JwtService:
|
||||
jwt_service = self._jwt_service
|
||||
if jwt_service is None:
|
||||
keys = get_default_encryption_keys(self.persistence_dir)
|
||||
jwt_service = JwtService(keys=keys)
|
||||
self._jwt_service = jwt_service
|
||||
return jwt_service
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[JwtService, None]:
|
||||
yield self.get_jwt_service()
|
||||
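To illustrate how the two token flavours above are used together, here is a minimal, hypothetical sketch (not part of this commit); the persistence path and payloads are placeholders, and the imports follow the module paths shown in this diff.

```python
# Hypothetical usage sketch - not part of this commit.
from pathlib import Path

from openhands.app_server.services.jwt_service import JwtService
from openhands.app_server.utils.encryption_key import get_default_encryption_keys

# Load (or lazily create) keys from a persistence directory, then build the service.
keys = get_default_encryption_keys(Path('/tmp/openhands'))  # path is illustrative
jwt_service = JwtService(keys=keys)

# Signed (JWS) token: the payload is readable by anyone but tamper-proof.
jws = jwt_service.create_jws_token({'sub': 'user-123'})
claims = jwt_service.verify_jws_token(jws)

# Encrypted (JWE) token: the payload is opaque to the client.
# This is the flavour StoredSecretStr in sql_utils.py uses to encrypt column values.
jwe = jwt_service.create_jwe_token({'v': 'my-api-key'})
secret = jwt_service.decrypt_jwe_token(jwe)['v']
```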
21
openhands/app_server/user/README.md
Normal file
21
openhands/app_server/user/README.md
Normal file
@ -0,0 +1,21 @@
# User Management

Handles user authentication, authorization, and profile management for the OpenHands app server.

## Overview

This module provides user management capabilities, including authentication, user profile access, and service resolution for user-scoped operations.

## Key Components

- **UserContext**: Abstract context for user operations
- **AuthUserContext**: Compatibility layer for user auth
- **UserRouter**: FastAPI router for user-related endpoints
- **UserContextInjector**: Factory for getting user context with FastAPI dependency injection

## Features

- User authentication and session management
- Current user profile retrieval
- User-scoped service resolution
- JWT-based authentication integration
99
openhands/app_server/user/auth_user_context.py
Normal file
99
openhands/app_server/user/auth_user_context.py
Normal file
@ -0,0 +1,99 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, AsyncGenerator
|
||||
|
||||
from fastapi import Request
|
||||
from pydantic import PrivateAttr
|
||||
|
||||
from openhands.app_server.errors import AuthError
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR
|
||||
from openhands.app_server.user.user_context import UserContext, UserContextInjector
|
||||
from openhands.app_server.user.user_models import UserInfo
|
||||
from openhands.integrations.provider import ProviderHandler, ProviderType
|
||||
from openhands.sdk.conversation.secret_source import SecretSource, StaticSecret
|
||||
from openhands.server.user_auth.user_auth import UserAuth, get_user_auth
|
||||
|
||||
USER_AUTH_ATTR = 'user_auth'
|
||||
|
||||
|
||||
@dataclass
|
||||
class AuthUserContext(UserContext):
|
||||
"""Interface to old user settings service. Eventually we want to migrate
|
||||
this to use true database asyncio."""
|
||||
|
||||
user_auth: UserAuth
|
||||
_user_info: UserInfo | None = None
|
||||
_provider_handler: ProviderHandler | None = None
|
||||
|
||||
async def get_user_id(self) -> str | None:
|
||||
# If you have an auth object here you are logged in. If user_id is None
|
||||
# it means we are in OSS mode.
|
||||
user_id = await self.user_auth.get_user_id()
|
||||
return user_id
|
||||
|
||||
async def get_user_info(self) -> UserInfo:
|
||||
user_info = self._user_info
|
||||
if user_info is None:
|
||||
user_id = await self.get_user_id()
|
||||
settings = await self.user_auth.get_user_settings()
|
||||
assert settings is not None
|
||||
user_info = UserInfo(
|
||||
id=user_id,
|
||||
**settings.model_dump(context={'expose_secrets': True}),
|
||||
)
|
||||
self._user_info = user_info
|
||||
return user_info
|
||||
|
||||
async def get_provider_handler(self):
|
||||
provider_handler = self._provider_handler
|
||||
if not provider_handler:
|
||||
provider_tokens = await self.user_auth.get_provider_tokens()
|
||||
assert provider_tokens is not None
|
||||
user_id = await self.get_user_id()
|
||||
provider_handler = ProviderHandler(
|
||||
provider_tokens=provider_tokens, external_auth_id=user_id
|
||||
)
|
||||
self._provider_handler = provider_handler
|
||||
return provider_handler
|
||||
|
||||
async def get_authenticated_git_url(self, repository: str) -> str:
|
||||
provider_handler = await self.get_provider_handler()
|
||||
url = await provider_handler.get_authenticated_git_url(repository)
|
||||
return url
|
||||
|
||||
async def get_latest_token(self, provider_type: ProviderType) -> str | None:
|
||||
provider_handler = await self.get_provider_handler()
|
||||
service = provider_handler.get_service(provider_type)
|
||||
token = await service.get_latest_token()
|
||||
return token
|
||||
|
||||
async def get_secrets(self) -> dict[str, SecretSource]:
|
||||
results = {}
|
||||
|
||||
# Include custom secrets...
|
||||
secrets = await self.user_auth.get_user_secrets()
|
||||
if secrets:
|
||||
for name, custom_secret in secrets.custom_secrets.items():
|
||||
results[name] = StaticSecret(value=custom_secret.secret)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
USER_ID_ATTR = 'user_id'
|
||||
|
||||
|
||||
class AuthUserContextInjector(UserContextInjector):
|
||||
_user_auth_class: Any = PrivateAttr(default=None)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[UserContext, None]:
|
||||
user_context = getattr(state, USER_CONTEXT_ATTR, None)
|
||||
if user_context is None:
|
||||
if request is None:
|
||||
raise AuthError()
|
||||
user_auth = await get_user_auth(request)
|
||||
user_context = AuthUserContext(user_auth=user_auth)
|
||||
setattr(state, USER_CONTEXT_ATTR, user_context)
|
||||
|
||||
yield user_context
|
||||
48
openhands/app_server/user/specifiy_user_context.py
Normal file
48
openhands/app_server/user/specifiy_user_context.py
Normal file
@ -0,0 +1,48 @@
from dataclasses import dataclass

from fastapi import Request

from openhands.app_server.errors import OpenHandsError
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.user.user_models import UserInfo
from openhands.integrations.provider import ProviderType
from openhands.sdk.conversation.secret_source import SecretSource


@dataclass(frozen=True)
class SpecifyUserContext(UserContext):
    """User context for use in admin operations which allows access beyond the scope of a single user"""

    user_id: str | None

    async def get_user_id(self) -> str | None:
        return self.user_id

    async def get_user_info(self) -> UserInfo:
        raise NotImplementedError()

    async def get_authenticated_git_url(self, repository: str) -> str:
        raise NotImplementedError()

    async def get_latest_token(self, provider_type: ProviderType) -> str | None:
        raise NotImplementedError()

    async def get_secrets(self) -> dict[str, SecretSource]:
        raise NotImplementedError()


USER_CONTEXT_ATTR = 'user_context'
ADMIN = SpecifyUserContext(user_id=None)


def as_admin(request: Request):
    """Service the request as an admin user without restrictions. The endpoint should
    handle security."""
    user_context = getattr(request.state, USER_CONTEXT_ATTR, None)
    if user_context not in (None, ADMIN):
        raise OpenHandsError(
            'Non admin context already present! '
            '(Do you need to move the as_admin dependency to the start of the args?)'
        )
    setattr(request.state, USER_CONTEXT_ATTR, ADMIN)
    return ADMIN
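As a rough illustration of how `as_admin` is meant to be wired into an endpoint, here is a hypothetical sketch (not part of this commit); the router prefix, endpoint path, and handler body are invented for this example.

```python
# Hypothetical usage sketch - not part of this commit.
from fastapi import APIRouter, Depends

from openhands.app_server.user.specifiy_user_context import ADMIN, as_admin

router = APIRouter(prefix='/admin')  # illustrative router


@router.delete('/conversations/{conversation_id}')
async def admin_delete_conversation(
    conversation_id: str,
    # Runs as_admin for this request; the endpoint must enforce its own authorization.
    user_context=Depends(as_admin),
) -> bool:
    assert user_context is ADMIN
    # Delete the conversation here (admin scope, no per-user restriction).
    return True
```

Because `as_admin` stores `ADMIN` on `request.state` under `USER_CONTEXT_ATTR`, downstream injectors such as `AuthUserContextInjector` above will pick it up instead of resolving a per-user context, which is why the dependency needs to run before them.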
41
openhands/app_server/user/user_context.py
Normal file
41
openhands/app_server/user/user_context.py
Normal file
@ -0,0 +1,41 @@
from abc import ABC, abstractmethod

from openhands.app_server.services.injector import Injector
from openhands.app_server.user.user_models import (
    UserInfo,
)
from openhands.integrations.provider import ProviderType
from openhands.sdk.conversation.secret_source import SecretSource
from openhands.sdk.utils.models import DiscriminatedUnionMixin


class UserContext(ABC):
    """Abstract context for user operations."""

    # Read methods

    @abstractmethod
    async def get_user_id(self) -> str | None:
        """Get the user id"""

    @abstractmethod
    async def get_user_info(self) -> UserInfo:
        """Get the user info."""

    @abstractmethod
    async def get_authenticated_git_url(self, repository: str) -> str:
        """Get an authenticated git URL for the given repository"""

    @abstractmethod
    async def get_latest_token(self, provider_type: ProviderType) -> str | None:
        """Get the latest token for the provider type given"""

    @abstractmethod
    async def get_secrets(self) -> dict[str, SecretSource]:
        """Get custom secrets and github provider secrets for the conversation."""


class UserContextInjector(DiscriminatedUnionMixin, Injector[UserContext], ABC):
    """Injector for user contexts."""

    pass
13
openhands/app_server/user/user_models.py
Normal file
13
openhands/app_server/user/user_models.py
Normal file
@ -0,0 +1,13 @@
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
from openhands.storage.data_models.settings import Settings


class UserInfo(Settings):
    """Model for user settings including the current user id."""

    id: str | None = None


class ProviderTokenPage:
    items: list[PROVIDER_TOKEN_TYPE]
    next_page_id: str | None = None
23
openhands/app_server/user/user_router.py
Normal file
23
openhands/app_server/user/user_router.py
Normal file
@ -0,0 +1,23 @@
"""User router for OpenHands Server. For the moment, this simply implements the /me endpoint."""

from fastapi import APIRouter, HTTPException, status

from openhands.app_server.config import depends_user_context
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.user.user_models import UserInfo

router = APIRouter(prefix='/users', tags=['User'])
user_dependency = depends_user_context()

# Read methods


@router.get('/me')
async def get_current_user(
    user_context: UserContext = user_dependency,
) -> UserInfo:
    """Get the current authenticated user."""
    user = await user_context.get_user_info()
    if user is None:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, detail='Not authenticated')
    return user
20
openhands/app_server/utils/README.md
Normal file
20
openhands/app_server/utils/README.md
Normal file
@ -0,0 +1,20 @@
# Utilities

Common utility functions and helpers for the OpenHands app server.

## Overview

This module provides utility functions that are used across the app server for common operations like date handling, SQL operations, and dynamic imports.

## Key Components

- **date_utils**: Date and time utilities
- **sql_utils**: SQL database operation helpers
- **import_utils**: Dynamic module import utilities

## Key Functions

- **utc_now()**: Returns current UTC timestamp (replaces deprecated datetime.utcnow)
- Database connection and query helpers
- Dynamic module loading utilities
- Safe import error handling
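A minimal sketch of the `utc_now()` helper mentioned above, assuming it lives in the `date_utils` module this README describes; the exact import path is not shown in this diff and is an assumption.

```python
# Hypothetical sketch - import path assumed from the README above.
from datetime import datetime, timezone

from openhands.app_server.utils.date_utils import utc_now  # assumed module path

now = utc_now()
assert now.tzinfo is not None  # timezone-aware, unlike the deprecated datetime.utcnow()
assert abs((now - datetime.now(timezone.utc)).total_seconds()) < 1
```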
256
openhands/app_server/utils/async_remote_workspace.py
Normal file
256
openhands/app_server/utils/async_remote_workspace.py
Normal file
@ -0,0 +1,256 @@
|
||||
import logging
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
from openhands.sdk.workspace.models import CommandResult, FileOperationResult
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AsyncRemoteWorkspace:
|
||||
"""Mixin providing remote workspace operations."""
|
||||
|
||||
working_dir: str
|
||||
server_url: str
|
||||
session_api_key: str | None = None
|
||||
client: httpx.AsyncClient = field(default_factory=httpx.AsyncClient)
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
# Set up remote host and API key
|
||||
self.server_url = self.server_url.rstrip('/')
|
||||
|
||||
def _headers(self):
|
||||
headers = {}
|
||||
if self.session_api_key:
|
||||
headers['X-Session-API-Key'] = self.session_api_key
|
||||
return headers
|
||||
|
||||
async def execute_command(
|
||||
self,
|
||||
command: str,
|
||||
cwd: str | Path | None = None,
|
||||
timeout: float = 30.0,
|
||||
) -> CommandResult:
|
||||
"""Execute a bash command on the remote system.
|
||||
|
||||
This method starts a bash command via the remote agent server API,
|
||||
then polls for the output until the command completes.
|
||||
|
||||
Args:
|
||||
command: The bash command to execute
|
||||
cwd: Working directory (optional)
|
||||
timeout: Timeout in seconds
|
||||
|
||||
Returns:
|
||||
CommandResult: Result with stdout, stderr, exit_code, and other metadata
|
||||
"""
|
||||
_logger.debug(f'Executing remote command: {command}')
|
||||
|
||||
# Step 1: Start the bash command
|
||||
payload = {
|
||||
'command': command,
|
||||
'timeout': int(timeout),
|
||||
}
|
||||
if cwd is not None:
|
||||
payload['cwd'] = str(cwd)
|
||||
|
||||
try:
|
||||
# Start the command
|
||||
response = await self.client.post(
|
||||
f'{self.server_url}/api/bash/execute_bash_command',
|
||||
json=payload,
|
||||
timeout=timeout + 5.0, # Add buffer to HTTP timeout
|
||||
headers=self._headers(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
bash_command = response.json()
|
||||
command_id = bash_command['id']
|
||||
|
||||
_logger.debug(f'Started command with ID: {command_id}')
|
||||
|
||||
# Step 2: Poll for output until command completes
|
||||
start_time = time.time()
|
||||
stdout_parts = []
|
||||
stderr_parts = []
|
||||
exit_code = None
|
||||
|
||||
while time.time() - start_time < timeout:
|
||||
# Search for all events and filter client-side
|
||||
# (workaround for bash service filtering bug)
|
||||
search_response = await self.client.get(
|
||||
f'{self.server_url}/api/bash/bash_events/search',
|
||||
params={
|
||||
'sort_order': 'TIMESTAMP',
|
||||
'limit': 100,
|
||||
},
|
||||
timeout=10.0,
|
||||
headers=self._headers(),
|
||||
)
|
||||
search_response.raise_for_status()
|
||||
search_result = search_response.json()
|
||||
|
||||
# Filter for BashOutput events for this command
|
||||
for event in search_result.get('items', []):
|
||||
if (
|
||||
event.get('kind') == 'BashOutput'
|
||||
and event.get('command_id') == command_id
|
||||
):
|
||||
if event.get('stdout'):
|
||||
stdout_parts.append(event['stdout'])
|
||||
if event.get('stderr'):
|
||||
stderr_parts.append(event['stderr'])
|
||||
if event.get('exit_code') is not None:
|
||||
exit_code = event['exit_code']
|
||||
|
||||
# If we have an exit code, the command is complete
|
||||
if exit_code is not None:
|
||||
break
|
||||
|
||||
# Wait a bit before polling again
|
||||
time.sleep(0.1)
|
||||
|
||||
# If we timed out waiting for completion
|
||||
if exit_code is None:
|
||||
_logger.warning(f'Command timed out after {timeout} seconds: {command}')
|
||||
exit_code = -1
|
||||
stderr_parts.append(f'Command timed out after {timeout} seconds')
|
||||
|
||||
# Combine all output parts
|
||||
stdout = ''.join(stdout_parts)
|
||||
stderr = ''.join(stderr_parts)
|
||||
|
||||
return CommandResult(
|
||||
command=command,
|
||||
exit_code=exit_code,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
timeout_occurred=exit_code == -1 and 'timed out' in stderr,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
_logger.error(f'Remote command execution failed: {e}')
|
||||
return CommandResult(
|
||||
command=command,
|
||||
exit_code=-1,
|
||||
stdout='',
|
||||
stderr=f'Remote execution error: {str(e)}',
|
||||
timeout_occurred=False,
|
||||
)
|
||||
|
||||
async def file_upload(
|
||||
self,
|
||||
source_path: str | Path,
|
||||
destination_path: str | Path,
|
||||
) -> FileOperationResult:
|
||||
"""Upload a file to the remote system.
|
||||
|
||||
Reads the local file and sends it to the remote system via HTTP API.
|
||||
|
||||
Args:
|
||||
source_path: Path to the local source file
|
||||
destination_path: Path where the file should be uploaded on remote system
|
||||
|
||||
Returns:
|
||||
FileOperationResult: Result with success status and metadata
|
||||
"""
|
||||
source = Path(source_path)
|
||||
destination = Path(destination_path)
|
||||
|
||||
_logger.debug(f'Remote file upload: {source} -> {destination}')
|
||||
|
||||
try:
|
||||
# Read the file content
|
||||
with open(source, 'rb') as f:
|
||||
file_content = f.read()
|
||||
|
||||
# Prepare the upload
|
||||
files = {'file': (source.name, file_content)}
|
||||
data = {'destination_path': str(destination)}
|
||||
|
||||
# Make the HTTP call
|
||||
response = await self.client.post(
|
||||
'/api/files/upload',
|
||||
files=files,
|
||||
data=data,
|
||||
timeout=60.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
result_data = response.json()
|
||||
|
||||
# Convert the API response to our model
|
||||
return FileOperationResult(
|
||||
success=result_data.get('success', True),
|
||||
source_path=str(source),
|
||||
destination_path=str(destination),
|
||||
file_size=result_data.get('file_size'),
|
||||
error=result_data.get('error'),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
_logger.error(f'Remote file upload failed: {e}')
|
||||
return FileOperationResult(
|
||||
success=False,
|
||||
source_path=str(source),
|
||||
destination_path=str(destination),
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
async def file_download(
|
||||
self,
|
||||
source_path: str | Path,
|
||||
destination_path: str | Path,
|
||||
) -> FileOperationResult:
|
||||
"""Download a file from the remote system.
|
||||
|
||||
Requests the file from the remote system via HTTP API and saves it locally.
|
||||
|
||||
Args:
|
||||
source_path: Path to the source file on remote system
|
||||
destination_path: Path where the file should be saved locally
|
||||
|
||||
Returns:
|
||||
FileOperationResult: Result with success status and metadata
|
||||
"""
|
||||
source = Path(source_path)
|
||||
destination = Path(destination_path)
|
||||
|
||||
_logger.debug(f'Remote file download: {source} -> {destination}')
|
||||
|
||||
try:
|
||||
# Request the file from remote system
|
||||
params = {'file_path': str(source)}
|
||||
|
||||
# Make the HTTP call
|
||||
response = await self.client.get(
|
||||
'/api/files/download',
|
||||
params=params,
|
||||
timeout=60.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
# Ensure destination directory exists
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Write the file content
|
||||
with open(destination, 'wb') as f:
|
||||
f.write(response.content)
|
||||
|
||||
return FileOperationResult(
|
||||
success=True,
|
||||
source_path=str(source),
|
||||
destination_path=str(destination),
|
||||
file_size=len(response.content),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
_logger.error(f'Remote file download failed: {e}')
|
||||
return FileOperationResult(
|
||||
success=False,
|
||||
source_path=str(source),
|
||||
destination_path=str(destination),
|
||||
error=str(e),
|
||||
)
|
||||
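A hypothetical end-to-end usage sketch for `AsyncRemoteWorkspace` (not part of this commit); the server URL, paths, and API key are placeholders.

```python
# Hypothetical usage sketch - not part of this commit.
import asyncio

from openhands.app_server.utils.async_remote_workspace import AsyncRemoteWorkspace


async def main() -> None:
    workspace = AsyncRemoteWorkspace(
        working_dir='/workspace',            # illustrative
        server_url='http://localhost:8001',  # illustrative agent-server URL
        session_api_key='my-session-key',    # illustrative
    )
    # execute_command starts the bash command, then polls bash_events until an exit code appears.
    result = await workspace.execute_command('echo hello', timeout=30.0)
    print(result.exit_code, result.stdout)

    # Files are exchanged through the agent server's /api/files endpoints.
    await workspace.file_upload('local.txt', '/workspace/remote.txt')
    await workspace.file_download('/workspace/remote.txt', 'copy.txt')


asyncio.run(main())
```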
58
openhands/app_server/utils/encryption_key.py
Normal file
58
openhands/app_server/utils/encryption_key.py
Normal file
@ -0,0 +1,58 @@
import os
from datetime import datetime
from pathlib import Path
from typing import Any

import base62
from pydantic import BaseModel, Field, SecretStr, TypeAdapter, field_serializer

from openhands.agent_server.utils import utc_now


class EncryptionKey(BaseModel):
    """Configuration for an encryption key."""

    id: str = Field(default_factory=lambda: base62.encodebytes(os.urandom(32)))
    key: SecretStr
    active: bool = True
    notes: str | None = None
    created_at: datetime = Field(default_factory=utc_now)

    @field_serializer('key')
    def serialize_key(self, key: SecretStr, info: Any):
        """Conditionally serialize the key based on context."""
        if info.context and info.context.get('reveal_secrets'):
            return key.get_secret_value()
        return str(key)  # Returns '**********' by default


def get_default_encryption_keys(workspace_dir: Path) -> list[EncryptionKey]:
    """Generate default encryption keys."""
    master_key = os.getenv('JWT_SECRET')
    if master_key:
        return [
            EncryptionKey(
                key=SecretStr(master_key),
                active=True,
                notes='jwt secret master key',
            )
        ]

    key_file = workspace_dir / '.keys'
    type_adapter = TypeAdapter(list[EncryptionKey])
    if key_file.exists():
        encryption_keys = type_adapter.validate_json(key_file.read_text())
        return encryption_keys

    encryption_keys = [
        EncryptionKey(
            key=SecretStr(base62.encodebytes(os.urandom(32))),
            active=True,
            notes='generated master key',
        )
    ]
    json_data = type_adapter.dump_json(
        encryption_keys, context={'expose_secrets': True}
    )
    key_file.write_bytes(json_data)
    return encryption_keys
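A small hypothetical sketch of the serializer's context switch shown above (not part of this commit):

```python
# Hypothetical sketch - not part of this commit.
from pydantic import SecretStr

from openhands.app_server.utils.encryption_key import EncryptionKey

key = EncryptionKey(key=SecretStr('super-secret'))

# Default serialization masks the key material.
assert key.model_dump()['key'] == '**********'

# Passing the context flag checked by serialize_key reveals it.
assert key.model_dump(context={'reveal_secrets': True})['key'] == 'super-secret'
```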
78
openhands/app_server/utils/import_utils.py
Normal file
78
openhands/app_server/utils/import_utils.py
Normal file
@ -0,0 +1,78 @@
|
||||
import importlib
|
||||
from functools import lru_cache
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def import_from(qual_name: str):
|
||||
"""Import a value from its fully qualified name.
|
||||
|
||||
This function is a utility to dynamically import any Python value (class,
|
||||
function, variable) from its fully qualified name. For example,
|
||||
'openhands.server.user_auth.UserAuth' would import the UserAuth class from the
|
||||
openhands.server.user_auth module.
|
||||
|
||||
Args:
|
||||
qual_name: A fully qualified name in the format 'module.submodule.name'
|
||||
e.g. 'openhands.server.user_auth.UserAuth'
|
||||
|
||||
Returns:
|
||||
The imported value (class, function, or variable)
|
||||
|
||||
Example:
|
||||
>>> UserAuth = import_from('openhands.server.user_auth.UserAuth')
|
||||
>>> auth = UserAuth()
|
||||
"""
|
||||
parts = qual_name.split('.')
|
||||
module_name = '.'.join(parts[:-1])
|
||||
module = importlib.import_module(module_name)
|
||||
result = getattr(module, parts[-1])
|
||||
return result
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def _get_impl(cls: type[T], impl_name: str | None) -> type[T]:
|
||||
if impl_name is None:
|
||||
return cls
|
||||
impl_class = import_from(impl_name)
|
||||
assert cls == impl_class or issubclass(impl_class, cls)
|
||||
return impl_class
|
||||
|
||||
|
||||
def get_impl(cls: type[T], impl_name: str | None) -> type[T]:
|
||||
"""Import and validate a named implementation of a base class.
|
||||
|
||||
This function is an extensibility mechanism in OpenHands that allows runtime
|
||||
substitution of implementations. It enables applications to customize behavior by
|
||||
providing their own implementations of OpenHands base classes.
|
||||
|
||||
The function ensures type safety by validating that the imported class is either
|
||||
the same as or a subclass of the specified base class.
|
||||
|
||||
Args:
|
||||
cls: The base class that defines the interface
|
||||
impl_name: Fully qualified name of the implementation class, or None to use
|
||||
the base class
|
||||
e.g. 'openhands.server.conversation_service.'
|
||||
'StandaloneConversationService'
|
||||
|
||||
Returns:
|
||||
The implementation class, which is guaranteed to be a subclass of cls
|
||||
|
||||
Example:
|
||||
>>> # Get default implementation
|
||||
>>> ConversationService = get_impl(ConversationService, None)
|
||||
>>> # Get custom implementation
|
||||
>>> CustomService = get_impl(
|
||||
... ConversationService, 'myapp.CustomConversationService'
|
||||
... )
|
||||
|
||||
Common Use Cases:
|
||||
- Server components (ConversationService, UserAuth, etc.)
|
||||
- Storage implementations (ConversationStore, SettingsStore, etc.)
|
||||
- Service integrations (GitHub, GitLab, Bitbucket services)
|
||||
|
||||
The implementation is cached to avoid repeated imports of the same class.
|
||||
"""
|
||||
return _get_impl(cls, impl_name) # type: ignore
|
||||
104
openhands/app_server/utils/sql_utils.py
Normal file
104
openhands/app_server/utils/sql_utils.py
Normal file
@ -0,0 +1,104 @@
|
||||
from datetime import UTC, datetime
|
||||
from enum import Enum
|
||||
from typing import TypeVar
|
||||
|
||||
from pydantic import SecretStr, TypeAdapter
|
||||
from sqlalchemy import JSON, DateTime, String, TypeDecorator
|
||||
from sqlalchemy.orm import declarative_base
|
||||
|
||||
Base = declarative_base()
|
||||
T = TypeVar('T', bound=Enum)
|
||||
|
||||
|
||||
def create_json_type_decorator(object_type: type):
|
||||
"""Create a decorator for a particular type. Introduced because SQLAlchemy could not process lists of enum values."""
|
||||
type_adapter: TypeAdapter = TypeAdapter(object_type)
|
||||
|
||||
class JsonTypeDecorator(TypeDecorator):
|
||||
impl = JSON
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
return type_adapter.dump_python(
|
||||
value, mode='json', context={'expose_secrets': True}
|
||||
)
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return type_adapter.validate_python(value)
|
||||
|
||||
return JsonTypeDecorator
|
||||
|
||||
|
||||
class StoredSecretStr(TypeDecorator):
|
||||
"""TypeDecorator for secret strings. Encrypts the value using the default key before storing."""
|
||||
|
||||
impl = String
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if value is not None:
|
||||
from openhands.app_server.config import get_global_config
|
||||
|
||||
jwt_service_injector = get_global_config().jwt
|
||||
assert jwt_service_injector is not None
|
||||
jwt_service = jwt_service_injector.get_jwt_service()
|
||||
token = jwt_service.create_jwe_token({'v': value.get_secret_value()})
|
||||
return token
|
||||
return None
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
if value is not None:
|
||||
from openhands.app_server.config import get_global_config
|
||||
|
||||
jwt_service_injector = get_global_config().jwt
|
||||
assert jwt_service_injector is not None
|
||||
jwt_service = jwt_service_injector.get_jwt_service()
|
||||
token = jwt_service.decrypt_jwe_token(value)
|
||||
return SecretStr(token['v'])
|
||||
return None
|
||||
|
||||
|
||||
class UtcDateTime(TypeDecorator):
|
||||
"""TypeDecorator for datetime - stores all datetimes in utc. Assumes datetime without
|
||||
a specified timezone are utc. (Sqlite doesn't always return these)"""
|
||||
|
||||
impl = DateTime(timezone=True)
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if isinstance(value, datetime) and value.tzinfo != UTC:
|
||||
value = value.astimezone(UTC)
|
||||
return value
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
if isinstance(value, datetime):
|
||||
if value.tzinfo is None:
|
||||
value = value.replace(tzinfo=UTC)
|
||||
elif value.tzinfo != UTC:
|
||||
value = value.astimezone(UTC)
|
||||
return value
|
||||
|
||||
|
||||
def create_enum_type_decorator(enum_type: type[T]):
|
||||
class EnumTypeDecorator(TypeDecorator):
|
||||
impl = String
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if value is None:
|
||||
return None
|
||||
return value.value
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
if value:
|
||||
return enum_type[value]
|
||||
|
||||
return EnumTypeDecorator
|
||||
|
||||
|
||||
def row2dict(row):
|
||||
d = {}
|
||||
for column in row.__table__.columns:
|
||||
d[column.name] = getattr(row, column.name)
|
||||
|
||||
return d
|
||||
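As a rough illustration of how these type decorators are meant to be used, here is a hypothetical model (not part of this commit); the table and column names are invented for this sketch.

```python
# Hypothetical sketch - not part of this commit.
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base

from openhands.app_server.utils.sql_utils import (
    StoredSecretStr,
    UtcDateTime,
    create_json_type_decorator,
)

Base = declarative_base()  # the real models share the Base defined in sql_utils


class ApiCredential(Base):  # illustrative model
    __tablename__ = 'api_credentials'

    id = Column(String, primary_key=True)
    # Encrypted with the default JWE key before being written to the database.
    token = Column(StoredSecretStr, nullable=True)
    # Always stored and returned as timezone-aware UTC datetimes.
    created_at = Column(UtcDateTime, nullable=False)
    # JSON column validated through a pydantic TypeAdapter for list[str].
    scopes = Column(create_json_type_decorator(list[str]), nullable=True)
```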
18
openhands/app_server/v1_router.py
Normal file
18
openhands/app_server/v1_router.py
Normal file
@ -0,0 +1,18 @@
from fastapi import APIRouter

from openhands.app_server.app_conversation import app_conversation_router
from openhands.app_server.event import event_router
from openhands.app_server.event_callback import (
    webhook_router,
)
from openhands.app_server.sandbox import sandbox_router, sandbox_spec_router
from openhands.app_server.user import user_router

# Include routers
router = APIRouter(prefix='/api/v1')
router.include_router(event_router.router)
router.include_router(app_conversation_router.router)
router.include_router(sandbox_router.router)
router.include_router(sandbox_spec_router.router)
router.include_router(user_router.router)
router.include_router(webhook_router.router)
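For orientation, this is roughly how the aggregate router ends up mounted; the application does this via `app.include_router(v1_router.router)` in `openhands/server/app.py` later in this diff, and the standalone `FastAPI()` instance here is only illustrative.

```python
# Hypothetical sketch - not part of this commit.
from fastapi import FastAPI

from openhands.app_server import v1_router

app = FastAPI()
app.include_router(v1_router.router)

# With the '/api/v1' prefix above and the '/users' prefix in user_router,
# the current-user endpoint is served at:
#   GET /api/v1/users/me
```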
@ -30,7 +30,9 @@ def main():
|
||||
args = parser.parse_args()
|
||||
|
||||
if hasattr(args, 'version') and args.version:
|
||||
print(f'OpenHands CLI version: {openhands.get_version()}')
|
||||
from openhands import get_version
|
||||
|
||||
print(f'OpenHands CLI version: {get_version()}')
|
||||
sys.exit(0)
|
||||
|
||||
if args.command == 'serve':
|
||||
|
||||
@ -167,11 +167,12 @@ def handle_fast_commands() -> bool:
|
||||
|
||||
# Handle --version or -v
|
||||
if len(sys.argv) == 2 and sys.argv[1] in ('--version', '-v'):
|
||||
import openhands
|
||||
from openhands import get_version
|
||||
|
||||
print(f'OpenHands CLI version: {get_version()}')
|
||||
|
||||
display_deprecation_warning()
|
||||
|
||||
print(f'OpenHands CLI version: {openhands.get_version()}')
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@ -12,7 +12,6 @@ import toml
|
||||
from dotenv import load_dotenv
|
||||
from pydantic import BaseModel, SecretStr, ValidationError
|
||||
|
||||
from openhands import __version__
|
||||
from openhands.core import logger
|
||||
from openhands.core.config.agent_config import AgentConfig
|
||||
from openhands.core.config.arg_utils import get_headless_parser
|
||||
@ -785,9 +784,10 @@ def parse_arguments() -> argparse.Namespace:
|
||||
"""Parse command line arguments."""
|
||||
parser = get_headless_parser()
|
||||
args = parser.parse_args()
|
||||
from openhands import get_version
|
||||
|
||||
if args.version:
|
||||
print(f'OpenHands version: {__version__}')
|
||||
print(f'OpenHands version: {get_version()}')
|
||||
sys.exit(0)
|
||||
|
||||
return args
|
||||
|
||||
@ -411,6 +411,7 @@ LOQUACIOUS_LOGGERS = [
|
||||
'socketio',
|
||||
'socketio.client',
|
||||
'socketio.server',
|
||||
'aiosqlite',
|
||||
]
|
||||
|
||||
for logger_name in LOQUACIOUS_LOGGERS:
|
||||
|
||||
@ -238,7 +238,7 @@ async def run_controller(
|
||||
file_path = config.save_trajectory_path
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
histories = controller.get_trajectory(config.save_screenshots_in_trajectory)
|
||||
with open(file_path, 'w') as f: # noqa: ASYNC101
|
||||
with open(file_path, 'w') as f:
|
||||
json.dump(histories, f, indent=4)
|
||||
|
||||
return state
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
import openhands
|
||||
from openhands.core.schema import ActionType
|
||||
from openhands.events.action.action import Action, ActionSecurityRisk
|
||||
from openhands.version import get_version
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -48,7 +48,7 @@ class SystemMessageAction(Action):
|
||||
|
||||
content: str
|
||||
tools: list[Any] | None = None
|
||||
openhands_version: str | None = openhands.__version__
|
||||
openhands_version: str | None = get_version()
|
||||
agent_class: str | None = None
|
||||
action: ActionType = ActionType.SYSTEM
|
||||
|
||||
|
||||
@ -146,7 +146,7 @@ class ProviderHandler:
|
||||
"""Read-only access to provider tokens."""
|
||||
return self._provider_tokens
|
||||
|
||||
def _get_service(self, provider: ProviderType) -> GitService:
|
||||
def get_service(self, provider: ProviderType) -> GitService:
|
||||
"""Helper method to instantiate a service for a given provider"""
|
||||
token = self.provider_tokens[provider]
|
||||
service_class = self.service_class_map[provider]
|
||||
@ -163,7 +163,7 @@ class ProviderHandler:
|
||||
"""Get user information from the first available provider"""
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
return await service.get_user()
|
||||
except Exception:
|
||||
continue
|
||||
@ -196,7 +196,7 @@ class ProviderHandler:
|
||||
return None
|
||||
|
||||
async def get_github_installations(self) -> list[str]:
|
||||
service = cast(InstallationsService, self._get_service(ProviderType.GITHUB))
|
||||
service = cast(InstallationsService, self.get_service(ProviderType.GITHUB))
|
||||
try:
|
||||
return await service.get_installations()
|
||||
except Exception as e:
|
||||
@ -205,7 +205,7 @@ class ProviderHandler:
|
||||
return []
|
||||
|
||||
async def get_bitbucket_workspaces(self) -> list[str]:
|
||||
service = cast(InstallationsService, self._get_service(ProviderType.BITBUCKET))
|
||||
service = cast(InstallationsService, self.get_service(ProviderType.BITBUCKET))
|
||||
try:
|
||||
return await service.get_installations()
|
||||
except Exception as e:
|
||||
@ -231,7 +231,7 @@ class ProviderHandler:
|
||||
if not page or not per_page:
|
||||
raise ValueError('Failed to provider params for paginating repos')
|
||||
|
||||
service = self._get_service(selected_provider)
|
||||
service = self.get_service(selected_provider)
|
||||
return await service.get_paginated_repos(
|
||||
page, per_page, sort, installation_id
|
||||
)
|
||||
@ -239,7 +239,7 @@ class ProviderHandler:
|
||||
all_repos: list[Repository] = []
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
service_repos = await service.get_all_repositories(sort, app_mode)
|
||||
all_repos.extend(service_repos)
|
||||
except Exception as e:
|
||||
@ -252,7 +252,7 @@ class ProviderHandler:
|
||||
tasks: list[SuggestedTask] = []
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
service_repos = await service.get_suggested_tasks()
|
||||
tasks.extend(service_repos)
|
||||
except Exception as e:
|
||||
@ -269,7 +269,7 @@ class ProviderHandler:
|
||||
) -> list[Branch]:
|
||||
"""Search for branches within a repository using the appropriate provider service."""
|
||||
if selected_provider:
|
||||
service = self._get_service(selected_provider)
|
||||
service = self.get_service(selected_provider)
|
||||
try:
|
||||
return await service.search_branches(repository, query, per_page)
|
||||
except Exception as e:
|
||||
@ -281,7 +281,7 @@ class ProviderHandler:
|
||||
# If provider not specified, determine provider by verifying repository access
|
||||
try:
|
||||
repo_details = await self.verify_repo_provider(repository)
|
||||
service = self._get_service(repo_details.git_provider)
|
||||
service = self.get_service(repo_details.git_provider)
|
||||
return await service.search_branches(repository, query, per_page)
|
||||
except Exception as e:
|
||||
logger.warning(f'Error searching branches for {repository}: {e}')
|
||||
@ -296,7 +296,7 @@ class ProviderHandler:
|
||||
order: str,
|
||||
) -> list[Repository]:
|
||||
if selected_provider:
|
||||
service = self._get_service(selected_provider)
|
||||
service = self.get_service(selected_provider)
|
||||
public = self._is_repository_url(query, selected_provider)
|
||||
user_repos = await service.search_repositories(
|
||||
query, per_page, sort, order, public
|
||||
@ -306,7 +306,7 @@ class ProviderHandler:
|
||||
all_repos: list[Repository] = []
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
public = self._is_repository_url(query, provider)
|
||||
service_repos = await service.search_repositories(
|
||||
query, per_page, sort, order, public
|
||||
@ -454,14 +454,14 @@ class ProviderHandler:
|
||||
|
||||
if specified_provider:
|
||||
try:
|
||||
service = self._get_service(specified_provider)
|
||||
service = self.get_service(specified_provider)
|
||||
return await service.get_repository_details_from_repo_name(repository)
|
||||
except Exception as e:
|
||||
errors.append(f'{specified_provider.value}: {str(e)}')
|
||||
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
return await service.get_repository_details_from_repo_name(repository)
|
||||
except Exception as e:
|
||||
errors.append(f'{provider.value}: {str(e)}')
|
||||
@ -504,7 +504,7 @@ class ProviderHandler:
|
||||
"""
|
||||
if specified_provider:
|
||||
try:
|
||||
service = self._get_service(specified_provider)
|
||||
service = self.get_service(specified_provider)
|
||||
return await service.get_paginated_branches(repository, page, per_page)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
@ -513,7 +513,7 @@ class ProviderHandler:
|
||||
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
return await service.get_paginated_branches(repository, page, per_page)
|
||||
except Exception as e:
|
||||
logger.warning(f'Error fetching branches from {provider}: {e}')
|
||||
@ -543,7 +543,7 @@ class ProviderHandler:
|
||||
errors = []
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
result = await service.get_microagents(repository)
|
||||
# Only return early if we got a non-empty result
|
||||
if result:
|
||||
@ -587,7 +587,7 @@ class ProviderHandler:
|
||||
errors = []
|
||||
for provider in self.provider_tokens:
|
||||
try:
|
||||
service = self._get_service(provider)
|
||||
service = self.get_service(provider)
|
||||
result = await service.get_microagent_content(repository, file_path)
|
||||
# If we got content, return it immediately
|
||||
if result:
|
||||
@ -691,7 +691,7 @@ class ProviderHandler:
|
||||
True if PR is active (open), False if closed/merged, True if can't determine
|
||||
"""
|
||||
try:
|
||||
service = self._get_service(git_provider)
|
||||
service = self.get_service(git_provider)
|
||||
return await service.is_pr_open(repository, pr_number)
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@ -571,7 +571,7 @@ class IssueResolver:
|
||||
# checkout the repo
|
||||
repo_dir = os.path.join(self.output_dir, 'repo')
|
||||
if not os.path.exists(repo_dir):
|
||||
checkout_output = subprocess.check_output( # noqa: ASYNC101
|
||||
checkout_output = subprocess.check_output(
|
||||
[
|
||||
'git',
|
||||
'clone',
|
||||
@ -584,7 +584,7 @@ class IssueResolver:
|
||||
|
||||
# get the commit id of current repo for reproducibility
|
||||
base_commit = (
|
||||
subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_dir) # noqa: ASYNC101
|
||||
subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_dir)
|
||||
.decode('utf-8')
|
||||
.strip()
|
||||
)
|
||||
@ -596,7 +596,7 @@ class IssueResolver:
|
||||
repo_dir, '.openhands_instructions'
|
||||
)
|
||||
if os.path.exists(openhands_instructions_path):
|
||||
with open(openhands_instructions_path, 'r') as f: # noqa: ASYNC101
|
||||
with open(openhands_instructions_path, 'r') as f:
|
||||
self.repo_instruction = f.read()
|
||||
|
||||
# OUTPUT FILE
|
||||
@ -605,7 +605,7 @@ class IssueResolver:
|
||||
|
||||
# Check if this issue was already processed
|
||||
if os.path.exists(output_file):
|
||||
with open(output_file, 'r') as f: # noqa: ASYNC101
|
||||
with open(output_file, 'r') as f:
|
||||
for line in f:
|
||||
data = ResolverOutput.model_validate_json(line)
|
||||
if data.issue.number == self.issue_number:
|
||||
@ -614,7 +614,7 @@ class IssueResolver:
|
||||
)
|
||||
return
|
||||
|
||||
output_fp = open(output_file, 'a') # noqa: ASYNC101
|
||||
output_fp = open(output_file, 'a')
|
||||
|
||||
logger.info(
|
||||
f'Resolving issue {self.issue_number} with Agent {AGENT_CLASS}, model {model_name}, max iterations {self.max_iterations}.'
|
||||
@ -633,20 +633,20 @@ class IssueResolver:
|
||||
|
||||
# Fetch the branch first to ensure it exists locally
|
||||
fetch_cmd = ['git', 'fetch', 'origin', branch_to_use]
|
||||
subprocess.check_output( # noqa: ASYNC101
|
||||
subprocess.check_output(
|
||||
fetch_cmd,
|
||||
cwd=repo_dir,
|
||||
)
|
||||
|
||||
# Checkout the branch
|
||||
checkout_cmd = ['git', 'checkout', branch_to_use]
|
||||
subprocess.check_output( # noqa: ASYNC101
|
||||
subprocess.check_output(
|
||||
checkout_cmd,
|
||||
cwd=repo_dir,
|
||||
)
|
||||
|
||||
base_commit = (
|
||||
subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_dir) # noqa: ASYNC101
|
||||
subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_dir)
|
||||
.decode('utf-8')
|
||||
.strip()
|
||||
)
|
||||
|
||||
@ -5,12 +5,12 @@ import time
|
||||
|
||||
import docker
|
||||
|
||||
from openhands import __version__ as oh_version
|
||||
from openhands.core.exceptions import AgentRuntimeBuildError
|
||||
from openhands.core.logger import RollingLogger
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.runtime.builder.base import RuntimeBuilder
|
||||
from openhands.utils.term_color import TermColor, colorize
|
||||
from openhands.version import get_version
|
||||
|
||||
|
||||
class DockerRuntimeBuilder(RuntimeBuilder):
|
||||
@ -131,7 +131,7 @@ class DockerRuntimeBuilder(RuntimeBuilder):
|
||||
'buildx',
|
||||
'build',
|
||||
'--progress=plain',
|
||||
f'--build-arg=OPENHANDS_RUNTIME_VERSION={oh_version}',
|
||||
f'--build-arg=OPENHANDS_RUNTIME_VERSION={get_version()}',
|
||||
f'--build-arg=OPENHANDS_RUNTIME_BUILD_TIME={datetime.datetime.now().isoformat()}',
|
||||
f'--tag={target_image_hash_name}',
|
||||
'--load',
|
||||
|
||||
@ -78,7 +78,7 @@ class JupyterPlugin(Plugin):
|
||||
|
||||
# Using synchronous subprocess.Popen for Windows as asyncio.create_subprocess_shell
|
||||
# has limitations on Windows platforms
|
||||
self.gateway_process = subprocess.Popen( # type: ignore[ASYNC101] # noqa: ASYNC101
|
||||
self.gateway_process = subprocess.Popen( # type: ignore[ASYNC101]
|
||||
jupyter_launch_command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
@ -91,19 +91,19 @@ class JupyterPlugin(Plugin):
|
||||
output = ''
|
||||
while should_continue():
|
||||
if self.gateway_process.stdout is None:
|
||||
time.sleep(1) # type: ignore[ASYNC101] # noqa: ASYNC101
|
||||
time.sleep(1) # type: ignore[ASYNC101]
|
||||
continue
|
||||
|
||||
line = self.gateway_process.stdout.readline()
|
||||
if not line:
|
||||
time.sleep(1) # type: ignore[ASYNC101] # noqa: ASYNC101
|
||||
time.sleep(1) # type: ignore[ASYNC101]
|
||||
continue
|
||||
|
||||
output += line
|
||||
if 'at' in line:
|
||||
break
|
||||
|
||||
time.sleep(1) # type: ignore[ASYNC101] # noqa: ASYNC101
|
||||
time.sleep(1) # type: ignore[ASYNC101]
|
||||
logger.debug('Waiting for jupyter kernel gateway to start...')
|
||||
|
||||
logger.debug(
|
||||
|
||||
@ -231,7 +231,7 @@ class BashSession:
|
||||
|
||||
# Set history limit to a large number to avoid losing history
|
||||
# https://unix.stackexchange.com/questions/43414/unlimited-history-in-tmux
|
||||
self.session.set_option('history-limit', str(self.HISTORY_LIMIT), _global=True)
|
||||
self.session.set_option('history-limit', str(self.HISTORY_LIMIT), global_=True)
|
||||
self.session.history_limit = self.HISTORY_LIMIT
|
||||
# We need to create a new pane because the initial pane's history limit is (default) 2000
|
||||
_initial_window = self.session.active_window
|
||||
|
||||
@ -86,7 +86,7 @@ async def read_file(
|
||||
)
|
||||
|
||||
try:
|
||||
with open(whole_path, 'r', encoding='utf-8') as file: # noqa: ASYNC101
|
||||
with open(whole_path, 'r', encoding='utf-8') as file:
|
||||
lines = read_lines(file.readlines(), start, end)
|
||||
except FileNotFoundError:
|
||||
return ErrorObservation(f'File not found: {path}')
|
||||
@ -127,7 +127,7 @@ async def write_file(
|
||||
os.makedirs(os.path.dirname(whole_path))
|
||||
mode = 'w' if not os.path.exists(whole_path) else 'r+'
|
||||
try:
|
||||
with open(whole_path, mode, encoding='utf-8') as file: # noqa: ASYNC101
|
||||
with open(whole_path, mode, encoding='utf-8') as file:
|
||||
if mode != 'w':
|
||||
all_lines = file.readlines()
|
||||
new_file = insert_lines(insert, all_lines, start, end)
|
||||
|
||||
@ -12,10 +12,10 @@ from dirhash import dirhash
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
import openhands
|
||||
from openhands import __version__ as oh_version
|
||||
from openhands.core.exceptions import AgentRuntimeBuildError
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.runtime.builder import DockerRuntimeBuilder, RuntimeBuilder
|
||||
from openhands.version import get_version
|
||||
|
||||
|
||||
class BuildFromImageType(Enum):
|
||||
@ -93,11 +93,11 @@ def get_runtime_image_repo_and_tag(base_image: str) -> tuple[str, str]:
|
||||
repo = f'{repo_hash}_{repo[-24:]}' # Use 8 char hash + last 24 chars
|
||||
repo = repo.replace('/', '_s_')
|
||||
|
||||
new_tag = f'oh_v{oh_version}_image_{repo}_tag_{tag}'
|
||||
new_tag = f'oh_v{get_version()}_image_{repo}_tag_{tag}'
|
||||
|
||||
# if it's still too long, hash the entire image name
|
||||
if len(new_tag) > 128:
|
||||
new_tag = f'oh_v{oh_version}_image_{hashlib.md5(new_tag.encode()).hexdigest()[:64]}'
|
||||
new_tag = f'oh_v{get_version()}_image_{hashlib.md5(new_tag.encode()).hexdigest()[:64]}'
|
||||
logger.warning(
|
||||
f'The new tag [{new_tag}] is still too long, so we use an hash of the entire image name: {new_tag}'
|
||||
)
|
||||
@ -177,10 +177,12 @@ def build_runtime_image_in_folder(
|
||||
enable_browser: bool = True,
|
||||
) -> str:
|
||||
runtime_image_repo, _ = get_runtime_image_repo_and_tag(base_image)
|
||||
lock_tag = f'oh_v{oh_version}_{get_hash_for_lock_files(base_image, enable_browser)}'
|
||||
lock_tag = (
|
||||
f'oh_v{get_version()}_{get_hash_for_lock_files(base_image, enable_browser)}'
|
||||
)
|
||||
versioned_tag = (
|
||||
# truncate the base image to 96 characters to fit in the tag max length (128 characters)
|
||||
f'oh_v{oh_version}_{get_tag_for_versioned_image(base_image)}'
|
||||
f'oh_v{get_version()}_{get_tag_for_versioned_image(base_image)}'
|
||||
)
|
||||
versioned_image_name = f'{runtime_image_repo}:{versioned_tag}'
|
||||
source_tag = f'{lock_tag}_{get_hash_for_source_files()}'
|
||||
|
||||
@ -15,7 +15,8 @@ from fastapi import (
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
import openhands.agenthub # noqa F401 (we import this to get the agents registered)
|
||||
from openhands import __version__
|
||||
from openhands.app_server import v1_router
|
||||
from openhands.app_server.config import get_app_lifespan_service
|
||||
from openhands.integrations.service_types import AuthenticationError
|
||||
from openhands.server.routes.conversation import app as conversation_api_router
|
||||
from openhands.server.routes.feedback import app as feedback_api_router
|
||||
@ -33,6 +34,7 @@ from openhands.server.routes.settings import app as settings_router
|
||||
from openhands.server.routes.trajectory import app as trajectory_router
|
||||
from openhands.server.shared import conversation_manager, server_config
|
||||
from openhands.server.types import AppMode
|
||||
from openhands.version import get_version
|
||||
|
||||
mcp_app = mcp_server.http_app(path='/mcp')
|
||||
|
||||
@ -55,11 +57,17 @@ async def _lifespan(app: FastAPI) -> AsyncIterator[None]:
|
||||
yield
|
||||
|
||||
|
||||
lifespans = [_lifespan, mcp_app.lifespan]
|
||||
app_lifespan_ = get_app_lifespan_service()
|
||||
if app_lifespan_:
|
||||
lifespans.append(app_lifespan_.lifespan)
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
title='OpenHands',
|
||||
description='OpenHands: Code Less, Make More',
|
||||
version=__version__,
|
||||
lifespan=combine_lifespans(_lifespan, mcp_app.lifespan),
|
||||
version=get_version(),
|
||||
lifespan=combine_lifespans(*lifespans),
|
||||
routes=[Mount(path='/mcp', app=mcp_app)],
|
||||
)
|
||||
|
||||
@ -82,5 +90,6 @@ app.include_router(settings_router)
|
||||
app.include_router(secrets_router)
|
||||
if server_config.app_mode == AppMode.OSS:
|
||||
app.include_router(git_api_router)
|
||||
app.include_router(v1_router.router)
|
||||
app.include_router(trajectory_router)
|
||||
add_health_endpoints(app)
|
||||
|
||||
@ -62,6 +62,7 @@ class DockerNestedConversationManager(ConversationManager):
|
||||
async def __aenter__(self):
|
||||
runtime_cls = get_runtime_cls(self.config.runtime)
|
||||
runtime_cls.setup(self.config)
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_value, traceback):
|
||||
runtime_cls = get_runtime_cls(self.config.runtime)
|
||||
|
||||
@ -27,3 +27,4 @@ class ConversationInfo:
|
||||
session_api_key: str | None = None
|
||||
created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
|
||||
pr_number: list[int] = field(default_factory=list)
|
||||
conversation_version: str = 'V0'
|
||||
|
||||
@ -1,14 +1,26 @@
import base64
import itertools
import json
import os
import re
import uuid
from datetime import datetime, timezone
from datetime import datetime, timedelta, timezone

import base62
from fastapi import APIRouter, Depends, status
from fastapi.responses import JSONResponse
from jinja2 import Environment, FileSystemLoader
from pydantic import BaseModel, ConfigDict, Field

from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversation,
)
from openhands.app_server.app_conversation.app_conversation_service import (
    AppConversationService,
)
from openhands.app_server.config import (
    depends_app_conversation_service,
)
from openhands.core.config.llm_config import LLMConfig
from openhands.core.config.mcp_config import MCPConfig
from openhands.core.logger import openhands_logger as logger

@ -34,6 +46,7 @@ from openhands.integrations.service_types import (
)
from openhands.runtime import get_runtime_cls
from openhands.runtime.runtime_status import RuntimeStatus
from openhands.sdk.conversation.state import AgentExecutionStatus
from openhands.server.data_models.agent_loop_info import AgentLoopInfo
from openhands.server.data_models.conversation_info import ConversationInfo
from openhands.server.data_models.conversation_info_result_set import (

@ -76,6 +89,7 @@ from openhands.utils.async_utils import wait_all
from openhands.utils.conversation_summary import get_default_conversation_title

app = APIRouter(prefix='/api', dependencies=get_dependencies())
app_conversation_service_dependency = depends_app_conversation_service()


def _filter_conversations_by_age(
@ -282,44 +296,150 @@ async def search_conversations(
    selected_repository: str | None = None,
    conversation_trigger: ConversationTrigger | None = None,
    conversation_store: ConversationStore = Depends(get_conversation_store),
    app_conversation_service: AppConversationService = app_conversation_service_dependency,
) -> ConversationInfoResultSet:
    conversation_metadata_result_set = await conversation_store.search(page_id, limit)
    # Parse combined page_id to extract separate page_ids for each source
    v0_page_id = None
    v1_page_id = None

    # Apply age filter first using common function
    filtered_results = _filter_conversations_by_age(
        conversation_metadata_result_set.results, config.conversation_max_age_seconds
    if page_id:
        try:
            # Try to parse as JSON first
            page_data = json.loads(base64.b64decode(page_id))
            v0_page_id = page_data.get('v0')
            v1_page_id = page_data.get('v1')
        except (json.JSONDecodeError, TypeError):
            # Fallback: treat as v0 page_id for backward compatibility
            v0_page_id = page_id

    # Get results from old conversation store (V0)
    conversation_metadata_result_set = await conversation_store.search(
        v0_page_id, limit
    )

    # Apply additional filters
    final_filtered_results = []
    for conversation in filtered_results:
        # Apply repository filter
        if (
            selected_repository is not None
            and conversation.selected_repository != selected_repository
        ):
            continue
    # Get results from new app conversation service (V1)
    age_filter_date = None
    if config.conversation_max_age_seconds:
        age_filter_date = datetime.now(timezone.utc) - timedelta(
            seconds=config.conversation_max_age_seconds
        )

        # Apply conversation trigger filter
        if (
            conversation_trigger is not None
            and conversation.trigger != conversation_trigger
        ):
            continue

        final_filtered_results.append(conversation)

    return await _build_conversation_result_set(
        final_filtered_results, conversation_metadata_result_set.next_page_id
    app_conversation_page = await app_conversation_service.search_app_conversations(
        page_id=v1_page_id,
        limit=limit,
        # Apply age filter at the service level if possible
        created_at__gte=age_filter_date,
    )

    # Convert V1 conversations to ConversationInfo format
    v1_conversations = [
        _to_conversation_info(app_conv) for app_conv in app_conversation_page.items
    ]

    # Apply age filter to V0 conversations
    v0_filtered_results = _filter_conversations_by_age(
        conversation_metadata_result_set.results,
        config.conversation_max_age_seconds,
    )
    v0_conversation_ids = set(
        conversation.conversation_id for conversation in v0_filtered_results
    )
    await conversation_manager.get_connections(filter_to_sids=v0_conversation_ids)
    v0_agent_loop_info = await conversation_manager.get_agent_loop_info(
        filter_to_sids=v0_conversation_ids
    )
    v0_agent_loop_info_by_conversation_id = {
        info.conversation_id: info for info in v0_agent_loop_info
    }
    v0_conversations = await wait_all(
        _get_conversation_info(
            conversation=conversation,
            num_connections=sum(
                1
                for conversation_id in v0_agent_loop_info_by_conversation_id.values()
                if conversation_id == conversation.conversation_id
            ),
            agent_loop_info=v0_agent_loop_info_by_conversation_id.get(
                conversation.conversation_id
            ),
        )
        for conversation in v0_filtered_results
    )

    # Apply additional filters to both V0 and V1 results
    def apply_filters(conversations: list[ConversationInfo]) -> list[ConversationInfo]:
        filtered = []
        for conversation in conversations:
            # Apply repository filter
            if (
                selected_repository is not None
                and conversation.selected_repository != selected_repository
            ):
                continue

            # Apply conversation trigger filter
            if (
                conversation_trigger is not None
                and conversation.trigger != conversation_trigger
            ):
                continue

            filtered.append(conversation)
        return filtered

    v0_final_filtered = apply_filters(v0_conversations)
    v1_final_filtered = apply_filters(v1_conversations)

    # Combine results from both sources
    all_conversations = v0_final_filtered + v1_final_filtered

    # Sort by created_at descending (most recent first)
    all_conversations.sort(
        key=lambda x: x.created_at or datetime.min.replace(tzinfo=timezone.utc),
        reverse=True,
    )

    # Limit to requested number of results
    final_results = all_conversations[:limit]

    # Create combined page_id for next page
    next_page_id = None
    if (
        conversation_metadata_result_set.next_page_id
        or app_conversation_page.next_page_id
    ):
        next_page_data = {
            'v0': conversation_metadata_result_set.next_page_id,
            'v1': app_conversation_page.next_page_id,
        }
        # Only include page_id if at least one source has more pages
        if next_page_data['v0'] or next_page_data['v1']:
            next_page_id = base64.b64encode(
                json.dumps(next_page_data).encode()
            ).decode()

    return ConversationInfoResultSet(results=final_results, next_page_id=next_page_id)
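To make the combined V0/V1 pagination concrete: the new endpoint packs both cursors into a single opaque token, a base64-encoded JSON object with `v0` and `v1` keys, and falls back to treating a non-JSON token as a legacy V0 cursor. Below is a minimal, self-contained sketch of that round trip, using only the encoding shown in the diff; the helper names are illustrative, not part of the codebase.

```python
import base64
import json

# Minimal sketch of the combined page token used above: both cursors are
# packed into one base64-encoded JSON object.
def encode_page_id(v0_page_id: str | None, v1_page_id: str | None) -> str | None:
    if not (v0_page_id or v1_page_id):
        return None
    return base64.b64encode(
        json.dumps({'v0': v0_page_id, 'v1': v1_page_id}).encode()
    ).decode()


def decode_page_id(page_id: str | None) -> tuple[str | None, str | None]:
    if not page_id:
        return None, None
    try:
        data = json.loads(base64.b64decode(page_id))
        return data.get('v0'), data.get('v1')
    except (json.JSONDecodeError, TypeError, ValueError):
        # Legacy token: treat the whole string as a V0 cursor.
        return page_id, None


# Example round trip:
token = encode_page_id('abc123', 'def456')
assert decode_page_id(token) == ('abc123', 'def456')
```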
@app.get('/conversations/{conversation_id}')
async def get_conversation(
    conversation_id: str = Depends(validate_conversation_id),
    conversation_store: ConversationStore = Depends(get_conversation_store),
    app_conversation_service: AppConversationService = app_conversation_service_dependency,
) -> ConversationInfo | None:
    try:
        # Shim to add V1 conversations
        try:
            conversation_uuid = uuid.UUID(conversation_id)
            app_conversation = await app_conversation_service.get_app_conversation(
                conversation_uuid
            )
            if app_conversation:
                return _to_conversation_info(app_conversation)
        except (ValueError, TypeError, Exception):
            # Not a V1 conversation or service error
            pass

        metadata = await conversation_store.get_metadata(conversation_id)
        num_connections = len(
            await conversation_manager.get_connections(filter_to_sids={conversation_id})
@ -833,3 +953,56 @@ async def get_microagent_management_conversations(
    return await _build_conversation_result_set(
        final_filtered_results, conversation_metadata_result_set.next_page_id
    )


def _to_conversation_info(app_conversation: AppConversation) -> ConversationInfo:
    """Convert a V1 AppConversation into an old style ConversationInfo"""
    from openhands.app_server.sandbox.sandbox_models import SandboxStatus

    # Map SandboxStatus to ConversationStatus
    conversation_status_mapping = {
        SandboxStatus.RUNNING: ConversationStatus.RUNNING,
        SandboxStatus.STARTING: ConversationStatus.STARTING,
        SandboxStatus.PAUSED: ConversationStatus.STOPPED,
        SandboxStatus.ERROR: ConversationStatus.ERROR,
        SandboxStatus.MISSING: ConversationStatus.ARCHIVED,
    }

    conversation_status = conversation_status_mapping.get(
        app_conversation.sandbox_status, ConversationStatus.STOPPED
    )

    runtime_status_mapping = {
        AgentExecutionStatus.ERROR: RuntimeStatus.ERROR,
        AgentExecutionStatus.IDLE: RuntimeStatus.READY,
        AgentExecutionStatus.RUNNING: RuntimeStatus.READY,
        AgentExecutionStatus.PAUSED: RuntimeStatus.READY,
        AgentExecutionStatus.WAITING_FOR_CONFIRMATION: RuntimeStatus.READY,
        AgentExecutionStatus.FINISHED: RuntimeStatus.READY,
        AgentExecutionStatus.STUCK: RuntimeStatus.ERROR,
    }
    runtime_status = runtime_status_mapping.get(
        app_conversation.agent_status, RuntimeStatus.ERROR
    )
    title = (
        app_conversation.title
        or f'Conversation {base62.encodebytes(app_conversation.id.bytes)}'
    )

    return ConversationInfo(
        conversation_id=str(app_conversation.id),
        title=title,
        last_updated_at=app_conversation.updated_at,
        status=conversation_status,
        runtime_status=runtime_status,
        selected_repository=app_conversation.selected_repository,
        selected_branch=app_conversation.selected_branch,
        git_provider=app_conversation.git_provider,
        trigger=app_conversation.trigger,
        num_connections=0,  # V1 conversations don't track connections the same way
        url=app_conversation.conversation_url,
        session_api_key=app_conversation.session_api_key,
        created_at=app_conversation.created_at,
        pr_number=app_conversation.pr_number,
        conversation_version='V1',
    )
@ -13,7 +13,6 @@ from openhands.integrations.github.github_service import GithubServiceImpl
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
from openhands.integrations.provider import ProviderToken
from openhands.integrations.service_types import GitService, ProviderType
from openhands.server.dependencies import get_dependencies
from openhands.server.shared import ConversationStoreImpl, config, server_config
from openhands.server.types import AppMode
from openhands.server.user_auth import (

@ -24,7 +23,7 @@ from openhands.server.user_auth import (
from openhands.storage.data_models.conversation_metadata import ConversationMetadata

mcp_server = FastMCP(
    'mcp', stateless_http=True, dependencies=get_dependencies(), mask_error_details=True
    'mcp', stateless_http=True, mask_error_details=True, dependencies=None
)

HOST = f'https://{os.getenv("WEB_HOST", "app.all-hands.dev").strip()}'
@ -92,3 +92,8 @@ class DefaultUserAuth(UserAuth):
    async def get_instance(cls, request: Request) -> UserAuth:
        user_auth = DefaultUserAuth()
        return user_auth

    @classmethod
    async def get_for_user(cls, user_id: str) -> UserAuth:
        assert user_id == 'root'
        return DefaultUserAuth()

@ -80,6 +80,11 @@ class UserAuth(ABC):
    async def get_instance(cls, request: Request) -> UserAuth:
        """Get an instance of UserAuth from the request given"""

    @classmethod
    @abstractmethod
    async def get_for_user(cls, user_id: str) -> UserAuth:
        """Get an instance of UserAuth for the user given"""


async def get_user_auth(request: Request) -> UserAuth:
    user_auth: UserAuth | None = getattr(request.state, 'user_auth', None)

@ -92,3 +97,10 @@ async def get_user_auth(request: Request) -> UserAuth:
        raise ValueError('Failed to get user auth instance')
    request.state.user_auth = user_auth
    return user_auth


async def get_for_user(user_id: str) -> UserAuth:
    impl_name = server_config.user_auth_class
    impl = get_impl(UserAuth, impl_name)
    user_auth = await impl.get_for_user(user_id)
    return user_auth

@ -36,3 +36,6 @@ class ConversationMetadata:
    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0
    # V1 compatibility
    sandbox_id: str | None = None
    conversation_version: str | None = None
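The new module-level `get_for_user` mirrors `get_user_auth`, but resolves the configured `UserAuth` implementation from a bare user id instead of a request, which is what background and callback code paths need. A hypothetical call site might look like the sketch below; the surrounding coroutine and the `'root'` id are illustrative only (the latter matching the assertion in `DefaultUserAuth.get_for_user` above).

```python
# Hypothetical call site: obtain a UserAuth instance outside of a request
# context, e.g. from a background task or event callback.
async def run_background_task() -> None:
    user_auth = await get_for_user('root')
    # ... pass user_auth to code that normally receives request-scoped auth
```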
44
openhands/version.py
Normal file
@ -0,0 +1,44 @@
import os
from pathlib import Path

__package_name__ = 'openhands_ai'


def get_version():
    # Try getting the version from pyproject.toml
    try:
        root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        candidate_paths = [
            Path(root_dir) / 'pyproject.toml',
            Path(root_dir) / 'openhands' / 'pyproject.toml',
        ]
        for file_path in candidate_paths:
            if file_path.is_file():
                with open(file_path, 'r') as f:
                    for line in f:
                        if line.strip().startswith('version ='):
                            return line.split('=', 1)[1].strip().strip('"').strip("'")
    except FileNotFoundError:
        pass

    try:
        from importlib.metadata import PackageNotFoundError, version

        return version(__package_name__)
    except (ImportError, PackageNotFoundError):
        pass

    try:
        from pkg_resources import DistributionNotFound, get_distribution  # type: ignore

        return get_distribution(__package_name__).version
    except (ImportError, DistributionNotFound):
        pass

    return 'unknown'


try:
    __version__ = get_version()
except Exception:
    __version__ = 'unknown'
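The lookup order in the new `openhands/version.py` is: a `version =` line in a nearby `pyproject.toml`, then installed distribution metadata via `importlib.metadata`, then the legacy `pkg_resources` API, and finally the literal string `'unknown'`. A quick sanity check of the module as added by this commit might look like the sketch below; the printed values depend on whether you run from a source checkout or an installed `openhands_ai` package.

```python
# Minimal usage sketch: in a source checkout this prints the version parsed
# from pyproject.toml; for an installed package it falls back to the
# distribution metadata, and to 'unknown' if neither is available.
from openhands.version import __version__, get_version

print(get_version())
print(__version__)
```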
710
poetry.lock
generated
@ -177,6 +177,45 @@ files = [
|
||||
frozenlist = ">=1.1.0"
|
||||
typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}
|
||||
|
||||
[[package]]
|
||||
name = "aiosqlite"
|
||||
version = "0.21.0"
|
||||
description = "asyncio bridge to the standard sqlite3 module"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"},
|
||||
{file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing_extensions = ">=4.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["attribution (==1.7.1)", "black (==24.3.0)", "build (>=1.2)", "coverage[toml] (==7.6.10)", "flake8 (==7.0.0)", "flake8-bugbear (==24.12.12)", "flit (==3.10.1)", "mypy (==1.14.1)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"]
|
||||
docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "alembic"
|
||||
version = "1.16.5"
|
||||
description = "A database migration tool for SQLAlchemy."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3"},
|
||||
{file = "alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
Mako = "*"
|
||||
SQLAlchemy = ">=1.4.0"
|
||||
typing-extensions = ">=4.12"
|
||||
|
||||
[package.extras]
|
||||
tz = ["tzdata"]
|
||||
|
||||
[[package]]
|
||||
name = "altair"
|
||||
version = "5.5.0"
|
||||
@ -349,6 +388,18 @@ types-python-dateutil = ">=2.8.10"
|
||||
doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"]
|
||||
test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"]
|
||||
|
||||
[[package]]
|
||||
name = "asn1crypto"
|
||||
version = "1.5.1"
|
||||
description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"},
|
||||
{file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "asttokens"
|
||||
version = "3.0.0"
|
||||
@ -377,6 +428,70 @@ files = [
|
||||
{file = "async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "asyncpg"
|
||||
version = "0.30.0"
|
||||
description = "An asyncio PostgreSQL driver"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"},
|
||||
{file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"},
|
||||
{file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"},
|
||||
{file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"},
|
||||
{file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"},
|
||||
{file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"},
|
||||
{file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"},
|
||||
{file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
|
||||
gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""]
|
||||
test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "25.3.0"
|
||||
@ -1885,6 +2000,29 @@ files = [
|
||||
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
|
||||
tests = ["pytest", "pytest-cov", "pytest-xdist"]
|
||||
|
||||
[[package]]
|
||||
name = "cyclopts"
|
||||
version = "3.24.0"
|
||||
description = "Intuitive, easy CLIs based on type hints."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cyclopts-3.24.0-py3-none-any.whl", hash = "sha256:809d04cde9108617106091140c3964ee6fceb33cecdd537f7ffa360bde13ed71"},
|
||||
{file = "cyclopts-3.24.0.tar.gz", hash = "sha256:de6964a041dfb3c57bf043b41e68c43548227a17de1bad246e3a0bfc5c4b7417"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=23.1.0"
|
||||
docstring-parser = {version = ">=0.15", markers = "python_version < \"4.0\""}
|
||||
rich = ">=13.6.0"
|
||||
rich-rst = ">=1.3.1,<2.0.0"
|
||||
|
||||
[package.extras]
|
||||
toml = ["tomli (>=2.0.0) ; python_version < \"3.11\""]
|
||||
trio = ["trio (>=0.10.0)"]
|
||||
yaml = ["pyyaml (>=6.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "dataclasses-json"
|
||||
version = "0.6.7"
|
||||
@ -2141,6 +2279,27 @@ files = [
|
||||
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dnspython"
|
||||
version = "2.8.0"
|
||||
description = "DNS toolkit"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"},
|
||||
{file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"]
|
||||
dnssec = ["cryptography (>=45)"]
|
||||
doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"]
|
||||
doq = ["aioquic (>=1.2.0)"]
|
||||
idna = ["idna (>=3.10)"]
|
||||
trio = ["trio (>=0.30)"]
|
||||
wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""]
|
||||
|
||||
[[package]]
|
||||
name = "docker"
|
||||
version = "7.1.0"
|
||||
@ -2176,6 +2335,18 @@ files = [
|
||||
{file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "docutils"
|
||||
version = "0.22.2"
|
||||
description = "Docutils -- Python Documentation Utilities"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8"},
|
||||
{file = "docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dulwich"
|
||||
version = "0.22.8"
|
||||
@ -2295,6 +2466,41 @@ attrs = ">=21.3.0"
|
||||
e2b = ">=2.0.0,<3.0.0"
|
||||
httpx = ">=0.20.0,<1.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
version = "0.19.1"
|
||||
description = "ECDSA cryptographic signature library (pure python)"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"},
|
||||
{file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.9.0"
|
||||
|
||||
[package.extras]
|
||||
gmpy = ["gmpy"]
|
||||
gmpy2 = ["gmpy2"]
|
||||
|
||||
[[package]]
|
||||
name = "email-validator"
|
||||
version = "2.3.0"
|
||||
description = "A robust email address syntax and deliverability validation library."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"},
|
||||
{file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
dnspython = ">=2.0.0"
|
||||
idna = ">=2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "english-words"
|
||||
version = "2.0.1"
|
||||
@ -2508,26 +2714,32 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc
|
||||
|
||||
[[package]]
|
||||
name = "fastmcp"
|
||||
version = "2.6.1"
|
||||
description = "The fast, Pythonic way to build MCP servers."
|
||||
version = "2.12.4"
|
||||
description = "The fast, Pythonic way to build MCP servers and clients."
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "fastmcp-2.6.1-py3-none-any.whl", hash = "sha256:d83a2fcffa721cbb91b29c738d39de20b54e1b1ebef5be652d6a4956ecac7ad3"},
|
||||
{file = "fastmcp-2.6.1.tar.gz", hash = "sha256:212f15a4edf8289e5c3c70796910dc612ef891f84df3257a277457bb761d1362"},
|
||||
{file = "fastmcp-2.12.4-py3-none-any.whl", hash = "sha256:56188fbbc1a9df58c537063f25958c57b5c4d715f73e395c41b51550b247d140"},
|
||||
{file = "fastmcp-2.12.4.tar.gz", hash = "sha256:b55fe89537038f19d0f4476544f9ca5ac171033f61811cc8f12bdeadcbea5016"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
authlib = ">=1.5.2"
|
||||
cyclopts = ">=3.0.0"
|
||||
exceptiongroup = ">=1.2.2"
|
||||
httpx = ">=0.28.1"
|
||||
mcp = ">=1.9.2,<2.0.0"
|
||||
mcp = ">=1.12.4,<2.0.0"
|
||||
openapi-core = ">=0.19.5"
|
||||
openapi-pydantic = ">=0.5.1"
|
||||
pydantic = {version = ">=2.11.7", extras = ["email"]}
|
||||
pyperclip = ">=1.9.0"
|
||||
python-dotenv = ">=1.1.0"
|
||||
rich = ">=13.9.4"
|
||||
typer = ">=0.15.2"
|
||||
websockets = ">=14.0"
|
||||
|
||||
[package.extras]
|
||||
openai = ["openai (>=1.102.0)"]
|
||||
websockets = ["websockets (>=15.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "fastuuid"
|
||||
@ -4110,6 +4322,18 @@ widgetsnbextension = ">=4.0.14,<4.1.0"
|
||||
[package.extras]
|
||||
test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"]
|
||||
|
||||
[[package]]
|
||||
name = "isodate"
|
||||
version = "0.7.2"
|
||||
description = "An ISO 8601 date/time/duration parser and formatter"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
|
||||
{file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "isoduration"
|
||||
version = "20.11.0"
|
||||
@ -4430,6 +4654,24 @@ webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format
|
||||
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
|
||||
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-path"
|
||||
version = "0.3.4"
|
||||
description = "JSONSchema Spec with object-oriented paths"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8"},
|
||||
{file = "jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pathable = ">=0.4.1,<0.5.0"
|
||||
PyYAML = ">=5.1"
|
||||
referencing = "<0.37.0"
|
||||
requests = ">=2.31.0,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-specifications"
|
||||
version = "2025.4.1"
|
||||
@ -4863,6 +5105,60 @@ dev = ["changelist (==0.5)"]
|
||||
lint = ["pre-commit (==3.7.0)"]
|
||||
test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "lazy-object-proxy"
|
||||
version = "1.12.0"
|
||||
description = "A fast and thorough lazy object proxy."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61d5e3310a4aa5792c2b599a7a78ccf8687292c8eb09cf187cca8f09cf6a7519"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ca33565f698ac1aece152a10f432415d1a2aa9a42dfe23e5ba2bc255ab91f6"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01c7819a410f7c255b20799b65d36b414379a30c6f1684c7bd7eb6777338c1b"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:029d2b355076710505c9545aef5ab3f750d89779310e26ddf2b7b23f6ea03cd8"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc6e3614eca88b1c8a625fc0a47d0d745e7c3255b21dac0e30b3037c5e3deeb8"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:be5fe974e39ceb0d6c9db0663c0464669cf866b2851c73971409b9566e880eab"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1cf69cd1a6c7fe2dbcc3edaa017cf010f4192e53796538cc7d5e1fedbfa4bcff"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efff4375a8c52f55a145dc8487a2108c2140f0bec4151ab4e1843e52eb9987ad"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1192e8c2f1031a6ff453ee40213afa01ba765b3dc861302cd91dbdb2e2660b00"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3605b632e82a1cbc32a1e5034278a64db555b3496e0795723ee697006b980508"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a61095f5d9d1a743e1e20ec6d6db6c2ca511961777257ebd9b288951b23b44fa"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:997b1d6e10ecc6fb6fe0f2c959791ae59599f41da61d652f6c903d1ee58b7370"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ee0d6027b760a11cc18281e702c0309dd92da458a74b4c15025d7fc490deede"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ab2c584e3cc8be0dfca422e05ad30a9abe3555ce63e9ab7a559f62f8dbc6ff9"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14e348185adbd03ec17d051e169ec45686dcd840a3779c9d4c10aabe2ca6e1c0"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4fcbe74fb85df8ba7825fa05eddca764138da752904b378f0ae5ab33a36c308"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:563d2ec8e4d4b68ee7848c5ab4d6057a6d703cb7963b342968bb8758dda33a23"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:53c7fd99eb156bbb82cbc5d5188891d8fdd805ba6c1e3b92b90092da2a837073"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:86fd61cb2ba249b9f436d789d1356deae69ad3231dc3c0f17293ac535162672e"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81d1852fb30fab81696f93db1b1e55a5d1ff7940838191062f5f56987d5fcc3e"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9045646d83f6c2664c1330904b245ae2371b5c57a3195e4028aedc9f999655"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:67f07ab742f1adfb3966c40f630baaa7902be4222a17941f3d85fd1dae5565ff"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75ba769017b944fcacbf6a80c18b2761a1795b03f8899acdad1f1c39db4409be"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:7b22c2bbfb155706b928ac4d74c1a63ac8552a55ba7fff4445155523ea4067e1"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4a79b909aa16bde8ae606f06e6bbc9d3219d2e57fb3e0076e17879072b742c65"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:338ab2f132276203e404951205fe80c3fd59429b3a724e7b662b2eb539bb1be9"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c40b3c9faee2e32bfce0df4ae63f4e73529766893258eca78548bac801c8f66"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:717484c309df78cedf48396e420fa57fc8a2b1f06ea889df7248fdd156e58847"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b7ea5ea1ffe15059eb44bcbcb258f97bcb40e139b88152c40d07b1a1dfc9ac"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:08c465fb5cd23527512f9bd7b4c7ba6cec33e28aad36fbbe46bf7b858f9f3f7f"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9defba70ab943f1df98a656247966d7729da2fe9c2d5d85346464bf320820a3"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6763941dbf97eea6b90f5b06eb4da9418cc088fce0e3883f5816090f9afcde4a"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdc70d81235fc586b9e3d1aeef7d1553259b62ecaae9db2167a5d2550dcc391a"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0a83c6f7a6b2bfc11ef3ed67f8cbe99f8ff500b05655d8e7df9aab993a6abc95"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:256262384ebd2a77b023ad02fbcc9326282bcfd16484d5531154b02bc304f4c5"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7601ec171c7e8584f8ff3f4e440aa2eebf93e854f04639263875b8c2971f819f"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae575ad9b674d0029fc077c5231b3bc6b433a3d1a62a8c363df96974b5534728"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31020c84005d3daa4cc0fa5a310af2066efe6b0d82aeebf9ab199292652ff036"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800f32b00a47c27446a2b767df7538e6c66a3488632c402b4fb2224f9794f3c0"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:15400b18893f345857b9e18b9bd87bd06aba84af6ed086187add70aeaa3f93f1"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3d3964fbd326578bcdfffd017ef101b6fb0484f34e731fe060ba9b8816498c36"},
|
||||
{file = "lazy_object_proxy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:424a8ab6695400845c39f13c685050eab69fa0bbac5790b201cd27375e5e41d7"},
|
||||
{file = "lazy_object_proxy-1.12.0-pp39.pp310.pp311.graalpy311-none-any.whl", hash = "sha256:c3b2e0af1f7f77c4263759c4824316ce458fabe0fceadcd24ef8ca08b2d1e402"},
|
||||
{file = "lazy_object_proxy-1.12.0.tar.gz", hash = "sha256:1f5a462d92fd0cfb82f1fab28b51bfb209fabbe6aabf7f0d51472c0c124c0c61"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "levenshtein"
|
||||
version = "0.27.1"
|
||||
@ -5019,14 +5315,14 @@ dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[tom
|
||||
|
||||
[[package]]
|
||||
name = "libtmux"
|
||||
version = "0.39.0"
|
||||
version = "0.46.2"
|
||||
description = "Typed library that provides an ORM wrapper for tmux, a terminal multiplexer."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "libtmux-0.39.0-py3-none-any.whl", hash = "sha256:6b6e338be2727f67aa6b7eb67fa134368fa3c3eac5df27565396467692891c1e"},
|
||||
{file = "libtmux-0.39.0.tar.gz", hash = "sha256:59346aeef3c0d6017f3bc5e23248d43cdf50f32b775b9cb5d9ff5e2e5f3059f4"},
|
||||
{file = "libtmux-0.46.2-py3-none-any.whl", hash = "sha256:6c32dbf22bde8e5e33b2714a4295f6e838dc640f337cd4c085a044f6828c7793"},
|
||||
{file = "libtmux-0.46.2.tar.gz", hash = "sha256:9a398fec5d714129c8344555d466e1a903dfc0f741ba07aabe75a8ceb25c5dda"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -5204,11 +5500,8 @@ files = [
|
||||
{file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"},
|
||||
{file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"},
|
||||
@ -5267,6 +5560,26 @@ html5 = ["html5lib"]
|
||||
htmlsoup = ["BeautifulSoup4"]
|
||||
source = ["Cython (>=3.0.11,<3.1.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "mako"
|
||||
version = "1.3.10"
|
||||
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"},
|
||||
{file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
MarkupSafe = ">=0.9.2"
|
||||
|
||||
[package.extras]
|
||||
babel = ["Babel"]
|
||||
lingua = ["lingua"]
|
||||
testing = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "mammoth"
|
||||
version = "1.9.1"
|
||||
@ -5502,29 +5815,31 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "1.9.2"
|
||||
version = "1.16.0"
|
||||
description = "Model Context Protocol SDK"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "mcp-1.9.2-py3-none-any.whl", hash = "sha256:bc29f7fd67d157fef378f89a4210384f5fecf1168d0feb12d22929818723f978"},
|
||||
{file = "mcp-1.9.2.tar.gz", hash = "sha256:3c7651c053d635fd235990a12e84509fe32780cd359a5bbef352e20d4d963c05"},
|
||||
{file = "mcp-1.16.0-py3-none-any.whl", hash = "sha256:ec917be9a5d31b09ba331e1768aa576e0af45470d657a0319996a20a57d7d633"},
|
||||
{file = "mcp-1.16.0.tar.gz", hash = "sha256:39b8ca25460c578ee2cdad33feeea122694cfdf73eef58bee76c42f6ef0589df"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
anyio = ">=4.5"
|
||||
httpx = ">=0.27"
|
||||
httpx = ">=0.27.1"
|
||||
httpx-sse = ">=0.4"
|
||||
pydantic = ">=2.7.2,<3.0.0"
|
||||
jsonschema = ">=4.20.0"
|
||||
pydantic = ">=2.11.0,<3.0.0"
|
||||
pydantic-settings = ">=2.5.2"
|
||||
python-multipart = ">=0.0.9"
|
||||
pywin32 = {version = ">=310", markers = "sys_platform == \"win32\""}
|
||||
sse-starlette = ">=1.6.1"
|
||||
starlette = ">=0.27"
|
||||
uvicorn = {version = ">=0.23.1", markers = "sys_platform != \"emscripten\""}
|
||||
uvicorn = {version = ">=0.31.1", markers = "sys_platform != \"emscripten\""}
|
||||
|
||||
[package.extras]
|
||||
cli = ["python-dotenv (>=1.0.0)", "typer (>=0.12.4)"]
|
||||
cli = ["python-dotenv (>=1.0.0)", "typer (>=0.16.0)"]
|
||||
rich = ["rich (>=13.9.4)"]
|
||||
ws = ["websockets (>=15.0.1)"]
|
||||
|
||||
@ -6578,6 +6893,38 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
|
||||
realtime = ["websockets (>=13,<16)"]
|
||||
voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-core"
|
||||
version = "0.19.5"
|
||||
description = "client-side and server-side support for the OpenAPI Specification v3"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openapi_core-0.19.5-py3-none-any.whl", hash = "sha256:ef7210e83a59394f46ce282639d8d26ad6fc8094aa904c9c16eb1bac8908911f"},
|
||||
{file = "openapi_core-0.19.5.tar.gz", hash = "sha256:421e753da56c391704454e66afe4803a290108590ac8fa6f4a4487f4ec11f2d3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
isodate = "*"
|
||||
jsonschema = ">=4.18.0,<5.0.0"
|
||||
jsonschema-path = ">=0.3.1,<0.4.0"
|
||||
more-itertools = "*"
|
||||
openapi-schema-validator = ">=0.6.0,<0.7.0"
|
||||
openapi-spec-validator = ">=0.7.1,<0.8.0"
|
||||
parse = "*"
|
||||
typing-extensions = ">=4.8.0,<5.0.0"
|
||||
werkzeug = "<3.1.2"
|
||||
|
||||
[package.extras]
|
||||
aiohttp = ["aiohttp (>=3.0)", "multidict (>=6.0.4,<7.0.0)"]
|
||||
django = ["django (>=3.0)"]
|
||||
falcon = ["falcon (>=3.0)"]
|
||||
fastapi = ["fastapi (>=0.111,<0.116)"]
|
||||
flask = ["flask"]
|
||||
requests = ["requests"]
|
||||
starlette = ["aioitertools (>=0.11,<0.13)", "starlette (>=0.26.1,<0.45.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-pydantic"
|
||||
version = "0.5.1"
|
||||
@ -6593,6 +6940,41 @@ files = [
|
||||
[package.dependencies]
|
||||
pydantic = ">=1.8"
|
||||
|
||||
[[package]]
|
||||
name = "openapi-schema-validator"
|
||||
version = "0.6.3"
|
||||
description = "OpenAPI schema validation for Python"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openapi_schema_validator-0.6.3-py3-none-any.whl", hash = "sha256:f3b9870f4e556b5a62a1c39da72a6b4b16f3ad9c73dc80084b1b11e74ba148a3"},
|
||||
{file = "openapi_schema_validator-0.6.3.tar.gz", hash = "sha256:f37bace4fc2a5d96692f4f8b31dc0f8d7400fd04f3a937798eaf880d425de6ee"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
jsonschema = ">=4.19.1,<5.0.0"
|
||||
jsonschema-specifications = ">=2023.5.2"
|
||||
rfc3339-validator = "*"
|
||||
|
||||
[[package]]
|
||||
name = "openapi-spec-validator"
|
||||
version = "0.7.2"
|
||||
description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.8.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openapi_spec_validator-0.7.2-py3-none-any.whl", hash = "sha256:4bbdc0894ec85f1d1bea1d6d9c8b2c3c8d7ccaa13577ef40da9c006c9fd0eb60"},
|
||||
{file = "openapi_spec_validator-0.7.2.tar.gz", hash = "sha256:cc029309b5c5dbc7859df0372d55e9d1ff43e96d678b9ba087f7c56fc586f734"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
jsonschema = ">=4.18.0,<5.0.0"
|
||||
jsonschema-path = ">=0.3.1,<0.4.0"
|
||||
lazy-object-proxy = ">=1.7.1,<2.0.0"
|
||||
openapi-schema-validator = ">=0.6.0,<0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "openhands-aci"
|
||||
version = "0.3.2"
|
||||
@ -6639,6 +7021,62 @@ youtube-transcript-api = ">=0.6.2"
|
||||
[package.extras]
|
||||
llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0)", "llama-index-retrievers-bm25 (>=0.5.2,<0.6.0)"]
|
||||
|
||||
[[package]]
name = "openhands-agent-server"
version = "1.0.0"
description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = []
develop = false

[package.dependencies]
aiosqlite = ">=0.19"
alembic = ">=1.13"
docker = ">=7.1,<8"
fastapi = ">=0.104"
pydantic = ">=2"
sqlalchemy = ">=2"
uvicorn = ">=0.31.1"
websockets = ">=12"

[package.source]
type = "git"
url = "https://github.com/All-Hands-AI/agent-sdk.git"
reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
resolved_reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
subdirectory = "openhands/agent_server"

[[package]]
name = "openhands-sdk"
version = "1.0.0"
description = "OpenHands SDK - Core functionality for building AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = []
develop = false

[package.dependencies]
fastmcp = ">=2.11.3"
litellm = ">=1.75.9"
pydantic = ">=2.11.7"
python-frontmatter = ">=1.1.0"
python-json-logger = ">=3.3.0"
tenacity = ">=9.1.2"
websockets = ">=12"

[package.extras]
boto3 = ["boto3 (>=1.35.0)"]

[package.source]
type = "git"
url = "https://github.com/All-Hands-AI/agent-sdk.git"
reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
resolved_reference = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc"
subdirectory = "openhands/sdk"
[[package]]
|
||||
name = "openpyxl"
|
||||
version = "3.1.5"
|
||||
@ -6876,6 +7314,18 @@ files = [
|
||||
{file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse"
|
||||
version = "1.20.2"
|
||||
description = "parse() is the opposite of format()"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"},
|
||||
{file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parso"
|
||||
version = "0.8.4"
|
||||
@ -6892,6 +7342,18 @@ files = [
|
||||
qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
|
||||
testing = ["docopt", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "pathable"
|
||||
version = "0.4.4"
|
||||
description = "Object-oriented paths"
|
||||
optional = false
|
||||
python-versions = "<4.0.0,>=3.7.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2"},
|
||||
{file = "pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pathspec"
|
||||
version = "0.12.1"
|
||||
@ -6961,6 +7423,22 @@ files = [
|
||||
[package.dependencies]
|
||||
ptyprocess = ">=0.5"
|
||||
|
||||
[[package]]
|
||||
name = "pg8000"
|
||||
version = "1.31.5"
|
||||
description = "PostgreSQL interface library"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pg8000-1.31.5-py3-none-any.whl", hash = "sha256:0af2c1926b153307639868d2ee5cef6cd3a7d07448e12736989b10e1d491e201"},
|
||||
{file = "pg8000-1.31.5.tar.gz", hash = "sha256:46ebb03be52b7a77c03c725c79da2ca281d6e8f59577ca66b17c9009618cae78"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
python-dateutil = ">=2.8.2"
|
||||
scramp = ">=1.4.5"
|
||||
|
||||
[[package]]
|
||||
name = "pillow"
|
||||
version = "11.3.0"
|
||||
@ -7577,6 +8055,17 @@ files = [
|
||||
[package.dependencies]
|
||||
pyasn1 = ">=0.6.1,<0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pybase62"
|
||||
version = "1.0.0"
|
||||
description = "Python module for base62 encoding"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pybase62-1.0.0-py3-none-any.whl", hash = "sha256:60539ad956ec9e9de091bc7ae88c9550bc2fa17f503050cf34d021b75e73cb27"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycodestyle"
|
||||
version = "2.14.0"
|
||||
@ -7604,18 +8093,19 @@ markers = {test = "platform_python_implementation == \"CPython\" and sys_platfor
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.11.5"
|
||||
version = "2.11.10"
|
||||
description = "Data validation using Python type hints"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "evaluation"]
|
||||
files = [
|
||||
{file = "pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"},
|
||||
{file = "pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a"},
|
||||
{file = "pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a"},
|
||||
{file = "pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
annotated-types = ">=0.6.0"
|
||||
email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""}
|
||||
pydantic-core = "2.33.2"
|
||||
typing-extensions = ">=4.12.2"
|
||||
typing-inspection = ">=0.4.0"
|
||||
@ -7970,6 +8460,18 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
|
||||
full = ["Pillow", "PyCryptodome"]
|
||||
image = ["Pillow"]
|
||||
|
||||
[[package]]
name = "pyperclip"
version = "1.11.0"
description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273"},
{file = "pyperclip-1.11.0.tar.gz", hash = "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6"},
]

[[package]]
name = "pyproject-hooks"
version = "1.2.0"
@ -8230,6 +8732,30 @@ PyYAML = "*"
docs = ["sphinx"]
test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"]

[[package]]
name = "python-jose"
version = "3.5.0"
description = "JOSE implementation in Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"},
{file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"},
]

[package.dependencies]
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""}
ecdsa = "!=0.15"
pyasn1 = ">=0.5.0"
rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0"

[package.extras]
cryptography = ["cryptography (>=3.4.0)"]
pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"]
pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"]
test = ["pytest", "pytest-cov"]

[[package]]
name = "python-json-logger"
version = "3.3.0"
@ -9013,6 +9539,22 @@ pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "rich-rst"
version = "1.3.1"
description = "A beautiful reStructuredText renderer for rich"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "rich_rst-1.3.1-py3-none-any.whl", hash = "sha256:498a74e3896507ab04492d326e794c3ef76e7cda078703aa592d1853d91098c1"},
{file = "rich_rst-1.3.1.tar.gz", hash = "sha256:fad46e3ba42785ea8c1785e2ceaa56e0ffa32dbe5410dec432f37e4107c4f383"},
]

[package.dependencies]
docutils = "*"
rich = ">=12.0.0"

[[package]]
name = "rouge"
version = "1.0.1"
@ -9433,6 +9975,21 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodest
doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]

[[package]]
name = "scramp"
version = "1.4.6"
description = "An implementation of the SCRAM protocol."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "scramp-1.4.6-py3-none-any.whl", hash = "sha256:a0cf9d2b4624b69bac5432dd69fecfc55a542384fe73c3a23ed9b138cda484e1"},
{file = "scramp-1.4.6.tar.gz", hash = "sha256:fe055ebbebf4397b9cb323fcc4b299f219cd1b03fd673ca40c97db04ac7d107e"},
]

[package.dependencies]
asn1crypto = ">=1.5.1"

[[package]]
name = "seaborn"
version = "0.13.2"
@ -9695,6 +10252,102 @@ openai = ["httpx (<0.28)", "openai"]
pocketsphinx = ["pocketsphinx"]
whisper-local = ["openai-whisper", "soundfile"]

[[package]]
name = "sqlalchemy"
version = "2.0.43"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07097c0a1886c150ef2adba2ff7437e84d40c0f7dcb44a2c2b9c905ccfc6361c"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cdeff998cb294896a34e5b2f00e383e7c5c4ef3b4bfa375d9104723f15186443"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:bcf0724a62a5670e5718957e05c56ec2d6850267ea859f8ad2481838f889b42c"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-win32.whl", hash = "sha256:c697575d0e2b0a5f0433f679bda22f63873821d991e95a90e9e52aae517b2e32"},
{file = "SQLAlchemy-2.0.43-cp37-cp37m-win_amd64.whl", hash = "sha256:d34c0f6dbefd2e816e8f341d0df7d4763d382e3f452423e752ffd1e213da2512"},
{file = "sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069"},
{file = "sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154"},
{file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612"},
{file = "sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019"},
{file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20"},
{file = "sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18"},
{file = "sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00"},
{file = "sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b"},
{file = "sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29"},
{file = "sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631"},
{file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685"},
{file = "sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca"},
{file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d"},
{file = "sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3"},
{file = "sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921"},
{file = "sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8"},
{file = "sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24"},
{file = "sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83"},
{file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9"},
{file = "sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48"},
{file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687"},
{file = "sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe"},
{file = "sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d"},
{file = "sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a"},
{file = "sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3"},
{file = "sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa"},
{file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9"},
{file = "sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f"},
{file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738"},
{file = "sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164"},
{file = "sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d"},
{file = "sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197"},
{file = "sqlalchemy-2.0.43-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e6aeb2e0932f32950cf56a8b4813cb15ff792fc0c9b3752eaf067cfe298496a"},
{file = "sqlalchemy-2.0.43-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f964a05356f4bca4112e6334ed7c208174511bd56e6b8fc86dad4d024d4185"},
{file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46293c39252f93ea0910aababa8752ad628bcce3a10d3f260648dd472256983f"},
{file = "sqlalchemy-2.0.43-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:136063a68644eca9339d02e6693932116f6a8591ac013b0014479a1de664e40a"},
{file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6e2bf13d9256398d037fef09fd8bf9b0bf77876e22647d10761d35593b9ac547"},
{file = "sqlalchemy-2.0.43-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:44337823462291f17f994d64282a71c51d738fc9ef561bf265f1d0fd9116a782"},
{file = "sqlalchemy-2.0.43-cp38-cp38-win32.whl", hash = "sha256:13194276e69bb2af56198fef7909d48fd34820de01d9c92711a5fa45497cc7ed"},
{file = "sqlalchemy-2.0.43-cp38-cp38-win_amd64.whl", hash = "sha256:334f41fa28de9f9be4b78445e68530da3c5fa054c907176460c81494f4ae1f5e"},
{file = "sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7"},
{file = "sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf"},
{file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad"},
{file = "sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34"},
{file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7"},
{file = "sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b"},
{file = "sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414"},
{file = "sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b"},
{file = "sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc"},
{file = "sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417"},
]

[package.dependencies]
greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""}
typing-extensions = ">=4.6.0"

[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"]
aioodbc = ["aioodbc", "greenlet (>=1)"]
aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (>=1)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=8)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3_binary"]

[[package]]
name = "sse-starlette"
version = "3.0.2"
@ -10577,6 +11230,7 @@ files = [
{file = "typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855"},
{file = "typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b"},
]
markers = {main = "extra == \"third-party-runtimes\""}

[package.dependencies]
click = ">=8.0.0"
@ -11188,14 +11842,14 @@ files = [

[[package]]
name = "werkzeug"
version = "3.1.3"
version = "3.1.1"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.9"
groups = ["evaluation"]
groups = ["main", "evaluation"]
files = [
{file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
{file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
{file = "werkzeug-3.1.1-py3-none-any.whl", hash = "sha256:a71124d1ef06008baafa3d266c02f56e1836a5984afd6dd6c9230669d60d9fb5"},
{file = "werkzeug-3.1.1.tar.gz", hash = "sha256:8cd39dfbdfc1e051965f156163e2974e52c210f130810e9ad36858f0fd3edad4"},
]

[package.dependencies]
@ -11929,4 +12583,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api
[metadata]
lock-version = "2.1"
python-versions = "^3.12,<3.14"
content-hash = "21f7dfae679c6b0fd2832c2fb9a5982eeaa5e0539ed58977a863919aa6b5cd79"
content-hash = "90ae740f15865e77791e259038940ba45652f2639159cad26f2b3292948b32e8"

@ -61,7 +61,7 @@ protobuf = "^5.0.0,<6.0.0" # Updated to support newer op
opentelemetry-api = "^1.33.1"
opentelemetry-exporter-otlp-proto-grpc = "^1.33.1"

libtmux = ">=0.37,<0.40"
libtmux = ">=0.46.2"
pygithub = "^2.5.0"
joblib = "*"
openhands-aci = "0.3.2"
@ -110,6 +110,17 @@ runloop-api-client = { version = "0.50.0", optional = true }
daytona = { version = "0.24.2", optional = true }
httpx-aiohttp = "^0.1.8"
e2b-code-interpreter = { version = "^2.0.0", optional = true }
pybase62 = "^1.0.0"

# V1 dependencies
openhands-agent-server = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/agent_server", rev = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc" }
openhands-sdk = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/sdk", rev = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc" }
# This refuses to install
# openhands-tools = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/tools", rev = "f8ca02c4a3b847bfc50b3c5e579ce126c511fefc" }
python-jose = { version = ">=3.3", extras = [ "cryptography" ] }
sqlalchemy = { extras = [ "asyncio" ], version = "^2.0.40" }
pg8000 = "^1.31.5"
asyncpg = "^0.30.0"

[tool.poetry.extras]
third_party_runtimes = [ "e2b-code-interpreter", "modal", "runloop-api-client", "daytona" ]

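The "V1 dependencies" block above adds `sqlalchemy` with the `asyncio` extra alongside the `asyncpg` and `pg8000` PostgreSQL drivers. As a rough illustration of why the `asyncio` extra matters, here is a minimal, self-contained sketch of SQLAlchemy's async API; it is not code from this PR, and the connection URL and database name are made up:

```python
# Minimal sketch of SQLAlchemy's asyncio API with the asyncpg driver.
# The URL and database name are illustrative only, not values from this PR.
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def main() -> None:
    # "postgresql+asyncpg://..." selects the asyncpg driver; pg8000 is a
    # pure-Python sync driver and would be used via "postgresql+pg8000://...".
    engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/openhands")
    async with engine.connect() as conn:
        result = await conn.execute(text("SELECT 1"))
        print(result.scalar_one())
    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(main())
```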
8
scripts/dump_config_schema.py
Normal file
@ -0,0 +1,8 @@
import json

from openhands.app_server.config import get_global_config

if __name__ == '__main__':
    config = get_global_config()
    schema = config.model_json_schema()
    print(json.dumps(schema, indent=2))
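The new helper simply prints the JSON Schema of the app-server config model to stdout. A small, hypothetical consumer of that output (the file name and the third-party `jsonschema` package are assumptions for illustration, not part of this PR) might look like this:

```python
# Hypothetical consumer of the dumped schema; assumes it was saved with
#   python scripts/dump_config_schema.py > config_schema.json
import json

import jsonschema  # third-party validator, not a dependency added by this PR

with open("config_schema.json") as f:
    schema = json.load(f)

candidate_config = {}  # whatever config document you want to check
try:
    jsonschema.validate(instance=candidate_config, schema=schema)
    print("config matches the schema")
except jsonschema.ValidationError as exc:
    print(f"config is invalid: {exc.message}")
```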
@ -39,7 +39,7 @@ def sse_mcp_docker_server():
host_port = s.getsockname()[1]

container_internal_port = (
8000 # The port the MCP server listens on *inside* the container
8080 # The port the MCP server listens on *inside* the container
)

container_command_args = [
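For context on the hunk above: the fixture picks a free host port and forwards it to whatever port the containerized MCP server listens on internally, which this PR moves from 8000 to 8080. A rough sketch of that pattern with docker-py, where the image name and command are placeholders rather than the fixture's real values:

```python
# Sketch of mapping a free host port to a container's internal port 8080.
# The image and command below are placeholders, not what the fixture runs.
import socket

import docker

# Ask the OS for a free port on the host.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind(("127.0.0.1", 0))
    host_port = s.getsockname()[1]

client = docker.from_env()
container = client.containers.run(
    "example/mcp-server:latest",    # placeholder image
    command=["--port", "8080"],     # server listens on 8080 inside the container
    ports={"8080/tcp": host_port},  # publish it on the chosen host port
    detach=True,
)
print(f"MCP server reachable at http://127.0.0.1:{host_port}")
```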
@ -106,14 +106,31 @@ def sse_mcp_docker_server():
log_streamer.close()


@pytest.mark.skip('This test is flaky')
def test_default_activated_tools():
project_root = os.path.dirname(openhands.__file__)
mcp_config_path = os.path.join(project_root, 'runtime', 'mcp', 'config.json')
assert os.path.exists(mcp_config_path), (
f'MCP config file not found at {mcp_config_path}'
)
with open(mcp_config_path, 'r') as f:
mcp_config = json.load(f)
import importlib.resources

# Use importlib.resources to access the config file properly
# This works both when running from source and from installed package
try:
with importlib.resources.as_file(
importlib.resources.files('openhands').joinpath(
'runtime', 'mcp', 'config.json'
)
) as config_path:
assert config_path.exists(), f'MCP config file not found at {config_path}'
with open(config_path, 'r') as f:
mcp_config = json.load(f)
except (FileNotFoundError, ImportError):
# Fallback to the old method for development environments
project_root = os.path.dirname(openhands.__file__)
mcp_config_path = os.path.join(project_root, 'runtime', 'mcp', 'config.json')
assert os.path.exists(mcp_config_path), (
f'MCP config file not found at {mcp_config_path}'
)
with open(mcp_config_path, 'r') as f:
mcp_config = json.load(f)

assert 'mcpServers' in mcp_config
assert 'default' in mcp_config['mcpServers']
assert 'tools' in mcp_config
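The rewritten test above switches from building a path relative to `openhands.__file__` to `importlib.resources`, so the packaged `config.json` can be found whether the code runs from a source checkout or from an installed wheel. A stripped-down sketch of the same pattern; the package and resource path are only examples, not necessarily how OpenHands lays out its data files:

```python
# Generic sketch of the importlib.resources pattern used above; the package
# and resource path are examples rather than guaranteed OpenHands layout.
import importlib.resources
import json


def load_packaged_json(package: str, *parts: str) -> dict:
    resource = importlib.resources.files(package).joinpath(*parts)
    # as_file() yields a real filesystem path even if the package is zipped.
    with importlib.resources.as_file(resource) as path:
        with open(path, "r") as f:
            return json.load(f)


if __name__ == "__main__":
    config = load_packaged_json("openhands", "runtime", "mcp", "config.json")
    print(sorted(config))
```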
@ -121,6 +138,7 @@ def test_default_activated_tools():
assert len(mcp_config['tools']) == 0


@pytest.mark.skip('This test is flaky')
@pytest.mark.asyncio
async def test_fetch_mcp_via_stdio(temp_dir, runtime_cls, run_as_openhands):
mcp_stdio_server_config = MCPStdioServerConfig(
@ -136,7 +154,7 @@ async def test_fetch_mcp_via_stdio(temp_dir, runtime_cls, run_as_openhands):
)

# Test browser server
action_cmd = CmdRunAction(command='python3 -m http.server 8000 > server.log 2>&1 &')
action_cmd = CmdRunAction(command='python3 -m http.server 8080 > server.log 2>&1 &')
logger.info(action_cmd, extra={'msg_type': 'ACTION'})
obs = runtime.run_action(action_cmd)
logger.info(obs, extra={'msg_type': 'OBSERVATION'})
@ -151,7 +169,7 @@ async def test_fetch_mcp_via_stdio(temp_dir, runtime_cls, run_as_openhands):
logger.info(obs, extra={'msg_type': 'OBSERVATION'})
assert obs.exit_code == 0

mcp_action = MCPAction(name='fetch', arguments={'url': 'http://localhost:8000'})
mcp_action = MCPAction(name='fetch', arguments={'url': 'http://localhost:8080'})
obs = await runtime.call_tool_mcp(mcp_action)
logger.info(obs, extra={'msg_type': 'OBSERVATION'})
assert isinstance(obs, MCPObservation), (
@ -164,12 +182,13 @@ async def test_fetch_mcp_via_stdio(temp_dir, runtime_cls, run_as_openhands):
assert result_json['content'][0]['type'] == 'text'
assert (
result_json['content'][0]['text']
== 'Contents of http://localhost:8000/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
== 'Contents of http://localhost:8080/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
)

runtime.close()


@pytest.mark.skip('This test is flaky')
@pytest.mark.asyncio
async def test_filesystem_mcp_via_sse(
temp_dir, runtime_cls, run_as_openhands, sse_mcp_docker_server
@ -201,6 +220,7 @@ async def test_filesystem_mcp_via_sse(
# Container and log_streamer cleanup is handled by the sse_mcp_docker_server fixture


@pytest.mark.skip('This test is flaky')
@pytest.mark.asyncio
async def test_both_stdio_and_sse_mcp(
temp_dir, runtime_cls, run_as_openhands, sse_mcp_docker_server
@ -239,7 +259,7 @@ async def test_both_stdio_and_sse_mcp(
# ======= Test stdio server =======
# Test browser server
action_cmd_http = CmdRunAction(
command='python3 -m http.server 8000 > server.log 2>&1 &'
command='python3 -m http.server 8080 > server.log 2>&1 &'
)
logger.info(action_cmd_http, extra={'msg_type': 'ACTION'})
obs_http = runtime.run_action(action_cmd_http)
@ -260,7 +280,7 @@ async def test_both_stdio_and_sse_mcp(
# And FastMCP Proxy will pre-pend the server name (in this case, `fetch`)
# to the tool name, so the full tool name becomes `fetch_fetch`
name='fetch',
arguments={'url': 'http://localhost:8000'},
arguments={'url': 'http://localhost:8080'},
)
obs_fetch = await runtime.call_tool_mcp(mcp_action_fetch)
logger.info(obs_fetch, extra={'msg_type': 'OBSERVATION'})
@ -274,7 +294,7 @@ async def test_both_stdio_and_sse_mcp(
assert result_json['content'][0]['type'] == 'text'
assert (
result_json['content'][0]['text']
== 'Contents of http://localhost:8000/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
== 'Contents of http://localhost:8080/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
)
finally:
if runtime:
@ -282,6 +302,7 @@ async def test_both_stdio_and_sse_mcp(
# SSE Docker container cleanup is handled by the sse_mcp_docker_server fixture


@pytest.mark.skip('This test is flaky')
@pytest.mark.asyncio
async def test_microagent_and_one_stdio_mcp_in_config(
temp_dir, runtime_cls, run_as_openhands
@ -329,7 +350,7 @@ async def test_microagent_and_one_stdio_mcp_in_config(
# ======= Test the stdio server added by the microagent =======
# Test browser server
action_cmd_http = CmdRunAction(
command='python3 -m http.server 8000 > server.log 2>&1 &'
command='python3 -m http.server 8080 > server.log 2>&1 &'
)
logger.info(action_cmd_http, extra={'msg_type': 'ACTION'})
obs_http = runtime.run_action(action_cmd_http)
@ -346,7 +367,7 @@ async def test_microagent_and_one_stdio_mcp_in_config(
assert obs_cat.exit_code == 0

mcp_action_fetch = MCPAction(
name='fetch_fetch', arguments={'url': 'http://localhost:8000'}
name='fetch_fetch', arguments={'url': 'http://localhost:8080'}
)
obs_fetch = await runtime.call_tool_mcp(mcp_action_fetch)
logger.info(obs_fetch, extra={'msg_type': 'OBSERVATION'})
@ -360,7 +381,7 @@ async def test_microagent_and_one_stdio_mcp_in_config(
assert result_json['content'][0]['type'] == 'text'
assert (
result_json['content'][0]['text']
== 'Contents of http://localhost:8000/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
== 'Contents of http://localhost:8080/:\n---\n\n* <.downloads/>\n* <server.log>\n\n---'
)
finally:
if runtime: