Merge branch 'main' into multi-swe-bench-dependency

Graham Neubig 2025-10-16 07:53:17 -04:00 committed by GitHub
commit dfe4bd4941
173 changed files with 14328 additions and 428 deletions

View File

@ -21,11 +21,19 @@ concurrency:
cancel-in-progress: true
jobs:
build-and-test-binary:
name: Build and test binary executable
build-binary:
name: Build binary executable
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
include:
# Build on Ubuntu 22.04 for maximum GLIBC compatibility (GLIBC 2.31)
- os: ubuntu-22.04
platform: linux
artifact_name: openhands-cli-linux
# Build on macOS for macOS users
- os: macos-15
platform: macos
artifact_name: openhands-cli-macos
runs-on: ${{ matrix.os }}
steps:
@ -63,18 +71,17 @@ jobs:
echo "✅ Build & test finished without ❌ markers"
- name: Upload binary artifact (for releases only)
if: startsWith(github.ref, 'refs/tags/')
- name: Upload binary artifact
uses: actions/upload-artifact@v4
with:
name: openhands-cli-${{ matrix.os }}
name: ${{ matrix.artifact_name }}
path: openhands-cli/dist/openhands*
retention-days: 30
create-github-release:
name: Create GitHub Release
runs-on: ubuntu-latest
needs: build-and-test-binary
needs: build-binary
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout repository
@ -88,12 +95,12 @@ jobs:
- name: Prepare release assets
run: |
mkdir -p release-assets
# Rename binaries to include OS in filename
if [ -f artifacts/openhands-cli-ubuntu-latest/openhands ]; then
cp artifacts/openhands-cli-ubuntu-latest/openhands release-assets/openhands-linux
# Copy binaries with appropriate names for release
if [ -f artifacts/openhands-cli-linux/openhands ]; then
cp artifacts/openhands-cli-linux/openhands release-assets/openhands-linux
fi
if [ -f artifacts/openhands-cli-macos-latest/openhands ]; then
cp artifacts/openhands-cli-macos-latest/openhands release-assets/openhands-macos
if [ -f artifacts/openhands-cli-macos/openhands ]; then
cp artifacts/openhands-cli-macos/openhands release-assets/openhands-macos
fi
ls -la release-assets/
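
The build matrix above pins `ubuntu-22.04` specifically so the Linux binary links against GLIBC 2.31. A hedged sketch (hypothetical helper, not part of this workflow) that a build step could run to confirm the host libc matches that target:

```python
# Hypothetical check, not part of the workflow: confirm the build host's libc
# matches the GLIBC 2.31 compatibility target noted in the matrix comment.
import platform

libc, version = platform.libc_ver()
print(f"libc: {libc} {version}")
if libc == "glibc" and version:
    major, minor = (int(x) for x in version.split(".")[:2])
    if (major, minor) > (2, 31):
        raise SystemExit("Host GLIBC is newer than 2.31; the binary may not run on older distros")
```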

View File

@ -126,7 +126,7 @@ jobs:
- name: Install Python dependencies using Poetry
run: make install-python-dependencies POETRY_GROUP=main INSTALL_PLAYWRIGHT=0
- name: Create source distribution and Dockerfile
run: poetry run python3 openhands/runtime/utils/runtime_build.py --base_image ${{ matrix.base_image.image }} --build_folder containers/runtime --force_rebuild
run: poetry run python3 -m openhands.runtime.utils.runtime_build --base_image ${{ matrix.base_image.image }} --build_folder containers/runtime --force_rebuild
- name: Lowercase Repository Owner
run: |
echo REPO_OWNER=$(echo ${{ github.repository_owner }} | tr '[:upper:]' '[:lower:]') >> $GITHUB_ENV

View File

@ -71,7 +71,7 @@ jobs:
run: pip install pre-commit==4.2.0
- name: Run pre-commit hooks
working-directory: ./enterprise
run: pre-commit run --all-files --config ./dev_config/python/.pre-commit-config.yaml
run: pre-commit run --all-files --show-diff-on-failure --config ./dev_config/python/.pre-commit-config.yaml
lint-cli-python:
name: Lint CLI python

View File

@ -1,7 +1,7 @@
<a name="readme-top"></a>
<div align="center">
<img src="./docs/static/img/logo.png" alt="Logo" width="200">
<img src="https://raw.githubusercontent.com/All-Hands-AI/docs/main/openhands/static/img/logo.png" alt="Logo" width="200">
<h1 align="center">OpenHands: Code Less, Make More</h1>
</div>
@ -38,6 +38,12 @@ call APIs, and yes—even copy code snippets from StackOverflow.
Learn more at [docs.all-hands.dev](https://docs.all-hands.dev), or [sign up for OpenHands Cloud](https://app.all-hands.dev) to get started.
> [!IMPORTANT]
> **Upcoming change**: We are renaming our GitHub Org from `All-Hands-AI` to `OpenHands` on October 20th, 2025.
> Check the [tracking issue](https://github.com/All-Hands-AI/OpenHands/issues/11376) for more information.
> [!IMPORTANT]
> Using OpenHands for work? We'd love to chat! Fill out
> [this short form](https://docs.google.com/forms/d/e/1FAIpQLSet3VbGaz8z32gW9Wm-Grl4jpt5WgMXPgJ4EDPVmCETCBpJtQ/viewform)

View File

@ -6,7 +6,7 @@ that depends on the `base_image` **AND** a [Python source distribution](https://
The following command will generate a `Dockerfile` for `nikolaik/python-nodejs:python3.12-nodejs22` (the default base image), an updated `config.sh`, and the runtime source distribution files/folders into `containers/runtime`:
```bash
poetry run python3 openhands/runtime/utils/runtime_build.py \
poetry run python3 -m openhands.runtime.utils.runtime_build \
--base_image nikolaik/python-nodejs:python3.12-nodejs22 \
--build_folder containers/runtime
```
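
Switching from the script path to `python3 -m` means the entry point is resolved through the package import system rather than a filesystem path, so it behaves the same from any working directory. A minimal pre-flight sketch (an assumption, not part of the documented workflow) to confirm the module resolves before running the build:

```python
# Minimal pre-flight check (assumed, not part of the docs): verify that the
# module invoked via `python3 -m` is importable in the current environment.
import importlib.util

spec = importlib.util.find_spec("openhands.runtime.utils.runtime_build")
if spec is None:
    raise SystemExit("openhands.runtime.utils.runtime_build not importable; run `poetry install` first")
print(f"runtime_build resolved at {spec.origin}")
```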

View File

@ -0,0 +1,259 @@
"""Sync DB with Models
Revision ID: 076
Revises: 075
Create Date: 2025-10-05 11:28:41.772294
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartTaskStatus,
)
from openhands.app_server.event_callback.event_callback_result_models import (
EventCallbackResultStatus,
)
# revision identifiers, used by Alembic.
revision: str = '076'
down_revision: Union[str, Sequence[str], None] = '075'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
op.add_column(
'conversation_metadata',
sa.Column('max_budget_per_task', sa.Float(), nullable=True),
)
op.add_column(
'conversation_metadata',
sa.Column('cache_read_tokens', sa.Integer(), server_default='0'),
)
op.add_column(
'conversation_metadata',
sa.Column('cache_write_tokens', sa.Integer(), server_default='0'),
)
op.add_column(
'conversation_metadata',
sa.Column('reasoning_tokens', sa.Integer(), server_default='0'),
)
op.add_column(
'conversation_metadata',
sa.Column('context_window', sa.Integer(), server_default='0'),
)
op.add_column(
'conversation_metadata',
sa.Column('per_turn_token', sa.Integer(), server_default='0'),
)
op.add_column(
'conversation_metadata',
sa.Column(
'conversation_version', sa.String(), nullable=False, server_default='V0'
),
)
op.create_index(
op.f('ix_conversation_metadata_conversation_version'),
'conversation_metadata',
['conversation_version'],
unique=False,
)
op.add_column('conversation_metadata', sa.Column('sandbox_id', sa.String()))
op.create_index(
op.f('ix_conversation_metadata_sandbox_id'),
'conversation_metadata',
['sandbox_id'],
unique=False,
)
op.create_table(
'app_conversation_start_task',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_by_user_id', sa.String(), nullable=True),
sa.Column('status', sa.Enum(AppConversationStartTaskStatus), nullable=True),
sa.Column('detail', sa.String(), nullable=True),
sa.Column('app_conversation_id', sa.UUID(), nullable=True),
sa.Column('sandbox_id', sa.String(), nullable=True),
sa.Column('agent_server_url', sa.String(), nullable=True),
sa.Column('request', sa.JSON(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_app_conversation_start_task_created_at'),
'app_conversation_start_task',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_app_conversation_start_task_created_by_user_id'),
'app_conversation_start_task',
['created_by_user_id'],
unique=False,
)
op.create_index(
op.f('ix_app_conversation_start_task_updated_at'),
'app_conversation_start_task',
['updated_at'],
unique=False,
)
op.create_table(
'event_callback',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('conversation_id', sa.UUID(), nullable=True),
sa.Column('processor', sa.JSON(), nullable=True),
sa.Column('event_kind', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_event_callback_created_at'),
'event_callback',
['created_at'],
unique=False,
)
op.create_table(
'event_callback_result',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('status', sa.Enum(EventCallbackResultStatus), nullable=True),
sa.Column('event_callback_id', sa.UUID(), nullable=True),
sa.Column('event_id', sa.UUID(), nullable=True),
sa.Column('conversation_id', sa.UUID(), nullable=True),
sa.Column('detail', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_event_callback_result_conversation_id'),
'event_callback_result',
['conversation_id'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_created_at'),
'event_callback_result',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_event_callback_id'),
'event_callback_result',
['event_callback_id'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_event_id'),
'event_callback_result',
['event_id'],
unique=False,
)
op.create_table(
'v1_remote_sandbox',
sa.Column('id', sa.String(), nullable=False),
sa.Column('created_by_user_id', sa.String(), nullable=True),
sa.Column('sandbox_spec_id', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_v1_remote_sandbox_created_at'),
'v1_remote_sandbox',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_v1_remote_sandbox_created_by_user_id'),
'v1_remote_sandbox',
['created_by_user_id'],
unique=False,
)
op.create_index(
op.f('ix_v1_remote_sandbox_sandbox_spec_id'),
'v1_remote_sandbox',
['sandbox_spec_id'],
unique=False,
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(
op.f('ix_v1_remote_sandbox_sandbox_spec_id'), table_name='v1_remote_sandbox'
)
op.drop_index(
op.f('ix_v1_remote_sandbox_created_by_user_id'), table_name='v1_remote_sandbox'
)
op.drop_index(
op.f('ix_v1_remote_sandbox_created_at'), table_name='v1_remote_sandbox'
)
op.drop_table('v1_remote_sandbox')
op.drop_index(
op.f('ix_event_callback_result_event_id'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_event_callback_id'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_created_at'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_conversation_id'),
table_name='event_callback_result',
)
op.drop_table('event_callback_result')
op.drop_index(op.f('ix_event_callback_created_at'), table_name='event_callback')
op.drop_table('event_callback')
op.drop_index(
op.f('ix_app_conversation_start_task_updated_at'),
table_name='app_conversation_start_task',
)
op.drop_index(
op.f('ix_app_conversation_start_task_created_by_user_id'),
table_name='app_conversation_start_task',
)
op.drop_index(
op.f('ix_app_conversation_start_task_created_at'),
table_name='app_conversation_start_task',
)
op.drop_table('app_conversation_start_task')
op.drop_column('conversation_metadata', 'sandbox_id')
op.drop_column('conversation_metadata', 'conversation_version')
op.drop_column('conversation_metadata', 'per_turn_token')
op.drop_column('conversation_metadata', 'context_window')
op.drop_column('conversation_metadata', 'reasoning_tokens')
op.drop_column('conversation_metadata', 'cache_write_tokens')
op.drop_column('conversation_metadata', 'cache_read_tokens')
op.drop_column('conversation_metadata', 'max_budget_per_task')
op.execute('DROP TYPE appconversationstarttaskstatus')
op.execute('DROP TYPE eventcallbackresultstatus')
# ### end Alembic commands ###
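
The revision above can be applied or rolled back programmatically as well as through the Alembic CLI. A minimal sketch, assuming a standard `alembic.ini` at the project root (an assumption, not shown in this commit):

```python
# Minimal sketch for applying / rolling back revision 076, assuming the usual
# alembic.ini layout. Not part of the migration file itself.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")       # path to the Alembic configuration
command.upgrade(cfg, "076")       # runs upgrade() above
# command.downgrade(cfg, "075")   # runs downgrade() above to revert
```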

enterprise/poetry.lock (generated, 280 changed lines)
View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
[[package]]
name = "aiofiles"
@ -148,6 +148,25 @@ files = [
frozenlist = ">=1.1.0"
typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}
[[package]]
name = "aiosqlite"
version = "0.21.0"
description = "asyncio bridge to the standard sqlite3 module"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"},
{file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"},
]
[package.dependencies]
typing_extensions = ">=4.0"
[package.extras]
dev = ["attribution (==1.7.1)", "black (==24.3.0)", "build (>=1.2)", "coverage[toml] (==7.6.10)", "flake8 (==7.0.0)", "flake8-bugbear (==24.12.12)", "flit (==3.10.1)", "mypy (==1.14.1)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"]
docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.1)"]
[[package]]
name = "alembic"
version = "1.16.5"
@ -1061,7 +1080,7 @@ files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\" or os_name == \"nt\"", dev = "os_name == \"nt\"", test = "platform_system == \"Windows\" or sys_platform == \"win32\""}
markers = {main = "platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\"", dev = "os_name == \"nt\"", test = "platform_system == \"Windows\" or sys_platform == \"win32\""}
[[package]]
name = "comm"
@ -1797,6 +1816,25 @@ files = [
{file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"},
]
[[package]]
name = "ecdsa"
version = "0.19.1"
description = "ECDSA cryptographic signature library (pure python)"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6"
groups = ["main"]
files = [
{file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"},
{file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"},
]
[package.dependencies]
six = ">=1.9.0"
[package.extras]
gmpy = ["gmpy"]
gmpy2 = ["gmpy2"]
[[package]]
name = "email-validator"
version = "2.3.0"
@ -1968,38 +2006,79 @@ websockets = ["websockets (>=15.0.1)"]
[[package]]
name = "fastuuid"
version = "0.12.0"
version = "0.13.5"
description = "Python bindings to Rust's UUID library."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "fastuuid-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a900ef0956aacf862b460e20541fdae2d7c340594fe1bd6fdcb10d5f0791a9"},
{file = "fastuuid-0.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0302f5acf54dc75de30103025c5a95db06d6c2be36829043a0aa16fc170076bc"},
{file = "fastuuid-0.12.0-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:7946b4a310cfc2d597dcba658019d72a2851612a2cebb949d809c0e2474cf0a6"},
{file = "fastuuid-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1b6764dd42bf0c46c858fb5ade7b7a3d93b7a27485a7a5c184909026694cd88"},
{file = "fastuuid-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2bced35269315d16fe0c41003f8c9d63f2ee16a59295d90922cad5e6a67d0418"},
{file = "fastuuid-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82106e4b0a24f4f2f73c88f89dadbc1533bb808900740ca5db9bbb17d3b0c824"},
{file = "fastuuid-0.12.0-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:4db1bc7b8caa1d7412e1bea29b016d23a8d219131cff825b933eb3428f044dca"},
{file = "fastuuid-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:07afc8e674e67ac3d35a608c68f6809da5fab470fb4ef4469094fdb32ba36c51"},
{file = "fastuuid-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:328694a573fe9dce556b0b70c9d03776786801e028d82f0b6d9db1cb0521b4d1"},
{file = "fastuuid-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02acaea2c955bb2035a7d8e7b3fba8bd623b03746ae278e5fa932ef54c702f9f"},
{file = "fastuuid-0.12.0-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:ed9f449cba8cf16cced252521aee06e633d50ec48c807683f21cc1d89e193eb0"},
{file = "fastuuid-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:0df2ea4c9db96fd8f4fa38d0e88e309b3e56f8fd03675a2f6958a5b082a0c1e4"},
{file = "fastuuid-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7fe2407316a04ee8f06d3dbc7eae396d0a86591d92bafe2ca32fce23b1145786"},
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b31dd488d0778c36f8279b306dc92a42f16904cba54acca71e107d65b60b0c"},
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:b19361ee649365eefc717ec08005972d3d1eb9ee39908022d98e3bfa9da59e37"},
{file = "fastuuid-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:8fc66b11423e6f3e1937385f655bedd67aebe56a3dcec0cb835351cfe7d358c9"},
{file = "fastuuid-0.12.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2925f67b88d47cb16aa3eb1ab20fdcf21b94d74490e0818c91ea41434b987493"},
{file = "fastuuid-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7b15c54d300279ab20a9cc0579ada9c9f80d1bc92997fc61fb7bf3103d7cb26b"},
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458f1bc3ebbd76fdb89ad83e6b81ccd3b2a99fa6707cd3650b27606745cfb170"},
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:a8f0f83fbba6dc44271a11b22e15838641b8c45612cdf541b4822a5930f6893c"},
{file = "fastuuid-0.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:7cfd2092253d3441f6a8c66feff3c3c009da25a5b3da82bc73737558543632be"},
{file = "fastuuid-0.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9303617e887429c193d036d47d0b32b774ed3618431123e9106f610d601eb57e"},
{file = "fastuuid-0.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8790221325b376e1122e95f865753ebf456a9fb8faf0dca4f9bf7a3ff620e413"},
{file = "fastuuid-0.12.0-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:e4b12d3e23515e29773fa61644daa660ceb7725e05397a986c2109f512579a48"},
{file = "fastuuid-0.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:e41656457c34b5dcb784729537ea64c7d9bbaf7047b480c6c6a64c53379f455a"},
{file = "fastuuid-0.12.0.tar.gz", hash = "sha256:d0bd4e5b35aad2826403f4411937c89e7c88857b1513fe10f696544c03e9bd8e"},
{file = "fastuuid-0.13.5-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b9edf8ee30718aee787cdd2e9e1ff3d4a3ec6ddb32fba0a23fa04956df69ab07"},
{file = "fastuuid-0.13.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f67ea1e25c5e782f7fb5aaa5208f157d950401dd9321ce56bcc6d4dc3d72ed60"},
{file = "fastuuid-0.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ff3fc87e1f19603dd53c38f42c2ea8d5d5462554deab69e9cf1800574e4756c"},
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6e5337fa7698dc52bc724da7e9239e93c5b24a09f6904b8660dfb8c41ce3dee"},
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9db596023c10dabb12489a88c51b75297c3a2478cb2be645e06905934e7b9fc"},
{file = "fastuuid-0.13.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:191ff6192fe53c5fc9d4d241ee1156b30a7ed6f1677b1cc2423e7ecdbc26222b"},
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:348ce9f296dda701ba46d8dceeff309f90dbc75dd85080bbed2b299aa908890a"},
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:46954fb644995d7fc8bbd710fbd4c65cedaa48c921c86fdbafef0229168a8c96"},
{file = "fastuuid-0.13.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22da0f66041e1c10c7d465b495cc6cd8e17e080dda34b4bd5ff5240b860fbb82"},
{file = "fastuuid-0.13.5-cp310-cp310-win32.whl", hash = "sha256:3e6b548f06c1ed7bad951a17a09eef69d6f24eb2b874cb4833e26b886d82990f"},
{file = "fastuuid-0.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:c82838e52189d16b1307631179cb2cd37778dd8f4ddc00e9ce3c26f920b3b2f7"},
{file = "fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c122558ca4b5487e2bd0863467e4ccfe636afd1274803741487d48f2e32ea0e1"},
{file = "fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d7abd42a03a17a681abddd19aa4d44ca2747138cf8a48373b395cf1341a10de2"},
{file = "fastuuid-0.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2705cf7c2d6f7c03053404b75a4c44f872a73f6f9d5ea34f1dc6bba400c4a97c"},
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d220a056fcbad25932c1f25304261198612f271f4d150b2a84e81adb877daf7"},
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f29f93b5a0c5f5579f97f77d5319e9bfefd61d8678ec59d850201544faf33bf"},
{file = "fastuuid-0.13.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:399d86623fb806151b1feb9fdd818ebfc1d50387199a35f7264f98dfc1540af5"},
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:689e8795a1edd573b2c9a455024e4edf605a9690339bba29709857f7180894ea"},
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:25e82c4a1734da168b36f7308e397afbe9c9b353799a9c69563a605f11dd4641"},
{file = "fastuuid-0.13.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f62299e3cca69aad6a6fb37e26e45055587954d498ad98903fea24382377ea0e"},
{file = "fastuuid-0.13.5-cp311-cp311-win32.whl", hash = "sha256:68227f2230381b89fb1ad362ca6e433de85c6c11c36312b41757cad47b8a8e32"},
{file = "fastuuid-0.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:4a32306982bd031cb20d5d1a726b7b958a55babebd2300ce6c8e352d3496e931"},
{file = "fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a"},
{file = "fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7"},
{file = "fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202"},
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e"},
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854"},
{file = "fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86"},
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952"},
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9"},
{file = "fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3"},
{file = "fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85"},
{file = "fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a"},
{file = "fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ea17dfd35e0e91920a35d91e65e5f9c9d1985db55ac4ff2f1667a0f61189cefa"},
{file = "fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:be6ad91e5fefbcc2a4b478858a2715e386d405834ea3ae337c3b6b95cc0e47d6"},
{file = "fastuuid-0.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ea6df13a306aab3e0439d58c312ff1e6f4f07f09f667579679239b4a6121f64a"},
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2354c1996d3cf12dc2ba3752e2c4d6edc46e1a38c63893146777b1939f3062d4"},
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6cf9b7469fc26d1f9b1c43ac4b192e219e85b88fdf81d71aa755a6c08c8a817"},
{file = "fastuuid-0.13.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92ba539170097b9047551375f1ca09d8d2b4aefcc79eeae3e1c43fe49b42072e"},
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:dbb81d05617bc2970765c1ad82db7e8716f6a2b7a361a14b83de5b9240ade448"},
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:d973bd6bf9d754d3cca874714ac0a6b22a47f239fb3d3c8687569db05aac3471"},
{file = "fastuuid-0.13.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e725ceef79486423f05ee657634d4b4c1ca5fb2c8a94e0708f5d6356a83f2a83"},
{file = "fastuuid-0.13.5-cp313-cp313-win32.whl", hash = "sha256:a1c430a332ead0b2674f1ef71b17f43b8139ec5a4201182766a21f131a31e021"},
{file = "fastuuid-0.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:241fdd362fd96e6b337db62a65dd7cb3dfac20adf854573247a47510e192db6f"},
{file = "fastuuid-0.13.5-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e353c0a0d978a5ecd97171ac4fb7f55a6bd6cbae90f1ec4e828e5317f11b995e"},
{file = "fastuuid-0.13.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:904ac3eb37f4742e23f6a51be0d0451d1d3aceb50df8dac7afc6bf5209793650"},
{file = "fastuuid-0.13.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6b070e0dc1965d53b9e07c291537095ececf7d7e36e60aed9b22400fa6c5c7f"},
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0225f8bc78071a191cb458b3b0e23b04a7f03013575b8a3083da2a84c450e200"},
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f3f8f10b962cf2e11d3affc0cf2697ac5c9accc0d282dce981ed555a44ce15"},
{file = "fastuuid-0.13.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1ae87968614fe6d3029a8198671b5893341aac9459289e93d201027be9ea7e8"},
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6ca01f5e614530a1a858bf185dd5556805a4c11b6eba0a2536890b68ed954922"},
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6d3b6b10d78b9f7056445ac377612443980349da7221a3dd3e3f382f7c437be3"},
{file = "fastuuid-0.13.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ae95e4dcf94775d948ebb843f4443d33cd224bb31174030e106ee3cab66527c"},
{file = "fastuuid-0.13.5-cp38-cp38-win32.whl", hash = "sha256:5d753bc9ba8de6dd9caa8bbac045578c2fbe1c6ae40c2026b614676776fbe9dc"},
{file = "fastuuid-0.13.5-cp38-cp38-win_amd64.whl", hash = "sha256:f9530f1328b05b80c6fa111e7f2a5d55fa30fbbd72d708326d0c7b55b67ed772"},
{file = "fastuuid-0.13.5-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ed78153c589e5efb34faaa216836a5bf8a0b9d34e82183203166011238d9ed13"},
{file = "fastuuid-0.13.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a67e18c2d7fba8be6ea4aed8ca5a20fcf273f003efa01c1f33a096b72537e69e"},
{file = "fastuuid-0.13.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e362a3d66874d3d11a1ee9a8e717e32c2817cdb5d7a4e913290bf6e0f2a7fd8"},
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9baa9c33848ec0926231e7ecfef9e02faa0f6d24265b64108ea41f7a0bb3f48"},
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec6dab282162c19ec2172f33bafd467cffe26b92345789278adcbec19428d1"},
{file = "fastuuid-0.13.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2424a90688dbc44f119686fa452ff21aa106c9da258214f577816462ad606d5"},
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8f2cc6972941ab030f3776961ed8454772c3acad88781fc262d71514df89973"},
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a41257ea172b5de199c3cfa71cc6c574dcf22367fe51e26cba0d359107f11f30"},
{file = "fastuuid-0.13.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cade5f3b8023dbba5a006e5685a7baf0d7a30c43cea17113768aa9ef9582d799"},
{file = "fastuuid-0.13.5-cp39-cp39-win32.whl", hash = "sha256:880f0d03ad2518b96757ca422cba6ff76cea5464db2b3ad75c32acf1890e058f"},
{file = "fastuuid-0.13.5-cp39-cp39-win_amd64.whl", hash = "sha256:ebe95b730f81808eabc90247ac3d412b96d9fae1c406760b163bb9f134b7af69"},
{file = "fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1"},
]
[[package]]
@ -4187,14 +4266,14 @@ dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[tom
[[package]]
name = "libtmux"
version = "0.39.0"
version = "0.46.2"
description = "Typed library that provides an ORM wrapper for tmux, a terminal multiplexer."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "libtmux-0.39.0-py3-none-any.whl", hash = "sha256:6b6e338be2727f67aa6b7eb67fa134368fa3c3eac5df27565396467692891c1e"},
{file = "libtmux-0.39.0.tar.gz", hash = "sha256:59346aeef3c0d6017f3bc5e23248d43cdf50f32b775b9cb5d9ff5e2e5f3059f4"},
{file = "libtmux-0.46.2-py3-none-any.whl", hash = "sha256:6c32dbf22bde8e5e33b2714a4295f6e838dc640f337cd4c085a044f6828c7793"},
{file = "libtmux-0.46.2.tar.gz", hash = "sha256:9a398fec5d714129c8344555d466e1a903dfc0f741ba07aabe75a8ceb25c5dda"},
]
[[package]]
@ -4228,26 +4307,24 @@ valkey = ["valkey (>=6)"]
[[package]]
name = "litellm"
version = "1.76.1"
version = "1.77.7"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
python-versions = ">=3.8.1,<4.0, !=3.9.7"
groups = ["main"]
files = [
{file = "litellm-1.76.1-py3-none-any.whl", hash = "sha256:938f05075372f26098211ea9b3cb0a6bb7b46111330226b70d42d40bd307812f"},
{file = "litellm-1.76.1.tar.gz", hash = "sha256:d5a3a3efda04999b60ec0d1c29c1eaaa12f89a7b29db4bda691c7fb55b4fa6ad"},
]
files = []
develop = false
[package.dependencies]
aiohttp = ">=3.10"
click = "*"
fastuuid = ">=0.12.0"
fastuuid = ">=0.13.0"
httpx = ">=0.23.0"
importlib-metadata = ">=6.8.0"
jinja2 = ">=3.1.2,<4.0.0"
jsonschema = ">=4.22.0,<5.0.0"
jinja2 = "^3.1.2"
jsonschema = "^4.22.0"
openai = ">=1.99.5"
pydantic = ">=2.5.0,<3.0.0"
pydantic = "^2.5.0"
python-dotenv = ">=0.2.0"
tiktoken = ">=0.7.0"
tokenizers = "*"
@ -4256,10 +4333,16 @@ tokenizers = "*"
caching = ["diskcache (>=5.6.1,<6.0.0)"]
extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""]
proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography (>=43.0.1,<44.0.0)", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.19)", "litellm-proxy-extras (==0.2.18)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
semantic-router = ["semantic-router ; python_version >= \"3.9\""]
utils = ["numpydoc"]
[package.source]
type = "git"
url = "https://github.com/BerriAI/litellm.git"
reference = "v1.77.7.dev9"
resolved_reference = "763d2f8ccdd8412dbe6d4ac0e136d9ac34dcd4c0"
[[package]]
name = "llvmlite"
version = "0.44.0"
@ -5430,9 +5513,36 @@ youtube-transcript-api = ">=0.6.2"
[package.extras]
llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0)", "llama-index-retrievers-bm25 (>=0.5.2,<0.6.0)"]
[[package]]
name = "openhands-agent-server"
version = "1.0.0"
description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = []
develop = false
[package.dependencies]
aiosqlite = ">=0.19"
alembic = ">=1.13"
docker = ">=7.1,<8"
fastapi = ">=0.104"
pydantic = ">=2"
sqlalchemy = ">=2"
uvicorn = ">=0.31.1"
websockets = ">=12"
[package.source]
type = "git"
url = "https://github.com/All-Hands-AI/agent-sdk.git"
reference = "08cf609a996523c0199c61c768d74417b7e96109"
resolved_reference = "08cf609a996523c0199c61c768d74417b7e96109"
subdirectory = "openhands/agent_server"
[[package]]
name = "openhands-ai"
version = "0.57.0"
version = "0.59.0"
description = "OpenHands: Code Less, Make More"
optional = false
python-versions = "^3.12,<3.14"
@ -5444,6 +5554,7 @@ develop = true
aiohttp = ">=3.9.0,!=3.11.13"
anthropic = {version = "*", extras = ["vertex"]}
anyio = "4.9.0"
asyncpg = "^0.30.0"
bashlex = "^0.18"
boto3 = "*"
browsergym-core = "0.13.3"
@ -5465,21 +5576,26 @@ joblib = "*"
json-repair = "*"
jupyter_kernel_gateway = "*"
kubernetes = "^33.1.0"
libtmux = ">=0.37,<0.40"
litellm = ">=1.74.3, <1.77.2, !=1.64.4, !=1.67.*"
libtmux = ">=0.46.2"
litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*"
memory-profiler = "^0.61.0"
numpy = "*"
openai = "1.99.9"
openhands-aci = "0.3.2"
openhands-agent-server = {git = "https://github.com/All-Hands-AI/agent-sdk.git", rev = "08cf609a996523c0199c61c768d74417b7e96109", subdirectory = "openhands/agent_server"}
openhands-sdk = {git = "https://github.com/All-Hands-AI/agent-sdk.git", rev = "08cf609a996523c0199c61c768d74417b7e96109", subdirectory = "openhands/sdk"}
opentelemetry-api = "^1.33.1"
opentelemetry-exporter-otlp-proto-grpc = "^1.33.1"
pathspec = "^0.12.1"
pexpect = "*"
pg8000 = "^1.31.5"
pillow = "^11.3.0"
playwright = "^1.55.0"
poetry = "^2.1.2"
prompt-toolkit = "^3.0.50"
protobuf = "^5.0.0,<6.0.0"
psutil = "*"
pybase62 = "^1.0.0"
pygithub = "^2.5.0"
pyjwt = "^2.9.0"
pylatexenc = "*"
@ -5488,6 +5604,7 @@ PyPDF2 = "*"
python-docx = "*"
python-dotenv = "*"
python-frontmatter = "^1.1.0"
python-jose = {version = ">=3.3", extras = ["cryptography"]}
python-json-logger = "^3.2.1"
python-multipart = "*"
python-pptx = "*"
@ -5500,6 +5617,7 @@ redis = ">=5.2,<7.0"
requests = "^2.32.5"
setuptools = ">=78.1.1"
shellingham = "^1.5.4"
sqlalchemy = {version = "^2.0.40", extras = ["asyncio"]}
sse-starlette = "^3.0.2"
starlette = "^0.48.0"
tenacity = ">=8.5,<10.0"
@ -5519,6 +5637,35 @@ third-party-runtimes = ["daytona (==0.24.2)", "e2b-code-interpreter (>=2.0.0,<3.
type = "directory"
url = ".."
[[package]]
name = "openhands-sdk"
version = "1.0.0"
description = "OpenHands SDK - Core functionality for building AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = []
develop = false
[package.dependencies]
fastmcp = ">=2.11.3"
litellm = {git = "https://github.com/BerriAI/litellm.git", rev = "v1.77.7.dev9"}
pydantic = ">=2.11.7"
python-frontmatter = ">=1.1.0"
python-json-logger = ">=3.3.0"
tenacity = ">=9.1.2"
websockets = ">=12"
[package.extras]
boto3 = ["boto3 (>=1.35.0)"]
[package.source]
type = "git"
url = "https://github.com/All-Hands-AI/agent-sdk.git"
reference = "08cf609a996523c0199c61c768d74417b7e96109"
resolved_reference = "08cf609a996523c0199c61c768d74417b7e96109"
subdirectory = "openhands/sdk"
[[package]]
name = "openpyxl"
version = "3.1.5"
@ -5855,14 +6002,14 @@ ptyprocess = ">=0.5"
[[package]]
name = "pg8000"
version = "1.31.4"
version = "1.31.5"
description = "PostgreSQL interface library"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pg8000-1.31.4-py3-none-any.whl", hash = "sha256:d14fb2054642ee80f9a216721892e99e19db60a005358460ffa48872351423d4"},
{file = "pg8000-1.31.4.tar.gz", hash = "sha256:e7ecce4339891f27b0b22e2f79eb9efe44118bd384207359fc18350f788ace00"},
{file = "pg8000-1.31.5-py3-none-any.whl", hash = "sha256:0af2c1926b153307639868d2ee5cef6cd3a7d07448e12736989b10e1d491e201"},
{file = "pg8000-1.31.5.tar.gz", hash = "sha256:46ebb03be52b7a77c03c725c79da2ca281d6e8f59577ca66b17c9009618cae78"},
]
[package.dependencies]
@ -6528,6 +6675,17 @@ files = [
[package.dependencies]
pyasn1 = ">=0.6.1,<0.7.0"
[[package]]
name = "pybase62"
version = "1.0.0"
description = "Python module for base62 encoding"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "pybase62-1.0.0-py3-none-any.whl", hash = "sha256:60539ad956ec9e9de091bc7ae88c9550bc2fa17f503050cf34d021b75e73cb27"},
]
[[package]]
name = "pycodestyle"
version = "2.14.0"
@ -7122,6 +7280,30 @@ PyYAML = "*"
docs = ["sphinx"]
test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"]
[[package]]
name = "python-jose"
version = "3.5.0"
description = "JOSE implementation in Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"},
{file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"},
]
[package.dependencies]
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""}
ecdsa = "!=0.15"
pyasn1 = ">=0.5.0"
rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0"
[package.extras]
cryptography = ["cryptography (>=3.4.0)"]
pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"]
pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "python-json-logger"
version = "3.3.0"

View File

@ -224,6 +224,16 @@ class SaasUserAuth(UserAuth):
await rate_limiter.hit('auth_uid', user_id)
return instance
@classmethod
async def get_for_user(cls, user_id: str) -> UserAuth:
offline_token = await token_manager.load_offline_token(user_id)
assert offline_token is not None
return SaasUserAuth(
user_id=user_id,
refresh_token=SecretStr(offline_token),
auth_type=AuthType.BEARER,
)
def get_api_key_from_header(request: Request):
auth_header = request.headers.get('Authorization')
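
A hypothetical usage sketch for the `get_for_user` classmethod added above; it assumes a Keycloak user id for which `token_manager` has a stored offline token, and is meant to run inside an async context:

```python
# Hypothetical usage of SaasUserAuth.get_for_user (assumes an offline token
# exists for this user id); run from within an event loop.
async def example(user_id: str) -> None:
    user_auth = await SaasUserAuth.get_for_user(user_id)
    print(type(user_auth).__name__, user_auth.user_id)
```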

View File

@ -424,7 +424,7 @@ async def refresh_tokens(
provider_handler = ProviderHandler(
create_provider_tokens_object([provider]), external_auth_id=user_id
)
service = provider_handler._get_service(provider)
service = provider_handler.get_service(provider)
token = await service.get_latest_token()
if not token:
raise HTTPException(

View File

@ -784,6 +784,7 @@ class SaasNestedConversationManager(ConversationManager):
env_vars['SKIP_DEPENDENCY_CHECK'] = '1'
env_vars['INITIAL_NUM_WARM_SERVERS'] = '1'
env_vars['INIT_GIT_IN_EMPTY_WORKSPACE'] = '1'
env_vars['ENABLE_V1'] = '0'
# We need this for LLM traces tracking to identify the source of the LLM calls
env_vars['WEB_HOST'] = WEB_HOST

View File

@ -195,14 +195,11 @@ def update_active_working_seconds(
file_store: The FileStore instance for accessing conversation data
"""
try:
# Get all events for the conversation
events = list(event_store.get_events())
# Track agent state changes and calculate running time
running_start_time = None
total_running_seconds = 0.0
for event in events:
for event in event_store.search_events():
if isinstance(event, AgentStateChangedObservation) and event.timestamp:
event_timestamp = datetime.fromisoformat(event.timestamp).timestamp()

View File

@ -2,6 +2,6 @@
Unified SQLAlchemy declarative base for all models.
"""
from sqlalchemy.orm import declarative_base
from openhands.app_server.utils.sql_utils import Base
Base = declarative_base()
__all__ = ['Base']

View File

@ -1,7 +1,6 @@
import asyncio
import os
from google.cloud.sql.connector import Connector
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
@ -26,6 +25,8 @@ def _get_db_engine():
if GCP_DB_INSTANCE: # GCP environments
def get_db_connection():
from google.cloud.sql.connector import Connector
connector = Connector()
instance_string = f'{GCP_PROJECT}:{GCP_REGION}:{GCP_DB_INSTANCE}'
return connector.connect(
@ -52,6 +53,8 @@ def _get_db_engine():
async def async_creator():
from google.cloud.sql.connector import Connector
loop = asyncio.get_running_loop()
async with Connector(loop=loop) as connector:
conn = await connector.connect_async(

View File

@ -52,6 +52,14 @@ class SaasConversationStore(ConversationStore):
# Convert string to ProviderType enum
kwargs['git_provider'] = ProviderType(kwargs['git_provider'])
# Remove V1 attributes
kwargs.pop('max_budget_per_task', None)
kwargs.pop('cache_read_tokens', None)
kwargs.pop('cache_write_tokens', None)
kwargs.pop('reasoning_tokens', None)
kwargs.pop('context_window', None)
kwargs.pop('per_turn_token', None)
return ConversationMetadata(**kwargs)
async def save_metadata(self, metadata: ConversationMetadata):

View File

@ -1,41 +1,8 @@
import uuid
from datetime import UTC, datetime
from openhands.app_server.app_conversation.sql_app_conversation_info_service import (
StoredConversationMetadata as _StoredConversationMetadata,
)
from sqlalchemy import JSON, Column, DateTime, Float, Integer, String
from storage.base import Base
StoredConversationMetadata = _StoredConversationMetadata
class StoredConversationMetadata(Base): # type: ignore
__tablename__ = 'conversation_metadata'
conversation_id = Column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
github_user_id = Column(String, nullable=True) # The GitHub user ID
user_id = Column(String, nullable=False) # The Keycloak User ID
selected_repository = Column(String, nullable=True)
selected_branch = Column(String, nullable=True)
git_provider = Column(
String, nullable=True
) # The git provider (GitHub, GitLab, etc.)
title = Column(String, nullable=True)
last_updated_at = Column(
DateTime(timezone=True),
default=lambda: datetime.now(UTC), # type: ignore[attr-defined]
)
created_at = Column(
DateTime(timezone=True),
default=lambda: datetime.now(UTC), # type: ignore[attr-defined]
)
trigger = Column(String, nullable=True)
pr_number = Column(
JSON, nullable=True
) # List of PR numbers associated with the conversation
# Cost and token metrics
accumulated_cost = Column(Float, default=0.0)
prompt_tokens = Column(Integer, default=0)
completion_tokens = Column(Integer, default=0)
total_tokens = Column(Integer, default=0)
# LLM model used for the conversation
llm_model = Column(String, nullable=True)
__all__ = ['StoredConversationMetadata']

View File

@ -80,7 +80,7 @@ class TestUpdateActiveWorkingSeconds:
events.append(event6)
# Configure the mock event store to return our test events
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(
@ -133,7 +133,7 @@ class TestUpdateActiveWorkingSeconds:
events = [event1, event2]
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(
@ -178,7 +178,7 @@ class TestUpdateActiveWorkingSeconds:
events = [event1, event2, event3]
# No final state change - agent still running
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(
@ -221,7 +221,7 @@ class TestUpdateActiveWorkingSeconds:
events = [event1, event2, event3]
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(
@ -267,7 +267,7 @@ class TestUpdateActiveWorkingSeconds:
events = [event1, event2, event3, event4]
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(
@ -297,7 +297,7 @@ class TestUpdateActiveWorkingSeconds:
user_id = 'test_user_error'
# Configure the mock to raise an exception
mock_event_store.get_events.side_effect = Exception('Test error')
mock_event_store.search_events.side_effect = Exception('Test error')
# Call the function under test
update_active_working_seconds(
@ -376,7 +376,7 @@ class TestUpdateActiveWorkingSeconds:
event10.timestamp = '1970-01-01T00:00:37.000000'
events.append(event10)
mock_event_store.get_events.return_value = events
mock_event_store.search_events.return_value = events
# Call the function under test with mocked session_maker
with patch(

View File

@ -307,7 +307,7 @@ class TheoremqaTask(Task):
# Converting the string answer to a number/list/bool/option
try:
prediction = eval(prediction)
prediction = ast.literal_eval(prediction)
except Exception:
LOGGER.warning(
f'[TASK] Failed to convert the answer: {prediction}\n{traceback.format_exc()}'
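
The `eval` to `ast.literal_eval` substitution above keeps the conversion path for plain literals (numbers, lists, booleans) while refusing to execute arbitrary expressions. A minimal illustration of the difference:

```python
# Minimal illustration of ast.literal_eval vs eval: literals still parse,
# arbitrary expressions raise instead of executing.
import ast

print(ast.literal_eval("[1, 2.5, True]"))        # -> [1, 2.5, True]
try:
    ast.literal_eval("__import__('os').getcwd()")
except ValueError as exc:
    print("rejected non-literal:", exc)
```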

View File

@ -16,6 +16,10 @@ At the end, you must test your code rigorously using the tools provided, and do
You MUST plan extensively before each function call, and reflect extensively on the outcomes of the previous function calls. DO NOT do this entire process by making function calls only, as this can impair your ability to solve the problem and think insightfully.
## Issue Description
{{ instance.problem_statement }}
# Workflow
## High-Level Problem Solving Strategy
@ -73,6 +77,7 @@ Carefully read the issue and think hard about a plan to solve it before coding.
## 8. Final Reflection and Additional Testing
- Reflect carefully on the original intent of the user and the problem statement.
- Compare your changes with the base commit {{ instance.base_commit }} to ensure minimal and focused modifications.
- Think about potential edge cases or scenarios that may not be covered by existing tests.
- Write additional tests that would need to pass to fully validate the correctness of your solution.
- Run these new tests and ensure they all pass.

View File

@ -121,7 +121,7 @@ class ConversationService {
reason?: string;
}>(url);
return data;
} catch (error) {
} catch {
// Error checking if feedback exists
return { exists: false };
}

View File

@ -120,7 +120,7 @@ export function InteractiveChatBox({
// Step 5: Handle failed results
handleFailedFiles(fileResults, imageResults);
} catch (error) {
} catch {
// Clear loading states and show error
clearLoadingStates(validFiles, validImages);
displayErrorToast("An unexpected error occurred while processing files");

View File

@ -90,7 +90,7 @@ export function ConversationCard({
}
}
// VS Code URL not available
} catch (error) {
} catch {
// Failed to fetch VS Code URL
}
}

View File

@ -23,7 +23,7 @@ export function useUrlSearch(inputValue: string, provider: Provider) {
);
setUrlSearchResults(repositories);
} catch (error) {
} catch {
setUrlSearchResults([]);
} finally {
setIsUrlSearchLoading(false);

View File

@ -28,7 +28,7 @@ export function CancelSubscriptionModal({
await cancelSubscriptionMutation.mutateAsync();
displaySuccessToast(t(I18nKey.PAYMENT$SUBSCRIPTION_CANCELLED));
onClose();
} catch (error) {
} catch {
displayErrorToast(t(I18nKey.ERROR$GENERIC));
}
};

View File

@ -39,7 +39,7 @@ export function CreateApiKeyModal({
onKeyCreated(newKey);
displaySuccessToast(t(I18nKey.SETTINGS$API_KEY_CREATED));
setNewKeyName("");
} catch (error) {
} catch {
displayErrorToast(t(I18nKey.ERROR$GENERIC));
}
};

View File

@ -32,7 +32,7 @@ export function DeleteApiKeyModal({
await deleteApiKeyMutation.mutateAsync(keyToDelete.id);
displaySuccessToast(t(I18nKey.SETTINGS$API_KEY_DELETED));
onClose();
} catch (error) {
} catch {
displayErrorToast(t(I18nKey.ERROR$GENERIC));
}
};

View File

@ -2,7 +2,7 @@ import React from "react";
import { useLocation } from "react-router";
import { useGitUser } from "#/hooks/query/use-git-user";
import { UserActions } from "./user-actions";
import { AllHandsLogoButton } from "#/components/shared/buttons/all-hands-logo-button";
import { OpenHandsLogoButton } from "#/components/shared/buttons/openhands-logo-button";
import { NewProjectButton } from "#/components/shared/buttons/new-project-button";
import { ConversationPanelButton } from "#/components/shared/buttons/conversation-panel-button";
import { SettingsModal } from "#/components/shared/modals/settings/settings-modal";
@ -74,7 +74,7 @@ export function Sidebar() {
<nav className="flex flex-row md:flex-col items-center justify-between w-full h-auto md:w-auto md:h-full">
<div className="flex flex-row md:flex-col items-center gap-[26px]">
<div className="flex items-center justify-center">
<AllHandsLogoButton />
<OpenHandsLogoButton />
</div>
<div>
<NewProjectButton disabled={settings?.EMAIL_VERIFIED === false} />

View File

@ -3,13 +3,13 @@ import AllHandsLogo from "#/assets/branding/all-hands-logo.svg?react";
import { I18nKey } from "#/i18n/declaration";
import { TooltipButton } from "./tooltip-button";
export function AllHandsLogoButton() {
export function OpenHandsLogoButton() {
const { t } = useTranslation();
return (
<TooltipButton
tooltip={t(I18nKey.BRANDING$ALL_HANDS_AI)}
ariaLabel={t(I18nKey.BRANDING$ALL_HANDS_LOGO)}
tooltip={t(I18nKey.BRANDING$OPENHANDS)}
ariaLabel={t(I18nKey.BRANDING$OPENHANDS_LOGO)}
navLinkTo="/"
>
<AllHandsLogo width={46} height={30} />

View File

@ -275,7 +275,7 @@ export function ConversationSubscriptionsProvider({
setActiveConversationIds((prev) =>
prev.includes(conversationId) ? prev : [...prev, conversationId],
);
} catch (error) {
} catch {
// Clean up the event handler if there was an error
delete eventHandlersRef.current[conversationId];
}

View File

@ -25,7 +25,7 @@ function PosthogInit() {
try {
const config = await OptionService.getConfig();
setPosthogClientKey(config.POSTHOG_CLIENT_KEY);
} catch (error) {
} catch {
displayErrorToast("Error fetching PostHog client key");
}
})();

View File

@ -140,7 +140,7 @@ export function useConversationNameContextMenu({
}
}
// VS Code URL not available
} catch (error) {
} catch {
// Failed to fetch VS Code URL
}
}

View File

@ -3,6 +3,7 @@ import { useConfig } from "./query/use-config";
import { useGitUser } from "./query/use-git-user";
import { getLoginMethod, LoginMethod } from "#/utils/local-storage";
import reoService, { ReoIdentity } from "#/utils/reo";
import { isProductionDomain } from "#/utils/utils";
/**
* Maps login method to Reo identity type
@ -92,10 +93,14 @@ export const useReoTracking = () => {
const { data: user } = useGitUser();
const [hasIdentified, setHasIdentified] = React.useState(false);
// Initialize Reo.dev when in SaaS mode
// Initialize Reo.dev when in SaaS mode and on the correct domain
React.useEffect(() => {
const initReo = async () => {
if (config?.APP_MODE === "saas" && !reoService.isInitialized()) {
if (
config?.APP_MODE === "saas" &&
isProductionDomain() &&
!reoService.isInitialized()
) {
await reoService.init();
}
};
@ -103,10 +108,11 @@ export const useReoTracking = () => {
initReo();
}, [config?.APP_MODE]);
// Identify user when user data is available and we're in SaaS mode
// Identify user when user data is available and we're in SaaS mode on correct domain
React.useEffect(() => {
if (
config?.APP_MODE !== "saas" ||
!isProductionDomain() ||
!user ||
hasIdentified ||
!reoService.isInitialized()

View File

@ -168,8 +168,8 @@ export enum I18nKey {
GITHUB$CODE_NOT_IN_GITHUB = "GITHUB$CODE_NOT_IN_GITHUB",
GITHUB$START_FROM_SCRATCH = "GITHUB$START_FROM_SCRATCH",
AVATAR$ALT_TEXT = "AVATAR$ALT_TEXT",
BRANDING$ALL_HANDS_AI = "BRANDING$ALL_HANDS_AI",
BRANDING$ALL_HANDS_LOGO = "BRANDING$ALL_HANDS_LOGO",
BRANDING$OPENHANDS = "BRANDING$OPENHANDS",
BRANDING$OPENHANDS_LOGO = "BRANDING$OPENHANDS_LOGO",
ERROR$GENERIC = "ERROR$GENERIC",
GITHUB$AUTH_SCOPE = "GITHUB$AUTH_SCOPE",
FILE_SERVICE$INVALID_FILE_PATH = "FILE_SERVICE$INVALID_FILE_PATH",

View File

@ -2687,37 +2687,37 @@
"tr": "Kullanıcı avatarı",
"uk": "аватар користувача"
},
"BRANDING$ALL_HANDS_AI": {
"en": "All Hands AI",
"ja": "All Hands AI",
"zh-CN": "All Hands AI",
"zh-TW": "All Hands AI",
"ko-KR": "All Hands AI",
"de": "All Hands AI",
"no": "All Hands AI",
"it": "All Hands AI",
"pt": "All Hands AI",
"es": "All Hands AI",
"ar": "All Hands AI",
"fr": "All Hands AI",
"tr": "All Hands AI",
"uk": "All Hands AI"
"BRANDING$OPENHANDS": {
"en": "OpenHands",
"ja": "OpenHands",
"zh-CN": "OpenHands",
"zh-TW": "OpenHands",
"ko-KR": "OpenHands",
"de": "OpenHands",
"no": "OpenHands",
"it": "OpenHands",
"pt": "OpenHands",
"es": "OpenHands",
"ar": "OpenHands",
"fr": "OpenHands",
"tr": "OpenHands",
"uk": "OpenHands"
},
"BRANDING$ALL_HANDS_LOGO": {
"en": "All Hands Logo",
"ja": "All Handsロゴ",
"zh-CN": "All Hands标志",
"zh-TW": "All Hands標誌",
"ko-KR": "All Hands 로고",
"de": "All Hands Logo",
"no": "All Hands Logo",
"it": "Logo All Hands",
"pt": "Logo All Hands",
"es": "Logo de All Hands",
"ar": "شعار All Hands",
"fr": "Logo All Hands",
"tr": "All Hands Logosu",
"uk": "All Hands лого"
"BRANDING$OPENHANDS_LOGO": {
"en": "OpenHands Logo",
"ja": "OpenHandsロゴ",
"zh-CN": "OpenHands标志",
"zh-TW": "OpenHands標誌",
"ko-KR": "OpenHands 로고",
"de": "OpenHands Logo",
"no": "OpenHands Logo",
"it": "Logo OpenHands",
"pt": "Logo OpenHands",
"es": "Logo de OpenHands",
"ar": "شعار OpenHands",
"fr": "Logo OpenHands",
"tr": "OpenHands Logosu",
"uk": "OpenHands лого"
},
"ERROR$GENERIC": {
"en": "An error occurred",

View File

@ -5,6 +5,7 @@ import { SuggestedTaskGroup } from "#/utils/types";
import { ConversationStatus } from "#/types/conversation-status";
import { GitRepository } from "#/types/git";
import { sanitizeQuery } from "#/utils/sanitize-query";
import { PRODUCT_URL } from "#/utils/constants";
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
@ -49,6 +50,13 @@ export const isMobileDevice = (): boolean =>
"ontouchstart" in window ||
navigator.maxTouchPoints > 0;
/**
* Checks if the current domain is the production domain
* @returns True if the current domain matches the production URL
*/
export const isProductionDomain = (): boolean =>
window.location.origin === PRODUCT_URL.PRODUCTION;
interface EventActionHistory {
args?: {
LLM_API_KEY?: string;

View File

@ -24,7 +24,7 @@ export function transformVSCodeUrl(vsCodeUrl: string | null): string | null {
}
return vsCodeUrl;
} catch (error) {
} catch {
// Silently handle the error and return the original URL
return vsCodeUrl;
}

View File

@ -164,7 +164,7 @@ def test_executable() -> bool:
)
# --- Wait for welcome ---
deadline = boot_start + 30
deadline = boot_start + 60
saw_welcome = False
captured = []

View File

@ -1,3 +1,8 @@
"""OpenHands CLI package."""
"""OpenHands package."""
__version__ = '0.1.0'
from importlib.metadata import version, PackageNotFoundError
try:
__version__ = version("openhands")
except PackageNotFoundError:
__version__ = "0.0.0"

View File

@ -54,6 +54,7 @@ def _print_exit_hint(conversation_id: str) -> None:
)
def run_cli_entry(resume_conversation_id: str | None = None) -> None:
"""Run the agent chat session using the agent SDK.

View File

@ -113,21 +113,12 @@ def launch_gui_server(mount_cwd: bool = False, gpu: bool = False) -> None:
pull_cmd = ['docker', 'pull', runtime_image]
print_formatted_text(HTML(_format_docker_command_for_logging(pull_cmd)))
try:
subprocess.run(
pull_cmd,
check=True,
timeout=300, # 5 minutes timeout
)
subprocess.run(pull_cmd, check=True)
except subprocess.CalledProcessError:
print_formatted_text(
HTML('<ansired>❌ Failed to pull runtime image.</ansired>')
)
sys.exit(1)
except subprocess.TimeoutExpired:
print_formatted_text(
HTML('<ansired>❌ Timeout while pulling runtime image.</ansired>')
)
sys.exit(1)
print_formatted_text('')
print_formatted_text(

View File

@ -57,8 +57,6 @@ def display_banner(conversation_id: str, resume: bool = False) -> None:
style=DEFAULT_STYLE,
)
print_formatted_text(HTML(f'<grey>OpenHands CLI v{__version__}</grey>'))
print_formatted_text('')
if not resume:
print_formatted_text(

View File

@ -1,3 +1,4 @@
import html
from prompt_toolkit import HTML, print_formatted_text
from openhands.sdk.security.confirmation_policy import (
@ -37,7 +38,7 @@ def ask_user_confirmation(
or '[unknown action]'
)
print_formatted_text(
HTML(f'<grey> {i}. {tool_name}: {action_content}...</grey>')
HTML(f'<grey> {i}. {tool_name}: {html.escape(action_content)}...</grey>')
)
question = 'Choose an option:'

View File

@ -123,9 +123,15 @@ def prompt_api_key(
validator = NonEmptyValueValidator()
question = helper_text + step_counter.next_step(question)
return cli_text_input(
user_input = cli_text_input(
question, escapable=escapable, validator=validator, is_password=True
)
# If user pressed ENTER with existing key (empty input), return the existing key
if existing_api_key and not user_input.strip():
return existing_api_key.get_secret_value()
return user_input
# Advanced settings functions

View File

@ -4,7 +4,7 @@ requires = [ "hatchling>=1.25" ]
[project]
name = "openhands"
version = "1.0.0"
version = "1.0.1"
description = "OpenHands CLI - Terminal User Interface for OpenHands AI Agent"
readme = "README.md"
license = { text = "MIT" }
@ -15,15 +15,16 @@ classifiers = [
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
# Using Git URLs for dependencies so installs from PyPI pull from GitHub
# TODO: pin package versions once agent-sdk has published PyPI packages
dependencies = [
"openhands-sdk",
"openhands-tools",
"openhands-sdk @ git+https://github.com/All-Hands-AI/agent-sdk.git@50b094a92817e448ec4352d2950df4f19edd5a9f#subdirectory=openhands/sdk",
"openhands-tools @ git+https://github.com/All-Hands-AI/agent-sdk.git@50b094a92817e448ec4352d2950df4f19edd5a9f#subdirectory=openhands/tools",
"prompt-toolkit>=3",
"typer>=0.17.4",
]
# Dev-only tools with uv groups: `uv sync --group dev`
scripts.openhands = "openhands_cli.simple_main:main"
scripts = { openhands = "openhands_cli.simple_main:main" }
[dependency-groups]
# Hatchling wheel target: include the package directory
@ -42,6 +43,9 @@ dev = [
"ruff>=0.11.8",
]
[tool.hatch.metadata]
allow-direct-references = true
[tool.hatch.build.targets.wheel]
packages = [ "openhands_cli" ]
@ -96,5 +100,5 @@ disallow_untyped_defs = true
ignore_missing_imports = true
[tool.uv.sources]
openhands-sdk = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/sdk", rev = "189979a5013751aa86852ab41afe9a79555e62ac" }
openhands-tools = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/tools", rev = "189979a5013751aa86852ab41afe9a79555e62ac" }
openhands-sdk = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/sdk", rev = "50b094a92817e448ec4352d2950df4f19edd5a9f" }
openhands-tools = { git = "https://github.com/All-Hands-AI/agent-sdk.git", subdirectory = "openhands/tools", rev = "50b094a92817e448ec4352d2950df4f19edd5a9f" }

View File

@ -0,0 +1,56 @@
"""Test for API key preservation bug when updating settings."""
from unittest.mock import patch
import pytest
from pydantic import SecretStr
from openhands_cli.user_actions.settings_action import prompt_api_key
from openhands_cli.tui.utils import StepCounter
def test_api_key_preservation_when_user_presses_enter():
"""Test that API key is preserved when user presses ENTER to keep current key.
This test replicates the bug where API keys disappear when updating settings.
When a user presses ENTER to keep the current API key, the function should
return the existing API key, not an empty string.
"""
step_counter = StepCounter(1)
existing_api_key = SecretStr("sk-existing-key-123")
# Mock cli_text_input to return empty string (simulating user pressing ENTER)
with patch('openhands_cli.user_actions.settings_action.cli_text_input', return_value=''):
result = prompt_api_key(
step_counter=step_counter,
provider='openai',
existing_api_key=existing_api_key,
escapable=True
)
# The bug: result is empty string instead of the existing key
# This test will fail initially, demonstrating the bug
assert result == existing_api_key.get_secret_value(), (
f"Expected existing API key '{existing_api_key.get_secret_value()}' "
f"but got '{result}'. API key should be preserved when user presses ENTER."
)
def test_api_key_update_when_user_enters_new_key():
"""Test that API key is updated when user enters a new key."""
step_counter = StepCounter(1)
existing_api_key = SecretStr("sk-existing-key-123")
new_api_key = "sk-new-key-456"
# Mock cli_text_input to return new API key
with patch('openhands_cli.user_actions.settings_action.cli_text_input', return_value=new_api_key):
result = prompt_api_key(
step_counter=step_counter,
provider='openai',
existing_api_key=existing_api_key,
escapable=True
)
# Should return the new API key
assert result == new_api_key

View File

@ -111,8 +111,6 @@ class TestLaunchGuiServer:
[
# Docker pull failure
(subprocess.CalledProcessError(1, 'docker pull'), None, 1, False, False),
# Docker pull timeout
(subprocess.TimeoutExpired('docker pull', 300), None, 1, False, False),
# Docker run failure
(MagicMock(returncode=0), subprocess.CalledProcessError(1, 'docker run'), 1, False, False),
# KeyboardInterrupt during run

openhands-cli/uv.lock (generated, 53 lines changed)
View File

@ -660,18 +660,32 @@ wheels = [
[[package]]
name = "fastuuid"
version = "0.12.0"
version = "0.13.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/17/13146a1e916bd2971d0a58db5e0a4ad23efdd49f78f33ac871c161f8007b/fastuuid-0.12.0.tar.gz", hash = "sha256:d0bd4e5b35aad2826403f4411937c89e7c88857b1513fe10f696544c03e9bd8e", size = 19180, upload-time = "2025-01-27T18:04:14.387Z" }
sdist = { url = "https://files.pythonhosted.org/packages/15/80/3c16a1edad2e6cd82fbd15ac998cc1b881f478bf1f80ca717d941c441874/fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1", size = 18255, upload-time = "2025-09-26T09:05:38.281Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/28/442e79d6219b90208cb243ac01db05d89cc4fdf8ecd563fb89476baf7122/fastuuid-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:328694a573fe9dce556b0b70c9d03776786801e028d82f0b6d9db1cb0521b4d1", size = 247372, upload-time = "2025-01-27T18:03:40.967Z" },
{ url = "https://files.pythonhosted.org/packages/40/eb/e0fd56890970ca7a9ec0d116844580988b692b1a749ac38e0c39e1dbdf23/fastuuid-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02acaea2c955bb2035a7d8e7b3fba8bd623b03746ae278e5fa932ef54c702f9f", size = 258200, upload-time = "2025-01-27T18:04:12.138Z" },
{ url = "https://files.pythonhosted.org/packages/f5/3c/4b30e376e65597a51a3dc929461a0dec77c8aec5d41d930f482b8f43e781/fastuuid-0.12.0-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:ed9f449cba8cf16cced252521aee06e633d50ec48c807683f21cc1d89e193eb0", size = 278446, upload-time = "2025-01-27T18:04:15.877Z" },
{ url = "https://files.pythonhosted.org/packages/fe/96/cc5975fd23d2197b3e29f650a7a9beddce8993eaf934fa4ac595b77bb71f/fastuuid-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:0df2ea4c9db96fd8f4fa38d0e88e309b3e56f8fd03675a2f6958a5b082a0c1e4", size = 157185, upload-time = "2025-01-27T18:06:19.21Z" },
{ url = "https://files.pythonhosted.org/packages/a9/e8/d2bb4f19e5ee15f6f8e3192a54a897678314151aa17d0fb766d2c2cbc03d/fastuuid-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7fe2407316a04ee8f06d3dbc7eae396d0a86591d92bafe2ca32fce23b1145786", size = 247512, upload-time = "2025-01-27T18:04:08.115Z" },
{ url = "https://files.pythonhosted.org/packages/bc/53/25e811d92fd60f5c65e098c3b68bd8f1a35e4abb6b77a153025115b680de/fastuuid-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b31dd488d0778c36f8279b306dc92a42f16904cba54acca71e107d65b60b0c", size = 258257, upload-time = "2025-01-27T18:03:56.408Z" },
{ url = "https://files.pythonhosted.org/packages/10/23/73618e7793ea0b619caae2accd9e93e60da38dd78dd425002d319152ef2f/fastuuid-0.12.0-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:b19361ee649365eefc717ec08005972d3d1eb9ee39908022d98e3bfa9da59e37", size = 278559, upload-time = "2025-01-27T18:03:58.661Z" },
{ url = "https://files.pythonhosted.org/packages/e4/41/6317ecfc4757d5f2a604e5d3993f353ba7aee85fa75ad8b86fce6fc2fa40/fastuuid-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:8fc66b11423e6f3e1937385f655bedd67aebe56a3dcec0cb835351cfe7d358c9", size = 157276, upload-time = "2025-01-27T18:06:39.245Z" },
{ url = "https://files.pythonhosted.org/packages/21/36/434f137c5970cac19e57834e1f7680e85301619d49891618c00666700c61/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a", size = 494638, upload-time = "2025-09-26T09:14:58.695Z" },
{ url = "https://files.pythonhosted.org/packages/ca/3c/083de2ac007b2b305523b9c006dba5051e5afd87a626ef1a39f76e2c6b82/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7", size = 253138, upload-time = "2025-09-26T09:13:33.283Z" },
{ url = "https://files.pythonhosted.org/packages/73/5e/630cffa1c8775db526e39e9e4c5c7db0c27be0786bb21ba82c912ae19f63/fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202", size = 244521, upload-time = "2025-09-26T09:14:40.682Z" },
{ url = "https://files.pythonhosted.org/packages/4d/51/55d78705f4fbdadf88fb40f382f508d6c7a4941ceddd7825fafebb4cc778/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e", size = 271557, upload-time = "2025-09-26T09:15:09.75Z" },
{ url = "https://files.pythonhosted.org/packages/6a/2b/1b89e90a8635e5587ccdbbeb169c590672ce7637880f2c047482a0359950/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854", size = 272334, upload-time = "2025-09-26T09:07:48.865Z" },
{ url = "https://files.pythonhosted.org/packages/0c/06/4c8207894eeb30414999e5c3f66ac039bc4003437eb4060d8a1bceb4cc6f/fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86", size = 290594, upload-time = "2025-09-26T09:12:54.124Z" },
{ url = "https://files.pythonhosted.org/packages/50/69/96d221931a31d77a47cc2487bdfacfb3091edfc2e7a04b1795df1aec05df/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952", size = 452835, upload-time = "2025-09-26T09:14:00.994Z" },
{ url = "https://files.pythonhosted.org/packages/25/ef/bf045f0a47dcec96247497ef3f7a31d86ebc074330e2dccc34b8dbc0468a/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9", size = 468225, upload-time = "2025-09-26T09:13:38.585Z" },
{ url = "https://files.pythonhosted.org/packages/30/46/4817ab5a3778927155a4bde92540d4c4fa996161ec8b8e080c8928b0984e/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3", size = 444907, upload-time = "2025-09-26T09:14:30.163Z" },
{ url = "https://files.pythonhosted.org/packages/80/27/ab284117ce4dc9b356a7196bdbf220510285f201d27f1f078592cdc8187b/fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85", size = 145415, upload-time = "2025-09-26T09:08:59.494Z" },
{ url = "https://files.pythonhosted.org/packages/f4/0c/f970a4222773b248931819f8940800b760283216ca3dda173ed027e94bdd/fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a", size = 150840, upload-time = "2025-09-26T09:13:46.115Z" },
{ url = "https://files.pythonhosted.org/packages/4f/62/74fc53f6e04a4dc5b36c34e4e679f85a4c14eec800dcdb0f2c14b5442217/fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ea17dfd35e0e91920a35d91e65e5f9c9d1985db55ac4ff2f1667a0f61189cefa", size = 494678, upload-time = "2025-09-26T09:14:30.908Z" },
{ url = "https://files.pythonhosted.org/packages/09/ba/f28b9b7045738a8bfccfb9cd6aff4b91fce2669e6b383a48b0694ee9b3ff/fastuuid-0.13.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:be6ad91e5fefbcc2a4b478858a2715e386d405834ea3ae337c3b6b95cc0e47d6", size = 253162, upload-time = "2025-09-26T09:13:35.879Z" },
{ url = "https://files.pythonhosted.org/packages/b1/18/13fac89cb4c9f0cd7e81a9154a77ecebcc95d2b03477aa91d4d50f7227ee/fastuuid-0.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ea6df13a306aab3e0439d58c312ff1e6f4f07f09f667579679239b4a6121f64a", size = 244546, upload-time = "2025-09-26T09:14:58.13Z" },
{ url = "https://files.pythonhosted.org/packages/04/bf/9691167804d59411cc4269841df949f6dd5e76452ab10dcfcd1dbe04c5bc/fastuuid-0.13.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2354c1996d3cf12dc2ba3752e2c4d6edc46e1a38c63893146777b1939f3062d4", size = 271528, upload-time = "2025-09-26T09:14:48.996Z" },
{ url = "https://files.pythonhosted.org/packages/a9/b5/7a75a03d1c7aa0b6d573032fcca39391f0aef7f2caabeeb45a672bc0bd3c/fastuuid-0.13.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6cf9b7469fc26d1f9b1c43ac4b192e219e85b88fdf81d71aa755a6c08c8a817", size = 272292, upload-time = "2025-09-26T09:14:42.82Z" },
{ url = "https://files.pythonhosted.org/packages/c0/db/fa0f16cbf76e6880599533af4ef01bb586949c5320612e9d884eff13e603/fastuuid-0.13.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92ba539170097b9047551375f1ca09d8d2b4aefcc79eeae3e1c43fe49b42072e", size = 290466, upload-time = "2025-09-26T09:08:33.161Z" },
{ url = "https://files.pythonhosted.org/packages/1e/02/6b8c45bfbc8500994dd94edba7f59555f9683c4d8c9a164ae1d25d03c7c7/fastuuid-0.13.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:dbb81d05617bc2970765c1ad82db7e8716f6a2b7a361a14b83de5b9240ade448", size = 452838, upload-time = "2025-09-26T09:13:44.747Z" },
{ url = "https://files.pythonhosted.org/packages/27/12/85d95a84f265b888e8eb9f9e2b5aaf331e8be60c0a7060146364b3544b6a/fastuuid-0.13.5-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:d973bd6bf9d754d3cca874714ac0a6b22a47f239fb3d3c8687569db05aac3471", size = 468149, upload-time = "2025-09-26T09:13:18.712Z" },
{ url = "https://files.pythonhosted.org/packages/ad/da/dd9a137e9ea707e883c92470113a432233482ec9ad3e9b99c4defc4904e6/fastuuid-0.13.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e725ceef79486423f05ee657634d4b4c1ca5fb2c8a94e0708f5d6356a83f2a83", size = 444933, upload-time = "2025-09-26T09:14:09.494Z" },
{ url = "https://files.pythonhosted.org/packages/12/f4/ab363d7f4ac3989691e2dc5ae2d8391cfb0b4169e52ef7fa0ac363e936f0/fastuuid-0.13.5-cp313-cp313-win32.whl", hash = "sha256:a1c430a332ead0b2674f1ef71b17f43b8139ec5a4201182766a21f131a31e021", size = 145462, upload-time = "2025-09-26T09:14:15.105Z" },
{ url = "https://files.pythonhosted.org/packages/aa/8a/52eb77d9c294a54caa0d2d8cc9f906207aa6d916a22de963687ab6db8b86/fastuuid-0.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:241fdd362fd96e6b337db62a65dd7cb3dfac20adf854573247a47510e192db6f", size = 150923, upload-time = "2025-09-26T09:13:03.923Z" },
]
[[package]]
@ -1266,8 +1280,8 @@ wheels = [
[[package]]
name = "litellm"
version = "1.76.2"
source = { registry = "https://pypi.org/simple" }
version = "1.77.7"
source = { git = "https://github.com/BerriAI/litellm.git?rev=v1.77.7.dev9#763d2f8ccdd8412dbe6d4ac0e136d9ac34dcd4c0" }
dependencies = [
{ name = "aiohttp" },
{ name = "click" },
@ -1282,10 +1296,6 @@ dependencies = [
{ name = "tiktoken" },
{ name = "tokenizers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/75/a3/f7c00c660972eed1ba5ed53771ac9b4235e7fb1dc410e91d35aff2778ae7/litellm-1.76.2.tar.gz", hash = "sha256:fc7af111fa0f06943d8dbebed73f88000f9902f0d0ee0882c57d0bd5c1a37ecb", size = 10189238, upload-time = "2025-09-04T00:25:09.472Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/79/f4/980cc81c21424026dcb48a541654fd6f4286891825a3d0dd51f02b65cbc3/litellm-1.76.2-py3-none-any.whl", hash = "sha256:a9a2ef64a598b5b4ae245f1de6afc400856477cd6f708ff633d95e2275605a45", size = 8973847, upload-time = "2025-09-04T00:25:05.353Z" },
]
[[package]]
name = "macholib"
@ -1615,7 +1625,7 @@ wheels = [
[[package]]
name = "openhands"
version = "0.1.0"
version = "1.0.1"
source = { editable = "." }
dependencies = [
{ name = "openhands-sdk" },
@ -1642,8 +1652,8 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "openhands-sdk", git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Fsdk&rev=189979a5013751aa86852ab41afe9a79555e62ac" },
{ name = "openhands-tools", git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Ftools&rev=189979a5013751aa86852ab41afe9a79555e62ac" },
{ name = "openhands-sdk", git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Fsdk&rev=50b094a92817e448ec4352d2950df4f19edd5a9f" },
{ name = "openhands-tools", git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Ftools&rev=50b094a92817e448ec4352d2950df4f19edd5a9f" },
{ name = "prompt-toolkit", specifier = ">=3" },
{ name = "typer", specifier = ">=0.17.4" },
]
@ -1667,9 +1677,10 @@ dev = [
[[package]]
name = "openhands-sdk"
version = "1.0.0"
source = { git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Fsdk&rev=189979a5013751aa86852ab41afe9a79555e62ac#189979a5013751aa86852ab41afe9a79555e62ac" }
source = { git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Fsdk&rev=50b094a92817e448ec4352d2950df4f19edd5a9f#50b094a92817e448ec4352d2950df4f19edd5a9f" }
dependencies = [
{ name = "fastmcp" },
{ name = "httpx" },
{ name = "litellm" },
{ name = "pydantic" },
{ name = "python-frontmatter" },
@ -1681,7 +1692,7 @@ dependencies = [
[[package]]
name = "openhands-tools"
version = "1.0.0"
source = { git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Ftools&rev=189979a5013751aa86852ab41afe9a79555e62ac#189979a5013751aa86852ab41afe9a79555e62ac" }
source = { git = "https://github.com/All-Hands-AI/agent-sdk.git?subdirectory=openhands%2Ftools&rev=50b094a92817e448ec4352d2950df4f19edd5a9f#50b094a92817e448ec4352d2950df4f19edd5a9f" }
dependencies = [
{ name = "bashlex" },
{ name = "binaryornot" },

View File

@ -1,44 +1,9 @@
import os
from pathlib import Path
# This is a namespace package - extend the path to include installed packages
# (We need to do this to support dependencies openhands-sdk, openhands-tools and openhands-agent-server
# which all have a top level `openhands` package.)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__package_name__ = 'openhands_ai'
# Import version information for backward compatibility
from openhands.version import __version__, get_version
def get_version():
# Try getting the version from pyproject.toml
try:
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
candidate_paths = [
Path(root_dir) / 'pyproject.toml',
Path(root_dir) / 'openhands' / 'pyproject.toml',
]
for file_path in candidate_paths:
if file_path.is_file():
with open(file_path, 'r') as f:
for line in f:
if line.strip().startswith('version ='):
return line.split('=', 1)[1].strip().strip('"').strip("'")
except FileNotFoundError:
pass
try:
from importlib.metadata import PackageNotFoundError, version
return version(__package_name__)
except (ImportError, PackageNotFoundError):
pass
try:
from pkg_resources import DistributionNotFound, get_distribution # type: ignore
return get_distribution(__package_name__).version
except (ImportError, DistributionNotFound):
pass
return 'unknown'
try:
__version__ = get_version()
except Exception:
__version__ = 'unknown'
__all__ = ['__version__', 'get_version']

View File

@ -3,9 +3,9 @@
At the user's request, repository {{ repository_info.repo_name }} has been cloned to {{ repository_info.repo_directory }} in the current working directory.
{% if repository_info.branch_name %}The repository has been checked out to branch "{{ repository_info.branch_name }}".
IMPORTANT: You should work within the current branch "{{ repository_info.branch_name }}" unless
IMPORTANT: You should work within the current branch "{{ repository_info.branch_name }}" unless:
1. the user explicitly instructs otherwise
2. if the current branch is "main", "master", or another default branch where direct pushes may be unsafe
2. the current branch is "main", "master", or another default branch where direct pushes may be unsafe
{% endif %}
</REPOSITORY_INFO>
{% endif %}
@ -35,9 +35,9 @@ For example, if you are using vite.config.js, you should set server.host and ser
{% endif %}
{% if runtime_info.custom_secrets_descriptions %}
<CUSTOM_SECRETS>
You are have access to the following environment variables
You have access to the following environment variables
{% for secret_name, secret_description in runtime_info.custom_secrets_descriptions.items() %}
* $**{{ secret_name }}**: {{ secret_description }}
* **${{ secret_name }}**: {{ secret_description }}
{% endfor %}
</CUSTOM_SECRETS>
{% endif %}

View File

@ -0,0 +1,19 @@
# OpenHands App Server
FastAPI-based application server that provides REST API endpoints for OpenHands V1 integration.
## Overview
As of 2025-09-29, much of the code in the OpenHands repository can be regarded as legacy, having been superseded by the code in AgentSDK. This package provides endpoints to interface with the new agent SDK and bridge the gap with the existing OpenHands project.
## Architecture
The app server is organized into several key modules:
- **conversation/**: Manages sandboxed conversations and their lifecycle
- **event/**: Handles event storage, retrieval, and streaming
- **event_callback/**: Manages webhooks and event callbacks
- **sandbox/**: Manages sandbox environments for agent execution
- **user/**: User management and authentication
- **services/**: Core services like JWT authentication
- **utils/**: Utility functions for common operations
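The endpoints are plain REST routes, so any HTTP client can drive them. A minimal sketch, assuming the conversation router is mounted at the server root on `http://localhost:3000` and that any required auth headers are already handled:

```python
# Hypothetical client sketch; base URL and auth are deployment-specific.
import asyncio

import httpx

BASE_URL = "http://localhost:3000"  # assumption


async def main() -> None:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # List up to 10 conversations whose title contains "bugfix"
        response = await client.get(
            "/app-conversations/search",
            params={"title__contains": "bugfix", "limit": 10},
        )
        response.raise_for_status()
        page = response.json()
        for conversation in page["items"]:
            print(conversation["id"], conversation.get("title"))


asyncio.run(main())
```

The same routes also accept `created_at`/`updated_at` range filters and a `page_id` for pagination.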

View File

View File

@ -0,0 +1,20 @@
# Conversation Management
Manages app conversations and their lifecycle within the OpenHands app server.
## Overview
This module provides services and models for managing conversations that run within sandboxed environments. It handles conversation creation, retrieval, status tracking, and lifecycle management.
## Key Components
- **AppConversationService**: Abstract service for conversation CRUD operations
- **LiveStatusAppConversationService**: Real-time conversation status tracking
- **AppConversationRouter**: FastAPI router for conversation endpoints
## Features
- Conversation search and filtering by title, dates, and status
- Real-time conversation status updates
- Pagination support for large conversation lists
- Integration with sandbox environments
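A minimal sketch of paging through conversations with a concrete `AppConversationService`; how the `service` instance is obtained from the app server's injection machinery is left out here:

```python
# Hypothetical pagination sketch; `service` is a concrete AppConversationService
# obtained from the app server's dependency injection.
async def list_all_conversation_titles(service) -> list[str | None]:
    titles: list[str | None] = []
    page_id: str | None = None
    while True:
        page = await service.search_app_conversations(page_id=page_id, limit=100)
        titles.extend(conversation.title for conversation in page.items)
        if page.next_page_id is None:
            return titles
        page_id = page.next_page_id
```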

View File

@ -0,0 +1 @@
# App conversation module

View File

@ -0,0 +1,75 @@
import asyncio
from abc import ABC, abstractmethod
from datetime import datetime
from uuid import UUID
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationInfo,
AppConversationInfoPage,
AppConversationSortOrder,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class AppConversationInfoService(ABC):
"""Service for accessing info on conversations without their current status."""
@abstractmethod
async def search_app_conversation_info(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 100,
) -> AppConversationInfoPage:
"""Search for sandboxed conversations."""
@abstractmethod
async def count_app_conversation_info(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
) -> int:
"""Count sandboxed conversations."""
@abstractmethod
async def get_app_conversation_info(
self, conversation_id: UUID
) -> AppConversationInfo | None:
"""Get a single conversation info, returning None if missing."""
async def batch_get_app_conversation_info(
self, conversation_ids: list[UUID]
) -> list[AppConversationInfo | None]:
"""Get a batch of conversation info, return None for any missing."""
return await asyncio.gather(
*[
self.get_app_conversation_info(conversation_id)
for conversation_id in conversation_ids
]
)
# Mutators
@abstractmethod
async def save_app_conversation_info(
self, info: AppConversationInfo
) -> AppConversationInfo:
"""Store the sandboxed conversation info object given.
Return the stored info
"""
class AppConversationInfoServiceInjector(
DiscriminatedUnionMixin, Injector[AppConversationInfoService], ABC
):
pass
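A minimal in-memory sketch of a concrete implementation, e.g. for tests; the filter and sort arguments are ignored for brevity and there is no real pagination, so this is not how a production, database-backed service would behave:

```python
# Minimal in-memory sketch (hypothetical, test-only).
from datetime import datetime
from uuid import UUID

from openhands.app_server.app_conversation.app_conversation_info_service import (
    AppConversationInfoService,
)
from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationInfo,
    AppConversationInfoPage,
    AppConversationSortOrder,
)


class InMemoryAppConversationInfoService(AppConversationInfoService):
    def __init__(self) -> None:
        self._items: dict[UUID, AppConversationInfo] = {}

    async def search_app_conversation_info(
        self,
        title__contains: str | None = None,
        created_at__gte: datetime | None = None,
        created_at__lt: datetime | None = None,
        updated_at__gte: datetime | None = None,
        updated_at__lt: datetime | None = None,
        sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
        page_id: str | None = None,
        limit: int = 100,
    ) -> AppConversationInfoPage:
        # Filters, sort order, and page_id are ignored in this sketch.
        items = sorted(self._items.values(), key=lambda i: i.created_at, reverse=True)
        return AppConversationInfoPage(items=items[:limit])

    async def count_app_conversation_info(self, **filters) -> int:
        # Filters are ignored in this sketch.
        return len(self._items)

    async def get_app_conversation_info(
        self, conversation_id: UUID
    ) -> AppConversationInfo | None:
        return self._items.get(conversation_id)

    async def save_app_conversation_info(
        self, info: AppConversationInfo
    ) -> AppConversationInfo:
        self._items[info.id] = info
        return info
```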

View File

@ -0,0 +1,148 @@
from datetime import datetime
from enum import Enum
from uuid import UUID, uuid4
from pydantic import BaseModel, Field
from openhands.agent_server.models import SendMessageRequest
from openhands.agent_server.utils import utc_now
from openhands.app_server.event_callback.event_callback_models import (
EventCallbackProcessor,
)
from openhands.app_server.sandbox.sandbox_models import SandboxStatus
from openhands.integrations.service_types import ProviderType
from openhands.sdk.conversation.state import AgentExecutionStatus
from openhands.sdk.llm import MetricsSnapshot
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
class AppConversationInfo(BaseModel):
"""Conversation info which does not contain status."""
id: UUID = Field(default_factory=uuid4)
created_by_user_id: str | None
sandbox_id: str
selected_repository: str | None = None
selected_branch: str | None = None
git_provider: ProviderType | None = None
title: str | None = None
trigger: ConversationTrigger | None = None
pr_number: list[int] = Field(default_factory=list)
llm_model: str | None = None
metrics: MetricsSnapshot | None = None
created_at: datetime = Field(default_factory=utc_now)
updated_at: datetime = Field(default_factory=utc_now)
class AppConversationSortOrder(Enum):
CREATED_AT = 'CREATED_AT'
CREATED_AT_DESC = 'CREATED_AT_DESC'
UPDATED_AT = 'UPDATED_AT'
UPDATED_AT_DESC = 'UPDATED_AT_DESC'
TITLE = 'TITLE'
TITLE_DESC = 'TITLE_DESC'
class AppConversationInfoPage(BaseModel):
items: list[AppConversationInfo]
next_page_id: str | None = None
class AppConversation(AppConversationInfo): # type: ignore
sandbox_status: SandboxStatus = Field(
default=SandboxStatus.MISSING,
description='Current sandbox status. Will be MISSING if the sandbox does not exist.',
)
agent_status: AgentExecutionStatus | None = Field(
default=None,
description='Current agent status. Will be None if the sandbox_status is not RUNNING',
)
conversation_url: str | None = Field(
default=None, description='The URL where the conversation may be accessed'
)
session_api_key: str | None = Field(
default=None, description='The Session Api Key for REST operations.'
)
# JSON fields for complex data types
pr_number: list[int] = Field(default_factory=list)
metrics: MetricsSnapshot | None = Field(default=None)
class AppConversationPage(BaseModel):
items: list[AppConversation]
next_page_id: str | None = None
class AppConversationStartRequest(BaseModel):
"""Start conversation request object.
Although a user can go directly to the sandbox and start conversations, they
would need to manually supply required startup parameters such as LLM key. Starting
from the app server copies these from the user info.
"""
sandbox_id: str | None = Field(default=None)
initial_message: SendMessageRequest | None = None
processors: list[EventCallbackProcessor] = Field(default_factory=list)
llm_model: str | None = None
# Git parameters
selected_repository: str | None = None
selected_branch: str | None = None
git_provider: ProviderType | None = None
title: str | None = None
trigger: ConversationTrigger | None = None
pr_number: list[int] = Field(default_factory=list)
class AppConversationStartTaskStatus(Enum):
WORKING = 'WORKING'
WAITING_FOR_SANDBOX = 'WAITING_FOR_SANDBOX'
PREPARING_REPOSITORY = 'PREPARING_REPOSITORY'
RUNNING_SETUP_SCRIPT = 'RUNNING_SETUP_SCRIPT'
SETTING_UP_GIT_HOOKS = 'SETTING_UP_GIT_HOOKS'
STARTING_CONVERSATION = 'STARTING_CONVERSATION'
READY = 'READY'
ERROR = 'ERROR'
class AppConversationStartTaskSortOrder(Enum):
CREATED_AT = 'CREATED_AT'
CREATED_AT_DESC = 'CREATED_AT_DESC'
UPDATED_AT = 'UPDATED_AT'
UPDATED_AT_DESC = 'UPDATED_AT_DESC'
class AppConversationStartTask(BaseModel):
"""Object describing the start process for an app conversation.
Because starting an app conversation can be slow (and can involve starting a sandbox),
we kick off a background task for it. Once the conversation is started, the app_conversation_id
is populated (a client polling sketch follows these models)."""
id: UUID = Field(default_factory=uuid4)
created_by_user_id: str | None
status: AppConversationStartTaskStatus = AppConversationStartTaskStatus.WORKING
detail: str | None = None
app_conversation_id: UUID | None = Field(
default=None, description='The id of the app_conversation, if READY'
)
sandbox_id: str | None = Field(
default=None, description='The id of the sandbox, if READY'
)
agent_server_url: str | None = Field(
default=None, description='The agent server url, if READY'
)
request: AppConversationStartRequest
created_at: datetime = Field(default_factory=utc_now)
updated_at: datetime = Field(default_factory=utc_now)
class AppConversationStartTaskPage(BaseModel):
items: list[AppConversationStartTask]
next_page_id: str | None = None
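A hedged sketch of how a client might drive this start-task flow against the conversation router added in this change; the base URL, payload, and auth handling are assumptions:

```python
# Hypothetical polling sketch; POST creates a start task, then the task is
# polled via the start-tasks batch route until it is READY or ERROR.
import asyncio

import httpx

BASE_URL = "http://localhost:3000"  # assumption


async def start_and_wait() -> None:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # Kick off the background start task (all request fields are optional)
        response = await client.post("/app-conversations", json={})
        response.raise_for_status()
        task = response.json()

        # Poll the start task until it reaches a terminal status
        while task["status"] not in ("READY", "ERROR"):
            await asyncio.sleep(2)
            response = await client.get(
                "/app-conversations/start-tasks", params={"ids": [task["id"]]}
            )
            response.raise_for_status()
            task = response.json()[0]

        print(task["status"], task.get("app_conversation_id"))


asyncio.run(start_and_wait())
```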

View File

@ -0,0 +1,307 @@
"""Sandboxed Conversation router for OpenHands Server."""
import asyncio
import sys
from datetime import datetime
from typing import Annotated, AsyncGenerator
from uuid import UUID
import httpx
from openhands.app_server.services.db_session_injector import set_db_session_keep_open
from openhands.app_server.services.httpx_client_injector import (
set_httpx_client_keep_open,
)
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR
from openhands.app_server.user.user_context import UserContext
# Handle anext compatibility for Python < 3.10
if sys.version_info >= (3, 10):
from builtins import anext
else:
async def anext(async_iterator):
"""Compatibility function for anext in Python < 3.10"""
return await async_iterator.__anext__()
from fastapi import APIRouter, Query, Request
from fastapi.responses import StreamingResponse
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversation,
AppConversationPage,
AppConversationStartRequest,
AppConversationStartTask,
AppConversationStartTaskPage,
AppConversationStartTaskSortOrder,
)
from openhands.app_server.app_conversation.app_conversation_service import (
AppConversationService,
)
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
AppConversationStartTaskService,
)
from openhands.app_server.config import (
depends_app_conversation_service,
depends_app_conversation_start_task_service,
depends_db_session,
depends_httpx_client,
depends_user_context,
get_app_conversation_service,
)
router = APIRouter(prefix='/app-conversations', tags=['Conversations'])
app_conversation_service_dependency = depends_app_conversation_service()
app_conversation_start_task_service_dependency = (
depends_app_conversation_start_task_service()
)
user_context_dependency = depends_user_context()
db_session_dependency = depends_db_session()
httpx_client_dependency = depends_httpx_client()
# Read methods
@router.get('/search')
async def search_app_conversations(
title__contains: Annotated[
str | None,
Query(title='Filter by title containing this string'),
] = None,
created_at__gte: Annotated[
datetime | None,
Query(title='Filter by created_at greater than or equal to this datetime'),
] = None,
created_at__lt: Annotated[
datetime | None,
Query(title='Filter by created_at less than this datetime'),
] = None,
updated_at__gte: Annotated[
datetime | None,
Query(title='Filter by updated_at greater than or equal to this datetime'),
] = None,
updated_at__lt: Annotated[
datetime | None,
Query(title='Filter by updated_at less than this datetime'),
] = None,
page_id: Annotated[
str | None,
Query(title='Optional next_page_id from the previously returned page'),
] = None,
limit: Annotated[
int,
Query(
title='The max number of results in the page',
gt=0,
lte=100,
),
] = 100,
app_conversation_service: AppConversationService = (
app_conversation_service_dependency
),
) -> AppConversationPage:
"""Search / List sandboxed conversations."""
assert limit > 0
assert limit <= 100
return await app_conversation_service.search_app_conversations(
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
page_id=page_id,
limit=limit,
)
@router.get('/count')
async def count_app_conversations(
title__contains: Annotated[
str | None,
Query(title='Filter by title containing this string'),
] = None,
created_at__gte: Annotated[
datetime | None,
Query(title='Filter by created_at greater than or equal to this datetime'),
] = None,
created_at__lt: Annotated[
datetime | None,
Query(title='Filter by created_at less than this datetime'),
] = None,
updated_at__gte: Annotated[
datetime | None,
Query(title='Filter by updated_at greater than or equal to this datetime'),
] = None,
updated_at__lt: Annotated[
datetime | None,
Query(title='Filter by updated_at less than this datetime'),
] = None,
app_conversation_service: AppConversationService = (
app_conversation_service_dependency
),
) -> int:
"""Count sandboxed conversations matching the given filters."""
return await app_conversation_service.count_app_conversations(
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
)
@router.get('')
async def batch_get_app_conversations(
ids: Annotated[list[UUID], Query()],
app_conversation_service: AppConversationService = (
app_conversation_service_dependency
),
) -> list[AppConversation | None]:
"""Get a batch of sandboxed conversations given their ids. Return None for any missing."""
assert len(ids) < 100
app_conversations = await app_conversation_service.batch_get_app_conversations(ids)
return app_conversations
@router.post('')
async def start_app_conversation(
request: Request,
start_request: AppConversationStartRequest,
db_session: AsyncSession = db_session_dependency,
httpx_client: httpx.AsyncClient = httpx_client_dependency,
app_conversation_service: AppConversationService = (
app_conversation_service_dependency
),
) -> AppConversationStartTask:
"""Start an app conversation start task and return it."""
# Because we are processing after the request finishes, keep the db connection open
set_db_session_keep_open(request.state, True)
set_httpx_client_keep_open(request.state, True)
async_iter = app_conversation_service.start_app_conversation(start_request)
result = await anext(async_iter)
asyncio.create_task(_consume_remaining(async_iter, db_session, httpx_client))
return result
@router.post('/stream-start')
async def stream_app_conversation_start(
request: AppConversationStartRequest,
user_context: UserContext = user_context_dependency,
) -> list[AppConversationStartTask]:
"""Start an app conversation start task and stream updates from it.
Leaves the connection open until either the conversation starts or an error occurs."""
response = StreamingResponse(
_stream_app_conversation_start(request, user_context),
media_type='application/json',
)
return response
@router.get('/start-tasks/search')
async def search_app_conversation_start_tasks(
conversation_id__eq: Annotated[
UUID | None,
Query(title='Filter by conversation ID equal to this value'),
] = None,
sort_order: Annotated[
AppConversationStartTaskSortOrder,
Query(title='Sort order for the results'),
] = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
page_id: Annotated[
str | None,
Query(title='Optional next_page_id from the previously returned page'),
] = None,
limit: Annotated[
int,
Query(
title='The max number of results in the page',
gt=0,
lte=100,
),
] = 100,
app_conversation_start_task_service: AppConversationStartTaskService = (
app_conversation_start_task_service_dependency
),
) -> AppConversationStartTaskPage:
"""Search / List conversation start tasks."""
assert limit > 0
assert limit <= 100
return (
await app_conversation_start_task_service.search_app_conversation_start_tasks(
conversation_id__eq=conversation_id__eq,
sort_order=sort_order,
page_id=page_id,
limit=limit,
)
)
@router.get('/start-tasks/count')
async def count_app_conversation_start_tasks(
conversation_id__eq: Annotated[
UUID | None,
Query(title='Filter by conversation ID equal to this value'),
] = None,
app_conversation_start_task_service: AppConversationStartTaskService = (
app_conversation_start_task_service_dependency
),
) -> int:
"""Count conversation start tasks matching the given filters."""
return await app_conversation_start_task_service.count_app_conversation_start_tasks(
conversation_id__eq=conversation_id__eq,
)
@router.get('/start-tasks')
async def batch_get_app_conversation_start_tasks(
ids: Annotated[list[UUID], Query()],
app_conversation_start_task_service: AppConversationStartTaskService = (
app_conversation_start_task_service_dependency
),
) -> list[AppConversationStartTask | None]:
"""Get a batch of start app conversation tasks given their ids. Return None for any missing."""
assert len(ids) < 100
start_tasks = await app_conversation_start_task_service.batch_get_app_conversation_start_tasks(
ids
)
return start_tasks
async def _consume_remaining(
async_iter, db_session: AsyncSession, httpx_client: httpx.AsyncClient
):
"""Consume the remaining items from an async iterator"""
try:
while True:
await anext(async_iter)
except StopAsyncIteration:
return
finally:
await db_session.close()
await httpx_client.aclose()
async def _stream_app_conversation_start(
request: AppConversationStartRequest,
user_context: UserContext,
) -> AsyncGenerator[str, None]:
"""Stream a json list, item by item."""
# Because the original dependencies are closed after the method returns, we need
# a new dependency context which will continue intil the stream finishes.
state = InjectorState()
setattr(state, USER_CONTEXT_ATTR, user_context)
async with get_app_conversation_service(state) as app_conversation_service:
yield '[\n'
comma = False
async for task in app_conversation_service.start_app_conversation(request):
chunk = task.model_dump_json()
if comma:
chunk = ',\n' + chunk
comma = True
yield chunk
yield ']'
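A hedged sketch of consuming the streamed start endpoint above; it assumes each task update is flushed on its own line, which matches how `_stream_app_conversation_start` emits chunks (base URL, payload, and auth are assumptions):

```python
# Hypothetical streaming client for /app-conversations/stream-start.
import asyncio
import json

import httpx

BASE_URL = "http://localhost:3000"  # assumption


async def stream_start(payload: dict) -> None:
    async with httpx.AsyncClient(base_url=BASE_URL, timeout=None) as client:
        async with client.stream(
            "POST", "/app-conversations/stream-start", json=payload
        ) as response:
            response.raise_for_status()
            async for line in response.aiter_lines():
                # Strip the JSON-array framing ("[", trailing commas, final "]")
                line = line.strip().rstrip(",").rstrip("]")
                if line in ("", "["):
                    continue
                task = json.loads(line)
                print(task["status"], task.get("detail"))


asyncio.run(stream_start({}))
```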

View File

@ -0,0 +1,100 @@
import asyncio
from abc import ABC, abstractmethod
from datetime import datetime
from typing import AsyncGenerator
from uuid import UUID
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversation,
AppConversationPage,
AppConversationSortOrder,
AppConversationStartRequest,
AppConversationStartTask,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk import Workspace
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class AppConversationService(ABC):
"""Service for managing conversations running in sandboxes."""
@abstractmethod
async def search_app_conversations(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 100,
) -> AppConversationPage:
"""Search for sandboxed conversations."""
@abstractmethod
async def count_app_conversations(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
) -> int:
"""Count sandboxed conversations."""
@abstractmethod
async def get_app_conversation(
self, conversation_id: UUID
) -> AppConversation | None:
"""Get a single sandboxed conversation info. Return None if missing."""
async def batch_get_app_conversations(
self, conversation_ids: list[UUID]
) -> list[AppConversation | None]:
"""Get a batch of sandboxed conversations, returning None for any missing."""
return await asyncio.gather(
*[
self.get_app_conversation(conversation_id)
for conversation_id in conversation_ids
]
)
@abstractmethod
async def start_app_conversation(
self, request: AppConversationStartRequest
) -> AsyncGenerator[AppConversationStartTask, None]:
"""Start a conversation, optionally specifying a sandbox in which to start.
If no sandbox is specified a default may be used or started. This is a convenience
method - the same effect should be achievable by creating / getting a sandbox
id, starting a conversation, attaching a callback, and then running the
conversation.
Yields an instance of AppConversationStartTask as updates occur, which can be used to determine
the progress of the task.
"""
# This is an abstract method - concrete implementations should provide real values
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartRequest,
)
dummy_request = AppConversationStartRequest()
yield AppConversationStartTask(
created_by_user_id='dummy',
request=dummy_request,
)
@abstractmethod
async def run_setup_scripts(
self, task: AppConversationStartTask, workspace: Workspace
) -> AsyncGenerator[AppConversationStartTask, None]:
"""Run the setup scripts for the project and yield status updates"""
yield task
class AppConversationServiceInjector(
DiscriminatedUnionMixin, Injector[AppConversationService], ABC
):
pass

View File

@ -0,0 +1,63 @@
import asyncio
from abc import ABC, abstractmethod
from uuid import UUID
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartTask,
AppConversationStartTaskPage,
AppConversationStartTaskSortOrder,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class AppConversationStartTaskService(ABC):
"""Service for accessing start tasks for conversations."""
@abstractmethod
async def search_app_conversation_start_tasks(
self,
conversation_id__eq: UUID | None = None,
sort_order: AppConversationStartTaskSortOrder = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 100,
) -> AppConversationStartTaskPage:
"""Search for conversation start tasks."""
@abstractmethod
async def count_app_conversation_start_tasks(
self,
conversation_id__eq: UUID | None = None,
) -> int:
"""Count conversation start tasks."""
@abstractmethod
async def get_app_conversation_start_task(
self, task_id: UUID
) -> AppConversationStartTask | None:
"""Get a single start task, returning None if missing."""
async def batch_get_app_conversation_start_tasks(
self, task_ids: list[UUID]
) -> list[AppConversationStartTask | None]:
"""Get a batch of start tasks, return None for any missing."""
return await asyncio.gather(
*[self.get_app_conversation_start_task(task_id) for task_id in task_ids]
)
# Mutators
@abstractmethod
async def save_app_conversation_start_task(
self, info: AppConversationStartTask
) -> AppConversationStartTask:
"""Store the start task object given.
Return the stored task
"""
class AppConversationStartTaskServiceInjector(
DiscriminatedUnionMixin, Injector[AppConversationStartTaskService], ABC
):
pass

View File

@ -0,0 +1 @@
This directory contains files used in git configuration.

View File

@ -0,0 +1,11 @@
#!/bin/bash
# This hook was installed by OpenHands
# It calls the pre-commit script in the .openhands directory
if [ -x ".openhands/pre-commit.sh" ]; then
source ".openhands/pre-commit.sh"
exit $?
else
echo "Warning: .openhands/pre-commit.sh not found or not executable"
exit 0
fi

View File

@ -0,0 +1,151 @@
import logging
import os
import tempfile
from abc import ABC
from dataclasses import dataclass
from pathlib import Path
from typing import AsyncGenerator
import base62
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartTask,
AppConversationStartTaskStatus,
)
from openhands.app_server.app_conversation.app_conversation_service import (
AppConversationService,
)
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.utils.async_remote_workspace import AsyncRemoteWorkspace
_logger = logging.getLogger(__name__)
PRE_COMMIT_HOOK = '.git/hooks/pre-commit'
PRE_COMMIT_LOCAL = '.git/hooks/pre-commit.local'
@dataclass
class GitAppConversationService(AppConversationService, ABC):
"""App Conversation service which adds git specific functionality.
Sets up repositories and installs hooks"""
init_git_in_empty_workspace: bool
user_context: UserContext
async def run_setup_scripts(
self,
task: AppConversationStartTask,
workspace: AsyncRemoteWorkspace,
) -> AsyncGenerator[AppConversationStartTask, None]:
task.status = AppConversationStartTaskStatus.PREPARING_REPOSITORY
yield task
await self.clone_or_init_git_repo(task, workspace)
task.status = AppConversationStartTaskStatus.RUNNING_SETUP_SCRIPT
yield task
await self.maybe_run_setup_script(workspace)
task.status = AppConversationStartTaskStatus.SETTING_UP_GIT_HOOKS
yield task
await self.maybe_setup_git_hooks(workspace)
async def clone_or_init_git_repo(
self,
task: AppConversationStartTask,
workspace: AsyncRemoteWorkspace,
):
request = task.request
if not request.selected_repository:
if self.init_git_in_empty_workspace:
_logger.debug('Initializing a new git repository in the workspace.')
await workspace.execute_command(
'git init && git config --global --add safe.directory '
+ workspace.working_dir
)
else:
_logger.info('Not initializing a new git repository.')
return
remote_repo_url: str = await self.user_context.get_authenticated_git_url(
request.selected_repository
)
if not remote_repo_url:
raise ValueError('Missing either Git token or valid repository')
dir_name = request.selected_repository.split('/')[-1]
# Clone the repo - this is the slow part!
clone_command = f'git clone {remote_repo_url} {dir_name}'
await workspace.execute_command(clone_command, workspace.working_dir)
# Checkout the appropriate branch
if request.selected_branch:
checkout_command = f'git checkout {request.selected_branch}'
else:
# Generate a random branch name to avoid conflicts
random_str = base62.encodebytes(os.urandom(16))
openhands_workspace_branch = f'openhands-workspace-{random_str}'
checkout_command = f'git checkout -b {openhands_workspace_branch}'
await workspace.execute_command(checkout_command, workspace.working_dir)
async def maybe_run_setup_script(
self,
workspace: AsyncRemoteWorkspace,
):
"""Run .openhands/setup.sh if it exists in the workspace or repository."""
setup_script = workspace.working_dir + '/.openhands/setup.sh'
await workspace.execute_command(
f'chmod +x {setup_script} && source {setup_script}', timeout=600
)
# TODO: Does this need to be done?
# Add the action to the event stream as an ENVIRONMENT event
# source = EventSource.ENVIRONMENT
# self.event_stream.add_event(action, source)
async def maybe_setup_git_hooks(
self,
workspace: AsyncRemoteWorkspace,
):
"""Set up git hooks if .openhands/pre-commit.sh exists in the workspace or repository."""
command = 'mkdir -p .git/hooks && chmod +x .openhands/pre-commit.sh'
result = await workspace.execute_command(command, workspace.working_dir)
if result.exit_code:
return
# Check if there's an existing pre-commit hook
with tempfile.TemporaryFile(mode='w+t') as temp_file:
result = workspace.file_download(PRE_COMMIT_HOOK, str(temp_file))
if result.get('success'):
_logger.info('Preserving existing pre-commit hook')
# an existing pre-commit hook exists
if 'This hook was installed by OpenHands' not in temp_file.read():
# Move the existing hook to pre-commit.local
command = (
f'mv {PRE_COMMIT_HOOK} {PRE_COMMIT_LOCAL} &&'
f'chmod +x {PRE_COMMIT_LOCAL}'
)
result = await workspace.execute_command(
command, workspace.working_dir
)
if result.exit_code != 0:
_logger.error(
f'Failed to preserve existing pre-commit hook: {result.stderr}',
)
return
# write the pre-commit hook
await workspace.file_upload(
source_path=Path(__file__).parent / 'git' / 'pre-commit.sh',
destination_path=PRE_COMMIT_HOOK,
)
# Make the pre-commit hook executable
result = await workspace.execute_command(f'chmod +x {PRE_COMMIT_HOOK}')
if result.exit_code:
_logger.error(f'Failed to make pre-commit hook executable: {result.stderr}')
return
_logger.info('Git pre-commit hook installed successfully')

View File

@ -0,0 +1,548 @@
import asyncio
import logging
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime, timedelta
from time import time
from typing import AsyncGenerator, Sequence
from uuid import UUID
import httpx
from fastapi import Request
from pydantic import Field, SecretStr, TypeAdapter
from openhands.agent_server.models import (
ConversationInfo,
NeverConfirm,
SendMessageRequest,
StartConversationRequest,
)
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
)
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversation,
AppConversationInfo,
AppConversationPage,
AppConversationSortOrder,
AppConversationStartRequest,
AppConversationStartTask,
AppConversationStartTaskStatus,
)
from openhands.app_server.app_conversation.app_conversation_service import (
AppConversationService,
AppConversationServiceInjector,
)
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
AppConversationStartTaskService,
)
from openhands.app_server.app_conversation.git_app_conversation_service import (
GitAppConversationService,
)
from openhands.app_server.errors import SandboxError
from openhands.app_server.sandbox.docker_sandbox_service import DockerSandboxService
from openhands.app_server.sandbox.sandbox_models import (
AGENT_SERVER,
SandboxInfo,
SandboxStatus,
)
from openhands.app_server.sandbox.sandbox_service import SandboxService
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.services.jwt_service import JwtService
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.utils.async_remote_workspace import AsyncRemoteWorkspace
from openhands.integrations.provider import ProviderType
from openhands.sdk import LocalWorkspace
from openhands.sdk.conversation.secret_source import LookupSecret, StaticSecret
from openhands.sdk.llm import LLM
from openhands.sdk.security.confirmation_policy import AlwaysConfirm
from openhands.tools.preset.default import get_default_agent
_conversation_info_type_adapter = TypeAdapter(list[ConversationInfo | None])
_logger = logging.getLogger(__name__)
GIT_TOKEN = 'GIT_TOKEN'
@dataclass
class LiveStatusAppConversationService(GitAppConversationService):
"""AppConversationService which combines live status info from the sandbox with stored data."""
user_context: UserContext
app_conversation_info_service: AppConversationInfoService
app_conversation_start_task_service: AppConversationStartTaskService
sandbox_service: SandboxService
sandbox_spec_service: SandboxSpecService
jwt_service: JwtService
sandbox_startup_timeout: int
sandbox_startup_poll_frequency: int
httpx_client: httpx.AsyncClient
web_url: str | None
access_token_hard_timeout: timedelta | None
async def search_app_conversations(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 20,
) -> AppConversationPage:
"""Search for sandboxed conversations."""
page = await self.app_conversation_info_service.search_app_conversation_info(
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
sort_order=sort_order,
page_id=page_id,
limit=limit,
)
conversations: list[AppConversation] = await self._build_app_conversations(
page.items
) # type: ignore
return AppConversationPage(items=conversations, next_page_id=page.next_page_id)
async def count_app_conversations(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
) -> int:
return await self.app_conversation_info_service.count_app_conversation_info(
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
)
async def get_app_conversation(
self, conversation_id: UUID
) -> AppConversation | None:
info = await self.app_conversation_info_service.get_app_conversation_info(
conversation_id
)
result = await self._build_app_conversations([info])
return result[0]
async def batch_get_app_conversations(
self, conversation_ids: list[UUID]
) -> list[AppConversation | None]:
info = await self.app_conversation_info_service.batch_get_app_conversation_info(
conversation_ids
)
conversations = await self._build_app_conversations(info)
return conversations
async def start_app_conversation(
self, request: AppConversationStartRequest
) -> AsyncGenerator[AppConversationStartTask, None]:
async for task in self._start_app_conversation(request):
await self.app_conversation_start_task_service.save_app_conversation_start_task(
task
)
yield task
async def _start_app_conversation(
self, request: AppConversationStartRequest
) -> AsyncGenerator[AppConversationStartTask, None]:
# Create and yield the start task
user_id = await self.user_context.get_user_id()
task = AppConversationStartTask(
created_by_user_id=user_id,
request=request,
)
yield task
try:
async for updated_task in self._wait_for_sandbox_start(task):
yield updated_task
# Get the sandbox
sandbox_id = task.sandbox_id
assert sandbox_id is not None
sandbox = await self.sandbox_service.get_sandbox(sandbox_id)
assert sandbox is not None
agent_server_url = self._get_agent_server_url(sandbox)
# Get the working dir
sandbox_spec = await self.sandbox_spec_service.get_sandbox_spec(
sandbox.sandbox_spec_id
)
assert sandbox_spec is not None
# Run setup scripts
workspace = AsyncRemoteWorkspace(
working_dir=sandbox_spec.working_dir,
server_url=agent_server_url,
session_api_key=sandbox.session_api_key,
)
async for updated_task in self.run_setup_scripts(task, workspace):
yield updated_task
# Build the start request
start_conversation_request = (
await self._build_start_conversation_request_for_user(
request.initial_message,
request.git_provider,
sandbox_spec.working_dir,
)
)
# update status
task.status = AppConversationStartTaskStatus.STARTING_CONVERSATION
task.agent_server_url = agent_server_url
yield task
# Start conversation...
response = await self.httpx_client.post(
f'{agent_server_url}/api/conversations',
json=start_conversation_request.model_dump(
context={'expose_secrets': True}
),
headers={'X-Session-API-Key': sandbox.session_api_key},
timeout=self.sandbox_startup_timeout,
)
response.raise_for_status()
info = ConversationInfo.model_validate(response.json())
# Store info...
user_id = await self.user_context.get_user_id()
app_conversation_info = AppConversationInfo(
id=info.id,
# TODO: As of writing, StartConversationRequest from AgentServer does not have a title
title=f'Conversation {info.id}',
sandbox_id=sandbox.id,
created_by_user_id=user_id,
llm_model=start_conversation_request.agent.llm.model,
# Git parameters
selected_repository=request.selected_repository,
selected_branch=request.selected_branch,
git_provider=request.git_provider,
trigger=request.trigger,
pr_number=request.pr_number,
)
await self.app_conversation_info_service.save_app_conversation_info(
app_conversation_info
)
# Update the start task
task.status = AppConversationStartTaskStatus.READY
task.app_conversation_id = info.id
yield task
except Exception as exc:
_logger.exception('Error starting conversation', stack_info=True)
task.status = AppConversationStartTaskStatus.ERROR
task.detail = str(exc)
yield task
async def _build_app_conversations(
self, app_conversation_infos: Sequence[AppConversationInfo | None]
) -> list[AppConversation | None]:
sandbox_id_to_conversation_ids = self._get_sandbox_id_to_conversation_ids(
app_conversation_infos
)
# Get referenced sandboxes in a single batch operation...
sandboxes = await self.sandbox_service.batch_get_sandboxes(
list(sandbox_id_to_conversation_ids)
)
sandboxes_by_id = {sandbox.id: sandbox for sandbox in sandboxes if sandbox}
# Gather the running conversations
tasks = [
self._get_live_conversation_info(
sandbox, sandbox_id_to_conversation_ids.get(sandbox.id)
)
for sandbox in sandboxes
if sandbox and sandbox.status == SandboxStatus.RUNNING
]
if tasks:
sandbox_conversation_infos = await asyncio.gather(*tasks)
else:
sandbox_conversation_infos = []
# Collect the results into a single dictionary
conversation_info_by_id = {}
for conversation_infos in sandbox_conversation_infos:
for conversation_info in conversation_infos:
conversation_info_by_id[conversation_info.id] = conversation_info
# Build app_conversation from info
result = [
self._build_conversation(
app_conversation_info,
sandboxes_by_id.get(app_conversation_info.sandbox_id),
conversation_info_by_id.get(app_conversation_info.id),
)
if app_conversation_info
else None
for app_conversation_info in app_conversation_infos
]
return result
async def _get_live_conversation_info(
self,
sandbox: SandboxInfo,
conversation_ids: list[str],
) -> list[ConversationInfo]:
"""Get agent status for multiple conversations from the Agent Server."""
try:
# Build the URL with query parameters
agent_server_url = self._get_agent_server_url(sandbox)
url = f'{agent_server_url.rstrip("/")}/api/conversations'
params = {'ids': conversation_ids}
# Set up headers
headers = {}
if sandbox.session_api_key:
headers['X-Session-API-Key'] = sandbox.session_api_key
response = await self.httpx_client.get(url, params=params, headers=headers)
response.raise_for_status()
data = response.json()
conversation_info = _conversation_info_type_adapter.validate_python(data)
conversation_info = [c for c in conversation_info if c]
return conversation_info
except Exception:
# Not getting a status is not a fatal error - we just mark the conversation as stopped
_logger.exception(
f'Error getting conversation status from sandbox {sandbox.id}',
stack_info=True,
)
return []
def _build_conversation(
self,
app_conversation_info: AppConversationInfo | None,
sandbox: SandboxInfo | None,
conversation_info: ConversationInfo | None,
) -> AppConversation | None:
if app_conversation_info is None:
return None
sandbox_status = sandbox.status if sandbox else SandboxStatus.MISSING
agent_status = conversation_info.agent_status if conversation_info else None
conversation_url = None
session_api_key = None
if sandbox and sandbox.exposed_urls:
conversation_url = next(
(
exposed_url.url
for exposed_url in sandbox.exposed_urls
if exposed_url.name == AGENT_SERVER
),
None,
)
if conversation_url:
conversation_url += f'/api/conversations/{app_conversation_info.id.hex}'
session_api_key = sandbox.session_api_key
return AppConversation(
**app_conversation_info.model_dump(),
sandbox_status=sandbox_status,
agent_status=agent_status,
conversation_url=conversation_url,
session_api_key=session_api_key,
)
def _get_sandbox_id_to_conversation_ids(
self, stored_conversations: Sequence[AppConversationInfo | None]
):
result = defaultdict(list)
for stored_conversation in stored_conversations:
if stored_conversation:
result[stored_conversation.sandbox_id].append(stored_conversation.id)
return result
async def _wait_for_sandbox_start(
self, task: AppConversationStartTask
) -> AsyncGenerator[AppConversationStartTask, None]:
"""Wait for sandbox to start and return info."""
# Get the sandbox
if not task.request.sandbox_id:
sandbox = await self.sandbox_service.start_sandbox()
task.sandbox_id = sandbox.id
else:
sandbox_info = await self.sandbox_service.get_sandbox(
task.request.sandbox_id
)
if sandbox_info is None:
raise SandboxError(f'Sandbox not found: {task.request.sandbox_id}')
sandbox = sandbox_info
# Update the listener
task.status = AppConversationStartTaskStatus.WAITING_FOR_SANDBOX
task.sandbox_id = sandbox.id
yield task
if sandbox.status == SandboxStatus.PAUSED:
await self.sandbox_service.resume_sandbox(sandbox.id)
if sandbox.status in (None, SandboxStatus.ERROR):
raise SandboxError(f'Sandbox status: {sandbox.status}')
if sandbox.status == SandboxStatus.RUNNING:
return
if sandbox.status != SandboxStatus.STARTING:
raise SandboxError(f'Sandbox not startable: {sandbox.id}')
start = time()
while time() - start <= self.sandbox_startup_timeout:
await asyncio.sleep(self.sandbox_startup_poll_frequency)
sandbox_info = await self.sandbox_service.get_sandbox(sandbox.id)
if sandbox_info is None:
raise SandboxError(f'Sandbox not found: {sandbox.id}')
if sandbox_info.status not in (SandboxStatus.STARTING, SandboxStatus.RUNNING):
raise SandboxError(f'Sandbox not startable: {sandbox.id}')
if sandbox_info.status == SandboxStatus.RUNNING:
return
raise SandboxError(f'Sandbox failed to start: {sandbox.id}')
def _get_agent_server_url(self, sandbox: SandboxInfo) -> str:
"""Get agent server url for running sandbox."""
exposed_urls = sandbox.exposed_urls
assert exposed_urls is not None
agent_server_url = next(
exposed_url.url
for exposed_url in exposed_urls
if exposed_url.name == AGENT_SERVER
)
return agent_server_url
async def _build_start_conversation_request_for_user(
self,
initial_message: SendMessageRequest | None,
git_provider: ProviderType | None,
working_dir: str,
) -> StartConversationRequest:
user = await self.user_context.get_user_info()
# Set up a secret for the git token
secrets = await self.user_context.get_secrets()
if git_provider:
if self.web_url:
# If there is a web url, then we create an access token to access it.
# For security reasons, we are explicit here - only this user, and
# only this provider, with a timeout
access_token = self.jwt_service.create_jws_token(
payload={
'user_id': user.id,
'provider_type': git_provider.value,
},
expires_in=self.access_token_hard_timeout,
)
secrets[GIT_TOKEN] = LookupSecret(
url=self.web_url + '/ap/v1/webhooks/secrets',
headers={'X-Access-Token': access_token},
)
else:
# If there is no URL specified where the sandbox can access the app server
# then we supply a static secret with the most recent value. Depending
# on the type, this may eventually expire.
static_token = await self.user_context.get_latest_token(git_provider)
if static_token:
secrets[GIT_TOKEN] = StaticSecret(value=SecretStr(static_token))
workspace = LocalWorkspace(working_dir=working_dir)
llm = LLM(
model=user.llm_model,
base_url=user.llm_base_url,
api_key=user.llm_api_key,
service_id='agent',
)
agent = get_default_agent(llm=llm)
start_conversation_request = StartConversationRequest(
agent=agent,
workspace=workspace,
confirmation_policy=AlwaysConfirm()
if user.confirmation_mode
else NeverConfirm(),
initial_message=initial_message,
secrets=secrets,
)
return start_conversation_request
class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector):
sandbox_startup_timeout: int = Field(
default=120, description='The max timeout time for sandbox startup'
)
sandbox_startup_poll_frequency: int = Field(
default=2, description='The frequency to poll for sandbox readiness'
)
init_git_in_empty_workspace: bool = Field(
default=True,
description='Whether to initialize a git repo when the workspace is empty',
)
access_token_hard_timeout: int | None = Field(
default=14 * 86400,
description=(
'A security measure - the time after which git tokens may no longer '
'be retrieved by a sandboxed conversation.'
),
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[AppConversationService, None]:
from openhands.app_server.config import (
get_app_conversation_info_service,
get_app_conversation_start_task_service,
get_global_config,
get_httpx_client,
get_jwt_service,
get_sandbox_service,
get_sandbox_spec_service,
get_user_context,
)
async with (
get_user_context(state, request) as user_context,
get_sandbox_service(state, request) as sandbox_service,
get_sandbox_spec_service(state, request) as sandbox_spec_service,
get_app_conversation_info_service(
state, request
) as app_conversation_info_service,
get_app_conversation_start_task_service(
state, request
) as app_conversation_start_task_service,
get_jwt_service(state, request) as jwt_service,
get_httpx_client(state, request) as httpx_client,
):
access_token_hard_timeout = None
if self.access_token_hard_timeout:
access_token_hard_timeout = timedelta(
seconds=float(self.access_token_hard_timeout)
)
config = get_global_config()
# If no web url has been set and we are using docker, we can use host.docker.internal
web_url = config.web_url
if web_url is None:
if isinstance(sandbox_service, DockerSandboxService):
web_url = f'http://host.docker.internal:{sandbox_service.host_port}'
yield LiveStatusAppConversationService(
init_git_in_empty_workspace=self.init_git_in_empty_workspace,
user_context=user_context,
sandbox_service=sandbox_service,
sandbox_spec_service=sandbox_spec_service,
app_conversation_info_service=app_conversation_info_service,
app_conversation_start_task_service=app_conversation_start_task_service,
jwt_service=jwt_service,
sandbox_startup_timeout=self.sandbox_startup_timeout,
sandbox_startup_poll_frequency=self.sandbox_startup_poll_frequency,
httpx_client=httpx_client,
web_url=web_url,
access_token_hard_timeout=access_token_hard_timeout,
)

View File

@ -0,0 +1,398 @@
"""SQL implementation of AppConversationService.
This implementation provides CRUD operations for sandboxed conversations focused purely
on SQL operations:
- Direct database access without permission checks
- Batch operations for efficient data retrieval
- Integration with SandboxService for sandbox information
- HTTP client integration for agent status retrieval
- Full async/await support using SQL async db_sessions
Security and permission checks are handled by wrapper services.
Key components:
- SQLAppConversationService: Main service class implementing all operations
- SQLAppConversationInfoServiceInjector: Dependency injection resolver for FastAPI
"""
from __future__ import annotations
import logging
import uuid
from dataclasses import dataclass
from datetime import UTC, datetime
from typing import AsyncGenerator
from uuid import UUID
from fastapi import Request
from sqlalchemy import Column, DateTime, Float, Integer, Select, String, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.agent_server.utils import utc_now
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
AppConversationInfoServiceInjector,
)
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationInfo,
AppConversationInfoPage,
AppConversationSortOrder,
)
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.utils.sql_utils import (
Base,
create_json_type_decorator,
)
from openhands.integrations.provider import ProviderType
from openhands.sdk.llm import MetricsSnapshot
from openhands.sdk.llm.utils.metrics import TokenUsage
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
logger = logging.getLogger(__name__)
class StoredConversationMetadata(Base): # type: ignore
__tablename__ = 'conversation_metadata'
conversation_id = Column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
github_user_id = Column(String, nullable=True) # The GitHub user ID
user_id = Column(String, nullable=False) # The Keycloak User ID
selected_repository = Column(String, nullable=True)
selected_branch = Column(String, nullable=True)
git_provider = Column(
String, nullable=True
) # The git provider (GitHub, GitLab, etc.)
title = Column(String, nullable=True)
last_updated_at = Column(DateTime(timezone=True), default=utc_now) # type: ignore[attr-defined]
created_at = Column(DateTime(timezone=True), default=utc_now) # type: ignore[attr-defined]
trigger = Column(String, nullable=True)
pr_number = Column(create_json_type_decorator(list[int]))
# Cost and token metrics
accumulated_cost = Column(Float, default=0.0)
prompt_tokens = Column(Integer, default=0)
completion_tokens = Column(Integer, default=0)
total_tokens = Column(Integer, default=0)
max_budget_per_task = Column(Float, nullable=True)
cache_read_tokens = Column(Integer, default=0)
cache_write_tokens = Column(Integer, default=0)
reasoning_tokens = Column(Integer, default=0)
context_window = Column(Integer, default=0)
per_turn_token = Column(Integer, default=0)
# LLM model used for the conversation
llm_model = Column(String, nullable=True)
conversation_version = Column(String, nullable=False, default='V0', index=True)
sandbox_id = Column(String, nullable=True, index=True)
@dataclass
class SQLAppConversationInfoService(AppConversationInfoService):
"""SQL implementation of AppConversationInfoService focused on db operations.
This allows storing a record of a conversation even after its sandbox ceases to exist
"""
db_session: AsyncSession
user_context: UserContext
async def search_app_conversation_info(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
sort_order: AppConversationSortOrder = AppConversationSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 100,
) -> AppConversationInfoPage:
"""Search for sandboxed conversations without permission checks."""
query = await self._secure_select()
query = self._apply_filters(
query=query,
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
)
# Add sort order
if sort_order == AppConversationSortOrder.CREATED_AT:
query = query.order_by(StoredConversationMetadata.created_at)
elif sort_order == AppConversationSortOrder.CREATED_AT_DESC:
query = query.order_by(StoredConversationMetadata.created_at.desc())
elif sort_order == AppConversationSortOrder.UPDATED_AT:
query = query.order_by(StoredConversationMetadata.last_updated_at)
elif sort_order == AppConversationSortOrder.UPDATED_AT_DESC:
query = query.order_by(StoredConversationMetadata.last_updated_at.desc())
elif sort_order == AppConversationSortOrder.TITLE:
query = query.order_by(StoredConversationMetadata.title)
elif sort_order == AppConversationSortOrder.TITLE_DESC:
query = query.order_by(StoredConversationMetadata.title.desc())
# Apply pagination
if page_id is not None:
try:
offset = int(page_id)
query = query.offset(offset)
except ValueError:
# If page_id is not a valid integer, start from beginning
offset = 0
else:
offset = 0
# Apply limit and get one extra to check if there are more results
query = query.limit(limit + 1)
result = await self.db_session.execute(query)
rows = result.scalars().all()
# Check if there are more results
has_more = len(rows) > limit
if has_more:
rows = rows[:limit]
items = [self._to_info(row) for row in rows]
# Calculate next page ID
next_page_id = None
if has_more:
next_page_id = str(offset + limit)
return AppConversationInfoPage(items=items, next_page_id=next_page_id)
async def count_app_conversation_info(
self,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
) -> int:
"""Count sandboxed conversations matching the given filters."""
query = select(func.count(StoredConversationMetadata.conversation_id))
user_id = await self.user_context.get_user_id()
if user_id:
query = query.where(
StoredConversationMetadata.user_id == user_id
)
query = self._apply_filters(
query=query,
title__contains=title__contains,
created_at__gte=created_at__gte,
created_at__lt=created_at__lt,
updated_at__gte=updated_at__gte,
updated_at__lt=updated_at__lt,
)
result = await self.db_session.execute(query)
count = result.scalar()
return count or 0
def _apply_filters(
self,
query: Select,
title__contains: str | None = None,
created_at__gte: datetime | None = None,
created_at__lt: datetime | None = None,
updated_at__gte: datetime | None = None,
updated_at__lt: datetime | None = None,
) -> Select:
# Apply the same filters as search_app_conversation_info
conditions = []
if title__contains is not None:
conditions.append(
StoredConversationMetadata.title.like(f'%{title__contains}%')
)
if created_at__gte is not None:
conditions.append(StoredConversationMetadata.created_at >= created_at__gte)
if created_at__lt is not None:
conditions.append(StoredConversationMetadata.created_at < created_at__lt)
if updated_at__gte is not None:
conditions.append(
StoredConversationMetadata.last_updated_at >= updated_at__gte
)
if updated_at__lt is not None:
conditions.append(
StoredConversationMetadata.last_updated_at < updated_at__lt
)
if conditions:
query = query.where(*conditions)
return query
async def get_app_conversation_info(
self, conversation_id: UUID
) -> AppConversationInfo | None:
query = await self._secure_select()
query = query.where(
StoredConversationMetadata.conversation_id == str(conversation_id)
)
result_set = await self.db_session.execute(query)
result = result_set.scalar_one_or_none()
if result:
return self._to_info(result)
return None
async def batch_get_app_conversation_info(
self, conversation_ids: list[UUID]
) -> list[AppConversationInfo | None]:
conversation_id_strs = [
str(conversation_id) for conversation_id in conversation_ids
]
query = await self._secure_select()
query = query.where(
StoredConversationMetadata.conversation_id.in_(conversation_id_strs)
)
result = await self.db_session.execute(query)
rows = result.scalars().all()
info_by_id = {info.conversation_id: info for info in rows if info}
results: list[AppConversationInfo | None] = []
for conversation_id in conversation_id_strs:
info = info_by_id.get(conversation_id)
if info:
results.append(self._to_info(info))
else:
results.append(None)
return results
async def save_app_conversation_info(
self, info: AppConversationInfo
) -> AppConversationInfo:
user_id = await self.user_context.get_user_id()
if user_id:
query = select(StoredConversationMetadata).where(
StoredConversationMetadata.conversation_id == str(info.id)
)
result = await self.db_session.execute(query)
existing = result.scalar_one_or_none()
assert existing is None or existing.user_id == user_id
metrics = info.metrics or MetricsSnapshot()
usage = metrics.accumulated_token_usage or TokenUsage()
stored = StoredConversationMetadata(
conversation_id=str(info.id),
github_user_id=None, # TODO: Should we add this to the conversation info?
user_id=info.created_by_user_id or '',
selected_repository=info.selected_repository,
selected_branch=info.selected_branch,
git_provider=info.git_provider.value if info.git_provider else None,
title=info.title,
last_updated_at=info.updated_at,
created_at=info.created_at,
trigger=info.trigger.value if info.trigger else None,
pr_number=info.pr_number,
# Cost and token metrics
accumulated_cost=metrics.accumulated_cost,
prompt_tokens=usage.prompt_tokens,
completion_tokens=usage.completion_tokens,
total_tokens=0,
max_budget_per_task=metrics.max_budget_per_task,
cache_read_tokens=usage.cache_read_tokens,
cache_write_tokens=usage.cache_write_tokens,
context_window=usage.context_window,
per_turn_token=usage.per_turn_token,
llm_model=info.llm_model,
conversation_version='V1',
sandbox_id=info.sandbox_id,
)
await self.db_session.merge(stored)
await self.db_session.commit()
return info
async def _secure_select(self):
query = select(StoredConversationMetadata).where(
StoredConversationMetadata.conversation_version == 'V1'
)
user_id = await self.user_context.get_user_id()
if user_id:
query = query.where(
StoredConversationMetadata.user_id == user_id,
)
return query
def _to_info(self, stored: StoredConversationMetadata) -> AppConversationInfo:
# V1 conversations should always have a sandbox_id
sandbox_id = stored.sandbox_id
assert sandbox_id is not None
# Rebuild token usage
token_usage = TokenUsage(
prompt_tokens=stored.prompt_tokens,
completion_tokens=stored.completion_tokens,
cache_read_tokens=stored.cache_read_tokens,
cache_write_tokens=stored.cache_write_tokens,
context_window=stored.context_window,
per_turn_token=stored.per_turn_token,
)
# Rebuild metrics object
metrics = MetricsSnapshot(
accumulated_cost=stored.accumulated_cost,
max_budget_per_task=stored.max_budget_per_task,
accumulated_token_usage=token_usage,
)
# Get timestamps
created_at = self._fix_timezone(stored.created_at)
updated_at = self._fix_timezone(stored.last_updated_at)
return AppConversationInfo(
id=UUID(stored.conversation_id),
created_by_user_id=stored.user_id if stored.user_id else None,
sandbox_id=stored.sandbox_id,
selected_repository=stored.selected_repository,
selected_branch=stored.selected_branch,
git_provider=ProviderType(stored.git_provider)
if stored.git_provider
else None,
title=stored.title,
trigger=ConversationTrigger(stored.trigger) if stored.trigger else None,
pr_number=stored.pr_number,
llm_model=stored.llm_model,
metrics=metrics,
created_at=created_at,
updated_at=updated_at,
)
def _fix_timezone(self, value: datetime) -> datetime:
"""Sqlite does not stpre timezones - and since we can't update the existing models
we assume UTC if the timezone is missing."""
if not value.tzinfo:
value = value.replace(tzinfo=UTC)
return value
class SQLAppConversationInfoServiceInjector(AppConversationInfoServiceInjector):
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[AppConversationInfoService, None]:
# Define inline to prevent circular lookup
from openhands.app_server.config import (
get_db_session,
get_user_context,
)
async with (
get_user_context(state, request) as user_context,
get_db_session(state, request) as db_session,
):
service = SQLAppConversationInfoService(
db_session=db_session, user_context=user_context
)
yield service

View File

@ -0,0 +1,243 @@
# pyright: reportArgumentType=false, reportAttributeAccessIssue=false, reportOptionalMemberAccess=false
"""SQL implementation of AppConversationStartTaskService.
This implementation provides CRUD operations for conversation start tasks focused purely
on SQL operations:
- Direct database access without permission checks
- Batch operations for efficient data retrieval
- Full async/await support using SQL async sessions
Security and permission checks are handled by wrapper services.
Key components:
- SQLAppConversationStartTaskService: Main service class implementing all operations
- SQLAppConversationStartTaskServiceInjector: Dependency injection resolver for FastAPI
"""
from __future__ import annotations
import logging
from dataclasses import dataclass
from typing import AsyncGenerator
from uuid import UUID
from fastapi import Request
from sqlalchemy import UUID as SQLUUID
from sqlalchemy import Column, Enum, String, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.agent_server.models import utc_now
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartRequest,
AppConversationStartTask,
AppConversationStartTaskPage,
AppConversationStartTaskSortOrder,
AppConversationStartTaskStatus,
)
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
AppConversationStartTaskService,
AppConversationStartTaskServiceInjector,
)
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.utils.sql_utils import (
Base,
UtcDateTime,
create_json_type_decorator,
row2dict,
)
logger = logging.getLogger(__name__)
class StoredAppConversationStartTask(Base): # type: ignore
__tablename__ = 'app_conversation_start_task'
id = Column(SQLUUID, primary_key=True)
created_by_user_id = Column(String, index=True)
status = Column(Enum(AppConversationStartTaskStatus), nullable=True)
detail = Column(String, nullable=True)
app_conversation_id = Column(SQLUUID, nullable=True)
sandbox_id = Column(String, nullable=True)
agent_server_url = Column(String, nullable=True)
request = Column(create_json_type_decorator(AppConversationStartRequest))
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
updated_at = Column(UtcDateTime, onupdate=func.now(), index=True)
@dataclass
class SQLAppConversationStartTaskService(AppConversationStartTaskService):
"""SQL implementation of AppConversationStartTaskService focused on db operations.
This allows storing and retrieving conversation start tasks from the database."""
session: AsyncSession
user_id: str | None = None
async def search_app_conversation_start_tasks(
self,
conversation_id__eq: UUID | None = None,
sort_order: AppConversationStartTaskSortOrder = AppConversationStartTaskSortOrder.CREATED_AT_DESC,
page_id: str | None = None,
limit: int = 100,
) -> AppConversationStartTaskPage:
"""Search for conversation start tasks."""
query = select(StoredAppConversationStartTask)
# Apply user filter if user_id is set
if self.user_id:
query = query.where(
StoredAppConversationStartTask.created_by_user_id == self.user_id
)
# Apply conversation_id filter
if conversation_id__eq is not None:
query = query.where(
StoredAppConversationStartTask.app_conversation_id
== conversation_id__eq
)
# Add sort order
if sort_order == AppConversationStartTaskSortOrder.CREATED_AT:
query = query.order_by(StoredAppConversationStartTask.created_at)
elif sort_order == AppConversationStartTaskSortOrder.CREATED_AT_DESC:
query = query.order_by(StoredAppConversationStartTask.created_at.desc())
elif sort_order == AppConversationStartTaskSortOrder.UPDATED_AT:
query = query.order_by(StoredAppConversationStartTask.updated_at)
elif sort_order == AppConversationStartTaskSortOrder.UPDATED_AT_DESC:
query = query.order_by(StoredAppConversationStartTask.updated_at.desc())
# Apply pagination
if page_id is not None:
try:
offset = int(page_id)
query = query.offset(offset)
except ValueError:
# If page_id is not a valid integer, start from beginning
offset = 0
else:
offset = 0
# Apply limit and get one extra to check if there are more results
query = query.limit(limit + 1)
result = await self.session.execute(query)
rows = result.scalars().all()
# Check if there are more results
has_more = len(rows) > limit
if has_more:
rows = rows[:limit]
items = [AppConversationStartTask(**row2dict(row)) for row in rows]
# Calculate next page ID
next_page_id = None
if has_more:
next_page_id = str(offset + limit)
return AppConversationStartTaskPage(items=items, next_page_id=next_page_id)
async def count_app_conversation_start_tasks(
self,
conversation_id__eq: UUID | None = None,
) -> int:
"""Count conversation start tasks."""
query = select(func.count(StoredAppConversationStartTask.id))
# Apply user filter if user_id is set
if self.user_id:
query = query.where(
StoredAppConversationStartTask.created_by_user_id == self.user_id
)
# Apply conversation_id filter
if conversation_id__eq is not None:
query = query.where(
StoredAppConversationStartTask.app_conversation_id
== conversation_id__eq
)
result = await self.session.execute(query)
count = result.scalar()
return count or 0
async def batch_get_app_conversation_start_tasks(
self, task_ids: list[UUID]
) -> list[AppConversationStartTask | None]:
"""Get a batch of start tasks, return None for any missing."""
if not task_ids:
return []
query = select(StoredAppConversationStartTask).where(
StoredAppConversationStartTask.id.in_(task_ids)
)
if self.user_id:
query = query.where(
StoredAppConversationStartTask.created_by_user_id == self.user_id
)
result = await self.session.execute(query)
tasks_by_id = {task.id: task for task in result.scalars().all()}
# Return tasks in the same order as requested, with None for missing ones
return [
AppConversationStartTask(**row2dict(tasks_by_id[task_id]))
if task_id in tasks_by_id
else None
for task_id in task_ids
]
async def get_app_conversation_start_task(
self, task_id: UUID
) -> AppConversationStartTask | None:
"""Get a single start task, returning None if missing."""
query = select(StoredAppConversationStartTask).where(
StoredAppConversationStartTask.id == task_id
)
if self.user_id:
query = query.where(
StoredAppConversationStartTask.created_by_user_id == self.user_id
)
result = await self.session.execute(query)
stored_task = result.scalar_one_or_none()
if stored_task:
return AppConversationStartTask(**row2dict(stored_task))
return None
async def save_app_conversation_start_task(
self, task: AppConversationStartTask
) -> AppConversationStartTask:
if self.user_id:
query = select(StoredAppConversationStartTask).where(
StoredAppConversationStartTask.id == task.id
)
result = await self.session.execute(query)
existing = result.scalar_one_or_none()
assert existing is None or existing.created_by_user_id == self.user_id
task.updated_at = utc_now()
await self.session.merge(StoredAppConversationStartTask(**task.model_dump()))
await self.session.commit()
return task
class SQLAppConversationStartTaskServiceInjector(
AppConversationStartTaskServiceInjector
):
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[AppConversationStartTaskService, None]:
# Define inline to prevent circular lookup
from openhands.app_server.config import (
get_db_session,
get_user_context,
)
async with (
get_user_context(state, request) as user_context,
get_db_session(state, request) as db_session,
):
user_id = await user_context.get_user_id()
service = SQLAppConversationStartTaskService(
session=db_session, user_id=user_id
)
yield service

View File

@ -0,0 +1,149 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires Python>=3.9 or the backports.zoneinfo library, plus the tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
# Note: The database URL is now configured dynamically in env.py using the DbSessionInjector
# from get_global_config(), so this placeholder is not used.
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1,15 @@
# OpenHands App Server Alembic Integration
This Alembic integration keeps the SQLite database up to date in single-user deployments by managing schema migrations for app_server models. Migrations are applied automatically on startup.
## Configuration
Uses `DbSessionInjector` from `get_global_config()` for database connectivity and auto-detects models from the declarative base in `openhands.app_server.utils.sql_utils.Base`.
## Key Commands
Generate migration from model changes:
```bash
cd openhands/app_server/app_lifespan
alembic revision --autogenerate -m 'Sync DB with Models'
```
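Apply pending migrations manually (normally unnecessary, since they run on startup; `alembic upgrade head` is the standard Alembic upgrade command, run from the same directory as above):
```bash
cd openhands/app_server/app_lifespan
alembic upgrade head
```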

View File

@ -0,0 +1,115 @@
import sys
from logging.config import fileConfig
from pathlib import Path
from alembic import context
# Add the project root to the Python path so we can import OpenHands modules
# From alembic/env.py, we need to go up 5 levels to reach the OpenHands project root
project_root = Path(__file__).absolute().parent.parent.parent.parent.parent
sys.path.insert(0, str(project_root))
# Import the Base metadata for autogenerate support
# Import all models to ensure they are registered with the metadata
# This is necessary for alembic autogenerate to detect all tables
from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( # noqa: E402
StoredConversationMetadata, # noqa: F401
)
from openhands.app_server.app_conversation.sql_app_conversation_start_task_service import ( # noqa: E402
StoredAppConversationStartTask, # noqa: F401
)
from openhands.app_server.config import get_global_config # noqa: E402
from openhands.app_server.event_callback.sql_event_callback_service import ( # noqa: E402
StoredEventCallback, # noqa: F401
)
from openhands.app_server.sandbox.remote_sandbox_service import ( # noqa: E402
StoredRemoteSandbox, # noqa: F401
)
from openhands.app_server.utils.sql_utils import Base # noqa: E402
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
import os
if os.path.exists(config.config_file_name):
fileConfig(config.config_file_name)
else:
# Use basic logging configuration if config file doesn't exist
import logging
logging.basicConfig(level=logging.INFO)
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
# Get database URL from DbSessionInjector
global_config = get_global_config()
db_session = global_config.db_session
# Get the database URL from the DbSessionInjector
if db_session.host:
password_value = (
db_session.password.get_secret_value() if db_session.password else ''
)
url = f'postgresql://{db_session.user}:{password_value}@{db_session.host}:{db_session.port}/{db_session.name}'
else:
url = f'sqlite:///{db_session.persistence_dir}/openhands.db'
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={'paramstyle': 'named'},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# Use the DbSessionInjector engine instead of creating a new one
global_config = get_global_config()
db_session = global_config.db_session
connectable = db_session.get_db_engine()
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,268 @@
"""Sync DB with Models
Revision ID: 001
Revises:
Create Date: 2025-10-05 11:28:41.772294
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationStartTaskStatus,
)
from openhands.app_server.event_callback.event_callback_result_models import (
EventCallbackResultStatus,
)
# revision identifiers, used by Alembic.
revision: str = '001'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
op.create_table(
'app_conversation_start_task',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('created_by_user_id', sa.String(), nullable=True),
sa.Column('status', sa.Enum(AppConversationStartTaskStatus), nullable=True),
sa.Column('detail', sa.String(), nullable=True),
sa.Column('app_conversation_id', sa.UUID(), nullable=True),
sa.Column('sandbox_id', sa.String(), nullable=True),
sa.Column('agent_server_url', sa.String(), nullable=True),
sa.Column('request', sa.JSON(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_app_conversation_start_task_created_at'),
'app_conversation_start_task',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_app_conversation_start_task_created_by_user_id'),
'app_conversation_start_task',
['created_by_user_id'],
unique=False,
)
op.create_index(
op.f('ix_app_conversation_start_task_updated_at'),
'app_conversation_start_task',
['updated_at'],
unique=False,
)
op.create_table(
'event_callback',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('conversation_id', sa.UUID(), nullable=True),
sa.Column('processor', sa.JSON(), nullable=True),
sa.Column('event_kind', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_event_callback_created_at'),
'event_callback',
['created_at'],
unique=False,
)
op.create_table(
'event_callback_result',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('status', sa.Enum(EventCallbackResultStatus), nullable=True),
sa.Column('event_callback_id', sa.UUID(), nullable=True),
sa.Column('event_id', sa.UUID(), nullable=True),
sa.Column('conversation_id', sa.UUID(), nullable=True),
sa.Column('detail', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_event_callback_result_conversation_id'),
'event_callback_result',
['conversation_id'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_created_at'),
'event_callback_result',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_event_callback_id'),
'event_callback_result',
['event_callback_id'],
unique=False,
)
op.create_index(
op.f('ix_event_callback_result_event_id'),
'event_callback_result',
['event_id'],
unique=False,
)
op.create_table(
'v1_remote_sandbox',
sa.Column('id', sa.String(), nullable=False),
sa.Column('created_by_user_id', sa.String(), nullable=True),
sa.Column('sandbox_spec_id', sa.String(), nullable=True),
sa.Column(
'created_at',
sa.DateTime(timezone=True),
server_default=sa.text('(CURRENT_TIMESTAMP)'),
nullable=True,
),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(
op.f('ix_v1_remote_sandbox_created_at'),
'v1_remote_sandbox',
['created_at'],
unique=False,
)
op.create_index(
op.f('ix_v1_remote_sandbox_created_by_user_id'),
'v1_remote_sandbox',
['created_by_user_id'],
unique=False,
)
op.create_index(
op.f('ix_v1_remote_sandbox_sandbox_spec_id'),
'v1_remote_sandbox',
['sandbox_spec_id'],
unique=False,
)
op.create_table(
'conversation_metadata',
sa.Column('conversation_id', sa.String(), nullable=False),
sa.Column('github_user_id', sa.String(), nullable=True),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('selected_repository', sa.String(), nullable=True),
sa.Column('selected_branch', sa.String(), nullable=True),
sa.Column('git_provider', sa.String(), nullable=True),
sa.Column('title', sa.String(), nullable=True),
sa.Column('last_updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('trigger', sa.String(), nullable=True),
sa.Column('pr_number', sa.JSON(), nullable=True),
sa.Column('accumulated_cost', sa.Float(), nullable=True),
sa.Column('prompt_tokens', sa.Integer(), nullable=True),
sa.Column('completion_tokens', sa.Integer(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('max_budget_per_task', sa.Float(), nullable=True),
sa.Column('cache_read_tokens', sa.Integer(), nullable=True),
sa.Column('cache_write_tokens', sa.Integer(), nullable=True),
sa.Column('reasoning_tokens', sa.Integer(), nullable=True),
sa.Column('context_window', sa.Integer(), nullable=True),
sa.Column('per_turn_token', sa.Integer(), nullable=True),
sa.Column('llm_model', sa.String(), nullable=True),
sa.Column('conversation_version', sa.String(), nullable=False),
sa.Column('sandbox_id', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('conversation_id'),
)
op.create_index(
op.f('ix_conversation_metadata_conversation_version'),
'conversation_metadata',
['conversation_version'],
unique=False,
)
op.create_index(
op.f('ix_conversation_metadata_sandbox_id'),
'conversation_metadata',
['sandbox_id'],
unique=False,
)
def downgrade() -> None:
"""Downgrade schema."""
op.drop_index(
op.f('ix_conversation_metadata_sandbox_id'), table_name='conversation_metadata'
)
op.drop_index(
op.f('ix_conversation_metadata_conversation_version'),
table_name='conversation_metadata',
)
op.drop_table('conversation_metadata')
op.drop_index(
op.f('ix_v1_remote_sandbox_sandbox_spec_id'), table_name='v1_remote_sandbox'
)
op.drop_index(
op.f('ix_v1_remote_sandbox_created_by_user_id'), table_name='v1_remote_sandbox'
)
op.drop_index(
op.f('ix_v1_remote_sandbox_created_at'), table_name='v1_remote_sandbox'
)
op.drop_table('v1_remote_sandbox')
op.drop_index(
op.f('ix_event_callback_result_event_id'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_event_callback_id'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_created_at'),
table_name='event_callback_result',
)
op.drop_index(
op.f('ix_event_callback_result_conversation_id'),
table_name='event_callback_result',
)
op.drop_table('event_callback_result')
op.drop_index(op.f('ix_event_callback_created_at'), table_name='event_callback')
op.drop_table('event_callback')
op.drop_index(
op.f('ix_app_conversation_start_task_updated_at'),
table_name='app_conversation_start_task',
)
op.drop_index(
op.f('ix_app_conversation_start_task_created_by_user_id'),
table_name='app_conversation_start_task',
)
op.drop_index(
op.f('ix_app_conversation_start_task_created_at'),
table_name='app_conversation_start_task',
)
op.drop_table('app_conversation_start_task')

View File

@ -0,0 +1,21 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from fastapi import FastAPI
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class AppLifespanService(DiscriminatedUnionMixin, ABC):
def lifespan(self, api: FastAPI):
"""Return lifespan wrapper."""
return self
@abstractmethod
async def __aenter__(self):
"""Open lifespan."""
@abstractmethod
async def __aexit__(self, exc_type, exc_value, traceback):
"""Close lifespan."""

View File

@ -0,0 +1,38 @@
from __future__ import annotations
import os
from pathlib import Path
from alembic import command
from alembic.config import Config
from openhands.app_server.app_lifespan.app_lifespan_service import AppLifespanService
class OssAppLifespanService(AppLifespanService):
run_alembic_on_startup: bool = True
async def __aenter__(self):
if self.run_alembic_on_startup:
self.run_alembic()
return self
async def __aexit__(self, exc_type, exc_value, traceback):
pass
def run_alembic(self):
# Run alembic upgrade head to ensure database is up to date
alembic_dir = Path(__file__).parent / 'alembic'
alembic_ini = alembic_dir / 'alembic.ini'
# Create alembic config with absolute paths
alembic_cfg = Config(str(alembic_ini))
alembic_cfg.set_main_option('script_location', str(alembic_dir))
# Change to alembic directory for the command execution
original_cwd = os.getcwd()
try:
os.chdir(str(alembic_dir.parent))
command.upgrade(alembic_cfg, 'head')
finally:
os.chdir(original_cwd)

View File

@ -0,0 +1,358 @@
"""Configuration for the OpenHands App Server."""
import os
from pathlib import Path
from typing import AsyncContextManager
import httpx
from fastapi import Depends, Request
from pydantic import Field
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.agent_server.env_parser import from_env
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
AppConversationInfoServiceInjector,
)
from openhands.app_server.app_conversation.app_conversation_service import (
AppConversationService,
AppConversationServiceInjector,
)
from openhands.app_server.app_conversation.app_conversation_start_task_service import (
AppConversationStartTaskService,
AppConversationStartTaskServiceInjector,
)
from openhands.app_server.app_lifespan.app_lifespan_service import AppLifespanService
from openhands.app_server.app_lifespan.oss_app_lifespan_service import (
OssAppLifespanService,
)
from openhands.app_server.event.event_service import EventService, EventServiceInjector
from openhands.app_server.event_callback.event_callback_service import (
EventCallbackService,
EventCallbackServiceInjector,
)
from openhands.app_server.sandbox.sandbox_service import (
SandboxService,
SandboxServiceInjector,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
SandboxSpecService,
SandboxSpecServiceInjector,
)
from openhands.app_server.services.db_session_injector import (
DbSessionInjector,
)
from openhands.app_server.services.httpx_client_injector import HttpxClientInjector
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.services.jwt_service import JwtService, JwtServiceInjector
from openhands.app_server.user.user_context import UserContext, UserContextInjector
from openhands.sdk.utils.models import OpenHandsModel
def get_default_persistence_dir() -> Path:
# Recheck env because this function is also used to generate other defaults
persistence_dir = os.getenv('OH_PERSISTENCE_DIR')
if persistence_dir:
result = Path(persistence_dir)
else:
result = Path.home() / '.openhands'
result.mkdir(parents=True, exist_ok=True)
return result
def get_default_web_url() -> str | None:
"""Get legacy web host parameter.
If present, we assume we are running under https."""
web_host = os.getenv('WEB_HOST')
if not web_host:
return None
return f'https://{web_host}'
def _get_default_lifespan():
# Check legacy parameters for saas mode. If we are in SAAS mode do not apply
# OSS alembic migrations
if 'saas' in (os.getenv('OPENHANDS_CONFIG_CLS') or '').lower():
return None
return OssAppLifespanService()
class AppServerConfig(OpenHandsModel):
persistence_dir: Path = Field(default_factory=get_default_persistence_dir)
web_url: str | None = Field(
default_factory=get_default_web_url,
description='The URL where OpenHands is running (e.g., http://localhost:3000)',
)
# Dependency Injection Injectors
event: EventServiceInjector | None = None
event_callback: EventCallbackServiceInjector | None = None
sandbox: SandboxServiceInjector | None = None
sandbox_spec: SandboxSpecServiceInjector | None = None
app_conversation_info: AppConversationInfoServiceInjector | None = None
app_conversation_start_task: AppConversationStartTaskServiceInjector | None = None
app_conversation: AppConversationServiceInjector | None = None
user: UserContextInjector | None = None
jwt: JwtServiceInjector | None = None
httpx: HttpxClientInjector = Field(default_factory=HttpxClientInjector)
db_session: DbSessionInjector = Field(
default_factory=lambda: DbSessionInjector(
persistence_dir=get_default_persistence_dir()
)
)
# Services
lifespan: AppLifespanService = Field(default_factory=_get_default_lifespan)
def config_from_env() -> AppServerConfig:
# Import defaults...
from openhands.app_server.app_conversation.live_status_app_conversation_service import ( # noqa: E501
LiveStatusAppConversationServiceInjector,
)
from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( # noqa: E501
SQLAppConversationInfoServiceInjector,
)
from openhands.app_server.app_conversation.sql_app_conversation_start_task_service import ( # noqa: E501
SQLAppConversationStartTaskServiceInjector,
)
from openhands.app_server.event.filesystem_event_service import (
FilesystemEventServiceInjector,
)
from openhands.app_server.event_callback.sql_event_callback_service import (
SQLEventCallbackServiceInjector,
)
from openhands.app_server.sandbox.docker_sandbox_service import (
DockerSandboxServiceInjector,
)
from openhands.app_server.sandbox.docker_sandbox_spec_service import (
DockerSandboxSpecServiceInjector,
)
from openhands.app_server.sandbox.process_sandbox_service import (
ProcessSandboxServiceInjector,
)
from openhands.app_server.sandbox.process_sandbox_spec_service import (
ProcessSandboxSpecServiceInjector,
)
from openhands.app_server.sandbox.remote_sandbox_service import (
RemoteSandboxServiceInjector,
)
from openhands.app_server.sandbox.remote_sandbox_spec_service import (
RemoteSandboxSpecServiceInjector,
)
from openhands.app_server.user.auth_user_context import (
AuthUserContextInjector,
)
config: AppServerConfig = from_env(AppServerConfig, 'OH') # type: ignore
if config.event is None:
config.event = FilesystemEventServiceInjector()
if config.event_callback is None:
config.event_callback = SQLEventCallbackServiceInjector()
if config.sandbox is None:
# Legacy fallback
if os.getenv('RUNTIME') == 'remote':
config.sandbox = RemoteSandboxServiceInjector(
api_key=os.environ['SANDBOX_API_KEY'],
api_url=os.environ['SANDBOX_REMOTE_RUNTIME_API_URL'],
)
elif os.getenv('RUNTIME') in ('local', 'process'):
config.sandbox = ProcessSandboxServiceInjector()
else:
config.sandbox = DockerSandboxServiceInjector()
if config.sandbox_spec is None:
if os.getenv('RUNTIME') == 'remote':
config.sandbox_spec = RemoteSandboxSpecServiceInjector()
elif os.getenv('RUNTIME') in ('local', 'process'):
config.sandbox_spec = ProcessSandboxSpecServiceInjector()
else:
config.sandbox_spec = DockerSandboxSpecServiceInjector()
if config.app_conversation_info is None:
config.app_conversation_info = SQLAppConversationInfoServiceInjector()
if config.app_conversation_start_task is None:
config.app_conversation_start_task = (
SQLAppConversationStartTaskServiceInjector()
)
if config.app_conversation is None:
config.app_conversation = LiveStatusAppConversationServiceInjector()
if config.user is None:
config.user = AuthUserContextInjector()
if config.jwt is None:
config.jwt = JwtServiceInjector(persistence_dir=config.persistence_dir)
return config
_global_config: AppServerConfig | None = None
def get_global_config() -> AppServerConfig:
"""Get the default local server config shared across the server."""
global _global_config
if _global_config is None:
# Load configuration from environment...
_global_config = config_from_env()
return _global_config # type: ignore
def get_event_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[EventService]:
injector = get_global_config().event
assert injector is not None
return injector.context(state, request)
def get_event_callback_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[EventCallbackService]:
injector = get_global_config().event_callback
assert injector is not None
return injector.context(state, request)
def get_sandbox_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[SandboxService]:
injector = get_global_config().sandbox
assert injector is not None
return injector.context(state, request)
def get_sandbox_spec_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[SandboxSpecService]:
injector = get_global_config().sandbox_spec
assert injector is not None
return injector.context(state, request)
def get_app_conversation_info_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[AppConversationInfoService]:
injector = get_global_config().app_conversation_info
assert injector is not None
return injector.context(state, request)
def get_app_conversation_start_task_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[AppConversationStartTaskService]:
injector = get_global_config().app_conversation_start_task
assert injector is not None
return injector.context(state, request)
def get_app_conversation_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[AppConversationService]:
injector = get_global_config().app_conversation
assert injector is not None
return injector.context(state, request)
def get_user_context(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[UserContext]:
injector = get_global_config().user
assert injector is not None
return injector.context(state, request)
def get_httpx_client(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[httpx.AsyncClient]:
return get_global_config().httpx.context(state, request)
def get_jwt_service(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[JwtService]:
injector = get_global_config().jwt
assert injector is not None
return injector.context(state, request)
def get_db_session(
state: InjectorState, request: Request | None = None
) -> AsyncContextManager[AsyncSession]:
return get_global_config().db_session.context(state, request)
def get_app_lifespan_service() -> AppLifespanService:
config = get_global_config()
return config.lifespan
def depends_event_service():
injector = get_global_config().event
assert injector is not None
return Depends(injector.depends)
def depends_event_callback_service():
injector = get_global_config().event_callback
assert injector is not None
return Depends(injector.depends)
def depends_sandbox_service():
injector = get_global_config().sandbox
assert injector is not None
return Depends(injector.depends)
def depends_sandbox_spec_service():
injector = get_global_config().sandbox_spec
assert injector is not None
return Depends(injector.depends)
def depends_app_conversation_info_service():
injector = get_global_config().app_conversation_info
assert injector is not None
return Depends(injector.depends)
def depends_app_conversation_start_task_service():
injector = get_global_config().app_conversation_start_task
assert injector is not None
return Depends(injector.depends)
def depends_app_conversation_service():
injector = get_global_config().app_conversation
assert injector is not None
return Depends(injector.depends)
def depends_user_context():
injector = get_global_config().user
assert injector is not None
return Depends(injector.depends)
def depends_httpx_client():
return Depends(get_global_config().httpx.depends)
def depends_jwt_service():
injector = get_global_config().jwt
assert injector is not None
return Depends(injector.depends)
def depends_db_session():
return Depends(get_global_config().db_session.depends)

View File

@ -0,0 +1,43 @@
from typing import Any
from fastapi import HTTPException, status
class OpenHandsError(HTTPException):
"""General Error"""
def __init__(
self,
detail: Any = None,
headers: dict[str, str] | None = None,
status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
):
super().__init__(status_code=status_code, detail=detail, headers=headers)
class AuthError(OpenHandsError):
"""Error in authentication."""
def __init__(
self,
detail: Any = None,
headers: dict[str, str] | None = None,
status_code: int = status.HTTP_401_UNAUTHORIZED,
):
super().__init__(status_code=status_code, detail=detail, headers=headers)
class PermissionsError(OpenHandsError):
"""Error in permissions."""
def __init__(
self,
detail: Any = None,
headers: dict[str, str] | None = None,
status_code: int = status.HTTP_403_FORBIDDEN,
):
super().__init__(status_code=status_code, detail=detail, headers=headers)
class SandboxError(OpenHandsError):
"""Error in Sandbox."""

View File

@ -0,0 +1,21 @@
# Event Management
Handles event storage, retrieval, and streaming for the OpenHands app server.
## Overview
This module provides services for managing events within conversations, including event persistence, querying, and real-time streaming capabilities.
## Key Components
- **EventService**: Abstract service for event CRUD operations
- **FilesystemEventService**: File-based event storage implementation
- **EventRouter**: FastAPI router for event-related endpoints
## Features
- Event storage and retrieval by conversation ID
- Event filtering by kind, timestamp, and other criteria
- Sorting and pagination support for large event sets
- Real-time event streaming capabilities
- Multiple storage backend support (filesystem, database)
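## Example
A minimal sketch of listing recent events for a conversation through the search endpoint, assuming the app server runs locally on port 3000 and mounts this router under `/api/v1` (the base URL, port, and conversation ID below are illustrative):
```python
import httpx

# Query the event search endpoint for one conversation.
response = httpx.get(
    "http://localhost:3000/api/v1/events/search",
    params={
        "conversation_id__eq": "00000000-0000-0000-0000-000000000000",
        "limit": 20,
    },
)
response.raise_for_status()
page = response.json()
for event in page["items"]:
    print(event)
print("next page:", page["next_page_id"])
```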

View File

@ -0,0 +1,110 @@
"""Event router for OpenHands Server."""
from datetime import datetime
from typing import Annotated
from uuid import UUID
from fastapi import APIRouter, Query
from openhands.agent_server.models import EventPage, EventSortOrder
from openhands.app_server.config import depends_event_service
from openhands.app_server.event.event_service import EventService
from openhands.app_server.event_callback.event_callback_models import EventKind
from openhands.sdk import Event
router = APIRouter(prefix='/events', tags=['Events'])
event_service_dependency = depends_event_service()
# Read methods
@router.get('/search')
async def search_events(
conversation_id__eq: Annotated[
UUID | None,
Query(title='Optional filter by conversation ID'),
] = None,
kind__eq: Annotated[
EventKind | None,
Query(title='Optional filter by event kind'),
] = None,
timestamp__gte: Annotated[
datetime | None,
Query(title='Optional filter by timestamp greater than or equal to'),
] = None,
timestamp__lt: Annotated[
datetime | None,
Query(title='Optional filter by timestamp less than'),
] = None,
sort_order: Annotated[
EventSortOrder,
Query(title='Sort order for results'),
] = EventSortOrder.TIMESTAMP,
page_id: Annotated[
str | None,
Query(title='Optional next_page_id from the previously returned page'),
] = None,
limit: Annotated[
int,
Query(title='The max number of results in the page', gt=0, le=100),
] = 100,
event_service: EventService = event_service_dependency,
) -> EventPage:
"""Search / List events."""
assert limit > 0
assert limit <= 100
return await event_service.search_events(
conversation_id__eq=conversation_id__eq,
kind__eq=kind__eq,
timestamp__gte=timestamp__gte,
timestamp__lt=timestamp__lt,
sort_order=sort_order,
page_id=page_id,
limit=limit,
)
@router.get('/count')
async def count_events(
conversation_id__eq: Annotated[
UUID | None,
Query(title='Optional filter by conversation ID'),
] = None,
kind__eq: Annotated[
EventKind | None,
Query(title='Optional filter by event kind'),
] = None,
timestamp__gte: Annotated[
datetime | None,
Query(title='Optional filter by timestamp greater than or equal to'),
] = None,
timestamp__lt: Annotated[
datetime | None,
Query(title='Optional filter by timestamp less than'),
] = None,
sort_order: Annotated[
EventSortOrder,
Query(title='Sort order for results'),
] = EventSortOrder.TIMESTAMP,
event_service: EventService = event_service_dependency,
) -> int:
"""Count events matching the given filters."""
return await event_service.count_events(
conversation_id__eq=conversation_id__eq,
kind__eq=kind__eq,
timestamp__gte=timestamp__gte,
timestamp__lt=timestamp__lt,
sort_order=sort_order,
)
@router.get('')
async def batch_get_events(
id: Annotated[list[str], Query()],
event_service: EventService = event_service_dependency,
) -> list[Event | None]:
"""Get a batch of events given their ids, returning null for any missing event."""
assert len(id) <= 100
events = await event_service.batch_get_events(id)
return events

View File

@ -0,0 +1,59 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from datetime import datetime
from uuid import UUID
from openhands.agent_server.models import EventPage, EventSortOrder
from openhands.app_server.event_callback.event_callback_models import EventKind
from openhands.app_server.services.injector import Injector
from openhands.sdk import Event
from openhands.sdk.utils.models import DiscriminatedUnionMixin
_logger = logging.getLogger(__name__)
class EventService(ABC):
"""Event Service for getting events."""
@abstractmethod
async def get_event(self, event_id: str) -> Event | None:
"""Given an id, retrieve an event."""
@abstractmethod
async def search_events(
self,
conversation_id__eq: UUID | None = None,
kind__eq: EventKind | None = None,
timestamp__gte: datetime | None = None,
timestamp__lt: datetime | None = None,
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
page_id: str | None = None,
limit: int = 100,
) -> EventPage:
"""Search events matching the given filters."""
@abstractmethod
async def count_events(
self,
conversation_id__eq: UUID | None = None,
kind__eq: EventKind | None = None,
timestamp__gte: datetime | None = None,
timestamp__lt: datetime | None = None,
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
) -> int:
"""Count events matching the given filters."""
@abstractmethod
async def save_event(self, conversation_id: UUID, event: Event):
"""Save an event. Internal method intended not be part of the REST api."""
async def batch_get_events(self, event_ids: list[str]) -> list[Event | None]:
"""Given a list of ids, get events (Or none for any which were not found)."""
return await asyncio.gather(
*[self.get_event(event_id) for event_id in event_ids]
)
class EventServiceInjector(DiscriminatedUnionMixin, Injector[EventService], ABC):
pass

View File

@ -0,0 +1,318 @@
"""Filesystem-based EventService implementation."""
import glob
import json
import logging
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import AsyncGenerator
from uuid import UUID
from fastapi import Request
from openhands.agent_server.models import EventPage, EventSortOrder
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
)
from openhands.app_server.errors import OpenHandsError
from openhands.app_server.event.event_service import EventService, EventServiceInjector
from openhands.app_server.event_callback.event_callback_models import EventKind
from openhands.app_server.services.injector import InjectorState
from openhands.sdk import Event
_logger = logging.getLogger(__name__)
@dataclass
class FilesystemEventService(EventService):
"""Filesystem-based implementation of EventService.
Events are stored in files with the naming format:
{conversation_id}/{YYYYMMDDHHMMSS}_{kind}_{id.hex}
Uses an AppConversationInfoService to look up conversations.
"""
app_conversation_info_service: AppConversationInfoService
events_dir: Path
def _ensure_events_dir(self, conversation_id: UUID | None = None) -> Path:
"""Ensure the events directory exists."""
if conversation_id:
events_path = self.events_dir / str(conversation_id)
else:
events_path = self.events_dir
events_path.mkdir(parents=True, exist_ok=True)
return events_path
def _timestamp_to_str(self, timestamp: datetime | str) -> str:
"""Convert timestamp to YYYYMMDDHHMMSS format."""
if isinstance(timestamp, str):
# Parse ISO format timestamp string
dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
return dt.strftime('%Y%m%d%H%M%S')
return timestamp.strftime('%Y%m%d%H%M%S')
def _get_event_filename(self, conversation_id: UUID, event: Event) -> str:
"""Generate filename using YYYYMMDDHHMMSS_kind_id.hex format."""
timestamp_str = self._timestamp_to_str(event.timestamp)
kind = event.__class__.__name__
# Handle both UUID objects and string UUIDs
if isinstance(event.id, str):
id_hex = event.id.replace('-', '')
else:
id_hex = event.id.hex
return f'{timestamp_str}_{kind}_{id_hex}'
def _save_event_to_file(self, conversation_id: UUID, event: Event) -> None:
"""Save an event to a file."""
events_path = self._ensure_events_dir(conversation_id)
filename = self._get_event_filename(conversation_id, event)
filepath = events_path / filename
with open(filepath, 'w') as f:
# Use model_dump with mode='json' to handle UUID serialization
data = event.model_dump(mode='json')
f.write(json.dumps(data, indent=2))
def _load_event_from_file(self, filepath: Path) -> Event | None:
"""Load an event from a file."""
try:
json_data = filepath.read_text()
return Event.model_validate_json(json_data)
except Exception:
return None
def _get_event_files_by_pattern(
self, pattern: str, conversation_id: UUID | None = None
) -> list[Path]:
"""Get event files matching a glob pattern, sorted by timestamp."""
if conversation_id:
search_path = self.events_dir / str(conversation_id) / pattern
else:
search_path = self.events_dir / '*' / pattern
files = glob.glob(str(search_path))
return sorted([Path(f) for f in files])
def _parse_filename(self, filename: str) -> dict[str, str] | None:
"""Parse filename to extract timestamp, kind, and event_id."""
try:
parts = filename.split('_')
if len(parts) >= 3:
timestamp_str = parts[0]
kind = '_'.join(parts[1:-1]) # Handle kinds with underscores
event_id = parts[-1]
return {'timestamp': timestamp_str, 'kind': kind, 'event_id': event_id}
except Exception:
pass
return None
def _get_conversation_id(self, file: Path) -> UUID | None:
try:
return UUID(file.parent.name)
except Exception:
return None
def _get_conversation_ids(self, files: list[Path]) -> set[UUID]:
result = set()
for file in files:
conversation_id = self._get_conversation_id(file)
if conversation_id:
result.add(conversation_id)
return result
async def _filter_files_by_conversation(self, files: list[Path]) -> list[Path]:
conversation_ids = list(self._get_conversation_ids(files))
conversations = (
await self.app_conversation_info_service.batch_get_app_conversation_info(
conversation_ids
)
)
permitted_conversation_ids = set()
for conversation in conversations:
if conversation:
permitted_conversation_ids.add(conversation.id)
result = [
file
for file in files
if self._get_conversation_id(file) in permitted_conversation_ids
]
return result
def _filter_files_by_criteria(
self,
files: list[Path],
conversation_id__eq: UUID | None = None,
kind__eq: EventKind | None = None,
timestamp__gte: datetime | None = None,
timestamp__lt: datetime | None = None,
) -> list[Path]:
"""Filter files based on search criteria."""
filtered_files = []
for file_path in files:
# Check conversation_id filter
if conversation_id__eq:
if str(conversation_id__eq) not in str(file_path):
continue
# Parse filename for additional filtering
filename_info = self._parse_filename(file_path.name)
if not filename_info:
continue
# Check kind filter
if kind__eq and filename_info['kind'] != kind__eq:
continue
# Check timestamp filters
if timestamp__gte or timestamp__lt:
try:
file_timestamp = datetime.strptime(
filename_info['timestamp'], '%Y%m%d%H%M%S'
)
if timestamp__gte and file_timestamp < timestamp__gte:
continue
if timestamp__lt and file_timestamp >= timestamp__lt:
continue
except ValueError:
continue
filtered_files.append(file_path)
return filtered_files
async def get_event(self, event_id: str) -> Event | None:
"""Get the event with the given id, or None if not found."""
# Convert event_id to hex format (remove dashes) for filename matching
if isinstance(event_id, str) and '-' in event_id:
id_hex = event_id.replace('-', '')
else:
id_hex = event_id
# Use glob pattern to find files ending with the event_id
pattern = f'*_{id_hex}'
files = self._get_event_files_by_pattern(pattern)
if not files:
return None
# If there is no access to the conversation, do not return the event
file = files[0]
conversation_id = self._get_conversation_id(file)
if not conversation_id:
return None
conversation = (
await self.app_conversation_info_service.get_app_conversation_info(
conversation_id
)
)
if not conversation:
return None
# Load and return the first matching event
return self._load_event_from_file(file)
async def search_events(
self,
conversation_id__eq: UUID | None = None,
kind__eq: EventKind | None = None,
timestamp__gte: datetime | None = None,
timestamp__lt: datetime | None = None,
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
page_id: str | None = None,
limit: int = 100,
) -> EventPage:
"""Search for events matching the given filters."""
# Build the search pattern
pattern = '*'
files = self._get_event_files_by_pattern(pattern, conversation_id__eq)
files = await self._filter_files_by_conversation(files)
files = self._filter_files_by_criteria(
files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt
)
files.sort(
key=lambda f: f.name,
reverse=(sort_order == EventSortOrder.TIMESTAMP_DESC),
)
# Handle pagination
start_index = 0
if page_id:
for i, file_path in enumerate(files):
if file_path.name == page_id:
start_index = i + 1
break
# Collect items for this page
page_files = files[start_index : start_index + limit]
next_page_id = None
if start_index + limit < len(files):
next_page_id = files[start_index + limit].name
# Load all events from files
page_events = []
for file_path in page_files:
event = self._load_event_from_file(file_path)
if event is not None:
page_events.append(event)
return EventPage(items=page_events, next_page_id=next_page_id)
async def count_events(
self,
conversation_id__eq: UUID | None = None,
kind__eq: EventKind | None = None,
timestamp__gte: datetime | None = None,
timestamp__lt: datetime | None = None,
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
) -> int:
"""Count events matching the given filters."""
# Build the search pattern
pattern = '*'
files = self._get_event_files_by_pattern(pattern, conversation_id__eq)
files = await self._filter_files_by_conversation(files)
files = self._filter_files_by_criteria(
files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt
)
return len(files)
async def save_event(self, conversation_id: UUID, event: Event):
"""Save an event. Internal method intended not be part of the REST api."""
conversation = (
await self.app_conversation_info_service.get_app_conversation_info(
conversation_id
)
)
if not conversation:
# This is either an illegal state or someone is attempting unauthorized access
raise OpenHandsError(f'No such conversation: {conversation_id}')
self._save_event_to_file(conversation_id, event)
class FilesystemEventServiceInjector(EventServiceInjector):
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[EventService, None]:
from openhands.app_server.config import (
get_app_conversation_info_service,
get_global_config,
)
async with get_app_conversation_info_service(
state, request
) as app_conversation_info_service:
persistence_dir = get_global_config().persistence_dir
yield FilesystemEventService(
app_conversation_info_service=app_conversation_info_service,
events_dir=persistence_dir / 'v1' / 'events',
)

View File

@ -0,0 +1,21 @@
# Event Callbacks
Manages webhooks and event callbacks for external system integration.
## Overview
This module provides webhook and callback functionality, allowing external systems to receive notifications when specific events occur within OpenHands conversations.
## Key Components
- **EventCallbackService**: Abstract service for callback CRUD operations
- **SqlEventCallbackService**: SQL-based callback storage implementation
- **EventWebhookRouter**: FastAPI router for webhook endpoints
## Features
- Webhook registration and management
- Event filtering by type and conversation
- Callback result tracking and status monitoring
- Retry logic for failed webhook deliveries
- Secure webhook authentication
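## Example
A minimal sketch of registering a callback that logs every event in a single conversation, assuming an `EventCallbackService` instance obtained through the usual dependency injection (the function and variable names below are illustrative):
```python
from uuid import UUID

from openhands.app_server.event_callback.event_callback_models import (
    CreateEventCallbackRequest,
    LoggingCallbackProcessor,
)


async def register_logging_callback(event_callback_service, conversation_id: UUID):
    # None for conversation_id or event_kind would match all conversations or event kinds.
    request = CreateEventCallbackRequest(
        conversation_id=conversation_id,
        processor=LoggingCallbackProcessor(),
        event_kind=None,
    )
    callback = await event_callback_service.create_event_callback(request)
    return callback.id
```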

View File

@ -0,0 +1,83 @@
# pyright: reportIncompatibleMethodOverride=false
from __future__ import annotations
import logging
from abc import ABC, abstractmethod
from datetime import datetime
from typing import TYPE_CHECKING, Literal
from uuid import UUID, uuid4
from pydantic import Field
from openhands.agent_server.utils import utc_now
from openhands.app_server.event_callback.event_callback_result_models import (
EventCallbackResult,
EventCallbackResultStatus,
)
from openhands.sdk import Event
from openhands.sdk.utils.models import (
DiscriminatedUnionMixin,
OpenHandsModel,
get_known_concrete_subclasses,
)
_logger = logging.getLogger(__name__)
if TYPE_CHECKING:
EventKind = str
else:
EventKind = Literal[tuple(c.__name__ for c in get_known_concrete_subclasses(Event))]
class EventCallbackProcessor(DiscriminatedUnionMixin, ABC):
@abstractmethod
async def __call__(
self,
conversation_id: UUID,
callback: EventCallback,
event: Event,
) -> EventCallbackResult:
"""Process an event."""
class LoggingCallbackProcessor(EventCallbackProcessor):
"""Example implementation which logs callbacks."""
async def __call__(
self,
conversation_id: UUID,
callback: EventCallback,
event: Event,
) -> EventCallbackResult:
_logger.info(f'Callback {callback.id} Invoked for event {event}')
return EventCallbackResult(
status=EventCallbackResultStatus.SUCCESS,
event_callback_id=callback.id,
event_id=event.id,
conversation_id=conversation_id,
)
class CreateEventCallbackRequest(OpenHandsModel):
conversation_id: UUID | None = Field(
default=None,
description=(
'Optional filter on the conversation to which this callback applies'
),
)
processor: EventCallbackProcessor
event_kind: EventKind | None = Field(
default=None,
description=(
'Optional filter on the type of events to which this callback applies'
),
)
class EventCallback(CreateEventCallbackRequest):
id: UUID = Field(default_factory=uuid4)
created_at: datetime = Field(default_factory=utc_now)
class EventCallbackPage(OpenHandsModel):
items: list[EventCallback]
next_page_id: str | None = None

View File

@ -0,0 +1,35 @@
from datetime import datetime
from enum import Enum
from uuid import UUID, uuid4
from pydantic import BaseModel, Field
from openhands.agent_server.utils import utc_now
from openhands.sdk.event.types import EventID
class EventCallbackResultStatus(Enum):
SUCCESS = 'SUCCESS'
ERROR = 'ERROR'
class EventCallbackResultSortOrder(Enum):
CREATED_AT = 'CREATED_AT'
CREATED_AT_DESC = 'CREATED_AT_DESC'
class EventCallbackResult(BaseModel):
"""Object representing the result of an event callback."""
id: UUID = Field(default_factory=uuid4)
status: EventCallbackResultStatus
event_callback_id: UUID
event_id: EventID
conversation_id: UUID
detail: str | None = None
created_at: datetime = Field(default_factory=utc_now)
class EventCallbackResultPage(BaseModel):
items: list[EventCallbackResult]
next_page_id: str | None = None

View File

@ -0,0 +1,64 @@
import asyncio
from abc import ABC, abstractmethod
from uuid import UUID
from openhands.app_server.event_callback.event_callback_models import (
CreateEventCallbackRequest,
EventCallback,
EventCallbackPage,
EventKind,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk import Event
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class EventCallbackService(ABC):
"""CRUD service for managing event callbacks."""
@abstractmethod
async def create_event_callback(
self, request: CreateEventCallbackRequest
) -> EventCallback:
"""Create a new event callback."""
@abstractmethod
async def get_event_callback(self, id: UUID) -> EventCallback | None:
"""Get a single event callback, returning None if not found."""
@abstractmethod
async def delete_event_callback(self, id: UUID) -> bool:
"""Delete a event callback, returning True if deleted, False if not found."""
@abstractmethod
async def search_event_callbacks(
self,
conversation_id__eq: UUID | None = None,
event_kind__eq: EventKind | None = None,
event_id__eq: UUID | None = None,
page_id: str | None = None,
limit: int = 100,
) -> EventCallbackPage:
"""Search for event callbacks, optionally filtered by event_id."""
async def batch_get_event_callbacks(
self, event_callback_ids: list[UUID]
) -> list[EventCallback | None]:
"""Get a batch of event callbacks, returning None for any not found."""
results = await asyncio.gather(
*[
self.get_event_callback(event_callback_id)
for event_callback_id in event_callback_ids
]
)
return results
@abstractmethod
async def execute_callbacks(self, conversation_id: UUID, event: Event) -> None:
"""Execute any applicable callbacks for the event and store the results."""
class EventCallbackServiceInjector(
DiscriminatedUnionMixin, Injector[EventCallbackService], ABC
):
pass

View File

@ -0,0 +1,230 @@
# pyright: reportArgumentType=false
"""SQL implementation of EventCallbackService."""
from __future__ import annotations
import asyncio
import logging
from dataclasses import dataclass
from typing import AsyncGenerator
from uuid import UUID
from fastapi import Request
from sqlalchemy import UUID as SQLUUID
from sqlalchemy import Column, Enum, String, and_, func, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.app_server.event_callback.event_callback_models import (
CreateEventCallbackRequest,
EventCallback,
EventCallbackPage,
EventCallbackProcessor,
EventKind,
)
from openhands.app_server.event_callback.event_callback_result_models import (
EventCallbackResultStatus,
)
from openhands.app_server.event_callback.event_callback_service import (
EventCallbackService,
EventCallbackServiceInjector,
)
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.utils.sql_utils import (
Base,
UtcDateTime,
create_json_type_decorator,
row2dict,
)
from openhands.sdk import Event
_logger = logging.getLogger(__name__)
# TODO: Add user level filtering to this class
class StoredEventCallback(Base): # type: ignore
__tablename__ = 'event_callback'
id = Column(SQLUUID, primary_key=True)
conversation_id = Column(SQLUUID, nullable=True)
processor = Column(create_json_type_decorator(EventCallbackProcessor))
event_kind = Column(String, nullable=True)
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
class StoredEventCallbackResult(Base): # type: ignore
__tablename__ = 'event_callback_result'
id = Column(SQLUUID, primary_key=True)
status = Column(Enum(EventCallbackResultStatus), nullable=True)
event_callback_id = Column(SQLUUID, index=True)
event_id = Column(SQLUUID, index=True)
conversation_id = Column(SQLUUID, index=True)
detail = Column(String, nullable=True)
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
@dataclass
class SQLEventCallbackService(EventCallbackService):
"""SQL implementation of EventCallbackService."""
db_session: AsyncSession
async def create_event_callback(
self, request: CreateEventCallbackRequest
) -> EventCallback:
"""Create a new event callback."""
# Create EventCallback from request
event_callback = EventCallback(
conversation_id=request.conversation_id,
processor=request.processor,
event_kind=request.event_kind,
)
# Create stored version and add to db_session
stored_callback = StoredEventCallback(**event_callback.model_dump())
self.db_session.add(stored_callback)
await self.db_session.commit()
await self.db_session.refresh(stored_callback)
return EventCallback(**row2dict(stored_callback))
async def get_event_callback(self, id: UUID) -> EventCallback | None:
"""Get a single event callback, returning None if not found."""
stmt = select(StoredEventCallback).where(StoredEventCallback.id == id)
result = await self.db_session.execute(stmt)
stored_callback = result.scalar_one_or_none()
if stored_callback:
return EventCallback(**row2dict(stored_callback))
return None
async def delete_event_callback(self, id: UUID) -> bool:
"""Delete an event callback, returning True if deleted, False if not found."""
stmt = select(StoredEventCallback).where(StoredEventCallback.id == id)
result = await self.db_session.execute(stmt)
stored_callback = result.scalar_one_or_none()
if stored_callback is None:
return False
await self.db_session.delete(stored_callback)
await self.db_session.commit()
return True
async def search_event_callbacks(
self,
conversation_id__eq: UUID | None = None,
event_kind__eq: EventKind | None = None,
event_id__eq: UUID | None = None,
page_id: str | None = None,
limit: int = 100,
) -> EventCallbackPage:
"""Search for event callbacks, optionally filtered by parameters."""
# Build the query with filters
conditions = []
if conversation_id__eq is not None:
conditions.append(
StoredEventCallback.conversation_id == conversation_id__eq
)
if event_kind__eq is not None:
conditions.append(StoredEventCallback.event_kind == event_kind__eq)
# Note: event_id__eq is not stored in the event_callback table and is
# currently ignored here; callers can filter by event ID after retrieval
# Build the base query
stmt = select(StoredEventCallback)
if conditions:
stmt = stmt.where(and_(*conditions))
# Handle pagination
if page_id is not None:
# Parse page_id to get offset or cursor
try:
offset = int(page_id)
stmt = stmt.offset(offset)
except ValueError:
# If page_id is not a valid integer, start from beginning
offset = 0
else:
offset = 0
# Apply limit and get one extra to check if there are more results
stmt = stmt.limit(limit + 1).order_by(StoredEventCallback.created_at.desc())
result = await self.db_session.execute(stmt)
stored_callbacks = result.scalars().all()
# Check if there are more results
has_more = len(stored_callbacks) > limit
if has_more:
stored_callbacks = stored_callbacks[:limit]
# Calculate next page ID
next_page_id = None
if has_more:
next_page_id = str(offset + limit)
# Convert stored callbacks to domain models
callbacks = [EventCallback(**row2dict(cb)) for cb in stored_callbacks]
return EventCallbackPage(items=callbacks, next_page_id=next_page_id)
async def execute_callbacks(self, conversation_id: UUID, event: Event) -> None:
query = (
select(StoredEventCallback)
.where(
or_(
StoredEventCallback.event_kind == event.kind,
StoredEventCallback.event_kind.is_(None),
)
)
.where(
or_(
StoredEventCallback.conversation_id == conversation_id,
StoredEventCallback.conversation_id.is_(None),
)
)
)
result = await self.db_session.execute(query)
stored_callbacks = result.scalars().all()
if stored_callbacks:
callbacks = [EventCallback(**row2dict(cb)) for cb in stored_callbacks]
await asyncio.gather(
*[
self.execute_callback(conversation_id, callback, event)
for callback in callbacks
]
)
await self.db_session.commit()
async def execute_callback(
self, conversation_id: UUID, callback: EventCallback, event: Event
):
try:
result = await callback.processor(conversation_id, callback, event)
stored_result = StoredEventCallbackResult(**row2dict(result))
except Exception as exc:
_logger.exception(f'Exception in callback {callback.id}', stack_info=True)
stored_result = StoredEventCallbackResult(
status=EventCallbackResultStatus.ERROR,
event_callback_id=callback.id,
event_id=event.id,
conversation_id=conversation_id,
detail=str(exc),
)
self.db_session.add(stored_result)
async def __aexit__(self, exc_type, exc_value, traceback):
"""Stop using this event callback service."""
pass
class SQLEventCallbackServiceInjector(EventCallbackServiceInjector):
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[EventCallbackService, None]:
from openhands.app_server.config import get_db_session
async with get_db_session(state) as db_session:
yield SQLEventCallbackService(db_session=db_session)

View File

@ -0,0 +1,188 @@
"""Event Callback router for OpenHands Server."""
import asyncio
import logging
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import APIKeyHeader
from jwt import InvalidTokenError
from openhands.agent_server.models import ConversationInfo, Success
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
)
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationInfo,
)
from openhands.app_server.config import (
depends_app_conversation_info_service,
depends_db_session,
depends_event_service,
depends_jwt_service,
depends_sandbox_service,
get_event_callback_service,
get_global_config,
)
from openhands.app_server.errors import AuthError
from openhands.app_server.event.event_service import EventService
from openhands.app_server.sandbox.sandbox_models import SandboxInfo
from openhands.app_server.sandbox.sandbox_service import SandboxService
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.services.jwt_service import JwtService
from openhands.app_server.user.specifiy_user_context import (
USER_CONTEXT_ATTR,
SpecifyUserContext,
as_admin,
)
from openhands.app_server.user.user_context import UserContext
from openhands.integrations.provider import ProviderType
from openhands.sdk import Event
router = APIRouter(prefix='/webhooks', tags=['Webhooks'])
sandbox_service_dependency = depends_sandbox_service()
event_service_dependency = depends_event_service()
app_conversation_info_service_dependency = depends_app_conversation_info_service()
jwt_dependency = depends_jwt_service()
config = get_global_config()
db_session_dependency = depends_db_session()
_logger = logging.getLogger(__name__)
async def valid_sandbox(
sandbox_id: str,
user_context: UserContext = Depends(as_admin),
session_api_key: str = Depends(
APIKeyHeader(name='X-Session-API-Key', auto_error=False)
),
sandbox_service: SandboxService = sandbox_service_dependency,
) -> SandboxInfo:
sandbox_info = await sandbox_service.get_sandbox(sandbox_id)
if sandbox_info is None or sandbox_info.session_api_key != session_api_key:
raise HTTPException(status.HTTP_401_UNAUTHORIZED)
return sandbox_info
async def valid_conversation(
conversation_id: UUID,
sandbox_info: SandboxInfo,
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
) -> AppConversationInfo:
app_conversation_info = (
await app_conversation_info_service.get_app_conversation_info(conversation_id)
)
if not app_conversation_info:
# Conversation does not yet exist - create a stub
return AppConversationInfo(
id=conversation_id,
sandbox_id=sandbox_info.id,
created_by_user_id=sandbox_info.created_by_user_id,
)
if app_conversation_info.created_by_user_id != sandbox_info.created_by_user_id:
# Make sure that the conversation and sandbox were created by the same user
raise AuthError()
return app_conversation_info
@router.post('/{sandbox_id}/conversations')
async def on_conversation_update(
conversation_info: ConversationInfo,
sandbox_info: SandboxInfo = Depends(valid_sandbox),
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
) -> Success:
"""Webhook callback for when a conversation starts, pauses, resumes, or deletes."""
existing = await valid_conversation(
conversation_info.id, sandbox_info, app_conversation_info_service
)
app_conversation_info = AppConversationInfo(
id=conversation_info.id,
# TODO: As of writing, ConversationInfo from AgentServer does not have a title
title=existing.title or f'Conversation {conversation_info.id}',
sandbox_id=sandbox_info.id,
created_by_user_id=sandbox_info.created_by_user_id,
llm_model=conversation_info.agent.llm.model,
# Git parameters
selected_repository=existing.selected_repository,
selected_branch=existing.selected_branch,
git_provider=existing.git_provider,
trigger=existing.trigger,
pr_number=existing.pr_number,
)
await app_conversation_info_service.save_app_conversation_info(
app_conversation_info
)
return Success()
@router.post('/{sandbox_id}/events/{conversation_id}')
async def on_event(
events: list[Event],
conversation_id: UUID,
sandbox_info: SandboxInfo = Depends(valid_sandbox),
app_conversation_info_service: AppConversationInfoService = app_conversation_info_service_dependency,
event_service: EventService = event_service_dependency,
) -> Success:
"""Webhook callback for when event stream events occur."""
app_conversation_info = await valid_conversation(
conversation_id, sandbox_info, app_conversation_info_service
)
try:
# Save events...
await asyncio.gather(
*[event_service.save_event(conversation_id, event) for event in events]
)
asyncio.create_task(
_run_callbacks_in_bg_and_close(
conversation_id, app_conversation_info.created_by_user_id, events
)
)
except Exception:
_logger.exception('Error in webhook', stack_info=True)
return Success()
@router.get('/secrets')
async def get_secret(
access_token: str = Depends(APIKeyHeader(name='X-Access-Token', auto_error=False)),
jwt_service: JwtService = jwt_dependency,
) -> str:
"""Given an access token, retrieve a user secret. The access token
is limited by user and provider type, and may include a timeout, limiting
the damage in the event that a token is ever leaked."""
try:
payload = jwt_service.verify_jws_token(access_token)
user_id = payload['user_id']
provider_type = ProviderType[payload['provider_type']]
user_injector = config.user
assert user_injector is not None
user_context = await user_injector.get_for_user(user_id)
secret = None
if user_context:
secret = await user_context.get_latest_token(provider_type)
if secret is None:
raise HTTPException(404, 'No such provider')
return secret
except InvalidTokenError:
raise HTTPException(status.HTTP_401_UNAUTHORIZED)
async def _run_callbacks_in_bg_and_close(
conversation_id: UUID,
user_id: str | None,
events: list[Event],
):
"""Run all callbacks and close the session"""
state = InjectorState()
setattr(state, USER_CONTEXT_ATTR, SpecifyUserContext(user_id=user_id))
async with get_event_callback_service(state) as event_callback_service:
# We don't use asyncio.gather here because callbacks must be run in sequence.
for event in events:
await event_callback_service.execute_callbacks(conversation_id, event)

View File

@ -0,0 +1,21 @@
# Sandbox Management
Manages sandbox environments for secure agent execution within OpenHands.
## Overview
Since agents can do things that may harm your system, they are typically run inside a sandbox (like a Docker container). This module provides services for creating, managing, and monitoring these sandbox environments.
## Key Components
- **SandboxService**: Abstract service for sandbox lifecycle management
- **DockerSandboxService**: Docker-based sandbox implementation
- **SandboxSpecService**: Manages sandbox specifications and templates
- **SandboxRouter**: FastAPI router for sandbox endpoints
## Features
- Secure containerized execution environments
- Sandbox lifecycle management (create, start, stop, destroy)
- Multiple sandbox backend support (Docker, Remote, Local)
- User-scoped sandbox access control
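## Example
A minimal sketch of starting a sandbox from the default spec and reading its exposed URLs, assuming a `SandboxService` instance (for example a `DockerSandboxService`) obtained through dependency injection (the function name below is illustrative):
```python
async def start_default_sandbox(sandbox_service):
    # Passing no spec ID falls back to the service's default sandbox spec.
    sandbox = await sandbox_service.start_sandbox()
    print(sandbox.id, sandbox.status)
    for exposed in sandbox.exposed_urls or []:
        print(exposed.name, exposed.url)
    return sandbox
```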

View File

@ -0,0 +1,429 @@
import asyncio
import logging
import os
import socket
from dataclasses import dataclass, field
from datetime import datetime
from typing import AsyncGenerator
import base62
import docker
import httpx
from docker.errors import APIError, NotFound
from fastapi import Request
from pydantic import BaseModel, ConfigDict, Field
from openhands.agent_server.utils import utc_now
from openhands.app_server.errors import SandboxError
from openhands.app_server.sandbox.docker_sandbox_spec_service import get_docker_client
from openhands.app_server.sandbox.sandbox_models import (
AGENT_SERVER,
VSCODE,
ExposedUrl,
SandboxInfo,
SandboxPage,
SandboxStatus,
)
from openhands.app_server.sandbox.sandbox_service import (
SandboxService,
SandboxServiceInjector,
)
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
from openhands.app_server.services.injector import InjectorState
_logger = logging.getLogger(__name__)
SESSION_API_KEY_VARIABLE = 'OH_SESSION_API_KEYS_0'
WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL'
class VolumeMount(BaseModel):
"""Mounted volume within the container."""
host_path: str
container_path: str
mode: str = 'rw'
model_config = ConfigDict(frozen=True)
class ExposedPort(BaseModel):
"""Exposed port within container to be matched to a free port on the host."""
name: str
description: str
container_port: int = 8000
model_config = ConfigDict(frozen=True)
@dataclass
class DockerSandboxService(SandboxService):
"""Sandbox service built on docker.
The Docker API does not currently support async operations, so some of these operations will block.
Given that the Docker API is intended for local use on a single machine, this is probably acceptable.
"""
sandbox_spec_service: SandboxSpecService
container_name_prefix: str
host_port: int
container_url_pattern: str
mounts: list[VolumeMount]
exposed_ports: list[ExposedPort]
health_check_path: str | None
httpx_client: httpx.AsyncClient
docker_client: docker.DockerClient = field(default_factory=get_docker_client)
def _find_unused_port(self) -> int:
"""Find an unused port on the host machine."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
return port
def _docker_status_to_sandbox_status(self, docker_status: str) -> SandboxStatus:
"""Convert Docker container status to SandboxStatus."""
status_mapping = {
'running': SandboxStatus.RUNNING,
'paused': SandboxStatus.PAUSED,
'exited': SandboxStatus.MISSING,
'created': SandboxStatus.STARTING,
'restarting': SandboxStatus.STARTING,
'removing': SandboxStatus.MISSING,
'dead': SandboxStatus.ERROR,
}
return status_mapping.get(docker_status.lower(), SandboxStatus.ERROR)
def _get_container_env_vars(self, container) -> dict[str, str | None]:
env_vars_list = container.attrs['Config']['Env']
result = {}
for env_var in env_vars_list:
if '=' in env_var:
key, value = env_var.split('=', 1)
result[key] = value
else:
# Handle cases where an environment variable might not have a value
result[env_var] = None
return result
async def _container_to_sandbox_info(self, container) -> SandboxInfo | None:
"""Convert Docker container to SandboxInfo."""
# Convert Docker status to runtime status
status = self._docker_status_to_sandbox_status(container.status)
# Parse creation time
created_str = container.attrs.get('Created', '')
try:
created_at = datetime.fromisoformat(created_str.replace('Z', '+00:00'))
except (ValueError, AttributeError):
created_at = utc_now()
# Get URL and session key for running containers
exposed_urls = None
session_api_key = None
if status == SandboxStatus.RUNNING:
# Get the first exposed port mapping
exposed_urls = []
port_bindings = container.attrs.get('NetworkSettings', {}).get('Ports', {})
if port_bindings:
for container_port, host_bindings in port_bindings.items():
if host_bindings:
host_port = host_bindings[0]['HostPort']
exposed_port = next(
(
exposed_port
for exposed_port in self.exposed_ports
if container_port
== f'{exposed_port.container_port}/tcp'
),
None,
)
if exposed_port:
exposed_urls.append(
ExposedUrl(
name=exposed_port.name,
url=self.container_url_pattern.format(
port=host_port
),
)
)
# Get session API key
env = self._get_container_env_vars(container)
session_api_key = env[SESSION_API_KEY_VARIABLE]
return SandboxInfo(
id=container.name,
created_by_user_id=None,
sandbox_spec_id=container.image.tags[0],
status=status,
session_api_key=session_api_key,
exposed_urls=exposed_urls,
created_at=created_at,
)
async def _container_to_checked_sandbox_info(self, container) -> SandboxInfo | None:
sandbox_info = await self._container_to_sandbox_info(container)
if (
sandbox_info
and self.health_check_path is not None
and sandbox_info.exposed_urls
):
app_server_url = next(
exposed_url.url
for exposed_url in sandbox_info.exposed_urls
if exposed_url.name == AGENT_SERVER
)
try:
response = await self.httpx_client.get(
f'{app_server_url}{self.health_check_path}'
)
response.raise_for_status()
except asyncio.CancelledError:
raise
except Exception as exc:
_logger.info(f'Sandbox server not running: {exc}')
sandbox_info.status = SandboxStatus.ERROR
sandbox_info.exposed_urls = None
sandbox_info.session_api_key = None
return sandbox_info
async def search_sandboxes(
self,
page_id: str | None = None,
limit: int = 100,
) -> SandboxPage:
"""Search for sandboxes."""
try:
# Get all containers with our prefix
all_containers = self.docker_client.containers.list(all=True)
sandboxes = []
for container in all_containers:
if container.name.startswith(self.container_name_prefix):
sandbox_info = await self._container_to_checked_sandbox_info(
container
)
if sandbox_info:
sandboxes.append(sandbox_info)
# Sort by creation time (newest first)
sandboxes.sort(key=lambda x: x.created_at, reverse=True)
# Apply pagination
start_idx = 0
if page_id:
try:
start_idx = int(page_id)
except ValueError:
start_idx = 0
end_idx = start_idx + limit
paginated_sandboxes = sandboxes[start_idx:end_idx]
# Determine next page ID
next_page_id = None
if end_idx < len(sandboxes):
next_page_id = str(end_idx)
return SandboxPage(items=paginated_sandboxes, next_page_id=next_page_id)
except APIError:
return SandboxPage(items=[], next_page_id=None)
async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None:
"""Get a single sandbox info."""
try:
if not sandbox_id.startswith(self.container_name_prefix):
return None
container = self.docker_client.containers.get(sandbox_id)
return await self._container_to_checked_sandbox_info(container)
except (NotFound, APIError):
return None
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
"""Start a new sandbox."""
if sandbox_spec_id is None:
sandbox_spec = await self.sandbox_spec_service.get_default_sandbox_spec()
else:
sandbox_spec_maybe = await self.sandbox_spec_service.get_sandbox_spec(
sandbox_spec_id
)
if sandbox_spec_maybe is None:
raise ValueError('Sandbox Spec not found')
sandbox_spec = sandbox_spec_maybe
# Generate container ID and session api key
container_name = (
f'{self.container_name_prefix}{base62.encodebytes(os.urandom(16))}'
)
session_api_key = base62.encodebytes(os.urandom(32))
# Prepare environment variables
env_vars = sandbox_spec.initial_env.copy()
env_vars[SESSION_API_KEY_VARIABLE] = session_api_key
env_vars[WEBHOOK_CALLBACK_VARIABLE] = (
f'http://host.docker.internal:{self.host_port}'
f'/api/v1/webhooks/{container_name}'
)
# Prepare port mappings and add port environment variables
port_mappings = {}
for exposed_port in self.exposed_ports:
host_port = self._find_unused_port()
port_mappings[exposed_port.container_port] = host_port
# Add port as environment variable
env_vars[exposed_port.name] = str(host_port)
# Prepare labels
labels = {
'sandbox_spec_id': sandbox_spec.id,
}
# Prepare volumes
volumes = {
mount.host_path: {
'bind': mount.container_path,
'mode': mount.mode,
}
for mount in self.mounts
}
try:
# Create and start the container
container = self.docker_client.containers.run( # type: ignore[call-overload]
image=sandbox_spec.id,
command=sandbox_spec.command, # Use default command from image
remove=False,
name=container_name,
environment=env_vars,
ports=port_mappings,
volumes=volumes,
working_dir=sandbox_spec.working_dir,
labels=labels,
detach=True,
)
sandbox_info = await self._container_to_sandbox_info(container)
assert sandbox_info is not None
return sandbox_info
except APIError as e:
raise SandboxError(f'Failed to start container: {e}')
async def resume_sandbox(self, sandbox_id: str) -> bool:
"""Resume a paused sandbox."""
try:
if not sandbox_id.startswith(self.container_name_prefix):
return False
container = self.docker_client.containers.get(sandbox_id)
if container.status == 'paused':
container.unpause()
elif container.status == 'exited':
container.start()
return True
except (NotFound, APIError):
return False
async def pause_sandbox(self, sandbox_id: str) -> bool:
"""Pause a running sandbox."""
try:
if not sandbox_id.startswith(self.container_name_prefix):
return False
container = self.docker_client.containers.get(sandbox_id)
if container.status == 'running':
container.pause()
return True
except (NotFound, APIError):
return False
async def delete_sandbox(self, sandbox_id: str) -> bool:
"""Delete a sandbox."""
try:
if not sandbox_id.startswith(self.container_name_prefix):
return False
container = self.docker_client.containers.get(sandbox_id)
# Stop the container if it's running
if container.status in ['running', 'paused']:
container.stop(timeout=10)
# Remove the container
container.remove()
# Remove associated volume
try:
volume_name = f'openhands-workspace-{sandbox_id}'
volume = self.docker_client.volumes.get(volume_name)
volume.remove()
except (NotFound, APIError):
# Volume might not exist or already removed
pass
return True
except (NotFound, APIError):
return False
class DockerSandboxServiceInjector(SandboxServiceInjector):
"""Dependency injector for docker sandbox services."""
container_url_pattern: str = 'http://localhost:{port}'
host_port: int = 3000
container_name_prefix: str = 'oh-agent-server-'
mounts: list[VolumeMount] = Field(default_factory=list)
exposed_ports: list[ExposedPort] = Field(
default_factory=lambda: [
ExposedPort(
name=AGENT_SERVER,
description=(
'The port on which the agent server runs within the container'
),
container_port=8000,
),
ExposedPort(
name=VSCODE,
description=(
'The port on which the VSCode server runs within the container'
),
container_port=8001,
),
]
)
health_check_path: str | None = Field(
default='/health',
description=(
'The url path in the sandbox agent server to check to '
'determine whether the server is running'
),
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxService, None]:
# Define inline to prevent circular lookup
from openhands.app_server.config import (
get_httpx_client,
get_sandbox_spec_service,
)
async with (
get_httpx_client(state) as httpx_client,
get_sandbox_spec_service(state) as sandbox_spec_service,
):
yield DockerSandboxService(
sandbox_spec_service=sandbox_spec_service,
container_name_prefix=self.container_name_prefix,
host_port=self.host_port,
container_url_pattern=self.container_url_pattern,
mounts=self.mounts,
exposed_ports=self.exposed_ports,
health_check_path=self.health_check_path,
httpx_client=httpx_client,
)

View File

@ -0,0 +1,90 @@
import asyncio
import logging
from typing import AsyncGenerator
import docker
from fastapi import Request
from pydantic import Field
from openhands.app_server.errors import SandboxError
from openhands.app_server.sandbox.preset_sandbox_spec_service import (
PresetSandboxSpecService,
)
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
AGENT_SERVER_VERSION,
SandboxSpecService,
SandboxSpecServiceInjector,
)
from openhands.app_server.services.injector import InjectorState
_global_docker_client: docker.DockerClient | None = None
_logger = logging.getLogger(__name__)
def get_docker_client() -> docker.DockerClient:
global _global_docker_client
if _global_docker_client is None:
_global_docker_client = docker.from_env()
return _global_docker_client
def get_default_sandbox_specs():
return [
SandboxSpecInfo(
id=f'ghcr.io/all-hands-ai/agent-server:{AGENT_SERVER_VERSION[:7]}-python',
command=['--port', '8000'],
initial_env={
'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server',
'OH_ENABLE_VNC': '0',
'LOG_JSON': 'true',
'OH_CONVERSATIONS_PATH': '/home/openhands/conversations',
'OH_BASH_EVENTS_DIR': '/home/openhands/bash_events',
},
working_dir='/home/openhands/workspace',
)
]
class DockerSandboxSpecServiceInjector(SandboxSpecServiceInjector):
specs: list[SandboxSpecInfo] = Field(
default_factory=get_default_sandbox_specs,
description='Preset list of sandbox specs',
)
pull_if_missing: bool = Field(
default=True,
description=(
'Flag indicating that any missing specs should be pulled from '
'remote repositories.'
),
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxSpecService, None]:
if self.pull_if_missing:
await self.pull_missing_specs()
# Prevent repeated checks. This is more efficient, but it means that if you
# delete a Docker image outside the app, you need to restart the app.
self.pull_if_missing = False
yield PresetSandboxSpecService(specs=self.specs)
async def pull_missing_specs(self):
await asyncio.gather(*[self.pull_spec_if_missing(spec) for spec in self.specs])
async def pull_spec_if_missing(self, spec: SandboxSpecInfo):
_logger.debug(f'Checking Docker Image: {spec.id}')
try:
docker_client = get_docker_client()
try:
docker_client.images.get(spec.id)
except docker.errors.ImageNotFound:
_logger.info(f'⬇️ Pulling Docker Image: {spec.id}')
# Pull in a background thread to prevent locking up the main runloop
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, docker_client.images.pull, spec.id)
_logger.info(f'⬇️ Finished Pulling Docker Image: {spec.id}')
except docker.errors.APIError as exc:
raise SandboxError(f'Error Getting Docker Image: {spec.id}') from exc

View File

@ -0,0 +1,48 @@
from dataclasses import dataclass
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
SandboxSpecInfoPage,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
SandboxSpecService,
)
@dataclass
class PresetSandboxSpecService(SandboxSpecService):
"""Service which uses a preset set of sandbox specs."""
specs: list[SandboxSpecInfo]
async def search_sandbox_specs(
self, page_id: str | None = None, limit: int = 100
) -> SandboxSpecInfoPage:
"""Search for sandbox specs with pagination support."""
# Apply pagination
start_idx = 0
if page_id:
try:
start_idx = int(page_id)
except ValueError:
start_idx = 0
end_idx = start_idx + limit
paginated_specs = self.specs[start_idx:end_idx]
# Determine next page ID
next_page_id = None
if end_idx < len(self.specs):
next_page_id = str(end_idx)
return SandboxSpecInfoPage(items=paginated_specs, next_page_id=next_page_id)
async def get_sandbox_spec(self, sandbox_spec_id: str) -> SandboxSpecInfo | None:
"""Get a single sandbox spec by ID, returning None if not found."""
for spec in self.specs:
if spec.id == sandbox_spec_id:
return spec
return None
async def get_default_sandbox_spec(self) -> SandboxSpecInfo:
return self.specs[0]

View File

@ -0,0 +1,438 @@
"""Process-based sandbox service implementation.
This service creates sandboxes by spawning separate agent server processes,
each running within a dedicated directory.
"""
import asyncio
import logging
import os
import socket
import subprocess
import sys
import time
from dataclasses import dataclass
from datetime import datetime
from typing import AsyncGenerator
import base62
import httpx
import psutil
from fastapi import Request
from pydantic import BaseModel, ConfigDict, Field
from openhands.agent_server.utils import utc_now
from openhands.app_server.errors import SandboxError
from openhands.app_server.sandbox.sandbox_models import (
AGENT_SERVER,
ExposedUrl,
SandboxInfo,
SandboxPage,
SandboxStatus,
)
from openhands.app_server.sandbox.sandbox_service import (
SandboxService,
SandboxServiceInjector,
)
from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
from openhands.app_server.services.injector import InjectorState
_logger = logging.getLogger(__name__)
class ProcessInfo(BaseModel):
"""Information about a running process."""
pid: int
port: int
user_id: str | None
working_dir: str
session_api_key: str
created_at: datetime
sandbox_spec_id: str
model_config = ConfigDict(frozen=True)
# Global in-memory store of running sandbox processes, keyed by sandbox ID
_processes: dict[str, ProcessInfo] = {}
@dataclass
class ProcessSandboxService(SandboxService):
"""Sandbox service that spawns separate agent server processes.
Each sandbox is implemented as a separate Python process running the
agent server, with each process:
- Operating in a dedicated directory
- Listening on a unique port
- Having its own session API key
"""
user_id: str | None
sandbox_spec_service: SandboxSpecService
base_working_dir: str
base_port: int
python_executable: str
agent_server_module: str
health_check_path: str
httpx_client: httpx.AsyncClient
def __post_init__(self):
"""Initialize the service after dataclass creation."""
# Ensure base working directory exists
os.makedirs(self.base_working_dir, exist_ok=True)
def _find_unused_port(self) -> int:
"""Find an unused port starting from base_port."""
port = self.base_port
while port < self.base_port + 10000: # Try up to 10000 ports
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(('', port))
return port
except OSError:
port += 1
raise SandboxError('No available ports found')
def _create_sandbox_directory(self, sandbox_id: str) -> str:
"""Create a dedicated directory for the sandbox."""
sandbox_dir = os.path.join(self.base_working_dir, sandbox_id)
os.makedirs(sandbox_dir, exist_ok=True)
return sandbox_dir
async def _start_agent_process(
self,
sandbox_id: str,
port: int,
working_dir: str,
session_api_key: str,
sandbox_spec: SandboxSpecInfo,
) -> subprocess.Popen:
"""Start the agent server process."""
# Prepare environment variables
env = os.environ.copy()
env.update(sandbox_spec.initial_env)
env['SESSION_API_KEY'] = session_api_key
# Prepare command arguments
cmd = [
self.python_executable,
'-m',
self.agent_server_module,
'--port',
str(port),
]
_logger.info(
f'Starting agent process for sandbox {sandbox_id}: {" ".join(cmd)}'
)
try:
# Start the process
process = subprocess.Popen(
cmd,
env=env,
cwd=working_dir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# Wait a moment for the process to start
await asyncio.sleep(1)
# Check if process is still running
if process.poll() is not None:
stdout, stderr = process.communicate()
raise SandboxError(f'Agent process failed to start: {stderr.decode()}')
return process
except Exception as e:
raise SandboxError(f'Failed to start agent process: {e}')
async def _wait_for_server_ready(self, port: int, timeout: int = 30) -> bool:
"""Wait for the agent server to be ready."""
start_time = time.time()
while time.time() - start_time < timeout:
try:
response = await self.httpx_client.get(
f'http://localhost:{port}/alive', timeout=5.0
)
if response.status_code == 200:
data = response.json()
if data.get('status') == 'ok':
return True
except Exception:
pass
await asyncio.sleep(1)
return False
def _get_process_status(self, process_info: ProcessInfo) -> SandboxStatus:
"""Get the status of a process."""
try:
process = psutil.Process(process_info.pid)
if process.is_running():
status = process.status()
if status == psutil.STATUS_RUNNING:
return SandboxStatus.RUNNING
elif status == psutil.STATUS_STOPPED:
return SandboxStatus.PAUSED
else:
return SandboxStatus.STARTING
else:
return SandboxStatus.MISSING
except (psutil.NoSuchProcess, psutil.AccessDenied):
return SandboxStatus.MISSING
async def _process_to_sandbox_info(
self, sandbox_id: str, process_info: ProcessInfo
) -> SandboxInfo:
"""Convert process info to sandbox info."""
status = self._get_process_status(process_info)
exposed_urls = None
session_api_key = None
if status == SandboxStatus.RUNNING:
# Check if server is actually responding
try:
response = await self.httpx_client.get(
f'http://localhost:{process_info.port}{self.health_check_path}',
timeout=5.0,
)
if response.status_code == 200:
exposed_urls = [
ExposedUrl(
name=AGENT_SERVER,
url=f'http://localhost:{process_info.port}',
),
]
session_api_key = process_info.session_api_key
else:
status = SandboxStatus.ERROR
except Exception:
status = SandboxStatus.ERROR
return SandboxInfo(
id=sandbox_id,
created_by_user_id=process_info.user_id,
sandbox_spec_id=process_info.sandbox_spec_id,
status=status,
session_api_key=session_api_key,
exposed_urls=exposed_urls,
created_at=process_info.created_at,
)
async def search_sandboxes(
self,
page_id: str | None = None,
limit: int = 100,
) -> SandboxPage:
"""Search for sandboxes."""
# Get all process infos
all_processes = list(_processes.items())
# Sort by creation time (newest first)
all_processes.sort(key=lambda x: x[1].created_at, reverse=True)
# Apply pagination
start_idx = 0
if page_id:
try:
start_idx = int(page_id)
except ValueError:
start_idx = 0
end_idx = start_idx + limit
paginated_processes = all_processes[start_idx:end_idx]
# Convert to sandbox infos
items = []
for sandbox_id, process_info in paginated_processes:
sandbox_info = await self._process_to_sandbox_info(sandbox_id, process_info)
items.append(sandbox_info)
# Determine next page ID
next_page_id = None
if end_idx < len(all_processes):
next_page_id = str(end_idx)
return SandboxPage(items=items, next_page_id=next_page_id)
async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None:
"""Get a single sandbox."""
process_info = _processes.get(sandbox_id)
if process_info is None:
return None
return await self._process_to_sandbox_info(sandbox_id, process_info)
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
"""Start a new sandbox."""
# Get sandbox spec
if sandbox_spec_id is None:
sandbox_spec = await self.sandbox_spec_service.get_default_sandbox_spec()
else:
sandbox_spec_maybe = await self.sandbox_spec_service.get_sandbox_spec(
sandbox_spec_id
)
if sandbox_spec_maybe is None:
raise ValueError('Sandbox Spec not found')
sandbox_spec = sandbox_spec_maybe
# Generate unique sandbox ID and session API key
sandbox_id = base62.encodebytes(os.urandom(16))
session_api_key = base62.encodebytes(os.urandom(32))
# Find available port
port = self._find_unused_port()
# Create sandbox directory
working_dir = self._create_sandbox_directory(sandbox_id)
# Start the agent process
process = await self._start_agent_process(
sandbox_id=sandbox_id,
port=port,
working_dir=working_dir,
session_api_key=session_api_key,
sandbox_spec=sandbox_spec,
)
# Store process info
process_info = ProcessInfo(
pid=process.pid,
port=port,
user_id=self.user_id,
working_dir=working_dir,
session_api_key=session_api_key,
created_at=utc_now(),
sandbox_spec_id=sandbox_spec.id,
)
_processes[sandbox_id] = process_info
# Wait for server to be ready
if not await self._wait_for_server_ready(port):
# Clean up if server didn't start properly
await self.delete_sandbox(sandbox_id)
raise SandboxError('Agent Server Failed to start properly')
return await self._process_to_sandbox_info(sandbox_id, process_info)
async def resume_sandbox(self, sandbox_id: str) -> bool:
"""Resume a paused sandbox."""
process_info = _processes.get(sandbox_id)
if process_info is None:
return False
try:
process = psutil.Process(process_info.pid)
if process.status() == psutil.STATUS_STOPPED:
process.resume()
return True
except (psutil.NoSuchProcess, psutil.AccessDenied):
return False
async def pause_sandbox(self, sandbox_id: str) -> bool:
"""Pause a running sandbox."""
process_info = _processes.get(sandbox_id)
if process_info is None:
return False
try:
process = psutil.Process(process_info.pid)
if process.is_running():
process.suspend()
return True
except (psutil.NoSuchProcess, psutil.AccessDenied):
return False
async def delete_sandbox(self, sandbox_id: str) -> bool:
"""Delete a sandbox."""
process_info = _processes.get(sandbox_id)
if process_info is None:
return False
try:
# Terminate the process
process = psutil.Process(process_info.pid)
if process.is_running():
# Try graceful termination first
process.terminate()
try:
process.wait(timeout=10)
except psutil.TimeoutExpired:
# Force kill if graceful termination fails
process.kill()
process.wait(timeout=5)
# Clean up the working directory
import shutil
if os.path.exists(process_info.working_dir):
shutil.rmtree(process_info.working_dir, ignore_errors=True)
# Remove from our tracking
del _processes[sandbox_id]
return True
except (psutil.NoSuchProcess, psutil.AccessDenied, OSError) as e:
_logger.warning(f'Error deleting sandbox {sandbox_id}: {e}')
# Still remove from tracking even if cleanup failed
if sandbox_id in _processes:
del _processes[sandbox_id]
return True
class ProcessSandboxServiceInjector(SandboxServiceInjector):
"""Dependency injector for process sandbox services."""
base_working_dir: str = Field(
default='/tmp/openhands-sandboxes',
description='Base directory for sandbox working directories',
)
base_port: int = Field(
default=8000, description='Base port number for agent servers'
)
python_executable: str = Field(
default=sys.executable,
description='Python executable to use for agent processes',
)
agent_server_module: str = Field(
default='openhands.agent_server',
description='Python module for the agent server',
)
health_check_path: str = Field(
default='/alive', description='Health check endpoint path'
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxService, None]:
# Define inline to prevent circular lookup
from openhands.app_server.config import (
get_httpx_client,
get_sandbox_spec_service,
get_user_context,
)
async with (
get_httpx_client(state, request) as httpx_client,
get_sandbox_spec_service(state, request) as sandbox_spec_service,
get_user_context(state, request) as user_context,
):
user_id = await user_context.get_user_id()
yield ProcessSandboxService(
user_id=user_id,
sandbox_spec_service=sandbox_spec_service,
base_working_dir=self.base_working_dir,
base_port=self.base_port,
python_executable=self.python_executable,
agent_server_module=self.agent_server_module,
health_check_path=self.health_check_path,
httpx_client=httpx_client,
)

View File

@ -0,0 +1,43 @@
from typing import AsyncGenerator
from fastapi import Request
from pydantic import Field
from openhands.app_server.sandbox.preset_sandbox_spec_service import (
PresetSandboxSpecService,
)
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
AGENT_SERVER_VERSION,
SandboxSpecService,
SandboxSpecServiceInjector,
)
from openhands.app_server.services.injector import InjectorState
def get_default_sandbox_specs():
return [
SandboxSpecInfo(
id=AGENT_SERVER_VERSION,
command=['python', '-m', 'openhands.agent_server'],
initial_env={
# VSCode disabled for now
'OH_ENABLE_VS_CODE': '0',
},
working_dir='',
)
]
class ProcessSandboxSpecServiceInjector(SandboxSpecServiceInjector):
specs: list[SandboxSpecInfo] = Field(
default_factory=get_default_sandbox_specs,
description='Preset list of sandbox specs',
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxSpecService, None]:
yield PresetSandboxSpecService(specs=self.specs)

View File

@ -0,0 +1,615 @@
import asyncio
import logging
import os
from dataclasses import dataclass
from typing import Any, AsyncGenerator, Union
import base62
import httpx
from fastapi import Request
from pydantic import Field
from sqlalchemy import Column, String, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from openhands.agent_server.models import ConversationInfo, EventPage
from openhands.agent_server.utils import utc_now
from openhands.app_server.app_conversation.app_conversation_info_service import (
AppConversationInfoService,
)
from openhands.app_server.app_conversation.app_conversation_models import (
AppConversationInfo,
)
from openhands.app_server.errors import SandboxError
from openhands.app_server.event.event_service import EventService
from openhands.app_server.event_callback.event_callback_service import (
EventCallbackService,
)
from openhands.app_server.sandbox.sandbox_models import (
AGENT_SERVER,
ExposedUrl,
SandboxInfo,
SandboxPage,
SandboxStatus,
)
from openhands.app_server.sandbox.sandbox_service import (
SandboxService,
SandboxServiceInjector,
)
from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo
from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService
from openhands.app_server.services.injector import InjectorState
from openhands.app_server.user.specifiy_user_context import ADMIN, USER_CONTEXT_ATTR
from openhands.app_server.user.user_context import UserContext
from openhands.app_server.utils.sql_utils import Base, UtcDateTime
_logger = logging.getLogger(__name__)
WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL'
polling_task: asyncio.Task | None = None
POD_STATUS_MAPPING = {
'ready': SandboxStatus.RUNNING,
'pending': SandboxStatus.STARTING,
'running': SandboxStatus.STARTING,
'failed': SandboxStatus.ERROR,
'unknown': SandboxStatus.ERROR,
'crashloopbackoff': SandboxStatus.ERROR,
}
STATUS_MAPPING = {
'running': SandboxStatus.RUNNING,
'paused': SandboxStatus.PAUSED,
'stopped': SandboxStatus.MISSING,
'starting': SandboxStatus.STARTING,
'error': SandboxStatus.ERROR,
}
class StoredRemoteSandbox(Base): # type: ignore
"""Local storage for remote sandbox info.
The remote runtime API does not return some variables we need, and does not
return stopped runtimes in list operations, so we keep a local copy. We use
the remote API as the source of truth for what is currently running, not what was
run historically."""
__tablename__ = 'v1_remote_sandbox'
id = Column(String, primary_key=True)
created_by_user_id = Column(String, nullable=True, index=True)
sandbox_spec_id = Column(String, index=True) # shadows runtime['image']
created_at = Column(UtcDateTime, server_default=func.now(), index=True)
@dataclass
class RemoteSandboxService(SandboxService):
"""Sandbox service that uses HTTP to communicate with a remote runtime API.
This service adapts the legacy RemoteRuntime HTTP protocol to work with
the new Sandbox interface.
"""
sandbox_spec_service: SandboxSpecService
api_url: str
api_key: str
web_url: str | None
resource_factor: int
runtime_class: str | None
start_sandbox_timeout: int
user_context: UserContext
httpx_client: httpx.AsyncClient
db_session: AsyncSession
async def _send_runtime_api_request(
self, method: str, path: str, **kwargs: Any
) -> httpx.Response:
"""Send a request to the remote runtime API."""
try:
url = self.api_url + path
return await self.httpx_client.request(
method, url, headers={'X-API-Key': self.api_key}, **kwargs
)
except httpx.TimeoutException:
_logger.error(f'No response received within timeout for URL: {url}')
raise
except httpx.HTTPError as e:
_logger.error(f'HTTP error for URL {url}: {e}')
raise
async def _to_sandbox_info(
self, stored: StoredRemoteSandbox, runtime: dict[str, Any] | None = None
) -> SandboxInfo:
# If we were not passed runtime data, load some
if runtime is None:
try:
runtime = await self._get_runtime(stored.id)
except Exception:
_logger.exception(f'Error getting runtime: {stored.id}', stack_info=True)
if runtime:
# Translate status
status = None
pod_status = runtime['pod_status'].lower()
if pod_status:
status = POD_STATUS_MAPPING.get(pod_status, None)
# If we failed to get the status from the pod status, fall back to status
if status is None:
runtime_status = runtime.get('status')
if runtime_status:
status = STATUS_MAPPING.get(runtime_status.lower(), None)
if status is None:
status = SandboxStatus.MISSING
session_api_key = runtime['session_api_key']
if status == SandboxStatus.RUNNING:
exposed_urls = []
url = runtime.get('url', None)
if url:
exposed_urls.append(ExposedUrl(name=AGENT_SERVER, url=url))
else:
exposed_urls = None
else:
session_api_key = None
status = SandboxStatus.MISSING
exposed_urls = None
sandbox_spec_id = stored.sandbox_spec_id
return SandboxInfo(
id=stored.id,
created_by_user_id=stored.created_by_user_id,
sandbox_spec_id=sandbox_spec_id,
status=status,
session_api_key=session_api_key,
exposed_urls=exposed_urls,
created_at=stored.created_at,
)
async def _secure_select(self):
query = select(StoredRemoteSandbox)
user_id = await self.user_context.get_user_id()
if user_id:
query = query.where(StoredRemoteSandbox.created_by_user_id == user_id)
return query
async def _get_stored_sandbox(self, sandbox_id: str) -> StoredRemoteSandbox | None:
stmt = await self._secure_select()
stmt = stmt.where(StoredRemoteSandbox.id == sandbox_id)
result = await self.db_session.execute(stmt)
stored_sandbox = result.scalar_one_or_none()
return stored_sandbox
async def _get_runtime(self, sandbox_id: str) -> dict[str, Any]:
response = await self._send_runtime_api_request(
'GET',
f'/sessions/{sandbox_id}',
)
response.raise_for_status()
runtime_data = response.json()
return runtime_data
async def _init_environment(
self, sandbox_spec: SandboxSpecInfo, sandbox_id: str
) -> dict[str, str]:
"""Initialize the environment variables for the sandbox."""
environment = sandbox_spec.initial_env.copy()
# If a public-facing URL is defined, add a callback to the agent server environment.
if self.web_url:
environment[WEBHOOK_CALLBACK_VARIABLE] = (
f'{self.web_url}/api/v1/webhooks/{sandbox_id}'
)
return environment
async def search_sandboxes(
self,
page_id: str | None = None,
limit: int = 100,
) -> SandboxPage:
stmt = await self._secure_select()
# Handle pagination
if page_id is not None:
# Parse page_id to get offset or cursor
try:
offset = int(page_id)
stmt = stmt.offset(offset)
except ValueError:
# If page_id is not a valid integer, start from beginning
offset = 0
else:
offset = 0
# Apply limit and get one extra to check if there are more results
stmt = stmt.limit(limit + 1).order_by(StoredRemoteSandbox.created_at.desc())
result = await self.db_session.execute(stmt)
stored_sandboxes = result.scalars().all()
# Check if there are more results
has_more = len(stored_sandboxes) > limit
if has_more:
stored_sandboxes = stored_sandboxes[:limit]
# Calculate next page ID
next_page_id = None
if has_more:
next_page_id = str(offset + limit)
# Convert stored callbacks to domain models
items = await asyncio.gather(
*[
self._to_sandbox_info(stored_sandbox)
for stored_sandbox in stored_sandboxes
]
)
return SandboxPage(items=items, next_page_id=next_page_id)
async def get_sandbox(self, sandbox_id: str) -> Union[SandboxInfo, None]:
"""Get a single sandbox by checking its corresponding runtime."""
stored_sandbox = await self._get_stored_sandbox(sandbox_id)
if stored_sandbox is None:
return None
return await self._to_sandbox_info(stored_sandbox)
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
"""Start a new sandbox by creating a remote runtime."""
try:
# Get sandbox spec
if sandbox_spec_id is None:
sandbox_spec = (
await self.sandbox_spec_service.get_default_sandbox_spec()
)
else:
sandbox_spec_maybe = await self.sandbox_spec_service.get_sandbox_spec(
sandbox_spec_id
)
if sandbox_spec_maybe is None:
raise ValueError('Sandbox Spec not found')
sandbox_spec = sandbox_spec_maybe
# Create a unique id
sandbox_id = base62.encodebytes(os.urandom(16))
# get user id
user_id = await self.user_context.get_user_id()
# Store the sandbox
stored_sandbox = StoredRemoteSandbox(
id=sandbox_id,
created_by_user_id=user_id,
sandbox_spec_id=sandbox_spec.id,
created_at=utc_now(),
)
self.db_session.add(stored_sandbox)
await self.db_session.commit()
# Prepare environment variables
environment = await self._init_environment(sandbox_spec, sandbox_id)
# Prepare start request
start_request: dict[str, Any] = {
'image': sandbox_spec.id, # Use sandbox_spec.id as the container image
'command': sandbox_spec.command,
#'command': ['python', '-c', 'import time; time.sleep(300)'],
'working_dir': sandbox_spec.working_dir,
'environment': environment,
'session_id': sandbox_id, # Use sandbox_id as session_id
'resource_factor': self.resource_factor,
'run_as_user': 1000,
'run_as_group': 1000,
'fs_group': 1000,
}
# Add runtime class if specified
if self.runtime_class == 'sysbox':
start_request['runtime_class'] = 'sysbox-runc'
# Start the runtime
response = await self._send_runtime_api_request(
'POST',
'/start',
json=start_request,
)
response.raise_for_status()
runtime_data = response.json()
# Hack - result doesn't contain this
runtime_data['pod_status'] = 'pending'
return await self._to_sandbox_info(stored_sandbox, runtime_data)
except httpx.HTTPError as e:
_logger.error(f'Failed to start sandbox: {e}')
raise SandboxError(f'Failed to start sandbox: {e}')
async def resume_sandbox(self, sandbox_id: str) -> bool:
"""Resume a paused sandbox."""
try:
if not await self._get_stored_sandbox(sandbox_id):
return False
runtime_data = await self._get_runtime(sandbox_id)
response = await self._send_runtime_api_request(
'POST',
'/resume',
json={'runtime_id': runtime_data['runtime_id']},
)
if response.status_code == 404:
return False
response.raise_for_status()
return True
except httpx.HTTPError as e:
_logger.error(f'Error resuming sandbox {sandbox_id}: {e}')
return False
async def pause_sandbox(self, sandbox_id: str) -> bool:
"""Pause a running sandbox."""
try:
if not await self._get_stored_sandbox(sandbox_id):
return False
runtime_data = await self._get_runtime(sandbox_id)
response = await self._send_runtime_api_request(
'POST',
'/pause',
json={'runtime_id': runtime_data['runtime_id']},
)
if response.status_code == 404:
return False
response.raise_for_status()
return True
except httpx.HTTPError as e:
_logger.error(f'Error pausing sandbox {sandbox_id}: {e}')
return False
async def delete_sandbox(self, sandbox_id: str) -> bool:
"""Delete a sandbox by stopping its runtime."""
try:
stored_sandbox = await self._get_stored_sandbox(sandbox_id)
if not stored_sandbox:
return False
await self.db_session.delete(stored_sandbox)
await self.db_session.commit()
runtime_data = await self._get_runtime(sandbox_id)
response = await self._send_runtime_api_request(
'POST',
'/stop',
json={'runtime_id': runtime_data['runtime_id']},
)
if response.status_code != 404:
response.raise_for_status()
return True
except httpx.HTTPError as e:
_logger.error(f'Error deleting sandbox {sandbox_id}: {e}')
return False
async def poll_agent_servers(api_url: str, api_key: str, sleep_interval: int):
"""When the app server does not have a public facing url, we poll the agent
servers for the most recent data.
This is because webhook callbacks cannot be invoked."""
from openhands.app_server.config import (
get_app_conversation_info_service,
get_event_callback_service,
get_event_service,
get_httpx_client,
)
while True:
try:
# Refresh the conversations associated with those sandboxes.
state = InjectorState()
try:
# Get the list of running sandboxes using the runtime api /list endpoint.
# (This will not return runtimes that have been stopped for a while)
async with get_httpx_client(state) as httpx_client:
response = await httpx_client.get(
f'{api_url}/list', headers={'X-API-Key': api_key}
)
response.raise_for_status()
runtimes = response.json()['runtimes']
runtimes_by_sandbox_id = {
runtime['session_id']: runtime
for runtime in runtimes
# The runtime API currently reports a running status when
# pods are still starting. Resync can tolerate this.
if runtime['status'] == 'running'
}
# We allow access to all items here
setattr(state, USER_CONTEXT_ATTR, ADMIN)
async with (
get_app_conversation_info_service(
state
) as app_conversation_info_service,
get_event_service(state) as event_service,
get_event_callback_service(state) as event_callback_service,
get_httpx_client(state) as httpx_client,
):
page_id = None
matches = 0
while True:
page = await app_conversation_info_service.search_app_conversation_info(
page_id=page_id
)
for app_conversation_info in page.items:
runtime = runtimes_by_sandbox_id.get(
app_conversation_info.sandbox_id
)
if runtime:
matches += 1
await refresh_conversation(
app_conversation_info_service=app_conversation_info_service,
event_service=event_service,
event_callback_service=event_callback_service,
app_conversation_info=app_conversation_info,
runtime=runtime,
httpx_client=httpx_client,
)
page_id = page.next_page_id
if page_id is None:
_logger.debug(
f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.'
)
break
except Exception as exc:
_logger.exception(
f'Error when polling agent servers: {exc}', stack_info=True
)
# Sleep between retries
await asyncio.sleep(sleep_interval)
except asyncio.CancelledError:
return
async def refresh_conversation(
app_conversation_info_service: AppConversationInfoService,
event_service: EventService,
event_callback_service: EventCallbackService,
app_conversation_info: AppConversationInfo,
runtime: dict[str, Any],
httpx_client: httpx.AsyncClient,
):
"""Refresh a conversation.
Grab ConversationInfo and all events from the agent server and make sure they
exist in the app server."""
_logger.debug(f'Started Refreshing Conversation {app_conversation_info.id}')
try:
url = runtime['url']
# TODO: Maybe we can use RemoteConversation here?
# First get conversation...
conversation_url = f'{url}/api/conversations/{app_conversation_info.id.hex}'
response = await httpx_client.get(
conversation_url, headers={'X-Session-API-Key': runtime['session_api_key']}
)
response.raise_for_status()
updated_conversation_info = ConversationInfo.model_validate(response.json())
# TODO: As of writing, ConversationInfo from AgentServer does not have a title to update...
app_conversation_info.updated_at = updated_conversation_info.updated_at
# TODO: Update other appropriate attributes...
await app_conversation_info_service.save_app_conversation_info(
app_conversation_info
)
# TODO: It would be nice to have an updated_at__gte filter parameter in the
# agent server so that we don't pull the full event list each time
event_url = (
f'{url}/api/conversations/{app_conversation_info.id.hex}/events/search'
)
page_id = None
while True:
params: dict[str, str] = {}
if page_id:
params['page_id'] = page_id # type: ignore[unreachable]
response = await httpx_client.get(
event_url,
params=params,
headers={'X-Session-API-Key': runtime['session_api_key']},
)
response.raise_for_status()
page = EventPage.model_validate(response.json())
to_process = []
for event in page.items:
existing = await event_service.get_event(event.id)
if existing is None:
await event_service.save_event(app_conversation_info.id, event)
to_process.append(event)
for event in to_process:
await event_callback_service.execute_callbacks(
app_conversation_info.id, event
)
page_id = page.next_page_id
if page_id is None:
_logger.debug(
f'Finished Refreshing Conversation {app_conversation_info.id}'
)
break
except Exception as exc:
_logger.exception(f'Error Refreshing Conversation: {exc}', stack_info=True)
class RemoteSandboxServiceInjector(SandboxServiceInjector):
"""Dependency injector for remote sandbox services."""
api_url: str = Field(description='The API URL for remote runtimes')
api_key: str = Field(description='The API Key for remote runtimes')
polling_interval: int = Field(
default=15,
description=(
'The sleep time between poll operations against agent servers when there is '
'no public facing web_url'
),
)
resource_factor: int = Field(
default=1,
description='Factor by which to scale resources in sandbox: 1, 2, 4, or 8',
)
runtime_class: str = Field(
default='gvisor',
description='can be "gvisor" or "sysbox" (support docker inside runtime + more stable)',
)
start_sandbox_timeout: int = Field(
default=120,
description=(
'The max time to wait for a sandbox to start before considering it to '
'be in an error state.'
),
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxService, None]:
# Define inline to prevent circular lookup
from openhands.app_server.config import (
get_db_session,
get_global_config,
get_httpx_client,
get_sandbox_spec_service,
get_user_context,
)
# If no public-facing web URL is defined, poll for changes, as callbacks will be unavailable.
config = get_global_config()
web_url = config.web_url
if web_url is None:
global polling_task
if polling_task is None:
polling_task = asyncio.create_task(
poll_agent_servers(
api_url=self.api_url,
api_key=self.api_key,
sleep_interval=self.polling_interval,
)
)
async with (
get_user_context(state, request) as user_context,
get_sandbox_spec_service(state, request) as sandbox_spec_service,
get_httpx_client(state, request) as httpx_client,
get_db_session(state, request) as db_session,
):
yield RemoteSandboxService(
sandbox_spec_service=sandbox_spec_service,
api_url=self.api_url,
api_key=self.api_key,
web_url=web_url,
resource_factor=self.resource_factor,
runtime_class=self.runtime_class,
start_sandbox_timeout=self.start_sandbox_timeout,
user_context=user_context,
httpx_client=httpx_client,
db_session=db_session,
)

View File

@ -0,0 +1,46 @@
from typing import AsyncGenerator
from fastapi import Request
from pydantic import Field
from openhands.app_server.sandbox.preset_sandbox_spec_service import (
PresetSandboxSpecService,
)
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
AGENT_SERVER_VERSION,
SandboxSpecService,
SandboxSpecServiceInjector,
)
from openhands.app_server.services.injector import InjectorState
def get_default_sandbox_specs():
return [
SandboxSpecInfo(
id=f'ghcr.io/all-hands-ai/agent-server:{AGENT_SERVER_VERSION[:7]}-python',
command=['/usr/local/bin/openhands-agent-server', '--port', '60000'],
initial_env={
'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server',
'LOG_JSON': 'true',
'OH_ENABLE_VNC': '0',
'OH_CONVERSATIONS_PATH': '/workspace/conversations',
'OH_BASH_EVENTS_DIR': '/workspace/bash_events',
},
working_dir='/workspace/projects',
)
]
class RemoteSandboxSpecServiceInjector(SandboxSpecServiceInjector):
specs: list[SandboxSpecInfo] = Field(
default_factory=get_default_sandbox_specs,
description='Preset list of sandbox specs',
)
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[SandboxSpecService, None]:
yield PresetSandboxSpecService(self.specs)

View File

@ -0,0 +1,58 @@
from datetime import datetime
from enum import Enum
from pydantic import BaseModel, Field
from openhands.agent_server.utils import utc_now
class SandboxStatus(Enum):
STARTING = 'STARTING'
RUNNING = 'RUNNING'
PAUSED = 'PAUSED'
ERROR = 'ERROR'
MISSING = 'MISSING'
"""Missing - possibly deleted"""
class ExposedUrl(BaseModel):
"""URL to access some named service within the container."""
name: str
url: str
# Standard names
AGENT_SERVER = 'AGENT_SERVER'
VSCODE = 'VSCODE'
class SandboxInfo(BaseModel):
"""Information about a sandbox."""
id: str
created_by_user_id: str | None
sandbox_spec_id: str
status: SandboxStatus
session_api_key: str | None = Field(
description=(
'Key to access the sandbox, to be added as an `X-Session-API-Key` header '
'in each request. In cases where the sandbox status is STARTING or '
'PAUSED, or the current user does not have full access, '
'the session_api_key will be None.'
)
)
exposed_urls: list[ExposedUrl] | None = Field(
default_factory=lambda: [],
description=(
'URLs exposed by the sandbox (agent server, VSCode, etc.). '
'Sandboxes with a status of STARTING / PAUSED / ERROR may '
'not return URLs.'
),
)
created_at: datetime = Field(default_factory=utc_now)
class SandboxPage(BaseModel):
items: list[SandboxInfo]
next_page_id: str | None = None
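As a hedged illustration of how the fields above fit together (not part of this change), the sketch below checks a RUNNING sandbox's agent server using an exposed URL and the `X-Session-API-Key` header described on `session_api_key`. `SandboxInfo`, `SandboxStatus`, and `AGENT_SERVER` come from the model file above; the helper name and the `/alive` path (the health-check default used elsewhere in this diff) are assumptions.
import httpx

async def agent_server_is_alive(info: SandboxInfo) -> bool:
    # Only RUNNING sandboxes expose URLs and a session API key.
    if info.status != SandboxStatus.RUNNING or not info.session_api_key:
        return False
    # Pick the agent server URL out of the exposed URLs, if present.
    url = next((u.url for u in info.exposed_urls or [] if u.name == AGENT_SERVER), None)
    if url is None:
        return False
    async with httpx.AsyncClient() as client:
        response = await client.get(
            f'{url}/alive',
            headers={'X-Session-API-Key': info.session_api_key},
        )
        return response.status_code == 200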

View File

@ -0,0 +1,91 @@
"""Runtime Containers router for OpenHands Server."""
from typing import Annotated
from fastapi import APIRouter, HTTPException, Query, status
from openhands.agent_server.models import Success
from openhands.app_server.config import depends_sandbox_service
from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxPage
from openhands.app_server.sandbox.sandbox_service import (
SandboxService,
)
router = APIRouter(prefix='/sandboxes', tags=['Sandbox'])
sandbox_service_dependency = depends_sandbox_service()
# Read methods
@router.get('/search')
async def search_sandboxes(
page_id: Annotated[
str | None,
Query(title='Optional next_page_id from the previously returned page'),
] = None,
limit: Annotated[
int,
Query(title='The max number of results in the page', gt=0, le=100),
] = 100,
sandbox_service: SandboxService = sandbox_service_dependency,
) -> SandboxPage:
"""Search / list sandboxes owned by the current user."""
assert limit > 0
assert limit <= 100
return await sandbox_service.search_sandboxes(page_id=page_id, limit=limit)
@router.get('')
async def batch_get_sandboxes(
id: Annotated[list[str], Query()],
sandbox_service: SandboxService = sandbox_service_dependency,
) -> list[SandboxInfo | None]:
"""Get a batch of sandboxes given their ids, returning null for any missing."""
assert len(id) < 100
sandboxes = await sandbox_service.batch_get_sandboxes(id)
return sandboxes
# Write Methods
@router.post('')
async def start_sandbox(
sandbox_spec_id: str | None = None,
sandbox_service: SandboxService = sandbox_service_dependency,
) -> SandboxInfo:
info = await sandbox_service.start_sandbox(sandbox_spec_id)
return info
@router.post('/{sandbox_id}/pause', responses={404: {'description': 'Item not found'}})
async def pause_sandbox(
sandbox_id: str,
sandbox_service: SandboxService = sandbox_service_dependency,
) -> Success:
exists = await sandbox_service.pause_sandbox(sandbox_id)
if not exists:
raise HTTPException(status.HTTP_404_NOT_FOUND)
return Success()
@router.post('/{sandbox_id}/resume', responses={404: {'description': 'Item not found'}})
async def resume_sandbox(
sandbox_id: str,
sandbox_service: SandboxService = sandbox_service_dependency,
) -> Success:
exists = await sandbox_service.resume_sandbox(sandbox_id)
if not exists:
raise HTTPException(status.HTTP_404_NOT_FOUND)
return Success()
@router.delete('/{sandbox_id}', responses={404: {'description': 'Item not found'}})
async def delete_sandbox(
sandbox_id: str,
sandbox_service: SandboxService = sandbox_service_dependency,
) -> Success:
exists = await sandbox_service.delete_sandbox(sandbox_id)
if not exists:
raise HTTPException(status.HTTP_404_NOT_FOUND)
return Success()
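A hedged client-side sketch (not part of this change) of how the cursor-style pagination above can be consumed: the caller passes `next_page_id` back until it comes back as null. The helper name and base URL are assumptions, and authentication is omitted for brevity.
import httpx

async def list_all_sandboxes(base_url: str) -> list[dict]:
    """Collect every sandbox by following next_page_id cursors."""
    items: list[dict] = []
    page_id: str | None = None
    async with httpx.AsyncClient() as client:
        while True:
            params: dict[str, str | int] = {'limit': 100}
            if page_id is not None:
                params['page_id'] = page_id
            response = await client.get(f'{base_url}/sandboxes/search', params=params)
            response.raise_for_status()
            page = response.json()
            items.extend(page['items'])
            page_id = page.get('next_page_id')
            if page_id is None:
                return items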

View File

@ -0,0 +1,65 @@
import asyncio
from abc import ABC, abstractmethod
from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxPage
from openhands.app_server.services.injector import Injector
from openhands.sdk.utils.models import DiscriminatedUnionMixin
class SandboxService(ABC):
"""Service for accessing sandboxes in which conversations may be run."""
@abstractmethod
async def search_sandboxes(
self,
page_id: str | None = None,
limit: int = 100,
) -> SandboxPage:
"""Search for sandboxes."""
@abstractmethod
async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None:
"""Get a single sandbox. Return None if the sandbox was not found."""
async def batch_get_sandboxes(
self, sandbox_ids: list[str]
) -> list[SandboxInfo | None]:
"""Get a batch of sandboxes, returning None for any which were not found."""
results = await asyncio.gather(
*[self.get_sandbox(sandbox_id) for sandbox_id in sandbox_ids]
)
return results
@abstractmethod
async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo:
"""Begin the process of starting a sandbox.
Return the info on the new sandbox. If no spec is selected, use the default.
"""
@abstractmethod
async def resume_sandbox(self, sandbox_id: str) -> bool:
"""Begin the process of resuming a sandbox.
Return True if the sandbox exists and is being resumed or is already running.
Return False if the sandbox did not exist.
"""
@abstractmethod
async def pause_sandbox(self, sandbox_id: str) -> bool:
"""Begin the process of pausing a sandbox.
Return True if the sandbox exists and is being paused or is already paused.
Return False if the sandbox did not exist.
"""
@abstractmethod
async def delete_sandbox(self, sandbox_id: str) -> bool:
"""Begin the process of deleting a sandbox (which may involve stopping it).
Return False if the sandbox did not exist.
"""
class SandboxServiceInjector(DiscriminatedUnionMixin, Injector[SandboxService], ABC):
pass

View File

@ -0,0 +1,22 @@
from datetime import datetime
from pydantic import BaseModel, Field
from openhands.agent_server.utils import utc_now
class SandboxSpecInfo(BaseModel):
"""A template for creating a Sandbox (e.g: A Docker Image vs Container)."""
id: str
command: list[str] | None
created_at: datetime = Field(default_factory=utc_now)
initial_env: dict[str, str] = Field(
default_factory=dict, description='Initial Environment Variables'
)
working_dir: str = '/home/openhands/workspace'
class SandboxSpecInfoPage(BaseModel):
items: list[SandboxSpecInfo]
next_page_id: str | None = None

View File

@ -0,0 +1,49 @@
"""Runtime Images router for OpenHands Server."""
from typing import Annotated
from fastapi import APIRouter, Query
from openhands.app_server.config import depends_sandbox_spec_service
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
SandboxSpecInfoPage,
)
from openhands.app_server.sandbox.sandbox_spec_service import (
SandboxSpecService,
)
router = APIRouter(prefix='/sandbox-specs', tags=['Sandbox'])
sandbox_spec_service_dependency = depends_sandbox_spec_service()
# Read methods
@router.get('/search')
async def search_sandbox_specs(
page_id: Annotated[
str | None,
Query(title='Optional next_page_id from the previously returned page'),
] = None,
limit: Annotated[
int,
Query(title='The max number of results in the page', gt=0, le=100),
] = 100,
sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency,
) -> SandboxSpecInfoPage:
"""Search / List sandbox specs."""
assert limit > 0
assert limit <= 100
return await sandbox_spec_service.search_sandbox_specs(page_id=page_id, limit=limit)
@router.get('')
async def batch_get_sandbox_specs(
id: Annotated[list[str], Query()],
sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency,
) -> list[SandboxSpecInfo | None]:
"""Get a batch of sandbox specs given their ids, returning null for any missing."""
assert len(id) <= 100
sandbox_specs = await sandbox_spec_service.batch_get_sandbox_specs(id)
return sandbox_specs

View File

@ -0,0 +1,59 @@
import asyncio
from abc import ABC, abstractmethod
from openhands.app_server.errors import SandboxError
from openhands.app_server.sandbox.sandbox_spec_models import (
SandboxSpecInfo,
SandboxSpecInfoPage,
)
from openhands.app_server.services.injector import Injector
from openhands.sdk.utils.models import DiscriminatedUnionMixin
# The version of the agent server to use for deployments.
# Typically this will be the same as the values from the pyproject.toml
AGENT_SERVER_VERSION = '08cf609a996523c0199c61c768d74417b7e96109'
class SandboxSpecService(ABC):
"""Service for managing Sandbox specs.
At present this is read-only. The plan is for this class to later allow building
and deleting sandbox specs and limiting access by user and group. It would also be
useful to be able to set the desired number of warm sandboxes for a spec and scale
this up and down.
"""
@abstractmethod
async def search_sandbox_specs(
self, page_id: str | None = None, limit: int = 100
) -> SandboxSpecInfoPage:
"""Search for sandbox specs."""
@abstractmethod
async def get_sandbox_spec(self, sandbox_spec_id: str) -> SandboxSpecInfo | None:
"""Get a single sandbox spec, returning None if not found."""
async def get_default_sandbox_spec(self) -> SandboxSpecInfo:
"""Get the default sandbox spec."""
page = await self.search_sandbox_specs()
if not page.items:
raise SandboxError('No sandbox specs available!')
return page.items[0]
async def batch_get_sandbox_specs(
self, sandbox_spec_ids: list[str]
) -> list[SandboxSpecInfo | None]:
"""Get a batch of sandbox specs, returning None for any not found."""
results = await asyncio.gather(
*[
self.get_sandbox_spec(sandbox_spec_id)
for sandbox_spec_id in sandbox_spec_ids
]
)
return results
class SandboxSpecServiceInjector(
DiscriminatedUnionMixin, Injector[SandboxSpecService], ABC
):
pass

View File

@ -0,0 +1,19 @@
# Core Services
Provides essential services for authentication, security, and system operations.
## Overview
This module contains core services that support the OpenHands app server infrastructure, including authentication, token management, and security operations.
## Key Components
- **JwtService**: JSON Web Token signing, verification, and encryption
## JWT Service Features
- Token signing and verification for authentication
- JWE (JSON Web Encryption) support for sensitive data
- Multi-key support with key rotation capabilities
- Configurable algorithms (RS256, HS256, etc.)
- Secure token handling and validation
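The snippet below is only a rough sketch of the signing/verification flow these features describe; it uses the third-party PyJWT library rather than the `JwtService` API itself, whose exact methods are defined in this module and may differ.

```python
# Minimal, illustrative JWT round trip with PyJWT (not the JwtService API).
import jwt

SECRET = "example-secret"  # assumption: a symmetric HS256 key

# Sign a token carrying a subject claim.
token = jwt.encode({"sub": "user-123"}, SECRET, algorithm="HS256")

# Verify and decode it; raises jwt.InvalidTokenError if tampered or expired.
claims = jwt.decode(token, SECRET, algorithms=["HS256"])
assert claims["sub"] == "user-123"
```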

View File

@ -0,0 +1,300 @@
"""Database configuration and session management for OpenHands Server."""
import asyncio
import logging
import os
from pathlib import Path
from typing import AsyncGenerator
from fastapi import Request
from pydantic import BaseModel, PrivateAttr, SecretStr, model_validator
from sqlalchemy import Engine, create_engine
from sqlalchemy.engine import URL
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.ext.asyncio.engine import AsyncEngine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool
from sqlalchemy.util import await_only
from openhands.app_server.services.injector import Injector, InjectorState
_logger = logging.getLogger(__name__)
DB_SESSION_ATTR = 'db_session'
DB_SESSION_KEEP_OPEN_ATTR = 'db_session_keep_open'
class DbSessionInjector(BaseModel, Injector[async_sessionmaker]):
persistence_dir: Path
host: str | None = None
port: int | None = None
name: str | None = None
user: str | None = None
password: SecretStr | None = None
echo: bool = False
pool_size: int = 25
max_overflow: int = 10
gcp_db_instance: str | None = None
gcp_project: str | None = None
gcp_region: str | None = None
# Private attrs
_engine: Engine | None = PrivateAttr(default=None)
_async_engine: AsyncEngine | None = PrivateAttr(default=None)
_session_maker: sessionmaker | None = PrivateAttr(default=None)
_async_session_maker: async_sessionmaker | None = PrivateAttr(default=None)
@model_validator(mode='after')
def fill_empty_fields(self):
"""Override any defaults with values from legacy enviroment variables"""
if self.host is None:
self.host = os.getenv('DB_HOST')
if self.port is None:
self.port = int(os.getenv('DB_PORT', '5432'))
if self.name is None:
self.name = os.getenv('DB_NAME', 'openhands')
if self.user is None:
self.user = os.getenv('DB_USER', 'postgres')
if self.password is None:
self.password = SecretStr(os.getenv('DB_PASS', 'postgres').strip())
if self.gcp_db_instance is None:
self.gcp_db_instance = os.getenv('GCP_DB_INSTANCE')
if self.gcp_project is None:
self.gcp_project = os.getenv('GCP_PROJECT')
if self.gcp_region is None:
self.gcp_region = os.getenv('GCP_REGION')
return self
def _create_gcp_db_connection(self):
# Lazy import because the library fails to import if the user does not have Postgres installed
from google.cloud.sql.connector import Connector
connector = Connector()
instance_string = f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}'
password = self.password
assert password is not None
return connector.connect(
instance_string,
'pg8000',
user=self.user,
password=password.get_secret_value(),
db=self.name,
)
async def _create_async_gcp_db_connection(self):
# Lazy import because the library fails to import if the user does not have Postgres installed
from google.cloud.sql.connector import Connector
loop = asyncio.get_running_loop()
async with Connector(loop=loop) as connector:
password = self.password
assert password is not None
conn = await connector.connect_async(
f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}',
'asyncpg',
user=self.user,
password=password.get_secret_value(),
db=self.name,
)
return conn
def _create_gcp_engine(self):
engine = create_engine(
'postgresql+pg8000://',
creator=self._create_gcp_db_connection,
pool_size=self.pool_size,
max_overflow=self.max_overflow,
pool_pre_ping=True,
)
return engine
async def _create_async_gcp_creator(self):
from sqlalchemy.dialects.postgresql.asyncpg import (
AsyncAdapt_asyncpg_connection,
)
engine = self._create_gcp_engine()
return AsyncAdapt_asyncpg_connection(
engine.dialect.dbapi,
await self._create_async_gcp_db_connection(),
prepared_statement_cache_size=100,
)
async def _create_async_gcp_engine(self):
from sqlalchemy.dialects.postgresql.asyncpg import (
AsyncAdapt_asyncpg_connection,
)
base_engine = self._create_gcp_engine()
dbapi = base_engine.dialect.dbapi
def adapted_creator():
return AsyncAdapt_asyncpg_connection(
dbapi,
await_only(self._create_async_gcp_db_connection()),
prepared_statement_cache_size=100,
)
return create_async_engine(
'postgresql+asyncpg://',
creator=adapted_creator,
pool_size=self.pool_size,
max_overflow=self.max_overflow,
pool_pre_ping=True,
)
async def get_async_db_engine(self) -> AsyncEngine:
async_engine = self._async_engine
if async_engine:
return async_engine
if self.gcp_db_instance: # GCP environments
async_engine = await self._create_async_gcp_engine()
else:
if self.host:
try:
import asyncpg # noqa: F401
except Exception as e:
raise RuntimeError(
"PostgreSQL driver 'asyncpg' is required for async connections but is not installed."
) from e
password = self.password.get_secret_value() if self.password else None
url = URL.create(
'postgresql+asyncpg',
username=self.user or '',
password=password,
host=self.host,
port=self.port,
database=self.name,
)
else:
url = f'sqlite+aiosqlite:///{str(self.persistence_dir)}/openhands.db'
if self.host:
async_engine = create_async_engine(
url,
pool_size=self.pool_size,
max_overflow=self.max_overflow,
pool_pre_ping=True,
)
else:
async_engine = create_async_engine(
url,
poolclass=NullPool,
pool_pre_ping=True,
)
self._async_engine = async_engine
return async_engine
def get_db_engine(self) -> Engine:
engine = self._engine
if engine:
return engine
if self.gcp_db_instance: # GCP environments
engine = self._create_gcp_engine()
else:
if self.host:
try:
import pg8000 # noqa: F401
except Exception as e:
raise RuntimeError(
"PostgreSQL driver 'pg8000' is required for sync connections but is not installed."
) from e
password = self.password.get_secret_value() if self.password else None
url = URL.create(
'postgresql+pg8000',
username=self.user or '',
password=password,
host=self.host,
port=self.port,
database=self.name,
)
else:
url = f'sqlite:///{self.persistence_dir}/openhands.db'
engine = create_engine(
url,
pool_size=self.pool_size,
max_overflow=self.max_overflow,
pool_pre_ping=True,
)
self._engine = engine
return engine
def get_session_maker(self) -> sessionmaker:
session_maker = self._session_maker
if session_maker is None:
session_maker = sessionmaker(bind=self.get_db_engine())
self._session_maker = session_maker
return session_maker
async def get_async_session_maker(self) -> async_sessionmaker:
async_session_maker = self._async_session_maker
if async_session_maker is None:
db_engine = await self.get_async_db_engine()
async_session_maker = async_sessionmaker(
db_engine,
class_=AsyncSession,
expire_on_commit=False,
)
self._async_session_maker = async_session_maker
return async_session_maker
async def async_session(self) -> AsyncGenerator[AsyncSession, None]:
"""Dependency function that yields database sessions.
This function creates a new database session for each request
and ensures it's properly closed after use.
Yields:
AsyncSession: An async SQL session
"""
session_maker = await self.get_async_session_maker()
async with session_maker() as session:
try:
yield session
finally:
await session.close()
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[AsyncSession, None]:
"""Dependency function that manages database sessions through request state.
This function stores the database session in the request state to enable
session reuse across multiple dependencies within the same request.
If a session already exists in the request state, it returns that session.
Otherwise, it creates a new session and stores it in the request state.
Args:
request: The FastAPI request object
Yields:
AsyncSession: An async SQL session stored in request state
"""
db_session = getattr(state, DB_SESSION_ATTR, None)
if db_session:
yield db_session
else:
# Create a new session and store it in request state
session_maker = await self.get_async_session_maker()
db_session = session_maker()
try:
setattr(state, DB_SESSION_ATTR, db_session)
yield db_session
if not getattr(state, DB_SESSION_KEEP_OPEN_ATTR, False):
await db_session.commit()
except Exception:
_logger.exception('Rolling back SQL due to error', stack_info=True)
await db_session.rollback()
raise
finally:
# If instructed, do not close the db session at the end of the request.
if not getattr(state, DB_SESSION_KEEP_OPEN_ATTR, False):
# Clean up the session from request state
if hasattr(state, DB_SESSION_ATTR):
delattr(state, DB_SESSION_ATTR)
await db_session.close()
def set_db_session_keep_open(state: InjectorState, keep_open: bool):
"""Set whether the connection should be kept open after the request terminates."""
setattr(state, DB_SESSION_KEEP_OPEN_ATTR, keep_open)

View File

@ -0,0 +1,42 @@
from typing import AsyncGenerator
import httpx
from fastapi import Request
from pydantic import BaseModel, Field
from openhands.app_server.services.injector import Injector, InjectorState
HTTPX_CLIENT_ATTR = 'httpx_client'
HTTPX_CLIENT_KEEP_OPEN_ATTR = 'httpx_client_keep_open'
class HttpxClientInjector(BaseModel, Injector[httpx.AsyncClient]):
"""Injector for a httpx client. By keeping a single httpx client alive in the
context of server requests handshakes are minimized while connection pool leaks
are prevented."""
timeout: int = Field(default=15, description='Default timeout on all http requests')
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[httpx.AsyncClient, None]:
httpx_client = getattr(state, HTTPX_CLIENT_ATTR, None)
if httpx_client:
yield httpx_client
return
httpx_client = httpx.AsyncClient(timeout=self.timeout)
try:
setattr(state, HTTPX_CLIENT_ATTR, httpx_client)
yield httpx_client
finally:
# If instructed, do not close the httpx client at the end of the request.
if not getattr(state, HTTPX_CLIENT_KEEP_OPEN_ATTR, False):
# Clean up the httpx client from request state
if hasattr(state, HTTPX_CLIENT_ATTR):
delattr(state, HTTPX_CLIENT_ATTR)
await httpx_client.aclose()
def set_httpx_client_keep_open(state: InjectorState, keep_open: bool):
"""Set whether the connection should be kept open after the request terminates."""
setattr(state, HTTPX_CLIENT_KEEP_OPEN_ATTR, keep_open)

View File

@ -0,0 +1,34 @@
import contextlib
from abc import ABC, abstractmethod
from typing import AsyncGenerator, Generic, TypeAlias, TypeVar
from fastapi import Request
from starlette.datastructures import State
T = TypeVar('T')
InjectorState: TypeAlias = State
class Injector(Generic[T], ABC):
"""Object designed to facilitate dependency injection"""
@abstractmethod
async def inject(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[T, None]:
"""Inject an object. The state object may be used to store variables for
reuse by other injectors, as injection operations may be nested."""
yield None # type: ignore
@contextlib.asynccontextmanager
async def context(
self, state: InjectorState, request: Request | None = None
) -> AsyncGenerator[T, None]:
"""Context function suitable for use in async with clauses"""
async for result in self.inject(state, request):
yield result
async def depends(self, request: Request) -> AsyncGenerator[T, None]:
"""Depends function suitable for use with FastAPI dependency injection."""
async for result in self.inject(request.state, request):
yield result
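As a hedged usage sketch (not part of this change), here is a trivial `Injector` subclass and the two consumption paths the docstrings above describe: `context()` for `async with` blocks, and `depends` for FastAPI dependency injection. The class, route, and function names below are hypothetical; `Injector`, `InjectorState`, `Request`, and `AsyncGenerator` come from the file above.
from fastapi import Depends, FastAPI

class GreetingInjector(Injector[str]):
    """Hypothetical injector that yields a constant value."""

    async def inject(
        self, state: InjectorState, request: Request | None = None
    ) -> AsyncGenerator[str, None]:
        # Values can be cached on `state` for reuse by nested injectors.
        yield 'hello'

greeting_injector = GreetingInjector()
app = FastAPI()

@app.get('/greet')
async def greet(greeting: str = Depends(greeting_injector.depends)) -> dict:
    return {'greeting': greeting}

async def standalone_use() -> str:
    # Outside of a request, the same injector works as a context manager.
    async with greeting_injector.context(InjectorState()) as greeting:
        return greeting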

Some files were not shown because too many files have changed in this diff.