[Fix]: Remove remaining hard coded refs to sessions store (#7176)

Co-authored-by: openhands <openhands@all-hands.dev>
Rohit Malhotra authored on 2025-03-10 15:07:01 -04:00, committed by GitHub
parent 4b04f09035
commit ac680e7688
9 changed files with 41 additions and 25 deletions

File 1 of 9:

@@ -14,6 +14,7 @@ from openhands.events.action.agent import AgentFinishAction
 from openhands.events.event import Event, EventSource
 from openhands.llm.metrics import Metrics
 from openhands.storage.files import FileStore
+from openhands.storage.locations import get_conversation_agent_state_filename

 class TrafficControlState(str, Enum):

@@ -106,7 +107,7 @@ class State:
         logger.debug(f'Saving state to session {sid}:{self.agent_state}')
         encoded = base64.b64encode(pickled).decode('utf-8')
         try:
-            file_store.write(f'sessions/{sid}/agent_state.pkl', encoded)
+            file_store.write(get_conversation_agent_state_filename(sid), encoded)
         except Exception as e:
             logger.error(f'Failed to save state to session: {e}')
             raise e

@@ -114,7 +115,7 @@ class State:
     @staticmethod
     def restore_from_session(sid: str, file_store: FileStore) -> 'State':
         try:
-            encoded = file_store.read(f'sessions/{sid}/agent_state.pkl')
+            encoded = file_store.read(get_conversation_agent_state_filename(sid))
             pickled = base64.b64decode(encoded)
             state = pickle.loads(pickled)
         except Exception as e:
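Both call sites above trade an inline f-string for the shared helper. A minimal sketch of the save/restore round trip, assuming (per the locations.py hunk later in this commit, and the literals it replaces) that get_conversation_dir(sid) is f'sessions/{sid}/', so the helper resolves to the same key the literals produced:

    # Sketch only: a plain dict stands in for FileStore; the helper is inlined
    # under the stated assumption about get_conversation_dir.
    import base64
    import pickle

    def get_conversation_agent_state_filename(sid: str) -> str:
        return f'sessions/{sid}/agent_state.pkl'  # same key as the old literal

    store: dict[str, str] = {}

    # save_to_session: pickle -> base64 -> write under the helper's key
    encoded = base64.b64encode(pickle.dumps({'agent_state': 'finished'})).decode('utf-8')
    store[get_conversation_agent_state_filename('abc')] = encoded

    # restore_from_session: read the same key -> base64-decode -> unpickle
    state = pickle.loads(base64.b64decode(store[get_conversation_agent_state_filename('abc')]))
    assert state == {'agent_state': 'finished'}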

File 2 of 9:

@@ -5,6 +5,7 @@ from typing import Any
 import httpx
 from pydantic import SecretStr
+from openhands.core.logger import openhands_logger as logger
 from openhands.integrations.github.github_types import (
     GhAuthenticationError,
     GHUnknownException,

@@ -14,7 +15,7 @@ from openhands.integrations.github.github_types import (
     TaskType,
 )
 from openhands.utils.import_utils import get_impl
-from openhands.core.logger import openhands_logger as logger

 class GitHubService:
     BASE_URL = 'https://api.github.com'

@@ -81,11 +82,11 @@ class GitHubService:
             if e.response.status_code == 401:
                 raise GhAuthenticationError('Invalid Github token')
-            logger.warning(f"Status error on GH API: {e}")
+            logger.warning(f'Status error on GH API: {e}')
             raise GHUnknownException('Unknown error')
         except httpx.HTTPError as e:
-            logger.warning(f"HTTP error on GH API: {e}")
+            logger.warning(f'HTTP error on GH API: {e}')
             raise GHUnknownException('Unknown error')

     async def get_user(self) -> GitHubUser:

@@ -177,12 +178,12 @@ class GitHubService:
         except httpx.HTTPStatusError as e:
             if e.response.status_code == 401:
                 raise GhAuthenticationError('Invalid Github token')
-            logger.warning(f"Status error on GH API: {e}")
+            logger.warning(f'Status error on GH API: {e}')
             raise GHUnknownException('Unknown error')
         except httpx.HTTPError as e:
-            logger.warning(f"HTTP error on GH API: {e}")
+            logger.warning(f'HTTP error on GH API: {e}')
             raise GHUnknownException('Unknown error')

     async def get_suggested_tasks(self) -> list[SuggestedTask]:

File 3 of 9:

@@ -9,7 +9,7 @@ from openhands.memory.condenser.condenser import Condenser
 class BrowserOutputCondenser(Condenser):
     """A condenser that masks the observations from browser outputs outside of a recent attention window.

     The intent here is to mask just the browser outputs and leave everything else untouched. This is important because currently we provide screenshots and accessibility trees as input to the model for browser observations. These are really large and consume a lot of tokens without any benefits in performance. So we want to mask all such observations from all previous timesteps, and leave only the most recent one in context.
     """

File 4 of 9:

@@ -19,3 +19,7 @@ def get_conversation_metadata_filename(sid: str) -> str:
 def get_conversation_init_data_filename(sid: str) -> str:
     return f'{get_conversation_dir(sid)}init.json'
+
+
+def get_conversation_agent_state_filename(sid: str) -> str:
+    return f'{get_conversation_dir(sid)}agent_state.pkl'
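Read together with the test hunks below, the helpers in locations.py share a single get_conversation_dir prefix. A minimal sketch of the module as this diff implies it; only get_conversation_agent_state_filename is actually added here, and the body of get_conversation_dir is an assumption reconstructed from the 'sessions/{sid}/...' literals this commit replaces:

    # Assumed shape of the locations module. The asserts show each helper
    # reproducing the literal it replaces, i.e. the refactor keeps the
    # on-disk layout unchanged.
    def get_conversation_dir(sid: str) -> str:
        return f'sessions/{sid}/'  # assumption: implied by the replaced literals

    def get_conversation_metadata_filename(sid: str) -> str:
        return f'{get_conversation_dir(sid)}metadata.json'

    def get_conversation_init_data_filename(sid: str) -> str:
        return f'{get_conversation_dir(sid)}init.json'

    def get_conversation_agent_state_filename(sid: str) -> str:
        return f'{get_conversation_dir(sid)}agent_state.pkl'

    assert get_conversation_agent_state_filename('abc') == 'sessions/abc/agent_state.pkl'
    assert get_conversation_metadata_filename('conv1') == 'sessions/conv1/metadata.json'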

File 5 of 9:

@@ -110,6 +110,7 @@ reportlab = "*"
 [tool.coverage.run]
 concurrency = ["gevent"]

 [tool.poetry.group.runtime.dependencies]
 jupyterlab = "*"
 notebook = "*"

@@ -138,6 +139,7 @@ ignore = ["D1"]
 [tool.ruff.lint.pydocstyle]
 convention = "google"

 [tool.poetry.group.evaluation.dependencies]
 streamlit = "*"
 whatthepatch = "*"

File 6 of 9:

@@ -17,6 +17,7 @@ from openhands.server.routes.manage_conversations import (
     update_conversation,
 )
 from openhands.storage.data_models.conversation_status import ConversationStatus
+from openhands.storage.locations import get_conversation_metadata_filename
 from openhands.storage.memory import InMemoryFileStore

@@ -24,7 +25,7 @@ from openhands.storage.memory import InMemoryFileStore
 def _patch_store():
     file_store = InMemoryFileStore()
     file_store.write(
-        'sessions/some_conversation_id/metadata.json',
+        get_conversation_metadata_filename('some_conversation_id'),
         json.dumps(
             {
                 'title': 'Some Conversation',

File 7 of 9:

@@ -25,6 +25,7 @@ from openhands.events.observation.files import (
     FileWriteObservation,
 )
 from openhands.storage import get_file_store
+from openhands.storage.locations import get_conversation_event_filename

 @pytest.fixture

@@ -48,7 +49,7 @@ def test_stream_storage(temp_dir: str):
     event_stream = EventStream('abc', file_store)
     event_stream.add_event(NullObservation(''), EventSource.AGENT)
     assert len(collect_events(event_stream)) == 1
-    content = event_stream.file_store.read('sessions/abc/events/0.json')
+    content = event_stream.file_store.read(get_conversation_event_filename('abc', 0))
     assert content is not None
     data = json.loads(content)
     assert 'timestamp' in data
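get_conversation_event_filename is imported and used here but not defined in the locations.py hunk of this commit; judging from the literal it replaces, it presumably nests event ids under an events/ subdirectory. A hedged sketch of the assumed shape:

    # Assumed signature and layout, inferred only from the replaced literal
    # 'sessions/abc/events/0.json'; the actual definition is not in this diff.
    def get_conversation_events_dir(sid: str) -> str:
        return f'sessions/{sid}/events/'

    def get_conversation_event_filename(sid: str, id: int) -> str:
        return f'{get_conversation_events_dir(sid)}{id}.json'

    assert get_conversation_event_filename('abc', 0) == 'sessions/abc/events/0.json'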

File 8 of 9:

@@ -4,6 +4,7 @@ import pytest
 from openhands.storage.conversation.file_conversation_store import FileConversationStore
 from openhands.storage.data_models.conversation_metadata import ConversationMetadata
+from openhands.storage.locations import get_conversation_metadata_filename
 from openhands.storage.memory import InMemoryFileStore

@@ -26,7 +27,7 @@ async def test_load_int_user_id():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/some-conversation-id/metadata.json': json.dumps(
+                get_conversation_metadata_filename('some-conversation-id'): json.dumps(
                     {
                         'conversation_id': 'some-conversation-id',
                         'github_user_id': 12345,

@@ -56,7 +57,7 @@ async def test_search_basic():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/conv1/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv1'): json.dumps(
                     {
                         'conversation_id': 'conv1',
                         'github_user_id': '123',

@@ -65,7 +66,7 @@ async def test_search_basic():
                         'created_at': '2025-01-16T19:51:04Z',
                     }
                 ),
-                'sessions/conv2/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv2'): json.dumps(
                     {
                         'conversation_id': 'conv2',
                         'github_user_id': '123',

@@ -74,7 +75,7 @@ async def test_search_basic():
                         'created_at': '2025-01-17T19:51:04Z',
                     }
                 ),
-                'sessions/conv3/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv3'): json.dumps(
                     {
                         'conversation_id': 'conv3',
                         'github_user_id': '123',

@@ -102,7 +103,7 @@ async def test_search_pagination():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                f'sessions/conv{i}/metadata.json': json.dumps(
+                get_conversation_metadata_filename(f'conv{i}'): json.dumps(
                     {
                         'conversation_id': f'conv{i}',
                         'github_user_id': '123',

@@ -143,7 +144,7 @@ async def test_search_with_invalid_conversation():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/conv1/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv1'): json.dumps(
                     {
                         'conversation_id': 'conv1',
                         'github_user_id': '123',

@@ -152,7 +153,9 @@ async def test_search_with_invalid_conversation():
                         'created_at': '2025-01-16T19:51:04Z',
                     }
                 ),
-                'sessions/conv2/metadata.json': 'invalid json',  # Invalid conversation
+                get_conversation_metadata_filename(
+                    'conv2'
+                ): 'invalid json',  # Invalid conversation
             }
         )
     )

@@ -169,7 +172,7 @@ async def test_get_all_metadata():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/conv1/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv1'): json.dumps(
                     {
                         'conversation_id': 'conv1',
                         'github_user_id': '123',

@@ -178,7 +181,7 @@ async def test_get_all_metadata():
                         'created_at': '2025-01-16T19:51:04Z',
                     }
                 ),
-                'sessions/conv2/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv2'): json.dumps(
                     {
                         'conversation_id': 'conv2',
                         'github_user_id': '123',

File 9 of 9:

@@ -3,6 +3,7 @@ import json
 import pytest

 from openhands.storage.conversation.file_conversation_store import FileConversationStore
+from openhands.storage.locations import get_conversation_metadata_filename
 from openhands.storage.memory import InMemoryFileStore
 from openhands.utils.search_utils import iterate, offset_to_page_id, page_id_to_offset

@@ -44,7 +45,7 @@ async def test_iterate_single_page():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/conv1/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv1'): json.dumps(
                     {
                         'conversation_id': 'conv1',
                         'github_user_id': '123',

@@ -53,7 +54,7 @@ async def test_iterate_single_page():
                         'created_at': '2025-01-16T19:51:04Z',
                     }
                 ),
-                'sessions/conv2/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv2'): json.dumps(
                     {
                         'conversation_id': 'conv2',
                         'github_user_id': '123',

@@ -81,7 +82,7 @@ async def test_iterate_multiple_pages():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                f'sessions/conv{i}/metadata.json': json.dumps(
+                get_conversation_metadata_filename(f'conv{i}'): json.dumps(
                     {
                         'conversation_id': f'conv{i}',
                         'github_user_id': '123',

@@ -115,7 +116,7 @@ async def test_iterate_with_invalid_conversation():
     store = FileConversationStore(
         InMemoryFileStore(
             {
-                'sessions/conv1/metadata.json': json.dumps(
+                get_conversation_metadata_filename('conv1'): json.dumps(
                     {
                         'conversation_id': 'conv1',
                         'github_user_id': '123',

@@ -124,7 +125,9 @@ async def test_iterate_with_invalid_conversation():
                         'created_at': '2025-01-16T19:51:04Z',
                     }
                 ),
-                'sessions/conv2/metadata.json': 'invalid json',  # Invalid conversation
+                get_conversation_metadata_filename(
+                    'conv2'
+                ): 'invalid json',  # Invalid conversation
             }
         )
    )