feat: Add configurable stuck/loop detection (#11799)

Co-authored-by: openhands <openhands@all-hands.dev>
Co-authored-by: chuckbutkus <chuck@all-hands.dev>
This commit is contained in:
Graham Neubig
2025-11-21 17:27:38 -05:00
committed by GitHub
parent b9b8d27135
commit 1e513ad63f
6 changed files with 42 additions and 1 deletion

View File

@@ -895,7 +895,7 @@ class AgentController:
# Synchronize spend across all llm services with the budget flag
self.state_tracker.sync_budget_flag_with_metrics()
if self._is_stuck():
if self.agent.config.enable_stuck_detection and self._is_stuck():
await self._react_to_exception(
AgentStuckInLoopError('Agent got stuck in a loop')
)

View File

@@ -32,6 +32,7 @@ The `load_from_env` function in the config package is responsible for loading co
export LLM_API_KEY='your_api_key_here'
export LLM_MODEL='gpt-4'
export AGENT_MEMORY_ENABLED='true'
export AGENT_ENABLE_STUCK_DETECTION='false' # Disable loop detection
export SANDBOX_TIMEOUT='300'
```

View File

@@ -51,6 +51,8 @@ class AgentConfig(BaseModel):
"""Whether to enable SoM (Set of Marks) visual browsing."""
enable_plan_mode: bool = Field(default=True)
"""Whether to enable plan mode, which uses the long horizon system message and add the new tool - task_tracker - for planning, tracking and executing complex tasks."""
enable_stuck_detection: bool = Field(default=True)
"""Whether to enable stuck/loop detection. When disabled, the agent will not automatically detect and recover from loops."""
condenser: CondenserConfig = Field(
# The default condenser is set to the conversation window condenser -- if
# we use NoOp and the conversation hits the LLM context length limit,