Merge branch 'main' into openhands-fix-issue-4706

Engel Nyst 2024-12-07 07:27:15 +01:00 committed by GitHub
commit 15ea9cdc9f
24 changed files with 163 additions and 88 deletions

View File

@ -24,7 +24,8 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [20]
node-version: [20, 22]
fail-fast: true
steps:
- name: Checkout
uses: actions/checkout@v4

View File

@ -16,6 +16,10 @@ on:
type: string
default: "main"
description: "Target branch to pull and create PR against"
LLM_MODEL:
required: false
type: string
default: "anthropic/claude-3-5-sonnet-20241022"
base_container_image:
required: false
type: string
@ -23,15 +27,15 @@ on:
description: "Custom sandbox env"
secrets:
LLM_MODEL:
required: true
required: false
LLM_API_KEY:
required: true
LLM_BASE_URL:
required: false
PAT_TOKEN:
required: true
required: false
PAT_USERNAME:
required: true
required: false
issues:
types: [labeled]
@ -106,13 +110,14 @@ jobs:
- name: Check required environment variables
env:
LLM_MODEL: ${{ secrets.LLM_MODEL }}
LLM_MODEL: ${{ secrets.LLM_MODEL || inputs.LLM_MODEL }}
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
LLM_BASE_URL: ${{ secrets.LLM_BASE_URL }}
PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
PAT_USERNAME: ${{ secrets.PAT_USERNAME }}
GITHUB_TOKEN: ${{ github.token }}
run: |
required_vars=("LLM_MODEL" "LLM_API_KEY" "PAT_TOKEN" "PAT_USERNAME")
required_vars=("LLM_MODEL" "LLM_API_KEY")
for var in "${required_vars[@]}"; do
if [ -z "${!var}" ]; then
echo "Error: Required environment variable $var is not set."
@ -120,6 +125,19 @@ jobs:
fi
done
# Check optional variables and warn about fallbacks
if [ -z "$PAT_TOKEN" ]; then
echo "Warning: PAT_TOKEN is not set, falling back to GITHUB_TOKEN"
fi
if [ -z "$LLM_BASE_URL" ]; then
echo "Warning: LLM_BASE_URL is not set, will use default API endpoint"
fi
if [ -z "$PAT_USERNAME" ]; then
echo "Warning: PAT_USERNAME is not set, will use openhands-agent"
fi
- name: Set environment variables
run: |
if [ -n "${{ github.event.review.body }}" ]; then
@ -143,7 +161,7 @@ jobs:
fi
echo "MAX_ITERATIONS=${{ inputs.max_iterations || 50 }}" >> $GITHUB_ENV
echo "SANDBOX_ENV_GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> $GITHUB_ENV
echo "SANDBOX_ENV_GITHUB_TOKEN=${{ secrets.PAT_TOKEN || github.token }}" >> $GITHUB_ENV
echo "SANDBOX_ENV_BASE_CONTAINER_IMAGE=${{ inputs.base_container_image }}" >> $GITHUB_ENV
# Set branch variables
@ -152,7 +170,7 @@ jobs:
- name: Comment on issue with start message
uses: actions/github-script@v7
with:
github-token: ${{secrets.GITHUB_TOKEN}}
github-token: ${{ secrets.PAT_TOKEN || github.token }}
script: |
const issueType = process.env.ISSUE_TYPE;
github.rest.issues.createComment({
@ -177,9 +195,9 @@ jobs:
- name: Attempt to resolve issue
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_USERNAME: ${{ secrets.PAT_USERNAME }}
LLM_MODEL: ${{ secrets.LLM_MODEL }}
GITHUB_TOKEN: ${{ secrets.PAT_TOKEN || github.token }}
GITHUB_USERNAME: ${{ secrets.PAT_USERNAME || 'openhands-agent' }}
LLM_MODEL: ${{ secrets.LLM_MODEL || inputs.LLM_MODEL }}
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
LLM_BASE_URL: ${{ secrets.LLM_BASE_URL }}
PYTHONPATH: ""
@ -189,7 +207,7 @@ jobs:
--issue-number ${{ env.ISSUE_NUMBER }} \
--issue-type ${{ env.ISSUE_TYPE }} \
--max-iterations ${{ env.MAX_ITERATIONS }} \
--comment-id ${{ env.COMMENT_ID }} \
--comment-id ${{ env.COMMENT_ID }}
- name: Check resolution result
id: check_result
@ -211,9 +229,9 @@ jobs:
- name: Create draft PR or push branch
if: always() # Create PR or branch even if the previous steps fail
env:
GITHUB_TOKEN: ${{ secrets.PAT_TOKEN }}
GITHUB_USERNAME: ${{ secrets.PAT_USERNAME }}
LLM_MODEL: ${{ secrets.LLM_MODEL }}
GITHUB_TOKEN: ${{ secrets.PAT_TOKEN || github.token }}
GITHUB_USERNAME: ${{ secrets.PAT_USERNAME || 'openhands-agent' }}
LLM_MODEL: ${{ secrets.LLM_MODEL || inputs.LLM_MODEL }}
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
LLM_BASE_URL: ${{ secrets.LLM_BASE_URL }}
PYTHONPATH: ""
@ -235,7 +253,7 @@ jobs:
uses: actions/github-script@v7
if: always() # Comment on issue even if the previous steps fail
with:
github-token: ${{secrets.GITHUB_TOKEN}}
github-token: ${{ secrets.PAT_TOKEN || github.token }}
script: |
const fs = require('fs');
const issueNumber = ${{ env.ISSUE_NUMBER }};

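Note: a minimal sketch (placeholder values, not the workflow's real context objects) of the precedence that the `${{ a || b }}` fallbacks above encode; in GitHub Actions expressions, `||` yields the right-hand value whenever the left-hand one is empty or unset.

```python
def resolve(primary: str | None, fallback: str) -> str:
    # Mimics `${{ primary || fallback }}` for string-valued expressions.
    return primary if primary else fallback

# Placeholder values for illustration only.
secrets = {"LLM_MODEL": "", "PAT_TOKEN": ""}          # secrets not configured
inputs = {"LLM_MODEL": "anthropic/claude-3-5-sonnet-20241022"}
automatic_github_token = "ghs_placeholder"            # stands in for github.token

llm_model = resolve(secrets["LLM_MODEL"], inputs["LLM_MODEL"])
github_token = resolve(secrets["PAT_TOKEN"], automatic_github_token)
print(llm_model)      # -> anthropic/claude-3-5-sonnet-20241022 (input default)
print(github_token)   # -> ghs_placeholder (falls back to the workflow token)
```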
View File

@ -1,6 +1,6 @@
{
"name": "openhands-frontend",
"version": "0.15.0",
"version": "0.15.1",
"lockfileVersion": 3,
"requires": true,
"packages": {

View File

@ -1,6 +1,6 @@
{
"name": "openhands-frontend",
"version": "0.15.0",
"version": "0.15.1",
"private": true,
"type": "module",
"engines": {

View File

@ -9,13 +9,11 @@ import { useSaveFile } from "#/hooks/mutation/use-save-file";
interface CodeEditorComponentProps {
onMount: EditorProps["onMount"];
isReadOnly: boolean;
cursorPosition: { line: number; column: number };
}
function CodeEditorComponent({
onMount,
isReadOnly,
cursorPosition,
}: CodeEditorComponentProps) {
const { t } = useTranslation();
const {
@ -102,22 +100,15 @@ function CodeEditorComponent({
}
return (
<div className="flex flex-col h-full w-full">
<div className="flex-grow min-h-0 relative">
<Editor
data-testid="code-editor"
path={selectedPath ?? undefined}
defaultValue=""
value={selectedPath ? fileContent : undefined}
onMount={onMount}
onChange={handleEditorChange}
options={{ readOnly: isReadOnly }}
/>
</div>
<div className="p-2 text-neutral-500 flex-shrink-0 absolute bottom-1">
Row: {cursorPosition.line}, Column: {cursorPosition.column}
</div>
</div>
<Editor
data-testid="code-editor"
path={selectedPath ?? undefined}
defaultValue=""
value={selectedPath ? fileContent : undefined}
onMount={onMount}
onChange={handleEditorChange}
options={{ readOnly: isReadOnly }}
/>
);
}

View File

@ -38,6 +38,7 @@ interface WsClientProviderProps {
enabled: boolean;
token: string | null;
ghToken: string | null;
selectedRepository: string | null;
settings: Settings | null;
}
@ -45,12 +46,14 @@ export function WsClientProvider({
enabled,
token,
ghToken,
selectedRepository,
settings,
children,
}: React.PropsWithChildren<WsClientProviderProps>) {
const sioRef = React.useRef<Socket | null>(null);
const tokenRef = React.useRef<string | null>(token);
const ghTokenRef = React.useRef<string | null>(ghToken);
const selectedRepositoryRef = React.useRef<string | null>(selectedRepository);
const disconnectRef = React.useRef<ReturnType<typeof setTimeout> | null>(
null,
);
@ -81,6 +84,9 @@ export function WsClientProvider({
if (ghToken) {
initEvent.github_token = ghToken;
}
if (selectedRepository) {
initEvent.selected_repository = selectedRepository;
}
const lastEvent = lastEventRef.current;
if (lastEvent) {
initEvent.latest_event_id = lastEvent.id;
@ -158,6 +164,7 @@ export function WsClientProvider({
sioRef.current = sio;
tokenRef.current = token;
ghTokenRef.current = ghToken;
selectedRepositoryRef.current = selectedRepository;
return () => {
sio.off("connect", handleConnect);
@ -166,7 +173,7 @@ export function WsClientProvider({
sio.off("connect_failed", handleError);
sio.off("disconnect", handleDisconnect);
};
}, [enabled, token, ghToken]);
}, [enabled, token, ghToken, selectedRepository]);
// Strict mode mounts and unmounts each component twice, so we have to wait in the destructor
// before actually disconnecting the socket and cancel the operation if the component gets remounted.

View File

@ -1,4 +1,4 @@
import React, { useState } from "react";
import React from "react";
import { useSelector } from "react-redux";
import { useRouteError } from "react-router";
import { editor } from "monaco-editor";
@ -32,7 +32,6 @@ function CodeEditor() {
} = useFiles();
const [fileExplorerIsOpen, setFileExplorerIsOpen] = React.useState(true);
const [cursorPosition, setCursorPosition] = useState({ line: 1, column: 1 });
const editorRef = React.useRef<editor.IStandaloneCodeEditor | null>(null);
const { mutate: saveFile } = useSaveFile();
@ -54,13 +53,6 @@ function CodeEditor() {
},
});
monaco.editor.setTheme("oh-dark");
e.onDidChangeCursorPosition((ee) => {
setCursorPosition({
line: ee.position.lineNumber,
column: ee.position.column,
});
});
};
const agentState = useSelector(
@ -111,7 +103,6 @@ function CodeEditor() {
<CodeEditorComponent
onMount={handleEditorDidMount}
isReadOnly={!isEditingAllowed}
cursorPosition={cursorPosition}
/>
</div>
</div>

View File

@ -6,7 +6,6 @@ import {
WsClientProviderStatus,
} from "#/context/ws-client-provider";
import { createChatMessage } from "#/services/chat-service";
import { getCloneRepoCommand } from "#/services/terminal-service";
import { setCurrentAgentState } from "#/state/agent-slice";
import { addUserMessage } from "#/state/chat-slice";
import {
@ -37,11 +36,6 @@ export const useWSStatusChange = () => {
send(createChatMessage(query, base64Files, timestamp));
};
const dispatchCloneRepoCommand = (ghToken: string, repository: string) => {
send(getCloneRepoCommand(ghToken, repository));
dispatch(clearSelectedRepository());
};
const dispatchInitialQuery = (query: string, additionalInfo: string) => {
if (additionalInfo) {
sendInitialQuery(`${query}\n\n[${additionalInfo}]`, files);
@ -57,8 +51,7 @@ export const useWSStatusChange = () => {
let additionalInfo = "";
if (gitHubToken && selectedRepository) {
dispatchCloneRepoCommand(gitHubToken, selectedRepository);
additionalInfo = `Repository ${selectedRepository} has been cloned to /workspace. Please check the /workspace for files.`;
dispatch(clearSelectedRepository());
} else if (importedProjectZip) {
// if there's an uploaded project zip, add it to the chat
additionalInfo =

View File

@ -64,6 +64,7 @@ function App() {
enabled
token={token}
ghToken={gitHubToken}
selectedRepository={selectedRepository}
settings={settings}
>
<EventHandler>

View File

@ -10,11 +10,3 @@ export function getGitHubTokenCommand(gitHubToken: string) {
const event = getTerminalCommand(command, true);
return event;
}
export function getCloneRepoCommand(gitHubToken: string, repository: string) {
const url = `https://${gitHubToken}@github.com/${repository}.git`;
const dirName = repository.split("/")[1];
const command = `git clone ${url} ${dirName} ; cd ${dirName} ; git checkout -b openhands-workspace`;
const event = getTerminalCommand(command, true);
return event;
}

View File

@ -1,5 +1,7 @@
/** @type {import('tailwindcss').Config} */
const { nextui } = require("@nextui-org/react");
import { nextui } from "@nextui-org/react";
import typography from '@tailwindcss/typography';
export default {
content: [
"./src/**/*.{js,ts,jsx,tsx}",
@ -33,6 +35,6 @@ export default {
}
}
}),
require('@tailwindcss/typography'),
typography,
],
};

View File

@ -398,6 +398,9 @@ class CodeActAgent(Agent):
- Messages from the same role are combined to prevent consecutive same-role messages
- For Anthropic models, specific messages are cached according to their documentation
"""
if not self.prompt_manager:
raise Exception('Prompt Manager not instantiated.')
messages: list[Message] = [
Message(
role='system',

View File

@ -11,6 +11,7 @@ from openhands.core.exceptions import (
)
from openhands.llm.llm import LLM
from openhands.runtime.plugins import PluginRequirement
from openhands.utils.prompt import PromptManager
class Agent(ABC):
@ -33,6 +34,7 @@ class Agent(ABC):
self.llm = llm
self.config = config
self._complete = False
self.prompt_manager: PromptManager | None = None
@property
def complete(self) -> bool:

View File

@ -32,11 +32,11 @@ Follow these steps to use this workflow in your own repository:
5. Set up [GitHub secrets](https://docs.github.com/en/actions/security-for-github-actions/security-guides/using-secrets-in-github-actions):
- Required:
- `LLM_API_KEY`: Your LLM API key
- Optional:
- `PAT_USERNAME`: GitHub username for the personal access token
- `PAT_TOKEN`: The personal access token
- `LLM_MODEL`: LLM model to use (e.g., "anthropic/claude-3-5-sonnet-20241022")
- `LLM_API_KEY`: Your LLM API key
- Optional:
- `LLM_MODEL`: LLM model to use (defaults to "anthropic/claude-3-5-sonnet-20241022")
- `LLM_BASE_URL`: Base URL for LLM API (only if using a proxy)
Note: You can set these secrets at the organization level to use across multiple repositories.

View File

@ -213,6 +213,47 @@ class Runtime(FileEditRuntimeMixin):
source = event.source if event.source else EventSource.AGENT
self.event_stream.add_event(observation, source) # type: ignore[arg-type]
def clone_repo(self, github_token: str | None, selected_repository: str | None):
if not github_token or not selected_repository:
return
url = f'https://{github_token}@github.com/{selected_repository}.git'
dir_name = selected_repository.split('/')[1]
action = CmdRunAction(
command=f'git clone {url} {dir_name} ; cd {dir_name} ; git checkout -b openhands-workspace'
)
self.log('info', f'Cloning repo: {selected_repository}')
self.run_action(action)
def get_custom_microagents(self, selected_repository: str | None) -> list[str]:
custom_microagents_content = []
custom_microagents_dir = Path('.openhands') / 'microagents'
dir_name = str(custom_microagents_dir)
if selected_repository:
dir_name = str(
Path(selected_repository.split('/')[1]) / custom_microagents_dir
)
oh_instructions_header = '---\nname: openhands_instructions\nagent: CodeActAgent\ntriggers:\n- ""\n---\n'
obs = self.read(FileReadAction(path='.openhands_instructions'))
if isinstance(obs, ErrorObservation):
self.log('error', 'Failed to read openhands_instructions')
else:
openhands_instructions = oh_instructions_header + obs.content
self.log('info', f'openhands_instructions: {openhands_instructions}')
custom_microagents_content.append(openhands_instructions)
files = self.list_files(dir_name)
self.log('info', f'Found {len(files)} custom microagents.')
for fname in files:
content = self.read(
FileReadAction(path=str(custom_microagents_dir / fname))
).content
custom_microagents_content.append(content)
return custom_microagents_content
def run_action(self, action: Action) -> Observation:
"""Run an action and return the resulting observation.
If the action is not runnable in any runtime, a NullObservation is returned.

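Note: a quick sketch of the command string that `clone_repo` assembles; the token and repository name below are hypothetical placeholders, and the only assumption is the string building shown in the hunk above.

```python
# Hypothetical inputs, mirroring clone_repo's string construction.
github_token = "ghp_example_token"                # placeholder, not a real token
selected_repository = "octocat/hello-world"       # hypothetical "owner/repo"

url = f"https://{github_token}@github.com/{selected_repository}.git"
dir_name = selected_repository.split("/")[1]      # -> "hello-world"
command = (
    f"git clone {url} {dir_name} ; cd {dir_name} ; git checkout -b openhands-workspace"
)
# The runtime wraps this string in CmdRunAction(command=command) and runs it.
print(command)
```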
View File

@ -1,7 +1,7 @@
import socketio
from openhands.server.app import app as base_app
from openhands.server.socket import sio
from openhands.server.listen_socket import sio
from openhands.server.static import SPAStaticFiles
base_app.mount(

View File

@ -32,6 +32,8 @@ async def oh_action(connection_id: str, data: dict):
latest_event_id = int(data.pop('latest_event_id', -1))
kwargs = {k.lower(): v for k, v in (data.get('args') or {}).items()}
session_init_data = SessionInitData(**kwargs)
session_init_data.github_token = github_token
session_init_data.selected_repository = data.get('selected_repository', None)
await init_connection(
connection_id, token, github_token, session_init_data, latest_event_id
)

View File

@ -7,7 +7,7 @@ from openhands.controller.state.state import State
from openhands.core.config import AgentConfig, AppConfig, LLMConfig
from openhands.core.logger import openhands_logger as logger
from openhands.core.schema.agent import AgentState
from openhands.events.action.agent import ChangeAgentStateAction
from openhands.events.action import ChangeAgentStateAction
from openhands.events.event import EventSource
from openhands.events.stream import EventStream
from openhands.runtime import get_runtime_cls
@ -60,6 +60,8 @@ class AgentSession:
max_budget_per_task: float | None = None,
agent_to_llm_config: dict[str, LLMConfig] | None = None,
agent_configs: dict[str, AgentConfig] | None = None,
github_token: str | None = None,
selected_repository: str | None = None,
):
"""Starts the Agent session
Parameters:
@ -86,6 +88,8 @@ class AgentSession:
max_budget_per_task,
agent_to_llm_config,
agent_configs,
github_token,
selected_repository,
)
def _start_thread(self, *args):
@ -104,13 +108,18 @@ class AgentSession:
max_budget_per_task: float | None = None,
agent_to_llm_config: dict[str, LLMConfig] | None = None,
agent_configs: dict[str, AgentConfig] | None = None,
github_token: str | None = None,
selected_repository: str | None = None,
):
self._create_security_analyzer(config.security.security_analyzer)
await self._create_runtime(
runtime_name=runtime_name,
config=config,
agent=agent,
github_token=github_token,
selected_repository=selected_repository,
)
self._create_controller(
agent,
config.security.confirmation_mode,
@ -165,6 +174,8 @@ class AgentSession:
runtime_name: str,
config: AppConfig,
agent: Agent,
github_token: str | None = None,
selected_repository: str | None = None,
):
"""Creates a runtime instance
@ -199,6 +210,12 @@ class AgentSession:
return
if self.runtime is not None:
self.runtime.clone_repo(github_token, selected_repository)
if agent.prompt_manager:
agent.prompt_manager.load_microagent_files(
self.runtime.get_custom_microagents(selected_repository)
)
logger.debug(
f'Runtime initialized with plugins: {[plugin.name for plugin in self.runtime.plugins]}'
)

View File

@ -72,7 +72,6 @@ class Session:
self.config.security.security_analyzer = session_init_data.security_analyzer or self.config.security.security_analyzer
max_iterations = session_init_data.max_iterations or self.config.max_iterations
# override default LLM config
default_llm_config = self.config.get_llm_config()
default_llm_config.model = session_init_data.llm_model or default_llm_config.model
@ -94,6 +93,8 @@ class Session:
max_budget_per_task=self.config.max_budget_per_task,
agent_to_llm_config=self.config.get_agent_to_llm_config_map(),
agent_configs=self.config.get_agent_configs(),
github_token=session_init_data.github_token,
selected_repository=session_init_data.selected_repository,
)
except Exception as e:
logger.exception(f'Error creating controller: {e}')

View File

@ -16,3 +16,5 @@ class SessionInitData:
llm_model: str | None = None
llm_api_key: str | None = None
llm_base_url: str | None = None
github_token: str | None = None
selected_repository: str | None = None

View File

@ -11,14 +11,20 @@ class MicroAgentMetadata(pydantic.BaseModel):
class MicroAgent:
def __init__(self, path: str):
self.path = path
if not os.path.exists(path):
raise FileNotFoundError(f'Micro agent file {path} is not found')
with open(path, 'r') as file:
self._loaded = frontmatter.load(file)
self._content = self._loaded.content
self._metadata = MicroAgentMetadata(**self._loaded.metadata)
def __init__(self, path: str | None = None, content: str | None = None):
if path and not content:
self.path = path
if not os.path.exists(path):
raise FileNotFoundError(f'Micro agent file {path} is not found')
with open(path, 'r') as file:
self._loaded = frontmatter.load(file)
self._content = self._loaded.content
self._metadata = MicroAgentMetadata(**self._loaded.metadata)
elif content and not path:
self._metadata, self._content = frontmatter.parse(content)
self._metadata = MicroAgentMetadata(**self._metadata)
else:
raise Exception('You must pass either path or file content, but not both.')
def get_trigger(self, message: str) -> str | None:
message = message.lower()

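Note: a brief usage sketch of the two construction modes introduced above; the file path and frontmatter text are invented for illustration, and `MicroAgent` is assumed to be imported from its defining module.

```python
# Content mode: parse frontmatter text already in memory, e.g. a file fetched
# from a repository's .openhands/microagents/ directory by the runtime.
raw = (
    "---\n"
    "name: repo_helper\n"
    "agent: CodeActAgent\n"
    "triggers:\n"
    '- "lint"\n'
    "---\n"
    "Run flake8 before committing.\n"
)
repo_agent = MicroAgent(content=raw)

# Path mode: load and parse a microagent markdown file from disk
# (raises FileNotFoundError if this hypothetical path does not exist).
disk_agent = MicroAgent(path="microagents/flake8.md")

# Passing both arguments, or neither, raises per the constructor above.
```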
View File

@ -42,13 +42,18 @@ class PromptManager:
if f.endswith('.md')
]
for microagent_file in microagent_files:
microagent = MicroAgent(microagent_file)
microagent = MicroAgent(path=microagent_file)
if (
disabled_microagents is None
or microagent.name not in disabled_microagents
):
self.microagents[microagent.name] = microagent
def load_microagent_files(self, microagent_files: list[str]):
for microagent_file in microagent_files:
microagent = MicroAgent(content=microagent_file)
self.microagents[microagent.name] = microagent
def _load_template(self, template_name: str) -> Template:
if self.prompt_dir is None:
raise ValueError('Prompt directory is not set')

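Note: a matching sketch for the new `load_microagent_files` hook; it assumes an already-constructed `prompt_manager` instance, and the content string is invented.

```python
# Raw markdown strings, e.g. what Runtime.get_custom_microagents() returns.
contents: list[str] = [
    '---\nname: repo_docs\nagent: CodeActAgent\ntriggers:\n- "docs"\n---\n'
    "Project documentation lives under docs/.",
]
prompt_manager.load_microagent_files(contents)
# Each string is parsed via MicroAgent(content=...) and registered by name.
```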
poetry.lock (generated, 14 changed lines)
View File

@ -3782,19 +3782,19 @@ pydantic = ">=1.10"
[[package]]
name = "llama-index"
version = "0.12.2"
version = "0.12.3"
description = "Interface between LLMs and your data"
optional = false
python-versions = "<4.0,>=3.9"
files = [
{file = "llama_index-0.12.2-py3-none-any.whl", hash = "sha256:971528db7889f5a0d15fd9039a403bc6f92bfafc2d4e1bab2d166657728ae94c"},
{file = "llama_index-0.12.2.tar.gz", hash = "sha256:da9738dd666e219689839c7451c9df8bed72e6510a6f7d6f7d9907bfdd4588eb"},
{file = "llama_index-0.12.3-py3-none-any.whl", hash = "sha256:0fe8836c84becf05bb95c19aaf15c643bd57a5cb324088a84f2f299a779e97bb"},
{file = "llama_index-0.12.3.tar.gz", hash = "sha256:d3ea4d599225c934ff9e56712203f5236e7e143eaf2144d92238f37f1de24ccd"},
]
[package.dependencies]
llama-index-agent-openai = ">=0.4.0,<0.5.0"
llama-index-cli = ">=0.4.0,<0.5.0"
llama-index-core = ">=0.12.2,<0.13.0"
llama-index-core = ">=0.12.3,<0.13.0"
llama-index-embeddings-openai = ">=0.3.0,<0.4.0"
llama-index-indices-managed-llama-cloud = ">=0.4.0"
llama-index-legacy = ">=0.9.48,<0.10.0"
@ -3840,13 +3840,13 @@ llama-index-llms-openai = ">=0.3.0,<0.4.0"
[[package]]
name = "llama-index-core"
version = "0.12.2"
version = "0.12.3"
description = "Interface between LLMs and your data"
optional = false
python-versions = "<4.0,>=3.9"
files = [
{file = "llama_index_core-0.12.2-py3-none-any.whl", hash = "sha256:27a5548523435a5c2b84f75c15894a44522b7f968e9f29a03f9a301ca09fb7fa"},
{file = "llama_index_core-0.12.2.tar.gz", hash = "sha256:a48b2de9c3a09608ab5c03c5a313428f119c86946acdefde555992b7c0b8a38e"},
{file = "llama_index_core-0.12.3-py3-none-any.whl", hash = "sha256:f0034965e74f508594cb96b8ebb17ce123d73d5f8858adcf38f5cdd936b94262"},
{file = "llama_index_core-0.12.3.tar.gz", hash = "sha256:61fa0a1155a022b5b63c081d6709f5b57bae231b1c847c78e2052c93a231b90a"},
]
[package.dependencies]

View File

@ -1,6 +1,6 @@
[tool.poetry]
name = "openhands-ai"
version = "0.15.0"
version = "0.15.1"
description = "OpenHands: Code Less, Make More"
authors = ["OpenHands"]
license = "MIT"