mirror of
https://github.com/OpenHands/OpenHands.git
synced 2026-03-22 13:47:19 +08:00
Merge branch 'main' into hotfix/user-signup-event-data
This commit is contained in:
37
.agents/skills/upcoming-release/SKILL.md
Normal file
37
.agents/skills/upcoming-release/SKILL.md
Normal file
@@ -0,0 +1,37 @@
|
||||
---
|
||||
name: upcoming-release
|
||||
description: This skill should be used when the user asks to "generate release notes", "list upcoming release PRs", "summarize upcoming release", "/upcoming-release", or needs to know what changes are part of an upcoming release.
|
||||
---
|
||||
|
||||
# Upcoming Release Summary
|
||||
|
||||
Generate a concise summary of PRs included in the upcoming release.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Two commit SHAs are required:
|
||||
- **First SHA**: The older commit (current release)
|
||||
- **Second SHA**: The newer commit (what's being released)
|
||||
|
||||
If the user does not provide both SHAs, ask for them before proceeding.
|
||||
|
||||
## Workflow
|
||||
|
||||
1. Run the script from the repository root with the `--json` flag:
|
||||
```bash
|
||||
.github/scripts/find_prs_between_commits.py <older-sha> <newer-sha> --json
|
||||
```
|
||||
|
||||
2. Filter out PRs that are:
|
||||
- Chores
|
||||
- Dependency updates
|
||||
- Adding logs
|
||||
- Refactors
|
||||
|
||||
3. Categorize the remaining PRs:
|
||||
- **Features** - New functionality
|
||||
- **Bug fixes** - Corrections to existing behavior
|
||||
- **Security/CVE fixes** - Security-related changes
|
||||
- **Other** - Everything else
|
||||
|
||||
4. Format the output with PRs listed under their category, including the PR number and a brief description.
|
||||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
2
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
# disable blank issue creation
|
||||
blank_issues_enabled: false
|
||||
330
.github/scripts/find_prs_between_commits.py
vendored
Normal file
330
.github/scripts/find_prs_between_commits.py
vendored
Normal file
@@ -0,0 +1,330 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Find all PRs that went in between two commits in the OpenHands/OpenHands repository.
|
||||
Handles cherry-picks and different merge strategies.
|
||||
|
||||
This script is designed to run from within the OpenHands repository under .github/scripts:
|
||||
.github/scripts/find_prs_between_commits.py
|
||||
|
||||
Usage: find_prs_between_commits <older_commit> <newer_commit> [--repo <path>]
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def find_openhands_repo() -> Optional[Path]:
    """Locate the OpenHands repository root.

    The script is expected to live in .github/scripts/, so the repository
    root is assumed to be two directory levels above this file. Candidate
    locations are tried in order:

      1. Two levels up from the script location (scripts -> .github -> root)
      2. The current working directory
      3. The path named by the OPENHANDS_REPO environment variable

    Returns the first candidate that contains a ``.git`` entry, or None
    when no candidate qualifies.
    """
    script_dir = Path(__file__).parent.absolute()
    # Order matters: script-relative root first, then cwd, then env override.
    candidates = [script_dir.parent.parent, Path.cwd()]
    env_repo = os.environ.get('OPENHANDS_REPO')
    if env_repo is not None:
        candidates.append(Path(env_repo))

    for candidate in candidates:
        if (candidate / '.git').exists():
            return candidate
    return None
|
||||
|
||||
|
||||
def run_git_command(cmd: list[str], repo_path: Path) -> str:
    """Execute *cmd* inside *repo_path* and return its stripped stdout.

    On a non-zero exit status, the failing command and its stderr are
    reported on stderr and the whole process terminates with exit code 1.
    """
    try:
        completed = subprocess.run(
            cmd, capture_output=True, text=True, check=True, cwd=str(repo_path)
        )
    except subprocess.CalledProcessError as err:
        print(f'Error running git command: {" ".join(cmd)}', file=sys.stderr)
        print(f'Error: {err.stderr}', file=sys.stderr)
        sys.exit(1)
    return completed.stdout.strip()
|
||||
|
||||
|
||||
def extract_pr_numbers_from_message(message: str) -> set[int]:
    """Return every PR number referenced in *message* as ``#<digits>``.

    Matches ``#12345`` anywhere in the text, covering the common commit
    formats such as squash merges "(#12345)" and merge commits
    "Merge pull request #12345".
    """
    return {int(number) for number in re.findall(r'#(\d+)', message)}
|
||||
|
||||
|
||||
def get_commit_info(commit_hash: str, repo_path: Path) -> tuple[str, str, str]:
    """Return ``(subject, body, "Name <email>")`` for *commit_hash*.

    Each field is read with a separate ``git log -1 --format=...`` call;
    a bad hash terminates the process inside run_git_command.
    """
    # %s = subject, %b = body, %an <%ae> = author name and email.
    fields = [
        run_git_command(['git', 'log', '-1', f'--format={fmt}', commit_hash], repo_path)
        for fmt in ('%s', '%b', '%an <%ae>')
    ]
    subject, body, author = fields
    return subject, body, author
|
||||
|
||||
|
||||
def get_commits_between(
    older_commit: str, newer_commit: str, repo_path: Path
) -> list[str]:
    """Return the hashes of commits in the range ``older..newer``.

    Uses ``git rev-list``, i.e. commits reachable from *newer_commit* but
    not from *older_commit*. Returns an empty list when the range is empty.
    """
    output = run_git_command(
        ['git', 'rev-list', f'{older_commit}..{newer_commit}'], repo_path
    )
    # rev-list prints one hash per line; an empty string means no commits.
    return output.split('\n') if output else []
|
||||
|
||||
|
||||
def get_pr_info_from_github(pr_number: int, repo_path: Path) -> Optional[dict]:
    """Fetch PR metadata via the ``gh`` CLI, or None if unavailable.

    GITHUB_TOKEN (when set) is forwarded as GH_TOKEN so the CLI can
    authenticate. Any CLI failure, a missing ``gh`` binary, or malformed
    JSON output is treated as "no info" rather than an error, keeping the
    GitHub lookup strictly best-effort.
    """
    env = os.environ.copy()
    token = env.get('GITHUB_TOKEN')
    if token is not None:
        env['GH_TOKEN'] = token

    gh_cmd = [
        'gh',
        'pr',
        'view',
        str(pr_number),
        '--json',
        'number,title,author,mergedAt,baseRefName,headRefName,url',
    ]
    try:
        completed = subprocess.run(
            gh_cmd,
            capture_output=True,
            text=True,
            check=True,
            env=env,
            cwd=str(repo_path),
        )
        return json.loads(completed.stdout)
    except (subprocess.CalledProcessError, FileNotFoundError, json.JSONDecodeError):
        return None
|
||||
|
||||
|
||||
def find_prs_between_commits(
    older_commit: str, newer_commit: str, repo_path: Path
) -> dict[int, dict]:
    """
    Find all PRs that went in between two commits.

    Returns a dictionary mapping PR numbers to their information; each
    entry holds 'number', 'first_commit', 'first_commit_subject',
    'commits' (one dict per commit referencing the PR, so multi-commit
    PRs and cherry-picks are captured), and 'github_info' (gh CLI data
    or None). Progress messages go to stderr so stdout stays free for
    the actual output.

    Note: the previous version also built a ``commits_by_pr`` defaultdict
    that duplicated ``pr_info[...]['commits']`` and was never read; it has
    been removed.
    """
    print(f'Repository: {repo_path}', file=sys.stderr)
    print('Finding PRs between commits:', file=sys.stderr)
    print(f' Older: {older_commit}', file=sys.stderr)
    print(f' Newer: {newer_commit}', file=sys.stderr)
    print(file=sys.stderr)

    # Verify commits exist. run_git_command calls sys.exit(1) on failure,
    # which surfaces here as SystemExit.
    try:
        run_git_command(['git', 'rev-parse', '--verify', older_commit], repo_path)
        run_git_command(['git', 'rev-parse', '--verify', newer_commit], repo_path)
    except SystemExit:
        print('Error: One or both commits not found in repository', file=sys.stderr)
        sys.exit(1)

    # Extract PRs from the older commit itself (to exclude from results).
    # These PRs are already included at or before the older commit.
    older_subject, older_body, _ = get_commit_info(older_commit, repo_path)
    older_message = f'{older_subject}\n{older_body}'
    excluded_prs = extract_pr_numbers_from_message(older_message)

    if excluded_prs:
        print(
            f'Excluding PRs already in older commit: {", ".join(f"#{pr}" for pr in sorted(excluded_prs))}',
            file=sys.stderr,
        )
        print(file=sys.stderr)

    # Get all commits between the two (the older..newer range).
    commits = get_commits_between(older_commit, newer_commit, repo_path)
    print(f'Found {len(commits)} commits to analyze', file=sys.stderr)
    print(file=sys.stderr)

    # Extract PR numbers from all commits; a PR can appear in several
    # commits (multi-commit merge or cherry-pick), so commits accumulate
    # per PR in pr_info[pr]['commits'].
    pr_info: dict[int, dict] = {}

    for commit_hash in commits:
        subject, body, author = get_commit_info(commit_hash, repo_path)
        full_message = f'{subject}\n{body}'

        pr_numbers = extract_pr_numbers_from_message(full_message)

        for pr_num in pr_numbers:
            # Skip PRs that are already in the older commit
            if pr_num in excluded_prs:
                continue

            if pr_num not in pr_info:
                # First sighting of this PR; remember the first commit seen.
                pr_info[pr_num] = {
                    'number': pr_num,
                    'first_commit': commit_hash[:8],
                    'first_commit_subject': subject,
                    'commits': [],
                    'github_info': None,
                }

            pr_info[pr_num]['commits'].append(
                {'hash': commit_hash[:8], 'subject': subject, 'author': author}
            )

    # Try to get additional info from GitHub API (best effort; stays None
    # when gh is unavailable or the lookup fails).
    print('Fetching additional info from GitHub API...', file=sys.stderr)
    for pr_num in pr_info.keys():
        github_info = get_pr_info_from_github(pr_num, repo_path)
        if github_info:
            pr_info[pr_num]['github_info'] = github_info

    print(file=sys.stderr)

    return pr_info
|
||||
|
||||
|
||||
def print_results(pr_info: dict[int, dict]):
|
||||
"""Print the results in a readable format."""
|
||||
sorted_prs = sorted(pr_info.items(), key=lambda x: x[0])
|
||||
|
||||
print(f'{"=" * 80}')
|
||||
print(f'Found {len(sorted_prs)} PRs')
|
||||
print(f'{"=" * 80}')
|
||||
print()
|
||||
|
||||
for pr_num, info in sorted_prs:
|
||||
print(f'PR #{pr_num}')
|
||||
|
||||
if info['github_info']:
|
||||
gh = info['github_info']
|
||||
print(f' Title: {gh["title"]}')
|
||||
print(f' Author: {gh["author"]["login"]}')
|
||||
print(f' URL: {gh["url"]}')
|
||||
if gh.get('mergedAt'):
|
||||
print(f' Merged: {gh["mergedAt"]}')
|
||||
if gh.get('baseRefName'):
|
||||
print(f' Base: {gh["baseRefName"]} ← {gh["headRefName"]}')
|
||||
else:
|
||||
print(f' Subject: {info["first_commit_subject"]}')
|
||||
|
||||
# Show if this PR has multiple commits (cherry-picked or multiple commits)
|
||||
commit_count = len(info['commits'])
|
||||
if commit_count > 1:
|
||||
print(
|
||||
f' ⚠️ Found {commit_count} commits (possible cherry-pick or multi-commit PR):'
|
||||
)
|
||||
for commit in info['commits'][:3]: # Show first 3
|
||||
print(f' {commit["hash"]}: {commit["subject"][:60]}')
|
||||
if commit_count > 3:
|
||||
print(f' ... and {commit_count - 3} more')
|
||||
else:
|
||||
print(f' Commit: {info["first_commit"]}')
|
||||
|
||||
print()
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse argv, locate the repository, and report PRs.

    Requires two positional arguments (older and newer commit ref); prints
    a usage message and exits with status 1 when they are missing or the
    repository cannot be located. Supports --json for machine-readable
    output and --repo <path> to point at the repository explicitly.
    """
    if len(sys.argv) < 3:
        print('Usage: find_prs_between_commits <older_commit> <newer_commit> [options]')
        print()
        print('Arguments:')
        print(' <older_commit> The older commit hash (or ref)')
        print(' <newer_commit> The newer commit hash (or ref)')
        print()
        print('Options:')
        print(' --json Output results in JSON format')
        print(' --repo <path> Path to OpenHands repository (default: auto-detect)')
        print()
        print('Example:')
        print(
            ' find_prs_between_commits c79e0cd3c7a2501a719c9296828d7a31e4030585 35bddb14f15124a3dc448a74651a6592911d99e9'
        )
        print()
        print('Repository Detection:')
        print(' The script will try to find the OpenHands repository in this order:')
        print(' 1. --repo argument')
        print(' 2. Repository root (../../ from script location)')
        print(' 3. Current directory')
        print(' 4. OPENHANDS_REPO environment variable')
        print()
        print('Environment variables:')
        print(
            ' GITHUB_TOKEN Optional. If set, will fetch additional PR info from GitHub API'
        )
        print(' OPENHANDS_REPO Optional. Path to OpenHands repository')
        sys.exit(1)

    # NOTE(review): positional arguments are taken blindly from argv[1:3];
    # placing an option first (e.g. --json) would be misread as a commit
    # ref — confirm this ordering constraint is acceptable for callers.
    older_commit = sys.argv[1]
    newer_commit = sys.argv[2]
    json_output = '--json' in sys.argv

    # Check for --repo argument
    # NOTE(review): a trailing `--repo` with no value is silently ignored
    # and falls through to auto-detection — confirm intended.
    repo_path: Optional[Path] = None
    if '--repo' in sys.argv:
        repo_idx = sys.argv.index('--repo')
        if repo_idx + 1 < len(sys.argv):
            repo_path = Path(sys.argv[repo_idx + 1])
            if not (repo_path / '.git').exists():
                print(f'Error: {repo_path} is not a git repository', file=sys.stderr)
                sys.exit(1)

    # Auto-detect repository if not specified
    if repo_path is None:
        repo_path = find_openhands_repo()
        if repo_path is None:
            print('Error: Could not find OpenHands repository', file=sys.stderr)
            print('Please either:', file=sys.stderr)
            print(
                ' 1. Place this script in .github/scripts/ within the OpenHands repository',
                file=sys.stderr,
            )
            print(' 2. Run from the OpenHands repository directory', file=sys.stderr)
            print(
                ' 3. Use --repo <path> to specify the repository location',
                file=sys.stderr,
            )
            print(' 4. Set OPENHANDS_REPO environment variable', file=sys.stderr)
            sys.exit(1)

    # Find PRs
    pr_info = find_prs_between_commits(older_commit, newer_commit, repo_path)

    if json_output:
        # Output as JSON (progress messages already went to stderr, so
        # stdout carries only the JSON document)
        print(json.dumps(pr_info, indent=2))
    else:
        # Print results in human-readable format
        print_results(pr_info)

        # Also print a simple list for easy copying
        print(f'{"=" * 80}')
        print('PR Numbers (for easy copying):')
        print(f'{"=" * 80}')
        sorted_pr_nums = sorted(pr_info.keys())
        print(', '.join(f'#{pr}' for pr in sorted_pr_nums))
|
||||
|
||||
|
||||
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
|
||||
@@ -165,7 +165,7 @@ Each integration follows a consistent pattern with service classes, storage mode
|
||||
|
||||
**Import Patterns:**
|
||||
- Use relative imports without `enterprise.` prefix in enterprise code
|
||||
- Example: `from storage.database import session_maker` not `from enterprise.storage.database import session_maker`
|
||||
- Example: `from storage.database import a_session_maker` not `from enterprise.storage.database import a_session_maker`
|
||||
- This ensures code works in both OpenHands and enterprise contexts
|
||||
|
||||
**Test Structure:**
|
||||
|
||||
@@ -1772,6 +1772,40 @@
|
||||
"sendIdTokenOnLogout": "true",
|
||||
"passMaxAge": "false"
|
||||
}
|
||||
},
|
||||
{
|
||||
"alias": "bitbucket_data_center",
|
||||
"displayName": "Bitbucket Data Center",
|
||||
"internalId": "b77b4ead-20e8-451c-ad27-99f92d561616",
|
||||
"providerId": "oauth2",
|
||||
"enabled": true,
|
||||
"updateProfileFirstLoginMode": "on",
|
||||
"trustEmail": true,
|
||||
"storeToken": true,
|
||||
"addReadTokenRoleOnCreate": false,
|
||||
"authenticateByDefault": false,
|
||||
"linkOnly": false,
|
||||
"hideOnLogin": false,
|
||||
"config": {
|
||||
"givenNameClaim": "given_name",
|
||||
"userInfoUrl": "https://${WEB_HOST}/bitbucket-dc-proxy/oauth2/userinfo",
|
||||
"clientId": "$BITBUCKET_DATA_CENTER_CLIENT_ID",
|
||||
"tokenUrl": "https://${BITBUCKET_DATA_CENTER_HOST}/rest/oauth2/latest/token",
|
||||
"acceptsPromptNoneForwardFromClient": "false",
|
||||
"fullNameClaim": "name",
|
||||
"userIDClaim": "sub",
|
||||
"emailClaim": "email",
|
||||
"userNameClaim": "preferred_username",
|
||||
"caseSensitiveOriginalUsername": "false",
|
||||
"familyNameClaim": "family_name",
|
||||
"pkceEnabled": "false",
|
||||
"authorizationUrl": "https://${BITBUCKET_DATA_CENTER_HOST}/rest/oauth2/latest/authorize",
|
||||
"clientAuthMethod": "client_secret_post",
|
||||
"syncMode": "IMPORT",
|
||||
"clientSecret": "$BITBUCKET_DATA_CENTER_CLIENT_SECRET",
|
||||
"allowedClockSkew": "0",
|
||||
"defaultScope": "REPO_WRITE"
|
||||
}
|
||||
}
|
||||
],
|
||||
"identityProviderMappers": [
|
||||
@@ -1829,6 +1863,26 @@
|
||||
"syncMode": "FORCE",
|
||||
"attribute": "identity_provider"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "id-mapper",
|
||||
"identityProviderAlias": "bitbucket_data_center",
|
||||
"identityProviderMapper": "oidc-user-attribute-idp-mapper",
|
||||
"config": {
|
||||
"syncMode": "FORCE",
|
||||
"claim": "sub",
|
||||
"user.attribute": "bitbucket_data_center_id"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "identity-provider",
|
||||
"identityProviderAlias": "bitbucket_data_center",
|
||||
"identityProviderMapper": "hardcoded-attribute-idp-mapper",
|
||||
"config": {
|
||||
"attribute.value": "bitbucket_data_center",
|
||||
"syncMode": "FORCE",
|
||||
"attribute": "identity_provider"
|
||||
}
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
|
||||
@@ -50,8 +50,10 @@ repos:
|
||||
- ./
|
||||
- stripe==11.5.0
|
||||
- pygithub==2.6.1
|
||||
# To see gaps add `--html-report mypy-report/`
|
||||
entry: mypy --config-file enterprise/dev_config/python/mypy.ini enterprise/
|
||||
# Use -p (package) to avoid dual module name conflict when using MYPYPATH
|
||||
# MYPYPATH=enterprise allows resolving bare imports like "from integrations.xxx"
|
||||
# Note: tests package excluded to avoid conflict with core openhands tests
|
||||
entry: bash -c 'MYPYPATH=enterprise mypy --config-file enterprise/dev_config/python/mypy.ini -p integrations -p server -p storage -p sync'
|
||||
always_run: true
|
||||
pass_filenames: false
|
||||
files: ^enterprise/
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
warn_unused_configs = True
|
||||
ignore_missing_imports = True
|
||||
check_untyped_defs = True
|
||||
explicit_package_bases = True
|
||||
warn_unreachable = True
|
||||
warn_redundant_casts = True
|
||||
no_implicit_optional = True
|
||||
|
||||
@@ -200,7 +200,7 @@ class MetricsCollector(ABC):
|
||||
"""Base class for metrics collectors."""
|
||||
|
||||
@abstractmethod
|
||||
def collect(self) -> List[MetricResult]:
|
||||
async def collect(self) -> List[MetricResult]:
|
||||
"""Collect metrics and return results."""
|
||||
pass
|
||||
|
||||
@@ -264,12 +264,13 @@ class SystemMetricsCollector(MetricsCollector):
|
||||
def collector_name(self) -> str:
|
||||
return "system_metrics"
|
||||
|
||||
def collect(self) -> List[MetricResult]:
|
||||
async def collect(self) -> List[MetricResult]:
|
||||
results = []
|
||||
|
||||
# Collect user count
|
||||
with session_maker() as session:
|
||||
user_count = session.query(UserSettings).count()
|
||||
async with a_session_maker() as session:
|
||||
user_count_result = await session.execute(select(func.count()).select_from(UserSettings))
|
||||
user_count = user_count_result.scalar()
|
||||
results.append(MetricResult(
|
||||
key="total_users",
|
||||
value=user_count
|
||||
@@ -277,9 +278,11 @@ class SystemMetricsCollector(MetricsCollector):
|
||||
|
||||
# Collect conversation count (last 30 days)
|
||||
thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
|
||||
conversation_count = session.query(StoredConversationMetadata)\
|
||||
.filter(StoredConversationMetadata.created_at >= thirty_days_ago)\
|
||||
.count()
|
||||
conversation_count_result = await session.execute(
|
||||
select(func.count()).select_from(StoredConversationMetadata)
|
||||
.where(StoredConversationMetadata.created_at >= thirty_days_ago)
|
||||
)
|
||||
conversation_count = conversation_count_result.scalar()
|
||||
|
||||
results.append(MetricResult(
|
||||
key="conversations_30d",
|
||||
@@ -303,7 +306,7 @@ class TelemetryCollectionProcessor(MaintenanceTaskProcessor):
|
||||
"""Collect metrics from all registered collectors."""
|
||||
|
||||
# Check if collection is needed
|
||||
if not self._should_collect():
|
||||
if not await self._should_collect():
|
||||
return {"status": "skipped", "reason": "too_recent"}
|
||||
|
||||
# Collect metrics from all registered collectors
|
||||
@@ -313,7 +316,7 @@ class TelemetryCollectionProcessor(MaintenanceTaskProcessor):
|
||||
for collector in collector_registry.get_all_collectors():
|
||||
try:
|
||||
if collector.should_collect():
|
||||
results = collector.collect()
|
||||
results = await collector.collect()
|
||||
for result in results:
|
||||
all_metrics[result.key] = result.value
|
||||
collector_results[collector.collector_name] = len(results)
|
||||
@@ -322,13 +325,13 @@ class TelemetryCollectionProcessor(MaintenanceTaskProcessor):
|
||||
collector_results[collector.collector_name] = f"error: {e}"
|
||||
|
||||
# Store metrics in database
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
telemetry_record = TelemetryMetrics(
|
||||
metrics_data=all_metrics,
|
||||
collected_at=datetime.now(timezone.utc)
|
||||
)
|
||||
session.add(telemetry_record)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
# Note: No need to track last_collection_at separately
|
||||
# Can be derived from MAX(collected_at) in telemetry_metrics
|
||||
@@ -339,11 +342,12 @@ class TelemetryCollectionProcessor(MaintenanceTaskProcessor):
|
||||
"collectors_run": collector_results
|
||||
}
|
||||
|
||||
def _should_collect(self) -> bool:
|
||||
async def _should_collect(self) -> bool:
|
||||
"""Check if collection is needed based on interval."""
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
# Get last collection time from metrics table
|
||||
last_collected = session.query(func.max(TelemetryMetrics.collected_at)).scalar()
|
||||
result = await session.execute(select(func.max(TelemetryMetrics.collected_at)))
|
||||
last_collected = result.scalar()
|
||||
if not last_collected:
|
||||
return True
|
||||
|
||||
@@ -366,17 +370,19 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
"""Upload pending metrics to Replicated."""
|
||||
|
||||
# Get pending metrics
|
||||
with session_maker() as session:
|
||||
pending_metrics = session.query(TelemetryMetrics)\
|
||||
.filter(TelemetryMetrics.uploaded_at.is_(None))\
|
||||
.order_by(TelemetryMetrics.collected_at)\
|
||||
.all()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(TelemetryMetrics)
|
||||
.where(TelemetryMetrics.uploaded_at.is_(None))
|
||||
.order_by(TelemetryMetrics.collected_at)
|
||||
)
|
||||
pending_metrics = result.scalars().all()
|
||||
|
||||
if not pending_metrics:
|
||||
return {"status": "no_pending_metrics"}
|
||||
|
||||
# Get admin email - skip if not available
|
||||
admin_email = self._get_admin_email()
|
||||
admin_email = await self._get_admin_email()
|
||||
if not admin_email:
|
||||
logger.info("Skipping telemetry upload - no admin email available")
|
||||
return {
|
||||
@@ -413,13 +419,15 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
await instance.set_status(InstanceStatus.RUNNING)
|
||||
|
||||
# Mark as uploaded
|
||||
with session_maker() as session:
|
||||
record = session.query(TelemetryMetrics)\
|
||||
.filter(TelemetryMetrics.id == metric_record.id)\
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(TelemetryMetrics)
|
||||
.where(TelemetryMetrics.id == metric_record.id)
|
||||
)
|
||||
record = result.scalar_one_or_none()
|
||||
if record:
|
||||
record.uploaded_at = datetime.now(timezone.utc)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
uploaded_count += 1
|
||||
|
||||
@@ -427,14 +435,16 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
logger.error(f"Failed to upload metrics {metric_record.id}: {e}")
|
||||
|
||||
# Update error info
|
||||
with session_maker() as session:
|
||||
record = session.query(TelemetryMetrics)\
|
||||
.filter(TelemetryMetrics.id == metric_record.id)\
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(TelemetryMetrics)
|
||||
.where(TelemetryMetrics.id == metric_record.id)
|
||||
)
|
||||
record = result.scalar_one_or_none()
|
||||
if record:
|
||||
record.upload_attempts += 1
|
||||
record.last_upload_error = str(e)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
failed_count += 1
|
||||
|
||||
@@ -448,7 +458,7 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
"total_processed": len(pending_metrics)
|
||||
}
|
||||
|
||||
def _get_admin_email(self) -> str | None:
|
||||
async def _get_admin_email(self) -> str | None:
|
||||
"""Get administrator email for customer identification."""
|
||||
# 1. Check environment variable first
|
||||
env_admin_email = os.getenv('OPENHANDS_ADMIN_EMAIL')
|
||||
@@ -457,12 +467,15 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
return env_admin_email
|
||||
|
||||
# 2. Use first active user's email (earliest accepted_tos)
|
||||
with session_maker() as session:
|
||||
first_user = session.query(UserSettings)\
|
||||
.filter(UserSettings.email.isnot(None))\
|
||||
.filter(UserSettings.accepted_tos.isnot(None))\
|
||||
.order_by(UserSettings.accepted_tos.asc())\
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(UserSettings)
|
||||
.where(UserSettings.email.isnot(None))
|
||||
.where(UserSettings.accepted_tos.isnot(None))
|
||||
.order_by(UserSettings.accepted_tos.asc())
|
||||
.limit(1)
|
||||
)
|
||||
first_user = result.scalar_one_or_none()
|
||||
|
||||
if first_user and first_user.email:
|
||||
logger.info(f"Using first active user email: {first_user.email}")
|
||||
@@ -474,15 +487,16 @@ class TelemetryUploadProcessor(MaintenanceTaskProcessor):
|
||||
|
||||
async def _update_telemetry_identity(self, customer_id: str, instance_id: str) -> None:
|
||||
"""Update or create telemetry identity record."""
|
||||
with session_maker() as session:
|
||||
identity = session.query(TelemetryIdentity).first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(select(TelemetryIdentity).limit(1))
|
||||
identity = result.scalar_one_or_none()
|
||||
if not identity:
|
||||
identity = TelemetryIdentity()
|
||||
session.add(identity)
|
||||
|
||||
identity.customer_id = customer_id
|
||||
identity.instance_id = instance_id
|
||||
session.commit()
|
||||
await session.commit()
|
||||
```
|
||||
|
||||
### 4.4 License Warning System
|
||||
@@ -503,11 +517,13 @@ async def get_license_status():
|
||||
if not _is_openhands_enterprise():
|
||||
return {"warn": False, "message": ""}
|
||||
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
# Get last successful upload time from metrics table
|
||||
last_upload = session.query(func.max(TelemetryMetrics.uploaded_at))\
|
||||
.filter(TelemetryMetrics.uploaded_at.isnot(None))\
|
||||
.scalar()
|
||||
result = await session.execute(
|
||||
select(func.max(TelemetryMetrics.uploaded_at))
|
||||
.where(TelemetryMetrics.uploaded_at.isnot(None))
|
||||
)
|
||||
last_upload = result.scalar()
|
||||
|
||||
if not last_upload:
|
||||
# No successful uploads yet - show warning after 4 days
|
||||
@@ -521,10 +537,13 @@ async def get_license_status():
|
||||
|
||||
if days_since_upload > 4:
|
||||
# Find oldest unsent batch
|
||||
oldest_unsent = session.query(TelemetryMetrics)\
|
||||
.filter(TelemetryMetrics.uploaded_at.is_(None))\
|
||||
.order_by(TelemetryMetrics.collected_at)\
|
||||
.first()
|
||||
result = await session.execute(
|
||||
select(TelemetryMetrics)
|
||||
.where(TelemetryMetrics.uploaded_at.is_(None))
|
||||
.order_by(TelemetryMetrics.collected_at)
|
||||
.limit(1)
|
||||
)
|
||||
oldest_unsent = result.scalar_one_or_none()
|
||||
|
||||
if oldest_unsent:
|
||||
# Calculate expiration date (oldest unsent + 34 days)
|
||||
@@ -630,19 +649,23 @@ spec:
|
||||
- python
|
||||
- -c
|
||||
- |
|
||||
import asyncio
|
||||
from enterprise.storage.maintenance_task import MaintenanceTask, MaintenanceTaskStatus
|
||||
from enterprise.storage.database import session_maker
|
||||
from enterprise.storage.database import a_session_maker
|
||||
from enterprise.server.telemetry.collection_processor import TelemetryCollectionProcessor
|
||||
|
||||
# Create collection task
|
||||
processor = TelemetryCollectionProcessor()
|
||||
task = MaintenanceTask()
|
||||
task.set_processor(processor)
|
||||
task.status = MaintenanceTaskStatus.PENDING
|
||||
async def main():
|
||||
# Create collection task
|
||||
processor = TelemetryCollectionProcessor()
|
||||
task = MaintenanceTask()
|
||||
task.set_processor(processor)
|
||||
task.status = MaintenanceTaskStatus.PENDING
|
||||
|
||||
with session_maker() as session:
|
||||
session.add(task)
|
||||
session.commit()
|
||||
async with a_session_maker() as session:
|
||||
session.add(task)
|
||||
await session.commit()
|
||||
|
||||
asyncio.run(main())
|
||||
restartPolicy: OnFailure
|
||||
```
|
||||
|
||||
@@ -680,23 +703,27 @@ spec:
|
||||
- python
|
||||
- -c
|
||||
- |
|
||||
import asyncio
|
||||
from enterprise.storage.maintenance_task import MaintenanceTask, MaintenanceTaskStatus
|
||||
from enterprise.storage.database import session_maker
|
||||
from enterprise.storage.database import a_session_maker
|
||||
from enterprise.server.telemetry.upload_processor import TelemetryUploadProcessor
|
||||
import os
|
||||
|
||||
# Create upload task
|
||||
processor = TelemetryUploadProcessor(
|
||||
replicated_publishable_key=os.getenv('REPLICATED_PUBLISHABLE_KEY'),
|
||||
replicated_app_slug=os.getenv('REPLICATED_APP_SLUG', 'openhands-enterprise')
|
||||
)
|
||||
task = MaintenanceTask()
|
||||
task.set_processor(processor)
|
||||
task.status = MaintenanceTaskStatus.PENDING
|
||||
async def main():
|
||||
# Create upload task
|
||||
processor = TelemetryUploadProcessor(
|
||||
replicated_publishable_key=os.getenv('REPLICATED_PUBLISHABLE_KEY'),
|
||||
replicated_app_slug=os.getenv('REPLICATED_APP_SLUG', 'openhands-enterprise')
|
||||
)
|
||||
task = MaintenanceTask()
|
||||
task.set_processor(processor)
|
||||
task.status = MaintenanceTaskStatus.PENDING
|
||||
|
||||
with session_maker() as session:
|
||||
session.add(task)
|
||||
session.commit()
|
||||
async with a_session_maker() as session:
|
||||
session.add(task)
|
||||
await session.commit()
|
||||
|
||||
asyncio.run(main())
|
||||
restartPolicy: OnFailure
|
||||
```
|
||||
|
||||
|
||||
131
enterprise/doc/design-doc/plugin-launch-flow.md
Normal file
131
enterprise/doc/design-doc/plugin-launch-flow.md
Normal file
@@ -0,0 +1,131 @@
|
||||
# Plugin Launch Flow
|
||||
|
||||
This document describes how plugins are launched in OpenHands Saas / Enterprise, from the plugin directory through to agent execution.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
```
|
||||
Plugin Directory ──▶ Frontend /launch ──▶ App Server ──▶ Agent Server ──▶ SDK
|
||||
(external) (modal) (API) (in sandbox) (plugin loading)
|
||||
```
|
||||
|
||||
| Component | Responsibility |
|
||||
|-----------|---------------|
|
||||
| **Plugin Directory** | Index plugins, present to user, construct launch URLs |
|
||||
| **Frontend** | Display confirmation modal, collect parameters, call API |
|
||||
| **App Server** | Validate request, pass plugin specs to agent server |
|
||||
| **Agent Server** | Run inside sandbox, delegate plugin loading to SDK |
|
||||
| **SDK** | Fetch plugins, load contents, merge skills/hooks/MCP into agent |
|
||||
|
||||
## User Experience
|
||||
|
||||
### Plugin Directory
|
||||
|
||||
The plugin directory presents users with a catalog of available plugins. For each plugin, users see:
|
||||
- Plugin name and description (from `plugin.json`)
|
||||
- Author and version information
|
||||
- A "Launch" button
|
||||
|
||||
When a user clicks "Launch", the plugin directory:
|
||||
1. Reads the plugin's `entry_command` to know which slash command to invoke
|
||||
2. Determines what parameters the plugin accepts (if any)
|
||||
3. Redirects to OpenHands with this information encoded in the URL
|
||||
|
||||
### Parameter Collection
|
||||
|
||||
If a plugin requires user input (API keys, configuration values, etc.), the frontend displays a form modal before starting the conversation. Parameters are passed in the launch URL and rendered as form fields based on their type:
|
||||
|
||||
- **String values** → Text input
|
||||
- **Number values** → Number input
|
||||
- **Boolean values** → Checkbox
|
||||
|
||||
Only primitive types are supported. Complex types (arrays, objects) are not currently supported for parameter input.
|
||||
|
||||
The user fills in required values, then clicks "Start Conversation" to proceed.
|
||||
|
||||
## Launch Flow
|
||||
|
||||
1. **Plugin Directory** (external) constructs a launch URL to the OpenHands app server when user clicks "Launch":
|
||||
```
|
||||
/launch?plugins=BASE64_JSON&message=/city-weather:now%20Tokyo
|
||||
```
|
||||
|
||||
The `plugins` parameter includes any parameter definitions with default values:
|
||||
```json
|
||||
[{
|
||||
"source": "github:owner/repo",
|
||||
"repo_path": "plugins/my-plugin",
|
||||
"parameters": {"api_key": "", "timeout": 30, "debug": false}
|
||||
}]
|
||||
```
|
||||
|
||||
2. **OpenHands Frontend** (`/launch` route, [PR #12699](https://github.com/OpenHands/OpenHands/pull/12699)) displays modal with parameter form, collects user input
|
||||
|
||||
3. **OpenHands App Server** ([PR #12338](https://github.com/OpenHands/OpenHands/pull/12338)) receives the API call:
|
||||
```
|
||||
POST /api/v1/app-conversations
|
||||
{
|
||||
"plugins": [{"source": "github:owner/repo", "repo_path": "plugins/city-weather"}],
|
||||
"initial_message": {"content": [{"type": "text", "text": "/city-weather:now Tokyo"}]}
|
||||
}
|
||||
```
|
||||
|
||||
Call stack:
|
||||
- `AppConversationRouter` receives request with `PluginSpec` list
|
||||
- `LiveStatusAppConversationService._finalize_conversation_request()` converts `PluginSpec` → `PluginSource`
|
||||
- Creates `StartConversationRequest(plugins=sdk_plugins, ...)` and sends to agent server
|
||||
|
||||
4. **Agent Server** (inside sandbox, [SDK PR #1651](https://github.com/OpenHands/software-agent-sdk/pull/1651)) stores specs, defers loading:
|
||||
|
||||
Call stack:
|
||||
- `ConversationService.start_conversation()` receives `StartConversationRequest`
|
||||
- Creates `StoredConversation` with plugin specs
|
||||
- Creates `LocalConversation(plugins=request.plugins, ...)`
|
||||
- Plugin loading deferred until first `run()` or `send_message()`
|
||||
|
||||
5. **SDK** fetches and loads plugins on first use:
|
||||
|
||||
Call stack:
|
||||
- `LocalConversation._ensure_plugins_loaded()` triggered by first message
|
||||
- For each plugin spec:
|
||||
- `Plugin.fetch(source, ref, repo_path)` → clones/caches git repo
|
||||
- `Plugin.load(path)` → parses `plugin.json`, loads commands/skills/hooks
|
||||
- `plugin.add_skills_to(context)` → merges skills into agent
|
||||
- `plugin.add_mcp_config_to(config)` → merges MCP servers
|
||||
|
||||
6. **Agent** receives message, `/city-weather:now` triggers the skill
|
||||
|
||||
## Key Design Decisions
|
||||
|
||||
### Plugin Loading in Sandbox
|
||||
|
||||
Plugins load **inside the sandbox** because:
|
||||
- Plugin hooks and scripts need isolated execution
|
||||
- MCP servers run inside the sandbox
|
||||
- Skills may reference sandbox filesystem
|
||||
|
||||
### Entry Command Handling
|
||||
|
||||
The `entry_command` field in `plugin.json` allows plugin authors to declare a default command:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "city-weather",
|
||||
"entry_command": "now"
|
||||
}
|
||||
```
|
||||
|
||||
This flows through the system:
|
||||
1. Plugin author declares `entry_command` in plugin.json
|
||||
2. Plugin directory reads it when indexing
|
||||
3. Plugin directory includes `/city-weather:now` in the launch URL's `message` parameter
|
||||
4. Message passes through to agent as `initial_message`
|
||||
|
||||
The SDK exposes this field but does not auto-invoke it—callers control the initial message.
|
||||
|
||||
## Related
|
||||
|
||||
- [OpenHands PR #12338](https://github.com/OpenHands/OpenHands/pull/12338) - App server plugin support
|
||||
- [OpenHands PR #12699](https://github.com/OpenHands/OpenHands/pull/12699) - Frontend `/launch` route
|
||||
- [SDK PR #1651](https://github.com/OpenHands/software-agent-sdk/pull/1651) - Agent server plugin loading
|
||||
- [SDK PR #1647](https://github.com/OpenHands/software-agent-sdk/pull/1647) - Plugin.fetch() for remote plugin fetching
|
||||
@@ -1,207 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
This script can be removed once orgs is established - probably after Feb 15 2026
|
||||
|
||||
Downgrade script for migrated users.
|
||||
|
||||
This script identifies users who have been migrated (already_migrated=True)
|
||||
and reverts them back to the pre-migration state.
|
||||
|
||||
Usage:
|
||||
# Dry run - just list the users that would be downgraded
|
||||
python downgrade_migrated_users.py --dry-run
|
||||
|
||||
# Downgrade a specific user by their keycloak_user_id
|
||||
python downgrade_migrated_users.py --user-id <user_id>
|
||||
|
||||
# Downgrade all migrated users (with confirmation)
|
||||
python downgrade_migrated_users.py --all
|
||||
|
||||
# Downgrade all migrated users without confirmation (dangerous!)
|
||||
python downgrade_migrated_users.py --all --no-confirm
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import sys
|
||||
|
||||
# Add the enterprise directory to the path
|
||||
sys.path.insert(0, '/workspace/project/OpenHands/enterprise')
|
||||
|
||||
from server.logger import logger
|
||||
from sqlalchemy import select, text
|
||||
from storage.database import session_maker
|
||||
from storage.user_settings import UserSettings
|
||||
from storage.user_store import UserStore
|
||||
|
||||
|
||||
def get_migrated_users() -> list[str]:
|
||||
"""Get list of keycloak_user_ids for users who have been migrated.
|
||||
|
||||
This includes:
|
||||
1. Users with already_migrated=True in user_settings (migrated users)
|
||||
2. Users in the 'user' table who don't have a user_settings entry (new sign-ups)
|
||||
"""
|
||||
with session_maker() as session:
|
||||
# Get users from user_settings with already_migrated=True
|
||||
migrated_result = session.execute(
|
||||
select(UserSettings.keycloak_user_id).where(
|
||||
UserSettings.already_migrated.is_(True)
|
||||
)
|
||||
)
|
||||
migrated_users = {row[0] for row in migrated_result.fetchall() if row[0]}
|
||||
|
||||
# Get users from the 'user' table (new sign-ups won't have user_settings)
|
||||
# These are users who signed up after the migration was deployed
|
||||
new_signup_result = session.execute(
|
||||
text("""
|
||||
SELECT CAST(u.id AS VARCHAR)
|
||||
FROM "user" u
|
||||
WHERE NOT EXISTS (
|
||||
SELECT 1 FROM user_settings us
|
||||
WHERE us.keycloak_user_id = CAST(u.id AS VARCHAR)
|
||||
)
|
||||
""")
|
||||
)
|
||||
new_signups = {row[0] for row in new_signup_result.fetchall() if row[0]}
|
||||
|
||||
# Combine both sets
|
||||
all_users = migrated_users | new_signups
|
||||
return list(all_users)
|
||||
|
||||
|
||||
async def downgrade_user(user_id: str) -> bool:
|
||||
"""Downgrade a single user.
|
||||
|
||||
Args:
|
||||
user_id: The keycloak_user_id to downgrade
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
result = await UserStore.downgrade_user(user_id)
|
||||
if result:
|
||||
print(f'✓ Successfully downgraded user: {user_id}')
|
||||
return True
|
||||
else:
|
||||
print(f'✗ Failed to downgrade user: {user_id}')
|
||||
return False
|
||||
except Exception as e:
|
||||
print(f'✗ Error downgrading user {user_id}: {e}')
|
||||
logger.exception(
|
||||
'downgrade_script:error',
|
||||
extra={'user_id': user_id, 'error': str(e)},
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
async def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Downgrade migrated users back to pre-migration state'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
action='store_true',
|
||||
help='Just list users that would be downgraded, without making changes',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--user-id',
|
||||
type=str,
|
||||
help='Downgrade a specific user by keycloak_user_id',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--all',
|
||||
action='store_true',
|
||||
help='Downgrade all migrated users',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--no-confirm',
|
||||
action='store_true',
|
||||
help='Skip confirmation prompt (use with caution!)',
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Get list of migrated users
|
||||
migrated_users = get_migrated_users()
|
||||
print(f'\nFound {len(migrated_users)} migrated user(s).')
|
||||
|
||||
if args.dry_run:
|
||||
print('\n--- DRY RUN MODE ---')
|
||||
print('The following users would be downgraded:')
|
||||
for user_id in migrated_users:
|
||||
print(f' - {user_id}')
|
||||
print('\nNo changes were made.')
|
||||
return
|
||||
|
||||
if args.user_id:
|
||||
# Downgrade a specific user
|
||||
if args.user_id not in migrated_users:
|
||||
print(f'\nUser {args.user_id} is not in the migrated users list.')
|
||||
print('Either the user was not migrated, or the user_id is incorrect.')
|
||||
return
|
||||
|
||||
print(f'\nDowngrading user: {args.user_id}')
|
||||
if not args.no_confirm:
|
||||
confirm = input('Are you sure? (yes/no): ')
|
||||
if confirm.lower() != 'yes':
|
||||
print('Cancelled.')
|
||||
return
|
||||
|
||||
success = await downgrade_user(args.user_id)
|
||||
if success:
|
||||
print('\nDowngrade completed successfully.')
|
||||
else:
|
||||
print('\nDowngrade failed. Check logs for details.')
|
||||
sys.exit(1)
|
||||
|
||||
elif args.all:
|
||||
# Downgrade all migrated users
|
||||
if not migrated_users:
|
||||
print('\nNo migrated users to downgrade.')
|
||||
return
|
||||
|
||||
print(f'\n⚠️ About to downgrade {len(migrated_users)} user(s).')
|
||||
if not args.no_confirm:
|
||||
print('\nThis will:')
|
||||
print(' - Revert LiteLLM team/user budget settings')
|
||||
print(' - Delete organization entries')
|
||||
print(' - Delete user entries in the new schema')
|
||||
print(' - Reset the already_migrated flag')
|
||||
print('\nUsers to downgrade:')
|
||||
for user_id in migrated_users[:10]: # Show first 10
|
||||
print(f' - {user_id}')
|
||||
if len(migrated_users) > 10:
|
||||
print(f' ... and {len(migrated_users) - 10} more')
|
||||
|
||||
confirm = input('\nType "yes" to proceed: ')
|
||||
if confirm.lower() != 'yes':
|
||||
print('Cancelled.')
|
||||
return
|
||||
|
||||
print('\nStarting downgrade...\n')
|
||||
success_count = 0
|
||||
fail_count = 0
|
||||
|
||||
for user_id in migrated_users:
|
||||
success = await downgrade_user(user_id)
|
||||
if success:
|
||||
success_count += 1
|
||||
else:
|
||||
fail_count += 1
|
||||
|
||||
print('\n--- Summary ---')
|
||||
print(f'Successful: {success_count}')
|
||||
print(f'Failed: {fail_count}')
|
||||
|
||||
if fail_count > 0:
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
parser.print_help()
|
||||
print('\nPlease specify --dry-run, --user-id, or --all')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
asyncio.run(main())
|
||||
@@ -109,6 +109,9 @@ lines.append(
|
||||
lines.append(
|
||||
'OPENHANDS_BITBUCKET_SERVICE_CLS=integrations.bitbucket.bitbucket_service.SaaSBitBucketService'
|
||||
)
|
||||
lines.append(
|
||||
'OPENHANDS_BITBUCKET_DATA_CENTER_SERVICE_CLS=integrations.bitbucket_data_center.bitbucket_dc_service.SaaSBitbucketDCService'
|
||||
)
|
||||
lines.append(
|
||||
'OPENHANDS_CONVERSATION_VALIDATOR_CLS=storage.saas_conversation_validator.SaasConversationValidator'
|
||||
)
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
import os
|
||||
|
||||
import posthog
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
# Initialize PostHog
|
||||
posthog.api_key = os.environ.get('POSTHOG_CLIENT_KEY', 'phc_placeholder')
|
||||
posthog.host = os.environ.get('POSTHOG_HOST', 'https://us.i.posthog.com')
|
||||
|
||||
# Log PostHog configuration with masked API key for security
|
||||
api_key = posthog.api_key
|
||||
if api_key and len(api_key) > 8:
|
||||
masked_key = f'{api_key[:4]}...{api_key[-4:]}'
|
||||
else:
|
||||
masked_key = 'not_set_or_too_short'
|
||||
logger.info('posthog_configuration', extra={'posthog_api_key_masked': masked_key})
|
||||
|
||||
# Global toggle for the experiment manager
|
||||
ENABLE_EXPERIMENT_MANAGER = (
|
||||
os.environ.get('ENABLE_EXPERIMENT_MANAGER', 'false').lower() == 'true'
|
||||
)
|
||||
|
||||
# Get the current experiment type from environment variable
|
||||
# If None, no experiment is running
|
||||
EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT = os.environ.get(
|
||||
'EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT', ''
|
||||
)
|
||||
# System prompt experiment toggle
|
||||
EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT = os.environ.get(
|
||||
'EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT', ''
|
||||
)
|
||||
|
||||
EXPERIMENT_CLAUDE4_VS_GPT5 = os.environ.get('EXPERIMENT_CLAUDE4_VS_GPT5', '')
|
||||
|
||||
EXPERIMENT_CONDENSER_MAX_STEP = os.environ.get('EXPERIMENT_CONDENSER_MAX_STEP', '')
|
||||
|
||||
logger.info(
|
||||
'experiment_manager:run_conversation_variant_test:experiment_config',
|
||||
extra={
|
||||
'enable_experiment_manager': ENABLE_EXPERIMENT_MANAGER,
|
||||
'experiment_litellm_default_model_experiment': EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT,
|
||||
'experiment_system_prompt_experiment': EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
'experiment_claude4_vs_gpt5_experiment': EXPERIMENT_CLAUDE4_VS_GPT5,
|
||||
'experiment_condenser_max_step': EXPERIMENT_CONDENSER_MAX_STEP,
|
||||
},
|
||||
)
|
||||
@@ -1,99 +0,0 @@
|
||||
from uuid import UUID
|
||||
|
||||
from experiments.constants import (
|
||||
ENABLE_EXPERIMENT_MANAGER,
|
||||
EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
)
|
||||
from experiments.experiment_versions import (
|
||||
handle_system_prompt_experiment,
|
||||
)
|
||||
|
||||
from openhands.core.config.openhands_config import OpenHandsConfig
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.experiments.experiment_manager import ExperimentManager
|
||||
from openhands.sdk import Agent
|
||||
from openhands.server.session.conversation_init_data import ConversationInitData
|
||||
|
||||
|
||||
class SaaSExperimentManager(ExperimentManager):
|
||||
@staticmethod
|
||||
def run_agent_variant_tests__v1(
|
||||
user_id: str | None, conversation_id: UUID, agent: Agent
|
||||
) -> Agent:
|
||||
if not ENABLE_EXPERIMENT_MANAGER:
|
||||
logger.info(
|
||||
'experiment_manager:run_conversation_variant_test:skipped',
|
||||
extra={'reason': 'experiment_manager_disabled'},
|
||||
)
|
||||
return agent
|
||||
|
||||
if EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT:
|
||||
# Skip experiment for planning agents which require their specialized prompt
|
||||
if agent.system_prompt_filename != 'system_prompt_planning.j2':
|
||||
agent = agent.model_copy(
|
||||
update={'system_prompt_filename': 'system_prompt_long_horizon.j2'}
|
||||
)
|
||||
|
||||
return agent
|
||||
|
||||
@staticmethod
|
||||
def run_conversation_variant_test(
|
||||
user_id, conversation_id, conversation_settings
|
||||
) -> ConversationInitData:
|
||||
"""
|
||||
Run conversation variant test and potentially modify the conversation settings
|
||||
based on the PostHog feature flags.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
conversation_settings: The conversation settings that may include convo_id and llm_model
|
||||
|
||||
Returns:
|
||||
The modified conversation settings
|
||||
"""
|
||||
logger.debug(
|
||||
'experiment_manager:run_conversation_variant_test:started',
|
||||
extra={'user_id': user_id, 'conversation_id': conversation_id},
|
||||
)
|
||||
|
||||
return conversation_settings
|
||||
|
||||
@staticmethod
|
||||
def run_config_variant_test(
|
||||
user_id: str | None, conversation_id: str, config: OpenHandsConfig
|
||||
) -> OpenHandsConfig:
|
||||
"""
|
||||
Run agent config variant test and potentially modify the OpenHands config
|
||||
based on the current experiment type and PostHog feature flags.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
config: The OpenHands configuration
|
||||
|
||||
Returns:
|
||||
The modified OpenHands configuration
|
||||
"""
|
||||
logger.info(
|
||||
'experiment_manager:run_config_variant_test:started',
|
||||
extra={'user_id': user_id},
|
||||
)
|
||||
|
||||
# Skip all experiment processing if the experiment manager is disabled
|
||||
if not ENABLE_EXPERIMENT_MANAGER:
|
||||
logger.info(
|
||||
'experiment_manager:run_config_variant_test:skipped',
|
||||
extra={'reason': 'experiment_manager_disabled'},
|
||||
)
|
||||
return config
|
||||
|
||||
# Pass the entire OpenHands config to the system prompt experiment
|
||||
# Let the experiment handler directly modify the config as needed
|
||||
modified_config = handle_system_prompt_experiment(
|
||||
user_id, conversation_id, config
|
||||
)
|
||||
|
||||
# Condenser max step experiment is applied via conversation variant test,
|
||||
# not config variant test. Return modified config from system prompt only.
|
||||
return modified_config
|
||||
@@ -1,107 +0,0 @@
|
||||
"""
|
||||
LiteLLM model experiment handler.
|
||||
|
||||
This module contains the handler for the LiteLLM model experiment.
|
||||
"""
|
||||
|
||||
import posthog
|
||||
from experiments.constants import EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT
|
||||
from server.constants import (
|
||||
IS_FEATURE_ENV,
|
||||
build_litellm_proxy_model_path,
|
||||
get_default_litellm_model,
|
||||
)
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
|
||||
def handle_litellm_default_model_experiment(
|
||||
user_id, conversation_id, conversation_settings
|
||||
):
|
||||
"""
|
||||
Handle the LiteLLM model experiment.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
conversation_settings: The conversation settings
|
||||
|
||||
Returns:
|
||||
Modified conversation settings
|
||||
"""
|
||||
# No-op if the specific experiment is not enabled
|
||||
if not EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT:
|
||||
logger.info(
|
||||
'experiment_manager:ab_testing:skipped',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'reason': 'experiment_not_enabled',
|
||||
'experiment': EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT,
|
||||
},
|
||||
)
|
||||
return conversation_settings
|
||||
|
||||
# Use experiment name as the flag key
|
||||
try:
|
||||
enabled_variant = posthog.get_feature_flag(
|
||||
EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT, conversation_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:get_feature_flag:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
return conversation_settings
|
||||
|
||||
# Log the experiment event
|
||||
# If this is a feature environment, add "FEATURE_" prefix to user_id for PostHog
|
||||
posthog_user_id = f'FEATURE_{user_id}' if IS_FEATURE_ENV else user_id
|
||||
|
||||
try:
|
||||
posthog.capture(
|
||||
distinct_id=posthog_user_id,
|
||||
event='model_set',
|
||||
properties={
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'original_user_id': user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:posthog_capture:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_LITELLM_DEFAULT_MODEL_EXPERIMENT,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Continue execution as this is not critical
|
||||
|
||||
logger.info(
|
||||
'posthog_capture',
|
||||
extra={
|
||||
'event': 'model_set',
|
||||
'posthog_user_id': posthog_user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
},
|
||||
)
|
||||
|
||||
# Set the model based on the feature flag variant
|
||||
if enabled_variant == 'claude37':
|
||||
# Use the shared utility to construct the LiteLLM proxy model path
|
||||
model = build_litellm_proxy_model_path('claude-3-7-sonnet-20250219')
|
||||
# Update the conversation settings with the selected model
|
||||
conversation_settings.llm_model = model
|
||||
else:
|
||||
# Update the conversation settings with the default model for the current version
|
||||
conversation_settings.llm_model = get_default_litellm_model()
|
||||
|
||||
return conversation_settings
|
||||
@@ -1,181 +0,0 @@
|
||||
"""
|
||||
System prompt experiment handler.
|
||||
|
||||
This module contains the handler for the system prompt experiment that uses
|
||||
the PostHog variant as the system prompt filename.
|
||||
"""
|
||||
|
||||
import copy
|
||||
|
||||
import posthog
|
||||
from experiments.constants import EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT
|
||||
from server.constants import IS_FEATURE_ENV
|
||||
from storage.experiment_assignment_store import ExperimentAssignmentStore
|
||||
|
||||
from openhands.core.config.openhands_config import OpenHandsConfig
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
|
||||
def _get_system_prompt_variant(user_id, conversation_id):
|
||||
"""
|
||||
Get the system prompt variant for the experiment.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
|
||||
Returns:
|
||||
str or None: The PostHog variant name or None if experiment is not enabled or error occurs
|
||||
"""
|
||||
# No-op if the specific experiment is not enabled
|
||||
if not EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT:
|
||||
logger.info(
|
||||
'experiment_manager_002:ab_testing:skipped',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'reason': 'experiment_not_enabled',
|
||||
'experiment': EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
# Use experiment name as the flag key
|
||||
try:
|
||||
enabled_variant = posthog.get_feature_flag(
|
||||
EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT, conversation_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:get_feature_flag:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
# Store the experiment assignment in the database
|
||||
try:
|
||||
experiment_store = ExperimentAssignmentStore()
|
||||
experiment_store.update_experiment_variant(
|
||||
conversation_id=conversation_id,
|
||||
experiment_name='system_prompt_experiment',
|
||||
variant=enabled_variant,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:store_assignment:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
'variant': enabled_variant,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Fail the experiment if we cannot track the splits - results would not be explainable
|
||||
return None
|
||||
|
||||
# Log the experiment event
|
||||
# If this is a feature environment, add "FEATURE_" prefix to user_id for PostHog
|
||||
posthog_user_id = f'FEATURE_{user_id}' if IS_FEATURE_ENV else user_id
|
||||
|
||||
try:
|
||||
posthog.capture(
|
||||
distinct_id=posthog_user_id,
|
||||
event='system_prompt_set',
|
||||
properties={
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'original_user_id': user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:posthog_capture:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_SYSTEM_PROMPT_EXPERIMENT,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Continue execution as this is not critical
|
||||
|
||||
logger.info(
|
||||
'posthog_capture',
|
||||
extra={
|
||||
'event': 'system_prompt_set',
|
||||
'posthog_user_id': posthog_user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
},
|
||||
)
|
||||
|
||||
return enabled_variant
|
||||
|
||||
|
||||
def handle_system_prompt_experiment(
|
||||
user_id, conversation_id, config: OpenHandsConfig
|
||||
) -> OpenHandsConfig:
|
||||
"""
|
||||
Handle the system prompt experiment for OpenHands config.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
config: The OpenHands configuration
|
||||
|
||||
Returns:
|
||||
Modified OpenHands configuration
|
||||
"""
|
||||
enabled_variant = _get_system_prompt_variant(user_id, conversation_id)
|
||||
|
||||
# If variant is None, experiment is not enabled or there was an error
|
||||
if enabled_variant is None:
|
||||
return config
|
||||
|
||||
# Deep copy the config to avoid modifying the original
|
||||
modified_config = copy.deepcopy(config)
|
||||
|
||||
# Set the system prompt filename based on the variant
|
||||
if enabled_variant == 'control':
|
||||
# Use the long-horizon system prompt for the control variant
|
||||
agent_config = modified_config.get_agent_config(modified_config.default_agent)
|
||||
agent_config.system_prompt_filename = 'system_prompt_long_horizon.j2'
|
||||
agent_config.enable_plan_mode = True
|
||||
elif enabled_variant == 'interactive':
|
||||
modified_config.get_agent_config(
|
||||
modified_config.default_agent
|
||||
).system_prompt_filename = 'system_prompt_interactive.j2'
|
||||
elif enabled_variant == 'no_tools':
|
||||
modified_config.get_agent_config(
|
||||
modified_config.default_agent
|
||||
).system_prompt_filename = 'system_prompt.j2'
|
||||
else:
|
||||
logger.error(
|
||||
'system_prompt_experiment:unknown_variant',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'reason': 'no explicit mapping; returning original config',
|
||||
},
|
||||
)
|
||||
return config
|
||||
|
||||
# Log which prompt is being used
|
||||
logger.info(
|
||||
'system_prompt_experiment:prompt_selected',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'system_prompt_filename': modified_config.get_agent_config(
|
||||
modified_config.default_agent
|
||||
).system_prompt_filename,
|
||||
'variant': enabled_variant,
|
||||
},
|
||||
)
|
||||
|
||||
return modified_config
|
||||
@@ -1,137 +0,0 @@
|
||||
"""
|
||||
LiteLLM model experiment handler.
|
||||
|
||||
This module contains the handler for the LiteLLM model experiment.
|
||||
"""
|
||||
|
||||
import posthog
|
||||
from experiments.constants import EXPERIMENT_CLAUDE4_VS_GPT5
|
||||
from server.constants import (
|
||||
IS_FEATURE_ENV,
|
||||
build_litellm_proxy_model_path,
|
||||
get_default_litellm_model,
|
||||
)
|
||||
from storage.experiment_assignment_store import ExperimentAssignmentStore
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.server.session.conversation_init_data import ConversationInitData
|
||||
|
||||
|
||||
def _get_model_variant(user_id: str | None, conversation_id: str) -> str | None:
|
||||
if not EXPERIMENT_CLAUDE4_VS_GPT5:
|
||||
logger.info(
|
||||
'experiment_manager:ab_testing:skipped',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'reason': 'experiment_not_enabled',
|
||||
'experiment': EXPERIMENT_CLAUDE4_VS_GPT5,
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
try:
|
||||
enabled_variant = posthog.get_feature_flag(
|
||||
EXPERIMENT_CLAUDE4_VS_GPT5, conversation_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:get_feature_flag:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CLAUDE4_VS_GPT5,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
# Store the experiment assignment in the database
|
||||
try:
|
||||
experiment_store = ExperimentAssignmentStore()
|
||||
experiment_store.update_experiment_variant(
|
||||
conversation_id=conversation_id,
|
||||
experiment_name='claude4_vs_gpt5_experiment',
|
||||
variant=enabled_variant,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:store_assignment:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CLAUDE4_VS_GPT5,
|
||||
'variant': enabled_variant,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Fail the experiment if we cannot track the splits - results would not be explainable
|
||||
return None
|
||||
|
||||
# Log the experiment event
|
||||
# If this is a feature environment, add "FEATURE_" prefix to user_id for PostHog
|
||||
posthog_user_id = f'FEATURE_{user_id}' if IS_FEATURE_ENV else user_id
|
||||
|
||||
try:
|
||||
posthog.capture(
|
||||
distinct_id=posthog_user_id,
|
||||
event='claude4_or_gpt5_set',
|
||||
properties={
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'original_user_id': user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:posthog_capture:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CLAUDE4_VS_GPT5,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Continue execution as this is not critical
|
||||
|
||||
logger.info(
|
||||
'posthog_capture',
|
||||
extra={
|
||||
'event': 'claude4_or_gpt5_set',
|
||||
'posthog_user_id': posthog_user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
},
|
||||
)
|
||||
|
||||
return enabled_variant
|
||||
|
||||
|
||||
def handle_claude4_vs_gpt5_experiment(
|
||||
user_id: str | None,
|
||||
conversation_id: str,
|
||||
conversation_settings: ConversationInitData,
|
||||
) -> ConversationInitData:
|
||||
"""
|
||||
Handle the LiteLLM model experiment.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
conversation_settings: The conversation settings
|
||||
|
||||
Returns:
|
||||
Modified conversation settings
|
||||
"""
|
||||
|
||||
enabled_variant = _get_model_variant(user_id, conversation_id)
|
||||
|
||||
if not enabled_variant:
|
||||
return conversation_settings
|
||||
|
||||
# Set the model based on the feature flag variant
|
||||
if enabled_variant == 'gpt5':
|
||||
model = build_litellm_proxy_model_path('gpt-5-2025-08-07')
|
||||
conversation_settings.llm_model = model
|
||||
else:
|
||||
conversation_settings.llm_model = get_default_litellm_model()
|
||||
|
||||
return conversation_settings
|
||||
@@ -1,232 +0,0 @@
|
||||
"""
|
||||
Condenser max step experiment handler.
|
||||
|
||||
This module contains the handler for the condenser max step experiment that tests
|
||||
different max_size values for the condenser configuration.
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
import posthog
|
||||
from experiments.constants import EXPERIMENT_CONDENSER_MAX_STEP
|
||||
from server.constants import IS_FEATURE_ENV
|
||||
from storage.experiment_assignment_store import ExperimentAssignmentStore
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.sdk import Agent
|
||||
from openhands.sdk.context.condenser import (
|
||||
LLMSummarizingCondenser,
|
||||
)
|
||||
from openhands.server.session.conversation_init_data import ConversationInitData
|
||||
|
||||
|
||||
def _get_condenser_max_step_variant(user_id, conversation_id):
|
||||
"""
|
||||
Get the condenser max step variant for the experiment.
|
||||
|
||||
Args:
|
||||
user_id: The user ID
|
||||
conversation_id: The conversation ID
|
||||
|
||||
Returns:
|
||||
str or None: The PostHog variant name or None if experiment is not enabled or error occurs
|
||||
"""
|
||||
# No-op if the specific experiment is not enabled
|
||||
if not EXPERIMENT_CONDENSER_MAX_STEP:
|
||||
logger.info(
|
||||
'experiment_manager_004:ab_testing:skipped',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'reason': 'experiment_not_enabled',
|
||||
'experiment': EXPERIMENT_CONDENSER_MAX_STEP,
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
# Use experiment name as the flag key
|
||||
try:
|
||||
enabled_variant = posthog.get_feature_flag(
|
||||
EXPERIMENT_CONDENSER_MAX_STEP, conversation_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:get_feature_flag:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CONDENSER_MAX_STEP,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
# Store the experiment assignment in the database
|
||||
try:
|
||||
experiment_store = ExperimentAssignmentStore()
|
||||
experiment_store.update_experiment_variant(
|
||||
conversation_id=conversation_id,
|
||||
experiment_name='condenser_max_step_experiment',
|
||||
variant=enabled_variant,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:store_assignment:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CONDENSER_MAX_STEP,
|
||||
'variant': enabled_variant,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Fail the experiment if we cannot track the splits - results would not be explainable
|
||||
return None
|
||||
|
||||
# Log the experiment event
|
||||
# If this is a feature environment, add "FEATURE_" prefix to user_id for PostHog
|
||||
posthog_user_id = f'FEATURE_{user_id}' if IS_FEATURE_ENV else user_id
|
||||
|
||||
try:
|
||||
posthog.capture(
|
||||
distinct_id=posthog_user_id,
|
||||
event='condenser_max_step_set',
|
||||
properties={
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'original_user_id': user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'experiment_manager:posthog_capture:failed',
|
||||
extra={
|
||||
'convo_id': conversation_id,
|
||||
'experiment': EXPERIMENT_CONDENSER_MAX_STEP,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
# Continue execution as this is not critical
|
||||
|
||||
logger.info(
|
||||
'posthog_capture',
|
||||
extra={
|
||||
'event': 'condenser_max_step_set',
|
||||
'posthog_user_id': posthog_user_id,
|
||||
'is_feature_env': IS_FEATURE_ENV,
|
||||
'conversation_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
},
|
||||
)
|
||||
|
||||
return enabled_variant
|
||||
|
||||
|
||||
def handle_condenser_max_step_experiment(
|
||||
user_id: str | None,
|
||||
conversation_id: str,
|
||||
conversation_settings: ConversationInitData,
|
||||
) -> ConversationInitData:
|
||||
"""
|
||||
Handle the condenser max step experiment for conversation settings.
|
||||
|
||||
We should not modify persistent user settings. Instead, apply the experiment
|
||||
variant to the conversation's in-memory settings object for this session only.
|
||||
|
||||
Variants:
|
||||
- control -> condenser_max_size = 120
|
||||
- treatment -> condenser_max_size = 80
|
||||
|
||||
Returns the (potentially) modified conversation_settings.
|
||||
"""
|
||||
|
||||
enabled_variant = _get_condenser_max_step_variant(user_id, conversation_id)
|
||||
|
||||
if enabled_variant is None:
|
||||
return conversation_settings
|
||||
|
||||
if enabled_variant == 'control':
|
||||
condenser_max_size = 120
|
||||
elif enabled_variant == 'treatment':
|
||||
condenser_max_size = 80
|
||||
else:
|
||||
logger.error(
|
||||
'condenser_max_step_experiment:unknown_variant',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'reason': 'unknown variant; returning original conversation settings',
|
||||
},
|
||||
)
|
||||
return conversation_settings
|
||||
|
||||
try:
|
||||
# Apply the variant to this conversation only; do not persist to DB.
|
||||
# Not all OpenHands versions expose `condenser_max_size` on settings.
|
||||
if hasattr(conversation_settings, 'condenser_max_size'):
|
||||
conversation_settings.condenser_max_size = condenser_max_size
|
||||
logger.info(
|
||||
'condenser_max_step_experiment:conversation_settings_applied',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'condenser_max_size': condenser_max_size,
|
||||
},
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
'condenser_max_step_experiment:field_missing_on_settings',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'reason': 'condenser_max_size not present on ConversationInitData',
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'condenser_max_step_experiment:apply_failed',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'error': str(e),
|
||||
},
|
||||
)
|
||||
return conversation_settings
|
||||
|
||||
return conversation_settings
|
||||
|
||||
|
||||
def handle_condenser_max_step_experiment__v1(
|
||||
user_id: str | None,
|
||||
conversation_id: UUID,
|
||||
agent: Agent,
|
||||
) -> Agent:
|
||||
enabled_variant = _get_condenser_max_step_variant(user_id, str(conversation_id))
|
||||
|
||||
if enabled_variant is None:
|
||||
return agent
|
||||
|
||||
if enabled_variant == 'control':
|
||||
condenser_max_size = 120
|
||||
elif enabled_variant == 'treatment':
|
||||
condenser_max_size = 80
|
||||
else:
|
||||
logger.error(
|
||||
'condenser_max_step_experiment:unknown_variant',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'convo_id': conversation_id,
|
||||
'variant': enabled_variant,
|
||||
'reason': 'unknown variant; returning original conversation settings',
|
||||
},
|
||||
)
|
||||
return agent
|
||||
|
||||
condenser_llm = agent.llm.model_copy(update={'usage_id': 'condenser'})
|
||||
condenser = LLMSummarizingCondenser(
|
||||
llm=condenser_llm, max_size=condenser_max_size, keep_first=4
|
||||
)
|
||||
|
||||
return agent.model_copy(update={'condenser': condenser})
|
||||
@@ -1,25 +0,0 @@
|
||||
"""
|
||||
Experiment versions package.
|
||||
|
||||
This package contains handlers for different experiment versions.
|
||||
"""
|
||||
|
||||
from experiments.experiment_versions._001_litellm_default_model_experiment import (
|
||||
handle_litellm_default_model_experiment,
|
||||
)
|
||||
from experiments.experiment_versions._002_system_prompt_experiment import (
|
||||
handle_system_prompt_experiment,
|
||||
)
|
||||
from experiments.experiment_versions._003_llm_claude4_vs_gpt5_experiment import (
|
||||
handle_claude4_vs_gpt5_experiment,
|
||||
)
|
||||
from experiments.experiment_versions._004_condenser_max_step_experiment import (
|
||||
handle_condenser_max_step_experiment,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'handle_litellm_default_model_experiment',
|
||||
'handle_system_prompt_experiment',
|
||||
'handle_claude4_vs_gpt5_experiment',
|
||||
'handle_condenser_max_step_experiment',
|
||||
]
|
||||
@@ -0,0 +1,65 @@
|
||||
from pydantic import SecretStr
|
||||
from server.auth.token_manager import TokenManager
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.bitbucket_data_center.bitbucket_dc_service import (
|
||||
BitbucketDCService,
|
||||
)
|
||||
from openhands.integrations.service_types import ProviderType
|
||||
|
||||
|
||||
class SaaSBitbucketDCService(BitbucketDCService):
|
||||
def __init__(
|
||||
self,
|
||||
user_id: str | None = None,
|
||||
external_auth_token: SecretStr | None = None,
|
||||
external_auth_id: str | None = None,
|
||||
token: SecretStr | None = None,
|
||||
external_token_manager: bool = False,
|
||||
base_domain: str | None = None,
|
||||
):
|
||||
logger.debug(
|
||||
f'SaaSBitbucketDCService created with user_id {user_id}, external_auth_id {external_auth_id}, external_auth_token {'set' if external_auth_token else 'None'}, token {'set' if token else 'None'}, external_token_manager {external_token_manager}'
|
||||
)
|
||||
super().__init__(
|
||||
user_id=user_id,
|
||||
external_auth_token=external_auth_token,
|
||||
external_auth_id=external_auth_id,
|
||||
token=token,
|
||||
external_token_manager=external_token_manager,
|
||||
base_domain=base_domain,
|
||||
)
|
||||
|
||||
self.token_manager = TokenManager(external=external_token_manager)
|
||||
self.refresh = True
|
||||
|
||||
async def get_latest_token(self) -> SecretStr | None:
|
||||
bitbucket_dc_token = None
|
||||
if self.external_auth_token:
|
||||
bitbucket_dc_token = SecretStr(
|
||||
await self.token_manager.get_idp_token(
|
||||
self.external_auth_token.get_secret_value(),
|
||||
idp=ProviderType.BITBUCKET_DATA_CENTER,
|
||||
)
|
||||
)
|
||||
logger.debug('Got Bitbucket DC token via external_auth_token')
|
||||
elif self.external_auth_id:
|
||||
offline_token = await self.token_manager.load_offline_token(
|
||||
self.external_auth_id
|
||||
)
|
||||
bitbucket_dc_token = SecretStr(
|
||||
await self.token_manager.get_idp_token_from_offline_token(
|
||||
offline_token, ProviderType.BITBUCKET_DATA_CENTER
|
||||
)
|
||||
)
|
||||
logger.debug('Got Bitbucket DC token via external_auth_id')
|
||||
elif self.user_id:
|
||||
bitbucket_dc_token = SecretStr(
|
||||
await self.token_manager.get_idp_token_from_idp_user_id(
|
||||
self.user_id, ProviderType.BITBUCKET_DATA_CENTER
|
||||
)
|
||||
)
|
||||
logger.debug('Got Bitbucket DC token via user_id')
|
||||
else:
|
||||
logger.warning('external_auth_token and user_id not set!')
|
||||
return bitbucket_dc_token
|
||||
@@ -116,10 +116,8 @@ class GitHubDataCollector:
|
||||
|
||||
return suffix
|
||||
|
||||
def _get_installation_access_token(self, installation_id: str) -> str:
|
||||
token_data = self.github_integration.get_access_token(
|
||||
installation_id # type: ignore[arg-type]
|
||||
)
|
||||
def _get_installation_access_token(self, installation_id: int) -> str:
|
||||
token_data = self.github_integration.get_access_token(installation_id)
|
||||
return token_data.token
|
||||
|
||||
def _check_openhands_author(self, name, login) -> bool:
|
||||
@@ -134,7 +132,7 @@ class GitHubDataCollector:
|
||||
)
|
||||
|
||||
def _get_issue_comments(
|
||||
self, installation_id: str, repo_name: str, issue_number: int, conversation_id
|
||||
self, installation_id: int, repo_name: str, issue_number: int, conversation_id
|
||||
) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Retrieve all comments from an issue until a comment with conversation_id is found
|
||||
@@ -234,7 +232,7 @@ class GitHubDataCollector:
|
||||
f'[Github]: Saved issue #{issue_number} for {github_view.full_repo_name}'
|
||||
)
|
||||
|
||||
def _get_pr_commits(self, installation_id: str, repo_name: str, pr_number: int):
|
||||
def _get_pr_commits(self, installation_id: int, repo_name: str, pr_number: int):
|
||||
commits = []
|
||||
installation_token = self._get_installation_access_token(installation_id)
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
@@ -431,7 +429,7 @@ class GitHubDataCollector:
|
||||
- Num openhands review comments
|
||||
"""
|
||||
pr_number = openhands_pr.pr_number
|
||||
installation_id = openhands_pr.installation_id
|
||||
installation_id = int(openhands_pr.installation_id)
|
||||
repo_id = openhands_pr.repo_id
|
||||
|
||||
# Get installation token and create Github client
|
||||
@@ -569,7 +567,7 @@ class GitHubDataCollector:
|
||||
openhands_helped_author = openhands_commit_count > 0
|
||||
|
||||
# Update the PR with OpenHands statistics
|
||||
update_success = store.update_pr_openhands_stats(
|
||||
update_success = await store.update_pr_openhands_stats(
|
||||
repo_id=repo_id,
|
||||
pr_number=pr_number,
|
||||
original_updated_at=openhands_pr.updated_at,
|
||||
@@ -612,7 +610,7 @@ class GitHubDataCollector:
|
||||
action = payload.get('action', '')
|
||||
return action == 'closed' and 'pull_request' in payload
|
||||
|
||||
def _track_closed_or_merged_pr(self, payload):
|
||||
async def _track_closed_or_merged_pr(self, payload):
|
||||
"""
|
||||
Track PR closed/merged event
|
||||
"""
|
||||
@@ -671,17 +669,17 @@ class GitHubDataCollector:
|
||||
num_general_comments=num_general_comments,
|
||||
)
|
||||
|
||||
store.insert_pr(pr)
|
||||
await store.insert_pr(pr)
|
||||
logger.info(f'Tracked PR {status}: {repo_id}#{pr_number}')
|
||||
|
||||
def process_payload(self, message: Message):
|
||||
async def process_payload(self, message: Message):
|
||||
if not COLLECT_GITHUB_INTERACTIONS:
|
||||
return
|
||||
|
||||
raw_payload = message.message.get('payload', {})
|
||||
|
||||
if self._is_pr_closed_or_merged(raw_payload):
|
||||
self._track_closed_or_merged_pr(raw_payload)
|
||||
await self._track_closed_or_merged_pr(raw_payload)
|
||||
|
||||
async def save_data(self, github_view: ResolverViewInterface):
|
||||
if not COLLECT_GITHUB_INTERACTIONS:
|
||||
|
||||
@@ -10,6 +10,7 @@ from integrations.github.github_view import (
|
||||
GithubIssue,
|
||||
GithubIssueComment,
|
||||
GithubPRComment,
|
||||
GithubViewType,
|
||||
)
|
||||
from integrations.manager import Manager
|
||||
from integrations.models import (
|
||||
@@ -19,9 +20,11 @@ from integrations.models import (
|
||||
from integrations.types import ResolverViewInterface
|
||||
from integrations.utils import (
|
||||
CONVERSATION_URL,
|
||||
ENABLE_SOLVABILITY_ANALYSIS,
|
||||
HOST_URL,
|
||||
OPENHANDS_RESOLVER_TEMPLATES_DIR,
|
||||
get_session_expired_message,
|
||||
get_user_not_found_message,
|
||||
)
|
||||
from integrations.v1_utils import get_saas_user_auth
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
@@ -40,10 +43,9 @@ from openhands.server.types import (
|
||||
SessionExpiredError,
|
||||
)
|
||||
from openhands.storage.data_models.secrets import Secrets
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
|
||||
|
||||
class GithubManager(Manager):
|
||||
class GithubManager(Manager[GithubViewType]):
|
||||
def __init__(
|
||||
self, token_manager: TokenManager, data_collector: GitHubDataCollector
|
||||
):
|
||||
@@ -67,11 +69,8 @@ class GithubManager(Manager):
|
||||
|
||||
return f'{owner}/{repo_name}'
|
||||
|
||||
def _get_installation_access_token(self, installation_id: str) -> str:
|
||||
# get_access_token is typed to only accept int, but it can handle str.
|
||||
token_data = self.github_integration.get_access_token(
|
||||
installation_id # type: ignore[arg-type]
|
||||
)
|
||||
def _get_installation_access_token(self, installation_id: int) -> str:
|
||||
token_data = self.github_integration.get_access_token(installation_id)
|
||||
return token_data.token
|
||||
|
||||
def _add_reaction(
|
||||
@@ -126,6 +125,76 @@ class GithubManager(Manager):
|
||||
|
||||
return False
|
||||
|
||||
def _get_issue_number_from_payload(self, message: Message) -> int | None:
|
||||
"""Extract issue/PR number from a GitHub webhook payload.
|
||||
|
||||
Supports all event types that can trigger jobs:
|
||||
- Labeled issues: payload['issue']['number']
|
||||
- Issue comments: payload['issue']['number']
|
||||
- PR comments: payload['issue']['number'] (PRs are accessed via issue endpoint)
|
||||
- Inline PR comments: payload['pull_request']['number']
|
||||
|
||||
Args:
|
||||
message: The incoming GitHub webhook message
|
||||
|
||||
Returns:
|
||||
The issue/PR number, or None if not found
|
||||
"""
|
||||
payload = message.message.get('payload', {})
|
||||
|
||||
# Labeled issues, issue comments, and PR comments all have 'issue' in payload
|
||||
if 'issue' in payload:
|
||||
return payload['issue']['number']
|
||||
|
||||
# Inline PR comments have 'pull_request' directly in payload
|
||||
if 'pull_request' in payload:
|
||||
return payload['pull_request']['number']
|
||||
|
||||
return None
|
||||
|
||||
def _send_user_not_found_message(self, message: Message, username: str):
|
||||
"""Send a message to the user informing them they need to create an OpenHands account.
|
||||
|
||||
This method handles all supported trigger types:
|
||||
- Labeled issues (action='labeled' with openhands label)
|
||||
- Issue comments (comment containing @openhands)
|
||||
- PR comments (comment containing @openhands on a PR)
|
||||
- Inline PR review comments (comment containing @openhands)
|
||||
|
||||
Args:
|
||||
message: The incoming GitHub webhook message
|
||||
username: The GitHub username to mention in the response
|
||||
"""
|
||||
payload = message.message.get('payload', {})
|
||||
installation_id = message.message['installation']
|
||||
repo_obj = payload['repository']
|
||||
full_repo_name = self._get_full_repo_name(repo_obj)
|
||||
|
||||
# Get installation token to post the comment
|
||||
installation_token = self._get_installation_access_token(installation_id)
|
||||
|
||||
# Determine the issue/PR number based on the event type
|
||||
issue_number = self._get_issue_number_from_payload(message)
|
||||
|
||||
if not issue_number:
|
||||
logger.warning(
|
||||
f'[GitHub] Could not determine issue/PR number to send user not found message for {username}. '
|
||||
f'Payload keys: {list(payload.keys())}'
|
||||
)
|
||||
return
|
||||
|
||||
# Post the comment
|
||||
try:
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(full_repo_name)
|
||||
issue = repo.get_issue(number=issue_number)
|
||||
issue.create_comment(get_user_not_found_message(username))
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f'[GitHub] Failed to send user not found message to {username} '
|
||||
f'on {full_repo_name}#{issue_number}: {e}'
|
||||
)
|
||||
|
||||
async def is_job_requested(self, message: Message) -> bool:
|
||||
self._confirm_incoming_source_type(message)
|
||||
|
||||
@@ -170,7 +239,7 @@ class GithubManager(Manager):
|
||||
async def receive_message(self, message: Message):
|
||||
self._confirm_incoming_source_type(message)
|
||||
try:
|
||||
await call_sync_from_async(self.data_collector.process_payload, message)
|
||||
await self.data_collector.process_payload(message)
|
||||
except Exception:
|
||||
logger.warning(
|
||||
'[Github]: Error processing payload for gh interaction', exc_info=True
|
||||
@@ -179,9 +248,20 @@ class GithubManager(Manager):
|
||||
if await self.is_job_requested(message):
|
||||
payload = message.message.get('payload', {})
|
||||
user_id = payload['sender']['id']
|
||||
username = payload['sender']['login']
|
||||
keycloak_user_id = await self.token_manager.get_user_id_from_idp_user_id(
|
||||
user_id, ProviderType.GITHUB
|
||||
)
|
||||
|
||||
# Check if the user has an OpenHands account
|
||||
if not keycloak_user_id:
|
||||
logger.warning(
|
||||
f'[GitHub] User {username} (id={user_id}) not found in Keycloak. '
|
||||
f'User must create an OpenHands account first.'
|
||||
)
|
||||
self._send_user_not_found_message(message, username)
|
||||
return
|
||||
|
||||
github_view = await GithubFactory.create_github_view_from_payload(
|
||||
message, keycloak_user_id
|
||||
)
|
||||
@@ -193,46 +273,51 @@ class GithubManager(Manager):
|
||||
github_view.installation_id
|
||||
)
|
||||
# Store the installation token
|
||||
self.token_manager.store_org_token(
|
||||
await self.token_manager.store_org_token(
|
||||
github_view.installation_id, installation_token
|
||||
)
|
||||
# Add eyes reaction to acknowledge we've read the request
|
||||
self._add_reaction(github_view, 'eyes', installation_token)
|
||||
await self.start_job(github_view)
|
||||
|
||||
async def send_message(self, message: Message, github_view: ResolverViewInterface):
|
||||
installation_token = self.token_manager.load_org_token(
|
||||
async def send_message(self, message: str, github_view: GithubViewType):
|
||||
"""Send a message to GitHub.
|
||||
|
||||
Args:
|
||||
message: The message content to send (plain text string)
|
||||
github_view: The GitHub view object containing issue/PR/comment info
|
||||
"""
|
||||
installation_token = await self.token_manager.load_org_token(
|
||||
github_view.installation_id
|
||||
)
|
||||
if not installation_token:
|
||||
logger.warning('Missing installation token')
|
||||
return
|
||||
|
||||
outgoing_message = message.message
|
||||
|
||||
if isinstance(github_view, GithubInlinePRComment):
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(github_view.full_repo_name)
|
||||
pr = repo.get_pull(github_view.issue_number)
|
||||
pr.create_review_comment_reply(
|
||||
comment_id=github_view.comment_id, body=outgoing_message
|
||||
comment_id=github_view.comment_id, body=message
|
||||
)
|
||||
|
||||
elif (
|
||||
isinstance(github_view, GithubPRComment)
|
||||
or isinstance(github_view, GithubIssueComment)
|
||||
or isinstance(github_view, GithubIssue)
|
||||
elif isinstance(
|
||||
github_view, (GithubPRComment, GithubIssueComment, GithubIssue)
|
||||
):
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(github_view.full_repo_name)
|
||||
issue = repo.get_issue(number=github_view.issue_number)
|
||||
issue.create_comment(outgoing_message)
|
||||
issue.create_comment(message)
|
||||
|
||||
else:
|
||||
logger.warning('Unsupported location')
|
||||
# Catch any new types added to GithubViewType that aren't handled above
|
||||
logger.warning( # type: ignore[unreachable]
|
||||
f'Unsupported github_view type: {type(github_view).__name__}'
|
||||
)
|
||||
return
|
||||
|
||||
async def start_job(self, github_view: ResolverViewInterface):
|
||||
async def start_job(self, github_view: GithubViewType) -> None:
|
||||
"""Kick off a job with openhands agent.
|
||||
|
||||
1. Get user credential
|
||||
@@ -245,7 +330,7 @@ class GithubManager(Manager):
|
||||
)
|
||||
|
||||
try:
|
||||
msg_info = None
|
||||
msg_info: str = ''
|
||||
|
||||
try:
|
||||
user_info = github_view.user_info
|
||||
@@ -286,19 +371,19 @@ class GithubManager(Manager):
|
||||
# 3. Once the conversation is started, its base cost will include the report's spend as well which allows us to control max budget per resolver task
|
||||
convo_metadata = await github_view.initialize_new_conversation()
|
||||
solvability_summary = None
|
||||
try:
|
||||
if user_token:
|
||||
if not ENABLE_SOLVABILITY_ANALYSIS:
|
||||
logger.info(
|
||||
'[Github]: Solvability report feature is disabled, skipping'
|
||||
)
|
||||
else:
|
||||
try:
|
||||
solvability_summary = await summarize_issue_solvability(
|
||||
github_view, user_token
|
||||
)
|
||||
else:
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
'[Github]: No user token available for solvability analysis'
|
||||
f'[Github]: Error summarizing issue solvability: {str(e)}'
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'[Github]: Error summarizing issue solvability: {str(e)}'
|
||||
)
|
||||
|
||||
saas_user_auth = await get_saas_user_auth(
|
||||
github_view.user_info.keycloak_user_id, self.token_manager
|
||||
@@ -361,15 +446,13 @@ class GithubManager(Manager):
|
||||
|
||||
msg_info = get_session_expired_message(user_info.username)
|
||||
|
||||
msg = self.create_outgoing_message(msg_info)
|
||||
await self.send_message(msg, github_view)
|
||||
await self.send_message(msg_info, github_view)
|
||||
|
||||
except Exception:
|
||||
logger.exception('[Github]: Error starting job')
|
||||
msg = self.create_outgoing_message(
|
||||
msg='Uh oh! There was an unexpected error starting the job :('
|
||||
await self.send_message(
|
||||
'Uh oh! There was an unexpected error starting the job :(', github_view
|
||||
)
|
||||
await self.send_message(msg, github_view)
|
||||
|
||||
try:
|
||||
await self.data_collector.save_data(github_view)
|
||||
|
||||
@@ -122,13 +122,37 @@ class SaaSGitHubService(GitHubService):
|
||||
raise Exception(f'No node_id found for repository {repo_id}')
|
||||
return node_id
|
||||
|
||||
async def _get_external_auth_id(self) -> str | None:
|
||||
"""Get or fetch external_auth_id from Keycloak token if not already set."""
|
||||
if self.external_auth_id:
|
||||
return self.external_auth_id
|
||||
|
||||
if self.external_auth_token:
|
||||
try:
|
||||
user_info = await self.token_manager.get_user_info(
|
||||
self.external_auth_token.get_secret_value()
|
||||
)
|
||||
self.external_auth_id = user_info.sub
|
||||
logger.info(
|
||||
f'Determined external_auth_id from Keycloak token: {self.external_auth_id}'
|
||||
)
|
||||
return self.external_auth_id
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'Could not determine external_auth_id from token: {e}',
|
||||
exc_info=True,
|
||||
)
|
||||
return None
|
||||
|
||||
async def get_paginated_repos(self, page, per_page, sort, installation_id):
|
||||
repositories = await super().get_paginated_repos(
|
||||
page, per_page, sort, installation_id
|
||||
)
|
||||
asyncio.create_task(
|
||||
store_repositories_in_db(repositories, self.external_auth_id)
|
||||
)
|
||||
external_auth_id = await self._get_external_auth_id()
|
||||
if external_auth_id:
|
||||
asyncio.create_task(
|
||||
store_repositories_in_db(repositories, external_auth_id)
|
||||
)
|
||||
return repositories
|
||||
|
||||
async def get_all_repositories(
|
||||
@@ -136,8 +160,10 @@ class SaaSGitHubService(GitHubService):
|
||||
) -> list[Repository]:
|
||||
repositories = await super().get_all_repositories(sort, app_mode)
|
||||
# Schedule the background task without awaiting it
|
||||
asyncio.create_task(
|
||||
store_repositories_in_db(repositories, self.external_auth_id)
|
||||
)
|
||||
external_auth_id = await self._get_external_auth_id()
|
||||
if external_auth_id:
|
||||
asyncio.create_task(
|
||||
store_repositories_in_db(repositories, external_auth_id)
|
||||
)
|
||||
# Return repositories immediately
|
||||
return repositories
|
||||
|
||||
@@ -14,7 +14,6 @@ from integrations.solvability.models.summary import SolvabilitySummary
|
||||
from integrations.utils import ENABLE_SOLVABILITY_ANALYSIS
|
||||
from pydantic import ValidationError
|
||||
from server.config import get_config
|
||||
from storage.database import session_maker
|
||||
from storage.saas_settings_store import SaasSettingsStore
|
||||
|
||||
from openhands.core.config import LLMConfig
|
||||
@@ -90,7 +89,6 @@ async def summarize_issue_solvability(
|
||||
# Grab the user's information so we can load their LLM configuration
|
||||
store = SaasSettingsStore(
|
||||
user_id=github_view.user_info.keycloak_user_id,
|
||||
session_maker=session_maker,
|
||||
config=get_config(),
|
||||
)
|
||||
|
||||
@@ -108,6 +106,11 @@ async def summarize_issue_solvability(
|
||||
f'Solvability analysis disabled for user {github_view.user_info.user_id}'
|
||||
)
|
||||
|
||||
if user_settings.llm_api_key is None:
|
||||
raise ValueError(
|
||||
f'[Solvability] No LLM API key found for user {github_view.user_info.user_id}'
|
||||
)
|
||||
|
||||
try:
|
||||
llm_config = LLMConfig(
|
||||
model=user_settings.llm_model,
|
||||
|
||||
@@ -3,8 +3,9 @@ from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
from github import Auth, Github, GithubIntegration
|
||||
from integrations.utils import CONVERSATION_URL, get_summary_instruction
|
||||
from github import Auth, Github, GithubException, GithubIntegration
|
||||
from integrations.utils import get_summary_instruction
|
||||
from integrations.v1_utils import handle_callback_error
|
||||
from pydantic import Field
|
||||
from server.auth.constants import GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY
|
||||
|
||||
@@ -42,7 +43,6 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
|
||||
event: Event,
|
||||
) -> EventCallbackResult | None:
|
||||
"""Process events for GitHub V1 integration."""
|
||||
|
||||
# Only handle ConversationStateUpdateEvent
|
||||
if not isinstance(event, ConversationStateUpdateEvent):
|
||||
return None
|
||||
@@ -78,25 +78,20 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
|
||||
detail=summary,
|
||||
)
|
||||
except Exception as e:
|
||||
_logger.exception('[GitHub V1] Error processing callback: %s', e)
|
||||
|
||||
# Only try to post error to GitHub if we have basic requirements
|
||||
try:
|
||||
# Check if we have installation ID and credentials before posting
|
||||
if (
|
||||
self.github_view_data.get('installation_id')
|
||||
and GITHUB_APP_CLIENT_ID
|
||||
and GITHUB_APP_PRIVATE_KEY
|
||||
):
|
||||
await self._post_summary_to_github(
|
||||
f'OpenHands encountered an error: **{str(e)}**.\n\n'
|
||||
f'[See the conversation]({CONVERSATION_URL.format(conversation_id)})'
|
||||
'for more information.'
|
||||
)
|
||||
except Exception as post_error:
|
||||
_logger.warning(
|
||||
'[GitHub V1] Failed to post error message to GitHub: %s', post_error
|
||||
)
|
||||
# Check if we have installation ID and credentials before posting
|
||||
can_post_error = bool(
|
||||
self.github_view_data.get('installation_id')
|
||||
and GITHUB_APP_CLIENT_ID
|
||||
and GITHUB_APP_PRIVATE_KEY
|
||||
)
|
||||
await handle_callback_error(
|
||||
error=e,
|
||||
conversation_id=conversation_id,
|
||||
service_name='GitHub',
|
||||
service_logger=_logger,
|
||||
can_post_error=can_post_error,
|
||||
post_error_func=self._post_summary_to_github,
|
||||
)
|
||||
|
||||
return EventCallbackResult(
|
||||
status=EventCallbackResultStatus.ERROR,
|
||||
@@ -137,19 +132,30 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
|
||||
full_repo_name = self.github_view_data['full_repo_name']
|
||||
issue_number = self.github_view_data['issue_number']
|
||||
|
||||
if self.inline_pr_comment:
|
||||
try:
|
||||
if self.inline_pr_comment:
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(full_repo_name)
|
||||
pr = repo.get_pull(issue_number)
|
||||
pr.create_review_comment_reply(
|
||||
comment_id=self.github_view_data.get('comment_id', ''),
|
||||
body=summary,
|
||||
)
|
||||
return
|
||||
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(full_repo_name)
|
||||
pr = repo.get_pull(issue_number)
|
||||
pr.create_review_comment_reply(
|
||||
comment_id=self.github_view_data.get('comment_id', ''), body=summary
|
||||
issue = repo.get_issue(number=issue_number)
|
||||
issue.create_comment(summary)
|
||||
except GithubException as e:
|
||||
if e.status == 410:
|
||||
_logger.info(
|
||||
'[GitHub V1] Issue/PR %s#%s was deleted, skipping summary post',
|
||||
full_repo_name,
|
||||
issue_number,
|
||||
)
|
||||
return
|
||||
|
||||
with Github(auth=Auth.Token(installation_token)) as github_client:
|
||||
repo = github_client.get_repo(full_repo_name)
|
||||
issue = repo.get_issue(number=issue_number)
|
||||
issue.create_comment(summary)
|
||||
else:
|
||||
raise
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Agent / sandbox helpers
|
||||
@@ -167,8 +173,8 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
|
||||
send_message_request = AskAgentRequest(question=message_content)
|
||||
|
||||
url = (
|
||||
f'{agent_server_url.rstrip("/")}'
|
||||
f'/api/conversations/{conversation_id}/ask_agent'
|
||||
f"{agent_server_url.rstrip('/')}"
|
||||
f"/api/conversations/{conversation_id}/ask_agent"
|
||||
)
|
||||
headers = {'X-Session-API-Key': session_api_key}
|
||||
payload = send_message_request.model_dump()
|
||||
@@ -230,8 +236,7 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def _request_summary(self, conversation_id: UUID) -> str:
|
||||
"""
|
||||
Ask the agent to produce a summary of its work and return the agent response.
|
||||
"""Ask the agent to produce a summary of its work and return the agent response.
|
||||
|
||||
NOTE: This method now returns a string (the agent server's response text)
|
||||
and raises exceptions on errors. The wrapping into EventCallbackResult
|
||||
|
||||
@@ -24,7 +24,6 @@ from jinja2 import Environment
|
||||
from server.auth.constants import GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY
|
||||
from server.auth.token_manager import TokenManager
|
||||
from server.config import get_config
|
||||
from storage.database import session_maker
|
||||
from storage.org_store import OrgStore
|
||||
from storage.proactive_conversation_store import ProactiveConversationStore
|
||||
from storage.saas_secrets_store import SaasSecretsStore
|
||||
@@ -73,7 +72,6 @@ async def get_user_proactive_conversation_setting(user_id: str | None) -> bool:
|
||||
This function checks both the global environment variable kill switch AND
|
||||
the user's individual setting. Both must be true for the function to return true.
|
||||
"""
|
||||
|
||||
# If no user ID is provided, we can't check user settings
|
||||
if not user_id:
|
||||
return False
|
||||
@@ -82,13 +80,10 @@ async def get_user_proactive_conversation_setting(user_id: str | None) -> bool:
|
||||
if not ENABLE_PROACTIVE_CONVERSATION_STARTERS:
|
||||
return False
|
||||
|
||||
def _get_setting():
|
||||
org = OrgStore.get_current_org_from_keycloak_user_id(user_id)
|
||||
if not org:
|
||||
return False
|
||||
return bool(org.enable_proactive_conversation_starters)
|
||||
|
||||
return await call_sync_from_async(_get_setting)
|
||||
org = await OrgStore.get_current_org_from_keycloak_user_id(user_id)
|
||||
if not org:
|
||||
return False
|
||||
return bool(org.enable_proactive_conversation_starters)
|
||||
|
||||
|
||||
# =================================================
|
||||
@@ -153,9 +148,7 @@ class GithubIssue(ResolverViewInterface):
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def _get_user_secrets(self):
|
||||
secrets_store = SaasSecretsStore(
|
||||
self.user_info.keycloak_user_id, session_maker, get_config()
|
||||
)
|
||||
secrets_store = SaasSecretsStore(self.user_info.keycloak_user_id, get_config())
|
||||
user_secrets = await secrets_store.load()
|
||||
|
||||
return user_secrets.custom_secrets if user_secrets else None
|
||||
@@ -238,6 +231,29 @@ class GithubIssue(ResolverViewInterface):
|
||||
conversation_instructions=conversation_instructions,
|
||||
)
|
||||
|
||||
async def _get_v1_initial_user_message(self, jinja_env: Environment) -> str:
|
||||
"""Build the initial user message for V1 resolver conversations.
|
||||
|
||||
For "issue opened" events (no specific comment body), we can simply
|
||||
concatenate the user prompt and the rendered issue context.
|
||||
|
||||
Subclasses that represent comment-driven events (issue comments, PR review
|
||||
comments, inline review comments) override this method to control ordering
|
||||
(e.g., context first, then the triggering comment, then previous comments).
|
||||
"""
|
||||
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja_env
|
||||
)
|
||||
|
||||
parts: list[str] = []
|
||||
if user_instructions.strip():
|
||||
parts.append(user_instructions.strip())
|
||||
if conversation_instructions.strip():
|
||||
parts.append(conversation_instructions.strip())
|
||||
|
||||
return '\n\n'.join(parts)
|
||||
|
||||
async def _create_v1_conversation(
|
||||
self,
|
||||
jinja_env: Environment,
|
||||
@@ -247,13 +263,11 @@ class GithubIssue(ResolverViewInterface):
|
||||
"""Create conversation using the new V1 app conversation system."""
|
||||
logger.info('[GitHub V1]: Creating V1 conversation')
|
||||
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja_env
|
||||
)
|
||||
initial_user_text = await self._get_v1_initial_user_message(jinja_env)
|
||||
|
||||
# Create the initial message request
|
||||
initial_message = SendMessageRequest(
|
||||
role='user', content=[TextContent(text=user_instructions)]
|
||||
role='user', content=[TextContent(text=initial_user_text)]
|
||||
)
|
||||
|
||||
# Create the GitHub V1 callback processor
|
||||
@@ -265,7 +279,9 @@ class GithubIssue(ResolverViewInterface):
|
||||
# Create the V1 conversation start request with the callback processor
|
||||
start_request = AppConversationStartRequest(
|
||||
conversation_id=UUID(conversation_metadata.conversation_id),
|
||||
system_message_suffix=conversation_instructions,
|
||||
# NOTE: Resolver instructions are intended to be lower priority than the
|
||||
# system prompt, so we inject them into the initial user message.
|
||||
system_message_suffix=None,
|
||||
initial_message=initial_message,
|
||||
selected_repository=self.full_repo_name,
|
||||
selected_branch=self._get_branch_name(),
|
||||
@@ -336,6 +352,17 @@ class GithubIssueComment(GithubIssue):
|
||||
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def _get_v1_initial_user_message(self, jinja_env: Environment) -> str:
|
||||
await self._load_resolver_context()
|
||||
template = jinja_env.get_template('issue_comment_initial_message.j2')
|
||||
return template.render(
|
||||
issue_number=self.issue_number,
|
||||
issue_title=self.title,
|
||||
issue_body=self.description,
|
||||
issue_comment=self.comment_body,
|
||||
previous_comments=self.previous_comments,
|
||||
).strip()
|
||||
|
||||
|
||||
@dataclass
|
||||
class GithubPRComment(GithubIssueComment):
|
||||
@@ -362,6 +389,18 @@ class GithubPRComment(GithubIssueComment):
|
||||
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def _get_v1_initial_user_message(self, jinja_env: Environment) -> str:
|
||||
await self._load_resolver_context()
|
||||
template = jinja_env.get_template('pr_update_initial_message.j2')
|
||||
return template.render(
|
||||
pr_number=self.issue_number,
|
||||
branch_name=self.branch_name,
|
||||
pr_title=self.title,
|
||||
pr_body=self.description,
|
||||
pr_comment=self.comment_body,
|
||||
comments=self.previous_comments,
|
||||
).strip()
|
||||
|
||||
|
||||
@dataclass
|
||||
class GithubInlinePRComment(GithubPRComment):
|
||||
@@ -408,6 +447,20 @@ class GithubInlinePRComment(GithubPRComment):
|
||||
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def _get_v1_initial_user_message(self, jinja_env: Environment) -> str:
|
||||
await self._load_resolver_context()
|
||||
template = jinja_env.get_template('pr_update_initial_message.j2')
|
||||
return template.render(
|
||||
pr_number=self.issue_number,
|
||||
branch_name=self.branch_name,
|
||||
pr_title=self.title,
|
||||
pr_body=self.description,
|
||||
file_location=self.file_location,
|
||||
line_number=self.line_number,
|
||||
pr_comment=self.comment_body,
|
||||
comments=self.previous_comments,
|
||||
).strip()
|
||||
|
||||
def _create_github_v1_callback_processor(self):
|
||||
"""Create a V1 callback processor for GitHub integration."""
|
||||
from integrations.github.github_v1_callback_processor import (
|
||||
@@ -740,7 +793,7 @@ class GithubFactory:
|
||||
@staticmethod
|
||||
async def create_github_view_from_payload(
|
||||
message: Message, keycloak_user_id: str
|
||||
) -> ResolverViewInterface:
|
||||
) -> GithubViewType:
|
||||
"""Create the appropriate class (GithubIssue or GithubPRComment) based on the payload.
|
||||
Also return metadata about the event (e.g., action type).
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from types import MappingProxyType
|
||||
from typing import cast
|
||||
|
||||
from integrations.gitlab.gitlab_view import (
|
||||
GitlabFactory,
|
||||
@@ -17,6 +20,7 @@ from integrations.utils import (
|
||||
OPENHANDS_RESOLVER_TEMPLATES_DIR,
|
||||
get_session_expired_message,
|
||||
)
|
||||
from integrations.v1_utils import get_saas_user_auth
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from pydantic import SecretStr
|
||||
from server.auth.token_manager import TokenManager
|
||||
@@ -33,7 +37,7 @@ from openhands.server.types import (
|
||||
from openhands.storage.data_models.secrets import Secrets
|
||||
|
||||
|
||||
class GitlabManager(Manager):
|
||||
class GitlabManager(Manager[GitlabViewType]):
|
||||
def __init__(self, token_manager: TokenManager, data_collector: None = None):
|
||||
self.token_manager = token_manager
|
||||
|
||||
@@ -67,11 +71,11 @@ class GitlabManager(Manager):
|
||||
logger.warning(f'Got invalid keyloak user id for GitLab User {user_id}')
|
||||
return False
|
||||
|
||||
# Importing here prevents circular import
|
||||
# GitLabServiceImpl returns SaaSGitLabService in enterprise context
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
gitlab_service: SaaSGitLabService = GitLabServiceImpl(
|
||||
external_auth_id=keycloak_user_id
|
||||
gitlab_service = cast(
|
||||
SaaSGitLabService, GitLabServiceImpl(external_auth_id=keycloak_user_id)
|
||||
)
|
||||
|
||||
return await gitlab_service.user_has_write_access(project_id)
|
||||
@@ -121,55 +125,52 @@ class GitlabManager(Manager):
|
||||
# Check if the user has write access to the repository
|
||||
return has_write_access
|
||||
|
||||
async def send_message(self, message: Message, gitlab_view: ResolverViewInterface):
|
||||
"""
|
||||
Send a message to GitLab based on the view type.
|
||||
async def send_message(self, message: str, gitlab_view: ResolverViewInterface):
|
||||
"""Send a message to GitLab based on the view type.
|
||||
|
||||
Args:
|
||||
message: The message to send
|
||||
message: The message content to send (plain text string)
|
||||
gitlab_view: The GitLab view object containing issue/PR/comment info
|
||||
"""
|
||||
keycloak_user_id = gitlab_view.user_info.keycloak_user_id
|
||||
|
||||
# Importing here prevents circular import
|
||||
# GitLabServiceImpl returns SaaSGitLabService in enterprise context
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
gitlab_service: SaaSGitLabService = GitLabServiceImpl(
|
||||
external_auth_id=keycloak_user_id
|
||||
gitlab_service = cast(
|
||||
SaaSGitLabService, GitLabServiceImpl(external_auth_id=keycloak_user_id)
|
||||
)
|
||||
|
||||
outgoing_message = message.message
|
||||
|
||||
if isinstance(gitlab_view, GitlabInlineMRComment) or isinstance(
|
||||
gitlab_view, GitlabMRComment
|
||||
):
|
||||
await gitlab_service.reply_to_mr(
|
||||
gitlab_view.project_id,
|
||||
gitlab_view.issue_number,
|
||||
gitlab_view.discussion_id,
|
||||
message.message,
|
||||
project_id=str(gitlab_view.project_id),
|
||||
merge_request_iid=str(gitlab_view.issue_number),
|
||||
discussion_id=gitlab_view.discussion_id,
|
||||
body=message,
|
||||
)
|
||||
|
||||
elif isinstance(gitlab_view, GitlabIssueComment):
|
||||
await gitlab_service.reply_to_issue(
|
||||
gitlab_view.project_id,
|
||||
gitlab_view.issue_number,
|
||||
gitlab_view.discussion_id,
|
||||
outgoing_message,
|
||||
project_id=str(gitlab_view.project_id),
|
||||
issue_number=str(gitlab_view.issue_number),
|
||||
discussion_id=gitlab_view.discussion_id,
|
||||
body=message,
|
||||
)
|
||||
elif isinstance(gitlab_view, GitlabIssue):
|
||||
await gitlab_service.reply_to_issue(
|
||||
gitlab_view.project_id,
|
||||
gitlab_view.issue_number,
|
||||
None, # no discussion id, issue is tagged
|
||||
outgoing_message,
|
||||
project_id=str(gitlab_view.project_id),
|
||||
issue_number=str(gitlab_view.issue_number),
|
||||
discussion_id=None, # no discussion id, issue is tagged
|
||||
body=message,
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f'[GitLab] Unsupported view type: {type(gitlab_view).__name__}'
|
||||
)
|
||||
|
||||
async def start_job(self, gitlab_view: GitlabViewType):
|
||||
async def start_job(self, gitlab_view: GitlabViewType) -> None:
|
||||
"""
|
||||
Start a job for the GitLab view.
|
||||
|
||||
@@ -214,8 +215,18 @@ class GitlabManager(Manager):
|
||||
)
|
||||
)
|
||||
|
||||
# Initialize conversation and get metadata (following GitHub pattern)
|
||||
convo_metadata = await gitlab_view.initialize_new_conversation()
|
||||
|
||||
saas_user_auth = await get_saas_user_auth(
|
||||
gitlab_view.user_info.keycloak_user_id, self.token_manager
|
||||
)
|
||||
|
||||
await gitlab_view.create_new_conversation(
|
||||
self.jinja_env, secret_store.provider_tokens
|
||||
self.jinja_env,
|
||||
secret_store.provider_tokens,
|
||||
convo_metadata,
|
||||
saas_user_auth,
|
||||
)
|
||||
|
||||
conversation_id = gitlab_view.conversation_id
|
||||
@@ -224,18 +235,19 @@ class GitlabManager(Manager):
|
||||
f'[GitLab] Created conversation {conversation_id} for user {user_info.username}'
|
||||
)
|
||||
|
||||
# Create a GitlabCallbackProcessor for this conversation
|
||||
processor = GitlabCallbackProcessor(
|
||||
gitlab_view=gitlab_view,
|
||||
send_summary_instruction=True,
|
||||
)
|
||||
if not gitlab_view.v1_enabled:
|
||||
# Create a GitlabCallbackProcessor for this conversation
|
||||
processor = GitlabCallbackProcessor(
|
||||
gitlab_view=gitlab_view,
|
||||
send_summary_instruction=True,
|
||||
)
|
||||
|
||||
# Register the callback processor
|
||||
register_callback_processor(conversation_id, processor)
|
||||
# Register the callback processor
|
||||
register_callback_processor(conversation_id, processor)
|
||||
|
||||
logger.info(
|
||||
f'[GitLab] Created callback processor for conversation {conversation_id}'
|
||||
)
|
||||
logger.info(
|
||||
f'[GitLab] Created callback processor for conversation {conversation_id}'
|
||||
)
|
||||
|
||||
conversation_link = CONVERSATION_URL.format(conversation_id)
|
||||
msg_info = f"I'm on it! {user_info.username} can [track my progress at all-hands.dev]({conversation_link})"
|
||||
@@ -262,12 +274,10 @@ class GitlabManager(Manager):
|
||||
msg_info = get_session_expired_message(user_info.username)
|
||||
|
||||
# Send the acknowledgment message
|
||||
msg = self.create_outgoing_message(msg_info)
|
||||
await self.send_message(msg, gitlab_view)
|
||||
await self.send_message(msg_info, gitlab_view)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f'[GitLab] Error starting job: {str(e)}')
|
||||
msg = self.create_outgoing_message(
|
||||
msg='Uh oh! There was an unexpected error starting the job :('
|
||||
await self.send_message(
|
||||
'Uh oh! There was an unexpected error starting the job :(', gitlab_view
|
||||
)
|
||||
await self.send_message(msg, gitlab_view)
|
||||
|
||||
@@ -185,6 +185,30 @@ class SaaSGitLabService(GitLabService):
|
||||
users_personal_projects: List of personal projects owned by the user
|
||||
repositories: List of Repository objects to store
|
||||
"""
|
||||
# If external_auth_id is not set, try to determine it from the Keycloak token
|
||||
if not self.external_auth_id and self.external_auth_token:
|
||||
try:
|
||||
user_info = await self.token_manager.get_user_info(
|
||||
self.external_auth_token.get_secret_value()
|
||||
)
|
||||
keycloak_user_id = user_info.sub
|
||||
self.external_auth_id = keycloak_user_id
|
||||
logger.info(
|
||||
f'Determined external_auth_id from Keycloak token: {self.external_auth_id}'
|
||||
)
|
||||
except Exception:
|
||||
logger.warning(
|
||||
'Cannot store repository data: external_auth_id is not set and could not be determined from token',
|
||||
exc_info=True,
|
||||
)
|
||||
return
|
||||
|
||||
if not self.external_auth_id:
|
||||
logger.warning(
|
||||
'Cannot store repository data: external_auth_id could not be determined'
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
# First, add owned projects and groups to the database
|
||||
await self.add_owned_projects_and_groups_to_db(users_personal_projects)
|
||||
|
||||
269
enterprise/integrations/gitlab/gitlab_v1_callback_processor.py
Normal file
269
enterprise/integrations/gitlab/gitlab_v1_callback_processor.py
Normal file
@@ -0,0 +1,269 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
from integrations.utils import get_summary_instruction
|
||||
from integrations.v1_utils import handle_callback_error
|
||||
from pydantic import Field
|
||||
|
||||
from openhands.agent_server.models import AskAgentRequest, AskAgentResponse
|
||||
from openhands.app_server.event_callback.event_callback_models import (
|
||||
EventCallback,
|
||||
EventCallbackProcessor,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_result_models import (
|
||||
EventCallbackResult,
|
||||
EventCallbackResultStatus,
|
||||
)
|
||||
from openhands.app_server.event_callback.util import (
|
||||
ensure_conversation_found,
|
||||
ensure_running_sandbox,
|
||||
get_agent_server_url_from_sandbox,
|
||||
)
|
||||
from openhands.sdk import Event
|
||||
from openhands.sdk.event import ConversationStateUpdateEvent
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GitlabV1CallbackProcessor(EventCallbackProcessor):
|
||||
"""Callback processor for GitLab V1 integrations."""
|
||||
|
||||
gitlab_view_data: dict[str, Any] = Field(default_factory=dict)
|
||||
should_request_summary: bool = Field(default=True)
|
||||
inline_mr_comment: bool = Field(default=False)
|
||||
|
||||
async def __call__(
|
||||
self,
|
||||
conversation_id: UUID,
|
||||
callback: EventCallback,
|
||||
event: Event,
|
||||
) -> EventCallbackResult | None:
|
||||
"""Process events for GitLab V1 integration."""
|
||||
# Only handle ConversationStateUpdateEvent
|
||||
if not isinstance(event, ConversationStateUpdateEvent):
|
||||
return None
|
||||
|
||||
# Only act when execution has finished
|
||||
if not (event.key == 'execution_status' and event.value == 'finished'):
|
||||
return None
|
||||
|
||||
_logger.info('[GitLab V1] Callback agent state was %s', event)
|
||||
_logger.info(
|
||||
'[GitLab V1] Should request summary: %s', self.should_request_summary
|
||||
)
|
||||
|
||||
if not self.should_request_summary:
|
||||
return None
|
||||
|
||||
self.should_request_summary = False
|
||||
|
||||
try:
|
||||
_logger.info(f'[GitLab V1] Requesting summary {conversation_id}')
|
||||
summary = await self._request_summary(conversation_id)
|
||||
_logger.info(
|
||||
f'[GitLab V1] Posting summary {conversation_id}',
|
||||
extra={'summary': summary},
|
||||
)
|
||||
await self._post_summary_to_gitlab(summary)
|
||||
|
||||
return EventCallbackResult(
|
||||
status=EventCallbackResultStatus.SUCCESS,
|
||||
event_callback_id=callback.id,
|
||||
event_id=event.id,
|
||||
conversation_id=conversation_id,
|
||||
detail=summary,
|
||||
)
|
||||
except Exception as e:
|
||||
can_post_error = bool(self.gitlab_view_data.get('keycloak_user_id'))
|
||||
await handle_callback_error(
|
||||
error=e,
|
||||
conversation_id=conversation_id,
|
||||
service_name='GitLab',
|
||||
service_logger=_logger,
|
||||
can_post_error=can_post_error,
|
||||
post_error_func=self._post_summary_to_gitlab,
|
||||
)
|
||||
|
||||
return EventCallbackResult(
|
||||
status=EventCallbackResultStatus.ERROR,
|
||||
event_callback_id=callback.id,
|
||||
event_id=event.id,
|
||||
conversation_id=conversation_id,
|
||||
detail=str(e),
|
||||
)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# GitLab helpers
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def _post_summary_to_gitlab(self, summary: str) -> None:
|
||||
"""Post a summary comment to the configured GitLab issue or MR."""
|
||||
# Import here to avoid circular imports
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
keycloak_user_id = self.gitlab_view_data.get('keycloak_user_id')
|
||||
if not keycloak_user_id:
|
||||
raise RuntimeError('Missing keycloak user ID for GitLab')
|
||||
|
||||
gitlab_service = SaaSGitLabService(external_auth_id=keycloak_user_id)
|
||||
|
||||
project_id = self.gitlab_view_data['project_id']
|
||||
issue_number = self.gitlab_view_data['issue_number']
|
||||
discussion_id = self.gitlab_view_data['discussion_id']
|
||||
is_mr = self.gitlab_view_data.get('is_mr', False)
|
||||
|
||||
if is_mr:
|
||||
await gitlab_service.reply_to_mr(
|
||||
project_id,
|
||||
issue_number,
|
||||
discussion_id,
|
||||
summary,
|
||||
)
|
||||
else:
|
||||
await gitlab_service.reply_to_issue(
|
||||
project_id,
|
||||
issue_number,
|
||||
discussion_id,
|
||||
summary,
|
||||
)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Agent / sandbox helpers
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def _ask_question(
|
||||
self,
|
||||
httpx_client: httpx.AsyncClient,
|
||||
agent_server_url: str,
|
||||
conversation_id: UUID,
|
||||
session_api_key: str,
|
||||
message_content: str,
|
||||
) -> str:
|
||||
"""Send a message to the agent server via the V1 API and return response text."""
|
||||
send_message_request = AskAgentRequest(question=message_content)
|
||||
|
||||
url = (
|
||||
f"{agent_server_url.rstrip('/')}"
|
||||
f"/api/conversations/{conversation_id}/ask_agent"
|
||||
)
|
||||
headers = {'X-Session-API-Key': session_api_key}
|
||||
payload = send_message_request.model_dump()
|
||||
|
||||
try:
|
||||
response = await httpx_client.post(
|
||||
url,
|
||||
json=payload,
|
||||
headers=headers,
|
||||
timeout=30.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
agent_response = AskAgentResponse.model_validate(response.json())
|
||||
return agent_response.response
|
||||
|
||||
except httpx.HTTPStatusError as e:
|
||||
error_detail = f'HTTP {e.response.status_code} error'
|
||||
try:
|
||||
error_body = e.response.text
|
||||
if error_body:
|
||||
error_detail += f': {error_body}'
|
||||
except Exception: # noqa: BLE001
|
||||
pass
|
||||
|
||||
_logger.error(
|
||||
'[GitLab V1] HTTP error sending message to %s: %s. '
|
||||
'Request payload: %s. Response headers: %s',
|
||||
url,
|
||||
error_detail,
|
||||
payload,
|
||||
dict(e.response.headers),
|
||||
exc_info=True,
|
||||
)
|
||||
raise Exception(f'Failed to send message to agent server: {error_detail}')
|
||||
|
||||
except httpx.TimeoutException:
|
||||
error_detail = f'Request timeout after 30 seconds to {url}'
|
||||
_logger.error(
|
||||
'[GitLab V1] %s. Request payload: %s',
|
||||
error_detail,
|
||||
payload,
|
||||
exc_info=True,
|
||||
)
|
||||
raise Exception(error_detail)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
error_detail = f'Request error to {url}: {str(e)}'
|
||||
_logger.error(
|
||||
'[GitLab V1] %s. Request payload: %s',
|
||||
error_detail,
|
||||
payload,
|
||||
exc_info=True,
|
||||
)
|
||||
raise Exception(error_detail)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Summary orchestration
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def _request_summary(self, conversation_id: UUID) -> str:
|
||||
"""Ask the agent to produce a summary of its work and return the agent response.
|
||||
|
||||
NOTE: This method now returns a string (the agent server's response text)
|
||||
and raises exceptions on errors. The wrapping into EventCallbackResult
|
||||
is handled by __call__.
|
||||
"""
|
||||
# Import services within the method to avoid circular imports
|
||||
from openhands.app_server.config import (
|
||||
get_app_conversation_info_service,
|
||||
get_httpx_client,
|
||||
get_sandbox_service,
|
||||
)
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.specifiy_user_context import (
|
||||
ADMIN,
|
||||
USER_CONTEXT_ATTR,
|
||||
)
|
||||
|
||||
# Create injector state for dependency injection
|
||||
state = InjectorState()
|
||||
setattr(state, USER_CONTEXT_ATTR, ADMIN)
|
||||
|
||||
async with (
|
||||
get_app_conversation_info_service(state) as app_conversation_info_service,
|
||||
get_sandbox_service(state) as sandbox_service,
|
||||
get_httpx_client(state) as httpx_client,
|
||||
):
|
||||
# 1. Conversation lookup
|
||||
app_conversation_info = ensure_conversation_found(
|
||||
await app_conversation_info_service.get_app_conversation_info(
|
||||
conversation_id
|
||||
),
|
||||
conversation_id,
|
||||
)
|
||||
|
||||
# 2. Sandbox lookup + validation
|
||||
sandbox = ensure_running_sandbox(
|
||||
await sandbox_service.get_sandbox(app_conversation_info.sandbox_id),
|
||||
app_conversation_info.sandbox_id,
|
||||
)
|
||||
|
||||
assert (
|
||||
sandbox.session_api_key is not None
|
||||
), f'No session API key for sandbox: {sandbox.id}'
|
||||
|
||||
# 3. URL + instruction
|
||||
agent_server_url = get_agent_server_url_from_sandbox(sandbox)
|
||||
|
||||
# Prepare message based on agent state
|
||||
message_content = get_summary_instruction()
|
||||
|
||||
# Ask the agent and return the response text
|
||||
return await self._ask_question(
|
||||
httpx_client=httpx_client,
|
||||
agent_server_url=agent_server_url,
|
||||
conversation_id=conversation_id,
|
||||
session_api_key=sandbox.session_api_key,
|
||||
message_content=message_content,
|
||||
)
|
||||
@@ -1,25 +1,53 @@
|
||||
from dataclasses import dataclass
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from integrations.models import Message
|
||||
from integrations.resolver_context import ResolverUserContext
|
||||
from integrations.types import ResolverViewInterface, UserData
|
||||
from integrations.utils import HOST, get_oh_labels, has_exact_mention
|
||||
from integrations.utils import (
|
||||
ENABLE_V1_GITLAB_RESOLVER,
|
||||
HOST,
|
||||
get_oh_labels,
|
||||
get_user_v1_enabled_setting,
|
||||
has_exact_mention,
|
||||
)
|
||||
from jinja2 import Environment
|
||||
from server.auth.token_manager import TokenManager
|
||||
from server.config import get_config
|
||||
from storage.database import session_maker
|
||||
from storage.saas_secrets_store import SaasSecretsStore
|
||||
|
||||
from openhands.agent_server.models import SendMessageRequest
|
||||
from openhands.app_server.app_conversation.app_conversation_models import (
|
||||
AppConversationStartRequest,
|
||||
AppConversationStartTaskStatus,
|
||||
)
|
||||
from openhands.app_server.config import get_app_conversation_service
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
|
||||
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderType
|
||||
from openhands.integrations.service_types import Comment
|
||||
from openhands.server.services.conversation_service import create_new_conversation
|
||||
from openhands.storage.data_models.conversation_metadata import ConversationTrigger
|
||||
from openhands.sdk import TextContent
|
||||
from openhands.server.services.conversation_service import (
|
||||
initialize_conversation,
|
||||
start_conversation,
|
||||
)
|
||||
from openhands.server.user_auth.user_auth import UserAuth
|
||||
from openhands.storage.data_models.conversation_metadata import (
|
||||
ConversationMetadata,
|
||||
ConversationTrigger,
|
||||
)
|
||||
|
||||
OH_LABEL, INLINE_OH_LABEL = get_oh_labels(HOST)
|
||||
CONFIDENTIAL_NOTE = 'confidential_note'
|
||||
NOTE_TYPES = ['note', CONFIDENTIAL_NOTE]
|
||||
|
||||
|
||||
async def is_v1_enabled_for_gitlab_resolver(user_id: str) -> bool:
|
||||
return await get_user_v1_enabled_setting(user_id) and ENABLE_V1_GITLAB_RESOLVER
|
||||
|
||||
|
||||
# =================================================
|
||||
# SECTION: Factory to create appriorate Gitlab view
|
||||
# =================================================
|
||||
@@ -41,6 +69,10 @@ class GitlabIssue(ResolverViewInterface):
|
||||
description: str
|
||||
previous_comments: list[Comment]
|
||||
is_mr: bool
|
||||
v1_enabled: bool
|
||||
|
||||
def _get_branch_name(self) -> str | None:
|
||||
return getattr(self, 'branch_name', None)
|
||||
|
||||
async def _load_resolver_context(self):
|
||||
gitlab_service = GitLabServiceImpl(
|
||||
@@ -78,35 +110,158 @@ class GitlabIssue(ResolverViewInterface):
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def _get_user_secrets(self):
|
||||
secrets_store = SaasSecretsStore(
|
||||
self.user_info.keycloak_user_id, session_maker, get_config()
|
||||
)
|
||||
secrets_store = SaasSecretsStore(self.user_info.keycloak_user_id, get_config())
|
||||
user_secrets = await secrets_store.load()
|
||||
|
||||
return user_secrets.custom_secrets if user_secrets else None
|
||||
|
||||
async def initialize_new_conversation(self) -> ConversationMetadata:
|
||||
# v1_enabled is already set at construction time in the factory method
|
||||
# This is the source of truth for the conversation type
|
||||
if self.v1_enabled:
|
||||
# Create dummy conversation metadata
|
||||
# Don't save to conversation store
|
||||
# V1 conversations are stored in a separate table
|
||||
self.conversation_id = uuid4().hex
|
||||
return ConversationMetadata(
|
||||
conversation_id=self.conversation_id,
|
||||
selected_repository=self.full_repo_name,
|
||||
)
|
||||
|
||||
conversation_metadata: ConversationMetadata = await initialize_conversation( # type: ignore[assignment]
|
||||
user_id=self.user_info.keycloak_user_id,
|
||||
conversation_id=None,
|
||||
selected_repository=self.full_repo_name,
|
||||
selected_branch=self._get_branch_name(),
|
||||
conversation_trigger=ConversationTrigger.RESOLVER,
|
||||
git_provider=ProviderType.GITLAB,
|
||||
)
|
||||
|
||||
self.conversation_id = conversation_metadata.conversation_id
|
||||
return conversation_metadata
|
||||
|
||||
async def create_new_conversation(
|
||||
self, jinja_env: Environment, git_provider_tokens: PROVIDER_TOKEN_TYPE
|
||||
self,
|
||||
jinja_env: Environment,
|
||||
git_provider_tokens: PROVIDER_TOKEN_TYPE,
|
||||
conversation_metadata: ConversationMetadata,
|
||||
saas_user_auth: UserAuth,
|
||||
):
|
||||
# v1_enabled is already set at construction time in the factory method
|
||||
if self.v1_enabled:
|
||||
# Use V1 app conversation service
|
||||
await self._create_v1_conversation(
|
||||
jinja_env, saas_user_auth, conversation_metadata
|
||||
)
|
||||
else:
|
||||
await self._create_v0_conversation(
|
||||
jinja_env, git_provider_tokens, conversation_metadata
|
||||
)
|
||||
|
||||
async def _create_v0_conversation(
|
||||
self,
|
||||
jinja_env: Environment,
|
||||
git_provider_tokens: PROVIDER_TOKEN_TYPE,
|
||||
conversation_metadata: ConversationMetadata,
|
||||
):
|
||||
"""Create conversation using the legacy V0 system."""
|
||||
logger.info('[GitLab]: Creating V0 conversation')
|
||||
custom_secrets = await self._get_user_secrets()
|
||||
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja_env
|
||||
)
|
||||
agent_loop_info = await create_new_conversation(
|
||||
|
||||
await start_conversation(
|
||||
user_id=self.user_info.keycloak_user_id,
|
||||
git_provider_tokens=git_provider_tokens,
|
||||
custom_secrets=custom_secrets,
|
||||
selected_repository=self.full_repo_name,
|
||||
selected_branch=None,
|
||||
initial_user_msg=user_instructions,
|
||||
conversation_instructions=conversation_instructions,
|
||||
image_urls=None,
|
||||
conversation_trigger=ConversationTrigger.RESOLVER,
|
||||
replay_json=None,
|
||||
conversation_id=conversation_metadata.conversation_id,
|
||||
conversation_metadata=conversation_metadata,
|
||||
conversation_instructions=conversation_instructions,
|
||||
)
|
||||
|
||||
async def _create_v1_conversation(
|
||||
self,
|
||||
jinja_env: Environment,
|
||||
saas_user_auth: UserAuth,
|
||||
conversation_metadata: ConversationMetadata,
|
||||
):
|
||||
"""Create conversation using the new V1 app conversation system."""
|
||||
logger.info('[GitLab V1]: Creating V1 conversation')
|
||||
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja_env
|
||||
)
|
||||
|
||||
# Create the initial message request
|
||||
initial_message = SendMessageRequest(
|
||||
role='user', content=[TextContent(text=user_instructions)]
|
||||
)
|
||||
|
||||
# Create the GitLab V1 callback processor
|
||||
gitlab_callback_processor = self._create_gitlab_v1_callback_processor()
|
||||
|
||||
# Get the app conversation service and start the conversation
|
||||
injector_state = InjectorState()
|
||||
|
||||
# Determine the title based on whether it's an MR or issue
|
||||
title_prefix = 'GitLab MR' if self.is_mr else 'GitLab Issue'
|
||||
title = f'{title_prefix} #{self.issue_number}: {self.title}'
|
||||
|
||||
# Create the V1 conversation start request with the callback processor
|
||||
start_request = AppConversationStartRequest(
|
||||
conversation_id=UUID(conversation_metadata.conversation_id),
|
||||
system_message_suffix=conversation_instructions,
|
||||
initial_message=initial_message,
|
||||
selected_repository=self.full_repo_name,
|
||||
selected_branch=self._get_branch_name(),
|
||||
git_provider=ProviderType.GITLAB,
|
||||
title=title,
|
||||
trigger=ConversationTrigger.RESOLVER,
|
||||
processors=[
|
||||
gitlab_callback_processor
|
||||
], # Pass the callback processor directly
|
||||
)
|
||||
|
||||
# Set up the GitLab user context for the V1 system
|
||||
gitlab_user_context = ResolverUserContext(saas_user_auth=saas_user_auth)
|
||||
setattr(injector_state, USER_CONTEXT_ATTR, gitlab_user_context)
|
||||
|
||||
async with get_app_conversation_service(
|
||||
injector_state
|
||||
) as app_conversation_service:
|
||||
async for task in app_conversation_service.start_app_conversation(
|
||||
start_request
|
||||
):
|
||||
if task.status == AppConversationStartTaskStatus.ERROR:
|
||||
logger.error(f'Failed to start V1 conversation: {task.detail}')
|
||||
raise RuntimeError(
|
||||
f'Failed to start V1 conversation: {task.detail}'
|
||||
)
|
||||
|
||||
def _create_gitlab_v1_callback_processor(self):
|
||||
"""Create a V1 callback processor for GitLab integration."""
|
||||
from integrations.gitlab.gitlab_v1_callback_processor import (
|
||||
GitlabV1CallbackProcessor,
|
||||
)
|
||||
|
||||
# Create and return the GitLab V1 callback processor
|
||||
return GitlabV1CallbackProcessor(
|
||||
gitlab_view_data={
|
||||
'issue_number': self.issue_number,
|
||||
'project_id': self.project_id,
|
||||
'full_repo_name': self.full_repo_name,
|
||||
'installation_id': self.installation_id,
|
||||
'keycloak_user_id': self.user_info.keycloak_user_id,
|
||||
'is_mr': self.is_mr,
|
||||
'discussion_id': getattr(self, 'discussion_id', None),
|
||||
},
|
||||
send_summary_instruction=self.send_summary_instruction,
|
||||
)
|
||||
self.conversation_id = agent_loop_info.conversation_id
|
||||
return self.conversation_id
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -141,6 +296,9 @@ class GitlabIssueComment(GitlabIssue):
|
||||
class GitlabMRComment(GitlabIssueComment):
|
||||
branch_name: str
|
||||
|
||||
def _get_branch_name(self) -> str | None:
|
||||
return self.branch_name
|
||||
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
user_instructions_template = jinja_env.get_template('mr_update_prompt.j2')
|
||||
await self._load_resolver_context()
|
||||
@@ -162,29 +320,6 @@ class GitlabMRComment(GitlabIssueComment):
|
||||
|
||||
return user_instructions, conversation_instructions
|
||||
|
||||
async def create_new_conversation(
|
||||
self, jinja_env: Environment, git_provider_tokens: PROVIDER_TOKEN_TYPE
|
||||
):
|
||||
custom_secrets = await self._get_user_secrets()
|
||||
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja_env
|
||||
)
|
||||
agent_loop_info = await create_new_conversation(
|
||||
user_id=self.user_info.keycloak_user_id,
|
||||
git_provider_tokens=git_provider_tokens,
|
||||
custom_secrets=custom_secrets,
|
||||
selected_repository=self.full_repo_name,
|
||||
selected_branch=self.branch_name,
|
||||
initial_user_msg=user_instructions,
|
||||
conversation_instructions=conversation_instructions,
|
||||
image_urls=None,
|
||||
conversation_trigger=ConversationTrigger.RESOLVER,
|
||||
replay_json=None,
|
||||
)
|
||||
self.conversation_id = agent_loop_info.conversation_id
|
||||
return self.conversation_id
|
||||
|
||||
|
||||
@dataclass
|
||||
class GitlabInlineMRComment(GitlabMRComment):
|
||||
@@ -306,7 +441,7 @@ class GitlabFactory:
|
||||
@staticmethod
|
||||
async def create_gitlab_view_from_payload(
|
||||
message: Message, token_manager: TokenManager
|
||||
) -> ResolverViewInterface:
|
||||
) -> GitlabViewType:
|
||||
payload = message.message['payload']
|
||||
installation_id = message.message['installation_id']
|
||||
user = payload['user']
|
||||
@@ -325,6 +460,16 @@ class GitlabFactory:
|
||||
user_id=user_id, username=username, keycloak_user_id=keycloak_user_id
|
||||
)
|
||||
|
||||
# Check v1_enabled at construction time - this is the source of truth
|
||||
v1_enabled = (
|
||||
await is_v1_enabled_for_gitlab_resolver(keycloak_user_id)
|
||||
if keycloak_user_id
|
||||
else False
|
||||
)
|
||||
logger.info(
|
||||
f'[GitLab V1]: User flag found for {keycloak_user_id} is {v1_enabled}'
|
||||
)
|
||||
|
||||
if GitlabFactory.is_labeled_issue(message):
|
||||
issue_iid = payload['object_attributes']['iid']
|
||||
|
||||
@@ -346,6 +491,7 @@ class GitlabFactory:
|
||||
description='',
|
||||
previous_comments=[],
|
||||
is_mr=False,
|
||||
v1_enabled=v1_enabled,
|
||||
)
|
||||
|
||||
elif GitlabFactory.is_issue_comment(message):
|
||||
@@ -376,6 +522,7 @@ class GitlabFactory:
|
||||
description='',
|
||||
previous_comments=[],
|
||||
is_mr=False,
|
||||
v1_enabled=v1_enabled,
|
||||
)
|
||||
|
||||
elif GitlabFactory.is_mr_comment(message):
|
||||
@@ -408,6 +555,7 @@ class GitlabFactory:
|
||||
description='',
|
||||
previous_comments=[],
|
||||
is_mr=True,
|
||||
v1_enabled=v1_enabled,
|
||||
)
|
||||
|
||||
elif GitlabFactory.is_mr_comment(message, inline=True):
|
||||
@@ -448,4 +596,7 @@ class GitlabFactory:
|
||||
description='',
|
||||
previous_comments=[],
|
||||
is_mr=True,
|
||||
v1_enabled=v1_enabled,
|
||||
)
|
||||
|
||||
raise ValueError(f'Unhandled GitLab webhook event: {message}')
|
||||
|
||||
@@ -4,7 +4,9 @@ This module contains reusable functions and classes for installing GitLab webhoo
|
||||
that can be used by both the cron job and API routes.
|
||||
"""
|
||||
|
||||
from typing import cast
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
from integrations.types import GitLabResourceType
|
||||
@@ -13,7 +15,9 @@ from storage.gitlab_webhook import GitlabWebhook, WebhookStatus
|
||||
from storage.gitlab_webhook_store import GitlabWebhookStore
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.service_types import GitService
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
# Webhook configuration constants
|
||||
WEBHOOK_NAME = 'OpenHands Resolver'
|
||||
@@ -35,7 +39,7 @@ class BreakLoopException(Exception):
|
||||
|
||||
|
||||
async def verify_webhook_conditions(
|
||||
gitlab_service: type[GitService],
|
||||
gitlab_service: SaaSGitLabService,
|
||||
resource_type: GitLabResourceType,
|
||||
resource_id: str,
|
||||
webhook_store: GitlabWebhookStore,
|
||||
@@ -52,10 +56,6 @@ async def verify_webhook_conditions(
|
||||
webhook_store: Webhook store instance
|
||||
webhook: Webhook object to verify
|
||||
"""
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
gitlab_service = cast(type[SaaSGitLabService], gitlab_service)
|
||||
|
||||
# Check if resource exists
|
||||
does_resource_exist, status = await gitlab_service.check_resource_exists(
|
||||
resource_type, resource_id
|
||||
@@ -106,7 +106,9 @@ async def verify_webhook_conditions(
|
||||
does_webhook_exist_on_resource,
|
||||
status,
|
||||
) = await gitlab_service.check_webhook_exists_on_resource(
|
||||
resource_type, resource_id, GITLAB_WEBHOOK_URL
|
||||
resource_type=resource_type,
|
||||
resource_id=resource_id,
|
||||
webhook_url=GITLAB_WEBHOOK_URL,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
@@ -131,7 +133,7 @@ async def verify_webhook_conditions(
|
||||
|
||||
|
||||
async def install_webhook_on_resource(
|
||||
gitlab_service: type[GitService],
|
||||
gitlab_service: SaaSGitLabService,
|
||||
resource_type: GitLabResourceType,
|
||||
resource_id: str,
|
||||
webhook_store: GitlabWebhookStore,
|
||||
@@ -150,10 +152,6 @@ async def install_webhook_on_resource(
|
||||
Returns:
|
||||
Tuple of (webhook_id, status)
|
||||
"""
|
||||
from integrations.gitlab.gitlab_service import SaaSGitLabService
|
||||
|
||||
gitlab_service = cast(type[SaaSGitLabService], gitlab_service)
|
||||
|
||||
webhook_secret = f'{webhook.user_id}-{str(uuid4())}'
|
||||
webhook_uuid = f'{str(uuid4())}'
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ JIRA_CLOUD_API_URL = 'https://api.atlassian.com/ex/jira'
|
||||
OH_LABEL, INLINE_OH_LABEL = get_oh_labels(HOST)
|
||||
|
||||
|
||||
class JiraManager(Manager):
|
||||
class JiraManager(Manager[JiraViewInterface]):
|
||||
"""Manager for processing Jira webhook events.
|
||||
|
||||
This class orchestrates the flow from webhook receipt to conversation creation,
|
||||
@@ -257,7 +257,7 @@ class JiraManager(Manager):
|
||||
|
||||
return jira_user, saas_user_auth
|
||||
|
||||
async def start_job(self, view: JiraViewInterface):
|
||||
async def start_job(self, view: JiraViewInterface) -> None:
|
||||
"""Start a Jira job/conversation."""
|
||||
# Import here to prevent circular import
|
||||
from server.conversation_callback_processor.jira_callback_processor import (
|
||||
@@ -341,17 +341,25 @@ class JiraManager(Manager):
|
||||
|
||||
async def send_message(
|
||||
self,
|
||||
message: Message,
|
||||
message: str,
|
||||
issue_key: str,
|
||||
jira_cloud_id: str,
|
||||
svc_acc_email: str,
|
||||
svc_acc_api_key: str,
|
||||
):
|
||||
"""Send a comment to a Jira issue."""
|
||||
"""Send a comment to a Jira issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send (plain text string)
|
||||
issue_key: The Jira issue key (e.g., 'PROJ-123')
|
||||
jira_cloud_id: The Jira Cloud ID
|
||||
svc_acc_email: Service account email for authentication
|
||||
svc_acc_api_key: Service account API key for authentication
|
||||
"""
|
||||
url = (
|
||||
f'{JIRA_CLOUD_API_URL}/{jira_cloud_id}/rest/api/2/issue/{issue_key}/comment'
|
||||
)
|
||||
data = {'body': message.message}
|
||||
data = {'body': message}
|
||||
async with httpx.AsyncClient(verify=httpx_verify_option()) as client:
|
||||
response = await client.post(
|
||||
url, auth=(svc_acc_email, svc_acc_api_key), json=data
|
||||
@@ -366,7 +374,7 @@ class JiraManager(Manager):
|
||||
view.jira_workspace.svc_acc_api_key
|
||||
)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=msg),
|
||||
msg,
|
||||
issue_key=view.payload.issue_key,
|
||||
jira_cloud_id=view.jira_workspace.jira_cloud_id,
|
||||
svc_acc_email=view.jira_workspace.svc_acc_email,
|
||||
@@ -388,7 +396,7 @@ class JiraManager(Manager):
|
||||
try:
|
||||
api_key = self.token_manager.decrypt_text(workspace.svc_acc_api_key)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=error_msg),
|
||||
error_msg,
|
||||
issue_key=payload.issue_key,
|
||||
jira_cloud_id=workspace.jira_cloud_id,
|
||||
svc_acc_email=workspace.svc_acc_email,
|
||||
|
||||
@@ -212,8 +212,6 @@ class JiraPayloadParser:
|
||||
missing.append('issue.id')
|
||||
if not issue_key:
|
||||
missing.append('issue.key')
|
||||
if not user_email:
|
||||
missing.append('user.emailAddress')
|
||||
if not display_name:
|
||||
missing.append('user.displayName')
|
||||
if not account_id:
|
||||
|
||||
@@ -42,7 +42,7 @@ from openhands.server.user_auth.user_auth import UserAuth
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
|
||||
|
||||
class JiraDcManager(Manager):
|
||||
class JiraDcManager(Manager[JiraDcViewInterface]):
|
||||
def __init__(self, token_manager: TokenManager):
|
||||
self.token_manager = token_manager
|
||||
self.integration_store = JiraDcIntegrationStore.get_instance()
|
||||
@@ -353,7 +353,7 @@ class JiraDcManager(Manager):
|
||||
logger.error(f'[Jira DC] Error in is_job_requested: {str(e)}')
|
||||
return False
|
||||
|
||||
async def start_job(self, jira_dc_view: JiraDcViewInterface):
|
||||
async def start_job(self, jira_dc_view: JiraDcViewInterface) -> None:
|
||||
"""Start a Jira DC job/conversation."""
|
||||
# Import here to prevent circular import
|
||||
from server.conversation_callback_processor.jira_dc_callback_processor import (
|
||||
@@ -418,7 +418,7 @@ class JiraDcManager(Manager):
|
||||
jira_dc_view.jira_dc_workspace.svc_acc_api_key
|
||||
)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=msg_info),
|
||||
msg_info,
|
||||
issue_key=jira_dc_view.job_context.issue_key,
|
||||
base_api_url=jira_dc_view.job_context.base_api_url,
|
||||
svc_acc_api_key=api_key,
|
||||
@@ -456,12 +456,19 @@ class JiraDcManager(Manager):
|
||||
return title, description
|
||||
|
||||
async def send_message(
|
||||
self, message: Message, issue_key: str, base_api_url: str, svc_acc_api_key: str
|
||||
self, message: str, issue_key: str, base_api_url: str, svc_acc_api_key: str
|
||||
):
|
||||
"""Send message/comment to Jira DC issue."""
|
||||
"""Send message/comment to Jira DC issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send (plain text string)
|
||||
issue_key: The Jira issue key (e.g., 'PROJ-123')
|
||||
base_api_url: The base API URL for the Jira DC instance
|
||||
svc_acc_api_key: Service account API key for authentication
|
||||
"""
|
||||
url = f'{base_api_url}/rest/api/2/issue/{issue_key}/comment'
|
||||
headers = {'Authorization': f'Bearer {svc_acc_api_key}'}
|
||||
data = {'body': message.message}
|
||||
data = {'body': message}
|
||||
async with httpx.AsyncClient(verify=httpx_verify_option()) as client:
|
||||
response = await client.post(url, headers=headers, json=data)
|
||||
response.raise_for_status()
|
||||
@@ -481,7 +488,7 @@ class JiraDcManager(Manager):
|
||||
try:
|
||||
api_key = self.token_manager.decrypt_text(workspace.svc_acc_api_key)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=error_msg),
|
||||
error_msg,
|
||||
issue_key=job_context.issue_key,
|
||||
base_api_url=job_context.base_api_url,
|
||||
svc_acc_api_key=api_key,
|
||||
@@ -502,7 +509,7 @@ class JiraDcManager(Manager):
|
||||
)
|
||||
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=comment_msg),
|
||||
comment_msg,
|
||||
issue_key=jira_dc_view.job_context.issue_key,
|
||||
base_api_url=jira_dc_view.job_context.base_api_url,
|
||||
svc_acc_api_key=api_key,
|
||||
|
||||
@@ -19,7 +19,7 @@ class JiraDcViewInterface(ABC):
|
||||
conversation_id: str
|
||||
|
||||
@abstractmethod
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Get initial instructions for the conversation."""
|
||||
pass
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ class JiraDcNewConversationView(JiraDcViewInterface):
|
||||
selected_repo: str | None
|
||||
conversation_id: str
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
|
||||
instructions_template = jinja_env.get_template('jira_dc_instructions.j2')
|
||||
@@ -61,7 +61,7 @@ class JiraDcNewConversationView(JiraDcViewInterface):
|
||||
|
||||
provider_tokens = await self.saas_user_auth.get_provider_tokens()
|
||||
user_secrets = await self.saas_user_auth.get_secrets()
|
||||
instructions, user_msg = self._get_instructions(jinja_env)
|
||||
instructions, user_msg = await self._get_instructions(jinja_env)
|
||||
|
||||
try:
|
||||
agent_loop_info = await create_new_conversation(
|
||||
@@ -113,7 +113,7 @@ class JiraDcExistingConversationView(JiraDcViewInterface):
|
||||
selected_repo: str | None
|
||||
conversation_id: str
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
|
||||
user_msg_template = jinja_env.get_template('jira_dc_existing_conversation.j2')
|
||||
@@ -155,6 +155,9 @@ class JiraDcExistingConversationView(JiraDcViewInterface):
|
||||
self.conversation_id, conversation_init_data, user_id
|
||||
)
|
||||
|
||||
if agent_loop_info.event_store is None:
|
||||
raise StartingConvoException('Event store not available')
|
||||
|
||||
final_agent_observation = get_final_agent_observation(
|
||||
agent_loop_info.event_store
|
||||
)
|
||||
@@ -167,7 +170,7 @@ class JiraDcExistingConversationView(JiraDcViewInterface):
|
||||
if not agent_state or agent_state == AgentState.LOADING:
|
||||
raise StartingConvoException('Conversation is still starting')
|
||||
|
||||
_, user_msg = self._get_instructions(jinja_env)
|
||||
_, user_msg = await self._get_instructions(jinja_env)
|
||||
user_message_event = MessageAction(content=user_msg)
|
||||
await conversation_manager.send_event_to_conversation(
|
||||
self.conversation_id, event_to_dict(user_message_event)
|
||||
|
||||
@@ -39,7 +39,7 @@ from openhands.server.user_auth.user_auth import UserAuth
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
|
||||
|
||||
class LinearManager(Manager):
|
||||
class LinearManager(Manager[LinearViewInterface]):
|
||||
def __init__(self, token_manager: TokenManager):
|
||||
self.token_manager = token_manager
|
||||
self.integration_store = LinearIntegrationStore.get_instance()
|
||||
@@ -343,7 +343,7 @@ class LinearManager(Manager):
|
||||
logger.error(f'[Linear] Error in is_job_requested: {str(e)}')
|
||||
return False
|
||||
|
||||
async def start_job(self, linear_view: LinearViewInterface):
|
||||
async def start_job(self, linear_view: LinearViewInterface) -> None:
|
||||
"""Start a Linear job/conversation."""
|
||||
# Import here to prevent circular import
|
||||
from server.conversation_callback_processor.linear_callback_processor import (
|
||||
@@ -408,7 +408,7 @@ class LinearManager(Manager):
|
||||
linear_view.linear_workspace.svc_acc_api_key
|
||||
)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=msg_info),
|
||||
msg_info,
|
||||
linear_view.job_context.issue_id,
|
||||
api_key,
|
||||
)
|
||||
@@ -473,8 +473,14 @@ class LinearManager(Manager):
|
||||
|
||||
return title, description
|
||||
|
||||
async def send_message(self, message: Message, issue_id: str, api_key: str):
|
||||
"""Send message/comment to Linear issue."""
|
||||
async def send_message(self, message: str, issue_id: str, api_key: str):
|
||||
"""Send message/comment to Linear issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send (plain text string)
|
||||
issue_id: The Linear issue ID to comment on
|
||||
api_key: The Linear API key for authentication
|
||||
"""
|
||||
query = """
|
||||
mutation CommentCreate($input: CommentCreateInput!) {
|
||||
commentCreate(input: $input) {
|
||||
@@ -485,7 +491,7 @@ class LinearManager(Manager):
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {'input': {'issueId': issue_id, 'body': message.message}}
|
||||
variables = {'input': {'issueId': issue_id, 'body': message}}
|
||||
return await self._query_api(query, variables, api_key)
|
||||
|
||||
async def _send_error_comment(
|
||||
@@ -498,9 +504,7 @@ class LinearManager(Manager):
|
||||
|
||||
try:
|
||||
api_key = self.token_manager.decrypt_text(workspace.svc_acc_api_key)
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=error_msg), issue_id, api_key
|
||||
)
|
||||
await self.send_message(error_msg, issue_id, api_key)
|
||||
except Exception as e:
|
||||
logger.error(f'[Linear] Failed to send error comment: {str(e)}')
|
||||
|
||||
@@ -517,7 +521,7 @@ class LinearManager(Manager):
|
||||
)
|
||||
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg=comment_msg),
|
||||
comment_msg,
|
||||
linear_view.job_context.issue_id,
|
||||
api_key,
|
||||
)
|
||||
|
||||
@@ -19,7 +19,7 @@ class LinearViewInterface(ABC):
|
||||
conversation_id: str
|
||||
|
||||
@abstractmethod
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Get initial instructions for the conversation."""
|
||||
pass
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ class LinearNewConversationView(LinearViewInterface):
|
||||
selected_repo: str | None
|
||||
conversation_id: str
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
|
||||
instructions_template = jinja_env.get_template('linear_instructions.j2')
|
||||
@@ -58,7 +58,7 @@ class LinearNewConversationView(LinearViewInterface):
|
||||
|
||||
provider_tokens = await self.saas_user_auth.get_provider_tokens()
|
||||
user_secrets = await self.saas_user_auth.get_secrets()
|
||||
instructions, user_msg = self._get_instructions(jinja_env)
|
||||
instructions, user_msg = await self._get_instructions(jinja_env)
|
||||
|
||||
try:
|
||||
agent_loop_info = await create_new_conversation(
|
||||
@@ -110,7 +110,7 @@ class LinearExistingConversationView(LinearViewInterface):
|
||||
selected_repo: str | None
|
||||
conversation_id: str
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
|
||||
user_msg_template = jinja_env.get_template('linear_existing_conversation.j2')
|
||||
@@ -152,6 +152,9 @@ class LinearExistingConversationView(LinearViewInterface):
|
||||
self.conversation_id, conversation_init_data, user_id
|
||||
)
|
||||
|
||||
if agent_loop_info.event_store is None:
|
||||
raise StartingConvoException('Event store not available')
|
||||
|
||||
final_agent_observation = get_final_agent_observation(
|
||||
agent_loop_info.event_store
|
||||
)
|
||||
@@ -164,7 +167,7 @@ class LinearExistingConversationView(LinearViewInterface):
|
||||
if not agent_state or agent_state == AgentState.LOADING:
|
||||
raise StartingConvoException('Conversation is still starting')
|
||||
|
||||
_, user_msg = self._get_instructions(jinja_env)
|
||||
_, user_msg = await self._get_instructions(jinja_env)
|
||||
user_message_event = MessageAction(content=user_msg)
|
||||
await conversation_manager.send_event_to_conversation(
|
||||
self.conversation_id, event_to_dict(user_message_event)
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
from integrations.models import Message, SourceType
|
||||
|
||||
# TypeVar for view types - each manager subclass specifies its own view type
|
||||
ViewT = TypeVar('ViewT')
|
||||
|
||||
class Manager(ABC):
|
||||
|
||||
class Manager(ABC, Generic[ViewT]):
|
||||
manager_type: SourceType
|
||||
|
||||
@abstractmethod
|
||||
@@ -12,14 +16,21 @@ class Manager(ABC):
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def send_message(self, message: Message):
|
||||
"Send message to integration from Openhands server"
|
||||
def send_message(self, message: str, *args: Any, **kwargs: Any):
|
||||
"""Send message to integration from OpenHands server.
|
||||
|
||||
Args:
|
||||
message: The message content to send (plain text string).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
def start_job(self):
|
||||
"Kick off a job with openhands agent"
|
||||
raise NotImplementedError
|
||||
async def start_job(self, view: ViewT) -> None:
|
||||
"""Kick off a job with openhands agent.
|
||||
|
||||
def create_outgoing_message(self, msg: str | dict, ephemeral: bool = False):
|
||||
return Message(source=SourceType.OPENHANDS, message=msg, ephemeral=ephemeral)
|
||||
Args:
|
||||
view: Integration-specific view object containing job context.
|
||||
Each manager subclass accepts its own view type
|
||||
(e.g., SlackViewInterface, JiraViewInterface, etc.)
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
@@ -16,8 +17,16 @@ class SourceType(str, Enum):
|
||||
|
||||
|
||||
class Message(BaseModel):
|
||||
"""Message model for incoming webhook payloads from integrations.
|
||||
|
||||
Note: This model is intended for INCOMING messages only.
|
||||
For outgoing messages (e.g., sending comments to GitHub/GitLab),
|
||||
pass strings directly to the send_message methods instead of
|
||||
wrapping them in a Message object.
|
||||
"""
|
||||
|
||||
source: SourceType
|
||||
message: str | dict
|
||||
message: dict[str, Any]
|
||||
ephemeral: bool = False
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
import re
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
from integrations.manager import Manager
|
||||
from integrations.models import Message, SourceType
|
||||
from integrations.slack.slack_types import SlackViewInterface, StartingConvoException
|
||||
from integrations.slack.slack_types import (
|
||||
SlackMessageView,
|
||||
SlackViewInterface,
|
||||
StartingConvoException,
|
||||
)
|
||||
from integrations.slack.slack_view import (
|
||||
SlackFactory,
|
||||
SlackNewConversationFromRepoFormView,
|
||||
@@ -22,12 +27,13 @@ from server.constants import SLACK_CLIENT_ID
|
||||
from server.utils.conversation_callback_utils import register_callback_processor
|
||||
from slack_sdk.oauth import AuthorizeUrlGenerator
|
||||
from slack_sdk.web.async_client import AsyncWebClient
|
||||
from storage.database import session_maker
|
||||
from sqlalchemy import select
|
||||
from storage.database import a_session_maker
|
||||
from storage.slack_user import SlackUser
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.provider import ProviderHandler
|
||||
from openhands.integrations.service_types import Repository
|
||||
from openhands.integrations.service_types import ProviderTimeoutError, Repository
|
||||
from openhands.server.shared import config, server_config
|
||||
from openhands.server.types import (
|
||||
LLMAuthenticationError,
|
||||
@@ -43,7 +49,7 @@ authorize_url_generator = AuthorizeUrlGenerator(
|
||||
)
|
||||
|
||||
|
||||
class SlackManager(Manager):
|
||||
class SlackManager(Manager[SlackViewInterface]):
|
||||
def __init__(self, token_manager):
|
||||
self.token_manager = token_manager
|
||||
self.login_link = (
|
||||
@@ -63,12 +69,11 @@ class SlackManager(Manager):
|
||||
) -> tuple[SlackUser | None, UserAuth | None]:
|
||||
# We get the user and correlate them back to a user in OpenHands - if we can
|
||||
slack_user = None
|
||||
with session_maker() as session:
|
||||
slack_user = (
|
||||
session.query(SlackUser)
|
||||
.filter(SlackUser.slack_user_id == slack_user_id)
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(SlackUser).where(SlackUser.slack_user_id == slack_user_id)
|
||||
)
|
||||
slack_user = result.scalar_one_or_none()
|
||||
|
||||
# slack_view.slack_to_openhands_user = slack_user # attach user auth info to view
|
||||
|
||||
@@ -180,7 +185,7 @@ class SlackManager(Manager):
|
||||
)
|
||||
|
||||
try:
|
||||
slack_view = SlackFactory.create_slack_view_from_payload(
|
||||
slack_view = await SlackFactory.create_slack_view_from_payload(
|
||||
message, slack_user, saas_user_auth
|
||||
)
|
||||
except Exception as e:
|
||||
@@ -202,9 +207,7 @@ class SlackManager(Manager):
|
||||
msg = self.login_link.format(link)
|
||||
|
||||
logger.info('slack_not_yet_authenticated')
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(msg, ephemeral=True), slack_view
|
||||
)
|
||||
await self.send_message(msg, slack_view, ephemeral=True)
|
||||
return
|
||||
|
||||
if not await self.is_job_requested(message, slack_view):
|
||||
@@ -212,27 +215,42 @@ class SlackManager(Manager):
|
||||
|
||||
await self.start_job(slack_view)
|
||||
|
||||
async def send_message(self, message: Message, slack_view: SlackViewInterface):
|
||||
async def send_message(
|
||||
self,
|
||||
message: str | dict[str, Any],
|
||||
slack_view: SlackMessageView,
|
||||
ephemeral: bool = False,
|
||||
):
|
||||
"""Send a message to Slack.
|
||||
|
||||
Args:
|
||||
message: The message content. Can be a string (for simple text) or
|
||||
a dict with 'text' and 'blocks' keys (for structured messages).
|
||||
slack_view: The Slack view object containing channel/thread info.
|
||||
Can be either SlackMessageView (for unauthenticated users)
|
||||
or SlackViewInterface (for authenticated users).
|
||||
ephemeral: If True, send as an ephemeral message visible only to the user.
|
||||
"""
|
||||
client = AsyncWebClient(token=slack_view.bot_access_token)
|
||||
if message.ephemeral and isinstance(message.message, str):
|
||||
if ephemeral and isinstance(message, str):
|
||||
await client.chat_postEphemeral(
|
||||
channel=slack_view.channel_id,
|
||||
markdown_text=message.message,
|
||||
markdown_text=message,
|
||||
user=slack_view.slack_user_id,
|
||||
thread_ts=slack_view.thread_ts,
|
||||
)
|
||||
elif message.ephemeral and isinstance(message.message, dict):
|
||||
elif ephemeral and isinstance(message, dict):
|
||||
await client.chat_postEphemeral(
|
||||
channel=slack_view.channel_id,
|
||||
user=slack_view.slack_user_id,
|
||||
thread_ts=slack_view.thread_ts,
|
||||
text=message.message['text'],
|
||||
blocks=message.message['blocks'],
|
||||
text=message['text'],
|
||||
blocks=message['blocks'],
|
||||
)
|
||||
else:
|
||||
await client.chat_postMessage(
|
||||
channel=slack_view.channel_id,
|
||||
markdown_text=message.message,
|
||||
markdown_text=message,
|
||||
thread_ts=slack_view.message_ts,
|
||||
)
|
||||
|
||||
@@ -251,9 +269,31 @@ class SlackManager(Manager):
|
||||
return True
|
||||
elif isinstance(slack_view, SlackNewConversationView):
|
||||
user = slack_view.slack_to_openhands_user
|
||||
user_repos: list[Repository] = await self._get_repositories(
|
||||
slack_view.saas_user_auth
|
||||
|
||||
# Fetch repositories, handling timeout errors from the provider
|
||||
logger.info(
|
||||
f'[Slack] Fetching repositories for user {user.slack_display_name} (id={slack_view.saas_user_auth.get_user_id()})'
|
||||
)
|
||||
try:
|
||||
user_repos: list[Repository] = await self._get_repositories(
|
||||
slack_view.saas_user_auth
|
||||
)
|
||||
except ProviderTimeoutError:
|
||||
logger.warning(
|
||||
'repo_query_timeout',
|
||||
extra={
|
||||
'slack_user_id': user.slack_user_id,
|
||||
'keycloak_user_id': user.keycloak_user_id,
|
||||
},
|
||||
)
|
||||
timeout_msg = (
|
||||
'The repository selection timed out while fetching your repository list. '
|
||||
'Please re-send your message with a more specific repository name '
|
||||
'(e.g., "owner/repo-name") to help me find it faster.'
|
||||
)
|
||||
await self.send_message(timeout_msg, slack_view, ephemeral=True)
|
||||
return False
|
||||
|
||||
match, repos = self.filter_potential_repos_by_user_msg(
|
||||
slack_view.user_msg, user_repos
|
||||
)
|
||||
@@ -279,16 +319,13 @@ class SlackManager(Manager):
|
||||
repos, slack_view.message_ts, slack_view.thread_ts
|
||||
),
|
||||
}
|
||||
await self.send_message(
|
||||
self.create_outgoing_message(repo_selection_msg, ephemeral=True),
|
||||
slack_view,
|
||||
)
|
||||
await self.send_message(repo_selection_msg, slack_view, ephemeral=True)
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def start_job(self, slack_view: SlackViewInterface):
|
||||
async def start_job(self, slack_view: SlackViewInterface) -> None:
|
||||
# Importing here prevents circular import
|
||||
from server.conversation_callback_processor.slack_callback_processor import (
|
||||
SlackCallbackProcessor,
|
||||
@@ -296,7 +333,7 @@ class SlackManager(Manager):
|
||||
|
||||
try:
|
||||
msg_info = None
|
||||
user_info: SlackUser = slack_view.slack_to_openhands_user
|
||||
user_info = slack_view.slack_to_openhands_user
|
||||
try:
|
||||
logger.info(
|
||||
f'[Slack] Starting job for user {user_info.slack_display_name} (id={user_info.slack_user_id})',
|
||||
@@ -368,9 +405,10 @@ class SlackManager(Manager):
|
||||
except StartingConvoException as e:
|
||||
msg_info = str(e)
|
||||
|
||||
await self.send_message(self.create_outgoing_message(msg_info), slack_view)
|
||||
await self.send_message(msg_info, slack_view)
|
||||
|
||||
except Exception:
|
||||
logger.exception('[Slack]: Error starting job')
|
||||
msg = 'Uh oh! There was an unexpected error starting the job :('
|
||||
await self.send_message(self.create_outgoing_message(msg), slack_view)
|
||||
await self.send_message(
|
||||
'Uh oh! There was an unexpected error starting the job :(', slack_view
|
||||
)
|
||||
|
||||
@@ -7,15 +7,31 @@ from storage.slack_user import SlackUser
|
||||
from openhands.server.user_auth.user_auth import UserAuth
|
||||
|
||||
|
||||
class SlackViewInterface(SummaryExtractionTracker, ABC):
|
||||
class SlackMessageView(ABC):
|
||||
"""Minimal interface for sending messages to Slack.
|
||||
|
||||
This base class contains only the fields needed to send messages,
|
||||
without requiring user authentication. Used by both authenticated
|
||||
and unauthenticated Slack views.
|
||||
"""
|
||||
|
||||
bot_access_token: str
|
||||
user_msg: str | None
|
||||
slack_user_id: str
|
||||
slack_to_openhands_user: SlackUser | None
|
||||
saas_user_auth: UserAuth | None
|
||||
channel_id: str
|
||||
message_ts: str
|
||||
thread_ts: str | None
|
||||
|
||||
|
||||
class SlackViewInterface(SlackMessageView, SummaryExtractionTracker, ABC):
|
||||
"""Interface for authenticated Slack views that can create conversations.
|
||||
|
||||
All fields are required (non-None) because this interface is only used
|
||||
for users who have linked their Slack account to OpenHands.
|
||||
"""
|
||||
|
||||
user_msg: str
|
||||
slack_to_openhands_user: SlackUser
|
||||
saas_user_auth: UserAuth
|
||||
selected_repo: str | None
|
||||
should_extract: bool
|
||||
send_summary_instruction: bool
|
||||
@@ -24,7 +40,7 @@ class SlackViewInterface(SummaryExtractionTracker, ABC):
|
||||
v1_enabled: bool
|
||||
|
||||
@abstractmethod
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
pass
|
||||
|
||||
|
||||
@@ -2,7 +2,8 @@ import logging
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
from integrations.utils import CONVERSATION_URL, get_summary_instruction
|
||||
from integrations.utils import get_summary_instruction
|
||||
from integrations.v1_utils import handle_callback_error
|
||||
from pydantic import Field
|
||||
from slack_sdk import WebClient
|
||||
from storage.slack_team_store import SlackTeamStore
|
||||
@@ -39,7 +40,6 @@ class SlackV1CallbackProcessor(EventCallbackProcessor):
|
||||
event: Event,
|
||||
) -> EventCallbackResult | None:
|
||||
"""Process events for Slack V1 integration."""
|
||||
|
||||
# Only handle ConversationStateUpdateEvent
|
||||
if not isinstance(event, ConversationStateUpdateEvent):
|
||||
return None
|
||||
@@ -62,19 +62,14 @@ class SlackV1CallbackProcessor(EventCallbackProcessor):
|
||||
detail=summary,
|
||||
)
|
||||
except Exception as e:
|
||||
_logger.exception('[Slack V1] Error processing callback: %s', e)
|
||||
|
||||
# Only try to post error to Slack if we have basic requirements
|
||||
try:
|
||||
await self._post_summary_to_slack(
|
||||
f'OpenHands encountered an error: **{str(e)}**.\n\n'
|
||||
f'[See the conversation]({CONVERSATION_URL.format(conversation_id)})'
|
||||
'for more information.'
|
||||
)
|
||||
except Exception as post_error:
|
||||
_logger.warning(
|
||||
'[Slack V1] Failed to post error message to Slack: %s', post_error
|
||||
)
|
||||
await handle_callback_error(
|
||||
error=e,
|
||||
conversation_id=conversation_id,
|
||||
service_name='Slack',
|
||||
service_logger=_logger,
|
||||
can_post_error=True, # Slack always attempts to post errors
|
||||
post_error_func=self._post_summary_to_slack,
|
||||
)
|
||||
|
||||
return EventCallbackResult(
|
||||
status=EventCallbackResultStatus.ERROR,
|
||||
@@ -88,17 +83,18 @@ class SlackV1CallbackProcessor(EventCallbackProcessor):
|
||||
# Slack helpers
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
def _get_bot_access_token(self):
|
||||
async def _get_bot_access_token(self) -> str | None:
|
||||
team_id = self.slack_view_data.get('team_id')
|
||||
if team_id is None:
|
||||
return None
|
||||
slack_team_store = SlackTeamStore.get_instance()
|
||||
bot_access_token = slack_team_store.get_team_bot_token(
|
||||
self.slack_view_data['team_id']
|
||||
)
|
||||
bot_access_token = await slack_team_store.get_team_bot_token(team_id)
|
||||
|
||||
return bot_access_token
|
||||
|
||||
async def _post_summary_to_slack(self, summary: str) -> None:
|
||||
"""Post a summary message to the configured Slack channel."""
|
||||
bot_access_token = self._get_bot_access_token()
|
||||
bot_access_token = await self._get_bot_access_token()
|
||||
if not bot_access_token:
|
||||
raise RuntimeError('Missing Slack bot access token')
|
||||
|
||||
@@ -148,8 +144,8 @@ class SlackV1CallbackProcessor(EventCallbackProcessor):
|
||||
send_message_request = AskAgentRequest(question=message_content)
|
||||
|
||||
url = (
|
||||
f'{agent_server_url.rstrip("/")}'
|
||||
f'/api/conversations/{conversation_id}/ask_agent'
|
||||
f"{agent_server_url.rstrip('/')}"
|
||||
f"/api/conversations/{conversation_id}/ask_agent"
|
||||
)
|
||||
headers = {'X-Session-API-Key': session_api_key}
|
||||
payload = send_message_request.model_dump()
|
||||
@@ -211,8 +207,7 @@ class SlackV1CallbackProcessor(EventCallbackProcessor):
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def _request_summary(self, conversation_id: UUID) -> str:
|
||||
"""
|
||||
Ask the agent to produce a summary of its work and return the agent response.
|
||||
"""Ask the agent to produce a summary of its work and return the agent response.
|
||||
|
||||
NOTE: This method now returns a string (the agent server's response text)
|
||||
and raises exceptions on errors. The wrapping into EventCallbackResult
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from integrations.models import Message
|
||||
from integrations.resolver_context import ResolverUserContext
|
||||
from integrations.slack.slack_types import SlackViewInterface, StartingConvoException
|
||||
from integrations.slack.slack_types import (
|
||||
SlackMessageView,
|
||||
SlackViewInterface,
|
||||
StartingConvoException,
|
||||
)
|
||||
from integrations.slack.slack_v1_callback_processor import SlackV1CallbackProcessor
|
||||
from integrations.utils import (
|
||||
CONVERSATION_URL,
|
||||
@@ -42,7 +47,7 @@ from openhands.server.user_auth.user_auth import UserAuth
|
||||
from openhands.storage.data_models.conversation_metadata import (
|
||||
ConversationTrigger,
|
||||
)
|
||||
from openhands.utils.async_utils import GENERAL_TIMEOUT, call_async_from_sync
|
||||
from openhands.utils.async_utils import GENERAL_TIMEOUT
|
||||
|
||||
# =================================================
|
||||
# SECTION: Slack view types
|
||||
@@ -59,36 +64,25 @@ async def is_v1_enabled_for_slack_resolver(user_id: str) -> bool:
|
||||
|
||||
|
||||
@dataclass
|
||||
class SlackUnkownUserView(SlackViewInterface):
|
||||
class SlackUnkownUserView(SlackMessageView):
|
||||
"""View for unauthenticated Slack users who haven't linked their account.
|
||||
|
||||
This view only contains the minimal fields needed to send a login link
|
||||
message back to the user. It does not implement SlackViewInterface
|
||||
because it cannot create conversations without user authentication.
|
||||
"""
|
||||
|
||||
bot_access_token: str
|
||||
user_msg: str | None
|
||||
slack_user_id: str
|
||||
slack_to_openhands_user: SlackUser | None
|
||||
saas_user_auth: UserAuth | None
|
||||
channel_id: str
|
||||
message_ts: str
|
||||
thread_ts: str | None
|
||||
selected_repo: str | None
|
||||
should_extract: bool
|
||||
send_summary_instruction: bool
|
||||
conversation_id: str
|
||||
team_id: str
|
||||
v1_enabled: bool
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
raise NotImplementedError
|
||||
|
||||
async def create_or_update_conversation(self, jinja_env: Environment):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_response_msg(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@dataclass
|
||||
class SlackNewConversationView(SlackViewInterface):
|
||||
bot_access_token: str
|
||||
user_msg: str | None
|
||||
user_msg: str
|
||||
slack_user_id: str
|
||||
slack_to_openhands_user: SlackUser
|
||||
saas_user_auth: UserAuth
|
||||
@@ -118,7 +112,7 @@ class SlackNewConversationView(SlackViewInterface):
|
||||
return block['user_id']
|
||||
return ''
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized"""
|
||||
user_info: SlackUser = self.slack_to_openhands_user
|
||||
|
||||
@@ -242,7 +236,9 @@ class SlackNewConversationView(SlackViewInterface):
|
||||
self, jinja: Environment, provider_tokens, user_secrets
|
||||
) -> None:
|
||||
"""Create conversation using the legacy V0 system."""
|
||||
user_instructions, conversation_instructions = self._get_instructions(jinja)
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja
|
||||
)
|
||||
|
||||
# Determine git provider from repository
|
||||
git_provider = None
|
||||
@@ -273,7 +269,9 @@ class SlackNewConversationView(SlackViewInterface):
|
||||
|
||||
async def _create_v1_conversation(self, jinja: Environment) -> None:
|
||||
"""Create conversation using the new V1 app conversation system."""
|
||||
user_instructions, conversation_instructions = self._get_instructions(jinja)
|
||||
user_instructions, conversation_instructions = await self._get_instructions(
|
||||
jinja
|
||||
)
|
||||
|
||||
# Create the initial message request
|
||||
initial_message = SendMessageRequest(
|
||||
@@ -346,7 +344,7 @@ class SlackNewConversationFromRepoFormView(SlackNewConversationView):
|
||||
class SlackUpdateExistingConversationView(SlackNewConversationView):
|
||||
slack_conversation: SlackConversation
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
client = WebClient(token=self.bot_access_token)
|
||||
result = client.conversations_replies(
|
||||
channel=self.channel_id,
|
||||
@@ -389,6 +387,9 @@ class SlackUpdateExistingConversationView(SlackNewConversationView):
|
||||
self.conversation_id, conversation_init_data, user_id
|
||||
)
|
||||
|
||||
if agent_loop_info.event_store is None:
|
||||
raise StartingConvoException('Event store not available')
|
||||
|
||||
final_agent_observation = get_final_agent_observation(
|
||||
agent_loop_info.event_store
|
||||
)
|
||||
@@ -401,7 +402,7 @@ class SlackUpdateExistingConversationView(SlackNewConversationView):
|
||||
if not agent_state or agent_state == AgentState.LOADING:
|
||||
raise StartingConvoException('Conversation is still starting')
|
||||
|
||||
instructions, _ = self._get_instructions(jinja)
|
||||
instructions, _ = await self._get_instructions(jinja)
|
||||
user_msg = MessageAction(content=instructions)
|
||||
await conversation_manager.send_event_to_conversation(
|
||||
self.conversation_id, event_to_dict(user_msg)
|
||||
@@ -469,7 +470,7 @@ class SlackUpdateExistingConversationView(SlackNewConversationView):
|
||||
agent_server_url = get_agent_server_url_from_sandbox(running_sandbox)
|
||||
|
||||
# 4. Prepare the message content
|
||||
user_msg, _ = self._get_instructions(jinja)
|
||||
user_msg, _ = await self._get_instructions(jinja)
|
||||
|
||||
# 5. Create the message request
|
||||
send_message_request = SendMessageRequest(
|
||||
@@ -545,11 +546,14 @@ class SlackFactory:
|
||||
return None
|
||||
|
||||
# thread_ts in slack payloads in the parent's (root level msg's) message ID
|
||||
if channel_id is None:
|
||||
return None
|
||||
return await slack_conversation_store.get_slack_conversation(
|
||||
channel_id, thread_ts
|
||||
)
|
||||
|
||||
def create_slack_view_from_payload(
|
||||
@staticmethod
|
||||
async def create_slack_view_from_payload(
|
||||
message: Message, slack_user: SlackUser | None, saas_user_auth: UserAuth | None
|
||||
):
|
||||
payload = message.message
|
||||
@@ -560,7 +564,7 @@ class SlackFactory:
|
||||
team_id = payload['team_id']
|
||||
user_msg = payload.get('user_msg')
|
||||
|
||||
bot_access_token = slack_team_store.get_team_bot_token(team_id)
|
||||
bot_access_token = await slack_team_store.get_team_bot_token(team_id)
|
||||
if not bot_access_token:
|
||||
logger.error(
|
||||
'Did not find slack team',
|
||||
@@ -572,28 +576,25 @@ class SlackFactory:
|
||||
raise Exception('Did not find slack team')
|
||||
|
||||
# Determine if this is a known slack user by openhands
|
||||
if not slack_user or not saas_user_auth or not channel_id:
|
||||
if not slack_user or not saas_user_auth or not channel_id or not message_ts:
|
||||
return SlackUnkownUserView(
|
||||
bot_access_token=bot_access_token,
|
||||
user_msg=user_msg,
|
||||
slack_user_id=slack_user_id,
|
||||
slack_to_openhands_user=slack_user,
|
||||
saas_user_auth=saas_user_auth,
|
||||
channel_id=channel_id,
|
||||
message_ts=message_ts,
|
||||
channel_id=channel_id or '',
|
||||
message_ts=message_ts or '',
|
||||
thread_ts=thread_ts,
|
||||
selected_repo=None,
|
||||
should_extract=False,
|
||||
send_summary_instruction=False,
|
||||
conversation_id='',
|
||||
team_id=team_id,
|
||||
v1_enabled=False,
|
||||
)
|
||||
|
||||
conversation: SlackConversation | None = call_async_from_sync(
|
||||
SlackFactory.determine_if_updating_existing_conversation,
|
||||
GENERAL_TIMEOUT,
|
||||
message,
|
||||
# At this point, we've verified slack_user, saas_user_auth, channel_id, and message_ts are set
|
||||
# user_msg should always be present in Slack payloads
|
||||
if not user_msg:
|
||||
raise ValueError('user_msg is required but was not provided in payload')
|
||||
assert channel_id is not None
|
||||
assert message_ts is not None
|
||||
|
||||
conversation = await asyncio.wait_for(
|
||||
SlackFactory.determine_if_updating_existing_conversation(message),
|
||||
timeout=GENERAL_TIMEOUT,
|
||||
)
|
||||
if conversation:
|
||||
logger.info(
|
||||
|
||||
@@ -42,11 +42,11 @@ async def store_repositories_in_db(repos: list[Repository], user_id: str) -> Non
|
||||
try:
|
||||
# Store repositories in the repos table
|
||||
repo_store = RepositoryStore.get_instance(config)
|
||||
repo_store.store_projects(stored_repos)
|
||||
await repo_store.store_projects(stored_repos)
|
||||
|
||||
# Store user-repository mappings in the user-repos table
|
||||
user_repo_store = UserRepositoryMapStore.get_instance(config)
|
||||
user_repo_store.store_user_repo_mappings(user_repos)
|
||||
await user_repo_store.store_user_repo_mappings(user_repos)
|
||||
|
||||
logger.info(f'Saved repos for user {user_id}')
|
||||
except Exception:
|
||||
|
||||
@@ -3,24 +3,20 @@ from uuid import UUID
|
||||
import stripe
|
||||
from server.constants import STRIPE_API_KEY
|
||||
from server.logger import logger
|
||||
from sqlalchemy.orm import Session
|
||||
from storage.database import session_maker
|
||||
from sqlalchemy import select
|
||||
from storage.database import a_session_maker
|
||||
from storage.org import Org
|
||||
from storage.org_store import OrgStore
|
||||
from storage.stripe_customer import StripeCustomer
|
||||
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
|
||||
stripe.api_key = STRIPE_API_KEY
|
||||
|
||||
|
||||
async def find_customer_id_by_org_id(org_id: UUID) -> str | None:
|
||||
with session_maker() as session:
|
||||
stripe_customer = (
|
||||
session.query(StripeCustomer)
|
||||
.filter(StripeCustomer.org_id == org_id)
|
||||
.first()
|
||||
)
|
||||
async with a_session_maker() as session:
|
||||
stmt = select(StripeCustomer).where(StripeCustomer.org_id == org_id)
|
||||
result = await session.execute(stmt)
|
||||
stripe_customer = result.scalar_one_or_none()
|
||||
if stripe_customer:
|
||||
return stripe_customer.stripe_customer_id
|
||||
|
||||
@@ -40,9 +36,7 @@ async def find_customer_id_by_org_id(org_id: UUID) -> str | None:
|
||||
|
||||
async def find_customer_id_by_user_id(user_id: str) -> str | None:
|
||||
# First search our own DB...
|
||||
org = await call_sync_from_async(
|
||||
OrgStore.get_current_org_from_keycloak_user_id, user_id
|
||||
)
|
||||
org = await OrgStore.get_current_org_from_keycloak_user_id(user_id)
|
||||
if not org:
|
||||
logger.warning(f'Org not found for user {user_id}')
|
||||
return None
|
||||
@@ -52,9 +46,7 @@ async def find_customer_id_by_user_id(user_id: str) -> str | None:
|
||||
|
||||
async def find_or_create_customer_by_user_id(user_id: str) -> dict | None:
|
||||
# Get the current org for the user
|
||||
org = await call_sync_from_async(
|
||||
OrgStore.get_current_org_from_keycloak_user_id, user_id
|
||||
)
|
||||
org = await OrgStore.get_current_org_from_keycloak_user_id(user_id)
|
||||
if not org:
|
||||
logger.warning(f'Org not found for user {user_id}')
|
||||
return None
|
||||
@@ -74,7 +66,7 @@ async def find_or_create_customer_by_user_id(user_id: str) -> dict | None:
|
||||
)
|
||||
|
||||
# Save the stripe customer in the local db
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
session.add(
|
||||
StripeCustomer(
|
||||
keycloak_user_id=user_id,
|
||||
@@ -82,7 +74,7 @@ async def find_or_create_customer_by_user_id(user_id: str) -> dict | None:
|
||||
stripe_customer_id=customer.id,
|
||||
)
|
||||
)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
logger.info(
|
||||
'created_customer',
|
||||
@@ -108,26 +100,27 @@ async def has_payment_method_by_user_id(user_id: str) -> bool:
|
||||
return bool(payment_methods.data)
|
||||
|
||||
|
||||
async def migrate_customer(session: Session, user_id: str, org: Org):
|
||||
stripe_customer = (
|
||||
session.query(StripeCustomer)
|
||||
.filter(StripeCustomer.keycloak_user_id == user_id)
|
||||
.first()
|
||||
)
|
||||
if stripe_customer is None:
|
||||
return
|
||||
stripe_customer.org_id = org.id
|
||||
customer = await stripe.Customer.modify_async(
|
||||
id=stripe_customer.stripe_customer_id,
|
||||
email=org.contact_email,
|
||||
metadata={'user_id': '', 'org_id': str(org.id)},
|
||||
)
|
||||
async def migrate_customer(user_id: str, org: Org):
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(StripeCustomer).where(StripeCustomer.keycloak_user_id == user_id)
|
||||
)
|
||||
stripe_customer = result.scalar_one_or_none()
|
||||
if stripe_customer is None:
|
||||
return
|
||||
stripe_customer.org_id = org.id
|
||||
customer = await stripe.Customer.modify_async(
|
||||
id=stripe_customer.stripe_customer_id,
|
||||
email=org.contact_email,
|
||||
metadata={'user_id': '', 'org_id': str(org.id)},
|
||||
)
|
||||
|
||||
logger.info(
|
||||
'migrated_customer',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'org_id': str(org.id),
|
||||
'stripe_customer_id': customer.id,
|
||||
},
|
||||
)
|
||||
logger.info(
|
||||
'migrated_customer',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'org_id': str(org.id),
|
||||
'stripe_customer_id': customer.id,
|
||||
},
|
||||
)
|
||||
await session.commit()
|
||||
|
||||
@@ -1,9 +1,17 @@
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from jinja2 import Environment
|
||||
from pydantic import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from integrations.models import Message
|
||||
|
||||
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
|
||||
from openhands.server.user_auth.user_auth import UserAuth
|
||||
from openhands.storage.data_models.conversation_metadata import ConversationMetadata
|
||||
|
||||
|
||||
class GitLabResourceType(Enum):
|
||||
GROUP = 'group'
|
||||
@@ -31,17 +39,41 @@ class SummaryExtractionTracker:
|
||||
|
||||
@dataclass
|
||||
class ResolverViewInterface(SummaryExtractionTracker):
|
||||
installation_id: int
|
||||
# installation_id type varies by provider:
|
||||
# - GitHub: int (GitHub App installation ID)
|
||||
# - GitLab: str (webhook installation ID from our DB)
|
||||
installation_id: int | str
|
||||
user_info: UserData
|
||||
issue_number: int
|
||||
full_repo_name: str
|
||||
is_public_repo: bool
|
||||
raw_payload: dict
|
||||
raw_payload: 'Message'
|
||||
|
||||
def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"Instructions passed when conversation is first initialized"
|
||||
async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]:
|
||||
"""Instructions passed when conversation is first initialized."""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def create_new_conversation(self, jinja_env: Environment, token: str):
|
||||
"Create a new conversation"
|
||||
async def initialize_new_conversation(self) -> 'ConversationMetadata':
|
||||
"""Initialize a new conversation and return metadata.
|
||||
|
||||
For V1 conversations, creates a dummy ConversationMetadata.
|
||||
For V0 conversations, initializes through the conversation store.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def create_new_conversation(
|
||||
self,
|
||||
jinja_env: Environment,
|
||||
git_provider_tokens: 'PROVIDER_TOKEN_TYPE',
|
||||
conversation_metadata: 'ConversationMetadata',
|
||||
saas_user_auth: 'UserAuth',
|
||||
) -> None:
|
||||
"""Create a new conversation.
|
||||
|
||||
Args:
|
||||
jinja_env: Jinja2 environment for template rendering
|
||||
git_provider_tokens: Token mapping for git providers
|
||||
conversation_metadata: Metadata for the conversation
|
||||
saas_user_auth: User authentication for SaaS
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@@ -21,7 +21,6 @@ from openhands.events.event_store_abc import EventStoreABC
|
||||
from openhands.events.observation.agent import AgentStateChangedObservation
|
||||
from openhands.integrations.service_types import Repository
|
||||
from openhands.storage.data_models.conversation_status import ConversationStatus
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from openhands.server.conversation_manager.conversation_manager import (
|
||||
@@ -33,7 +32,8 @@ if TYPE_CHECKING:
|
||||
HOST = WEB_HOST
|
||||
# ---- DO NOT REMOVE ----
|
||||
|
||||
HOST_URL = f'https://{HOST}' if 'localhost' not in HOST else f'http://{HOST}'
|
||||
IS_LOCAL_DEPLOYMENT = 'localhost' in HOST
|
||||
HOST_URL = f'https://{HOST}' if not IS_LOCAL_DEPLOYMENT else f'http://{HOST}'
|
||||
GITHUB_WEBHOOK_URL = f'{HOST_URL}/integration/github/events'
|
||||
GITLAB_WEBHOOK_URL = f'{HOST_URL}/integration/gitlab/events'
|
||||
conversation_prefix = 'conversations/{}'
|
||||
@@ -65,6 +65,25 @@ def get_session_expired_message(username: str | None = None) -> str:
|
||||
return f'Your session has expired. Please login again at [OpenHands Cloud]({HOST_URL}) and try again.'
|
||||
|
||||
|
||||
def get_user_not_found_message(username: str | None = None) -> str:
|
||||
"""Get a user-friendly message when a user hasn't created an OpenHands account.
|
||||
|
||||
Used by integrations to notify users when they try to use OpenHands features
|
||||
but haven't logged into OpenHands Cloud yet (no Keycloak account exists).
|
||||
|
||||
Args:
|
||||
username: Optional username to mention in the message. If provided,
|
||||
the message will include @username prefix (used by Git providers
|
||||
like GitHub, GitLab, Slack). If None, returns a generic message.
|
||||
|
||||
Returns:
|
||||
A formatted user not found message
|
||||
"""
|
||||
if username:
|
||||
return f"@{username} it looks like you haven't created an OpenHands account yet. Please sign up at [OpenHands Cloud]({HOST_URL}) and try again."
|
||||
return f"It looks like you haven't created an OpenHands account yet. Please sign up at [OpenHands Cloud]({HOST_URL}) and try again."
|
||||
|
||||
|
||||
# Toggle for solvability report feature
|
||||
ENABLE_SOLVABILITY_ANALYSIS = (
|
||||
os.getenv('ENABLE_SOLVABILITY_ANALYSIS', 'false').lower() == 'true'
|
||||
@@ -79,6 +98,11 @@ ENABLE_V1_SLACK_RESOLVER = (
|
||||
os.getenv('ENABLE_V1_SLACK_RESOLVER', 'false').lower() == 'true'
|
||||
)
|
||||
|
||||
# Toggle for V1 GitLab resolver feature
|
||||
ENABLE_V1_GITLAB_RESOLVER = (
|
||||
os.getenv('ENABLE_V1_GITLAB_RESOLVER', 'false').lower() == 'true'
|
||||
)
|
||||
|
||||
OPENHANDS_RESOLVER_TEMPLATES_DIR = (
|
||||
os.getenv('OPENHANDS_RESOLVER_TEMPLATES_DIR')
|
||||
or 'openhands/integrations/templates/resolver/'
|
||||
@@ -122,9 +146,7 @@ async def get_user_v1_enabled_setting(user_id: str | None) -> bool:
|
||||
if not user_id:
|
||||
return False
|
||||
|
||||
org = await call_sync_from_async(
|
||||
OrgStore.get_current_org_from_keycloak_user_id, user_id
|
||||
)
|
||||
org = await OrgStore.get_current_org_from_keycloak_user_id(user_id)
|
||||
|
||||
if not org or org.v1_enabled is None:
|
||||
return False
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
import logging
|
||||
from typing import Callable, Coroutine
|
||||
from uuid import UUID
|
||||
|
||||
from integrations.utils import CONVERSATION_URL
|
||||
from pydantic import SecretStr
|
||||
from server.auth.saas_user_auth import SaasUserAuth
|
||||
from server.auth.token_manager import TokenManager
|
||||
@@ -6,6 +11,78 @@ from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.server.user_auth.user_auth import UserAuth
|
||||
|
||||
|
||||
def is_budget_exceeded_error(error_message: str) -> bool:
|
||||
"""Check if an error message indicates a budget exceeded condition.
|
||||
|
||||
This is used to downgrade error logs to info logs for budget exceeded errors
|
||||
since they are expected cost control behavior rather than unexpected errors.
|
||||
"""
|
||||
lower_message = error_message.lower()
|
||||
return 'budget' in lower_message and 'exceeded' in lower_message
|
||||
|
||||
|
||||
BUDGET_EXCEEDED_USER_MESSAGE = 'LLM budget has been exceeded, please re-fill.'
|
||||
|
||||
|
||||
async def handle_callback_error(
|
||||
error: Exception,
|
||||
conversation_id: UUID,
|
||||
service_name: str,
|
||||
service_logger: logging.Logger,
|
||||
can_post_error: bool,
|
||||
post_error_func: Callable[[str], Coroutine],
|
||||
) -> None:
|
||||
"""Handle callback processing errors with appropriate logging and user messages.
|
||||
|
||||
This centralizes the error handling logic for V1 callback processors to:
|
||||
- Log budget exceeded errors at INFO level (expected cost control behavior)
|
||||
- Log other errors at EXCEPTION level
|
||||
- Post user-friendly error messages to the integration platform
|
||||
|
||||
Args:
|
||||
error: The exception that occurred
|
||||
conversation_id: The conversation ID for logging and linking
|
||||
service_name: The service name for log messages (e.g., "GitHub", "GitLab", "Slack")
|
||||
service_logger: The logger instance to use for logging
|
||||
can_post_error: Whether the prerequisites are met to post an error message
|
||||
post_error_func: Async function to post the error message to the platform
|
||||
"""
|
||||
error_str = str(error)
|
||||
budget_exceeded = is_budget_exceeded_error(error_str)
|
||||
|
||||
# Log appropriately based on error type
|
||||
if budget_exceeded:
|
||||
service_logger.info(
|
||||
'[%s V1] Budget exceeded for conversation %s: %s',
|
||||
service_name,
|
||||
conversation_id,
|
||||
error,
|
||||
)
|
||||
else:
|
||||
service_logger.exception(
|
||||
'[%s V1] Error processing callback: %s', service_name, error
|
||||
)
|
||||
|
||||
# Try to post error message to the platform
|
||||
if can_post_error:
|
||||
try:
|
||||
error_detail = (
|
||||
BUDGET_EXCEEDED_USER_MESSAGE if budget_exceeded else error_str
|
||||
)
|
||||
await post_error_func(
|
||||
f'OpenHands encountered an error: **{error_detail}**\n\n'
|
||||
f'[See the conversation]({CONVERSATION_URL.format(conversation_id)}) '
|
||||
'for more information.'
|
||||
)
|
||||
except Exception as post_error:
|
||||
service_logger.warning(
|
||||
'[%s V1] Failed to post error message to %s: %s',
|
||||
service_name,
|
||||
service_name,
|
||||
post_error,
|
||||
)
|
||||
|
||||
|
||||
async def get_saas_user_auth(
|
||||
keycloak_user_id: str, token_manager: TokenManager
|
||||
) -> UserAuth:
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""Create verified_models table.
|
||||
|
||||
Revision ID: 097
|
||||
Revises: 096
|
||||
Create Date: 2026-02-21 00:00:00.000000
|
||||
Revision ID: 098
|
||||
Revises: 097
|
||||
Create Date: 2026-02-26 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
@@ -12,8 +12,8 @@ import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '097'
|
||||
down_revision: Union[str, None] = '096'
|
||||
revision: str = '098'
|
||||
down_revision: Union[str, None] = '097'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
"""Create user_authorizations table and migrate blocked_email_domains
|
||||
|
||||
Revision ID: 099
|
||||
Revises: 098
|
||||
Create Date: 2025-03-05 00:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '099'
|
||||
down_revision: Union[str, None] = '098'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def _seed_from_environment() -> None:
|
||||
"""Seed user_authorizations table from environment variables.
|
||||
|
||||
Reads EMAIL_PATTERN_BLACKLIST and EMAIL_PATTERN_WHITELIST environment variables.
|
||||
Each should be a comma-separated list of SQL LIKE patterns (e.g., '%@example.com').
|
||||
|
||||
If the environment variables are not set or empty, this function does nothing.
|
||||
|
||||
This allows us to set up feature deployments with particular patterns already
|
||||
blacklisted or whitelisted. (For example, you could blacklist everything with
|
||||
`%`, and then whitelist certain email accounts.)
|
||||
"""
|
||||
blacklist_patterns = os.environ.get('EMAIL_PATTERN_BLACKLIST', '').strip()
|
||||
whitelist_patterns = os.environ.get('EMAIL_PATTERN_WHITELIST', '').strip()
|
||||
|
||||
connection = op.get_bind()
|
||||
|
||||
if blacklist_patterns:
|
||||
for pattern in blacklist_patterns.split(','):
|
||||
pattern = pattern.strip()
|
||||
if pattern:
|
||||
connection.execute(
|
||||
sa.text("""
|
||||
INSERT INTO user_authorizations
|
||||
(email_pattern, provider_type, type)
|
||||
VALUES
|
||||
(:pattern, NULL, 'blacklist')
|
||||
"""),
|
||||
{'pattern': pattern},
|
||||
)
|
||||
|
||||
if whitelist_patterns:
|
||||
for pattern in whitelist_patterns.split(','):
|
||||
pattern = pattern.strip()
|
||||
if pattern:
|
||||
connection.execute(
|
||||
sa.text("""
|
||||
INSERT INTO user_authorizations
|
||||
(email_pattern, provider_type, type)
|
||||
VALUES
|
||||
(:pattern, NULL, 'whitelist')
|
||||
"""),
|
||||
{'pattern': pattern},
|
||||
)
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Create user_authorizations table, migrate data, and drop blocked_email_domains."""
|
||||
# Create user_authorizations table
|
||||
op.create_table(
|
||||
'user_authorizations',
|
||||
sa.Column('id', sa.Integer(), sa.Identity(), nullable=False, primary_key=True),
|
||||
sa.Column('email_pattern', sa.String(), nullable=True),
|
||||
sa.Column('provider_type', sa.String(), nullable=True),
|
||||
sa.Column('type', sa.String(), nullable=False),
|
||||
sa.Column(
|
||||
'created_at',
|
||||
sa.DateTime(timezone=True),
|
||||
nullable=False,
|
||||
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||
),
|
||||
sa.Column(
|
||||
'updated_at',
|
||||
sa.DateTime(timezone=True),
|
||||
nullable=False,
|
||||
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
)
|
||||
|
||||
# Create index on email_pattern for efficient LIKE queries
|
||||
op.create_index(
|
||||
'ix_user_authorizations_email_pattern',
|
||||
'user_authorizations',
|
||||
['email_pattern'],
|
||||
)
|
||||
|
||||
# Create index on type for efficient filtering
|
||||
op.create_index(
|
||||
'ix_user_authorizations_type',
|
||||
'user_authorizations',
|
||||
['type'],
|
||||
)
|
||||
|
||||
# Migrate existing blocked_email_domains to user_authorizations as blacklist entries
|
||||
# The domain patterns are converted to SQL LIKE patterns:
|
||||
# - 'example.com' becomes '%@example.com' (matches user@example.com)
|
||||
# - '.us' becomes '%@%.us' (matches user@anything.us)
|
||||
# We also add '%.' prefix for subdomain matching
|
||||
op.execute("""
|
||||
INSERT INTO user_authorizations (email_pattern, provider_type, type, created_at, updated_at)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN domain LIKE '.%' THEN '%' || domain
|
||||
ELSE '%@%' || domain
|
||||
END as email_pattern,
|
||||
NULL as provider_type,
|
||||
'blacklist' as type,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM blocked_email_domains
|
||||
""")
|
||||
|
||||
# Seed additional patterns from environment variables (if set)
|
||||
_seed_from_environment()
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Recreate blocked_email_domains table and migrate data back."""
|
||||
# Drop user_authorizations table
|
||||
op.drop_index('ix_user_authorizations_type', table_name='user_authorizations')
|
||||
op.drop_index(
|
||||
'ix_user_authorizations_email_pattern', table_name='user_authorizations'
|
||||
)
|
||||
op.drop_table('user_authorizations')
|
||||
446
enterprise/poetry.lock
generated
446
enterprise/poetry.lock
generated
@@ -26,98 +26,132 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.12.15"
|
||||
version = "3.13.3"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a"},
|
||||
{file = "aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b"},
|
||||
{file = "aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1"},
|
||||
{file = "aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0"},
|
||||
{file = "aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d"},
|
||||
{file = "aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8"},
|
||||
{file = "aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win32.whl", hash = "sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win32.whl", hash = "sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538"},
|
||||
{file = "aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -130,7 +164,7 @@ propcache = ">=0.2.0"
|
||||
yarl = ">=1.17.0,<2.0"
|
||||
|
||||
[package.extras]
|
||||
speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""]
|
||||
speedups = ["Brotli (>=1.2) ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi (>=1.2) ; platform_python_implementation != \"CPython\""]
|
||||
|
||||
[[package]]
|
||||
name = "aiosignal"
|
||||
@@ -535,14 +569,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "authlib"
|
||||
version = "1.6.6"
|
||||
version = "1.6.7"
|
||||
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd"},
|
||||
{file = "authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e"},
|
||||
{file = "authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0"},
|
||||
{file = "authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -703,24 +737,24 @@ crt = ["awscrt (==0.29.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "browser-use"
|
||||
version = "0.11.2"
|
||||
version = "0.11.13"
|
||||
description = "Make websites accessible for AI agents"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.11"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "browser_use-0.11.2-py3-none-any.whl", hash = "sha256:6ac57c0e2a495749fc83c1faee85001737567f8cd4301ebfcda2a886018a325b"},
|
||||
{file = "browser_use-0.11.2.tar.gz", hash = "sha256:543face2fd5662ee89526d2099a79e01468b51784cdcc85faa36a81fdae4aa68"},
|
||||
{file = "browser_use-0.11.13-py3-none-any.whl", hash = "sha256:f5232309213715e66e8f2079fb7097ac79a880728735968e4c7d41031ed15e83"},
|
||||
{file = "browser_use-0.11.13.tar.gz", hash = "sha256:c20d029f17c44add2047a72c836cb589b85e90a31a91cf3632a22a2de1928dfe"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = "3.12.15"
|
||||
aiohttp = ">=3.13.3"
|
||||
anthropic = ">=0.72.1,<1.0.0"
|
||||
anyio = ">=4.9.0"
|
||||
authlib = ">=1.6.0"
|
||||
browser-use-sdk = ">=2.0.12"
|
||||
bubus = ">=1.5.6"
|
||||
cdp-use = ">=1.4.4"
|
||||
cdp-use = ">=1.4.5"
|
||||
click = ">=8.1.8"
|
||||
cloudpickle = ">=3.1.1"
|
||||
google-api-core = ">=2.25.0"
|
||||
@@ -758,7 +792,7 @@ aws = ["boto3 (>=1.38.45)"]
|
||||
cli = ["textual (>=3.2.0)"]
|
||||
cli-oci = ["oci (>=2.126.4)", "textual (>=3.2.0)"]
|
||||
code = ["matplotlib (>=3.9.0)", "numpy (>=2.3.2)", "pandas (>=2.2.0)", "tabulate (>=0.9.0)"]
|
||||
eval = ["anyio (>=4.9.0)", "datamodel-code-generator (>=0.26.0)", "lmnr[all] (==0.7.17)", "psutil (>=7.0.0)"]
|
||||
eval = ["anyio (>=4.9.0)", "datamodel-code-generator (>=0.26.0)", "lmnr[all] (==0.7.42)", "psutil (>=7.0.0)"]
|
||||
examples = ["agentmail (==0.0.59)", "botocore (>=1.37.23)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)"]
|
||||
oci = ["oci (>=2.126.4)"]
|
||||
video = ["imageio[ffmpeg] (>=2.37.0)", "numpy (>=2.3.2)"]
|
||||
@@ -891,18 +925,19 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "cdp-use"
|
||||
version = "1.4.4"
|
||||
version = "1.4.5"
|
||||
description = "Type safe generator/client library for CDP"
|
||||
optional = false
|
||||
python-versions = ">=3.11"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cdp_use-1.4.4-py3-none-any.whl", hash = "sha256:e37e80e067db2653d6fdf953d4ff9e5d80d75daa27b7c6d48c0261cccbef73e1"},
|
||||
{file = "cdp_use-1.4.4.tar.gz", hash = "sha256:330a848b517006eb9ad1dc468aa6434d913cf0c6918610760c36c3fdfdba0fab"},
|
||||
{file = "cdp_use-1.4.5-py3-none-any.whl", hash = "sha256:8f8e2435e3a20e4009d2974144192cf3c132f6c2971338e156198814d9b91ecb"},
|
||||
{file = "cdp_use-1.4.5.tar.gz", hash = "sha256:0da3a32df46336a03ff5a22bc6bc442cd7d2f2d50a118fd4856f29d37f6d26a0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
httpx = ">=0.28.1"
|
||||
typing-extensions = ">=4.12.2"
|
||||
websockets = ">=15.0.1"
|
||||
|
||||
[[package]]
|
||||
@@ -1540,66 +1575,61 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.3"
|
||||
version = "46.0.5"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"},
|
||||
{file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"},
|
||||
{file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"},
|
||||
{file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"},
|
||||
{file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"},
|
||||
{file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"},
|
||||
{file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"},
|
||||
{file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"},
|
||||
{file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"},
|
||||
{file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"},
|
||||
{file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"},
|
||||
{file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"},
|
||||
{file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1612,7 +1642,7 @@ nox = ["nox[uv] (>=2024.4.15)"]
|
||||
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
|
||||
sdist = ["build (>=1.0.0)"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
@@ -2084,25 +2114,6 @@ files = [
|
||||
{file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
version = "0.19.1"
|
||||
description = "ECDSA cryptographic signature library (pure python)"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"},
|
||||
{file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.9.0"
|
||||
|
||||
[package.extras]
|
||||
gmpy = ["gmpy"]
|
||||
gmpy2 = ["gmpy2"]
|
||||
|
||||
[[package]]
|
||||
name = "email-validator"
|
||||
version = "2.3.0"
|
||||
@@ -5755,9 +5766,9 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=
|
||||
[[package]]
|
||||
name = "nbconvert"
|
||||
version = "7.17.0"
|
||||
description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)."
|
||||
description = "Convert Jupyter Notebooks (.ipynb files) to other formats."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "nbconvert-7.17.0-py3-none-any.whl", hash = "sha256:4f99a63b337b9a23504347afdab24a11faa7d86b405e5c8f9881cd313336d518"},
|
||||
@@ -5781,8 +5792,8 @@ pygments = ">=2.4.1"
|
||||
traitlets = ">=5.1"
|
||||
|
||||
[package.extras]
|
||||
all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"]
|
||||
docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"]
|
||||
all = ["flaky", "intersphinx-registry", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (>=5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"]
|
||||
docs = ["intersphinx-registry", "ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (>=5.0.2)", "sphinxcontrib-spelling"]
|
||||
qtpdf = ["pyqtwebengine (>=5.15)"]
|
||||
qtpng = ["pyqtwebengine (>=5.15)"]
|
||||
serve = ["tornado (>=6.1)"]
|
||||
@@ -6056,14 +6067,14 @@ numpy = {version = ">=2,<2.3.0", markers = "python_version >= \"3.9\""}
|
||||
|
||||
[[package]]
|
||||
name = "openhands-aci"
|
||||
version = "0.3.2"
|
||||
version = "0.3.3"
|
||||
description = "An Agent-Computer Interface (ACI) designed for software development agents OpenHands."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.12"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openhands_aci-0.3.2-py3-none-any.whl", hash = "sha256:a3ff6fe3dd50124598b8bc3aff8d9742d6e75f933f7e7635a9d0b37d45eb826e"},
|
||||
{file = "openhands_aci-0.3.2.tar.gz", hash = "sha256:df7b64df6acb70b45b23e88c13508e7af8f27725bed30c3e88691a0f3d1f7a44"},
|
||||
{file = "openhands_aci-0.3.3-py3-none-any.whl", hash = "sha256:35795a4d6f5939290f74b26190d5b4cd7477b06ffb7c7f0b505166739461d651"},
|
||||
{file = "openhands_aci-0.3.3.tar.gz", hash = "sha256:567fc65bb881e3ea56c987f4251c8f703d3c88fae99402b46ea7dcc48d85adb2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -6086,7 +6097,6 @@ puremagic = ">=1.28"
|
||||
pydantic = ">=2.11.3,<3.0.0"
|
||||
pydub = ">=0.25.1,<0.26.0"
|
||||
pypdf = ">=5.1.0"
|
||||
pypdf2 = ">=3.0.1,<4.0.0"
|
||||
python-pptx = ">=1.0.2,<2.0.0"
|
||||
rapidfuzz = ">=3.13.0,<4.0.0"
|
||||
requests = ">=2.32.3"
|
||||
@@ -6135,10 +6145,11 @@ files = []
|
||||
develop = true
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.9,<3.11.13 || >3.11.13"
|
||||
aiohttp = ">=3.13.3"
|
||||
anthropic = {version = "*", extras = ["vertex"]}
|
||||
anyio = "4.9"
|
||||
asyncpg = ">=0.30"
|
||||
authlib = ">=1.6.7"
|
||||
bashlex = ">=0.18"
|
||||
boto3 = "*"
|
||||
browsergym-core = "0.13.3"
|
||||
@@ -6160,6 +6171,7 @@ jinja2 = ">=3.1.6"
|
||||
joblib = "*"
|
||||
json-repair = "*"
|
||||
jupyter-kernel-gateway = "*"
|
||||
jwcrypto = ">=1.5.6"
|
||||
kubernetes = ">=33.1"
|
||||
libtmux = ">=0.46.2"
|
||||
litellm = ">=1.74.3"
|
||||
@@ -6167,7 +6179,7 @@ lmnr = ">=0.7.20"
|
||||
memory-profiler = ">=0.61"
|
||||
numpy = "*"
|
||||
openai = "2.8"
|
||||
openhands-aci = "0.3.2"
|
||||
openhands-aci = "0.3.3"
|
||||
openhands-agent-server = "1.11.5"
|
||||
openhands-sdk = "1.11.5"
|
||||
openhands-tools = "1.11.5"
|
||||
@@ -6176,23 +6188,22 @@ opentelemetry-exporter-otlp-proto-grpc = ">=1.33.1"
|
||||
pathspec = ">=0.12.1"
|
||||
pexpect = "*"
|
||||
pg8000 = ">=1.31.5"
|
||||
pillow = ">=11.3"
|
||||
pillow = ">=12.1.1"
|
||||
playwright = ">=1.55"
|
||||
poetry = ">=2.1.2"
|
||||
prompt-toolkit = ">=3.0.50"
|
||||
protobuf = ">=5,<6"
|
||||
protobuf = ">=5.29.6,<6"
|
||||
psutil = "*"
|
||||
pybase62 = ">=1"
|
||||
pygithub = ">=2.5"
|
||||
pyjwt = ">=2.9"
|
||||
pylatexenc = "*"
|
||||
pypdf = ">=6"
|
||||
pypdf = ">=6.7.2"
|
||||
python-docx = "*"
|
||||
python-dotenv = "*"
|
||||
python-frontmatter = ">=1.1"
|
||||
python-jose = {version = ">=3.3", extras = ["cryptography"]}
|
||||
python-json-logger = ">=3.2.1"
|
||||
python-multipart = "*"
|
||||
python-multipart = ">=0.0.22"
|
||||
python-pptx = "*"
|
||||
python-socketio = "5.14"
|
||||
pythonnet = "*"
|
||||
@@ -6205,7 +6216,7 @@ setuptools = ">=78.1.1"
|
||||
shellingham = ">=1.5.4"
|
||||
sqlalchemy = {version = ">=2.0.40", extras = ["asyncio"]}
|
||||
sse-starlette = ">=3.0.2"
|
||||
starlette = ">=0.48"
|
||||
starlette = ">=0.49.1"
|
||||
tenacity = ">=8.5,<10"
|
||||
termcolor = "*"
|
||||
toml = "*"
|
||||
@@ -11578,43 +11589,24 @@ diagrams = ["jinja2", "railroad-diagrams"]
|
||||
|
||||
[[package]]
|
||||
name = "pypdf"
|
||||
version = "6.6.0"
|
||||
version = "6.7.5"
|
||||
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pypdf-6.6.0-py3-none-any.whl", hash = "sha256:bca9091ef6de36c7b1a81e09327c554b7ce51e88dad68f5890c2b4a4417f1fd7"},
|
||||
{file = "pypdf-6.6.0.tar.gz", hash = "sha256:4c887ef2ea38d86faded61141995a3c7d068c9d6ae8477be7ae5de8a8e16592f"},
|
||||
{file = "pypdf-6.7.5-py3-none-any.whl", hash = "sha256:07ba7f1d6e6d9aa2a17f5452e320a84718d4ce863367f7ede2fd72280349ab13"},
|
||||
{file = "pypdf-6.7.5.tar.gz", hash = "sha256:40bb2e2e872078655f12b9b89e2f900888bb505e88a82150b64f9f34fa25651d"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
crypto = ["cryptography"]
|
||||
cryptodome = ["PyCryptodome"]
|
||||
dev = ["black", "flit", "pip-tools", "pre-commit", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"]
|
||||
dev = ["flit", "pip-tools", "pre-commit", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"]
|
||||
docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
|
||||
full = ["Pillow (>=8.0.0)", "cryptography"]
|
||||
image = ["Pillow (>=8.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pypdf2"
|
||||
version = "3.0.1"
|
||||
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "PyPDF2-3.0.1.tar.gz", hash = "sha256:a74408f69ba6271f71b9352ef4ed03dc53a31aa404d29b5d31f53bfecfee1440"},
|
||||
{file = "pypdf2-3.0.1-py3-none-any.whl", hash = "sha256:d16e4205cfee272fbdc0568b68d82be796540b1537508cef59388f839c191928"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
crypto = ["PyCryptodome"]
|
||||
dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "wheel"]
|
||||
docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
|
||||
full = ["Pillow", "PyCryptodome"]
|
||||
image = ["Pillow"]
|
||||
|
||||
[[package]]
|
||||
name = "pyperclip"
|
||||
version = "1.11.0"
|
||||
@@ -11824,30 +11816,6 @@ PyYAML = "*"
|
||||
docs = ["sphinx"]
|
||||
test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"]
|
||||
|
||||
[[package]]
|
||||
name = "python-jose"
|
||||
version = "3.5.0"
|
||||
description = "JOSE implementation in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"},
|
||||
{file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""}
|
||||
ecdsa = "!=0.15"
|
||||
pyasn1 = ">=0.5.0"
|
||||
rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0"
|
||||
|
||||
[package.extras]
|
||||
cryptography = ["cryptography (>=3.4.0)"]
|
||||
pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"]
|
||||
pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "python-json-logger"
|
||||
version = "3.3.0"
|
||||
@@ -14917,4 +14885,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = "^3.12,<3.14"
|
||||
content-hash = "1cad6029269393af67155e930c72eae2c03da02e4b3a3699823f6168c14a4218"
|
||||
content-hash = "ef037f6d6085d26166d35c56ce266439f8f1a4fea90bc43ccf15cfeaf116cae5"
|
||||
|
||||
@@ -17,7 +17,6 @@ packages = [
|
||||
{ include = "storage" },
|
||||
{ include = "sync" },
|
||||
{ include = "integrations" },
|
||||
{ include = "experiments" },
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
@@ -49,7 +48,7 @@ prometheus-client = "^0.24.0"
|
||||
pandas = "^2.2.0"
|
||||
numpy = "^2.2.0"
|
||||
mcp = "^1.10.0"
|
||||
pillow = "^12.1.0"
|
||||
pillow = "^12.1.1"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
ruff = "0.8.3"
|
||||
|
||||
@@ -14,6 +14,7 @@ from fastapi.middleware.cors import CORSMiddleware # noqa: E402
|
||||
from fastapi.responses import JSONResponse # noqa: E402
|
||||
from server.auth.auth_error import ExpiredError, NoCredentialsError # noqa: E402
|
||||
from server.auth.constants import ( # noqa: E402
|
||||
BITBUCKET_DATA_CENTER_HOST,
|
||||
ENABLE_JIRA,
|
||||
ENABLE_JIRA_DC,
|
||||
ENABLE_LINEAR,
|
||||
@@ -27,7 +28,6 @@ from server.rate_limit import setup_rate_limit_handler # noqa: E402
|
||||
from server.routes.api_keys import api_router as api_keys_router # noqa: E402
|
||||
from server.routes.auth import api_router, oauth_router # noqa: E402
|
||||
from server.routes.billing import billing_router # noqa: E402
|
||||
from server.routes.debugging import add_debugging_routes # noqa: E402
|
||||
from server.routes.email import api_router as email_router # noqa: E402
|
||||
from server.routes.event_webhook import event_webhook_router # noqa: E402
|
||||
from server.routes.feedback import router as feedback_router # noqa: E402
|
||||
@@ -47,15 +47,19 @@ from server.routes.org_invitations import ( # noqa: E402
|
||||
from server.routes.orgs import org_router # noqa: E402
|
||||
from server.routes.readiness import readiness_router # noqa: E402
|
||||
from server.routes.user import saas_user_router # noqa: E402
|
||||
from server.routes.verified_models import ( # noqa: E402
|
||||
api_router as verified_models_router,
|
||||
)
|
||||
from server.routes.user_app_settings import user_app_settings_router # noqa: E402
|
||||
from server.sharing.shared_conversation_router import ( # noqa: E402
|
||||
router as shared_conversation_router,
|
||||
)
|
||||
from server.sharing.shared_event_router import ( # noqa: E402
|
||||
router as shared_event_router,
|
||||
)
|
||||
from server.verified_models.verified_model_router import ( # noqa: E402
|
||||
api_router as verified_models_router,
|
||||
)
|
||||
from server.verified_models.verified_model_router import ( # noqa: E402
|
||||
override_llm_models_dependency,
|
||||
)
|
||||
|
||||
from openhands.server.app import app as base_app # noqa: E402
|
||||
from openhands.server.listen_socket import sio # noqa: E402
|
||||
@@ -79,6 +83,7 @@ base_app.include_router(api_router) # Add additional route for github auth
|
||||
base_app.include_router(oauth_router) # Add additional route for oauth callback
|
||||
base_app.include_router(oauth_device_router) # Add OAuth 2.0 Device Flow routes
|
||||
base_app.include_router(saas_user_router) # Add additional route SAAS user calls
|
||||
base_app.include_router(user_app_settings_router) # Add routes for user app settings
|
||||
base_app.include_router(
|
||||
billing_router
|
||||
) # Add routes for credit management and Stripe payment integration
|
||||
@@ -111,12 +116,14 @@ base_app.include_router(org_router) # Add routes for organization management
|
||||
base_app.include_router(
|
||||
verified_models_router
|
||||
) # Add routes for verified models management
|
||||
|
||||
# Override the default LLM models implementation with SaaS version
|
||||
# This must happen after all routers are included
|
||||
override_llm_models_dependency(base_app)
|
||||
|
||||
base_app.include_router(invitation_router) # Add routes for org invitation management
|
||||
base_app.include_router(invitation_accept_router) # Add route for accepting invitations
|
||||
add_github_proxy_routes(base_app)
|
||||
add_debugging_routes(
|
||||
base_app
|
||||
) # Add diagnostic routes for testing and debugging (disabled in production)
|
||||
base_app.include_router(slack_router)
|
||||
if ENABLE_JIRA:
|
||||
base_app.include_router(jira_integration_router)
|
||||
@@ -124,6 +131,12 @@ if ENABLE_JIRA_DC:
|
||||
base_app.include_router(jira_dc_integration_router)
|
||||
if ENABLE_LINEAR:
|
||||
base_app.include_router(linear_integration_router)
|
||||
if BITBUCKET_DATA_CENTER_HOST:
|
||||
from server.routes.bitbucket_dc_proxy import (
|
||||
router as bitbucket_dc_proxy_router, # noqa: E402
|
||||
)
|
||||
|
||||
base_app.include_router(bitbucket_dc_proxy_router)
|
||||
base_app.include_router(email_router) # Add routes for email management
|
||||
base_app.include_router(feedback_router) # Add routes for conversation feedback
|
||||
base_app.include_router(
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
import os
|
||||
|
||||
from server.auth.sheets_client import GoogleSheetsClient
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
|
||||
class UserVerifier:
|
||||
def __init__(self) -> None:
|
||||
logger.debug('Initializing UserVerifier')
|
||||
self.file_users: list[str] | None = None
|
||||
self.sheets_client: GoogleSheetsClient | None = None
|
||||
self.spreadsheet_id: str | None = None
|
||||
|
||||
# Initialize from environment variables
|
||||
self._init_file_users()
|
||||
self._init_sheets_client()
|
||||
|
||||
def _init_file_users(self) -> None:
|
||||
"""Load users from text file if configured."""
|
||||
waitlist = os.getenv('GITHUB_USER_LIST_FILE')
|
||||
if not waitlist:
|
||||
logger.debug('GITHUB_USER_LIST_FILE not configured')
|
||||
return
|
||||
|
||||
if not os.path.exists(waitlist):
|
||||
logger.error(f'User list file not found: {waitlist}')
|
||||
raise FileNotFoundError(f'User list file not found: {waitlist}')
|
||||
|
||||
try:
|
||||
with open(waitlist, 'r') as f:
|
||||
self.file_users = [line.strip().lower() for line in f if line.strip()]
|
||||
logger.info(
|
||||
f'Successfully loaded {len(self.file_users)} users from {waitlist}'
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(f'Error reading user list file {waitlist}')
|
||||
|
||||
def _init_sheets_client(self) -> None:
|
||||
"""Initialize Google Sheets client if configured."""
|
||||
sheet_id = os.getenv('GITHUB_USERS_SHEET_ID')
|
||||
|
||||
if not sheet_id:
|
||||
logger.debug('GITHUB_USERS_SHEET_ID not configured')
|
||||
return
|
||||
|
||||
logger.debug('Initializing Google Sheets integration')
|
||||
self.sheets_client = GoogleSheetsClient()
|
||||
self.spreadsheet_id = sheet_id
|
||||
|
||||
def is_active(self) -> bool:
|
||||
if os.getenv('DISABLE_WAITLIST', '').lower() == 'true':
|
||||
logger.info('Waitlist disabled via DISABLE_WAITLIST env var')
|
||||
return False
|
||||
return bool(self.file_users or (self.sheets_client and self.spreadsheet_id))
|
||||
|
||||
def is_user_allowed(self, username: str) -> bool:
|
||||
"""Check if user is allowed based on file and/or sheet configuration."""
|
||||
logger.debug(f'Checking if GitHub user {username} is allowed')
|
||||
if self.file_users:
|
||||
if username.lower() in self.file_users:
|
||||
logger.debug(f'User {username} found in text file allowlist')
|
||||
return True
|
||||
logger.debug(f'User {username} not found in text file allowlist')
|
||||
|
||||
if self.sheets_client and self.spreadsheet_id:
|
||||
sheet_users = [
|
||||
u.lower() for u in self.sheets_client.get_usernames(self.spreadsheet_id)
|
||||
]
|
||||
if username.lower() in sheet_users:
|
||||
logger.debug(f'User {username} found in Google Sheets allowlist')
|
||||
return True
|
||||
logger.debug(f'User {username} not found in Google Sheets allowlist')
|
||||
|
||||
logger.debug(f'User {username} not found in any allowlist')
|
||||
return False
|
||||
|
||||
|
||||
user_verifier = UserVerifier()
|
||||
@@ -157,9 +157,9 @@ ROLE_PERMISSIONS: dict[RoleName, frozenset[Permission]] = {
|
||||
}
|
||||
|
||||
|
||||
def get_user_org_role(user_id: str, org_id: UUID | None) -> Role | None:
|
||||
async def get_user_org_role(user_id: str, org_id: UUID | None) -> Role | None:
|
||||
"""
|
||||
Get the user's role in an organization (synchronous version).
|
||||
Get the user's role in an organization.
|
||||
|
||||
Args:
|
||||
user_id: User ID (string that will be converted to UUID)
|
||||
@@ -171,40 +171,15 @@ def get_user_org_role(user_id: str, org_id: UUID | None) -> Role | None:
|
||||
from uuid import UUID as parse_uuid
|
||||
|
||||
if org_id is None:
|
||||
org_member = OrgMemberStore.get_org_member_for_current_org(parse_uuid(user_id))
|
||||
else:
|
||||
org_member = OrgMemberStore.get_org_member(org_id, parse_uuid(user_id))
|
||||
if not org_member:
|
||||
return None
|
||||
|
||||
return RoleStore.get_role_by_id(org_member.role_id)
|
||||
|
||||
|
||||
async def get_user_org_role_async(user_id: str, org_id: UUID | None) -> Role | None:
|
||||
"""
|
||||
Get the user's role in an organization (async version).
|
||||
|
||||
Args:
|
||||
user_id: User ID (string that will be converted to UUID)
|
||||
org_id: Organization ID, or None to use the user's current organization
|
||||
|
||||
Returns:
|
||||
Role object if user is a member, None otherwise
|
||||
"""
|
||||
from uuid import UUID as parse_uuid
|
||||
|
||||
if org_id is None:
|
||||
org_member = await OrgMemberStore.get_org_member_for_current_org_async(
|
||||
org_member = await OrgMemberStore.get_org_member_for_current_org(
|
||||
parse_uuid(user_id)
|
||||
)
|
||||
else:
|
||||
org_member = await OrgMemberStore.get_org_member_async(
|
||||
org_id, parse_uuid(user_id)
|
||||
)
|
||||
org_member = await OrgMemberStore.get_org_member(org_id, parse_uuid(user_id))
|
||||
if not org_member:
|
||||
return None
|
||||
|
||||
return await RoleStore.get_role_by_id_async(org_member.role_id)
|
||||
return await RoleStore.get_role_by_id(org_member.role_id)
|
||||
|
||||
|
||||
def get_role_permissions(role_name: str) -> frozenset[Permission]:
|
||||
@@ -274,7 +249,7 @@ def require_permission(permission: Permission):
|
||||
detail='User not authenticated',
|
||||
)
|
||||
|
||||
user_role = await get_user_org_role_async(user_id, org_id)
|
||||
user_role = await get_user_org_role(user_id, org_id)
|
||||
|
||||
if not user_role:
|
||||
logger.warning(
|
||||
|
||||
@@ -40,6 +40,16 @@ ROLE_CHECK_ENABLED = os.getenv('ROLE_CHECK_ENABLED', 'false').lower() in (
|
||||
)
|
||||
|
||||
DUPLICATE_EMAIL_CHECK = os.getenv('DUPLICATE_EMAIL_CHECK', 'true') in ('1', 'true')
|
||||
BITBUCKET_DATA_CENTER_CLIENT_ID = os.getenv(
|
||||
'BITBUCKET_DATA_CENTER_CLIENT_ID', ''
|
||||
).strip()
|
||||
BITBUCKET_DATA_CENTER_CLIENT_SECRET = os.getenv(
|
||||
'BITBUCKET_DATA_CENTER_CLIENT_SECRET', ''
|
||||
).strip()
|
||||
BITBUCKET_DATA_CENTER_HOST = os.getenv('BITBUCKET_DATA_CENTER_HOST', '').strip()
|
||||
BITBUCKET_DATA_CENTER_TOKEN_URL = (
|
||||
f'https://{BITBUCKET_DATA_CENTER_HOST}/rest/oauth2/latest/token'
|
||||
)
|
||||
|
||||
# reCAPTCHA Enterprise
|
||||
RECAPTCHA_PROJECT_ID = os.getenv('RECAPTCHA_PROJECT_ID', '').strip()
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
from storage.blocked_email_domain_store import BlockedEmailDomainStore
|
||||
from storage.database import session_maker
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
|
||||
class DomainBlocker:
|
||||
def __init__(self, store: BlockedEmailDomainStore) -> None:
|
||||
logger.debug('Initializing DomainBlocker')
|
||||
self.store = store
|
||||
|
||||
def _extract_domain(self, email: str) -> str | None:
|
||||
"""Extract and normalize email domain from email address"""
|
||||
if not email:
|
||||
return None
|
||||
try:
|
||||
# Extract domain part after @
|
||||
if '@' not in email:
|
||||
return None
|
||||
domain = email.split('@')[1].strip().lower()
|
||||
return domain if domain else None
|
||||
except Exception:
|
||||
logger.debug(f'Error extracting domain from email: {email}', exc_info=True)
|
||||
return None
|
||||
|
||||
def is_domain_blocked(self, email: str) -> bool:
|
||||
"""Check if email domain is blocked by querying the database directly via SQL.
|
||||
|
||||
Supports blocking:
|
||||
- Exact domains: 'example.com' blocks 'user@example.com'
|
||||
- Subdomains: 'example.com' blocks 'user@subdomain.example.com'
|
||||
- TLDs: '.us' blocks 'user@company.us' and 'user@subdomain.company.us'
|
||||
|
||||
The blocking logic is handled efficiently in SQL, avoiding the need to load
|
||||
all blocked domains into memory.
|
||||
"""
|
||||
if not email:
|
||||
logger.debug('No email provided for domain check')
|
||||
return False
|
||||
|
||||
domain = self._extract_domain(email)
|
||||
if not domain:
|
||||
logger.debug(f'Could not extract domain from email: {email}')
|
||||
return False
|
||||
|
||||
try:
|
||||
# Query database directly via SQL to check if domain is blocked
|
||||
is_blocked = self.store.is_domain_blocked(domain)
|
||||
|
||||
if is_blocked:
|
||||
logger.warning(f'Email domain {domain} is blocked for email: {email}')
|
||||
else:
|
||||
logger.debug(f'Email domain {domain} is not blocked')
|
||||
|
||||
return is_blocked
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f'Error checking if domain is blocked for email {email}: {e}',
|
||||
exc_info=True,
|
||||
)
|
||||
# Fail-safe: if database query fails, don't block (allow auth to proceed)
|
||||
return False
|
||||
|
||||
|
||||
# Initialize store and domain blocker
|
||||
_store = BlockedEmailDomainStore(session_maker=session_maker)
|
||||
domain_blocker = DomainBlocker(store=_store)
|
||||
@@ -1,87 +1,11 @@
|
||||
import os
|
||||
|
||||
from integrations.github.github_service import SaaSGitHubService
|
||||
from pydantic import SecretStr
|
||||
from server.auth.sheets_client import GoogleSheetsClient
|
||||
from server.auth.auth_utils import user_verifier
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.github.github_types import GitHubUser
|
||||
|
||||
|
||||
class UserVerifier:
|
||||
def __init__(self) -> None:
|
||||
logger.debug('Initializing UserVerifier')
|
||||
self.file_users: list[str] | None = None
|
||||
self.sheets_client: GoogleSheetsClient | None = None
|
||||
self.spreadsheet_id: str | None = None
|
||||
|
||||
# Initialize from environment variables
|
||||
self._init_file_users()
|
||||
self._init_sheets_client()
|
||||
|
||||
def _init_file_users(self) -> None:
|
||||
"""Load users from text file if configured"""
|
||||
waitlist = os.getenv('GITHUB_USER_LIST_FILE')
|
||||
if not waitlist:
|
||||
logger.debug('GITHUB_USER_LIST_FILE not configured')
|
||||
return
|
||||
|
||||
if not os.path.exists(waitlist):
|
||||
logger.error(f'User list file not found: {waitlist}')
|
||||
raise FileNotFoundError(f'User list file not found: {waitlist}')
|
||||
|
||||
try:
|
||||
with open(waitlist, 'r') as f:
|
||||
self.file_users = [line.strip().lower() for line in f if line.strip()]
|
||||
logger.info(
|
||||
f'Successfully loaded {len(self.file_users)} users from {waitlist}'
|
||||
)
|
||||
except Exception:
|
||||
logger.error(f'Error reading user list file {waitlist}', exc_info=True)
|
||||
|
||||
def _init_sheets_client(self) -> None:
|
||||
"""Initialize Google Sheets client if configured"""
|
||||
sheet_id = os.getenv('GITHUB_USERS_SHEET_ID')
|
||||
|
||||
if not sheet_id:
|
||||
logger.debug('GITHUB_USERS_SHEET_ID not configured')
|
||||
return
|
||||
|
||||
logger.debug('Initializing Google Sheets integration')
|
||||
self.sheets_client = GoogleSheetsClient()
|
||||
self.spreadsheet_id = sheet_id
|
||||
|
||||
def is_active(self) -> bool:
|
||||
if os.getenv('DISABLE_WAITLIST', '').lower() == 'true':
|
||||
logger.info('Waitlist disabled via DISABLE_WAITLIST env var')
|
||||
return False
|
||||
return bool(self.file_users or (self.sheets_client and self.spreadsheet_id))
|
||||
|
||||
def is_user_allowed(self, username: str) -> bool:
|
||||
"""Check if user is allowed based on file and/or sheet configuration"""
|
||||
logger.debug(f'Checking if GitHub user {username} is allowed')
|
||||
if self.file_users:
|
||||
if username.lower() in self.file_users:
|
||||
logger.debug(f'User {username} found in text file allowlist')
|
||||
return True
|
||||
logger.debug(f'User {username} not found in text file allowlist')
|
||||
|
||||
if self.sheets_client and self.spreadsheet_id:
|
||||
sheet_users = [
|
||||
u.lower() for u in self.sheets_client.get_usernames(self.spreadsheet_id)
|
||||
]
|
||||
if username.lower() in sheet_users:
|
||||
logger.debug(f'User {username} found in Google Sheets allowlist')
|
||||
return True
|
||||
logger.debug(f'User {username} not found in Google Sheets allowlist')
|
||||
|
||||
logger.debug(f'User {username} not found in any allowlist')
|
||||
return False
|
||||
|
||||
|
||||
user_verifier = UserVerifier()
|
||||
|
||||
|
||||
def is_user_allowed(user_login: str):
|
||||
if user_verifier.is_active() and not user_verifier.is_user_allowed(user_login):
|
||||
logger.warning(f'GitHub user {user_login} not in allow list')
|
||||
|
||||
@@ -13,16 +13,19 @@ from server.auth.auth_error import (
|
||||
ExpiredError,
|
||||
NoCredentialsError,
|
||||
)
|
||||
from server.auth.domain_blocker import domain_blocker
|
||||
from server.auth.constants import BITBUCKET_DATA_CENTER_HOST
|
||||
from server.auth.token_manager import TokenManager
|
||||
from server.config import get_config
|
||||
from server.logger import logger
|
||||
from server.rate_limit import RateLimiter, create_redis_rate_limiter
|
||||
from sqlalchemy import delete, select
|
||||
from storage.api_key_store import ApiKeyStore
|
||||
from storage.auth_tokens import AuthTokens
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.saas_secrets_store import SaasSecretsStore
|
||||
from storage.saas_settings_store import SaasSettingsStore
|
||||
from storage.user_authorization import UserAuthorizationType
|
||||
from storage.user_authorization_store import UserAuthorizationStore
|
||||
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
|
||||
|
||||
from openhands.integrations.provider import (
|
||||
@@ -118,13 +121,12 @@ class SaasUserAuth(UserAuth):
|
||||
self._settings = settings
|
||||
return settings
|
||||
|
||||
async def get_secrets_store(self):
|
||||
async def get_secrets_store(self) -> SaasSecretsStore:
|
||||
logger.debug('saas_user_auth_get_secrets_store')
|
||||
secrets_store = self.secrets_store
|
||||
if secrets_store:
|
||||
return secrets_store
|
||||
user_id = await self.get_user_id()
|
||||
secrets_store = SaasSecretsStore(user_id, session_maker, get_config())
|
||||
secrets_store = SaasSecretsStore(self.user_id, get_config())
|
||||
self.secrets_store = secrets_store
|
||||
return secrets_store
|
||||
|
||||
@@ -161,12 +163,13 @@ class SaasUserAuth(UserAuth):
|
||||
|
||||
try:
|
||||
# TODO: I think we can do this in a single request if we refactor
|
||||
with session_maker() as session:
|
||||
tokens = (
|
||||
session.query(AuthTokens)
|
||||
.where(AuthTokens.keycloak_user_id == self.user_id)
|
||||
.all()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(AuthTokens).where(
|
||||
AuthTokens.keycloak_user_id == self.user_id
|
||||
)
|
||||
)
|
||||
tokens = result.scalars().all()
|
||||
|
||||
for token in tokens:
|
||||
idp_type = ProviderType(token.identity_provider)
|
||||
@@ -175,6 +178,9 @@ class SaasUserAuth(UserAuth):
|
||||
if user_secrets and idp_type in user_secrets.provider_tokens:
|
||||
host = user_secrets.provider_tokens[idp_type].host
|
||||
|
||||
if idp_type == ProviderType.BITBUCKET_DATA_CENTER and not host:
|
||||
host = BITBUCKET_DATA_CENTER_HOST or None
|
||||
|
||||
provider_token = await token_manager.get_idp_token(
|
||||
access_token.get_secret_value(),
|
||||
idp=idp_type,
|
||||
@@ -192,11 +198,11 @@ class SaasUserAuth(UserAuth):
|
||||
'idp_type': token.identity_provider,
|
||||
},
|
||||
)
|
||||
with session_maker() as session:
|
||||
session.query(AuthTokens).filter(
|
||||
AuthTokens.id == token.id
|
||||
).delete()
|
||||
session.commit()
|
||||
async with a_session_maker() as session:
|
||||
await session.execute(
|
||||
delete(AuthTokens).where(AuthTokens.id == token.id)
|
||||
)
|
||||
await session.commit()
|
||||
raise
|
||||
|
||||
self.provider_tokens = MappingProxyType(provider_tokens)
|
||||
@@ -209,8 +215,7 @@ class SaasUserAuth(UserAuth):
|
||||
settings_store = self.settings_store
|
||||
if settings_store:
|
||||
return settings_store
|
||||
user_id = await self.get_user_id()
|
||||
settings_store = SaasSettingsStore(user_id, session_maker, get_config())
|
||||
settings_store = SaasSettingsStore(self.user_id, get_config())
|
||||
self.settings_store = settings_store
|
||||
return settings_store
|
||||
|
||||
@@ -278,7 +283,7 @@ async def saas_user_auth_from_bearer(request: Request) -> SaasUserAuth | None:
|
||||
return None
|
||||
|
||||
api_key_store = ApiKeyStore.get_instance()
|
||||
user_id = api_key_store.validate_api_key(api_key)
|
||||
user_id = await api_key_store.validate_api_key(api_key)
|
||||
if not user_id:
|
||||
return None
|
||||
offline_token = await token_manager.load_offline_token(user_id)
|
||||
@@ -326,14 +331,16 @@ async def saas_user_auth_from_signed_token(signed_token: str) -> SaasUserAuth:
|
||||
email = access_token_payload['email']
|
||||
email_verified = access_token_payload['email_verified']
|
||||
|
||||
# Check if email domain is blocked
|
||||
if email and domain_blocker.is_domain_blocked(email):
|
||||
logger.warning(
|
||||
f'Blocked authentication attempt for existing user with email: {email}'
|
||||
)
|
||||
raise AuthError(
|
||||
'Access denied: Your email domain is not allowed to access this service'
|
||||
)
|
||||
# Check if email is blacklisted (whitelist takes precedence)
|
||||
if email:
|
||||
auth_type = await UserAuthorizationStore.get_authorization_type(email, None)
|
||||
if auth_type == UserAuthorizationType.BLACKLIST:
|
||||
logger.warning(
|
||||
f'Blocked authentication attempt for existing user with email: {email}'
|
||||
)
|
||||
raise AuthError(
|
||||
'Access denied: Your email domain is not allowed to access this service'
|
||||
)
|
||||
|
||||
logger.debug('saas_user_auth_from_signed_token:return')
|
||||
|
||||
|
||||
@@ -16,10 +16,15 @@ from keycloak.exceptions import (
|
||||
KeycloakError,
|
||||
KeycloakPostError,
|
||||
)
|
||||
from pydantic import BaseModel
|
||||
from server.auth.auth_error import ExpiredError
|
||||
from server.auth.constants import (
|
||||
BITBUCKET_APP_CLIENT_ID,
|
||||
BITBUCKET_APP_CLIENT_SECRET,
|
||||
BITBUCKET_DATA_CENTER_CLIENT_ID,
|
||||
BITBUCKET_DATA_CENTER_CLIENT_SECRET,
|
||||
BITBUCKET_DATA_CENTER_HOST,
|
||||
BITBUCKET_DATA_CENTER_TOKEN_URL,
|
||||
DUPLICATE_EMAIL_CHECK,
|
||||
GITHUB_APP_CLIENT_ID,
|
||||
GITHUB_APP_CLIENT_SECRET,
|
||||
@@ -38,9 +43,9 @@ from server.auth.keycloak_manager import get_keycloak_admin, get_keycloak_openid
|
||||
from server.config import get_config
|
||||
from server.logger import logger
|
||||
from sqlalchemy import String as SQLString
|
||||
from sqlalchemy import type_coerce
|
||||
from sqlalchemy import select, type_coerce
|
||||
from storage.auth_token_store import AuthTokenStore
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.github_app_installation import GithubAppInstallation
|
||||
from storage.offline_token_store import OfflineTokenStore
|
||||
from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt
|
||||
@@ -49,6 +54,30 @@ from openhands.integrations.service_types import ProviderType
|
||||
from openhands.server.types import SessionExpiredError
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
|
||||
|
||||
class KeycloakUserInfo(BaseModel):
|
||||
"""Pydantic model for Keycloak UserInfo endpoint response.
|
||||
|
||||
Based on OIDC standard claims. 'sub' is always required per OIDC spec.
|
||||
Additional fields from Keycloak are captured via model_config extra='allow'.
|
||||
"""
|
||||
|
||||
model_config = {'extra': 'allow'}
|
||||
|
||||
sub: str
|
||||
name: str | None = None
|
||||
given_name: str | None = None
|
||||
family_name: str | None = None
|
||||
preferred_username: str | None = None
|
||||
email: str | None = None
|
||||
email_verified: bool | None = None
|
||||
picture: str | None = None
|
||||
attributes: dict[str, list[str]] | None = None
|
||||
identity_provider: str | None = None
|
||||
company: str | None = None
|
||||
roles: list[str] | None = None
|
||||
|
||||
|
||||
# HTTP timeout for external IDP calls (in seconds)
|
||||
# This prevents indefinite blocking if an IDP is slow or unresponsive
|
||||
IDP_HTTP_TIMEOUT = 15.0
|
||||
@@ -141,22 +170,22 @@ class TokenManager:
|
||||
new_keycloak_tokens['refresh_token'],
|
||||
)
|
||||
|
||||
# UserInfo from Keycloak return a dictionary with the following format:
|
||||
# {
|
||||
# 'sub': '248289761001',
|
||||
# 'name': 'Jane Doe',
|
||||
# 'given_name': 'Jane',
|
||||
# 'family_name': 'Doe',
|
||||
# 'preferred_username': 'j.doe',
|
||||
# 'email': 'janedoe@example.com',
|
||||
# 'picture': 'http://example.com/janedoe/me.jpg'
|
||||
# 'github_id': '354322532'
|
||||
# }
|
||||
async def get_user_info(self, access_token: str) -> dict:
|
||||
if not access_token:
|
||||
return {}
|
||||
async def get_user_info(self, access_token: str) -> KeycloakUserInfo:
|
||||
"""Get user info from Keycloak userinfo endpoint.
|
||||
|
||||
Args:
|
||||
access_token: A valid Keycloak access token
|
||||
|
||||
Returns:
|
||||
KeycloakUserInfo with user claims. 'sub' is always present per OIDC spec.
|
||||
|
||||
Raises:
|
||||
KeycloakAuthenticationError: If the token is invalid
|
||||
ValidationError: If the response is missing the required 'sub' field
|
||||
"""
|
||||
user_info = await get_keycloak_openid(self.external).a_userinfo(access_token)
|
||||
return user_info
|
||||
# Pydantic validation will raise ValidationError if 'sub' is missing
|
||||
return KeycloakUserInfo.model_validate(user_info)
|
||||
|
||||
@retry(
|
||||
stop=stop_after_attempt(2),
|
||||
@@ -270,8 +299,8 @@ class TokenManager:
|
||||
) -> str:
|
||||
# Get user info to determine user_id and idp
|
||||
user_info = await self.get_user_info(access_token=access_token)
|
||||
user_id = user_info.get('sub')
|
||||
username = user_info.get('preferred_username')
|
||||
user_id = user_info.sub
|
||||
username = user_info.preferred_username
|
||||
logger.info(f'Getting token for user {username} and IDP {idp}')
|
||||
token_store = await AuthTokenStore.get_instance(
|
||||
keycloak_user_id=user_id, idp=idp
|
||||
@@ -354,6 +383,8 @@ class TokenManager:
|
||||
return await self._refresh_gitlab_token(refresh_token)
|
||||
elif idp == ProviderType.BITBUCKET:
|
||||
return await self._refresh_bitbucket_token(refresh_token)
|
||||
elif idp == ProviderType.BITBUCKET_DATA_CENTER:
|
||||
return await self._refresh_bitbucket_data_center_token(refresh_token)
|
||||
else:
|
||||
raise ValueError(f'Unsupported IDP: {idp}')
|
||||
|
||||
@@ -435,6 +466,33 @@ class TokenManager:
|
||||
data = response.json()
|
||||
return await self._parse_refresh_response(data)
|
||||
|
||||
async def _refresh_bitbucket_data_center_token(
|
||||
self, refresh_token: str
|
||||
) -> dict[str, str | int]:
|
||||
if not BITBUCKET_DATA_CENTER_HOST:
|
||||
raise ValueError(
|
||||
'BITBUCKET_DATA_CENTER_HOST is not configured. '
|
||||
'Set the BITBUCKET_DATA_CENTER_HOST environment variable.'
|
||||
)
|
||||
url = BITBUCKET_DATA_CENTER_TOKEN_URL
|
||||
logger.info(f'Refreshing Bitbucket Data Center token with URL: {url}')
|
||||
|
||||
payload = {
|
||||
'client_id': BITBUCKET_DATA_CENTER_CLIENT_ID,
|
||||
'client_secret': BITBUCKET_DATA_CENTER_CLIENT_SECRET,
|
||||
'refresh_token': refresh_token,
|
||||
'grant_type': 'refresh_token',
|
||||
}
|
||||
async with httpx.AsyncClient(
|
||||
verify=httpx_verify_option(), timeout=IDP_HTTP_TIMEOUT
|
||||
) as client:
|
||||
response = await client.post(url, data=payload)
|
||||
response.raise_for_status()
|
||||
logger.info('Successfully refreshed Bitbucket Data Center token')
|
||||
|
||||
data = response.json()
|
||||
return await self._parse_refresh_response(data)
|
||||
|
||||
async def _parse_refresh_response(self, data: dict) -> dict[str, str | int]:
|
||||
access_token = data.get('access_token')
|
||||
refresh_token = data.get('refresh_token')
|
||||
@@ -783,25 +841,24 @@ class TokenManager:
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
def store_org_token(self, installation_id: int, installation_token: str):
|
||||
async def store_org_token(self, installation_id: int, installation_token: str):
|
||||
"""Store a GitHub App installation token.
|
||||
|
||||
Args:
|
||||
installation_id: GitHub installation ID (integer or string)
|
||||
installation_token: The token to store
|
||||
"""
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
# Ensure installation_id is a string
|
||||
str_installation_id = str(installation_id)
|
||||
# Use type_coerce to ensure SQLAlchemy treats the parameter as a string
|
||||
installation = (
|
||||
session.query(GithubAppInstallation)
|
||||
.filter(
|
||||
result = await session.execute(
|
||||
select(GithubAppInstallation).filter(
|
||||
GithubAppInstallation.installation_id
|
||||
== type_coerce(str_installation_id, SQLString)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
installation = result.scalars().first()
|
||||
if installation:
|
||||
installation.encrypted_token = self.encrypt_text(installation_token)
|
||||
else:
|
||||
@@ -811,9 +868,9 @@ class TokenManager:
|
||||
encrypted_token=self.encrypt_text(installation_token),
|
||||
)
|
||||
)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
def load_org_token(self, installation_id: int) -> str | None:
|
||||
async def load_org_token(self, installation_id: int) -> str | None:
|
||||
"""Load a GitHub App installation token.
|
||||
|
||||
Args:
|
||||
@@ -822,17 +879,16 @@ class TokenManager:
|
||||
Returns:
|
||||
The decrypted token if found, None otherwise
|
||||
"""
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
# Ensure installation_id is a string and use type_coerce
|
||||
str_installation_id = str(installation_id)
|
||||
installation = (
|
||||
session.query(GithubAppInstallation)
|
||||
.filter(
|
||||
result = await session.execute(
|
||||
select(GithubAppInstallation).filter(
|
||||
GithubAppInstallation.installation_id
|
||||
== type_coerce(str_installation_id, SQLString)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
installation = result.scalars().first()
|
||||
if not installation:
|
||||
return None
|
||||
token = self.decrypt_text(installation.encrypted_token)
|
||||
|
||||
98
enterprise/server/auth/user/default_user_authorizer.py
Normal file
98
enterprise/server/auth/user/default_user_authorizer.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from fastapi import Request
|
||||
from pydantic import Field
|
||||
from server.auth.email_validation import extract_base_email
|
||||
from server.auth.token_manager import KeycloakUserInfo, TokenManager
|
||||
from server.auth.user.user_authorizer import (
|
||||
UserAuthorizationResponse,
|
||||
UserAuthorizer,
|
||||
UserAuthorizerInjector,
|
||||
)
|
||||
from storage.user_authorization import UserAuthorizationType
|
||||
from storage.user_authorization_store import UserAuthorizationStore
|
||||
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
token_manager = TokenManager()
|
||||
|
||||
|
||||
@dataclass
|
||||
class DefaultUserAuthorizer(UserAuthorizer):
|
||||
"""Class determining whether a user may be authorized.
|
||||
|
||||
Uses the user_authorizations database table to check whitelist/blacklist rules.
|
||||
"""
|
||||
|
||||
prevent_duplicates: bool
|
||||
|
||||
async def authorize_user(
|
||||
self, user_info: KeycloakUserInfo
|
||||
) -> UserAuthorizationResponse:
|
||||
user_id = user_info.sub
|
||||
email = user_info.email
|
||||
provider_type = user_info.identity_provider
|
||||
try:
|
||||
if not email:
|
||||
logger.warning(f'No email provided for user_id: {user_id}')
|
||||
return UserAuthorizationResponse(
|
||||
success=False, error_detail='missing_email'
|
||||
)
|
||||
|
||||
if self.prevent_duplicates:
|
||||
has_duplicate = await token_manager.check_duplicate_base_email(
|
||||
email, user_id
|
||||
)
|
||||
if has_duplicate:
|
||||
logger.warning(
|
||||
f'Blocked signup attempt for email {email} - duplicate base email found',
|
||||
extra={'user_id': user_id, 'email': email},
|
||||
)
|
||||
return UserAuthorizationResponse(
|
||||
success=False, error_detail='duplicate_email'
|
||||
)
|
||||
|
||||
# Check authorization rules (whitelist takes precedence over blacklist)
|
||||
base_email = extract_base_email(email)
|
||||
if base_email is None:
|
||||
return UserAuthorizationResponse(
|
||||
success=False, error_detail='invalid_email'
|
||||
)
|
||||
auth_type = await UserAuthorizationStore.get_authorization_type(
|
||||
base_email, provider_type
|
||||
)
|
||||
|
||||
if auth_type == UserAuthorizationType.WHITELIST:
|
||||
logger.debug(
|
||||
f'User {email} matched whitelist rule',
|
||||
extra={'user_id': user_id, 'email': email},
|
||||
)
|
||||
return UserAuthorizationResponse(success=True)
|
||||
|
||||
if auth_type == UserAuthorizationType.BLACKLIST:
|
||||
logger.warning(
|
||||
f'Blocked authentication attempt for email: {email}, user_id: {user_id}'
|
||||
)
|
||||
return UserAuthorizationResponse(success=False, error_detail='blocked')
|
||||
|
||||
return UserAuthorizationResponse(success=True)
|
||||
except Exception:
|
||||
logger.exception('error authorizing user', extra={'user_id': user_id})
|
||||
return UserAuthorizationResponse(success=False)
|
||||
|
||||
|
||||
class DefaultUserAuthorizerInjector(UserAuthorizerInjector):
|
||||
prevent_duplicates: bool = Field(
|
||||
default=True,
|
||||
description='Whether duplicate emails (containing +) are filtered',
|
||||
)
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[UserAuthorizer, None]:
|
||||
yield DefaultUserAuthorizer(
|
||||
prevent_duplicates=self.prevent_duplicates,
|
||||
)
|
||||
48
enterprise/server/auth/user/user_authorizer.py
Normal file
48
enterprise/server/auth/user/user_authorizer.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from fastapi import Depends
|
||||
from pydantic import BaseModel
|
||||
from server.auth.token_manager import KeycloakUserInfo
|
||||
|
||||
from openhands.agent_server.env_parser import from_env
|
||||
from openhands.app_server.services.injector import Injector
|
||||
from openhands.sdk.utils.models import DiscriminatedUnionMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UserAuthorizationResponse(BaseModel):
|
||||
success: bool
|
||||
error_detail: str | None = None
|
||||
|
||||
|
||||
class UserAuthorizer(ABC):
|
||||
"""Class determining whether a user may be authorized."""
|
||||
|
||||
@abstractmethod
|
||||
async def authorize_user(
|
||||
self, user_info: KeycloakUserInfo
|
||||
) -> UserAuthorizationResponse:
|
||||
"""Determine whether the info given is permitted."""
|
||||
|
||||
|
||||
class UserAuthorizerInjector(DiscriminatedUnionMixin, Injector[UserAuthorizer], ABC):
|
||||
pass
|
||||
|
||||
|
||||
def depends_user_authorizer():
|
||||
from server.auth.user.default_user_authorizer import (
|
||||
DefaultUserAuthorizerInjector,
|
||||
)
|
||||
|
||||
try:
|
||||
injector: UserAuthorizerInjector = from_env(
|
||||
UserAuthorizerInjector, 'OH_USER_AUTHORIZER'
|
||||
)
|
||||
except Exception as ex:
|
||||
print(ex)
|
||||
logger.info('Using default UserAuthorizer')
|
||||
injector = DefaultUserAuthorizerInjector()
|
||||
|
||||
return Depends(injector.depends)
|
||||
@@ -7,7 +7,8 @@ from uuid import uuid4
|
||||
import socketio
|
||||
from server.logger import logger
|
||||
from server.utils.conversation_callback_utils import invoke_conversation_callbacks
|
||||
from storage.database import session_maker
|
||||
from sqlalchemy import select
|
||||
from storage.database import a_session_maker
|
||||
from storage.stored_conversation_metadata_saas import StoredConversationMetadataSaas
|
||||
|
||||
from openhands.core.config import LLMConfig
|
||||
@@ -523,15 +524,14 @@ class ClusteredConversationManager(StandaloneConversationManager):
|
||||
f'local_connection_to_stopped_conversation:{connection_id}:{conversation_id}'
|
||||
)
|
||||
# Look up the user_id from the database
|
||||
with session_maker() as session:
|
||||
conversation_metadata_saas = (
|
||||
session.query(StoredConversationMetadataSaas)
|
||||
.filter(
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadataSaas).where(
|
||||
StoredConversationMetadataSaas.conversation_id
|
||||
== conversation_id
|
||||
)
|
||||
.first()
|
||||
)
|
||||
conversation_metadata_saas = result.scalars().first()
|
||||
user_id = (
|
||||
str(conversation_metadata_saas.user_id)
|
||||
if conversation_metadata_saas
|
||||
@@ -749,6 +749,9 @@ class ClusteredConversationManager(StandaloneConversationManager):
|
||||
config = load_openhands_config()
|
||||
settings_store = await SaasSettingsStore.get_instance(config, user_id)
|
||||
settings = await settings_store.load()
|
||||
if not settings:
|
||||
logger.error(f'Failed to load settings for user {user_id}')
|
||||
return
|
||||
await self.maybe_start_agent_loop(conversation_id, settings, user_id)
|
||||
|
||||
async def _start_agent_loop(
|
||||
|
||||
@@ -9,6 +9,7 @@ import requests # type: ignore
|
||||
from fastapi import HTTPException
|
||||
from server.auth.constants import (
|
||||
BITBUCKET_APP_CLIENT_ID,
|
||||
BITBUCKET_DATA_CENTER_CLIENT_ID,
|
||||
ENABLE_ENTERPRISE_SSO,
|
||||
ENABLE_JIRA,
|
||||
ENABLE_JIRA_DC,
|
||||
@@ -164,6 +165,9 @@ class SaaSServerConfig(ServerConfig):
|
||||
if ENABLE_ENTERPRISE_SSO:
|
||||
providers_configured.append(ProviderType.ENTERPRISE_SSO)
|
||||
|
||||
if BITBUCKET_DATA_CENTER_CLIENT_ID:
|
||||
providers_configured.append(ProviderType.BITBUCKET_DATA_CENTER)
|
||||
|
||||
config: dict[str, typing.Any] = {
|
||||
'APP_MODE': self.app_mode,
|
||||
'APP_SLUG': self.app_slug,
|
||||
|
||||
@@ -3,7 +3,6 @@ from datetime import datetime
|
||||
|
||||
from integrations.github.github_manager import GithubManager
|
||||
from integrations.github.github_view import GithubViewType
|
||||
from integrations.models import Message, SourceType
|
||||
from integrations.utils import (
|
||||
extract_summary_from_conversation_manager,
|
||||
get_summary_instruction,
|
||||
@@ -35,16 +34,12 @@ class GithubCallbackProcessor(ConversationCallbackProcessor):
|
||||
send_summary_instruction: bool = True
|
||||
|
||||
async def _send_message_to_github(self, message: str) -> None:
|
||||
"""
|
||||
Send a message to GitHub.
|
||||
"""Send a message to GitHub.
|
||||
|
||||
Args:
|
||||
message: The message content to send to GitHub
|
||||
"""
|
||||
try:
|
||||
# Create a message object for GitHub
|
||||
message_obj = Message(source=SourceType.OPENHANDS, message=message)
|
||||
|
||||
# Get the token manager
|
||||
token_manager = TokenManager()
|
||||
|
||||
@@ -53,8 +48,8 @@ class GithubCallbackProcessor(ConversationCallbackProcessor):
|
||||
|
||||
github_manager = GithubManager(token_manager, GitHubDataCollector())
|
||||
|
||||
# Send the message
|
||||
await github_manager.send_message(message_obj, self.github_view)
|
||||
# Send the message directly as a string
|
||||
await github_manager.send_message(message, self.github_view)
|
||||
|
||||
logger.info(
|
||||
f'[GitHub] Sent summary message to {self.github_view.full_repo_name}#{self.github_view.issue_number}'
|
||||
|
||||
@@ -3,7 +3,6 @@ from datetime import datetime
|
||||
|
||||
from integrations.gitlab.gitlab_manager import GitlabManager
|
||||
from integrations.gitlab.gitlab_view import GitlabViewType
|
||||
from integrations.models import Message, SourceType
|
||||
from integrations.utils import (
|
||||
extract_summary_from_conversation_manager,
|
||||
get_summary_instruction,
|
||||
@@ -14,7 +13,7 @@ from storage.conversation_callback import (
|
||||
ConversationCallback,
|
||||
ConversationCallbackProcessor,
|
||||
)
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.core.schema.agent import AgentState
|
||||
@@ -28,8 +27,7 @@ gitlab_manager = GitlabManager(token_manager)
|
||||
|
||||
|
||||
class GitlabCallbackProcessor(ConversationCallbackProcessor):
|
||||
"""
|
||||
Processor for sending conversation summaries to GitLab.
|
||||
"""Processor for sending conversation summaries to GitLab.
|
||||
|
||||
This processor is used to send summaries of conversations to GitLab
|
||||
when agent state changes occur.
|
||||
@@ -39,22 +37,18 @@ class GitlabCallbackProcessor(ConversationCallbackProcessor):
|
||||
send_summary_instruction: bool = True
|
||||
|
||||
async def _send_message_to_gitlab(self, message: str) -> None:
|
||||
"""
|
||||
Send a message to GitLab.
|
||||
"""Send a message to GitLab.
|
||||
|
||||
Args:
|
||||
message: The message content to send to GitLab
|
||||
"""
|
||||
try:
|
||||
# Create a message object for GitHub
|
||||
message_obj = Message(source=SourceType.OPENHANDS, message=message)
|
||||
|
||||
# Get the token manager
|
||||
token_manager = TokenManager()
|
||||
gitlab_manager = GitlabManager(token_manager)
|
||||
|
||||
# Send the message
|
||||
await gitlab_manager.send_message(message_obj, self.gitlab_view)
|
||||
# Send the message directly as a string
|
||||
await gitlab_manager.send_message(message, self.gitlab_view)
|
||||
|
||||
logger.info(
|
||||
f'[GitLab] Sent summary message to {self.gitlab_view.full_repo_name}#{self.gitlab_view.issue_number}'
|
||||
@@ -111,9 +105,9 @@ class GitlabCallbackProcessor(ConversationCallbackProcessor):
|
||||
self.send_summary_instruction = False
|
||||
callback.set_processor(self)
|
||||
callback.updated_at = datetime.now()
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
session.merge(callback)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
return
|
||||
|
||||
# Extract the summary from the event store
|
||||
@@ -132,9 +126,9 @@ class GitlabCallbackProcessor(ConversationCallbackProcessor):
|
||||
# Mark callback as completed status
|
||||
callback.status = CallbackStatus.COMPLETED
|
||||
callback.updated_at = datetime.now()
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
session.merge(callback)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
|
||||
@@ -37,8 +37,7 @@ class JiraCallbackProcessor(ConversationCallbackProcessor):
|
||||
workspace_name: str
|
||||
|
||||
async def _send_comment_to_jira(self, message: str) -> None:
|
||||
"""
|
||||
Send a comment to Jira issue.
|
||||
"""Send a comment to Jira issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send to Jira
|
||||
@@ -59,8 +58,9 @@ class JiraCallbackProcessor(ConversationCallbackProcessor):
|
||||
# Decrypt API key
|
||||
api_key = jira_manager.token_manager.decrypt_text(workspace.svc_acc_api_key)
|
||||
|
||||
# Send comment directly as a string
|
||||
await jira_manager.send_message(
|
||||
jira_manager.create_outgoing_message(msg=message),
|
||||
message,
|
||||
issue_key=self.issue_key,
|
||||
jira_cloud_id=workspace.jira_cloud_id,
|
||||
svc_acc_email=workspace.svc_acc_email,
|
||||
|
||||
@@ -37,8 +37,7 @@ class JiraDcCallbackProcessor(ConversationCallbackProcessor):
|
||||
base_api_url: str
|
||||
|
||||
async def _send_comment_to_jira_dc(self, message: str) -> None:
|
||||
"""
|
||||
Send a comment to Jira DC issue.
|
||||
"""Send a comment to Jira DC issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send to Jira DC
|
||||
@@ -61,8 +60,9 @@ class JiraDcCallbackProcessor(ConversationCallbackProcessor):
|
||||
workspace.svc_acc_api_key
|
||||
)
|
||||
|
||||
# Send comment directly as a string
|
||||
await jira_dc_manager.send_message(
|
||||
jira_dc_manager.create_outgoing_message(msg=message),
|
||||
message,
|
||||
issue_key=self.issue_key,
|
||||
base_api_url=self.base_api_url,
|
||||
svc_acc_api_key=api_key,
|
||||
|
||||
@@ -36,8 +36,7 @@ class LinearCallbackProcessor(ConversationCallbackProcessor):
|
||||
workspace_name: str
|
||||
|
||||
async def _send_comment_to_linear(self, message: str) -> None:
|
||||
"""
|
||||
Send a comment to Linear issue.
|
||||
"""Send a comment to Linear issue.
|
||||
|
||||
Args:
|
||||
message: The message content to send to Linear
|
||||
@@ -60,9 +59,9 @@ class LinearCallbackProcessor(ConversationCallbackProcessor):
|
||||
workspace.svc_acc_api_key
|
||||
)
|
||||
|
||||
# Send comment
|
||||
# Send comment directly as a string
|
||||
await linear_manager.send_message(
|
||||
linear_manager.create_outgoing_message(msg=message),
|
||||
message,
|
||||
self.issue_id,
|
||||
api_key,
|
||||
)
|
||||
|
||||
@@ -26,8 +26,7 @@ slack_manager = SlackManager(token_manager)
|
||||
|
||||
|
||||
class SlackCallbackProcessor(ConversationCallbackProcessor):
|
||||
"""
|
||||
Processor for sending conversation summaries to Slack.
|
||||
"""Processor for sending conversation summaries to Slack.
|
||||
|
||||
This processor is used to send summaries of conversations to Slack channels
|
||||
when agent state changes occur.
|
||||
@@ -41,14 +40,13 @@ class SlackCallbackProcessor(ConversationCallbackProcessor):
|
||||
last_user_msg_id: int | None = None
|
||||
|
||||
async def _send_message_to_slack(self, message: str) -> None:
|
||||
"""
|
||||
Send a message to Slack using the conversation_manager's send_to_event_stream method.
|
||||
"""Send a message to Slack.
|
||||
|
||||
Args:
|
||||
message: The message content to send to Slack
|
||||
"""
|
||||
try:
|
||||
# Create a message object for Slack
|
||||
# Create a message object for Slack view creation (incoming message format)
|
||||
message_obj = Message(
|
||||
source=SourceType.SLACK,
|
||||
message={
|
||||
@@ -64,12 +62,11 @@ class SlackCallbackProcessor(ConversationCallbackProcessor):
|
||||
slack_user, saas_user_auth = await slack_manager.authenticate_user(
|
||||
self.slack_user_id
|
||||
)
|
||||
slack_view = SlackFactory.create_slack_view_from_payload(
|
||||
slack_view = await SlackFactory.create_slack_view_from_payload(
|
||||
message_obj, slack_user, saas_user_auth
|
||||
)
|
||||
await slack_manager.send_message(
|
||||
slack_manager.create_outgoing_message(message), slack_view
|
||||
)
|
||||
# Send the message directly as a string
|
||||
await slack_manager.send_message(message, slack_view)
|
||||
|
||||
logger.info(
|
||||
f'[Slack] Sent summary message to channel {self.channel_id} '
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Callable
|
||||
from typing import Callable, cast
|
||||
|
||||
import jwt
|
||||
from fastapi import Request, Response, status
|
||||
@@ -19,7 +19,7 @@ from server.routes.auth import (
|
||||
)
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.server.user_auth.user_auth import AuthType, get_user_auth
|
||||
from openhands.server.user_auth.user_auth import AuthType, UserAuth, get_user_auth
|
||||
from openhands.server.utils import config
|
||||
|
||||
|
||||
@@ -43,19 +43,21 @@ class SetAuthCookieMiddleware:
|
||||
if not user_auth or user_auth.auth_type != AuthType.COOKIE:
|
||||
return response
|
||||
if user_auth.refreshed:
|
||||
if user_auth.access_token is None:
|
||||
return response
|
||||
set_response_cookie(
|
||||
request=request,
|
||||
response=response,
|
||||
keycloak_access_token=user_auth.access_token.get_secret_value(),
|
||||
keycloak_refresh_token=user_auth.refresh_token.get_secret_value(),
|
||||
secure=False if request.url.hostname == 'localhost' else True,
|
||||
accepted_tos=user_auth.accepted_tos,
|
||||
accepted_tos=user_auth.accepted_tos or False,
|
||||
)
|
||||
|
||||
# On re-authentication (token refresh), kick off background sync for GitLab repos
|
||||
schedule_gitlab_repo_sync(
|
||||
await user_auth.get_user_id(),
|
||||
)
|
||||
user_id = await user_auth.get_user_id()
|
||||
if user_id:
|
||||
schedule_gitlab_repo_sync(user_id)
|
||||
|
||||
if (
|
||||
self._should_attach(request)
|
||||
@@ -97,7 +99,10 @@ class SetAuthCookieMiddleware:
|
||||
return response
|
||||
|
||||
def _get_user_auth(self, request: Request) -> SaasUserAuth | None:
|
||||
return getattr(request.state, 'user_auth', None)
|
||||
user_auth: UserAuth | None = getattr(request.state, 'user_auth', None)
|
||||
if user_auth is None:
|
||||
return None
|
||||
return cast(SaasUserAuth, user_auth)
|
||||
|
||||
def _check_tos(self, request: Request):
|
||||
keycloak_auth_cookie = request.cookies.get('keycloak_auth')
|
||||
@@ -187,7 +192,7 @@ class SetAuthCookieMiddleware:
|
||||
async def _logout(self, request: Request):
|
||||
# Log out of keycloak - this prevents issues where you did not log in with the idp you believe you used
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
if user_auth and user_auth.refresh_token:
|
||||
await token_manager.logout(user_auth.refresh_token.get_secret_value())
|
||||
except Exception:
|
||||
|
||||
@@ -17,12 +17,12 @@ from openhands.server.user_auth import get_user_id
|
||||
# Helper functions for BYOR API key management
|
||||
async def get_byor_key_from_db(user_id: str) -> str | None:
|
||||
"""Get the BYOR key from the database for a user."""
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
if not user:
|
||||
return None
|
||||
|
||||
current_org_id = user.current_org_id
|
||||
current_org_member: OrgMember = None
|
||||
current_org_member: OrgMember | None = None
|
||||
for org_member in user.org_members:
|
||||
if org_member.org_id == current_org_id:
|
||||
current_org_member = org_member
|
||||
@@ -36,12 +36,12 @@ async def get_byor_key_from_db(user_id: str) -> str | None:
|
||||
|
||||
async def store_byor_key_in_db(user_id: str, key: str) -> None:
|
||||
"""Store the BYOR key in the database for a user."""
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
if not user:
|
||||
return None
|
||||
|
||||
current_org_id = user.current_org_id
|
||||
current_org_member: OrgMember = None
|
||||
current_org_member: OrgMember | None = None
|
||||
for org_member in user.org_members:
|
||||
if org_member.org_id == current_org_id:
|
||||
current_org_member = org_member
|
||||
@@ -49,13 +49,13 @@ async def store_byor_key_in_db(user_id: str, key: str) -> None:
|
||||
if not current_org_member:
|
||||
return None
|
||||
current_org_member.llm_api_key_for_byor = key
|
||||
OrgMemberStore.update_org_member(current_org_member)
|
||||
await OrgMemberStore.update_org_member(current_org_member)
|
||||
|
||||
|
||||
async def generate_byor_key(user_id: str) -> str | None:
|
||||
"""Generate a new BYOR key for a user."""
|
||||
try:
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
if not user:
|
||||
return None
|
||||
current_org_id = str(user.current_org_id)
|
||||
@@ -66,22 +66,15 @@ async def generate_byor_key(user_id: str) -> str | None:
|
||||
{'type': 'byor'},
|
||||
)
|
||||
|
||||
if key:
|
||||
logger.info(
|
||||
'Successfully generated new BYOR key',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'key_length': len(key) if key else 0,
|
||||
'key_prefix': key[:10] + '...' if key and len(key) > 10 else key,
|
||||
},
|
||||
)
|
||||
return key
|
||||
else:
|
||||
logger.error(
|
||||
'Failed to generate BYOR LLM API key - no key in response',
|
||||
extra={'user_id': user_id},
|
||||
)
|
||||
return None
|
||||
logger.info(
|
||||
'Successfully generated new BYOR key',
|
||||
extra={
|
||||
'user_id': user_id,
|
||||
'key_length': len(key),
|
||||
'key_prefix': key[:10] + '...' if len(key) > 10 else key,
|
||||
},
|
||||
)
|
||||
return key
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Error generating BYOR key',
|
||||
@@ -98,7 +91,7 @@ async def delete_byor_key_from_litellm(user_id: str, byor_key: str) -> bool:
|
||||
"""
|
||||
try:
|
||||
# Get user to construct the key alias
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
key_alias = None
|
||||
if user and user.current_org_id:
|
||||
key_alias = f'BYOR Key - user {user_id}, org {user.current_org_id}'
|
||||
@@ -251,7 +244,7 @@ async def delete_api_key(
|
||||
)
|
||||
|
||||
# Delete the key
|
||||
success = api_key_store.delete_api_key_by_id(key_id)
|
||||
success = await api_key_store.delete_api_key_by_id(key_id)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
|
||||
@@ -3,15 +3,14 @@ import json
|
||||
import uuid
|
||||
import warnings
|
||||
from datetime import datetime, timezone
|
||||
from typing import Annotated, Literal, Optional
|
||||
from urllib.parse import quote
|
||||
from typing import Annotated, Literal, Optional, cast
|
||||
from urllib.parse import quote, urlencode
|
||||
from uuid import UUID as parse_uuid
|
||||
|
||||
import posthog
|
||||
from fastapi import APIRouter, Header, HTTPException, Request, Response, status
|
||||
from fastapi.responses import JSONResponse, RedirectResponse
|
||||
from pydantic import SecretStr
|
||||
from server.auth.auth_utils import user_verifier
|
||||
from server.auth.constants import (
|
||||
KEYCLOAK_CLIENT_ID,
|
||||
KEYCLOAK_REALM_NAME,
|
||||
@@ -19,11 +18,14 @@ from server.auth.constants import (
|
||||
RECAPTCHA_SITE_KEY,
|
||||
ROLE_CHECK_ENABLED,
|
||||
)
|
||||
from server.auth.domain_blocker import domain_blocker
|
||||
from server.auth.gitlab_sync import schedule_gitlab_repo_sync
|
||||
from server.auth.recaptcha_service import recaptcha_service
|
||||
from server.auth.saas_user_auth import SaasUserAuth
|
||||
from server.auth.token_manager import TokenManager
|
||||
from server.auth.user.user_authorizer import (
|
||||
UserAuthorizer,
|
||||
depends_user_authorizer,
|
||||
)
|
||||
from server.config import sign_token
|
||||
from server.constants import IS_FEATURE_ENV
|
||||
from server.routes.event_webhook import _get_session_api_key, _get_user_id
|
||||
@@ -34,10 +36,13 @@ from server.services.org_invitation_service import (
|
||||
OrgInvitationService,
|
||||
UserAlreadyMemberError,
|
||||
)
|
||||
from storage.database import session_maker
|
||||
from server.utils.rate_limit_utils import check_rate_limit_by_user_id
|
||||
from sqlalchemy import select
|
||||
from storage.database import a_session_maker
|
||||
from storage.user import User
|
||||
from storage.user_store import UserStore
|
||||
|
||||
from openhands.app_server.config import get_global_config
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.provider import ProviderHandler
|
||||
from openhands.integrations.service_types import ProviderType, TokenResponse
|
||||
@@ -155,11 +160,16 @@ async def keycloak_callback(
|
||||
state: Optional[str] = None,
|
||||
error: Optional[str] = None,
|
||||
error_description: Optional[str] = None,
|
||||
user_authorizer: UserAuthorizer = depends_user_authorizer(),
|
||||
):
|
||||
# Extract redirect URL, reCAPTCHA token, and invitation token from state
|
||||
redirect_url, recaptcha_token, invitation_token = _extract_oauth_state(state)
|
||||
if not redirect_url:
|
||||
redirect_url = str(request.base_url)
|
||||
|
||||
if redirect_url is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail='Missing state in request params',
|
||||
)
|
||||
|
||||
if not code:
|
||||
# check if this is a forward from the account linking page
|
||||
@@ -168,55 +178,58 @@ async def keycloak_callback(
|
||||
and error_description == 'authentication_expired'
|
||||
):
|
||||
return RedirectResponse(redirect_url, status_code=302)
|
||||
return JSONResponse(
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={'error': 'Missing code in request params'},
|
||||
detail='Missing code in request params',
|
||||
)
|
||||
scheme = 'http' if request.url.hostname == 'localhost' else 'https'
|
||||
redirect_uri = f'{scheme}://{request.url.netloc}{request.url.path}'
|
||||
logger.debug(f'code: {code}, redirect_uri: {redirect_uri}')
|
||||
|
||||
web_url = get_global_config().web_url
|
||||
if not web_url:
|
||||
scheme = 'http' if request.url.hostname == 'localhost' else 'https'
|
||||
web_url = f'{scheme}://{request.url.netloc}'
|
||||
redirect_uri = web_url + request.url.path
|
||||
|
||||
(
|
||||
keycloak_access_token,
|
||||
keycloak_refresh_token,
|
||||
) = await token_manager.get_keycloak_tokens(code, redirect_uri)
|
||||
if not keycloak_access_token or not keycloak_refresh_token:
|
||||
return JSONResponse(
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={'error': 'Problem retrieving Keycloak tokens'},
|
||||
detail='Problem retrieving Keycloak tokens',
|
||||
)
|
||||
|
||||
user_info = await token_manager.get_user_info(keycloak_access_token)
|
||||
logger.debug(f'user_info: {user_info}')
|
||||
if ROLE_CHECK_ENABLED and 'roles' not in user_info:
|
||||
return JSONResponse(
|
||||
if ROLE_CHECK_ENABLED and user_info.roles is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail='Missing required role'
|
||||
)
|
||||
|
||||
authorization = await user_authorizer.authorize_user(user_info)
|
||||
if not authorization.success:
|
||||
# Return unauthorized
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={'error': 'Missing required role'},
|
||||
detail=authorization.error_detail,
|
||||
)
|
||||
|
||||
if 'sub' not in user_info or 'preferred_username' not in user_info:
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={'error': 'Missing user ID or username in response'},
|
||||
)
|
||||
|
||||
email = user_info.get('email')
|
||||
user_id = user_info['sub']
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
email = user_info.email
|
||||
user_id = user_info.sub
|
||||
user_info_dict = user_info.model_dump(exclude_none=True)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
if not user:
|
||||
user = await UserStore.create_user(user_id, user_info)
|
||||
user = await UserStore.create_user(user_id, user_info_dict)
|
||||
else:
|
||||
# Existing user — gradually backfill contact_name if it still has a username-style value
|
||||
await UserStore.backfill_contact_name(user_id, user_info)
|
||||
await UserStore.backfill_user_email(user_id, user_info)
|
||||
await UserStore.backfill_contact_name(user_id, user_info_dict)
|
||||
await UserStore.backfill_user_email(user_id, user_info_dict)
|
||||
|
||||
if not user:
|
||||
logger.error(f'Failed to authenticate user {user_info["preferred_username"]}')
|
||||
return JSONResponse(
|
||||
logger.error(f'Failed to authenticate user {user_info.email}')
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={
|
||||
'error': f'Failed to authenticate user {user_info["preferred_username"]}'
|
||||
},
|
||||
detail=f'Failed to authenticate user {user_info.email}',
|
||||
)
|
||||
|
||||
logger.info(f'Logging in user {str(user.id)} in org {user.current_org_id}')
|
||||
@@ -231,7 +244,7 @@ async def keycloak_callback(
|
||||
'email': email,
|
||||
},
|
||||
)
|
||||
error_url = f'{request.base_url}login?recaptcha_blocked=true'
|
||||
error_url = f'{web_url}/login?recaptcha_blocked=true'
|
||||
return RedirectResponse(error_url, status_code=302)
|
||||
|
||||
user_ip = request.client.host if request.client else 'unknown'
|
||||
@@ -262,74 +275,48 @@ async def keycloak_callback(
|
||||
},
|
||||
)
|
||||
# Redirect to home with error parameter
|
||||
error_url = f'{request.base_url}login?recaptcha_blocked=true'
|
||||
error_url = f'{web_url}/login?recaptcha_blocked=true'
|
||||
return RedirectResponse(error_url, status_code=302)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f'reCAPTCHA verification error at callback: {e}')
|
||||
# Fail open - continue with login if reCAPTCHA service unavailable
|
||||
|
||||
# Check if email domain is blocked
|
||||
if email and domain_blocker.is_domain_blocked(email):
|
||||
logger.warning(
|
||||
f'Blocked authentication attempt for email: {email}, user_id: {user_id}'
|
||||
)
|
||||
|
||||
# Disable the Keycloak account
|
||||
await token_manager.disable_keycloak_user(user_id, email)
|
||||
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={
|
||||
'error': 'Access denied: Your email domain is not allowed to access this service'
|
||||
},
|
||||
)
|
||||
|
||||
# Check for duplicate email with + modifier
|
||||
if email:
|
||||
try:
|
||||
has_duplicate = await token_manager.check_duplicate_base_email(
|
||||
email, user_id
|
||||
)
|
||||
if has_duplicate:
|
||||
logger.warning(
|
||||
f'Blocked signup attempt for email {email} - duplicate base email found',
|
||||
extra={'user_id': user_id, 'email': email},
|
||||
)
|
||||
|
||||
# Delete the Keycloak user that was automatically created during OAuth
|
||||
# This prevents orphaned accounts in Keycloak
|
||||
# The delete_keycloak_user method already handles all errors internally
|
||||
deletion_success = await token_manager.delete_keycloak_user(user_id)
|
||||
if deletion_success:
|
||||
logger.info(
|
||||
f'Deleted Keycloak user {user_id} after detecting duplicate email {email}'
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f'Failed to delete Keycloak user {user_id} after detecting duplicate email {email}. '
|
||||
f'User may need to be manually cleaned up.'
|
||||
)
|
||||
|
||||
# Redirect to home page with query parameter indicating the issue
|
||||
home_url = f'{request.base_url}/login?duplicated_email=true'
|
||||
return RedirectResponse(home_url, status_code=302)
|
||||
except Exception as e:
|
||||
# Log error but allow signup to proceed (fail open)
|
||||
logger.error(
|
||||
f'Error checking duplicate email for {email}: {e}',
|
||||
extra={'user_id': user_id, 'email': email},
|
||||
)
|
||||
|
||||
# Check email verification status
|
||||
email_verified = user_info.get('email_verified', False)
|
||||
email_verified = user_info.email_verified or False
|
||||
if not email_verified:
|
||||
# Send verification email
|
||||
# Send verification email with rate limiting to prevent abuse
|
||||
# Users who repeatedly login without verifying would otherwise trigger
|
||||
# unlimited verification emails
|
||||
# Import locally to avoid circular import with email.py
|
||||
from server.routes.email import verify_email
|
||||
|
||||
await verify_email(request=request, user_id=user_id, is_auth_flow=True)
|
||||
# Rate limit verification emails during auth flow (60 seconds per user)
|
||||
# This is separate from the manual resend rate limit which uses 30 seconds
|
||||
rate_limited = False
|
||||
try:
|
||||
await check_rate_limit_by_user_id(
|
||||
request=request,
|
||||
key_prefix='auth_verify_email',
|
||||
user_id=user_id,
|
||||
user_rate_limit_seconds=60,
|
||||
ip_rate_limit_seconds=120,
|
||||
)
|
||||
await verify_email(request=request, user_id=user_id, is_auth_flow=True)
|
||||
except HTTPException as e:
|
||||
if e.status_code == status.HTTP_429_TOO_MANY_REQUESTS:
|
||||
# Rate limited - still redirect to verification page but don't send email
|
||||
rate_limited = True
|
||||
logger.info(
|
||||
f'Rate limited verification email for user {user_id} during auth flow'
|
||||
)
|
||||
else:
|
||||
raise
|
||||
|
||||
verification_redirect_url = f'{request.base_url}login?email_verification_required=true&user_id={user_id}'
|
||||
if rate_limited:
|
||||
verification_redirect_url = f'{verification_redirect_url}&rate_limited=true'
|
||||
|
||||
# Preserve invitation token so it can be included in OAuth state after verification
|
||||
if invitation_token:
|
||||
verification_redirect_url = (
|
||||
@@ -340,7 +327,7 @@ async def keycloak_callback(
|
||||
|
||||
# default to github IDP for now.
|
||||
# TODO: remove default once Keycloak is updated universally with the new attribute.
|
||||
idp: str = user_info.get('identity_provider', ProviderType.GITHUB.value)
|
||||
idp: str = user_info.identity_provider or ProviderType.GITHUB.value
|
||||
logger.info(f'Full IDP is {idp}')
|
||||
idp_type = 'oidc'
|
||||
if ':' in idp:
|
||||
@@ -351,15 +338,8 @@ async def keycloak_callback(
|
||||
ProviderType(idp), user_id, keycloak_access_token
|
||||
)
|
||||
|
||||
username = user_info['preferred_username']
|
||||
if user_verifier.is_active() and not user_verifier.is_user_allowed(username):
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={'error': 'Not authorized via waitlist'},
|
||||
)
|
||||
|
||||
valid_offline_token = (
|
||||
await token_manager.validate_offline_token(user_id=user_info['sub'])
|
||||
await token_manager.validate_offline_token(user_id=user_info.sub)
|
||||
if idp_type != 'saml'
|
||||
else True
|
||||
)
|
||||
@@ -403,13 +383,19 @@ async def keycloak_callback(
|
||||
)
|
||||
|
||||
if not valid_offline_token:
|
||||
param_str = urlencode(
|
||||
{
|
||||
'client_id': KEYCLOAK_CLIENT_ID,
|
||||
'response_type': 'code',
|
||||
'kc_idp_hint': idp,
|
||||
'redirect_uri': f'{web_url}/oauth/keycloak/offline/callback',
|
||||
'scope': 'openid email profile offline_access',
|
||||
'state': state,
|
||||
}
|
||||
)
|
||||
redirect_url = (
|
||||
f'{KEYCLOAK_SERVER_URL_EXT}/realms/{KEYCLOAK_REALM_NAME}/protocol/openid-connect/auth'
|
||||
f'?client_id={KEYCLOAK_CLIENT_ID}&response_type=code'
|
||||
f'&kc_idp_hint={idp}'
|
||||
f'&redirect_uri={scheme}%3A%2F%2F{request.url.netloc}%2Foauth%2Fkeycloak%2Foffline%2Fcallback'
|
||||
f'&scope=openid%20email%20profile%20offline_access'
|
||||
f'&state={state}'
|
||||
f'?{param_str}'
|
||||
)
|
||||
|
||||
has_accepted_tos = user.accepted_tos is not None
|
||||
@@ -504,7 +490,7 @@ async def keycloak_callback(
|
||||
response=response,
|
||||
keycloak_access_token=keycloak_access_token,
|
||||
keycloak_refresh_token=keycloak_refresh_token,
|
||||
secure=True if scheme == 'https' else False,
|
||||
secure=True if redirect_url.startswith('https') else False,
|
||||
accepted_tos=has_accepted_tos,
|
||||
)
|
||||
|
||||
@@ -540,14 +526,10 @@ async def keycloak_offline_callback(code: str, state: str, request: Request):
|
||||
|
||||
user_info = await token_manager.get_user_info(keycloak_access_token)
|
||||
logger.debug(f'user_info: {user_info}')
|
||||
if 'sub' not in user_info:
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={'error': 'Missing Keycloak ID in response'},
|
||||
)
|
||||
# sub is a required field in KeycloakUserInfo, validation happens in get_user_info
|
||||
|
||||
await token_manager.store_offline_token(
|
||||
user_id=user_info['sub'], offline_token=keycloak_refresh_token
|
||||
user_id=user_info.sub, offline_token=keycloak_refresh_token
|
||||
)
|
||||
|
||||
redirect_url, _, _ = _extract_oauth_state(state)
|
||||
@@ -590,7 +572,7 @@ async def authenticate(request: Request):
|
||||
|
||||
@api_router.post('/accept_tos')
|
||||
async def accept_tos(request: Request):
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
access_token = await user_auth.get_access_token()
|
||||
refresh_token = user_auth.refresh_token
|
||||
user_id = await user_auth.get_user_id()
|
||||
@@ -609,18 +591,21 @@ async def accept_tos(request: Request):
|
||||
redirect_url = body.get('redirect_url', str(request.base_url))
|
||||
|
||||
# Update user settings with TOS acceptance
|
||||
accepted_tos: datetime = datetime.now(timezone.utc)
|
||||
with session_maker() as session:
|
||||
user = session.query(User).filter(User.id == uuid.UUID(user_id)).first()
|
||||
accepted_tos: datetime = datetime.now(timezone.utc).replace(tzinfo=None)
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(User).where(User.id == uuid.UUID(user_id))
|
||||
)
|
||||
user = result.scalar_one_or_none()
|
||||
if not user:
|
||||
session.rollback()
|
||||
await session.rollback()
|
||||
logger.error('User for {user_id} not found.')
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={'error': 'User does not exist'},
|
||||
)
|
||||
user.accepted_tos = accepted_tos
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
logger.info(f'User {user_id} accepted TOS')
|
||||
|
||||
@@ -656,7 +641,7 @@ async def logout(request: Request):
|
||||
|
||||
# Try to properly logout from Keycloak, but don't fail if it doesn't work
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
if user_auth and user_auth.refresh_token:
|
||||
refresh_token = user_auth.refresh_token.get_secret_value()
|
||||
await token_manager.logout(refresh_token)
|
||||
|
||||
@@ -11,9 +11,10 @@ from integrations import stripe_service
|
||||
from pydantic import BaseModel
|
||||
from server.constants import STRIPE_API_KEY
|
||||
from server.logger import logger
|
||||
from sqlalchemy import select
|
||||
from starlette.datastructures import URL
|
||||
from storage.billing_session import BillingSession
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.lite_llm_manager import LiteLlmManager
|
||||
from storage.org import Org
|
||||
from storage.subscription_access import SubscriptionAccess
|
||||
@@ -89,7 +90,9 @@ def calculate_credits(user_info: LiteLlmUserInfo) -> float:
|
||||
async def get_credits(user_id: str = Depends(get_user_id)) -> GetCreditsResponse:
|
||||
if not stripe_service.STRIPE_API_KEY:
|
||||
return GetCreditsResponse()
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
if user is None:
|
||||
raise HTTPException(status.HTTP_404_NOT_FOUND, detail='User not found')
|
||||
user_team_info = await LiteLlmManager.get_user_team_info(
|
||||
user_id, str(user.current_org_id)
|
||||
)
|
||||
@@ -106,16 +109,17 @@ async def get_subscription_access(
|
||||
user_id: str = Depends(get_user_id),
|
||||
) -> SubscriptionAccessResponse | None:
|
||||
"""Get details of the currently valid subscription for the user."""
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
now = datetime.now(UTC)
|
||||
subscription_access = (
|
||||
session.query(SubscriptionAccess)
|
||||
.filter(SubscriptionAccess.status == 'ACTIVE')
|
||||
.filter(SubscriptionAccess.user_id == user_id)
|
||||
.filter(SubscriptionAccess.start_at <= now)
|
||||
.filter(SubscriptionAccess.end_at >= now)
|
||||
.first()
|
||||
result = await session.execute(
|
||||
select(SubscriptionAccess).where(
|
||||
SubscriptionAccess.status == 'ACTIVE',
|
||||
SubscriptionAccess.user_id == user_id,
|
||||
SubscriptionAccess.start_at <= now,
|
||||
SubscriptionAccess.end_at >= now,
|
||||
)
|
||||
)
|
||||
subscription_access = result.scalar_one_or_none()
|
||||
if not subscription_access:
|
||||
return None
|
||||
return SubscriptionAccessResponse(
|
||||
@@ -142,6 +146,11 @@ async def create_customer_setup_session(
|
||||
) -> CreateBillingSessionResponse:
|
||||
await validate_billing_enabled()
|
||||
customer_info = await stripe_service.find_or_create_customer_by_user_id(user_id)
|
||||
if not customer_info:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail='Could not find or create customer for user',
|
||||
)
|
||||
base_url = _get_base_url(request)
|
||||
checkout_session = await stripe.checkout.Session.create_async(
|
||||
customer=customer_info['customer_id'],
|
||||
@@ -163,6 +172,11 @@ async def create_checkout_session(
|
||||
await validate_billing_enabled()
|
||||
base_url = _get_base_url(request)
|
||||
customer_info = await stripe_service.find_or_create_customer_by_user_id(user_id)
|
||||
if not customer_info:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail='Could not find or create customer for user',
|
||||
)
|
||||
checkout_session = await stripe.checkout.Session.create_async(
|
||||
customer=customer_info['customer_id'],
|
||||
line_items=[
|
||||
@@ -197,7 +211,7 @@ async def create_checkout_session(
|
||||
'checkout_session_id': checkout_session.id,
|
||||
},
|
||||
)
|
||||
with session_maker() as session:
|
||||
async with a_session_maker() as session:
|
||||
billing_session = BillingSession(
|
||||
id=checkout_session.id,
|
||||
user_id=user_id,
|
||||
@@ -206,7 +220,7 @@ async def create_checkout_session(
|
||||
price_code='NA',
|
||||
)
|
||||
session.add(billing_session)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
return CreateBillingSessionResponse(redirect_url=checkout_session.url)
|
||||
|
||||
@@ -215,13 +229,14 @@ async def create_checkout_session(
|
||||
@billing_router.get('/success')
|
||||
async def success_callback(session_id: str, request: Request):
|
||||
# We can't use the auth cookie because of SameSite=strict
|
||||
with session_maker() as session:
|
||||
billing_session = (
|
||||
session.query(BillingSession)
|
||||
.filter(BillingSession.id == session_id)
|
||||
.filter(BillingSession.status == 'in_progress')
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(BillingSession).where(
|
||||
BillingSession.id == session_id,
|
||||
BillingSession.status == 'in_progress',
|
||||
)
|
||||
)
|
||||
billing_session = result.scalar_one_or_none()
|
||||
|
||||
if billing_session is None:
|
||||
# Hopefully this never happens - we get a redirect from stripe where the session does not exist
|
||||
@@ -243,7 +258,9 @@ async def success_callback(session_id: str, request: Request):
|
||||
)
|
||||
raise HTTPException(status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
user = await UserStore.get_user_by_id_async(billing_session.user_id)
|
||||
user = await UserStore.get_user_by_id(billing_session.user_id)
|
||||
if user is None:
|
||||
raise HTTPException(status.HTTP_404_NOT_FOUND, detail='User not found')
|
||||
user_team_info = await LiteLlmManager.get_user_team_info(
|
||||
billing_session.user_id, str(user.current_org_id)
|
||||
)
|
||||
@@ -253,7 +270,8 @@ async def success_callback(session_id: str, request: Request):
|
||||
user_team_info, billing_session.user_id, str(user.current_org_id)
|
||||
)
|
||||
|
||||
org = session.query(Org).filter(Org.id == user.current_org_id).first()
|
||||
result = await session.execute(select(Org).where(Org.id == user.current_org_id))
|
||||
org = result.scalar_one_or_none()
|
||||
new_max_budget = max_budget + add_credits
|
||||
|
||||
await LiteLlmManager.update_team_and_users_budget(
|
||||
@@ -279,7 +297,7 @@ async def success_callback(session_id: str, request: Request):
|
||||
'stripe_customer_id': stripe_session.customer,
|
||||
},
|
||||
)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
return RedirectResponse(
|
||||
f'{_get_base_url(request)}settings/billing?checkout=success', status_code=302
|
||||
@@ -289,13 +307,14 @@ async def success_callback(session_id: str, request: Request):
|
||||
# Callback endpoint for cancelled Stripe payments - updates billing session status
|
||||
@billing_router.get('/cancel')
|
||||
async def cancel_callback(session_id: str, request: Request):
|
||||
with session_maker() as session:
|
||||
billing_session = (
|
||||
session.query(BillingSession)
|
||||
.filter(BillingSession.id == session_id)
|
||||
.filter(BillingSession.status == 'in_progress')
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(BillingSession).where(
|
||||
BillingSession.id == session_id,
|
||||
BillingSession.status == 'in_progress',
|
||||
)
|
||||
)
|
||||
billing_session = result.scalar_one_or_none()
|
||||
if billing_session:
|
||||
logger.info(
|
||||
'stripe_checkout_cancel',
|
||||
@@ -307,7 +326,7 @@ async def cancel_callback(session_id: str, request: Request):
|
||||
billing_session.status = 'cancelled'
|
||||
billing_session.updated_at = datetime.now(UTC)
|
||||
session.merge(billing_session)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
return RedirectResponse(
|
||||
f'{_get_base_url(request)}settings/billing?checkout=cancel', status_code=302
|
||||
|
||||
63
enterprise/server/routes/bitbucket_dc_proxy.py
Normal file
63
enterprise/server/routes/bitbucket_dc_proxy.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import httpx
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from server.auth.constants import BITBUCKET_DATA_CENTER_HOST
|
||||
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
|
||||
router = APIRouter(prefix='/bitbucket-dc-proxy')
|
||||
|
||||
BITBUCKET_DC_TIMEOUT = 10 # seconds
|
||||
|
||||
|
||||
# Bitbucket Data Center is not an OIDC provider, so keycloak
|
||||
# can't retrieve user info from it directly.
|
||||
# This endpoint proxies requests to bitbucket data center to get user info
|
||||
# given a Bitbucket Data Center access token. Keycloak
|
||||
# is configured to use this endpoint as the User Info Endpoint
|
||||
# for the Bitbucket Data Center OIDC provider.
|
||||
@router.get('/oauth2/userinfo')
|
||||
async def userinfo(request: Request):
|
||||
if not BITBUCKET_DATA_CENTER_HOST:
|
||||
raise ValueError('BITBUCKET_DATA_CENTER_HOST must be configured')
|
||||
bitbucket_base_url = f'https://{BITBUCKET_DATA_CENTER_HOST}'
|
||||
|
||||
auth_header = request.headers.get('Authorization', '')
|
||||
if not auth_header.startswith('Bearer '):
|
||||
return JSONResponse({'error': 'missing_token'}, status_code=401)
|
||||
|
||||
headers = {'Authorization': auth_header}
|
||||
async with httpx.AsyncClient(verify=httpx_verify_option()) as client:
|
||||
# Step 1: get username
|
||||
whoami_resp = await client.get(
|
||||
f'{bitbucket_base_url}/plugins/servlet/applinks/whoami',
|
||||
headers=headers,
|
||||
timeout=BITBUCKET_DC_TIMEOUT,
|
||||
)
|
||||
if whoami_resp.status_code != 200:
|
||||
return JSONResponse({'error': 'not_authenticated'}, status_code=401)
|
||||
username = whoami_resp.text.strip()
|
||||
if not username:
|
||||
return JSONResponse({'error': 'not_authenticated'}, status_code=401)
|
||||
|
||||
# Step 2: get user details
|
||||
user_resp = await client.get(
|
||||
f'{bitbucket_base_url}/rest/api/latest/users/{username}',
|
||||
headers=headers,
|
||||
timeout=BITBUCKET_DC_TIMEOUT,
|
||||
)
|
||||
if user_resp.status_code != 200:
|
||||
return JSONResponse(
|
||||
{'error': f'bitbucket_error: {user_resp.status_code}'},
|
||||
status_code=user_resp.status_code,
|
||||
)
|
||||
user_data = user_resp.json()
|
||||
|
||||
return JSONResponse(
|
||||
{
|
||||
'sub': str(user_data.get('id', username)),
|
||||
'preferred_username': user_data.get('name', username),
|
||||
'name': user_data.get('displayName', username),
|
||||
'email': user_data.get('emailAddress', ''),
|
||||
}
|
||||
)
|
||||
@@ -1,163 +0,0 @@
|
||||
import asyncio
|
||||
import os
|
||||
import time
|
||||
from threading import Thread
|
||||
|
||||
from fastapi import APIRouter, FastAPI
|
||||
from sqlalchemy import func, select
|
||||
from storage.database import a_session_maker, get_engine, session_maker
|
||||
from storage.user import User
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.utils.async_utils import wait_all
|
||||
|
||||
# Safety flag to prevent chaos routes from being added in production environments
|
||||
# Only enables these routes in non-production environments
|
||||
ADD_DEBUGGING_ROUTES = os.environ.get('ADD_DEBUGGING_ROUTES') in ('1', 'true')
|
||||
|
||||
|
||||
def add_debugging_routes(api: FastAPI):
|
||||
"""
|
||||
# HERE BE DRAGONS!
|
||||
Chaos scripts for debugging and stress testing the system.
|
||||
|
||||
This module contains endpoints that deliberately stress test and potentially break
|
||||
the system to help identify weaknesses and bottlenecks. It includes a safety check
|
||||
to ensure these routes are never deployed to production environments.
|
||||
|
||||
The routes in this module are specifically designed for:
|
||||
- Testing connection pool behavior under load
|
||||
- Simulating database connection exhaustion
|
||||
- Testing async vs sync database access patterns
|
||||
- Simulating event loop blocking
|
||||
"""
|
||||
|
||||
if not ADD_DEBUGGING_ROUTES:
|
||||
return
|
||||
|
||||
chaos_router = APIRouter(prefix='/debugging')
|
||||
|
||||
@chaos_router.get('/pool-stats')
|
||||
def pool_stats() -> dict[str, int]:
|
||||
"""
|
||||
Returns current database connection pool statistics.
|
||||
|
||||
This endpoint provides real-time metrics about the SQLAlchemy connection pool:
|
||||
- checked_in: Number of connections currently available in the pool
|
||||
- checked_out: Number of connections currently in use
|
||||
- overflow: Number of overflow connections created beyond pool_size
|
||||
"""
|
||||
engine = get_engine()
|
||||
return {
|
||||
'checked_in': engine.pool.checkedin(),
|
||||
'checked_out': engine.pool.checkedout(),
|
||||
'overflow': engine.pool.overflow(),
|
||||
}
|
||||
|
||||
@chaos_router.get('/test-db')
|
||||
def test_db(num_tests: int = 10, delay: int = 1) -> str:
|
||||
"""
|
||||
Stress tests the database connection pool using multiple threads.
|
||||
|
||||
Creates multiple threads that each open a database connection, perform a query,
|
||||
hold the connection for the specified delay, and then release it.
|
||||
|
||||
Parameters:
|
||||
num_tests: Number of concurrent database connections to create
|
||||
delay: Number of seconds each connection is held open
|
||||
|
||||
This test helps identify connection pool exhaustion issues and connection
|
||||
leaks under concurrent load.
|
||||
"""
|
||||
threads = [Thread(target=_db_check, args=(delay,)) for _ in range(num_tests)]
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
return 'success'
|
||||
|
||||
@chaos_router.get('/a-test-db')
|
||||
async def a_chaos_monkey(num_tests: int = 10, delay: int = 1) -> str:
|
||||
"""
|
||||
Stress tests the async database connection pool.
|
||||
|
||||
Similar to /test-db but uses async connections and coroutines instead of threads.
|
||||
This endpoint helps compare the behavior of async vs sync connection pools
|
||||
under similar load conditions.
|
||||
|
||||
Parameters:
|
||||
num_tests: Number of concurrent async database connections to create
|
||||
delay: Number of seconds each connection is held open
|
||||
"""
|
||||
await wait_all((_a_db_check(delay) for _ in range(num_tests)))
|
||||
return 'success'
|
||||
|
||||
@chaos_router.get('/lock-main-runloop')
|
||||
async def lock_main_runloop(duration: int = 10) -> str:
|
||||
"""
|
||||
Deliberately blocks the main asyncio event loop.
|
||||
|
||||
This endpoint uses a synchronous sleep operation in an async function,
|
||||
which blocks the entire FastAPI server's event loop for the specified duration.
|
||||
This simulates what happens when CPU-intensive operations or blocking I/O
|
||||
operations are incorrectly used in async code.
|
||||
|
||||
Parameters:
|
||||
duration: Number of seconds to block the event loop
|
||||
|
||||
WARNING: This will make the entire server unresponsive for the duration!
|
||||
"""
|
||||
time.sleep(duration)
|
||||
return 'success'
|
||||
|
||||
api.include_router(chaos_router) # Add routes for readiness checks
|
||||
|
||||
|
||||
def _db_check(delay: int):
|
||||
"""
|
||||
Executes a single request against the database with an artificial delay.
|
||||
|
||||
This helper function:
|
||||
1. Opens a database connection from the pool
|
||||
2. Executes a simple query to count users
|
||||
3. Holds the connection for the specified delay
|
||||
4. Logs connection pool statistics
|
||||
5. Implicitly returns the connection to the pool when the session closes
|
||||
|
||||
Args:
|
||||
delay: Number of seconds to hold the database connection
|
||||
"""
|
||||
with session_maker() as session:
|
||||
num_users = session.query(User).count()
|
||||
time.sleep(delay)
|
||||
engine = get_engine()
|
||||
logger.info(
|
||||
'check',
|
||||
extra={
|
||||
'num_users': num_users,
|
||||
'checked_in': engine.pool.checkedin(),
|
||||
'checked_out': engine.pool.checkedout(),
|
||||
'overflow': engine.pool.overflow(),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _a_db_check(delay: int):
|
||||
"""
|
||||
Executes a single async request against the database with an artificial delay.
|
||||
|
||||
This is the async version of _db_check that:
|
||||
1. Opens an async database connection from the pool
|
||||
2. Executes a simple query to count users using SQLAlchemy's async API
|
||||
3. Holds the connection for the specified delay using asyncio.sleep
|
||||
4. Logs the results
|
||||
5. Implicitly returns the connection to the pool when the async session closes
|
||||
|
||||
Args:
|
||||
delay: Number of seconds to hold the database connection
|
||||
"""
|
||||
async with a_session_maker() as a_session:
|
||||
stmt = select(func.count(User.id))
|
||||
num_users = await a_session.execute(stmt)
|
||||
await asyncio.sleep(delay)
|
||||
logger.info(f'a_num_users:{num_users.scalar_one()}')
|
||||
@@ -1,4 +1,5 @@
|
||||
import re
|
||||
from typing import cast
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, status
|
||||
from fastapi.responses import JSONResponse, RedirectResponse
|
||||
@@ -67,7 +68,7 @@ async def update_email(
|
||||
user_id=user_id, email=email, email_verified=False
|
||||
)
|
||||
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
await user_auth.refresh() # refresh so access token has updated email
|
||||
user_auth.email = email
|
||||
user_auth.email_verified = False
|
||||
@@ -76,13 +77,18 @@ async def update_email(
|
||||
)
|
||||
|
||||
# need to set auth cookie to the new tokens
|
||||
if user_auth.access_token is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='Access token not found',
|
||||
)
|
||||
set_response_cookie(
|
||||
request=request,
|
||||
response=response,
|
||||
keycloak_access_token=user_auth.access_token.get_secret_value(),
|
||||
keycloak_refresh_token=user_auth.refresh_token.get_secret_value(),
|
||||
secure=False if request.url.hostname == 'localhost' else True,
|
||||
accepted_tos=user_auth.accepted_tos,
|
||||
accepted_tos=user_auth.accepted_tos or False,
|
||||
)
|
||||
|
||||
await verify_email(request=request, user_id=user_id)
|
||||
@@ -146,7 +152,7 @@ async def resend_email_verification(
|
||||
|
||||
@api_router.get('/verified')
|
||||
async def verified_email(request: Request):
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
await user_auth.refresh() # refresh so access token has updated email
|
||||
user_auth.email_verified = True
|
||||
await UserStore.update_user_email(user_id=user_auth.user_id, email_verified=True)
|
||||
@@ -155,13 +161,17 @@ async def verified_email(request: Request):
|
||||
response = RedirectResponse(redirect_uri, status_code=302)
|
||||
|
||||
# need to set auth cookie to the new tokens
|
||||
if user_auth.access_token is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail='Access token not found'
|
||||
)
|
||||
set_response_cookie(
|
||||
request=request,
|
||||
response=response,
|
||||
keycloak_access_token=user_auth.access_token.get_secret_value(),
|
||||
keycloak_refresh_token=user_auth.refresh_token.get_secret_value(),
|
||||
secure=False if request.url.hostname == 'localhost' else True,
|
||||
accepted_tos=user_auth.accepted_tos,
|
||||
accepted_tos=user_auth.accepted_tos or False,
|
||||
)
|
||||
|
||||
logger.info(f'Email {user_auth.email} verified.')
|
||||
|
||||
@@ -93,6 +93,16 @@ async def _process_batch_operations_background(
|
||||
)
|
||||
continue # Skip this operation but continue with others
|
||||
|
||||
if user_id is None:
|
||||
logger.error(
|
||||
'user_id_not_set_in_batch_webhook',
|
||||
extra={
|
||||
'conversation_id': conversation_id,
|
||||
'path': batch_op.path,
|
||||
},
|
||||
)
|
||||
continue
|
||||
|
||||
if subpath == 'agent_state.pkl':
|
||||
update_agent_state(user_id, conversation_id, batch_op.get_content())
|
||||
continue
|
||||
@@ -119,10 +129,6 @@ async def _process_batch_operations_background(
|
||||
# No action required
|
||||
continue
|
||||
|
||||
if subpath == 'exp_config.json':
|
||||
# No action required
|
||||
continue
|
||||
|
||||
# Log unhandled paths for future implementation
|
||||
logger.warning(
|
||||
'unknown_path_in_batch_webhook',
|
||||
|
||||
@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.future import select
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.feedback import ConversationFeedback
|
||||
from storage.stored_conversation_metadata_saas import StoredConversationMetadataSaas
|
||||
|
||||
@@ -11,7 +11,6 @@ from openhands.events.event_store import EventStore
|
||||
from openhands.server.dependencies import get_dependencies
|
||||
from openhands.server.shared import file_store
|
||||
from openhands.server.user_auth import get_user_id
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
|
||||
# We use the get_dependencies method here to signal to the OpenAPI docs that this endpoint
|
||||
# is protected. The actual protection is provided by SetAuthCookieMiddleware
|
||||
@@ -37,23 +36,19 @@ async def get_event_ids(conversation_id: str, user_id: str) -> List[int]:
|
||||
"""
|
||||
|
||||
# Verify the conversation belongs to the user
|
||||
def _verify_conversation():
|
||||
with session_maker() as session:
|
||||
metadata = (
|
||||
session.query(StoredConversationMetadataSaas)
|
||||
.filter(
|
||||
StoredConversationMetadataSaas.conversation_id == conversation_id,
|
||||
StoredConversationMetadataSaas.user_id == user_id,
|
||||
)
|
||||
.first()
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadataSaas).where(
|
||||
StoredConversationMetadataSaas.conversation_id == conversation_id,
|
||||
StoredConversationMetadataSaas.user_id == user_id,
|
||||
)
|
||||
)
|
||||
metadata = result.scalars().first()
|
||||
if not metadata:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f'Conversation {conversation_id} not found',
|
||||
)
|
||||
if not metadata:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f'Conversation {conversation_id} not found',
|
||||
)
|
||||
|
||||
await call_sync_from_async(_verify_conversation)
|
||||
|
||||
# Create an event store to access the events directly
|
||||
# This works even when the conversation is not running
|
||||
@@ -103,12 +98,9 @@ async def submit_conversation_feedback(feedback: FeedbackRequest):
|
||||
)
|
||||
|
||||
# Add to database
|
||||
def _save_feedback():
|
||||
with session_maker() as session:
|
||||
session.add(new_feedback)
|
||||
session.commit()
|
||||
|
||||
await call_sync_from_async(_save_feedback)
|
||||
async with a_session_maker() as session:
|
||||
session.add(new_feedback)
|
||||
await session.commit()
|
||||
|
||||
return {'status': 'success', 'message': 'Feedback submitted successfully'}
|
||||
|
||||
@@ -127,30 +119,27 @@ async def get_batch_feedback(conversation_id: str, user_id: str = Depends(get_us
|
||||
return {}
|
||||
|
||||
# Query for existing feedback for all events
|
||||
def _check_feedback():
|
||||
with session_maker() as session:
|
||||
result = session.execute(
|
||||
select(ConversationFeedback).where(
|
||||
ConversationFeedback.conversation_id == conversation_id,
|
||||
ConversationFeedback.event_id.in_(event_ids),
|
||||
)
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(ConversationFeedback).where(
|
||||
ConversationFeedback.conversation_id == conversation_id,
|
||||
ConversationFeedback.event_id.in_(event_ids),
|
||||
)
|
||||
)
|
||||
|
||||
# Create a mapping of event_id to feedback
|
||||
feedback_map = {
|
||||
feedback.event_id: {
|
||||
'exists': True,
|
||||
'rating': feedback.rating,
|
||||
'reason': feedback.reason,
|
||||
}
|
||||
for feedback in result.scalars()
|
||||
# Create a mapping of event_id to feedback
|
||||
feedback_map = {
|
||||
feedback.event_id: {
|
||||
'exists': True,
|
||||
'rating': feedback.rating,
|
||||
'reason': feedback.reason,
|
||||
}
|
||||
for feedback in result.scalars()
|
||||
}
|
||||
|
||||
# Build response including all events
|
||||
response = {}
|
||||
for event_id in event_ids:
|
||||
response[str(event_id)] = feedback_map.get(event_id, {'exists': False})
|
||||
# Build response including all events
|
||||
response = {}
|
||||
for event_id in event_ids:
|
||||
response[str(event_id)] = feedback_map.get(event_id, {'exists': False})
|
||||
|
||||
return response
|
||||
|
||||
return await call_sync_from_async(_check_feedback)
|
||||
return response
|
||||
|
||||
@@ -13,7 +13,7 @@ from integrations.gitlab.webhook_installation import (
|
||||
)
|
||||
from integrations.models import Message, SourceType
|
||||
from integrations.types import GitLabResourceType
|
||||
from integrations.utils import GITLAB_WEBHOOK_URL
|
||||
from integrations.utils import GITLAB_WEBHOOK_URL, IS_LOCAL_DEPLOYMENT
|
||||
from pydantic import BaseModel
|
||||
from server.auth.token_manager import TokenManager
|
||||
from storage.gitlab_webhook import GitlabWebhook
|
||||
@@ -68,11 +68,14 @@ async def verify_gitlab_signature(
|
||||
if not header_webhook_secret or not webhook_uuid or not user_id:
|
||||
raise HTTPException(status_code=403, detail='Required payload headers missing!')
|
||||
|
||||
webhook_secret = await webhook_store.get_webhook_secret(
|
||||
webhook_uuid=webhook_uuid, user_id=user_id
|
||||
)
|
||||
if IS_LOCAL_DEPLOYMENT:
|
||||
webhook_secret: str | None = 'localdeploymentwebhooktesttoken'
|
||||
else:
|
||||
webhook_secret = await webhook_store.get_webhook_secret(
|
||||
webhook_uuid=webhook_uuid, user_id=user_id
|
||||
)
|
||||
|
||||
if header_webhook_secret != webhook_secret:
|
||||
if not webhook_secret or header_webhook_secret != webhook_secret:
|
||||
raise HTTPException(status_code=403, detail="Request signatures didn't match!")
|
||||
|
||||
|
||||
@@ -329,6 +332,12 @@ async def reinstall_gitlab_webhook(
|
||||
resource_type, resource_id
|
||||
)
|
||||
|
||||
if not webhook:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to create or fetch webhook record',
|
||||
)
|
||||
|
||||
# Verify conditions and install webhook
|
||||
try:
|
||||
await verify_webhook_conditions(
|
||||
|
||||
@@ -4,6 +4,7 @@ import json
|
||||
import os
|
||||
import re
|
||||
import uuid
|
||||
from typing import cast
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
import requests
|
||||
@@ -308,10 +309,11 @@ async def jira_events(
|
||||
logger.info(f'Processing new Jira webhook event: {signature}')
|
||||
redis_client.setex(key, 300, '1')
|
||||
|
||||
# Process the webhook
|
||||
# Process the webhook in background after returning response.
|
||||
# Note: For async functions, BackgroundTasks runs them in the same event loop
|
||||
# (not a thread pool), so asyncpg connections work correctly.
|
||||
message_payload = {'payload': payload}
|
||||
message = Message(source=SourceType.JIRA, message=message_payload)
|
||||
|
||||
background_tasks.add_task(jira_manager.receive_message, message)
|
||||
|
||||
return JSONResponse({'success': True})
|
||||
@@ -331,7 +333,7 @@ async def jira_events(
|
||||
async def create_jira_workspace(request: Request, workspace_data: JiraWorkspaceCreate):
|
||||
"""Create a new Jira workspace registration."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
@@ -395,7 +397,7 @@ async def create_jira_workspace(request: Request, workspace_data: JiraWorkspaceC
|
||||
async def create_workspace_link(request: Request, link_data: JiraLinkCreate):
|
||||
"""Register a user mapping to a Jira workspace."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
@@ -596,9 +598,15 @@ async def jira_callback(request: Request, code: str, state: str):
|
||||
async def get_current_workspace_link(request: Request):
|
||||
"""Get current user's Jira integration details."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
user = await jira_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
)
|
||||
@@ -649,9 +657,15 @@ async def get_current_workspace_link(request: Request):
|
||||
async def unlink_workspace(request: Request):
|
||||
"""Unlink user from Jira integration by setting status to inactive."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
user = await jira_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
)
|
||||
@@ -705,7 +719,7 @@ async def validate_workspace_integration(request: Request, workspace_name: str):
|
||||
detail='workspace_name can only contain alphanumeric characters, hyphens, underscores, and periods',
|
||||
)
|
||||
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_email = await user_auth.get_user_email()
|
||||
if not user_email:
|
||||
raise HTTPException(
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
import os
|
||||
import re
|
||||
import uuid
|
||||
from typing import cast
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
import requests
|
||||
@@ -276,10 +277,16 @@ async def create_jira_dc_workspace(
|
||||
):
|
||||
"""Create a new Jira DC workspace registration."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
if JIRA_DC_ENABLE_OAUTH:
|
||||
# OAuth flow enabled - create session and redirect to OAuth
|
||||
state = str(uuid.uuid4())
|
||||
@@ -399,10 +406,16 @@ async def create_jira_dc_workspace(
|
||||
async def create_workspace_link(request: Request, link_data: JiraDcLinkCreate):
|
||||
"""Register a user mapping to a Jira DC workspace."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
target_workspace = link_data.workspace_name
|
||||
|
||||
if JIRA_DC_ENABLE_OAUTH:
|
||||
@@ -589,9 +602,15 @@ async def jira_dc_callback(request: Request, code: str, state: str):
|
||||
async def get_current_workspace_link(request: Request):
|
||||
"""Get current user's Jira DC integration details."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
user = await jira_dc_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
)
|
||||
@@ -641,9 +660,15 @@ async def get_current_workspace_link(request: Request):
|
||||
async def unlink_workspace(request: Request):
|
||||
"""Unlink user from Jira DC integration by setting status to inactive."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User ID not found',
|
||||
)
|
||||
|
||||
user = await jira_dc_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
import os
|
||||
import re
|
||||
import uuid
|
||||
from typing import cast
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, status
|
||||
@@ -269,7 +270,7 @@ async def create_linear_workspace(
|
||||
):
|
||||
"""Create a new Linear workspace registration."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
@@ -331,7 +332,7 @@ async def create_linear_workspace(
|
||||
async def create_workspace_link(request: Request, link_data: LinearLinkCreate):
|
||||
"""Register a user mapping to a Linear workspace."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
user_email = await user_auth.get_user_email()
|
||||
|
||||
@@ -520,8 +521,13 @@ async def linear_callback(request: Request, code: str, state: str):
|
||||
async def get_current_workspace_link(request: Request):
|
||||
"""Get current user's Linear integration details."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User not authenticated',
|
||||
)
|
||||
|
||||
user = await linear_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
@@ -573,8 +579,13 @@ async def get_current_workspace_link(request: Request):
|
||||
async def unlink_workspace(request: Request):
|
||||
"""Unlink user from Linear integration by setting status to inactive."""
|
||||
try:
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_id = await user_auth.get_user_id()
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail='User not authenticated',
|
||||
)
|
||||
|
||||
user = await linear_manager.integration_store.get_user_by_active_workspace(
|
||||
user_id
|
||||
@@ -629,7 +640,7 @@ async def validate_workspace_integration(request: Request, workspace_name: str):
|
||||
detail='workspace_name can only contain alphanumeric characters, hyphens, underscores, and periods',
|
||||
)
|
||||
|
||||
user_auth: SaasUserAuth = await get_user_auth(request)
|
||||
user_auth = cast(SaasUserAuth, await get_user_auth(request))
|
||||
user_email = await user_auth.get_user_email()
|
||||
if not user_email:
|
||||
raise HTTPException(
|
||||
|
||||
@@ -31,7 +31,8 @@ from server.logger import logger
|
||||
from slack_sdk.oauth import AuthorizeUrlGenerator
|
||||
from slack_sdk.signature import SignatureVerifier
|
||||
from slack_sdk.web.async_client import AsyncWebClient
|
||||
from storage.database import session_maker
|
||||
from sqlalchemy import delete
|
||||
from storage.database import a_session_maker
|
||||
from storage.slack_team_store import SlackTeamStore
|
||||
from storage.slack_user import SlackUser
|
||||
from storage.user_store import UserStore
|
||||
@@ -170,7 +171,7 @@ async def keycloak_callback(
|
||||
state, config.jwt_secret.get_secret_value(), algorithms=['HS256']
|
||||
)
|
||||
slack_user_id = payload['slack_user_id']
|
||||
bot_access_token = payload['bot_access_token']
|
||||
bot_access_token: str | None = payload['bot_access_token']
|
||||
team_id = payload['team_id']
|
||||
|
||||
# Retrieve the keycloak_user_id
|
||||
@@ -195,8 +196,8 @@ async def keycloak_callback(
|
||||
)
|
||||
|
||||
user_info = await token_manager.get_user_info(keycloak_access_token)
|
||||
keycloak_user_id = user_info['sub']
|
||||
user = await UserStore.get_user_by_id_async(keycloak_user_id)
|
||||
keycloak_user_id = user_info.sub
|
||||
user = await UserStore.get_user_by_id(keycloak_user_id)
|
||||
if not user:
|
||||
return _html_response(
|
||||
title='Failed to authenticate.',
|
||||
@@ -207,7 +208,7 @@ async def keycloak_callback(
|
||||
# These tokens are offline access tokens - store them!
|
||||
await token_manager.store_offline_token(keycloak_user_id, keycloak_refresh_token)
|
||||
|
||||
idp: str = user_info.get('identity_provider', ProviderType.GITHUB)
|
||||
idp: str = user_info.identity_provider or ProviderType.GITHUB.value
|
||||
idp_type = 'oidc'
|
||||
if ':' in idp:
|
||||
idp, idp_type = idp.rsplit(':', 1)
|
||||
@@ -218,9 +219,9 @@ async def keycloak_callback(
|
||||
|
||||
# Retrieve bot token
|
||||
if team_id and bot_access_token:
|
||||
slack_team_store.create_team(team_id, bot_access_token)
|
||||
await slack_team_store.create_team(team_id, bot_access_token)
|
||||
else:
|
||||
bot_access_token = slack_team_store.get_team_bot_token(team_id)
|
||||
bot_access_token = await slack_team_store.get_team_bot_token(team_id)
|
||||
|
||||
if not bot_access_token:
|
||||
logger.error(
|
||||
@@ -239,15 +240,15 @@ async def keycloak_callback(
|
||||
slack_display_name=slack_display_name,
|
||||
)
|
||||
|
||||
with session_maker(expire_on_commit=False) as session:
|
||||
async with a_session_maker(expire_on_commit=False) as session:
|
||||
# First delete any existing tokens
|
||||
session.query(SlackUser).filter(
|
||||
SlackUser.slack_user_id == slack_user_id
|
||||
).delete()
|
||||
await session.execute(
|
||||
delete(SlackUser).where(SlackUser.slack_user_id == slack_user_id)
|
||||
)
|
||||
|
||||
# Store the token
|
||||
session.add(slack_user)
|
||||
session.commit()
|
||||
await session.commit()
|
||||
|
||||
message = Message(source=SourceType.SLACK, message=payload)
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
|
||||
from fastapi.responses import JSONResponse
|
||||
from pydantic import BaseModel
|
||||
from storage.api_key_store import ApiKeyStore
|
||||
from storage.database import session_maker
|
||||
from storage.device_code_store import DeviceCodeStore
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
@@ -54,7 +53,7 @@ class DeviceTokenErrorResponse(BaseModel):
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
oauth_device_router = APIRouter(prefix='/oauth/device')
|
||||
device_code_store = DeviceCodeStore(session_maker)
|
||||
device_code_store = DeviceCodeStore()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -90,7 +89,7 @@ async def device_authorization(
|
||||
) -> DeviceAuthorizationResponse:
|
||||
"""Start device flow by generating device and user codes."""
|
||||
try:
|
||||
device_code_entry = device_code_store.create_device_code(
|
||||
device_code_entry = await device_code_store.create_device_code(
|
||||
expires_in=DEVICE_CODE_EXPIRES_IN,
|
||||
)
|
||||
|
||||
@@ -125,7 +124,7 @@ async def device_authorization(
|
||||
async def device_token(device_code: str = Form(...)):
|
||||
"""Poll for a token until the user authorizes or the code expires."""
|
||||
try:
|
||||
device_code_entry = device_code_store.get_by_device_code(device_code)
|
||||
device_code_entry = await device_code_store.get_by_device_code(device_code)
|
||||
|
||||
if not device_code_entry:
|
||||
return _oauth_error(
|
||||
@@ -138,7 +137,9 @@ async def device_token(device_code: str = Form(...)):
|
||||
is_too_fast, current_interval = device_code_entry.check_rate_limit()
|
||||
if is_too_fast:
|
||||
# Update poll time and increase interval
|
||||
device_code_store.update_poll_time(device_code, increase_interval=True)
|
||||
await device_code_store.update_poll_time(
|
||||
device_code, increase_interval=True
|
||||
)
|
||||
logger.warning(
|
||||
'Client polling too fast, returning slow_down error',
|
||||
extra={
|
||||
@@ -154,7 +155,7 @@ async def device_token(device_code: str = Form(...)):
|
||||
)
|
||||
|
||||
# Update poll time for successful rate limit check
|
||||
device_code_store.update_poll_time(device_code, increase_interval=False)
|
||||
await device_code_store.update_poll_time(device_code, increase_interval=False)
|
||||
|
||||
if device_code_entry.is_expired():
|
||||
return _oauth_error(
|
||||
@@ -181,7 +182,7 @@ async def device_token(device_code: str = Form(...)):
|
||||
# Retrieve the specific API key for this device using the user_code
|
||||
api_key_store = ApiKeyStore.get_instance()
|
||||
device_key_name = f'{API_KEY_NAME} ({device_code_entry.user_code})'
|
||||
device_api_key = api_key_store.retrieve_api_key_by_name(
|
||||
device_api_key = await api_key_store.retrieve_api_key_by_name(
|
||||
device_code_entry.keycloak_user_id, device_key_name
|
||||
)
|
||||
|
||||
@@ -238,7 +239,7 @@ async def device_verification_authenticated(
|
||||
)
|
||||
|
||||
# Validate device code
|
||||
device_code_entry = device_code_store.get_by_user_code(user_code)
|
||||
device_code_entry = await device_code_store.get_by_user_code(user_code)
|
||||
if not device_code_entry:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -252,7 +253,7 @@ async def device_verification_authenticated(
|
||||
)
|
||||
|
||||
# First, authorize the device code
|
||||
success = device_code_store.authorize_device_code(
|
||||
success = await device_code_store.authorize_device_code(
|
||||
user_code=user_code,
|
||||
user_id=user_id,
|
||||
)
|
||||
@@ -289,7 +290,7 @@ async def device_verification_authenticated(
|
||||
# Clean up: revert the device authorization since API key creation failed
|
||||
# This prevents the device from being in an authorized state without an API key
|
||||
try:
|
||||
device_code_store.deny_device_code(user_code)
|
||||
await device_code_store.deny_device_code(user_code)
|
||||
logger.info(
|
||||
'Reverted device authorization due to API key creation failure',
|
||||
extra={'user_code': user_code, 'user_id': user_id},
|
||||
|
||||
@@ -76,7 +76,7 @@ class InvitationResponse(BaseModel):
|
||||
inviter_email: str | None = None
|
||||
|
||||
@classmethod
|
||||
def from_invitation(
|
||||
async def from_invitation(
|
||||
cls,
|
||||
invitation: OrgInvitation,
|
||||
inviter_email: str | None = None,
|
||||
@@ -94,7 +94,7 @@ class InvitationResponse(BaseModel):
|
||||
if invitation.role:
|
||||
role_name = invitation.role.name
|
||||
elif invitation.role_id:
|
||||
role = RoleStore.get_role_by_id(invitation.role_id)
|
||||
role = await RoleStore.get_role_by_id(invitation.role_id)
|
||||
role_name = role.name if role else ''
|
||||
|
||||
return cls(
|
||||
|
||||
@@ -91,8 +91,11 @@ async def create_invitation(
|
||||
},
|
||||
)
|
||||
|
||||
successful_responses = [
|
||||
await InvitationResponse.from_invitation(inv) for inv in successful
|
||||
]
|
||||
return BatchInvitationResponse(
|
||||
successful=[InvitationResponse.from_invitation(inv) for inv in successful],
|
||||
successful=successful_responses,
|
||||
failed=[
|
||||
InvitationFailure(email=email, error=error) for email, error in failed
|
||||
],
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
from typing import Annotated
|
||||
|
||||
from pydantic import BaseModel, EmailStr, Field, SecretStr, StringConstraints
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
EmailStr,
|
||||
Field,
|
||||
SecretStr,
|
||||
StringConstraints,
|
||||
field_validator,
|
||||
)
|
||||
from storage.org import Org
|
||||
from storage.org_member import OrgMember
|
||||
from storage.role import Role
|
||||
@@ -252,6 +259,115 @@ class OrgUpdate(BaseModel):
|
||||
condenser_max_size: int | None = Field(default=None, ge=20)
|
||||
|
||||
|
||||
class OrgLLMSettingsResponse(BaseModel):
|
||||
"""Response model for organization LLM settings."""
|
||||
|
||||
default_llm_model: str | None = None
|
||||
default_llm_base_url: str | None = None
|
||||
search_api_key: str | None = None # Masked in response
|
||||
agent: str | None = None
|
||||
confirmation_mode: bool | None = None
|
||||
security_analyzer: str | None = None
|
||||
enable_default_condenser: bool = True
|
||||
condenser_max_size: int | None = None
|
||||
default_max_iterations: int | None = None
|
||||
|
||||
@staticmethod
|
||||
def _mask_key(secret: SecretStr | None) -> str | None:
|
||||
"""Mask an API key, showing only last 4 characters."""
|
||||
if secret is None:
|
||||
return None
|
||||
raw = secret.get_secret_value()
|
||||
if not raw:
|
||||
return None
|
||||
if len(raw) <= 4:
|
||||
return '****'
|
||||
return '****' + raw[-4:]
|
||||
|
||||
@classmethod
|
||||
def from_org(cls, org: Org) -> 'OrgLLMSettingsResponse':
|
||||
"""Create response from Org entity."""
|
||||
return cls(
|
||||
default_llm_model=org.default_llm_model,
|
||||
default_llm_base_url=org.default_llm_base_url,
|
||||
search_api_key=cls._mask_key(org.search_api_key),
|
||||
agent=org.agent,
|
||||
confirmation_mode=org.confirmation_mode,
|
||||
security_analyzer=org.security_analyzer,
|
||||
enable_default_condenser=org.enable_default_condenser
|
||||
if org.enable_default_condenser is not None
|
||||
else True,
|
||||
condenser_max_size=org.condenser_max_size,
|
||||
default_max_iterations=org.default_max_iterations,
|
||||
)
|
||||
|
||||
|
||||
class OrgMemberLLMSettings(BaseModel):
|
||||
"""LLM settings to propagate to organization members.
|
||||
|
||||
Field names match OrgMember DB columns.
|
||||
"""
|
||||
|
||||
llm_model: str | None = None
|
||||
llm_base_url: str | None = None
|
||||
max_iterations: int | None = None
|
||||
llm_api_key: str | None = None
|
||||
|
||||
def has_updates(self) -> bool:
|
||||
"""Check if any field is set (not None)."""
|
||||
return any(getattr(self, field) is not None for field in self.model_fields)
|
||||
|
||||
|
||||
class OrgLLMSettingsUpdate(BaseModel):
|
||||
"""Request model for updating organization LLM settings.
|
||||
|
||||
Field names match Org DB columns exactly.
|
||||
"""
|
||||
|
||||
default_llm_model: str | None = None
|
||||
default_llm_base_url: str | None = None
|
||||
search_api_key: str | None = None
|
||||
agent: str | None = None
|
||||
confirmation_mode: bool | None = None
|
||||
security_analyzer: str | None = None
|
||||
enable_default_condenser: bool | None = None
|
||||
condenser_max_size: int | None = Field(default=None, ge=20)
|
||||
default_max_iterations: int | None = Field(default=None, gt=0)
|
||||
llm_api_key: str | None = None
|
||||
|
||||
def has_updates(self) -> bool:
|
||||
"""Check if any field is set (not None)."""
|
||||
return any(getattr(self, field) is not None for field in self.model_fields)
|
||||
|
||||
def apply_to_org(self, org: Org) -> None:
|
||||
"""Apply non-None settings to the organization model.
|
||||
|
||||
Args:
|
||||
org: Organization entity to update in place
|
||||
"""
|
||||
for field_name in self.model_fields:
|
||||
value = getattr(self, field_name)
|
||||
# Skip llm_api_key - it's only for member propagation, not org-level
|
||||
if value is not None and field_name != 'llm_api_key':
|
||||
setattr(org, field_name, value)
|
||||
|
||||
def get_member_updates(self) -> OrgMemberLLMSettings | None:
|
||||
"""Get updates that need to be propagated to org members.
|
||||
|
||||
Returns:
|
||||
OrgMemberLLMSettings with mapped field values, or None if no member updates needed.
|
||||
Maps: default_llm_model → llm_model, default_llm_base_url → llm_base_url,
|
||||
default_max_iterations → max_iterations, llm_api_key → llm_api_key
|
||||
"""
|
||||
member_settings = OrgMemberLLMSettings(
|
||||
llm_model=self.default_llm_model,
|
||||
llm_base_url=self.default_llm_base_url,
|
||||
max_iterations=self.default_max_iterations,
|
||||
llm_api_key=self.llm_api_key,
|
||||
)
|
||||
return member_settings if member_settings.has_updates() else None
|
||||
|
||||
|
||||
class OrgMemberResponse(BaseModel):
|
||||
"""Response model for a single organization member."""
|
||||
|
||||
@@ -327,3 +443,44 @@ class MeResponse(BaseModel):
|
||||
llm_base_url=member.llm_base_url,
|
||||
status=member.status,
|
||||
)
|
||||
|
||||
|
||||
class OrgAppSettingsResponse(BaseModel):
|
||||
"""Response model for organization app settings."""
|
||||
|
||||
enable_proactive_conversation_starters: bool = True
|
||||
enable_solvability_analysis: bool | None = None
|
||||
max_budget_per_task: float | None = None
|
||||
|
||||
@classmethod
|
||||
def from_org(cls, org: Org) -> 'OrgAppSettingsResponse':
|
||||
"""Create an OrgAppSettingsResponse from an Org entity.
|
||||
|
||||
Args:
|
||||
org: The organization entity
|
||||
|
||||
Returns:
|
||||
OrgAppSettingsResponse with app settings
|
||||
"""
|
||||
return cls(
|
||||
enable_proactive_conversation_starters=org.enable_proactive_conversation_starters
|
||||
if org.enable_proactive_conversation_starters is not None
|
||||
else True,
|
||||
enable_solvability_analysis=org.enable_solvability_analysis,
|
||||
max_budget_per_task=org.max_budget_per_task,
|
||||
)
|
||||
|
||||
|
||||
class OrgAppSettingsUpdate(BaseModel):
|
||||
"""Request model for updating organization app settings."""
|
||||
|
||||
enable_proactive_conversation_starters: bool | None = None
|
||||
enable_solvability_analysis: bool | None = None
|
||||
max_budget_per_task: float | None = None
|
||||
|
||||
@field_validator('max_budget_per_task')
|
||||
@classmethod
|
||||
def validate_max_budget_per_task(cls, v: float | None) -> float | None:
|
||||
if v is not None and v <= 0:
|
||||
raise ValueError('max_budget_per_task must be greater than 0')
|
||||
return v
|
||||
|
||||
@@ -15,9 +15,13 @@ from server.routes.org_models import (
|
||||
LiteLLMIntegrationError,
|
||||
MemberUpdateError,
|
||||
MeResponse,
|
||||
OrgAppSettingsResponse,
|
||||
OrgAppSettingsUpdate,
|
||||
OrgAuthorizationError,
|
||||
OrgCreate,
|
||||
OrgDatabaseError,
|
||||
OrgLLMSettingsResponse,
|
||||
OrgLLMSettingsUpdate,
|
||||
OrgMemberNotFoundError,
|
||||
OrgMemberPage,
|
||||
OrgMemberResponse,
|
||||
@@ -30,6 +34,14 @@ from server.routes.org_models import (
|
||||
OrphanedUserError,
|
||||
RoleNotFoundError,
|
||||
)
|
||||
from server.services.org_app_settings_service import (
|
||||
OrgAppSettingsService,
|
||||
OrgAppSettingsServiceInjector,
|
||||
)
|
||||
from server.services.org_llm_settings_service import (
|
||||
OrgLLMSettingsService,
|
||||
OrgLLMSettingsServiceInjector,
|
||||
)
|
||||
from server.services.org_member_service import OrgMemberService
|
||||
from storage.org_service import OrgService
|
||||
from storage.user_store import UserStore
|
||||
@@ -40,6 +52,13 @@ from openhands.server.user_auth import get_user_id
|
||||
# Initialize API router
|
||||
org_router = APIRouter(prefix='/api/organizations', tags=['Orgs'])
|
||||
|
||||
# Create injector instance and dependency for LLM settings
|
||||
_org_llm_settings_injector = OrgLLMSettingsServiceInjector()
|
||||
org_llm_settings_service_dependency = Depends(_org_llm_settings_injector.depends)
|
||||
# Create injector instance and dependency at module level
|
||||
_org_app_settings_injector = OrgAppSettingsServiceInjector()
|
||||
org_app_settings_service_dependency = Depends(_org_app_settings_injector.depends)
|
||||
|
||||
|
||||
@org_router.get('', response_model=OrgPage)
|
||||
async def list_user_orgs(
|
||||
@@ -80,13 +99,13 @@ async def list_user_orgs(
|
||||
|
||||
try:
|
||||
# Fetch user to get current_org_id
|
||||
user = await UserStore.get_user_by_id_async(user_id)
|
||||
user = await UserStore.get_user_by_id(user_id)
|
||||
current_org_id = (
|
||||
str(user.current_org_id) if user and user.current_org_id else None
|
||||
)
|
||||
|
||||
# Fetch organizations from service layer
|
||||
orgs, next_page_id = OrgService.get_user_orgs_paginated(
|
||||
orgs, next_page_id = await OrgService.get_user_orgs_paginated(
|
||||
user_id=user_id,
|
||||
page_id=page_id,
|
||||
limit=limit,
|
||||
@@ -201,6 +220,195 @@ async def create_org(
|
||||
)
|
||||
|
||||
|
||||
@org_router.get(
|
||||
'/llm',
|
||||
response_model=OrgLLMSettingsResponse,
|
||||
dependencies=[Depends(require_permission(Permission.VIEW_LLM_SETTINGS))],
|
||||
)
|
||||
async def get_org_llm_settings(
|
||||
service: OrgLLMSettingsService = org_llm_settings_service_dependency,
|
||||
) -> OrgLLMSettingsResponse:
|
||||
"""Get LLM settings for the user's current organization.
|
||||
|
||||
This endpoint retrieves the LLM configuration settings for the
|
||||
authenticated user's current organization. All organization members
|
||||
can view these settings.
|
||||
|
||||
Args:
|
||||
service: OrgLLMSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
OrgLLMSettingsResponse: The organization's LLM settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated
|
||||
HTTPException: 403 if not a member of any organization
|
||||
HTTPException: 404 if current organization not found
|
||||
HTTPException: 500 if retrieval fails
|
||||
"""
|
||||
try:
|
||||
return await service.get_org_llm_settings()
|
||||
except OrgNotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=str(e),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Error getting organization LLM settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to retrieve LLM settings',
|
||||
)
|
||||
|
||||
|
||||
@org_router.post(
|
||||
'/llm',
|
||||
response_model=OrgLLMSettingsResponse,
|
||||
dependencies=[Depends(require_permission(Permission.EDIT_LLM_SETTINGS))],
|
||||
)
|
||||
async def update_org_llm_settings(
|
||||
settings: OrgLLMSettingsUpdate,
|
||||
service: OrgLLMSettingsService = org_llm_settings_service_dependency,
|
||||
) -> OrgLLMSettingsResponse:
|
||||
"""Update LLM settings for the user's current organization.
|
||||
|
||||
This endpoint updates the LLM configuration settings for the
|
||||
authenticated user's current organization. Only admins and owners
|
||||
can update these settings.
|
||||
|
||||
Args:
|
||||
settings: The LLM settings to update (only non-None fields are updated)
|
||||
service: OrgLLMSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
OrgLLMSettingsResponse: The updated organization's LLM settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated
|
||||
HTTPException: 403 if user lacks EDIT_LLM_SETTINGS permission
|
||||
HTTPException: 404 if current organization not found
|
||||
HTTPException: 500 if update fails
|
||||
"""
|
||||
try:
|
||||
return await service.update_org_llm_settings(settings)
|
||||
except OrgNotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=str(e),
|
||||
)
|
||||
except OrgDatabaseError as e:
|
||||
logger.error(
|
||||
'Database error updating LLM settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to update LLM settings',
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Error updating organization LLM settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to update LLM settings',
|
||||
)
|
||||
|
||||
|
||||
@org_router.get(
|
||||
'/app',
|
||||
response_model=OrgAppSettingsResponse,
|
||||
dependencies=[Depends(require_permission(Permission.MANAGE_APPLICATION_SETTINGS))],
|
||||
)
|
||||
async def get_org_app_settings(
|
||||
service: OrgAppSettingsService = org_app_settings_service_dependency,
|
||||
) -> OrgAppSettingsResponse:
|
||||
"""Get organization app settings for the user's current organization.
|
||||
|
||||
This endpoint retrieves application settings for the authenticated user's
|
||||
current organization. Access requires the MANAGE_APPLICATION_SETTINGS permission,
|
||||
which is granted to all organization members (member, admin, and owner roles).
|
||||
|
||||
Args:
|
||||
service: OrgAppSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
OrgAppSettingsResponse: The organization app settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if user is not authenticated
|
||||
HTTPException: 403 if user lacks MANAGE_APPLICATION_SETTINGS permission
|
||||
HTTPException: 404 if current organization not found
|
||||
"""
|
||||
try:
|
||||
return await service.get_org_app_settings()
|
||||
except OrgNotFoundError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail='Current organization not found',
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Unexpected error retrieving organization app settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='An unexpected error occurred',
|
||||
)
|
||||
|
||||
|
||||
@org_router.post(
|
||||
'/app',
|
||||
response_model=OrgAppSettingsResponse,
|
||||
dependencies=[Depends(require_permission(Permission.MANAGE_APPLICATION_SETTINGS))],
|
||||
)
|
||||
async def update_org_app_settings(
|
||||
update_data: OrgAppSettingsUpdate,
|
||||
service: OrgAppSettingsService = org_app_settings_service_dependency,
|
||||
) -> OrgAppSettingsResponse:
|
||||
"""Update organization app settings for the user's current organization.
|
||||
|
||||
This endpoint updates application settings for the authenticated user's
|
||||
current organization. Access requires the MANAGE_APPLICATION_SETTINGS permission,
|
||||
which is granted to all organization members (member, admin, and owner roles).
|
||||
|
||||
Args:
|
||||
update_data: App settings update data
|
||||
service: OrgAppSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
OrgAppSettingsResponse: The updated organization app settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if user is not authenticated
|
||||
HTTPException: 403 if user lacks MANAGE_APPLICATION_SETTINGS permission
|
||||
HTTPException: 404 if current organization not found
|
||||
HTTPException: 422 if validation errors occur (handled by FastAPI)
|
||||
HTTPException: 500 if update fails
|
||||
"""
|
||||
try:
|
||||
return await service.update_org_app_settings(update_data)
|
||||
except OrgNotFoundError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail='Current organization not found',
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Unexpected error updating organization app settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='An unexpected error occurred',
|
||||
)
|
||||
|
||||
|
||||
@org_router.get('/{org_id}', response_model=OrgResponse, status_code=status.HTTP_200_OK)
|
||||
async def get_org(
|
||||
org_id: UUID,
|
||||
@@ -289,7 +497,7 @@ async def get_me(
|
||||
|
||||
try:
|
||||
user_uuid = UUID(user_id)
|
||||
return OrgMemberService.get_me(org_id, user_uuid)
|
||||
return await OrgMemberService.get_me(org_id, user_uuid)
|
||||
|
||||
except OrgMemberNotFoundError:
|
||||
raise HTTPException(
|
||||
@@ -573,7 +781,7 @@ async def get_org_members(
|
||||
)
|
||||
|
||||
if not success:
|
||||
error_map = {
|
||||
error_map: dict[str | None, tuple[int, str]] = {
|
||||
'not_a_member': (
|
||||
status.HTTP_403_FORBIDDEN,
|
||||
'You are not a member of this organization',
|
||||
@@ -582,9 +790,14 @@ async def get_org_members(
|
||||
status.HTTP_400_BAD_REQUEST,
|
||||
'Invalid page_id format',
|
||||
),
|
||||
None: (
|
||||
status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
'An error occurred',
|
||||
),
|
||||
}
|
||||
status_code, detail = error_map.get(
|
||||
error_code, (status.HTTP_500_INTERNAL_SERVER_ERROR, 'An error occurred')
|
||||
error_code,
|
||||
(status.HTTP_500_INTERNAL_SERVER_ERROR, 'An error occurred'),
|
||||
)
|
||||
raise HTTPException(status_code=status_code, detail=detail)
|
||||
|
||||
@@ -692,7 +905,7 @@ async def remove_org_member(
|
||||
)
|
||||
|
||||
if not success:
|
||||
error_map = {
|
||||
error_map: dict[str | None, tuple[int, str]] = {
|
||||
'not_a_member': (
|
||||
status.HTTP_403_FORBIDDEN,
|
||||
'You are not a member of this organization',
|
||||
@@ -717,9 +930,14 @@ async def remove_org_member(
|
||||
status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
'Failed to remove member',
|
||||
),
|
||||
None: (
|
||||
status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
'An error occurred',
|
||||
),
|
||||
}
|
||||
status_code, detail = error_map.get(
|
||||
error, (status.HTTP_500_INTERNAL_SERVER_ERROR, 'An error occurred')
|
||||
error,
|
||||
(status.HTTP_500_INTERNAL_SERVER_ERROR, 'An error occurred'),
|
||||
)
|
||||
raise HTTPException(status_code=status_code, detail=detail)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from fastapi import APIRouter, HTTPException, status
|
||||
from sqlalchemy.sql import text
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.redis import create_redis_client
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
@@ -9,11 +9,11 @@ readiness_router = APIRouter()
|
||||
|
||||
|
||||
@readiness_router.get('/ready')
|
||||
def is_ready():
|
||||
async def is_ready():
|
||||
# Check database connection
|
||||
try:
|
||||
with session_maker() as session:
|
||||
session.execute(text('SELECT 1'))
|
||||
async with a_session_maker() as session:
|
||||
await session.execute(text('SELECT 1'))
|
||||
except Exception as e:
|
||||
logger.error(f'Database check failed: {str(e)}')
|
||||
raise HTTPException(
|
||||
|
||||
@@ -111,30 +111,26 @@ async def saas_get_user(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
user_info = await token_manager.get_user_info(access_token.get_secret_value())
|
||||
if not user_info:
|
||||
return JSONResponse(
|
||||
content='Failed to retrieve user_info.',
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
# Prefer email from DB; fall back to Keycloak if not yet persisted
|
||||
email = user_info.get('email') if user_info else None
|
||||
sub = user_info.get('sub') if user_info else ''
|
||||
email = user_info.email
|
||||
sub = user_info.sub
|
||||
if sub:
|
||||
db_user = await UserStore.get_user_by_id_async(sub)
|
||||
db_user = await UserStore.get_user_by_id(sub)
|
||||
if db_user and db_user.email is not None:
|
||||
email = db_user.email
|
||||
|
||||
user_info_dict = user_info.model_dump(exclude_none=True)
|
||||
retval = await _check_idp(
|
||||
access_token=access_token,
|
||||
default_value=User(
|
||||
id=sub,
|
||||
login=(user_info.get('preferred_username') if user_info else '') or '',
|
||||
login=user_info.preferred_username or '',
|
||||
avatar_url='',
|
||||
email=email,
|
||||
name=resolve_display_name(user_info) if user_info else None,
|
||||
company=user_info.get('company') if user_info else None,
|
||||
name=resolve_display_name(user_info_dict),
|
||||
company=user_info.company,
|
||||
),
|
||||
user_info=user_info,
|
||||
user_info=user_info_dict,
|
||||
)
|
||||
if retval is not None:
|
||||
return retval
|
||||
@@ -364,16 +360,11 @@ async def _check_idp(
|
||||
content='User is not authenticated.',
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
user_info = (
|
||||
user_info
|
||||
if user_info
|
||||
else await token_manager.get_user_info(access_token.get_secret_value())
|
||||
)
|
||||
if not user_info:
|
||||
return JSONResponse(
|
||||
content='Failed to retrieve user_info.',
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
if user_info is None:
|
||||
user_info_model = await token_manager.get_user_info(
|
||||
access_token.get_secret_value()
|
||||
)
|
||||
user_info = user_info_model.model_dump(exclude_none=True)
|
||||
idp: str | None = user_info.get('identity_provider')
|
||||
if not idp:
|
||||
return JSONResponse(
|
||||
@@ -388,5 +379,4 @@ async def _check_idp(
|
||||
access_token.get_secret_value(), ProviderType(idp)
|
||||
):
|
||||
return default_value
|
||||
|
||||
return None
|
||||
|
||||
115
enterprise/server/routes/user_app_settings.py
Normal file
115
enterprise/server/routes/user_app_settings.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""Routes for user app settings API.
|
||||
|
||||
Provides endpoints for managing user-level app preferences:
|
||||
- GET /api/users/app - Retrieve current user's app settings
|
||||
- POST /api/users/app - Update current user's app settings
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from server.routes.user_app_settings_models import (
|
||||
UserAppSettingsResponse,
|
||||
UserAppSettingsUpdate,
|
||||
UserNotFoundError,
|
||||
)
|
||||
from server.services.user_app_settings_service import (
|
||||
UserAppSettingsService,
|
||||
UserAppSettingsServiceInjector,
|
||||
)
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
user_app_settings_router = APIRouter(prefix='/api/users')
|
||||
|
||||
# Create injector instance and dependency at module level
|
||||
_injector = UserAppSettingsServiceInjector()
|
||||
user_app_settings_service_dependency = Depends(_injector.depends)
|
||||
|
||||
|
||||
@user_app_settings_router.get('/app', response_model=UserAppSettingsResponse)
|
||||
async def get_user_app_settings(
|
||||
service: UserAppSettingsService = user_app_settings_service_dependency,
|
||||
) -> UserAppSettingsResponse:
|
||||
"""Get the current user's app settings.
|
||||
|
||||
Returns language, analytics consent, sound notifications, and git config.
|
||||
|
||||
Args:
|
||||
service: UserAppSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
UserAppSettingsResponse: The user's app settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if user is not authenticated
|
||||
HTTPException: 404 if user not found
|
||||
HTTPException: 500 if retrieval fails
|
||||
"""
|
||||
try:
|
||||
return await service.get_user_app_settings()
|
||||
|
||||
except ValueError as e:
|
||||
# User not authenticated
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=str(e),
|
||||
)
|
||||
except UserNotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=str(e),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Unexpected error retrieving user app settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to retrieve user app settings',
|
||||
)
|
||||
|
||||
|
||||
@user_app_settings_router.post('/app', response_model=UserAppSettingsResponse)
|
||||
async def update_user_app_settings(
|
||||
update_data: UserAppSettingsUpdate,
|
||||
service: UserAppSettingsService = user_app_settings_service_dependency,
|
||||
) -> UserAppSettingsResponse:
|
||||
"""Update the current user's app settings (partial update).
|
||||
|
||||
Only provided fields will be updated. Pass null to clear a field.
|
||||
|
||||
Args:
|
||||
update_data: Fields to update
|
||||
service: UserAppSettingsService (injected by dependency)
|
||||
|
||||
Returns:
|
||||
UserAppSettingsResponse: The updated user's app settings
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if user is not authenticated
|
||||
HTTPException: 404 if user not found
|
||||
HTTPException: 500 if update fails
|
||||
"""
|
||||
try:
|
||||
return await service.update_user_app_settings(update_data)
|
||||
|
||||
except ValueError as e:
|
||||
# User not authenticated
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=str(e),
|
||||
)
|
||||
except UserNotFoundError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=str(e),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
'Failed to update user app settings',
|
||||
extra={'error': str(e)},
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to update user app settings',
|
||||
)
|
||||
57
enterprise/server/routes/user_app_settings_models.py
Normal file
57
enterprise/server/routes/user_app_settings_models.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""
|
||||
Pydantic models for user app settings API.
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, EmailStr
|
||||
from storage.user import User
|
||||
|
||||
|
||||
class UserAppSettingsError(Exception):
|
||||
"""Base exception for user app settings errors."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class UserNotFoundError(UserAppSettingsError):
|
||||
"""Raised when user is not found."""
|
||||
|
||||
def __init__(self, user_id: str):
|
||||
self.user_id = user_id
|
||||
super().__init__(f'User with id "{user_id}" not found')
|
||||
|
||||
|
||||
class UserAppSettingsUpdateError(UserAppSettingsError):
|
||||
"""Raised when user app settings update fails."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class UserAppSettingsResponse(BaseModel):
|
||||
"""Response model for user app settings."""
|
||||
|
||||
language: str | None = None
|
||||
user_consents_to_analytics: bool | None = None
|
||||
enable_sound_notifications: bool | None = None
|
||||
git_user_name: str | None = None
|
||||
git_user_email: EmailStr | None = None
|
||||
|
||||
@classmethod
|
||||
def from_user(cls, user: User) -> 'UserAppSettingsResponse':
|
||||
"""Create response from User entity."""
|
||||
return cls(
|
||||
language=user.language,
|
||||
user_consents_to_analytics=user.user_consents_to_analytics,
|
||||
enable_sound_notifications=user.enable_sound_notifications,
|
||||
git_user_name=user.git_user_name,
|
||||
git_user_email=user.git_user_email,
|
||||
)
|
||||
|
||||
|
||||
class UserAppSettingsUpdate(BaseModel):
|
||||
"""Request model for updating user app settings (partial update)."""
|
||||
|
||||
language: str | None = None
|
||||
user_consents_to_analytics: bool | None = None
|
||||
enable_sound_notifications: bool | None = None
|
||||
git_user_name: str | None = None
|
||||
git_user_email: EmailStr | None = None
|
||||
@@ -1,184 +0,0 @@
|
||||
"""API routes for managing verified LLM models (admin only)."""
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from pydantic import BaseModel, field_validator
|
||||
from server.email_validation import get_admin_user_id
|
||||
from storage.verified_model_store import VerifiedModelStore
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
api_router = APIRouter(prefix='/api/admin/verified-models', tags=['Verified Models'])
|
||||
|
||||
|
||||
class VerifiedModelCreate(BaseModel):
|
||||
model_name: str
|
||||
provider: str
|
||||
is_enabled: bool = True
|
||||
|
||||
@field_validator('model_name')
|
||||
@classmethod
|
||||
def validate_model_name(cls, v: str) -> str:
|
||||
v = v.strip()
|
||||
if not v or len(v) > 255:
|
||||
raise ValueError('model_name must be 1-255 characters')
|
||||
return v
|
||||
|
||||
@field_validator('provider')
|
||||
@classmethod
|
||||
def validate_provider(cls, v: str) -> str:
|
||||
v = v.strip()
|
||||
if not v or len(v) > 100:
|
||||
raise ValueError('provider must be 1-100 characters')
|
||||
return v
|
||||
|
||||
|
||||
class VerifiedModelUpdate(BaseModel):
|
||||
is_enabled: bool | None = None
|
||||
|
||||
|
||||
class VerifiedModelResponse(BaseModel):
|
||||
id: int
|
||||
model_name: str
|
||||
provider: str
|
||||
is_enabled: bool
|
||||
|
||||
|
||||
class VerifiedModelPage(BaseModel):
|
||||
"""Paginated response model for verified model list."""
|
||||
|
||||
items: list[VerifiedModelResponse]
|
||||
next_page_id: str | None = None
|
||||
|
||||
|
||||
def _to_response(model) -> VerifiedModelResponse:
|
||||
return VerifiedModelResponse(
|
||||
id=model.id,
|
||||
model_name=model.model_name,
|
||||
provider=model.provider,
|
||||
is_enabled=model.is_enabled,
|
||||
)
|
||||
|
||||
|
||||
@api_router.get('', response_model=VerifiedModelPage)
|
||||
async def list_verified_models(
|
||||
provider: str | None = None,
|
||||
page_id: Annotated[
|
||||
str | None,
|
||||
Query(title='Optional next_page_id from the previously returned page'),
|
||||
] = None,
|
||||
limit: Annotated[
|
||||
int, Query(title='The max number of results in the page', gt=0, le=100)
|
||||
] = 100,
|
||||
user_id: str = Depends(get_admin_user_id),
|
||||
):
|
||||
"""List all verified models, optionally filtered by provider."""
|
||||
try:
|
||||
if provider:
|
||||
all_models = VerifiedModelStore.get_models_by_provider(provider)
|
||||
else:
|
||||
all_models = VerifiedModelStore.get_all_models()
|
||||
|
||||
try:
|
||||
offset = int(page_id) if page_id else 0
|
||||
except ValueError:
|
||||
offset = 0
|
||||
page = all_models[offset : offset + limit + 1]
|
||||
has_more = len(page) > limit
|
||||
if has_more:
|
||||
page = page[:limit]
|
||||
|
||||
return VerifiedModelPage(
|
||||
items=[_to_response(m) for m in page],
|
||||
next_page_id=str(offset + limit) if has_more else None,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception('Error listing verified models')
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to list verified models',
|
||||
)
|
||||
|
||||
|
||||
@api_router.post('', response_model=VerifiedModelResponse, status_code=201)
|
||||
async def create_verified_model(
|
||||
data: VerifiedModelCreate,
|
||||
user_id: str = Depends(get_admin_user_id),
|
||||
):
|
||||
"""Create a new verified model."""
|
||||
try:
|
||||
model = VerifiedModelStore.create_model(
|
||||
model_name=data.model_name,
|
||||
provider=data.provider,
|
||||
is_enabled=data.is_enabled,
|
||||
)
|
||||
return _to_response(model)
|
||||
except ValueError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail=str(e),
|
||||
)
|
||||
except Exception:
|
||||
logger.exception('Error creating verified model')
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to create verified model',
|
||||
)
|
||||
|
||||
|
||||
@api_router.put('/{provider}/{model_name:path}', response_model=VerifiedModelResponse)
|
||||
async def update_verified_model(
|
||||
provider: str,
|
||||
model_name: str,
|
||||
data: VerifiedModelUpdate,
|
||||
user_id: str = Depends(get_admin_user_id),
|
||||
):
|
||||
"""Update a verified model by provider and model name."""
|
||||
try:
|
||||
model = VerifiedModelStore.update_model(
|
||||
model_name=model_name,
|
||||
provider=provider,
|
||||
is_enabled=data.is_enabled,
|
||||
)
|
||||
if not model:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f'Model {provider}/{model_name} not found',
|
||||
)
|
||||
return _to_response(model)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
logger.exception(f'Error updating verified model: {provider}/{model_name}')
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to update verified model',
|
||||
)
|
||||
|
||||
|
||||
@api_router.delete('/{provider}/{model_name:path}')
|
||||
async def delete_verified_model(
|
||||
provider: str,
|
||||
model_name: str,
|
||||
user_id: str = Depends(get_admin_user_id),
|
||||
):
|
||||
"""Delete a verified model by provider and model name."""
|
||||
try:
|
||||
success = VerifiedModelStore.delete_model(
|
||||
model_name=model_name, provider=provider
|
||||
)
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f'Model {provider}/{model_name} not found',
|
||||
)
|
||||
return {'message': f'Model {provider}/{model_name} deleted'}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
logger.exception(f'Error deleting verified model: {provider}/{model_name}')
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail='Failed to delete verified model',
|
||||
)
|
||||
@@ -19,9 +19,9 @@ from server.utils.conversation_callback_utils import (
|
||||
process_event,
|
||||
update_conversation_metadata,
|
||||
)
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import select
|
||||
from storage.api_key_store import ApiKeyStore
|
||||
from storage.database import session_maker
|
||||
from storage.database import a_session_maker
|
||||
from storage.stored_conversation_metadata import StoredConversationMetadata
|
||||
from storage.stored_conversation_metadata_saas import StoredConversationMetadataSaas
|
||||
|
||||
@@ -59,7 +59,6 @@ from openhands.storage.locations import (
|
||||
get_conversation_event_filename,
|
||||
get_conversation_events_dir,
|
||||
)
|
||||
from openhands.utils.async_utils import call_sync_from_async
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
from openhands.utils.import_utils import get_impl
|
||||
from openhands.utils.shutdown_listener import should_continue
|
||||
@@ -166,8 +165,8 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
}
|
||||
|
||||
if user_id:
|
||||
user_conversation_ids = await call_sync_from_async(
|
||||
self._get_recent_conversation_ids_for_user, user_id
|
||||
user_conversation_ids = await self._get_recent_conversation_ids_for_user(
|
||||
user_id
|
||||
)
|
||||
conversation_ids = conversation_ids.intersection(user_conversation_ids)
|
||||
|
||||
@@ -392,39 +391,11 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
await self._setup_nested_settings(client, api_url, settings)
|
||||
await self._setup_provider_tokens(client, api_url, settings)
|
||||
await self._setup_custom_secrets(client, api_url, settings.custom_secrets) # type: ignore
|
||||
await self._setup_experiment_config(client, api_url, sid, user_id)
|
||||
await self._create_nested_conversation(
|
||||
client, api_url, sid, user_id, settings, initial_user_msg, replay_json
|
||||
)
|
||||
await self._wait_for_conversation_ready(client, api_url, sid)
|
||||
|
||||
async def _setup_experiment_config(
|
||||
self, client: httpx.AsyncClient, api_url: str, sid: str, user_id: str
|
||||
):
|
||||
# Prevent circular import
|
||||
from openhands.experiments.experiment_manager import (
|
||||
ExperimentConfig,
|
||||
ExperimentManagerImpl,
|
||||
)
|
||||
|
||||
config: OpenHandsConfig = ExperimentManagerImpl.run_config_variant_test(
|
||||
user_id, sid, self.config
|
||||
)
|
||||
|
||||
experiment_config = ExperimentConfig(
|
||||
config={
|
||||
'system_prompt_filename': config.get_agent_config(
|
||||
config.default_agent
|
||||
).system_prompt_filename
|
||||
}
|
||||
)
|
||||
|
||||
response = await client.post(
|
||||
f'{api_url}/api/conversations/{sid}/exp-config',
|
||||
json=experiment_config.model_dump(),
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
async def _setup_nested_settings(
|
||||
self, client: httpx.AsyncClient, api_url: str, settings: Settings
|
||||
) -> None:
|
||||
@@ -643,19 +614,18 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
},
|
||||
)
|
||||
|
||||
def _get_user_id_from_conversation(self, conversation_id: str) -> str:
|
||||
async def _get_user_id_from_conversation(self, conversation_id: str) -> str:
|
||||
"""
|
||||
Get user_id from conversation_id.
|
||||
"""
|
||||
|
||||
with session_maker() as session:
|
||||
conversation_metadata_saas = (
|
||||
session.query(StoredConversationMetadataSaas)
|
||||
.filter(
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadataSaas).where(
|
||||
StoredConversationMetadataSaas.conversation_id == conversation_id
|
||||
)
|
||||
.first()
|
||||
)
|
||||
conversation_metadata_saas = result.scalars().first()
|
||||
|
||||
if not conversation_metadata_saas:
|
||||
raise ValueError(f'No conversation found {conversation_id}')
|
||||
@@ -753,8 +723,8 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
user_id_for_convo = user_id
|
||||
if not user_id_for_convo:
|
||||
try:
|
||||
user_id_for_convo = await call_sync_from_async(
|
||||
self._get_user_id_from_conversation, conversation_id
|
||||
user_id_for_convo = await self._get_user_id_from_conversation(
|
||||
conversation_id
|
||||
)
|
||||
except Exception:
|
||||
continue
|
||||
@@ -995,23 +965,23 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
}
|
||||
return conversation_ids
|
||||
|
||||
def _get_recent_conversation_ids_for_user(self, user_id: str) -> set[str]:
|
||||
with session_maker() as session:
|
||||
async def _get_recent_conversation_ids_for_user(self, user_id: str) -> set[str]:
|
||||
async with a_session_maker() as session:
|
||||
# Only include conversations updated in the past week
|
||||
one_week_ago = datetime.now(UTC) - timedelta(days=7)
|
||||
query = (
|
||||
session.query(StoredConversationMetadata.conversation_id)
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadata.conversation_id)
|
||||
.join(
|
||||
StoredConversationMetadataSaas,
|
||||
StoredConversationMetadata.conversation_id
|
||||
== StoredConversationMetadataSaas.conversation_id,
|
||||
)
|
||||
.filter(
|
||||
.where(
|
||||
StoredConversationMetadataSaas.user_id == user_id,
|
||||
StoredConversationMetadata.last_updated_at >= one_week_ago,
|
||||
)
|
||||
)
|
||||
user_conversation_ids = set(query)
|
||||
user_conversation_ids = set(result.scalars().all())
|
||||
return user_conversation_ids
|
||||
|
||||
async def _get_runtime(self, sid: str) -> dict | None:
|
||||
@@ -1055,14 +1025,13 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
await asyncio.sleep(_POLLING_INTERVAL)
|
||||
agent_loop_infos = await self.get_agent_loop_info()
|
||||
|
||||
with session_maker() as session:
|
||||
for agent_loop_info in agent_loop_infos:
|
||||
if agent_loop_info.status != ConversationStatus.RUNNING:
|
||||
continue
|
||||
try:
|
||||
await self._poll_agent_loop_events(agent_loop_info, session)
|
||||
except Exception as e:
|
||||
logger.exception(f'error_polling_events:{str(e)}')
|
||||
for agent_loop_info in agent_loop_infos:
|
||||
if agent_loop_info.status != ConversationStatus.RUNNING:
|
||||
continue
|
||||
try:
|
||||
await self._poll_agent_loop_events(agent_loop_info)
|
||||
except Exception as e:
|
||||
logger.exception(f'error_polling_events:{str(e)}')
|
||||
except Exception as e:
|
||||
try:
|
||||
asyncio.get_running_loop()
|
||||
@@ -1071,23 +1040,27 @@ class SaasNestedConversationManager(ConversationManager):
|
||||
# Loop has been shut down, exit gracefully
|
||||
return
|
||||
|
||||
async def _poll_agent_loop_events(
|
||||
self, agent_loop_info: AgentLoopInfo, session: orm.Session
|
||||
):
|
||||
async def _poll_agent_loop_events(self, agent_loop_info: AgentLoopInfo):
|
||||
"""This method is typically only run in localhost, where the webhook callbacks from the remote runtime are unavailable"""
|
||||
if agent_loop_info.status != ConversationStatus.RUNNING:
|
||||
return
|
||||
conversation_id = agent_loop_info.conversation_id
|
||||
conversation_metadata = (
|
||||
session.query(StoredConversationMetadata)
|
||||
.filter(StoredConversationMetadata.conversation_id == conversation_id)
|
||||
.first()
|
||||
)
|
||||
conversation_metadata_saas = (
|
||||
session.query(StoredConversationMetadataSaas)
|
||||
.filter(StoredConversationMetadataSaas.conversation_id == conversation_id)
|
||||
.first()
|
||||
)
|
||||
|
||||
async with a_session_maker() as session:
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadata).where(
|
||||
StoredConversationMetadata.conversation_id == conversation_id
|
||||
)
|
||||
)
|
||||
conversation_metadata = result.scalars().first()
|
||||
|
||||
result = await session.execute(
|
||||
select(StoredConversationMetadataSaas).where(
|
||||
StoredConversationMetadataSaas.conversation_id == conversation_id
|
||||
)
|
||||
)
|
||||
conversation_metadata_saas = result.scalars().first()
|
||||
|
||||
if conversation_metadata is None or conversation_metadata_saas is None:
|
||||
# Conversation is running in different server
|
||||
return
|
||||
|
||||
137
enterprise/server/services/org_app_settings_service.py
Normal file
137
enterprise/server/services/org_app_settings_service.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""Service class for managing organization app settings.
|
||||
|
||||
Separates business logic from route handlers.
|
||||
Uses dependency injection for db_session and user_context.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from fastapi import Request
|
||||
from server.routes.org_models import (
|
||||
OrgAppSettingsResponse,
|
||||
OrgAppSettingsUpdate,
|
||||
OrgNotFoundError,
|
||||
)
|
||||
from storage.org_app_settings_store import OrgAppSettingsStore
|
||||
|
||||
from openhands.app_server.errors import AuthError
|
||||
from openhands.app_server.services.injector import Injector, InjectorState
|
||||
from openhands.app_server.user.user_context import UserContext
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
|
||||
|
||||
@dataclass
|
||||
class OrgAppSettingsService:
|
||||
"""Service for organization app settings with injected dependencies."""
|
||||
|
||||
store: OrgAppSettingsStore
|
||||
user_context: UserContext
|
||||
|
||||
async def get_org_app_settings(self) -> OrgAppSettingsResponse:
|
||||
"""Get organization app settings.
|
||||
|
||||
User ID is obtained from the injected user_context.
|
||||
|
||||
Returns:
|
||||
OrgAppSettingsResponse: The organization's app settings
|
||||
|
||||
Raises:
|
||||
OrgNotFoundError: If current organization is not found
|
||||
AuthError: If user is not authenticated
|
||||
"""
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if not user_id:
|
||||
raise AuthError('User not authenticated')
|
||||
|
||||
logger.info(
|
||||
'Getting organization app settings',
|
||||
extra={'user_id': user_id},
|
||||
)
|
||||
|
||||
org = await self.store.get_current_org_by_user_id(user_id)
|
||||
|
||||
if not org:
|
||||
raise OrgNotFoundError('current')
|
||||
|
||||
return OrgAppSettingsResponse.from_org(org)
|
||||
|
||||
async def update_org_app_settings(
|
||||
self,
|
||||
update_data: OrgAppSettingsUpdate,
|
||||
) -> OrgAppSettingsResponse:
|
||||
"""Update organization app settings.
|
||||
|
||||
Only updates fields that are explicitly provided in update_data.
|
||||
User ID is obtained from the injected user_context.
|
||||
Session auto-commits at request end via DbSessionInjector.
|
||||
|
||||
Args:
|
||||
update_data: The update data from the request
|
||||
|
||||
Returns:
|
||||
OrgAppSettingsResponse: The updated organization's app settings
|
||||
|
||||
Raises:
|
||||
OrgNotFoundError: If current organization is not found
|
||||
AuthError: If user is not authenticated
|
||||
"""
|
||||
user_id = await self.user_context.get_user_id()
|
||||
if not user_id:
|
||||
raise AuthError('User not authenticated')
|
||||
|
||||
logger.info(
|
||||
'Updating organization app settings',
|
||||
extra={'user_id': user_id},
|
||||
)
|
||||
|
||||
# Get current org first
|
||||
org = await self.store.get_current_org_by_user_id(user_id)
|
||||
|
||||
if not org:
|
||||
raise OrgNotFoundError('current')
|
||||
|
||||
# Check if any fields are provided
|
||||
update_dict = update_data.model_dump(exclude_unset=True)
|
||||
|
||||
if not update_dict:
|
||||
# No fields to update, just return current settings
|
||||
logger.info(
|
||||
'No fields to update in app settings',
|
||||
extra={'user_id': user_id, 'org_id': str(org.id)},
|
||||
)
|
||||
return OrgAppSettingsResponse.from_org(org)
|
||||
|
||||
updated_org = await self.store.update_org_app_settings(
|
||||
org_id=org.id,
|
||||
update_data=update_data,
|
||||
)
|
||||
|
||||
if not updated_org:
|
||||
raise OrgNotFoundError('current')
|
||||
|
||||
logger.info(
|
||||
'Organization app settings updated successfully',
|
||||
extra={'user_id': user_id, 'updated_fields': list(update_dict.keys())},
|
||||
)
|
||||
|
||||
return OrgAppSettingsResponse.from_org(updated_org)
|
||||
|
||||
|
||||
class OrgAppSettingsServiceInjector(Injector[OrgAppSettingsService]):
|
||||
"""Injector that composes store and user_context for OrgAppSettingsService."""
|
||||
|
||||
async def inject(
|
||||
self, state: InjectorState, request: Request | None = None
|
||||
) -> AsyncGenerator[OrgAppSettingsService, None]:
|
||||
# Local imports to avoid circular dependencies
|
||||
from openhands.app_server.config import get_db_session, get_user_context
|
||||
|
||||
async with (
|
||||
get_user_context(state, request) as user_context,
|
||||
get_db_session(state, request) as db_session,
|
||||
):
|
||||
store = OrgAppSettingsStore(db_session=db_session)
|
||||
yield OrgAppSettingsService(store=store, user_context=user_context)
|
||||
@@ -73,7 +73,7 @@ class OrgInvitationService:
|
||||
)
|
||||
|
||||
# Step 1: Validate organization exists
|
||||
org = OrgStore.get_org_by_id(org_id)
|
||||
org = await OrgStore.get_org_by_id(org_id)
|
||||
if not org:
|
||||
raise ValueError(f'Organization {org_id} not found')
|
||||
|
||||
@@ -85,13 +85,13 @@ class OrgInvitationService:
|
||||
)
|
||||
|
||||
# Step 3: Check inviter is a member and has permission
|
||||
inviter_member = OrgMemberStore.get_org_member(org_id, inviter_id)
|
||||
inviter_member = await OrgMemberStore.get_org_member(org_id, inviter_id)
|
||||
if not inviter_member:
|
||||
raise InsufficientPermissionError(
|
||||
'You are not a member of this organization'
|
||||
)
|
||||
|
||||
inviter_role = RoleStore.get_role_by_id(inviter_member.role_id)
|
||||
inviter_role = await RoleStore.get_role_by_id(inviter_member.role_id)
|
||||
if not inviter_role or inviter_role.name not in [ROLE_OWNER, ROLE_ADMIN]:
|
||||
raise InsufficientPermissionError('Only owners and admins can invite users')
|
||||
|
||||
@@ -101,14 +101,16 @@ class OrgInvitationService:
|
||||
raise InsufficientPermissionError('Only owners can invite with owner role')
|
||||
|
||||
# Get the target role
|
||||
target_role = RoleStore.get_role_by_name(role_name_lower)
|
||||
target_role = await RoleStore.get_role_by_name(role_name_lower)
|
||||
if not target_role:
|
||||
raise ValueError(f'Invalid role: {role_name}')
|
||||
|
||||
# Step 5: Check if user is already a member (by email)
|
||||
existing_user = await UserStore.get_user_by_email_async(email)
|
||||
existing_user = await UserStore.get_user_by_email(email)
|
||||
if existing_user:
|
||||
existing_member = OrgMemberStore.get_org_member(org_id, existing_user.id)
|
||||
existing_member = await OrgMemberStore.get_org_member(
|
||||
org_id, existing_user.id
|
||||
)
|
||||
if existing_member:
|
||||
raise UserAlreadyMemberError(
|
||||
'User is already a member of this organization'
|
||||
@@ -125,7 +127,7 @@ class OrgInvitationService:
|
||||
# Step 7: Send invitation email
|
||||
try:
|
||||
# Get inviter info for the email
|
||||
inviter_user = UserStore.get_user_by_id(str(inviter_member.user_id))
|
||||
inviter_user = await UserStore.get_user_by_id(str(inviter_member.user_id))
|
||||
inviter_name = 'A team member'
|
||||
if inviter_user and inviter_user.email:
|
||||
inviter_name = inviter_user.email.split('@')[0]
|
||||
@@ -187,7 +189,7 @@ class OrgInvitationService:
|
||||
)
|
||||
|
||||
# Step 1: Validate permissions upfront (shared for all emails)
|
||||
org = OrgStore.get_org_by_id(org_id)
|
||||
org = await OrgStore.get_org_by_id(org_id)
|
||||
if not org:
|
||||
raise ValueError(f'Organization {org_id} not found')
|
||||
|
||||
@@ -196,13 +198,13 @@ class OrgInvitationService:
|
||||
'Cannot invite users to a personal workspace'
|
||||
)
|
||||
|
||||
inviter_member = OrgMemberStore.get_org_member(org_id, inviter_id)
|
||||
inviter_member = await OrgMemberStore.get_org_member(org_id, inviter_id)
|
||||
if not inviter_member:
|
||||
raise InsufficientPermissionError(
|
||||
'You are not a member of this organization'
|
||||
)
|
||||
|
||||
inviter_role = RoleStore.get_role_by_id(inviter_member.role_id)
|
||||
inviter_role = await RoleStore.get_role_by_id(inviter_member.role_id)
|
||||
if not inviter_role or inviter_role.name not in [ROLE_OWNER, ROLE_ADMIN]:
|
||||
raise InsufficientPermissionError('Only owners and admins can invite users')
|
||||
|
||||
@@ -210,7 +212,7 @@ class OrgInvitationService:
|
||||
if role_name_lower == ROLE_OWNER and inviter_role.name != ROLE_OWNER:
|
||||
raise InsufficientPermissionError('Only owners can invite with owner role')
|
||||
|
||||
target_role = RoleStore.get_role_by_name(role_name_lower)
|
||||
target_role = await RoleStore.get_role_by_name(role_name_lower)
|
||||
if not target_role:
|
||||
raise ValueError(f'Invalid role: {role_name}')
|
||||
|
||||
@@ -306,7 +308,7 @@ class OrgInvitationService:
|
||||
raise InvitationExpiredError('Invitation has expired')
|
||||
|
||||
# Step 2.5: Verify user email matches invitation email
|
||||
user = await UserStore.get_user_by_id_async(str(user_id))
|
||||
user = await UserStore.get_user_by_id(str(user_id))
|
||||
if not user:
|
||||
raise InvitationInvalidError('User not found')
|
||||
|
||||
@@ -336,7 +338,9 @@ class OrgInvitationService:
|
||||
raise EmailMismatchError()
|
||||
|
||||
# Step 3: Check if user is already a member
|
||||
existing_member = OrgMemberStore.get_org_member(invitation.org_id, user_id)
|
||||
existing_member = await OrgMemberStore.get_org_member(
|
||||
invitation.org_id, user_id
|
||||
)
|
||||
if existing_member:
|
||||
raise UserAlreadyMemberError(
|
||||
'You are already a member of this organization'
|
||||
@@ -369,11 +373,16 @@ class OrgInvitationService:
|
||||
org_member_kwargs.pop('llm_model', None)
|
||||
org_member_kwargs.pop('llm_base_url', None)
|
||||
|
||||
OrgMemberStore.add_user_to_org(
|
||||
# Get the llm_api_key as string (it's SecretStr | None in Settings)
|
||||
llm_api_key = (
|
||||
settings.llm_api_key.get_secret_value() if settings.llm_api_key else ''
|
||||
)
|
||||
|
||||
await OrgMemberStore.add_user_to_org(
|
||||
org_id=invitation.org_id,
|
||||
user_id=user_id,
|
||||
role_id=invitation.role_id,
|
||||
llm_api_key=settings.llm_api_key,
|
||||
llm_api_key=llm_api_key,
|
||||
status='active',
|
||||
)
|
||||
|
||||
@@ -384,6 +393,9 @@ class OrgInvitationService:
|
||||
accepted_by_user_id=user_id,
|
||||
)
|
||||
|
||||
if not updated_invitation:
|
||||
raise InvitationInvalidError('Failed to update invitation status')
|
||||
|
||||
logger.info(
|
||||
'Organization invitation accepted',
|
||||
extra={
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user