diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 3179a29657..05f6800ee5 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -21,7 +21,7 @@
"i18next": "^23.11.5",
"i18next-browser-languagedetector": "^8.0.0",
"i18next-http-backend": "^2.5.2",
- "jose": "^5.6.1",
+ "jose": "^5.6.2",
"monaco-editor": "^0.50.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
@@ -34,7 +34,7 @@
"react-router-dom": "^6.24.0",
"react-syntax-highlighter": "^15.5.0",
"tailwind-merge": "^2.3.0",
- "vite": "^5.3.1",
+ "vite": "^5.3.2",
"web-vitals": "^3.5.2"
},
"devDependencies": {
@@ -10552,9 +10552,9 @@
}
},
"node_modules/jose": {
- "version": "5.6.1",
- "resolved": "https://registry.npmjs.org/jose/-/jose-5.6.1.tgz",
- "integrity": "sha512-KyxsIFAtR0BH72iRCOwe+PRnsGltiXDRtemlOkp2xz7FrakAKd4lvYYJgFOJm2fryOMMUk0+f0E/uuTDoHDiTA==",
+ "version": "5.6.2",
+ "resolved": "https://registry.npmjs.org/jose/-/jose-5.6.2.tgz",
+ "integrity": "sha512-F1t1/WZJ4JdmCE/XoMYw1dPOW5g8JF0xGm6Ox2fwaCAPlCzt+4Bh0EWP59iQuZNHHauDkCdjx+kCZSh5z/PGow==",
"funding": {
"url": "https://github.com/sponsors/panva"
}
@@ -15147,9 +15147,9 @@
"integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
},
"node_modules/vite": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.3.1.tgz",
- "integrity": "sha512-XBmSKRLXLxiaPYamLv3/hnP/KXDai1NDexN0FpkTaZXTfycHvkRHoenpgl/fvuK/kPbB6xAgoyiryAhQNxYmAQ==",
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.3.2.tgz",
+ "integrity": "sha512-6lA7OBHBlXUxiJxbO5aAY2fsHHzDr1q7DvXYnyZycRs2Dz+dXBWuhpWHvmljTRTpQC2uvGmUFFkSHF2vGo90MA==",
"dependencies": {
"esbuild": "^0.21.3",
"postcss": "^8.4.38",
diff --git a/frontend/package.json b/frontend/package.json
index 9801a246d3..b0ba8d1661 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -20,7 +20,7 @@
"i18next": "^23.11.5",
"i18next-browser-languagedetector": "^8.0.0",
"i18next-http-backend": "^2.5.2",
- "jose": "^5.6.1",
+ "jose": "^5.6.2",
"monaco-editor": "^0.50.0",
"react": "^18.3.1",
"react-dom": "^18.3.1",
@@ -33,7 +33,7 @@
"react-router-dom": "^6.24.0",
"react-syntax-highlighter": "^15.5.0",
"tailwind-merge": "^2.3.0",
- "vite": "^5.3.1",
+ "vite": "^5.3.2",
"web-vitals": "^3.5.2"
},
"scripts": {
diff --git a/frontend/src/components/file-explorer/FileExplorer.tsx b/frontend/src/components/file-explorer/FileExplorer.tsx
index 06d4ba8ca0..32508d4a3d 100644
--- a/frontend/src/components/file-explorer/FileExplorer.tsx
+++ b/frontend/src/components/file-explorer/FileExplorer.tsx
@@ -107,15 +107,60 @@ function FileExplorer() {
return;
}
dispatch(setRefreshID(Math.random()));
- setFiles(await listFiles("/"));
+ try {
+ const fileList = await listFiles("/");
+ setFiles(fileList);
+ if (fileList.length === 0) {
+ toast.info(t(I18nKey.EXPLORER$EMPTY_WORKSPACE_MESSAGE));
+ }
+ } catch (error) {
+ toast.error("refresh-error", t(I18nKey.EXPLORER$REFRESH_ERROR_MESSAGE));
+ }
};
const uploadFileData = async (toAdd: FileList) => {
try {
- await uploadFiles(toAdd);
+ const result = await uploadFiles(toAdd);
+
+ if (result.error) {
+ // Handle error response
+ toast.error(
+ `upload-error-${new Date().getTime()}`,
+ result.error || t(I18nKey.EXPLORER$UPLOAD_ERROR_MESSAGE),
+ );
+ return;
+ }
+
+ const uploadedCount = result.uploadedFiles.length;
+ const skippedCount = result.skippedFiles.length;
+
+ if (uploadedCount > 0) {
+ toast.success(
+ `upload-success-${new Date().getTime()}`,
+ t(I18nKey.EXPLORER$UPLOAD_SUCCESS_MESSAGE, {
+ count: uploadedCount,
+ }),
+ );
+ }
+
+ if (skippedCount > 0) {
+ const message = t(I18nKey.EXPLORER$UPLOAD_PARTIAL_SUCCESS_MESSAGE, {
+ count: skippedCount,
+ });
+ toast.info(message);
+ }
+
+ if (uploadedCount === 0 && skippedCount === 0) {
+ toast.info(t(I18nKey.EXPLORER$NO_FILES_UPLOADED_MESSAGE));
+ }
+
await refreshWorkspace();
} catch (error) {
- toast.error("ws", t(I18nKey.EXPLORER$UPLOAD_ERROR_MESSAGE));
+ // Handle unexpected errors (network issues, etc.)
+ toast.error(
+ `upload-error-${new Date().getTime()}`,
+ t(I18nKey.EXPLORER$UPLOAD_ERROR_MESSAGE),
+ );
}
};
@@ -148,13 +193,16 @@ function FileExplorer() {
}
return (
-
+
{isDragging && (
{
event.preventDefault();
- uploadFileData(event.dataTransfer.files);
+ const { files: droppedFiles } = event.dataTransfer;
+ if (droppedFiles.length > 0) {
+ uploadFileData(droppedFiles);
+ }
}}
onDragOver={(event) => event.preventDefault()}
className="z-10 absolute flex flex-col justify-center items-center bg-black top-0 bottom-0 left-0 right-0 opacity-65"
@@ -167,32 +215,37 @@ function FileExplorer() {
)}
-
-
- {!isHidden && (
-
- {t(I18nKey.EXPLORER$LABEL_WORKSPACE)}
-
- )}
-
setIsHidden((prev) => !prev)}
- onRefresh={refreshWorkspace}
- onUpload={selectFileInput}
- />
+
+
+
+ {!isHidden && (
+
+
+ {t(I18nKey.EXPLORER$LABEL_WORKSPACE)}
+
+
+ )}
+
setIsHidden((prev) => !prev)}
+ onRefresh={refreshWorkspace}
+ onUpload={selectFileInput}
+ />
+
-
-
{
- if (event.target.files) {
- uploadFileData(event.target.files);
+ const { files: selectedFiles } = event.target;
+ if (selectedFiles && selectedFiles.length > 0) {
+ uploadFileData(selectedFiles);
}
}}
/>
diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json
index 9e0d4c407a..bcd9bf8676 100644
--- a/frontend/src/i18n/translation.json
+++ b/frontend/src/i18n/translation.json
@@ -308,6 +308,36 @@
"zh-CN": "工作区",
"de": "Arbeitsbereich"
},
+ "EXPLORER$EMPTY_WORKSPACE_MESSAGE": {
+ "en": "No files in workspace",
+ "zh-CN": "工作区没有文件",
+ "de": "Keine Dateien im Arbeitsbereich"
+ },
+ "EXPLORER$REFRESH_ERROR_MESSAGE": {
+ "en": "Error refreshing workspace",
+ "zh-CN": "工作区刷新错误",
+ "de": "Fehler beim Aktualisieren des Arbeitsbereichs"
+ },
+ "EXPLORER$UPLOAD_SUCCESS_MESSAGE": {
+ "en": "Successfully uploaded {{count}} file(s)",
+ "zh-CN": "成功上传 {{count}} 个文件",
+ "de": "Erfolgreich {{count}} Datei(en) hochgeladen"
+ },
+ "EXPLORER$NO_FILES_UPLOADED_MESSAGE": {
+ "en": "No files were uploaded",
+ "zh-CN": "没有文件上传",
+ "de": "Keine Dateien wurden hochgeladen"
+ },
+ "EXPLORER$UPLOAD_PARTIAL_SUCCESS_MESSAGE": {
+ "en": "{{count}} file(s) were skipped during upload",
+ "zh-CN": "{{count}} 个文件在上传过程中被跳过",
+ "de": "{{count}} Datei(en) wurden während des Hochladens übersprungen"
+ },
+ "EXPLORER$UPLOAD_UNEXPECTED_RESPONSE_MESSAGE": {
+ "en": "Unexpected response structure from server",
+ "zh-CN": "服务器响应结构不符合预期",
+ "de": "Unerwartetes Antwortformat vom Server"
+ },
"LOAD_SESSION$MODAL_TITLE": {
"en": "Return to existing session?",
"de": "Zurück zu vorhandener Sitzung?",
diff --git a/frontend/src/services/fileService.ts b/frontend/src/services/fileService.ts
index f12a0e499b..d8933e02df 100644
--- a/frontend/src/services/fileService.ts
+++ b/frontend/src/services/fileService.ts
@@ -1,23 +1,74 @@
import { request } from "./api";
export async function selectFile(file: string): Promise
{
- const data = await request(`/api/select-file?file=${file}`);
+ const encodedFile = encodeURIComponent(file);
+ const data = await request(`/api/select-file?file=${encodedFile}`);
return data.code as string;
}
-export async function uploadFiles(files: FileList) {
+interface UploadResult {
+ message: string;
+ uploadedFiles: string[];
+ skippedFiles: Array<{ name: string; reason: string }>;
+ error?: string;
+}
+
+export async function uploadFiles(files: FileList): Promise {
const formData = new FormData();
+ const skippedFiles: Array<{ name: string; reason: string }> = [];
+
+ let uploadedCount = 0;
+
for (let i = 0; i < files.length; i += 1) {
- formData.append("files", files[i]);
+ const file = files[i];
+
+ if (
+ file.name.includes("..") ||
+ file.name.includes("/") ||
+ file.name.includes("\\")
+ ) {
+ skippedFiles.push({
+ name: file.name,
+ reason: "Invalid file name",
+ });
+ } else {
+ formData.append("files", file);
+ uploadedCount += 1;
+ }
}
- await request("/api/upload-files", {
+ // Add skippedFilesCount to formData
+ formData.append("skippedFilesCount", skippedFiles.length.toString());
+
+ // Add uploadedFilesCount to formData
+ formData.append("uploadedFilesCount", uploadedCount.toString());
+
+ const response = await request("/api/upload-files", {
method: "POST",
body: formData,
});
+
+ if (
+ typeof response.message !== "string" ||
+ !Array.isArray(response.uploaded_files) ||
+ !Array.isArray(response.skipped_files)
+ ) {
+ throw new Error("Unexpected response structure from server");
+ }
+
+ return {
+ message: response.message,
+ uploadedFiles: response.uploaded_files,
+ skippedFiles: [...skippedFiles, ...response.skipped_files],
+ };
}
export async function listFiles(path: string = "/"): Promise {
- const data = await request(`/api/list-files?path=${path}`);
- return data as string[];
+ try {
+ const encodedPath = encodeURIComponent(path);
+ const data = await request(`/api/list-files?path=${encodedPath}`);
+ return data as string[];
+ } catch (error) {
+ return [];
+ }
}
diff --git a/opendevin/core/config.py b/opendevin/core/config.py
index 76724aa892..4c7db697c9 100644
--- a/opendevin/core/config.py
+++ b/opendevin/core/config.py
@@ -160,6 +160,9 @@ class AppConfig(metaclass=Singleton):
sandbox_timeout: The timeout for the sandbox.
debug: Whether to enable debugging.
enable_auto_lint: Whether to enable auto linting. This is False by default, for regular runs of the app. For evaluation, please set this to True.
+ file_uploads_max_file_size_mb: Maximum file size for uploads in megabytes. 0 means no limit.
+ file_uploads_restrict_file_types: Whether to restrict file types for file uploads. Defaults to False.
+ file_uploads_allowed_extensions: List of allowed file extensions for uploads. ['.*'] means all extensions are allowed.
"""
llm: LLMConfig = field(default_factory=LLMConfig)
@@ -199,6 +202,9 @@ class AppConfig(metaclass=Singleton):
False # once enabled, OpenDevin would lint files after editing
)
enable_main_session: bool = False
+ file_uploads_max_file_size_mb: int = 0
+ file_uploads_restrict_file_types: bool = False
+ file_uploads_allowed_extensions: list[str] = field(default_factory=lambda: ['.*'])
defaults_dict: ClassVar[dict] = {}
diff --git a/opendevin/core/schema/config.py b/opendevin/core/schema/config.py
index 94813fb6f1..1bece3582e 100644
--- a/opendevin/core/schema/config.py
+++ b/opendevin/core/schema/config.py
@@ -42,3 +42,6 @@ class ConfigType(str, Enum):
SSH_HOSTNAME = 'SSH_HOSTNAME'
DISABLE_COLOR = 'DISABLE_COLOR'
DEBUG = 'DEBUG'
+ FILE_UPLOADS_MAX_FILE_SIZE_MB = 'FILE_UPLOADS_MAX_FILE_SIZE_MB'
+ FILE_UPLOADS_RESTRICT_FILE_TYPES = 'FILE_UPLOADS_RESTRICT_FILE_TYPES'
+ FILE_UPLOADS_ALLOWED_EXTENSIONS = 'FILE_UPLOADS_ALLOWED_EXTENSIONS'
diff --git a/opendevin/server/listen.py b/opendevin/server/listen.py
index ad74279898..baae6771eb 100644
--- a/opendevin/server/listen.py
+++ b/opendevin/server/listen.py
@@ -1,12 +1,16 @@
+import os
+import re
import uuid
import warnings
+from pathspec import PathSpec
+from pathspec.patterns import GitWildMatchPattern
+
from opendevin.server.data_models.feedback import FeedbackDataModel, store_feedback
with warnings.catch_warnings():
warnings.simplefilter('ignore')
import litellm
-from pathlib import Path
from fastapi import FastAPI, Request, Response, UploadFile, WebSocket, status
from fastapi.middleware.cors import CORSMiddleware
@@ -19,7 +23,10 @@ from opendevin.controller.agent import Agent
from opendevin.core.config import config
from opendevin.core.logger import opendevin_logger as logger
from opendevin.events.action import ChangeAgentStateAction, NullAction
-from opendevin.events.observation import AgentStateChangedObservation, NullObservation
+from opendevin.events.observation import (
+ AgentStateChangedObservation,
+ NullObservation,
+)
from opendevin.events.serialization import event_to_dict
from opendevin.llm import bedrock
from opendevin.server.auth import get_sid_from_token, sign_token
@@ -37,6 +44,96 @@ app.add_middleware(
security_scheme = HTTPBearer()
+def load_file_upload_config() -> tuple[int, bool, list[str]]:
+ """
+ Load file upload configuration from the config object.
+
+ This function retrieves the file upload settings from the global config object.
+ It handles the following settings:
+ - Maximum file size for uploads
+ - Whether to restrict file types
+ - List of allowed file extensions
+
+ It also performs sanity checks on the values to ensure they are valid and safe.
+
+ Returns:
+ tuple: A tuple containing:
+ - max_file_size_mb (int): Maximum file size in MB. 0 means no limit.
+ - restrict_file_types (bool): Whether file type restrictions are enabled.
+ - allowed_extensions (list): List of allowed file extensions.
+ """
+ # Retrieve values from config
+ max_file_size_mb = config.file_uploads_max_file_size_mb
+ restrict_file_types = config.file_uploads_restrict_file_types
+ allowed_extensions = config.file_uploads_allowed_extensions
+
+ # Sanity check for max_file_size_mb
+ MAX_ALLOWED_SIZE = 1024 # Maximum allowed file size 1 GB
+ if not isinstance(max_file_size_mb, int) or max_file_size_mb < 0:
+ logger.warning(
+ f'Invalid max_file_size_mb: {max_file_size_mb}. Setting to 0 (no limit).'
+ )
+ max_file_size_mb = 0
+ elif max_file_size_mb > MAX_ALLOWED_SIZE:
+ logger.warning(
+ f'max_file_size_mb exceeds maximum allowed size. Capping at {MAX_ALLOWED_SIZE}MB.'
+ )
+ max_file_size_mb = MAX_ALLOWED_SIZE
+
+ # Sanity check for allowed_extensions
+ if not isinstance(allowed_extensions, (list, set)) or not allowed_extensions:
+ logger.warning(
+ f'Invalid allowed_extensions: {allowed_extensions}. Setting to [".*"].'
+ )
+ allowed_extensions = ['.*']
+ else:
+ # Ensure all extensions start with a dot and are lowercase
+ allowed_extensions = [
+ ext.lower() if ext.startswith('.') else f'.{ext.lower()}'
+ for ext in allowed_extensions
+ ]
+
+ # If restrictions are disabled, allow all
+ if not restrict_file_types:
+ allowed_extensions = ['.*']
+
+ logger.info(
+ f'File upload config: max_size={max_file_size_mb}MB, '
+ f'restrict_types={restrict_file_types}, '
+ f'allowed_extensions={allowed_extensions}'
+ )
+
+ return max_file_size_mb, restrict_file_types, allowed_extensions
+
+
+# Load configuration
+MAX_FILE_SIZE_MB, RESTRICT_FILE_TYPES, ALLOWED_EXTENSIONS = load_file_upload_config()
+
+
+def is_extension_allowed(filename):
+ """
+ Check if the file extension is allowed based on the current configuration.
+
+ This function supports the '.*' wildcard entry and files without extensions.
+ The check is case-insensitive for extensions.
+
+ Args:
+ filename (str): The name of the file to check.
+
+ Returns:
+ bool: True if the file extension is allowed, False otherwise.
+ """
+ if not RESTRICT_FILE_TYPES:
+ return True
+
+ file_ext = os.path.splitext(filename)[1].lower() # Convert to lowercase
+ return (
+ '.*' in ALLOWED_EXTENSIONS
+ or file_ext in (ext.lower() for ext in ALLOWED_EXTENSIONS)
+ or (file_ext == '' and '.' in ALLOWED_EXTENSIONS)
+ )
+
+
@app.middleware('http')
async def attach_session(request: Request, call_next):
if request.url.path.startswith('/api/options/') or not request.url.path.startswith(
@@ -225,48 +322,85 @@ def list_files(request: Request, path: str = '/'):
content={'error': 'Runtime not yet initialized'},
)
- exclude_list = (
- '.git',
- '.DS_Store',
- '.svn',
- '.hg',
- '.idea',
- '.vscode',
- '.settings',
- '.pytest_cache',
- '__pycache__',
- 'node_modules',
- 'vendor',
- 'build',
- 'dist',
- 'bin',
- 'logs',
- 'log',
- 'tmp',
- 'temp',
- 'coverage',
- 'venv',
- 'env',
- )
-
try:
+ # Get the full path of the requested directory
+ full_path = (
+ request.state.session.agent_session.runtime.file_store.get_full_path(path)
+ )
+
+ # Check if the directory exists
+ if not os.path.exists(full_path) or not os.path.isdir(full_path):
+ return []
+
+ # Check if .gitignore exists
+ gitignore_path = os.path.join(full_path, '.gitignore')
+ if os.path.exists(gitignore_path):
+ # Use PathSpec to parse .gitignore
+ with open(gitignore_path, 'r') as f:
+ spec = PathSpec.from_lines(GitWildMatchPattern, f.readlines())
+ else:
+ # Fallback to default exclude list if .gitignore doesn't exist
+ default_exclude = [
+ '.git',
+ '.DS_Store',
+ '.svn',
+ '.hg',
+ '.idea',
+ '.vscode',
+ '.settings',
+ '.pytest_cache',
+ '__pycache__',
+ 'node_modules',
+ 'vendor',
+ 'build',
+ 'dist',
+ 'bin',
+ 'logs',
+ 'log',
+ 'tmp',
+ 'temp',
+ 'coverage',
+ 'venv',
+ 'env',
+ ]
+ spec = PathSpec.from_lines(GitWildMatchPattern, default_exclude)
+
entries = request.state.session.agent_session.runtime.file_store.list(path)
- # Filter entries, excluding special folders
- if entries:
- return [
- entry
- for entry in entries
- if Path(entry).parts and Path(entry).parts[-1] not in exclude_list
- ]
- return []
+ # Filter entries using PathSpec
+ filtered_entries = [
+ entry
+ for entry in entries
+ if not spec.match_file(os.path.relpath(entry, full_path))
+ ]
+
+ # Separate directories and files
+ directories = []
+ files = []
+ for entry in filtered_entries:
+ # Remove leading slash and any parent directory components
+ entry_relative = entry.lstrip('/').split('/')[-1]
+
+ # Construct the full path by joining the base path with the relative entry path
+ full_entry_path = os.path.join(full_path, entry_relative)
+ if os.path.exists(full_entry_path):
+ is_dir = os.path.isdir(full_entry_path)
+ if is_dir:
+ directories.append(entry)
+ else:
+ files.append(entry)
+
+ # Sort directories and files separately
+ directories.sort(key=str.lower)
+ files.sort(key=str.lower)
+
+ # Combine sorted directories and files
+ sorted_entries = directories + files
+ return sorted_entries
+
except Exception as e:
- logger.error(f'Error refreshing files: {e}', exc_info=False)
- error_msg = f'Error refreshing files: {e}'
- return JSONResponse(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- content={'error': error_msg},
- )
+ logger.error(f'Error listing files: {e}', exc_info=True)
+ return []
@app.get('/api/select-file')
@@ -291,6 +425,22 @@ def select_file(file: str, request: Request):
return {'code': content}
+def sanitize_filename(filename):
+ """
+ Sanitize the filename to prevent directory traversal
+ """
+ # Remove any directory components
+ filename = os.path.basename(filename)
+ # Remove any non-alphanumeric characters except for .-_
+ filename = re.sub(r'[^\w\-_\.]', '', filename)
+ # Limit the filename length
+ max_length = 255
+ if len(filename) > max_length:
+ name, ext = os.path.splitext(filename)
+ filename = name[: max_length - len(ext)] + ext
+ return filename
+
+
@app.post('/api/upload-files')
async def upload_file(request: Request, files: list[UploadFile]):
"""
@@ -302,24 +452,68 @@ async def upload_file(request: Request, files: list[UploadFile]):
```
"""
try:
+ uploaded_files = []
+ skipped_files = []
for file in files:
+ safe_filename = sanitize_filename(file.filename)
file_contents = await file.read()
+
+ if (
+ MAX_FILE_SIZE_MB > 0
+ and len(file_contents) > MAX_FILE_SIZE_MB * 1024 * 1024
+ ):
+ skipped_files.append(
+ {
+ 'name': safe_filename,
+ 'reason': f'Exceeds maximum size limit of {MAX_FILE_SIZE_MB}MB',
+ }
+ )
+ continue
+
+ if not is_extension_allowed(safe_filename):
+ skipped_files.append(
+ {'name': safe_filename, 'reason': 'File type not allowed'}
+ )
+ continue
+
request.state.session.agent_session.runtime.file_store.write(
- file.filename, file_contents
+ safe_filename, file_contents
)
+ uploaded_files.append(safe_filename)
+
+ response_content = {
+ 'message': 'File upload process completed',
+ 'uploaded_files': uploaded_files,
+ 'skipped_files': skipped_files,
+ }
+
+ if not uploaded_files and skipped_files:
+ return JSONResponse(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ content={
+ **response_content,
+ 'error': 'No files were uploaded successfully',
+ },
+ )
+
+ return JSONResponse(status_code=status.HTTP_200_OK, content=response_content)
+
except Exception as e:
- logger.error(f'Error saving files: {e}', exc_info=True)
+ logger.error(f'Error during file upload: {e}', exc_info=True)
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- content={'error': f'Error saving file:s {e}'},
+ content={
+ 'error': f'Error during file upload: {str(e)}',
+ 'uploaded_files': [],
+ 'skipped_files': [],
+ },
)
- return {'message': 'Files uploaded successfully', 'file_count': len(files)}
@app.post('/api/submit-feedback')
async def submit_feedback(request: Request, feedback: FeedbackDataModel):
"""
- Upload files to the workspace.
+ Upload feedback data to the feedback site.
To upload files:
```sh
@@ -327,7 +521,7 @@ async def submit_feedback(request: Request, feedback: FeedbackDataModel):
```
"""
# Assuming the storage service is already configured in the backend
- # and there is a function to handle the storage.
+ # and there is a function to handle the storage.
try:
feedback_data = store_feedback(feedback)
return JSONResponse(status_code=200, content=feedback_data)
diff --git a/opendevin/storage/local.py b/opendevin/storage/local.py
index 6c671cd47e..c657a3930a 100644
--- a/opendevin/storage/local.py
+++ b/opendevin/storage/local.py
@@ -16,10 +16,11 @@ class LocalFileStore(FileStore):
path = path[1:]
return os.path.join(self.root, path)
- def write(self, path: str, contents: str) -> None:
+ def write(self, path: str, contents: str | bytes):
full_path = self.get_full_path(path)
os.makedirs(os.path.dirname(full_path), exist_ok=True)
- with open(full_path, 'w') as f:
+ mode = 'w' if isinstance(contents, str) else 'wb'
+ with open(full_path, mode) as f:
f.write(contents)
def read(self, path: str) -> str:
diff --git a/poetry.lock b/poetry.lock
index c0efcf4d2f..02ee5c4c3f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "aenum"
@@ -416,17 +416,17 @@ files = [
[[package]]
name = "boto3"
-version = "1.34.134"
+version = "1.34.135"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "boto3-1.34.134-py3-none-any.whl", hash = "sha256:342782c02ff077aae118c9c61179eed95c585831fba666baacc5588ff04aa6e1"},
- {file = "boto3-1.34.134.tar.gz", hash = "sha256:f6d6e5b0c9ab022a75373fa16c01f0cd54bc1bb64ef3b6ac64ac7cedd56cbe9c"},
+ {file = "boto3-1.34.135-py3-none-any.whl", hash = "sha256:6f5d7a20afbe45e3f7c6b5e96071752d36c3942535b1f7924964f1fdf25376a7"},
+ {file = "boto3-1.34.135.tar.gz", hash = "sha256:344f635233c85dbb509b87638232ff9132739f90bb5e6bf01fa0e0a521a9107e"},
]
[package.dependencies]
-botocore = ">=1.34.134,<1.35.0"
+botocore = ">=1.34.135,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -435,13 +435,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.134"
+version = "1.34.135"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.134-py3-none-any.whl", hash = "sha256:45219e00639755f92569b29f8f279d5dde721494791412c1f7026a3779e8d9f4"},
- {file = "botocore-1.34.134.tar.gz", hash = "sha256:e29c299599426ed16dd2d4c1e20eef784f96b15e1850ebbc59a3250959285b95"},
+ {file = "botocore-1.34.135-py3-none-any.whl", hash = "sha256:3aa9e85e7c479babefb5a590e844435449df418085f3c74d604277bc52dc3109"},
+ {file = "botocore-1.34.135.tar.gz", hash = "sha256:2e72f37072f75cb1391fca9d7a4c32cecb52a3557d62431d0f59d5311dc7d0cf"},
]
[package.dependencies]
@@ -2547,13 +2547,13 @@ files = [
[[package]]
name = "json-repair"
-version = "0.25.1"
+version = "0.25.2"
description = "A package to repair broken json strings"
optional = false
python-versions = ">=3.7"
files = [
- {file = "json_repair-0.25.1-py3-none-any.whl", hash = "sha256:6fba3960f1036be01dc0d4ef909fb543f4bd4e3d6851ad0cdfb8191d7ee19763"},
- {file = "json_repair-0.25.1.tar.gz", hash = "sha256:b808a86699516a0eaa9e0c52233cd2eedab24bac156d4eca9a4c5e2f1586afcb"},
+ {file = "json_repair-0.25.2-py3-none-any.whl", hash = "sha256:51d67295c3184b6c41a3572689661c6128cef6cfc9fb04db63130709adfc5bf0"},
+ {file = "json_repair-0.25.2.tar.gz", hash = "sha256:161a56d7e6bbfd4cad3a614087e3e0dbd0e10d402dd20dc7db418432428cb32b"},
]
[[package]]
@@ -2771,13 +2771,13 @@ types-tqdm = "*"
[[package]]
name = "litellm"
-version = "1.40.28"
+version = "1.40.29"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
- {file = "litellm-1.40.28-py3-none-any.whl", hash = "sha256:aa6d59390f24d1b1168a202b966249f9f5f93d08deba38ed9528654544065e96"},
- {file = "litellm-1.40.28.tar.gz", hash = "sha256:08fdfcb01715006f9dadb8d05b94143f782e08d1944e5691d9faf20300e62739"},
+ {file = "litellm-1.40.29-py3-none-any.whl", hash = "sha256:f541c6a868e62a9018d1502a2043d9e3a669833a6a4ed4946635eec26329540a"},
+ {file = "litellm-1.40.29.tar.gz", hash = "sha256:167357fcfe33813bf3410c5f13058c7d8ca39a38a476a4ddb80ffd3c18dab770"},
]
[package.dependencies]
@@ -3119,13 +3119,13 @@ llama-parse = ">=0.4.0,<0.5.0"
[[package]]
name = "llama-index-vector-stores-chroma"
-version = "0.1.9"
+version = "0.1.10"
description = "llama-index vector_stores chroma integration"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "llama_index_vector_stores_chroma-0.1.9-py3-none-any.whl", hash = "sha256:0d900fe97def537c2dd1c2d155287fae014b63848e3aff28902eb38c45e0bc28"},
- {file = "llama_index_vector_stores_chroma-0.1.9.tar.gz", hash = "sha256:6a5c27ab3ae25cf504bed9513c1f035365dfb576b886fe334d46908ca24a59cf"},
+ {file = "llama_index_vector_stores_chroma-0.1.10-py3-none-any.whl", hash = "sha256:18859272ec8d3ed20bae7e4a9bc18feb4233e8be2a725d33626f283ac41d1475"},
+ {file = "llama_index_vector_stores_chroma-0.1.10.tar.gz", hash = "sha256:97971f7b36461ef37be023b9ceb5531396cc48360d0bdbda51cce1290301cc47"},
]
[package.dependencies]
@@ -4114,7 +4114,6 @@ description = "Nvidia JIT LTO Library"
optional = false
python-versions = ">=3"
files = [
- {file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_aarch64.whl", hash = "sha256:004186d5ea6a57758fd6d57052a123c73a4815adf365eb8dd6a85c9eaa7535ff"},
{file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d9714f27c1d0f0895cd8915c07a87a1d0029a0aa36acaf9156952ec2a8a12189"},
{file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-win_amd64.whl", hash = "sha256:c3401dc8543b52d3a8158007a0c1ab4e9c768fcbd24153a48c86972102197ddd"},
]
@@ -4190,13 +4189,13 @@ sympy = "*"
[[package]]
name = "openai"
-version = "1.35.6"
+version = "1.35.7"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.35.6-py3-none-any.whl", hash = "sha256:c2bfa599445a2d6010adc7954476c2dc64e1aa8dad02ef29e0f31b9a887c1d02"},
- {file = "openai-1.35.6.tar.gz", hash = "sha256:c5958617048a2d777d2b96050fd69ae6721bdffbf59967698694223cc092abd9"},
+ {file = "openai-1.35.7-py3-none-any.whl", hash = "sha256:3d1e0b0aac9b0db69a972d36dc7efa7563f8e8d65550b27a48f2a0c2ec207e80"},
+ {file = "openai-1.35.7.tar.gz", hash = "sha256:009bfa1504c9c7ef64d87be55936d142325656bbc6d98c68b669d6472e4beb09"},
]
[package.dependencies]
@@ -4544,6 +4543,17 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.9.2)"]
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
[[package]]
name = "pexpect"
version = "4.9.0"
@@ -7737,4 +7747,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
-content-hash = "dba3c8c3812d657e413a57e3bd87ad6f80adadc08857948ff1fd6e1c62692ca7"
+content-hash = "d30ba49e7737bdacfb1c08a821ab1d41f97e00c19b691bec504e6eae301ee0e7"
diff --git a/pyproject.toml b/pyproject.toml
index 11fbaad013..52da949a84 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,6 +34,7 @@ gevent = "^24.2.1"
pyarrow = "16.1.0" # transitive dependency, pinned here to avoid conflicts
tenacity = "^8.4.2"
zope-interface = "6.4.post2"
+pathspec = "^0.12.1"
[tool.poetry.group.llama-index.dependencies]
llama-index = "*"