Logging additions and fixes (#1139)

* Refactor print_to_color into a color formatter

misc fixes

catch ValueErrors and others from Router initialization

add default methods

* Tweak console log formatting, clean up after rebasing exceptions out

* Fix prompts/responses

* clean up

* keep regular colors when no msg_type

* fix filename

* handle file log first

* happy mypy

* ok, mypy

---------

Co-authored-by: Robert Brennan <accounts@rbren.io>
This commit is contained in:
Engel Nyst 2024-04-16 18:55:22 +02:00 committed by GitHub
parent 5881857d5c
commit 1115b60a74
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 101 additions and 84 deletions

View File

@ -5,6 +5,7 @@ from llama_index.core import VectorStoreIndex
from llama_index.vector_stores.chroma import ChromaVectorStore
from opendevin import config
from opendevin.logger import opendevin_logger as logger
from . import json
embedding_strategy = config.get('LLM_EMBEDDING_MODEL')
@ -83,6 +84,7 @@ class LongTermMemory:
},
)
self.thought_idx += 1
logger.debug("Adding %s event to memory: %d", t, self.thought_idx)
self.index.insert(doc)
def search(self, query: str, k: int = 10):

View File

@ -1,11 +1,10 @@
import json
from typing import List, Tuple, Dict, Type
from opendevin.controller.agent_controller import print_with_color
from opendevin.plan import Plan
from opendevin.action import Action, action_from_dict
from opendevin.observation import Observation
from opendevin.schema import ActionType
from opendevin.logger import opendevin_logger as logger
from opendevin.action import (
NullAction,
@ -198,7 +197,7 @@ def get_prompt(plan: Plan, history: List[Tuple[Action, Observation]]) -> str:
elif latest_action_id == ActionType.FINISH:
hint = ''
print_with_color('HINT:\n' + hint, 'INFO')
logger.info('HINT:\n' + hint, extra={'msg_type': 'INFO'})
return prompt % {
'task': plan.main_goal,
'plan': plan_str,

View File

@ -2,13 +2,20 @@ import asyncio
import inspect
import traceback
import time
from typing import List, Callable, Literal, Mapping, Awaitable, Any, cast
from termcolor import colored
from typing import List, Callable, Awaitable, cast
from opendevin.plan import Plan
from opendevin.state import State
from opendevin.agent import Agent
from opendevin.observation import Observation, AgentErrorObservation, NullObservation
from litellm.exceptions import APIConnectionError
from openai import AuthenticationError
from opendevin import config
from opendevin.logger import opendevin_logger as logger
from opendevin.exceptions import MaxCharsExceedError
from .command_manager import CommandManager
from opendevin.action import (
Action,
NullAction,
@ -16,59 +23,12 @@ from opendevin.action import (
AddTaskAction,
ModifyTaskAction,
)
from opendevin.agent import Agent
from opendevin.logger import opendevin_logger as logger
from opendevin.exceptions import MaxCharsExceedError, AgentNoActionError
from opendevin.observation import Observation, AgentErrorObservation, NullObservation
from opendevin.plan import Plan
from opendevin.state import State
from .command_manager import CommandManager
from opendevin.exceptions import AgentNoActionError
ColorType = Literal[
'red',
'green',
'yellow',
'blue',
'magenta',
'cyan',
'light_grey',
'dark_grey',
'light_red',
'light_green',
'light_yellow',
'light_blue',
'light_magenta',
'light_cyan',
'white',
]
DISABLE_COLOR_PRINTING = (
config.get('DISABLE_COLOR').lower() == 'true'
)
MAX_ITERATIONS = config.get('MAX_ITERATIONS')
MAX_CHARS = config.get('MAX_CHARS')
def print_with_color(text: Any, print_type: str = 'INFO'):
    """Print *text* to stdout, prefixed with an upper-cased message-type label.

    The label selects the output color (unknown types fall back to the
    'INFO' color). Honors the module-level DISABLE_COLOR_PRINTING flag.
    """
    TYPE_TO_COLOR: Mapping[str, ColorType] = {
        'BACKGROUND LOG': 'blue',
        'ACTION': 'green',
        'OBSERVATION': 'yellow',
        'INFO': 'cyan',
        'ERROR': 'red',
        'PLAN': 'light_magenta',
    }
    label = print_type.upper()
    color = TYPE_TO_COLOR.get(label, TYPE_TO_COLOR['INFO'])
    body = str(text)
    if DISABLE_COLOR_PRINTING:
        output = f'\n{label}:\n{body}'
    else:
        header = colored(f'\n{label}:\n', color, attrs=['bold'])
        output = header + colored(body, color)
    print(output, flush=True)
class AgentController:
id: str
agent: Agent
@ -125,9 +85,8 @@ class AgentController:
logger.info('Exited before finishing the task.')
async def step(self, i: int):
print('\n\n==============', flush=True)
print('STEP', i, flush=True)
print_with_color(self.state.plan.main_goal, 'PLAN')
logger.info(f'STEP {i}', extra={'msg_type': 'STEP'})
logger.info(self.state.plan.main_goal, extra={'msg_type': 'PLAN'})
if self.state.num_of_chars > self.max_chars:
raise MaxCharsExceedError(
self.state.num_of_chars, self.max_chars)
@ -136,7 +95,7 @@ class AgentController:
for obs in log_obs:
self.add_history(NullAction(), obs)
await self._run_callbacks(obs)
print_with_color(obs, 'BACKGROUND LOG')
logger.info(obs, extra={'msg_type': 'BACKGROUND LOG'})
self.update_state_for_step(i)
action: Action = NullAction()
@ -145,11 +104,11 @@ class AgentController:
action = self.agent.step(self.state)
if action is None:
raise AgentNoActionError()
print_with_color(action, 'ACTION')
logger.info(action, extra={'msg_type': 'ACTION'})
except Exception as e:
observation = AgentErrorObservation(str(e))
print_with_color(observation, 'ERROR')
traceback.print_exc()
logger.error(e)
if isinstance(e, APIConnectionError):
time.sleep(3)
@ -168,7 +127,7 @@ class AgentController:
finished = isinstance(action, AgentFinishAction)
if finished:
print_with_color(action, 'INFO')
logger.info(action, extra={'msg_type': 'INFO'})
return True
if isinstance(action, AddTaskAction):
@ -177,14 +136,14 @@ class AgentController:
action.parent, action.goal, action.subtasks)
except Exception as e:
observation = AgentErrorObservation(str(e))
print_with_color(observation, 'ERROR')
logger.error(e)
traceback.print_exc()
elif isinstance(action, ModifyTaskAction):
try:
self.state.plan.set_subtask_state(action.id, action.state)
except Exception as e:
observation = AgentErrorObservation(str(e))
print_with_color(observation, 'ERROR')
logger.error(e)
traceback.print_exc()
if action.executable:
@ -194,11 +153,11 @@ class AgentController:
observation = await cast(Awaitable[Observation], observation)
except Exception as e:
observation = AgentErrorObservation(str(e))
print_with_color(observation, 'ERROR')
logger.error(e)
traceback.print_exc()
if not isinstance(observation, NullObservation):
print_with_color(observation, 'OBSERVATION')
logger.info(observation, extra={'msg_type': 'OBSERVATION'})
self.add_history(action, observation)
await self._run_callbacks(observation)

View File

@ -57,3 +57,6 @@ class LLM:
Decorator for the litellm completion function.
"""
return self._completion
def __str__(self):
    """Return a short human-readable summary of this LLM wrapper."""
    model = self.model_name
    url = self.base_url
    return f'LLM(model={model}, base_url={url})'

View File

@ -3,11 +3,63 @@ import os
import sys
import traceback
from datetime import datetime
from opendevin import config
from typing import Literal, Mapping
from termcolor import colored
console_formatter = logging.Formatter(
DISABLE_COLOR_PRINTING = (
config.get('DISABLE_COLOR').lower() == 'true'
)
ColorType = Literal[
'red',
'green',
'yellow',
'blue',
'magenta',
'cyan',
'light_grey',
'dark_grey',
'light_red',
'light_green',
'light_yellow',
'light_blue',
'light_magenta',
'light_cyan',
'white',
]
LOG_COLORS: Mapping[str, ColorType] = {
'BACKGROUND LOG': 'blue',
'ACTION': 'green',
'OBSERVATION': 'yellow',
'INFO': 'cyan',
'ERROR': 'red',
'PLAN': 'light_magenta',
}
class ColoredFormatter(logging.Formatter):
    """Console formatter that colorizes records carrying a ``msg_type`` extra.

    Records logged with ``extra={'msg_type': ...}`` where the type is a key
    of LOG_COLORS get a colored header and colored message; the special
    type 'STEP' is rendered as a plain step banner; all other records fall
    back to the standard ``logging.Formatter`` output.
    """

    def format(self, record):
        """Render *record* according to its optional ``msg_type`` extra."""
        msg_type = record.__dict__.get('msg_type', None)
        if msg_type in LOG_COLORS and not DISABLE_COLOR_PRINTING:
            color = LOG_COLORS[msg_type]
            msg_type_color = colored(msg_type, color)
            # getMessage() applies lazy %-style args and coerces non-str
            # msg objects to str; raw record.msg would skip both.
            msg = colored(record.getMessage(), color)
            time_str = colored(self.formatTime(record, self.datefmt), 'green')
            name_str = colored(record.name, 'cyan')
            level_str = colored(record.levelname, 'yellow')
            return f'{time_str} - {name_str}:{level_str}: {record.filename}:{record.lineno}\n{msg_type_color}\n{msg}'
        elif msg_type == 'STEP':
            # Plain, uncolored banner (also used when colors are disabled).
            return '\n\n==============\n' + record.getMessage() + '\n'
        return super().format(record)
console_formatter = ColoredFormatter(
'\033[92m%(asctime)s - %(name)s:%(levelname)s\033[0m: %(filename)s:%(lineno)s - %(message)s',
datefmt='%H:%M:%S',
)
file_formatter = logging.Formatter(
'%(asctime)s - %(name)s:%(levelname)s: %(filename)s:%(lineno)s - %(message)s',
datefmt='%H:%M:%S',
@ -33,7 +85,7 @@ def get_file_handler():
"""
log_dir = os.path.join(os.getcwd(), 'logs')
os.makedirs(log_dir, exist_ok=True)
timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
timestamp = datetime.now().strftime('%Y-%m-%d')
file_name = f'opendevin_{timestamp}.log'
file_handler = logging.FileHandler(os.path.join(log_dir, file_name))
file_handler.setLevel(logging.DEBUG)
@ -65,24 +117,25 @@ sys.excepthook = log_uncaught_exceptions
opendevin_logger = logging.getLogger('opendevin')
opendevin_logger.setLevel(logging.INFO)
opendevin_logger.addHandler(get_console_handler())
opendevin_logger.addHandler(get_file_handler())
opendevin_logger.addHandler(get_console_handler())
opendevin_logger.propagate = False
opendevin_logger.debug('Logging initialized')
opendevin_logger.debug('Logging to %s', os.path.join(
os.getcwd(), 'logs', 'opendevin.log'))
# Exclude "litellm" from logging output
# Exclude LiteLLM from logging output
logging.getLogger('LiteLLM').disabled = True
logging.getLogger('LiteLLM Router').disabled = True
logging.getLogger('LiteLLM Proxy').disabled = True
# LLM prompt and response logging
class LlmFileHandler(logging.FileHandler):
"""
# LLM prompt and response logging
"""
def __init__(self, filename, mode='a', encoding=None, delay=False):
def __init__(self, filename, mode='a', encoding='utf-8', delay=False):
"""
Initializes an instance of LlmFileHandler.
@ -94,12 +147,11 @@ class LlmFileHandler(logging.FileHandler):
"""
self.filename = filename
self.message_counter = 1
self.session = datetime.now().strftime('%y-%m-%d_%H-%M-%S')
self.log_directory = os.path.join(
os.getcwd(), 'logs', 'llm', self.session)
self.session = datetime.now().strftime('%y-%m-%d_%H-%M')
self.log_directory = os.path.join(os.getcwd(), 'logs', 'llm', self.session)
os.makedirs(self.log_directory, exist_ok=True)
self.baseFilename = os.path.join(
self.log_directory, f'{self.filename}_{self.message_counter:03}.log')
filename = f'{self.filename}_{self.message_counter:03}.log'
self.baseFilename = os.path.join(self.log_directory, filename)
super().__init__(self.baseFilename, mode, encoding, delay)
def emit(self, record):
@ -109,8 +161,8 @@ class LlmFileHandler(logging.FileHandler):
Args:
record (logging.LogRecord): The log record to emit.
"""
self.baseFilename = os.path.join(
self.log_directory, f'{self.filename}_{self.message_counter:03}.log')
filename = f'{self.filename}_{self.message_counter:03}.log'
self.baseFilename = os.path.join(self.log_directory, filename)
self.stream = self._open()
super().emit(record)
self.stream.close
@ -122,9 +174,9 @@ def get_llm_prompt_file_handler():
"""
Returns a file handler for LLM prompt logging.
"""
llm_prompt_file_handler = LlmFileHandler('prompt')
llm_prompt_file_handler.setLevel(logging.INFO)
llm_prompt_file_handler = LlmFileHandler('prompt', delay=True)
llm_prompt_file_handler.setFormatter(llm_formatter)
llm_prompt_file_handler.setLevel(logging.DEBUG)
return llm_prompt_file_handler
@ -132,16 +184,18 @@ def get_llm_response_file_handler():
"""
Returns a file handler for LLM response logging.
"""
llm_response_file_handler = LlmFileHandler('response')
llm_response_file_handler.setLevel(logging.INFO)
llm_response_file_handler = LlmFileHandler('response', delay=True)
llm_response_file_handler.setFormatter(llm_formatter)
llm_response_file_handler.setLevel(logging.DEBUG)
return llm_response_file_handler
llm_prompt_logger = logging.getLogger('prompt')
llm_prompt_logger.propagate = False
llm_prompt_logger.setLevel(logging.DEBUG)
llm_prompt_logger.addHandler(get_llm_prompt_file_handler())
llm_response_logger = logging.getLogger('response')
llm_response_logger.propagate = False
llm_response_logger.setLevel(logging.DEBUG)
llm_response_logger.addHandler(get_llm_response_file_handler())