diff --git a/openhands/memory/condenser/condenser.py b/openhands/memory/condenser/condenser.py
index abc036a73a..43c2573d2a 100644
--- a/openhands/memory/condenser/condenser.py
+++ b/openhands/memory/condenser/condenser.py
@@ -51,6 +51,7 @@ class Condenser(ABC):
 
     def __init__(self):
         self._metadata_batch: dict[str, Any] = {}
+        self._llm_metadata: dict[str, Any] = {}
 
     def add_metadata(self, key: str, value: Any) -> None:
         """Add information to the current metadata batch.
@@ -100,6 +101,7 @@ class Condenser(ABC):
 
     def condensed_history(self, state: State) -> View | Condensation:
         """Condense the state's history."""
+        self._llm_metadata = state.to_llm_metadata('condenser')
        with self.metadata_batch(state):
             return self.condense(state.view)
 
diff --git a/openhands/memory/condenser/impl/llm_summarizing_condenser.py b/openhands/memory/condenser/impl/llm_summarizing_condenser.py
index 5911e321b2..46e9c29897 100644
--- a/openhands/memory/condenser/impl/llm_summarizing_condenser.py
+++ b/openhands/memory/condenser/impl/llm_summarizing_condenser.py
@@ -133,6 +133,7 @@ CURRENT_STATE: Last flip: Heads, Haiku count: 15/20"""
 
         response = self.llm.completion(
             messages=self.llm.format_messages_for_llm(messages),
+            extra_body={'metadata': self._llm_metadata},
         )
         summary = response.choices[0].message.content
 
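
For context, here is a minimal, self-contained sketch of the pattern this diff introduces: the base condenser captures per-request LLM metadata once per condensation (before `condense()` runs), and the summarizing condenser forwards it to the LLM call through the `extra_body` parameter. Names such as `FakeLLM` and `make_llm_metadata` are illustrative stand-ins, not OpenHands APIs; the real source of the metadata is `State.to_llm_metadata('condenser')`.

```python
from abc import ABC, abstractmethod
from typing import Any


def make_llm_metadata(agent_class: str) -> dict[str, Any]:
    # Stand-in for State.to_llm_metadata(...): tags the request so provider-side
    # logs can attribute condenser traffic separately from agent traffic.
    return {'agent_class': agent_class}


class FakeLLM:
    def completion(self, messages: list[dict[str, str]], **kwargs: Any) -> str:
        # A real LiteLLM-backed client would forward extra_body to the provider.
        print('extra_body:', kwargs.get('extra_body'))
        return 'summary text'


class Condenser(ABC):
    def __init__(self) -> None:
        self._llm_metadata: dict[str, Any] = {}

    def condensed_history(self, events: list[str]) -> str:
        # Capture metadata before delegating, so any LLM call made inside
        # condense() can attach it to the request.
        self._llm_metadata = make_llm_metadata('condenser')
        return self.condense(events)

    @abstractmethod
    def condense(self, events: list[str]) -> str: ...


class LLMSummarizingCondenser(Condenser):
    def __init__(self, llm: FakeLLM) -> None:
        super().__init__()
        self.llm = llm

    def condense(self, events: list[str]) -> str:
        # Forward the captured metadata alongside the summarization request.
        return self.llm.completion(
            messages=[{'role': 'user', 'content': '\n'.join(events)}],
            extra_body={'metadata': self._llm_metadata},
        )


if __name__ == '__main__':
    condenser = LLMSummarizingCondenser(FakeLLM())
    print(condenser.condensed_history(['event 1', 'event 2']))
```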