From c82b3378a64edeee0572b5526ba149d2ce689247 Mon Sep 17 00:00:00 2001
From: Dani <71450225+dan1dr@users.noreply.github.com>
Date: Tue, 29 Apr 2025 22:28:01 +0200
Subject: [PATCH] Fix issue #8145: Correct name for max_tokens for condenser
 in config.template.toml (#8165)

---
 config.template.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/config.template.toml b/config.template.toml
index ef7e4d9732..f924a66a32 100644
--- a/config.template.toml
+++ b/config.template.toml
@@ -391,7 +391,7 @@ type = "noop"
 #[llm.condenser]
 #model = "gpt-4o"
 #temperature = 0.1
-#max_tokens = 1024
+#max_input_tokens = 1024

 #################################### Eval ####################################
 # Configuration for the evaluation, please refer to the specific evaluation