fix: update token counting to use content length estimation

Co-Authored-By: Han Xiao <han.xiao@jina.ai>
Committed by: Devin AI
Date: 2025-02-11 09:32:41 +00:00
parent fbb60198cb
commit fd7dd8dbe8
3 changed files with 12 additions and 4 deletions

View file (diff excerpt below)

@@ -404,11 +404,10 @@ app.post('/v1/chat/completions', (async (req: Request, res: Response) => {
requestId
});
// Track error as rejected tokens with Vercel token counting
// Track error tokens using content length estimation
const errorMessage = error?.message || 'An error occurred';
// Default to 1 token for errors as per Vercel AI SDK convention
const errorTokens = 1;
context.tokenTracker.trackUsage('evaluator', errorTokens, TOKEN_CATEGORIES.REJECTED);
const estimatedTokens = Math.ceil(Buffer.byteLength(errorMessage, 'utf-8') / 4);
context.tokenTracker.trackUsage('evaluator', estimatedTokens, TOKEN_CATEGORIES.REJECTED);
// Clean up event listeners
context.actionTracker.removeAllListeners('action');