saas: llm usage gets a 3x multiplier (#124)

* pricing-change: llm consumption x3

* fix: llm usage condition
Yanlong Wang 2025-09-15 12:46:08 +08:00 committed by GitHub
parent 06b91dbcf8
commit 58af7eb3a8
2 changed files with 6 additions and 4 deletions


@@ -58,7 +58,7 @@ export async function readUrl(
   tokenTracker.trackUsage('read', {
     totalTokens: tokens,
     promptTokens: url.length,
-    completionTokens: tokens
+    completionTokens: 0
   });
   return { response: data };
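
Setting completionTokens to 0 here is what keeps URL reads out of the 3x bucket: the multiplier in TokenTracker (next hunk) only fires when completionTokens > 0. A minimal sketch of how the two kinds of usage end up being recorded, with hypothetical token counts and a hypothetical 'agent' label (only the 'read' call shape comes from the diff):

  // Read usage: completionTokens stays 0, so it is billed at 1x.
  tokenTracker.trackUsage('read', {
    totalTokens: 1200,
    promptTokens: 42,        // url.length in the real code
    completionTokens: 0
  });

  // LLM usage: completionTokens > 0, so getTotalUsage() applies the 3x multiplier.
  tokenTracker.trackUsage('agent', {
    totalTokens: 1500,
    promptTokens: 1000,
    completionTokens: 500
  });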


@@ -31,9 +31,11 @@ export class TokenTracker extends EventEmitter {
   getTotalUsage(): LanguageModelUsage {
     return this.usages.reduce((acc, { usage }) => {
-      acc.promptTokens += usage.promptTokens;
-      acc.completionTokens += usage.completionTokens;
-      acc.totalTokens += usage.totalTokens;
+      // completionTokens > 0 means LLM usage, apply the 3x multiplier
+      const scaler = usage.completionTokens > 0 ? 3 : 1;
+      acc.promptTokens += usage.promptTokens * scaler;
+      acc.completionTokens += usage.completionTokens * scaler;
+      acc.totalTokens += usage.totalTokens * scaler;
       return acc;
     }, { promptTokens: 0, completionTokens: 0, totalTokens: 0 });
   }
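
A standalone sketch of the new accounting, to make the pricing effect concrete. The reduce body is lifted from the diff above; the LanguageModelUsage shape and the sample records are assumptions, not part of the change:

  interface LanguageModelUsage {
    promptTokens: number;
    completionTokens: number;
    totalTokens: number;
  }

  const usages: { usage: LanguageModelUsage }[] = [
    // read: completionTokens === 0, counted at 1x
    { usage: { promptTokens: 42, completionTokens: 0, totalTokens: 1200 } },
    // LLM call: completionTokens > 0, counted at 3x
    { usage: { promptTokens: 1000, completionTokens: 500, totalTokens: 1500 } },
  ];

  const total = usages.reduce((acc, { usage }) => {
    const scaler = usage.completionTokens > 0 ? 3 : 1;
    acc.promptTokens += usage.promptTokens * scaler;
    acc.completionTokens += usage.completionTokens * scaler;
    acc.totalTokens += usage.totalTokens * scaler;
    return acc;
  }, { promptTokens: 0, completionTokens: 0, totalTokens: 0 });

  console.log(total);
  // -> { promptTokens: 3042, completionTokens: 1500, totalTokens: 5700 }

With these sample numbers the read contributes its 1200 tokens unchanged, while the LLM call's 1500 tokens are billed as 4500, giving a 5700-token total.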