fix: standardize TokenTracker imports and usage pattern (#9)

- Update dedup.ts and error-analyzer.ts to use TokenTracker class import
- Add optional tracker parameter for dependency injection (see the caller-side sketch below)
- Match evaluator.ts pattern for token tracking
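
A minimal caller-side sketch of the intended pattern. This is illustrative only and not part of this diff; the import paths and the runStep wrapper are assumptions, while dedupQueries, analyzeSteps, and TokenTracker are taken from the changed files.

import { TokenTracker } from "./utils/token-tracker";
import { dedupQueries } from "./tools/dedup";
import { analyzeSteps } from "./tools/error-analyzer";

async function runStep(newQueries: string[], existingQueries: string[], diary: string[]) {
  // One shared tracker is injected into both tools, so their token usage
  // accumulates in a single place instead of separate throwaway instances.
  const tracker = new TokenTracker();

  const { unique_queries } = await dedupQueries(newQueries, existingQueries, tracker);
  const { response } = await analyzeSteps(diary, tracker);

  return { unique_queries, response };
}

When the tracker argument is omitted, each tool falls back to a fresh new TokenTracker(), so its counts are not shared with the caller.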

Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: Han Xiao <han.xiao@jina.ai>
devin-ai-integration[bot] authored 2025-02-02 23:33:08 +08:00, committed by GitHub
parent 5be008e8b9
commit 48f8565291
2 changed files with 6 additions and 6 deletions

dedup.ts

@@ -1,6 +1,6 @@
import { GoogleGenerativeAI, SchemaType } from "@google/generative-ai";
import { GEMINI_API_KEY, modelConfigs } from "../config";
-import { tokenTracker } from "../utils/token-tracker";
+import { TokenTracker } from "../utils/token-tracker";
import { DedupResponse } from '../types';
@@ -110,7 +110,7 @@ Set A: ${JSON.stringify(newQueries)}
Set B: ${JSON.stringify(existingQueries)}`;
}
-export async function dedupQueries(newQueries: string[], existingQueries: string[]): Promise<{ unique_queries: string[], tokens: number }> {
+export async function dedupQueries(newQueries: string[], existingQueries: string[], tracker?: TokenTracker): Promise<{ unique_queries: string[], tokens: number }> {
try {
const prompt = getPrompt(newQueries, existingQueries);
const result = await model.generateContent(prompt);
@@ -119,7 +119,7 @@ export async function dedupQueries(newQueries: string[], existingQueries: string
const json = JSON.parse(response.text()) as DedupResponse;
console.log('Dedup:', json.unique_queries);
const tokens = usage?.totalTokenCount || 0;
-tokenTracker.trackUsage('dedup', tokens);
+(tracker || new TokenTracker()).trackUsage('dedup', tokens);
return { unique_queries: json.unique_queries, tokens };
} catch (error) {
console.error('Error in deduplication analysis:', error);

error-analyzer.ts

@@ -1,6 +1,6 @@
import {GoogleGenerativeAI, SchemaType} from "@google/generative-ai";
import { GEMINI_API_KEY, modelConfigs } from "../config";
-import { tokenTracker } from "../utils/token-tracker";
+import { TokenTracker } from "../utils/token-tracker";
import { ErrorAnalysisResponse } from '../types';
@@ -113,7 +113,7 @@ ${diaryContext.join('\n')}
`;
}
-export async function analyzeSteps(diaryContext: string[]): Promise<{ response: ErrorAnalysisResponse, tokens: number }> {
+export async function analyzeSteps(diaryContext: string[], tracker?: TokenTracker): Promise<{ response: ErrorAnalysisResponse, tokens: number }> {
try {
const prompt = getPrompt(diaryContext);
const result = await model.generateContent(prompt);
@@ -125,7 +125,7 @@ export async function analyzeSteps(diaryContext: string[]): Promise<{ response:
reason: json.blame || 'No issues found'
});
const tokens = usage?.totalTokenCount || 0;
-tokenTracker.trackUsage('error-analyzer', tokens);
+(tracker || new TokenTracker()).trackUsage('error-analyzer', tokens);
return { response: json, tokens };
} catch (error) {
console.error('Error in answer evaluation:', error);