mirror of https://github.com/dzhng/deep-research.git (synced 2025-12-25 20:36:48 +08:00)

commit ad0c3d08d7 ("simplified")
parent be18bc208d
@ -6,4 +6,4 @@ OPENAI_KEY="YOUR_KEY"
 CONTEXT_SIZE="128000"
 # If you want to use other OpenAI compatible API, add the following below:
 # OPENAI_ENDPOINT="http://localhost:11434/v1"
-# OPENAI_MODEL="llama3.1"
+# CUSTOM_MODEL="llama3.1"
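With OPENAI_MODEL renamed to CUSTOM_MODEL, a local OpenAI-compatible setup would now look like the sketch below (the endpoint and model name are the same illustrative values as the comments above, not defaults the code enforces):

    OPENAI_KEY="YOUR_KEY"
    OPENAI_ENDPOINT="http://localhost:11434/v1"
    CUSTOM_MODEL="llama3.1"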
.gitignore (vendored): 2 changes

@ -2,6 +2,8 @@
 
 # Output files
 output.md
+report.md
+answer.md
 
 # Dependencies
 node_modules
package-lock.json (generated): 104 changes

@ -9,6 +9,7 @@
       "version": "0.0.1",
       "license": "ISC",
       "dependencies": {
+        "@ai-sdk/fireworks": "^0.1.14",
         "@ai-sdk/openai": "^1.1.9",
         "@mendable/firecrawl-js": "^1.16.0",
         "ai": "^4.1.17",
@ -35,6 +36,58 @@
         "node": "22.x"
       }
     },
+    "node_modules/@ai-sdk/fireworks": {
+      "version": "0.1.14",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/fireworks/-/fireworks-0.1.14.tgz",
+      "integrity": "sha512-aVUFSIM7dV4yhVRvGcgJFComofKkKYkXqQqI/ckRrqnawWQc0nuv1YyMqHtng7GGTuCNT06imNZHkGP1+VPrww==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/openai-compatible": "0.1.13",
+        "@ai-sdk/provider": "1.0.10",
+        "@ai-sdk/provider-utils": "2.1.11"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.0.0"
+      }
+    },
+    "node_modules/@ai-sdk/fireworks/node_modules/@ai-sdk/provider": {
+      "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.10.tgz",
+      "integrity": "sha512-pco8Zl9U0xwXI+nCLc0woMtxbvjU8hRmGTseAUiPHFLYAAL8trRPCukg69IDeinOvIeo1SmXxAIdWWPZOLb4Cg==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "json-schema": "^0.4.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/@ai-sdk/fireworks/node_modules/@ai-sdk/provider-utils": {
+      "version": "2.1.11",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.1.11.tgz",
+      "integrity": "sha512-lMnXA5KaRJidzW7gQmlo/SnX6D+AKk5GxHFcQtOaGOSJNmu/qcNZc1rGaO7K5qW52OvCLXtnWudR4cc/FvMpVQ==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/provider": "1.0.10",
+        "eventsource-parser": "^3.0.0",
+        "nanoid": "^3.3.8",
+        "secure-json-parse": "^2.7.0"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.0.0"
+      },
+      "peerDependenciesMeta": {
+        "zod": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/@ai-sdk/openai": {
       "version": "1.1.9",
       "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-1.1.9.tgz",
@ -51,6 +104,57 @@
         "zod": "^3.0.0"
       }
     },
+    "node_modules/@ai-sdk/openai-compatible": {
+      "version": "0.1.13",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-0.1.13.tgz",
+      "integrity": "sha512-hgj6BdvasVXCTmJwbsiWo+e626GkmEBJKG8PYwpVq7moLWj93wJnfBNlDjxVjhZ32d5KGT32RIMZjqaX8QkClg==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/provider": "1.0.10",
+        "@ai-sdk/provider-utils": "2.1.11"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.0.0"
+      }
+    },
+    "node_modules/@ai-sdk/openai-compatible/node_modules/@ai-sdk/provider": {
+      "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.10.tgz",
+      "integrity": "sha512-pco8Zl9U0xwXI+nCLc0woMtxbvjU8hRmGTseAUiPHFLYAAL8trRPCukg69IDeinOvIeo1SmXxAIdWWPZOLb4Cg==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "json-schema": "^0.4.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/@ai-sdk/openai-compatible/node_modules/@ai-sdk/provider-utils": {
+      "version": "2.1.11",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.1.11.tgz",
+      "integrity": "sha512-lMnXA5KaRJidzW7gQmlo/SnX6D+AKk5GxHFcQtOaGOSJNmu/qcNZc1rGaO7K5qW52OvCLXtnWudR4cc/FvMpVQ==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/provider": "1.0.10",
+        "eventsource-parser": "^3.0.0",
+        "nanoid": "^3.3.8",
+        "secure-json-parse": "^2.7.0"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.0.0"
+      },
+      "peerDependenciesMeta": {
+        "zod": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/@ai-sdk/provider": {
       "version": "1.0.7",
       "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.7.tgz",
@ -25,6 +25,7 @@
     "typescript": "^5.7.3"
   },
   "dependencies": {
+    "@ai-sdk/fireworks": "^0.1.14",
    "@ai-sdk/openai": "^1.1.9",
    "@mendable/firecrawl-js": "^1.16.0",
    "ai": "^4.1.17",
@ -1,27 +1,50 @@
-import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai';
+import { createFireworks } from '@ai-sdk/fireworks';
+import { createOpenAI } from '@ai-sdk/openai';
+import { LanguageModelV1 } from 'ai';
 import { getEncoding } from 'js-tiktoken';
 
 import { RecursiveCharacterTextSplitter } from './text-splitter';
 
-interface CustomOpenAIProviderSettings extends OpenAIProviderSettings {
-  baseURL?: string;
-}
-
 // Providers
-const openai = createOpenAI({
-  apiKey: process.env.OPENAI_KEY!,
-  baseURL: process.env.OPENAI_ENDPOINT || 'https://api.openai.com/v1',
-} as CustomOpenAIProviderSettings);
+const openai = process.env.OPENAI_KEY
+  ? createOpenAI({
+      apiKey: process.env.OPENAI_KEY,
+      baseURL: process.env.OPENAI_ENDPOINT || 'https://api.openai.com/v1',
+    })
+  : undefined;
 
-const customModel = process.env.OPENAI_MODEL || 'o3-mini';
+const fireworks = process.env.FIREWORKS_KEY
+  ? createFireworks({
+      apiKey: process.env.FIREWORKS_KEY,
+    })
+  : undefined;
+
+const customModel = process.env.CUSTOM_MODEL
+  ? openai?.(process.env.CUSTOM_MODEL)
+  : undefined;
 
 // Models
 
-export const o3MiniModel = openai(customModel, {
-  reasoningEffort: customModel.startsWith('o') ? 'medium' : undefined,
+const o3MiniModel = openai?.('o3-mini', {
+  reasoningEffort: 'medium',
   structuredOutputs: true,
 });
 
+const deepSeekR1Model = fireworks?.('accounts/fireworks/models/deepseek-r1');
+
+export function getModel(): LanguageModelV1 {
+  if (customModel) {
+    return customModel;
+  }
+
+  const model = deepSeekR1Model ?? o3MiniModel;
+  if (!model) {
+    throw new Error('No model found');
+  }
+
+  return model as LanguageModelV1;
+}
+
 const MinChunkSize = 140;
 const encoder = getEncoding('o200k_base');
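For context, a minimal sketch of how a call site consumes the new resolver, mirroring the generateObject() calls updated in deep-research.ts below; the prompt and schema here are illustrative only, not part of this commit:

    import { generateObject } from 'ai';
    import { z } from 'zod';

    import { getModel } from './ai/providers';

    // Resolution order per the code above: CUSTOM_MODEL on the OpenAI-compatible
    // endpoint first, then DeepSeek R1 on Fireworks, then o3-mini on OpenAI;
    // getModel() throws if no provider is configured.
    const res = await generateObject({
      model: getModel(),
      prompt: 'Summarize the key findings.', // illustrative prompt
      schema: z.object({ summary: z.string() }),
    });
    console.log(res.object.summary);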
src/api.ts: 91 changes

@ -1,7 +1,7 @@
-import express, { Request, Response } from 'express';
 import cors from 'cors';
 import * as fs from 'fs/promises';
-import { deepResearch, writeFinalReport, writeFinalAnswer } from './deep-research';
+import express, { Request, Response } from 'express';
+
+import { deepResearch, writeFinalAnswer } from './deep-research';
 import { generateFeedback } from './feedback';
 import { OutputManager } from './output-manager';

@ -23,57 +23,29 @@ function log(...args: any[]) {
 // API endpoint to run research
 app.post('/api/research', async (req: Request, res: Response) => {
   try {
-    const { query, followUpAnswers = [], depth = 3, breadth = 3 } = req.body;
+    const { query, depth = 3, breadth = 3 } = req.body;
 
     if (!query) {
       return res.status(400).json({ error: 'Query is required' });
     }
 
-    log(`Creating research plan for: ${query}`);
-
-    // Generate follow-up questions
-    const followUpQuestions = await generateFeedback({
-      query,
-      numQuestions: 0
-    });
-
-    // Combine all information for deep research
-    let combinedQuery = `Initial Query: ${query}`;
-
-    if (followUpQuestions.length > 0 && followUpAnswers.length > 0) {
-      combinedQuery += `\nFollow-up Questions and Answers:\n${
-        followUpQuestions
-          .slice(0, followUpAnswers.length)
-          .map((q: string, i: number) => `Q: ${q}\nA: ${followUpAnswers[i]}`)
-          .join('\n')
-      }`;
-    }
-
-    log('\nResearching your topic...');
-    log('\nStarting research with progress tracking...\n');
-
-    // Track progress
-    let currentProgress = {};
 
     const { learnings, visitedUrls } = await deepResearch({
-      query: combinedQuery,
+      query,
       breadth,
       depth,
-      onProgress: (progress) => {
+      onProgress: progress => {
         output.updateProgress(progress);
-        currentProgress = progress;
       },
     });
 
     log(`\n\nLearnings:\n\n${learnings.join('\n')}`);
-    log(`\n\nVisited URLs (${visitedUrls.length}):\n\n${visitedUrls.join('\n')}`);
-    log('Writing final report...');
-
-    const report = await writeFinalReport({
-      prompt: combinedQuery,
-      learnings,
-      visitedUrls,
-    });
+    log(
+      `\n\nVisited URLs (${visitedUrls.length}):\n\n${visitedUrls.join('\n')}`,
+    );
+    log('Writing final answer...');
 
     // Save report to file with timestamp
     const timestamp = new Date().toISOString().replace(/[:.]/g, '-');

@ -81,9 +53,8 @@ app.post('/api/research', async (req: Request, res: Response) => {
     // await fs.writeFile(reportFilename, report, 'utf-8');
 
     const answer = await writeFinalAnswer({
-      prompt: combinedQuery,
+      prompt: query,
       learnings,
-      report,
     });
 
     // Save answer to file

@ -93,45 +64,17 @@ app.post('/api/research', async (req: Request, res: Response) => {
     // Return the results
     return res.json({
       success: true,
-      report,
       answer,
       learnings,
       visitedUrls,
-      reportFilename,
-      answerFilename
+      answerFilename,
     });
   } catch (error: unknown) {
     console.error('Error in research API:', error);
     return res.status(500).json({
       error: 'An error occurred during research',
-      message: error instanceof Error ? error.message : String(error)
+      message: error instanceof Error ? error.message : String(error),
     });
   }
 });
-
-// API endpoint to get follow-up questions
-app.post('/api/questions', async (req: Request, res: Response) => {
-  try {
-    const { query, numQuestions = 3 } = req.body;
-
-    if (!query) {
-      return res.status(400).json({ error: 'Query is required' });
-    }
-
-    const followUpQuestions = await generateFeedback({
-      query,
-      numQuestions
-    });
-
-    return res.json({
-      success: true,
-      questions: followUpQuestions
-    });
-  } catch (error: unknown) {
-    console.error('Error generating questions:', error);
-    return res.status(500).json({
-      error: 'An error occurred while generating questions',
-      message: error instanceof Error ? error.message : String(error)
-    });
-  }
-});

@ -141,4 +84,4 @@ app.listen(port, () => {
   console.log(`Deep Research API running on port ${port}`);
 });
 
-export default app;
+export default app;
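After this simplification a client only sends query, depth, and breadth; follow-up answers are no longer accepted. A hypothetical request sketch (the host and port are placeholders, not taken from this diff):

    const res = await fetch('http://localhost:3000/api/research', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ query: 'example topic', depth: 2, breadth: 3 }),
    });
    // Response fields per the handler above.
    const { success, answer, learnings, visitedUrls } = await res.json();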
@ -4,9 +4,9 @@ import { compact } from 'lodash-es';
 import pLimit from 'p-limit';
 import { z } from 'zod';
 
-import { o3MiniModel, trimPrompt } from './ai/providers';
-import { systemPrompt } from './prompt';
+import { getModel, trimPrompt } from './ai/providers';
 import { OutputManager } from './output-manager';
+import { systemPrompt } from './prompt';
 
 // Initialize output manager for coordinated console/progress output
 const output = new OutputManager();

@ -54,7 +54,7 @@ async function generateSerpQueries({
   learnings?: string[];
 }) {
   const res = await generateObject({
-    model: o3MiniModel,
+    model: getModel(),
     system: systemPrompt(),
     prompt: `Given the following prompt from the user, generate a list of SERP queries to research the topic. Return a maximum of ${numQueries} queries, but feel free to return less if the original prompt is clear. Make sure each query is unique and not similar to each other: <prompt>${query}</prompt>\n\n${
       learnings

@ -78,10 +78,7 @@ async function generateSerpQueries({
         .describe(`List of SERP queries, max of ${numQueries}`),
     }),
   });
-  log(
-    `Created ${res.object.queries.length} queries`,
-    res.object.queries,
-  );
+  log(`Created ${res.object.queries.length} queries`, res.object.queries);
 
   return res.object.queries.slice(0, numQueries);
 }

@ -103,7 +100,7 @@ async function processSerpResult({
   log(`Ran ${query}, found ${contents.length} contents`);
 
   const res = await generateObject({
-    model: o3MiniModel,
+    model: getModel(),
     abortSignal: AbortSignal.timeout(60_000),
     system: systemPrompt(),
     prompt: `Given the following contents from a SERP search for the query <query>${query}</query>, generate a list of learnings from the contents. Return a maximum of ${numLearnings} learnings, but feel free to return less if the contents are clear. Make sure each learning is unique and not similar to each other. The learnings should be concise and to the point, as detailed and information dense as possible. Make sure to include any entities like people, places, companies, products, things, etc in the learnings, as well as any exact metrics, numbers, or dates. The learnings will be used to research the topic further.\n\n<contents>${contents

@ -120,10 +117,7 @@ async function processSerpResult({
       ),
     }),
   });
-  log(
-    `Created ${res.object.learnings.length} learnings`,
-    res.object.learnings,
-  );
+  log(`Created ${res.object.learnings.length} learnings`, res.object.learnings);
 
   return res.object;
 }

@ -145,7 +139,7 @@ export async function writeFinalReport({
   );
 
   const res = await generateObject({
-    model: o3MiniModel,
+    model: getModel(),
     system: systemPrompt(),
     prompt: `Given the following prompt from the user, write a final report on the topic using the learnings from research. Make it as as detailed as possible, aim for 3 or more pages, include ALL the learnings from research:\n\n<prompt>${prompt}</prompt>\n\nHere are all the learnings from previous research:\n\n<learnings>\n${learningsString}\n</learnings>`,
     schema: z.object({

@ -163,32 +157,31 @@
 export async function writeFinalAnswer({
   prompt,
   learnings,
-  report,
 }: {
   prompt: string;
   learnings: string[];
-  report: string;
 }) {
+  const learningsString = trimPrompt(
+    learnings
+      .map(learning => `<learning>\n${learning}\n</learning>`)
+      .join('\n'),
+    150_000,
+  );
+
   const res = await generateObject({
-    model: o3MiniModel,
+    model: getModel(),
     system: systemPrompt(),
-    prompt: `Given the following prompt from the user, write a final answer on the topic using the learnings from research. Follow the format specified in the prompt. Do not yap or babble or include any other text than the answer besides the format specified in the prompt. Keep the answer as concise as possible - usually it should be just a few words or maximum a sentence. Try to follow the format specified in the prompt (for example, if the prompt is using Latex, the answer should be in Latex. If the prompt gives multiple answer choices, the answer should be one of the choices).
-
-<prompt>${prompt}</prompt>
-
-<report>${report}</report>
-
-<format>
-<answer>
-</answer>
-</format>
-`,
+    prompt: `Given the following prompt from the user, write a final answer on the topic using the learnings from research. Follow the format specified in the prompt. Do not yap or babble or include any other text than the answer besides the format specified in the prompt. Keep the answer as concise as possible - usually it should be just a few words or maximum a sentence. Try to follow the format specified in the prompt (for example, if the prompt is using Latex, the answer should be in Latex. If the prompt gives multiple answer choices, the answer should be one of the choices).\n\n<prompt>${prompt}</prompt>\n\nHere are all the learnings from research on the topic that you can use to help answer the prompt:\n\n<learnings>\n${learningsString}\n</learnings>`,
     schema: z.object({
-      answer: z.string().describe('The final answer'),
+      exactAnswer: z
+        .string()
+        .describe(
+          'The final answer, make it short and concise, just the answer, no other text',
+        ),
     }),
   });
 
-  return res.object.answer;
+  return res.object.exactAnswer;
 }

@ -214,7 +207,7 @@ export async function deepResearch({
     totalQueries: 0,
     completedQueries: 0,
   };
 
   const reportProgress = (update: Partial<ResearchProgress>) => {
     Object.assign(progress, update);
     onProgress?.(progress);

@ -225,12 +218,12 @@
     learnings,
     numQueries: breadth,
   });
 
   reportProgress({
     totalQueries: serpQueries.length,
-    currentQuery: serpQueries[0]?.query
+    currentQuery: serpQueries[0]?.query,
   });
 
   const limit = pLimit(ConcurrencyLimit);
 
   const results = await Promise.all(

@ -294,10 +287,7 @@ export async function deepResearch({
       }
     } catch (e: any) {
       if (e.message && e.message.includes('Timeout')) {
-        log(
-          `Timeout error running query: ${serpQuery.query}: `,
-          e,
-        );
+        log(`Timeout error running query: ${serpQuery.query}: `, e);
       } else {
         log(`Error running query: ${serpQuery.query}: `, e);
       }
@ -1,7 +1,7 @@
 import { generateObject } from 'ai';
 import { z } from 'zod';
 
-import { o3MiniModel } from './ai/providers';
+import { getModel } from './ai/providers';
 import { systemPrompt } from './prompt';
 
 export async function generateFeedback({

@ -12,7 +12,7 @@ export async function generateFeedback({
   numQuestions?: number;
 }) {
   const userFeedback = await generateObject({
-    model: o3MiniModel,
+    model: getModel(),
     system: systemPrompt(),
     prompt: `Given the following query from the user, ask some follow up questions to clarify the research direction. Return a maximum of ${numQuestions} questions, but feel free to return less if the original query is clear: <query>${query}</query>`,
     schema: z.object({
@ -4,13 +4,13 @@ export class OutputManager {
   private progressLines: number = 4;
   private progressArea: string[] = [];
   private initialized: boolean = false;
 
   constructor() {
     // Initialize terminal
     process.stdout.write('\n'.repeat(this.progressLines));
     this.initialized = true;
   }
 
   log(...args: any[]) {
     // Move cursor up to progress area
     if (this.initialized) {

@ -25,26 +25,28 @@ export class OutputManager {
       this.drawProgress();
     }
   }
 
   updateProgress(progress: ResearchProgress) {
     this.progressArea = [
-      `Depth: [${this.getProgressBar(progress.totalDepth - progress.currentDepth, progress.totalDepth)}] ${Math.round((progress.totalDepth - progress.currentDepth) / progress.totalDepth * 100)}%`,
-      `Breadth: [${this.getProgressBar(progress.totalBreadth - progress.currentBreadth, progress.totalBreadth)}] ${Math.round((progress.totalBreadth - progress.currentBreadth) / progress.totalBreadth * 100)}%`,
-      `Queries: [${this.getProgressBar(progress.completedQueries, progress.totalQueries)}] ${Math.round(progress.completedQueries / progress.totalQueries * 100)}%`,
-      progress.currentQuery ? `Current: ${progress.currentQuery}` : ''
+      `Depth: [${this.getProgressBar(progress.totalDepth - progress.currentDepth, progress.totalDepth)}] ${Math.round(((progress.totalDepth - progress.currentDepth) / progress.totalDepth) * 100)}%`,
+      `Breadth: [${this.getProgressBar(progress.totalBreadth - progress.currentBreadth, progress.totalBreadth)}] ${Math.round(((progress.totalBreadth - progress.currentBreadth) / progress.totalBreadth) * 100)}%`,
+      `Queries: [${this.getProgressBar(progress.completedQueries, progress.totalQueries)}] ${Math.round((progress.completedQueries / progress.totalQueries) * 100)}%`,
+      progress.currentQuery ? `Current: ${progress.currentQuery}` : '',
     ];
     this.drawProgress();
   }
 
   private getProgressBar(value: number, total: number): string {
-    const width = process.stdout.columns ? Math.min(30, process.stdout.columns - 20) : 30;
+    const width = process.stdout.columns
+      ? Math.min(30, process.stdout.columns - 20)
+      : 30;
     const filled = Math.round((width * value) / total);
     return '█'.repeat(filled) + ' '.repeat(width - filled);
   }
 
   private drawProgress() {
     if (!this.initialized || this.progressArea.length === 0) return;
 
     // Move cursor to progress area
     const terminalHeight = process.stdout.rows || 24;
     process.stdout.write(`\x1B[${terminalHeight - this.progressLines};1H`);
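Note that the added parentheses in the percentage math above are purely cosmetic: / and * have equal precedence and associate left to right, so both spellings compute the same value. For example:

    // With totalDepth = 4 and currentDepth = 1:
    Math.round((4 - 1) / 4 * 100);   // 75, parses as ((4 - 1) / 4) * 100
    Math.round(((4 - 1) / 4) * 100); // 75, same value, intent made explicit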
@ -1,76 +0,0 @@
-import { ResearchProgress } from './deep-research';
-
-export class ProgressManager {
-  private lastProgress: ResearchProgress | undefined;
-  private progressLines: number = 4; // Fixed number of lines for progress display
-  private initialized: boolean = false;
-
-  constructor() {
-    // Initialize terminal
-    process.stdout.write('\n'.repeat(this.progressLines));
-  }
-
-  private drawProgressBar(
-    label: string,
-    value: number,
-    total: number,
-    char: string = '='
-  ): string {
-    const width = process.stdout.columns ? Math.min(30, process.stdout.columns - 20) : 30;
-    const percent = (value / total) * 100;
-    const filled = Math.round((width * percent) / 100);
-    const empty = width - filled;
-
-    return `${label} [${char.repeat(filled)}${' '.repeat(empty)}] ${Math.round(percent)}%`;
-  }
-
-  updateProgress(progress: ResearchProgress) {
-    // Store progress for potential redraw
-    this.lastProgress = progress;
-
-    // Calculate position for progress bars (at bottom of terminal)
-    const terminalHeight = process.stdout.rows || 24;
-    const progressStart = terminalHeight - this.progressLines;
-
-    // Move cursor to progress area
-    process.stdout.write(`\x1B[${progressStart};1H\x1B[0J`);
-
-    // Draw progress bars horizontally
-    const lines = [
-      this.drawProgressBar(
-        'Depth: ',
-        progress.totalDepth - progress.currentDepth,
-        progress.totalDepth,
-        '█'
-      ),
-      this.drawProgressBar(
-        'Breadth: ',
-        progress.totalBreadth - progress.currentBreadth,
-        progress.totalBreadth,
-        '█'
-      ),
-      this.drawProgressBar(
-        'Queries: ',
-        progress.completedQueries,
-        progress.totalQueries,
-        '█'
-      ),
-    ];
-
-    // Add current query if available
-    if (progress.currentQuery) {
-      lines.push(`Current: ${progress.currentQuery}`);
-    }
-
-    // Output progress bars at fixed position
-    process.stdout.write(lines.join('\n') + '\n');
-
-    // Move cursor back up for next output
-    process.stdout.write(`\x1B[${this.progressLines}A`);
-  }
-
-  stop() {
-    // Move cursor past progress area
-    process.stdout.write(`\x1B[${this.progressLines}B\n`);
-  }
-}
src/run.ts: 60 changes

@ -1,8 +1,11 @@
 import * as fs from 'fs/promises';
 import * as readline from 'readline';
 
-import { deepResearch, writeFinalAnswer, writeFinalReport } from './deep-research';
+import {
+  deepResearch,
+  writeFinalAnswer,
+  writeFinalReport,
+} from './deep-research';
 import { generateFeedback } from './feedback';
 import { OutputManager } from './output-manager';

@ -45,13 +48,16 @@ async function run() {
     await askQuestion('Enter research depth (recommended 1-5, default 2): '),
     10,
   ) || 2;
+  const isReport =
+    (await askQuestion(
+      'Do you want to generate a long report or a specific answer? (report/answer)',
+    )) === 'report';
 
   log(`Creating research plan...`);
 
   // Generate follow-up questions
   const followUpQuestions = await generateFeedback({
     query: initialQuery,
     numQuestions: 0
   });
 
   log(
@ -72,47 +78,43 @@ Follow-up Questions and Answers:
 ${followUpQuestions.map((q: string, i: number) => `Q: ${q}\nA: ${answers[i]}`).join('\n')}
 `;
 
-  log('\nResearching your topic...');
+  log('\nStarting research...\n');
 
-  log('\nStarting research with progress tracking...\n');
 
   const { learnings, visitedUrls } = await deepResearch({
     query: combinedQuery,
     breadth,
     depth,
-    onProgress: (progress) => {
+    onProgress: progress => {
       output.updateProgress(progress);
     },
   });
 
   log(`\n\nLearnings:\n\n${learnings.join('\n')}`);
-  log(
-    `\n\nVisited URLs (${visitedUrls.length}):\n\n${visitedUrls.join('\n')}`,
-  );
+  log(`\n\nVisited URLs (${visitedUrls.length}):\n\n${visitedUrls.join('\n')}`);
-  log('Writing final report...');
 
-  const report = await writeFinalReport({
-    prompt: combinedQuery,
-    learnings,
-    visitedUrls,
-  });
+  if (isReport) {
+    const report = await writeFinalReport({
+      prompt: combinedQuery,
+      learnings,
+      visitedUrls,
+    });
+
+    // Save report to file
+    await fs.writeFile('report.md', report, 'utf-8');
+    console.log(`\n\nFinal Report:\n\n${report}`);
+    console.log('\nReport has been saved to report.md');
+  } else {
+    const answer = await writeFinalAnswer({
+      prompt: combinedQuery,
+      learnings,
+    });
+
+    await fs.writeFile('answer.md', answer, 'utf-8');
+    console.log(`\n\nFinal Answer:\n\n${answer}`);
+    console.log('\nAnswer has been saved to answer.md');
+  }
 
-  // Save report to file
-  await fs.writeFile('output.md', report, 'utf-8');
-
-  console.log(`\n\nFinal Report:\n\n${report}`);
-  console.log('\nReport has been saved to output.md');
   rl.close();
-
-  const answer = await writeFinalAnswer({
-    prompt: combinedQuery,
-    learnings,
-    report,
-  });
-
-  console.log(`\n\nFinal Answer:\n\n${answer}`);
-
-  await fs.writeFile('answer.md', answer, 'utf-8');
 }
 
 run().catch(console.error);