/**
 * Minified by jsDelivr using Terser v5.39.0.
 * Original file: /npm/gpt-repo@0.0.7/dist/index.js
 *
 * Do NOT use SRI with dynamically generated files! More information: https://www.jsdelivr.com/using-sri-with-dynamic-files
 */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

const tslib_1 = require("tslib");
const fs = tslib_1.__importStar(require("fs"));
const path = tslib_1.__importStar(require("path"));
const readline = tslib_1.__importStar(require("readline"));
const util_1 = require("util");
const openai_1 = tslib_1.__importDefault(require("openai"));
const dotenv = tslib_1.__importStar(require("dotenv"));

// Load OPENAI_API_KEY (and any other variables) from a local .env file.
dotenv.config();

// Instruction prepended to every file's contents before it is sent to the
// chat model. Kept byte-identical to the shipped prompt (including its
// original wording) so model behavior is unchanged.
const PROMPT = "Summarize the following code file. Format your response as a bullet-pointed list with no extra whitespace, designed for consumption by another gpt model. Be succint, concise, and comprehensive. Include correct symbol names in your response, so responses from multiple files can refer to the same symbols. Describe all functionality of each symbol, such as naming each method and it's arguments or signature. Come up with a concise notation, keep in mind your response will be parsed by a gpt model.Include a header at the top of the name of the file, and other metadata you find important. Include full specification of data types for all functions, events, event handlers, etc. Indicate what is exported and imported, leaving nothing out. \nDescribe the libraries and technological components used to implement each piece of functionality, such as specifying a service is a moleculer service.";

// Promisified fs primitives (file predates widespread fs.promises usage here).
const readdir = (0, util_1.promisify)(fs.readdir);
const readFile = (0, util_1.promisify)(fs.readFile);
const writeFile = (0, util_1.promisify)(fs.writeFile);
const access = (0, util_1.promisify)(fs.access);
const mkdir = (0, util_1.promisify)(fs.mkdir);

// Directory where per-file model responses are written.
const OUTPUT_DIR = "data";

const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const client = new openai_1.default({ apiKey: process.env.OPENAI_API_KEY });

// Promise wrapper around readline's callback-style question().
const question = (query) => new Promise((resolve) => rl.question(query, resolve));

/**
 * Recursively collect every file ending in ".ts" under `dir`.
 *
 * @param {string} dir - Directory to walk (resolved relative to cwd).
 * @returns {Promise<string[]>} Absolute paths of all .ts files found.
 */
async function getTsFiles(dir) {
  const entries = await readdir(dir);
  const resolved = await Promise.all(
    entries.map(async (entry) => {
      const fullPath = path.resolve(dir, entry);
      const stats = await fs.promises.stat(fullPath);
      // Directories recurse (yielding an array); files yield a single path.
      return stats.isDirectory() ? getTsFiles(fullPath) : fullPath;
    })
  );
  return resolved.flat().filter((file) => file.endsWith(".ts"));
}

/**
 * Summarize each file via the OpenAI chat completions API, writing each
 * response to data/a-<basename>.response. Files whose response file already
 * exists are skipped, so runs are resumable.
 *
 * @param {string[]} files - Paths of files to process, sequentially.
 * @returns {Promise<void>}
 */
async function processFiles(files) {
  // FIX: ensure the output directory exists before writing. Previously
  // writeFile failed with ENOENT when ./data was missing on a fresh run.
  await mkdir(OUTPUT_DIR, { recursive: true });
  for (const file of files) {
    const responsePath = path.join(OUTPUT_DIR, `a-${path.basename(file)}.response`);
    try {
      await access(responsePath);
      console.log(`Skipping ${file}, already processed.`);
      continue;
    } catch {
      // access() threw, so no response file exists yet — process this file.
    }
    const contents = await readFile(file, "utf-8");
    const message = `${PROMPT}Path: ${file}\n\`\`\`${contents}\`\`\``;
    console.log(`Completing ${file}...`);
    const completion = await client.chat.completions.create({
      messages: [{ role: "user", content: message }],
      model: "gpt-3.5-turbo",
    });
    // content may be null per the API types; fall back to empty string.
    await writeFile(responsePath, completion.choices[0].message.content ?? "");
    console.log(`Processed and saved response for ${file}`);
  }
}

/**
 * Entry point: prompt the user for a directory, gather its .ts files, and
 * summarize each one.
 *
 * @returns {Promise<void>}
 */
async function main() {
  const dir = await question("Enter the path to the directory: ");
  // Close readline immediately so stdin does not keep the process alive.
  rl.close();
  const files = await getTsFiles(dir);
  console.log(`Found ${files.length} TypeScript files.`);
  await processFiles(files);
}

main().catch(console.error);
//# sourceMappingURL=/sm/1c745f94b52d1ed2d9ba9d0e31aca3fb8e347547fb7b9d51d51df3bfd9d94d82.map