diff --git a/package.json b/package.json
index 7fa9cff..f08a0f0 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@kevisual/query",
-  "version": "0.0.8",
+  "version": "0.0.9-alpha.1",
   "main": "dist/index.js",
   "module": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -25,11 +25,11 @@
     "@rollup/plugin-node-resolve": "^16.0.0",
     "@rollup/plugin-typescript": "^12.1.2",
     "rollup": "^4.34.9",
+    "rollup-plugin-dts": "^6.1.1",
     "ts-node": "^10.9.2",
     "tslib": "^2.8.1",
     "typescript": "^5.8.2",
-    "zustand": "^5.0.3",
-    "rollup-plugin-dts": "^6.1.1"
+    "zustand": "^5.0.3"
   },
   "packageManager": "yarn@1.22.22",
   "publishConfig": {
@@ -53,5 +53,7 @@
       "require": "./dist/query-ws.js"
     }
   },
-  "dependencies": {}
+  "dependencies": {
+    "openai": "^4.86.1"
+  }
 }
\ No newline at end of file
diff --git a/rollup.config.js b/rollup.config.js
index 92719f8..66b6295 100644
--- a/rollup.config.js
+++ b/rollup.config.js
@@ -81,4 +81,20 @@ export default [
     },
     plugins: [dts()],
   },
+  {
+    input: 'src/query-ai.ts',
+    output: {
+      file: 'dist/query-ai.js',
+      format: 'es',
+    },
+    plugins: [resolve(), typescript()],
+  },
+  {
+    input: 'src/query-ai.ts', // TypeScript entry file
+    output: {
+      file: 'dist/query-ai.d.ts', // emitted type declarations
+      format: 'es', // ES-module output
+    },
+    plugins: [dts()],
+  },
 ];
diff --git a/src/query-ai.ts b/src/query-ai.ts
new file mode 100644
index 0000000..e6d244f
--- /dev/null
+++ b/src/query-ai.ts
@@ -0,0 +1,73 @@
+import OpenAI, { ClientOptions } from 'openai';
+import type { RequestOptions } from 'openai/core.mjs';
+
+type QueryOpts = {
+  /**
+   * OpenAI model name, example: deepseek-chat
+   */
+  model: string;
+  /**
+   * OpenAI client options.
+   * init() is called with these when no client instance is provided.
+   */
+  openAiOpts?: ClientOptions;
+  /** Pre-built OpenAI client; takes precedence over openAiOpts. */
+  openai?: OpenAI;
+};
+
+/** Thin wrapper around the OpenAI chat-completions API. */
+export class QueryAI {
+  private openai?: OpenAI;
+  model?: string;
+  constructor(opts?: QueryOpts) {
+    this.model = opts?.model;
+    if (opts?.openai) {
+      this.openai = opts.openai;
+    } else if (opts?.openAiOpts) {
+      this.init(opts.openAiOpts);
+    }
+  }
+  /** Create (or replace) the underlying OpenAI client. */
+  init(opts: ClientOptions) {
+    this.openai = new OpenAI(opts);
+  }
+  /** Guard: client and model must be configured before any request. */
+  private ensureReady(): OpenAI {
+    if (!this.openai) {
+      throw new Error('QueryAI is not initialized: pass openai/openAiOpts or call init()');
+    }
+    if (!this.model) {
+      throw new Error('QueryAI model is not set');
+    }
+    return this.openai;
+  }
+  /**
+   * Send the prompt as a system message and resolve with the full completion.
+   * RequestOptions go in the second create() argument, not the request body.
+   */
+  async query(prompt: string, opts?: RequestOptions) {
+    const openai = this.ensureReady();
+    return openai.chat.completions.create(
+      {
+        model: this.model!, // non-null: checked in ensureReady()
+        messages: [{ role: 'system', content: prompt }],
+        stream: false,
+      },
+      opts,
+    );
+  }
+  /** Same as query(), but returns a streaming response. */
+  async queryAsync(prompt: string, opts?: RequestOptions) {
+    const openai = this.ensureReady();
+    return openai.chat.completions.create(
+      {
+        model: this.model!, // non-null: checked in ensureReady()
+        messages: [{ role: 'system', content: prompt }],
+        stream: true,
+      },
+      opts,
+    );
+  }
+}
+
+export { OpenAI };