add query-ai

This commit is contained in:
xion 2025-03-02 12:14:09 +08:00
parent 3a8396279b
commit 64aa095ffe
3 changed files with 80 additions and 4 deletions

View File

@ -1,6 +1,6 @@
{
"name": "@kevisual/query",
"version": "0.0.8",
"version": "0.0.9-alpha.1",
"main": "dist/index.js",
"module": "dist/index.js",
"types": "dist/index.d.ts",
@ -25,11 +25,11 @@
"@rollup/plugin-node-resolve": "^16.0.0",
"@rollup/plugin-typescript": "^12.1.2",
"rollup": "^4.34.9",
"rollup-plugin-dts": "^6.1.1",
"ts-node": "^10.9.2",
"tslib": "^2.8.1",
"typescript": "^5.8.2",
"zustand": "^5.0.3",
"rollup-plugin-dts": "^6.1.1"
"zustand": "^5.0.3"
},
"packageManager": "yarn@1.22.22",
"publishConfig": {
@ -53,5 +53,7 @@
"require": "./dist/query-ws.js"
}
},
"dependencies": {}
"dependencies": {
"openai": "^4.86.1"
}
}

View File

@ -81,4 +81,20 @@ export default [
},
plugins: [dts()],
},
{
input: 'src/query-ai.ts',
output: {
file: 'dist/query-ai.js',
format: 'es',
},
plugins: [resolve(), typescript()],
},
{
input: 'src/query-ai.ts', // TypeScript 入口文件
output: {
file: 'dist/query-ai.d.ts', // 输出文件
format: 'es', // 输出格式设置为 ES 模块
},
plugins: [dts()],
},
];

58
src/query-ai.ts Normal file
View File

@ -0,0 +1,58 @@
import OpenAI, { ClientOptions } from 'openai';
import type { RequestOptions } from 'openai/core.mjs';
type QueryOpts = {
  /**
   * OpenAI model name, example: deepseek-chat
   * (forwarded as the `model` field of chat.completions.create)
   */
  model: string;
  /**
   * OpenAI client options
   * QueryAI.init() will be called with these options
   */
  openAiOpts?: ClientOptions;
  // Pre-constructed client; the constructor uses this directly and
  // ignores openAiOpts when both are supplied.
  openai?: OpenAI;
};
/**
 * Thin wrapper around the OpenAI chat-completions API that sends a single
 * prompt as a system message, either as a one-shot response (query) or as
 * a stream (queryAsync).
 */
export class QueryAI {
  // Definite-assignment assertion: the client may legitimately be set
  // later via init() instead of in the constructor.
  private openai!: OpenAI;
  model?: string;

  /**
   * @param opts - optional model name plus either a ready OpenAI client
   *   (`openai`) or `ClientOptions` used to build one via init().
   */
  constructor(opts?: QueryOpts) {
    this.model = opts?.model;
    if (opts?.openai) {
      this.openai = opts.openai;
    } else if (opts?.openAiOpts) {
      this.init(opts.openAiOpts);
    }
  }

  /** (Re)create the underlying OpenAI client from client options. */
  init(opts: ClientOptions) {
    this.openai = new OpenAI(opts);
  }

  /**
   * Validate that a client and model are configured before any request.
   * Throws a descriptive Error instead of the opaque TypeError the raw
   * property access would produce.
   */
  private ensureReady(): { client: OpenAI; model: string } {
    if (!this.openai) {
      throw new Error('QueryAI: OpenAI client not initialized — pass `openai` or `openAiOpts` to the constructor, or call init() first.');
    }
    if (!this.model) {
      throw new Error('QueryAI: no model configured — set `model` in the constructor options.');
    }
    return { client: this.openai, model: this.model };
  }

  /**
   * Send `prompt` as a system message and resolve with the complete
   * (non-streaming) chat completion.
   *
   * @param prompt - text sent as the system message content
   * @param opts - per-request options (timeout, headers, …); passed as the
   *   SDK's second argument rather than spread into the request body
   * @throws Error when the client or model has not been configured
   */
  async query(prompt: string, opts?: RequestOptions) {
    const { client, model } = this.ensureReady();
    return client.chat.completions.create(
      {
        model,
        messages: [
          {
            role: 'system',
            content: prompt,
          },
        ],
        stream: false,
      },
      opts,
    );
  }

  /**
   * Same request as query() but with `stream: true`; resolves with an
   * async-iterable stream of completion chunks.
   *
   * @param prompt - text sent as the system message content
   * @param opts - per-request options (timeout, headers, …)
   * @throws Error when the client or model has not been configured
   */
  async queryAsync(prompt: string, opts?: RequestOptions) {
    const { client, model } = this.ensureReady();
    return client.chat.completions.create(
      {
        model,
        messages: [
          {
            role: 'system',
            content: prompt,
          },
        ],
        stream: true,
      },
      opts,
    );
  }
}
export { OpenAI };