add query-ai

parent 3a8396279b
commit 64aa095ffe

package.json (10 lines changed)
@@ -1,6 +1,6 @@
 {
   "name": "@kevisual/query",
-  "version": "0.0.8",
+  "version": "0.0.9-alpha.1",
   "main": "dist/index.js",
   "module": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -25,11 +25,11 @@
     "@rollup/plugin-node-resolve": "^16.0.0",
     "@rollup/plugin-typescript": "^12.1.2",
     "rollup": "^4.34.9",
+    "rollup-plugin-dts": "^6.1.1",
     "ts-node": "^10.9.2",
     "tslib": "^2.8.1",
     "typescript": "^5.8.2",
-    "zustand": "^5.0.3",
-    "rollup-plugin-dts": "^6.1.1"
+    "zustand": "^5.0.3"
   },
   "packageManager": "yarn@1.22.22",
   "publishConfig": {
@@ -53,5 +53,7 @@
       "require": "./dist/query-ws.js"
     }
   },
-  "dependencies": {}
+  "dependencies": {
+    "openai": "^4.86.1"
+  }
 }
@@ -81,4 +81,20 @@ export default [
     },
     plugins: [dts()],
   },
+  {
+    input: 'src/query-ai.ts',
+    output: {
+      file: 'dist/query-ai.js',
+      format: 'es',
+    },
+    plugins: [resolve(), typescript()],
+  },
+  {
+    input: 'src/query-ai.ts', // TypeScript entry file
+    output: {
+      file: 'dist/query-ai.d.ts', // output file
+      format: 'es', // output format set to ES module
+    },
+    plugins: [dts()],
+  },
 ];
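For context, the hunk above extends the Rollup config's export default array: the first new entry bundles src/query-ai.ts into dist/query-ai.js with resolve() and typescript(), and the second runs rollup-plugin-dts over the same entry to emit dist/query-ai.d.ts. A hypothetical consumer import, assuming a "./query-ai" subpath export pointing at these outputs is added to package.json (that mapping is not part of this diff):

// Hypothetical subpath import; assumes package.json "exports" maps "./query-ai" to dist/query-ai.js.
import { QueryAI } from '@kevisual/query/query-ai';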
src/query-ai.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
+import OpenAI, { ClientOptions } from 'openai';
+import type { RequestOptions } from 'openai/core.mjs';
+
+type QueryOpts = {
+  /**
+   * OpenAI model name, example: deepseek-chat
+   */
+  model: string;
+  /**
+   * OpenAI client options
+   * QueryAI.init() will be called with these options
+   */
+  openAiOpts?: ClientOptions;
+  openai?: OpenAI;
+};
+export class QueryAI {
+  private openai: OpenAI;
+  model?: string;
+  constructor(opts?: QueryOpts) {
+    this.model = opts?.model;
+    if (opts?.openai) {
+      this.openai = opts.openai;
+    } else if (opts?.openAiOpts) {
+      this.init(opts?.openAiOpts);
+    }
+  }
+  init(opts: ClientOptions) {
+    this.openai = new OpenAI(opts);
+  }
+  async query(prompt: string, opts?: RequestOptions) {
+    return this.openai.chat.completions.create({
+      model: this.model,
+      messages: [
+        {
+          role: 'system',
+          content: prompt,
+        },
+      ],
+      stream: false,
+      ...opts,
+    });
+  }
+  async queryAsync(prompt: string, opts?: RequestOptions) {
+    return this.openai.chat.completions.create({
+      model: this.model,
+      messages: [
+        {
+          role: 'system',
+          content: prompt,
+        },
+      ],
+      stream: true,
+      ...opts,
+    });
+  }
+}
+
+export { OpenAI };
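A minimal usage sketch of the new QueryAI class, assuming a DeepSeek-compatible endpoint; the import path, baseURL, apiKey, and prompts below are placeholders rather than part of this commit:

import { QueryAI } from './query-ai'; // placeholder path to the new module

async function demo() {
  const ai = new QueryAI({
    model: 'deepseek-chat',
    openAiOpts: {
      baseURL: 'https://api.deepseek.com', // placeholder endpoint
      apiKey: process.env.DEEPSEEK_API_KEY, // placeholder credential
    },
  });

  // query() sets stream: false, so it resolves to a single chat completion.
  const completion = await ai.query('Say hello in one short sentence.');
  console.log(completion.choices[0].message.content);

  // queryAsync() sets stream: true, so the result can be iterated chunk by chunk.
  const stream = await ai.queryAsync('Count from 1 to 5.');
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
  }
}

demo();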