This commit is contained in:
2025-05-25 14:01:37 +08:00
commit 8f52a10ae0
42 changed files with 1946 additions and 0 deletions

143
src/provider/core/chat.ts Normal file
View File

@@ -0,0 +1,143 @@
import { OpenAI } from 'openai';
import type {
BaseChatInterface,
ChatMessageComplete,
ChatMessage,
ChatMessageOptions,
BaseChatUsageInterface,
ChatStream,
EmbeddingMessage,
EmbeddingMessageComplete,
} from './type.ts';
export type BaseChatOptions<T = Record<string, any>> = {
  /**
   * Base URL of the OpenAI-compatible API endpoint.
   */
  baseURL: string;
  /**
   * Default model name used for requests.
   */
  model?: string;
  /**
   * API key sent as a Bearer token.
   */
  apiKey: string;
  /**
   * Whether the client runs in a browser environment.
   */
  isBrowser?: boolean;
  /**
   * Whether to stream output. Defaults to false.
   * NOTE(review): this option is declared but not read by BaseChat's
   * constructor — confirm whether callers rely on it.
   */
  stream?: boolean;
} & T;
/**
 * Base chat client wrapping the OpenAI SDK.
 *
 * Provides chat completion (blocking and streaming), embeddings, and
 * cumulative token-usage tracking.
 */
export class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
  /**
   * Base URL of the OpenAI-compatible API endpoint.
   */
  baseURL: string;
  /**
   * Default model used for chat and embedding requests.
   */
  model: string;
  /**
   * API key sent as a Bearer token.
   */
  apiKey: string;
  /**
   * Whether the client runs in a browser environment.
   */
  isBrowser: boolean;
  /**
   * Underlying OpenAI SDK client instance.
   */
  openai: OpenAI;
  // Usage counters are initialized to 0 so that getChatUsage() returns
  // numbers before the first request, and so that the `+=` accumulation in
  // generateEmbeddingCore never produces NaN (undefined + number === NaN).
  prompt_tokens = 0;
  total_tokens = 0;
  completion_tokens = 0;
  constructor(options: BaseChatOptions) {
    this.baseURL = options.baseURL;
    // `model` is optional in BaseChatOptions; default to '' so the required
    // field is always a string. Callers may still pass `model` per request.
    this.model = options.model ?? '';
    this.apiKey = options.apiKey;
    this.isBrowser = options.isBrowser ?? false;
    this.openai = new OpenAI({
      apiKey: this.apiKey,
      baseURL: this.baseURL,
      // Required by the SDK when running outside Node (browser usage).
      dangerouslyAllowBrowser: this.isBrowser,
    });
  }
  /**
   * Performs a non-streaming chat completion and records token usage.
   * @param messages Conversation messages to send.
   * @param options Extra request parameters (merged over the defaults).
   * @returns The completed chat response.
   */
  async chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete> {
    const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
      model: this.model,
      messages,
      ...options,
      // Force non-streaming regardless of what `options` carries.
      stream: false,
    };
    const res = (await this.openai.chat.completions.create(createParams)) as ChatMessageComplete;
    // Snapshot usage from this response (overwrites previous chat usage).
    this.prompt_tokens = res.usage?.prompt_tokens ?? 0;
    this.total_tokens = res.usage?.total_tokens ?? 0;
    this.completion_tokens = res.usage?.completion_tokens ?? 0;
    return res;
  }
  /**
   * Performs a streaming chat completion.
   * @param messages Conversation messages to send.
   * @param options Extra request parameters (merged over the defaults).
   * @throws Error if `response_format` is requested, which the streaming
   *         path does not support.
   * @returns The stream of chat completion chunks.
   */
  async chatStream(messages: ChatMessage[], options?: ChatMessageOptions) {
    const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
      model: this.model,
      messages,
      ...options,
      // Force streaming regardless of what `options` carries.
      stream: true,
    };
    if (createParams.response_format) {
      throw new Error('response_format is not supported in stream mode');
    }
    return this.openai.chat.completions.create(createParams) as unknown as ChatStream;
  }
  /**
   * Smoke test: sends a trivial chat request.
   */
  test() {
    return this.chat([{ role: 'user', content: 'Hello, world!' }]);
  }
  /**
   * Returns the token usage recorded from the most recent requests.
   * @returns Prompt, completion, and total token counts.
   */
  getChatUsage() {
    return {
      prompt_tokens: this.prompt_tokens,
      total_tokens: this.total_tokens,
      completion_tokens: this.completion_tokens,
    };
  }
  /**
   * Builds default JSON headers with Bearer authorization.
   * @param headers Extra headers that override the defaults.
   */
  getHeaders(headers?: Record<string, string>) {
    return {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${this.apiKey}`,
      ...headers,
    };
  }
  /**
   * Generates embeddings (internal helper) and accumulates token usage.
   * @param text Input text or batch of texts to embed.
   * @param options Extra embedding parameters; `options.model` overrides the
   *                default model.
   * @returns The embedding response.
   */
  async generateEmbeddingCore(text: string | string[], options?: EmbeddingMessage): Promise<EmbeddingMessageComplete> {
    const embeddingModel = options?.model || this.model;
    const res = await this.openai.embeddings.create({
      model: embeddingModel,
      input: text,
      encoding_format: 'float',
      ...options,
    });
    // Accumulates across calls (unlike chat(), which overwrites). Safe now
    // that the counters start at 0.
    this.prompt_tokens += res.usage.prompt_tokens;
    this.total_tokens += res.usage.total_tokens;
    return res;
  }
}