This commit is contained in:
2026-01-10 16:26:20 +08:00
parent b4033e146e
commit 31cc0c42d8
9 changed files with 326 additions and 252 deletions

View File

@@ -0,0 +1,10 @@
import { BaseChat, type BaseChatOptions } from "../chat.ts";
type MimoOptions = Partial<BaseChatOptions>;

/**
 * Chat client for the Xiaomi MiMo API (OpenAI-compatible endpoint).
 * All behavior comes from BaseChat; this class only pins the base URL.
 */
export class MimoChat extends BaseChat {
  static BASE_URL = 'https://api.xiaomimimo.com/v1';

  constructor(options: MimoOptions) {
    // Fall back to the default MiMo endpoint when the caller supplies none.
    super({
      ...(options as BaseChatOptions),
      baseURL: options.baseURL || MimoChat.BASE_URL,
    });
  }
}

View File

@@ -25,11 +25,6 @@ export class Ollama extends BaseChat {
const baseURL = options.baseURL || Ollama.BASE_URL;
super({ ...(options as BaseChatOptions), baseURL: baseURL });
}
/**
 * Chat completion — delegates straight to BaseChat.chat.
 *
 * Fix: removed leftover debug logging (`console.log('thunk', this.getChatUsage())`)
 * that printed usage stats to the console on every call.
 */
async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
  return super.chat(messages, options);
}
/**
* 获取模型列表
* @returns

View File

@@ -32,8 +32,4 @@ export class SiliconFlow extends BaseChat {
/** Fetch the account/usage information from the `/user/info` endpoint. */
async getUsageInfo(): Promise<SiliconFlowUsageResponse> {
  const info = await this.get('/user/info');
  return info;
}
/**
 * Chat completion — pure pass-through to BaseChat.chat; no
 * SiliconFlow-specific request or response handling is added here.
 */
async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
  return super.chat(messages, options);
}
}

View File

@@ -88,11 +88,17 @@ export class BaseChat implements BaseChatInterface, Usage {
/**
* 聊天
*/
async chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete> {
chat(options: ChatMessageOptions): Promise<ChatMessageComplete>;
chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
async chat(messagesOrOptions: ChatMessage[] | ChatMessageOptions, options?: ChatMessageOptions): Promise<ChatMessageComplete> {
const isFirstParamOptions = !Array.isArray(messagesOrOptions);
const messages: ChatMessage[] = isFirstParamOptions ? messagesOrOptions.messages! : messagesOrOptions;
const opts: ChatMessageOptions = isFirstParamOptions ? messagesOrOptions : options || {};
const requestBody = {
model: this.model,
messages,
...options,
...opts,
stream: false,
};
@@ -110,15 +116,21 @@ export class BaseChat implements BaseChatInterface, Usage {
this.responseText = res.choices[0]?.message?.content || '';
return res;
}
async chatStream(messages: ChatMessage[], options?: ChatMessageOptions) {
if (options?.response_format) {
chatStream(options: ChatMessageOptions): AsyncGenerator<ChatMessageComplete>;
chatStream(messages: ChatMessage[], options?: ChatMessageOptions): AsyncGenerator<ChatMessageComplete>;
async *chatStream(messagesOrOptions: ChatMessage[] | ChatMessageOptions, options?: ChatMessageOptions) {
const isFirstParamOptions = !Array.isArray(messagesOrOptions);
const messages: ChatMessage[] = isFirstParamOptions ? messagesOrOptions.messages! : messagesOrOptions;
const opts: ChatMessageOptions = isFirstParamOptions ? messagesOrOptions : options || {};
if (opts.response_format) {
throw new Error('response_format is not supported in stream mode');
}
const requestBody = {
model: this.model,
messages,
...options,
...opts,
stream: true,
};

View File

@@ -2,8 +2,8 @@ export type ChatMessage = {
role?: 'user' | 'assistant' | 'system' | 'tool';
content: string;
}
export type ChatMessageOptions = {
messages: ChatMessage[];
export type ChatMessageOptions<T = {}> = {
messages?: ChatMessage[];
/**
* 模型名称
*/
@@ -43,7 +43,7 @@ export type ChatMessageOptions = {
stream?: boolean;
/**
* 是否能够思考
* 如果会话是千文,服务器的接口,默认为 true
* 如果会话是千问 (Qwen) 模型,服务器的接口,默认为 false
*/
enable_thinking?: boolean;
response_format?: 'text' | 'json' | 'xml' | 'html';
@@ -53,7 +53,9 @@ export type ChatMessageOptions = {
*/
tool_calls?: any;
};
[key: string]: any;
} & T;
type Choice = {
finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call';
@@ -167,6 +169,7 @@ export type EmbeddingMessageComplete = {
/**
 * Minimal contract a chat provider must satisfy.
 * `chat` is overloaded: pass a single options object (which carries
 * `messages` itself), or a messages array plus optional options.
 */
export interface BaseChatInterface {
/** Options-only form: `messages` must be present inside the options object. */
chat(options: ChatMessageOptions): Promise<ChatMessageComplete>;
/** Messages-first form: options may override model/params per call. */
chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
}