init
src/provider/chat-adapter/custom.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { BaseChat, BaseChatOptions } from '../core/chat.ts';

export type CustomOptions = BaseChatOptions;

/**
 * Custom model adapter.
 */
export class Custom extends BaseChat {
  // Defaults to the DeepSeek endpoint; override via options.baseURL.
  static BASE_URL = 'https://api.deepseek.com/v1/';
  constructor(options: CustomOptions) {
    const baseURL = options.baseURL || Custom.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
}
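Every adapter in this commit follows the same constructor pattern: a static BASE_URL that options.baseURL can override before the merged options are passed to BaseChat. A minimal usage sketch, assuming BaseChatOptions carries apiKey and model fields (neither is shown in this diff; the import path depends on where the sketch lives):

import { Custom } from './src/provider/chat-adapter/custom.ts';

const custom = new Custom({
  apiKey: process.env.CUSTOM_API_KEY!, // assumed BaseChatOptions field
  model: 'deepseek-chat',              // assumed BaseChatOptions field
  baseURL: 'https://example.com/v1/',  // optional; falls back to Custom.BASE_URL
});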
src/provider/chat-adapter/deepseek.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { BaseChat, BaseChatOptions } from '../core/chat.ts';

export type DeepSeekOptions = Partial<BaseChatOptions>;
export class DeepSeek extends BaseChat {
  static BASE_URL = 'https://api.deepseek.com/v1/';
  constructor(options: DeepSeekOptions) {
    const baseURL = options.baseURL || DeepSeek.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
}
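A hedged sketch of a chat call through the DeepSeek adapter. The chat method is inherited from BaseChat (its signature is taken from the override in ollama.ts below), the message shape is assumed to follow the OpenAI role/content convention typed explicitly in siliconflow.ts, and apiKey is an assumed BaseChatOptions field:

import { DeepSeek } from './src/provider/chat-adapter/deepseek.ts';

const deepseek = new DeepSeek({ apiKey: process.env.DEEPSEEK_API_KEY! }); // apiKey assumed

const res = await deepseek.chat([{ role: 'user', content: 'Hello!' }]);
console.log(res);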
src/provider/chat-adapter/model-scope.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
// https://api-inference.modelscope.cn/v1/
import { BaseChat, BaseChatOptions } from '../core/chat.ts';

export type ModelScopeOptions = Partial<BaseChatOptions>;
export class ModelScope extends BaseChat {
  static BASE_URL = 'https://api-inference.modelscope.cn/v1/';
  constructor(options: ModelScopeOptions) {
    const baseURL = options.baseURL || ModelScope.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
}
src/provider/chat-adapter/ollama.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
import { BaseChat, BaseChatOptions } from '../core/index.ts';
import type { ChatMessage, ChatMessageOptions } from '../core/index.ts';

export type OllamaOptions = Partial<BaseChatOptions>;

type OllamaModel = {
  name: string;
  model: string;
  modified_at: string;
  size: number;
  digest: string;
  details: {
    parent_model: string;
    format: string; // example: gguf
    family: string; // example: qwen
    families: string[];
    parameter_size: string;
    quantization_level: string; // example: Q4_K_M, Q4_0
  };
};
export class Ollama extends BaseChat {
  static BASE_URL = 'http://localhost:11434/v1';
  constructor(options: OllamaOptions) {
    const baseURL = options.baseURL || Ollama.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
  async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
    const res = await super.chat(messages, options);
    // Debug output: token usage accumulated by the base class for this call.
    console.log('thunk', this.getChatUsage());
    return res;
  }
  /**
   * Get the list of locally installed models.
   * @returns the models reported by the Ollama /api/tags endpoint
   */
  async listModels(): Promise<{ models: OllamaModel[] }> {
    const _url = new URL(this.baseURL);
    const tagsURL = new URL('/api/tags', _url);
    return this.openai.get(tagsURL.toString());
  }
  /**
   * Get the list of currently loaded (running) models.
   * @returns the models reported by the Ollama /api/ps endpoint
   */
  async listRunModels(): Promise<{ models: OllamaModel[] }> {
    const _url = new URL(this.baseURL);
    const psURL = new URL('/api/ps', _url);
    return this.openai.get(psURL.toString());
  }
}
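A short usage sketch for the extra Ollama endpoints. Because '/api/tags' and '/api/ps' are absolute paths, new URL() replaces the '/v1' path of baseURL, so the requests go to the Ollama host directly (e.g. http://localhost:11434/api/tags):

import { Ollama } from './src/provider/chat-adapter/ollama.ts';

const ollama = new Ollama({}); // defaults to http://localhost:11434/v1

const { models } = await ollama.listModels(); // GET /api/tags
for (const m of models) {
  console.log(m.name, m.details.parameter_size, m.details.quantization_level);
}

const running = await ollama.listRunModels(); // GET /api/ps
console.log('loaded:', running.models.map((m) => m.model));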
src/provider/chat-adapter/siliconflow.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
import { OpenAI } from 'openai';

export type SiliconFlowOptions = Partial<BaseChatOptions>;

type SiliconFlowUsageData = {
  id: string;
  name: string;
  image: string;
  email: string;
  isAdmin: boolean;
  balance: string;
  status: 'normal' | 'suspended' | 'expired' | string; // account status
  introduce: string;
  role: string;
  chargeBalance: string;
  totalBalance: string;
  category: string;
};
type SiliconFlowUsageResponse = {
  code: number;
  message: string;
  status: boolean;
  data: SiliconFlowUsageData;
};
export class SiliconFlow extends BaseChat {
  static BASE_URL = 'https://api.siliconflow.cn/v1';
  constructor(options: SiliconFlowOptions) {
    const baseURL = options.baseURL || SiliconFlow.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
  async getUsageInfo(): Promise<SiliconFlowUsageResponse> {
    return this.openai.get('/user/info');
  }
  async chat(messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[], options?: Partial<OpenAI.Chat.Completions.ChatCompletionCreateParams>) {
    const res = await super.chat(messages, options);
    return res;
  }
}
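A usage sketch for the SiliconFlow balance check; apiKey is an assumed BaseChatOptions field, and the '/user/info' path is resolved by the underlying client relative to the configured baseURL:

import { SiliconFlow } from './src/provider/chat-adapter/siliconflow.ts';

const sf = new SiliconFlow({ apiKey: process.env.SILICONFLOW_API_KEY! }); // apiKey assumed

const usage = await sf.getUsageInfo(); // GET /user/info
if (usage.status) {
  console.log(`balance: ${usage.data.totalBalance}, status: ${usage.data.status}`);
}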
src/provider/chat-adapter/volces.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { BaseChat, BaseChatOptions } from '../core/chat.ts';

export type VolcesOptions = Partial<BaseChatOptions>;
export class Volces extends BaseChat {
  static BASE_URL = 'https://ark.cn-beijing.volces.com/api/v3/';
  constructor(options: VolcesOptions) {
    const baseURL = options.baseURL || Volces.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL: baseURL });
  }
}