This commit is contained in:
2025-12-08 14:29:20 +08:00
parent bcef28cae3
commit 809bf4e373
5 changed files with 249 additions and 2581 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "@kevisual/ai",
"version": "0.0.16",
"version": "0.0.17",
"description": "AI Center Services",
"main": "index.js",
"basename": "/root/ai-center-services",
@@ -53,40 +53,27 @@
}
},
"devDependencies": {
"@kevisual/code-center-module": "0.0.24",
"@kevisual/mark": "0.0.7",
"@kevisual/router": "0.0.33",
"@kevisual/router": "0.0.36",
"@kevisual/types": "^0.0.10",
"@kevisual/use-config": "^1.0.21",
"@types/bun": "^1.3.3",
"@types/bun": "^1.3.4",
"@types/crypto-js": "^4.2.2",
"@types/formidable": "^3.4.6",
"@types/lodash-es": "^4.17.12",
"@types/node": "^24.10.1",
"@vitejs/plugin-basic-ssl": "^2.1.0",
"cookie": "^1.1.1",
"cross-env": "^10.1.0",
"crypto-js": "^4.2.0",
"dayjs": "^1.11.19",
"dotenv": "^17.2.3",
"formidable": "^3.5.4",
"ioredis": "^5.8.2",
"json5": "^2.2.3",
"lodash-es": "^4.17.21",
"openai": "6.10.0",
"pm2": "^6.0.14",
"rimraf": "^6.1.2",
"rollup": "^4.53.3",
"rollup-plugin-dts": "^6.3.0",
"sequelize": "^6.37.7",
"tape": "^5.9.0",
"tiktoken": "^1.0.22",
"typescript": "^5.9.3",
"vite": "^7.2.6"
},
"dependencies": {
"@kevisual/logger": "^0.0.4",
"@kevisual/permission": "^0.0.3",
"@kevisual/query": "^0.0.30"
"@kevisual/query": "^0.0.31"
}
}

2739
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,14 @@
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
export type KevisualOptions = Partial<BaseChatOptions>;
/**
 * Kevisual Chat Adapter.
 *
 * Specializes BaseChat with the Kevisual API gateway as the default
 * endpoint; a caller-supplied (truthy) baseURL takes precedence.
 */
export class Kevisual extends BaseChat {
  static BASE_URL = 'https://newapi.kevisual.cn/v1/';

  constructor(options: KevisualOptions) {
    // Resolve the endpoint before delegating to the base implementation.
    const chatOptions: BaseChatOptions = {
      ...(options as BaseChatOptions),
      baseURL: options.baseURL || Kevisual.BASE_URL,
    };
    super(chatOptions);
  }
}

View File

@@ -10,6 +10,7 @@ import { ModelScope } from './chat-adapter/model-scope.ts';
import { BailianChat } from './chat-adapter/dashscope.ts';
import { Zhipu } from './chat-adapter/zhipu.ts';
import { Kimi } from './chat-adapter/kimi.ts';
import { Kevisual } from './chat-adapter/kevisual.ts';
import { ChatMessage } from './core/type.ts';
@@ -25,16 +26,19 @@ export {
Zhipu,
Kimi,
ChatMessage,
Kevisual,
}
export const OllamaProvider = Ollama;
export const SiliconFlowProvider = SiliconFlow;
export const CustomProvider = Custom;
export const VolcesProvider = Volces;
export const DeepSeekProvider = DeepSeek;
export const ModelScopeProvider = ModelScope;
export const BailianProvider = BailianChat;
export const ZhipuProvider = Zhipu;
export const KimiProvider = Kimi;
export class OllamaProvider extends Ollama { }
export class SiliconFlowProvider extends SiliconFlow { }
export class CustomProvider extends Custom { }
export class VolcesProvider extends Volces { }
export class DeepSeekProvider extends DeepSeek { }
export class ModelScopeProvider extends ModelScope { }
export class BailianProvider extends BailianChat { }
export class ZhipuProvider extends Zhipu { }
export class KimiProvider extends Kimi { }
export class KevisualProvider extends Kevisual { }
export const ChatProviderMap = {
Ollama: OllamaProvider,

View File

@@ -1,34 +0,0 @@
import { encoding_for_model, get_encoding } from 'tiktoken';
// Maps each supported OpenAI model name to the tiktoken base encoding used
// as a fallback when the model-specific encoder cannot be created.
// NOTE(review): presumably mirrors tiktoken's own model→encoding table — verify.
const MODEL_TO_ENCODING = {
'gpt-4': 'cl100k_base',
'gpt-4-turbo': 'cl100k_base',
'gpt-3.5-turbo': 'cl100k_base',
'text-embedding-ada-002': 'cl100k_base',
'text-davinci-002': 'p50k_base',
'text-davinci-003': 'p50k_base',
} as const;
/**
 * Count how many tokens `text` occupies under `model`'s tokenizer.
 *
 * Resolution order:
 *  1. the model-specific tiktoken encoder (`encoding_for_model`),
 *  2. the base encoding mapped for the model in MODEL_TO_ENCODING,
 *  3. a rough estimate of ~0.25 tokens per character if tiktoken fails entirely.
 *
 * Fix over the previous version: `encoder.free()` now runs in a `finally`
 * block, so the WASM-backed encoder is released even when `encode` throws
 * (it was previously leaked on that path).
 *
 * @param text  The input text to tokenize.
 * @param model Model whose tokenizer rules to apply (default 'gpt-3.5-turbo').
 * @returns The exact token count when an encoder is available, otherwise an estimate.
 */
export function numTokensFromString(text: string, model: keyof typeof MODEL_TO_ENCODING = 'gpt-3.5-turbo'): number {
  try {
    // Prefer the model-specific encoder.
    const encoder = encoding_for_model(model);
    try {
      return encoder.encode(text).length;
    } finally {
      encoder.free(); // always release the encoder, even if encode() throws
    }
  } catch {
    try {
      // Fall back to the base encoding registered for this model.
      const encoder = get_encoding(MODEL_TO_ENCODING[model]);
      try {
        return encoder.encode(text).length;
      } finally {
        encoder.free();
      }
    } catch {
      // Last resort: rough estimate of ~0.25 tokens per character.
      return Math.ceil(text.length * 0.25);
    }
  }
}