init
src/provider/chat-adapter/ollama.ts (new file, +47 lines)
@@ -0,0 +1,47 @@
import { BaseChat, BaseChatOptions } from '../core/index.ts';
import type { ChatMessage, ChatMessageOptions } from '../core/index.ts';

export type OllamaOptions = Partial<BaseChatOptions>;

type OllamaModel = {
  name: string;
  model: string;
  modified_at: string;
  size: number;
  digest: string;
  details: {
    parent_model: string;
    format: string; // example: gguf
    family: string; // example: qwen
    families: string[];
    parameter_size: string;
    quantization_level: string; // example: Q4_K_M, Q4_0
  };
};

export class Ollama extends BaseChat {
  static BASE_URL = 'http://localhost:11434/v1';

  constructor(options: OllamaOptions) {
    const baseURL = options.baseURL || Ollama.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL });
  }

  async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
    const res = await super.chat(messages, options);
    console.log('thunk', this.getChatUsage()); // debug: log usage reported by BaseChat
    return res;
  }

  /**
   * Fetch the list of available models (GET /api/tags).
   * @returns the model list as `{ models: OllamaModel[] }`
   */
  async listModels(): Promise<{ models: OllamaModel[] }> {
    const _url = new URL(this.baseURL);
    const tagsURL = new URL('/api/tags', _url);
    return this.openai.get(tagsURL.toString());
  }

  /**
   * Fetch the list of currently running models (GET /api/ps).
   * @returns the running-model list as `{ models: OllamaModel[] }`
   */
  async listRunModels(): Promise<{ models: OllamaModel[] }> {
    const _url = new URL(this.baseURL);
    const psURL = new URL('/api/ps', _url);
    return this.openai.get(psURL.toString());
  }
}
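A note on the two list helpers above: because the path passed to new URL() starts with '/', it replaces the '/v1' path of the OpenAI-compatible base URL, so the requests go to Ollama's native API rather than the /v1 compatibility layer. A minimal standalone sketch of that resolution:

// A leading '/' is resolved against the origin, discarding the '/v1' path segment.
const base = new URL('http://localhost:11434/v1');
console.log(new URL('/api/tags', base).toString()); // http://localhost:11434/api/tags
console.log(new URL('/api/ps', base).toString());   // http://localhost:11434/api/ps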
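For context, a rough usage sketch of the new adapter. The model option and the { role, content } message shape are assumptions based on the OpenAI-compatible style of BaseChat; neither is defined in this diff, so adjust them to whatever BaseChatOptions and ChatMessage actually expect.

import { Ollama } from './src/provider/chat-adapter/ollama.ts';

// 'model' is an assumed BaseChatOptions field; 'qwen2.5:7b' is only an example tag.
const ollama = new Ollama({ model: 'qwen2.5:7b' });

// List local models via GET /api/tags.
const { models } = await ollama.listModels();
console.log(models.map((m) => `${m.name} (${m.details.parameter_size})`));

// Chat through the inherited OpenAI-compatible endpoint (message shape assumed).
const reply = await ollama.chat([{ role: 'user', content: 'Hello!' }]);
console.log(reply);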