ai/src/provider/chat-adapter/ollama.ts

import { BaseChat, BaseChatOptions } from '../core/index.ts';
import type { ChatMessage, ChatMessageOptions } from '../core/index.ts';

export type OllamaOptions = Partial<BaseChatOptions>;

type OllamaModel = {
  name: string;
  model: string;
  modified_at: string;
  size: number;
  digest: string;
  details: {
    parent_model: string;
    format: string; // example: gguf
    family: string; // example: qwen
    families: string[];
    parameter_size: string;
    quantization_level: string; // example: Q4_K_M, Q4_0
  };
};
export class Ollama extends BaseChat {
  static BASE_URL = 'http://localhost:11434/v1';

  constructor(options: OllamaOptions) {
    const baseURL = options.baseURL || Ollama.BASE_URL;
    super({ ...(options as BaseChatOptions), baseURL });
  }
  /**
   * List the locally installed models.
   * @returns the models reported by `GET /api/tags`
   */
  async listModels(): Promise<{ models: OllamaModel[] }> {
    // A leading-slash path replaces the `/v1` segment of baseURL,
    // resolving to e.g. http://localhost:11434/api/tags
    const _url = new URL(this.baseURL);
    const tagsURL = new URL('/api/tags', _url);
    return this.get(tagsURL.toString());
  }

  /**
   * List the models that are currently loaded and running.
   * @returns the models reported by `GET /api/ps`
   */
  async listRunModels(): Promise<{ models: OllamaModel[] }> {
    const _url = new URL(this.baseURL);
    const psURL = new URL('/api/ps', _url);
    return this.get(psURL.toString());
  }
}
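
// Usage sketch: list installed and currently running models against a local
// Ollama instance. Passing an empty options object assumes BaseChat can default
// everything except baseURL; adapt this to whatever BaseChatOptions in
// ../core/index.ts actually requires. `demoListModels` is an illustrative name.
async function demoListModels() {
  const ollama = new Ollama({}); // baseURL falls back to Ollama.BASE_URL
  const { models } = await ollama.listModels(); // GET /api/tags
  const { models: running } = await ollama.listRunModels(); // GET /api/ps
  console.log(models.map((m) => `${m.name} (${m.details.parameter_size})`));
  console.log('running:', running.map((m) => m.name));
}

demoListModels().catch(console.error);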