feat: add agent

This commit is contained in:
2024-09-27 23:03:30 +08:00
parent cfdf2ba00d
commit 718322ae47
16 changed files with 446 additions and 43 deletions

View File

@@ -21,6 +21,7 @@
"@langchain/ollama": "^0.1.0",
"@langchain/openai": "^0.3.2",
"mongodb": "^6.9.0",
"nanoid": "^5.0.7",
"ws": "^8.18.0"
},
"devDependencies": {

View File

@@ -1,31 +0,0 @@
import { createReactAgent } from '@langchain/langgraph/prebuilt';
// NOTE(review): MemorySaver is imported but unused here — the checkpointer
// comes from ../module/save.ts instead.
import { MemorySaver } from '@langchain/langgraph';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai';
import { checkpointer } from '../module/save.ts';
import { HumanMessage } from '@langchain/core/messages';
// Re-exported so route code can build messages without importing langchain directly.
export { HumanMessage };
// const agentModel = new ChatOllama({ temperature: 0, model: 'llama3.1:8b', baseUrl: 'http://mz.zxj.im:11434' });
// Vision-capable Ollama model (bakllava) on the shared Ollama host.
export const agentModelBakllava = new ChatOllama({ temperature: 0, model: 'bakllava:latest', baseUrl: 'http://mz.zxj.im:11434' });
// Default text model (qwen2.5:14b) on the same Ollama host.
export const agentModel = new ChatOllama({ temperature: 0, model: 'qwen2.5:14b', baseUrl: 'http://mz.zxj.im:11434' });
// SECURITY NOTE(review): this API key is committed in plain text — it must be
// considered leaked. Rotate it and load it from config/env instead.
export const agentModelOpenAI = new ChatOpenAI(
  { temperature: 0, model: 'gpt-4o', apiKey: 'sk-GJE6I8OJWDr2ErFBD4C4706a65Ad4cD9B596Cf7c76943e45' },
  {
    baseURL: 'https://oneapi.on-ai.ai/v1',
  },
);
// All three agents share one MongoDB-backed checkpointer.
const agentCheckpointer = checkpointer;
// Tool-less ReAct agent over the default qwen model.
export const agent = createReactAgent({
  llm: agentModel,
  tools: [],
  checkpointSaver: agentCheckpointer,
});
// Tool-less ReAct agent over the bakllava vision model.
export const agentLlava = createReactAgent({
  llm: agentModelBakllava,
  tools: [],
  checkpointSaver: agentCheckpointer,
});
// Tool-less ReAct agent over the OpenAI-compatible endpoint.
export const agentOpenAI = createReactAgent({ llm: agentModelOpenAI, tools: [], checkpointSaver: agentCheckpointer });

View File

@@ -2,7 +2,6 @@ import { App } from '@abearxiong/router';
import { useConfig } from '@abearxiong/use-config';
const config = useConfig();
console.log('config in ai-lang', config);
export const app = new App({
serverOptions: {

View File

@@ -1 +1,4 @@
// Reconstructed post-refactor index: the pre-refactor line
// `export * from './app.ts'` (no semicolon) was left interleaved by the diff
// render, duplicating the re-export below; drop it.
export * from './app.ts';
// Imported for its side effect: registers the ai/chat route on the app.
import './routes/agent.ts';
import { agentManger } from './module/agent.ts';
export { agentManger };

View File

@@ -0,0 +1,51 @@
import { AiAgent, AiAgentOpts } from './create-agent.ts';

/** Lifecycle states for the manager (single-letter codes kept as-is for compatibility). */
export enum AgentMangerStatus {
  init = 'i',
  ready = 'r',
  error = 'e',
}

/**
 * Registry of live AiAgent instances, looked up by their `id`.
 * `createAgent` is idempotent per id; `newAgent` bypasses the registry.
 */
export class AgentManger {
  agents: AiAgent[] = [];
  // NOTE(review): field name `staus` (sic) kept — external code may read it.
  staus: AgentMangerStatus = AgentMangerStatus.init;
  constructor() {}

  /** Register an agent instance. No de-duplication is performed here. */
  addAgent(agent: AiAgent) {
    this.agents.push(agent);
  }

  /** @returns the registered agent with the given id, or undefined when absent. */
  getAgent(id: string) {
    return this.agents.find((agent) => agent.id === id);
  }

  /** Remove the agent with the given id from the registry (no-op if absent). */
  removeAgent(id: string) {
    this.agents = this.agents.filter((agent) => agent.id !== id);
  }

  /**
   * Return the registered agent matching `opts.id`, creating and registering
   * a new one when no id is supplied or no match exists.
   * (Rewritten to avoid the shadowed `agent` locals of the original.)
   */
  createAgent(opts: AiAgentOpts) {
    const existing = opts.id ? this.getAgent(opts.id) : undefined;
    if (existing) {
      return existing;
    }
    const created = new AiAgent(opts);
    this.addAgent(created);
    return created;
  }

  /**
   * Create a throwaway agent that is NOT added to the registry.
   * @param opts agent construction options
   * @returns the new, unregistered agent
   */
  newAgent(opts: AiAgentOpts) {
    return new AiAgent(opts);
  }

  /**
   * One-shot bulk initialization: only runs while the manager is still in the
   * `init` state, then marks it `ready` and creates one agent per option set.
   *
   * BUGFIX: the guard was inverted (`=== init` returned early), so the first
   * call — the only one that should do work — was a no-op, the status never
   * left `init`, and the agent list was never created.
   *
   * @returns the created/looked-up agents, or undefined when already initialized
   */
  createAgentList(opts: AiAgentOpts[]) {
    if (this.staus !== AgentMangerStatus.init) {
      return;
    }
    this.staus = AgentMangerStatus.ready;
    return opts.map((opt) => this.createAgent(opt));
  }
}

// Shared singleton used by the routes and the package index.
export const agentManger = new AgentManger();

View File

@@ -0,0 +1,142 @@
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { MemorySaver } from '@langchain/langgraph';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai';
import { client } from './mongo.ts';
import { MongoDBSaver } from '@langchain/langgraph-checkpoint-mongodb';
import { nanoid } from 'nanoid';
import { HumanMessage } from '@langchain/core/messages';
export { HumanMessage };
// Models known to this wrapper; the `as const` keeps literal types so
// AiAgentModel is the union of these exact strings.
export const agentModelList = ['qwen2.5:14b', 'qwen2.5-coder:7b', 'llama3.1:8b', 'bakllava:latest', 'gpt-4o'] as const;
export type AiAgentModel = (typeof agentModelList)[number];
// Where conversation checkpoints are persisted.
export type AiAgentCache = 'memory' | 'mongodb';
/** Construction options for AiAgent. */
export type AiAgentOpts = {
  // NOTE(review): declared required, but AgentManger.createAgent checks
  // `!opts.id` and AiAgent generates one when missing — consider `id?: string`.
  id: string;
  type: 'ollama' | 'openai';
  model: AiAgentModel;
  baseUrl: string;
  // Required at runtime only when type === 'openai'.
  apiKey?: string;
  // Defaults to 0 in the constructor.
  temperature?: number;
  // Defaults to 'mongodb' in the constructor.
  cache?: AiAgentCache;
  // MongoDB database name for the checkpoint saver; defaults to 'checkpointer'.
  cacheName?: string;
};
export type AiAgentStatus = 'ready' | 'loading' | 'error';
// export const CreateAgent = (opts: CreateAgentOptions) => {
//   const;
// };
/**
 * Wraps a single LangGraph ReAct agent (no tools) around either an Ollama or
 * an OpenAI-compatible chat model, with a pluggable checkpoint saver
 * (in-memory MemorySaver or MongoDB-backed MongoDBSaver).
 */
export class AiAgent {
  // Compiled LangGraph agent; built by createAgent() from the constructor.
  agent: ReturnType<typeof createReactAgent>;
  // Underlying chat model instance, flavour chosen by `type`.
  agentModel: ChatOllama | ChatOpenAI;
  // Conversation checkpoint store, chosen by `cache`.
  memorySaver: MemorySaver | MongoDBSaver;
  // Agent identifier; generated with nanoid(8) when not supplied.
  id: string;
  baseUrl: string;
  type: 'ollama' | 'openai';
  model: AiAgentModel;
  apiKey: string;
  temperature = 0;
  cache?: AiAgentCache;
  cacheName?: string;
  // Lifecycle: 'loading' during construction, then 'ready' or 'error'.
  status?: 'ready' | 'loading' | 'error';
  /**
   * Applies defaults (ollama @ localhost:11434, temperature 0, mongodb cache,
   * dbName 'checkpointer'), validates that openai agents have an apiKey,
   * then eagerly builds the model, saver, and agent.
   * @throws Error when type is 'openai' and no apiKey was provided
   */
  constructor(opts?: AiAgentOpts) {
    this.type = opts?.type || 'ollama';
    this.baseUrl = opts?.baseUrl || 'http://localhost:11434';
    // NOTE(review): model/apiKey may be undefined here despite the non-optional
    // field types — callers are expected to pass them; confirm.
    this.model = opts?.model;
    this.apiKey = opts?.apiKey;
    // `|| 0` only clobbers falsy values, and the default is 0 anyway.
    this.temperature = opts?.temperature || 0;
    this.cache = opts?.cache || 'mongodb';
    this.cacheName = opts?.cacheName || 'checkpointer';
    this.id = opts?.id || nanoid(8);
    if (this.type === 'openai') {
      if (!this.apiKey) {
        throw new Error('apiKey is required for openai agent');
      }
    }
    this.status = 'loading';
    this.createAgent();
  }
  /**
   * Builds the chat model and the checkpoint saver, then assembles the
   * tool-less ReAct agent. Either helper may set status to 'error', in which
   * case assembly is skipped; otherwise status becomes 'ready'.
   */
  createAgent() {
    this.createAgentModel();
    this.createMemoerSaver();
    if (this.status === 'error') {
      return;
    }
    const agentModel = this.agentModel;
    const memoerSaver = this.memorySaver;
    this.agent = createReactAgent({
      llm: agentModel,
      tools: [],
      checkpointSaver: memoerSaver,
    });
    this.status = 'ready';
  }
  /**
   * Instantiates the chat model for the configured `type`.
   * On construction failure, logs, sets status 'error', and returns undefined.
   * NOTE(review): an unrecognized `type` would leave agentModel undefined
   * WITHOUT setting status 'error' — unreachable given the union type, but
   * worth a defensive else branch.
   * @returns this (for chaining), or undefined on error
   */
  createAgentModel() {
    const type = this.type;
    const model = this.model;
    const temperature = this.temperature;
    const apiKey = this.apiKey;
    const baseUrl = this.baseUrl;
    let agentModel;
    try {
      if (type === 'ollama') {
        agentModel = new ChatOllama({ temperature, model, baseUrl });
      } else if (type === 'openai') {
        // Second argument carries client configuration (custom baseURL).
        agentModel = new ChatOpenAI(
          { temperature, model, apiKey },
          {
            baseURL: baseUrl,
          },
        );
      }
    } catch (e) {
      console.error('loading model error', e);
      this.status = 'error';
      return;
    }
    this.agentModel = agentModel;
    return this;
  }
  /**
   * Instantiates the checkpoint saver for the configured `cache` backend.
   * (Method name typo — "Memoer" — kept, since external callers may use it.)
   * On failure, logs, sets status 'error', and leaves memorySaver unset.
   */
  createMemoerSaver() {
    const cache = this.cache;
    const cacheName = this.cacheName;
    let memorySaver;
    try {
      if (cache === 'memory') {
        memorySaver = new MemorySaver();
      } else if (cache === 'mongodb') {
        // cacheName is used as the MongoDB database name.
        memorySaver = new MongoDBSaver({ client, dbName: cacheName });
      }
    } catch (e) {
      console.error(e);
      this.status = 'error';
      return;
    }
    this.memorySaver = memorySaver;
  }
  /**
   * Sends one human message to the agent.
   * @param message plain text for the HumanMessage
   * @param opts optional thread_id; defaults to 'test_human'
   * @returns the agent invocation promise
   */
  sendHumanMessage(message: string, opts?: { thread_id: string }) {
    const mesage = new HumanMessage(message);
    return this.agent.invoke({ messages: [mesage] }, { configurable: { thread_id: 'test_human', ...opts } });
  }
  /** Drops references to the saver, model, and agent so they can be GC'd. */
  close() {
    // NOTE(review): null assignments to non-nullable fields — would not
    // compile under strictNullChecks.
    this.memorySaver = null;
    this.agentModel = null;
    this.agent = null;
  }
  /**
   * Health check: sends a fixed greeting on thread 'test_ping'.
   * On failure, logs with the agent id and sets status 'error'.
   * @returns the model response, or undefined on error/empty response
   */
  async testQuery() {
    const id = this.id;
    try {
      const agent = this.agent;
      const message = new HumanMessage('你好');
      const res = await agent.invoke({ messages: [message] }, { configurable: { thread_id: 'test_ping' } });
      if (res) {
        return res;
      }
    } catch (e) {
      console.error(`test query [${id}]:`, e);
      this.status = 'error';
    }
  }
}

View File

View File

@@ -2,3 +2,13 @@ import { MongoClient } from 'mongodb';
import { useConfig } from '@abearxiong/use-config';
// Connection settings (host/username/password) come from app config.
const { mongo } = useConfig<{ host: string; password: string; username: string }>();
// Shared client instance for the whole module (checkpoint savers reuse it).
// NOTE(review): username/password are interpolated without URL-encoding —
// special characters in the password would break the URI; confirm.
export const client = new MongoClient(`mongodb://${mongo.username}:${mongo.password}@${mongo.host}`, {});
// Log the outcome of the initial connection attempt.
client
  .connect()
  .then(() => {
    console.log('mongo Connected successfully to server');
  })
  .catch((err) => {
    console.error(err);
  });

View File

@@ -1,4 +0,0 @@
import { MongoDBSaver } from '@langchain/langgraph-checkpoint-mongodb';
import { client } from './mongo.ts';
// Shared LangGraph checkpointer backed by the module-wide Mongo client
// (no dbName given, so the saver's default database is used).
export const checkpointer = new MongoDBSaver({ client });

View File

@@ -1,10 +1,19 @@
import { CustomError } from '@abearxiong/router';
import { app } from '../app.ts';
// import { agent, HumanMessage } from '../agent/index.ts';
import { agentManger } from '../module/agent.ts';

// Reconstructed post-refactor handler: the diff render had left the
// pre-refactor lines interleaved (duplicate `const { message }` declaration
// and a call to the removed `agent` import), which does not compile.
app
  .route('ai', 'chat')
  .define(async (ctx) => {
    // Payload: message text plus the target agent/chat identifiers.
    const { message, agentId, chatId } = ctx.query.data;
    const agent = agentManger.getAgent(agentId);
    if (!agent) {
      throw new CustomError('agent not found');
    }
    // NOTE(review): `message` and `chatId` are destructured but unused — the
    // handler resolves the agent but never invokes it or sets ctx.body.
    // Presumably agent.sendHumanMessage(message, { thread_id: chatId }) is
    // the intended next step — confirm before wiring it up.
  })
  .addTo(app);
// app.router.parse({})