feat: add agent

xion 2024-09-27 23:03:30 +08:00
parent cfdf2ba00d
commit 718322ae47
16 changed files with 446 additions and 43 deletions

View File

@@ -21,6 +21,7 @@
     "@langchain/ollama": "^0.1.0",
     "@langchain/openai": "^0.3.2",
     "mongodb": "^6.9.0",
+    "nanoid": "^5.0.7",
     "ws": "^8.18.0"
   },
   "devDependencies": {

View File

@@ -1,31 +0,0 @@
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { MemorySaver } from '@langchain/langgraph';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai';
import { checkpointer } from '../module/save.ts';
import { HumanMessage } from '@langchain/core/messages';
export { HumanMessage };
// const agentModel = new ChatOllama({ temperature: 0, model: 'llama3.1:8b', baseUrl: 'http://mz.zxj.im:11434' });
export const agentModelBakllava = new ChatOllama({ temperature: 0, model: 'bakllava:latest', baseUrl: 'http://mz.zxj.im:11434' });
export const agentModel = new ChatOllama({ temperature: 0, model: 'qwen2.5:14b', baseUrl: 'http://mz.zxj.im:11434' });
export const agentModelOpenAI = new ChatOpenAI(
{ temperature: 0, model: 'gpt-4o', apiKey: 'sk-GJE6I8OJWDr2ErFBD4C4706a65Ad4cD9B596Cf7c76943e45' },
{
baseURL: 'https://oneapi.on-ai.ai/v1',
},
);
const agentCheckpointer = checkpointer;
export const agent = createReactAgent({
llm: agentModel,
tools: [],
checkpointSaver: agentCheckpointer,
});
export const agentLlava = createReactAgent({
llm: agentModelBakllava,
tools: [],
checkpointSaver: agentCheckpointer,
});
export const agentOpenAI = createReactAgent({ llm: agentModelOpenAI, tools: [], checkpointSaver: agentCheckpointer });

View File

@@ -2,7 +2,6 @@ import { App } from '@abearxiong/router';
 import { useConfig } from '@abearxiong/use-config';
 const config = useConfig();
-console.log('config in ai-lang', config);
 export const app = new App({
   serverOptions: {

View File

@@ -1 +1,4 @@
-export * from './app.ts'
+export * from './app.ts';
+import './routes/agent.ts';
+import { agentManger } from './module/agent.ts';
+export { agentManger };

View File

@@ -0,0 +1,51 @@
import { AiAgent, AiAgentOpts } from './create-agent.ts';
export enum AgentMangerStatus {
init = 'i',
ready = 'r',
error = 'e',
}
export class AgentManger {
agents: AiAgent[] = [];
status: AgentMangerStatus = AgentMangerStatus.init;
constructor() {}
addAgent(agent: AiAgent) {
this.agents.push(agent);
}
getAgent(id: string) {
const agent = this.agents.find((agent) => agent.id === id);
return agent;
}
removeAgent(id: string) {
this.agents = this.agents.filter((agent) => agent.id !== id);
}
createAgent(opts: AiAgentOpts) {
if (!opts.id) {
const agent = new AiAgent(opts);
this.addAgent(agent);
return agent;
}
const agent = this.agents.find((agent) => agent.id === opts.id);
if (!agent) {
const agent = new AiAgent(opts);
this.addAgent(agent);
return agent;
}
return agent;
}
/**
 * Create a standalone AiAgent without registering it in the manager.
 * @param opts agent options
 * @returns the new AiAgent instance
 */
newAgent(opts: AiAgentOpts) {
return new AiAgent(opts);
}
createAgentList(opts: AiAgentOpts[]) {
// only initialize the agent list once
if (this.status !== AgentMangerStatus.init) {
return;
}
this.status = AgentMangerStatus.ready;
return opts.map((opt) => this.createAgent(opt));
}
}
export const agentManger = new AgentManger();
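A minimal usage sketch of the manager above (illustrative id, model, and baseUrl; the option shape is AiAgentOpts from create-agent.ts below):

import { agentManger } from './module/agent.ts';

// createAgent is idempotent per id: calling it again with the same id returns the existing instance
const agent = agentManger.createAgent({
  id: 'demo-qwen',
  type: 'ollama',
  model: 'qwen2.5:14b',
  baseUrl: 'http://localhost:11434',
  cache: 'memory',
});
const same = agentManger.getAgent('demo-qwen');
console.log(same === agent); // true
agentManger.removeAgent('demo-qwen');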

View File

@@ -0,0 +1,142 @@
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { MemorySaver } from '@langchain/langgraph';
import { ChatOllama } from '@langchain/ollama';
import { ChatOpenAI } from '@langchain/openai';
import { client } from './mongo.ts';
import { MongoDBSaver } from '@langchain/langgraph-checkpoint-mongodb';
import { nanoid } from 'nanoid';
import { HumanMessage } from '@langchain/core/messages';
export { HumanMessage };
export const agentModelList = ['qwen2.5:14b', 'qwen2.5-coder:7b', 'llama3.1:8b', 'bakllava:latest', 'gpt-4o'] as const;
export type AiAgentModel = (typeof agentModelList)[number];
export type AiAgentCache = 'memory' | 'mongodb';
export type AiAgentOpts = {
id: string;
type: 'ollama' | 'openai';
model: AiAgentModel;
baseUrl: string;
apiKey?: string;
temperature?: number;
cache?: AiAgentCache;
cacheName?: string;
};
export type AiAgentStatus = 'ready' | 'loading' | 'error';
// export const CreateAgent = (opts: CreateAgentOptions) => {
// const;
// };
export class AiAgent {
agent: ReturnType<typeof createReactAgent>;
agentModel: ChatOllama | ChatOpenAI;
memorySaver: MemorySaver | MongoDBSaver;
id: string;
baseUrl: string;
type: 'ollama' | 'openai';
model: AiAgentModel;
apiKey: string;
temperature = 0;
cache?: AiAgentCache;
cacheName?: string;
status?: 'ready' | 'loading' | 'error';
constructor(opts?: AiAgentOpts) {
this.type = opts?.type || 'ollama';
this.baseUrl = opts?.baseUrl || 'http://localhost:11434';
this.model = opts?.model;
this.apiKey = opts?.apiKey;
this.temperature = opts?.temperature || 0;
this.cache = opts?.cache || 'mongodb';
this.cacheName = opts?.cacheName || 'checkpointer';
this.id = opts?.id || nanoid(8);
if (this.type === 'openai') {
if (!this.apiKey) {
throw new Error('apiKey is required for openai agent');
}
}
this.status = 'loading';
this.createAgent();
}
createAgent() {
this.createAgentModel();
this.createMemorySaver();
if (this.status === 'error') {
return;
}
const agentModel = this.agentModel;
const memorySaver = this.memorySaver;
this.agent = createReactAgent({
llm: agentModel,
tools: [],
checkpointSaver: memorySaver,
});
this.status = 'ready';
}
createAgentModel() {
const type = this.type;
const model = this.model;
const temperature = this.temperature;
const apiKey = this.apiKey;
const baseUrl = this.baseUrl;
let agentModel;
try {
if (type === 'ollama') {
agentModel = new ChatOllama({ temperature, model, baseUrl });
} else if (type === 'openai') {
agentModel = new ChatOpenAI(
{ temperature, model, apiKey },
{
baseURL: baseUrl,
},
);
}
} catch (e) {
console.error('loading model error', e);
this.status = 'error';
return;
}
this.agentModel = agentModel;
return this;
}
createMemorySaver() {
const cache = this.cache;
const cacheName = this.cacheName;
let memorySaver;
try {
if (cache === 'memory') {
memorySaver = new MemorySaver();
} else if (cache === 'mongodb') {
memorySaver = new MongoDBSaver({ client, dbName: cacheName });
}
} catch (e) {
console.error(e);
this.status = 'error';
return;
}
this.memorySaver = memorySaver;
}
sendHumanMessage(message: string, opts?: { thread_id: string }) {
const humanMessage = new HumanMessage(message);
return this.agent.invoke({ messages: [humanMessage] }, { configurable: { thread_id: 'test_human', ...opts } });
}
close() {
// clear the memory saver and model references
this.memorySaver = null;
this.agentModel = null;
this.agent = null;
}
async testQuery() {
const id = this.id;
try {
const agent = this.agent;
const message = new HumanMessage('你好');
const res = await agent.invoke({ messages: [message] }, { configurable: { thread_id: 'test_ping' } });
if (res) {
return res;
}
} catch (e) {
console.error(`test query [${id}]:`, e);
this.status = 'error';
}
}
}
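A sketch of how conversation state is scoped (illustrative values): sendHumanMessage spreads the passed options into configurable, so reusing the same thread_id resumes the thread checkpointed by the MemorySaver or MongoDBSaver.

const agent = new AiAgent({
  id: 'local-demo',
  type: 'ollama',
  model: 'llama3.1:8b',
  baseUrl: 'http://localhost:11434',
  cache: 'memory',
});
await agent.sendHumanMessage('My name is Alice.', { thread_id: 'chat-1' });
// this call sees the earlier message because it targets the same checkpointed thread
const res = await agent.sendHumanMessage('What is my name?', { thread_id: 'chat-1' });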

View File

View File

@@ -2,3 +2,13 @@ import { MongoClient } from 'mongodb';
 import { useConfig } from '@abearxiong/use-config';
 const { mongo } = useConfig<{ host: string; password: string; username: string }>();
 export const client = new MongoClient(`mongodb://${mongo.username}:${mongo.password}@${mongo.host}`, {});
+// log a message once the connection succeeds
+client
+  .connect()
+  .then(() => {
+    console.log('mongo Connected successfully to server');
+  })
+  .catch((err) => {
+    console.error(err);
+  });

View File

@@ -1,4 +0,0 @@
import { MongoDBSaver } from '@langchain/langgraph-checkpoint-mongodb';
import { client } from './mongo.ts';
export const checkpointer = new MongoDBSaver({ client });

View File

@@ -1,10 +1,19 @@
+import { CustomError } from '@abearxiong/router';
 import { app } from '../app.ts';
-import { agent, HumanMessage } from '../agent/index.ts';
+// import { agent, HumanMessage } from '../agent/index.ts';
+import { agentManger } from '../module/agent.ts';
 app
   .route('ai', 'chat')
   .define(async (ctx) => {
-    const { message } = ctx.query;
-    const response = await agent.invoke({ messages: [new HumanMessage(message)] }, { configurable: { thread_id: '44' } });
-    ctx.body = response;
+    const { message, agentId, chatId } = ctx.query.data;
+    // const response = await agent.invoke({ messages: [new HumanMessage(message)] }, { configurable: { thread_id: '44' } });
+    // ctx.body = response;
+    //
+    const agent = agentManger.getAgent(agentId);
+    if (!agent) {
+      throw new CustomError('agent not found');
+    }
   })
   .addTo(app);
+// app.router.parse({})
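As committed, this handler only resolves the agent and throws when it is missing; it never invokes the agent or sets ctx.body. Presumably a follow-up along these lines is intended (hypothetical; using chatId as the thread key is an assumption):

ctx.body = await agent.sendHumanMessage(message, { thread_id: chatId });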

pnpm-lock.yaml generated
View File

@@ -191,6 +191,9 @@ importers:
       mongodb:
         specifier: ^6.9.0
         version: 6.9.0
+      nanoid:
+        specifier: ^5.0.7
+        version: 5.0.7
       ws:
         specifier: ^8.18.0
         version: 8.18.0

View File

@@ -7,5 +7,5 @@ import { app as aiApp } from '@kevisual/ai-lang/src/index.ts';
 export { aiApp };
 export { app };
 app.listen(config.port, () => {
-  console.log(`server is running at http://localhost:${config.port}`);
+  console.log(`server2 is running at http://localhost:${config.port}`);
 });

src/models/agent.ts Normal file
View File

@@ -0,0 +1,85 @@
import { sequelize } from '@/modules/sequelize.ts';
import { DataTypes, Model } from 'sequelize';
export class AiAgent extends Model {
// `declare` avoids emitting class fields that would shadow Sequelize's attribute getters/setters
declare id: string;
declare type: string;
declare model: string;
declare baseUrl: string;
declare apiKey: string;
declare temperature: number;
declare cache: string;
declare cacheName: string;
declare status: string;
declare data: any;
declare key: string;
}
// attribute properties of AiAgent
export type AiProperties = {
id: string;
type: string;
model: string;
baseUrl: string;
apiKey?: string;
temperature?: number;
cache?: string;
cacheName?: string;
data?: any;
};
AiAgent.init(
{
id: {
type: DataTypes.UUID,
primaryKey: true,
defaultValue: DataTypes.UUIDV4,
},
type: {
type: DataTypes.STRING,
allowNull: false,
},
status: {
type: DataTypes.STRING,
defaultValue: 'open',
},
model: {
type: DataTypes.STRING,
allowNull: false,
},
baseUrl: {
type: DataTypes.STRING,
allowNull: false,
},
apiKey: {
type: DataTypes.STRING,
allowNull: false,
},
key: {
type: DataTypes.STRING,
},
temperature: {
type: DataTypes.FLOAT,
allowNull: true,
},
cache: {
type: DataTypes.STRING,
allowNull: true,
},
cacheName: {
type: DataTypes.STRING,
allowNull: true,
},
data: {
type: DataTypes.JSON,
allowNull: true,
defaultValue: {},
},
},
{
sequelize,
tableName: 'ai_agent',
},
);
AiAgent.sync({ alter: true, logging: false }).catch((e) => {
console.error('AiAgent sync error', e);
});

View File

@@ -0,0 +1 @@
import './list.ts';

src/routes/agent/list.ts Normal file
View File

@@ -0,0 +1,132 @@
import { app } from '@/app.ts';
import { AiAgent, AiProperties } from '@/models/agent.ts';
import { CustomError } from '@abearxiong/router';
import { agentManger } from '@kevisual/ai-lang';
import { v4 } from 'uuid';
app
.route('agent', 'list')
.define(async (ctx) => {
const agents = await AiAgent.findAll({
order: [['updatedAt', 'DESC']],
// exclude the apiKey field from the returned attributes
attributes: { exclude: ['apiKey'] },
});
ctx.body = agents;
})
.addTo(app);
app
.route('agent', 'get')
.define(async (ctx) => {
const id = ctx.query.id;
if (!id) {
throw new CustomError('id is required');
}
ctx.body = await AiAgent.findByPk(id, {
attributes: { exclude: ['apiKey'] },
});
return ctx;
})
.addTo(app);
app
.route('agent', 'update')
.define(async (ctx) => {
const { id, ...rest } = ctx.query.data;
let agent = await AiAgent.findByPk(id);
if (!agent) {
agent = await AiAgent.create(rest);
ctx.body = agent;
return ctx;
}
await agent.update(rest);
ctx.body = agent;
return ctx;
})
.addTo(app);
app
.route('agent', 'delete')
.define(async (ctx) => {
const id = ctx.query.id;
if (!id) {
throw new CustomError('id is required');
}
const agent = await AiAgent.findByPk(id);
if (!agent) {
throw new CustomError('agent not found');
}
await agent.destroy();
ctx.body = agent;
return ctx;
})
.addTo(app);
app
.route('agent', 'test')
.define(async (ctx) => {
const { message } = ctx.query;
const data: AiProperties = {
type: 'ollama',
id: 'test',
model: 'qwen2.5:14b',
baseUrl: 'http://mz.zxj.im:11434',
cache: 'memory',
};
const agent = agentManger.createAgent(data as any);
const res = await agent.sendHumanMessage(message);
// agent.close();
agentManger.removeAgent(agent.id);
ctx.body = res;
return ctx;
})
.addTo(app);
export const agentModelList = ['qwen2.5:14b', 'qwen2.5-coder:7b', 'llama3.1:8b', 'bakllava:latest'] as const;
export const openAiModels = ['gpt-4o'];
const demoData: AiProperties[] = [
{
id: v4(),
type: 'openai',
model: 'gpt-4o',
baseUrl: 'https://oneapi.on-ai.ai/v1',
apiKey: 'sk-GJE6I8OJWDr2ErFBD4C4706a65Ad4cD9B596Cf7c76943e45',
},
...agentModelList.map((item) => {
return {
id: v4(),
type: 'ollama',
model: item,
baseUrl: 'http://mz.zxj.im:11434',
apiKey: 'sk-GJE6I8OJWDr2ErFBD4C4706a65Ad4cD9B596Cf7c76943e45',
};
}),
];
// AiAgent.bulkCreate(demoData, { ignoreDuplicates: true }).then(() => {
// console.log('create demo data success');
// });
const initManager = async () => {
// const list = await AiAgent.findAll();
const list = await AiAgent.findAll({
where: {
status: 'open',
},
});
const data = list.map((item) => {
return {
id: item.id,
type: item.type as any,
model: item.model as any,
baseUrl: item.baseUrl,
apiKey: item.apiKey,
temperature: item.temperature,
cache: item.cache as any,
cacheName: item.cacheName,
};
});
agentManger.createAgentList(data);
};
setTimeout(() => {
initManager();
}, 1000);
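The one-second setTimeout is a rough way to let the AiAgent.sync(...) in src/models/agent.ts finish before agents are loaded. A sketch of an alternative (an assumption, not part of the commit): export the sync promise from the model and chain initialization on it.

// in src/models/agent.ts (hypothetical export)
export const agentSynced = AiAgent.sync({ alter: true, logging: false });

// here, instead of setTimeout
import { agentSynced } from '@/models/agent.ts';
agentSynced.then(() => initManager()).catch((e) => console.error('init agents error', e));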

View File

@@ -5,3 +5,5 @@ import './page/index.ts';
 import './resource/index.ts';
 import './prompt-graph/index.ts';
+import './agent/index.ts';