import { SiliconFlow } from '../../provider/chat-adapter/siliconflow.ts';
import { Ollama } from '../../provider/chat-adapter/ollama.ts';
import dotenv from 'dotenv';

dotenv.config();
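
// Assumes a local .env file (or the environment) provides SILICONFLOW_API_KEY,
// OLLAMA_API_KEY and OLLAMA_BASE_URL; adjust to match your setup.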
const siliconflow = new SiliconFlow({
  apiKey: process.env.SILICONFLOW_API_KEY,
  model: 'Qwen/Qwen3-14B',
});

const ollama = new Ollama({
  model: 'qwen3:32b',
  apiKey: process.env.OLLAMA_API_KEY,
  baseURL: process.env.OLLAMA_BASE_URL,
});
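
// Both adapters are assumed to expose an OpenAI-compatible chat(messages, options)
// method; funcCall below relies on a tool_calls field on the returned assistant message.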

const main = async () => {
  // Print whatever usage info the SiliconFlow adapter reports
  const usage = await siliconflow.getUsageInfo();
  console.log(usage);
};

// 1. Define the local tool functions the model can call
const availableFunctions: Record<string, (args: any) => Promise<any>> = {
  get_time: async (args: { place: string }) => {
    // Simulate an API call
    console.log('time', args);
    return {
      time: '2022-03-22 12:00:00',
    };
  },
  get_location: async (args: Record<string, unknown>) => {
    // Simulate an API call
    console.log('location', args);
    return {
      city: 'Beijing',
    };
  },
};
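
// A minimal defensive dispatch helper (a sketch; funcCall below still calls
// availableFunctions directly). It guards against tool names the model might
// hallucinate and against malformed JSON arguments. The name `dispatchTool`
// is ours, not part of the adapters' API.
const dispatchTool = async (name: string, rawArgs: string): Promise<unknown> => {
  const fn = availableFunctions[name];
  if (!fn) {
    return { error: `unknown tool: ${name}` };
  }
  try {
    return await fn(rawArgs ? JSON.parse(rawArgs) : {});
  } catch (err) {
    return { error: `failed to run ${name}: ${String(err)}` };
  }
};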

// main();

const funcCall = async (model = siliconflow) => {
  // 2. Describe the tools to the model (OpenAI-style function-calling schema)
  const tools = [
    {
      type: 'function',
      function: {
        name: 'get_time',
        description: 'Get the current time',
        parameters: {
          type: 'object',
          properties: {
            place: {
              type: 'string',
              description: 'Location',
            },
          },
          required: ['place'],
        },
      },
    },
    {
      type: 'function',
      function: {
        name: 'get_location',
        description: 'Get the current location',
        parameters: {},
        strict: false,
      },
    },
  ];
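  // The user asks for the current time at the current location, so the model
  // is expected to chain get_location and then get_time.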
  const messages: any[] = [{ role: 'user', content: 'Get the current time at the current location' }];

  // 3. First call: let the model decide which tools to invoke
  const res = await model.chat(messages, {
    tools: tools as any,
  });
  console.log(res.choices[0]);

  const assistantMessage = res.choices[0].message;
  messages.push(assistantMessage);

  let toolCalls = assistantMessage.tool_calls;
  console.log('toolCalls', JSON.stringify(toolCalls));

  // Cap the tool-call loop at three rounds so it cannot spin forever
  let maxRetries = 3;
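
  // 4. Keep resolving tool calls until the model replies with plain text
  // (or the round cap is hit). Each tool result is appended to `messages`
  // with role 'tool' so the model sees it on the next call.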
  while (toolCalls && toolCalls.length > 0) {
    // Handle each requested function call
    for (const toolCall of toolCalls) {
      const functionName = toolCall.function.name;
      const functionArgs = JSON.parse(toolCall.function.arguments);
      // Invoke the local function
      const functionResponse = await availableFunctions[functionName](functionArgs);
      // Append the result to the message history
      messages.push({
        role: 'tool',
        name: functionName,
        content: JSON.stringify(functionResponse),
        tool_call_id: toolCall.id,
      });
    }

    // Follow-up call: send the tool results back to the model for its next reply
    const secondResponse = await model.chat(messages, {
      tools: tools as any,
    });

    const finalMessage = secondResponse.choices[0].message;
    messages.push(finalMessage);

    const _toolCalls = finalMessage.tool_calls;
    console.log('toolCalls', JSON.stringify(_toolCalls), finalMessage.role);
    toolCalls = _toolCalls ?? [];

    maxRetries--;
    if (maxRetries <= 0) {
      break;
    }

    console.log('tool calls', toolCalls);
  }

  console.log(messages);
};

funcCall(ollama as any);
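
// To try the SiliconFlow adapter instead, call funcCall() with no argument
// (it defaults to the siliconflow instance defined above), e.g.:
// funcCall();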