// qq-bot/llm/index.ts — 2025-10-15 23:31:54 +08:00
// 108 lines, 3.4 KiB, TypeScript

import OpenAI from "openai";
import storage from 'node-persist'
import { sendMsg } from "../lib/qq";
import prompt from './prompt.txt'
// Initialize the persistent key-value store before any chat handling.
await storage.init();
// BUGFIX: clear() returns a Promise; it was previously fired and forgotten,
// so startup errors became unhandled rejections and chat() could race the
// wipe. Awaiting serializes init → clear → first request.
// NOTE(review): this erases ALL persisted chat histories on every process
// start — confirm this is intended and not leftover debugging code.
await storage.clear();

// OpenAI client; endpoint and credentials come from the environment.
const client = new OpenAI({
baseURL: process.env.OPENAI_BASE_URL,
apiKey: process.env.OPENAI_API_KEY,
// logLevel: "debug"
})
/**
 * Tool definitions exposed to the model. `send_msg` is the assistant's only
 * channel for replying to the user over QQ; `strict: true` makes the model
 * conform exactly to the JSON schema below.
 */
const sendMsgParameters = {
    type: "object",
    properties: {
        text: {
            type: "string",
            description: "The message content sent to the user through QQ."
        }
    },
    required: ["text"],
    additionalProperties: false
};

const tools = [
    {
        type: "function" as const,
        name: "send_msg",
        description: "Send a message to the user. Always use this to respond to the user.",
        parameters: sendMsgParameters,
        strict: true
    }
];
/**
 * Run one user turn: append the input to the per-user history, call the
 * model, execute any `send_msg` tool calls, and loop until the model stops
 * requesting tools. History is persisted after every mutation so a crash
 * mid-turn loses at most the in-flight round.
 *
 * @param input 提问 — the user's message text
 * @param target_id 用户 QQ 号 — used both as the history key and the send target
 */
export async function chat(input: string, target_id: string) {
const chatHistoryKey = `chat_history_${target_id}`;
const chatHistory: OpenAI.Responses.ResponseInput = await storage.getItem(chatHistoryKey) || [];
// 添加新输入到对话历史 — persist before calling the model so the user's
// turn survives an API failure.
chatHistory.push({ role: "user", content: input });
console.log(`[LLM] 使用对话, 历史:`, chatHistory);
await storage.setItem(chatHistoryKey, chatHistory);

// One place to build the request; `input` always reflects the live history.
const requestModel = () => client.responses.create({
    model: process.env.CHAT_MODEL || "gpt-5-nano",
    instructions: prompt,
    reasoning: { effort: 'minimal' },
    input: chatHistory,
    tools
});

const response = await requestModel();
// 继续调用工具,直到没有工具调用为止
await toolUseCycle(response.output);

async function toolUseCycle(outputArr: OpenAI.Responses.ResponseOutputItem[]) {
    chatHistory.push(...outputArr);
    // BUGFIX: persist right after appending model output. Previously the
    // history was only saved when another tool round followed, so the final
    // assistant message was never written on the no-tool-call exit path.
    await storage.setItem(chatHistoryKey, chatHistory);
    const functionCalls = (outputArr ?? []).filter(item => item.type === 'function_call');
    console.log("进入 toolUseCycle, with functionCalls", functionCalls.length, "个");
    console.log(JSON.stringify(chatHistory, null, 2));
    if (functionCalls.length == 0) {
        // No tool calls: forward the assistant's final text (if any) to QQ.
        const lastMessage = outputArr.at(-1);
        if (!lastMessage) return
        if (lastMessage.type != 'message') return
        if (lastMessage.role != 'assistant') return
        const msg = lastMessage.content.map(c => c.type == 'output_text' ? c.text : '').join('');
        if (msg.trim().length > 0) {
            // 结束,发送最后的消息
            // NOTE(review): sendMsg's result is intentionally not awaited
            // here (fire-and-forget) — confirm it handles its own errors.
            sendMsg(msg, target_id);
        }
        return
    }
    for (const item of functionCalls) {
        if (item.name === "send_msg") {
            console.log(item.arguments);
            // strict:true tool schema — arguments are model-generated JSON.
            const { text } = JSON.parse(item.arguments);
            sendMsg(text, target_id);
            chatHistory.push({ type: "function_call_output", call_id: item.call_id, output: "OK" });
        }
    }
    await storage.setItem(chatHistoryKey, chatHistory);
    const next = await requestModel();
    // BUGFIX: await the recursive round. Previously this promise floated, so
    // chat() resolved before the tool loop finished and any rejection inside
    // later rounds was unhandled.
    await toolUseCycle(next.output);
}
}
/**
 * Drop the stored conversation history for one user and notify them over QQ.
 *
 * @param target_id 用户 QQ 号
 */
export async function resetChat(target_id: string) {
const historyKey = `chat_history_${target_id}`;
await storage.removeItem(historyKey);
sendMsg("已为你重置对话历史。", target_id);
}