/**
 * bot.ts
 * Scans the chat for reactions and updates the leaderboard database.
 */

import {
    Attachment,
    AttachmentBuilder,
    Client,
    Collection,
    Events,
    GatewayIntentBits,
    Interaction,
    Message,
    MessageFlags,
    MessageReaction,
    MessageType,
    PartialMessageReaction,
    Partials,
    SlashCommandBuilder,
    TextChannel,
    User
} from 'discord.js';
import fs = require('node:fs');
import path = require('node:path');
import fetch from 'node-fetch';
import FormData = require('form-data');
import tmp = require('tmp');
import { JSDOM } from 'jsdom';
import { logError, logInfo, logWarn } from '../logging';
import {
    db,
    openDb,
    reactionEmojis,
    recordReaction,
    requestTTSResponse,
    sync
} from './util';
import 'dotenv/config';
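
/**
 * Environment variables used in this file (loaded via dotenv):
 *   TOKEN         - Discord bot token used to log in
 *   CLIENT        - the bot's own user ID, used to detect mentions and its own messages
 *   REPLY_CHANCE  - chance of replying to a message unprompted (e.g. 0.05 for roughly 1 in 20)
 *   LLM_HOST      - base URL of the backend serving LLM text generation and RVC audio conversion
 *   LLM_TOKEN     - auth token sent to that backend
 *   MOTD_HREF     - page to scrape for the message of the day
 *   MOTD_QUERY    - CSS selector locating the MOTD text on that page
 *   MOTD_CHANNEL  - ID of the channel where the MOTD is posted
 *   ENABLE_MOTD   - if set, enables the scheduled MOTD task
 */
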
const KNOWN_USERNAMES = ['vinso1445', 'bapazheng', 'f0oby', 'shibe.mp4', '1thinker', 'bapabakshi', 'keliande27', 'gnuwu', 'scoliono', 'adam28405'];
const config = {};

interface CommandClient extends Client {
    commands?: Collection<string, { data: SlashCommandBuilder, execute: (interaction: Interaction) => Promise<void> }>
}

const client: CommandClient = new Client({
    intents: [GatewayIntentBits.Guilds, GatewayIntentBits.GuildMessages, GatewayIntentBits.GuildMessageReactions, GatewayIntentBits.MessageContent],
    partials: [Partials.Message, Partials.Channel, Partials.Reaction],
});
client.commands = new Collection();

client.once(Events.ClientReady, async () => {
    logInfo('[bot] Ready.');
    for (let i = 0; i < reactionEmojis.length; ++i)
        logInfo(`[bot] config: reaction_${i + 1} = ${reactionEmojis[i]}`);
});
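
/**
 * Fired on both reaction add and remove: fetches partial reaction/message
 * data if needed, then records the updated reaction in the database.
 */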
async function onMessageReactionChanged(reaction: MessageReaction | PartialMessageReaction, user: User)
{
    // When a reaction is received, check if the structure is partial
    if (reaction.partial) {
        // If the message this reaction belongs to was removed, the fetching might result in an API error which should be handled
        try {
            await reaction.fetch();
        } catch (error) {
            logError('[bot] Something went wrong when fetching the reaction:', error);
            // Return as `reaction.message.author` may be undefined/null
            return;
        }
    }
    if (reaction.message.partial) {
        // If the message this reaction belongs to was removed, the fetching might result in an API error which should be handled
        try {
            await reaction.message.fetch();
        } catch (error) {
            logError('[bot] Something went wrong when fetching the message:', error);
            // Return as `reaction.message.author` may be undefined/null
            return;
        }
    }

    // Now the message has been cached and is fully available
    logInfo(`[bot] ${reaction.message.author.id}'s message reaction count changed: ${reaction.emoji.name}x${reaction.count}`);
    await recordReaction(<MessageReaction> reaction);
}
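
// A message qualifies as text if it has visible content and is an ordinary message or a reply.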
function textOnlyMessages(message: Message)
{
    return message.cleanContent.length > 0 &&
        (message.type === MessageType.Default || message.type === MessageType.Reply);
}
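
// Reject empty responses and a small blocklist of known-bad outputs.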
function isGoodResponse(response: string)
{
    return response.length > 0 && ![
        '@Today Man-San(1990)🍁🍂',
        '@1981 Celical Man🍁🍂',
        '@Exiled Sammy 🔒🏝⏱'
    ].includes(response);
}
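
/**
 * Handles a new message: voice messages get an RVC-converted audio reply;
 * text messages may get an LLM-generated reply, always when the bot is
 * mentioned or "miku" appears, otherwise at random (REPLY_CHANCE), using
 * recent channel history as context.
 */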
async function onNewMessage(message: Message)
{
    if (message.author.bot) {
        return;
    }

    /** First, handle audio messages */
    if (message.flags.has(MessageFlags.IsVoiceMessage)) {
        try {
            const audio = await requestRVCResponse(message.attachments.first());
            const audioBuf = await audio.arrayBuffer();
            const audioFile = new AttachmentBuilder(Buffer.from(audioBuf)).setName('mikuified.wav');
            await message.reply({
                files: [audioFile]
            });
        } catch (err) {
            logError(`[bot] Failed to generate audio message reply: ${err}`);
        }
    }

    /** Text messages */
    if (!textOnlyMessages(message)) {
        return;
    }

    // Miku must reply when spoken to
    const mustReply = message.mentions.has(process.env.CLIENT) || message.cleanContent.toLowerCase().includes('miku');

    const history = await message.channel.messages.fetch({
        limit: config["llmconf"].llmSettings.msg_context - 1,
        before: message.id
    });

    // change Miku's message probability depending on current message frequency
    const historyMessages = [...history.values()].reverse();
    //const historyTimes = historyMessages.map((m: Message) => m.createdAt.getTime());
    //const historyAvgDelayMins = (historyTimes[historyTimes.length - 1] - historyTimes[0]) / 60000;
    const replyChance = Math.floor(Math.random() * 1 / Number(process.env.REPLY_CHANCE)) === 0;
    const willReply = mustReply || replyChance;

    if (!willReply) {
        return;
    }

    const cleanHistory = historyMessages.filter(textOnlyMessages);
    const cleanHistoryList = [
        ...cleanHistory,
        message
    ];

    try {
        await message.channel.sendTyping();

        const response = await requestLLMResponse(cleanHistoryList);
        // evaluate response
        if (!isGoodResponse(response)) {
            logWarn(`[bot] Burning bad response: "${response}"`);
            return;
        }
        await message.reply(response);
    } catch (err) {
        logError(`[bot] Error while generating LLM response: ${err}`);
    }
}
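
/**
 * Scrapes the message of the day from the page at MOTD_HREF using the
 * MOTD_QUERY selector. Returns null if nothing matched or the fetch failed.
 */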
async function fetchMotd()
{
    try {
        const res = await fetch(process.env.MOTD_HREF);
        const xml = await res.text();
        const parser = new JSDOM(xml);
        const doc = parser.window.document;
        const el = doc.querySelector(process.env.MOTD_QUERY);
        return el ? el.textContent : null;
    } catch (err) {
        logWarn('[bot] Failed to fetch MOTD; is the booru down?');
        return null;
    }
}
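
/**
 * Sends a voice message through the backend's RVC voice-conversion endpoint
 * (assumed to live at LLM_HOST/rvc, authenticated with LLM_TOKEN) and
 * returns the converted audio as a Blob.
 */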
async function requestRVCResponse(src: Attachment): Promise<Blob>
{
    logInfo(`[bot] Downloading audio message ${src.url}`);
    const srcres = await fetch(src.url);
    const srcbuf = await srcres.arrayBuffer();
    const tmpFile = tmp.fileSync();
    const tmpFileName = tmpFile.name;
    fs.writeFileSync(tmpFileName, Buffer.from(srcbuf));
    logInfo(`[bot] Got audio file: ${srcbuf.byteLength} bytes`);

    const queryParams = new URLSearchParams();
    queryParams.append("token", process.env.LLM_TOKEN);

    const fd = new FormData();
    fd.append('file', fs.readFileSync(tmpFileName), 'voice-message.ogg');

    const rvcEndpoint = `${process.env.LLM_HOST}/rvc?${queryParams.toString()}`;
    logInfo(`[bot] Requesting RVC response for ${src.id}`);
    const res = await fetch(rvcEndpoint, {
        method: 'POST',
        body: fd
    });
    const resContents = await res.blob();
    return resContents;
}
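
/**
 * Builds a chat transcript from the given Discord messages (system prompt
 * first, quoted reply context appended last) and POSTs it to the LLM server
 * at LLM_HOST. The response's `raw` field is assumed to hold the full raw
 * generation, from which the assistant turn is extracted using the
 * Llama 3-style template markers below.
 */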
async function requestLLMResponse(messages)
{
    const queryParams = new URLSearchParams();
    queryParams.append("token", process.env.LLM_TOKEN);
    for (const field of Object.keys(config["llmconf"].llmSettings)) {
        queryParams.append(field, config["llmconf"].llmSettings[field]);
    }
    const llmEndpoint = `${process.env.LLM_HOST}/?${queryParams.toString()}`;
    let messageList = await Promise.all(
        messages.map(async (m: Message) => {
            let role = 'user';
            if (m.author.id === process.env.CLIENT) {
                role = 'assistant';
            } else if (m.author.bot) {
                return null;
            /* } else if (KNOWN_USERNAMES.includes(m.author.username)) {
                role = m.author.username; */
            }
            // fetch replied-to message, if there is one, and prompt it as such
            let cleanContent = m.cleanContent;
            if (m.type === MessageType.Reply && m.reference) {
                // what about deeply nested replies? could possibly be recursive?
                const repliedToMsg = await m.fetchReference();
                if (repliedToMsg) {
                    const repliedToMsgLines = repliedToMsg.cleanContent.split('\n');
                    cleanContent = `> ${repliedToMsgLines.join('\n> ')}\n${cleanContent}`;
                }
            }

            return { role, content: cleanContent };
        })
    );
    messageList = messageList.filter(x => !!x);

    // at the beginning, inject the system prompt
    // at the end, start our text generation as a reply to the most recent msg from history
    const replyContext = `> ${messageList[messageList.length - 1].content.split('\n').join('\n> ')}\n`;
    const reqBody = [
        {
            "role": "system",
            "content": config["llmconf"].sys_prompt
        },
        ...messageList,
        {
            "role": "assistant",
            "content": replyContext
        }
    ];
    logInfo("[bot] Requesting LLM response with message list: " + reqBody.map(m => m.content));
    const res = await fetch(llmEndpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify(reqBody)
    });
    const txt = await res.json();
    const txtRaw: string = txt["raw"][0];
    // Depends on chat template used
    const prefix = "<|start_header_id|>assistant<|end_header_id|>\n\n";
    const suffix = "<|eot_id|>";
    const txtStart = txtRaw.lastIndexOf(prefix);
    const txtEnd = txtRaw.slice(txtStart + prefix.length);
    const txtStop = txtEnd.indexOf(suffix) !== -1 ? txtEnd.indexOf(suffix) : txtEnd.length;
    return txtEnd.slice(0, txtStop);
}
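
/**
 * Posts the MOTD (with a TTS reading when available) to MOTD_CHANNEL, then
 * reschedules itself to run again at a random time 2-8 hours later. The
 * first call only schedules; it does not post.
 */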
async function scheduleRandomMessage(firstTime = false)
{
    if (!firstTime) {
        const channel = <TextChannel> await client.channels.fetch(process.env.MOTD_CHANNEL);
        if (!channel) {
            logWarn(`[bot] Channel ${process.env.MOTD_CHANNEL} not found, disabling MOTD.`);
            return;
        }
        const randomMessage = await fetchMotd();
        if (randomMessage) {
            try {
                const audio = await requestTTSResponse(randomMessage);
                const audioBuf = await audio.arrayBuffer();
                const audioFile = new AttachmentBuilder(Buffer.from(audioBuf)).setName('mikuified.wav');
                await channel.send({
                    content: randomMessage,
                    files: [audioFile]
                });
                logInfo(`[bot] Sent MOTD + TTS: ${randomMessage}`);
            } catch (err) {
                await channel.send(randomMessage);
                logWarn(`[bot] Could not fetch MOTD TTS: ${err}`);
                logInfo(`[bot] Sent text MOTD: ${randomMessage}`);
            }
        } else {
            logWarn(`[bot] Could not fetch MOTD.`);
        }
    }
    // wait between 2-8 hours
    const timeoutMins = Math.random() * 360 + 120;
    const scheduledTime = new Date();
    scheduledTime.setMinutes(scheduledTime.getMinutes() + timeoutMins);
    logInfo(`[bot] Next MOTD: ${scheduledTime.toLocaleTimeString()}`);
    setTimeout(scheduleRandomMessage, timeoutMins * 60 * 1000);
}

client.on(Events.MessageCreate, onNewMessage);
client.on(Events.MessageReactionAdd, onMessageReactionChanged);
client.on(Events.MessageReactionRemove, onMessageReactionChanged);
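
// Dispatch slash commands to the handlers loaded from the commands folder at startup.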
client.on(Events.InteractionCreate, async interaction => {
    if (!interaction.isChatInputCommand()) return;

    const client: CommandClient = interaction.client;
    const command = client.commands.get(interaction.commandName);

    if (!command) {
        logError(`[bot] No command matching ${interaction.commandName} was found.`);
        return;
    }

    try {
        await command.execute(interaction);
    } catch (error) {
        logError(error);
        if (interaction.replied || interaction.deferred) {
            await interaction.followUp({ content: 'There was an error while executing this command!', ephemeral: true });
        } else {
            await interaction.reply({ content: 'There was an error while executing this command!', ephemeral: true });
        }
    }
});

// startup
(async () => {
    tmp.setGracefulCleanup();
    logInfo("[db] Opening...");
    await openDb();
    logInfo("[db] Migrating...");
    await db.migrate();
    logInfo("[db] Ready.");

    logInfo("[bot] Loading commands...");
    const foldersPath = path.join(__dirname, 'commands');
    const commandFolders = fs.readdirSync(foldersPath);
    for (const folder of commandFolders) {
        const commandsPath = path.join(foldersPath, folder);
        const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
        for (const file of commandFiles) {
            const filePath = path.join(commandsPath, file);
            const command = require(filePath);
            client.commands.set(command.data.name, command);
            config[command.data.name] = command.config;
            logInfo(`[bot] Found command: /${command.data.name}`);
        }
    }

    logInfo("[bot] Logging in...");
    await client.login(process.env.TOKEN);
    await sync(client.guilds);
    if (process.env.ENABLE_MOTD) {
        await scheduleRandomMessage(true);
    }
})();