// Mirror of https://git.femboyfinancial.jp/james/FemScoreboard.git
// (synced 2024-11-22 02:32:02 -08:00 — 58 lines, 2.0 KiB, TypeScript)
import { InteractionResponse, Message, MessageComponentInteraction, SlashCommandBuilder } from 'discord.js';
|
|
import { ChatMessage, llamacpp, streamText } from 'modelfusion';
|
|
import { logInfo, logError } from '../../../logging';
|
|
|
|
const llamaCppServer = llamacpp.Api({
|
|
baseUrl: {
|
|
host: "localhost",
|
|
port: process.env.LLAMACPP_PORT,
|
|
}
|
|
});
|
|
|
|
async function llamaChat(interaction: MessageComponentInteraction)
|
|
{
|
|
logInfo(`[chat] Fetching last 5 messages in ${interaction.channelId}...`);
|
|
const history = await interaction.channel.messages.fetch({ limit: 5 });
|
|
logInfo(`[chat] Generating LLaMA response for interaction ${interaction.id}...`);
|
|
const textStream = await streamText({
|
|
model: llamacpp
|
|
.CompletionTextGenerator({
|
|
promptTemplate: llamacpp.prompt.Llama2,
|
|
maxGenerationTokens: 400,
|
|
temperature: 0.7,
|
|
api: llamaCppServer
|
|
})
|
|
.withChatPrompt(),
|
|
prompt: {
|
|
system: "You are Hatsune Miku, the famous 16-year-old Japanese virtual singer from Crypton Future Media. You have a penchant for politically incorrect humor, and are making banter with your like-minded friends.",
|
|
messages: history.sort().map((msg: Message<true>): ChatMessage => ({
|
|
"role": msg.author.id === interaction.client.user.id ? "assistant" : "user",
|
|
"content": msg.content
|
|
}))
|
|
}
|
|
});
|
|
|
|
let outMsg: InteractionResponse;
|
|
let allText = '';
|
|
try {
|
|
for await (const textPart of textStream) {
|
|
allText += textPart;
|
|
if (!outMsg) {
|
|
outMsg = await interaction.reply(allText);
|
|
} else {
|
|
await outMsg.edit(allText);
|
|
}
|
|
}
|
|
} catch (err) {
|
|
logError(err);
|
|
await interaction.reply(err.toString());
|
|
}
|
|
}
|
|
|
|
export = {
|
|
data: new SlashCommandBuilder()
|
|
.setName('chat')
|
|
.setDescription('Miku adds to the conversation with an AI-generated response.'),
|
|
execute: llamaChat
|
|
};
|