From 8346f52f231a24fa84b3069849b44199eb72e9ab Mon Sep 17 00:00:00 2001
From: James Shiffer
Date: Tue, 6 Feb 2024 18:50:55 -0800
Subject: [PATCH] Change chat cmd max tokens, replies

---
 discord/.env.example          |  1 +
 discord/commands/chat/chat.ts | 28 +++++++++++-----------------
 2 files changed, 12 insertions(+), 17 deletions(-)

diff --git a/discord/.env.example b/discord/.env.example
index dc2f0a6..875780f 100644
--- a/discord/.env.example
+++ b/discord/.env.example
@@ -3,6 +3,7 @@
 REACTIONS="💀,💯,😭"
 CLIENT="123456789012345678"
 GUILD="123456789012345678"
+LLAMACPP_HOST=127.0.0.1
 LLAMACPP_PORT=9999
 ENABLE_MOTD=1
 
diff --git a/discord/commands/chat/chat.ts b/discord/commands/chat/chat.ts
index abac3ea..e8bb283 100644
--- a/discord/commands/chat/chat.ts
+++ b/discord/commands/chat/chat.ts
@@ -1,17 +1,13 @@
 import {
     ChatInputCommandInteraction,
-    Interaction,
-    InteractionResponse,
-    Message,
-    MessageComponentInteraction,
     SlashCommandBuilder
 } from 'discord.js';
-import { ChatMessage, llamacpp, streamText } from 'modelfusion';
+import { llamacpp, streamText } from 'modelfusion';
 import { logInfo, logError } from '../../../logging';
 
 const llamaCppServer = llamacpp.Api({
     baseUrl: {
-        host: "localhost",
+        host: process.env.LLAMACPP_HOST,
         port: process.env.LLAMACPP_PORT,
     }
 });
@@ -25,34 +21,32 @@ async function llamaChat(interaction: ChatInputCommandInteraction)
         model: llamacpp
             .CompletionTextGenerator({
                 promptTemplate: llamacpp.prompt.Llama2,
-                maxGenerationTokens: 400,
+                maxGenerationTokens: 16,
                 temperature: 0.7,
                 api: llamaCppServer
             })
             .withChatPrompt(),
         prompt: {
-            system: "You are Hatsune Miku, the famous 16-year-old Japanese virtual singer from Crypton Future Media. You have a penchant for politically incorrect humor, and are making banter with your like-minded friends.",
+            system: 'You are Hatsune Miku, the famous 16-year-old Japanese virtual singer from Crypton Future Media. You have a penchant for politically incorrect humor, and are making banter with your like-minded friends.',
             messages: [{
-                "role": "user",
-                "content": interaction.options.getString('prompt')
+                'role': 'user',
+                'content': interaction.options.getString('prompt')
             }]
         }
     });
 
-    let outMsg: InteractionResponse;
     let allText = '';
+    await interaction.deferReply();
     try {
         for await (const textPart of textStream) {
+            logInfo(`[chat] Added to LLaMA response: ${textPart}`);
             allText += textPart;
-            if (!outMsg) {
-                outMsg = await interaction.reply(allText);
-            } else {
-                await outMsg.edit(allText);
-            }
+            await interaction.editReply(allText);
         }
+        logInfo(`[chat] Final LLaMA response: ${allText}`);
     } catch (err) {
         logError(err);
-        await interaction.reply(err.toString());
+        await interaction.editReply(err.toString());
     }
 }
 