// 2024-02-06 18:26:50 -08:00
import {
ChatInputCommandInteraction ,
Interaction ,
InteractionResponse ,
Message ,
MessageComponentInteraction ,
SlashCommandBuilder
} from 'discord.js' ;
// 2024-02-06 17:57:10 -08:00
import { ChatMessage , llamacpp , streamText } from 'modelfusion' ;
// 2024-02-06 18:12:00 -08:00
import { logInfo , logError } from '../../../logging' ;
// 2024-02-06 17:57:10 -08:00
// Shared llama.cpp API client used by every generation call in this command.
// The server is assumed to run locally; its port comes from the LLAMACPP_PORT
// environment variable (assumed to be set — TODO confirm at startup).
const llamaCppServer = llamacpp.Api({
  baseUrl: {
    host: "localhost",
    port: process.env.LLAMACPP_PORT,
  },
});
// 2024-02-06 18:26:50 -08:00
async function llamaChat ( interaction : ChatInputCommandInteraction )
2024-02-06 17:57:10 -08:00
{
logInfo ( ` [chat] Fetching last 5 messages in ${ interaction . channelId } ... ` ) ;
const history = await interaction . channel . messages . fetch ( { limit : 5 } ) ;
logInfo ( ` [chat] Generating LLaMA response for interaction ${ interaction . id } ... ` ) ;
const textStream = await streamText ( {
model : llamacpp
. CompletionTextGenerator ( {
promptTemplate : llamacpp.prompt.Llama2 ,
maxGenerationTokens : 400 ,
temperature : 0.7 ,
api : llamaCppServer
} )
. withChatPrompt ( ) ,
prompt : {
system : "You are Hatsune Miku, the famous 16-year-old Japanese virtual singer from Crypton Future Media. You have a penchant for politically incorrect humor, and are making banter with your like-minded friends." ,
2024-02-06 18:26:50 -08:00
messages : [ {
"role" : "user" ,
"content" : interaction . options . getString ( 'prompt' )
} ]
2024-02-06 17:57:10 -08:00
}
} ) ;
let outMsg : InteractionResponse ;
let allText = '' ;
try {
for await ( const textPart of textStream ) {
allText += textPart ;
if ( ! outMsg ) {
outMsg = await interaction . reply ( allText ) ;
} else {
await outMsg . edit ( allText ) ;
}
}
} catch ( err ) {
logError ( err ) ;
await interaction . reply ( err . toString ( ) ) ;
}
}
// 2024-02-06 18:12:00 -08:00
export = {
2024-02-06 17:57:10 -08:00
data : new SlashCommandBuilder ( )
. setName ( 'chat' )
2024-02-06 18:26:50 -08:00
. setDescription ( 'Miku responds to your prompt with an AI-generated response.' )
. addStringOption (
opt = > opt . setName ( 'prompt' ) . setDescription ( 'Prompt' ) . setRequired ( true )
) ,
2024-02-06 17:57:10 -08:00
execute : llamaChat
} ;