import {
  ChatInputCommandInteraction,
  SlashCommandBuilder
} from 'discord.js';
import { llamacpp, streamText } from 'modelfusion';
import { logInfo, logError } from '../../../logging';
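// llama.cpp server connection, configured via environment variables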
const llamaCppServer = llamacpp.Api({
  baseUrl: {
    host: process.env.LLAMACPP_HOST,
    port: process.env.LLAMACPP_PORT,
  }
});
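// Handler for the /chat command: streams a LLaMA completion into the interaction reply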
async function llamaChat(interaction: ChatInputCommandInteraction) {
  logInfo(`[chat] Fetching last 5 messages in ${interaction.channelId}...`);
  // Recent channel history is fetched but not yet included in the prompt
  const history = await interaction.channel.messages.fetch({ limit: 5 });
  logInfo(`[chat] Generating LLaMA response for interaction ${interaction.id}...`);
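  // Stream a completion from the llama.cpp server, formatting the system/user messages with the Llama 2 chat template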
  const textStream = await streamText({
    model: llamacpp
      .CompletionTextGenerator({
        promptTemplate: llamacpp.prompt.Llama2,
        maxGenerationTokens: 16,
        temperature: 0.7,
        api: llamaCppServer
      })
      .withChatPrompt(),
    prompt: {
      system: 'You are Hatsune Miku, the famous 16-year-old Japanese virtual singer from Crypton Future Media. You have a penchant for politically incorrect humor, and are making banter with your like-minded friends.',
      messages: [{
        role: 'user',
        content: interaction.options.getString('prompt')
      }]
    }
  });
  let allText = '';
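  // Defer the reply so Discord does not time out the interaction while tokens are generated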
  await interaction.deferReply();
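  // Edit the reply with each streamed chunk so the message fills in progressively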
  try {
    for await (const textPart of textStream) {
      logInfo(`[chat] Added to LLaMA response: ${textPart}`);
      allText += textPart;
      await interaction.editReply(allText);
    }
    logInfo(`[chat] Final LLaMA response: ${allText}`);
  } catch (err) {
    logError(err);
    await interaction.editReply(err.toString());
  }
}
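// Slash command registration data and its execute handler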
export = {
  data: new SlashCommandBuilder()
    .setName('chat')
    .setDescription('Miku responds to your prompt with an AI-generated response.')
    .addStringOption(
      opt => opt.setName('prompt').setDescription('Prompt').setRequired(true)
    ),
  execute: llamaChat
};