Compare commits

...

5 Commits

Author SHA1 Message Date
0daee61d61 Client VCData (SSRC -> UserID)
Can use this to store per guild and per channel information

May also store a list of visible users / those that have talked, too,
while Alan was in the chat
2023-03-14 18:53:24 -04:00
c7558645c2 Attach speaking state update 2023-03-13 20:56:37 -04:00
632a115930 Make speaking state more prominent 2023-03-13 20:29:23 -04:00
95806edca6 Remove dead ding 2023-03-13 18:50:49 -04:00
c4b01bf78a User ID Storage 2023-03-13 18:50:26 -04:00
2 changed files with 218 additions and 137 deletions

View File

@@ -56,9 +56,8 @@ async fn main() {
        .await
        .expect("Err creating client");

    utils::guild_popin::init(&client).await;
    vc::init(&client).await;

    if let Err(why) = client.start().await {
        println!("Client error: {:?}", why);

View File

@@ -1,167 +1,249 @@
use std::{collections::HashMap, sync::Arc};

use serenity::{
    async_trait,
    model::prelude::{ChannelId, Guild, GuildId},
    prelude::{Context, Mutex, RwLock, TypeMapKey},
    Client,
};
use songbird::{
    create_player,
    events::context_data::{ConnectData, DisconnectData},
    ffmpeg,
    model::{
        id::UserId,
        payload::{ClientDisconnect, Speaking},
    },
    Call, CoreEvent, Event, EventContext, EventHandler,
};

#[derive(Eq, Hash)]
struct CallLocation {
    guild: GuildId,
    channel: ChannelId,
}

impl PartialEq for CallLocation {
    fn eq(&self, other: &Self) -> bool {
        self.guild == other.guild && self.channel == other.channel
    }
}

struct VoiceData {
    call: Arc<Mutex<Call>>,
    users: Arc<RwLock<HashMap<u32, Option<UserId>>>>,
}

impl VoiceData {
    pub fn new(call: Arc<Mutex<Call>>) -> Self {
        Self {
            call,
            users: Arc::new(RwLock::new(HashMap::default())),
        }
    }
}

struct VCData {
    loc: Arc<CallLocation>,
    data: Arc<RwLock<VoiceData>>,
}

impl TypeMapKey for VCData {
    type Value = Arc<RwLock<HashMap<CallLocation, VCData>>>;
}

impl VCData {
    pub fn new(loc: CallLocation, data: VoiceData) -> Self {
        // You can manage state here, such as a buffer of audio packet bytes so
        // you can later store them in intervals.
        VCData {
            loc: Arc::new(loc),
            data: Arc::new(RwLock::new(data)),
        }
    }

    pub fn clone(&self) -> Self {
        VCData {
            loc: self.loc.clone(),
            data: self.data.clone(),
        }
    }
}

pub async fn init(client: &Client) {
    let mut data = client.data.write().await;
    data.insert::<VCData>(Arc::new(RwLock::new(HashMap::default())))
}

#[async_trait]
impl EventHandler for VCData {
    #[allow(unused_variables)]
    async fn act(&self, ctx: &EventContext<'_>) -> Option<Event> {
        use EventContext as Ctx;
        match ctx {
            Ctx::SpeakingStateUpdate(Speaking {
                speaking,
                ssrc,
                user_id,
                ..
            }) => {
                // Discord voice calls use RTP, where every sender uses a randomly allocated
                // *Synchronisation Source* (SSRC) to allow receivers to tell which audio
                // stream a received packet belongs to. As this number is not derived from
                // the sender's user_id, only Discord Voice Gateway messages like this one
                // inform us about which random SSRC a user has been allocated. Future voice
                // packets will contain *only* the SSRC.
                //
                // You can implement logic here so that you can differentiate users'
                // SSRCs and map the SSRC to the User ID and maintain this state.
                // Using this map, you can map the `ssrc` in `voice_packet`
                // to the user ID and handle their audio packets separately.
                {
                    let data = self.data.write().await;
                    let mut users = data.users.write().await;
                    users.insert(ssrc.clone(), user_id.clone());
                }
                println!(
                    "\n\n\nSpeaking state update: user {:?} has SSRC {:?}, using {:?}\n\n\n",
                    user_id, ssrc, speaking,
                );
            }
            Ctx::SpeakingUpdate(data) => {
                // You can implement logic here which reacts to a user starting
                // or stopping speaking, and to map their SSRC to User ID.
                let vcdata = self.data.read().await;
                let users = vcdata.users.read().await;
                println!(
                    "Source {}/{:?} has {} speaking.",
                    data.ssrc,
                    users.get(&data.ssrc),
                    if data.speaking { "started" } else { "stopped" },
                );
            }
            Ctx::VoicePacket(data) => {
                // An event which fires for every received audio packet,
                // containing the decoded data.
                if let Some(audio) = data.audio {
                    // println!("Audio packet's first 5 samples: {:?}", audio.get(..5.min(audio.len())));
                    // println!(
                    //     "Audio packet sequence {:05} has {:04} bytes (decompressed from {}), SSRC {}",
                    //     data.packet.sequence.0,
                    //     audio.len() * std::mem::size_of::<i16>(),
                    //     data.packet.payload.len(),
                    //     data.packet.ssrc,
                    // );
                } else {
                    // println!("RTP packet, but no audio. Driver may not be configured to decode.");
                }
            }
            Ctx::RtcpPacket(data) => {
                // An event which fires for every received rtcp packet,
                // containing the call statistics and reporting information.
                // println!("RTCP packet received: {:?}", data.packet);
            }
            Ctx::ClientDisconnect(ClientDisconnect { user_id, .. }) => {
                // You can implement your own logic here to handle a user who has left the
                // voice channel e.g., finalise processing of statistics etc.
                // You will typically need to map the User ID to their SSRC; observed when
                // first speaking.
                println!("Client disconnected: user {:?}", user_id);
            }
            Ctx::DriverConnect(ConnectData { channel_id, .. }) => {
                println!("VoiceDriver is connected.");
            }
            Ctx::DriverDisconnect(DisconnectData {
                channel_id,
                guild_id,
                ..
            }) => {
                // TODO: Remove data from GuildVoiceData
            }
            _ => {
                // We won't be registering this struct for any more event classes.
                unimplemented!()
            }
        }

        None
    }
}

pub async fn play_file(call: Arc<Mutex<Call>>, file: String) {
    let mut call = call.lock().await;
    let ff_src = ffmpeg(file).await.expect("Unable to find file.");
    let (audio, handle) = create_player(ff_src);
    call.play(audio);
}

pub async fn join(ctx: Context, guild: Guild, cid: ChannelId) -> Option<Arc<Mutex<Call>>> {
    let manager = songbird::get(&ctx).await.expect("Songbird: intialization");
    let (call, status) = manager.join(guild.id, cid).await;
    match status {
        Ok(_) => {
            let vc_data: VCData = VCData::new(
                CallLocation {
                    guild: guild.id,
                    channel: cid,
                },
                VoiceData::new(call.clone()),
            );
            {
                let data = ctx.data.read().await;
                match data.get::<VCData>() {
                    Some(vc_guild) => {
                        let mut vc_guild = vc_guild.write().await;
                        vc_guild.insert(
                            CallLocation {
                                guild: guild.id,
                                channel: cid,
                            },
                            vc_data.clone(),
                        );
                    }
                    None => {
                        println!("VoiceData for client hasn't been initialized");
                    }
                }
            }
            let call_handle = call.clone();
            {
                let mut call = call.lock().await;
                call.add_global_event(CoreEvent::SpeakingStateUpdate.into(), vc_data.clone());
                call.add_global_event(CoreEvent::SpeakingUpdate.into(), vc_data.clone());
                call.add_global_event(CoreEvent::VoicePacket.into(), vc_data.clone());
                call.add_global_event(CoreEvent::RtcpPacket.into(), vc_data.clone());
                call.add_global_event(CoreEvent::ClientDisconnect.into(), vc_data.clone());
                call.add_global_event(CoreEvent::DriverConnect.into(), vc_data.clone());
            }
            let ding_src = std::env::var("DING_SOUND").expect("DING not found in DING_SOUND");
            play_file(call_handle, ding_src).await;
            return Some(call);
        }
        Err(_err) => {
            println!("Error joining channel");
        }
    }
    None
}
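
The first commit message frames this map as per-guild, per-channel state keyed by SSRC. As a rough sketch of how that stored state could be read back elsewhere in the bot, a lookup helper might look like the following. This is not part of the diff: the name user_for_ssrc is made up, and it assumes the helper sits in the same module as VCData so the private data and users fields are visible.

async fn user_for_ssrc(ctx: &Context, guild: GuildId, channel: ChannelId, ssrc: u32) -> Option<UserId> {
    // Hypothetical helper, not in these commits: resolve an SSRC to the UserId
    // recorded by the SpeakingStateUpdate handler for one guild/channel.
    let type_map = ctx.data.read().await;
    let calls = type_map.get::<VCData>()?.clone();
    drop(type_map); // release the client data map before awaiting on the inner locks

    let calls = calls.read().await;
    let vc = calls.get(&CallLocation { guild, channel })?;
    let voice = vc.data.read().await;
    let users = voice.users.read().await;
    users.get(&ssrc).cloned().flatten()
}

Any helper along these lines has to thread through three layers of locks (the client TypeMap, the per-call VoiceData, and the users map), which is worth keeping in mind if the per-call data grows to include the "visible users / who has talked" list the commit message mentions.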