Compare commits

...

9 Commits

Author SHA1 Message Date
0daee61d61 Client VCData (SSRC -> UserID)
Can use this to store per guild and per channel information

May also store a list of visible users/those that have talked too
while Alan was in the chat
2023-03-14 18:53:24 -04:00
c7558645c2 Attach speaking state update 2023-03-13 20:56:37 -04:00
632a115930 Make speaking state more prominent 2023-03-13 20:29:23 -04:00
95806edca6 Remove dead ding 2023-03-13 18:50:49 -04:00
c4b01bf78a User ID Storage 2023-03-13 18:50:26 -04:00
c47eff4fc9 Driver Connect Seems to not Connect 2023-03-13 18:11:52 -04:00
9fdea3d643 Allow Spaces in ALAN! prefix 2023-03-13 18:07:09 -04:00
577f286773 Unbound Future 2023-03-12 14:46:09 -04:00
51053b043b Remove hello from join 2023-03-12 14:15:02 -04:00
3 changed files with 229 additions and 135 deletions
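The headline change is the per-call bookkeeping added in the voice module ("Client VCData (SSRC -> UserID)"). A rough, standalone sketch of the shape of that state, for orientation only (names follow the diff below; plain u64s stand in for serenity's GuildId/ChannelId/UserId, so this is illustrative rather than project code):

use std::collections::HashMap;

// Each joined call is keyed by guild + channel; per call we keep an
// SSRC -> user map, filled in from SpeakingStateUpdate events.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct CallLocation {
    guild: u64,   // GuildId in the real code
    channel: u64, // ChannelId in the real code
}

#[derive(Default)]
struct VoiceUsers {
    users: HashMap<u32, Option<u64>>, // RTP SSRC -> UserId, once the user has spoken
}

fn main() {
    let mut calls: HashMap<CallLocation, VoiceUsers> = HashMap::new();
    let here = CallLocation { guild: 1, channel: 2 };

    // A SpeakingStateUpdate event would supply the (ssrc, user_id) pair to record.
    calls.entry(here).or_default().users.insert(0xBEEF, Some(42));

    println!("tracked SSRCs in this call: {}", calls[&here].users.len());
}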

View File

@@ -14,7 +14,6 @@ pub async fn join(ctx: &Context, msg: &Message) -> CommandResult {
         .voice_states
         .get(&msg.author.id)
         .and_then(|voice_state| voice_state.channel_id);
-    msg.channel_id.say(&ctx.http, "pong!").await?;
 
     let connect_to = match channel_id {
         Some(channel) => channel,
@@ -25,7 +24,7 @@ pub async fn join(ctx: &Context, msg: &Message) -> CommandResult {
         },
     };
 
-    vc::join(ctx.clone(), guild.clone(), connect_to);
+    vc::join(ctx.clone(), guild.clone(), connect_to).await;
 
     Ok(())
 }
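The second hunk is the fix from 577f286773 ("Unbound Future"): vc::join is an async fn, and Rust futures are lazy, so calling it without .await only built a future that was immediately dropped and the bot never joined. A minimal illustration of the pitfall, assuming a tokio runtime (which serenity projects already pull in); this is not project code:

async fn join_voice() {
    println!("actually joining");
}

#[tokio::main]
async fn main() {
    // Calling without .await only constructs the future; the body never runs,
    // and rustc warns about it via the `unused_must_use` lint.
    join_voice();

    // Awaiting drives the future to completion.
    join_voice().await;
}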

View File

@@ -2,7 +2,7 @@
 use std::env;
 
-use serenity::framework::standard::StandardFramework;
+use serenity::framework::standard::{StandardFramework, WithWhiteSpace};
 use serenity::prelude::*;
 
 // This trait adds the `register_songbird` and `register_songbird_with` methods
@@ -35,7 +35,15 @@ async fn main() {
         | GatewayIntents::GUILD_VOICE_STATES;
 
     let framework = StandardFramework::new()
-        .configure(|c| c.prefix("ALAN! "))
+        .configure(|c| {
+            c.prefix("ALAN! ");
+            c.with_whitespace(WithWhiteSpace {
+                commands: true,
+                groups: true,
+                prefixes: true
+            })
+        }
+        )
         .group(&GENERAL_GROUP);
 
     let songbird_config = Config::default()
@@ -48,9 +56,8 @@ async fn main() {
         .await
         .expect("Err creating client");
 
-    // TODO: do checks when getting data in add guild etc.
-    println!("Initialize guild voice popin state");
     utils::guild_popin::init(&client).await;
+    vc::init(&client).await;
 
     if let Err(why) = client.start().await {
         println!("Client error: {:?}", why);

View File

@@ -1,34 +1,91 @@
-use std::sync::Arc;
-
-use serenity::{async_trait,
-    model::prelude::{ChannelId, Guild},
-    prelude::{Context, Mutex}};
-
-use songbird::{EventHandler, Event, EventContext,
-    model::payload::{Speaking, ClientDisconnect},
-    ffmpeg, create_player, Call, CoreEvent, events::context_data::ConnectData};
-
-struct Receiver {
-    call : Arc<Mutex<Call>>,
-}
-
-impl Receiver {
-    pub fn new(call: Arc<Mutex<Call>>) -> Self {
-        // You can manage state here, such as a buffer of audio packet bytes so
-        // you can later store them in intervals.
-        Self {
-            call
-        }
-    }
-}
+use std::{collections::HashMap, sync::Arc};
+
+use serenity::{
+    async_trait,
+    model::prelude::{ChannelId, Guild, GuildId},
+    prelude::{Context, Mutex, RwLock, TypeMapKey},
+    Client,
+};
+
+use songbird::{
+    create_player,
+    events::context_data::{ConnectData, DisconnectData},
+    ffmpeg,
+    model::{
+        id::UserId,
+        payload::{ClientDisconnect, Speaking},
+    },
+    Call, CoreEvent, Event, EventContext, EventHandler,
+};
+
+#[derive(Eq, Hash)]
+struct CallLocation {
+    guild: GuildId,
+    channel: ChannelId
+}
+
+impl PartialEq for CallLocation {
+    fn eq(&self, other: &Self) -> bool {
+        self.guild == other.guild && self.channel == other.channel
+    }
+}
+
+struct VoiceData {
+    call: Arc<Mutex<Call>>,
+    users: Arc<RwLock<HashMap<u32, Option<UserId>>>>,
+}
+
+impl VoiceData {
+    pub fn new(call: Arc<Mutex<Call>>) -> Self {
+        Self {
+            call,
+            users: Arc::new(RwLock::new(HashMap::default()))
+        }
+    }
+}
+
+struct VCData {
+    loc: Arc<CallLocation>,
+    data: Arc<RwLock<VoiceData>>
+}
+
+impl TypeMapKey for VCData {
+    type Value = Arc<RwLock<HashMap<CallLocation, VCData>>>;
+}
+
+impl VCData {
+    pub fn new(loc: CallLocation, data: VoiceData) -> Self {
+        // You can manage state here, such as a buffer of audio packet bytes so
+        // you can later store them in intervals.
+        VCData {
+            loc: Arc::new(loc),
+            data: Arc::new(RwLock::new(data))
+        }
+    }
+
+    pub fn clone(&self) -> Self {
+        VCData {
+            loc: self.loc.clone(),
+            data: self.data.clone()
+        }
+    }
+}
+
+pub async fn init(client: &Client) {
+    let mut data = client.data.write().await;
+    data.insert::<VCData>(Arc::new(RwLock::new(HashMap::default())))
+}
 
 #[async_trait]
-impl EventHandler for Receiver {
+impl EventHandler for VCData {
     #[allow(unused_variables)]
     async fn act(&self, ctx: &EventContext<'_>) -> Option<Event> {
         use EventContext as Ctx;
         match ctx {
-            Ctx::SpeakingStateUpdate(
-                Speaking {speaking, ssrc, user_id, ..}
-            ) => {
+            Ctx::SpeakingStateUpdate(Speaking {
+                speaking,
+                ssrc,
+                user_id,
+                ..
+            }) => {
                 // Discord voice calls use RTP, where every sender uses a randomly allocated
                 // *Synchronisation Source* (SSRC) to allow receivers to tell which audio
                 // stream a received packet belongs to. As this number is not derived from
@@ -40,68 +97,67 @@ impl EventHandler for Receiver {
                 // SSRCs and map the SSRC to the User ID and maintain this state.
                 // Using this map, you can map the `ssrc` in `voice_packet`
                 // to the user ID and handle their audio packets separately.
+                {
+                    let data = self.data.write().await;
+                    let mut users = data.users.write().await;
+                    users.insert(ssrc.clone(), user_id.clone());
+                }
                 println!(
-                    "Speaking state update: user {:?} has SSRC {:?}, using {:?}",
-                    user_id,
-                    ssrc,
-                    speaking,
+                    "\n\n\nSpeaking state update: user {:?} has SSRC {:?}, using {:?}\n\n\n",
+                    user_id, ssrc, speaking,
                 );
-            },
+            }
             Ctx::SpeakingUpdate(data) => {
                 // You can implement logic here which reacts to a user starting
                 // or stopping speaking, and to map their SSRC to User ID.
+                let vcdata = self.data.read().await;
+                let users = vcdata.users.read().await;
                 println!(
-                    "Source {} has {} speaking.",
+                    "Source {}/{:?} has {} speaking.",
                     data.ssrc,
+                    users.get(&data.ssrc),
                     if data.speaking { "started" } else { "stopped" },
                 );
-            },
+            }
             Ctx::VoicePacket(data) => {
                 // An event which fires for every received audio packet,
                 // containing the decoded data.
                 if let Some(audio) = data.audio {
-                    println!("Audio packet's first 5 samples: {:?}", audio.get(..5.min(audio.len())));
-                    println!(
-                        "Audio packet sequence {:05} has {:04} bytes (decompressed from {}), SSRC {}",
-                        data.packet.sequence.0,
-                        audio.len() * std::mem::size_of::<i16>(),
-                        data.packet.payload.len(),
-                        data.packet.ssrc,
-                    );
+                    // println!("Audio packet's first 5 samples: {:?}", audio.get(..5.min(audio.len())));
+                    // println!(
+                    //     "Audio packet sequence {:05} has {:04} bytes (decompressed from {}), SSRC {}",
+                    //     data.packet.sequence.0,
+                    //     audio.len() * std::mem::size_of::<i16>(),
+                    //     data.packet.payload.len(),
+                    //     data.packet.ssrc,
+                    // );
                 } else {
-                    println!("RTP packet, but no audio. Driver may not be configured to decode.");
+                    // println!("RTP packet, but no audio. Driver may not be configured to decode.");
                 }
-            },
+            }
             Ctx::RtcpPacket(data) => {
                 // An event which fires for every received rtcp packet,
                 // containing the call statistics and reporting information.
-                println!("RTCP packet received: {:?}", data.packet);
-            },
-            Ctx::ClientDisconnect(
-                ClientDisconnect {user_id, ..}
-            ) => {
+                // println!("RTCP packet received: {:?}", data.packet);
+            }
+            Ctx::ClientDisconnect(ClientDisconnect { user_id, .. }) => {
                 // You can implement your own logic here to handle a user who has left the
                 // voice channel e.g., finalise processing of statistics etc.
                 // You will typically need to map the User ID to their SSRC; observed when
                 // first speaking.
                 println!("Client disconnected: user {:?}", user_id);
-            },
-            Ctx::DriverConnect(
-                ConnectData { channel_id, ..}
-            ) => {
-                match channel_id {
-                    Some(chan) => {
-                        let ding_src =
-                            std::env::var("DING_SOUND").expect("DING not found in DING_SOUND");
-                        let ding = ffmpeg(ding_src).await.expect("no ding.");
-                        let (audio, handle) = create_player(ding);
-                        let mut call = self.call.lock().await;
-                        call.play(audio);
-                    },
-                    None => {}
-                }
-            },
+            }
+            Ctx::DriverConnect(ConnectData { channel_id, .. }) => {
+                println!("VoiceDriver is connected.");
+            }
+            Ctx::DriverDisconnect(DisconnectData {
+                channel_id,
+                guild_id,
+                ..
+            }) => {
+                // TODO: Remove data from GuildVoiceData
+            }
             _ => {
                 // We won't be registering this struct for any more event classes.
                 unimplemented!()
@@ -112,45 +168,77 @@ impl EventHandler for Receiver {
     }
 }
 
+pub async fn play_file(call: Arc<Mutex<Call>>, file: String) {
+    let mut call = call.lock().await;
+    let ff_src = ffmpeg(file).await.expect("Unable to find file.");
+    let (audio, handle) = create_player(ff_src);
+    call.play(audio);
+}
+
 pub async fn join(ctx: Context, guild: Guild, cid: ChannelId) -> Option<Arc<Mutex<Call>>> {
     let manager = songbird::get(&ctx).await.expect("Songbird: intialization");
     let (call, status) = manager.join(guild.id, cid).await;
     match status {
         Ok(_) => {
+            let vc_data: VCData = VCData::new(
+                CallLocation {
+                    guild: guild.id,
+                    channel: cid
+                }, VoiceData::new(
+                    call.clone()
+                )
+            );
+
+            {
+                let data = ctx.data.read().await;
+                match data.get::<VCData>() {
+                    Some(vc_guild) => {
+                        let mut vc_guild = vc_guild.write().await;
+                        vc_guild.insert(CallLocation {
+                            guild: guild.id,
+                            channel: cid
+                        }, vc_data.clone());
+                    }
+                    None => {
+                        println!("VoiceData for client hasn't been initialized");
+                    }
+                }
+            }
+
             let call_handle = call.clone();
             {
                 let mut call = call.lock().await;
+                call.add_global_event(
+                    CoreEvent::SpeakingStateUpdate.into(),
+                    vc_data.clone()
+                );
                 call.add_global_event(
                     CoreEvent::SpeakingUpdate.into(),
-                    Receiver::new(call_handle.clone()),
+                    vc_data.clone()
                 );
                 call.add_global_event(
                     CoreEvent::VoicePacket.into(),
-                    Receiver::new(call_handle.clone()),
+                    vc_data.clone()
                 );
                 call.add_global_event(
                     CoreEvent::RtcpPacket.into(),
-                    Receiver::new(call_handle.clone()),
+                    vc_data.clone()
                 );
                 call.add_global_event(
                     CoreEvent::ClientDisconnect.into(),
-                    Receiver::new(call_handle.clone()),
-                );
-                call.add_global_event(
-                    CoreEvent::ClientDisconnect.into(),
-                    Receiver::new(call_handle.clone()),
+                    vc_data.clone()
                 );
                 call.add_global_event(
                     CoreEvent::DriverConnect.into(),
-                    Receiver::new(call_handle.clone()),
+                    vc_data.clone()
                 );
             }
+
+            let ding_src = std::env::var("DING_SOUND").expect("DING not found in DING_SOUND");
+            play_file(call_handle, ding_src).await;
+
             return Some(call);
         }
         Err(_err) => {