I would like to monitor the audio levels of the remote participant.
Is it possible to detect when the remote participant is speaking, or when the audio level changes? My goal is to determine when the remote participant is speaking, and adding a KVO observer to the RemoteAudioTrack (shown below) does not work in this case.
SDK: https://www.twilio.com/docs/voice/sdks/ios/get-started
Example code:
<code>extension VideoCallViewController: RemoteParticipantDelegate {
    func localParticipantDidPublishAudioTrack(participant: LocalParticipant, audioTrackPublication: LocalAudioTrackPublication) {
        print("did publish audio track")
    }

    func didSubscribeToAudioTrack(audioTrack: RemoteAudioTrack, publication: RemoteAudioTrackPublication, participant: RemoteParticipant) {
        print("Subscribed to remote audio track for participant \(participant.identity)")
        // Monitor audio levels
        monitorAudioLevels(audioTrack: audioTrack)
    }

    func monitorAudioLevels(audioTrack: RemoteAudioTrack) {
        print("in monitoring audio levels")
        // Assuming audioTrack provides a way to monitor audio levels
        audioTrack.addObserver(self, forKeyPath: "audioLevel", options: .new, context: nil)
    }

    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
        print("in observe")
        guard let audioTrack = object as? RemoteAudioTrack, keyPath == "audioLevel" else {
            print("Observer guard failed")
            return
        }
        print("Observer guard passed")
        if let audioLevel = change?[.newKey] as? Float {
            // Threshold for detecting speech
            let threshold: Float = 0.1
            if audioLevel > threshold {
                print("Remote participant is speaking with audioLevel: \(audioLevel)")
                startTranscribing()
            } else {
                print("Remote participant is silent with audioLevel: \(audioLevel)")
                stopTranscribing()
            }
        }
    }
}
</code>
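Since RemoteAudioTrack does not seem to be KVO-compliant for an "audioLevel" key, the direction I am currently looking at is tapping the raw audio through the SDK's audio sink API and computing a level myself. Below is a rough sketch of what I mean; AudioLevelMeter and onLevel are my own names, and I am assuming the Swift AudioSink protocol (TVIAudioSink in Objective-C) delivers 16-bit signed PCM wrapped in a CMSampleBuffer via renderSample(_:).
<code>import TwilioVideo
import CoreMedia
import Foundation

// Sketch only: compute an RMS level from the raw samples of an audio track.
// Assumes the SDK's AudioSink protocol hands us 16-bit signed PCM in a
// CMSampleBuffer via renderSample(_:). AudioLevelMeter and onLevel are my
// own names, not part of the SDK.
class AudioLevelMeter: NSObject, AudioSink {

    /// Called on the main queue with a normalized RMS level in 0.0...1.0.
    var onLevel: ((Float) -> Void)?

    func renderSample(_ audioSample: CMSampleBuffer) {
        guard let blockBuffer = CMSampleBufferGetDataBuffer(audioSample) else { return }

        var totalLength = 0
        var dataPointer: UnsafeMutablePointer<CChar>?
        let status = CMBlockBufferGetDataPointer(blockBuffer,
                                                 atOffset: 0,
                                                 lengthAtOffsetOut: nil,
                                                 totalLengthOut: &totalLength,
                                                 dataPointerOut: &dataPointer)
        guard status == kCMBlockBufferNoErr, let bytes = dataPointer, totalLength > 0 else { return }

        // Treat the buffer as Int16 samples and compute the root-mean-square level.
        let sampleCount = totalLength / MemoryLayout<Int16>.size
        let rms: Float = bytes.withMemoryRebound(to: Int16.self, capacity: sampleCount) { samples in
            var sumOfSquares: Float = 0
            for i in 0..<sampleCount {
                let normalized = Float(samples[i]) / Float(Int16.max)
                sumOfSquares += normalized * normalized
            }
            return sqrt(sumOfSquares / Float(max(sampleCount, 1)))
        }

        DispatchQueue.main.async { [weak self] in
            self?.onLevel?(rms)
        }
    }
}
</code>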
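And then wire it up when subscribing to the track, something like the following (addSink(_:) is my guess at the Swift spelling of TVIAudioTrack's addSink:, and the 0.05 threshold is arbitrary):
<code>// Stored property on VideoCallViewController (extensions can't add stored properties):
var levelMeter: AudioLevelMeter?

// Inside didSubscribeToAudioTrack(audioTrack:publication:participant:):
let meter = AudioLevelMeter()
meter.onLevel = { [weak self] level in
    // Arbitrary threshold for "speaking"; would need tuning against real audio.
    if level > 0.05 {
        self?.startTranscribing()
    } else {
        self?.stopTranscribing()
    }
}
audioTrack.addSink(meter)   // assumed Swift spelling of TVIAudioTrack's addSink:
levelMeter = meter          // keep a strong reference so the sink stays alive
</code>
Is something along these lines the intended way to do this, or is there a built-in API for speaking/audio-level events that I am missing?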