While recording sound in my application, I measure its decibel level and display it on screen. In addition, I want to split the incoming microphone signal into frequency bands and show them in a graph. I tried adding an AVAudioInputNode tap based on this class (https://github.com/vNakamura/SwiftChartsAudioVisualizer/blob/main/SwiftChartsAudioVisualizer/AudioProcessing.swift) so that the audio is processed independently of the decibel metering, but it still did not work. I need your help.
Thanks!
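
For reference, this is roughly what I tried to add alongside my recorder class (shown further below), adapted from the linked AudioProcessing.swift. The class and property names here are my own placeholders, not necessarily the repo's: it taps AVAudioEngine's input node and runs each buffer through a vDSP DFT to get per-bin magnitudes for the chart.

import AVFoundation
import Accelerate

final class AudioProcessing: ObservableObject {
    static let bufferSize = 1024

    // Magnitudes for the first half of the spectrum (bufferSize / 2 bins)
    @Published var fftMagnitudes = [Float](repeating: 0, count: AudioProcessing.bufferSize / 2)

    private let engine = AVAudioEngine()
    private var fftSetup: vDSP_DFT_Setup?

    func start() {
        fftSetup = vDSP_DFT_zop_CreateSetup(nil, vDSP_Length(AudioProcessing.bufferSize), .FORWARD)

        let input = engine.inputNode
        let format = input.outputFormat(forBus: 0)
        input.installTap(onBus: 0,
                         bufferSize: AVAudioFrameCount(AudioProcessing.bufferSize),
                         format: format) { [weak self] buffer, _ in
            self?.process(buffer)
        }

        do {
            try engine.start()
        } catch {
            print("Failed to start audio engine: \(error.localizedDescription)")
        }
    }

    private func process(_ buffer: AVAudioPCMBuffer) {
        guard let setup = fftSetup,
              let channelData = buffer.floatChannelData?[0],
              Int(buffer.frameLength) >= AudioProcessing.bufferSize else { return }

        // Forward DFT: real input samples in, split complex out
        var realIn = [Float](UnsafeBufferPointer(start: channelData, count: AudioProcessing.bufferSize))
        var imagIn = [Float](repeating: 0, count: AudioProcessing.bufferSize)
        var realOut = [Float](repeating: 0, count: AudioProcessing.bufferSize)
        var imagOut = [Float](repeating: 0, count: AudioProcessing.bufferSize)
        vDSP_DFT_Execute(setup, &realIn, &imagIn, &realOut, &imagOut)

        var magnitudes = [Float](repeating: 0, count: AudioProcessing.bufferSize / 2)
        realOut.withUnsafeMutableBufferPointer { realPtr in
            imagOut.withUnsafeMutableBufferPointer { imagPtr in
                var complex = DSPSplitComplex(realp: realPtr.baseAddress!,
                                              imagp: imagPtr.baseAddress!)
                vDSP_zvabs(&complex, 1, &magnitudes, 1, vDSP_Length(AudioProcessing.bufferSize / 2))
            }
        }

        // The tap block runs off the main thread; publish on main for the UI
        DispatchQueue.main.async { self.fftMagnitudes = magnitudes }
    }

    deinit {
        if let setup = fftSetup { vDSP_DFT_DestroySetup(setup) }
    }
}

And here is the decibel recorder class I mentioned:
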
import Foundation
import AVFoundation
import UIKit
import CoreAudio
class DecibelRecorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
    @Published var decibel = 0
    @Published var avgDecibel = 0
    @Published var peakDecibel = 0

    let kRecordAudioFile = "myRecord.caf"
    var audioRecorder: AVAudioRecorder?
    var audioPlayer: AVAudioPlayer?
    var timer: Timer?
    var isRecording = false
    static var url: URL?
    func setAudioSession() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            // Set play-and-record category so the recording can be played back afterwards
            try audioSession.setCategory(.playAndRecord)
            try audioSession.setActive(true)
        } catch {
            print("Failed to set audio session: \(error.localizedDescription)")
        }
    }
    func getSavePath() -> URL? {
        guard let urlStr = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).last else {
            return nil
        }
        let url = URL(fileURLWithPath: urlStr).appendingPathComponent(kRecordAudioFile)
        print("file path: \(url.path)")
        DecibelRecorder.url = url
        return url
    }
    func getAudioSetting() -> [String: Any] {
        var settings: [String: Any] = [:]
        settings[AVFormatIDKey] = kAudioFormatLinearPCM
        settings[AVSampleRateKey] = 8000
        settings[AVNumberOfChannelsKey] = 2
        // 8-bit float is not a valid linear PCM combination; use 16-bit integer samples
        settings[AVLinearPCMBitDepthKey] = 16
        settings[AVLinearPCMIsFloatKey] = false
        settings[AVSampleRateConverterAudioQualityKey] = AVAudioQuality.high.rawValue
        return settings
    }
    // Lazily created recorder, kept separate from the stored `audioRecorder` property
    var audioRecorderr: AVAudioRecorder? {
        if audioRecorder == nil {
            // save path
            guard let url = getSavePath() else {
                return nil
            }
            // recording settings
            let settings = getAudioSetting()
            // create recorder
            do {
                audioRecorder = try AVAudioRecorder(url: url, settings: settings)
                audioRecorder?.delegate = self
                audioRecorder?.isMeteringEnabled = true
            } catch {
                print("Failed to create audio recorder: \(error.localizedDescription)")
                return nil
            }
        }
        return audioRecorder
    }
    // Lazily created metering timer
    var timerr: Timer? {
        if timer == nil {
            timer = Timer.scheduledTimer(timeInterval: 0.7, target: self, selector: #selector(audioPowerChange), userInfo: nil, repeats: true)
        }
        return timer
    }
    @objc func audioPowerChange() {
        self.audioRecorderr?.updateMeters() // refresh the metering values
        // averagePower/peakPower return dBFS in the range -160...0
        let power = self.audioRecorderr?.averagePower(forChannel: 0) ?? 0.0
        let powerMax = self.audioRecorderr?.peakPower(forChannel: 0) ?? 0.0
        // Shift dBFS into roughly a 0...105 range, then reshape it piecewise for display
        let adjustedPower = power + 155 - 50
        var dB = 0
        if adjustedPower < 0.0 {
            dB = 0
        } else if adjustedPower < 40.0 {
            dB = Int(adjustedPower * 0.875)
        } else if adjustedPower < 100.0 {
            dB = Int(adjustedPower - 15)
        } else if adjustedPower < 110.0 {
            dB = Int(adjustedPower * 2.5 - 165)
        } else {
            dB = 110
        }
        self.avgDecibel = Int(abs(power))
        self.peakDecibel = Int(abs(powerMax))
        // shown in the UI
        self.decibel = dB
    }
    // MARK: - UI Events

    func recordClick() {
        if !(audioRecorderr?.isRecording ?? false) {
            isRecording = true
            audioRecorderr?.record()
            timerr?.fireDate = Date.distantPast
        }
    }

    func pauseClick() {
        if audioRecorderr?.isRecording ?? false {
            isRecording = false
            audioRecorderr?.pause()
            timerr?.fireDate = Date.distantFuture
        }
    }

    func stopClick() {
        isRecording = false
        audioRecorderr?.stop()
        timerr?.fireDate = Date.distantFuture
    }
    // MARK: - Recorder delegate method

    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        // Create the player from the finished recording before playing it back
        if audioPlayer == nil {
            audioPlayer = try? AVAudioPlayer(contentsOf: recorder.url)
        }
        if !(audioPlayer?.isPlaying ?? false) {
            audioPlayer?.play()
        }
        print("Recording complete!")
    }
}
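
In case it matters, this is roughly how the published values reach the UI. DecibelView is just a placeholder name, not my real view:

import SwiftUI

struct DecibelView: View {
    @StateObject private var recorder = DecibelRecorder()

    var body: some View {
        VStack(spacing: 12) {
            Text("Current: \(recorder.decibel) dB")
            Text("Average: \(recorder.avgDecibel) dB")
            Text("Peak: \(recorder.peakDecibel) dB")
            HStack {
                Button("Record") { recorder.recordClick() }
                Button("Pause") { recorder.pauseClick() }
                Button("Stop") { recorder.stopClick() }
            }
        }
        .onAppear { recorder.setAudioSession() }
    }
}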