微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

我在录音时崩溃了:“所需条件为假:format.sampleRate == hwFormat.sampleRate”(发生在 WebRTC 调用之后)

如何解决录音时的崩溃:“所需条件为假:format.sampleRate == hwFormat.sampleRate”(发生在 WebRTC 调用之后)

我的记录工作正常,但问题是在调用 WebRTC 后,我崩溃了

所需条件为假:format.sampleRate == hwFormat.sampleRate

这是我如何开始崩溃和 installTap:

/// Starts recording: configures the shared audio session, connects
/// inputNode → mixer → mainMixerNode, taps the mixer into a WAV file via
/// ExtAudioFile, and launches the parallel WAV→MP3 encoder.
func startRecord() {
        self.filePath = nil

        print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(.playAndRecord, options: .mixWithOthers)
        } catch {
            print("======== Error setting setCategory \(error.localizedDescription)")
        }
        do {
            try session.setPreferredSampleRate(44100.0)
        } catch {
            print("======== Error setting rate \(error.localizedDescription)")
        }
        do {
            // FIX: the API is setPreferredIOBufferDuration (was `setPreferredioBufferDuration`).
            try session.setPreferredIOBufferDuration(0.005)
        } catch {
            print("======== Error IOBufferDuration \(error.localizedDescription)")
        }
        do {
            try session.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("========== Error starting session \(error.localizedDescription)")
        }

        // NOTE(review): hard-coding 44100 here is what triggers the
        // "format.sampleRate == hwFormat.sampleRate" crash once the hardware
        // switches to 48000 (e.g. after a WebRTC call). Prefer tapping in
        // inputNode's native format and downsampling with AVAudioConverter.
        // FIX: the original line's inline `//` comment swallowed the
        // `channels:`/`interleaved:` arguments; reconstructed here.
        guard let format = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                         sampleRate: 44100.0,
                                         channels: 1,
                                         interleaved: true) else {
            print("======== Unable to create recording format")
            return
        }
        audioEngine.connect(audioEngine.inputNode, to: mixer, format: format)
        // FIX: the property is mainMixerNode (was `mainmixerNode`).
        audioEngine.connect(mixer, to: audioEngine.mainMixerNode, format: format)

        let dir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! as String
        filePath = dir.appending("/\(UUID().uuidString).wav")

        _ = ExtAudioFileCreateWithURL(URL(fileURLWithPath: filePath!) as CFURL,
                                      kAudioFileWAVEType,
                                      format.streamDescription,
                                      nil,
                                      AudioFileFlags.eraseFile.rawValue,
                                      &outref)

        // One callback per `sampleRate` frames ≈ one tap per second of audio.
        mixer.installTap(onBus: 0,
                         bufferSize: AVAudioFrameCount(format.sampleRate),
                         format: format) { (buffer: AVAudioPCMBuffer, _: AVAudioTime) in
            // FIX: the property is audioBufferList (was `audiobufferlist`).
            _ = ExtAudioFileWrite(self.outref!, buffer.frameLength, buffer.audioBufferList)
        }

        do {
            // FIX: replaced `try!` — a start failure should not crash the app.
            try audioEngine.start()
        } catch {
            print("======== Error starting engine \(error.localizedDescription)")
            return
        }
        startMP3Rec(path: filePath!, rate: 128)
    }

    /// Stops playback and the engine, removes the mixer tap, finishes the
    /// background MP3 encode, closes the WAV file, and deactivates the
    /// shared audio session.
    func stopRecord() {

        self.audioFilePlayer.stop()
        self.audioEngine.stop()
        self.mixer.removeTap(onBus: 0)

        self.stopMP3Rec()
        // FIX: the API is ExtAudioFileDispose (was `ExtAudioFiledispose`);
        // guarding avoids a crash if stopRecord() runs before a successful start.
        if let outref = self.outref {
            ExtAudioFileDispose(outref)
            self.outref = nil
        }

        try? AVAudioSession.sharedInstance().setActive(false)
    }
    
    /// Tail-reads the WAV file being written at `path` and encodes the new
    /// PCM data to MP3 with LAME on a background queue, until `stopMP3Rec()`
    /// flips `isMP3Active` off.
    /// - Parameters:
    ///   - path: Path of the WAV file currently being recorded.
    ///   - rate: MP3 bitrate in kbit/s (e.g. 128).
    func startMP3Rec(path: String, rate: Int32) {

        self.isMP3Active = true
        var total = 0
        var read = 0
        var write: Int32 = 0

        let mp3path = path.replacingOccurrences(of: "wav", with: "mp3")
        let PCM_SIZE = 8192
        let MP3_SIZE: Int32 = 8192
        let pcmbuffer = UnsafeMutablePointer<Int16>.allocate(capacity: PCM_SIZE * 2)
        let mp3buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: Int(MP3_SIZE))

        let lame = lame_init()
        lame_set_num_channels(lame, 1)
        lame_set_mode(lame, MONO)
        // assumes the WAV is 44.1 kHz mono Int16 (see startRecord) — TODO confirm
        lame_set_in_samplerate(lame, 44100)
        lame_set_brate(lame, rate)
        lame_set_VBR(lame, vbr_off)
        lame_init_params(lame)

        // FIX: the type is DispatchQueue (was `dispatchQueue`).
        DispatchQueue.global(qos: .default).async {
            guard let mp3 = fopen(mp3path, "wb") else {
                lame_close(lame)
                pcmbuffer.deallocate()
                mp3buffer.deallocate()
                return
            }
            defer {
                lame_close(lame)
                fclose(mp3)
                // FIX: both heap buffers were leaked in the original.
                pcmbuffer.deallocate()
                mp3buffer.deallocate()
            }
            while true {
                // Reopen each pass and skip the 4 KiB WAV header plus all
                // bytes already encoded, so only fresh PCM is read.
                // FIX: the original also opened `pcm` before the loop and
                // leaked that FILE* when the loop reassigned it.
                guard let pcm = fopen(path, "rb") else { break }
                fseek(pcm, 4 * 1024 + total, SEEK_CUR)
                read = fread(pcmbuffer, MemoryLayout<Int16>.size, PCM_SIZE, pcm)
                fclose(pcm)
                if read != 0 {
                    write = lame_encode_buffer(lame, pcmbuffer, Int32(read), mp3buffer, MP3_SIZE)
                    fwrite(mp3buffer, Int(write), 1, mp3)
                    total += read * MemoryLayout<Int16>.size
                } else if !self.isMP3Active {
                    // FIX: lame_encode_flush takes (lame, buffer, size); the
                    // original dropped the buffer argument, and the final
                    // fwrite was missing its size/count/stream arguments.
                    let flushed = lame_encode_flush(lame, mp3buffer, MP3_SIZE)
                    if flushed > 0 {
                        fwrite(mp3buffer, Int(flushed), 1, mp3)
                    }
                    break
                } else {
                    // No new data yet — back off 50 ms.
                    // FIX: usleep(50) slept 50 µs, an effective busy-spin.
                    usleep(50_000)
                }
            }
            self.filePathMP3 = mp3path
        }
    }
    
    /// Signals the background MP3 encoder loop to flush its final frames and exit.
    func stopMP3Rec() {
        isMP3Active = false
    }

作为第一次运行应用程序,我使用

记录最后一种格式
print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")

--> 返回 0 -> 正常记录 下次返回 44100 -> 正常记录

但是在调用 webrtc 之后,我得到了 48000,然后它在这一行崩溃了

self.audioEngine.connect(self.audioEngine.inputNode,to: self.mixer,format: format)

我在 stackoverflow 上花了 4 个小时,但没有解决方案对我有用。

我不想要 48000 格式,因为我已经将样本设置为

sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,

-> 我的输出很难听清,我几乎认不出自己的声音 :(

所以我认为 44100 是最好的

有人能给我一些建议吗?谢谢

解决方法

这条线有问题。

let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16,...

AVAudioCommonFormat.pcmFormatInt16 在默认情况下不起作用。

您应该使用 .pcmFormatFloat32


xcode 提示很明显,

崩溃线

self.audioEngine.connect(self.audioEngine.inputNode,to: self.mixer,format: format)

打印 mixer.inputFormat(forBus: 0 )


然后你得到了实际设备的采样率 48000。你可以通过转换得到44100


只需使用 AVAudioConverter 来做下采样音频缓冲。

// Resolve the input node's true hardware format (48 kHz after a WebRTC call)
// and build a 44.1 kHz mono Float32 target plus a converter between the two.
let input = engine.inputNode
let bus = 0
let inputFormat = input.outputFormat(forBus: bus )

 guard let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,sampleRate: 44100,channels: 1,interleaved: true),let converter = AVAudioConverter(from: inputFormat,to: outputFormat) else{
        return
    }

// NOTE(review): `buffer` and `inputCallback` are not defined in this
// fragment — they come from the enclosing tap block (the fuller example
// later in the article). This snippet does not compile on its own.
// Capacity is scaled by the output/input sample-rate ratio.
if let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate)){
            var error: NSError?
            let status = converter.convert(to: convertedBuffer,error: &error,withInputFrom: inputCallback)
            assert(status != .error)
            print(convertedBuffer.format)
        }
,

下采样部分的完整示例,更贴合你的场景。

    // Tap the input node in its native (hardware) format and downsample each
    // delivered buffer to 44.1 kHz mono Float32 with AVAudioConverter.
    let bus = 0
    let inputNode = audioEngine.inputNode
    let inputFormat = inputNode.outputFormat(forBus: bus)

    // FIX: the original call dropped the `sampleRate:`/`channels:` arguments.
    let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                     sampleRate: 44100,
                                     channels: 1,
                                     interleaved: true)!

    let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!

    inputNode.installTap(onBus: bus, bufferSize: 1024, format: inputFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in

        // Hand the tapped buffer to the converter exactly once per callback,
        // then report .noDataNow so the converter stops pulling.
        var newBufferAvailable = true

        let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return buffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }

        // Output capacity scaled by the output/input sample-rate ratio.
        let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,
                                               frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!

        var error: NSError?
        // FIX: convert(to:error:withInputFrom:) requires the `error:` argument,
        // which the original call dropped.
        let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
        assert(status != .error)

        // 44100 Hz buffer
        print(convertedBuffer.format)

    }

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。