如何解决处理格式和数据正确的音频文件时出错
我正在尝试将处理音频 url 与视频 url 合并。我已经处理音频以使用 audioEngine.renderOffline
方法更改音高值。但是输出音频文件为 audioAsset.tracks(withMediaType: .audio).first
返回 nil 值(而且 audioAsset.Metadata 为空)。由于值为零,我无法合并视频和音频。
注意:我可以分享处理后的音频文件。当我在 AVAudioPlayer
中播放同一个文件时可以正常播放,但用 AVURLAsset 读取时却不行。
我也尝试使用 installTapOnBus
方法处理音频,但每次都没有得到正确的输出文件。
请帮我修正上述错误。
代码如下:
/// Extracts the first audio track of the asset at `url` into a temporary
/// M4A file, then starts offline processing via `playAndRecord`.
/// - Parameter url: Source media URL (video or audio).
func extractAudio(url: URL) {
    // Build a composition containing only the source's first audio track.
    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: url)
    do {
        guard let audioAssetTrack = asset.tracks(withMediaType: .audio).first else { return }
        guard let audioCompositionTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
        try audioCompositionTrack.insertTimeRange(audioAssetTrack.timeRange,
                                                  of: audioAssetTrack,
                                                  at: .zero)
    } catch {
        print(error)
        return
    }

    // Destination for the exported audio.
    // Bug fix: the original called "NstemporaryDirectory()", which does not
    // exist — the Foundation function is NSTemporaryDirectory().
    let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
    if FileManager.default.fileExists(atPath: outputUrl.path) {
        try? FileManager.default.removeItem(at: outputUrl)
    }

    // Create the export session without force-unwrapping: the initializer is
    // failable (e.g. if the preset is unsupported for this composition).
    guard let exportSession = AVAssetExportSession(asset: composition,
                                                   presetName: AVAssetExportPresetAppleM4A) else {
        print("Unable to create AVAssetExportSession")
        return
    }
    exportSession.outputFileType = .m4a
    exportSession.outputURL = outputUrl
    exportSession.timeRange = CMTimeRange(start: .zero, duration: asset.duration)

    // Export the audio, then hand the file off to the offline-processing step.
    exportSession.exportAsynchronously {
        // Bug fix: the original `guard case exportSession.status = .completed`
        // had the pattern and value reversed and silently discarded failures;
        // compare the status directly and surface the error.
        guard exportSession.status == .completed else {
            print("Export failed: \(String(describing: exportSession.error))")
            return
        }
        do {
            self.sourceFile = try AVAudioFile(forReading: outputUrl)
            self.format = self.sourceFile.processingFormat
            self.playAndRecord(pitch: -500, rate: 1.0, reverb: 10, echo: 1.0)
        } catch {
            // Replaces the original try! so a read failure doesn't crash.
            print("Unable to open exported audio file: \(error)")
        }
    }
}
/// Renders `sourceFile` offline through a player -> reverb -> pitch ->
/// varispeed chain and writes the processed audio to the Documents directory.
/// - Parameters:
///   - pitch: Pitch shift in cents (e.g. -500 lowers the pitch).
///   - rate: Playback rate for the varispeed unit (1.0 = unchanged).
///   - reverb: Wet/dry mix (0–100) for the reverb unit.
///   - echo: Currently unused by the body — kept for interface compatibility.
func playAndRecord(pitch: Float, rate: Float, reverb: Float, echo: Float) {
    let engine = AVAudioEngine()
    let player = AVAudioPlayerNode()
    let reverbEffect = AVAudioUnitReverb()
    let pitchEffect = AVAudioUnitTimePitch()
    // Bug fix: the class is AVAudioUnitVarispeed (capital V); the original
    // "AVAudioUnitvarispeed" does not compile.
    let playbackRateEffect = AVAudioUnitVarispeed()

    engine.attach(player)
    engine.attach(reverbEffect)
    engine.attach(pitchEffect)
    engine.attach(playbackRateEffect)

    // Set the desired effect parameters.
    reverbEffect.loadFactoryPreset(.mediumHall)
    reverbEffect.wetDryMix = reverb
    pitchEffect.pitch = pitch
    playbackRateEffect.rate = rate

    // Connect player -> reverb -> pitch -> varispeed -> main mixer.
    // Bug fix: the property is mainMixerNode, not "mainmixerNode".
    engine.connect(player, to: reverbEffect, format: format)
    engine.connect(reverbEffect, to: pitchEffect, format: format)
    engine.connect(pitchEffect, to: playbackRateEffect, format: format)
    engine.connect(playbackRateEffect, to: engine.mainMixerNode, format: format)

    // Schedule the source file for playback through the chain.
    player.scheduleFile(sourceFile, at: nil)

    do {
        // The maximum number of frames the engine renders in any single call.
        let maxFrames: AVAudioFrameCount = 4096
        try engine.enableManualRenderingMode(.offline, format: format, maximumFrameCount: maxFrames)
    } catch {
        fatalError("Enabling manual rendering mode failed: \(error).")
    }

    do {
        try engine.start()
        player.play()
    } catch {
        fatalError("Unable to start audio engine: \(error).")
    }

    // The output buffer into which the engine renders the processed data.
    let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat,
                                  frameCapacity: engine.manualRenderingMaximumFrameCount)!

    // BUG FIX (the question's core problem): AVAudioFile(forWriting:) with a
    // PCM common format writes a Core Audio Format (CAF) container of linear
    // PCM — it does NOT produce MPEG-4/AAC. Naming that file ".m4a" made
    // AVURLAsset fail to parse it (tracks(withMediaType:) returned nothing and
    // metadata was empty), while AVAudioPlayer — which sniffs actual content —
    // could still play it. Writing a ".caf" file matches the real contents, so
    // AVURLAsset can read the audio track for the later video merge.
    var outputFile: AVAudioFile
    do {
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let outputURL = documentsURL.appendingPathComponent("EditedFile.caf")
        outputFile = try AVAudioFile(forWriting: outputURL,
                                     settings: sourceFile.fileFormat.settings,
                                     commonFormat: .pcmFormatInt32,
                                     interleaved: true)
    } catch {
        fatalError("Unable to open output audio file: \(error).")
    }

    // Pull rendered audio from the engine until the whole source is consumed.
    while engine.manualRenderingSampleTime < sourceFile.length {
        do {
            let remainingFrames = sourceFile.length - engine.manualRenderingSampleTime
            let framesToRender = min(AVAudioFrameCount(remainingFrames), buffer.frameCapacity)
            let status = try engine.renderOffline(framesToRender, to: buffer)
            switch status {
            case .success:
                // The data rendered successfully; append it to the output file.
                try outputFile.write(from: buffer)
            case .insufficientDataFromInputNode:
                // Applicable only when using the input node as a source.
                break
            case .cannotDoInCurrentContext:
                // The engine couldn't render in this call; retry next iteration.
                break
            case .error:
                fatalError("The manual rendering failed.")
            @unknown default:
                // Future-proof against new status cases added by AVFoundation.
                fatalError("Unknown manual rendering status.")
            }
        } catch {
            fatalError("The manual rendering failed: \(error).")
        }
    }

    print("finished")
    // Sanity check: the written file should now expose its audio track.
    let asset = AVURLAsset(url: outputFile.url)
    print(asset.tracks(withMediaType: .audio))
    player.stop()
    engine.stop()
}
提前致谢。
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。