微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

如何在视频或 CATextLayer 上添加文字?我想在录制的视频上显示文字。当我这样做时,音频正常,但视频变为空白。提前致谢。

如何解决"如何在视频或 CATextLayer 上添加文字"的问题?我想在录制的视频上显示文字。当我这样做时,音频正常,但视频变为空白。提前致谢。

我正在录制 3 个视频并将它们合并为一个。我需要所有视频的文字。正在做,但没有成功。视频变黑,音频正常。当我没有 CALayer 时,一切都很好需要帮助。 我的代码有问题,或者我做错了,请指导我。

 private func doMerge(arrayVideos: [AVAsset], animation: Bool, completion: @escaping Completion) -> Void {
     // Concatenates the given assets back-to-back into one composition,
     // overlays a text (watermark) layer on the rendered frames, and exports
     // the result as an .mp4 in the temporary directory.
     //
     // - Parameters:
     //   - arrayVideos: clips to merge, played in array order.
     //   - animation: when true, fade each clip out over 1s at its end;
     //     otherwise hide it instantly when the next clip starts.
     //   - completion: forwarded to `exportDidFinish` when the export ends.
     var insertTime = CMTime.zero
     var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
     var outputSize = CGSize(width: 0, height: 0)

     // Determine the output size: the tallest clip wins. naturalSize is
     // reported pre-rotation, so swap width/height for portrait clips.
     for videoAsset in arrayVideos {
         let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
         let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)
         var videoSize = videoTrack.naturalSize
         if assetInfo.isPortrait == true {
             videoSize.width = videoTrack.naturalSize.height
             videoSize.height = videoTrack.naturalSize.width
         }
         if videoSize.height > outputSize.height {
             outputSize = videoSize
         }
     }
     if outputSize.width == 0 || outputSize.height == 0 {
         outputSize = defaultSize
     }

     // Init composition
     let mixComposition = AVMutableComposition()

     for videoAsset in arrayVideos {
         // A clip without a video track cannot be merged; skip it.
         guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }

         // Audio is optional — a silent clip still merges fine.
         let audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first

         // Init video & audio composition tracks.
         let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
         let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

         do {
             let duration = videoAsset.duration
             let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: duration)

             // Append this clip's video (and audio, if any) at the running
             // insert time. FIX: the original audio call was missing the
             // `duration:` argument and did not compile.
             try videoCompositionTrack?.insertTimeRange(timeRange, of: videoTrack, at: insertTime)
             if let audioTrack = audioTrack {
                 try audioCompositionTrack?.insertTimeRange(timeRange, of: audioTrack, at: insertTime)
             }

             // Per-track transform/opacity instruction.
             let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack!, asset: videoAsset, standardSize: outputSize, atTime: insertTime)

             // Hide the track before the next one starts.
             // FIX: setOpacityRamp / setOpacity use an uppercase "O".
             let endTime = CMTimeAdd(insertTime, duration)
             if animation {
                 let timeScale = videoAsset.duration.timescale
                 let durationAnimation = CMTime(seconds: 1, preferredTimescale: timeScale)
                 layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange(start: endTime, duration: durationAnimation))
             } else {
                 layerInstruction.setOpacity(0, at: endTime)
             }
             arrayLayerInstructions.append(layerInstruction)

             // Advance the insert time past this clip.
             insertTime = endTime
         } catch {
             print("Load track error")
         }
     }

     // Watermark / title overlay.
     // FIX: the original built this inside the per-asset loop (overwriting
     // `animationTool` every iteration), never gave the video layer a frame,
     // and passed the text layer itself as the animation parent
     // (`in: titleLayer`) — which is why the export came out black.
     // The correct layer tree is:
     //   parentlayer
     //     ├─ videolayer  (receives the rendered video frames)
     //     └─ titleLayer  (the text, drawn on top)
     let renderFrame = CGRect(x: 0, y: 0, width: outputSize.width, height: outputSize.height)

     let titleLayer = CATextLayer()
     titleLayer.backgroundColor = UIColor.clear.cgColor
     titleLayer.contentsScale = UIScreen.main.scale
     titleLayer.string = "Dummy text"
     titleLayer.foregroundColor = UIColor.white.cgColor
     titleLayer.font = UIFont(name: "Helvetica", size: 28)
     titleLayer.shadowOpacity = 0.5
     titleLayer.alignmentMode = CATextLayerAlignmentMode.center
     titleLayer.frame = renderFrame

     let videolayer = CALayer()
     videolayer.backgroundColor = UIColor.clear.cgColor
     videolayer.frame = renderFrame

     let parentlayer = CALayer()
     parentlayer.frame = renderFrame
     parentlayer.addSublayer(videolayer)
     parentlayer.addSublayer(titleLayer)

     mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

     // Main video composition instruction spanning the whole merged timeline.
     let mainInstruction = AVMutableVideoCompositionInstruction()
     mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime)
     mainInstruction.layerInstructions = arrayLayerInstructions

     mainComposition.instructions = [mainInstruction]
     mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
     mainComposition.renderSize = outputSize

     // Export to a temporary file.
     // FIX: NSTemporaryDirectory — the original `NstemporaryDirectory`
     // does not exist.
     let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
     let exportURL = URL(fileURLWithPath: path)

     // Remove file if it already exists.
     FileManager.default.removeItemIfExisted(exportURL)

     // Init exporter.
     let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
     exporter?.outputURL = exportURL
     exporter?.outputFileType = AVFileType.mp4
     // FIX: shouldOptimizeForNetworkUse — uppercase "F".
     exporter?.shouldOptimizeForNetworkUse = true
     exporter?.videoComposition = mainComposition

     // Do export; hop back to the main queue for the completion.
     // FIX: DispatchQueue — uppercase "D".
     exporter?.exportAsynchronously(completionHandler: {
         DispatchQueue.main.async {
             self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion)
         }
     })
 }

解决方法

只需更改此部分

let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)

// naturalSize is reported pre-rotation; swap width/height for portrait clips.
var videoSize = videoTrack.naturalSize
if assetInfo.isPortrait == true {
    videoSize.width = videoTrack.naturalSize.height
    videoSize.height = videoTrack.naturalSize.width
}

// One frame shared by all three layers, covering the full render area.
// FIX: the original used `CGRect(x: 0, height: ...)`, which is not a valid
// CGRect initializer (missing `y:` and `width:`) and did not compile.
let videoFrame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)

// Text layer drawn on top of the video frames.
let titleLayer = CATextLayer()
titleLayer.backgroundColor = UIColor.clear.cgColor
titleLayer.contentsScale = UIScreen.main.scale
titleLayer.string = questions[counter]
counter = counter + 1
titleLayer.foregroundColor = UIColor.black.cgColor
titleLayer.font = UIFont(name: "Helvetica", size: 28)
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = CATextLayerAlignmentMode.center
titleLayer.frame = videoFrame

// Layer that receives the rendered video frames. The red background is a
// debug aid: it becomes visible only if frames fail to render. (The original
// assigned `clear` and then immediately overwrote it with `red`.)
let videolayer = CALayer()
videolayer.backgroundColor = UIColor.red.cgColor
videolayer.frame = videoFrame

// Parent layer: video below, text above — this is what fixes the black
// video, because the animation tool needs a parent containing the video layer.
let parentlayer = CALayer()
parentlayer.frame = videoFrame
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(titleLayer)

mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。