To create an app like Dubsmash: <https://itunes.apple.com/in/app/dubsmash/id918820076?mt=8>

I have merged the audio file of a dialogue or song with the captured video.

Here is the method that merges the video with the audio track:

// MARK: - Merge Video

/// Merges the captured video (`videoFilePath`) with the selected audio
/// track (`audioFilePath`) and exports the result as an MP4 file named
/// `merged_video.mp4` in the app's Documents directory.
///
/// On success the merged file is played via `playVideo(filePath:)`.
/// The audio is clipped to the video's duration, since the audio file
/// may be longer than the recorded video.
func mergeVideoWithAudio() {

    let mutableComposition = AVMutableComposition()

    let aVideoAsset = AVAsset(url: videoFilePath)
    let aAudioAsset = AVAsset(url: audioFilePath)

    // One video track and one audio track in the composition.
    // (The original code added the same video a second time as a separate
    // track; a single track is enough and avoids a duplicated video layer.)
    let compositionVideoTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAudioTrack = mutableComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

    // Guard against assets with no video/audio track instead of crashing
    // on `[0]`.
    guard let aVideoAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo).first,
          let aAudioAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio).first else {
        print("Missing video or audio track in source assets")
        return
    }

    let videoTimeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)

    do {
        try compositionVideoTrack.insertTimeRange(videoTimeRange, of: aVideoAssetTrack, at: kCMTimeZero)

        // The audio file is usually longer than the video, so use the
        // video track's duration for the audio insert as well.
        try compositionAudioTrack.insertTimeRange(videoTimeRange, of: aAudioAssetTrack, at: kCMTimeZero)
    } catch {
        // Don't continue with a half-built composition.
        print("Failed to insert tracks into composition: \(error)")
        return
    }

    // Layer instruction: fixes the orientation/scale of the video track
    // (see videoCompositionInstructionForTrack) and hides it at the end.
    let layerInstruction = videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: aVideoAsset)
    layerInstruction.setOpacity(0.0, at: aVideoAsset.duration)

    let totalVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    totalVideoCompositionInstruction.timeRange = videoTimeRange
    totalVideoCompositionInstruction.layerInstructions = [layerInstruction]

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [totalVideoCompositionInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)

    // Find your merged video at this URL.
    let strFileName = "merged_video.mp4"
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let savePathUrl = documentsURL.appendingPathComponent(strFileName)

    // The export session fails if a file already exists at the output URL.
    try? FileManager.default.removeItem(at: savePathUrl)

    guard let assetExport = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Could not create export session")
        return
    }
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true
    assetExport.videoComposition = mainComposition

    assetExport.exportAsynchronously { () -> Void in

        switch assetExport.status {
        case .completed:

            // Remove this if you don't want the video saved to the photo
            // album. NOTE(review): ALAssetsLibrary is deprecated since
            // iOS 9 — prefer PHPhotoLibrary when you can raise the target.
            let assetsLib = ALAssetsLibrary()
            assetsLib.writeVideoAtPath(toSavedPhotosAlbum: savePathUrl, completionBlock: nil)

            print("success")

            // UI work (hiding the HUD, playing the video) must run on the
            // main thread; the export callback arrives on a background queue.
            DispatchQueue.main.async {
                Helper.hideHud(pView: self.view)
                self.playVideo(filePath: savePathUrl)
            }

        case .failed:
            print("failed \(String(describing: assetExport.error))")

        case .cancelled:
            print("cancelled \(String(describing: assetExport.error))")

        default:
            print("complete")
        }
    }
}

After merging, the video's rotation may change by 90 degrees. To restore the video to its
original orientation, add the two methods below to your code:

// MARK: - Rotate Video to Original Orientation

    func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {

        var assetOrientation = UIImageOrientation.up
        var isPortrait = false

        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } 
        else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 &&  transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } 
        else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0  {
            assetOrientation = .up
        } 
        else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {

        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]        
        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)       
        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width

        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform( assetTrack.preferredTransform.concatenating( scaleFactor), at: kCMTimeZero)

        } 
        else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(M_PI))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: kCMTimeZero)
        }

        return instruction
    }

You may also like

Leave a Reply