
Swift 3: How to add a watermark to a video? AVVideoCompositionCoreAnimationTool iOS 10 issue

This code used to work on iOS 9 to add a watermark and text to a video, but it stopped working as of iOS 10. An iOS 10 bug has been filed with Apple, with no answer so far. I have not been able to implement any workaround that adds the watermark and text to the video. With this code, the video sometimes exports successfully, but most of the time it does not.

How should I use AVVideoCompositionCoreAnimationTool so that it works the way it did on iOS 9?

let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition() 

videoComposition.frameDuration = CMTimeMake(1, 60) 
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height) 


let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction() 

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30)) 

// transformer is applied to set the video in portrait otherwise it is rotated by 90 degrees 
let transformer: AVMutableVideoCompositionLayerInstruction = 
    AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack) 

let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2) 

let t2: CGAffineTransform = t1.rotated(by: CGFloat.pi / 2) 

let finalTransform: CGAffineTransform = t2 

transformer.setTransform(finalTransform, at: kCMTimeZero) 

instruction.layerInstructions = [transformer] 

videoComposition.instructions = [instruction] 



let mixComposition = AVMutableComposition() 
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 


do { 
    try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero) 
} catch { 
    print(error) 
} 


//Add watermark 


let myImage = UIImage(named: "logo") 

let aLayer = CALayer() 
aLayer.contents = myImage?.cgImage 
// Scale the logo's position and size from view coordinates into the render size 
aLayer.frame = CGRect(x: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-45))/self.view.bounds.width, y: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-40))/self.view.bounds.width, width: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width, height: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width) 

let titleLayer = CATextLayer() 
titleLayer.string = "text" 
titleLayer.font = UIFont(name: "Helvetica", size: 0) // the point size here is ignored; fontSize below wins 
titleLayer.fontSize = clipVideoTrack.naturalSize.height/16 
titleLayer.shadowOpacity = 0.5 
titleLayer.alignmentMode = kCAAlignmentCenter 
titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6) 
titleLayer.display() // force the text layer to render its contents immediately 


let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize 
let parentLayer = CALayer() 
let videoLayer = CALayer() 
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height) 
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height) 

parentLayer.addSublayer(videoLayer) 
parentLayer.addSublayer(aLayer) 
parentLayer.addSublayer(titleLayer) 


// Attaching the animation tool is the step that triggers the intermittent iOS 10 export failure 
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) 



// Remove any previous file at the output URL; AVAssetExportSession will not overwrite an existing file 
do { try FileManager.default.removeItem(at: filePath) } 
catch let error as NSError { 
    NSLog("\(error), \(error.localizedDescription)") 
} 



let exportUrl: URL = filePath 
self.videoUrl = filePath as NSURL 


// Note: the exporter runs on the original asset; mixComposition above is never used 
let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality) 

exporter!.videoComposition = videoComposition 
exporter!.outputFileType = AVFileTypeQuickTimeMovie 
exporter!.outputURL = exportUrl 


exporter!.exportAsynchronously(completionHandler: { 

    DispatchQueue.main.async { 


     self.view.layer.addSublayer(self.avPlayerLayer) 

     let item = AVPlayerItem(url: exportUrl) 
     self.player.replaceCurrentItem(with: item) 

     if (self.player.currentItem != nil) { 
      print("Starting playback!") 
      self.player.play() 
     } 

    } 

}) 
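
(For debugging the intermittent failures, it helps to check the session's status and error before starting playback, instead of playing unconditionally; a minimal variant of the handler above:)

exporter!.exportAsynchronously(completionHandler: { 
    DispatchQueue.main.async { 
        // Surface why the export failed instead of assuming success 
        switch exporter!.status { 
        case .completed: 
            self.view.layer.addSublayer(self.avPlayerLayer) 
            self.player.replaceCurrentItem(with: AVPlayerItem(url: exportUrl)) 
            self.player.play() 
        case .failed, .cancelled: 
            print("Export failed: \(String(describing: exporter!.error))") 
        default: 
            break 
        } 
    } 
})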

Please note: if I remove the AVVideoCompositionCoreAnimationTool, the video always exports, but then of course it has no watermark or text. How can I make this work so that AVVideoCompositionCoreAnimationTool does not conflict with AVAssetExportSession?

Some people have implemented a workaround using customVideoCompositorClass and the AVVideoCompositing protocol (a sketch follows), but that seems heavyweight compared to how this is supposed to work.
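
For reference, here is a minimal sketch of that workaround in Swift 3. The class name PassthroughCompositor is my own; a real implementation would also have to draw the watermark and text onto each frame inside startRequest, which is exactly what makes this approach heavyweight:

import AVFoundation 
import CoreVideo 

class PassthroughCompositor: NSObject, AVVideoCompositing { 

    // Request BGRA frames so CoreGraphics/CoreImage drawing would be straightforward 
    var sourcePixelBufferAttributes: [String : Any]? = 
        [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] 
    var requiredPixelBufferAttributesForRenderContext: [String : Any] = 
        [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] 

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { 
        // React to render-size or transform changes here if needed 
    } 

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) { 
        // Pass the single source frame through; the watermark would be composited onto it here 
        if let trackID = request.sourceTrackIDs.first?.int32Value, 
           let frame = request.sourceFrame(byTrackID: trackID) { 
            request.finish(withComposedVideoFrame: frame) 
        } else { 
            request.finish(with: NSError(domain: "PassthroughCompositor", code: -1, userInfo: nil)) 
        } 
    } 
} 

// videoComposition.customVideoCompositorClass = PassthroughCompositor.self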

Answers


I got this answer from here, and it worked for me. See if it works for you. (Note that the code below is in older, pre-Swift 3 syntax; a Swift 3 port follows in the next answer.)

import UIKit 
import AssetsLibrary 
import AVFoundation 

enum QUWatermarkPosition { 
    case TopLeft 
    case TopRight 
    case BottomLeft 
    case BottomRight 
    case Default 
} 

class QUWatermarkManager: NSObject { 

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status: status, session: session, outputURL: outputURL) 
     } 
    } 

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status: status, session: session, outputURL: outputURL) 
     } 
    } 

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 

     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {() -> Void in 
      let mixComposition = AVMutableComposition() 

      let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
      let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] 
      do { 
       try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero) 
      } catch { 
       print(error) 
      } 

      let videoSize = clipVideoTrack.naturalSize 

      let parentLayer = CALayer() 
      let videoLayer = CALayer() 
      parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height) 
      videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height) 
      parentLayer.addSublayer(videoLayer) 

      if text != nil { 
       let titleLayer = CATextLayer() 
       titleLayer.backgroundColor = UIColor.redColor().CGColor 
       titleLayer.string = text 
       titleLayer.font = "Helvetica" 
       titleLayer.fontSize = 15 
       titleLayer.alignmentMode = kCAAlignmentCenter 
       titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height) 
       parentLayer.addSublayer(titleLayer) 
      } else if name != nil { 
       let watermarkImage = UIImage(named: name) 
       let imageLayer = CALayer() 
       imageLayer.contents = watermarkImage?.CGImage 

       var xPosition : CGFloat = 0.0 
       var yPosition : CGFloat = 0.0 
       let imageSize : CGFloat = 57.0 

       switch (position) { 
       case .TopLeft: 
        xPosition = 0 
        yPosition = 0 
       case .TopRight: 
        xPosition = videoSize.width - imageSize 
        yPosition = 0 
       case .BottomLeft: 
        xPosition = 0 
        yPosition = videoSize.height - imageSize 
       case .BottomRight, .Default: 
        xPosition = videoSize.width - imageSize 
        yPosition = videoSize.height - imageSize 
       } 


       imageLayer.frame = CGRectMake(xPosition, yPosition, imageSize, imageSize) 
       imageLayer.opacity = 0.65 
       parentLayer.addSublayer(imageLayer) 
      } 

      let videoComp = AVMutableVideoComposition() 
      videoComp.renderSize = videoSize 
      videoComp.frameDuration = CMTimeMake(1, 30) 
      videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer) 

      let instruction = AVMutableVideoCompositionInstruction() 
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration) 

      let layerInstruction = self.videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset) 

      instruction.layerInstructions = [layerInstruction] 
      videoComp.instructions = [instruction] 

      let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] 
      let dateFormatter = NSDateFormatter() 
      dateFormatter.dateStyle = .LongStyle 
      dateFormatter.timeStyle = .ShortStyle 
      let date = dateFormatter.stringFromDate(NSDate()) 
      let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("watermarkVideo-\(date).mov") 
      let url = NSURL(fileURLWithPath: savePath) 

      guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return } 
      exporter.outputURL = url 
      exporter.outputFileType = AVFileTypeQuickTimeMovie 
      exporter.shouldOptimizeForNetworkUse = true 
      exporter.videoComposition = videoComp 

      exporter.exportAsynchronouslyWithCompletionHandler { 
       dispatch_async(dispatch_get_main_queue(), {() -> Void in 
        if exporter.status == AVAssetExportSessionStatus.Completed { 
         let outputURL = exporter.outputURL 
         if flag { 
          // Save to library 
          let library = ALAssetsLibrary() 
          if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) { 
           library.writeVideoAtPathToSavedPhotosAlbum(outputURL, 
            completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in 
             completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL) 
           }) 
          } 
         } else { 
          completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL) 
         } 

        } else { 
         // Error 
         completion!(status: exporter.status, session: exporter, outputURL: nil) 
        } 
       }) 
      } 
     }) 
    } 


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) { 
     var assetOrientation = UIImageOrientation.Up 
     var isPortrait = false 
     if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 { 
      assetOrientation = .Right 
      isPortrait = true 
     } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 { 
      assetOrientation = .Left 
      isPortrait = true 
     } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 { 
      assetOrientation = .Up 
     } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 { 
      assetOrientation = .Down 
     } 
     return (assetOrientation, isPortrait) 
    } 

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction { 
     let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) 
     let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] 

     let transform = assetTrack.preferredTransform 
     let assetInfo = orientationFromTransform(transform) 

     var scaleToFitRatio = UIScreen.mainScreen().bounds.width/assetTrack.naturalSize.width 
     if assetInfo.isPortrait { 
      scaleToFitRatio = UIScreen.mainScreen().bounds.width/assetTrack.naturalSize.height 
      let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio) 
      instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), 
       atTime: kCMTimeZero) 
     } else { 
      let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio) 
      var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width/2)) 
      if assetInfo.orientation == .Down { 
       let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI)) 
       let windowBounds = UIScreen.mainScreen().bounds 
       let yFix = assetTrack.naturalSize.height + windowBounds.height 
       let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix) 
       concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor) 
      } 
      instruction.setTransform(concat, atTime: kCMTimeZero) 
     } 

     return instruction 
    } 
} 

@User511's answer in Swift 3:

import UIKit 
import AssetsLibrary 
import AVFoundation 
import Photos 

enum QUWatermarkPosition { 
    case TopLeft 
    case TopRight 
    case BottomLeft 
    case BottomRight 
    case Default 
} 

class QUWatermarkManager: NSObject { 

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion?(status, session, outputURL) 
     } 
    } 

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion?(status, session, outputURL) 
     } 
    } 

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async { 

      let mixComposition = AVMutableComposition() 

      let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
      let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] 
      do { 
       try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero) 
      } 
      catch { 
       print(error.localizedDescription) 
      } 

      let videoSize = clipVideoTrack.naturalSize 

      let parentLayer = CALayer() 
      let videoLayer = CALayer() 
      parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
      videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
      parentLayer.addSublayer(videoLayer) 

      if text != nil { 
       let titleLayer = CATextLayer() 
       titleLayer.backgroundColor = UIColor.red.cgColor 
       titleLayer.string = text 
       titleLayer.font = "Helvetica" as CFTypeRef 
       titleLayer.fontSize = 15 
       titleLayer.alignmentMode = kCAAlignmentCenter 
       titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
       parentLayer.addSublayer(titleLayer) 
      } else if name != nil { 
       let watermarkImage = UIImage(named: name) 
       let imageLayer = CALayer() 
       imageLayer.contents = watermarkImage?.cgImage 

       var xPosition : CGFloat = 0.0 
       var yPosition : CGFloat = 0.0 
       let imageSize : CGFloat = 57.0 

       switch (position) { 
       case .TopLeft: 
        xPosition = 0 
        yPosition = 0 
       case .TopRight: 
        xPosition = videoSize.width - imageSize 
        yPosition = 0 
       case .BottomLeft: 
        xPosition = 0 
        yPosition = videoSize.height - imageSize 
       case .BottomRight, .Default: 
        xPosition = videoSize.width - imageSize 
        yPosition = videoSize.height - imageSize 
       } 


       imageLayer.frame = CGRect(x: xPosition, y: yPosition, width: imageSize, height: imageSize) 
       imageLayer.opacity = 0.65 
       parentLayer.addSublayer(imageLayer) 
      } 

      let videoComp = AVMutableVideoComposition() 
      videoComp.renderSize = videoSize 
      videoComp.frameDuration = CMTimeMake(1, 30) 
      videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) 

      let instruction = AVMutableVideoCompositionInstruction() 
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration) 

      let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset) 

      instruction.layerInstructions = [layerInstruction] 
      videoComp.instructions = [instruction] 

      let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] 
      let dateFormatter = DateFormatter() 
      dateFormatter.dateStyle = .long 
      dateFormatter.timeStyle = .short 
      let date = dateFormatter.string(from: Date()) 
      let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov") 

      let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 
      exporter?.outputURL = url 
      exporter?.outputFileType = AVFileTypeQuickTimeMovie 
      exporter?.shouldOptimizeForNetworkUse = true 
      exporter?.videoComposition = videoComp 

      exporter?.exportAsynchronously() { 
       DispatchQueue.main.async { 

        if exporter?.status == AVAssetExportSessionStatus.completed { 
         let outputURL = exporter?.outputURL 
         if flag { 
          // Save to library 
          if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) { 
           PHPhotoLibrary.shared().performChanges({ 
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!) 
           }) { saved, error in 
            if saved { 
             completion?(AVAssetExportSessionStatus.completed, exporter, outputURL) 
            } 
           } 
          } 

         } else { 
          completion?(AVAssetExportSessionStatus.completed, exporter, outputURL) 
         } 

        } else { 
         // Error 
         completion?(exporter?.status, exporter, nil) 
        } 
       } 
      } 
     } 
    } 


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) { 
     var assetOrientation = UIImageOrientation.up 
     var isPortrait = false 
     if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 { 
      assetOrientation = .right 
      isPortrait = true 
     } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 { 
      assetOrientation = .left 
      isPortrait = true 
     } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 { 
      assetOrientation = .up 
     } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 { 
      assetOrientation = .down 
     } 
     return (assetOrientation, isPortrait) 
    } 

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction { 
     let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) 
     let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0] 

     let transform = assetTrack.preferredTransform 
     let assetInfo = orientationFromTransform(transform: transform) 

     var scaleToFitRatio = UIScreen.main.bounds.width/assetTrack.naturalSize.width 
     if assetInfo.isPortrait { 
      scaleToFitRatio = UIScreen.main.bounds.width/assetTrack.naturalSize.height 
      let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 
      instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), 
            at: kCMTimeZero) 
     } else { 
      let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 
      var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width/2)) 
      if assetInfo.orientation == .down { 
       let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi)) 
       let windowBounds = UIScreen.main.bounds 
       let yFix = assetTrack.naturalSize.height + windowBounds.height 
       let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix) 
       concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor) 
      } 
      instruction.setTransform(concat, at: kCMTimeZero) 
     } 

     return instruction 
    } 
} 
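
A minimal call site for the manager above (videoURL stands in for your own source file URL):

let asset = AVAsset(url: videoURL) 
QUWatermarkManager().watermark(video: asset, watermarkText: "text", saveToLibrary: true, watermarkPosition: .Default) { status, session, outputURL in 
    if status == .completed, let url = outputURL { 
        print("Watermarked video exported to \(url)") 
    } 
}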

This solution worked for me; it's super simple and super fast:

func addWatermark(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) { 
    // Note: the composition below is prepared, but the export at the end applies the 
    // filter-based videoComposition directly to the source asset 
    let mixComposition = AVMutableComposition() 
    let asset = AVAsset(url: inputURL) 
    let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0] 
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration) 

    let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))! 

    do { 
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero) 
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform 
    } catch { 
        print(error) 
    } 

    // Composite the watermark over every source frame with Core Image, 
    // avoiding AVVideoCompositionCoreAnimationTool entirely 
    let watermarkFilter = CIFilter(name: "CISourceOverCompositing")! 
    let watermarkImage = CIImage(image: UIImage(named: "waterMark")!) 
    let videoComposition = AVVideoComposition(asset: asset) { (filteringRequest) in 
        let source = filteringRequest.sourceImage.clampedToExtent() 
        watermarkFilter.setValue(source, forKey: "inputBackgroundImage") 
        // Pin the watermark to the bottom-right corner, inset by 2 points 
        let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - (watermarkImage?.extent.width)! - 2, y: 0) 
        watermarkFilter.setValue(watermarkImage?.transformed(by: transform), forKey: "inputImage") 
        filteringRequest.finish(with: watermarkFilter.outputImage!, context: nil) 
    } 

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else { 
        handler(nil) 
        return 
    } 

    exportSession.outputURL = outputURL 
    exportSession.outputFileType = AVFileType.mp4 
    exportSession.shouldOptimizeForNetworkUse = true 
    exportSession.videoComposition = videoComposition 
    exportSession.exportAsynchronously { () -> Void in 
        handler(exportSession) 
    } 
}
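
A minimal call site for the function above (the two URLs are placeholders for your own paths):

let input = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("input.mov") 
let output = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("watermarked.mp4") 
try? FileManager.default.removeItem(at: output) // the export fails if a file already exists at outputURL 
addWatermark(inputURL: input, outputURL: output) { exportSession in 
    if let session = exportSession, session.status == .completed { 
        print("Watermarked video written to \(output)") 
    } 
}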