AVMutableVideoComposition output video is shrunk

I am new to Swift. I am trying to add a watermark, following code from SO. My original video resolution is 1280 x 720, but the output video is a shrunken version.

Here are before and after pictures:

Before

After

Here is my function for creating the watermark:

private func watermark(video videoAsset:AVAsset, watermarkText text : String!, image : CGImage!, saveToLibrary flag : Bool, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
    DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {

        let mixComposition = AVMutableComposition()

        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
        }
        catch {
            print(error.localizedDescription)
        }

        let videoSize = clipVideoTrack.naturalSize

        print("Video size", videoSize.height) //720
        print("Video size", videoSize.width) //1280

        let parentLayer = CALayer()
        let videoLayer = CALayer()
        parentLayer.frame = CGRect(x: 0.0,
                                   y: 0.0,
                                   width: videoSize.width,
                                   height: videoSize.height)
        videoLayer.frame = CGRect(x: 0.0,
                                  y: 0.0,
                                  width: videoSize.width,
                                  height: videoSize.height)
        parentLayer.addSublayer(videoLayer)

        if text != nil {
            let titleLayer = CATextLayer()
            titleLayer.backgroundColor = UIColor.red.cgColor
            titleLayer.string = text
            titleLayer.font = "Helvetica" as CFTypeRef
            titleLayer.fontSize = 15
            titleLayer.alignmentMode = kCAAlignmentCenter
            titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(titleLayer)
        } else if image != nil {
            let imageLayer = CALayer()
            imageLayer.contents = image

            let width: CGFloat = (self.imageView.image?.size.width)!
            let height: CGFloat = (self.imageView.image?.size.height)!

            print("Video size", height) //720
            print("Video size", width) //1280

            imageLayer.frame = CGRect(x: 0.0, y: 0.0, width: width, height: height)
            imageLayer.opacity = 0.65
            parentLayer.addSublayer(imageLayer)
        }

        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        _ = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack

        let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset)

        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]

        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov")

        let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = url
        exporter?.outputFileType = AVFileTypeQuickTimeMovie
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = videoComp

        exporter?.exportAsynchronously() {
            DispatchQueue.main.async {

                if exporter?.status == AVAssetExportSessionStatus.completed {
                    let outputURL = exporter?.outputURL
                    if flag {

                        if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
                            PHPhotoLibrary.shared().performChanges({
                                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
                            }) { saved, error in
                                if saved {
                                    completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                                }
                            }
                        }
                    } else {
                        completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                    }

                } else {
                    // Error
                    completion!(exporter?.status, exporter, nil)
                }
            }
        }
    }
}
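
The function calls a helper videoCompositionInstructionForTrack that is not listed above. Presumably it follows the usual SO pattern of carrying over the source track's preferredTransform; a sketch (the actual helper may differ):

private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    // Presumed behavior: apply the source track's preferred transform so that
    // rotated (e.g. portrait) footage keeps its orientation in the composition.
    let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    instruction.setTransform(assetTrack.preferredTransform, at: kCMTimeZero)
    return instruction
}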

While the size of the watermark image is correct, the video is shrunk.

Answer 1

You can try this function:

private func watermark(video videoAsset: AVAsset, watermarkText text : String!, image : CGImage!, saveToLibrary flag : Bool, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
  DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {

    let mixComposition = AVMutableComposition()

    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    do {
      try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
    } catch {
      print(error.localizedDescription)
    }

    let videoSize = clipVideoTrack.naturalSize

    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0.0,
      y: 0.0,
      width: videoSize.width,
      height: videoSize.height)
    videoLayer.frame = CGRect(x: 0.0,
      y: 0.0,
      width: videoSize.width,
      height: videoSize.height)
    parentLayer.addSublayer(videoLayer)

    // (the text-watermark branch from the question is omitted in this version)
    if image != nil {
      let imageLayer = CALayer()
      imageLayer.contents = image

      let width: CGFloat = (self.imageView.image?.size.width)!
      let height: CGFloat = (self.imageView.image?.size.height)!

      print("Video size", height)
      print("Video size", width)

      imageLayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
      imageLayer.opacity = 1
      parentLayer.addSublayer(imageLayer)
    }

    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let videotrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)

    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: Date())
    let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mp4")

    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.videoComposition = videoComp
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.outputURL = url

    exporter.exportAsynchronously() {
      DispatchQueue.main.async {

        if exporter.status == AVAssetExportSessionStatus.completed {
          let outputURL = exporter.outputURL
          if flag {
            // Save to library
            if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
              PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
              }) { saved, error in
                if saved {
                  completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                }
              }
            }

          } else {
            completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
          }

        } else {
          // Error
          completion!(exporter.status, exporter, nil)
        }
      }
    }
  }
}
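
Compared with the question's version, this variant creates the layer instruction directly with AVMutableVideoCompositionLayerInstruction(assetTrack:) instead of going through the transform-applying helper, makes the image layer fully opaque, and exports an MPEG-4 file instead of a QuickTime movie.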

Answer 2

The watermarking code above does not appear to be the cause of the lower output resolution.

The problem

The resolution depends on which kind of AVAsset is passed into the watermark method.

Example: often a UIImagePickerController is used. It has the delegate method

func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) 

There you can often see something like this:

let url = info[UIImagePickerControllerMediaURL] as? URL
let videoAsset = AVAsset(url: url!)
self.watermark(video: videoAsset, watermarkText: nil, image: self.imageView.image?.cgImage ...

But with the lines above, a downscaled input asset is used: instead of a video with a resolution of 1920x1080, for example, the video size is reduced to 1280x720.
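
A quick way to confirm this (a diagnostic sketch, not part of the original answer) is to log the natural size of the video track of the asset that actually reaches the watermark method:

let url = info[UIImagePickerControllerMediaURL] as? URL
let videoAsset = AVAsset(url: url!)
// With the media-URL asset this already prints the reduced size, e.g. 1280.0 x 720.0.
if let track = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first {
    print("input resolution:", track.naturalSize)
}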

The solution

A method for determining the AVAsset for a PHAsset could look like this:

private func videoAsset(for asset: PHAsset, completion: @escaping (AVAsset?) -> Void) {
    let requestOptions = PHVideoRequestOptions()
    requestOptions.version = .original
    PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions, resultHandler: {
        (avAsset, avAudioMix, info) in
        completion(avAsset)
    })
}
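
One caveat, as an assumption about the setup rather than part of the original answer: if the selected video is only available in iCloud, requestAVAsset may return nil unless network access is allowed on the request options:

let requestOptions = PHVideoRequestOptions()
requestOptions.version = .original
// Allow PHImageManager to download the original from iCloud if it is not on the device.
requestOptions.isNetworkAccessAllowed = true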

And where does the PHAsset come from? It can also be obtained in the didFinishPickingMediaWithInfo method, using UIImagePickerControllerPHAsset:

let asset = info[UIImagePickerControllerPHAsset] as? PHAsset
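
Note that UIImagePickerControllerPHAsset is available from iOS 11 and is typically only populated when the app is authorized for photo-library access (which assumes an NSPhotoLibraryUsageDescription entry in Info.plist). Requesting authorization up front could look like this:

PHPhotoLibrary.requestAuthorization { status in
    // Without .authorized the picker itself still works,
    // but info[UIImagePickerControllerPHAsset] may stay nil.
    print("photo library authorization:", status.rawValue)
}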

Quick test

For a quick test you could use:

func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
    if let asset = info[UIImagePickerControllerPHAsset] as? PHAsset {
        picker.dismiss(animated: true, completion: { [weak self] in
            self?.videoAsset(for: asset, completion: { (avAsset) in
                if let videoAsset = avAsset {
                    DispatchQueue.main.async {
                        self?.watermark(video: videoAsset, watermarkText: nil, image: self?.imageView.image?.cgImage, saveToLibrary: true) { (exportStat: AVAssetExportSessionStatus?, session: AVAssetExportSession?, url: URL?) in
                            print("url: \(String(describing: url?.debugDescription))")
                        }
                    }
                }
            })
        })
    }
}
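
For completeness, this assumes the picker was configured to offer videos in the first place; a setup sketch (not from the original answer):

import MobileCoreServices

let picker = UIImagePickerController()
picker.sourceType = .photoLibrary
// kUTTypeMovie limits the picker to videos instead of photos.
picker.mediaTypes = [kUTTypeMovie as String]
picker.delegate = self
present(picker, animated: true, completion: nil)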

The result is a video in the original resolution with the watermark in the lower left corner. See the screenshot of the resulting video:

test of adding watermark