
I'm having problems recording video with the code below. I'm working from sample code for recording video with AVFoundation in iOS.

Specifically, this line will not compile; it produces the error: "Cannot assign value of type 'ViewController' to type 'AVCaptureFileOutputRecordingDelegate?'"

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

This line is inside an IBAction function:

@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

The rest of the code is here:

import UIKit 
import AVFoundation 
import Darwin 




class ViewController: UIViewController { 



@IBOutlet weak var CameraView: UIImageView! 

@IBOutlet weak var RecordButton: UIButton! 

@IBOutlet weak var SelectFrButton: UIButton! 

@IBOutlet weak var ISOslider: UISlider! 

@IBOutlet weak var SSslider: UISlider! 

@IBOutlet weak var ISOtextfield: UITextField! 

@IBOutlet weak var SStextfield: UITextField! 

@IBOutlet weak var TorchSlider: UISlider! 

@IBOutlet weak var Torchtextfield: UITextField! 

var captureSession = AVCaptureSession(); 
var DisplaySessionOutput = AVCaptureVideoDataOutput(); 
var SaveSessionOutput = AVCaptureMovieFileOutput(); 
var previewLayer = AVCaptureVideoPreviewLayer(); 
var CaptureDevice:AVCaptureDevice? = nil; 
var CurrentTorchLevel:Float = 0.5; 


override func viewDidLoad() { 
    super.viewDidLoad() 

    captureSession.sessionPreset = AVCaptureSessionPresetHigh 
    // Loop through all the capture devices on this phone 

    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified) 

    for device in (deviceDiscoverySession?.devices)! { 
     if(device.position == AVCaptureDevicePosition.back){ 
      do{ 

       try device.lockForConfiguration() 


       device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in 

        // Set text and sliders to correct levels 
        self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!; 
        self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!; 

        self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!); 
        self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!); 

        self.ISOtextfield.text = device.iso.description; 
        self.ISOslider.setValue(device.iso, animated: false) 

        self.SStextfield.text = device.exposureDuration.seconds.description; 
        self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false); 

        self.TorchSlider.minimumValue = 0.01; 
        self.TorchSlider.maximumValue = 1; 
        self.TorchSlider.value = 0.5; 
        self.Torchtextfield.text = "0.5"; 
       }) 




       //Turn torch on 

       if (device.torchMode == AVCaptureTorchMode.on) { 
        device.torchMode = AVCaptureTorchMode.off 
       } else { 
        try device.setTorchModeOnWithLevel(1.0) 

       } 

       device.unlockForConfiguration(); 

       CaptureDevice = device; 

       let input = try AVCaptureDeviceInput(device: CaptureDevice) 
       if(captureSession.canAddInput(input)){ 
        captureSession.addInput(input); 

        if(captureSession.canAddOutput(DisplaySessionOutput)){ 
         captureSession.addOutput(DisplaySessionOutput); 
         previewLayer = AVCaptureVideoPreviewLayer(session: captureSession); 
         previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
         previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait; 
         CameraView.layer.addSublayer(previewLayer); 
        } 
       } 
      } 
      catch{ 
       print("exception!"); 
      } 
     } 
    } 

    CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1); 

    captureSession.startRunning() 


} 

    // Do any additional setup after loading the view, typically from a nib. 


override func viewDidLayoutSubviews() { 

    previewLayer.frame = CameraView.bounds 

} 


override func didReceiveMemoryWarning() { 
    super.didReceiveMemoryWarning() 
    // Dispose of any resources that can be recreated. 
} 


@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

@IBAction func ISOvaluechanged(_ sender: Any) { 

    SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func SSvaluechanged(_ sender: Any) { 

    let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000); 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func ISOtextchanged(_ sender: Any) { 

} 

@IBAction func SStextchanged(_ sender: Any) { 

    //let time = CMTimeMake(Int64(exposurelevel * 100000),100000); 

} 


@IBAction func ChooseButtonPressed(_ sender: Any) { 
} 

func ShowAlert(AlertMessage: String) { 

    let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert) 

    self.present(alertController, animated: true, completion:nil) 

    let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in 
    } 

    alertController.addAction(OKAction) 

} 

@IBAction func TorchSliderChanged(_ sender: Any) { 

    CurrentTorchLevel = self.TorchSlider.value; 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel); 
} 

func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) { 

    var newISOval = isolevel; 
    var newSSval = exposurelevel; 
    let newTorchVal = TorchLevel; 

    if(newISOval == FLT_MAX){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.maxISO)!; 
    } 

    else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.minISO)!; 
    } 

    if(newSSval.timescale == 0){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!; 
    } 

    else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!; 
    } 



     do { 

     try self.CaptureDevice?.lockForConfiguration(); 

     try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal); 

     CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in 

      // Set text and sliders to correct levels 
      self.ISOtextfield.text = self.CaptureDevice?.iso.description; 
      self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false) 

      self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description; 
      self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false); 

      self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false); 
      self.Torchtextfield.text = self.CurrentTorchLevel.description; 

     }) 

     self.CaptureDevice?.unlockForConfiguration(); 

    } 

    catch { 
     ShowAlert(AlertMessage: "Unable to set camera settings"); 
     self.CaptureDevice?.unlockForConfiguration(); 


    } 

} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { 
    return 
} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) { 
    return 
} 

} 

Thanks for any help you can provide!

Answer


Write an extension of your UIViewController that conforms to AVCaptureFileOutputRecordingDelegate. Remove the last two methods from your ViewController class and add them to the extension.

class ViewController: UIViewController {
    // Your methods as usual, but remove the final two methods and add them to the
    // extension that follows. Those methods are what make you conform to
    // AVCaptureFileOutputRecordingDelegate.
}

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}

You could do the same thing by making UIViewController adopt the protocol directly, as shown below, but I thought I'd give you the cleaner solution above. You can choose either.

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
    // Your methods as usual, but this time keep your final two methods.

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}

Hi, thanks for your help. I tried the second solution, but now I get the error: "Type 'ViewController' does not conform to protocol 'AVCaptureFileOutputRecordingDelegate'" – aforward


To conform to AVCaptureFileOutputRecordingDelegate you need the two captureOutput methods in your ViewController. I've updated my solution to include them more clearly. Are you still getting the error? The first error happened because your ViewController did not adopt AVCaptureFileOutputRecordingDelegate, so when you used 'self' in the @IBAction, Xcode was trying to convert a plain UIViewController into an AVCaptureFileOutputRecordingDelegate. – gwinyai
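(A minimal sketch of the point made in this comment: the assignment only type-checks once ViewController, or an extension of it, declares the protocol. Class and delegate names follow the question; the helper method and empty callback bodies are purely illustrative.)

import UIKit
import AVFoundation

class ViewController: UIViewController {
    func assignDelegateExample() {
        // Compiles because the extension below declares the conformance; without it,
        // this line produces "Cannot assign value of type 'ViewController' to type
        // 'AVCaptureFileOutputRecordingDelegate?'".
        let recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
        _ = recordingDelegate
    }
}

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    // The callbacks that satisfy the protocol (Swift 3 / iOS 10 SDK signatures, as in the answer).
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    }
}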


Figured it out. In Swift 3.0 these methods have changed: https://developer.apple.com/reference/avfoundation/avcapturefileoutputrecordingdelegate (I'd suggest updating your answer accordingly for future troubleshooting). Thanks again for your help! – aforward
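(For completeness, a sketch of how the record action from the question might look once the conformance is in place, using the Swift 3 / iOS 10 SDK signatures shown in the answer above. The session setup is omitted and the output file name is illustrative only.)

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    let captureSession = AVCaptureSession()
    let videoFileOutput = AVCaptureMovieFileOutput()

    @IBAction func RecordButtonPressed(_ sender: Any) {
        // With the conformance declared above, assigning self no longer errors.
        let recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self

        // Guard against adding the same output twice if the button is pressed again.
        if captureSession.canAddOutput(videoFileOutput) {
            captureSession.addOutput(videoFileOutput)
        }

        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let fileURL = documentsURL.appendingPathComponent("temp.mov") // illustrative file name

        videoFileOutput.startRecording(toOutputFileURL: fileURL, recordingDelegate: recordingDelegate)
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Recording started.
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        // Recording finished; the movie file is at outputFileURL.
    }
}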
