aforward aforward - 5 days ago 5
Swift Question

Recording Video with AVFoundation in Swift for iOS

I am having trouble recording video using the code provided. I am using example code created for recording video.

Specifically I am unable to compile this line without this error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self


This line is located in a IBAction function:

@IBAction func RecordButtonPressed(_ sender: Any) {

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self

var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)

let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")

videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

RecordButton.setTitle("Stop", for: .normal);

}


Rest of code is here:

import UIKit
import AVFoundation
import Darwin




class ViewController: UIViewController {



@IBOutlet weak var CameraView: UIImageView!

@IBOutlet weak var RecordButton: UIButton!

@IBOutlet weak var SelectFrButton: UIButton!

@IBOutlet weak var ISOslider: UISlider!

@IBOutlet weak var SSslider: UISlider!

@IBOutlet weak var ISOtextfield: UITextField!

@IBOutlet weak var SStextfield: UITextField!

@IBOutlet weak var TorchSlider: UISlider!

@IBOutlet weak var Torchtextfield: UITextField!

var captureSession = AVCaptureSession();
var DisplaySessionOutput = AVCaptureVideoDataOutput();
var SaveSessionOutput = AVCaptureMovieFileOutput();
var previewLayer = AVCaptureVideoPreviewLayer();
var CaptureDevice:AVCaptureDevice? = nil;
var CurrentTorchLevel:Float = 0.5;


override func viewDidLoad() {
super.viewDidLoad()

captureSession.sessionPreset = AVCaptureSessionPresetHigh
// Loop through all the capture devices on this phone

let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)

for device in (deviceDiscoverySession?.devices)! {
if(device.position == AVCaptureDevicePosition.back){
do{

try device.lockForConfiguration()


device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in

// Set text and sliders to correct levels
self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;

self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);

self.ISOtextfield.text = device.iso.description;
self.ISOslider.setValue(device.iso, animated: false)

self.SStextfield.text = device.exposureDuration.seconds.description;
self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);

self.TorchSlider.minimumValue = 0.01;
self.TorchSlider.maximumValue = 1;
self.TorchSlider.value = 0.5;
self.Torchtextfield.text = "0.5";
})




//Turn torch on

if (device.torchMode == AVCaptureTorchMode.on) {
device.torchMode = AVCaptureTorchMode.off
} else {
try device.setTorchModeOnWithLevel(1.0)

}

device.unlockForConfiguration();

CaptureDevice = device;

let input = try AVCaptureDeviceInput(device: CaptureDevice)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);

if(captureSession.canAddOutput(DisplaySessionOutput)){
captureSession.addOutput(DisplaySessionOutput);
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
CameraView.layer.addSublayer(previewLayer);
}
}
}
catch{
print("exception!");
}
}
}

CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);

captureSession.startRunning()


}

// Do any additional setup after loading the view, typically from a nib.


override func viewDidLayoutSubviews() {

previewLayer.frame = CameraView.bounds

}


override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}


@IBAction func RecordButtonPressed(_ sender: Any) {

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self

var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)

let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")

videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

RecordButton.setTitle("Stop", for: .normal);

}

@IBAction func ISOvaluechanged(_ sender: Any) {

SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
}

@IBAction func SSvaluechanged(_ sender: Any) {

let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000);
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
}

@IBAction func ISOtextchanged(_ sender: Any) {

}

@IBAction func SStextchanged(_ sender: Any) {

//let time = CMTimeMake(Int64(exposurelevel * 100000),100000);

}


@IBAction func ChooseButtonPressed(_ sender: Any) {
}

func ShowAlert(AlertMessage: String) {

let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)

self.present(alertController, animated: true, completion:nil)

let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in
}

alertController.addAction(OKAction)

}

@IBAction func TorchSliderChanged(_ sender: Any) {

CurrentTorchLevel = self.TorchSlider.value;
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
}

func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {

var newISOval = isolevel;
var newSSval = exposurelevel;
let newTorchVal = TorchLevel;

if(newISOval == FLT_MAX){
// Pass through 0,0 for maintaining current SS.
}

else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {

newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
}

else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {

newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
}

if(newSSval.timescale == 0){
// Pass through 0,0 for maintaining current SS.
}

else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) {

newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
}

else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {

newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
}



do {

try self.CaptureDevice?.lockForConfiguration();

try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);

CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in

// Set text and sliders to correct levels
self.ISOtextfield.text = self.CaptureDevice?.iso.description;
self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)

self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);

self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
self.Torchtextfield.text = self.CurrentTorchLevel.description;

})

self.CaptureDevice?.unlockForConfiguration();

}

catch {
ShowAlert(AlertMessage: "Unable to set camera settings");
self.CaptureDevice?.unlockForConfiguration();


}

}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
return
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
return
}

}


Thank you for any help you can provide!

Answer

Make an extension for your UIViewController that makes it conform to AVCaptureFileOutputRecordingDelegate. Remove and add the final two methods in your ViewController class into it.

   class ViewController:UIViewController {
      //your methods as usual but remove the final two methods and add them to the extension that follows. Those methods are what will make you conform to AVCaptureFileOutputRecordingDelegate
   }

  extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
return
}

  func captureOutput(captureOutput: AVCaptureFileOutput!,   didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
return
 }
  }

You can do the same thing by extending your UIViewController as below but I thought I'd give you a clean solution as above. You can choose.

    class ViewController:UIViewController, AVCaptureFileOutputRecordingDelegate {
  //your methods as usual but you keep your final two methods this time 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

}

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

}
    }
Comments