I am following a YouTube tutorial on CoreML, "Real Time Camera object detection with machine learning - Swift 4" by Brian.
I am getting two errors. The first is on this line:

captureSession.sessionPreset = .photo

Type "String" has no member 'photo'

The second is on this line:

dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))

Cannot convert value of type 'ViewController' to expected argument type 'AVCaptureVideoDataOutputSampleBufferDelegate!'

Here is my code:
import UIKit
import AVFoundation
import AVKit
import Vision

class ViewController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()

        // here is where we start up the camera
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = .photo

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)

        captureSession.startRunning()

        // show the camera feed on screen
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer!)
        previewLayer!.frame = view.frame

        // deliver frames from the camera to a background queue
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)
    }
}
I think you already have the answer for the second error. For the first error, replace the failing line with the one below.

Swift 3:

captureSession.sessionPreset = AVCaptureSessionPresetPhoto

Swift 4:

captureSession.sessionPreset = AVCaptureSession.Preset.photo
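
For reference on the second error: in the posted code the class conforms to AVCaptureAudioDataOutputSampleBufferDelegate, but setSampleBufferDelegate(_:queue:) on AVCaptureVideoDataOutput expects an AVCaptureVideoDataOutputSampleBufferDelegate. A minimal sketch of that change, using the Swift 4 delegate method name (in Swift 3 the callback is captureOutput(_:didOutputSampleBuffer:from:)):

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // called once per captured video frame after setSampleBufferDelegate(self, queue:) is set
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // hand sampleBuffer to your Vision/CoreML request here
    }
}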