Display both front and back camera views in the same class

I want to display both the front and the back camera views on screen at the same time (2 separate views in the same view controller). Right now only the back view is being displayed. I have tried a couple of things, but I have not found a solution. I tried creating 2 different capture sessions, but that did not work, and I don't know if it is necessary.

import UIKit
import AVFoundation

class ViewController: UIViewController,AVCapturePhotoCaptureDelegate {
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!
    
    var frontCameraView = UIView()
    var backCameraView = UIView()
    
    // Running vertical offset and blue component used to lay out the two preview views.
    var heightNum = 30
    var brie = 0.1
    
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        
        // Lay out the two preview container views, stacked vertically with manual frames.
        [frontCameraView, backCameraView].forEach {
            $0.translatesAutoresizingMaskIntoConstraints = false
            view.addSubview($0)
            $0.backgroundColor = UIColor(red: 0.1, green: 0.1, blue: CGFloat(brie), alpha: 1)
            heightNum += 120
            brie += 0.1
            $0.frame = CGRect(x: 100, y: heightNum, width: 100, height: 100)
            heightNum += 120
        }
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Setup your camera here...
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        
        guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
            print("Unable to access back camera!")
            return
        }
        
        guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            print("Unable to access front camera!")
            return
        }
        
        
        do {
            let backInput = try AVCaptureDeviceInput(device: backCamera)
            let frontInput = try AVCaptureDeviceInput(device: frontCamera)
            
            stillImageOutput = AVCapturePhotoOutput()
            
            if captureSession.canAddInput(backInput) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(backInput)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
            // On a plain AVCaptureSession this second check fails: the session already has
            // a camera input and this same photo output, so the front camera is never added.
            if captureSession.canAddInput(frontInput) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(frontInput)
                captureSession.addOutput(stillImageOutput)
                setupLivePreview()
            }
        }
        
        catch let error {
            print("Error: unable to initialize camera input: \(error.localizedDescription)")
        }
    }
    
    func setupLivePreview() {
        
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        
        // A layer can only have one superlayer, so the second addSublayer call below
        // moves the preview layer out of frontCameraView and into backCameraView.
        frontCameraView.layer.addSublayer(videoPreviewLayer)
        backCameraView.layer.addSublayer(videoPreviewLayer)
        
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession.startRunning()
            DispatchQueue.main.async {
                guard let self = self else { return }
                self.videoPreviewLayer.frame = self.frontCameraView.bounds
            }
        }
    }
}

Hi @timswift, thank you for sharing your code and question. I don't know if it's possible to do what you're trying to do with a single standard capture session, but I did find this sample code in Apple's AVFoundation documentation: AVMultiCamPiP: Capturing from Multiple Cameras. Maybe the sample code could provide an idea or a possible solution.
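To give a rough idea of the approach that sample takes, here is a minimal sketch (not the sample's actual code, and untested). It assumes iOS 13+, a device where AVCaptureMultiCamSession.isSupported returns true, and an NSCameraUsageDescription entry in Info.plist. The DualCameraViewController class and the addCamera(_:to:previewIn:) helper are made up for this illustration; the view names match the ones in your question.

import UIKit
import AVFoundation

// Sketch only: requires iOS 13+ and a device that supports multi-cam capture.
class DualCameraViewController: UIViewController {

    let frontCameraView = UIView()
    let backCameraView = UIView()
    var session: AVCaptureMultiCamSession?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Two stacked container views, positioned with plain frames for brevity.
        backCameraView.frame = CGRect(x: 0, y: 60, width: view.bounds.width, height: 300)
        frontCameraView.frame = CGRect(x: 0, y: 380, width: view.bounds.width, height: 300)
        [backCameraView, frontCameraView].forEach { view.addSubview($0) }
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        guard AVCaptureMultiCamSession.isSupported else {
            print("Multi-cam capture is not supported on this device")
            return
        }

        let session = AVCaptureMultiCamSession()
        self.session = session

        session.beginConfiguration()
        addCamera(.back, to: session, previewIn: backCameraView)
        addCamera(.front, to: session, previewIn: frontCameraView)
        session.commitConfiguration()

        DispatchQueue.global(qos: .userInitiated).async {
            session.startRunning()
        }
    }

    private func addCamera(_ position: AVCaptureDevice.Position,
                           to session: AVCaptureMultiCamSession,
                           previewIn containerView: UIView) {
        guard
            let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position),
            let input = try? AVCaptureDeviceInput(device: device),
            session.canAddInput(input)
        else {
            print("Unable to add \(position == .front ? "front" : "back") camera")
            return
        }

        // Add the input without automatic connections so the preview layer can be
        // wired to this specific camera's video port.
        session.addInputWithNoConnections(input)

        guard let videoPort = input.ports.first(where: { $0.mediaType == .video }) else { return }

        let previewLayer = AVCaptureVideoPreviewLayer(sessionWithNoConnection: session)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.frame = containerView.bounds
        containerView.layer.addSublayer(previewLayer)

        let connection = AVCaptureConnection(inputPort: videoPort, videoPreviewLayer: previewLayer)
        if session.canAddConnection(connection) {
            session.addConnection(connection)
        }
    }
}

The key differences from the code above are the use of AVCaptureMultiCamSession instead of AVCaptureSession, adding each camera with addInputWithNoConnections, and giving each camera its own AVCaptureVideoPreviewLayer wired up through its own AVCaptureConnection.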

Best,
Gina
