Skip to content

Overlay does not resize with window #5

@ahartman

Description

@ahartman

I am trying to adapt this app to add a crosshair overlay to the window.
That way, I can use the app to find the zero point with a webcam on my CNC-mill.

I managed to adapt an overlay technique from another example.
The only issue is that the overlay does not resize when resizing the window.
I am a hobby IOS developer, but I have no experience on macOS.
I cannot do this with MacCatalyst as it needs to run on an older Mac with OS 12.5.

There is something in the Main.storyboard that keeps the overlay fixed at 480 x 270.

Any help how to make the overlay resize correctly would be appreciated, thank you.
I attached the Xcode project in order to observe the issue; resize the window and you will see.

//
//  CameraManager.swift
//  macOS Camera
//
//  Created by Mihail Șalari. on 16.05.2022.
//  Copyright © 2017 Mihail Șalari. All rights reserved.
//

import AVFoundation
import Cocoa

typealias CameraCaptureOutput = AVCaptureOutput
typealias CameraSampleBuffer = CMSampleBuffer
typealias CameraCaptureConnection = AVCaptureConnection

protocol CameraManagerDelegate: AnyObject {
    func cameraManager(_ output: CameraCaptureOutput, didOutput sampleBuffer: CameraSampleBuffer, from connection: CameraCaptureConnection)
}

protocol CameraManagerProtocol: AnyObject {
    var delegate: CameraManagerDelegate? { get set }

    func startSession() throws
    func stopSession() throws
}

class CameraManager: NSObject, CameraManagerProtocol {
    private var previewLayer: AVCaptureVideoPreviewLayer!
    private var overlayLayer: AVCaptureVideoPreviewLayer!
    private var videoSession: AVCaptureSession!
    private var cameraDevice: AVCaptureDevice!

    private let cameraQueue: DispatchQueue

    private let containerView: NSView

    weak var delegate: CameraManagerDelegate?

    init(containerView: NSView) throws {
        self.containerView = containerView
        cameraQueue = DispatchQueue(label: "sample buffer delegate", attributes: [])
        super.init()
        try prepareCamera()
    }

    deinit {
        previewLayer = nil
        overlayLayer = nil
        videoSession = nil
        cameraDevice = nil
    }

    private func prepareCamera() throws {
        videoSession = AVCaptureSession()
        previewLayer = AVCaptureVideoPreviewLayer(session: videoSession)
        previewLayer.videoGravity = .resizeAspectFill
        overlayLayer = AVCaptureVideoPreviewLayer(session: videoSession)
        overlayLayer.videoGravity = .resizeAspectFill

        cameraDevice = AVCaptureDevice.devices()
            .filter { $0.hasMediaType(.video) }
            .compactMap { $0 }.first
        if cameraDevice != nil {
            do {
                let input = try AVCaptureDeviceInput(device: cameraDevice)
                if videoSession.canAddInput(input) {
                    videoSession.addInput(input)
                } else {
                    throw CameraError.cannotAddInput
                }

                if let connection = previewLayer.connection, connection.isVideoMirroringSupported {
                    connection.automaticallyAdjustsVideoMirroring = false
                    connection.isVideoMirrored = true
                } else {
                    throw CameraError.previewLayerConnectionError
                }

                previewLayer.frame = containerView.bounds
                containerView.layer = previewLayer

                overlayLayer = OverlayPreviewLayer(session: videoSession)
                overlayLayer.frame = containerView.bounds
                containerView.layer?.addSublayer(overlayLayer)

                containerView.wantsLayer = true
        } catch {
                throw CameraError.cannotDetectCameraDevice
            }
        }

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: cameraQueue)
        if videoSession.canAddOutput(videoOutput) {
            videoSession.addOutput(videoOutput)
        } else {
            throw CameraError.cannotAddOutput
        }
    }

    func startSession() throws {
        if let videoSession = videoSession {
            if !videoSession.isRunning {
                cameraQueue.async {
                    videoSession.startRunning()
                }
            }
        } else {
            throw CameraError.videoSessionNil
        }
    }

    func stopSession() throws {
        if let videoSession = videoSession {
            if videoSession.isRunning {
                cameraQueue.async {
                    videoSession.stopRunning()
                }
            }
        } else {
            throw CameraError.videoSessionNil
        }
    }
}

enum CameraError: LocalizedError {
    case cannotDetectCameraDevice
    case cannotAddInput
    case previewLayerConnectionError
    case cannotAddOutput
    case videoSessionNil

    var localizedDescription: String {
        switch self {
        case .cannotDetectCameraDevice: return "Cannot detect camera device"
        case .cannotAddInput: return "Cannot add camera input"
        case .previewLayerConnectionError: return "Preview layer connection error"
        case .cannotAddOutput: return "Cannot add video output"
        case .videoSessionNil: return "Camera video session is nil"
        }
    }
}

extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        delegate?.cameraManager(output, didOutput: sampleBuffer, from: connection)
    }
}

macOS-Camera.zip

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions