Camera Kit iOS

Building Mirrors

If you're interested in building AR Mirrors, there are a couple of important considerations to keep in mind.

  • Mirrors can only be built by developers who have agreed to the Mirror-specific addendum, which is accessible when enabling Mirrors in your app configuration within the Snap Kit Developer Portal.
  • AR Mirrors rotate the camera feed to create a 9:16 video. This requires specific configurations both in your project code and in the project setup, which are outlined below.

Looking to add Snapchat's library of viral Lenses to your project? Have questions about custom hardware or Lenses for your AR Mirror? Fill out this form for more information!

Getting Started

We provide a sample Mirror app to help you configure your project. This sample includes the necessary customizations, such as the MirrorAVSessionInput class.

Project Configuration

1. Custom Camera Input

Use the custom MirrorAVSessionInput class (full source below) to handle external cameras with a video orientation of .landscapeLeft. This class is included in the sample Mirror app linked above.
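
For reference, here is a minimal sketch of how such an input could be wired into a Camera Kit session. It assumes the Session, ARSessionInput, and start(input:arInput:) APIs from the Camera Kit quickstart; the surrounding object names are illustrative and not part of the sample:

import AVFoundation
import SCSDKCameraKit

// Sketch only: create a capture session and a Camera Kit session, then start
// Camera Kit with the Mirror input in place of the default AVSessionInput.
let captureSession = AVCaptureSession()
let cameraKit = Session()
let input = MirrorAVSessionInput(session: captureSession) // class listed at the end of this page
let arInput = ARSessionInput()

cameraKit.start(input: input, arInput: arInput)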

2. Orientation Settings

  • Enable support for all orientations except Upside Down in your project settings; an equivalent per-view-controller override is sketched below.
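
The same restriction can be scoped to the Mirror screen in code. A minimal sketch, assuming a hypothetical hosting view controller named MirrorViewController:

import UIKit

final class MirrorViewController: UIViewController {
    // Allow every orientation except upside-down portrait,
    // matching the project-level orientation settings above.
    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        .allButUpsideDown
    }
}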

3. Info.plist Configuration

Add the following keys to your project's Info.plist file:

  • UISupportsTrueScreenSizeOnMac → YES
  • UILaunchToFullScreenByDefaultOnMac → YES
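
In raw plist (XML) form, these entries look like the fragment below; merge the keys into your existing Info.plist rather than replacing it:

<key>UISupportsTrueScreenSizeOnMac</key>
<true/>
<key>UILaunchToFullScreenByDefaultOnMac</key>
<true/>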

Sample App

Download the sample Mirror app for a working implementation, including:

  • A pre-configured MirrorAVSessionInput class.
  • Correct project orientation and Info.plist settings.

MirrorAVSessionInput.swift

import AVFoundation
import Foundation
import SCSDKCameraKit
import UIKit // UIScreen is used to derive the initial frame size

/// A Camera Kit `Input` backed by an `AVCaptureSession`, customized for AR Mirrors
/// where an external camera delivers its feed with a `.landscapeLeft` orientation.
class MirrorAVSessionInput: NSObject, Input {
    var destination: InputDestination?
    private(set) var frameSize: CGSize
    private(set) var frameOrientation: AVCaptureVideoOrientation

    /// Changing the camera position removes the current device input and attaches
    /// the matching wide-angle camera inside a single configuration block.
    var position: AVCaptureDevice.Position {
        didSet {
            guard position != oldValue else { return }
            videoSession.beginConfiguration()
            if let videoDeviceInput { videoSession.removeInput(videoDeviceInput) }
            if let device = captureDevice {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if videoSession.canAddInput(input) { videoSession.addInput(input) }
                    update(input: input, isAsync: false)
                    videoSession.commitConfiguration()
                    destination?.inputChangedAttributes(self)
                } catch {
                    // Balance beginConfiguration() even when the new input cannot be created.
                    videoSession.commitConfiguration()
                    debugPrint("[\(String(describing: self))]: Failed to add \(position) input")
                }
            } else {
                // No matching capture device; close the configuration block unchanged.
                videoSession.commitConfiguration()
            }
        }
    }

    /// The wide-angle camera for the current position.
    private var captureDevice: AVCaptureDevice? {
        AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: position)
    }

    var isRunning: Bool { videoSession.isRunning }
    var horizontalFieldOfView: CGFloat { fieldOfView }

    private var fieldOfView: CGFloat
    private var isVideoMirrored: Bool
    private var format: AVCaptureDevice.Format?
    private var prevCaptureInput: AVCaptureInput?
    private var videoOrientation: AVCaptureVideoOrientation

    private let context = CIContext()
    private let videoSession: AVCaptureSession
    private let videoOutput: AVCaptureVideoDataOutput

    /// The session's current video device input, if any.
    private var videoDeviceInput: AVCaptureDeviceInput? {
        deviceInput(for: .video, session: videoSession)
    }

    /// The video connection of the data output, used to apply mirroring and orientation.
    private var videoConnection: AVCaptureConnection? {
        videoOutput.connection(with: .video)
    }

    private let videoQueue: DispatchQueue
    private let configurationQueue: DispatchQueue

    init(session: AVCaptureSession, fieldOfView: CGFloat = Constants.defaultFieldOfView) {
        self.fieldOfView = fieldOfView
        self.videoSession = session
        self.frameOrientation = .portrait
        self.configurationQueue = DispatchQueue(label: "com.snap.mirror.avsessioninput.configuration")
        self.videoOutput = AVCaptureVideoDataOutput()
        self.videoQueue = DispatchQueue(label: "com.snap.mirror.videoOutput")
        // The Mirror renders full screen, so the screen size is reported as the frame size.
        self.frameSize = UIScreen.main.bounds.size
        self.position = .front
        self.isVideoMirrored = true
        // External Mirror cameras deliver frames rotated to landscape left.
        self.videoOrientation = .landscapeLeft
        super.init()

        // Attach the video data output and apply the rotated orientation to its connection.
        videoSession.beginConfiguration()
        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        if videoSession.canAddOutput(videoOutput) { videoSession.addOutput(videoOutput) }
        videoConnection?.videoOrientation = videoOrientation
        videoSession.commitConfiguration()
    }

    func startRunning() {
        // Re-apply the capture format that was active before the session was stopped.
        restoreFormat()
        videoSession.startRunning()
    }

    func stopRunning() {
        // Remember the active format so it can be restored on the next start.
        storeFormat()
        videoSession.stopRunning()
    }

    func setVideoOrientation(_ videoOrientation: AVCaptureVideoOrientation) {
        self.videoOrientation = videoOrientation
        destination?.inputChangedAttributes(self)
        // Apply the new orientation to the capture connection off the calling thread.
        configurationQueue.async { [weak self] in
            self?.videoConnection?.videoOrientation = videoOrientation
        }
    }
}

extension MirrorAVSessionInput: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        if output == videoOutput {
            // If the underlying capture input changed (e.g. a different camera),
            // refresh the cached attributes before forwarding frames.
            if let input = connection.inputPorts.first?.input, input != prevCaptureInput {
                update(input: input)
                destination?.inputChangedAttributes(self)
            }
            destination?.input(self, receivedVideoSampleBuffer: sampleBuffer)
        }
    }
}

private extension MirrorAVSessionInput {
    /// Re-applies the capture format saved by storeFormat(), then clears it.
    func restoreFormat() {
        if let format, let device = videoDeviceInput?.device {
            do {
                try device.lockForConfiguration()
                device.activeFormat = format
                device.unlockForConfiguration()
                self.format = nil
            } catch {
                debugPrint("[\(String(describing: self))]: Failed to restore format")
            }
        }
    }

    /// Remembers the device's active format so it survives a session stop/start.
    func storeFormat() {
        format = videoDeviceInput?.device.activeFormat
    }

    /// Returns the session's first device input that carries the given media type.
    func deviceInput(for mediaType: AVMediaType, session: AVCaptureSession) -> AVCaptureDeviceInput? {
        for input in session.inputs {
            if let deviceInput = input as? AVCaptureDeviceInput, deviceInput.device.hasMediaType(mediaType) {
                return deviceInput
            }
        }
        return nil
    }

    /// Caches field of view, position, and format from the new capture input
    /// and pushes mirroring/orientation to the video connection.
    func update(input: AVCaptureInput, isAsync: Bool = true) {
        if let input = input as? AVCaptureDeviceInput {
            fieldOfView = CGFloat(input.device.activeFormat.videoFieldOfView)
            position = input.device.position
            format = input.device.activeFormat
        }

        // Mirror the feed only for the front-facing camera.
        isVideoMirrored = position == .front

        if isAsync {
            configurationQueue.async { [weak self] in
                self?.updateConnection()
            }
        } else {
            updateConnection()
        }

        prevCaptureInput = input
    }

    /// Applies mirroring and orientation to the video connection only when they differ
    /// from the connection's current values.
    func updateConnection() {
        if let isMirrored = videoConnection?.isVideoMirrored, isMirrored != isVideoMirrored {
            videoConnection?.isVideoMirrored = isVideoMirrored
        }

        if let orientation = videoConnection?.videoOrientation, orientation != videoOrientation {
            videoConnection?.videoOrientation = videoOrientation
        }
    }
}

extension MirrorAVSessionInput {
    enum Constants {
        static let defaultFieldOfView: CGFloat = 78.0
    }
}
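
Once the session is running, the input can be adjusted through its public surface. For example (illustrative only, assuming input is the MirrorAVSessionInput instance from the sketch earlier on this page):

// Switch cameras; the position observer swaps the underlying device input.
input.position = .back

// Re-apply the rotated orientation used for the 9:16 Mirror feed.
input.setVideoOrientation(.landscapeLeft)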