How to Build a QR Code Scanner in SwiftUI
Wrap an AVCaptureSession with an
AVCaptureMetadataOutput inside a
UIViewRepresentable to get a live camera
preview that fires a callback whenever a QR code is detected.
// Minimal shape of the scanner wrapper; the full implementation below replaces the closure with an observable model
struct QRScannerView: UIViewRepresentable {
let onScan: (String) -> Void
func makeUIView(context: Context) -> PreviewView {
let view = PreviewView()
context.coordinator.configure(view: view, onScan: onScan)
return view
}
func updateUIView(_ uiView: PreviewView, context: Context) {}
func makeCoordinator() -> Coordinator { Coordinator() }
}
Full implementation
The scanner is split into three pieces: a PreviewView
(a plain UIView whose layer is
AVCaptureVideoPreviewLayer), a
Coordinator that owns the
AVCaptureSession and implements the metadata
delegate, and an @Observable view-model that
holds scan state so your SwiftUI hierarchy stays clean.
The session is started and stopped on a background serial queue to keep the main thread free, and
the camera permission check is performed lazily on first appearance using the async
AVCaptureDevice.requestAccess(for:) API.
import SwiftUI
import AVFoundation
// MARK: - Observable model
@Observable
final class ScannerModel {
var scannedCode: String?
var isUnauthorized = false
var isScanning = false
}
// MARK: - Preview UIView
final class PreviewView: UIView {
override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }
var previewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
}
// MARK: - UIViewRepresentable
struct QRScannerView: UIViewRepresentable {
@Bindable var model: ScannerModel
func makeCoordinator() -> Coordinator { Coordinator(model: model) }
func makeUIView(context: Context) -> PreviewView {
let view = PreviewView()
view.previewLayer.videoGravity = .resizeAspectFill
Task { await context.coordinator.startSession(previewView: view) }
return view
}
func updateUIView(_ uiView: PreviewView, context: Context) {}
static func dismantleUIView(_ uiView: PreviewView, coordinator: Coordinator) {
coordinator.stopSession()
}
}
// MARK: - Coordinator
final class Coordinator: NSObject, AVCaptureMetadataOutputObjectsDelegate {
private let model: ScannerModel
private let session = AVCaptureSession()
private let sessionQueue = DispatchQueue(label: "qr.session.queue")
init(model: ScannerModel) { self.model = model }
func startSession(previewView: PreviewView) async {
// 1. Request camera permission
let status = AVCaptureDevice.authorizationStatus(for: .video)
if status == .notDetermined {
guard await AVCaptureDevice.requestAccess(for: .video) else {
await MainActor.run { model.isUnauthorized = true }
return
}
} else if status != .authorized {
await MainActor.run { model.isUnauthorized = true }
return
}
sessionQueue.async { [weak self] in
guard let self else { return }
self.configure(previewView: previewView)
self.session.startRunning()
DispatchQueue.main.async { self.model.isScanning = true }
}
}
func stopSession() {
sessionQueue.async { [weak self] in
self?.session.stopRunning()
DispatchQueue.main.async { self?.model.isScanning = false }
}
}
private func configure(previewView: PreviewView) {
session.beginConfiguration()
defer { session.commitConfiguration() }
// 2. Add video input
guard
let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
let input = try? AVCaptureDeviceInput(device: device),
session.canAddInput(input)
else { return }
session.addInput(input)
// 3. Add metadata output
let output = AVCaptureMetadataOutput()
guard session.canAddOutput(output) else { return }
session.addOutput(output)
output.setMetadataObjectsDelegate(self, queue: .main)
output.metadataObjectTypes = [.qr]
// 4. Attach preview layer
DispatchQueue.main.async {
previewView.previewLayer.session = self.session
}
}
// 5. Delegate callback
func metadataOutput(
_ output: AVCaptureMetadataOutput,
didOutput metadataObjects: [AVMetadataObject],
from connection: AVCaptureConnection
) {
guard
let obj = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
let value = obj.stringValue
else { return }
model.scannedCode = value
// Optionally pause scanning after first result
stopSession()
}
}
// MARK: - SwiftUI wrapper
struct QRCodeScannerScreen: View {
@State private var model = ScannerModel()
@State private var showResult = false
var body: some View {
ZStack {
if model.isUnauthorized {
ContentUnavailableView(
"Camera Access Required",
systemImage: "camera.slash",
description: Text("Enable camera access in Settings to scan QR codes.")
)
} else {
QRScannerView(model: model)
.ignoresSafeArea()
// Viewfinder overlay
RoundedRectangle(cornerRadius: 16)
.strokeBorder(.white, lineWidth: 3)
.frame(width: 240, height: 240)
.shadow(color: .black.opacity(0.4), radius: 8)
VStack {
Spacer()
if let code = model.scannedCode {
Text(code)
.font(.callout.monospaced())
.padding()
.background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 12))
.padding(.bottom, 40)
} else {
Text("Align a QR code within the frame")
.font(.footnote)
.foregroundStyle(.white.opacity(0.8))
.padding(.bottom, 40)
}
}
}
}
.navigationTitle("QR Scanner")
.navigationBarTitleDisplayMode(.inline)
.toolbar {
if model.scannedCode != nil {
ToolbarItem(placement: .primaryAction) {
Button("Scan Again") {
model.scannedCode = nil
Task { /* restart the session; see the debounce FAQ below for a restart sketch */ }
}
}
}
}
}
}
#Preview {
NavigationStack {
QRCodeScannerScreen()
}
}
How it works
- Permission gating: AVCaptureDevice.requestAccess(for: .video) is called as an async function inside startSession. If the user denies access, model.isUnauthorized flips to true and SwiftUI swaps to the ContentUnavailableView; no crash, no blank screen. (A sketch that adds an "Open Settings" shortcut to that denied state follows this list.)
- Session configuration on a background queue: all AVCaptureSession mutations happen on sessionQueue. Attaching previewLayer.session is the only step that hops back to the main thread, because the preview layer backs the on-screen UIKit view and should only be touched there.
- Metadata output for QR codes: setting output.metadataObjectTypes = [.qr] after adding the output (not before) tells the system to filter for QR symbols only, which is more efficient than scanning for every barcode type.
- Delegate on the main queue: passing queue: .main to setMetadataObjectsDelegate means model.scannedCode can be updated directly in the callback without an extra MainActor.run hop.
- Clean teardown via dismantleUIView: SwiftUI calls the static dismantleUIView method when the view leaves the hierarchy, so the capture session is stopped and the camera released, which matters for battery life and for other apps that need camera access.
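A minimal sketch of that Settings shortcut, assuming the QRCodeScannerScreen above; the "Open Settings" button and its wording are additions, but UIApplication.openSettingsURLString is the real deep-link constant.
// Drop-in replacement for the isUnauthorized branch in QRCodeScannerScreen.body.
// Adds an action button that deep-links to this app's page in Settings.
if model.isUnauthorized {
    ContentUnavailableView {
        Label("Camera Access Required", systemImage: "camera.slash")
    } description: {
        Text("Enable camera access in Settings to scan QR codes.")
    } actions: {
        Button("Open Settings") {
            if let url = URL(string: UIApplication.openSettingsURLString) {
                UIApplication.shared.open(url)
            }
        }
    }
}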
Variants
Restrict scan area to the viewfinder frame
AVCaptureMetadataOutput.rectOfInterest accepts
a normalized CGRect in the
capture coordinate system (origin top-left, axes flipped from UIKit).
Call previewLayer.metadataOutputRectConverted(fromLayerRect:)
to convert your on-screen viewfinder frame into that coordinate space.
// Store the output on the Coordinator so it can be reconfigured later
// (and use it in configure() in place of the local `output` constant)
private let metadataOutput = AVCaptureMetadataOutput()

// Inside Coordinator.configure(), after setting metadataObjectTypes.
// The conversion needs the preview layer to have a session and a laid-out size,
// so it runs on the main queue alongside the layer attachment.
DispatchQueue.main.async {
    previewView.previewLayer.session = self.session
    // A 240x240 viewfinder centred in the preview layer, in layer (point) coordinates
    let bounds = previewView.previewLayer.bounds
    let viewfinderRect = CGRect(
        x: bounds.midX - 120, y: bounds.midY - 120,
        width: 240, height: 240
    )
    // Convert layer points into the normalized capture coordinate space
    self.metadataOutput.rectOfInterest = previewView.previewLayer
        .metadataOutputRectConverted(fromLayerRect: viewfinderRect)
}
Support additional barcode types
Expand metadataObjectTypes to include other
symbologies your app needs — for example
[.qr, .ean13, .code128, .pdf417].
Always set metadataObjectTypes after
adding the output to the session; setting it before will throw a runtime exception because the
available types depend on the configured inputs.
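As a sketch, inside Coordinator.configure() right after session.addOutput(output), you could filter a wish-list of symbologies against availableMetadataObjectTypes so an unsupported type can never trigger that exception:
// Inside Coordinator.configure(), after session.addOutput(output).
// Intersecting with availableMetadataObjectTypes guards against requesting a
// symbology the current input cannot produce.
let wanted: [AVMetadataObject.ObjectType] = [.qr, .ean13, .code128, .pdf417]
output.metadataObjectTypes = wanted.filter(output.availableMetadataObjectTypes.contains)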
Common pitfalls
- Missing Info.plist key. You must add NSCameraUsageDescription to your Info.plist (or the target's Info tab in Xcode 16). Omitting it makes the system terminate the app the first time the camera is accessed; the crash report cites the missing usage-description key rather than any line of your code, so the cause is easy to miss.
- Setting metadataObjectTypes before adding the output. The property setter validates against the output's available types; if you set it before calling session.addOutput(output), you'll get a runtime exception. Always configure metadata types after the output has been added to the session.
- Blocking the main thread with session start/stop. session.startRunning() is a blocking call that can take several hundred milliseconds, especially on older hardware. Always call it on a background queue; running it on the main thread causes visible UI jank and can contribute to watchdog termination.
- VoiceOver support. The camera preview is not inherently accessible. Add an .accessibilityLabel("QR code scanner camera preview") to the QRScannerView and announce scan results with UIAccessibility.post(notification: .announcement, argument: scannedCode) so VoiceOver users hear the decoded value immediately (see the sketch after this list).
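A minimal sketch of both accessibility additions, assuming the QRCodeScannerScreen and Coordinator shown earlier:
// In QRCodeScannerScreen.body: label the live preview for VoiceOver
QRScannerView(model: model)
    .ignoresSafeArea()
    .accessibilityLabel("QR code scanner camera preview")

// In Coordinator.metadataOutput(_:didOutput:from:): announce the decoded value
model.scannedCode = value
UIAccessibility.post(notification: .announcement, argument: value)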
Prompt this with Claude Code
When using Soarias or Claude Code directly to implement this:
Implement a qr code scanner in SwiftUI for iOS 17+. Use AVCaptureMetadataOutput and AVCaptureSession. Run the session on a background serial DispatchQueue. Request camera permission with the async AVCaptureDevice.requestAccess API. Show a ContentUnavailableView when access is denied. Make it accessible (VoiceOver labels, announce scan result). Add a #Preview with realistic sample data.
In Soarias, drop this prompt into the Build phase after your screen mockup is approved — the agent will wire up the permission key, the capture pipeline, and the SwiftUI wrapper in one pass so you stay in flow without context-switching to documentation.
FAQ
Does this work on iOS 16?
The AVCaptureMetadataOutput pipeline works
back to iOS 7, but this implementation uses the
@Observable macro (iOS 17+) and the
ContentUnavailableView (iOS 17+).
To support iOS 16, replace @Observable
with ObservableObject / @Published, and
replace ContentUnavailableView with a
custom fallback view. The camera capture code itself requires no changes.
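A sketch of the iOS 16 variant of the model; the capture pipeline is unchanged, but the views would read it through @StateObject / @ObservedObject instead of @State / @Bindable:
// iOS 16 fallback: ObservableObject + @Published instead of the @Observable macro
final class ScannerModel: ObservableObject {
    @Published var scannedCode: String?
    @Published var isUnauthorized = false
    @Published var isScanning = false
}

// QRCodeScannerScreen:  @StateObject private var model = ScannerModel()
// QRScannerView:        @ObservedObject var model: ScannerModel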
How do I debounce rapid repeated scans of the same code?
The delegate fires multiple times per second while a QR code is in view. The simplest guard is to
store the last scanned string and ignore duplicates:
guard value != model.scannedCode else { return }.
For a richer approach, call stopSession()
immediately after the first successful scan and add a "Scan Again" button to restart — this
pattern also gives users a deliberate confirmation step.
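A sketch of both options; restartSession() is not part of the implementation above, just one way the "Scan Again" button could be wired (the button needs access to the coordinator, for example via a closure or the model):
// In Coordinator.metadataOutput(_:didOutput:from:): ignore repeats of the same code
guard value != model.scannedCode else { return }
model.scannedCode = value
stopSession()

// Hypothetical helper on Coordinator for the "Scan Again" button
func restartSession() {
    sessionQueue.async { [weak self] in
        guard let self, !self.session.isRunning else { return }
        self.session.startRunning()
        DispatchQueue.main.async {
            self.model.scannedCode = nil
            self.model.isScanning = true
        }
    }
}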
What is the UIKit equivalent?
In UIKit you'd subclass UIViewController,
create the AVCaptureSession in
viewDidLoad, and set the
AVCaptureVideoPreviewLayer directly on the
view's layer. The SwiftUI version above wraps exactly this pattern inside
UIViewRepresentable so the same
AVFoundation machinery runs untouched — there
is no higher-level SwiftUI-native API for live camera capture as of iOS 17.
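For comparison, a condensed sketch of that UIKit shape, with permission checks and error handling omitted; the preview layer is added as a sublayer here for brevity (overriding layerClass, as PreviewView does above, also works):
import UIKit
import AVFoundation

// Condensed UIKit equivalent: the view controller owns the session and preview layer
final class QRScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    private let session = AVCaptureSession()
    var onScan: ((String) -> Void)?

    override func viewDidLoad() {
        super.viewDidLoad()
        guard
            let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
            let input = try? AVCaptureDeviceInput(device: device),
            session.canAddInput(input)
        else { return }
        session.addInput(input)

        let output = AVCaptureMetadataOutput()
        guard session.canAddOutput(output) else { return }
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: .main)
        output.metadataObjectTypes = [.qr]

        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.frame = view.bounds
        view.layer.addSublayer(previewLayer)

        // startRunning() blocks, so keep it off the main thread
        DispatchQueue.global(qos: .userInitiated).async { self.session.startRunning() }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        guard let code = (metadataObjects.first as? AVMetadataMachineReadableCodeObject)?.stringValue else { return }
        onScan?(code)
    }
}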
Last reviewed: 2026-05-11 by the Soarias team.