
How to implement voice recording in SwiftUI

iOS 17+ · Xcode 16+ · Intermediate · APIs: AVAudioRecorder · Updated: May 12, 2026
TL;DR

Request microphone permission via AVAudioApplication.requestRecordPermission, configure an AVAudioSession for recording, then start and stop an AVAudioRecorder pointed at a temporary file URL. The session and recorder live in an @Observable class so your SwiftUI view stays reactive.

import AVFoundation

@Observable
final class VoiceRecorder {
    var isRecording = false
    private var recorder: AVAudioRecorder?

    var fileURL: URL {
        FileManager.default.temporaryDirectory
            .appendingPathComponent("recording.m4a")
    }

    func start() throws {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.record, mode: .default)
        try session.setActive(true)
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44_100,
            AVNumberOfChannelsKey: 1
        ]
        recorder = try AVAudioRecorder(url: fileURL, settings: settings)
        recorder?.record()
        isRecording = true
    }

    func stop() {
        recorder?.stop()
        isRecording = false
    }
}

Full implementation

The implementation below separates concerns cleanly: an @Observable VoiceRecorderModel owns all AVFoundation state, while the SwiftUI view reacts to isRecording, permissionGranted, and an optional errorMessage. Playback is handled by a companion AVAudioPlayer created on demand from the same file URL, so you never ship a separate playback class. Permission is requested lazily on first tap, matching the iOS best practice of asking only when the feature is used, not at app launch.

import SwiftUI
import AVFoundation

// MARK: - Model

@Observable
final class VoiceRecorderModel: NSObject, AVAudioRecorderDelegate {
    var isRecording = false
    var isPlaying   = false
    var hasRecording = false
    var permissionGranted: Bool? = nil   // nil = not yet asked
    var errorMessage: String?

    private var recorder: AVAudioRecorder?
    private var player:   AVAudioPlayer?

    var recordingURL: URL {
        FileManager.default.temporaryDirectory
            .appendingPathComponent("voice_memo.m4a")
    }

    // MARK: Permission

    func requestPermission() async {
        let granted = await AVAudioApplication.requestRecordPermission()
        await MainActor.run { permissionGranted = granted }
    }

    // MARK: Recording

    func startRecording() {
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.record, mode: .default, options: [])
            try session.setActive(true)

            let settings: [String: Any] = [
                AVFormatIDKey:             Int(kAudioFormatMPEG4AAC),
                AVSampleRateKey:           44_100,
                AVNumberOfChannelsKey:     1,
                AVEncoderAudioQualityKey:  AVAudioQuality.high.rawValue
            ]
            recorder = try AVAudioRecorder(url: recordingURL, settings: settings)
            recorder?.delegate = self
            recorder?.isMeteringEnabled = true
            recorder?.record()
            isRecording = true
            errorMessage = nil
        } catch {
            errorMessage = error.localizedDescription
        }
    }

    func stopRecording() {
        recorder?.stop()
        isRecording  = false
        hasRecording = FileManager.default.fileExists(atPath: recordingURL.path)
        try? AVAudioSession.sharedInstance().setActive(false)
    }

    // MARK: Playback

    func togglePlayback() {
        if isPlaying {
            player?.stop()
            isPlaying = false
        } else {
            do {
                let session = AVAudioSession.sharedInstance()
                try session.setCategory(.playback)
                try session.setActive(true)
                player = try AVAudioPlayer(contentsOf: recordingURL)
                player?.play()
                isPlaying = true
            } catch {
                errorMessage = error.localizedDescription
            }
        }
    }

    // MARK: AVAudioRecorderDelegate

    func audioRecorderDidFinishRecording(_ r: AVAudioRecorder, successfully flag: Bool) {
        if !flag { errorMessage = "Recording failed." }
    }
}

// MARK: - View

struct VoiceRecorderView: View {
    @State private var model = VoiceRecorderModel()

    var body: some View {
        VStack(spacing: 32) {
            Text("Voice Recorder")
                .font(.largeTitle.bold())

            statusBadge

            if let error = model.errorMessage {
                Text(error)
                    .foregroundStyle(.red)
                    .font(.caption)
                    .multilineTextAlignment(.center)
            }

            recordButton

            if model.hasRecording {
                playbackButton
            }
        }
        .padding(32)
        .task {
            if model.permissionGranted == nil {
                await model.requestPermission()
            }
        }
    }

    // MARK: Subviews

    @ViewBuilder
    private var statusBadge: some View {
        let label = model.isRecording ? "Recording…" : (model.hasRecording ? "Ready" : "Idle")
        let color: Color = model.isRecording ? .red : .secondary
        Label(label, systemImage: model.isRecording ? "waveform" : "mic")
            .foregroundStyle(color)
            .font(.subheadline.weight(.semibold))
            .symbolEffect(.variableColor.iterative, isActive: model.isRecording)
    }

    private var recordButton: some View {
        Button {
            if model.isRecording {
                model.stopRecording()
            } else {
                guard model.permissionGranted == true else { return }
                model.startRecording()
            }
        } label: {
            ZStack {
                Circle()
                    .fill(model.isRecording ? Color.red.opacity(0.15) : Color.red)
                    .frame(width: 80, height: 80)
                Image(systemName: model.isRecording ? "stop.fill" : "mic.fill")
                    .font(.system(size: 28))
                    .foregroundStyle(model.isRecording ? .red : .white)
            }
        }
        .accessibilityLabel(model.isRecording ? "Stop recording" : "Start recording")
        .sensoryFeedback(.impact, trigger: model.isRecording)
        .disabled(model.permissionGranted != true)
    }

    private var playbackButton: some View {
        Button {
            model.togglePlayback()
        } label: {
            Label(model.isPlaying ? "Stop" : "Play Recording",
                  systemImage: model.isPlaying ? "stop.circle" : "play.circle")
                .font(.headline)
        }
        .buttonStyle(.bordered)
        .accessibilityLabel(model.isPlaying ? "Stop playback" : "Play recording")
    }
}

// MARK: - Preview

#Preview {
    VoiceRecorderView()
}

How it works

  1. Permission with AVAudioApplication.requestRecordPermission() — iOS 17 replaced the old AVAudioSession.requestRecordPermission callback API with an async/await version on AVAudioApplication. It's called lazily inside a .task modifier so the prompt appears only when the user first opens the recorder screen, not at launch.
  2. Session category switching — startRecording() sets the session to .record, and togglePlayback() switches it to .playback. Both call setActive(true/false) so the system can mix audio routes correctly with other apps.
  3. Recording to a temporary file — The M4A file lives in FileManager.default.temporaryDirectory. That is fine for short-lived memos, but the OS may purge the directory and each new recording overwrites the same file name, so move files you want to keep to applicationSupportDirectory (see the sketch after this list).
  4. isMeteringEnabled = true — Flipping this flag on the recorder lets you later call recorder?.updateMeters() and read averagePower(forChannel:) to drive a live waveform animation without a separate audio tap.
  5. Reactive state via @Observable — Because VoiceRecorderModel is @Observable, SwiftUI automatically re-renders only the parts of the view that read isRecording, hasRecording, or errorMessage — no manual objectWillChange needed.
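
Step 3 above moves the finished file out of temporaryDirectory. Here is a minimal sketch of that move, assuming it lives as a method on the guide's VoiceRecorderModel; the persistRecording() name is hypothetical:

func persistRecording() throws -> URL {
    let fm = FileManager.default
    // Application Support is not purged the way the temporary directory is.
    let supportDir = try fm.url(for: .applicationSupportDirectory,
                                in: .userDomainMask,
                                appropriateFor: nil,
                                create: true)
    // Timestamped name so successive recordings never overwrite each other.
    let dest = supportDir.appendingPathComponent(
        "memo-\(Date.now.timeIntervalSince1970).m4a")
    try fm.moveItem(at: recordingURL, to: dest)
    return dest
}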

Variants

Live waveform meter driven by a Timer publisher

import SwiftUI
import AVFoundation

struct WaveformMeter: View {
    var recorder: AVAudioRecorder?
    // Normalized 0–1 power level, updated every 80 ms
    @State private var level: Float = 0

    var body: some View {
        Capsule()
            .fill(Color.red)
            .frame(width: 6, height: max(4, CGFloat(level) * 60))
            .animation(.easeOut(duration: 0.08), value: level)
            .onReceive(Timer.publish(every: 0.08, on: .main, in: .common).autoconnect()) { _ in
                guard let r = recorder, r.isRecording else {
                    level = 0
                    return
                }
                r.updateMeters()
                // averagePower(forChannel:) returns dB; map -60…0 dB to 0…1
                let db = r.averagePower(forChannel: 0)
                level = ((db + 60) / 60).clamped(to: 0...1)
            }
    }
}

extension Comparable {
    func clamped(to range: ClosedRange<Self>) -> Self {
        min(max(self, range.lowerBound), range.upperBound)
    }
}
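
Note that the VoiceRecorderModel above keeps its recorder private, so to feed WaveformMeter you would expose it, for example through a read-only computed property on the model; that small API change is assumed here rather than shown.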

Persist recordings to the app's Documents folder

For recordings the user should keep across sessions, replace temporaryDirectory with a timestamped file inside FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]. Persist the list in SwiftData, or simply enumerate the directory at launch (a sketch follows below); note that @AppStorage cannot store a URL array directly without first encoding it to Data. Each file name can encode the date ("memo-\(Date.now.timeIntervalSince1970).m4a") for cheap uniqueness.
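
A minimal sketch of the enumeration approach, assuming recordings are written to Documents; the helper names are illustrative:

var documentsDirectory: URL {
    FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
}

func savedRecordings() -> [URL] {
    // List everything in Documents; fall back to an empty list on error.
    let urls = (try? FileManager.default.contentsOfDirectory(
        at: documentsDirectory,
        includingPropertiesForKeys: nil
    )) ?? []
    // Keep only .m4a memos, newest first (the file name encodes the timestamp).
    return urls
        .filter { $0.pathExtension == "m4a" }
        .sorted { $0.lastPathComponent > $1.lastPathComponent }
}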

Common pitfalls

  1. Missing NSMicrophoneUsageDescription: without this Info.plist purpose string, the system terminates the app the first time it touches the microphone. Add it before testing on device.
  2. Leaving the audio session active: forgetting setActive(false) after recording can keep other apps' audio interrupted; stopRecording() above deactivates the session for you.
  3. Trusting temporaryDirectory: the OS may purge it between launches, and this guide reuses a single file name, so move recordings you want to keep, as covered in "How it works" and the variants above.

Prompt this with Claude Code

When using Soarias or Claude Code directly to implement this:

Implement voice recording in SwiftUI for iOS 17+.
Use AVAudioRecorder, AVAudioSession, and AVAudioApplication.requestRecordPermission().
Store the model state in an @Observable class.
Make it accessible (VoiceOver labels on all interactive controls).
Add a live waveform meter driven by AVAudioRecorder.updateMeters().
Add a #Preview with realistic sample data.

In Soarias's Build phase, paste this prompt directly into the implementation panel — it maps to a single screen card, so Claude Code scopes the output to one file and auto-links the generated Info.plist key change as a companion diff.

FAQ

Does this work on iOS 16?

Partially. AVAudioRecorder itself is available back to iOS 3, but AVAudioApplication.requestRecordPermission() (the async/await API) requires iOS 17+. On iOS 16 you'd use the older callback form: AVAudioSession.sharedInstance().requestRecordPermission { granted in … }. If you need to support iOS 16, wrap both paths behind an #available check, as sketched below. With a deployment target of iOS 17 or later, the code in this guide compiles without additional guards.
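
A minimal sketch of that fallback, bridging the iOS 16 callback into async/await; the requestMicPermission() name is illustrative:

import AVFoundation

func requestMicPermission() async -> Bool {
    if #available(iOS 17, *) {
        return await AVAudioApplication.requestRecordPermission()
    } else {
        // Bridge the pre-iOS 17 callback API into async/await.
        return await withCheckedContinuation { continuation in
            AVAudioSession.sharedInstance().requestRecordPermission { granted in
                continuation.resume(returning: granted)
            }
        }
    }
}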

How do I export the recording to the Files app or share sheet?

Use ShareLink with the file URL directly — SwiftUI handles the share sheet presentation for you. Pass ShareLink(item: model.recordingURL) and iOS will offer AirDrop, Messages, Files, and any app that accepts M4A audio. For Files app access without the share sheet, set both UIFileSharingEnabled and LSSupportsOpeningDocumentsInPlace to true in Info.plist and write recordings to the Documents directory.
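
A minimal sketch, assuming the VoiceRecorderModel from this guide; drop it into VoiceRecorderView's VStack:

if model.hasRecording {
    ShareLink(item: model.recordingURL) {
        Label("Share Recording", systemImage: "square.and.arrow.up")
    }
}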

What's the UIKit equivalent?

In UIKit you'd use the same AVAudioRecorder API — it's an AVFoundation class, not a UI-layer one, so it doesn't change between frameworks. The difference is that you'd manage state in a UIViewController and update UI elements manually in the AVAudioRecorderDelegate callbacks instead of relying on @Observable to drive automatic re-renders.
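
A minimal sketch of that delegate-driven pattern; the class and property names are illustrative:

import UIKit
import AVFoundation

final class RecorderViewController: UIViewController, AVAudioRecorderDelegate {
    private var recorder: AVAudioRecorder?
    private let statusLabel = UILabel()

    // No observation machinery: update the UI by hand in the delegate callback.
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder,
                                         successfully flag: Bool) {
        statusLabel.text = flag ? "Saved" : "Recording failed"
    }
}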

Last reviewed: 2026-05-12 by the Soarias team.
