How to implement voice recording in SwiftUI
Request microphone permission via AVAudioApplication.requestRecordPermission, configure an AVAudioSession for recording, then start and stop an AVAudioRecorder pointed at a temporary file URL. The session and recorder live in an @Observable class so your SwiftUI view stays reactive.
import AVFoundation
@Observable
final class VoiceRecorder {
    /// True while audio capture is actually in progress.
    var isRecording = false
    private var recorder: AVAudioRecorder?

    /// Destination for the capture. It lives in the temporary directory, so the
    /// OS may evict it — copy the file elsewhere if it must outlive the session.
    var fileURL: URL {
        FileManager.default.temporaryDirectory
            .appendingPathComponent("recording.m4a")
    }

    /// Configures the shared audio session for recording and starts capture.
    /// - Throws: Any error from session configuration or recorder creation.
    func start() throws {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.record, mode: .default)
        try session.setActive(true)

        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44_100,
            AVNumberOfChannelsKey: 1
        ]
        recorder = try AVAudioRecorder(url: fileURL, settings: settings)
        // record() returns false when capture cannot start (e.g. no input
        // route); reflect that instead of unconditionally reporting success.
        isRecording = recorder?.record() ?? false
    }

    /// Stops capture and releases the audio session.
    func stop() {
        recorder?.stop()
        isRecording = false
        // Deactivate so other apps' audio (Music, Podcasts) can resume.
        try? AVAudioSession.sharedInstance().setActive(false)
    }
}
Full implementation
The implementation below separates concerns cleanly: an @Observable VoiceRecorderModel owns all AVFoundation state, while the SwiftUI view reacts to isRecording, permissionGranted, and an optional errorMessage. Playback is handled by a companion AVAudioPlayer created on demand from the same file URL so you never ship a separate playback class.
Permission is requested lazily on first tap, matching the iOS best-practice of asking only when the feature is used — not at app launch.
import SwiftUI
import AVFoundation
// MARK: - Model
@Observable
final class VoiceRecorderModel: NSObject, AVAudioRecorderDelegate, AVAudioPlayerDelegate {
    /// True while audio capture is actually in progress.
    var isRecording = false
    /// True while the companion player is playing back the last recording.
    var isPlaying = false
    /// True once a finished recording exists on disk at `recordingURL`.
    var hasRecording = false
    var permissionGranted: Bool? = nil // nil = not yet asked
    /// Last recoverable failure, surfaced to the UI; nil when healthy.
    var errorMessage: String?

    private var recorder: AVAudioRecorder?
    private var player: AVAudioPlayer?

    /// Destination for the capture. Lives in the temp directory, so the OS may
    /// evict it — copy it out if it must survive (see "Common pitfalls").
    var recordingURL: URL {
        FileManager.default.temporaryDirectory
            .appendingPathComponent("voice_memo.m4a")
    }

    // MARK: Permission

    /// Prompts for microphone access (iOS 17+ async API) and publishes the
    /// result on the main actor so SwiftUI observes it safely.
    func requestPermission() async {
        let granted = await AVAudioApplication.requestRecordPermission()
        await MainActor.run { permissionGranted = granted }
    }

    // MARK: Recording

    /// Activates a `.record` session and starts an AAC capture at 44.1 kHz mono.
    /// Failures land in `errorMessage` rather than throwing.
    func startRecording() {
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.record, mode: .default, options: [])
            try session.setActive(true)
            let settings: [String: Any] = [
                AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                AVSampleRateKey: 44_100,
                AVNumberOfChannelsKey: 1,
                AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
            ]
            recorder = try AVAudioRecorder(url: recordingURL, settings: settings)
            recorder?.delegate = self
            recorder?.isMeteringEnabled = true
            // record() returns false when capture cannot start; don't report a
            // recording that never began.
            isRecording = recorder?.record() ?? false
            errorMessage = isRecording ? nil : "Could not start recording."
        } catch {
            errorMessage = error.localizedDescription
        }
    }

    /// Stops capture, notes whether a file was produced, and deactivates the
    /// session so other apps' audio can resume.
    func stopRecording() {
        recorder?.stop()
        isRecording = false
        hasRecording = FileManager.default.fileExists(atPath: recordingURL.path)
        try? AVAudioSession.sharedInstance().setActive(false)
    }

    // MARK: Playback

    /// Starts playback of the last recording, or stops it if already playing.
    func togglePlayback() {
        if isPlaying {
            player?.stop()
            finishPlayback()
        } else {
            do {
                let session = AVAudioSession.sharedInstance()
                try session.setCategory(.playback)
                try session.setActive(true)
                player = try AVAudioPlayer(contentsOf: recordingURL)
                // Delegate is required so isPlaying resets when the clip ends
                // on its own (not just when the user taps stop).
                player?.delegate = self
                player?.play()
                isPlaying = true
            } catch {
                errorMessage = error.localizedDescription
            }
        }
    }

    /// Shared teardown for both user-initiated stop and natural end of clip.
    private func finishPlayback() {
        isPlaying = false
        try? AVAudioSession.sharedInstance().setActive(false)
    }

    // MARK: AVAudioRecorderDelegate

    func audioRecorderDidFinishRecording(_ r: AVAudioRecorder, successfully flag: Bool) {
        if !flag { errorMessage = "Recording failed." }
    }

    // MARK: AVAudioPlayerDelegate

    func audioPlayerDidFinishPlaying(_ p: AVAudioPlayer, successfully flag: Bool) {
        finishPlayback()
    }
}
// MARK: - View
/// Record/playback screen driven entirely by `VoiceRecorderModel` state.
struct VoiceRecorderView: View {
    @State private var model = VoiceRecorderModel()

    var body: some View {
        VStack(spacing: 32) {
            Text("Voice Recorder")
                .font(.largeTitle.bold())

            statusBadge

            if let error = model.errorMessage {
                Text(error)
                    .foregroundStyle(.red)
                    .font(.caption)
                    .multilineTextAlignment(.center)
            }

            recordButton

            if model.hasRecording {
                playbackButton
            }
        }
        .padding(32)
        .task {
            // Ask lazily, on first appearance of this screen — not at launch.
            guard model.permissionGranted == nil else { return }
            await model.requestPermission()
        }
    }

    // MARK: Subviews

    /// Caption for the status badge, derived from the model's state.
    private var statusText: String {
        if model.isRecording { return "Recording…" }
        if model.hasRecording { return "Ready" }
        return "Idle"
    }

    private var statusBadge: some View {
        Label(statusText, systemImage: model.isRecording ? "waveform" : "mic")
            .foregroundStyle(model.isRecording ? Color.red : Color.secondary)
            .font(.subheadline.weight(.semibold))
            .symbolEffect(.variableColor.iterative, isActive: model.isRecording)
    }

    private var recordButton: some View {
        Button(action: toggleRecording) {
            ZStack {
                Circle()
                    .fill(model.isRecording ? Color.red.opacity(0.15) : Color.red)
                    .frame(width: 80, height: 80)
                Image(systemName: model.isRecording ? "stop.fill" : "mic.fill")
                    .font(.system(size: 28))
                    .foregroundStyle(model.isRecording ? .red : .white)
            }
        }
        .accessibilityLabel(model.isRecording ? "Stop recording" : "Start recording")
        .sensoryFeedback(.impact, trigger: model.isRecording)
        .disabled(model.permissionGranted != true)
    }

    /// Record-button action: stop if recording, otherwise start when permitted.
    private func toggleRecording() {
        guard !model.isRecording else {
            model.stopRecording()
            return
        }
        // Belt and suspenders: the button is disabled until permission is granted.
        guard model.permissionGranted == true else { return }
        model.startRecording()
    }

    private var playbackButton: some View {
        Button {
            model.togglePlayback()
        } label: {
            Label(
                model.isPlaying ? "Stop" : "Play Recording",
                systemImage: model.isPlaying ? "stop.circle" : "play.circle"
            )
            .font(.headline)
        }
        .buttonStyle(.bordered)
        .accessibilityLabel(model.isPlaying ? "Stop playback" : "Play recording")
    }
}
// MARK: - Preview
#Preview {
    VoiceRecorderView()
}
How it works
-
Permission with
`AVAudioApplication.requestRecordPermission()` — iOS 17 replaced the old `AVAudioSession.requestRecordPermission` callback API with an async/await version on `AVAudioApplication`. It's called lazily inside a `.task` modifier so the prompt appears only when the user first opens the recorder screen, not at launch. -
Session category switching —
`startRecording()` sets the session to `.record`, and `togglePlayback()` switches it to `.playback`. Both call `setActive(true/false)` so the system can mix audio routes correctly with other apps. -
Recording to a temporary file — The M4A file lives at
`FileManager.default.temporaryDirectory`. This is fine for short-lived memos; move the file to `applicationSupportDirectory` before the next recording or the OS may evict it. -
`isMeteringEnabled = true` — Flipping this flag on the recorder lets you later call `recorder?.updateMeters()` and read `averagePower(forChannel:)` to drive a live waveform animation without a separate audio tap. -
Reactive state via
`@Observable` — Because `VoiceRecorderModel` is `@Observable`, SwiftUI automatically re-renders only the parts of the view that read `isRecording`, `hasRecording`, or `errorMessage` — no manual `objectWillChange` needed.
Variants
Live waveform meter using TimelineView
/// Single-bar live level meter for an in-progress recording.
///
/// AVAudioRecorder is not Observable, so its metering values must be polled;
/// a repeating main-thread timer both samples the level and resets the bar
/// when recording stops. (The original combined a TimelineView and an
/// `.onChange` on `recorder?.isRecording` — the TimelineView was redundant
/// with the timer, and the `.onChange` never fired because AVAudioRecorder
/// properties are invisible to SwiftUI observation.)
struct WaveformMeter: View {
    var recorder: AVAudioRecorder?
    // Normalized 0–1 power level updated every 80 ms
    @State private var level: Float = 0

    var body: some View {
        Capsule()
            .fill(Color.red)
            .frame(width: 6, height: max(4, CGFloat(level) * 60))
            .animation(.easeOut(duration: 0.08), value: level)
            .onReceive(Timer.publish(every: 0.08, on: .main, in: .common).autoconnect()) { _ in
                guard let r = recorder, r.isRecording else {
                    level = 0 // collapse the bar when idle
                    return
                }
                r.updateMeters()
                // averagePower returns dB; map -60…0 → 0…1
                let db = r.averagePower(forChannel: 0)
                level = ((db + 60) / 60).clamped(to: 0...1)
            }
    }
}
extension Comparable {
    /// Returns `self` limited to lie within `range`.
    func clamped(to range: ClosedRange<Self>) -> Self {
        if self < range.lowerBound { return range.lowerBound }
        if self > range.upperBound { return range.upperBound }
        return self
    }
}
Persist recordings to the app's Documents folder
For recordings the user should keep across sessions, replace temporaryDirectory with a timestamped file inside FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]. Persist the list of file names in SwiftData (or, since @AppStorage cannot store a URL array directly, as a single encoded string) so it survives launches. Each file name can encode the date ("memo-\(Date.now.timeIntervalSince1970).m4a") for cheap uniqueness.
Common pitfalls
-
Missing Info.plist key. You must add
`NSMicrophoneUsageDescription` to your app's Info.plist. Without it, requesting microphone access via `AVAudioApplication.requestRecordPermission()` crashes on device (but not always on Simulator — a common source of confusion during development). -
Forgetting to deactivate the audio session. If you don't call
`AVAudioSession.sharedInstance().setActive(false)` after stopping, background audio from Music or Podcasts never resumes. Always deactivate in `stopRecording()`. -
Overwriting the temp file without checking. Each new recording silently overwrites
`voice_memo.m4a` in the temp directory. If you present a "save" flow, copy the file out immediately after `audioRecorderDidFinishRecording` fires — the OS can delete the temp directory at any time, including while your app is backgrounded. -
Accessibility for the record button. The button icon changes between
`mic.fill` and `stop.fill`, but VoiceOver users won't know why unless you explicitly set `.accessibilityLabel`. The code above does this; don't remove those modifiers during cleanup.
Prompt this with Claude Code
When using Soarias or Claude Code directly to implement this:
Implement voice recording in SwiftUI for iOS 17+. Use AVAudioRecorder, AVAudioSession, and AVAudioApplication.requestRecordPermission(). Store the model state in an @Observable class. Make it accessible (VoiceOver labels on all interactive controls). Add a live waveform meter driven by AVAudioRecorder.updateMeters(). Add a #Preview with realistic sample data.
In Soarias's Build phase, paste this prompt directly into the implementation panel — it maps to a single screen card, so Claude Code scopes the output to one file and auto-links the generated Info.plist key change as a companion diff.
Related
FAQ
Does this work on iOS 16?
Partially. AVAudioRecorder itself is available back to iOS 3, but AVAudioApplication.requestRecordPermission() (the async/await API) requires iOS 17+. On iOS 16 you'd use the older callback form: AVAudioSession.sharedInstance().requestRecordPermission { granted in … }. If you need to support iOS 16, wrap both paths behind an #available check. Since Xcode 16's default deployment target is iOS 17, the code in this guide compiles without additional guards.
How do I export the recording to the Files app or share sheet?
Use ShareLink with the file URL directly — SwiftUI handles the share sheet presentation for you. Pass ShareLink(item: model.recordingURL) and iOS will offer AirDrop, Messages, Files, and any app that accepts M4A audio. For Files app access without the share sheet, set the UIFileSharingEnabled key to true in Info.plist and write recordings to the Documents directory.
What's the UIKit equivalent?
In UIKit you'd use the same AVAudioRecorder API — it's a Foundation-layer class, not UI-layer, so it doesn't change between frameworks. The difference is that you'd manage state in a UIViewController and update UI elements manually in the AVAudioRecorderDelegate callbacks instead of relying on @Observable to drive automatic re-renders.
Last reviewed: 2026-05-12 by the Soarias team.