How to Build an Audio Player in SwiftUI
Wrap AVPlayer in an @Observable view-model, subscribe to periodic time updates, and bind a Slider plus a play/pause Button to the published state.
import AVKit
/// Minimal observable wrapper around AVPlayer: exposes the playback position,
/// track length, and a play/pause flag that a SwiftUI view can bind to.
@Observable
final class AudioPlayer {
    // Force-unwrap is acceptable here: a missing bundled resource is a
    // packaging error that should fail loudly at launch, not at play time.
    let player = AVPlayer(url: Bundle.main.url(forResource: "track", withExtension: "mp3")!)
    var currentTime: Double = 0
    var duration: Double = 1
    var isPlaying = false

    /// Starts or pauses playback, then flips the published flag.
    func togglePlay() {
        if isPlaying {
            player.pause()
        } else {
            player.play()
        }
        isPlaying.toggle()
    }
}
Full implementation
The view-model owns the AVPlayer and registers a periodic time observer that fires every half-second, keeping currentTime and duration in sync with the player. The SwiftUI view binds a Slider to those values, pausing playback while the user scrubs and resuming on release — a pattern that avoids audio glitches during seek operations.
import SwiftUI
import AVKit
import Combine
// MARK: - View-model
/// Observable view-model that owns an AVPlayer and mirrors its playback state
/// (`currentTime`, `duration`, `isPlaying`) for SwiftUI bindings.
@Observable
final class AudioPlayerModel {
    let player: AVPlayer
    var currentTime: Double = 0
    // Placeholder of 1 keeps the slider range valid (non-zero width) until the
    // real duration arrives from the asset.
    var duration: Double = 1
    var isPlaying = false
    var title: String
    var artist: String

    // Token returned by addPeriodicTimeObserver; must be removed in deinit.
    private var timeObserver: Any?
    private var statusObserver: NSKeyValueObservation?

    /// - Parameters:
    ///   - url: Local or remote audio URL handed straight to AVPlayer.
    ///   - title: Display title for the UI (not read from asset metadata).
    ///   - artist: Display artist for the UI.
    init(url: URL, title: String = "Unknown Track", artist: String = "Unknown Artist") {
        self.player = AVPlayer(url: url)
        self.title = title
        self.artist = artist
        observeDuration()
        observeTime()
    }

    deinit {
        // Failing to remove a periodic observer leaks it for the player's lifetime.
        if let obs = timeObserver { player.removeTimeObserver(obs) }
        statusObserver?.invalidate()
    }

    /// Starts or pauses playback and flips the published flag.
    func togglePlay() {
        if isPlaying { player.pause() } else { player.play() }
        isPlaying.toggle()
    }

    /// Seeks with zero tolerance for sample-accurate positioning (used by the
    /// scrub bar and the ±15 s transport buttons).
    func seek(to time: Double) {
        let target = CMTime(seconds: time, preferredTimescale: 600)
        player.seek(to: target, toleranceBefore: .zero, toleranceAfter: .zero)
    }

    /// Watches the item's status; `duration` is only meaningful once the item
    /// reaches `.readyToPlay`.
    private func observeDuration() {
        // `.initial` fires the handler immediately if the item is ALREADY ready
        // when observation begins — without it, duration would never be set in
        // that case.
        statusObserver = player.currentItem?.observe(\.status, options: [.initial, .new]) { [weak self] item, _ in
            guard item.status == .readyToPlay else { return }
            let secs = item.duration.seconds
            // Reject indefinite (NaN) and zero durations to keep 0...duration a
            // valid, non-empty slider range.
            guard secs.isFinite, secs > 0 else { return }
            // KVO may deliver on a background thread; publish UI state on main.
            DispatchQueue.main.async { self?.duration = secs }
        }
    }

    /// Registers a half-second periodic observer on the main queue that keeps
    /// `currentTime` in sync with playback.
    private func observeTime() {
        let interval = CMTime(seconds: 0.5, preferredTimescale: 600)
        timeObserver = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
            self?.currentTime = time.seconds
        }
    }
}
// MARK: - View
/// Full-screen audio player: artwork placeholder, track metadata, a
/// scrub-enabled seek bar, and ±15 s / play-pause transport controls.
struct AudioPlayerView: View {
    @State private var model = AudioPlayerModel(
        url: Bundle.main.url(forResource: "demo", withExtension: "mp3")!,
        title: "Chill Groove",
        artist: "Lo-Fi Studio"
    )
    // True while the user is dragging the slider; playback is paused for the
    // duration of the drag to avoid audio glitches while seeking.
    @State private var isScrubbing = false

    var body: some View {
        VStack(spacing: 24) {
            // Album art placeholder
            RoundedRectangle(cornerRadius: 20)
                .fill(Color.indigo.gradient)
                .frame(width: 240, height: 240)
                .overlay {
                    Image(systemName: "music.note")
                        .font(.system(size: 72))
                        .foregroundStyle(.white.opacity(0.8))
                }
                .shadow(radius: 12)

            // Track info
            VStack(spacing: 4) {
                Text(model.title)
                    .font(.title2).bold()
                Text(model.artist)
                    .font(.subheadline)
                    .foregroundStyle(.secondary)
            }

            // Seek bar
            VStack(spacing: 4) {
                Slider(
                    value: Binding(
                        // Clamp the published value into the slider's range: a
                        // transient currentTime outside 0...duration (e.g. before
                        // the real duration arrives) would otherwise trigger
                        // out-of-range runtime warnings from Slider.
                        get: { min(max(model.currentTime, 0), model.duration) },
                        set: { newVal in
                            model.currentTime = newVal
                            // First movement of a drag: pause once, mark scrubbing.
                            if !isScrubbing {
                                isScrubbing = true
                                model.player.pause()
                            }
                        }
                    ),
                    in: 0...model.duration
                ) {
                    Text("Seek")
                } minimumValueLabel: {
                    Text(formatTime(model.currentTime)).font(.caption2).monospacedDigit()
                } maximumValueLabel: {
                    Text(formatTime(model.duration)).font(.caption2).monospacedDigit()
                } onEditingChanged: { editing in
                    if !editing {
                        // Drag ended: jump the player to the scrubbed position,
                        // then resume only if it was playing before the drag.
                        model.seek(to: model.currentTime)
                        if model.isPlaying { model.player.play() }
                        isScrubbing = false
                    }
                }
                .tint(.indigo)
                .accessibilityLabel("Seek bar")
                .accessibilityValue("\(Int(model.currentTime)) of \(Int(model.duration)) seconds")
            }

            // Transport controls
            HStack(spacing: 48) {
                Button {
                    // Clamp to the start so skipping back can't go negative.
                    model.seek(to: max(model.currentTime - 15, 0))
                } label: {
                    Image(systemName: "gobackward.15")
                        .font(.title2)
                }
                .accessibilityLabel("Skip back 15 seconds")

                Button {
                    model.togglePlay()
                } label: {
                    Image(systemName: model.isPlaying ? "pause.circle.fill" : "play.circle.fill")
                        .font(.system(size: 64))
                        .foregroundStyle(.indigo)
                }
                .accessibilityLabel(model.isPlaying ? "Pause" : "Play")

                Button {
                    // Clamp to the end so skipping forward can't overshoot.
                    model.seek(to: min(model.currentTime + 15, model.duration))
                } label: {
                    Image(systemName: "goforward.15")
                        .font(.title2)
                }
                .accessibilityLabel("Skip forward 15 seconds")
            }
            .foregroundStyle(.primary)
        }
        .padding(32)
    }

    /// Formats a second count as "m:ss"; non-finite or negative input renders "0:00".
    private func formatTime(_ seconds: Double) -> String {
        guard seconds.isFinite, seconds >= 0 else { return "0:00" }
        let total = Int(seconds)
        return String(format: "%d:%02d", total / 60, total % 60)
    }
}
#Preview {
AudioPlayerView()
}
How it works
-
`@Observable` view-model — Using the Swift 5.9 `@Observable` macro (iOS 17+) instead of `ObservableObject` gives finer-grained dependency tracking: only the properties accessed inside the view body trigger re-renders, reducing unnecessary redraws during rapid time updates. -
`addPeriodicTimeObserver` — Registered in `observeTime()`, this fires on the main queue every 0.5 s and writes to `currentTime`, which the `Slider` reads. The observer token is retained and explicitly removed in `deinit` to avoid leaking the observation for the lifetime of the player. -
KVO on
`AVPlayerItem.status` — `AVPlayer` loads assets asynchronously; `duration` is only valid after status reaches `.readyToPlay`. The `observeDuration()` method waits for that signal before setting `model.duration`, preventing an invalid (NaN or zero-width) slider range. -
Scrub-pause-resume pattern — When the user starts dragging the
`Slider`, `isScrubbing` flips to `true` and the player pauses. On `onEditingChanged` receiving `false`, the view calls `model.seek(to:)` with tight tolerances (`.zero`, `.zero`) for sample-accurate seeking, then resumes playback only if it was previously playing. -
Transport skip buttons — The ±15-second skip buttons call
`model.seek(to:)` directly and are clamped to `[0, duration]` with `max`/`min`, so they cannot seek past the track boundaries.
Variants
Remote URL streaming
// Replace the local URL with any HTTPS audio stream.
// Enable background audio in Capabilities → Background Modes → Audio.
// Then configure the audio session once at app launch:
import AVFoundation
/// Configures the shared audio session for background-capable playback.
/// Call once at app launch, before any AVPlayer starts.
func configureAudioSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playback, mode: .default)
        try session.setActive(true)
    } catch {
        // Report instead of silently swallowing (`try?`): a misconfigured
        // session is the usual reason background audio "mysteriously" stops.
        print("Audio session configuration failed: \(error)")
    }
}
// In your App entry point:
/// App entry point. Configures the shared audio session once, in `init`,
/// so it is active before any player is created.
@main
struct MyApp: App {
    init() {
        configureAudioSession()
    }

    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
// In AudioPlayerModel.init, simply pass a remote URL:
// let remoteURL = URL(string: "https://example.com/stream.mp3")!
// self.player = AVPlayer(url: remoteURL)
Loop a track automatically
Observe AVPlayerItem.didPlayToEndTimeNotification and seek back to zero when it fires. Add this to the view-model's init:
// Restart playback from the beginning when the current item finishes.
// NOTE(review): the block-based addObserver API returns an observer token
// that should be stored in a property and passed to
// NotificationCenter.default.removeObserver(_:) in deinit — as written, the
// token is discarded and the observation is never torn down; confirm and
// store it alongside timeObserver.
NotificationCenter.default.addObserver(
forName: .AVPlayerItemDidPlayToEndTime,
object: player.currentItem,
queue: .main
) { [weak self] _ in
// Seek back to the start, then resume — a simple infinite loop.
self?.seek(to: 0)
self?.player.play()
}
Common pitfalls
-
iOS 17 minimum for
`@Observable`: If you need iOS 16 support, replace `@Observable` with `ObservableObject` + `@Published` and inject via `@StateObject`/`@EnvironmentObject`. The rest of the implementation is identical. -
Reading
`duration` before `.readyToPlay`: `AVPlayerItem.duration` returns `CMTime.indefinite` until the asset is loaded. Initialising your slider range to `0...1` and updating it inside the KVO callback prevents a NaN slider value or runtime crash. -
Retain cycles with
`addPeriodicTimeObserver`: The closure captures `self`; always use `[weak self]` and call `player.removeTimeObserver(_:)` in `deinit`. Forgetting either leaks the view-model for the lifetime of the player. -
Background audio entitlement: Without the Audio, AirPlay, and Picture in Picture background mode and a configured
`AVAudioSession` category of `.playback`, audio will stop the moment the app is backgrounded — a common App Review rejection reason for media apps.
Prompt this with Claude Code
When using Soarias or Claude Code directly to implement this:
Implement an audio player in SwiftUI for iOS 17+. Use AVPlayer. Make it accessible (VoiceOver labels on play/pause, seek bar, skip buttons). Add a #Preview with realistic sample data (title, artist, 3-minute track duration).
In Soarias's Build phase, drop this prompt into the active session after scaffolding your screen list — Claude Code will wire up the AVPlayer integration, configure the audio session, and surface the component inside your existing navigation hierarchy without leaving the editor.
Related
FAQ
Does this work on iOS 16?
The AVPlayer logic works on iOS 16, but the @Observable macro requires iOS 17. To target iOS 16, swap @Observable for class AudioPlayerModel: ObservableObject, mark each property with @Published, and use @StateObject in the view. Everything else compiles unchanged.
How do I display Now Playing info on the Lock Screen?
Populate MPNowPlayingInfoCenter.default().nowPlayingInfo with a dictionary containing MPMediaItemPropertyTitle, MPMediaItemPropertyArtist, MPNowPlayingInfoPropertyElapsedPlaybackTime, and optionally MPMediaItemPropertyArtwork. Update the dictionary whenever playback state changes. Also register remote command handlers via MPRemoteCommandCenter.shared() so the Lock Screen play/pause button controls your player.
What is the UIKit equivalent?
In UIKit you would use AVPlayerViewController for a full-screen system player UI, or embed an AVPlayerLayer inside a custom UIView subclass and drive it with the same AVPlayer APIs. The SwiftUI approach above gives you the same control surface with far less boilerplate and first-class state binding.
Last reviewed: 2026-05-11 by the Soarias team.