```html SwiftUI: How to Build Audio Player (iOS 17+, 2026)

How to Build an Audio Player in SwiftUI

iOS 17+ Xcode 16+ Intermediate APIs: AVPlayer Updated: May 11, 2026
TL;DR

Wrap AVPlayer in an @Observable view-model, subscribe to periodic time updates, and bind a Slider plus a play/pause Button to the published state.

import AVKit

@Observable
final class AudioPlayer {
    /// Underlying AVFoundation player for the bundled "track.mp3" resource.
    /// Force-unwrap is acceptable here: a missing bundled asset is a build error.
    let player = AVPlayer(url: Bundle.main.url(forResource: "track", withExtension: "mp3")!)
    /// Playback position in seconds (published for the UI).
    var currentTime: Double = 0
    /// Track length in seconds; defaults to 1 so a slider range stays valid.
    var duration: Double = 1
    /// Mirrors the play/pause state the UI renders.
    var isPlaying = false

    /// Starts or pauses playback, then flips the published flag.
    func togglePlay() {
        if isPlaying {
            player.pause()
        } else {
            player.play()
        }
        isPlaying.toggle()
    }
}

Full implementation

The view-model owns the AVPlayer and registers a periodic time observer that fires every half-second, keeping currentTime and duration in sync with the player. The SwiftUI view binds a Slider to those values, pausing playback while the user scrubs and resuming on release — a pattern that avoids audio glitches during seek operations.

import SwiftUI
import AVKit
import Combine

// MARK: - View-model

@Observable
final class AudioPlayerModel {
    /// Wrapped AVFoundation player; exposed so the view can pause/resume around scrubs.
    let player: AVPlayer
    /// Playback position in seconds, refreshed every 0.5 s by the periodic time observer.
    var currentTime: Double = 0
    /// Track length in seconds. Starts at 1 (not 0) so the slider range `0...duration`
    /// is valid before the asset finishes loading.
    var duration: Double = 1
    /// Mirrors the play/pause state the UI renders.
    var isPlaying = false
    var title: String
    var artist: String

    /// Token from `addPeriodicTimeObserver`; must be removed in `deinit`.
    private var timeObserver: Any?
    /// KVO handle for the player item's `status`; invalidated in `deinit`.
    private var statusObserver: NSKeyValueObservation?

    /// - Parameters:
    ///   - url: Local or remote audio URL handed straight to `AVPlayer`.
    ///   - title:  Display title (defaults used when metadata is unavailable).
    ///   - artist: Display artist.
    init(url: URL, title: String = "Unknown Track", artist: String = "Unknown Artist") {
        self.player = AVPlayer(url: url)
        self.title  = title
        self.artist = artist
        observeDuration()
        observeTime()
    }

    deinit {
        // The periodic observer must be removed explicitly or the player
        // keeps invoking it; the KVO handle is invalidated for symmetry.
        if let obs = timeObserver { player.removeTimeObserver(obs) }
        statusObserver?.invalidate()
    }

    /// Toggles playback and the published `isPlaying` flag together.
    func togglePlay() {
        if isPlaying { player.pause() } else { player.play() }
        isPlaying.toggle()
    }

    /// Seeks with zero tolerance for sample-accurate positioning.
    /// The target is clamped to `[0, duration]` so callers cannot seek
    /// outside the track.
    func seek(to time: Double) {
        let clamped = min(max(time, 0), duration)
        let target = CMTime(seconds: clamped, preferredTimescale: 600)
        player.seek(to: target, toleranceBefore: .zero, toleranceAfter: .zero)
    }

    /// Waits for the item to reach `.readyToPlay` before reading `duration`
    /// (it is indefinite until the asset loads).
    private func observeDuration() {
        statusObserver = player.currentItem?.observe(\.status, options: [.new]) { [weak self] item, _ in
            guard item.status == .readyToPlay else { return }
            let secs = item.duration.seconds
            // Reject non-finite or zero values so the slider range stays valid.
            guard secs.isFinite, secs > 0 else { return }
            // KVO may deliver on a background thread; hop to main before
            // mutating @Observable state the view reads.
            DispatchQueue.main.async { self?.duration = secs }
        }
    }

    /// Registers a periodic observer on the main queue that drives `currentTime`.
    private func observeTime() {
        let interval = CMTime(seconds: 0.5, preferredTimescale: 600)
        timeObserver = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
            self?.currentTime = time.seconds
        }
    }
}

// MARK: - View

struct AudioPlayerView: View {
    /// Owned view-model; `@State` keeps one instance alive for the view's lifetime.
    @State private var model = AudioPlayerModel(
        url: Bundle.main.url(forResource: "demo", withExtension: "mp3")!,
        title: "Chill Groove",
        artist: "Lo-Fi Studio"
    )
    /// True while the user is dragging the seek slider; playback pauses for the drag.
    @State private var isDragging = false

    var body: some View {
        VStack(spacing: 24) {
            artwork
            trackInfo
            seekBar
            transportControls
        }
        .padding(32)
    }

    // MARK: - Subviews

    /// Album-art placeholder: an indigo gradient card with a music-note glyph.
    private var artwork: some View {
        RoundedRectangle(cornerRadius: 20)
            .fill(Color.indigo.gradient)
            .frame(width: 240, height: 240)
            .overlay {
                Image(systemName: "music.note")
                    .font(.system(size: 72))
                    .foregroundStyle(.white.opacity(0.8))
            }
            .shadow(radius: 12)
    }

    /// Track title and artist, stacked.
    private var trackInfo: some View {
        VStack(spacing: 4) {
            Text(model.title)
                .font(.title2).bold()
            Text(model.artist)
                .font(.subheadline)
                .foregroundStyle(.secondary)
        }
    }

    /// Slider bound to `currentTime`: pauses on the first drag change,
    /// seeks and resumes (if previously playing) on release.
    private var seekBar: some View {
        VStack(spacing: 4) {
            Slider(value: scrubBinding, in: 0...model.duration) {
                Text("Seek")
            } minimumValueLabel: {
                Text(formatTime(model.currentTime)).font(.caption2).monospacedDigit()
            } maximumValueLabel: {
                Text(formatTime(model.duration)).font(.caption2).monospacedDigit()
            } onEditingChanged: { editing in
                guard !editing else { return }
                model.seek(to: model.currentTime)
                if model.isPlaying { model.player.play() }
                isDragging = false
            }
            .tint(.indigo)
            .accessibilityLabel("Seek bar")
            .accessibilityValue("\(Int(model.currentTime)) of \(Int(model.duration)) seconds")
        }
    }

    /// Binding that mirrors `currentTime` and pauses playback when a drag begins.
    private var scrubBinding: Binding<Double> {
        Binding(
            get: { model.currentTime },
            set: { newValue in
                model.currentTime = newValue
                if !isDragging {
                    isDragging = true
                    model.player.pause()
                }
            }
        )
    }

    /// Skip-back, play/pause, and skip-forward controls.
    private var transportControls: some View {
        HStack(spacing: 48) {
            skipButton(symbol: "gobackward.15", label: "Skip back 15 seconds") {
                model.seek(to: max(model.currentTime - 15, 0))
            }

            Button(action: model.togglePlay) {
                Image(systemName: model.isPlaying ? "pause.circle.fill" : "play.circle.fill")
                    .font(.system(size: 64))
                    .foregroundStyle(.indigo)
            }
            .accessibilityLabel(model.isPlaying ? "Pause" : "Play")

            skipButton(symbol: "goforward.15", label: "Skip forward 15 seconds") {
                model.seek(to: min(model.currentTime + 15, model.duration))
            }
        }
        .foregroundStyle(.primary)
    }

    /// Factory for the two ±15 s transport buttons (shared layout + accessibility).
    private func skipButton(symbol: String, label: String, action: @escaping () -> Void) -> some View {
        Button(action: action) {
            Image(systemName: symbol)
                .font(.title2)
        }
        .accessibilityLabel(label)
    }

    /// Formats seconds as `m:ss`; non-finite inputs render as "0:00".
    private func formatTime(_ seconds: Double) -> String {
        guard seconds.isFinite else { return "0:00" }
        let total = Int(seconds)
        return String(format: "%d:%02d", total / 60, total % 60)
    }
}

#Preview {
    AudioPlayerView()
}

How it works

  1. @Observable view-model — Using the Swift 5.9 @Observable macro (iOS 17+) instead of ObservableObject gives finer-grained dependency tracking: only the properties accessed inside the view body trigger re-renders, reducing unnecessary redraws during rapid time updates.
  2. addPeriodicTimeObserver — Registered in observeTime(), this fires on the main queue every 0.5 s and writes to currentTime, which the Slider reads. The observer token is retained and explicitly removed in deinit so callbacks stop when the model is deallocated; the [weak self] capture is what prevents a retain cycle.
  3. KVO on AVPlayerItem.status — AVPlayer loads assets asynchronously; duration is only valid after status reaches .readyToPlay. The observeDuration() method waits for that signal before setting model.duration, preventing an invalid range in the slider.
  4. Scrub-pause-resume pattern — When the user starts dragging the Slider, isScrubbing flips to true and the player pauses. On onEditingChanged receiving false, the view calls model.seek(to:) with tight tolerances (.zero, .zero) for sample-accurate seeking, then resumes playback only if it was previously playing.
  5. Transport skip buttons — The ±15-second skip buttons call model.seek(to:) directly and are clamped to [0, duration] with max/min, so they cannot seek past the track boundaries.

Variants

Remote URL streaming

// Replace the local URL with any HTTPS audio stream.
// Enable background audio in Capabilities → Background Modes → Audio.
// Then configure the audio session once at app launch:

import AVFoundation

/// Configures the shared audio session for playback so audio keeps playing
/// with the silent switch engaged and, with the Background Modes → Audio
/// capability enabled, continues in the background. Call once at app launch.
func configureAudioSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playback, mode: .default)
        try session.setActive(true)
    } catch {
        // Surface the failure instead of silently dropping it (`try?` hid it);
        // a misconfigured session breaks background/silent-switch playback.
        print("Audio session configuration failed: \(error)")
    }
}

// In your App entry point:
@main
struct MyApp: App {
    // Configure the audio session once, before any playback starts.
    init() { configureAudioSession() }
    var body: some Scene { WindowGroup { ContentView() } }
}

// In AudioPlayerModel.init, simply pass a remote URL:
// let remoteURL = URL(string: "https://example.com/stream.mp3")!
// self.player = AVPlayer(url: remoteURL)

Loop a track automatically

Observe AVPlayerItem.didPlayToEndTimeNotification and seek back to zero when it fires. Add this to the view-model's init:

// NOTE(review): the token returned by this block-based API is discarded here.
// Store it in a property and pass it to NotificationCenter.default.removeObserver(_:)
// in deinit, otherwise the observation lives for the app's lifetime.
NotificationCenter.default.addObserver(
    forName: .AVPlayerItemDidPlayToEndTime,  // fires when the current item finishes
    object: player.currentItem,
    queue: .main
) { [weak self] _ in
    // Loop: jump back to the start and resume playback.
    self?.seek(to: 0)
    self?.player.play()
}

Common pitfalls

Prompt this with Claude Code

When using Soarias or Claude Code directly to implement this:

Implement an audio player in SwiftUI for iOS 17+.
Use AVPlayer.
Make it accessible (VoiceOver labels on play/pause, seek bar, skip buttons).
Add a #Preview with realistic sample data (title, artist, 3-minute track duration).

In Soarias's Build phase, drop this prompt into the active session after scaffolding your screen list — Claude Code will wire up the AVPlayer integration, configure the audio session, and surface the component inside your existing navigation hierarchy without leaving the editor.

Related

FAQ

Does this work on iOS 16?

The AVPlayer logic works on iOS 16, but the @Observable macro requires iOS 17. To target iOS 16, swap @Observable for class AudioPlayerModel: ObservableObject, mark each property with @Published, and use @StateObject in the view. Everything else compiles unchanged.

How do I display Now Playing info on the Lock Screen?

Populate MPNowPlayingInfoCenter.default().nowPlayingInfo with a dictionary containing MPMediaItemPropertyTitle, MPMediaItemPropertyArtist, MPNowPlayingInfoPropertyElapsedPlaybackTime, and optionally MPMediaItemPropertyArtwork. Update the dictionary whenever playback state changes. Also register remote command handlers via MPRemoteCommandCenter.shared() so the Lock Screen play/pause button controls your player.

What is the UIKit equivalent?

In UIKit you would use AVPlayerViewController for a full-screen system player UI, or embed an AVPlayerLayer inside a custom UIView subclass and drive it with the same AVPlayer APIs. The SwiftUI approach above gives you the same control surface with far less boilerplate and first-class state binding.

Last reviewed: 2026-05-11 by the Soarias team.

```