SwiftUI provides a declarative way to build user interfaces across all Apple platforms. Creating an audio recording app involves handling audio sessions, recording audio, and managing file storage. In this blog post, we'll walk through the steps to build a basic audio recording app using SwiftUI and AVFoundation.
Prerequisites
Before you start, make sure you have:
- Xcode installed
- A basic understanding of SwiftUI and Swift
Setting up the Project
Create a new Xcode project, choosing the iOS App template, and make sure SwiftUI is selected as the interface.
Step 1: Importing AVFoundation
AVFoundation is Apple's framework for working with audio and video. Import it into your SwiftUI file.
import SwiftUI
import AVFoundation
Step 2: Creating the AudioRecorder Class
Create a class that manages the audio recording functionality. This class will handle setting up the audio session, starting and stopping the recording, and storing the recorded audio.
import AVFoundation
import Foundation
/// Manages microphone recording: configures the shared audio session,
/// requests permission, and starts/stops an `AVAudioRecorder`, publishing
/// the recording state to SwiftUI.
class AudioRecorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
/// True while a recording is in progress; drives the UI button state.
@Published var isRecording: Bool = false
private var audioRecorder: AVAudioRecorder?
// Non-optional `let` instead of an implicitly unwrapped optional:
// the shared session always exists, so there is nothing to unwrap.
private let recordingSession = AVAudioSession.sharedInstance()
/// Configures the session for simultaneous playback and recording and
/// asks the user for microphone permission.
override init() {
super.init()
do {
try recordingSession.setCategory(.playAndRecord, mode: .default)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission { allowed in
DispatchQueue.main.async {
if !allowed {
// Surface the denial instead of failing silently later.
print("Microphone permission denied")
}
}
}
} catch {
// Log instead of swallowing: without a session, recording cannot work.
print("Failed to set up recording session: \(error.localizedDescription)")
}
}
/// Starts a new recording into a timestamped .m4a file in the app's
/// Documents directory.
func startRecording() {
let fileName = "recording-\(Date().timeIntervalSince1970).m4a"
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let audioURL = documentsDirectory.appendingPathComponent(fileName)
// AAC, mono, 12 kHz: compact files that are adequate for voice.
let settings: [String: Any] = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
audioRecorder = try AVAudioRecorder(url: audioURL, settings: settings)
audioRecorder?.delegate = self
audioRecorder?.record()
isRecording = true
} catch {
print("Could not start recording: \(error.localizedDescription)")
isRecording = false
}
}
/// Stops the current recording, if any.
func stopRecording() {
audioRecorder?.stop()
isRecording = false
}
/// AVAudioRecorderDelegate: called when the system ends the recording
/// (e.g. after an interruption). Keeps the published state consistent.
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
stopRecording()
}
}
}
Explanation:
- AudioRecorder class: Manages the audio recording functionality.
- @Published var isRecording: Bool: A published property to track the recording state.
- AVAudioRecorder: Instance that handles the recording process.
- recordingSession: An AVAudioSession used to configure the audio session for recording.
- init(): Initializes the audio session, sets the category to .playAndRecord, and requests recording permission.
- startRecording(): Starts the audio recording, sets up the audio file, and configures the recording settings.
- stopRecording(): Stops the audio recording.
- audioRecorderDidFinishRecording(_:successfully:): Delegate method called when the recording finishes.
Step 3: Creating the SwiftUI View
Now, create the SwiftUI view that uses the AudioRecorder class to handle recording.
import SwiftUI
/// Root view: a single button that toggles audio recording on and off.
struct ContentView: View {
@ObservedObject var audioRecorder = AudioRecorder()
/// Button label reflecting the current recording state.
private var buttonTitle: String {
audioRecorder.isRecording ? "Stop Recording" : "Start Recording"
}
var body: some View {
VStack {
Button(buttonTitle) {
toggleRecording()
}
}
.padding()
}
/// Starts or stops recording depending on the current state.
private func toggleRecording() {
if audioRecorder.isRecording {
audioRecorder.stopRecording()
} else {
audioRecorder.startRecording()
}
}
}
// Xcode canvas preview for ContentView.
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
Explanation:
- @ObservedObject var audioRecorder: Creates an observed instance of the AudioRecorder class.
- Button: A button that toggles the recording state, calling startRecording() or stopRecording().
- The button's text updates based on the isRecording state.
Step 4: Adding Recording List
Display the recorded audio files in the ContentView.
import SwiftUI
import AVFoundation
import Foundation
/// Manages microphone recording and keeps a published list of the
/// recordings stored in the app's Documents directory.
class AudioRecorder: NSObject, ObservableObject, AVAudioRecorderDelegate {
/// True while a recording is in progress; drives the UI button state.
@Published var isRecording: Bool = false
/// Metadata for every .m4a file found in Documents, oldest first.
@Published var recordings: [Recording] = []
private var audioRecorder: AVAudioRecorder?
// Non-optional `let` instead of an implicitly unwrapped optional:
// the shared session always exists, so there is nothing to unwrap.
private let recordingSession = AVAudioSession.sharedInstance()
/// Configures the audio session, requests microphone permission, and
/// loads any recordings from previous launches.
override init() {
super.init()
do {
try recordingSession.setCategory(.playAndRecord, mode: .default)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission { allowed in
DispatchQueue.main.async {
if !allowed {
// Surface the denial instead of failing silently later.
print("Microphone permission denied")
}
}
}
} catch {
// Log instead of swallowing: without a session, recording cannot work.
print("Failed to set up recording session: \(error.localizedDescription)")
}
// Outside the do/catch so existing recordings still load even if
// session configuration fails.
fetchRecordings()
}
/// Starts a new recording into a timestamped .m4a file in Documents.
func startRecording() {
let fileName = "recording-\(Date().timeIntervalSince1970).m4a"
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let audioURL = documentsDirectory.appendingPathComponent(fileName)
// AAC, mono, 12 kHz: compact files that are adequate for voice.
let settings: [String: Any] = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
do {
audioRecorder = try AVAudioRecorder(url: audioURL, settings: settings)
audioRecorder?.delegate = self
audioRecorder?.record()
isRecording = true
} catch {
print("Could not start recording: \(error.localizedDescription)")
isRecording = false
}
}
/// Stops the current recording and refreshes the recordings list so the
/// new file appears in the UI.
func stopRecording() {
audioRecorder?.stop()
isRecording = false
fetchRecordings()
}
/// AVAudioRecorderDelegate: called when the system ends the recording
/// (e.g. after an interruption). Keeps the published state consistent.
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
stopRecording()
}
}
/// Scans the Documents directory for .m4a files and rebuilds `recordings`,
/// sorted by creation date (oldest first).
private func fetchRecordings() {
let fileManager = FileManager.default
let documentsDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
do {
let paths = try fileManager.contentsOfDirectory(at: documentsDirectory, includingPropertiesForKeys: [.creationDateKey], options: .skipsHiddenFiles)
recordings = paths
.filter { $0.pathExtension == "m4a" }
.map { url in
// Fix: FileManager has no `creationDate(forItemAtPath:)` method;
// read the creation date from the file's attributes instead.
let attributes = try? fileManager.attributesOfItem(atPath: url.path)
let createdAt = attributes?[.creationDate] as? Date ?? Date()
return Recording(fileURL: url, createdAt: createdAt, name: url.lastPathComponent)
}
.sorted { $0.createdAt < $1.createdAt }
} catch {
print("Could not fetch recordings: \(error)")
}
}
}
// Metadata for one recorded audio file, displayable in a SwiftUI List.
// Member order is part of the memberwise initializer's signature — do not reorder.
struct Recording: Identifiable {
let id = UUID() // Identity for ForEach; note it is regenerated on every fetch.
let fileURL: URL // Location of the .m4a file on disk.
let createdAt: Date // File creation date, used for sorting.
let name: String // File name shown in the UI.
}
Update the ContentView to list the recordings:
import SwiftUI
/// Root view: a record/stop button plus a list of saved recordings.
struct ContentView: View {
@ObservedObject var audioRecorder = AudioRecorder()
var body: some View {
NavigationView {
VStack {
recordButton
List {
ForEach(audioRecorder.recordings) { recording in
Text(recording.name)
}
}
}
.padding()
.navigationTitle("Audio Recorder")
}
}
/// Toggles recording; its label mirrors the current state.
private var recordButton: some View {
Button(audioRecorder.isRecording ? "Stop Recording" : "Start Recording") {
if audioRecorder.isRecording {
audioRecorder.stopRecording()
} else {
audioRecorder.startRecording()
}
}
}
}
// Xcode canvas preview for ContentView.
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
Explanations:
- Added a recordings array in AudioRecorder to hold recorded files' metadata.
- The fetchRecordings() method retrieves the audio files from the documents directory and populates the recordings array.
- Updated the ContentView to display a list of recordings using ForEach.
Step 5: Playing Audio Recordings
To play audio recordings, add a new class named AudioManager.
import AVFoundation
import Foundation
/// Plays back recorded audio files and publishes the playback state.
class AudioManager: NSObject, ObservableObject, AVAudioPlayerDelegate {
/// True while audio is playing; drives the play/stop button icon.
@Published var isPlaying: Bool = false
private var audioPlayer: AVAudioPlayer?
/// Starts playing the file at `audioURL`, replacing any current playback.
func startPlayback(audioURL: URL) {
do {
audioPlayer = try AVAudioPlayer(contentsOf: audioURL)
audioPlayer?.delegate = self
audioPlayer?.play()
isPlaying = true
} catch {
print("Playback failed: \(error.localizedDescription)")
isPlaying = false
}
}
/// Stops the current playback, if any.
func stopPlayback() {
audioPlayer?.stop()
isPlaying = false
}
/// AVAudioPlayerDelegate: the callback is not guaranteed to arrive on the
/// main thread, so hop to it before mutating the published property.
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
DispatchQueue.main.async {
self.isPlaying = false
}
}
}
Update ContentView to use AudioManager and play recordings when tapped:
import SwiftUI
/// Root view: a record/stop button plus a list of recordings, each with
/// its own playback control.
struct ContentView: View {
@ObservedObject var audioRecorder = AudioRecorder()
@ObservedObject var audioManager = AudioManager()
var body: some View {
NavigationView {
VStack {
recordButton
List {
ForEach(audioRecorder.recordings) { recording in
row(for: recording)
}
}
}
.padding()
.navigationTitle("Audio Recorder")
}
}
/// Toggles recording; its label mirrors the current state.
private var recordButton: some View {
Button(audioRecorder.isRecording ? "Stop Recording" : "Start Recording") {
if audioRecorder.isRecording {
audioRecorder.stopRecording()
} else {
audioRecorder.startRecording()
}
}
}
/// One list row: the recording's name and a play/stop button.
private func row(for recording: Recording) -> some View {
HStack {
Text(recording.name)
Spacer()
Button(action: {
if audioManager.isPlaying {
audioManager.stopPlayback()
} else {
audioManager.startPlayback(audioURL: recording.fileURL)
}
}) {
Image(systemName: audioManager.isPlaying ? "stop.fill" : "play.fill")
}
}
}
}
// Xcode canvas preview for ContentView.
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
Step 6: Requesting Permissions
Make sure you’ve added the necessary permission in your Info.plist
file:
- Privacy – Microphone Usage Description
Conclusion
This post demonstrated building a simple audio recording app using SwiftUI and AVFoundation. We covered setting up the audio session, recording audio, displaying recordings in a list, and playing back audio files. This serves as a basic framework for building more complex audio recording apps.