计时问题:节拍器使用 AVAudioEngine scheduleBuffer 的完成处理程序



我想使用AVAudioEngine构建一个简单的节拍器应用程序,该应用程序具有以下功能:

  • 稳定计时(我知道,我知道,应该使用音频单元,但我仍然在为核心音频相关内容、Obj-C 封装等苦苦挣扎)
  • 两个不同的声音:在小节的第"1"拍播放一种声音,在第"2"/"3"/"4"拍播放另一种声音
  • 某种视觉反馈(至少显示当前节拍),并且需要与音频同步

所以我创建了两个短促的点击声(26 ms / 1150 个样本 @ 16 位 / 44.1 kHz / 立体声 WAV 文件),并将它们加载到 2 个缓冲区中。它们的长度会被设置为正好代表一个节拍周期。

我的UI设置很简单:一个切换开始/暂停的按钮和一个显示当前节拍的标签(我的"计数器"变量(。

当使用 scheduleBuffer 的循环选项时,计时是没问题的,但由于我需要两种不同的声音,还需要一种在点击循环播放时同步/更新 UI 的方法,所以不能用这个选项。我想改用 completionHandler 来重新触发我的 playClickLoop() 函数——请参阅下面附上的代码。

不幸的是,这样实现之后,计时并不准确。把 bpm 设置为 120 时,它实际只以大约 117.5 bpm 的速度循环——相当稳定,但还是太慢。把 bpm 设置为 180 时,应用的播放速度约为 172.3 bpm。

这是怎么回事?这种延迟是通过使用completionHandler引入的吗?有什么方法可以改善时间安排吗?还是我的整个方法都错了?

提前感谢!Alex

import UIKit
import AVFoundation
/// Question version: a metronome driven by `scheduleBuffer`'s completion
/// handler. Each click buffer is padded to exactly one beat period; the
/// completion handler re-schedules the next beat (which, as the thread
/// discusses, arrives too late for gapless playback).
class ViewController: UIViewController {

    // MARK: - Audio engine & player

    private let engine = AVAudioEngine()
    private let player = AVAudioPlayerNode()

    // MARK: - Click sounds

    private let fileName1 = "sound1.wav"   // accented click for beat 1
    private let fileName2 = "sound2.wav"   // regular click for beats 2, 3, 4
    private var file1: AVAudioFile! = nil
    private var file2: AVAudioFile! = nil
    private var buffer1: AVAudioPCMBuffer! = nil
    private var buffer2: AVAudioPCMBuffer! = nil

    private let sampleRate: Double = 44100

    private var bpm: Double = 180.0
    /// Number of sample frames in one beat period at the current tempo.
    private var periodLengthInSamples: Double { 60.0 / bpm * sampleRate }
    private var counter: Int = 0   // current beat, 1...4 (0 = stopped)

    private enum MetronomeState { case run; case stop }
    private var state: MetronomeState = .stop

    @IBOutlet weak var label: UILabel!

    override func viewDidLoad() {

        super.viewDidLoad()

        //
        // MARK: Loading buffer1
        //
        // Force-unwrap is acceptable here: a missing bundled resource is a
        // packaging error that should crash loudly during development.
        let path1 = Bundle.main.path(forResource: fileName1, ofType: nil)!
        let url1 = URL(fileURLWithPath: path1)
        do {
            file1 = try AVAudioFile(forReading: url1)
            buffer1 = AVAudioPCMBuffer(
                pcmFormat: file1.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file1.read(into: buffer1!)
            // Pad the short click to a full beat period so one buffer == one beat.
            buffer1.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer1 \(error)") }

        //
        // MARK: Loading buffer2
        //
        let path2 = Bundle.main.path(forResource: fileName2, ofType: nil)!
        let url2 = URL(fileURLWithPath: path2)
        do {
            file2 = try AVAudioFile(forReading: url2)
            buffer2 = AVAudioPCMBuffer(
                pcmFormat: file2.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file2.read(into: buffer2!)
            buffer2.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer2 \(error)") }

        //
        // MARK: Configure + start engine
        //
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: file1.processingFormat)
        engine.prepare()
        do { try engine.start() } catch { print(error) }
    }

    //
    // MARK: Play / Pause toggle action
    //
    // (Typo in the selector name is kept: it is wired up in the storyboard.)
    @IBAction func buttonPresed(_ sender: UIButton) {

        sender.isSelected = !sender.isSelected

        if player.isPlaying {
            state = .stop
        } else {
            state = .run

            try! engine.start()
            player.play()

            playClickLoop()
        }
    }

    /// Schedules one beat's buffer and, via the completion handler,
    /// re-enters itself while `state == .run` — producing the click loop.
    private func playClickLoop() {

        //
        // MARK: Completion handler
        //
        let scheduleBufferCompletionHandler = { [unowned self] /*(_: AVAudioPlayerNodeCompletionCallbackType)*/ in

            DispatchQueue.main.async {

                switch state {

                case .run:
                    self.playClickLoop()

                case .stop:
                    engine.stop()
                    player.stop()
                    counter = 0
                }
            }
        }

        //
        // MARK: Schedule buffer + play
        //
        if engine.isRunning {

            counter += 1; if counter > 4 { counter = 1 } // Counting from 1 to 4 only

            if counter == 1 {
                //
                // MARK: Playing sound1 on beat 1
                //
                player.scheduleBuffer(buffer1,
                                      at: nil,
                                      options: [.interruptsAtLoop],
                                      //completionCallbackType: .dataPlayedBack,
                                      completionHandler: scheduleBufferCompletionHandler)
            } else {
                //
                // MARK: Playing sound2 on beats 2, 3 & 4
                //
                player.scheduleBuffer(buffer2,
                                      at: nil,
                                      options: [.interruptsAtLoop],
                                      //completionCallbackType: .dataRendered,
                                      completionHandler: scheduleBufferCompletionHandler)
            }
            //
            // MARK: Display current beat on UILabel + to console
            //
            DispatchQueue.main.async {
                self.label.text = String(self.counter)
                print(self.counter)
            }
        }
    }
}

正如Phil Freihofner上面所建议的,以下是我自己问题的解决方案:

我学到的最重要的一课是:scheduleBuffer 命令提供的 completionHandler 回调触发得不够早,无法在第一个缓冲区仍在播放时就重新调度下一个缓冲区。这会导致声音之间出现(听不见的)间隙,从而打乱计时。必须始终有另一个缓冲区处于"待命"状态,也就是说,要在当前缓冲区播放期间就提前调度好下一个。

就完成回调的触发时机而言,使用 scheduleBuffer 的 completionCallbackType 参数并没有带来多大改变:设置为 .dataRendered 或 .dataConsumed 时,回调仍然太迟,来不及重新调度下一个缓冲区;而使用 .dataPlayedBack 只会让情况更糟 :-(

因此,为了实现无缝播放(并保证正确的计时!),我干脆启动了一个每个节拍周期触发两次的定时器。所有奇数次的定时器事件都会重新调度下一个缓冲区。

有时候解决方案简单得令人尴尬……但有时你必须先把几乎所有错误的方法都试一遍,才能找到它 ;-)

我的完整可用解决方案(包括两个声音文件和 UI)可以在 GitHub 上找到:

https://github.com/Alexander-Nagel/Metronome-using-AVAudioEngine

import UIKit
import AVFoundation
private let DEBUGGING_OUTPUT = true
/// Answer version: a metronome whose scheduling is driven by a repeating
/// `Timer` firing twice per beat. Odd-numbered timer events schedule the
/// next (already-queued) buffer; even-numbered events update the UI. The
/// first buffer is pre-loaded before the timer starts, so the player node
/// always has a spare buffer queued and playback stays gapless.
class ViewController: UIViewController {

    // MARK: - Audio graph

    private var engine = AVAudioEngine()
    private var player = AVAudioPlayerNode()
    // NOTE(review): `mixer` is never attached or connected — candidate for removal.
    private var mixer = AVAudioMixerNode()

    // MARK: - Click sounds

    private let fileName1 = "sound1.wav"   // accented click for beat 1
    private let fileName2 = "sound2.wav"   // regular click for beats 2, 3, 4
    private var file1: AVAudioFile! = nil
    private var file2: AVAudioFile! = nil
    private var buffer1: AVAudioPCMBuffer! = nil
    private var buffer2: AVAudioPCMBuffer! = nil

    private let sampleRate: Double = 44100

    private var bpm: Double = 133.33
    /// Number of sample frames in one beat period at the current tempo.
    private var periodLengthInSamples: Double {
        60.0 / bpm * sampleRate
    }
    private var timerEventCounter: Int = 1   // 1...8, two timer events per beat
    private var currentBeat: Int = 1         // 1...4, shown in the UI
    private var timer: Timer! = nil

    private enum MetronomeState { case running; case stopped }
    private var state: MetronomeState = .stopped

    @IBOutlet weak var beatLabel: UILabel!
    @IBOutlet weak var bpmLabel: UILabel!
    @IBOutlet weak var playPauseButton: UIButton!

    override func viewDidLoad() {

        super.viewDidLoad()

        bpmLabel.text = "\(bpm) BPM"

        setupAudio()
    }

    /// Loads both click buffers (each padded to one full beat period) and
    /// starts the audio engine.
    private func setupAudio() {

        //
        // MARK: Loading buffer1
        //
        // Force-unwrap is acceptable here: a missing bundled resource is a
        // packaging error that should crash loudly during development.
        let path1 = Bundle.main.path(forResource: fileName1, ofType: nil)!
        let url1 = URL(fileURLWithPath: path1)
        do {
            file1 = try AVAudioFile(forReading: url1)
            buffer1 = AVAudioPCMBuffer(
                pcmFormat: file1.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file1.read(into: buffer1!)
            // Pad the short click to a full beat period so one buffer == one beat.
            buffer1.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer1 \(error)") }

        //
        // MARK: Loading buffer2
        //
        let path2 = Bundle.main.path(forResource: fileName2, ofType: nil)!
        let url2 = URL(fileURLWithPath: path2)
        do {
            file2 = try AVAudioFile(forReading: url2)
            buffer2 = AVAudioPCMBuffer(
                pcmFormat: file2.processingFormat,
                frameCapacity: AVAudioFrameCount(periodLengthInSamples))
            try file2.read(into: buffer2!)
            buffer2.frameLength = AVAudioFrameCount(periodLengthInSamples)
        } catch { print("Error loading buffer2 \(error)") }

        //
        // MARK: Configure + start engine
        //
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: file1.processingFormat)
        engine.prepare()
        do { try engine.start() } catch { print(error) }
    }

    //
    // MARK: Play / Pause toggle action
    //
    // (Typo in the selector name is kept: it is wired up in the storyboard.)
    @IBAction func buttonPresed(_ sender: UIButton) {

        sender.isSelected = !sender.isSelected

        if state == .running {

            //
            // PAUSE: Stop timer and reset counters
            //
            state = .stopped

            timer.invalidate()

            timerEventCounter = 1
            currentBeat = 1

        } else {

            //
            // START: Pre-load first sound and start timer
            //
            state = .running

            scheduleFirstBuffer()

            startTimer()
        }
    }

    /// Starts a repeating timer that fires twice per beat period.
    /// Odd events (1, 3, 5, 7) schedule the NEXT buffer ahead of time;
    /// even events (2, 4, 6, 8) advance and display the beat counter.
    private func startTimer() {

        if DEBUGGING_OUTPUT {
            print("# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #  ")
            print()
        }

        //
        // Compute interval for 2 events per period and set up timer.
        // periodLengthInSamples / sampleRate == one beat in seconds.
        //
        let timerInterval = 0.5 * self.periodLengthInSamples / sampleRate

        // [weak self]: a repeating Timer retains its closure; without the weak
        // capture the view controller could never be deallocated.
        timer = Timer.scheduledTimer(withTimeInterval: timerInterval, repeats: true) { [weak self] timer in

            guard let self = self else { return }

            //
            // Only for debugging: Print counter values at start of timer event
            //
            if DEBUGGING_OUTPUT {
                print("timerEvent #\(self.timerEventCounter) at \(self.bpm) BPM")
                print("Entering \ttimerEventCounter: \(self.timerEventCounter) \tcurrentBeat: \(self.currentBeat) ")
            }

            //
            // Schedule next buffer at 1st, 3rd, 5th & 7th timerEvent
            //
            var bufferScheduled: String = "" // only needed for debugging / console output
            switch self.timerEventCounter {
            case 7:

                //
                // Schedule main (accented) sound — it will play as beat 1
                // of the NEXT bar, one half-period from now.
                //
                self.player.scheduleBuffer(self.buffer1, at: nil, options: [], completionHandler: nil)
                bufferScheduled = "buffer1"

            case 1, 3, 5:

                //
                // Schedule subdivision sound for beats 2, 3 & 4
                //
                self.player.scheduleBuffer(self.buffer2, at: nil, options: [], completionHandler: nil)
                bufferScheduled = "buffer2"

            default:
                bufferScheduled = ""
            }

            //
            // Display current beat & increase currentBeat (1...4) at 2nd, 4th, 6th & 8th timerEvent
            //
            if self.timerEventCounter % 2 == 0 {
                DispatchQueue.main.async {
                    self.beatLabel.text = String(self.currentBeat)
                }
                self.currentBeat += 1; if self.currentBeat > 4 { self.currentBeat = 1 }
            }

            //
            // Increase timerEventCounter, two events per beat.
            //
            self.timerEventCounter += 1; if self.timerEventCounter > 8 { self.timerEventCounter = 1 }

            //
            // Only for debugging: Print counter values at end of timer event
            //
            if DEBUGGING_OUTPUT {
                print("Exiting \ttimerEventCounter: \(self.timerEventCounter) \tcurrentBeat: \(self.currentBeat) \tscheduling: \(bufferScheduled)")
                print()
            }
        }
    }

    /// Queues the accented first beat BEFORE the timer starts, so the
    /// player node always has a buffer ready when each period ends.
    private func scheduleFirstBuffer() {

        player.stop()

        //
        // pre-load accented main sound (for beat "1") before trigger starts
        //
        player.scheduleBuffer(buffer1, at: nil, options: [], completionHandler: nil)
        player.play()
        beatLabel.text = String(currentBeat)
    }
}

非常感谢大家的帮助!这是一个很棒的社区。

Alex

用于获取度量的工具或过程的准确性如何?

我不能确定你的文件是否有正确数量的PCM帧,因为我不是C程序员。加载文件时,wav标头中的数据似乎已包含在内。这让我想知道,在每次播放或循环开始时,当标头信息被重复处理时,回放是否会产生一些延迟。

我曾成功地用 Java 构建过一个节拍器,它采用的方案是连续输出一个由读取 PCM 帧派生而来的无尽音频流。计时是通过对 PCM 帧计数实现的:根据所选节拍器设置对应的周期以及点击声的 PCM 帧长度,在静音(PCM 数据点 = 0)与点击声的 PCM 数据之间进行切换。

最新更新