AVAudioPlayerNode

Links

Swift (Programmiersprache)#Swift - Audio
Swift (Programmiersprache)


Scheduled Audio for timing-accurate playback

You can use AVAudioEngine and AVAudioPlayerNode to schedule buffers for timing-accurate playback. AVAudioPlayerNode provides the method scheduleBuffer(_:at:options:completionHandler:), which lets you schedule a buffer to start playing at a specific time in the future.

Here is an example of how you can use AVAudioEngine and AVAudioPlayerNode to schedule buffers for timing accurate playback:

let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormat(forBus: 0))

// Prepare a buffer with your audio data
let buffer = AVAudioPCMBuffer(pcmFormat: player.outputFormat(forBus: 0), frameCapacity: 1024)!
// ... fill buffer with your audio data

// Schedule the buffer for playback at a specific player time
// (here 44100 samples, i.e. one second after the player starts at 44.1 kHz)
let sampleRate = player.outputFormat(forBus: 0).sampleRate
let sampleTime = AVAudioFramePosition(44100)
let startTime = AVAudioTime(sampleTime: sampleTime, atRate: sampleRate)
player.scheduleBuffer(buffer, at: startTime, options: .interrupts, completionHandler: nil)

try engine.start()
player.play()

In this example, it first creates an instance of AVAudioEngine and AVAudioPlayerNode, attaches the player to the engine, connects the player to the engine's main mixer node, and then creates a buffer with the audio data. Finally, it schedules the buffer for playback at a specific player time (here 44100 samples, i.e. one second after the player starts) by calling the player's scheduleBuffer(_:at:options:completionHandler:) method, starts the engine, and starts the player.

The at: parameter of scheduleBuffer(_:at:options:completionHandler:) takes an AVAudioTime that specifies when the buffer should start playing, measured in samples relative to the player's start. If you pass nil, the buffer starts playing as soon as possible.

It's worth noting that there is always some hardware output latency between a scheduled player time and the moment it becomes audible. This latency cannot be set to 0, but it can be read from engine.outputNode.presentationLatency and compensated for when converting player times to host times, as the metronome example further down does.
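As a minimal sketch (not from the original text; it assumes the engine and player from the example above are already running and mirrors the approach used in the metronome example below), converting a scheduled player time into a dispatch deadline and adding the output latency could look like this:

// Convert a player-relative time into an absolute node time, then add the
// hardware presentation latency before using it as a deadline for UI work
// (e.g. flashing a beat indicator when the scheduled audio becomes audible).
let playerTime = AVAudioTime(sampleTime: 44100, atRate: 44100)
if let nodeTime = player.nodeTime(forPlayerTime: playerTime) {
    let latencyTicks = AVAudioTime.hostTime(forSeconds: engine.outputNode.presentationLatency)
    let deadline = DispatchTime(uptimeNanoseconds: nodeTime.hostTime + latencyTicks)
    DispatchQueue.main.asyncAfter(deadline: deadline) {
        // update the UI here
    }
}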


Play in accurate intervals

How can this be used to play a sound at accurate intervals, for example 120 times a minute?

You can use AVAudioPlayerNode's scheduleBuffer(_:at:options:completionHandler:) method to schedule multiple buffers for playback at accurate intervals and achieve metronome-like functionality.

Here's an example of how you can use this to play a sound 120 times per minute:

let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
engine.attach(player)
engine.connect(player, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormat(forBus: 0))

// Prepare a buffer with your audio data
let buffer = AVAudioPCMBuffer(pcmFormat: player.outputFormat(forBus: 0), frameCapacity: 1024)!
// ... fill buffer with your audio data

// Schedule the buffer for playback at 120 clicks per minute.
// Player times are measured in samples relative to the player's start.
let sampleRate = player.outputFormat(forBus: 0).sampleRate // e.g. 44100
let samplesPerMinute = AVAudioFramePosition(sampleRate * 60)
let samplesPerClick = samplesPerMinute / 120

var nextClickSampleTime: AVAudioFramePosition = 0
for _ in 0..<120 {
    let clickTime = AVAudioTime(sampleTime: nextClickSampleTime, atRate: sampleRate)
    player.scheduleBuffer(buffer, at: clickTime, options: [], completionHandler: nil)
    nextClickSampleTime += samplesPerClick
}

try engine.start()
player.play()

In this example, it first creates an instance of AVAudioEngine and AVAudioPlayerNode, attaches the player to the engine, and connects the player to the engine's main mixer node. Then it creates a buffer with the audio data to be played. After that, it calculates the number of samples per minute and the number of samples per click from the sample rate of the audio format (44100 samples per second) and the desired number of clicks per minute (120). Starting at player time 0, it schedules 120 copies of the buffer with the scheduleBuffer(_:at:options:completionHandler:) method, advancing nextClickSampleTime by samplesPerClick for each one, and finally starts the engine and the player.

Because scheduleBuffer(_:at:options:completionHandler:) lets you specify the exact sample time at which each buffer should start playing, scheduling each buffer at nextClickSampleTime produces playback at sample-accurate intervals.
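Instead of scheduling all clicks up front, you can also re-schedule from the completion handler, which is the approach the metronome example below uses. A minimal sketch (the helper function is hypothetical; it assumes the engine, player, buffer, sampleRate and samplesPerClick from the example above):

var nextClickSampleTime: AVAudioFramePosition = 0

func scheduleNextClick() {
    let clickTime = AVAudioTime(sampleTime: nextClickSampleTime, atRate: sampleRate)
    // The completion handler fires once this buffer has been consumed by the
    // player; schedule the following click from there so the queue never runs dry.
    player.scheduleBuffer(buffer, at: clickTime, options: []) {
        scheduleNextClick()
    }
    nextClickSampleTime += samplesPerClick
}

scheduleNextClick()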

How do I fill the buffer with audio data?

To fill the buffer with audio data, you can use the AVAudioFile class to open an audio file on disk, and then use AVAudioFile's read(into:) method to copy the file's audio data into the buffer.

Here's an example of how you can fill an AVAudioPCMBuffer with audio data from an audio file:

let fileURL = Bundle.main.url(forResource: "audioFile", withExtension: "caf")!
let file = try AVAudioFile(forReading: fileURL)

let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: AVAudioFrameCount(file.length))!
try file.read(into: buffer)

In this example, it first constructs the URL of the audio file, then creates an AVAudioFile from that URL, then creates an AVAudioPCMBuffer with the file's processing format and enough frame capacity to hold the whole file, and finally reads the file's data into the buffer with the file's read(into:) method.

You can also use AVAudioPCMBuffer's floatChannelData property, which exposes the buffer's audio data as per-channel pointers of type UnsafeMutablePointer<Float>. This is useful if you want to generate audio data programmatically, for example to create a sine wave.

let format = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 1)!
let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 44100)!
buffer.frameLength = buffer.frameCapacity // mark all frames as valid
let data = buffer.floatChannelData![0]

// fill the buffer with one second of a 440 Hz sine wave
for i in 0..<Int(buffer.frameLength) {
    data[i] = sin(Float(i) / 44100 * 2 * Float.pi * 440)
}

Examples

Sine Wave Example

import AVFoundation

let ae = AVAudioEngine()
let player = AVAudioPlayerNode()
let mixer = ae.mainMixerNode
let buffer = AVAudioPCMBuffer(pcmFormat: player.outputFormat(forBus: 0), frameCapacity: 100)!
buffer.frameLength = 100

// generate a 441 Hz sine wave into channel 0 of the deinterleaved buffer
let sr: Float = Float(mixer.outputFormat(forBus: 0).sampleRate)
for i in 0..<Int(buffer.frameLength) {
    let val = sinf(441.0 * Float(i) * 2 * Float(Double.pi) / sr)
    buffer.floatChannelData?.pointee[i] = val * 0.5
}

// set up the audio engine
ae.attach(player)
ae.connect(player, to: mixer, format: player.outputFormat(forBus: 0))
do {
    try ae.start()
} catch {
    print(error)
}

// play the player and loop the buffer
player.play()
player.scheduleBuffer(buffer, at: nil, options: .loops, completionHandler: nil)

Metronome Example

In this example, the audio buffers are filled by a custom triangle waveform generator.

//
//  MetronomeBrain.swift
//  Beatsaver
//
//  Created by Stephan Schlegel on 05.01.23.


import UIKit
import AudioToolbox
import AVFoundation

struct GlobalConstants {
    static let kBipDurationSeconds: Float32 = 0.020 // length of bip
    static let kTempoChangeResponsivenessSeconds: Float32 = 0.250
}

protocol MetronomeDelegate {
    func metronomeTicking(_ metronome: Metronome, bar: Int32, beat: Int32)
    func metronomeChangedTempo(_ metronome: Metronome)
    func metronomeDidStop(_ metronome: Metronome)
    func metronomeDidStart(_ metronome: Metronome)
    func metronomeShouldStop(_ metronome: Metronome) // called when countIn is reached
}

class Metronome{
    
    var countInMode = false
    var countInBars = 3
    var bar: Int32 = 0 // actual number of bars
    var beat: Int32 = 0 // number of beat in bar
    var signatureCounter: Int32 = 4 // time signature numerator (beats per bar), e.g. 4 in 4/4
    var signatureDenominator: Int32 = 4 // time signature denominator, e.g. 4 in 4/4
    var clickCount = 0 // obsolete
    var tempoBPM: Float32 = 120.0
    var interval: TimeInterval { return TimeInterval( (60 / self.tempoBPM) / 2 ) } // half the interval between two beats

    
    // vars for audio engine
    var engine: AVAudioEngine = AVAudioEngine()
    var player: AVAudioPlayerNode = AVAudioPlayerNode()    // owned by engine
    var soundBuffer = [AVAudioPCMBuffer?]()
    var bufferNumber: Int = 0
    var bufferSampleRate: Float64 = 0.0
    var syncQueue: DispatchQueue? = nil
    var beatNumber: Int32 = 0
    //var barNumber: Int32 { beatNumber / signatureDenominator}
    var nextBeatSampleTime: Float64 = 0.0
    var beatsToScheduleAhead: Int32 = 0     // controls responsiveness to tempo changes
    var beatsScheduled: Int32 = 0
    var isPlaying: Bool = false // metronome playing sounds
    var playerStarted: Bool = false // audio engine started

    var delegate: MetronomeDelegate?
    
    /**
     OBSOLETE - replaced by delegate.
     targetView was used as a reference to the view that uses the metronome.
     This way we could manipulate the visuals in the target, e.g. animate the background.
     Todo: maybe the delegate pattern would be the better way.
     */
//    var targetView: UIView?
//    var beatColor = UIColor(hue: 10/255, saturation: 96/255, brightness: 79/255, alpha: 1)
    
    init() {
        // borrowed from Apple's HelloMetronome sample
        // Create a standard audio format (deinterleaved float).
        let format = AVAudioFormat(standardFormatWithSampleRate: 44100.0, channels: 2)
        // How many audio frames?
        let bipFrames: UInt32 = UInt32(GlobalConstants.kBipDurationSeconds * Float(format!.sampleRate))
        
        // Create the PCM buffers.
        soundBuffer.append(AVAudioPCMBuffer(pcmFormat: format!, frameCapacity: bipFrames))
        soundBuffer.append(AVAudioPCMBuffer(pcmFormat: format!, frameCapacity: bipFrames))
        // Fill in the number of valid sample frames in the buffers (required).
        soundBuffer[0]?.frameLength = bipFrames
        soundBuffer[1]?.frameLength = bipFrames
        // Generate the metronome bips; the first buffer will be A440 and the second buffer middle C.
        let wg1 = TriangleWaveGenerator(sampleRate: Float(format!.sampleRate)) // A 440
        let wg2 = TriangleWaveGenerator(sampleRate: Float(format!.sampleRate), frequency: 261.6) // Middle C
        wg1.render(soundBuffer[0]!)
        wg2.render(soundBuffer[1]!)
        
        // Connect player -> output, with the format of the buffers we're playing.
        let output: AVAudioOutputNode = engine.outputNode
        engine.attach(player)
        engine.connect(player, to: output, fromBus: 0, toBus: 0, format: format)
        bufferSampleRate = format!.sampleRate
        // Create a serial dispatch queue for synchronizing callbacks.
        syncQueue = DispatchQueue(label: "Metronome")
        
        self.setTempo(self.roundToHalfSteps(value: self.tempoBPM))
    }
    
    deinit {
        self.stop()
        engine.detach(player)
        soundBuffer[0] = nil
        soundBuffer[1] = nil
    }
    
    func scheduleBeats() {
        if (!isPlaying) { return }
        while (beatsScheduled < beatsToScheduleAhead) {

            // Schedule the beat.
            let secondsPerBeat = 60.0 / tempoBPM
            let samplesPerBeat = Float(secondsPerBeat * Float(bufferSampleRate))
            let beatSampleTime: AVAudioFramePosition = AVAudioFramePosition(nextBeatSampleTime)
            let playerBeatTime: AVAudioTime = AVAudioTime(sampleTime: AVAudioFramePosition(beatSampleTime), atRate: bufferSampleRate)
            // This time is relative to the player's start time.
            player.scheduleBuffer(soundBuffer[bufferNumber]!, at: playerBeatTime, options: AVAudioPlayerNodeBufferOptions(rawValue: 0), completionHandler: {
                self.syncQueue!.sync() {
                    self.beatsScheduled -= 1
                    //self.bufferNumber ^= 1 // xor 1 -> meaning switching 1 / 0
                    print(self.beat)
                    if self.beat == self.signatureDenominator {
                        // next buffer will be first beat
                        self.bufferNumber = 0
                    }else {
                        self.bufferNumber = 1
                    }
                    //self.bufferNumber = 0
                    self.scheduleBeats() // schedule next beat
                }
            })
            beatsScheduled += 1
            if (!playerStarted) {
                // We defer the starting of the player so that the first beat will play precisely
                // at player time 0. Having scheduled the first beat, we need the player to be running
                // in order for nodeTimeForPlayerTime to return a non-nil value.
                player.play()
                playerStarted = true
            }
            
            // Schedule the delegate callback (metronomeTicking:bar:beat:) if necessary.
            let callbackBeat = beatNumber
            beatNumber += 1
            if delegate?.metronomeTicking != nil {
                let nodeBeatTime: AVAudioTime = player.nodeTime(forPlayerTime: playerBeatTime)!
                let output: AVAudioIONode = engine.outputNode
                //print(" \(playerBeatTime), \(nodeBeatTime), \(output.presentationLatency)")
                let latencyHostTicks: UInt64 = AVAudioTime.hostTime(forSeconds: output.presentationLatency)
                let dispatchTime = DispatchTime(uptimeNanoseconds: nodeBeatTime.hostTime + latencyHostTicks)
                DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: dispatchTime) {
                    if (self.isPlaying) {
                        self.bar = (callbackBeat / self.signatureCounter) + 1
                        self.beat = (callbackBeat % self.signatureCounter) + 1
                        self.delegate!.metronomeTicking(self, bar: self.bar, beat: self.beat)
                        if(self.countInMode == true && self.bar > self.countInBars){
                            self.delegate!.metronomeShouldStop(self)
                        }
                    }
                }
            }
            nextBeatSampleTime += Float64(samplesPerBeat)
        }
    }
    
    @discardableResult func start() -> Bool {
        // Hello Metronome Apple Start
        // Start the engine without playing anything yet.
        do {
            try engine.start()
            isPlaying = true
            nextBeatSampleTime = 0
            beatNumber = 0
            bufferNumber = 0
            self.syncQueue!.sync() {
                self.scheduleBeats()
            }
            delegate?.metronomeDidStart(self)
            return true
        } catch {
            print("\(error)")
            return false
        }
    }
    
    func stop() {
        isPlaying = false;
        /* Note that pausing or stopping all AVAudioPlayerNode's connected to an engine does
         NOT pause or stop the engine or the underlying hardware.
         The engine must be explicitly paused or stopped for the hardware to stop.
        */
        player.stop()
        player.reset()
        /* Stop the audio hardware and the engine and release the resources allocated by the prepare method.
         Note that pause will also stop the audio hardware and the flow of audio through the engine, but
         will not deallocate the resources allocated by the prepare method.
         It is recommended that the engine be paused or stopped (as applicable) when not in use,
         to minimize power consumption.
        */
        engine.stop()
        playerStarted = false
        delegate?.metronomeDidStop(self)
    }
    
    /**
    Obsolete: performs a simple system click (bad timing).
    */
    @objc private func click(){
        clickCount += 1
        //print("Metronome::click \(clickCount)")
        AudioServicesPlaySystemSound(1104) // click
    }
    
    func setFrequency(value: Float32){
        //print("Metronome::setFrequency(Float)")
        // TODO REPLACE WITH SETTEMPO
        let frequency = self.roundToHalfSteps(value: value)
        self.setTempo(Float(frequency))
    }
    
    func setTempo(_ tempo: Float32) {
        print("Metronome::setTempo")
        tempoBPM = tempo
        let secondsPerBeat: Float32 = 60.0 / tempoBPM
        beatsToScheduleAhead = Int32(GlobalConstants.kTempoChangeResponsivenessSeconds / secondsPerBeat)
        if (beatsToScheduleAhead < 1) { beatsToScheduleAhead = 1 }
        delegate?.metronomeChangedTempo(self)
    }
    
    func roundToHalfSteps(value: Float) -> Float {
        let steps = value / 0.5
        let roundedSteps = round(steps)
        return roundedSteps * 0.5
    }

}
//
//  TWGenerator.swift
//  BeatSheet
//
//  Created by Stephan Schlegel on 21.01.23.
//  Abstract:
//  Triangle Wave Generator
//

import Foundation
import AVFoundation

class TriangleWaveGenerator : NSObject {
    var mSampleRate: Float = 44100.0
    var mFreqHz: Float = 440.0
    var mAmplitude: Float = 0.25
    var mFrameCount: Float = 0.0

    override init() {
        super.init()
    }

    convenience init(sampleRate: Float) {
        self.init(sampleRate: sampleRate, frequency: 440.0, amplitude: 0.25)
    }

    convenience init(sampleRate: Float, frequency: Float) {
        self.init(sampleRate: sampleRate, frequency: frequency, amplitude: 0.25)
    }

    init(sampleRate: Float, frequency: Float, amplitude: Float) {
        super.init()
        self.mSampleRate = sampleRate
        self.mFreqHz = frequency
        self.mAmplitude = amplitude
    }
    
    func render(_ buffer: AVAudioPCMBuffer) {
        print("Buffer: \(buffer.format.description) \(buffer.description)\n")
        let nFrames = buffer.frameLength
        let nChannels = buffer.format.channelCount
        let isInterleaved = buffer.format.isInterleaved
        let amp = mAmplitude
        let phaseStep = mFreqHz / mSampleRate;
        if (isInterleaved) {
            var ptr = buffer.floatChannelData?[0]
            for frame in 0 ..< nFrames {
                let phase = fmodf(Float(frame) * phaseStep, 1.0)
                let value = (fabsf(2.0 - 4.0 * phase) - 1.0) * amp;
                for _ in 0 ..< nChannels {
                    ptr?.pointee = value;
                    ptr = ptr?.successor()
                }
            }
        } else {
            for ch in 0 ..< nChannels {
                var ptr = buffer.floatChannelData?[Int(ch)]
                for frame in 0 ..< nFrames {
                    let phase = fmodf(Float(frame) * phaseStep, 1.0)
                    let value = (fabsf(2.0 - 4.0 * phase) - 1.0) * amp;
                    ptr?.pointee = value
                    ptr = ptr?.successor()
                }
            }
        }
        mFrameCount = Float(nFrames);
    }
}
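
A minimal usage sketch (not part of the original listing; the view-controller class and label are hypothetical): a view controller adopts MetronomeDelegate, starts the metronome, and updates a label on each tick.

import UIKit

class MetronomeViewController: UIViewController, MetronomeDelegate {

    let metronome = Metronome()
    @IBOutlet weak var beatLabel: UILabel!   // hypothetical label in the storyboard

    override func viewDidLoad() {
        super.viewDidLoad()
        metronome.delegate = self
        metronome.setTempo(120.0)
        metronome.start()
    }

    // MetronomeDelegate
    func metronomeTicking(_ metronome: Metronome, bar: Int32, beat: Int32) {
        // the callback arrives on a background queue; hop to the main queue for UI work
        DispatchQueue.main.async {
            self.beatLabel.text = "\(bar).\(beat)"
        }
    }

    func metronomeChangedTempo(_ metronome: Metronome) {}
    func metronomeDidStop(_ metronome: Metronome) {}
    func metronomeDidStart(_ metronome: Metronome) {}
    func metronomeShouldStop(_ metronome: Metronome) {
        metronome.stop()
    }
}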