Skip to content

Commit

Permalink
Merge pull request ysak-y#17 from DaveHineman/patch-1
Browse files Browse the repository at this point in the history
Update AudioCapture.swift
  • Loading branch information
ysak-y authored Dec 18, 2022
2 parents 2637ff4 + 06dfcd5 commit 04d68cd
Showing 1 changed file with 22 additions and 22 deletions.
44 changes: 22 additions & 22 deletions ios/Classes/AudioCapture.swift
Original file line number Diff line number Diff line change
@@ -1,30 +1,32 @@
import Foundation
import AVFoundation

class AudioCapture {
private let audioEngine: AVAudioEngine = AVAudioEngine()

init() {}
public class AudioCapture {
let audioEngine: AVAudioEngine = AVAudioEngine()
private var outputFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, sampleRate: 44100, channels: 2, interleaved: true)
/// Configures and activates the shared `AVAudioSession` for simultaneous
/// playback and recording, allowing speaker output, mixing with other
/// audio, and Bluetooth/AirPlay routes.
/// Configuration failures are logged and otherwise ignored (best-effort;
/// a non-failable `init` cannot propagate them).
init() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(
            .playAndRecord,
            mode: .default,
            options: [.defaultToSpeaker, .mixWithOthers, .allowBluetoothA2DP, .allowAirPlay, .allowBluetooth]
        )
        try session.setActive(true)
    } catch {
        // Best-effort by design: swallow and log, matching the original behavior.
        print(error)
    }
}

/// Tears down the capture pipeline when the object is deallocated:
/// detach the input tap, STOP the engine, then reset it.
/// Fix: the original reset the engine while it was still running and
/// stopped it afterwards; Apple's documented lifecycle is to stop the
/// engine (releasing hardware resources) before resetting its state.
deinit {
    audioEngine.inputNode.removeTap(onBus: 0)
    audioEngine.stop()
    audioEngine.reset()
}

public func startSession(bufferSize: UInt32, sampleRate: Double, cb: @escaping (_ buffer: Array<Float>) -> Void) throws {
audioEngine.inputNode.removeTap(onBus: 0)
audioEngine.reset()

let audioSession: AVAudioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSession.Category.playAndRecord,
mode: AVAudioSession.Mode.default,
options: [.allowBluetoothA2DP, .allowAirPlay, .allowBluetooth])
try audioSession.setActive(true)


let inputNode = audioEngine.inputNode

let inputFormat = inputNode.inputFormat(forBus: 0)

try! audioEngine.start()
inputNode.installTap(onBus: 0,
bufferSize: bufferSize,
format: inputFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
Expand All @@ -38,7 +40,8 @@ class AudioCapture {
var convertedBuffer: AVAudioPCMBuffer? = buffer
if let converter = AVAudioConverter(from: inputFormat, to: formatToConvert) {
convertedBuffer = AVAudioPCMBuffer(pcmFormat: formatToConvert,
frameCapacity: AVAudioFrameCount( formatToConvert.sampleRate * 0.4))
// frameCapacity: AVAudioFrameCount( formatToConvert.sampleRate * 0.4))
frameCapacity: AVAudioFrameCount(self.outputFormat!.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))
let inputBlock : AVAudioConverterInputBlock = { (inNumPackets, outStatus) -> AVAudioBuffer? in
outStatus.pointee = AVAudioConverterInputStatus.haveData
let audioBuffer : AVAudioBuffer = buffer
Expand All @@ -54,14 +57,11 @@ class AudioCapture {
cb(Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count:Int(buffer.frameLength))))
}
}

audioEngine.prepare()
try audioEngine.start()

}

/// Ends the current capture session: removes the input-bus tap, halts the
/// audio engine, and deactivates the shared audio session so other apps
/// are notified that audio playback may resume.
/// - Throws: Any error raised while deactivating the `AVAudioSession`.
public func stopSession() throws {
    audioEngine.inputNode.removeTap(onBus: 0)
    audioEngine.stop()
    try AVAudioSession.sharedInstance()
        .setActive(false, options: .notifyOthersOnDeactivation)
}
}

0 comments on commit 04d68cd

Please sign in to comment.