Tags: avaudioplayer, ios, avaudiosession, swift, avaudioengine
I recently posted this question about using multi-route on iOS, which I thought I had solved, but it turns out it doesn't quite work: AVAudioEngine Multichannel mapping
The problem I'm having is that multi-route only works for the first two output channels. I'm trying to get it working with a 4-channel audio interface.
I have managed to route audio to each output of the USB interface using AVAudioPlayer:
var avplayer = AVAudioPlayer()

@IBAction func avAudioPlayerPlay(_ sender: Any)
{
    let audioSession = AVAudioSession.sharedInstance()
    let route = audioSession.currentRoute

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    // audio interface + headphone jack
    let outputs: [AVAudioSessionChannelDescription] = [
        route.outputs[0].channels![2], // 3rd channel on Audio Interface
        route.outputs[1].channels![1]  // Right Channel of Headphones
    ]

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)

    do
    {
        avplayer = try AVAudioPlayer(contentsOf: fileURL)
    }
    catch
    {
        print("play error", error)
        return
    }

    avplayer.channelAssignments = outputs

    let result = avplayer.play()
    print(result)
}
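For reference, the hard-coded channel indices above come from inspecting the active route by hand. A minimal sketch of that inspection, assuming the .multiRoute session is already active (the helper name listAvailableOutputChannels is just for illustration):

import AVFoundation

// Print every output port and channel on the current route so the
// channelAssignments indices above can be chosen by hand.
func listAvailableOutputChannels()
{
    let route = AVAudioSession.sharedInstance().currentRoute
    for (portIndex, port) in route.outputs.enumerated()
    {
        print("output[\(portIndex)]: \(port.portName)")
        for (channelIndex, channel) in (port.channels ?? []).enumerated()
        {
            print("  channels[\(channelIndex)]: \(channel.channelName), label: \(channel.channelLabel)")
        }
    }
}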
However, I can't get it to work with AVAudioEngine:
private func getOutputChannelMapIndices(_ names: [String?]) -> [Int]
{
    let session = AVAudioSession.sharedInstance()
    let route = session.currentRoute
    let outputPorts = route.outputs

    var channelMapIndices: [Int] = []

    for name in names
    {
        var chIndex = 0
        for outputPort in outputPorts
        {
            guard let channels = outputPort.channels else
            {
                continue
            }
            for channel in channels
            {
                print(channel.channelName)
                if channel.channelName == name
                {
                    if names.count > channelMapIndices.count
                    {
                        channelMapIndices.append(chIndex)
                    }
                }
                chIndex += 1
            }
        }
    }
    return channelMapIndices
}

@IBAction func nodesPlay(_ sender: Any)
{
    let channelNames = [
        "UMC204HD 192k 3",
        "Headphones Left",
        "Headphones Right",
        nil
    ]

    let audioSession = AVAudioSession.sharedInstance()

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    let channelMapIndices = getOutputChannelMapIndices(channelNames)
    print("channelMapIndices: ", channelMapIndices)

    engine = AVAudioEngine()
    output = engine.outputNode
    mixer = engine.mainMixerNode
    player = AVAudioPlayerNode()

    engine.attach(player)

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)
    let file = try! AVAudioFile(forReading: fileURL)

    let outputNumChannels = output.outputFormat(forBus: 0).channelCount
    print("outputNumChannels:", outputNumChannels)

    var outputChannelMap: [Int] = Array(repeating: -1, count: Int(outputNumChannels))

    let numberOfSourceChannels = file.processingFormat.channelCount
    print("numberOfSourceChannels: ", numberOfSourceChannels)

    var sourceChIndex = 0
    for chIndex in channelMapIndices
    {
        if chIndex < outputNumChannels && sourceChIndex < numberOfSourceChannels
        {
            outputChannelMap[chIndex] = sourceChIndex
            sourceChIndex += 1
        }
    }
    print("outputChannelMap: ", outputChannelMap)

    if let au = output.audioUnit
    {
        let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))
        print("propSize:", propSize)
        let result = AudioUnitSetProperty(au, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Global, 0, &outputChannelMap, propSize)
        print("result: ", result)
    }

    let channelLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | UInt32(numberOfSourceChannels))
    let format = AVAudioFormat(streamDescription: file.processingFormat.streamDescription, channelLayout: channelLayout)

    engine.connect(player, to: mixer, format: format)
    engine.connect(mixer, to: output, format: format)

    player.scheduleFile(file, at: nil, completionHandler: nil)

    do
    {
        try engine.start()
    }
    catch
    {
        print("can't start", error)
        return
    }

    player.play()
}
I would appreciate it if someone could explain why I can't seem to play any audio to outputs 3 or 4.
Note that much of this code was adapted from here: https://forums.developer.apple.com/thread/15416
I believe the problem lies with the line
let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))
This gives the size of the array object itself, which is essentially the size of a pointer, not the size of the elements contained in the array. See the discussion in the Apple documentation.
The property size should be the number of channels in the array multiplied by the size of Int32, because AudioUnitSetProperty is a C API and that is the size of the corresponding C array:
let propSize = UInt32(MemoryLayout<Int32>.stride * outputChannelMap.count)
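To see the difference concretely, a small standalone check (the values are hypothetical; on a 64-bit device the Array value itself is pointer-sized):

let map: [Int32] = [-1, -1, 0, 1]

// Size of the Array value itself (one buffer reference), not of its elements.
print(MemoryLayout.size(ofValue: map))           // 8 on a 64-bit device

// Size of the corresponding C array of Int32, which is what the property needs.
print(MemoryLayout<Int32>.stride * map.count)    // 16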
You should also declare outputChannelMap as an array of Int32, since that is the type kAudioOutputUnitProperty_ChannelMap expects:
var outputChannelMap:[Int32] = Array(repeating: -1, count: Int(outputNumChannels))
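Putting both changes together, the channel-map block could be pulled into a helper roughly like the sketch below (the function name applyChannelMap is hypothetical, and the indices are assumed to come from getOutputChannelMapIndices(_:) in the question):

import AudioToolbox

// Build an Int32 channel map from the requested output-channel indices and
// apply it to the output audio unit, sizing the property as a C array of Int32.
func applyChannelMap(to outputUnit: AudioUnit,
                     channelMapIndices: [Int],
                     outputChannelCount: Int,
                     sourceChannelCount: Int) -> OSStatus
{
    var outputChannelMap: [Int32] = Array(repeating: -1, count: outputChannelCount)

    var sourceChIndex: Int32 = 0
    for chIndex in channelMapIndices
    {
        if chIndex < outputChannelCount && Int(sourceChIndex) < sourceChannelCount
        {
            outputChannelMap[chIndex] = sourceChIndex
            sourceChIndex += 1
        }
    }

    // Size of the corresponding C array: element count * size of Int32.
    let propSize = UInt32(MemoryLayout<Int32>.stride * outputChannelMap.count)

    return AudioUnitSetProperty(outputUnit,
                                kAudioOutputUnitProperty_ChannelMap,
                                kAudioUnitScope_Global,
                                0,
                                &outputChannelMap,
                                propSize)
}

In nodesPlay this would replace the "if let au = output.audioUnit" block, for example: let status = applyChannelMap(to: au, channelMapIndices: channelMapIndices, outputChannelCount: Int(outputNumChannels), sourceChannelCount: Int(numberOfSourceChannels)).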