Unable to write a Remote I/O render callback

Car*_*ega 5 objective-c callback core-audio audiounit ios

I'm writing an iOS app that takes input from the microphone, runs it through a high-pass filter audio unit, and plays it back through the speaker. I've been able to do this successfully with the AUGraph API: I put two nodes in the graph, a Remote I/O unit and an effect audio unit (kAudioUnitType_Effect, kAudioUnitSubType_HighPassFilter), connected the output scope of the io unit's input element to the effect unit's input, and connected the effect node's output to the input scope of the io unit's output element.

But now I need to run some analysis on the processed audio samples, so I need direct access to the buffers. That means (correct me if I'm wrong) I can no longer use AUGraphConnectNodeInput to connect the effect node's output to the io unit's output element; instead I have to attach a render callback to the io unit's output element, so that I can get at the buffer whenever the speaker needs new samples. I've done that, but when I call AudioUnitRender inside the render callback I get a -50 error. I believe the two audio units have mismatched ASBDs, because I'm not doing anything about the stream formats in the render callback (the AUGraph used to take care of that). Here's the code:

AudioController.h:

@interface AudioController : NSObject
{
    AUGraph mGraph;
    AudioUnit mEffects;
    AudioUnit ioUnit;
}

@property (readonly, nonatomic) AudioUnit mEffects;
@property (readonly, nonatomic) AudioUnit ioUnit;

-(void)initializeAUGraph;
-(void)startAUGraph;
-(void)stopAUGraph;

@end

AudioController.mm:

@implementation AudioController

…

static OSStatus renderInput(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData)
{
    AudioController *THIS = (__bridge AudioController*)inRefCon;

    AudioBuffer buffer;

    AudioStreamBasicDescription fxOutputASBD;
    UInt32 fxOutputASBDSize = sizeof(fxOutputASBD);
    AudioUnitGetProperty([THIS mEffects], kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &fxOutputASBD, &fxOutputASBDSize);

    buffer.mDataByteSize = inNumberFrames * fxOutputASBD.mBytesPerFrame;
    buffer.mNumberChannels = fxOutputASBD.mChannelsPerFrame;
    buffer.mData = malloc(buffer.mDataByteSize);

    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0] = buffer;

    //TODO turn on ARM and fix the memory problem

    OSStatus result = AudioUnitRender([THIS mEffects], ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList);
    [THIS hasError:result:__FILE__:__LINE__];

    memcpy(ioData, buffer.mData, buffer.mDataByteSize);

    return noErr;
}


- (void)initializeAUGraph
{
    OSStatus result = noErr;

    // create a new AUGraph
    result = NewAUGraph(&mGraph);

    AUNode outputNode;
    AUNode effectsNode;

    AudioComponentDescription effects_desc;
    effects_desc.componentType = kAudioUnitType_Effect;
    effects_desc.componentSubType = kAudioUnitSubType_LowPassFilter;
    effects_desc.componentFlags = 0;
    effects_desc.componentFlagsMask = 0;
    effects_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponentDescription output_desc;
    output_desc.componentType = kAudioUnitType_Output;
    output_desc.componentSubType = kAudioUnitSubType_RemoteIO;
    output_desc.componentFlags = 0;
    output_desc.componentFlagsMask = 0;
    output_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Add nodes to the graph to hold the AudioUnits
    result = AUGraphAddNode(mGraph, &output_desc, &outputNode);
    [self hasError:result:__FILE__:__LINE__];
    result = AUGraphAddNode(mGraph, &effects_desc, &effectsNode );
    [self hasError:result:__FILE__:__LINE__];

    // Connect the effect node's output to the output node's input
    // This is no longer the case, as I need to access the buffer
    // result = AUGraphConnectNodeInput(mGraph, effectsNode, 0, outputNode, 0);
    [self hasError:result:__FILE__:__LINE__];

    // Connect the io node's input element (bus 1) output to the effects node's input
    result = AUGraphConnectNodeInput(mGraph, outputNode, 1, effectsNode, 0);
    [self hasError:result:__FILE__:__LINE__];

    // open the graph AudioUnits
    result = AUGraphOpen(mGraph);
    [self hasError:result:__FILE__:__LINE__];

    // Get a link to the effect AU
    result = AUGraphNodeInfo(mGraph, effectsNode, NULL, &mEffects);
    [self hasError:result:__FILE__:__LINE__];

    // Same for io unit
    result = AUGraphNodeInfo(mGraph, outputNode, NULL, &ioUnit);
    [self hasError:result:__FILE__:__LINE__];

    // Enable input on io unit
    UInt32 flag = 1;
    result = AudioUnitSetProperty(ioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flag, sizeof(flag));
    [self hasError:result:__FILE__:__LINE__];

    // Setup render callback struct
    AURenderCallbackStruct renderCallbackStruct;
    renderCallbackStruct.inputProc = &renderInput;
    renderCallbackStruct.inputProcRefCon = (__bridge void*)self;

    // Set a callback for the specified node's specified input
    result = AUGraphSetNodeInputCallback(mGraph, outputNode, 0, &renderCallbackStruct);
    [self hasError:result:__FILE__:__LINE__];

    // Get the fx unit's current input stream format...
    AudioStreamBasicDescription fxInputASBD;
    UInt32 sizeOfASBD = sizeof(AudioStreamBasicDescription);

    result = AudioUnitGetProperty(mEffects, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &fxInputASBD, &sizeOfASBD);
    [self hasError:result:__FILE__:__LINE__];

    // ...and set it on the output scope of the io unit's input element (bus 1)
    result = AudioUnitSetProperty(ioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  1,
                                  &fxInputASBD,
                                  sizeof(fxInputASBD));
    [self hasError:result:__FILE__:__LINE__];

    // Set fx unit's output sample rate, just in case
    Float64 sampleRate = 44100.0;

    result = AudioUnitSetProperty(mEffects,
                                  kAudioUnitProperty_SampleRate,
                                  kAudioUnitScope_Output,
                                  0,
                                  &sampleRate,
                                  sizeof(sampleRate));
    [self hasError:result:__FILE__:__LINE__];

    // Once everything is set up call initialize to validate connections
    result = AUGraphInitialize(mGraph);
    [self hasError:result:__FILE__:__LINE__];
}

@end

As I said before, I get the -50 error on the AudioUnitRender call, and I can hardly find any documentation about it.
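For what it's worth, this is the kind of explicit format alignment I suspect is missing: read the effect unit's output ASBD and set it on the input scope of the io unit's output element (bus 0), so that what my callback delivers matches what the speaker side expects. It would go in initializeAUGraph; the bus numbers are my own assumption and I haven't verified any of it yet, so treat it as a sketch rather than working code:

// Sketch (untested): make the io unit's output element expect the same
// stream format the effect unit produces, so the callback's buffers match.
AudioStreamBasicDescription fxOutputASBD;
UInt32 asbdSize = sizeof(fxOutputASBD);
result = AudioUnitGetProperty(mEffects, kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Output, 0,
                              &fxOutputASBD, &asbdSize);
[self hasError:result:__FILE__:__LINE__];

result = AudioUnitSetProperty(ioUnit, kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Input, 0,
                              &fxOutputASBD, sizeof(fxOutputASBD));
[self hasError:result:__FILE__:__LINE__];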

Any help is greatly appreciated.

Thanks to Tim Bolstad (http://timbolstad.com/2010/03/14/core-audio-getting-started/) for an excellent tutorial to get started from.

hot*_*aw2 0

There are simpler working examples of using RemoteIO to play audio buffers. Maybe start with one of those first, rather than with a graph.
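Something along the following lines is about as small as RemoteIO playback gets. Error checking is omitted and the 44.1 kHz, 16-bit interleaved stereo format is only an assumption; the point is just the shape: one output unit, one render callback that fills ioData, no graph.

#import <AudioToolbox/AudioToolbox.h>
#include <string.h>

// Render callback: whatever gets written into ioData is what the speaker
// plays. Here it is just silence; real samples (or a test tone) go here.
static OSStatus SilenceCallback(void *inRefCon,
                                AudioUnitRenderActionFlags *ioActionFlags,
                                const AudioTimeStamp *inTimeStamp,
                                UInt32 inBusNumber,
                                UInt32 inNumberFrames,
                                AudioBufferList *ioData)
{
    for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    return noErr;
}

static AudioUnit StartRemoteIOPlayback(void)
{
    // Find and instantiate the RemoteIO unit directly, without an AUGraph.
    AudioComponentDescription desc = {0};
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    AudioUnit unit = NULL;
    AudioComponentInstanceNew(comp, &unit);

    // Assumed format: 44.1 kHz, 16-bit signed integer, interleaved stereo.
    AudioStreamBasicDescription fmt = {0};
    fmt.mSampleRate       = 44100.0;
    fmt.mFormatID         = kAudioFormatLinearPCM;
    fmt.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    fmt.mChannelsPerFrame = 2;
    fmt.mBitsPerChannel   = 16;
    fmt.mBytesPerFrame    = 4;
    fmt.mFramesPerPacket  = 1;
    fmt.mBytesPerPacket   = 4;
    AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Input, 0, &fmt, sizeof(fmt));

    // Feed the output element (bus 0) from the callback above.
    AURenderCallbackStruct cb = { SilenceCallback, NULL };
    AudioUnitSetProperty(unit, kAudioUnitProperty_SetRenderCallback,
                         kAudioUnitScope_Input, 0, &cb, sizeof(cb));

    AudioUnitInitialize(unit);
    AudioOutputUnitStart(unit);
    return unit;
}

Once something this small is playing, the filter and the analysis tap can be added back one step at a time.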