2012-12-01 3 views
6

변경 없이 마이크에서 수신한 사운드를 스피커로 그대로 전달하는 iOS 애플리케이션을 작성하려고 합니다. 애플의 문서와 가이드를 읽었고, 이 guide에서 첫 번째 패턴을 선택했습니다. 그러나 아무 일도 일어나지 않습니다. 보시다시피 AUGraph(주석 처리됨)를 사용해 보기도 했지만 결과는 동일했습니다(이 간단한 예제에서 AUGraph가 필요한가요?). [제목: iOS AudioUnits 통과(pass-through)]

콜백이 사용되는 인터넷에서 몇 가지 예를 보았지만 사용하고 싶지 않습니다. 가능한가?

제안 사항? 감사합니다.

warrenm으로

#import "AudioController.h" 
#import <AudioToolbox/AudioToolbox.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AudioToolbox/AudioServices.h> 
#define kInputBus 1 
#define kOutputBus 0 

@interface AudioController() 
{ 
    AudioComponentDescription desc; 
    AudioComponent component; 
    AudioUnit unit; 
    AudioStreamBasicDescription audioFormat; 
    double rate; 
    //AUGraph graph; 
} 


@end 

@implementation AudioController 

// Configures the shared audio session for simultaneous record + playback,
// then creates and initializes a RemoteIO audio unit with both its input
// (mic) and output (speaker) elements enabled.
// NOTE(review): no connection is made between the input element and the
// output element here, so no audio flows yet — see the accepted answer
// later in this file for the kAudioUnitProperty_MakeConnection call.
- (void) setUp { 
    AVAudioSession *sess = [AVAudioSession sharedInstance]; 
    NSError *error = nil; 
    rate = 44100.0; 
    // Check the BOOL return value of each call, not the error pointer:
    // the NSError out-parameter is only meaningful when a call fails.
    if (![sess setPreferredSampleRate:rate error:&error]) { 
        NSLog(@"%@", error); 
    } 
    if (![sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]) { 
        NSLog(@"%@", error); 
    } 
    if (![sess setActive:YES error:&error]) { 
        NSLog(@"%@", error); 
    } 
    // The hardware may not honor the preferred rate; use the actual one.
    rate = [sess sampleRate]; 

    NSLog(@"Init..."); 
    [self createUnitDesc]; 
    [self getComponent]; 
    [self getAudioUnit]; 
    [self enableIORec]; 
    [self enableIOPb]; 
    [self createFormat]; 
    [self applyFormat]; 
    OSStatus err = AudioUnitInitialize(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Fills in the component description identifying Apple's RemoteIO unit.
- (void) createUnitDesc { 
    desc.componentType = kAudioUnitType_Output; 
    desc.componentSubType = kAudioUnitSubType_RemoteIO; 
    desc.componentFlags = 0; 
    desc.componentFlagsMask = 0; 
    desc.componentManufacturer = kAudioUnitManufacturer_Apple; 
} 

// Locates the audio component matching the description built above.
- (void) getComponent { 
    component = AudioComponentFindNext(NULL, &desc); 
    if (NULL == component) { 
        // AudioComponentFindNext returns NULL when nothing matches;
        // every later call would silently fail without this check.
        NSLog(@"No audio component matches the RemoteIO description"); 
    } 
} 

// Instantiates the RemoteIO audio unit from the located component.
- (void) getAudioUnit { 
    OSStatus res = AudioComponentInstanceNew(component, &unit); 
    if (noErr != res) { 
        [self showStatus:res]; 
    } 
} 

// Enables recording on the input element (bus 1) of the RemoteIO unit.
- (void) enableIORec { 
    UInt32 flag = 1; 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioOutputUnitProperty_EnableIO, 
                                        kAudioUnitScope_Input, 
                                        kInputBus, 
                                        &flag, 
                                        sizeof(flag)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Enables playback on the output element (bus 0) of the RemoteIO unit.
- (void) enableIOPb { 
    UInt32 flag = 1; 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioOutputUnitProperty_EnableIO, 
                                        kAudioUnitScope_Output, 
                                        kOutputBus, 
                                        &flag, 
                                        sizeof(flag)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Describes the stream format: mono, 16-bit signed-integer, packed
// linear PCM at the session's sample rate (1 frame per packet).
- (void) createFormat { 
    audioFormat.mSampleRate   = rate; 
    audioFormat.mFormatID   = kAudioFormatLinearPCM; 
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 
    audioFormat.mFramesPerPacket = 1; 
    audioFormat.mChannelsPerFrame = 1; 
    audioFormat.mBitsPerChannel  = 16; 
    audioFormat.mBytesPerPacket  = 2; 
    audioFormat.mBytesPerFrame  = 2; 
} 

// Applies the format to the output scope of the input element, i.e. the
// side of the mic element that delivers captured data into the unit.
- (void) applyFormat { 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioUnitProperty_StreamFormat, 
                                        kAudioUnitScope_Output, 
                                        kInputBus, 
                                        &audioFormat, 
                                        sizeof(audioFormat)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Starts audio I/O on the unit.
- (void) start { 
    NSLog(@"starting"); 
    OSStatus err = AudioOutputUnitStart(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Stops audio I/O on the unit.
- (void) end { 
    NSLog(@"ending"); 
    OSStatus err = AudioOutputUnitStop(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Logs a human-readable name for an Audio Unit error code.
- (void) showStatus:(OSStatus) st{ 
    NSString *text = nil; 
    switch (st) { 
        case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break; 
        case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break; 
        case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break; 
        case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break; 
        case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break; 
        case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break; 
        case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break; 
        case kAudioUnitErr_InvalidElement: text = @"kAudioUnitErr_InvalidElement"; break; 
        case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break; 
        case kAudioUnitErr_InvalidOfflineRender: text = @"kAudioUnitErr_InvalidOfflineRender"; break; 
        case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break; 
        case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break; 
        case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break; 
        case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break; 
        case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break; 
        case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break; 
        case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break; 
        case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break; 
        case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break; 
        case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break; 
        case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break; 
        default: text = @"unknown error"; 
    } 
    // OSStatus is SInt32; cast to long so the argument matches the %ld
    // conversion on both 32- and 64-bit builds (the original %li with an
    // uncast 32-bit value is undefined on 64-bit ABIs).
    NSLog(@"TRANSLATED_ERROR = %ld = %@", (long)st, text); 
} 

- (void) dealloc { 
    AudioUnitUninitialize(unit); 
    // Dispose of the instance created in getAudioUnit to avoid leaking it.
    AudioComponentInstanceDispose(unit); 

    [super dealloc]; // manual reference counting (non-ARC) file
} 

@end 
+2

당신은 모든 것을 거의 올바르게하고있는 것처럼 보입니다. 그러나 실제로 입력 범위의 출력 요소를 출력 범위의 입력 요소에 연결하는 위치는 표시되지 않습니다. RemoteIO 유닛은 하드웨어 입력과 출력을 모두 처리한다는 점에서 특별하지만, 유닛이 인스턴스화 될 때 암묵적으로 연결되지는 않습니다. – warrenm

+0

흠, 어떻게 할 수 있겠습니까? AUAudioGraph에 대해 이야기하고 있습니까? 또는 요소 사이에 연결을 만드는 다른 방법이 있습니까? 고맙습니다. –

+0

감사합니다. AudioUnitConnection conn을 사용합니다. conn.destInputNumber = 0; conn.sourceAudioUnit = unit; conn.sourceOutputNumber = 1; err = AudioUnitSetProperty (단위, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, 0, & conn, sizeof (conn)); if (noErr! = err) { [self showStatus : err]; } –

답변

10

다음은 RemoteIO 유닛의 요소들 사이에 연결을 설정하는 실제 코드입니다. 모든 초기화가 끝난 뒤에 이 코드를 배치하면 완료됩니다:

.H 파일

#import <Foundation/Foundation.h> 

/// Passes audio received from the microphone straight to the speaker
/// using a single RemoteIO audio unit (no render callback needed).
@interface AudioController : NSObject 

/// Configures the audio session, builds and initializes the audio unit,
/// and connects its input element to its output element.
- (void)setUp; 
/// Starts audio pass-through.
- (void)start; 
/// Stops audio pass-through.
- (void)end; 
@end 
:

// Route audio straight through the RemoteIO unit: feed the output of the
// input element (kInputBus = 1, the microphone side) into the input of
// the output element (kOutputBus = 0, the speaker side).
AudioUnitConnection conn; 
conn.destInputNumber = kOutputBus; 
conn.sourceAudioUnit = unit;           // source and destination are the same unit
conn.sourceOutputNumber = kInputBus; 
// Property is set on the input scope of the destination element.
err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, kOutputBus, &conn, sizeof(conn)); 
if (noErr != err) { [self showStatus:err]; } 

UPDATE: 다른 사람들이 이 솔루션을 쉽게 사용할 수 있도록 여기에 전체 코드를 게시합니다.

.m 파일

#import "AudioController.h" 
#import <AudioToolbox/AudioToolbox.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AudioToolbox/AudioServices.h> 
#define kInputBus 1 
#define kOutputBus 0 

@interface AudioController() 
{ 
    AudioComponentDescription desc; 
    AudioComponent component; 
    AudioUnit unit; 
    AudioStreamBasicDescription audioFormat; 
    double rate; 
} 
@end 

@implementation AudioController 

// Configures the shared audio session for simultaneous record + playback,
// creates and initializes a RemoteIO audio unit with both elements
// enabled, then connects the input element (mic) to the output element
// (speaker) so audio passes straight through without a render callback.
- (void)setUp 
{ 
    AVAudioSession *sess = [AVAudioSession sharedInstance]; 
    NSError *error = nil; 
    rate = 44100.0; 
    // Check the BOOL return value of each call, not the error pointer:
    // the NSError out-parameter is only meaningful when a call fails.
    if (![sess setPreferredSampleRate:rate error:&error]) { 
        NSLog(@"%@", error); 
    } 
    if (![sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]) { 
        NSLog(@"%@", error); 
    } 
    if (![sess setActive:YES error:&error]) { 
        NSLog(@"%@", error); 
    } 
    // The hardware may not honor the preferred rate; use the actual one.
    rate = [sess sampleRate]; 

    NSLog(@"Initing"); 
    [self createUnitDesc]; 
    [self getComponent]; 
    [self getAudioUnit]; 
    [self enableIORec]; 
    [self enableIOPb]; 
    [self createFormat]; 
    [self applyFormat]; 
    OSStatus err = AudioUnitInitialize(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 

    // Route audio straight through: feed the output of the input element
    // (kInputBus = 1, microphone) into the input of the output element
    // (kOutputBus = 0, speaker). Must happen after AudioUnitInitialize.
    AudioUnitConnection conn; 
    conn.destInputNumber = kOutputBus; 
    conn.sourceAudioUnit = unit; 
    conn.sourceOutputNumber = kInputBus; 
    err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, kOutputBus, &conn, sizeof(conn)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Fills in the component description identifying Apple's RemoteIO unit.
- (void)createUnitDesc 
{ 
    desc.componentType = kAudioUnitType_Output; 
    desc.componentSubType = kAudioUnitSubType_RemoteIO; 
    desc.componentFlags = 0; 
    desc.componentFlagsMask = 0; 
    desc.componentManufacturer = kAudioUnitManufacturer_Apple; 
} 

// Locates the audio component matching the description built above.
- (void)getComponent 
{ 
    component = AudioComponentFindNext(NULL, &desc); 
    if (NULL == component) { 
        // AudioComponentFindNext returns NULL when nothing matches;
        // every later call would silently fail without this check.
        NSLog(@"No audio component matches the RemoteIO description"); 
    } 
} 

// Instantiates the RemoteIO audio unit from the located component.
- (void)getAudioUnit 
{ 
    OSStatus res = AudioComponentInstanceNew(component, &unit); 
    if (noErr != res) { 
        [self showStatus:res]; 
    } 
} 

// Enables recording on the input element (bus 1) of the RemoteIO unit.
- (void)enableIORec 
{ 
    UInt32 flag = 1; 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioOutputUnitProperty_EnableIO, 
                                        kAudioUnitScope_Input, 
                                        kInputBus, 
                                        &flag, 
                                        sizeof(flag)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Enables playback on the output element (bus 0) of the RemoteIO unit.
- (void)enableIOPb 
{ 
    UInt32 flag = 1; 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioOutputUnitProperty_EnableIO, 
                                        kAudioUnitScope_Output, 
                                        kOutputBus, 
                                        &flag, 
                                        sizeof(flag)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Describes the stream format: mono, 16-bit signed-integer, packed
// linear PCM at the session's sample rate (1 frame per packet).
- (void)createFormat 
{ 
    audioFormat.mSampleRate   = rate; 
    audioFormat.mFormatID   = kAudioFormatLinearPCM; 
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; 
    audioFormat.mFramesPerPacket = 1; 
    audioFormat.mChannelsPerFrame = 1; 
    audioFormat.mBitsPerChannel  = 16; 
    audioFormat.mBytesPerPacket  = 2; 
    audioFormat.mBytesPerFrame  = 2; 
} 

// Applies the format to the output scope of the input element, i.e. the
// side of the mic element that delivers captured data into the unit.
- (void)applyFormat 
{ 
    OSStatus err = AudioUnitSetProperty(unit, 
                                        kAudioUnitProperty_StreamFormat, 
                                        kAudioUnitScope_Output, 
                                        kInputBus, 
                                        &audioFormat, 
                                        sizeof(audioFormat)); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Starts audio I/O on the unit.
- (void)start 
{ 
    NSLog(@"starting"); 
    OSStatus err = AudioOutputUnitStart(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Stops audio I/O on the unit.
- (void)end 
{ 
    NSLog(@"ending"); 
    OSStatus err = AudioOutputUnitStop(unit); 
    if (noErr != err) { 
        [self showStatus:err]; 
    } 
} 

// Logs a human-readable name for an Audio Unit error code.
- (void)showStatus:(OSStatus)st 
{ 
    NSString *text = nil; 
    switch (st) { 
        case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break; 
        case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break; 
        case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break; 
        case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break; 
        case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break; 
        case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break; 
        case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break; 
        case kAudioUnitErr_InvalidElement: text = @"kAudioUnitErr_InvalidElement"; break; 
        case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break; 
        case kAudioUnitErr_InvalidOfflineRender: text = @"kAudioUnitErr_InvalidOfflineRender"; break; 
        case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break; 
        case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break; 
        case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break; 
        case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break; 
        case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break; 
        case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break; 
        case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break; 
        case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break; 
        case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break; 
        case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break; 
        case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break; 
        default: text = @"unknown error"; 
    } 
    // OSStatus is SInt32; cast to long so the argument matches the %ld
    // conversion on both 32- and 64-bit builds (the original %li with an
    // uncast 32-bit value is undefined on 64-bit ABIs).
    NSLog(@"TRANSLATED_ERROR = %ld = %@", (long)st, text); 
} 

- (void)dealloc 
{ 
    AudioUnitUninitialize(unit); 
    // Dispose of the instance created in getAudioUnit to avoid leaking it.
    AudioComponentInstanceDispose(unit); 

    [super dealloc]; // manual reference counting (non-ARC) file
} 

@end 
+0

어떻게 작동시키는 지 자세히 설명해 주시겠습니까? 나는 그것을 작동시킬 수 없다. – Srikanth

+2

@Srikanth, 내가 게시 한 코드를 보아라. 먼저'AudioController'의 객체를 생성 한 다음'setUp'을 호출하고'start'와'end' 메소드를 호출하여 재생을 제어해야합니다. –

+1

@Srikanth는 시작 및 중지라는 두 개의 버튼을 만든 다음 시작 버튼 콘센트에 대해 start 메소드를 호출하고 중지 버튼 콘센트에 대해서는 end 메소드를 호출합니다. 희망이 도움이됩니다. – madLokesh