// SuperpoweredIOSAudioIO.mm
  1. #import "SuperpoweredIOSAudioIO.h"
  2. #import <AudioToolbox/AudioToolbox.h>
  3. #import <AudioUnit/AudioUnit.h>
  4. #import <MediaPlayer/MediaPlayer.h>
// Helpers
// Expands `code` with -Wdeprecated-declarations silenced around it, so the
// iOS 5/6 compatibility paths below can call APIs deprecated in newer SDKs
// without producing compiler warnings.
#define SILENCE_DEPRECATION(code) \
{ \
_Pragma("clang diagnostic push") \
_Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") \
code; \
_Pragma("clang diagnostic pop") \
}
  13. typedef enum audioDeviceType {
  14. audioDeviceType_USB = 1, audioDeviceType_headphone = 2, audioDeviceType_HDMI = 3, audioDeviceType_other = 4
  15. } audioDeviceType;
  16. static audioDeviceType NSStringToAudioDeviceType(NSString *str) {
  17. if ([str isEqualToString:AVAudioSessionPortHeadphones]) return audioDeviceType_headphone;
  18. else if ([str isEqualToString:AVAudioSessionPortUSBAudio]) return audioDeviceType_USB;
  19. else if ([str isEqualToString:AVAudioSessionPortHDMI]) return audioDeviceType_HDMI;
  20. else return audioDeviceType_other;
  21. }
// Initialization
@implementation SuperpoweredIOSAudioIO {
    id<SuperpoweredIOSAudioIODelegate>delegate;           // Receives processing/interruption/mapping callbacks. Not retained here.
    NSString *externalAudioDeviceName, *audioSessionCategory; // Name of the attached USB/HDMI device (if any); the requested session category.
    NSMutableString *audioSystemInfo;                     // Human-readable route description, rebuilt on every route change.
    audioProcessingCallback_C processingCallback;         // Optional C callback; when NULL, the delegate's method is used instead.
    void *processingClientdata;                           // Opaque pointer handed back to processingCallback.
    AudioBufferList *inputBufferListForRecordingCategory; // Manually allocated input buffers, used only with the record-only category.
    AudioComponentInstance audioUnit;                     // The RemoteIO unit. NULL when not created.
    multiOutputChannelMap outputChannelMap;               // Filled by the delegate in mapChannels.
    multiInputChannelMap inputChannelMap;                 // Filled by the delegate in mapChannels.
    audioDeviceType RemoteIOOutputChannelMap[64];         // Device type behind each RemoteIO output channel slot (0 = unused).
    int numChannels, silenceFrames, samplerate, preferredMinimumSamplerate; // silenceFrames counts background silence for battery saving.
    bool audioUnitRunning, iOS6, background, inputEnabled;
}
@synthesize preferredBufferSizeMs, saveBatteryInBackground;
// Replaces the delegate receiving the audio callbacks.
// NOTE(review): the name violates Cocoa conventions (should read like
// -setDelegate:), but renaming would break existing callers.
- (void)setdelg:(id<SuperpoweredIOSAudioIODelegate>)itdelegate
{
    delegate = itdelegate;
}
// Returns whether RemoteIO is currently believed to be running.
// NOTE(review): name is misspelled (should be -isRunning); kept unchanged
// for source compatibility with existing callers.
- (BOOL)getRuning
{
    return audioUnitRunning;
}
  46. - (void)createAudioBuffersForRecordingCategory {
  47. inputBufferListForRecordingCategory = (AudioBufferList *)malloc(sizeof(AudioBufferList) + (sizeof(AudioBuffer) * numChannels));
  48. inputBufferListForRecordingCategory->mNumberBuffers = numChannels;
  49. for (int n = 0; n < numChannels; n++) {
  50. inputBufferListForRecordingCategory->mBuffers[n].mDataByteSize = 2048 * 4;
  51. inputBufferListForRecordingCategory->mBuffers[n].mNumberChannels = 1;
  52. inputBufferListForRecordingCategory->mBuffers[n].mData = malloc(inputBufferListForRecordingCategory->mBuffers[n].mDataByteSize);
  53. };
  54. }
// Designated initializer. Configures the audio session, creates the RemoteIO
// unit (via resetAudio) and subscribes to the app/session notifications this
// class depends on.
//   d                   - delegate for processing and interruption callbacks (not retained under MRC).
//   preferredBufferSize - preferred IO buffer size in milliseconds.
//   prefsamplerate      - minimum samplerate to request from the hardware (0 = no preference).
//   category            - an AVAudioSessionCategory* constant.
//   channels            - number of non-interleaved channels (forced to 2 below iOS 6).
- (id)initWithDelegate:(NSObject<SuperpoweredIOSAudioIODelegate> *)d preferredBufferSize:(unsigned int)preferredBufferSize preferredMinimumSamplerate:(unsigned int)prefsamplerate audioSessionCategory:(NSString *)category channels:(int)channels {
    self = [super init];
    if (self) {
        // Numeric compare handles multi-digit versions (e.g. "10.0") correctly.
        iOS6 = ([[[UIDevice currentDevice] systemVersion] compare:@"6.0" options:NSNumericSearch] != NSOrderedAscending);
        numChannels = !iOS6 ? 2 : channels; // More than stereo requires iOS 6+.
#if !__has_feature(objc_arc)
        audioSessionCategory = [category retain];
#else
        audioSessionCategory = category;
#endif
        saveBatteryInBackground = true;
        preferredBufferSizeMs = preferredBufferSize;
        preferredMinimumSamplerate = prefsamplerate;
        bool recordOnly = [category isEqualToString:AVAudioSessionCategoryRecord];
        inputEnabled = recordOnly || [category isEqualToString:AVAudioSessionCategoryPlayAndRecord];
        processingCallback = NULL;
        processingClientdata = NULL;
        delegate = d;
        audioSystemInfo = [[NSMutableString alloc] initWithCapacity:256];
        silenceFrames = 0;
        background = audioUnitRunning = false;
        samplerate = 0;
        externalAudioDeviceName = nil;
        audioUnit = NULL;
        // Record-only has no output render callback providing buffers, so input buffers must be owned here.
        if (recordOnly) [self createAudioBuffersForRecordingCategory]; else inputBufferListForRecordingCategory = NULL;
        [self resetAudio];
        // Need to listen for a few app and audio session related events.
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onBackground) name:UIApplicationDidEnterBackgroundNotification object:nil];
        if (iOS6) {
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onMediaServerReset:) name:AVAudioSessionMediaServicesWereResetNotification object:[AVAudioSession sharedInstance]];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onAudioSessionInterrupted:) name:AVAudioSessionInterruptionNotification object:[AVAudioSession sharedInstance]];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onRouteChange:) name:AVAudioSessionRouteChangeNotification object:[AVAudioSession sharedInstance]];
        } else {
            AVAudioSession *s = [AVAudioSession sharedInstance];
            SILENCE_DEPRECATION(s.delegate = (id<AVAudioSessionDelegate>)self); // iOS 5 compatibility
        };
    };
    return self;
}
  95. - (void)setProcessingCallback_C:(audioProcessingCallback_C)callback clientdata:(void *)clientdata {
  96. processingCallback = callback;
  97. processingClientdata = clientdata;
  98. }
// Tears down the RemoteIO unit, frees the manually allocated record-only
// input buffers, deactivates the audio session and unsubscribes from all
// notifications. MRC-only releases are compiled conditionally.
- (void)dealloc {
    if (audioUnit != NULL) {
        AudioUnitUninitialize(audioUnit);
        AudioComponentInstanceDispose(audioUnit);
    };
    if (inputBufferListForRecordingCategory) {
        // Free each channel's data buffer before the list itself.
        for (int n = 0; n < numChannels; n++) free(inputBufferListForRecordingCategory->mBuffers[n].mData);
        free(inputBufferListForRecordingCategory);
    };
    [[AVAudioSession sharedInstance] setActive:NO error:nil];
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#if !__has_feature(objc_arc)
    [audioSystemInfo release];
    [externalAudioDeviceName release];
    [super dealloc]; // MRC only; under ARC super's dealloc is called automatically.
#endif
}
// App and audio session lifecycle
// The mediaserver daemon can die. Yes, it happens sometimes. Fully rebuilds
// the audio session and the RemoteIO unit; always bounces to the main thread.
- (void)onMediaServerReset:(NSNotification *)notification {
    if (![NSThread isMainThread]) [self performSelectorOnMainThread:@selector(onMediaServerReset:) withObject:nil waitUntilDone:NO];
    else {
        if (audioUnit) AudioOutputUnitStop(audioUnit);
        audioUnitRunning = false;
        [[AVAudioSession sharedInstance] setActive:NO error:nil];
        [self resetAudio]; // Disposes and recreates the audio unit.
        [self start];
    };
}
// Phone call, etc. Marks audio as stopped, notifies the delegate, and stops
// RemoteIO; always bounces to the main thread.
// NOTE(review): onAudioSessionInterrupted: may also dispatch
// startDelegateInterruption before calling this, so the delegate can receive
// interruptionStarted twice for one interruption — confirm this is intended.
- (void)beginInterruption {
    if (![NSThread isMainThread]) [self performSelectorOnMainThread:@selector(beginInterruption) withObject:nil waitUntilDone:NO];
    else {
        audioUnitRunning = false;
        [delegate interruptionStarted];
        if (audioUnit) AudioOutputUnitStop(audioUnit);
    };
}
// Reactivates the audio session and restarts RemoteIO after an interruption,
// notifying the delegate on success. No-op if audio is already running.
- (void)endInterruption {
    if (audioUnitRunning) return;
    if (![NSThread isMainThread]) [self performSelectorOnMainThread:@selector(endInterruption) withObject:nil waitUntilDone:NO];
    else for (int n = 0; n < 2; n++) if ([[AVAudioSession sharedInstance] setActive:YES error:nil] && [self start]) { // Need to try twice sometimes. Don't know why.
        [delegate interruptionEnded];
        audioUnitRunning = true;
        break;
    };
}
// iOS 5 AVAudioSessionDelegate variant; the flags are ignored.
- (void)endInterruptionWithFlags:(NSUInteger)flags {
    [self endInterruption];
}
// Main-thread trampoline: forwards interruptionStarted to the delegate.
- (void)startDelegateInterruption {
    [delegate interruptionStarted];
}
// iOS 6+ interruption notification handler (phone calls, alarms, etc.).
- (void)onAudioSessionInterrupted:(NSNotification *)notification {
    NSNumber *interruption = [notification.userInfo objectForKey:AVAudioSessionInterruptionTypeKey];
    if (interruption) switch ([interruption intValue]) {
        case AVAudioSessionInterruptionTypeBegan:
            // Only tell the delegate if audio was actually running when interrupted.
            if (audioUnitRunning) [self performSelectorOnMainThread:@selector(startDelegateInterruption) withObject:nil waitUntilDone:NO];
            [self beginInterruption];
            break;
        case AVAudioSessionInterruptionTypeEnded: [self endInterruption]; break;
    };
}
  160. - (void)onForeground { // App comes foreground.
  161. if (background) {
  162. background = false;
  163. [self endInterruption];
  164. };
  165. }
  166. - (void)onBackground { // App went to background.
  167. if( !background )
  168. {
  169. background = true;
  170. [self beginInterruption];
  171. }
  172. }
  173. // Audio Session
  174. - (void)onRouteChange:(NSNotification *)notification {
  175. if (![NSThread isMainThread]) {
  176. [self performSelectorOnMainThread:@selector(onRouteChange:) withObject:notification waitUntilDone:NO];
  177. return;
  178. };
  179. bool receiverAvailable = false, usbOrHDMIAvailable = false;
  180. for (AVAudioSessionPortDescription *port in [[[AVAudioSession sharedInstance] currentRoute] outputs]) {
  181. if ([port.portType isEqualToString:AVAudioSessionPortUSBAudio] || [port.portType isEqualToString:AVAudioSessionPortHDMI]) {
  182. usbOrHDMIAvailable = true;
  183. break;
  184. } else if ([port.portType isEqualToString:AVAudioSessionPortBuiltInReceiver]) receiverAvailable = true;
  185. };
  186. if (receiverAvailable && !usbOrHDMIAvailable) { // Comment this out if you would like to use the receiver instead.
  187. [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil];
  188. [self performSelectorOnMainThread:@selector(onRouteChange:) withObject:nil waitUntilDone:NO];
  189. return;
  190. };
  191. memset(RemoteIOOutputChannelMap, 0, sizeof(RemoteIOOutputChannelMap));
  192. outputChannelMap.headphoneAvailable = false;
  193. outputChannelMap.numberOfHDMIChannelsAvailable = outputChannelMap.numberOfUSBChannelsAvailable = inputChannelMap.numberOfUSBChannelsAvailable = 0;
  194. #if !__has_feature(objc_arc)
  195. [audioSystemInfo release];
  196. [externalAudioDeviceName release];
  197. #endif
  198. audioSystemInfo = [[NSMutableString alloc] initWithCapacity:128];
  199. [audioSystemInfo appendString:@"Outputs: "];
  200. externalAudioDeviceName = nil;
  201. bool first = true; int n = 0;
  202. for (AVAudioSessionPortDescription *port in [[[AVAudioSession sharedInstance] currentRoute] outputs]) {
  203. int channels = (int)[port.channels count];
  204. audioDeviceType type = NSStringToAudioDeviceType(port.portType);
  205. [audioSystemInfo appendFormat:@"%s%@ (%i out)", first ? "" : ", ", [port.portName stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]], channels];
  206. first = false;
  207. if (type == audioDeviceType_headphone) outputChannelMap.headphoneAvailable = true;
  208. else if (type == audioDeviceType_HDMI) {
  209. outputChannelMap.numberOfHDMIChannelsAvailable = channels;
  210. #if !__has_feature(objc_arc)
  211. [externalAudioDeviceName release], externalAudioDeviceName = [port.portName retain];
  212. #else
  213. externalAudioDeviceName = port.portName;
  214. #endif
  215. } else if (type == audioDeviceType_USB) { // iOS can handle one USB audio device only
  216. outputChannelMap.numberOfUSBChannelsAvailable = channels;
  217. #if !__has_feature(objc_arc)
  218. [externalAudioDeviceName release], externalAudioDeviceName = [port.portName retain];
  219. #else
  220. externalAudioDeviceName = port.portName;
  221. #endif
  222. };
  223. while (channels > 0) {
  224. RemoteIOOutputChannelMap[n++] = type;
  225. channels--;
  226. };
  227. };
  228. if ([[AVAudioSession sharedInstance] isInputAvailable]) {
  229. [audioSystemInfo appendString:@", Inputs: "];
  230. first = true;
  231. for (AVAudioSessionPortDescription *port in [[[AVAudioSession sharedInstance] currentRoute] inputs]) {
  232. int channels = (int)[port.channels count];
  233. audioDeviceType type = NSStringToAudioDeviceType(port.portType);
  234. [audioSystemInfo appendFormat:@"%s%@ (%i in)", first ? "" : ", ", [port.portName stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]], channels];
  235. first = false;
  236. if (type == audioDeviceType_USB) inputChannelMap.numberOfUSBChannelsAvailable = channels;
  237. };
  238. if (first) [audioSystemInfo appendString:@"-"];
  239. };
  240. [self mapChannels];
  241. if (usbOrHDMIAvailable) SILENCE_DEPRECATION([[MPMusicPlayerController applicationMusicPlayer] setVolume:1.0f]); // iOS 5 and iOS 6 compatibility
  242. }
// Pushes the preferred samplerate and preferred IO buffer duration to the
// audio session. A preference is only requested when the current hardware
// samplerate is below the configured minimum; otherwise 0 is set, which
// clears the preference. Uses the deprecated API below iOS 6.
- (void)setSamplerateAndBuffersize {
    if (samplerate > 0) {
        double sr = samplerate < preferredMinimumSamplerate ? preferredMinimumSamplerate : 0, current;
        if (!iOS6) {
            SILENCE_DEPRECATION(current = [[AVAudioSession sharedInstance] preferredHardwareSampleRate]); // iOS 5 compatibility
        } else current = [[AVAudioSession sharedInstance] preferredSampleRate];
        if (current != sr) { // Only touch the session when the preference actually changes.
            if (!iOS6) {
                SILENCE_DEPRECATION([[AVAudioSession sharedInstance] setPreferredHardwareSampleRate:sr error:NULL]); // iOS 5 compatibility
            } else [[AVAudioSession sharedInstance] setPreferredSampleRate:sr error:NULL];
        };
    };
    [[AVAudioSession sharedInstance] setPreferredIOBufferDuration:double(preferredBufferSizeMs) * 0.001 error:NULL];
}
// Disposes any existing RemoteIO unit, (re)configures the audio session
// category and mode, activates the session and creates a fresh RemoteIO
// instance. When more than 2 channels were requested with the Playback
// category and a stereo USB device is attached, switches to MultiRoute so
// the built-in output can be used alongside USB.
- (void)resetAudio {
    if (audioUnit != NULL) {
        AudioUnitUninitialize(audioUnit);
        AudioComponentInstanceDispose(audioUnit);
        audioUnit = NULL;
    };
    bool multiRoute = false;
    if ((numChannels > 2) && [audioSessionCategory isEqualToString:AVAudioSessionCategoryPlayback]) {
        for (AVAudioSessionPortDescription *port in [[[AVAudioSession sharedInstance] currentRoute] outputs]) {
            if ([port.portType isEqualToString:AVAudioSessionPortUSBAudio]) {
                if ([port.channels count] == 2) multiRoute = true;
                break; // iOS supports one USB audio device only.
            };
        }
    };
    [[AVAudioSession sharedInstance] setCategory:multiRoute ? AVAudioSessionCategoryMultiRoute : audioSessionCategory error:NULL];
    [[AVAudioSession sharedInstance] setMode:AVAudioSessionModeDefault error:NULL];
    [self setSamplerateAndBuffersize];
    [[AVAudioSession sharedInstance] setActive:YES error:NULL];
    audioUnit = [self createRemoteIO];
    if (!multiRoute) {
        // Refresh the route info/channel maps for the newly created unit.
        if (iOS6) [self onRouteChange:nil]; else [self mapChannels];
    };
}
  281. // RemoteIO
  282. static void streamFormatChangedCallback(void *inRefCon, AudioUnit inUnit, AudioUnitPropertyID inID, AudioUnitScope inScope, AudioUnitElement inElement) {
  283. if ((inScope == kAudioUnitScope_Output) && (inElement == 0)) {
  284. UInt32 size = 0;
  285. AudioUnitGetPropertyInfo(inUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &size, NULL);
  286. AudioStreamBasicDescription format;
  287. AudioUnitGetProperty(inUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &format, &size);
  288. SuperpoweredIOSAudioIO *self = (__bridge SuperpoweredIOSAudioIO *)inRefCon;
  289. self->samplerate = (int)format.mSampleRate;
  290. [self performSelectorOnMainThread:@selector(setSamplerateAndBuffersize) withObject:nil waitUntilDone:NO];
  291. };
  292. }
// The RemoteIO render (or record-only input) callback, running on the
// realtime audio thread. Pulls input if enabled, invokes the C callback or
// the delegate to produce output, zeroes the buffers when the output is
// silent, and stops RemoteIO after 1 second of silence in the background to
// save battery. Returns kAudioUnitErr_InvalidParameter for unsupported
// buffer sizes.
static OSStatus audioProcessingCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
    SuperpoweredIOSAudioIO *self = (__bridge SuperpoweredIOSAudioIO *)inRefCon;
    if (!ioData) ioData = self->inputBufferListForRecordingCategory; // Record-only category: use our own buffers.
    div_t d = div(inNumberFrames, 8);
    // Frame count must be a multiple of 8 within 32..512, and the buffer layout must match our channel count.
    if ((d.rem != 0) || (inNumberFrames < 32) || (inNumberFrames > 512) || (ioData->mNumberBuffers != self->numChannels)) {
        return kAudioUnitErr_InvalidParameter;
    };
    // Get audio input.
    unsigned int inputChannels = (self->inputEnabled && !AudioUnitRender(self->audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData)) ? self->numChannels : 0;
    float *bufs[self->numChannels];
    for (int n = 0; n < self->numChannels; n++) bufs[n] = (float *)ioData->mBuffers[n].mData;
    bool silence = true;
    // Make audio output.
    if (self->processingCallback) silence = !self->processingCallback(self->processingClientdata, bufs, inputChannels, self->numChannels, inNumberFrames, self->samplerate, inTimeStamp->mHostTime);
    else silence = ![self->delegate audioProcessingCallback:bufs inputChannels:inputChannels outputChannels:self->numChannels numberOfSamples:inNumberFrames samplerate:self->samplerate hostTime:inTimeStamp->mHostTime];
    if (silence) { // Despite of ioActionFlags, it outputs garbage sometimes, so must zero the buffers:
        *ioActionFlags |= kAudioUnitRenderAction_OutputIsSilence;
        for (unsigned char n = 0; n < ioData->mNumberBuffers; n++) memset(ioData->mBuffers[n].mData, 0, inNumberFrames << 2); // << 2: 4 bytes per 32-bit float sample.
        if (self->background && self->saveBatteryInBackground) { // If the app is in the background, check if we don't output anything.
            self->silenceFrames += inNumberFrames;
            if (self->silenceFrames > self->samplerate) { // If we waited for more than 1 second with silence, stop RemoteIO to save battery.
                self->silenceFrames = 0;
                [self beginInterruption];
            };
        } else self->silenceFrames = 0;
    } else self->silenceFrames = 0;
    return noErr;
}
  321. - (AudioUnit)createRemoteIO {
  322. AudioUnit au;
  323. AudioComponentDescription desc;
  324. desc.componentType = kAudioUnitType_Output;
  325. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  326. desc.componentFlags = 0;
  327. desc.componentFlagsMask = 0;
  328. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  329. AudioComponent component = AudioComponentFindNext(NULL, &desc);
  330. if (AudioComponentInstanceNew(component, &au) != 0) return NULL;
  331. bool recordOnly = [audioSessionCategory isEqualToString:AVAudioSessionCategoryRecord];
  332. UInt32 value = recordOnly ? 0 : 1;
  333. if (AudioUnitSetProperty(au, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &value, sizeof(value))) { AudioComponentInstanceDispose(au); return NULL; };
  334. value = inputEnabled ? 1 : 0;
  335. if (AudioUnitSetProperty(au, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &value, sizeof(value))) { AudioComponentInstanceDispose(au); return NULL; };
  336. AudioUnitAddPropertyListener(au, kAudioUnitProperty_StreamFormat, streamFormatChangedCallback, (__bridge void *)self);
  337. UInt32 size = 0;
  338. AudioUnitGetPropertyInfo(au, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &size, NULL);
  339. AudioStreamBasicDescription format;
  340. AudioUnitGetProperty(au, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &format, &size);
  341. samplerate = (int)format.mSampleRate;
  342. [self setSamplerateAndBuffersize];
  343. format.mFormatID = kAudioFormatLinearPCM;
  344. format.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagsNativeEndian;
  345. format.mBitsPerChannel = 32;
  346. format.mFramesPerPacket = 1;
  347. format.mBytesPerFrame = 4;
  348. format.mBytesPerPacket = 4;
  349. format.mChannelsPerFrame = numChannels;
  350. if (AudioUnitSetProperty(au, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof(format))) { AudioComponentInstanceDispose(au); return NULL; };
  351. if (AudioUnitSetProperty(au, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof(format))) { AudioComponentInstanceDispose(au); return NULL; };
  352. AURenderCallbackStruct callbackStruct;
  353. callbackStruct.inputProc = audioProcessingCallback;
  354. callbackStruct.inputProcRefCon = (__bridge void *)self;
  355. if (recordOnly) {
  356. if (AudioUnitSetProperty(au, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &callbackStruct, sizeof(callbackStruct))) { AudioComponentInstanceDispose(au); return NULL; };
  357. } else {
  358. if (AudioUnitSetProperty(au, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &callbackStruct, sizeof(callbackStruct))) { AudioComponentInstanceDispose(au); return NULL; };
  359. };
  360. if (AudioUnitInitialize(au)) { AudioComponentInstanceDispose(au); return NULL; };
  361. return au;
  362. }
  363. // Public methods
  364. - (bool)start {
  365. if (audioUnit == NULL) return false;
  366. if (AudioOutputUnitStart(audioUnit)) return false;
  367. audioUnitRunning = true;
  368. return true;
  369. }
  370. - (void)stop {
  371. if (audioUnit != NULL) AudioOutputUnitStop(audioUnit);
  372. }
  373. - (void)setPreferredBufferSizeMs:(int)ms {
  374. preferredBufferSizeMs = ms;
  375. [self setSamplerateAndBuffersize];
  376. }
  377. - (void)mapChannels {
  378. outputChannelMap.deviceChannels[0] = outputChannelMap.deviceChannels[1] = -1;
  379. for (int n = 0; n < 8; n++) outputChannelMap.HDMIChannels[n] = -1;
  380. for (int n = 0; n < 32; n++) outputChannelMap.USBChannels[n] = inputChannelMap.USBChannels[n] = - 1;
  381. [delegate mapChannels:&outputChannelMap inputMap:&inputChannelMap externalAudioDeviceName:externalAudioDeviceName outputsAndInputs:audioSystemInfo];
  382. if (!audioUnit || !iOS6 || (numChannels <= 2)) return;
  383. SInt32 outputmap[32], inputmap[32];
  384. int devicePos = 0, hdmiPos = 0, usbPos = 0;
  385. for (int n = 0; n < 32; n++) {
  386. if (RemoteIOOutputChannelMap[n] != 0) switch (RemoteIOOutputChannelMap[n]) {
  387. case audioDeviceType_HDMI: if (hdmiPos < 8) outputmap[n] = outputChannelMap.HDMIChannels[hdmiPos++]; break;
  388. case audioDeviceType_USB: if (usbPos < 32) outputmap[n] = outputChannelMap.USBChannels[usbPos++]; break;
  389. default: if (devicePos < 2) outputmap[n] = outputChannelMap.deviceChannels[devicePos++];
  390. } else outputmap[n] = -1;
  391. inputmap[n] = inputChannelMap.USBChannels[n];
  392. };
  393. #if !TARGET_IPHONE_SIMULATOR
  394. AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Input, 0, outputmap, 128);
  395. AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Input, 1, inputmap, 128);
  396. #endif
  397. }
// Switches to a new audio session category at runtime (always on the main
// thread). Allocates record buffers and requests microphone permission when
// the new category needs input, then rebuilds the session and the audio unit
// via onMediaServerReset:. The delegate's recordPermissionRefused is called
// if the user denies the permission.
- (void)reconfigureWithAudioSessionCategory:(NSString *)category {
    if (![NSThread isMainThread]) {
        [self performSelectorOnMainThread:@selector(reconfigureWithAudioSessionCategory:) withObject:category waitUntilDone:NO];
        return;
    };
#if !__has_feature(objc_arc)
    [audioSessionCategory release], audioSessionCategory = [category retain];
#else
    audioSessionCategory = category;
#endif
    bool recordOnly = [category isEqualToString:AVAudioSessionCategoryRecord];
    // Record-only needs our own input buffers; allocate lazily on first use.
    if (recordOnly && !inputBufferListForRecordingCategory) [self createAudioBuffersForRecordingCategory];
    inputEnabled = recordOnly || [category isEqualToString:AVAudioSessionCategoryPlayAndRecord];
    // respondsToSelector: guards against older iOS versions without the permission API.
    if (inputEnabled && [[AVAudioSession sharedInstance] respondsToSelector:@selector(recordPermission)] && [[AVAudioSession sharedInstance] respondsToSelector:@selector(requestRecordPermission:)]) {
        if ([[AVAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted) [self onMediaServerReset:nil];
        else {
            [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
                if (granted) [self onMediaServerReset:nil]; else [delegate recordPermissionRefused];
            }];
        };
    } else [self onMediaServerReset:nil];
}
  420. @end