The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

806 lines
28KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. class iOSAudioIODevice;
  18. static const char* const iOSAudioDeviceName = "iOS Audio";
  19. //==============================================================================
  20. struct AudioSessionHolder
  21. {
  22. AudioSessionHolder();
  23. ~AudioSessionHolder();
  24. void handleStatusChange (bool enabled, const char* reason) const;
  25. void handleRouteChange (const char* reason) const;
  26. Array<iOSAudioIODevice*> activeDevices;
  27. id nativeSession;
  28. };
  29. static const char* getRoutingChangeReason (AVAudioSessionRouteChangeReason reason) noexcept
  30. {
  31. switch (reason)
  32. {
  33. case AVAudioSessionRouteChangeReasonNewDeviceAvailable: return "New device available";
  34. case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: return "Old device unavailable";
  35. case AVAudioSessionRouteChangeReasonCategoryChange: return "Category change";
  36. case AVAudioSessionRouteChangeReasonOverride: return "Override";
  37. case AVAudioSessionRouteChangeReasonWakeFromSleep: return "Wake from sleep";
  38. case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: return "No suitable route for category";
  39. case AVAudioSessionRouteChangeReasonRouteConfigurationChange: return "Route configuration change";
  40. case AVAudioSessionRouteChangeReasonUnknown:
  41. default: return "Unknown";
  42. }
  43. }
  44. bool getNotificationValueForKey (NSNotification* notification, NSString* key, NSUInteger& value) noexcept
  45. {
  46. if (notification != nil)
  47. {
  48. if (NSDictionary* userInfo = [notification userInfo])
  49. {
  50. if (NSNumber* number = [userInfo objectForKey: key])
  51. {
  52. value = [number unsignedIntegerValue];
  53. return true;
  54. }
  55. }
  56. }
  57. jassertfalse;
  58. return false;
  59. }
  60. } // juce namespace
  61. //==============================================================================
  62. @interface iOSAudioSessionNative : NSObject
  63. {
  64. @private
  65. juce::AudioSessionHolder* audioSessionHolder;
  66. };
  67. - (id) init: (juce::AudioSessionHolder*) holder;
  68. - (void) dealloc;
  69. - (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification;
  70. - (void) handleMediaServicesReset;
  71. - (void) handleMediaServicesLost;
  72. - (void) handleRouteChange: (NSNotification*) notification;
  73. @end
  74. @implementation iOSAudioSessionNative
  75. - (id) init: (juce::AudioSessionHolder*) holder
  76. {
  77. self = [super init];
  78. if (self != nil)
  79. {
  80. audioSessionHolder = holder;
  81. auto session = [AVAudioSession sharedInstance];
  82. auto centre = [NSNotificationCenter defaultCenter];
  83. [centre addObserver: self
  84. selector: @selector (audioSessionDidChangeInterruptionType:)
  85. name: AVAudioSessionInterruptionNotification
  86. object: session];
  87. [centre addObserver: self
  88. selector: @selector (handleMediaServicesLost)
  89. name: AVAudioSessionMediaServicesWereLostNotification
  90. object: session];
  91. [centre addObserver: self
  92. selector: @selector (handleMediaServicesReset)
  93. name: AVAudioSessionMediaServicesWereResetNotification
  94. object: session];
  95. [centre addObserver: self
  96. selector: @selector (handleRouteChange:)
  97. name: AVAudioSessionRouteChangeNotification
  98. object: session];
  99. }
  100. else
  101. {
  102. jassertfalse;
  103. }
  104. return self;
  105. }
  106. - (void) dealloc
  107. {
  108. [[NSNotificationCenter defaultCenter] removeObserver: self];
  109. [super dealloc];
  110. }
  111. - (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification
  112. {
  113. NSUInteger value;
  114. if (juce::getNotificationValueForKey (notification, AVAudioSessionInterruptionTypeKey, value))
  115. {
  116. switch ((AVAudioSessionInterruptionType) value)
  117. {
  118. case AVAudioSessionInterruptionTypeBegan:
  119. audioSessionHolder->handleStatusChange (false, "AVAudioSessionInterruptionTypeBegan");
  120. break;
  121. case AVAudioSessionInterruptionTypeEnded:
  122. audioSessionHolder->handleStatusChange (true, "AVAudioSessionInterruptionTypeEnded");
  123. break;
  124. // No default so the code doesn't compile if this enum is extended.
  125. }
  126. }
  127. }
  128. - (void) handleMediaServicesReset
  129. {
  130. audioSessionHolder->handleStatusChange (true, "AVAudioSessionMediaServicesWereResetNotification");
  131. }
  132. - (void) handleMediaServicesLost
  133. {
  134. audioSessionHolder->handleStatusChange (false, "AVAudioSessionMediaServicesWereLostNotification");
  135. }
  136. - (void) handleRouteChange: (NSNotification*) notification
  137. {
  138. NSUInteger value;
  139. if (juce::getNotificationValueForKey (notification, AVAudioSessionRouteChangeReasonKey, value))
  140. audioSessionHolder->handleRouteChange (juce::getRoutingChangeReason ((AVAudioSessionRouteChangeReason) value));
  141. }
  142. @end
  143. //==============================================================================
  144. namespace juce {
  145. #ifndef JUCE_IOS_AUDIO_LOGGING
  146. #define JUCE_IOS_AUDIO_LOGGING 0
  147. #endif
  148. #if JUCE_IOS_AUDIO_LOGGING
  149. #define JUCE_IOS_AUDIO_LOG(x) DBG(x)
  150. #else
  151. #define JUCE_IOS_AUDIO_LOG(x)
  152. #endif
  153. static void logNSError (NSError* e)
  154. {
  155. if (e != nil)
  156. {
  157. JUCE_IOS_AUDIO_LOG ("iOS Audio error: " << [e.localizedDescription UTF8String]);
  158. jassertfalse;
  159. }
  160. }
  161. #define JUCE_NSERROR_CHECK(X) { NSError* error = nil; X; logNSError (error); }
  162. //==============================================================================
  163. class iOSAudioIODevice : public AudioIODevice
  164. {
  165. public:
  166. iOSAudioIODevice (const String& deviceName)
  167. : AudioIODevice (deviceName, iOSAudioDeviceName)
  168. {
  169. sessionHolder->activeDevices.add (this);
  170. updateSampleRateAndAudioInput();
  171. }
  172. ~iOSAudioIODevice()
  173. {
  174. sessionHolder->activeDevices.removeFirstMatchingValue (this);
  175. close();
  176. }
  177. StringArray getOutputChannelNames() override
  178. {
  179. return { "Left", "Right" };
  180. }
  181. StringArray getInputChannelNames() override
  182. {
  183. if (audioInputIsAvailable)
  184. return { "Left", "Right" };
  185. return {};
  186. }
  187. static void setAudioSessionActive (bool enabled)
  188. {
  189. JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
  190. error: &error]);
  191. }
  192. static double trySampleRate (double rate)
  193. {
  194. auto session = [AVAudioSession sharedInstance];
  195. JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate
  196. error: &error]);
  197. return session.sampleRate;
  198. }
  199. Array<double> getAvailableSampleRates() override
  200. {
  201. Array<double> rates;
  202. // Important: the supported audio sample rates change on the iPhone 6S
  203. // depending on whether the headphones are plugged in or not!
  204. setAudioSessionActive (true);
  205. const double lowestRate = trySampleRate (4000);
  206. const double highestRate = trySampleRate (192000);
  207. for (double rate = lowestRate; rate <= highestRate; rate += 1000)
  208. {
  209. const double supportedRate = trySampleRate (rate);
  210. rates.addIfNotAlreadyThere (supportedRate);
  211. rate = jmax (rate, supportedRate);
  212. }
  213. for (auto r : rates)
  214. {
  215. ignoreUnused (r);
  216. JUCE_IOS_AUDIO_LOG ("available rate = " + String (r, 0) + "Hz");
  217. }
  218. return rates;
  219. }
  220. Array<int> getAvailableBufferSizes() override
  221. {
  222. Array<int> r;
  223. for (int i = 6; i < 12; ++i)
  224. r.add (1 << i);
  225. return r;
  226. }
  227. int getDefaultBufferSize() override
  228. {
  229. #if TARGET_IPHONE_SIMULATOR
  230. return 512;
  231. #else
  232. return 256;
  233. #endif
  234. }
  235. String open (const BigInteger& inputChannelsWanted,
  236. const BigInteger& outputChannelsWanted,
  237. double targetSampleRate, int bufferSize) override
  238. {
  239. close();
  240. lastError.clear();
  241. preferredBufferSize = bufferSize <= 0 ? getDefaultBufferSize()
  242. : bufferSize;
  243. // xxx set up channel mapping
  244. activeOutputChans = outputChannelsWanted;
  245. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  246. numOutputChannels = activeOutputChans.countNumberOfSetBits();
  247. monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
  248. activeInputChans = inputChannelsWanted;
  249. activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
  250. numInputChannels = activeInputChans.countNumberOfSetBits();
  251. monoInputChannelNumber = activeInputChans.findNextSetBit (0);
  252. setAudioSessionActive (true);
  253. // Set the session category & options:
  254. auto session = [AVAudioSession sharedInstance];
  255. const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);
  256. NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);
  257. NSUInteger options = AVAudioSessionCategoryOptionMixWithOthers; // Alternatively AVAudioSessionCategoryOptionDuckOthers
  258. if (useInputs) // These options are only valid for category = PlayAndRecord
  259. options |= (AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth);
  260. JUCE_NSERROR_CHECK ([session setCategory: category
  261. withOptions: options
  262. error: &error]);
  263. fixAudioRouteIfSetToReceiver();
  264. // Set the sample rate
  265. trySampleRate (targetSampleRate);
  266. updateSampleRateAndAudioInput();
  267. updateCurrentBufferSize();
  268. prepareFloatBuffers (actualBufferSize);
  269. isRunning = true;
  270. handleRouteChange ("Started AudioUnit");
  271. lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
  272. setAudioSessionActive (true);
  273. return lastError;
  274. }
  275. void close() override
  276. {
  277. if (isRunning)
  278. {
  279. isRunning = false;
  280. if (audioUnit != 0)
  281. {
  282. AudioOutputUnitStart (audioUnit);
  283. AudioComponentInstanceDispose (audioUnit);
  284. audioUnit = 0;
  285. }
  286. setAudioSessionActive (false);
  287. }
  288. }
  289. bool isOpen() override { return isRunning; }
  290. int getCurrentBufferSizeSamples() override { return actualBufferSize; }
  291. double getCurrentSampleRate() override { return sampleRate; }
  292. int getCurrentBitDepth() override { return 16; }
  293. BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
  294. BigInteger getActiveInputChannels() const override { return activeInputChans; }
  295. int getOutputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].outputLatency); }
  296. int getInputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].inputLatency); }
  297. void start (AudioIODeviceCallback* newCallback) override
  298. {
  299. if (isRunning && callback != newCallback)
  300. {
  301. if (newCallback != nullptr)
  302. newCallback->audioDeviceAboutToStart (this);
  303. const ScopedLock sl (callbackLock);
  304. callback = newCallback;
  305. }
  306. }
  307. void stop() override
  308. {
  309. if (isRunning)
  310. {
  311. AudioIODeviceCallback* lastCallback;
  312. {
  313. const ScopedLock sl (callbackLock);
  314. lastCallback = callback;
  315. callback = nullptr;
  316. }
  317. if (lastCallback != nullptr)
  318. lastCallback->audioDeviceStopped();
  319. }
  320. }
  321. bool isPlaying() override { return isRunning && callback != nullptr; }
  322. String getLastError() override { return lastError; }
  323. bool setAudioPreprocessingEnabled (bool enable) override
  324. {
  325. auto session = [AVAudioSession sharedInstance];
  326. NSString* mode = (enable ? AVAudioSessionModeMeasurement
  327. : AVAudioSessionModeDefault);
  328. JUCE_NSERROR_CHECK ([session setMode: mode
  329. error: &error]);
  330. return session.mode == mode;
  331. }
  332. void invokeAudioDeviceErrorCallback (const String& reason)
  333. {
  334. const ScopedLock sl (callbackLock);
  335. if (callback != nullptr)
  336. callback->audioDeviceError (reason);
  337. }
  338. void handleStatusChange (bool enabled, const char* reason)
  339. {
  340. const ScopedLock myScopedLock (callbackLock);
  341. JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);
  342. isRunning = enabled;
  343. setAudioSessionActive (enabled);
  344. if (enabled)
  345. AudioOutputUnitStart (audioUnit);
  346. else
  347. AudioOutputUnitStop (audioUnit);
  348. if (! enabled)
  349. invokeAudioDeviceErrorCallback (reason);
  350. }
  351. void handleRouteChange (const char* reason)
  352. {
  353. const ScopedLock myScopedLock (callbackLock);
  354. JUCE_IOS_AUDIO_LOG ("handleRouteChange: reason: " << reason);
  355. fixAudioRouteIfSetToReceiver();
  356. if (isRunning)
  357. {
  358. invokeAudioDeviceErrorCallback (reason);
  359. updateSampleRateAndAudioInput();
  360. updateCurrentBufferSize();
  361. createAudioUnit();
  362. setAudioSessionActive (true);
  363. if (audioUnit != 0)
  364. {
  365. UInt32 formatSize = sizeof (format);
  366. AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
  367. AudioOutputUnitStart (audioUnit);
  368. }
  369. if (callback != nullptr)
  370. callback->audioDeviceAboutToStart (this);
  371. }
  372. }
  373. private:
  374. //==============================================================================
  375. SharedResourcePointer<AudioSessionHolder> sessionHolder;
  376. CriticalSection callbackLock;
  377. NSTimeInterval sampleRate = 0;
  378. int numInputChannels = 2, numOutputChannels = 2;
  379. int preferredBufferSize = 0, actualBufferSize = 0;
  380. bool isRunning = false;
  381. String lastError;
  382. AudioStreamBasicDescription format;
  383. AudioUnit audioUnit {};
  384. bool audioInputIsAvailable = false;
  385. AudioIODeviceCallback* callback = nullptr;
  386. BigInteger activeOutputChans, activeInputChans;
  387. AudioSampleBuffer floatData;
  388. float* inputChannels[3];
  389. float* outputChannels[3];
  390. bool monoInputChannelNumber, monoOutputChannelNumber;
  391. void prepareFloatBuffers (int bufferSize)
  392. {
  393. if (numInputChannels + numOutputChannels > 0)
  394. {
  395. floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
  396. zeromem (inputChannels, sizeof (inputChannels));
  397. zeromem (outputChannels, sizeof (outputChannels));
  398. for (int i = 0; i < numInputChannels; ++i)
  399. inputChannels[i] = floatData.getWritePointer (i);
  400. for (int i = 0; i < numOutputChannels; ++i)
  401. outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
  402. }
  403. }
  404. //==============================================================================
  405. OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  406. const UInt32 numFrames, AudioBufferList* data)
  407. {
  408. OSStatus err = noErr;
  409. if (audioInputIsAvailable && numInputChannels > 0)
  410. err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
  411. const ScopedTryLock stl (callbackLock);
  412. if (stl.isLocked() && callback != nullptr)
  413. {
  414. if ((int) numFrames > floatData.getNumSamples())
  415. prepareFloatBuffers ((int) numFrames);
  416. if (audioInputIsAvailable && numInputChannels > 0)
  417. {
  418. short* shortData = (short*) data->mBuffers[0].mData;
  419. if (numInputChannels >= 2)
  420. {
  421. for (UInt32 i = 0; i < numFrames; ++i)
  422. {
  423. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  424. inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
  425. }
  426. }
  427. else
  428. {
  429. if (monoInputChannelNumber > 0)
  430. ++shortData;
  431. for (UInt32 i = 0; i < numFrames; ++i)
  432. {
  433. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  434. ++shortData;
  435. }
  436. }
  437. }
  438. else
  439. {
  440. for (int i = numInputChannels; --i >= 0;)
  441. zeromem (inputChannels[i], sizeof (float) * numFrames);
  442. }
  443. callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
  444. outputChannels, numOutputChannels, (int) numFrames);
  445. short* const shortData = (short*) data->mBuffers[0].mData;
  446. int n = 0;
  447. if (numOutputChannels >= 2)
  448. {
  449. for (UInt32 i = 0; i < numFrames; ++i)
  450. {
  451. shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
  452. shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
  453. }
  454. }
  455. else if (numOutputChannels == 1)
  456. {
  457. for (UInt32 i = 0; i < numFrames; ++i)
  458. {
  459. const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
  460. shortData [n++] = s;
  461. shortData [n++] = s;
  462. }
  463. }
  464. else
  465. {
  466. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  467. }
  468. }
  469. else
  470. {
  471. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  472. }
  473. return err;
  474. }
  475. void updateSampleRateAndAudioInput()
  476. {
  477. auto session = [AVAudioSession sharedInstance];
  478. sampleRate = session.sampleRate;
  479. audioInputIsAvailable = session.isInputAvailable;
  480. actualBufferSize = roundToInt (sampleRate * session.IOBufferDuration);
  481. JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate
  482. << "Hz, audioInputAvailable: " << (int) audioInputIsAvailable);
  483. }
  484. void updateCurrentBufferSize()
  485. {
  486. NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) ((preferredBufferSize + 1) / sampleRate) : 0.0;
  487. JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setPreferredIOBufferDuration: bufferDuration
  488. error: &error]);
  489. updateSampleRateAndAudioInput();
  490. }
  491. //==============================================================================
  492. static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  493. UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
  494. {
  495. return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
  496. }
  497. //==============================================================================
  498. void resetFormat (const int numChannels) noexcept
  499. {
  500. zerostruct (format);
  501. format.mFormatID = kAudioFormatLinearPCM;
  502. format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
  503. format.mBitsPerChannel = 8 * sizeof (short);
  504. format.mChannelsPerFrame = (UInt32) numChannels;
  505. format.mFramesPerPacket = 1;
  506. format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
  507. }
  508. bool createAudioUnit()
  509. {
  510. if (audioUnit != 0)
  511. {
  512. AudioComponentInstanceDispose (audioUnit);
  513. audioUnit = 0;
  514. }
  515. resetFormat (2);
  516. AudioComponentDescription desc;
  517. desc.componentType = kAudioUnitType_Output;
  518. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  519. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  520. desc.componentFlags = 0;
  521. desc.componentFlagsMask = 0;
  522. AudioComponent comp = AudioComponentFindNext (0, &desc);
  523. AudioComponentInstanceNew (comp, &audioUnit);
  524. if (audioUnit == 0)
  525. return false;
  526. if (numInputChannels > 0)
  527. {
  528. const UInt32 one = 1;
  529. AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
  530. }
  531. {
  532. AudioChannelLayout layout;
  533. layout.mChannelBitmap = 0;
  534. layout.mNumberChannelDescriptions = 0;
  535. layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
  536. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
  537. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
  538. }
  539. {
  540. AURenderCallbackStruct inputProc;
  541. inputProc.inputProc = processStatic;
  542. inputProc.inputProcRefCon = this;
  543. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
  544. }
  545. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
  546. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
  547. UInt32 framesPerSlice;
  548. UInt32 dataSize = sizeof (framesPerSlice);
  549. AudioUnitInitialize (audioUnit);
  550. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
  551. kAudioUnitScope_Global, 0, &actualBufferSize, sizeof (actualBufferSize));
  552. if (AudioUnitGetProperty (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
  553. kAudioUnitScope_Global, 0, &framesPerSlice, &dataSize) == noErr
  554. && dataSize == sizeof (framesPerSlice) && static_cast<int> (framesPerSlice) != actualBufferSize)
  555. {
  556. actualBufferSize = static_cast<int> (framesPerSlice);
  557. prepareFloatBuffers (actualBufferSize);
  558. }
  559. return true;
  560. }
  561. // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
  562. // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
  563. static void fixAudioRouteIfSetToReceiver()
  564. {
  565. auto session = [AVAudioSession sharedInstance];
  566. auto route = session.currentRoute;
  567. for (AVAudioSessionPortDescription* port in route.inputs)
  568. {
  569. ignoreUnused (port);
  570. JUCE_IOS_AUDIO_LOG ("AVAudioSession: input: " << [port.description UTF8String]);
  571. }
  572. for (AVAudioSessionPortDescription* port in route.outputs)
  573. {
  574. JUCE_IOS_AUDIO_LOG ("AVAudioSession: output: " << [port.description UTF8String]);
  575. if ([port.portName isEqualToString: @"Receiver"])
  576. {
  577. JUCE_NSERROR_CHECK ([session overrideOutputAudioPort: AVAudioSessionPortOverrideSpeaker
  578. error: &error]);
  579. setAudioSessionActive (true);
  580. }
  581. }
  582. }
  583. JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
  584. };
  585. //==============================================================================
  586. class iOSAudioIODeviceType : public AudioIODeviceType
  587. {
  588. public:
  589. iOSAudioIODeviceType() : AudioIODeviceType (iOSAudioDeviceName) {}
  590. void scanForDevices() {}
  591. StringArray getDeviceNames (bool /*wantInputNames*/) const { return StringArray (iOSAudioDeviceName); }
  592. int getDefaultDeviceIndex (bool /*forInput*/) const { return 0; }
  593. int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const { return d != nullptr ? 0 : -1; }
  594. bool hasSeparateInputsAndOutputs() const { return false; }
  595. AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
  596. {
  597. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  598. return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName : inputDeviceName);
  599. return nullptr;
  600. }
  601. private:
  602. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
  603. };
  604. //==============================================================================
  605. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
  606. {
  607. return new iOSAudioIODeviceType();
  608. }
  609. //==============================================================================
  610. AudioSessionHolder::AudioSessionHolder() { nativeSession = [[iOSAudioSessionNative alloc] init: this]; }
  611. AudioSessionHolder::~AudioSessionHolder() { [nativeSession release]; }
  612. void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
  613. {
  614. for (auto device: activeDevices)
  615. device->handleStatusChange (enabled, reason);
  616. }
  617. void AudioSessionHolder::handleRouteChange (const char* reason) const
  618. {
  619. struct RouteChangeMessage : public CallbackMessage
  620. {
  621. RouteChangeMessage (Array<iOSAudioIODevice*> devs, const char* r)
  622. : devices (devs), changeReason (r)
  623. {
  624. }
  625. void messageCallback() override
  626. {
  627. for (auto device: devices)
  628. device->handleRouteChange (changeReason);
  629. }
  630. Array<iOSAudioIODevice*> devices;
  631. const char* changeReason;
  632. };
  633. (new RouteChangeMessage (activeDevices, reason))->post();
  634. }
  635. #undef JUCE_NSERROR_CHECK