The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

786 lines
27KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
// Forward declaration so that AudioSessionHolder can keep a list of active devices.
class iOSAudioIODevice;
// The single name used for both the device type and the one device it exposes.
static const char* const iOSAudioDeviceName = "iOS Audio";
  19. //==============================================================================
/** Shared singleton (held via SharedResourcePointer) that owns the native
    AVAudioSession observer and fans its notifications out to every active device.
*/
struct AudioSessionHolder
{
    AudioSessionHolder();    // creates the native iOSAudioSessionNative observer
    ~AudioSessionHolder();   // releases it

    // Forwards an audio-session interruption / media-services event to all devices.
    void handleStatusChange (bool enabled, const char* reason) const;
    // Forwards an audio route change (e.g. headphones plugged in) to all devices.
    void handleRouteChange (const char* reason) const;

    // Devices add themselves in their constructor and remove themselves in their destructor.
    Array<iOSAudioIODevice*> activeDevices;

    id nativeSession;   // iOSAudioSessionNative*, owned (manual retain/release in this file)
};
  29. static const char* getRoutingChangeReason (AVAudioSessionRouteChangeReason reason) noexcept
  30. {
  31. switch (reason)
  32. {
  33. case AVAudioSessionRouteChangeReasonNewDeviceAvailable: return "New device available";
  34. case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: return "Old device unavailable";
  35. case AVAudioSessionRouteChangeReasonCategoryChange: return "Category change";
  36. case AVAudioSessionRouteChangeReasonOverride: return "Override";
  37. case AVAudioSessionRouteChangeReasonWakeFromSleep: return "Wake from sleep";
  38. case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: return "No suitable route for category";
  39. case AVAudioSessionRouteChangeReasonRouteConfigurationChange: return "Route configuration change";
  40. case AVAudioSessionRouteChangeReasonUnknown:
  41. default: return "Unknown";
  42. }
  43. }
  44. bool getNotificationValueForKey (NSNotification* notification, NSString* key, NSUInteger& value) noexcept
  45. {
  46. if (notification != nil)
  47. {
  48. if (NSDictionary* userInfo = [notification userInfo])
  49. {
  50. if (NSNumber* number = [userInfo objectForKey: key])
  51. {
  52. value = [number unsignedIntegerValue];
  53. return true;
  54. }
  55. }
  56. }
  57. jassertfalse;
  58. return false;
  59. }
  60. } // juce namespace
  61. //==============================================================================
// Objective-C observer object which listens for AVAudioSession notifications
// and forwards them to the C++ AudioSessionHolder.
@interface iOSAudioSessionNative : NSObject
{
@private
    // Non-owning back-pointer to the holder that fans events out to the devices.
    juce::AudioSessionHolder* audioSessionHolder;
};

- (id) init: (juce::AudioSessionHolder*) holder;
- (void) dealloc;

// AVAudioSession notification handlers:
- (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification;
- (void) handleMediaServicesReset;
- (void) handleMediaServicesLost;
- (void) handleRouteChange: (NSNotification*) notification;
@end
  74. @implementation iOSAudioSessionNative
  75. - (id) init: (juce::AudioSessionHolder*) holder
  76. {
  77. self = [super init];
  78. if (self != nil)
  79. {
  80. audioSessionHolder = holder;
  81. auto session = [AVAudioSession sharedInstance];
  82. auto centre = [NSNotificationCenter defaultCenter];
  83. [centre addObserver: self
  84. selector: @selector (audioSessionDidChangeInterruptionType:)
  85. name: AVAudioSessionInterruptionNotification
  86. object: session];
  87. [centre addObserver: self
  88. selector: @selector (handleMediaServicesLost)
  89. name: AVAudioSessionMediaServicesWereLostNotification
  90. object: session];
  91. [centre addObserver: self
  92. selector: @selector (handleMediaServicesReset)
  93. name: AVAudioSessionMediaServicesWereResetNotification
  94. object: session];
  95. [centre addObserver: self
  96. selector: @selector (handleRouteChange:)
  97. name: AVAudioSessionRouteChangeNotification
  98. object: session];
  99. }
  100. else
  101. {
  102. jassertfalse;
  103. }
  104. return self;
  105. }
  106. - (void) dealloc
  107. {
  108. [[NSNotificationCenter defaultCenter] removeObserver: self];
  109. [super dealloc];
  110. }
  111. - (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification
  112. {
  113. NSUInteger value;
  114. if (juce::getNotificationValueForKey (notification, AVAudioSessionInterruptionTypeKey, value))
  115. {
  116. switch ((AVAudioSessionInterruptionType) value)
  117. {
  118. case AVAudioSessionInterruptionTypeBegan:
  119. audioSessionHolder->handleStatusChange (false, "AVAudioSessionInterruptionTypeBegan");
  120. break;
  121. case AVAudioSessionInterruptionTypeEnded:
  122. audioSessionHolder->handleStatusChange (true, "AVAudioSessionInterruptionTypeEnded");
  123. break;
  124. // No default so the code doesn't compile if this enum is extended.
  125. }
  126. }
  127. }
  128. - (void) handleMediaServicesReset
  129. {
  130. audioSessionHolder->handleStatusChange (true, "AVAudioSessionMediaServicesWereResetNotification");
  131. }
  132. - (void) handleMediaServicesLost
  133. {
  134. audioSessionHolder->handleStatusChange (false, "AVAudioSessionMediaServicesWereLostNotification");
  135. }
  136. - (void) handleRouteChange: (NSNotification*) notification
  137. {
  138. NSUInteger value;
  139. if (juce::getNotificationValueForKey (notification, AVAudioSessionRouteChangeReasonKey, value))
  140. audioSessionHolder->handleRouteChange (juce::getRoutingChangeReason ((AVAudioSessionRouteChangeReason) value));
  141. }
  142. @end
  143. //==============================================================================
namespace juce {

// Define JUCE_IOS_AUDIO_LOGGING=1 (e.g. in the project settings) to get verbose
// DBG() logging of session, route and buffer-size changes.
#ifndef JUCE_IOS_AUDIO_LOGGING
#define JUCE_IOS_AUDIO_LOGGING 0
#endif

#if JUCE_IOS_AUDIO_LOGGING
#define JUCE_IOS_AUDIO_LOG(x) DBG(x)
#else
#define JUCE_IOS_AUDIO_LOG(x)
#endif
  153. static void logNSError (NSError* e)
  154. {
  155. if (e != nil)
  156. {
  157. JUCE_IOS_AUDIO_LOG ("iOS Audio error: " << [e.localizedDescription UTF8String]);
  158. jassertfalse;
  159. }
  160. }
// Evaluates an expression which writes into an (NSError**) named 'error', then
// logs/asserts if it reported a failure.
#define JUCE_NSERROR_CHECK(X) { NSError* error = nil; X; logNSError (error); }
  162. //==============================================================================
  163. class iOSAudioIODevice : public AudioIODevice
  164. {
  165. public:
  166. iOSAudioIODevice (const String& deviceName)
  167. : AudioIODevice (deviceName, iOSAudioDeviceName)
  168. {
  169. sessionHolder->activeDevices.add (this);
  170. updateSampleRateAndAudioInput();
  171. }
  172. ~iOSAudioIODevice()
  173. {
  174. sessionHolder->activeDevices.removeFirstMatchingValue (this);
  175. close();
  176. }
  177. StringArray getOutputChannelNames() override
  178. {
  179. return { "Left", "Right" };
  180. }
  181. StringArray getInputChannelNames() override
  182. {
  183. if (audioInputIsAvailable)
  184. return { "Left", "Right" };
  185. return {};
  186. }
  187. static void setAudioSessionActive (bool enabled)
  188. {
  189. JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
  190. error: &error]);
  191. }
  192. static double trySampleRate (double rate)
  193. {
  194. auto session = [AVAudioSession sharedInstance];
  195. JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate
  196. error: &error]);
  197. return session.sampleRate;
  198. }
  199. Array<double> getAvailableSampleRates() override
  200. {
  201. Array<double> rates;
  202. // Important: the supported audio sample rates change on the iPhone 6S
  203. // depending on whether the headphones are plugged in or not!
  204. setAudioSessionActive (true);
  205. const double lowestRate = trySampleRate (4000);
  206. const double highestRate = trySampleRate (192000);
  207. for (double rate = lowestRate; rate <= highestRate; rate += 1000)
  208. {
  209. const double supportedRate = trySampleRate (rate);
  210. rates.addIfNotAlreadyThere (supportedRate);
  211. rate = jmax (rate, supportedRate);
  212. }
  213. for (auto r : rates)
  214. {
  215. ignoreUnused (r);
  216. JUCE_IOS_AUDIO_LOG ("available rate = " + String (r, 0) + "Hz");
  217. }
  218. return rates;
  219. }
  220. Array<int> getAvailableBufferSizes() override
  221. {
  222. Array<int> r;
  223. for (int i = 6; i < 12; ++i)
  224. r.add (1 << i);
  225. return r;
  226. }
  227. int getDefaultBufferSize() override
  228. {
  229. #if TARGET_IPHONE_SIMULATOR
  230. return 512;
  231. #else
  232. return 256;
  233. #endif
  234. }
  235. String open (const BigInteger& inputChannelsWanted,
  236. const BigInteger& outputChannelsWanted,
  237. double targetSampleRate, int bufferSize) override
  238. {
  239. close();
  240. lastError.clear();
  241. preferredBufferSize = bufferSize <= 0 ? getDefaultBufferSize()
  242. : bufferSize;
  243. // xxx set up channel mapping
  244. activeOutputChans = outputChannelsWanted;
  245. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  246. numOutputChannels = activeOutputChans.countNumberOfSetBits();
  247. monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
  248. activeInputChans = inputChannelsWanted;
  249. activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
  250. numInputChannels = activeInputChans.countNumberOfSetBits();
  251. monoInputChannelNumber = activeInputChans.findNextSetBit (0);
  252. setAudioSessionActive (true);
  253. // Set the session category & options:
  254. auto session = [AVAudioSession sharedInstance];
  255. const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);
  256. NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);
  257. NSUInteger options = AVAudioSessionCategoryOptionMixWithOthers; // Alternatively AVAudioSessionCategoryOptionDuckOthers
  258. if (useInputs) // These options are only valid for category = PlayAndRecord
  259. options |= (AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth);
  260. JUCE_NSERROR_CHECK ([session setCategory: category
  261. withOptions: options
  262. error: &error]);
  263. fixAudioRouteIfSetToReceiver();
  264. // Set the sample rate
  265. trySampleRate (targetSampleRate);
  266. updateSampleRateAndAudioInput();
  267. updateCurrentBufferSize();
  268. prepareFloatBuffers (actualBufferSize);
  269. isRunning = true;
  270. handleRouteChange ("Started AudioUnit");
  271. lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
  272. setAudioSessionActive (true);
  273. return lastError;
  274. }
  275. void close() override
  276. {
  277. if (isRunning)
  278. {
  279. isRunning = false;
  280. if (audioUnit != 0)
  281. {
  282. AudioOutputUnitStart (audioUnit);
  283. AudioComponentInstanceDispose (audioUnit);
  284. audioUnit = 0;
  285. }
  286. setAudioSessionActive (false);
  287. }
  288. }
  289. bool isOpen() override { return isRunning; }
  290. int getCurrentBufferSizeSamples() override { return actualBufferSize; }
  291. double getCurrentSampleRate() override { return sampleRate; }
  292. int getCurrentBitDepth() override { return 16; }
  293. BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
  294. BigInteger getActiveInputChannels() const override { return activeInputChans; }
  295. int getOutputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].outputLatency); }
  296. int getInputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].inputLatency); }
  297. void start (AudioIODeviceCallback* newCallback) override
  298. {
  299. if (isRunning && callback != newCallback)
  300. {
  301. if (newCallback != nullptr)
  302. newCallback->audioDeviceAboutToStart (this);
  303. const ScopedLock sl (callbackLock);
  304. callback = newCallback;
  305. }
  306. }
  307. void stop() override
  308. {
  309. if (isRunning)
  310. {
  311. AudioIODeviceCallback* lastCallback;
  312. {
  313. const ScopedLock sl (callbackLock);
  314. lastCallback = callback;
  315. callback = nullptr;
  316. }
  317. if (lastCallback != nullptr)
  318. lastCallback->audioDeviceStopped();
  319. }
  320. }
  321. bool isPlaying() override { return isRunning && callback != nullptr; }
  322. String getLastError() override { return lastError; }
  323. bool setAudioPreprocessingEnabled (bool enable) override
  324. {
  325. auto session = [AVAudioSession sharedInstance];
  326. NSString* mode = (enable ? AVAudioSessionModeMeasurement
  327. : AVAudioSessionModeDefault);
  328. JUCE_NSERROR_CHECK ([session setMode: mode
  329. error: &error]);
  330. return session.mode == mode;
  331. }
  332. void invokeAudioDeviceErrorCallback (const String& reason)
  333. {
  334. const ScopedLock sl (callbackLock);
  335. if (callback != nullptr)
  336. callback->audioDeviceError (reason);
  337. }
  338. void handleStatusChange (bool enabled, const char* reason)
  339. {
  340. JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);
  341. isRunning = enabled;
  342. setAudioSessionActive (enabled);
  343. if (enabled)
  344. AudioOutputUnitStart (audioUnit);
  345. else
  346. AudioOutputUnitStop (audioUnit);
  347. if (! enabled)
  348. invokeAudioDeviceErrorCallback (reason);
  349. }
  350. void handleRouteChange (const char* reason)
  351. {
  352. JUCE_IOS_AUDIO_LOG ("handleRouteChange: reason: " << reason);
  353. fixAudioRouteIfSetToReceiver();
  354. if (isRunning)
  355. {
  356. invokeAudioDeviceErrorCallback (reason);
  357. updateSampleRateAndAudioInput();
  358. updateCurrentBufferSize();
  359. createAudioUnit();
  360. setAudioSessionActive (true);
  361. if (audioUnit != 0)
  362. {
  363. UInt32 formatSize = sizeof (format);
  364. AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
  365. AudioOutputUnitStart (audioUnit);
  366. }
  367. if (callback != nullptr)
  368. callback->audioDeviceAboutToStart (this);
  369. }
  370. }
  371. private:
  372. //==============================================================================
  373. SharedResourcePointer<AudioSessionHolder> sessionHolder;
  374. CriticalSection callbackLock;
  375. NSTimeInterval sampleRate = 0;
  376. int numInputChannels = 2, numOutputChannels = 2;
  377. int preferredBufferSize = 0, actualBufferSize = 0;
  378. bool isRunning = false;
  379. String lastError;
  380. AudioStreamBasicDescription format;
  381. AudioUnit audioUnit {};
  382. bool audioInputIsAvailable = false;
  383. AudioIODeviceCallback* callback = nullptr;
  384. BigInteger activeOutputChans, activeInputChans;
  385. AudioSampleBuffer floatData;
  386. float* inputChannels[3];
  387. float* outputChannels[3];
  388. bool monoInputChannelNumber, monoOutputChannelNumber;
  389. void prepareFloatBuffers (int bufferSize)
  390. {
  391. if (numInputChannels + numOutputChannels > 0)
  392. {
  393. floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
  394. zeromem (inputChannels, sizeof (inputChannels));
  395. zeromem (outputChannels, sizeof (outputChannels));
  396. for (int i = 0; i < numInputChannels; ++i)
  397. inputChannels[i] = floatData.getWritePointer (i);
  398. for (int i = 0; i < numOutputChannels; ++i)
  399. outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
  400. }
  401. }
  402. //==============================================================================
  403. OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  404. const UInt32 numFrames, AudioBufferList* data)
  405. {
  406. OSStatus err = noErr;
  407. if (audioInputIsAvailable && numInputChannels > 0)
  408. err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
  409. const ScopedLock sl (callbackLock);
  410. if (callback != nullptr)
  411. {
  412. if ((int) numFrames > floatData.getNumSamples())
  413. prepareFloatBuffers ((int) numFrames);
  414. if (audioInputIsAvailable && numInputChannels > 0)
  415. {
  416. short* shortData = (short*) data->mBuffers[0].mData;
  417. if (numInputChannels >= 2)
  418. {
  419. for (UInt32 i = 0; i < numFrames; ++i)
  420. {
  421. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  422. inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
  423. }
  424. }
  425. else
  426. {
  427. if (monoInputChannelNumber > 0)
  428. ++shortData;
  429. for (UInt32 i = 0; i < numFrames; ++i)
  430. {
  431. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  432. ++shortData;
  433. }
  434. }
  435. }
  436. else
  437. {
  438. for (int i = numInputChannels; --i >= 0;)
  439. zeromem (inputChannels[i], sizeof (float) * numFrames);
  440. }
  441. callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
  442. outputChannels, numOutputChannels, (int) numFrames);
  443. short* const shortData = (short*) data->mBuffers[0].mData;
  444. int n = 0;
  445. if (numOutputChannels >= 2)
  446. {
  447. for (UInt32 i = 0; i < numFrames; ++i)
  448. {
  449. shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
  450. shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
  451. }
  452. }
  453. else if (numOutputChannels == 1)
  454. {
  455. for (UInt32 i = 0; i < numFrames; ++i)
  456. {
  457. const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
  458. shortData [n++] = s;
  459. shortData [n++] = s;
  460. }
  461. }
  462. else
  463. {
  464. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  465. }
  466. }
  467. else
  468. {
  469. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  470. }
  471. return err;
  472. }
  473. void updateSampleRateAndAudioInput()
  474. {
  475. auto session = [AVAudioSession sharedInstance];
  476. sampleRate = session.sampleRate;
  477. audioInputIsAvailable = session.isInputAvailable;
  478. actualBufferSize = roundToInt (sampleRate * session.IOBufferDuration);
  479. JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate
  480. << "Hz, audioInputAvailable: " << (int) audioInputIsAvailable);
  481. }
  482. void updateCurrentBufferSize()
  483. {
  484. NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) ((preferredBufferSize + 1) / sampleRate) : 0.0;
  485. JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setPreferredIOBufferDuration: bufferDuration
  486. error: &error]);
  487. updateSampleRateAndAudioInput();
  488. }
  489. //==============================================================================
  490. static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  491. UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
  492. {
  493. return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
  494. }
  495. //==============================================================================
  496. void resetFormat (const int numChannels) noexcept
  497. {
  498. zerostruct (format);
  499. format.mFormatID = kAudioFormatLinearPCM;
  500. format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
  501. format.mBitsPerChannel = 8 * sizeof (short);
  502. format.mChannelsPerFrame = (UInt32) numChannels;
  503. format.mFramesPerPacket = 1;
  504. format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
  505. }
  506. bool createAudioUnit()
  507. {
  508. if (audioUnit != 0)
  509. {
  510. AudioComponentInstanceDispose (audioUnit);
  511. audioUnit = 0;
  512. }
  513. resetFormat (2);
  514. AudioComponentDescription desc;
  515. desc.componentType = kAudioUnitType_Output;
  516. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  517. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  518. desc.componentFlags = 0;
  519. desc.componentFlagsMask = 0;
  520. AudioComponent comp = AudioComponentFindNext (0, &desc);
  521. AudioComponentInstanceNew (comp, &audioUnit);
  522. if (audioUnit == 0)
  523. return false;
  524. if (numInputChannels > 0)
  525. {
  526. const UInt32 one = 1;
  527. AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
  528. }
  529. {
  530. AudioChannelLayout layout;
  531. layout.mChannelBitmap = 0;
  532. layout.mNumberChannelDescriptions = 0;
  533. layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
  534. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
  535. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
  536. }
  537. {
  538. AURenderCallbackStruct inputProc;
  539. inputProc.inputProc = processStatic;
  540. inputProc.inputProcRefCon = this;
  541. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
  542. }
  543. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
  544. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
  545. UInt32 framesPerSlice;
  546. UInt32 dataSize = sizeof (framesPerSlice);
  547. AudioUnitInitialize (audioUnit);
  548. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
  549. kAudioUnitScope_Global, 0, &actualBufferSize, sizeof (actualBufferSize));
  550. if (AudioUnitGetProperty (audioUnit, kAudioUnitProperty_MaximumFramesPerSlice,
  551. kAudioUnitScope_Global, 0, &framesPerSlice, &dataSize) == noErr
  552. && dataSize == sizeof (framesPerSlice) && static_cast<int> (framesPerSlice) != actualBufferSize)
  553. {
  554. actualBufferSize = static_cast<int> (framesPerSlice);
  555. prepareFloatBuffers (actualBufferSize);
  556. }
  557. return true;
  558. }
  559. // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
  560. // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
  561. static void fixAudioRouteIfSetToReceiver()
  562. {
  563. auto session = [AVAudioSession sharedInstance];
  564. auto route = session.currentRoute;
  565. for (AVAudioSessionPortDescription* port in route.inputs)
  566. {
  567. ignoreUnused (port);
  568. JUCE_IOS_AUDIO_LOG ("AVAudioSession: input: " << [port.description UTF8String]);
  569. }
  570. for (AVAudioSessionPortDescription* port in route.outputs)
  571. {
  572. JUCE_IOS_AUDIO_LOG ("AVAudioSession: output: " << [port.description UTF8String]);
  573. if ([port.portName isEqualToString: @"Receiver"])
  574. {
  575. JUCE_NSERROR_CHECK ([session overrideOutputAudioPort: AVAudioSessionPortOverrideSpeaker
  576. error: &error]);
  577. setAudioSessionActive (true);
  578. }
  579. }
  580. }
  581. JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
  582. };
  583. //==============================================================================
  584. class iOSAudioIODeviceType : public AudioIODeviceType
  585. {
  586. public:
  587. iOSAudioIODeviceType() : AudioIODeviceType (iOSAudioDeviceName) {}
  588. void scanForDevices() {}
  589. StringArray getDeviceNames (bool /*wantInputNames*/) const { return StringArray (iOSAudioDeviceName); }
  590. int getDefaultDeviceIndex (bool /*forInput*/) const { return 0; }
  591. int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const { return d != nullptr ? 0 : -1; }
  592. bool hasSeparateInputsAndOutputs() const { return false; }
  593. AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
  594. {
  595. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  596. return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName : inputDeviceName);
  597. return nullptr;
  598. }
  599. private:
  600. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
  601. };
  602. //==============================================================================
  603. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
  604. {
  605. return new iOSAudioIODeviceType();
  606. }
  607. //==============================================================================
  608. AudioSessionHolder::AudioSessionHolder() { nativeSession = [[iOSAudioSessionNative alloc] init: this]; }
  609. AudioSessionHolder::~AudioSessionHolder() { [nativeSession release]; }
  610. void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
  611. {
  612. for (auto device: activeDevices)
  613. device->handleStatusChange (enabled, reason);
  614. }
  615. void AudioSessionHolder::handleRouteChange (const char* reason) const
  616. {
  617. for (auto device: activeDevices)
  618. device->handleRouteChange (reason);
  619. }
  620. #undef JUCE_NSERROR_CHECK