Audio plugin host https://kx.studio/carla
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

758 lines
26KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. class iOSAudioIODevice;
  18. //==================================================================================================
  19. struct AudioSessionHolder
  20. {
  21. AudioSessionHolder();
  22. ~AudioSessionHolder();
  23. void handleStatusChange (bool enabled, const char* reason) const;
  24. void handleRouteChange (const char* reason) const;
  25. Array<iOSAudioIODevice*> activeDevices;
  26. id nativeSession;
  27. };
  28. static const char* getRoutingChangeReason (AVAudioSessionRouteChangeReason reason) noexcept
  29. {
  30. switch (reason)
  31. {
  32. case AVAudioSessionRouteChangeReasonNewDeviceAvailable: return "New device available";
  33. case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: return "Old device unavailable";
  34. case AVAudioSessionRouteChangeReasonCategoryChange: return "Category change";
  35. case AVAudioSessionRouteChangeReasonOverride: return "Override";
  36. case AVAudioSessionRouteChangeReasonWakeFromSleep: return "Wake from sleep";
  37. case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: return "No suitable route for category";
  38. case AVAudioSessionRouteChangeReasonRouteConfigurationChange: return "Route configuration change";
  39. case AVAudioSessionRouteChangeReasonUnknown:
  40. default: return "Unknown";
  41. }
  42. }
  43. bool getNotificationValueForKey (NSNotification* notification, NSString* key, NSUInteger& value) noexcept
  44. {
  45. if (notification != nil)
  46. {
  47. if (NSDictionary* userInfo = [notification userInfo])
  48. {
  49. if (NSNumber* number = [userInfo objectForKey: key])
  50. {
  51. value = [number unsignedIntegerValue];
  52. return true;
  53. }
  54. }
  55. }
  56. jassertfalse;
  57. return false;
  58. }
  59. } // juce namespace
  60. //==================================================================================================
  61. @interface iOSAudioSessionNative : NSObject
  62. {
  63. @private
  64. juce::AudioSessionHolder* audioSessionHolder;
  65. };
  66. - (id) init: (juce::AudioSessionHolder*) holder;
  67. - (void) dealloc;
  68. - (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification;
  69. - (void) handleMediaServicesReset;
  70. - (void) handleMediaServicesLost;
  71. - (void) handleRouteChange: (NSNotification*) notification;
  72. @end
  73. @implementation iOSAudioSessionNative
  74. - (id) init: (juce::AudioSessionHolder*) holder
  75. {
  76. self = [super init];
  77. if (self != nil)
  78. {
  79. audioSessionHolder = holder;
  80. auto session = [AVAudioSession sharedInstance];
  81. auto centre = [NSNotificationCenter defaultCenter];
  82. [centre addObserver: self
  83. selector: @selector (audioSessionDidChangeInterruptionType:)
  84. name: AVAudioSessionInterruptionNotification
  85. object: session];
  86. [centre addObserver: self
  87. selector: @selector (handleMediaServicesLost)
  88. name: AVAudioSessionMediaServicesWereLostNotification
  89. object: session];
  90. [centre addObserver: self
  91. selector: @selector (handleMediaServicesReset)
  92. name: AVAudioSessionMediaServicesWereResetNotification
  93. object: session];
  94. [centre addObserver: self
  95. selector: @selector (handleRouteChange:)
  96. name: AVAudioSessionRouteChangeNotification
  97. object: session];
  98. }
  99. else
  100. {
  101. jassertfalse;
  102. }
  103. return self;
  104. }
  105. - (void) dealloc
  106. {
  107. [[NSNotificationCenter defaultCenter] removeObserver: self];
  108. [super dealloc];
  109. }
  110. - (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification
  111. {
  112. NSUInteger value;
  113. if (juce::getNotificationValueForKey (notification, AVAudioSessionInterruptionTypeKey, value))
  114. {
  115. switch ((AVAudioSessionInterruptionType) value)
  116. {
  117. case AVAudioSessionInterruptionTypeBegan:
  118. audioSessionHolder->handleStatusChange (false, "AVAudioSessionInterruptionTypeBegan");
  119. break;
  120. case AVAudioSessionInterruptionTypeEnded:
  121. audioSessionHolder->handleStatusChange (true, "AVAudioSessionInterruptionTypeEnded");
  122. break;
  123. // No default so the code doesn't compile if this enum is extended.
  124. }
  125. }
  126. }
  127. - (void) handleMediaServicesReset
  128. {
  129. audioSessionHolder->handleStatusChange (true, "AVAudioSessionMediaServicesWereResetNotification");
  130. }
  131. - (void) handleMediaServicesLost
  132. {
  133. audioSessionHolder->handleStatusChange (false, "AVAudioSessionMediaServicesWereLostNotification");
  134. }
  135. - (void) handleRouteChange: (NSNotification*) notification
  136. {
  137. NSUInteger value;
  138. if (juce::getNotificationValueForKey (notification, AVAudioSessionRouteChangeReasonKey, value))
  139. audioSessionHolder->handleRouteChange (juce::getRoutingChangeReason ((AVAudioSessionRouteChangeReason) value));
  140. }
  141. @end
  142. //==================================================================================================
  143. namespace juce {
  144. #ifndef JUCE_IOS_AUDIO_LOGGING
  145. #define JUCE_IOS_AUDIO_LOGGING 0
  146. #endif
  147. #if JUCE_IOS_AUDIO_LOGGING
  148. #define JUCE_IOS_AUDIO_LOG(x) DBG(x)
  149. #else
  150. #define JUCE_IOS_AUDIO_LOG(x)
  151. #endif
  152. static void logNSError (NSError* e)
  153. {
  154. if (e != nil)
  155. {
  156. JUCE_IOS_AUDIO_LOG ("iOS Audio error: " << [e.localizedDescription UTF8String]);
  157. jassertfalse;
  158. }
  159. }
  160. #define JUCE_NSERROR_CHECK(X) { NSError* error = nil; X; logNSError (error); }
  161. //==================================================================================================
  162. class iOSAudioIODevice : public AudioIODevice
  163. {
  164. public:
  165. iOSAudioIODevice (const String& deviceName) : AudioIODevice (deviceName, "Audio")
  166. {
  167. sessionHolder->activeDevices.add (this);
  168. updateSampleRateAndAudioInput();
  169. }
  170. ~iOSAudioIODevice()
  171. {
  172. sessionHolder->activeDevices.removeFirstMatchingValue (this);
  173. close();
  174. }
  175. StringArray getOutputChannelNames() override
  176. {
  177. return { "Left", "Right" };
  178. }
  179. StringArray getInputChannelNames() override
  180. {
  181. if (audioInputIsAvailable)
  182. return { "Left", "Right" };
  183. return {};
  184. }
  185. static void setAudioSessionActive (bool enabled)
  186. {
  187. JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
  188. error: &error]);
  189. }
  190. static double trySampleRate (double rate)
  191. {
  192. auto session = [AVAudioSession sharedInstance];
  193. JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate
  194. error: &error]);
  195. return session.sampleRate;
  196. }
  197. Array<double> getAvailableSampleRates() override
  198. {
  199. Array<double> rates;
  200. // Important: the supported audio sample rates change on the iPhone 6S
  201. // depending on whether the headphones are plugged in or not!
  202. setAudioSessionActive (true);
  203. const double lowestRate = trySampleRate (4000);
  204. const double highestRate = trySampleRate (192000);
  205. for (double rate = lowestRate; rate <= highestRate; rate += 1000)
  206. {
  207. const double supportedRate = trySampleRate (rate);
  208. rates.addIfNotAlreadyThere (supportedRate);
  209. rate = jmax (rate, supportedRate);
  210. }
  211. for (auto r : rates)
  212. {
  213. ignoreUnused (r);
  214. JUCE_IOS_AUDIO_LOG ("available rate = " + String (r, 0) + "Hz");
  215. }
  216. return rates;
  217. }
  218. Array<int> getAvailableBufferSizes() override
  219. {
  220. Array<int> r;
  221. for (int i = 6; i < 12; ++i)
  222. r.add (1 << i);
  223. return r;
  224. }
  225. int getDefaultBufferSize() override { return 256; }
  226. String open (const BigInteger& inputChannelsWanted,
  227. const BigInteger& outputChannelsWanted,
  228. double targetSampleRate, int bufferSize) override
  229. {
  230. close();
  231. lastError.clear();
  232. preferredBufferSize = bufferSize <= 0 ? getDefaultBufferSize()
  233. : bufferSize;
  234. // xxx set up channel mapping
  235. activeOutputChans = outputChannelsWanted;
  236. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  237. numOutputChannels = activeOutputChans.countNumberOfSetBits();
  238. monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
  239. activeInputChans = inputChannelsWanted;
  240. activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
  241. numInputChannels = activeInputChans.countNumberOfSetBits();
  242. monoInputChannelNumber = activeInputChans.findNextSetBit (0);
  243. setAudioSessionActive (true);
  244. // Set the session category & options:
  245. auto session = [AVAudioSession sharedInstance];
  246. const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);
  247. NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);
  248. NSUInteger options = AVAudioSessionCategoryOptionMixWithOthers; // Alternatively AVAudioSessionCategoryOptionDuckOthers
  249. if (useInputs) // These options are only valid for category = PlayAndRecord
  250. options |= (AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth);
  251. JUCE_NSERROR_CHECK ([session setCategory: category
  252. withOptions: options
  253. error: &error]);
  254. fixAudioRouteIfSetToReceiver();
  255. // Set the sample rate
  256. trySampleRate (targetSampleRate);
  257. updateSampleRateAndAudioInput();
  258. updateCurrentBufferSize();
  259. prepareFloatBuffers (actualBufferSize);
  260. isRunning = true;
  261. handleRouteChange ("Started AudioUnit");
  262. lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
  263. setAudioSessionActive (true);
  264. return lastError;
  265. }
  266. void close() override
  267. {
  268. if (isRunning)
  269. {
  270. isRunning = false;
  271. setAudioSessionActive (false);
  272. if (audioUnit != 0)
  273. {
  274. AudioComponentInstanceDispose (audioUnit);
  275. audioUnit = 0;
  276. }
  277. }
  278. }
  279. bool isOpen() override { return isRunning; }
  280. int getCurrentBufferSizeSamples() override { return actualBufferSize; }
  281. double getCurrentSampleRate() override { return sampleRate; }
  282. int getCurrentBitDepth() override { return 16; }
  283. BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
  284. BigInteger getActiveInputChannels() const override { return activeInputChans; }
  285. int getOutputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].outputLatency); }
  286. int getInputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].inputLatency); }
  287. void start (AudioIODeviceCallback* newCallback) override
  288. {
  289. if (isRunning && callback != newCallback)
  290. {
  291. if (newCallback != nullptr)
  292. newCallback->audioDeviceAboutToStart (this);
  293. const ScopedLock sl (callbackLock);
  294. callback = newCallback;
  295. }
  296. }
  297. void stop() override
  298. {
  299. if (isRunning)
  300. {
  301. AudioIODeviceCallback* lastCallback;
  302. {
  303. const ScopedLock sl (callbackLock);
  304. lastCallback = callback;
  305. callback = nullptr;
  306. }
  307. if (lastCallback != nullptr)
  308. lastCallback->audioDeviceStopped();
  309. }
  310. }
  311. bool isPlaying() override { return isRunning && callback != nullptr; }
  312. String getLastError() override { return lastError; }
  313. bool setAudioPreprocessingEnabled (bool enable) override
  314. {
  315. auto session = [AVAudioSession sharedInstance];
  316. NSString* mode = (enable ? AVAudioSessionModeMeasurement
  317. : AVAudioSessionModeDefault);
  318. JUCE_NSERROR_CHECK ([session setMode: mode
  319. error: &error]);
  320. return session.mode == mode;
  321. }
  322. void invokeAudioDeviceErrorCallback (const String& reason)
  323. {
  324. const ScopedLock sl (callbackLock);
  325. if (callback != nullptr)
  326. callback->audioDeviceError (reason);
  327. }
  328. void handleStatusChange (bool enabled, const char* reason)
  329. {
  330. JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);
  331. isRunning = enabled;
  332. setAudioSessionActive (enabled);
  333. if (enabled)
  334. AudioOutputUnitStart (audioUnit);
  335. else
  336. AudioOutputUnitStop (audioUnit);
  337. if (! enabled)
  338. invokeAudioDeviceErrorCallback (reason);
  339. }
  340. void handleRouteChange (const char* reason)
  341. {
  342. JUCE_IOS_AUDIO_LOG ("handleRouteChange: reason: " << reason);
  343. fixAudioRouteIfSetToReceiver();
  344. if (isRunning)
  345. {
  346. invokeAudioDeviceErrorCallback (reason);
  347. updateSampleRateAndAudioInput();
  348. updateCurrentBufferSize();
  349. createAudioUnit();
  350. setAudioSessionActive (true);
  351. if (audioUnit != 0)
  352. {
  353. UInt32 formatSize = sizeof (format);
  354. AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
  355. AudioOutputUnitStart (audioUnit);
  356. }
  357. if (callback)
  358. callback->audioDeviceAboutToStart (this);
  359. }
  360. }
  361. private:
  362. //==================================================================================================
  363. SharedResourcePointer<AudioSessionHolder> sessionHolder;
  364. CriticalSection callbackLock;
  365. NSTimeInterval sampleRate = 0;
  366. int numInputChannels = 2, numOutputChannels = 2;
  367. int preferredBufferSize = 0, actualBufferSize = 0;
  368. bool isRunning = false;
  369. String lastError;
  370. AudioStreamBasicDescription format;
  371. AudioUnit audioUnit {};
  372. bool audioInputIsAvailable = false;
  373. AudioIODeviceCallback* callback = nullptr;
  374. BigInteger activeOutputChans, activeInputChans;
  375. AudioSampleBuffer floatData;
  376. float* inputChannels[3];
  377. float* outputChannels[3];
  378. bool monoInputChannelNumber, monoOutputChannelNumber;
  379. void prepareFloatBuffers (int bufferSize)
  380. {
  381. if (numInputChannels + numOutputChannels > 0)
  382. {
  383. floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
  384. zeromem (inputChannels, sizeof (inputChannels));
  385. zeromem (outputChannels, sizeof (outputChannels));
  386. for (int i = 0; i < numInputChannels; ++i)
  387. inputChannels[i] = floatData.getWritePointer (i);
  388. for (int i = 0; i < numOutputChannels; ++i)
  389. outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
  390. }
  391. }
  392. //==================================================================================================
  393. OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  394. const UInt32 numFrames, AudioBufferList* data)
  395. {
  396. OSStatus err = noErr;
  397. if (audioInputIsAvailable && numInputChannels > 0)
  398. err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
  399. const ScopedLock sl (callbackLock);
  400. if (callback != nullptr)
  401. {
  402. if ((int) numFrames > floatData.getNumSamples())
  403. prepareFloatBuffers ((int) numFrames);
  404. if (audioInputIsAvailable && numInputChannels > 0)
  405. {
  406. short* shortData = (short*) data->mBuffers[0].mData;
  407. if (numInputChannels >= 2)
  408. {
  409. for (UInt32 i = 0; i < numFrames; ++i)
  410. {
  411. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  412. inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
  413. }
  414. }
  415. else
  416. {
  417. if (monoInputChannelNumber > 0)
  418. ++shortData;
  419. for (UInt32 i = 0; i < numFrames; ++i)
  420. {
  421. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  422. ++shortData;
  423. }
  424. }
  425. }
  426. else
  427. {
  428. for (int i = numInputChannels; --i >= 0;)
  429. zeromem (inputChannels[i], sizeof (float) * numFrames);
  430. }
  431. callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
  432. outputChannels, numOutputChannels, (int) numFrames);
  433. short* const shortData = (short*) data->mBuffers[0].mData;
  434. int n = 0;
  435. if (numOutputChannels >= 2)
  436. {
  437. for (UInt32 i = 0; i < numFrames; ++i)
  438. {
  439. shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
  440. shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
  441. }
  442. }
  443. else if (numOutputChannels == 1)
  444. {
  445. for (UInt32 i = 0; i < numFrames; ++i)
  446. {
  447. const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
  448. shortData [n++] = s;
  449. shortData [n++] = s;
  450. }
  451. }
  452. else
  453. {
  454. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  455. }
  456. }
  457. else
  458. {
  459. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  460. }
  461. return err;
  462. }
  463. void updateSampleRateAndAudioInput()
  464. {
  465. auto session = [AVAudioSession sharedInstance];
  466. sampleRate = session.sampleRate;
  467. audioInputIsAvailable = session.isInputAvailable;
  468. JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate << "Hz, audioInputAvailable: " << (int) audioInputIsAvailable);
  469. }
  470. void updateCurrentBufferSize()
  471. {
  472. auto session = [AVAudioSession sharedInstance];
  473. NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) (preferredBufferSize / sampleRate) : 0.0;
  474. JUCE_NSERROR_CHECK ([session setPreferredIOBufferDuration: bufferDuration
  475. error: &error]);
  476. bufferDuration = session.IOBufferDuration;
  477. actualBufferSize = roundToInt (sampleRate * bufferDuration);
  478. }
  479. //==================================================================================================
  480. static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  481. UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
  482. {
  483. return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
  484. }
  485. //==================================================================================================
  486. void resetFormat (const int numChannels) noexcept
  487. {
  488. zerostruct (format);
  489. format.mFormatID = kAudioFormatLinearPCM;
  490. format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
  491. format.mBitsPerChannel = 8 * sizeof (short);
  492. format.mChannelsPerFrame = (UInt32) numChannels;
  493. format.mFramesPerPacket = 1;
  494. format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
  495. }
  496. bool createAudioUnit()
  497. {
  498. if (audioUnit != 0)
  499. {
  500. AudioComponentInstanceDispose (audioUnit);
  501. audioUnit = 0;
  502. }
  503. resetFormat (2);
  504. AudioComponentDescription desc;
  505. desc.componentType = kAudioUnitType_Output;
  506. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  507. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  508. desc.componentFlags = 0;
  509. desc.componentFlagsMask = 0;
  510. AudioComponent comp = AudioComponentFindNext (0, &desc);
  511. AudioComponentInstanceNew (comp, &audioUnit);
  512. if (audioUnit == 0)
  513. return false;
  514. if (numInputChannels > 0)
  515. {
  516. const UInt32 one = 1;
  517. AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
  518. }
  519. {
  520. AudioChannelLayout layout;
  521. layout.mChannelBitmap = 0;
  522. layout.mNumberChannelDescriptions = 0;
  523. layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
  524. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
  525. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
  526. }
  527. {
  528. AURenderCallbackStruct inputProc;
  529. inputProc.inputProc = processStatic;
  530. inputProc.inputProcRefCon = this;
  531. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
  532. }
  533. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
  534. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
  535. AudioUnitInitialize (audioUnit);
  536. return true;
  537. }
  538. // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
  539. // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
  540. static void fixAudioRouteIfSetToReceiver()
  541. {
  542. auto session = [AVAudioSession sharedInstance];
  543. auto route = session.currentRoute;
  544. for (AVAudioSessionPortDescription* port in route.inputs)
  545. {
  546. ignoreUnused (port);
  547. JUCE_IOS_AUDIO_LOG ("AVAudioSession: input: " << [port.description UTF8String]);
  548. }
  549. for (AVAudioSessionPortDescription* port in route.outputs)
  550. {
  551. JUCE_IOS_AUDIO_LOG ("AVAudioSession: output: " << [port.description UTF8String]);
  552. if ([port.portName isEqualToString: @"Receiver"])
  553. {
  554. JUCE_NSERROR_CHECK ([session overrideOutputAudioPort: AVAudioSessionPortOverrideSpeaker
  555. error: &error]);
  556. setAudioSessionActive (true);
  557. }
  558. }
  559. }
  560. JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
  561. };
  562. //==============================================================================
  563. class iOSAudioIODeviceType : public AudioIODeviceType
  564. {
  565. public:
  566. iOSAudioIODeviceType() : AudioIODeviceType ("iOS Audio") {}
  567. void scanForDevices() {}
  568. StringArray getDeviceNames (bool /*wantInputNames*/) const { return StringArray ("iOS Audio"); }
  569. int getDefaultDeviceIndex (bool /*forInput*/) const { return 0; }
  570. int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const { return d != nullptr ? 0 : -1; }
  571. bool hasSeparateInputsAndOutputs() const { return false; }
  572. AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
  573. {
  574. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  575. return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName : inputDeviceName);
  576. return nullptr;
  577. }
  578. private:
  579. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
  580. };
  581. //==============================================================================
  582. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
  583. {
  584. return new iOSAudioIODeviceType();
  585. }
  586. //==================================================================================================
  587. AudioSessionHolder::AudioSessionHolder() { nativeSession = [[iOSAudioSessionNative alloc] init: this]; }
  588. AudioSessionHolder::~AudioSessionHolder() { [nativeSession release]; }
  589. void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
  590. {
  591. for (auto device: activeDevices)
  592. device->handleStatusChange (enabled, reason);
  593. }
  594. void AudioSessionHolder::handleRouteChange (const char* reason) const
  595. {
  596. for (auto device: activeDevices)
  597. device->handleRouteChange (reason);
  598. }
  599. #undef JUCE_NSERROR_CHECK