/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2015 - ROLI Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
class iOSAudioIODevice;

static const char* const iOSAudioDeviceName = "iOS Audio";

//==============================================================================
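// Bridges AVAudioSession notifications (delivered to the Objective-C listener defined
// further down) to every active iOSAudioIODevice instance registered in activeDevices.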
struct AudioSessionHolder
{
    AudioSessionHolder();
    ~AudioSessionHolder();

    void handleStatusChange (bool enabled, const char* reason) const;
    void handleRouteChange (const char* reason) const;

    Array<iOSAudioIODevice*> activeDevices;

    id nativeSession;
};
static const char* getRoutingChangeReason (AVAudioSessionRouteChangeReason reason) noexcept
{
    switch (reason)
    {
        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:         return "New device available";
        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:       return "Old device unavailable";
        case AVAudioSessionRouteChangeReasonCategoryChange:             return "Category change";
        case AVAudioSessionRouteChangeReasonOverride:                   return "Override";
        case AVAudioSessionRouteChangeReasonWakeFromSleep:              return "Wake from sleep";
        case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: return "No suitable route for category";
        case AVAudioSessionRouteChangeReasonRouteConfigurationChange:   return "Route configuration change";
        case AVAudioSessionRouteChangeReasonUnknown:
        default:                                                        return "Unknown";
    }
}
bool getNotificationValueForKey (NSNotification* notification, NSString* key, NSUInteger& value) noexcept
{
    if (notification != nil)
    {
        if (NSDictionary* userInfo = [notification userInfo])
        {
            if (NSNumber* number = [userInfo objectForKey: key])
            {
                value = [number unsignedIntegerValue];
                return true;
            }
        }
    }

    jassertfalse;
    return false;
}

} // juce namespace
//==============================================================================
@interface iOSAudioSessionNative : NSObject
{
@private
    juce::AudioSessionHolder* audioSessionHolder;
};

- (id) init: (juce::AudioSessionHolder*) holder;
- (void) dealloc;

- (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification;
- (void) handleMediaServicesReset;
- (void) handleMediaServicesLost;
- (void) handleRouteChange: (NSNotification*) notification;
@end
@implementation iOSAudioSessionNative

- (id) init: (juce::AudioSessionHolder*) holder
{
    self = [super init];

    if (self != nil)
    {
        audioSessionHolder = holder;

        auto session = [AVAudioSession sharedInstance];
        auto centre = [NSNotificationCenter defaultCenter];

        [centre addObserver: self
                   selector: @selector (audioSessionDidChangeInterruptionType:)
                       name: AVAudioSessionInterruptionNotification
                     object: session];

        [centre addObserver: self
                   selector: @selector (handleMediaServicesLost)
                       name: AVAudioSessionMediaServicesWereLostNotification
                     object: session];

        [centre addObserver: self
                   selector: @selector (handleMediaServicesReset)
                       name: AVAudioSessionMediaServicesWereResetNotification
                     object: session];

        [centre addObserver: self
                   selector: @selector (handleRouteChange:)
                       name: AVAudioSessionRouteChangeNotification
                     object: session];
    }
    else
    {
        jassertfalse;
    }

    return self;
}

- (void) dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver: self];
    [super dealloc];
}
- (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification
{
    NSUInteger value;

    if (juce::getNotificationValueForKey (notification, AVAudioSessionInterruptionTypeKey, value))
    {
        switch ((AVAudioSessionInterruptionType) value)
        {
            case AVAudioSessionInterruptionTypeBegan:
                audioSessionHolder->handleStatusChange (false, "AVAudioSessionInterruptionTypeBegan");
                break;

            case AVAudioSessionInterruptionTypeEnded:
                audioSessionHolder->handleStatusChange (true, "AVAudioSessionInterruptionTypeEnded");
                break;

            // No default, so the compiler will warn if this enum is ever extended.
        }
    }
}

- (void) handleMediaServicesReset
{
    audioSessionHolder->handleStatusChange (true, "AVAudioSessionMediaServicesWereResetNotification");
}

- (void) handleMediaServicesLost
{
    audioSessionHolder->handleStatusChange (false, "AVAudioSessionMediaServicesWereLostNotification");
}

- (void) handleRouteChange: (NSNotification*) notification
{
    NSUInteger value;

    if (juce::getNotificationValueForKey (notification, AVAudioSessionRouteChangeReasonKey, value))
        audioSessionHolder->handleRouteChange (juce::getRoutingChangeReason ((AVAudioSessionRouteChangeReason) value));
}

@end
//==============================================================================
namespace juce {

#ifndef JUCE_IOS_AUDIO_LOGGING
 #define JUCE_IOS_AUDIO_LOGGING 0
#endif

#if JUCE_IOS_AUDIO_LOGGING
 #define JUCE_IOS_AUDIO_LOG(x)  DBG(x)
#else
 #define JUCE_IOS_AUDIO_LOG(x)
#endif

static void logNSError (NSError* e)
{
    if (e != nil)
    {
        JUCE_IOS_AUDIO_LOG ("iOS Audio error: " << [e.localizedDescription UTF8String]);
        jassertfalse;
    }
}

#define JUCE_NSERROR_CHECK(X)     { NSError* error = nil; X; logNSError (error); }
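// JUCE_NSERROR_CHECK wraps a single AVAudioSession call: it declares a local NSError*
// named `error` which the wrapped expression must pass by address, then logs any failure.
// For example, JUCE_NSERROR_CHECK ([session setActive: YES error: &error]) expands to
// { NSError* error = nil; [session setActive: YES error: &error]; logNSError (error); }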
//==============================================================================
class iOSAudioIODevice  : public AudioIODevice
{
public:
    iOSAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, iOSAudioDeviceName)
    {
        sessionHolder->activeDevices.add (this);
        updateSampleRateAndAudioInput();
    }

    ~iOSAudioIODevice()
    {
        sessionHolder->activeDevices.removeFirstMatchingValue (this);
        close();
    }

    StringArray getOutputChannelNames() override
    {
        return { "Left", "Right" };
    }

    StringArray getInputChannelNames() override
    {
        if (audioInputIsAvailable)
            return { "Left", "Right" };

        return {};
    }

    static void setAudioSessionActive (bool enabled)
    {
        JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
                                                                 error: &error]);
    }

    static double trySampleRate (double rate)
    {
        auto session = [AVAudioSession sharedInstance];
        JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate
                                                      error: &error]);
        return session.sampleRate;
    }
    Array<double> getAvailableSampleRates() override
    {
        Array<double> rates;

        // Important: the supported audio sample rates change on the iPhone 6S
        // depending on whether the headphones are plugged in or not!
        setAudioSessionActive (true);

        const double lowestRate  = trySampleRate (4000);
        const double highestRate = trySampleRate (192000);

        for (double rate = lowestRate; rate <= highestRate; rate += 1000)
        {
            const double supportedRate = trySampleRate (rate);

            rates.addIfNotAlreadyThere (supportedRate);
            rate = jmax (rate, supportedRate);
        }

        for (auto r : rates)
        {
            ignoreUnused (r);
            JUCE_IOS_AUDIO_LOG ("available rate = " + String (r, 0) + "Hz");
        }

        return rates;
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> r;

        for (int i = 6; i < 12; ++i)
            r.add (1 << i);

        return r;
    }

    int getDefaultBufferSize() override      { return 256; }
    String open (const BigInteger& inputChannelsWanted,
                 const BigInteger& outputChannelsWanted,
                 double targetSampleRate, int bufferSize) override
    {
        close();

        lastError.clear();
        preferredBufferSize = bufferSize <= 0 ? getDefaultBufferSize()
                                              : bufferSize;

        // xxx set up channel mapping

        activeOutputChans = outputChannelsWanted;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();
        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);

        activeInputChans = inputChannelsWanted;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();
        monoInputChannelNumber = activeInputChans.findNextSetBit (0);

        setAudioSessionActive (true);

        // Set the session category & options:
        auto session = [AVAudioSession sharedInstance];

        const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);

        NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);

        NSUInteger options = AVAudioSessionCategoryOptionMixWithOthers; // Alternatively AVAudioSessionCategoryOptionDuckOthers
        if (useInputs) // These options are only valid for category = PlayAndRecord
            options |= (AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth);

        JUCE_NSERROR_CHECK ([session setCategory: category
                                     withOptions: options
                                           error: &error]);

        fixAudioRouteIfSetToReceiver();

        // Set the sample rate
        trySampleRate (targetSampleRate);
        updateSampleRateAndAudioInput();
        updateCurrentBufferSize();

        prepareFloatBuffers (actualBufferSize);

        isRunning = true;
        handleRouteChange ("Started AudioUnit");

        lastError = (audioUnit != 0 ? "" : "Couldn't open the device");

        setAudioSessionActive (true);

        return lastError;
    }
    void close() override
    {
        if (isRunning)
        {
            isRunning = false;

            if (audioUnit != 0)
            {
                // Stop the unit before disposing of it.
                AudioOutputUnitStop (audioUnit);
                AudioComponentInstanceDispose (audioUnit);
                audioUnit = 0;
            }

            setAudioSessionActive (false);
        }
    }
    bool isOpen() override                       { return isRunning; }

    int getCurrentBufferSizeSamples() override   { return actualBufferSize; }
    double getCurrentSampleRate() override       { return sampleRate; }
    int getCurrentBitDepth() override            { return 16; }

    BigInteger getActiveOutputChannels() const override    { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override     { return activeInputChans; }

    int getOutputLatencyInSamples() override     { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].outputLatency); }
    int getInputLatencyInSamples() override      { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].inputLatency); }
    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying() override            { return isRunning && callback != nullptr; }
    String getLastError() override       { return lastError; }
    bool setAudioPreprocessingEnabled (bool enable) override
    {
        auto session = [AVAudioSession sharedInstance];

        NSString* mode = (enable ? AVAudioSessionModeMeasurement
                                 : AVAudioSessionModeDefault);

        JUCE_NSERROR_CHECK ([session setMode: mode
                                       error: &error]);

        return session.mode == mode;
    }

    void invokeAudioDeviceErrorCallback (const String& reason)
    {
        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
            callback->audioDeviceError (reason);
    }
    void handleStatusChange (bool enabled, const char* reason)
    {
        JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);

        isRunning = enabled;
        setAudioSessionActive (enabled);

        if (enabled)
            AudioOutputUnitStart (audioUnit);
        else
            AudioOutputUnitStop (audioUnit);

        if (! enabled)
            invokeAudioDeviceErrorCallback (reason);
    }

    void handleRouteChange (const char* reason)
    {
        JUCE_IOS_AUDIO_LOG ("handleRouteChange: reason: " << reason);

        fixAudioRouteIfSetToReceiver();

        if (isRunning)
        {
            invokeAudioDeviceErrorCallback (reason);
            updateSampleRateAndAudioInput();
            updateCurrentBufferSize();
            createAudioUnit();

            setAudioSessionActive (true);

            if (audioUnit != 0)
            {
                UInt32 formatSize = sizeof (format);
                AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
                AudioOutputUnitStart (audioUnit);
            }

            if (callback != nullptr)
                callback->audioDeviceAboutToStart (this);
        }
    }
private:
    //==============================================================================
    SharedResourcePointer<AudioSessionHolder> sessionHolder;
    CriticalSection callbackLock;
    NSTimeInterval sampleRate = 0;
    int numInputChannels = 2, numOutputChannels = 2;
    int preferredBufferSize = 0, actualBufferSize = 0;
    bool isRunning = false;
    String lastError;

    AudioStreamBasicDescription format;
    AudioUnit audioUnit {};
    bool audioInputIsAvailable = false;
    AudioIODeviceCallback* callback = nullptr;
    BigInteger activeOutputChans, activeInputChans;

    AudioSampleBuffer floatData;
    float* inputChannels[3];
    float* outputChannels[3];

    // Channel indices (not flags): these are set from BigInteger::findNextSetBit() in open()
    // and used as array indices in process().
    int monoInputChannelNumber, monoOutputChannelNumber;
    void prepareFloatBuffers (int bufferSize)
    {
        if (numInputChannels + numOutputChannels > 0)
        {
            floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
            zeromem (inputChannels, sizeof (inputChannels));
            zeromem (outputChannels, sizeof (outputChannels));

            for (int i = 0; i < numInputChannels; ++i)
                inputChannels[i] = floatData.getWritePointer (i);

            for (int i = 0; i < numOutputChannels; ++i)
                outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
        }
    }

    //==============================================================================
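    // Render callback for the RemoteIO unit. The hardware delivers (and expects back)
    // interleaved 16-bit samples in mBuffers[0]: process() pulls the input via
    // AudioUnitRender, converts it into the de-interleaved float buffers prepared above,
    // runs the JUCE device callback, then converts the float output back to interleaved shorts.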
    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                      const UInt32 numFrames, AudioBufferList* data)
    {
        OSStatus err = noErr;

        if (audioInputIsAvailable && numInputChannels > 0)
            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);

        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
        {
            if ((int) numFrames > floatData.getNumSamples())
                prepareFloatBuffers ((int) numFrames);

            if (audioInputIsAvailable && numInputChannels > 0)
            {
                short* shortData = (short*) data->mBuffers[0].mData;

                if (numInputChannels >= 2)
                {
                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
                    }
                }
                else
                {
                    if (monoInputChannelNumber > 0)
                        ++shortData;

                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        ++shortData;
                    }
                }
            }
            else
            {
                for (int i = numInputChannels; --i >= 0;)
                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }

            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
                                             outputChannels, numOutputChannels, (int) numFrames);

            short* const shortData = (short*) data->mBuffers[0].mData;
            int n = 0;

            if (numOutputChannels >= 2)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                }
            }
            else if (numOutputChannels == 1)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                    shortData [n++] = s;
                    shortData [n++] = s;
                }
            }
            else
            {
                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
            }
        }
        else
        {
            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
        }

        return err;
    }
    void updateSampleRateAndAudioInput()
    {
        auto session = [AVAudioSession sharedInstance];
        sampleRate = session.sampleRate;
        audioInputIsAvailable = session.isInputAvailable;
        actualBufferSize = roundToInt (sampleRate * session.IOBufferDuration);

        JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate
                             << "Hz, audioInputAvailable: " << (int) audioInputIsAvailable);
    }
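    // AVAudioSession expresses buffer sizes as an IO buffer duration in seconds rather than in
    // frames, so the preferred frame count is converted to a duration here, and the values the
    // session actually granted are then read back via updateSampleRateAndAudioInput().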
    void updateCurrentBufferSize()
    {
        NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) ((preferredBufferSize + 1) / sampleRate) : 0.0;

        JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setPreferredIOBufferDuration: bufferDuration
                                                                                    error: &error]);
        updateSampleRateAndAudioInput();
    }

    //==============================================================================
    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
    {
        return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
    }

    //==============================================================================
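    // Fills in `format` as interleaved, packed, native-endian signed 16-bit PCM, which is the
    // layout the conversion loops in process() assume on both the input and output bus.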
    void resetFormat (const int numChannels) noexcept
    {
        zerostruct (format);
        format.mFormatID = kAudioFormatLinearPCM;
        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
        format.mBitsPerChannel = 8 * sizeof (short);
        format.mChannelsPerFrame = (UInt32) numChannels;
        format.mFramesPerPacket = 1;
        format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
    }
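    // Creates the RemoteIO audio unit: enables recording on bus 1 when inputs are wanted,
    // declares a stereo channel layout, installs processStatic() as the render callback, and
    // applies the 16-bit stream format to the output of bus 1 (microphone side) and the input
    // of bus 0 (speaker side) before initialising the unit.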
    bool createAudioUnit()
    {
        if (audioUnit != 0)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = 0;
        }

        resetFormat (2);

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext (0, &desc);
        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == 0)
            return false;

        if (numInputChannels > 0)
        {
            const UInt32 one = 1;
            AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
        }

        {
            AudioChannelLayout layout;
            layout.mChannelBitmap = 0;
            layout.mNumberChannelDescriptions = 0;
            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input,  0, &layout, sizeof (layout));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
        }

        {
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = processStatic;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &format, sizeof (format));
        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));

        AudioUnitInitialize (audioUnit);
        return true;
    }
    // If the routing is set to go through the receiver (i.e. the quiet earpiece rather than the
    // main speaker), this re-routes it to make it loud. Needed because by default, when using an
    // input + output, the output is sent to the receiver.
    static void fixAudioRouteIfSetToReceiver()
    {
        auto session = [AVAudioSession sharedInstance];
        auto route = session.currentRoute;

        for (AVAudioSessionPortDescription* port in route.inputs)
        {
            ignoreUnused (port);
            JUCE_IOS_AUDIO_LOG ("AVAudioSession: input: " << [port.description UTF8String]);
        }

        for (AVAudioSessionPortDescription* port in route.outputs)
        {
            JUCE_IOS_AUDIO_LOG ("AVAudioSession: output: " << [port.description UTF8String]);

            if ([port.portName isEqualToString: @"Receiver"])
            {
                JUCE_NSERROR_CHECK ([session overrideOutputAudioPort: AVAudioSessionPortOverrideSpeaker
                                                               error: &error]);
                setAudioSessionActive (true);
            }
        }
    }

    JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
};
//==============================================================================
class iOSAudioIODeviceType  : public AudioIODeviceType
{
public:
    iOSAudioIODeviceType()  : AudioIODeviceType (iOSAudioDeviceName) {}

    void scanForDevices() {}
    StringArray getDeviceNames (bool /*wantInputNames*/) const       { return StringArray (iOSAudioDeviceName); }
    int getDefaultDeviceIndex (bool /*forInput*/) const              { return 0; }
    int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const  { return d != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                         { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                       : inputDeviceName);

        return nullptr;
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
};
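// Illustrative only (not part of this file): an application would normally reach this device
// type indirectly through the standard juce::AudioDeviceManager API rather than creating it
// by hand, e.g.
//
//     juce::AudioDeviceManager manager;
//     manager.initialiseWithDefaultDevices (2, 2);            // 2 inputs, 2 outputs
//     manager.setCurrentAudioDeviceType ("iOS Audio", true);  // matches iOSAudioDeviceName above
//
// Internally the manager creates the platform device types via
// createAudioIODeviceType_iOSAudio() (below) and obtains an iOSAudioIODevice from createDevice().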
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
    return new iOSAudioIODeviceType();
}

//==============================================================================
AudioSessionHolder::AudioSessionHolder()    { nativeSession = [[iOSAudioSessionNative alloc] init: this]; }
AudioSessionHolder::~AudioSessionHolder()   { [nativeSession release]; }

void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
{
    for (auto device: activeDevices)
        device->handleStatusChange (enabled, reason);
}

void AudioSessionHolder::handleRouteChange (const char* reason) const
{
    for (auto device: activeDevices)
        device->handleRouteChange (reason);
}

#undef JUCE_NSERROR_CHECK