The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

553 lines
20KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2013 - Raw Material Software Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. class iOSAudioIODevice : public AudioIODevice
  18. {
  19. public:
  20. iOSAudioIODevice (const String& deviceName)
  21. : AudioIODevice (deviceName, "Audio"),
  22. actualBufferSize (0),
  23. isRunning (false),
  24. audioUnit (0),
  25. callback (nullptr),
  26. floatData (1, 2)
  27. {
  28. getSessionHolder().activeDevices.add (this);
  29. numInputChannels = 2;
  30. numOutputChannels = 2;
  31. preferredBufferSize = 0;
  32. updateDeviceInfo();
  33. }
  34. ~iOSAudioIODevice()
  35. {
  36. getSessionHolder().activeDevices.removeFirstMatchingValue (this);
  37. close();
  38. }
  39. StringArray getOutputChannelNames()
  40. {
  41. StringArray s;
  42. s.add ("Left");
  43. s.add ("Right");
  44. return s;
  45. }
  46. StringArray getInputChannelNames()
  47. {
  48. StringArray s;
  49. if (audioInputIsAvailable)
  50. {
  51. s.add ("Left");
  52. s.add ("Right");
  53. }
  54. return s;
  55. }
  56. int getNumSampleRates() { return 1; }
  57. double getSampleRate (int index) { return sampleRate; }
  58. int getNumBufferSizesAvailable() { return 6; }
  59. int getBufferSizeSamples (int index) { return 1 << (jlimit (0, 5, index) + 6); }
  60. int getDefaultBufferSize() { return 1024; }
  61. String open (const BigInteger& inputChannels,
  62. const BigInteger& outputChannels,
  63. double sampleRate,
  64. int bufferSize)
  65. {
  66. close();
  67. lastError = String::empty;
  68. preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
  69. // xxx set up channel mapping
  70. activeOutputChans = outputChannels;
  71. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  72. numOutputChannels = activeOutputChans.countNumberOfSetBits();
  73. monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
  74. activeInputChans = inputChannels;
  75. activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
  76. numInputChannels = activeInputChans.countNumberOfSetBits();
  77. monoInputChannelNumber = activeInputChans.findNextSetBit (0);
  78. AudioSessionSetActive (true);
  79. UInt32 audioCategory = (numInputChannels > 0 && audioInputIsAvailable) ? kAudioSessionCategory_PlayAndRecord
  80. : kAudioSessionCategory_MediaPlayback;
  81. AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
  82. if (audioCategory == kAudioSessionCategory_PlayAndRecord;
  83. {
  84. // (note: mustn't set this until after the audio category property has been set)
  85. UInt32 allowBluetoothInput = 1;
  86. AudioSessionSetProperty (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput,
  87. sizeof (allowBluetoothInput), &allowBluetoothInput);
  88. }
  89. AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
  90. fixAudioRouteIfSetToReceiver();
  91. updateDeviceInfo();
  92. Float32 bufferDuration = preferredBufferSize / sampleRate;
  93. AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
  94. actualBufferSize = preferredBufferSize;
  95. prepareFloatBuffers();
  96. isRunning = true;
  97. routingChanged (nullptr); // creates and starts the AU
  98. lastError = audioUnit != 0 ? "" : "Couldn't open the device";
  99. return lastError;
  100. }
  101. void close()
  102. {
  103. if (isRunning)
  104. {
  105. isRunning = false;
  106. AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
  107. AudioSessionSetActive (false);
  108. if (audioUnit != 0)
  109. {
  110. AudioComponentInstanceDispose (audioUnit);
  111. audioUnit = 0;
  112. }
  113. }
  114. }
  115. bool isOpen() { return isRunning; }
  116. int getCurrentBufferSizeSamples() { return actualBufferSize; }
  117. double getCurrentSampleRate() { return sampleRate; }
  118. int getCurrentBitDepth() { return 16; }
  119. BigInteger getActiveOutputChannels() const { return activeOutputChans; }
  120. BigInteger getActiveInputChannels() const { return activeInputChans; }
  121. int getOutputLatencyInSamples() { return 0; } //xxx
  122. int getInputLatencyInSamples() { return 0; } //xxx
  123. void start (AudioIODeviceCallback* newCallback)
  124. {
  125. if (isRunning && callback != newCallback)
  126. {
  127. if (newCallback != nullptr)
  128. newCallback->audioDeviceAboutToStart (this);
  129. const ScopedLock sl (callbackLock);
  130. callback = newCallback;
  131. }
  132. }
  133. void stop()
  134. {
  135. if (isRunning)
  136. {
  137. AudioIODeviceCallback* lastCallback;
  138. {
  139. const ScopedLock sl (callbackLock);
  140. lastCallback = callback;
  141. callback = nullptr;
  142. }
  143. if (lastCallback != nullptr)
  144. lastCallback->audioDeviceStopped();
  145. }
  146. }
  147. bool isPlaying() { return isRunning && callback != nullptr; }
  148. String getLastError() { return lastError; }
  149. private:
  150. //==================================================================================================
  151. CriticalSection callbackLock;
  152. Float64 sampleRate;
  153. int numInputChannels, numOutputChannels;
  154. int preferredBufferSize, actualBufferSize;
  155. bool isRunning;
  156. String lastError;
  157. AudioStreamBasicDescription format;
  158. AudioUnit audioUnit;
  159. UInt32 audioInputIsAvailable;
  160. AudioIODeviceCallback* callback;
  161. BigInteger activeOutputChans, activeInputChans;
  162. AudioSampleBuffer floatData;
  163. float* inputChannels[3];
  164. float* outputChannels[3];
  165. bool monoInputChannelNumber, monoOutputChannelNumber;
  166. void prepareFloatBuffers()
  167. {
  168. if (numInputChannels + numOutputChannels > 0)
  169. {
  170. floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
  171. zeromem (inputChannels, sizeof (inputChannels));
  172. zeromem (outputChannels, sizeof (outputChannels));
  173. for (int i = 0; i < numInputChannels; ++i)
  174. inputChannels[i] = floatData.getSampleData (i);
  175. for (int i = 0; i < numOutputChannels; ++i)
  176. outputChannels[i] = floatData.getSampleData (i + numInputChannels);
  177. }
  178. }
  179. //==================================================================================================
  180. OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  181. const UInt32 numFrames, AudioBufferList* data)
  182. {
  183. OSStatus err = noErr;
  184. if (audioInputIsAvailable && numInputChannels > 0)
  185. err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
  186. const ScopedLock sl (callbackLock);
  187. if (callback != nullptr)
  188. {
  189. if (audioInputIsAvailable && numInputChannels > 0)
  190. {
  191. short* shortData = (short*) data->mBuffers[0].mData;
  192. if (numInputChannels >= 2)
  193. {
  194. for (UInt32 i = 0; i < numFrames; ++i)
  195. {
  196. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  197. inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
  198. }
  199. }
  200. else
  201. {
  202. if (monoInputChannelNumber > 0)
  203. ++shortData;
  204. for (UInt32 i = 0; i < numFrames; ++i)
  205. {
  206. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  207. ++shortData;
  208. }
  209. }
  210. }
  211. else
  212. {
  213. for (int i = numInputChannels; --i >= 0;)
  214. zeromem (inputChannels[i], sizeof (float) * numFrames);
  215. }
  216. callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
  217. outputChannels, numOutputChannels, (int) numFrames);
  218. short* shortData = (short*) data->mBuffers[0].mData;
  219. int n = 0;
  220. if (numOutputChannels >= 2)
  221. {
  222. for (UInt32 i = 0; i < numFrames; ++i)
  223. {
  224. shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
  225. shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
  226. }
  227. }
  228. else if (numOutputChannels == 1)
  229. {
  230. for (UInt32 i = 0; i < numFrames; ++i)
  231. {
  232. const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
  233. shortData [n++] = s;
  234. shortData [n++] = s;
  235. }
  236. }
  237. else
  238. {
  239. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  240. }
  241. }
  242. else
  243. {
  244. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  245. }
  246. return err;
  247. }
  248. void updateDeviceInfo()
  249. {
  250. UInt32 size = sizeof (sampleRate);
  251. AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);
  252. size = sizeof (audioInputIsAvailable);
  253. AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
  254. }
  255. void routingChanged (const void* propertyValue)
  256. {
  257. if (! isRunning)
  258. return;
  259. if (propertyValue != nullptr)
  260. {
  261. CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
  262. CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
  263. CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
  264. SInt32 routeChangeReason;
  265. CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
  266. if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
  267. {
  268. const ScopedLock sl (callbackLock);
  269. if (callback != nullptr)
  270. callback->audioDeviceError ("Old device unavailable");
  271. }
  272. }
  273. updateDeviceInfo();
  274. createAudioUnit();
  275. AudioSessionSetActive (true);
  276. if (audioUnit != 0)
  277. {
  278. UInt32 formatSize = sizeof (format);
  279. AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
  280. Float32 bufferDuration = preferredBufferSize / sampleRate;
  281. UInt32 bufferDurationSize = sizeof (bufferDuration);
  282. AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDuration);
  283. actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
  284. AudioOutputUnitStart (audioUnit);
  285. }
  286. }
  287. //==================================================================================================
  288. struct AudioSessionHolder
  289. {
  290. AudioSessionHolder()
  291. {
  292. AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
  293. }
  294. static void interruptionListenerCallback (void* client, UInt32 interruptionType)
  295. {
  296. const Array <iOSAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;
  297. for (int i = activeDevices.size(); --i >= 0;)
  298. activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
  299. }
  300. Array <iOSAudioIODevice*> activeDevices;
  301. };
  302. static AudioSessionHolder& getSessionHolder()
  303. {
  304. static AudioSessionHolder audioSessionHolder;
  305. return audioSessionHolder;
  306. }
  307. void interruptionListener (const UInt32 interruptionType)
  308. {
  309. if (interruptionType == kAudioSessionBeginInterruption)
  310. {
  311. isRunning = false;
  312. AudioOutputUnitStop (audioUnit);
  313. AudioSessionSetActive (false);
  314. const ScopedLock sl (callbackLock);
  315. if (callback != nullptr)
  316. callback->audioDeviceError ("iOS audio session interruption");
  317. }
  318. if (interruptionType == kAudioSessionEndInterruption)
  319. {
  320. isRunning = true;
  321. AudioSessionSetActive (true);
  322. AudioOutputUnitStart (audioUnit);
  323. }
  324. }
  325. //==================================================================================================
  326. static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  327. UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
  328. {
  329. return static_cast <iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
  330. }
  331. static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
  332. {
  333. static_cast <iOSAudioIODevice*> (client)->routingChanged (propertyValue);
  334. }
  335. //==================================================================================================
  336. void resetFormat (const int numChannels) noexcept
  337. {
  338. zerostruct (format);
  339. format.mFormatID = kAudioFormatLinearPCM;
  340. format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
  341. format.mBitsPerChannel = 8 * sizeof (short);
  342. format.mChannelsPerFrame = numChannels;
  343. format.mFramesPerPacket = 1;
  344. format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
  345. }
  346. bool createAudioUnit()
  347. {
  348. if (audioUnit != 0)
  349. {
  350. AudioComponentInstanceDispose (audioUnit);
  351. audioUnit = 0;
  352. }
  353. resetFormat (2);
  354. AudioComponentDescription desc;
  355. desc.componentType = kAudioUnitType_Output;
  356. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  357. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  358. desc.componentFlags = 0;
  359. desc.componentFlagsMask = 0;
  360. AudioComponent comp = AudioComponentFindNext (0, &desc);
  361. AudioComponentInstanceNew (comp, &audioUnit);
  362. if (audioUnit == 0)
  363. return false;
  364. if (numInputChannels > 0)
  365. {
  366. const UInt32 one = 1;
  367. AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
  368. }
  369. {
  370. AudioChannelLayout layout;
  371. layout.mChannelBitmap = 0;
  372. layout.mNumberChannelDescriptions = 0;
  373. layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
  374. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
  375. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
  376. }
  377. {
  378. AURenderCallbackStruct inputProc;
  379. inputProc.inputProc = processStatic;
  380. inputProc.inputProcRefCon = this;
  381. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
  382. }
  383. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
  384. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
  385. AudioUnitInitialize (audioUnit);
  386. return true;
  387. }
  388. // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
  389. // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
  390. static void fixAudioRouteIfSetToReceiver()
  391. {
  392. CFStringRef audioRoute = 0;
  393. UInt32 propertySize = sizeof (audioRoute);
  394. if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
  395. {
  396. NSString* route = (NSString*) audioRoute;
  397. //DBG ("audio route: " + nsStringToJuce (route));
  398. if ([route hasPrefix: @"Receiver"])
  399. {
  400. UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
  401. AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
  402. }
  403. CFRelease (audioRoute);
  404. }
  405. }
  406. JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
  407. };
  408. //==============================================================================
  409. class iOSAudioIODeviceType : public AudioIODeviceType
  410. {
  411. public:
  412. iOSAudioIODeviceType() : AudioIODeviceType ("iOS Audio")
  413. {
  414. }
  415. void scanForDevices() {}
  416. StringArray getDeviceNames (bool wantInputNames) const
  417. {
  418. return StringArray ("iOS Audio");
  419. }
  420. int getDefaultDeviceIndex (bool forInput) const
  421. {
  422. return 0;
  423. }
  424. int getIndexOfDevice (AudioIODevice* device, bool asInput) const
  425. {
  426. return device != nullptr ? 0 : -1;
  427. }
  428. bool hasSeparateInputsAndOutputs() const { return false; }
  429. AudioIODevice* createDevice (const String& outputDeviceName,
  430. const String& inputDeviceName)
  431. {
  432. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  433. return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  434. : inputDeviceName);
  435. return nullptr;
  436. }
  437. private:
  438. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
  439. };
  440. //==============================================================================
  441. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
  442. {
  443. return new iOSAudioIODeviceType();
  444. }