The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2013 - Raw Material Software Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

  ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
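//==============================================================================
// iOS implementation of AudioIODevice, built on the AudioSession C API and a
// RemoteIO audio unit. The hardware side runs as interleaved 16-bit stereo;
// process() converts to and from the de-interleaved float buffers that
// AudioIODeviceCallback expects.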
class iOSAudioIODevice  : public AudioIODevice
{
public:
    iOSAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, "Audio"),
          actualBufferSize (0),
          isRunning (false),
          audioUnit (0),
          callback (nullptr),
          floatData (1, 2)
    {
        getSessionHolder().activeDevices.add (this);

        numInputChannels = 2;
        numOutputChannels = 2;
        preferredBufferSize = 0;

        updateDeviceInfo();
    }

    ~iOSAudioIODevice()
    {
        getSessionHolder().activeDevices.removeFirstMatchingValue (this);
        close();
    }
    StringArray getOutputChannelNames()
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames()
    {
        StringArray s;

        if (audioInputIsAvailable)
        {
            s.add ("Left");
            s.add ("Right");
        }

        return s;
    }

    int getNumSampleRates()                 { return 1; }
    double getSampleRate (int index)        { return sampleRate; }

    int getNumBufferSizesAvailable()        { return 6; }
    int getBufferSizeSamples (int index)    { return 1 << (jlimit (0, 5, index) + 6); }
    int getDefaultBufferSize()              { return 1024; }
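    // Sets up the audio session (category, Bluetooth input, route-change
    // listener), applies the requested buffer size as the preferred hardware
    // IO buffer duration, then lets routingChanged() create and start the
    // RemoteIO audio unit.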
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double sampleRate,
                 int bufferSize)
    {
        close();

        lastError = String::empty;
        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        // xxx set up channel mapping
        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();
        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();
        monoInputChannelNumber = activeInputChans.findNextSetBit (0);

        AudioSessionSetActive (true);

        UInt32 audioCategory = kAudioSessionCategory_MediaPlayback;

        if (numInputChannels > 0 && audioInputIsAvailable)
        {
            audioCategory = kAudioSessionCategory_PlayAndRecord;

            UInt32 allowBluetoothInput = 1;
            AudioSessionSetProperty (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput,
                                     sizeof (allowBluetoothInput), &allowBluetoothInput);
        }

        AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);

        fixAudioRouteIfSetToReceiver();
        updateDeviceInfo();

        Float32 bufferDuration = preferredBufferSize / sampleRate;
        AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
        actualBufferSize = preferredBufferSize;

        prepareFloatBuffers();

        isRunning = true;
        routingChanged (nullptr);  // creates and starts the AU

        lastError = audioUnit != 0 ? "" : "Couldn't open the device";
        return lastError;
    }
    void close()
    {
        if (isRunning)
        {
            isRunning = false;

            AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
            AudioSessionSetActive (false);

            if (audioUnit != 0)
            {
                AudioComponentInstanceDispose (audioUnit);
                audioUnit = 0;
            }
        }
    }

    bool isOpen()                       { return isRunning; }

    int getCurrentBufferSizeSamples()   { return actualBufferSize; }
    double getCurrentSampleRate()       { return sampleRate; }
    int getCurrentBitDepth()            { return 16; }

    BigInteger getActiveOutputChannels() const    { return activeOutputChans; }
    BigInteger getActiveInputChannels() const     { return activeInputChans; }

    int getOutputLatencyInSamples()     { return 0; } //xxx
    int getInputLatencyInSamples()      { return 0; } //xxx

    void start (AudioIODeviceCallback* newCallback)
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop()
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying()            { return isRunning && callback != nullptr; }
    String getLastError()       { return lastError; }
private:
    //==================================================================================================
    CriticalSection callbackLock;
    Float64 sampleRate;
    int numInputChannels, numOutputChannels;
    int preferredBufferSize, actualBufferSize;
    bool isRunning;
    String lastError;

    AudioStreamBasicDescription format;
    AudioUnit audioUnit;
    UInt32 audioInputIsAvailable;
    AudioIODeviceCallback* callback;
    BigInteger activeOutputChans, activeInputChans;

    AudioSampleBuffer floatData;
    float* inputChannels[3];
    float* outputChannels[3];
    int monoInputChannelNumber, monoOutputChannelNumber;  // channel indices, not flags
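    // floatData holds the de-interleaved input channels first, followed by the
    // output channels; inputChannels/outputChannels cache pointers into it for
    // use in process().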
    void prepareFloatBuffers()
    {
        if (numInputChannels + numOutputChannels > 0)
        {
            floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
            zeromem (inputChannels, sizeof (inputChannels));
            zeromem (outputChannels, sizeof (outputChannels));

            for (int i = 0; i < numInputChannels; ++i)
                inputChannels[i] = floatData.getSampleData (i);

            for (int i = 0; i < numOutputChannels; ++i)
                outputChannels[i] = floatData.getSampleData (i + numInputChannels);
        }
    }
  178. //==================================================================================================
    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                      const UInt32 numFrames, AudioBufferList* data)
    {
        OSStatus err = noErr;

        if (audioInputIsAvailable && numInputChannels > 0)
            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);

        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
        {
            if (audioInputIsAvailable && numInputChannels > 0)
            {
                short* shortData = (short*) data->mBuffers[0].mData;

                if (numInputChannels >= 2)
                {
                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
                    }
                }
                else
                {
                    if (monoInputChannelNumber > 0)
                        ++shortData;

                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        ++shortData;
                    }
                }
            }
            else
            {
                for (int i = numInputChannels; --i >= 0;)
                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }

            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
                                             outputChannels, numOutputChannels, (int) numFrames);

            short* shortData = (short*) data->mBuffers[0].mData;
            int n = 0;

            if (numOutputChannels >= 2)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                }
            }
            else if (numOutputChannels == 1)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                    shortData [n++] = s;
                    shortData [n++] = s;
                }
            }
            else
            {
                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
            }
        }
        else
        {
            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
        }

        return err;
    }
    void updateDeviceInfo()
    {
        UInt32 size = sizeof (sampleRate);
        AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);

        size = sizeof (audioInputIsAvailable);
        AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
    }
    void routingChanged (const void* propertyValue)
    {
        if (! isRunning)
            return;

        if (propertyValue != nullptr)
        {
            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));

            SInt32 routeChangeReason;
            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);

            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                    callback->audioDeviceError ("Old device unavailable");
            }
        }

        updateDeviceInfo();
        createAudioUnit();

        AudioSessionSetActive (true);

        if (audioUnit != 0)
        {
            UInt32 formatSize = sizeof (format);
            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);

            Float32 bufferDuration = preferredBufferSize / sampleRate;
            UInt32 bufferDurationSize = sizeof (bufferDuration);
            AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDuration);
            actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);

            AudioOutputUnitStart (audioUnit);
        }
    }
    //==================================================================================================
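    // Shared, lazily-created holder that initialises the audio session exactly
    // once and fans interruption notifications out to every active device.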
    struct AudioSessionHolder
    {
        AudioSessionHolder()
        {
            AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
        }

        static void interruptionListenerCallback (void* client, UInt32 interruptionType)
        {
            const Array <iOSAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;

            for (int i = activeDevices.size(); --i >= 0;)
                activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
        }

        Array <iOSAudioIODevice*> activeDevices;
    };

    static AudioSessionHolder& getSessionHolder()
    {
        static AudioSessionHolder audioSessionHolder;
        return audioSessionHolder;
    }
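    // Stops the unit and deactivates the session when an interruption (e.g. an
    // incoming call) begins, and restarts both when the interruption ends.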
    void interruptionListener (const UInt32 interruptionType)
    {
        if (interruptionType == kAudioSessionBeginInterruption)
        {
            isRunning = false;
            AudioOutputUnitStop (audioUnit);
            AudioSessionSetActive (false);

            const ScopedLock sl (callbackLock);

            if (callback != nullptr)
                callback->audioDeviceError ("iOS audio session interruption");
        }

        if (interruptionType == kAudioSessionEndInterruption)
        {
            isRunning = true;
            AudioSessionSetActive (true);
            AudioOutputUnitStart (audioUnit);
        }
    }
    //==================================================================================================
    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
    {
        return static_cast <iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
    }

    static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
    {
        static_cast <iOSAudioIODevice*> (client)->routingChanged (propertyValue);
    }

    //==================================================================================================
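    // Fills in the stream description used on both sides of the RemoteIO unit:
    // packed, native-endian, signed 16-bit interleaved PCM.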
    void resetFormat (const int numChannels) noexcept
    {
        zerostruct (format);
        format.mFormatID = kAudioFormatLinearPCM;
        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
        format.mBitsPerChannel = 8 * sizeof (short);
        format.mChannelsPerFrame = numChannels;
        format.mFramesPerPacket = 1;
        format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
    }
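    // Creates the RemoteIO unit, enables recording on the input bus when input is
    // requested, applies a stereo layout and the 16-bit stream format, and installs
    // processStatic as the render callback before initialising the unit.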
    bool createAudioUnit()
    {
        if (audioUnit != 0)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = 0;
        }

        resetFormat (2);

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext (0, &desc);
        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == 0)
            return false;

        if (numInputChannels > 0)
        {
            const UInt32 one = 1;
            AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
        }

        {
            AudioChannelLayout layout;
            layout.mChannelBitmap = 0;
            layout.mNumberChannelDescriptions = 0;
            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input,  0, &layout, sizeof (layout));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
        }

        {
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = processStatic;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &format, sizeof (format));
        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));

        AudioUnitInitialize (audioUnit);
        return true;
    }
    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
    static void fixAudioRouteIfSetToReceiver()
    {
        CFStringRef audioRoute = 0;
        UInt32 propertySize = sizeof (audioRoute);

        if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
        {
            NSString* route = (NSString*) audioRoute;

            //DBG ("audio route: " + nsStringToJuce (route));

            if ([route hasPrefix: @"Receiver"])
            {
                UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
                AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
            }

            CFRelease (audioRoute);
        }
    }

    JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
};
//==============================================================================
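// The device type exposes a single device named "iOS Audio", which covers both
// input and output (there are no separate input and output devices on iOS).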
class iOSAudioIODeviceType  : public AudioIODeviceType
{
public:
    iOSAudioIODeviceType()  : AudioIODeviceType ("iOS Audio")
    {
    }

    void scanForDevices() {}

    StringArray getDeviceNames (bool wantInputNames) const
    {
        return StringArray ("iOS Audio");
    }

    int getDefaultDeviceIndex (bool forInput) const
    {
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        return device != nullptr ? 0 : -1;
    }

    bool hasSeparateInputsAndOutputs() const    { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                       : inputDeviceName);

        return nullptr;
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
};
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
    return new iOSAudioIODeviceType();
}