// Carla — audio plugin host: https://kx.studio/carla

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2013 - Raw Material Software Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

  ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
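
//==============================================================================
/*  iOS audio backend: a single full-duplex AudioIODevice built on a RemoteIO
    AudioUnit, with session setup, route changes and interruptions handled
    through the AudioSession C API. The hardware side uses interleaved 16-bit
    samples, which process() converts to and from the float buffers expected
    by AudioIODeviceCallback.
*/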
class iOSAudioIODevice  : public AudioIODevice
{
public:
    iOSAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, "Audio"),
          actualBufferSize (0),
          isRunning (false),
          audioUnit (0),
          callback (nullptr),
          floatData (1, 2)
    {
        getSessionHolder().activeDevices.add (this);

        numInputChannels = 2;
        numOutputChannels = 2;
        preferredBufferSize = 0;

        updateDeviceInfo();
    }
    ~iOSAudioIODevice()
    {
        getSessionHolder().activeDevices.removeFirstMatchingValue (this);
        close();
    }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (audioInputIsAvailable)
        {
            s.add ("Left");
            s.add ("Right");
        }

        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        // can't find a good way to actually ask the device for which of these it supports..
        static const double rates[] = { 8000.0, 16000.0, 22050.0, 32000.0, 44100.0, 48000.0 };
        return Array<double> (rates, numElementsInArray (rates));
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> r;

        for (int i = 6; i < 12; ++i)
            r.add (1 << i);

        return r;
    }

    int getDefaultBufferSize() override         { return 1024; }
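
    // Opens the device: chooses the session category (PlayAndRecord when input is
    // requested and available, MediaPlayback otherwise), registers for route changes,
    // applies the preferred sample rate and buffer duration, and then lets
    // routingChanged() build and start the RemoteIO unit.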
    String open (const BigInteger& inputChannelsWanted,
                 const BigInteger& outputChannelsWanted,
                 double targetSampleRate, int bufferSize) override
    {
        close();

        lastError.clear();
        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        // xxx set up channel mapping

        activeOutputChans = outputChannelsWanted;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();
        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);

        activeInputChans = inputChannelsWanted;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();
        monoInputChannelNumber = activeInputChans.findNextSetBit (0);

        AudioSessionSetActive (true);

        if (numInputChannels > 0 && audioInputIsAvailable)
        {
            setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_PlayAndRecord);
            setSessionUInt32Property (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput, 1);
        }
        else
        {
            setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
        }

        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);

        fixAudioRouteIfSetToReceiver();
        setSessionFloat64Property (kAudioSessionProperty_PreferredHardwareSampleRate, targetSampleRate);
        updateDeviceInfo();

        setSessionFloat32Property (kAudioSessionProperty_PreferredHardwareIOBufferDuration, preferredBufferSize / sampleRate);
        updateCurrentBufferSize();

        prepareFloatBuffers (actualBufferSize);

        isRunning = true;
        routingChanged (nullptr);  // creates and starts the AU

        lastError = audioUnit != 0 ? "" : "Couldn't open the device";
        return lastError;
    }
    void close() override
    {
        if (isRunning)
        {
            isRunning = false;

            setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
            AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
            AudioSessionSetActive (false);

            if (audioUnit != 0)
            {
                AudioComponentInstanceDispose (audioUnit);
                audioUnit = 0;
            }
        }
    }

    bool isOpen() override                       { return isRunning; }

    int getCurrentBufferSizeSamples() override   { return actualBufferSize; }
    double getCurrentSampleRate() override       { return sampleRate; }
    int getCurrentBitDepth() override            { return 16; }

    BigInteger getActiveOutputChannels() const override   { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override    { return activeInputChans; }

    int getOutputLatencyInSamples() override     { return getLatency (kAudioSessionProperty_CurrentHardwareOutputLatency); }
    int getInputLatencyInSamples() override      { return getLatency (kAudioSessionProperty_CurrentHardwareInputLatency); }

    int getLatency (AudioSessionPropertyID propID)
    {
        Float32 latency = 0;
        getSessionProperty (propID, latency);
        return roundToInt (latency * getCurrentSampleRate());
    }

    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying() override         { return isRunning && callback != nullptr; }
    String getLastError() override    { return lastError; }

    bool setAudioPreprocessingEnabled (bool enable) override
    {
        return setSessionUInt32Property (kAudioSessionProperty_Mode, enable ? kAudioSessionMode_Default
                                                                            : kAudioSessionMode_Measurement);
    }
private:
    //==================================================================================================
    CriticalSection callbackLock;
    Float64 sampleRate;
    int numInputChannels, numOutputChannels;
    int preferredBufferSize, actualBufferSize;
    bool isRunning;
    String lastError;

    AudioStreamBasicDescription format;
    AudioUnit audioUnit;
    UInt32 audioInputIsAvailable;
    AudioIODeviceCallback* callback;
    BigInteger activeOutputChans, activeInputChans;

    AudioSampleBuffer floatData;
    float* inputChannels[3];
    float* outputChannels[3];
    int monoInputChannelNumber, monoOutputChannelNumber;  // channel indices from findNextSetBit(), not flags
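
    // Allocates one de-interleaved float buffer large enough for all active input and
    // output channels, and fills the raw channel-pointer arrays used in process().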
    void prepareFloatBuffers (int bufferSize)
    {
        if (numInputChannels + numOutputChannels > 0)
        {
            floatData.setSize (numInputChannels + numOutputChannels, bufferSize);
            zeromem (inputChannels, sizeof (inputChannels));
            zeromem (outputChannels, sizeof (outputChannels));

            for (int i = 0; i < numInputChannels; ++i)
                inputChannels[i] = floatData.getWritePointer (i);

            for (int i = 0; i < numOutputChannels; ++i)
                outputChannels[i] = floatData.getWritePointer (i + numInputChannels);
        }
    }

    //==================================================================================================
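    // Render callback: pulls the input samples from the RemoteIO unit (element 1), converts
    // the interleaved 16-bit data to floats, hands the float buffers to the client callback,
    // then writes the client's output back into the same AudioBufferList as interleaved
    // 16-bit stereo.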
    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                      const UInt32 numFrames, AudioBufferList* data)
    {
        OSStatus err = noErr;

        if (audioInputIsAvailable && numInputChannels > 0)
            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);

        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
        {
            if ((int) numFrames > floatData.getNumSamples())
                prepareFloatBuffers ((int) numFrames);

            if (audioInputIsAvailable && numInputChannels > 0)
            {
                short* shortData = (short*) data->mBuffers[0].mData;

                if (numInputChannels >= 2)
                {
                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
                    }
                }
                else
                {
                    if (monoInputChannelNumber > 0)
                        ++shortData;

                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        ++shortData;
                    }
                }
            }
            else
            {
                for (int i = numInputChannels; --i >= 0;)
                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }

            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
                                             outputChannels, numOutputChannels, (int) numFrames);

            short* shortData = (short*) data->mBuffers[0].mData;
            int n = 0;

            if (numOutputChannels >= 2)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                }
            }
            else if (numOutputChannels == 1)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                    shortData [n++] = s;
                    shortData [n++] = s;
                }
            }
            else
            {
                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
            }
        }
        else
        {
            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
        }

        return err;
    }
    void updateDeviceInfo()
    {
        getSessionProperty (kAudioSessionProperty_CurrentHardwareSampleRate, sampleRate);
        getSessionProperty (kAudioSessionProperty_AudioInputAvailable, audioInputIsAvailable);
    }

    void updateCurrentBufferSize()
    {
        Float32 bufferDuration = sampleRate > 0 ? (Float32) (preferredBufferSize / sampleRate) : 0.0f;
        getSessionProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, bufferDuration);
        actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
    }
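
    // Route-change handler: reports "old device unavailable" to the client, refreshes the
    // cached sample rate / input availability, then (re)creates and restarts the RemoteIO
    // unit so it picks up the new route's stream format.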
    void routingChanged (const void* propertyValue)
    {
        if (! isRunning)
            return;

        if (propertyValue != nullptr)
        {
            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
            SInt32 routeChangeReason;
            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);

            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                    callback->audioDeviceError ("Old device unavailable");
            }
        }

        updateDeviceInfo();
        createAudioUnit();

        AudioSessionSetActive (true);

        if (audioUnit != 0)
        {
            UInt32 formatSize = sizeof (format);
            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);

            updateCurrentBufferSize();
            AudioOutputUnitStart (audioUnit);
        }
    }

    //==================================================================================================
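    // Shared, lazily-created holder that initialises the audio session once and keeps track
    // of every live device instance so interruptions can be fanned out to all of them.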
    struct AudioSessionHolder
    {
        AudioSessionHolder()
        {
            AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
        }

        static void interruptionListenerCallback (void* client, UInt32 interruptionType)
        {
            const Array <iOSAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;

            for (int i = activeDevices.size(); --i >= 0;)
                activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
        }

        Array <iOSAudioIODevice*> activeDevices;
    };

    static AudioSessionHolder& getSessionHolder()
    {
        static AudioSessionHolder audioSessionHolder;
        return audioSessionHolder;
    }
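
    // Stops the unit and deactivates the session when an interruption (e.g. a phone call)
    // begins, then restarts everything when it ends; the client is notified through
    // audioDeviceError() in both cases.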
    void interruptionListener (const UInt32 interruptionType)
    {
        if (interruptionType == kAudioSessionBeginInterruption)
        {
            isRunning = false;
            AudioOutputUnitStop (audioUnit);
            AudioSessionSetActive (false);

            const ScopedLock sl (callbackLock);

            if (callback != nullptr)
                callback->audioDeviceError ("iOS audio session interruption");
        }

        if (interruptionType == kAudioSessionEndInterruption)
        {
            isRunning = true;
            AudioSessionSetActive (true);
            AudioOutputUnitStart (audioUnit);

            const ScopedLock sl (callbackLock);

            if (callback != nullptr)
                callback->audioDeviceError ("iOS audio session resumed");
        }
    }

    //==================================================================================================
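    // Plain-C trampolines: the CoreAudio callbacks carry the device instance in their
    // user-data pointer and simply forward to the corresponding member function.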
    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
    {
        return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
    }

    static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
    {
        static_cast<iOSAudioIODevice*> (client)->routingChanged (propertyValue);
    }

    //==================================================================================================
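    // Fills in 'format' as packed, native-endian, interleaved 16-bit signed PCM with the
    // given channel count; this is the stream format used on both sides of the RemoteIO unit.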
    void resetFormat (const int numChannels) noexcept
    {
        zerostruct (format);
        format.mFormatID = kAudioFormatLinearPCM;
        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
        format.mBitsPerChannel = 8 * sizeof (short);
        format.mChannelsPerFrame = (UInt32) numChannels;
        format.mFramesPerPacket = 1;
        format.mBytesPerFrame = format.mBytesPerPacket = (UInt32) numChannels * sizeof (short);
    }
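
    // (Re)creates the RemoteIO unit: enables recording on input element 1 when input
    // channels are wanted, declares a stereo layout, installs processStatic as the render
    // callback, and applies the 16-bit stereo stream format to the output element's input
    // scope (element 0) and the input element's output scope (element 1).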
    bool createAudioUnit()
    {
        if (audioUnit != 0)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = 0;
        }

        resetFormat (2);

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext (0, &desc);
        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == 0)
            return false;

        if (numInputChannels > 0)
        {
            const UInt32 one = 1;
            AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
        }

        {
            AudioChannelLayout layout;
            layout.mChannelBitmap = 0;
            layout.mNumberChannelDescriptions = 0;
            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input,  0, &layout, sizeof (layout));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
        }

        {
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = processStatic;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &format, sizeof (format));
        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));

        AudioUnitInitialize (audioUnit);
        return true;
    }
    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
    static void fixAudioRouteIfSetToReceiver()
    {
        CFStringRef audioRoute = 0;

        if (getSessionProperty (kAudioSessionProperty_AudioRoute, audioRoute) == noErr)
        {
            NSString* route = (NSString*) audioRoute;

            //DBG ("audio route: " + nsStringToJuce (route));

            if ([route hasPrefix: @"Receiver"])
                setSessionUInt32Property (kAudioSessionProperty_OverrideAudioRoute, kAudioSessionOverrideAudioRoute_Speaker);

            CFRelease (audioRoute);
        }
    }
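
    // Typed convenience wrappers around AudioSessionGetProperty / AudioSessionSetProperty.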
    template <typename Type>
    static OSStatus getSessionProperty (AudioSessionPropertyID propID, Type& result) noexcept
    {
        UInt32 valueSize = sizeof (result);
        return AudioSessionGetProperty (propID, &valueSize, &result);
    }

    static bool setSessionUInt32Property  (AudioSessionPropertyID propID, UInt32  v) noexcept  { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
    static bool setSessionFloat32Property (AudioSessionPropertyID propID, Float32 v) noexcept  { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
    static bool setSessionFloat64Property (AudioSessionPropertyID propID, Float64 v) noexcept  { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }

    JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
};

//==============================================================================
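// The matching AudioIODeviceType: iOS exposes a single combined input/output device,
// so scanning is a no-op and every request resolves to the one "iOS Audio" device.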
class iOSAudioIODeviceType  : public AudioIODeviceType
{
public:
    iOSAudioIODeviceType()  : AudioIODeviceType ("iOS Audio") {}

    void scanForDevices() {}
    StringArray getDeviceNames (bool /*wantInputNames*/) const       { return StringArray ("iOS Audio"); }
    int getDefaultDeviceIndex (bool /*forInput*/) const              { return 0; }
    int getIndexOfDevice (AudioIODevice* d, bool /*asInput*/) const  { return d != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                         { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                       : inputDeviceName);

        return nullptr;
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (iOSAudioIODeviceType)
};

//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
    return new iOSAudioIODeviceType();
}
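
/*  ==============================================================================
    Usage sketch (not part of this file): application code does not normally construct
    iOSAudioIODevice directly. JUCE's AudioDeviceManager picks up this device type and
    drives it through the AudioIODeviceCallback interface, roughly as below. The names
    SilenceCallback / cb / manager are illustrative only.

        struct SilenceCallback  : public AudioIODeviceCallback
        {
            void audioDeviceAboutToStart (AudioIODevice*) override {}
            void audioDeviceStopped() override {}

            void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                        float** outputChannelData, int numOutputChannels,
                                        int numSamples) override
            {
                // Just clear the outputs; a real app would render audio here.
                for (int ch = 0; ch < numOutputChannels; ++ch)
                    if (outputChannelData[ch] != nullptr)
                        zeromem (outputChannelData[ch], sizeof (float) * (size_t) numSamples);

                (void) inputChannelData; (void) numInputChannels;
            }
        };

        AudioDeviceManager manager;
        manager.initialise (2, 2, nullptr, true);   // 2 ins, 2 outs, no saved state
        static SilenceCallback cb;
        manager.addAudioCallback (&cb);             // open() / start() / process() run from here
    ==============================================================================
*/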