The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

542 lines
19KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library - "Jules' Utility Class Extensions"
  4. Copyright 2004-11 by Raw Material Software Ltd.
  5. ------------------------------------------------------------------------------
  6. JUCE can be redistributed and/or modified under the terms of the GNU General
  7. Public License (Version 2), as published by the Free Software Foundation.
  8. A copy of the license is included in the JUCE distribution, or can be found
  9. online at www.gnu.org/licenses.
  10. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  11. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  12. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  13. ------------------------------------------------------------------------------
  14. To release a closed-source product which uses JUCE, commercial licenses are
  15. available: visit www.rawmaterialsoftware.com/juce for more information.
  16. ==============================================================================
  17. */
  18. class IPhoneAudioIODevice : public AudioIODevice
  19. {
  20. public:
  21. IPhoneAudioIODevice (const String& deviceName)
  22. : AudioIODevice (deviceName, "Audio"),
  23. actualBufferSize (0),
  24. isRunning (false),
  25. audioUnit (0),
  26. callback (nullptr),
  27. floatData (1, 2)
  28. {
  29. getSessionHolder().activeDevices.add (this);
  30. numInputChannels = 2;
  31. numOutputChannels = 2;
  32. preferredBufferSize = 0;
  33. updateDeviceInfo();
  34. }
  35. ~IPhoneAudioIODevice()
  36. {
  37. getSessionHolder().activeDevices.removeValue (this);
  38. close();
  39. }
  40. StringArray getOutputChannelNames()
  41. {
  42. StringArray s;
  43. s.add ("Left");
  44. s.add ("Right");
  45. return s;
  46. }
  47. StringArray getInputChannelNames()
  48. {
  49. StringArray s;
  50. if (audioInputIsAvailable)
  51. {
  52. s.add ("Left");
  53. s.add ("Right");
  54. }
  55. return s;
  56. }
  57. int getNumSampleRates() { return 1; }
  58. double getSampleRate (int index) { return sampleRate; }
  59. int getNumBufferSizesAvailable() { return 6; }
  60. int getBufferSizeSamples (int index) { return 1 << (jlimit (0, 5, index) + 6); }
  61. int getDefaultBufferSize() { return 1024; }
  62. String open (const BigInteger& inputChannels,
  63. const BigInteger& outputChannels,
  64. double sampleRate,
  65. int bufferSize)
  66. {
  67. close();
  68. lastError = String::empty;
  69. preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
  70. // xxx set up channel mapping
  71. activeOutputChans = outputChannels;
  72. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  73. numOutputChannels = activeOutputChans.countNumberOfSetBits();
  74. monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
  75. activeInputChans = inputChannels;
  76. activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
  77. numInputChannels = activeInputChans.countNumberOfSetBits();
  78. monoInputChannelNumber = activeInputChans.findNextSetBit (0);
  79. AudioSessionSetActive (true);
  80. UInt32 audioCategory = audioInputIsAvailable ? kAudioSessionCategory_PlayAndRecord
  81. : kAudioSessionCategory_MediaPlayback;
  82. AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
  83. AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
  84. fixAudioRouteIfSetToReceiver();
  85. updateDeviceInfo();
  86. Float32 bufferDuration = preferredBufferSize / sampleRate;
  87. AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
  88. actualBufferSize = preferredBufferSize;
  89. prepareFloatBuffers();
  90. isRunning = true;
  91. routingChanged (nullptr); // creates and starts the AU
  92. lastError = audioUnit != 0 ? "" : "Couldn't open the device";
  93. return lastError;
  94. }
  95. void close()
  96. {
  97. if (isRunning)
  98. {
  99. isRunning = false;
  100. AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
  101. AudioSessionSetActive (false);
  102. if (audioUnit != 0)
  103. {
  104. AudioComponentInstanceDispose (audioUnit);
  105. audioUnit = 0;
  106. }
  107. }
  108. }
  109. bool isOpen() { return isRunning; }
  110. int getCurrentBufferSizeSamples() { return actualBufferSize; }
  111. double getCurrentSampleRate() { return sampleRate; }
  112. int getCurrentBitDepth() { return 16; }
  113. BigInteger getActiveOutputChannels() const { return activeOutputChans; }
  114. BigInteger getActiveInputChannels() const { return activeInputChans; }
  115. int getOutputLatencyInSamples() { return 0; } //xxx
  116. int getInputLatencyInSamples() { return 0; } //xxx
  117. void start (AudioIODeviceCallback* callback_)
  118. {
  119. if (isRunning && callback != callback_)
  120. {
  121. if (callback_ != nullptr)
  122. callback_->audioDeviceAboutToStart (this);
  123. const ScopedLock sl (callbackLock);
  124. callback = callback_;
  125. }
  126. }
  127. void stop()
  128. {
  129. if (isRunning)
  130. {
  131. AudioIODeviceCallback* lastCallback;
  132. {
  133. const ScopedLock sl (callbackLock);
  134. lastCallback = callback;
  135. callback = nullptr;
  136. }
  137. if (lastCallback != nullptr)
  138. lastCallback->audioDeviceStopped();
  139. }
  140. }
  141. bool isPlaying() { return isRunning && callback != nullptr; }
  142. String getLastError() { return lastError; }
  143. private:
  144. //==================================================================================================
  145. CriticalSection callbackLock;
  146. Float64 sampleRate;
  147. int numInputChannels, numOutputChannels;
  148. int preferredBufferSize, actualBufferSize;
  149. bool isRunning;
  150. String lastError;
  151. AudioStreamBasicDescription format;
  152. AudioUnit audioUnit;
  153. UInt32 audioInputIsAvailable;
  154. AudioIODeviceCallback* callback;
  155. BigInteger activeOutputChans, activeInputChans;
  156. AudioSampleBuffer floatData;
  157. float* inputChannels[3];
  158. float* outputChannels[3];
  159. bool monoInputChannelNumber, monoOutputChannelNumber;
  160. void prepareFloatBuffers()
  161. {
  162. floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
  163. zeromem (inputChannels, sizeof (inputChannels));
  164. zeromem (outputChannels, sizeof (outputChannels));
  165. for (int i = 0; i < numInputChannels; ++i)
  166. inputChannels[i] = floatData.getSampleData (i);
  167. for (int i = 0; i < numOutputChannels; ++i)
  168. outputChannels[i] = floatData.getSampleData (i + numInputChannels);
  169. }
  170. //==================================================================================================
  171. OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  172. const UInt32 numFrames, AudioBufferList* data)
  173. {
  174. OSStatus err = noErr;
  175. if (audioInputIsAvailable && numInputChannels > 0)
  176. err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);
  177. const ScopedLock sl (callbackLock);
  178. if (callback != nullptr)
  179. {
  180. if (audioInputIsAvailable && numInputChannels > 0)
  181. {
  182. short* shortData = (short*) data->mBuffers[0].mData;
  183. if (numInputChannels >= 2)
  184. {
  185. for (UInt32 i = 0; i < numFrames; ++i)
  186. {
  187. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  188. inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
  189. }
  190. }
  191. else
  192. {
  193. if (monoInputChannelNumber > 0)
  194. ++shortData;
  195. for (UInt32 i = 0; i < numFrames; ++i)
  196. {
  197. inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
  198. ++shortData;
  199. }
  200. }
  201. }
  202. else
  203. {
  204. for (int i = numInputChannels; --i >= 0;)
  205. zeromem (inputChannels[i], sizeof (float) * numFrames);
  206. }
  207. callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
  208. outputChannels, numOutputChannels, (int) numFrames);
  209. short* shortData = (short*) data->mBuffers[0].mData;
  210. int n = 0;
  211. if (numOutputChannels >= 2)
  212. {
  213. for (UInt32 i = 0; i < numFrames; ++i)
  214. {
  215. shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
  216. shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
  217. }
  218. }
  219. else if (numOutputChannels == 1)
  220. {
  221. for (UInt32 i = 0; i < numFrames; ++i)
  222. {
  223. const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
  224. shortData [n++] = s;
  225. shortData [n++] = s;
  226. }
  227. }
  228. else
  229. {
  230. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  231. }
  232. }
  233. else
  234. {
  235. zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
  236. }
  237. return err;
  238. }
  239. void updateDeviceInfo()
  240. {
  241. UInt32 size = sizeof (sampleRate);
  242. AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);
  243. size = sizeof (audioInputIsAvailable);
  244. AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
  245. }
  246. void routingChanged (const void* propertyValue)
  247. {
  248. if (! isRunning)
  249. return;
  250. if (propertyValue != nullptr)
  251. {
  252. CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
  253. CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
  254. CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
  255. SInt32 routeChangeReason;
  256. CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
  257. if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
  258. fixAudioRouteIfSetToReceiver();
  259. }
  260. updateDeviceInfo();
  261. createAudioUnit();
  262. AudioSessionSetActive (true);
  263. if (audioUnit != 0)
  264. {
  265. UInt32 formatSize = sizeof (format);
  266. AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
  267. Float32 bufferDuration = preferredBufferSize / sampleRate;
  268. UInt32 bufferDurationSize = sizeof (bufferDuration);
  269. AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDurationSize);
  270. actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
  271. AudioOutputUnitStart (audioUnit);
  272. }
  273. }
  274. //==================================================================================================
  275. struct AudioSessionHolder
  276. {
  277. AudioSessionHolder()
  278. {
  279. AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
  280. }
  281. static void interruptionListenerCallback (void* client, UInt32 interruptionType)
  282. {
  283. const Array <IPhoneAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;
  284. for (int i = activeDevices.size(); --i >= 0;)
  285. activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
  286. }
  287. Array <IPhoneAudioIODevice*> activeDevices;
  288. };
  289. static AudioSessionHolder& getSessionHolder()
  290. {
  291. static AudioSessionHolder audioSessionHolder;
  292. return audioSessionHolder;
  293. }
  294. void interruptionListener (const UInt32 interruptionType)
  295. {
  296. /*if (interruptionType == kAudioSessionBeginInterruption)
  297. {
  298. isRunning = false;
  299. AudioOutputUnitStop (audioUnit);
  300. if (juce_iPhoneShowModalAlert ("Audio Interrupted",
  301. "This could have been interrupted by another application or by unplugging a headset",
  302. @"Resume",
  303. @"Cancel"))
  304. {
  305. isRunning = true;
  306. routingChanged (nullptr);
  307. }
  308. }*/
  309. if (interruptionType == kAudioSessionEndInterruption)
  310. {
  311. isRunning = true;
  312. AudioSessionSetActive (true);
  313. AudioOutputUnitStart (audioUnit);
  314. }
  315. }
  316. //==================================================================================================
  317. static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
  318. UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
  319. {
  320. return static_cast <IPhoneAudioIODevice*> (client)->process (flags, time, numFrames, data);
  321. }
  322. static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
  323. {
  324. static_cast <IPhoneAudioIODevice*> (client)->routingChanged (propertyValue);
  325. }
  326. //==================================================================================================
  327. void resetFormat (const int numChannels) noexcept
  328. {
  329. zerostruct (format);
  330. format.mFormatID = kAudioFormatLinearPCM;
  331. format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
  332. format.mBitsPerChannel = 8 * sizeof (short);
  333. format.mChannelsPerFrame = numChannels;
  334. format.mFramesPerPacket = 1;
  335. format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
  336. }
  337. bool createAudioUnit()
  338. {
  339. if (audioUnit != 0)
  340. {
  341. AudioComponentInstanceDispose (audioUnit);
  342. audioUnit = 0;
  343. }
  344. resetFormat (2);
  345. AudioComponentDescription desc;
  346. desc.componentType = kAudioUnitType_Output;
  347. desc.componentSubType = kAudioUnitSubType_RemoteIO;
  348. desc.componentManufacturer = kAudioUnitManufacturer_Apple;
  349. desc.componentFlags = 0;
  350. desc.componentFlagsMask = 0;
  351. AudioComponent comp = AudioComponentFindNext (0, &desc);
  352. AudioComponentInstanceNew (comp, &audioUnit);
  353. if (audioUnit == 0)
  354. return false;
  355. if (numInputChannels > 0)
  356. {
  357. const UInt32 one = 1;
  358. AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
  359. }
  360. {
  361. AudioChannelLayout layout;
  362. layout.mChannelBitmap = 0;
  363. layout.mNumberChannelDescriptions = 0;
  364. layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
  365. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
  366. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
  367. }
  368. {
  369. AURenderCallbackStruct inputProc;
  370. inputProc.inputProc = processStatic;
  371. inputProc.inputProcRefCon = this;
  372. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
  373. }
  374. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
  375. AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
  376. AudioUnitInitialize (audioUnit);
  377. return true;
  378. }
  379. // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
  380. // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
  381. static void fixAudioRouteIfSetToReceiver()
  382. {
  383. CFStringRef audioRoute = 0;
  384. UInt32 propertySize = sizeof (audioRoute);
  385. if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
  386. {
  387. NSString* route = (NSString*) audioRoute;
  388. //DBG ("audio route: " + nsStringToJuce (route));
  389. if ([route hasPrefix: @"Receiver"])
  390. {
  391. UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
  392. AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
  393. }
  394. CFRelease (audioRoute);
  395. }
  396. }
  397. JUCE_DECLARE_NON_COPYABLE (IPhoneAudioIODevice);
  398. };
  399. //==============================================================================
  400. class IPhoneAudioIODeviceType : public AudioIODeviceType
  401. {
  402. public:
  403. //==============================================================================
  404. IPhoneAudioIODeviceType()
  405. : AudioIODeviceType ("iPhone Audio")
  406. {
  407. }
  408. void scanForDevices() {}
  409. StringArray getDeviceNames (bool wantInputNames) const
  410. {
  411. return StringArray ("iPhone Audio");
  412. }
  413. int getDefaultDeviceIndex (bool forInput) const
  414. {
  415. return 0;
  416. }
  417. int getIndexOfDevice (AudioIODevice* device, bool asInput) const
  418. {
  419. return device != nullptr ? 0 : -1;
  420. }
  421. bool hasSeparateInputsAndOutputs() const { return false; }
  422. AudioIODevice* createDevice (const String& outputDeviceName,
  423. const String& inputDeviceName)
  424. {
  425. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  426. return new IPhoneAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  427. : inputDeviceName);
  428. return nullptr;
  429. }
  430. private:
  431. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (IPhoneAudioIODeviceType);
  432. };
  433. //==============================================================================
  434. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
  435. {
  436. return new IPhoneAudioIODeviceType();
  437. }