The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-11 by Raw Material Software Ltd.

  ------------------------------------------------------------------------------

   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

  ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.

  ==============================================================================
*/
class IPhoneAudioIODevice  : public AudioIODevice
{
public:
    IPhoneAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, "Audio"),
          actualBufferSize (0),
          isRunning (false),
          audioUnit (0),
          callback (nullptr),
          floatData (1, 2)
    {
        getSessionHolder().activeDevices.add (this);

        numInputChannels = 2;
        numOutputChannels = 2;
        preferredBufferSize = 0;

        updateDeviceInfo();
    }

    ~IPhoneAudioIODevice()
    {
        getSessionHolder().activeDevices.removeValue (this);
        close();
    }

    StringArray getOutputChannelNames()
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames()
    {
        StringArray s;

        if (audioInputIsAvailable)
        {
            s.add ("Left");
            s.add ("Right");
        }

        return s;
    }

    int getNumSampleRates()                 { return 1; }
    double getSampleRate (int index)        { return sampleRate; }

    int getNumBufferSizesAvailable()        { return 6; }
    int getBufferSizeSamples (int index)    { return 1 << (jlimit (0, 5, index) + 6); }
    int getDefaultBufferSize()              { return 1024; }
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double sampleRate,
                 int bufferSize)
    {
        close();

        lastError = String::empty;
        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        // xxx set up channel mapping

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();
        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();
        monoInputChannelNumber = activeInputChans.findNextSetBit (0);

        AudioSessionSetActive (true);

        UInt32 audioCategory = audioInputIsAvailable ? kAudioSessionCategory_PlayAndRecord
                                                     : kAudioSessionCategory_MediaPlayback;

        AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);

        fixAudioRouteIfSetToReceiver();
        updateDeviceInfo();

        Float32 bufferDuration = preferredBufferSize / sampleRate;
        AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
        actualBufferSize = preferredBufferSize;

        prepareFloatBuffers();

        isRunning = true;
        routingChanged (nullptr);  // creates and starts the AU

        lastError = audioUnit != 0 ? "" : "Couldn't open the device";
        return lastError;
    }
    void close()
    {
        if (isRunning)
        {
            isRunning = false;
            AudioSessionSetActive (false);

            if (audioUnit != 0)
            {
                AudioComponentInstanceDispose (audioUnit);
                audioUnit = 0;
            }
        }
    }

    bool isOpen()                       { return isRunning; }

    int getCurrentBufferSizeSamples()   { return actualBufferSize; }
    double getCurrentSampleRate()       { return sampleRate; }
    int getCurrentBitDepth()            { return 16; }

    BigInteger getActiveOutputChannels() const   { return activeOutputChans; }
    BigInteger getActiveInputChannels() const    { return activeInputChans; }

    int getOutputLatencyInSamples()     { return 0; } //xxx
    int getInputLatencyInSamples()      { return 0; } //xxx

    void start (AudioIODeviceCallback* callback_)
    {
        if (isRunning && callback != callback_)
        {
            if (callback_ != nullptr)
                callback_->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = callback_;
        }
    }

    void stop()
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying()        { return isRunning && callback != nullptr; }
    String getLastError()   { return lastError; }
private:
    //==================================================================================================
    CriticalSection callbackLock;
    Float64 sampleRate;
    int numInputChannels, numOutputChannels;
    int preferredBufferSize, actualBufferSize;
    bool isRunning;
    String lastError;

    AudioStreamBasicDescription format;
    AudioUnit audioUnit;
    UInt32 audioInputIsAvailable;
    AudioIODeviceCallback* callback;
    BigInteger activeOutputChans, activeInputChans;

    AudioSampleBuffer floatData;
    float* inputChannels[3];
    float* outputChannels[3];
    int monoInputChannelNumber, monoOutputChannelNumber;   // channel indices set from findNextSetBit(), so int rather than bool
    void prepareFloatBuffers()
    {
        floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
        zeromem (inputChannels, sizeof (inputChannels));
        zeromem (outputChannels, sizeof (outputChannels));

        for (int i = 0; i < numInputChannels; ++i)
            inputChannels[i] = floatData.getSampleData (i);

        for (int i = 0; i < numOutputChannels; ++i)
            outputChannels[i] = floatData.getSampleData (i + numInputChannels);
    }
    //==================================================================================================
    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                      const UInt32 numFrames, AudioBufferList* data)
    {
        OSStatus err = noErr;

        if (audioInputIsAvailable && numInputChannels > 0)
            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);

        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
        {
            if (audioInputIsAvailable && numInputChannels > 0)
            {
                // convert the interleaved 16-bit input samples into the float buffers
                short* shortData = (short*) data->mBuffers[0].mData;

                if (numInputChannels >= 2)
                {
                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
                    }
                }
                else
                {
                    if (monoInputChannelNumber > 0)
                        ++shortData;

                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        ++shortData;
                    }
                }
            }
            else
            {
                for (int i = numInputChannels; --i >= 0;)
                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }

            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
                                             outputChannels, numOutputChannels, (int) numFrames);

            // convert the float output back into interleaved 16-bit samples
            short* shortData = (short*) data->mBuffers[0].mData;
            int n = 0;

            if (numOutputChannels >= 2)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                }
            }
            else if (numOutputChannels == 1)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                    shortData [n++] = s;
                    shortData [n++] = s;
                }
            }
            else
            {
                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
            }
        }
        else
        {
            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
        }

        return err;
    }
    void updateDeviceInfo()
    {
        UInt32 size = sizeof (sampleRate);
        AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);

        size = sizeof (audioInputIsAvailable);
        AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
    }
    void routingChanged (const void* propertyValue)
    {
        if (! isRunning)
            return;

        if (propertyValue != nullptr)
        {
            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
            SInt32 routeChangeReason;
            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);

            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
                fixAudioRouteIfSetToReceiver();
        }

        updateDeviceInfo();
        createAudioUnit();
        AudioSessionSetActive (true);

        if (audioUnit != 0)
        {
            UInt32 formatSize = sizeof (format);
            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);

            // query the buffer duration the session actually granted, so actualBufferSize matches the hardware
            Float32 bufferDuration = preferredBufferSize / sampleRate;
            UInt32 bufferDurationSize = sizeof (bufferDuration);
            AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDuration);
            actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);

            AudioOutputUnitStart (audioUnit);
        }
    }
    //==================================================================================================
    struct AudioSessionHolder
    {
        AudioSessionHolder()
        {
            AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
        }

        static void interruptionListenerCallback (void* client, UInt32 interruptionType)
        {
            const Array <IPhoneAudioIODevice*>& activeDevices = static_cast <AudioSessionHolder*> (client)->activeDevices;

            for (int i = activeDevices.size(); --i >= 0;)
                activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
        }

        Array <IPhoneAudioIODevice*> activeDevices;
    };

    static AudioSessionHolder& getSessionHolder()
    {
        static AudioSessionHolder audioSessionHolder;
        return audioSessionHolder;
    }

    void interruptionListener (const UInt32 interruptionType)
    {
        /*if (interruptionType == kAudioSessionBeginInterruption)
        {
            isRunning = false;
            AudioOutputUnitStop (audioUnit);

            if (juce_iPhoneShowModalAlert ("Audio Interrupted",
                                           "This could have been interrupted by another application or by unplugging a headset",
                                           @"Resume",
                                           @"Cancel"))
            {
                isRunning = true;
                routingChanged (nullptr);
            }
        }*/

        if (interruptionType == kAudioSessionEndInterruption)
        {
            isRunning = true;
            AudioSessionSetActive (true);
            AudioOutputUnitStart (audioUnit);
        }
    }
    //==================================================================================================
    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
    {
        return static_cast <IPhoneAudioIODevice*> (client)->process (flags, time, numFrames, data);
    }

    static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
    {
        static_cast <IPhoneAudioIODevice*> (client)->routingChanged (propertyValue);
    }

    //==================================================================================================
    void resetFormat (const int numChannels) noexcept
    {
        zerostruct (format);
        format.mFormatID = kAudioFormatLinearPCM;
        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
        format.mBitsPerChannel = 8 * sizeof (short);
        format.mChannelsPerFrame = numChannels;
        format.mFramesPerPacket = 1;
        format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
    }
    bool createAudioUnit()
    {
        if (audioUnit != 0)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = 0;
        }

        resetFormat (2);

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext (0, &desc);
        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == 0)
            return false;

        if (numInputChannels > 0)
        {
            const UInt32 one = 1;
            AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
        }

        {
            AudioChannelLayout layout;
            layout.mChannelBitmap = 0;
            layout.mNumberChannelDescriptions = 0;
            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input,  0, &layout, sizeof (layout));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
        }

        {
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = processStatic;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &format, sizeof (format));
        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));

        AudioUnitInitialize (audioUnit);
        return true;
    }
    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
    static void fixAudioRouteIfSetToReceiver()
    {
        CFStringRef audioRoute = 0;
        UInt32 propertySize = sizeof (audioRoute);

        if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
        {
            NSString* route = (NSString*) audioRoute;

            //DBG ("audio route: " + nsStringToJuce (route));

            if ([route hasPrefix: @"Receiver"])
            {
                UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
                AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
            }

            CFRelease (audioRoute);
        }
    }

    JUCE_DECLARE_NON_COPYABLE (IPhoneAudioIODevice);
};
//==============================================================================
class IPhoneAudioIODeviceType  : public AudioIODeviceType
{
public:
    //==============================================================================
    IPhoneAudioIODeviceType()
        : AudioIODeviceType ("iPhone Audio")
    {
    }

    void scanForDevices() {}

    StringArray getDeviceNames (bool wantInputNames) const
    {
        return StringArray ("iPhone Audio");
    }

    int getDefaultDeviceIndex (bool forInput) const
    {
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        return device != nullptr ? 0 : -1;
    }

    bool hasSeparateInputsAndOutputs() const    { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            return new IPhoneAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

        return nullptr;
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (IPhoneAudioIODeviceType);
};
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
    return new IPhoneAudioIODeviceType();
}
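
//==============================================================================
// A minimal usage sketch (illustration only, not part of this file's build):
// an iOS JUCE app normally reaches this device through AudioDeviceManager, which
// calls createAudioIODeviceType_iOSAudio() internally. The callback class name
// "MyCallback" below is hypothetical.
/*
    class MyCallback  : public AudioIODeviceCallback
    {
    public:
        void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                    float** outputChannelData, int numOutputChannels,
                                    int numSamples)
        {
            // clear the outputs (replace with real processing)
            for (int ch = 0; ch < numOutputChannels; ++ch)
                zeromem (outputChannelData[ch], sizeof (float) * numSamples);
        }

        void audioDeviceAboutToStart (AudioIODevice*) {}
        void audioDeviceStopped() {}
    };

    AudioDeviceManager deviceManager;
    deviceManager.initialise (2, 2, nullptr, true);   // stereo in/out, no saved state

    MyCallback callback;
    deviceManager.addAudioCallback (&callback);
*/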