The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

/*
  ==============================================================================

   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-11 by Raw Material Software Ltd.

  ------------------------------------------------------------------------------

   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

  ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.

  ==============================================================================
*/
class IPhoneAudioIODevice  : public AudioIODevice
{
public:
    IPhoneAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, "Audio"),
          actualBufferSize (0),
          isRunning (false),
          audioUnit (0),
          callback (nullptr),
          floatData (1, 2)
    {
        numInputChannels = 2;
        numOutputChannels = 2;
        preferredBufferSize = 0;

        AudioSessionInitialize (0, 0, interruptionListenerStatic, this);
        updateDeviceInfo();
    }

    ~IPhoneAudioIODevice()
    {
        close();
    }
    StringArray getOutputChannelNames()
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames()
    {
        StringArray s;

        if (audioInputIsAvailable)
        {
            s.add ("Left");
            s.add ("Right");
        }

        return s;
    }
    int getNumSampleRates()                 { return 1; }
    double getSampleRate (int index)        { return sampleRate; }

    int getNumBufferSizesAvailable()        { return 6; }
    int getBufferSizeSamples (int index)    { return 1 << (jlimit (0, 5, index) + 6); }  // 64, 128, 256, 512, 1024, 2048
    int getDefaultBufferSize()              { return 1024; }
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double sampleRate,
                 int bufferSize)
    {
        close();

        lastError = String::empty;
        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        // xxx set up channel mapping

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();
        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();
        monoInputChannelNumber = activeInputChans.findNextSetBit (0);

        AudioSessionSetActive (true);

        UInt32 audioCategory = audioInputIsAvailable ? kAudioSessionCategory_PlayAndRecord
                                                     : kAudioSessionCategory_MediaPlayback;
        AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);

        fixAudioRouteIfSetToReceiver();
        updateDeviceInfo();

        Float32 bufferDuration = preferredBufferSize / sampleRate;
        AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
        actualBufferSize = preferredBufferSize;

        prepareFloatBuffers();

        isRunning = true;
        routingChanged (nullptr);  // creates and starts the AU

        lastError = audioUnit != 0 ? "" : "Couldn't open the device";
        return lastError;
    }
    void close()
    {
        if (isRunning)
        {
            isRunning = false;
            AudioSessionSetActive (false);

            if (audioUnit != 0)
            {
                AudioComponentInstanceDispose (audioUnit);
                audioUnit = 0;
            }
        }
    }

    bool isOpen()                       { return isRunning; }

    int getCurrentBufferSizeSamples()   { return actualBufferSize; }
    double getCurrentSampleRate()       { return sampleRate; }
    int getCurrentBitDepth()            { return 16; }

    BigInteger getActiveOutputChannels() const    { return activeOutputChans; }
    BigInteger getActiveInputChannels() const     { return activeInputChans; }

    int getOutputLatencyInSamples()     { return 0; } //xxx
    int getInputLatencyInSamples()      { return 0; } //xxx
    void start (AudioIODeviceCallback* callback_)
    {
        if (isRunning && callback != callback_)
        {
            if (callback_ != nullptr)
                callback_->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = callback_;
        }
    }

    void stop()
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    bool isPlaying()            { return isRunning && callback != nullptr; }
    String getLastError()       { return lastError; }
private:
    //==================================================================================================
    CriticalSection callbackLock;
    Float64 sampleRate;
    int numInputChannels, numOutputChannels;
    int preferredBufferSize, actualBufferSize;
    bool isRunning;
    String lastError;

    AudioStreamBasicDescription format;
    AudioUnit audioUnit;
    UInt32 audioInputIsAvailable;
    AudioIODeviceCallback* callback;
    BigInteger activeOutputChans, activeInputChans;

    AudioSampleBuffer floatData;
    float* inputChannels[3];
    float* outputChannels[3];
    int monoInputChannelNumber, monoOutputChannelNumber;  // channel indices used when only one channel is active
    void prepareFloatBuffers()
    {
        floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);

        zeromem (inputChannels, sizeof (inputChannels));
        zeromem (outputChannels, sizeof (outputChannels));

        for (int i = 0; i < numInputChannels; ++i)
            inputChannels[i] = floatData.getSampleData (i);

        for (int i = 0; i < numOutputChannels; ++i)
            outputChannels[i] = floatData.getSampleData (i + numInputChannels);
    }
    //==================================================================================================
    OSStatus process (AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                      const UInt32 numFrames, AudioBufferList* data)
    {
        OSStatus err = noErr;

        if (audioInputIsAvailable && numInputChannels > 0)
            err = AudioUnitRender (audioUnit, flags, time, 1, numFrames, data);

        const ScopedLock sl (callbackLock);

        if (callback != nullptr)
        {
            if (audioInputIsAvailable && numInputChannels > 0)
            {
                // Convert the interleaved 16-bit input into de-interleaved float channels.
                short* shortData = (short*) data->mBuffers[0].mData;

                if (numInputChannels >= 2)
                {
                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
                    }
                }
                else
                {
                    if (monoInputChannelNumber > 0)
                        ++shortData;

                    for (UInt32 i = 0; i < numFrames; ++i)
                    {
                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
                        ++shortData;
                    }
                }
            }
            else
            {
                for (int i = numInputChannels; --i >= 0;)
                    zeromem (inputChannels[i], sizeof (float) * numFrames);
            }

            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
                                             outputChannels, numOutputChannels, (int) numFrames);

            // Convert the rendered float output back to interleaved 16-bit samples.
            short* shortData = (short*) data->mBuffers[0].mData;
            int n = 0;

            if (numOutputChannels >= 2)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
                }
            }
            else if (numOutputChannels == 1)
            {
                for (UInt32 i = 0; i < numFrames; ++i)
                {
                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
                    shortData [n++] = s;
                    shortData [n++] = s;
                }
            }
            else
            {
                zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
            }
        }
        else
        {
            zeromem (data->mBuffers[0].mData, 2 * sizeof (short) * numFrames);
        }

        return err;
    }
    void updateDeviceInfo()
    {
        UInt32 size = sizeof (sampleRate);
        AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);

        size = sizeof (audioInputIsAvailable);
        AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
    }
    void routingChanged (const void* propertyValue)
    {
        if (! isRunning)
            return;

        if (propertyValue != nullptr)
        {
            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
            SInt32 routeChangeReason;
            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);

            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
                fixAudioRouteIfSetToReceiver();
        }

        updateDeviceInfo();
        createAudioUnit();
        AudioSessionSetActive (true);

        if (audioUnit != 0)
        {
            UInt32 formatSize = sizeof (format);
            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);

            Float32 bufferDuration = preferredBufferSize / sampleRate;
            UInt32 bufferDurationSize = sizeof (bufferDuration);
            AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDuration);
            actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);

            AudioOutputUnitStart (audioUnit);
        }
    }
    void interruptionListener (const UInt32 interruptionType)
    {
        /*if (interruptionType == kAudioSessionBeginInterruption)
        {
            isRunning = false;
            AudioOutputUnitStop (audioUnit);

            if (juce_iPhoneShowModalAlert ("Audio Interrupted",
                                           "This could have been interrupted by another application or by unplugging a headset",
                                           @"Resume",
                                           @"Cancel"))
            {
                isRunning = true;
                routingChanged (nullptr);
            }
        }*/

        if (interruptionType == kAudioSessionEndInterruption)
        {
            isRunning = true;
            AudioSessionSetActive (true);
            AudioOutputUnitStart (audioUnit);
        }
    }
    //==================================================================================================
    static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
                                   UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
    {
        return static_cast <IPhoneAudioIODevice*> (client)->process (flags, time, numFrames, data);
    }

    static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
    {
        static_cast <IPhoneAudioIODevice*> (client)->routingChanged (propertyValue);
    }

    static void interruptionListenerStatic (void* client, UInt32 interruptionType)
    {
        static_cast <IPhoneAudioIODevice*> (client)->interruptionListener (interruptionType);
    }
    //==================================================================================================
    void resetFormat (const int numChannels) noexcept
    {
        zerostruct (format);
        format.mFormatID = kAudioFormatLinearPCM;
        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
        format.mBitsPerChannel = 8 * sizeof (short);
        format.mChannelsPerFrame = numChannels;
        format.mFramesPerPacket = 1;
        format.mBytesPerFrame = format.mBytesPerPacket = numChannels * sizeof (short);
    }
    bool createAudioUnit()
    {
        if (audioUnit != 0)
        {
            AudioComponentInstanceDispose (audioUnit);
            audioUnit = 0;
        }

        resetFormat (2);

        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;

        AudioComponent comp = AudioComponentFindNext (0, &desc);
        AudioComponentInstanceNew (comp, &audioUnit);

        if (audioUnit == 0)
            return false;

        if (numInputChannels > 0)
        {
            const UInt32 one = 1;
            AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
        }

        {
            AudioChannelLayout layout;
            layout.mChannelBitmap = 0;
            layout.mNumberChannelDescriptions = 0;
            layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
        }

        {
            AURenderCallbackStruct inputProc;
            inputProc.inputProc = processStatic;
            inputProc.inputProcRefCon = this;
            AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
        }

        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));

        AudioUnitInitialize (audioUnit);
        return true;
    }
    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
    static void fixAudioRouteIfSetToReceiver()
    {
        CFStringRef audioRoute = 0;
        UInt32 propertySize = sizeof (audioRoute);

        if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
        {
            NSString* route = (NSString*) audioRoute;

            //DBG ("audio route: " + nsStringToJuce (route));

            if ([route hasPrefix: @"Receiver"])
            {
                UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
                AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
            }

            CFRelease (audioRoute);
        }
    }

    JUCE_DECLARE_NON_COPYABLE (IPhoneAudioIODevice);
};
//==============================================================================
class IPhoneAudioIODeviceType  : public AudioIODeviceType
{
public:
    //==============================================================================
    IPhoneAudioIODeviceType()
        : AudioIODeviceType ("iPhone Audio")
    {
    }

    void scanForDevices() {}

    StringArray getDeviceNames (bool wantInputNames) const
    {
        return StringArray ("iPhone Audio");
    }

    int getDefaultDeviceIndex (bool forInput) const
    {
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        return device != nullptr ? 0 : -1;
    }

    bool hasSeparateInputsAndOutputs() const    { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            return new IPhoneAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

        return nullptr;
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (IPhoneAudioIODeviceType);
};
//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
    return new IPhoneAudioIODeviceType();
}
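
Usage note: the sketch below is not part of the original file; it shows one way the factory above might be driven directly, using only methods declared in this file plus JUCE's ScopedPointer, BigInteger, and String. MyCallback is a hypothetical AudioIODeviceCallback subclass supplied by the caller.

// Minimal usage sketch, assuming a caller-provided AudioIODeviceCallback
// subclass named MyCallback (hypothetical name).
ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_iOSAudio());
type->scanForDevices();

ScopedPointer<AudioIODevice> device (type->createDevice ("iPhone Audio", "iPhone Audio"));

BigInteger inputs, outputs;
inputs.setRange (0, 2, true);    // request both input channels
outputs.setRange (0, 2, true);   // request both output channels

MyCallback audioCallback;
const String error (device->open (inputs, outputs, 44100.0, device->getDefaultBufferSize()));

if (error.isEmpty())
{
    device->start (&audioCallback);
    // ... audio runs via MyCallback::audioDeviceIOCallback() ...
    device->stop();
    device->close();
}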