The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================
   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-11 by Raw Material Software Ltd.
  ------------------------------------------------------------------------------
   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.
   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  ------------------------------------------------------------------------------
   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.
  ==============================================================================
*/

#ifndef WASAPI_ENABLE_LOGGING
 #define WASAPI_ENABLE_LOGGING 0
#endif
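
// Win32 WASAPI backend for JUCE's audio device classes. The helpers below wrap
// the usual COM/HRESULT chores: logFailure() translates well-known WASAPI error
// codes into readable log output (only when WASAPI_ENABLE_LOGGING is 1), and
// check() logs a failure and returns whether the HRESULT succeeded.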
//==============================================================================
namespace WasapiClasses
{

void logFailure (HRESULT hr)
{
    (void) hr;
    jassert (hr != 0x800401f0); // If you hit this, it means you're trying to call from
                                // a thread which hasn't been initialised with CoInitialize().

   #if WASAPI_ENABLE_LOGGING
    if (FAILED (hr))
    {
        String e;
        e << Time::getCurrentTime().toString (true, true, true, true)
          << " -- WASAPI error: ";

        switch (hr)
        {
            case E_POINTER: e << "E_POINTER"; break;
            case E_INVALIDARG: e << "E_INVALIDARG"; break;
            case AUDCLNT_E_NOT_INITIALIZED: e << "AUDCLNT_E_NOT_INITIALIZED"; break;
            case AUDCLNT_E_ALREADY_INITIALIZED: e << "AUDCLNT_E_ALREADY_INITIALIZED"; break;
            case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e << "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
            case AUDCLNT_E_DEVICE_INVALIDATED: e << "AUDCLNT_E_DEVICE_INVALIDATED"; break;
            case AUDCLNT_E_NOT_STOPPED: e << "AUDCLNT_E_NOT_STOPPED"; break;
            case AUDCLNT_E_BUFFER_TOO_LARGE: e << "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
            case AUDCLNT_E_OUT_OF_ORDER: e << "AUDCLNT_E_OUT_OF_ORDER"; break;
            case AUDCLNT_E_UNSUPPORTED_FORMAT: e << "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
            case AUDCLNT_E_INVALID_SIZE: e << "AUDCLNT_E_INVALID_SIZE"; break;
            case AUDCLNT_E_DEVICE_IN_USE: e << "AUDCLNT_E_DEVICE_IN_USE"; break;
            case AUDCLNT_E_BUFFER_OPERATION_PENDING: e << "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
            case AUDCLNT_E_THREAD_NOT_REGISTERED: e << "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
            case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e << "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
            case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e << "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
            case AUDCLNT_E_SERVICE_NOT_RUNNING: e << "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
            case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e << "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
            case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e << "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
            case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e << "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
            case AUDCLNT_E_EVENTHANDLE_NOT_SET: e << "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
            case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e << "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
            case AUDCLNT_E_BUFFER_SIZE_ERROR: e << "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
            case AUDCLNT_S_BUFFER_EMPTY: e << "AUDCLNT_S_BUFFER_EMPTY"; break;
            case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e << "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
            default: e << String::toHexString ((int) hr); break;
        }

        DBG (e);
        jassertfalse;
    }
   #endif
}

#undef check

bool check (HRESULT hr)
{
    logFailure (hr);
    return SUCCEEDED (hr);
}
//==============================================================================
String getDeviceID (IMMDevice* const device)
{
    String s;
    WCHAR* deviceId = nullptr;

    if (check (device->GetId (&deviceId)))
    {
        s = String (deviceId);
        CoTaskMemFree (deviceId);
    }

    return s;
}

EDataFlow getDataFlow (const ComSmartPtr<IMMDevice>& device)
{
    EDataFlow flow = eRender;
    ComSmartPtr <IMMEndpoint> endPoint;
    if (check (device.QueryInterface (endPoint)))
        (void) check (endPoint->GetDataFlow (&flow));

    return flow;
}

int refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) noexcept
{
    return roundDoubleToInt (sampleRate * ((double) t) * 0.0000001);
}

void copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) noexcept
{
    memcpy (&dest, src, src->wFormatTag == WAVE_FORMAT_EXTENSIBLE ? sizeof (WAVEFORMATEXTENSIBLE)
                                                                  : sizeof (WAVEFORMATEX));
}
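
// WASAPIDeviceBase wraps a single IMMDevice endpoint. On construction it queries
// the device period and mix format to discover the default/minimum buffer sizes
// and which of the common sample rates the endpoint accepts; openClient() then
// negotiates a concrete stream format (trying float first, then 32/24/16-bit PCM)
// and hooks up the event handle used to drive the audio thread. A session-event
// callback flags externally-triggered sample-rate changes via sampleRateHasChanged.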
//==============================================================================
class WASAPIDeviceBase
{
public:
    WASAPIDeviceBase (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : device (device_),
          sampleRate (0),
          defaultSampleRate (0),
          numChannels (0),
          actualNumChannels (0),
          minBufferSize (0),
          defaultBufferSize (0),
          latencySamples (0),
          useExclusiveMode (useExclusiveMode_),
          sampleRateHasChanged (false)
    {
        clientEvent = CreateEvent (0, false, false, _T("JuceWASAPI"));

        ComSmartPtr <IAudioClient> tempClient (createClient());
        if (tempClient == nullptr)
            return;

        REFERENCE_TIME defaultPeriod, minPeriod;
        if (! check (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
            return;

        WAVEFORMATEX* mixFormat = nullptr;
        if (! check (tempClient->GetMixFormat (&mixFormat)))
            return;

        WAVEFORMATEXTENSIBLE format;
        copyWavFormat (format, mixFormat);
        CoTaskMemFree (mixFormat);

        actualNumChannels = numChannels = format.Format.nChannels;
        defaultSampleRate = format.Format.nSamplesPerSec;
        minBufferSize = refTimeToSamples (minPeriod, defaultSampleRate);
        defaultBufferSize = refTimeToSamples (defaultPeriod, defaultSampleRate);

        rates.addUsingDefaultSort (defaultSampleRate);

        static const double ratesToTest[] = { 44100.0, 48000.0, 88200.0, 96000.0 };

        for (int i = 0; i < numElementsInArray (ratesToTest); ++i)
        {
            if (ratesToTest[i] == defaultSampleRate)
                continue;

            format.Format.nSamplesPerSec = (DWORD) roundDoubleToInt (ratesToTest[i]);

            if (SUCCEEDED (tempClient->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                          (WAVEFORMATEX*) &format, 0)))
                if (! rates.contains (ratesToTest[i]))
                    rates.addUsingDefaultSort (ratesToTest[i]);
        }
    }

    ~WASAPIDeviceBase()
    {
        device = nullptr;
        CloseHandle (clientEvent);
    }

    bool isOk() const noexcept   { return defaultBufferSize > 0 && defaultSampleRate > 0; }

    bool openClient (const double newSampleRate, const BigInteger& newChannels)
    {
        sampleRate = newSampleRate;
        channels = newChannels;
        channels.setRange (actualNumChannels, channels.getHighestBit() + 1 - actualNumChannels, false);
        numChannels = channels.getHighestBit() + 1;

        if (numChannels == 0)
            return true;

        client = createClient();

        if (client != nullptr
             && (tryInitialisingWithFormat (true, 4) || tryInitialisingWithFormat (false, 4)
                  || tryInitialisingWithFormat (false, 3) || tryInitialisingWithFormat (false, 2)))
        {
            sampleRateHasChanged = false;

            channelMaps.clear();
            for (int i = 0; i <= channels.getHighestBit(); ++i)
                if (channels[i])
                    channelMaps.add (i);

            REFERENCE_TIME latency;
            if (check (client->GetStreamLatency (&latency)))
                latencySamples = refTimeToSamples (latency, sampleRate);

            (void) check (client->GetBufferSize (&actualBufferSize));

            createSessionEventCallback();

            return check (client->SetEventHandle (clientEvent));
        }

        return false;
    }

    void closeClient()
    {
        if (client != nullptr)
            client->Stop();

        deleteSessionEventCallback();
        client = nullptr;
        ResetEvent (clientEvent);
    }

    void deviceSampleRateChanged()
    {
        sampleRateHasChanged = true;
    }

    //==============================================================================
    ComSmartPtr <IMMDevice> device;
    ComSmartPtr <IAudioClient> client;
    double sampleRate, defaultSampleRate;
    int numChannels, actualNumChannels;
    int minBufferSize, defaultBufferSize, latencySamples;
    const bool useExclusiveMode;
    Array <double> rates;
    HANDLE clientEvent;
    BigInteger channels;
    Array <int> channelMaps;
    UINT32 actualBufferSize;
    int bytesPerSample;
    bool sampleRateHasChanged;

    virtual void updateFormat (bool isFloat) = 0;

private:
    //==============================================================================
    class SessionEventCallback : public ComBaseClassHelper <IAudioSessionEvents>
    {
    public:
        SessionEventCallback (WASAPIDeviceBase& owner_) : owner (owner_) {}

        JUCE_COMRESULT OnDisplayNameChanged (LPCWSTR, LPCGUID)                { return S_OK; }
        JUCE_COMRESULT OnIconPathChanged (LPCWSTR, LPCGUID)                   { return S_OK; }
        JUCE_COMRESULT OnSimpleVolumeChanged (float, BOOL, LPCGUID)           { return S_OK; }
        JUCE_COMRESULT OnChannelVolumeChanged (DWORD, float*, DWORD, LPCGUID) { return S_OK; }
        JUCE_COMRESULT OnGroupingParamChanged (LPCGUID, LPCGUID)              { return S_OK; }
        JUCE_COMRESULT OnStateChanged (AudioSessionState)                     { return S_OK; }

        JUCE_COMRESULT OnSessionDisconnected (AudioSessionDisconnectReason reason)
        {
            if (reason == DisconnectReasonFormatChanged)
                owner.deviceSampleRateChanged();

            return S_OK;
        }

    private:
        WASAPIDeviceBase& owner;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (SessionEventCallback);
    };

    ComSmartPtr <IAudioSessionControl> audioSessionControl;
    ComSmartPtr <SessionEventCallback> sessionEventCallback;

    void createSessionEventCallback()
    {
        deleteSessionEventCallback();
        client->GetService (__uuidof (IAudioSessionControl),
                            (void**) audioSessionControl.resetAndGetPointerAddress());

        if (audioSessionControl != nullptr)
        {
            sessionEventCallback = new SessionEventCallback (*this);
            audioSessionControl->RegisterAudioSessionNotification (sessionEventCallback);
            sessionEventCallback->Release(); // (required because ComBaseClassHelper objects are constructed with a ref count of 1)
        }
    }

    void deleteSessionEventCallback()
    {
        if (audioSessionControl != nullptr && sessionEventCallback != nullptr)
            audioSessionControl->UnregisterAudioSessionNotification (sessionEventCallback);

        audioSessionControl = nullptr;
        sessionEventCallback = nullptr;
    }

    //==============================================================================
    const ComSmartPtr <IAudioClient> createClient()
    {
        ComSmartPtr <IAudioClient> client;

        if (device != nullptr)
        {
            HRESULT hr = device->Activate (__uuidof (IAudioClient), CLSCTX_INPROC_SERVER, 0, (void**) client.resetAndGetPointerAddress());
            logFailure (hr);
        }

        return client;
    }

    bool tryInitialisingWithFormat (const bool useFloat, const int bytesPerSampleToTry)
    {
        WAVEFORMATEXTENSIBLE format = { 0 };

        if (numChannels <= 2 && bytesPerSampleToTry <= 2)
        {
            format.Format.wFormatTag = WAVE_FORMAT_PCM;
        }
        else
        {
            format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
            format.Format.cbSize = sizeof (WAVEFORMATEXTENSIBLE) - sizeof (WAVEFORMATEX);
        }

        format.Format.nSamplesPerSec = (DWORD) roundDoubleToInt (sampleRate);
        format.Format.nChannels = (WORD) numChannels;
        format.Format.wBitsPerSample = (WORD) (8 * bytesPerSampleToTry);
        format.Format.nAvgBytesPerSec = (DWORD) (format.Format.nSamplesPerSec * numChannels * bytesPerSampleToTry);
        format.Format.nBlockAlign = (WORD) (numChannels * bytesPerSampleToTry);
        format.SubFormat = useFloat ? KSDATAFORMAT_SUBTYPE_IEEE_FLOAT : KSDATAFORMAT_SUBTYPE_PCM;
        format.Samples.wValidBitsPerSample = format.Format.wBitsPerSample;

        switch (numChannels)
        {
            case 1:  format.dwChannelMask = SPEAKER_FRONT_CENTER; break;
            case 2:  format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT; break;
            case 4:  format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 6:  format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 8:  format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER; break;
            default: break;
        }

        WAVEFORMATEXTENSIBLE* nearestFormat = nullptr;

        HRESULT hr = client->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                (WAVEFORMATEX*) &format, useExclusiveMode ? nullptr : (WAVEFORMATEX**) &nearestFormat);
        logFailure (hr);

        if (hr == S_FALSE && format.Format.nSamplesPerSec == nearestFormat->Format.nSamplesPerSec)
        {
            copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
            hr = S_OK;
        }

        CoTaskMemFree (nearestFormat);

        REFERENCE_TIME defaultPeriod = 0, minPeriod = 0;
        if (useExclusiveMode)
            check (client->GetDevicePeriod (&defaultPeriod, &minPeriod));

        GUID session;
        if (hr == S_OK
             && check (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                           AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                                           defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
        {
            actualNumChannels = format.Format.nChannels;
            const bool isFloat = format.Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE && format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
            bytesPerSample = format.Format.wBitsPerSample / 8;

            updateFormat (isFloat);
            return true;
        }

        return false;
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIDeviceBase);
};
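
// WASAPIInputDevice is the capture side: it pulls packets from an
// IAudioCaptureClient, converts them to non-interleaved float32 with an
// AudioData::Converter, and parks any samples left over from a packet in a
// small "reservoir" buffer so they can be delivered on the next callback.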
//==============================================================================
class WASAPIInputDevice : public WASAPIDeviceBase
{
public:
    WASAPIInputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_),
          reservoir (1, 1)
    {
    }

    ~WASAPIInputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        reservoirSize = 0;
        reservoirCapacity = 16384;
        reservoir.setSize (actualNumChannels * reservoirCapacity * sizeof (float));

        return openClient (newSampleRate, newChannels)
                && (numChannels == 0 || check (client->GetService (__uuidof (IAudioCaptureClient),
                                                                   (void**) captureClient.resetAndGetPointerAddress())));
    }

    void close()
    {
        closeClient();
        captureClient = nullptr;
        reservoir.setSize (0);
    }

    template <class SourceType>
    void updateFormatWithType (SourceType*)
    {
        typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> NativeType;
        converter = new AudioData::ConverterInstance <AudioData::Pointer <SourceType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>, NativeType> (actualNumChannels, 1);
    }

    void updateFormat (bool isFloat)
    {
        if (isFloat)                  updateFormatWithType ((AudioData::Float32*) 0);
        else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
        else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
        else                          updateFormatWithType ((AudioData::Int16*) 0);
    }

    void copyBuffers (float** destBuffers, int numDestBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            if (reservoirSize > 0)  // There's stuff in the reservoir, so use that...
            {
                const int samplesToDo = jmin (bufferSize, (int) reservoirSize);

                for (int i = 0; i < numDestBuffers; ++i)
                    converter->convertSamples (destBuffers[i] + offset, 0, reservoir.getData(), channelMaps.getUnchecked(i), samplesToDo);

                bufferSize -= samplesToDo;
                offset += samplesToDo;
                reservoirSize = 0;
            }
            else
            {
                UINT32 packetLength = 0;
                if (! check (captureClient->GetNextPacketSize (&packetLength)))
                    break;

                if (packetLength == 0)
                {
                    if (thread.threadShouldExit()
                         || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
                        break;

                    continue;
                }

                uint8* inputData;
                UINT32 numSamplesAvailable;
                DWORD flags;

                if (check (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
                {
                    const int samplesToDo = jmin (bufferSize, (int) numSamplesAvailable);

                    for (int i = 0; i < numDestBuffers; ++i)
                        converter->convertSamples (destBuffers[i] + offset, 0, inputData, channelMaps.getUnchecked(i), samplesToDo);

                    bufferSize -= samplesToDo;
                    offset += samplesToDo;

                    if (samplesToDo < (int) numSamplesAvailable)
                    {
                        reservoirSize = jmin ((int) (numSamplesAvailable - samplesToDo), reservoirCapacity);
                        memcpy ((uint8*) reservoir.getData(), inputData + bytesPerSample * actualNumChannels * samplesToDo,
                                (size_t) (bytesPerSample * actualNumChannels * reservoirSize));
                    }

                    captureClient->ReleaseBuffer (numSamplesAvailable);
                }
            }
        }
    }

    ComSmartPtr <IAudioCaptureClient> captureClient;
    MemoryBlock reservoir;
    int reservoirSize, reservoirCapacity;
    ScopedPointer <AudioData::Converter> converter;

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIInputDevice);
};
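
// WASAPIOutputDevice is the render side: copyBuffers() waits for space in the
// endpoint buffer (or writes a full block in exclusive mode), then uses an
// AudioData::Converter to interleave the float32 source channels into whatever
// sample format was negotiated for the device.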
//==============================================================================
class WASAPIOutputDevice : public WASAPIDeviceBase
{
public:
    WASAPIOutputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_)
    {
    }

    ~WASAPIOutputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        return openClient (newSampleRate, newChannels)
                && (numChannels == 0 || check (client->GetService (__uuidof (IAudioRenderClient), (void**) renderClient.resetAndGetPointerAddress())));
    }

    void close()
    {
        closeClient();
        renderClient = nullptr;
    }

    template <class DestType>
    void updateFormatWithType (DestType*)
    {
        typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> NativeType;
        converter = new AudioData::ConverterInstance <NativeType, AudioData::Pointer <DestType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> > (1, actualNumChannels);
    }

    void updateFormat (bool isFloat)
    {
        if (isFloat)                  updateFormatWithType ((AudioData::Float32*) 0);
        else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
        else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
        else                          updateFormatWithType ((AudioData::Int16*) 0);
    }

    void copyBuffers (const float** const srcBuffers, const int numSrcBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            UINT32 padding = 0;
            if (! check (client->GetCurrentPadding (&padding)))
                return;

            int samplesToDo = useExclusiveMode ? bufferSize
                                               : jmin ((int) (actualBufferSize - padding), bufferSize);

            if (samplesToDo <= 0)
            {
                if (thread.threadShouldExit()
                     || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
                    break;

                continue;
            }

            uint8* outputData = nullptr;
            if (check (renderClient->GetBuffer ((UINT32) samplesToDo, &outputData)))
            {
                for (int i = 0; i < numSrcBuffers; ++i)
                    converter->convertSamples (outputData, channelMaps.getUnchecked(i), srcBuffers[i] + offset, 0, samplesToDo);

                renderClient->ReleaseBuffer ((UINT32) samplesToDo, 0);

                offset += samplesToDo;
                bufferSize -= samplesToDo;
            }
        }
    }

    ComSmartPtr <IAudioRenderClient> renderClient;
    ScopedPointer <AudioData::Converter> converter;

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIOutputDevice);
};
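
// WASAPIAudioIODevice pairs one optional input endpoint with one optional output
// endpoint and presents them to JUCE as a single AudioIODevice. It owns the audio
// thread: run() raises itself to MMCSS "Pro Audio" priority, pulls input, invokes
// the registered AudioIODeviceCallback, and pushes the result to the output
// device, block by block.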
//==============================================================================
class WASAPIAudioIODevice  : public AudioIODevice,
                             public Thread
{
public:
    WASAPIAudioIODevice (const String& deviceName,
                         const String& outputDeviceId_,
                         const String& inputDeviceId_,
                         const bool useExclusiveMode_)
        : AudioIODevice (deviceName, "Windows Audio"),
          Thread ("Juce WASAPI"),
          outputDeviceId (outputDeviceId_),
          inputDeviceId (inputDeviceId_),
          useExclusiveMode (useExclusiveMode_),
          isOpen_ (false),
          isStarted (false),
          currentBufferSizeSamples (0),
          currentSampleRate (0),
          callback (nullptr)
    {
    }

    ~WASAPIAudioIODevice()
    {
        close();
    }

    bool initialise()
    {
        latencyIn = latencyOut = 0;
        Array <double> ratesIn, ratesOut;

        if (createDevices())
        {
            jassert (inputDevice != nullptr || outputDevice != nullptr);

            if (inputDevice != nullptr && outputDevice != nullptr)
            {
                defaultSampleRate = jmin (inputDevice->defaultSampleRate, outputDevice->defaultSampleRate);
                minBufferSize = jmin (inputDevice->minBufferSize, outputDevice->minBufferSize);
                defaultBufferSize = jmax (inputDevice->defaultBufferSize, outputDevice->defaultBufferSize);
                sampleRates = inputDevice->rates;
                sampleRates.removeValuesNotIn (outputDevice->rates);
            }
            else
            {
                WASAPIDeviceBase* d = inputDevice != nullptr ? static_cast<WASAPIDeviceBase*> (inputDevice)
                                                             : static_cast<WASAPIDeviceBase*> (outputDevice);
                defaultSampleRate = d->defaultSampleRate;
                minBufferSize = d->minBufferSize;
                defaultBufferSize = d->defaultBufferSize;
                sampleRates = d->rates;
            }

            bufferSizes.addUsingDefaultSort (defaultBufferSize);
            if (minBufferSize != defaultBufferSize)
                bufferSizes.addUsingDefaultSort (minBufferSize);

            int n = 64;
            for (int i = 0; i < 40; ++i)
            {
                if (n >= minBufferSize && n <= 2048 && ! bufferSizes.contains (n))
                    bufferSizes.addUsingDefaultSort (n);

                n += (n < 512) ? 32 : (n < 1024 ? 64 : 128);
            }

            return true;
        }

        return false;
    }

    StringArray getOutputChannelNames()
    {
        StringArray outChannels;

        if (outputDevice != nullptr)
            for (int i = 1; i <= outputDevice->actualNumChannels; ++i)
                outChannels.add ("Output channel " + String (i));

        return outChannels;
    }

    StringArray getInputChannelNames()
    {
        StringArray inChannels;

        if (inputDevice != nullptr)
            for (int i = 1; i <= inputDevice->actualNumChannels; ++i)
                inChannels.add ("Input channel " + String (i));

        return inChannels;
    }

    int getNumSampleRates()                     { return sampleRates.size(); }
    double getSampleRate (int index)            { return sampleRates [index]; }
    int getNumBufferSizesAvailable()            { return bufferSizes.size(); }
    int getBufferSizeSamples (int index)        { return bufferSizes [index]; }
    int getDefaultBufferSize()                  { return defaultBufferSize; }
    int getCurrentBufferSizeSamples()           { return currentBufferSizeSamples; }
    double getCurrentSampleRate()               { return currentSampleRate; }
    int getCurrentBitDepth()                    { return 32; }
    int getOutputLatencyInSamples()             { return latencyOut; }
    int getInputLatencyInSamples()              { return latencyIn; }
    BigInteger getActiveOutputChannels() const  { return outputDevice != nullptr ? outputDevice->channels : BigInteger(); }
    BigInteger getActiveInputChannels() const   { return inputDevice != nullptr ? inputDevice->channels : BigInteger(); }
    String getLastError()                       { return lastError; }

    String open (const BigInteger& inputChannels, const BigInteger& outputChannels,
                 double sampleRate, int bufferSizeSamples)
    {
        close();
        lastError = String::empty;

        if (sampleRates.size() == 0 && inputDevice != nullptr && outputDevice != nullptr)
        {
            lastError = "The input and output devices don't share a common sample rate!";
            return lastError;
        }

        currentBufferSizeSamples = bufferSizeSamples <= 0 ? defaultBufferSize : jmax (bufferSizeSamples, minBufferSize);
        currentSampleRate = sampleRate > 0 ? sampleRate : defaultSampleRate;

        if (inputDevice != nullptr && ! inputDevice->open (currentSampleRate, inputChannels))
        {
            lastError = "Couldn't open the input device!";
            return lastError;
        }

        if (outputDevice != nullptr && ! outputDevice->open (currentSampleRate, outputChannels))
        {
            close();
            lastError = "Couldn't open the output device!";
            return lastError;
        }

        if (inputDevice != nullptr)  ResetEvent (inputDevice->clientEvent);
        if (outputDevice != nullptr) ResetEvent (outputDevice->clientEvent);

        startThread (8);
        Thread::sleep (5);

        if (inputDevice != nullptr && inputDevice->client != nullptr)
        {
            latencyIn = (int) (inputDevice->latencySamples + currentBufferSizeSamples);

            if (! check (inputDevice->client->Start()))
            {
                close();
                lastError = "Couldn't start the input device!";
                return lastError;
            }
        }

        if (outputDevice != nullptr && outputDevice->client != nullptr)
        {
            latencyOut = (int) (outputDevice->latencySamples + currentBufferSizeSamples);

            if (! check (outputDevice->client->Start()))
            {
                close();
                lastError = "Couldn't start the output device!";
                return lastError;
            }
        }

        isOpen_ = true;
        return lastError;
    }

    void close()
    {
        stop();
        signalThreadShouldExit();

        if (inputDevice != nullptr)  SetEvent (inputDevice->clientEvent);
        if (outputDevice != nullptr) SetEvent (outputDevice->clientEvent);

        stopThread (5000);

        if (inputDevice != nullptr)  inputDevice->close();
        if (outputDevice != nullptr) outputDevice->close();

        isOpen_ = false;
    }

    bool isOpen()     { return isOpen_ && isThreadRunning(); }
    bool isPlaying()  { return isStarted && isOpen_ && isThreadRunning(); }

    void start (AudioIODeviceCallback* call)
    {
        if (isOpen_ && call != nullptr && ! isStarted)
        {
            if (! isThreadRunning())
            {
                // something's gone wrong and the thread's stopped..
                isOpen_ = false;
                return;
            }

            call->audioDeviceAboutToStart (this);

            const ScopedLock sl (startStopLock);
            callback = call;
            isStarted = true;
        }
    }

    void stop()
    {
        if (isStarted)
        {
            AudioIODeviceCallback* const callbackLocal = callback;

            {
                const ScopedLock sl (startStopLock);
                isStarted = false;
            }

            if (callbackLocal != nullptr)
                callbackLocal->audioDeviceStopped();
        }
    }

    void setMMThreadPriority()
    {
        DynamicLibrary dll ("avrt.dll");
        JUCE_LOAD_WINAPI_FUNCTION (dll, AvSetMmThreadCharacteristicsW, avSetMmThreadCharacteristics, HANDLE, (LPCWSTR, LPDWORD))
        JUCE_LOAD_WINAPI_FUNCTION (dll, AvSetMmThreadPriority, avSetMmThreadPriority, HANDLE, (HANDLE, AVRT_PRIORITY))

        if (avSetMmThreadCharacteristics != 0 && avSetMmThreadPriority != 0)
        {
            DWORD dummy = 0;
            HANDLE h = avSetMmThreadCharacteristics (L"Pro Audio", &dummy);

            if (h != 0)
                avSetMmThreadPriority (h, AVRT_PRIORITY_NORMAL);
        }
    }

    void run()
    {
        setMMThreadPriority();

        const int bufferSize = currentBufferSizeSamples;
        const int numInputBuffers = getActiveInputChannels().countNumberOfSetBits();
        const int numOutputBuffers = getActiveOutputChannels().countNumberOfSetBits();
        bool sampleRateChanged = false;

        AudioSampleBuffer ins (jmax (1, numInputBuffers), bufferSize + 32);
        AudioSampleBuffer outs (jmax (1, numOutputBuffers), bufferSize + 32);
        float** const inputBuffers = ins.getArrayOfChannels();
        float** const outputBuffers = outs.getArrayOfChannels();
        ins.clear();

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                inputDevice->copyBuffers (inputBuffers, numInputBuffers, bufferSize, *this);

                if (threadShouldExit())
                    break;

                if (inputDevice->sampleRateHasChanged)
                    sampleRateChanged = true;
            }

            JUCE_TRY
            {
                const ScopedLock sl (startStopLock);

                if (isStarted)
                    callback->audioDeviceIOCallback (const_cast <const float**> (inputBuffers), numInputBuffers,
                                                     outputBuffers, numOutputBuffers, bufferSize);
                else
                    outs.clear();
            }
            JUCE_CATCH_EXCEPTION

            if (outputDevice != nullptr)
            {
                outputDevice->copyBuffers (const_cast <const float**> (outputBuffers), numOutputBuffers, bufferSize, *this);

                if (outputDevice->sampleRateHasChanged)
                    sampleRateChanged = true;
            }

            if (sampleRateChanged)
            {
                // xxx one of the devices has had its sample rate changed externally.. not 100% sure how
                // to handle this..
            }
        }
    }

    //==============================================================================
    String outputDeviceId, inputDeviceId;
    String lastError;

private:
    // Device stats...
    ScopedPointer<WASAPIInputDevice> inputDevice;
    ScopedPointer<WASAPIOutputDevice> outputDevice;
    const bool useExclusiveMode;
    double defaultSampleRate;
    int minBufferSize, defaultBufferSize;
    int latencyIn, latencyOut;
    Array <double> sampleRates;
    Array <int> bufferSizes;

    // Active state...
    bool isOpen_, isStarted;
    int currentBufferSizeSamples;
    double currentSampleRate;
    AudioIODeviceCallback* callback;
    CriticalSection startStopLock;

    //==============================================================================
    bool createDevices()
    {
        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
            return false;

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        if (! check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress())))
            return false;

        UINT32 numDevices = 0;
        if (! check (deviceCollection->GetCount (&numDevices)))
            return false;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
                continue;

            const String deviceId (getDeviceID (device));
            if (deviceId.isEmpty())
                continue;

            const EDataFlow flow = getDataFlow (device);

            if (deviceId == inputDeviceId && flow == eCapture)
                inputDevice = new WASAPIInputDevice (device, useExclusiveMode);
            else if (deviceId == outputDeviceId && flow == eRender)
                outputDevice = new WASAPIOutputDevice (device, useExclusiveMode);
        }

        return (outputDeviceId.isEmpty() || (outputDevice != nullptr && outputDevice->isOk()))
            && (inputDeviceId.isEmpty() || (inputDevice != nullptr && inputDevice->isOk()));
    }

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODevice);
};
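
// WASAPIAudioIODeviceType enumerates the active render and capture endpoints via
// IMMDeviceEnumerator, placing the system default device first in each list, and
// rescans automatically (through DeviceChangeDetector) when Windows reports a
// device change.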
//==============================================================================
class WASAPIAudioIODeviceType  : public AudioIODeviceType,
                                 private DeviceChangeDetector
{
public:
    WASAPIAudioIODeviceType()
        : AudioIODeviceType ("Windows Audio"),
          DeviceChangeDetector (L"Windows Audio"),
          hasScanned (false)
    {
    }

    //==============================================================================
    void scanForDevices()
    {
        hasScanned = true;

        outputDeviceNames.clear();
        inputDeviceNames.clear();
        outputDeviceIds.clear();
        inputDeviceIds.clear();

        scan (outputDeviceNames, inputDeviceNames,
              outputDeviceIds, inputDeviceIds);
    }

    StringArray getDeviceNames (bool wantInputNames) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        return wantInputNames ? inputDeviceNames
                              : outputDeviceNames;
    }

    int getDefaultDeviceIndex (bool /*forInput*/) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        WASAPIAudioIODevice* const d = dynamic_cast <WASAPIAudioIODevice*> (device);
        return d == nullptr ? -1 : (asInput ? inputDeviceIds.indexOf (d->inputDeviceId)
                                            : outputDeviceIds.indexOf (d->outputDeviceId));
    }

    bool hasSeparateInputsAndOutputs() const    { return true; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        const bool useExclusiveMode = false;
        ScopedPointer<WASAPIAudioIODevice> device;

        const int outputIndex = outputDeviceNames.indexOf (outputDeviceName);
        const int inputIndex = inputDeviceNames.indexOf (inputDeviceName);

        if (outputIndex >= 0 || inputIndex >= 0)
        {
            device = new WASAPIAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                            : inputDeviceName,
                                              outputDeviceIds [outputIndex],
                                              inputDeviceIds [inputIndex],
                                              useExclusiveMode);

            if (! device->initialise())
                device = nullptr;
        }

        return device.release();
    }

    //==============================================================================
    StringArray outputDeviceNames, outputDeviceIds;
    StringArray inputDeviceNames, inputDeviceIds;

private:
    bool hasScanned;

    //==============================================================================
    static String getDefaultEndpoint (IMMDeviceEnumerator* const enumerator, const bool forCapture)
    {
        String s;
        IMMDevice* dev = nullptr;

        if (check (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
                                                        eMultimedia, &dev)))
        {
            WCHAR* deviceId = nullptr;

            if (check (dev->GetId (&deviceId)))
            {
                s = deviceId;
                CoTaskMemFree (deviceId);
            }

            dev->Release();
        }

        return s;
    }

    //==============================================================================
    void scan (StringArray& outputDeviceNames,
               StringArray& inputDeviceNames,
               StringArray& outputDeviceIds,
               StringArray& inputDeviceIds)
    {
        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
            return;

        const String defaultRenderer (getDefaultEndpoint (enumerator, false));
        const String defaultCapture (getDefaultEndpoint (enumerator, true));

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        UINT32 numDevices = 0;

        if (! (check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress()))
                && check (deviceCollection->GetCount (&numDevices))))
            return;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
                continue;

            DWORD state = 0;
            if (! (check (device->GetState (&state)) && state == DEVICE_STATE_ACTIVE))
                continue;

            const String deviceId (getDeviceID (device));
            String name;

            {
                ComSmartPtr <IPropertyStore> properties;
                if (! check (device->OpenPropertyStore (STGM_READ, properties.resetAndGetPointerAddress())))
                    continue;

                PROPVARIANT value;
                PropVariantInit (&value);
                if (check (properties->GetValue (PKEY_Device_FriendlyName, &value)))
                    name = value.pwszVal;

                PropVariantClear (&value);
            }

            const EDataFlow flow = getDataFlow (device);

            if (flow == eRender)
            {
                const int index = (deviceId == defaultRenderer) ? 0 : -1;
                outputDeviceIds.insert (index, deviceId);
                outputDeviceNames.insert (index, name);
            }
            else if (flow == eCapture)
            {
                const int index = (deviceId == defaultCapture) ? 0 : -1;
                inputDeviceIds.insert (index, deviceId);
                inputDeviceNames.insert (index, name);
            }
        }

        inputDeviceNames.appendNumbersToDuplicates (false, false);
        outputDeviceNames.appendNumbersToDuplicates (false, false);
    }

    //==============================================================================
    void systemDeviceChanged()
    {
        StringArray newOutNames, newInNames, newOutIds, newInIds;
        scan (newOutNames, newInNames, newOutIds, newInIds);

        if (newOutNames != outputDeviceNames
             || newInNames != inputDeviceNames
             || newOutIds != outputDeviceIds
             || newInIds != inputDeviceIds)
        {
            hasScanned = true;
            outputDeviceNames = newOutNames;
            inputDeviceNames = newInNames;
            outputDeviceIds = newOutIds;
            inputDeviceIds = newInIds;

            callDeviceChangeListeners();
        }
    }

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODeviceType);
};
}

//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_WASAPI()
{
    if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
        return new WasapiClasses::WASAPIAudioIODeviceType();

    return nullptr;
}
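
/*
    Illustrative usage sketch (not part of this file): how client code might drive
    this backend through the public AudioIODeviceType interface defined above,
    assuming Windows Vista or later so the factory does not return nullptr. The
    SineCallback class, the choice of the first render endpoint, and the stereo
    output mask are hypothetical examples, not anything prescribed by this file.

        struct SineCallback  : public AudioIODeviceCallback
        {
            void audioDeviceAboutToStart (AudioIODevice* device)   { phase = 0.0; rate = device->getCurrentSampleRate(); }
            void audioDeviceStopped()                              {}

            void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                        float** outputChannelData, int numOutputChannels, int numSamples)
            {
                (void) inputChannelData; (void) numInputChannels;

                for (int i = 0; i < numSamples; ++i)
                {
                    const float sample = 0.1f * (float) std::sin (phase);   // quiet 440 Hz test tone
                    phase += 2.0 * double_Pi * 440.0 / rate;

                    for (int ch = 0; ch < numOutputChannels; ++ch)
                        outputChannelData[ch][i] = sample;
                }
            }

            double phase, rate;
        };

        ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_WASAPI());
        type->scanForDevices();

        const StringArray outputs (type->getDeviceNames (false));            // render endpoints, default first
        ScopedPointer<AudioIODevice> device (type->createDevice (outputs[0], String::empty));

        SineCallback cb;
        if (device != nullptr
             && device->open (BigInteger(), BigInteger (3), 0, 0).isEmpty()) // no inputs, first two outputs,
        {                                                                    // default rate and buffer size
            device->start (&cb);
            // ... later: device->stop(); device->close();
        }
*/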