The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes


/*
  ==============================================================================
   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-11 by Raw Material Software Ltd.
  ------------------------------------------------------------------------------
   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.
   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  ------------------------------------------------------------------------------
   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.
  ==============================================================================
*/

#ifndef WASAPI_ENABLE_LOGGING
 #define WASAPI_ENABLE_LOGGING 0
#endif

//==============================================================================
namespace WasapiClasses
{

void logFailure (HRESULT hr)
{
    (void) hr;

   #if WASAPI_ENABLE_LOGGING
    if (FAILED (hr))
    {
        String e;
        e << Time::getCurrentTime().toString (true, true, true, true)
          << " -- WASAPI error: ";

        switch (hr)
        {
            case E_POINTER: e << "E_POINTER"; break;
            case E_INVALIDARG: e << "E_INVALIDARG"; break;
            case AUDCLNT_E_NOT_INITIALIZED: e << "AUDCLNT_E_NOT_INITIALIZED"; break;
            case AUDCLNT_E_ALREADY_INITIALIZED: e << "AUDCLNT_E_ALREADY_INITIALIZED"; break;
            case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e << "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
            case AUDCLNT_E_DEVICE_INVALIDATED: e << "AUDCLNT_E_DEVICE_INVALIDATED"; break;
            case AUDCLNT_E_NOT_STOPPED: e << "AUDCLNT_E_NOT_STOPPED"; break;
            case AUDCLNT_E_BUFFER_TOO_LARGE: e << "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
            case AUDCLNT_E_OUT_OF_ORDER: e << "AUDCLNT_E_OUT_OF_ORDER"; break;
            case AUDCLNT_E_UNSUPPORTED_FORMAT: e << "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
            case AUDCLNT_E_INVALID_SIZE: e << "AUDCLNT_E_INVALID_SIZE"; break;
            case AUDCLNT_E_DEVICE_IN_USE: e << "AUDCLNT_E_DEVICE_IN_USE"; break;
            case AUDCLNT_E_BUFFER_OPERATION_PENDING: e << "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
            case AUDCLNT_E_THREAD_NOT_REGISTERED: e << "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
            case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e << "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
            case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e << "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
            case AUDCLNT_E_SERVICE_NOT_RUNNING: e << "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
            case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e << "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
            case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e << "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
            case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e << "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
            case AUDCLNT_E_EVENTHANDLE_NOT_SET: e << "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
            case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e << "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
            case AUDCLNT_E_BUFFER_SIZE_ERROR: e << "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
            case AUDCLNT_S_BUFFER_EMPTY: e << "AUDCLNT_S_BUFFER_EMPTY"; break;
            case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e << "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
            default: e << String::toHexString ((int) hr); break;
        }

        DBG (e);
        jassertfalse;
    }
   #endif
}

#undef check

bool check (HRESULT hr)
{
    logFailure (hr);
    return SUCCEEDED (hr);
}
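
// The check() helper above is the error-handling pattern used throughout this
// file: each raw COM/WASAPI call is wrapped so that failures are routed through
// logFailure() (when WASAPI_ENABLE_LOGGING is enabled) and the call site only
// has to test a boolean result, e.g.
//
//     if (! check (client->Start()))
//         return false;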

//==============================================================================
String getDeviceID (IMMDevice* const device)
{
    String s;
    WCHAR* deviceId = nullptr;

    if (check (device->GetId (&deviceId)))
    {
        s = String (deviceId);
        CoTaskMemFree (deviceId);
    }

    return s;
}

EDataFlow getDataFlow (const ComSmartPtr<IMMDevice>& device)
{
    EDataFlow flow = eRender;
    ComSmartPtr <IMMEndpoint> endPoint;
    if (check (device.QueryInterface (endPoint)))
        (void) check (endPoint->GetDataFlow (&flow));

    return flow;
}

int refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) noexcept
{
    return roundDoubleToInt (sampleRate * ((double) t) * 0.0000001);
}
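
// REFERENCE_TIME values are expressed in 100-nanosecond units, hence the 1e-7
// factor in refTimeToSamples(). For example, a 10 ms device period (100000
// units) at 48000 Hz gives roundDoubleToInt (48000.0 * 100000 * 0.0000001) = 480 samples.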

void copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) noexcept
{
    memcpy (&dest, src, src->wFormatTag == WAVE_FORMAT_EXTENSIBLE ? sizeof (WAVEFORMATEXTENSIBLE)
                                                                  : sizeof (WAVEFORMATEX));
}

//==============================================================================
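/*
    WASAPIDeviceBase is the shared base for the input and output wrappers below.
    It holds the IMMDevice endpoint, probes its mix format, supported sample
    rates and buffer sizes on construction, and takes care of creating and
    initialising the IAudioClient, mapping the requested channel set, and
    tracking external sample-rate changes via an IAudioSessionEvents callback.
*/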
class WASAPIDeviceBase
{
public:
    WASAPIDeviceBase (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : device (device_),
          sampleRate (0),
          defaultSampleRate (0),
          numChannels (0),
          actualNumChannels (0),
          minBufferSize (0),
          defaultBufferSize (0),
          latencySamples (0),
          useExclusiveMode (useExclusiveMode_),
          sampleRateHasChanged (false)
    {
        clientEvent = CreateEvent (0, false, false, _T("JuceWASAPI"));

        ComSmartPtr <IAudioClient> tempClient (createClient());
        if (tempClient == nullptr)
            return;

        REFERENCE_TIME defaultPeriod, minPeriod;
        if (! check (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
            return;

        WAVEFORMATEX* mixFormat = nullptr;
        if (! check (tempClient->GetMixFormat (&mixFormat)))
            return;

        WAVEFORMATEXTENSIBLE format;
        copyWavFormat (format, mixFormat);
        CoTaskMemFree (mixFormat);

        actualNumChannels = numChannels = format.Format.nChannels;
        defaultSampleRate = format.Format.nSamplesPerSec;
        minBufferSize = refTimeToSamples (minPeriod, defaultSampleRate);
        defaultBufferSize = refTimeToSamples (defaultPeriod, defaultSampleRate);

        rates.addUsingDefaultSort (defaultSampleRate);

        static const double ratesToTest[] = { 44100.0, 48000.0, 88200.0, 96000.0 };

        for (int i = 0; i < numElementsInArray (ratesToTest); ++i)
        {
            if (ratesToTest[i] == defaultSampleRate)
                continue;

            format.Format.nSamplesPerSec = (DWORD) roundDoubleToInt (ratesToTest[i]);

            if (SUCCEEDED (tempClient->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                          (WAVEFORMATEX*) &format, 0)))
                if (! rates.contains (ratesToTest[i]))
                    rates.addUsingDefaultSort (ratesToTest[i]);
        }
    }

    ~WASAPIDeviceBase()
    {
        device = nullptr;
        CloseHandle (clientEvent);
    }

    bool isOk() const noexcept    { return defaultBufferSize > 0 && defaultSampleRate > 0; }

    bool openClient (const double newSampleRate, const BigInteger& newChannels)
    {
        sampleRate = newSampleRate;
        channels = newChannels;
        channels.setRange (actualNumChannels, channels.getHighestBit() + 1 - actualNumChannels, false);
        numChannels = channels.getHighestBit() + 1;

        if (numChannels == 0)
            return true;

        client = createClient();

        if (client != nullptr
             && (tryInitialisingWithFormat (true, 4) || tryInitialisingWithFormat (false, 4)
                  || tryInitialisingWithFormat (false, 3) || tryInitialisingWithFormat (false, 2)))
        {
            sampleRateHasChanged = false;

            channelMaps.clear();
            for (int i = 0; i <= channels.getHighestBit(); ++i)
                if (channels[i])
                    channelMaps.add (i);

            REFERENCE_TIME latency;
            if (check (client->GetStreamLatency (&latency)))
                latencySamples = refTimeToSamples (latency, sampleRate);

            (void) check (client->GetBufferSize (&actualBufferSize));

            createSessionEventCallback();

            return check (client->SetEventHandle (clientEvent));
        }

        return false;
    }

    void closeClient()
    {
        if (client != nullptr)
            client->Stop();

        deleteSessionEventCallback();
        client = nullptr;
        ResetEvent (clientEvent);
    }

    void deviceSampleRateChanged()
    {
        sampleRateHasChanged = true;
    }

    //==============================================================================
    ComSmartPtr <IMMDevice> device;
    ComSmartPtr <IAudioClient> client;
    double sampleRate, defaultSampleRate;
    int numChannels, actualNumChannels;
    int minBufferSize, defaultBufferSize, latencySamples;
    const bool useExclusiveMode;
    Array <double> rates;
    HANDLE clientEvent;
    BigInteger channels;
    Array <int> channelMaps;
    UINT32 actualBufferSize;
    int bytesPerSample;
    bool sampleRateHasChanged;

    virtual void updateFormat (bool isFloat) = 0;

private:
    //==============================================================================
    class SessionEventCallback : public ComBaseClassHelper <IAudioSessionEvents>
    {
    public:
        SessionEventCallback (WASAPIDeviceBase& owner_) : owner (owner_) {}

        JUCE_COMRESULT OnDisplayNameChanged (LPCWSTR, LPCGUID)                { return S_OK; }
        JUCE_COMRESULT OnIconPathChanged (LPCWSTR, LPCGUID)                   { return S_OK; }
        JUCE_COMRESULT OnSimpleVolumeChanged (float, BOOL, LPCGUID)           { return S_OK; }
        JUCE_COMRESULT OnChannelVolumeChanged (DWORD, float*, DWORD, LPCGUID) { return S_OK; }
        JUCE_COMRESULT OnGroupingParamChanged (LPCGUID, LPCGUID)              { return S_OK; }
        JUCE_COMRESULT OnStateChanged (AudioSessionState)                     { return S_OK; }

        JUCE_COMRESULT OnSessionDisconnected (AudioSessionDisconnectReason reason)
        {
            if (reason == DisconnectReasonFormatChanged)
                owner.deviceSampleRateChanged();

            return S_OK;
        }

    private:
        WASAPIDeviceBase& owner;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (SessionEventCallback);
    };

    ComSmartPtr <IAudioSessionControl> audioSessionControl;
    ComSmartPtr <SessionEventCallback> sessionEventCallback;

    void createSessionEventCallback()
    {
        deleteSessionEventCallback();
        client->GetService (__uuidof (IAudioSessionControl),
                            (void**) audioSessionControl.resetAndGetPointerAddress());

        if (audioSessionControl != nullptr)
        {
            sessionEventCallback = new SessionEventCallback (*this);
            audioSessionControl->RegisterAudioSessionNotification (sessionEventCallback);
            sessionEventCallback->Release(); // (required because ComBaseClassHelper objects are constructed with a ref count of 1)
        }
    }

    void deleteSessionEventCallback()
    {
        if (audioSessionControl != nullptr && sessionEventCallback != nullptr)
            audioSessionControl->UnregisterAudioSessionNotification (sessionEventCallback);

        audioSessionControl = nullptr;
        sessionEventCallback = nullptr;
    }

    //==============================================================================
    const ComSmartPtr <IAudioClient> createClient()
    {
        ComSmartPtr <IAudioClient> client;

        if (device != nullptr)
        {
            HRESULT hr = device->Activate (__uuidof (IAudioClient), CLSCTX_INPROC_SERVER, 0, (void**) client.resetAndGetPointerAddress());
            logFailure (hr);
        }

        return client;
    }
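
    // Format negotiation strategy: openClient() calls the method below first
    // with 32-bit float, then falls back to 32/24/16-bit integer PCM. The
    // requested layout is described in a WAVEFORMATEXTENSIBLE; in shared mode,
    // if IsFormatSupported() returns S_FALSE with a closest-match suggestion at
    // the same sample rate, that suggested format is adopted instead of failing.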
    bool tryInitialisingWithFormat (const bool useFloat, const int bytesPerSampleToTry)
    {
        WAVEFORMATEXTENSIBLE format = { 0 };

        if (numChannels <= 2 && bytesPerSampleToTry <= 2)
        {
            format.Format.wFormatTag = WAVE_FORMAT_PCM;
        }
        else
        {
            format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
            format.Format.cbSize = sizeof (WAVEFORMATEXTENSIBLE) - sizeof (WAVEFORMATEX);
        }

        format.Format.nSamplesPerSec = (DWORD) roundDoubleToInt (sampleRate);
        format.Format.nChannels = (WORD) numChannels;
        format.Format.wBitsPerSample = (WORD) (8 * bytesPerSampleToTry);
        format.Format.nAvgBytesPerSec = (DWORD) (format.Format.nSamplesPerSec * numChannels * bytesPerSampleToTry);
        format.Format.nBlockAlign = (WORD) (numChannels * bytesPerSampleToTry);
        format.SubFormat = useFloat ? KSDATAFORMAT_SUBTYPE_IEEE_FLOAT : KSDATAFORMAT_SUBTYPE_PCM;
        format.Samples.wValidBitsPerSample = format.Format.wBitsPerSample;

        switch (numChannels)
        {
            case 1: format.dwChannelMask = SPEAKER_FRONT_CENTER; break;
            case 2: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT; break;
            case 4: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 6: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 8: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER; break;
            default: break;
        }

        WAVEFORMATEXTENSIBLE* nearestFormat = nullptr;

        HRESULT hr = client->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                (WAVEFORMATEX*) &format, useExclusiveMode ? nullptr : (WAVEFORMATEX**) &nearestFormat);
        logFailure (hr);

        if (hr == S_FALSE && format.Format.nSamplesPerSec == nearestFormat->Format.nSamplesPerSec)
        {
            copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
            hr = S_OK;
        }

        CoTaskMemFree (nearestFormat);

        REFERENCE_TIME defaultPeriod = 0, minPeriod = 0;
        if (useExclusiveMode)
            check (client->GetDevicePeriod (&defaultPeriod, &minPeriod));

        GUID session;
        if (hr == S_OK
             && check (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                           AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                                           defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
        {
            actualNumChannels = format.Format.nChannels;
            const bool isFloat = format.Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE && format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
            bytesPerSample = format.Format.wBitsPerSample / 8;

            updateFormat (isFloat);
            return true;
        }

        return false;
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIDeviceBase);
};

//==============================================================================
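/*
    Capture-side endpoint: wraps IAudioCaptureClient and converts the device's
    native interleaved format into the non-interleaved float buffers handed to
    the audio callback. A small "reservoir" block holds any samples that WASAPI
    delivers beyond what the current callback block can consume.
*/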
class WASAPIInputDevice : public WASAPIDeviceBase
{
public:
    WASAPIInputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_),
          reservoir (1, 1)
    {
    }

    ~WASAPIInputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        reservoirSize = 0;
        reservoirCapacity = 16384;
        reservoir.setSize (actualNumChannels * reservoirCapacity * sizeof (float));

        return openClient (newSampleRate, newChannels)
                && (numChannels == 0 || check (client->GetService (__uuidof (IAudioCaptureClient),
                                                                   (void**) captureClient.resetAndGetPointerAddress())));
    }

    void close()
    {
        closeClient();
        captureClient = nullptr;
        reservoir.setSize (0);
    }

    template <class SourceType>
    void updateFormatWithType (SourceType*)
    {
        typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> NativeType;
        converter = new AudioData::ConverterInstance <AudioData::Pointer <SourceType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>, NativeType> (actualNumChannels, 1);
    }

    void updateFormat (bool isFloat)
    {
        if (isFloat)                  updateFormatWithType ((AudioData::Float32*) 0);
        else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
        else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
        else                          updateFormatWithType ((AudioData::Int16*) 0);
    }

    void copyBuffers (float** destBuffers, int numDestBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            if (reservoirSize > 0) // There's stuff in the reservoir, so use that...
            {
                const int samplesToDo = jmin (bufferSize, (int) reservoirSize);

                for (int i = 0; i < numDestBuffers; ++i)
                    converter->convertSamples (destBuffers[i] + offset, 0, reservoir.getData(), channelMaps.getUnchecked(i), samplesToDo);

                bufferSize -= samplesToDo;
                offset += samplesToDo;
                reservoirSize = 0;
            }
            else
            {
                UINT32 packetLength = 0;
                if (! check (captureClient->GetNextPacketSize (&packetLength)))
                    break;

                if (packetLength == 0)
                {
                    if (thread.threadShouldExit()
                         || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
                        break;

                    continue;
                }

                uint8* inputData;
                UINT32 numSamplesAvailable;
                DWORD flags;

                if (check (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
                {
                    const int samplesToDo = jmin (bufferSize, (int) numSamplesAvailable);

                    for (int i = 0; i < numDestBuffers; ++i)
                        converter->convertSamples (destBuffers[i] + offset, 0, inputData, channelMaps.getUnchecked(i), samplesToDo);

                    bufferSize -= samplesToDo;
                    offset += samplesToDo;

                    if (samplesToDo < (int) numSamplesAvailable)
                    {
                        reservoirSize = jmin ((int) (numSamplesAvailable - samplesToDo), reservoirCapacity);
                        memcpy ((uint8*) reservoir.getData(), inputData + bytesPerSample * actualNumChannels * samplesToDo,
                                (size_t) (bytesPerSample * actualNumChannels * reservoirSize));
                    }

                    captureClient->ReleaseBuffer (numSamplesAvailable);
                }
            }
        }
    }

    ComSmartPtr <IAudioCaptureClient> captureClient;
    MemoryBlock reservoir;
    int reservoirSize, reservoirCapacity;
    ScopedPointer <AudioData::Converter> converter;

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIInputDevice);
};

//==============================================================================
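/*
    Render-side endpoint: wraps IAudioRenderClient and converts the float
    callback buffers into the device's native interleaved format, writing as
    much as the current padding allows (or whole blocks in exclusive mode).
*/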
class WASAPIOutputDevice : public WASAPIDeviceBase
{
public:
    WASAPIOutputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_)
    {
    }

    ~WASAPIOutputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        return openClient (newSampleRate, newChannels)
                && (numChannels == 0 || check (client->GetService (__uuidof (IAudioRenderClient), (void**) renderClient.resetAndGetPointerAddress())));
    }

    void close()
    {
        closeClient();
        renderClient = nullptr;
    }

    template <class DestType>
    void updateFormatWithType (DestType*)
    {
        typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> NativeType;
        converter = new AudioData::ConverterInstance <NativeType, AudioData::Pointer <DestType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> > (1, actualNumChannels);
    }

    void updateFormat (bool isFloat)
    {
        if (isFloat)                  updateFormatWithType ((AudioData::Float32*) 0);
        else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
        else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
        else                          updateFormatWithType ((AudioData::Int16*) 0);
    }

    void copyBuffers (const float** const srcBuffers, const int numSrcBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            UINT32 padding = 0;
            if (! check (client->GetCurrentPadding (&padding)))
                return;

            int samplesToDo = useExclusiveMode ? bufferSize
                                               : jmin ((int) (actualBufferSize - padding), bufferSize);

            if (samplesToDo <= 0)
            {
                if (thread.threadShouldExit()
                     || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
                    break;

                continue;
            }

            uint8* outputData = nullptr;
            if (check (renderClient->GetBuffer ((UINT32) samplesToDo, &outputData)))
            {
                for (int i = 0; i < numSrcBuffers; ++i)
                    converter->convertSamples (outputData, channelMaps.getUnchecked(i), srcBuffers[i] + offset, 0, samplesToDo);

                renderClient->ReleaseBuffer ((UINT32) samplesToDo, 0);

                offset += samplesToDo;
                bufferSize -= samplesToDo;
            }
        }
    }

    ComSmartPtr <IAudioRenderClient> renderClient;
    ScopedPointer <AudioData::Converter> converter;

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIOutputDevice);
};

//==============================================================================
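/*
    The AudioIODevice that gets exposed to JUCE: it owns an optional input and
    output endpoint pair, merges their sample-rate and buffer-size capabilities,
    and runs its own thread that shuttles audio between the endpoints and the
    registered AudioIODeviceCallback.
*/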
class WASAPIAudioIODevice  : public AudioIODevice,
                             public Thread
{
public:
    WASAPIAudioIODevice (const String& deviceName,
                         const String& outputDeviceId_,
                         const String& inputDeviceId_,
                         const bool useExclusiveMode_)
        : AudioIODevice (deviceName, "Windows Audio"),
          Thread ("Juce WASAPI"),
          outputDeviceId (outputDeviceId_),
          inputDeviceId (inputDeviceId_),
          useExclusiveMode (useExclusiveMode_),
          isOpen_ (false),
          isStarted (false),
          currentBufferSizeSamples (0),
          currentSampleRate (0),
          callback (nullptr)
    {
    }

    ~WASAPIAudioIODevice()
    {
        close();
    }

    bool initialise()
    {
        latencyIn = latencyOut = 0;
        Array <double> ratesIn, ratesOut;

        if (createDevices())
        {
            jassert (inputDevice != nullptr || outputDevice != nullptr);

            if (inputDevice != nullptr && outputDevice != nullptr)
            {
                defaultSampleRate = jmin (inputDevice->defaultSampleRate, outputDevice->defaultSampleRate);
                minBufferSize = jmin (inputDevice->minBufferSize, outputDevice->minBufferSize);
                defaultBufferSize = jmax (inputDevice->defaultBufferSize, outputDevice->defaultBufferSize);
                sampleRates = inputDevice->rates;
                sampleRates.removeValuesNotIn (outputDevice->rates);
            }
            else
            {
                WASAPIDeviceBase* d = inputDevice != nullptr ? static_cast<WASAPIDeviceBase*> (inputDevice)
                                                             : static_cast<WASAPIDeviceBase*> (outputDevice);
                defaultSampleRate = d->defaultSampleRate;
                minBufferSize = d->minBufferSize;
                defaultBufferSize = d->defaultBufferSize;
                sampleRates = d->rates;
            }

            bufferSizes.addUsingDefaultSort (defaultBufferSize);
            if (minBufferSize != defaultBufferSize)
                bufferSizes.addUsingDefaultSort (minBufferSize);

            int n = 64;
            for (int i = 0; i < 40; ++i)
            {
                if (n >= minBufferSize && n <= 2048 && ! bufferSizes.contains (n))
                    bufferSizes.addUsingDefaultSort (n);

                n += (n < 512) ? 32 : (n < 1024 ? 64 : 128);
            }

            return true;
        }

        return false;
    }

    StringArray getOutputChannelNames()
    {
        StringArray outChannels;

        if (outputDevice != nullptr)
            for (int i = 1; i <= outputDevice->actualNumChannels; ++i)
                outChannels.add ("Output channel " + String (i));

        return outChannels;
    }

    StringArray getInputChannelNames()
    {
        StringArray inChannels;

        if (inputDevice != nullptr)
            for (int i = 1; i <= inputDevice->actualNumChannels; ++i)
                inChannels.add ("Input channel " + String (i));

        return inChannels;
    }

    int getNumSampleRates()                     { return sampleRates.size(); }
    double getSampleRate (int index)            { return sampleRates [index]; }
    int getNumBufferSizesAvailable()            { return bufferSizes.size(); }
    int getBufferSizeSamples (int index)        { return bufferSizes [index]; }
    int getDefaultBufferSize()                  { return defaultBufferSize; }
    int getCurrentBufferSizeSamples()           { return currentBufferSizeSamples; }
    double getCurrentSampleRate()               { return currentSampleRate; }
    int getCurrentBitDepth()                    { return 32; }
    int getOutputLatencyInSamples()             { return latencyOut; }
    int getInputLatencyInSamples()              { return latencyIn; }
    BigInteger getActiveOutputChannels() const  { return outputDevice != nullptr ? outputDevice->channels : BigInteger(); }
    BigInteger getActiveInputChannels() const   { return inputDevice != nullptr ? inputDevice->channels : BigInteger(); }
    String getLastError()                       { return lastError; }

    String open (const BigInteger& inputChannels, const BigInteger& outputChannels,
                 double sampleRate, int bufferSizeSamples)
    {
        close();
        lastError = String::empty;

        if (sampleRates.size() == 0 && inputDevice != nullptr && outputDevice != nullptr)
        {
            lastError = "The input and output devices don't share a common sample rate!";
            return lastError;
        }

        currentBufferSizeSamples = bufferSizeSamples <= 0 ? defaultBufferSize : jmax (bufferSizeSamples, minBufferSize);
        currentSampleRate = sampleRate > 0 ? sampleRate : defaultSampleRate;

        if (inputDevice != nullptr && ! inputDevice->open (currentSampleRate, inputChannels))
        {
            lastError = "Couldn't open the input device!";
            return lastError;
        }

        if (outputDevice != nullptr && ! outputDevice->open (currentSampleRate, outputChannels))
        {
            close();
            lastError = "Couldn't open the output device!";
            return lastError;
        }

        if (inputDevice != nullptr)   ResetEvent (inputDevice->clientEvent);
        if (outputDevice != nullptr)  ResetEvent (outputDevice->clientEvent);

        startThread (8);
        Thread::sleep (5);

        if (inputDevice != nullptr && inputDevice->client != nullptr)
        {
            latencyIn = (int) (inputDevice->latencySamples + currentBufferSizeSamples);

            if (! check (inputDevice->client->Start()))
            {
                close();
                lastError = "Couldn't start the input device!";
                return lastError;
            }
        }

        if (outputDevice != nullptr && outputDevice->client != nullptr)
        {
            latencyOut = (int) (outputDevice->latencySamples + currentBufferSizeSamples);

            if (! check (outputDevice->client->Start()))
            {
                close();
                lastError = "Couldn't start the output device!";
                return lastError;
            }
        }

        isOpen_ = true;
        return lastError;
    }

    void close()
    {
        stop();

        signalThreadShouldExit();

        if (inputDevice != nullptr)   SetEvent (inputDevice->clientEvent);
        if (outputDevice != nullptr)  SetEvent (outputDevice->clientEvent);

        stopThread (5000);

        if (inputDevice != nullptr)   inputDevice->close();
        if (outputDevice != nullptr)  outputDevice->close();

        isOpen_ = false;
    }

    bool isOpen()     { return isOpen_ && isThreadRunning(); }
    bool isPlaying()  { return isStarted && isOpen_ && isThreadRunning(); }

    void start (AudioIODeviceCallback* call)
    {
        if (isOpen_ && call != nullptr && ! isStarted)
        {
            if (! isThreadRunning())
            {
                // something's gone wrong and the thread's stopped..
                isOpen_ = false;
                return;
            }

            call->audioDeviceAboutToStart (this);

            const ScopedLock sl (startStopLock);
            callback = call;
            isStarted = true;
        }
    }

    void stop()
    {
        if (isStarted)
        {
            AudioIODeviceCallback* const callbackLocal = callback;

            {
                const ScopedLock sl (startStopLock);
                isStarted = false;
            }

            if (callbackLocal != nullptr)
                callbackLocal->audioDeviceStopped();
        }
    }

    void setMMThreadPriority()
    {
        DynamicLibrary dll ("avrt.dll");
        JUCE_LOAD_WINAPI_FUNCTION (dll, AvSetMmThreadCharacteristicsW, avSetMmThreadCharacteristics, HANDLE, (LPCWSTR, LPDWORD))
        JUCE_LOAD_WINAPI_FUNCTION (dll, AvSetMmThreadPriority, avSetMmThreadPriority, HANDLE, (HANDLE, AVRT_PRIORITY))

        if (avSetMmThreadCharacteristics != 0 && avSetMmThreadPriority != 0)
        {
            DWORD dummy = 0;
            HANDLE h = avSetMmThreadCharacteristics (L"Pro Audio", &dummy);

            if (h != 0)
                avSetMmThreadPriority (h, AVRT_PRIORITY_NORMAL);
        }
    }
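
    // The audio thread: registers itself with MMCSS via setMMThreadPriority(),
    // then repeatedly pulls a block from the input endpoint (if any), hands it
    // to the callback under the start/stop lock, and pushes the rendered block
    // to the output endpoint, waking on the client event handles signalled by
    // WASAPI's event-driven mode.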
    void run()
    {
        setMMThreadPriority();

        const int bufferSize = currentBufferSizeSamples;
        const int numInputBuffers = getActiveInputChannels().countNumberOfSetBits();
        const int numOutputBuffers = getActiveOutputChannels().countNumberOfSetBits();
        bool sampleRateChanged = false;

        AudioSampleBuffer ins (jmax (1, numInputBuffers), bufferSize + 32);
        AudioSampleBuffer outs (jmax (1, numOutputBuffers), bufferSize + 32);
        float** const inputBuffers = ins.getArrayOfChannels();
        float** const outputBuffers = outs.getArrayOfChannels();
        ins.clear();

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                inputDevice->copyBuffers (inputBuffers, numInputBuffers, bufferSize, *this);

                if (threadShouldExit())
                    break;

                if (inputDevice->sampleRateHasChanged)
                    sampleRateChanged = true;
            }

            JUCE_TRY
            {
                const ScopedLock sl (startStopLock);

                if (isStarted)
                    callback->audioDeviceIOCallback (const_cast <const float**> (inputBuffers), numInputBuffers,
                                                     outputBuffers, numOutputBuffers, bufferSize);
                else
                    outs.clear();
            }
            JUCE_CATCH_EXCEPTION

            if (outputDevice != nullptr)
            {
                outputDevice->copyBuffers (const_cast <const float**> (outputBuffers), numOutputBuffers, bufferSize, *this);

                if (outputDevice->sampleRateHasChanged)
                    sampleRateChanged = true;
            }

            if (sampleRateChanged)
            {
                // xxx one of the devices has had its sample rate changed externally.. not 100% sure how
                // to handle this..
            }
        }
    }

    //==============================================================================
    String outputDeviceId, inputDeviceId;
    String lastError;

private:
    // Device stats...
    ScopedPointer<WASAPIInputDevice> inputDevice;
    ScopedPointer<WASAPIOutputDevice> outputDevice;
    const bool useExclusiveMode;
    double defaultSampleRate;
    int minBufferSize, defaultBufferSize;
    int latencyIn, latencyOut;
    Array <double> sampleRates;
    Array <int> bufferSizes;

    // Active state...
    bool isOpen_, isStarted;
    int currentBufferSizeSamples;
    double currentSampleRate;
    AudioIODeviceCallback* callback;
    CriticalSection startStopLock;

    //==============================================================================
    bool createDevices()
    {
        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
            return false;

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        if (! check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress())))
            return false;

        UINT32 numDevices = 0;
        if (! check (deviceCollection->GetCount (&numDevices)))
            return false;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
                continue;

            const String deviceId (getDeviceID (device));
            if (deviceId.isEmpty())
                continue;

            const EDataFlow flow = getDataFlow (device);

            if (deviceId == inputDeviceId && flow == eCapture)
                inputDevice = new WASAPIInputDevice (device, useExclusiveMode);
            else if (deviceId == outputDeviceId && flow == eRender)
                outputDevice = new WASAPIOutputDevice (device, useExclusiveMode);
        }

        return (outputDeviceId.isEmpty() || (outputDevice != nullptr && outputDevice->isOk()))
            && (inputDeviceId.isEmpty() || (inputDevice != nullptr && inputDevice->isOk()));
    }

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODevice);
};

//==============================================================================
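/*
    The device-type object that AudioDeviceManager talks to: it enumerates the
    active render and capture endpoints, keeps the default device at index 0,
    pairs up friendly names with endpoint IDs, and rescans whenever
    DeviceChangeDetector reports a system device change.
*/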
class WASAPIAudioIODeviceType  : public AudioIODeviceType,
                                 private DeviceChangeDetector
{
public:
    WASAPIAudioIODeviceType()
        : AudioIODeviceType ("Windows Audio"),
          DeviceChangeDetector (L"Windows Audio"),
          hasScanned (false)
    {
    }

    //==============================================================================
    void scanForDevices()
    {
        hasScanned = true;

        outputDeviceNames.clear();
        inputDeviceNames.clear();
        outputDeviceIds.clear();
        inputDeviceIds.clear();

        scan (outputDeviceNames, inputDeviceNames,
              outputDeviceIds, inputDeviceIds);
    }

    StringArray getDeviceNames (bool wantInputNames) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        return wantInputNames ? inputDeviceNames
                              : outputDeviceNames;
    }

    int getDefaultDeviceIndex (bool /*forInput*/) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this
        WASAPIAudioIODevice* const d = dynamic_cast <WASAPIAudioIODevice*> (device);

        return d == nullptr ? -1 : (asInput ? inputDeviceIds.indexOf (d->inputDeviceId)
                                            : outputDeviceIds.indexOf (d->outputDeviceId));
    }

    bool hasSeparateInputsAndOutputs() const    { return true; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        const bool useExclusiveMode = false;
        ScopedPointer<WASAPIAudioIODevice> device;

        const int outputIndex = outputDeviceNames.indexOf (outputDeviceName);
        const int inputIndex = inputDeviceNames.indexOf (inputDeviceName);

        if (outputIndex >= 0 || inputIndex >= 0)
        {
            device = new WASAPIAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                            : inputDeviceName,
                                              outputDeviceIds [outputIndex],
                                              inputDeviceIds [inputIndex],
                                              useExclusiveMode);

            if (! device->initialise())
                device = nullptr;
        }

        return device.release();
    }

    //==============================================================================
    StringArray outputDeviceNames, outputDeviceIds;
    StringArray inputDeviceNames, inputDeviceIds;

private:
    bool hasScanned;

    //==============================================================================
    static String getDefaultEndpoint (IMMDeviceEnumerator* const enumerator, const bool forCapture)
    {
        String s;
        IMMDevice* dev = nullptr;

        if (check (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
                                                        eMultimedia, &dev)))
        {
            WCHAR* deviceId = nullptr;

            if (check (dev->GetId (&deviceId)))
            {
                s = deviceId;
                CoTaskMemFree (deviceId);
            }

            dev->Release();
        }

        return s;
    }

    //==============================================================================
    void scan (StringArray& outputDeviceNames,
               StringArray& inputDeviceNames,
               StringArray& outputDeviceIds,
               StringArray& inputDeviceIds)
    {
        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
            return;

        const String defaultRenderer (getDefaultEndpoint (enumerator, false));
        const String defaultCapture (getDefaultEndpoint (enumerator, true));

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        UINT32 numDevices = 0;

        if (! (check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress()))
                && check (deviceCollection->GetCount (&numDevices))))
            return;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
                continue;

            DWORD state = 0;
            if (! (check (device->GetState (&state)) && state == DEVICE_STATE_ACTIVE))
                continue;

            const String deviceId (getDeviceID (device));
            String name;

            {
                ComSmartPtr <IPropertyStore> properties;
                if (! check (device->OpenPropertyStore (STGM_READ, properties.resetAndGetPointerAddress())))
                    continue;

                PROPVARIANT value;
                PropVariantInit (&value);
                if (check (properties->GetValue (PKEY_Device_FriendlyName, &value)))
                    name = value.pwszVal;

                PropVariantClear (&value);
            }

            const EDataFlow flow = getDataFlow (device);

            if (flow == eRender)
            {
                const int index = (deviceId == defaultRenderer) ? 0 : -1;
                outputDeviceIds.insert (index, deviceId);
                outputDeviceNames.insert (index, name);
            }
            else if (flow == eCapture)
            {
                const int index = (deviceId == defaultCapture) ? 0 : -1;
                inputDeviceIds.insert (index, deviceId);
                inputDeviceNames.insert (index, name);
            }
        }

        inputDeviceNames.appendNumbersToDuplicates (false, false);
        outputDeviceNames.appendNumbersToDuplicates (false, false);
    }

    //==============================================================================
    void systemDeviceChanged()
    {
        StringArray newOutNames, newInNames, newOutIds, newInIds;
        scan (newOutNames, newInNames, newOutIds, newInIds);

        if (newOutNames != outputDeviceNames
             || newInNames != inputDeviceNames
             || newOutIds != outputDeviceIds
             || newInIds != inputDeviceIds)
        {
            hasScanned = true;
            outputDeviceNames = newOutNames;
            inputDeviceNames = newInNames;
            outputDeviceIds = newOutIds;
            inputDeviceIds = newInIds;

            callDeviceChangeListeners();
        }
    }

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODeviceType);
};

}

//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_WASAPI()
{
    if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
        return new WasapiClasses::WASAPIAudioIODeviceType();

    return nullptr;
}
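
//==============================================================================
/*
    Illustrative usage sketch (assumes a caller-supplied AudioIODeviceCallback
    subclass, here called "MyCallback"). Client code normally reaches this
    device type through AudioDeviceManager, but it can also be driven directly:

        ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_WASAPI());

        if (type != nullptr)
        {
            type->scanForDevices();
            const StringArray outputNames (type->getDeviceNames (false));

            ScopedPointer<AudioIODevice> device (type->createDevice (outputNames[0], String::empty));

            BigInteger outputChannels;
            outputChannels.setRange (0, 2, true);

            if (device != nullptr
                 && device->open (BigInteger(), outputChannels, 44100.0, device->getDefaultBufferSize()).isEmpty())
            {
                MyCallback callback;
                device->start (&callback);
                // ... audio runs on the device's own thread ...
                device->stop();
                device->close();
            }
        }
*/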