The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library - "Jules' Utility Class Extensions"
  4. Copyright 2004-11 by Raw Material Software Ltd.
  5. ------------------------------------------------------------------------------
  6. JUCE can be redistributed and/or modified under the terms of the GNU General
  7. Public License (Version 2), as published by the Free Software Foundation.
  8. A copy of the license is included in the JUCE distribution, or can be found
  9. online at www.gnu.org/licenses.
  10. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  11. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  12. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  13. ------------------------------------------------------------------------------
  14. To release a closed-source product which uses JUCE, commercial licenses are
  15. available: visit www.rawmaterialsoftware.com/juce for more information.
  16. ==============================================================================
  17. */
  18. // (This file gets included by juce_win32_NativeCode.cpp, rather than being
  19. // compiled on its own).
  20. #if JUCE_INCLUDED_FILE && JUCE_WASAPI
  21. #ifndef WASAPI_ENABLE_LOGGING
  22. #define WASAPI_ENABLE_LOGGING 0
  23. #endif
  24. //==============================================================================
  25. namespace WasapiClasses
  26. {
  27. void logFailure (HRESULT hr)
  28. {
  29. (void) hr;
  30. #if WASAPI_ENABLE_LOGGING
  31. if (FAILED (hr))
  32. {
  33. String e;
  34. e << Time::getCurrentTime().toString (true, true, true, true)
  35. << " -- WASAPI error: ";
  36. switch (hr)
  37. {
  38. case E_POINTER: e << "E_POINTER"; break;
  39. case E_INVALIDARG: e << "E_INVALIDARG"; break;
  40. case AUDCLNT_E_NOT_INITIALIZED: e << "AUDCLNT_E_NOT_INITIALIZED"; break;
  41. case AUDCLNT_E_ALREADY_INITIALIZED: e << "AUDCLNT_E_ALREADY_INITIALIZED"; break;
  42. case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e << "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
  43. case AUDCLNT_E_DEVICE_INVALIDATED: e << "AUDCLNT_E_DEVICE_INVALIDATED"; break;
  44. case AUDCLNT_E_NOT_STOPPED: e << "AUDCLNT_E_NOT_STOPPED"; break;
  45. case AUDCLNT_E_BUFFER_TOO_LARGE: e << "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
  46. case AUDCLNT_E_OUT_OF_ORDER: e << "AUDCLNT_E_OUT_OF_ORDER"; break;
  47. case AUDCLNT_E_UNSUPPORTED_FORMAT: e << "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
  48. case AUDCLNT_E_INVALID_SIZE: e << "AUDCLNT_E_INVALID_SIZE"; break;
  49. case AUDCLNT_E_DEVICE_IN_USE: e << "AUDCLNT_E_DEVICE_IN_USE"; break;
  50. case AUDCLNT_E_BUFFER_OPERATION_PENDING: e << "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
  51. case AUDCLNT_E_THREAD_NOT_REGISTERED: e << "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
  52. case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e << "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
  53. case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e << "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
  54. case AUDCLNT_E_SERVICE_NOT_RUNNING: e << "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
  55. case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e << "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
  56. case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e << "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
  57. case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e << "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
  58. case AUDCLNT_E_EVENTHANDLE_NOT_SET: e << "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
  59. case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e << "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
  60. case AUDCLNT_E_BUFFER_SIZE_ERROR: e << "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
  61. case AUDCLNT_S_BUFFER_EMPTY: e << "AUDCLNT_S_BUFFER_EMPTY"; break;
  62. case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e << "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
  63. default: e << String::toHexString ((int) hr); break;
  64. }
  65. DBG (e);
  66. jassertfalse;
  67. }
  68. #endif
  69. }
  70. #undef check
  71. bool check (HRESULT hr)
  72. {
  73. logFailure (hr);
  74. return SUCCEEDED (hr);
  75. }
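// Editor's note: check() is the error-handling idiom used throughout this file -- every HRESULT is
// funnelled through logFailure() and collapsed to a bool. With WASAPI_ENABLE_LOGGING left at 0 (the
// default defined near the top of the file) failures are silently swallowed; building with
// WASAPI_ENABLE_LOGGING=1 makes each failing call print its symbolic error name via DBG() and hit
// jassertfalse in debug builds.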
  76. //==============================================================================
  77. String getDeviceID (IMMDevice* const device)
  78. {
  79. String s;
  80. WCHAR* deviceId = nullptr;
  81. if (check (device->GetId (&deviceId)))
  82. {
  83. s = String (deviceId);
  84. CoTaskMemFree (deviceId);
  85. }
  86. return s;
  87. }
  88. EDataFlow getDataFlow (const ComSmartPtr<IMMDevice>& device)
  89. {
  90. EDataFlow flow = eRender;
  91. ComSmartPtr <IMMEndpoint> endPoint;
  92. if (check (device.QueryInterface (endPoint)))
  93. (void) check (endPoint->GetDataFlow (&flow));
  94. return flow;
  95. }
  96. int refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) noexcept
  97. {
  98. return roundDoubleToInt (sampleRate * ((double) t) * 0.0000001);
  99. }
  100. void copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) noexcept
  101. {
  102. memcpy (&dest, src, src->wFormatTag == WAVE_FORMAT_EXTENSIBLE ? sizeof (WAVEFORMATEXTENSIBLE)
  103. : sizeof (WAVEFORMATEX));
  104. }
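// Editor's note (worked example): REFERENCE_TIME counts 100-nanosecond units, so the conversion
// above is samples = sampleRate * t * 1e-7. A typical shared-mode default device period of
// 100000 (i.e. 10 ms) at 48000 Hz therefore gives refTimeToSamples (100000, 48000.0) == 480 samples.
// copyWavFormat() copies the full WAVEFORMATEXTENSIBLE only when the source really is extensible;
// for a plain WAVEFORMATEX it copies just the base struct, so it never reads past the source block.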
  105. //==============================================================================
  106. class WASAPIDeviceBase
  107. {
  108. public:
  109. WASAPIDeviceBase (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
  110. : device (device_),
  111. sampleRate (0),
  112. defaultSampleRate (0),
  113. numChannels (0),
  114. actualNumChannels (0),
  115. minBufferSize (0),
  116. defaultBufferSize (0),
  117. latencySamples (0),
  118. useExclusiveMode (useExclusiveMode_),
  119. sampleRateHasChanged (false)
  120. {
  121. clientEvent = CreateEvent (0, false, false, _T("JuceWASAPI"));
  122. ComSmartPtr <IAudioClient> tempClient (createClient());
  123. if (tempClient == nullptr)
  124. return;
  125. REFERENCE_TIME defaultPeriod, minPeriod;
  126. if (! check (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
  127. return;
  128. WAVEFORMATEX* mixFormat = nullptr;
  129. if (! check (tempClient->GetMixFormat (&mixFormat)))
  130. return;
  131. WAVEFORMATEXTENSIBLE format;
  132. copyWavFormat (format, mixFormat);
  133. CoTaskMemFree (mixFormat);
  134. actualNumChannels = numChannels = format.Format.nChannels;
  135. defaultSampleRate = format.Format.nSamplesPerSec;
  136. minBufferSize = refTimeToSamples (minPeriod, defaultSampleRate);
  137. defaultBufferSize = refTimeToSamples (defaultPeriod, defaultSampleRate);
  138. rates.addUsingDefaultSort (defaultSampleRate);
  139. static const double ratesToTest[] = { 44100.0, 48000.0, 88200.0, 96000.0 };
  140. for (int i = 0; i < numElementsInArray (ratesToTest); ++i)
  141. {
  142. if (ratesToTest[i] == defaultSampleRate)
  143. continue;
  144. format.Format.nSamplesPerSec = roundDoubleToInt (ratesToTest[i]);
  145. if (SUCCEEDED (tempClient->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
  146. (WAVEFORMATEX*) &format, 0)))
  147. if (! rates.contains (ratesToTest[i]))
  148. rates.addUsingDefaultSort (ratesToTest[i]);
  149. }
  150. }
  151. ~WASAPIDeviceBase()
  152. {
  153. device = nullptr;
  154. CloseHandle (clientEvent);
  155. }
  156. bool isOk() const noexcept { return defaultBufferSize > 0 && defaultSampleRate > 0; }
  157. bool openClient (const double newSampleRate, const BigInteger& newChannels)
  158. {
  159. sampleRate = newSampleRate;
  160. channels = newChannels;
  161. channels.setRange (actualNumChannels, channels.getHighestBit() + 1 - actualNumChannels, false);
  162. numChannels = channels.getHighestBit() + 1;
  163. if (numChannels == 0)
  164. return true;
  165. client = createClient();
  166. if (client != nullptr
  167. && (tryInitialisingWithFormat (true, 4) || tryInitialisingWithFormat (false, 4)
  168. || tryInitialisingWithFormat (false, 3) || tryInitialisingWithFormat (false, 2)))
  169. {
  170. sampleRateHasChanged = false;
  171. channelMaps.clear();
  172. for (int i = 0; i <= channels.getHighestBit(); ++i)
  173. if (channels[i])
  174. channelMaps.add (i);
  175. REFERENCE_TIME latency;
  176. if (check (client->GetStreamLatency (&latency)))
  177. latencySamples = refTimeToSamples (latency, sampleRate);
  178. (void) check (client->GetBufferSize (&actualBufferSize));
  179. createSessionEventCallback();
  180. return check (client->SetEventHandle (clientEvent));
  181. }
  182. return false;
  183. }
  184. void closeClient()
  185. {
  186. if (client != nullptr)
  187. client->Stop();
  188. deleteSessionEventCallback();
  189. client = nullptr;
  190. ResetEvent (clientEvent);
  191. }
  192. void deviceSampleRateChanged()
  193. {
  194. sampleRateHasChanged = true;
  195. }
  196. //==============================================================================
  197. ComSmartPtr <IMMDevice> device;
  198. ComSmartPtr <IAudioClient> client;
  199. double sampleRate, defaultSampleRate;
  200. int numChannels, actualNumChannels;
  201. int minBufferSize, defaultBufferSize, latencySamples;
  202. const bool useExclusiveMode;
  203. Array <double> rates;
  204. HANDLE clientEvent;
  205. BigInteger channels;
  206. Array <int> channelMaps;
  207. UINT32 actualBufferSize;
  208. int bytesPerSample;
  209. bool sampleRateHasChanged;
  210. virtual void updateFormat (bool isFloat) = 0;
  211. private:
  212. //==============================================================================
  213. class SessionEventCallback : public ComBaseClassHelper <IAudioSessionEvents>
  214. {
  215. public:
  216. SessionEventCallback (WASAPIDeviceBase& owner_) : owner (owner_) {}
  217. JUCE_COMRESULT OnDisplayNameChanged (LPCWSTR, LPCGUID) { return S_OK; }
  218. JUCE_COMRESULT OnIconPathChanged (LPCWSTR, LPCGUID) { return S_OK; }
  219. JUCE_COMRESULT OnSimpleVolumeChanged (float, BOOL, LPCGUID) { return S_OK; }
  220. JUCE_COMRESULT OnChannelVolumeChanged (DWORD, float*, DWORD, LPCGUID) { return S_OK; }
  221. JUCE_COMRESULT OnGroupingParamChanged (LPCGUID, LPCGUID) { return S_OK; }
  222. JUCE_COMRESULT OnStateChanged (AudioSessionState) { return S_OK; }
  223. JUCE_COMRESULT OnSessionDisconnected (AudioSessionDisconnectReason reason)
  224. {
  225. if (reason == DisconnectReasonFormatChanged)
  226. owner.deviceSampleRateChanged();
  227. return S_OK;
  228. }
  229. private:
  230. WASAPIDeviceBase& owner;
  231. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (SessionEventCallback);
  232. };
  233. ComSmartPtr <IAudioSessionControl> audioSessionControl;
  234. ComSmartPtr <SessionEventCallback> sessionEventCallback;
  235. void createSessionEventCallback()
  236. {
  237. deleteSessionEventCallback();
  238. client->GetService (__uuidof (IAudioSessionControl),
  239. (void**) audioSessionControl.resetAndGetPointerAddress());
  240. if (audioSessionControl != nullptr)
  241. {
  242. sessionEventCallback = new SessionEventCallback (*this);
  243. audioSessionControl->RegisterAudioSessionNotification (sessionEventCallback);
  244. sessionEventCallback->Release(); // (required because ComBaseClassHelper objects are constructed with a ref count of 1)
  245. }
  246. }
  247. void deleteSessionEventCallback()
  248. {
  249. if (audioSessionControl != nullptr && sessionEventCallback != nullptr)
  250. audioSessionControl->UnregisterAudioSessionNotification (sessionEventCallback);
  251. audioSessionControl = nullptr;
  252. sessionEventCallback = nullptr;
  253. }
  254. //==============================================================================
  255. const ComSmartPtr <IAudioClient> createClient()
  256. {
  257. ComSmartPtr <IAudioClient> client;
  258. if (device != nullptr)
  259. {
  260. HRESULT hr = device->Activate (__uuidof (IAudioClient), CLSCTX_INPROC_SERVER, 0, (void**) client.resetAndGetPointerAddress());
  261. logFailure (hr);
  262. }
  263. return client;
  264. }
  265. bool tryInitialisingWithFormat (const bool useFloat, const int bytesPerSampleToTry)
  266. {
  267. WAVEFORMATEXTENSIBLE format = { 0 };
  268. if (numChannels <= 2 && bytesPerSampleToTry <= 2)
  269. {
  270. format.Format.wFormatTag = WAVE_FORMAT_PCM;
  271. }
  272. else
  273. {
  274. format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
  275. format.Format.cbSize = sizeof (WAVEFORMATEXTENSIBLE) - sizeof (WAVEFORMATEX);
  276. }
  277. format.Format.nSamplesPerSec = roundDoubleToInt (sampleRate);
  278. format.Format.nChannels = (WORD) numChannels;
  279. format.Format.wBitsPerSample = (WORD) (8 * bytesPerSampleToTry);
  280. format.Format.nAvgBytesPerSec = (DWORD) (format.Format.nSamplesPerSec * numChannels * bytesPerSampleToTry);
  281. format.Format.nBlockAlign = (WORD) (numChannels * bytesPerSampleToTry);
  282. format.SubFormat = useFloat ? KSDATAFORMAT_SUBTYPE_IEEE_FLOAT : KSDATAFORMAT_SUBTYPE_PCM;
  283. format.Samples.wValidBitsPerSample = format.Format.wBitsPerSample;
  284. switch (numChannels)
  285. {
  286. case 1: format.dwChannelMask = SPEAKER_FRONT_CENTER; break;
  287. case 2: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT; break;
  288. case 4: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
  289. case 6: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
  290. case 8: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER; break;
  291. default: break;
  292. }
  293. WAVEFORMATEXTENSIBLE* nearestFormat = nullptr;
  294. HRESULT hr = client->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
  295. (WAVEFORMATEX*) &format, useExclusiveMode ? nullptr : (WAVEFORMATEX**) &nearestFormat);
  296. logFailure (hr);
  297. if (hr == S_FALSE && format.Format.nSamplesPerSec == nearestFormat->Format.nSamplesPerSec)
  298. {
  299. copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
  300. hr = S_OK;
  301. }
  302. CoTaskMemFree (nearestFormat);
  303. REFERENCE_TIME defaultPeriod = 0, minPeriod = 0;
  304. if (useExclusiveMode)
  305. check (client->GetDevicePeriod (&defaultPeriod, &minPeriod));
  306. GUID session;
  307. if (hr == S_OK
  308. && check (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
  309. AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
  310. defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
  311. {
  312. actualNumChannels = format.Format.nChannels;
  313. const bool isFloat = format.Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE && format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
  314. bytesPerSample = format.Format.wBitsPerSample / 8;
  315. updateFormat (isFloat);
  316. return true;
  317. }
  318. return false;
  319. }
  320. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIDeviceBase);
  321. };
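// Editor's note: a minimal standalone sketch of the event-driven, shared-mode initialisation
// sequence that WASAPIDeviceBase wraps above (Activate -> GetMixFormat -> Initialize with the
// event-callback flag -> SetEventHandle). It is an illustration only, not used by the classes in
// this file; the function name is made up for the example and most error handling is omitted.
static IAudioClient* exampleSharedModeEventDrivenSetup (IMMDevice* device, HANDLE audioEvent)
{
    IAudioClient* client = nullptr;
    if (FAILED (device->Activate (__uuidof (IAudioClient), CLSCTX_INPROC_SERVER,
                                  nullptr, (void**) &client)))
        return nullptr;

    WAVEFORMATEX* mixFormat = nullptr;
    client->GetMixFormat (&mixFormat);   // the audio engine's shared-mode format

    // Zero durations let the engine choose its default period; the event flag makes the engine
    // signal audioEvent once per period instead of requiring the caller to poll.
    client->Initialize (AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                        0, 0, mixFormat, nullptr);
    client->SetEventHandle (audioEvent);
    CoTaskMemFree (mixFormat);

    return client;   // the caller would then call GetService(), Start(), and eventually Release()
}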
  322. //==============================================================================
  323. class WASAPIInputDevice : public WASAPIDeviceBase
  324. {
  325. public:
  326. WASAPIInputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
  327. : WASAPIDeviceBase (device_, useExclusiveMode_),
  328. reservoir (1, 1)
  329. {
  330. }
  331. ~WASAPIInputDevice()
  332. {
  333. close();
  334. }
  335. bool open (const double newSampleRate, const BigInteger& newChannels)
  336. {
  337. reservoirSize = 0;
  338. reservoirCapacity = 16384;
  339. reservoir.setSize (actualNumChannels * reservoirCapacity * sizeof (float));
  340. return openClient (newSampleRate, newChannels)
  341. && (numChannels == 0 || check (client->GetService (__uuidof (IAudioCaptureClient),
  342. (void**) captureClient.resetAndGetPointerAddress())));
  343. }
  344. void close()
  345. {
  346. closeClient();
  347. captureClient = nullptr;
  348. reservoir.setSize (0);
  349. }
  350. template <class SourceType>
  351. void updateFormatWithType (SourceType*)
  352. {
  353. typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> NativeType;
  354. converter = new AudioData::ConverterInstance <AudioData::Pointer <SourceType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>, NativeType> (actualNumChannels, 1);
  355. }
  356. void updateFormat (bool isFloat)
  357. {
  358. if (isFloat) updateFormatWithType ((AudioData::Float32*) 0);
  359. else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
  360. else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
  361. else updateFormatWithType ((AudioData::Int16*) 0);
  362. }
  363. void copyBuffers (float** destBuffers, int numDestBuffers, int bufferSize, Thread& thread)
  364. {
  365. if (numChannels <= 0)
  366. return;
  367. int offset = 0;
  368. while (bufferSize > 0)
  369. {
  370. if (reservoirSize > 0) // There's stuff in the reservoir, so use that...
  371. {
  372. const int samplesToDo = jmin (bufferSize, (int) reservoirSize);
  373. for (int i = 0; i < numDestBuffers; ++i)
  374. converter->convertSamples (destBuffers[i] + offset, 0, reservoir.getData(), channelMaps.getUnchecked(i), samplesToDo);
  375. bufferSize -= samplesToDo;
  376. offset += samplesToDo;
  377. reservoirSize = 0;
  378. }
  379. else
  380. {
  381. UINT32 packetLength = 0;
  382. if (! check (captureClient->GetNextPacketSize (&packetLength)))
  383. break;
  384. if (packetLength == 0)
  385. {
  386. if (thread.threadShouldExit()
  387. || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
  388. break;
  389. continue;
  390. }
  391. uint8* inputData;
  392. UINT32 numSamplesAvailable;
  393. DWORD flags;
  394. if (check (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
  395. {
  396. const int samplesToDo = jmin (bufferSize, (int) numSamplesAvailable);
  397. for (int i = 0; i < numDestBuffers; ++i)
  398. converter->convertSamples (destBuffers[i] + offset, 0, inputData, channelMaps.getUnchecked(i), samplesToDo);
  399. bufferSize -= samplesToDo;
  400. offset += samplesToDo;
  401. if (samplesToDo < (int) numSamplesAvailable)
  402. {
  403. reservoirSize = jmin ((int) (numSamplesAvailable - samplesToDo), reservoirCapacity);
  404. memcpy ((uint8*) reservoir.getData(), inputData + bytesPerSample * actualNumChannels * samplesToDo,
  405. bytesPerSample * actualNumChannels * reservoirSize);
  406. }
  407. captureClient->ReleaseBuffer (numSamplesAvailable);
  408. }
  409. }
  410. }
  411. }
  412. ComSmartPtr <IAudioCaptureClient> captureClient;
  413. MemoryBlock reservoir;
  414. int reservoirSize, reservoirCapacity;
  415. ScopedPointer <AudioData::Converter> converter;
  416. private:
  417. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIInputDevice);
  418. };
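// Editor's note: a simplified sketch of what the AudioData converter in
// WASAPIInputDevice::copyBuffers() does for the 16-bit integer case. The WASAPI capture buffer is
// interleaved (one frame of all channels at a time), while the JUCE callback expects one contiguous
// float buffer per channel. The channelMaps and reservoir handling are omitted; the function name
// is illustrative only.
static void deinterleaveInt16ToFloat (const int16* interleavedSource, int numSourceChannels,
                                      float** destChannels, int numDestChannels, int numFrames)
{
    for (int ch = 0; ch < numDestChannels; ++ch)
        for (int i = 0; i < numFrames; ++i)
            destChannels[ch][i] = interleavedSource[i * numSourceChannels + ch] * (1.0f / 32768.0f);
}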
  419. //==============================================================================
  420. class WASAPIOutputDevice : public WASAPIDeviceBase
  421. {
  422. public:
  423. WASAPIOutputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
  424. : WASAPIDeviceBase (device_, useExclusiveMode_)
  425. {
  426. }
  427. ~WASAPIOutputDevice()
  428. {
  429. close();
  430. }
  431. bool open (const double newSampleRate, const BigInteger& newChannels)
  432. {
  433. return openClient (newSampleRate, newChannels)
  434. && (numChannels == 0 || check (client->GetService (__uuidof (IAudioRenderClient), (void**) renderClient.resetAndGetPointerAddress())));
  435. }
  436. void close()
  437. {
  438. closeClient();
  439. renderClient = nullptr;
  440. }
  441. template <class DestType>
  442. void updateFormatWithType (DestType*)
  443. {
  444. typedef AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> NativeType;
  445. converter = new AudioData::ConverterInstance <NativeType, AudioData::Pointer <DestType, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> > (1, actualNumChannels);
  446. }
  447. void updateFormat (bool isFloat)
  448. {
  449. if (isFloat) updateFormatWithType ((AudioData::Float32*) 0);
  450. else if (bytesPerSample == 4) updateFormatWithType ((AudioData::Int32*) 0);
  451. else if (bytesPerSample == 3) updateFormatWithType ((AudioData::Int24*) 0);
  452. else updateFormatWithType ((AudioData::Int16*) 0);
  453. }
  454. void copyBuffers (const float** const srcBuffers, const int numSrcBuffers, int bufferSize, Thread& thread)
  455. {
  456. if (numChannels <= 0)
  457. return;
  458. int offset = 0;
  459. while (bufferSize > 0)
  460. {
  461. UINT32 padding = 0;
  462. if (! check (client->GetCurrentPadding (&padding)))
  463. return;
  464. int samplesToDo = useExclusiveMode ? bufferSize
  465. : jmin ((int) (actualBufferSize - padding), bufferSize);
  466. if (samplesToDo <= 0)
  467. {
  468. if (thread.threadShouldExit()
  469. || WaitForSingleObject (clientEvent, 1000) == WAIT_TIMEOUT)
  470. break;
  471. continue;
  472. }
  473. uint8* outputData = nullptr;
  474. if (check (renderClient->GetBuffer (samplesToDo, &outputData)))
  475. {
  476. for (int i = 0; i < numSrcBuffers; ++i)
  477. converter->convertSamples (outputData, channelMaps.getUnchecked(i), srcBuffers[i] + offset, 0, samplesToDo);
  478. renderClient->ReleaseBuffer (samplesToDo, 0);
  479. offset += samplesToDo;
  480. bufferSize -= samplesToDo;
  481. }
  482. }
  483. }
  484. ComSmartPtr <IAudioRenderClient> renderClient;
  485. ScopedPointer <AudioData::Converter> converter;
  486. private:
  487. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIOutputDevice);
  488. };
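// Editor's note: in shared mode the render buffer is shared with the audio engine, so
// WASAPIOutputDevice::copyBuffers() above asks how much of it is still queued ("padding") and only
// writes into the remainder. A minimal sketch of that query (illustrative helper name, error
// handling omitted):
static UINT32 getWritableFrames (IAudioClient* client, UINT32 totalBufferSizeInFrames)
{
    UINT32 padding = 0;                        // frames already queued and not yet played
    client->GetCurrentPadding (&padding);
    return totalBufferSizeInFrames - padding;  // frames that GetBuffer() can safely be asked for
}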
  489. //==============================================================================
  490. class WASAPIAudioIODevice : public AudioIODevice,
  491. public Thread
  492. {
  493. public:
  494. WASAPIAudioIODevice (const String& deviceName,
  495. const String& outputDeviceId_,
  496. const String& inputDeviceId_,
  497. const bool useExclusiveMode_)
  498. : AudioIODevice (deviceName, "Windows Audio"),
  499. Thread ("Juce WASAPI"),
  500. outputDeviceId (outputDeviceId_),
  501. inputDeviceId (inputDeviceId_),
  502. useExclusiveMode (useExclusiveMode_),
  503. isOpen_ (false),
  504. isStarted (false),
  505. currentBufferSizeSamples (0),
  506. currentSampleRate (0),
  507. callback (nullptr)
  508. {
  509. }
  510. ~WASAPIAudioIODevice()
  511. {
  512. close();
  513. }
  514. bool initialise()
  515. {
  516. latencyIn = latencyOut = 0;
  517. Array <double> ratesIn, ratesOut;
  518. if (createDevices())
  519. {
  520. jassert (inputDevice != nullptr || outputDevice != nullptr);
  521. if (inputDevice != nullptr && outputDevice != nullptr)
  522. {
  523. defaultSampleRate = jmin (inputDevice->defaultSampleRate, outputDevice->defaultSampleRate);
  524. minBufferSize = jmin (inputDevice->minBufferSize, outputDevice->minBufferSize);
  525. defaultBufferSize = jmax (inputDevice->defaultBufferSize, outputDevice->defaultBufferSize);
  526. sampleRates = inputDevice->rates;
  527. sampleRates.removeValuesNotIn (outputDevice->rates);
  528. }
  529. else
  530. {
  531. WASAPIDeviceBase* d = inputDevice != nullptr ? static_cast<WASAPIDeviceBase*> (inputDevice)
  532. : static_cast<WASAPIDeviceBase*> (outputDevice);
  533. defaultSampleRate = d->defaultSampleRate;
  534. minBufferSize = d->minBufferSize;
  535. defaultBufferSize = d->defaultBufferSize;
  536. sampleRates = d->rates;
  537. }
  538. bufferSizes.addUsingDefaultSort (defaultBufferSize);
  539. if (minBufferSize != defaultBufferSize)
  540. bufferSizes.addUsingDefaultSort (minBufferSize);
  541. int n = 64;
  542. for (int i = 0; i < 40; ++i)
  543. {
  544. if (n >= minBufferSize && n <= 2048 && ! bufferSizes.contains (n))
  545. bufferSizes.addUsingDefaultSort (n);
  546. n += (n < 512) ? 32 : (n < 1024 ? 64 : 128);
  547. }
  548. return true;
  549. }
  550. return false;
  551. }
  552. StringArray getOutputChannelNames()
  553. {
  554. StringArray outChannels;
  555. if (outputDevice != nullptr)
  556. for (int i = 1; i <= outputDevice->actualNumChannels; ++i)
  557. outChannels.add ("Output channel " + String (i));
  558. return outChannels;
  559. }
  560. StringArray getInputChannelNames()
  561. {
  562. StringArray inChannels;
  563. if (inputDevice != nullptr)
  564. for (int i = 1; i <= inputDevice->actualNumChannels; ++i)
  565. inChannels.add ("Input channel " + String (i));
  566. return inChannels;
  567. }
  568. int getNumSampleRates() { return sampleRates.size(); }
  569. double getSampleRate (int index) { return sampleRates [index]; }
  570. int getNumBufferSizesAvailable() { return bufferSizes.size(); }
  571. int getBufferSizeSamples (int index) { return bufferSizes [index]; }
  572. int getDefaultBufferSize() { return defaultBufferSize; }
  573. int getCurrentBufferSizeSamples() { return currentBufferSizeSamples; }
  574. double getCurrentSampleRate() { return currentSampleRate; }
  575. int getCurrentBitDepth() { return 32; }
  576. int getOutputLatencyInSamples() { return latencyOut; }
  577. int getInputLatencyInSamples() { return latencyIn; }
  578. const BigInteger getActiveOutputChannels() const { return outputDevice != nullptr ? outputDevice->channels : BigInteger(); }
  579. const BigInteger getActiveInputChannels() const { return inputDevice != nullptr ? inputDevice->channels : BigInteger(); }
  580. const String getLastError() { return lastError; }
  581. const String open (const BigInteger& inputChannels, const BigInteger& outputChannels,
  582. double sampleRate, int bufferSizeSamples)
  583. {
  584. close();
  585. lastError = String::empty;
  586. if (sampleRates.size() == 0 && inputDevice != nullptr && outputDevice != nullptr)
  587. {
  588. lastError = "The input and output devices don't share a common sample rate!";
  589. return lastError;
  590. }
  591. currentBufferSizeSamples = bufferSizeSamples <= 0 ? defaultBufferSize : jmax (bufferSizeSamples, minBufferSize);
  592. currentSampleRate = sampleRate > 0 ? sampleRate : defaultSampleRate;
  593. if (inputDevice != nullptr && ! inputDevice->open (currentSampleRate, inputChannels))
  594. {
  595. lastError = "Couldn't open the input device!";
  596. return lastError;
  597. }
  598. if (outputDevice != nullptr && ! outputDevice->open (currentSampleRate, outputChannels))
  599. {
  600. close();
  601. lastError = "Couldn't open the output device!";
  602. return lastError;
  603. }
  604. if (inputDevice != nullptr) ResetEvent (inputDevice->clientEvent);
  605. if (outputDevice != nullptr) ResetEvent (outputDevice->clientEvent);
  606. startThread (8);
  607. Thread::sleep (5);
  608. if (inputDevice != nullptr && inputDevice->client != nullptr)
  609. {
  610. latencyIn = inputDevice->latencySamples + inputDevice->actualBufferSize + inputDevice->minBufferSize;
  611. HRESULT hr = inputDevice->client->Start();
  612. logFailure (hr); //xxx handle this
  613. }
  614. if (outputDevice != nullptr && outputDevice->client != nullptr)
  615. {
  616. latencyOut = outputDevice->latencySamples + outputDevice->actualBufferSize + outputDevice->minBufferSize;
  617. HRESULT hr = outputDevice->client->Start();
  618. logFailure (hr); //xxx handle this
  619. }
  620. isOpen_ = true;
  621. return lastError;
  622. }
  623. void close()
  624. {
  625. stop();
  626. signalThreadShouldExit();
  627. if (inputDevice != nullptr) SetEvent (inputDevice->clientEvent);
  628. if (outputDevice != nullptr) SetEvent (outputDevice->clientEvent);
  629. stopThread (5000);
  630. if (inputDevice != nullptr) inputDevice->close();
  631. if (outputDevice != nullptr) outputDevice->close();
  632. isOpen_ = false;
  633. }
  634. bool isOpen() { return isOpen_ && isThreadRunning(); }
  635. bool isPlaying() { return isStarted && isOpen_ && isThreadRunning(); }
  636. void start (AudioIODeviceCallback* call)
  637. {
  638. if (isOpen_ && call != nullptr && ! isStarted)
  639. {
  640. if (! isThreadRunning())
  641. {
  642. // something's gone wrong and the thread's stopped..
  643. isOpen_ = false;
  644. return;
  645. }
  646. call->audioDeviceAboutToStart (this);
  647. const ScopedLock sl (startStopLock);
  648. callback = call;
  649. isStarted = true;
  650. }
  651. }
  652. void stop()
  653. {
  654. if (isStarted)
  655. {
  656. AudioIODeviceCallback* const callbackLocal = callback;
  657. {
  658. const ScopedLock sl (startStopLock);
  659. isStarted = false;
  660. }
  661. if (callbackLocal != nullptr)
  662. callbackLocal->audioDeviceStopped();
  663. }
  664. }
  665. void setMMThreadPriority()
  666. {
  667. DynamicLibrary dll ("avrt.dll");
  668. JUCE_DLL_FUNCTION (AvSetMmThreadCharacteristicsW, avSetMmThreadCharacteristics, HANDLE, dll, (LPCWSTR, LPDWORD))
  669. JUCE_DLL_FUNCTION (AvSetMmThreadPriority, avSetMmThreadPriority, HANDLE, dll, (HANDLE, AVRT_PRIORITY))
  670. if (avSetMmThreadCharacteristics != 0 && avSetMmThreadPriority != 0)
  671. {
  672. DWORD dummy = 0;
  673. HANDLE h = avSetMmThreadCharacteristics (L"Pro Audio", &dummy);
  674. if (h != 0)
  675. avSetMmThreadPriority (h, AVRT_PRIORITY_NORMAL);
  676. }
  677. }
  678. void run()
  679. {
  680. setMMThreadPriority();
  681. const int bufferSize = currentBufferSizeSamples;
  682. const int numInputBuffers = getActiveInputChannels().countNumberOfSetBits();
  683. const int numOutputBuffers = getActiveOutputChannels().countNumberOfSetBits();
  684. bool sampleRateChanged = false;
  685. AudioSampleBuffer ins (jmax (1, numInputBuffers), bufferSize + 32);
  686. AudioSampleBuffer outs (jmax (1, numOutputBuffers), bufferSize + 32);
  687. float** const inputBuffers = ins.getArrayOfChannels();
  688. float** const outputBuffers = outs.getArrayOfChannels();
  689. ins.clear();
  690. while (! threadShouldExit())
  691. {
  692. if (inputDevice != nullptr)
  693. {
  694. inputDevice->copyBuffers (inputBuffers, numInputBuffers, bufferSize, *this);
  695. if (threadShouldExit())
  696. break;
  697. if (inputDevice->sampleRateHasChanged)
  698. sampleRateChanged = true;
  699. }
  700. JUCE_TRY
  701. {
  702. const ScopedLock sl (startStopLock);
  703. if (isStarted)
  704. callback->audioDeviceIOCallback (const_cast <const float**> (inputBuffers), numInputBuffers,
  705. outputBuffers, numOutputBuffers, bufferSize);
  706. else
  707. outs.clear();
  708. }
  709. JUCE_CATCH_EXCEPTION
  710. if (outputDevice != nullptr)
  711. {
  712. outputDevice->copyBuffers (const_cast <const float**> (outputBuffers), numOutputBuffers, bufferSize, *this);
  713. if (outputDevice->sampleRateHasChanged)
  714. sampleRateChanged = true;
  715. }
  716. if (sampleRateChanged)
  717. {
  718. // xxx one of the devices has had its sample rate changed externally.. not 100% sure how
  719. // to handle this..
  720. }
  721. }
  722. }
  723. //==============================================================================
  724. String outputDeviceId, inputDeviceId;
  725. String lastError;
  726. private:
  727. // Device stats...
  728. ScopedPointer<WASAPIInputDevice> inputDevice;
  729. ScopedPointer<WASAPIOutputDevice> outputDevice;
  730. const bool useExclusiveMode;
  731. double defaultSampleRate;
  732. int minBufferSize, defaultBufferSize;
  733. int latencyIn, latencyOut;
  734. Array <double> sampleRates;
  735. Array <int> bufferSizes;
  736. // Active state...
  737. bool isOpen_, isStarted;
  738. int currentBufferSizeSamples;
  739. double currentSampleRate;
  740. AudioIODeviceCallback* callback;
  741. CriticalSection startStopLock;
  742. //==============================================================================
  743. bool createDevices()
  744. {
  745. ComSmartPtr <IMMDeviceEnumerator> enumerator;
  746. if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
  747. return false;
  748. ComSmartPtr <IMMDeviceCollection> deviceCollection;
  749. if (! check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress())))
  750. return false;
  751. UINT32 numDevices = 0;
  752. if (! check (deviceCollection->GetCount (&numDevices)))
  753. return false;
  754. for (UINT32 i = 0; i < numDevices; ++i)
  755. {
  756. ComSmartPtr <IMMDevice> device;
  757. if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
  758. continue;
  759. const String deviceId (getDeviceID (device));
  760. if (deviceId.isEmpty())
  761. continue;
  762. const EDataFlow flow = getDataFlow (device);
  763. if (deviceId == inputDeviceId && flow == eCapture)
  764. inputDevice = new WASAPIInputDevice (device, useExclusiveMode);
  765. else if (deviceId == outputDeviceId && flow == eRender)
  766. outputDevice = new WASAPIOutputDevice (device, useExclusiveMode);
  767. }
  768. return (outputDeviceId.isEmpty() || (outputDevice != nullptr && outputDevice->isOk()))
  769. && (inputDeviceId.isEmpty() || (inputDevice != nullptr && inputDevice->isOk()));
  770. }
  771. //==============================================================================
  772. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODevice);
  773. };
  774. //==============================================================================
  775. class WASAPIAudioIODeviceType : public AudioIODeviceType,
  776. private DeviceChangeDetector
  777. {
  778. public:
  779. WASAPIAudioIODeviceType()
  780. : AudioIODeviceType ("Windows Audio"),
  781. DeviceChangeDetector (L"Windows Audio"),
  782. hasScanned (false)
  783. {
  784. }
  785. //==============================================================================
  786. void scanForDevices()
  787. {
  788. hasScanned = true;
  789. outputDeviceNames.clear();
  790. inputDeviceNames.clear();
  791. outputDeviceIds.clear();
  792. inputDeviceIds.clear();
  793. scan (outputDeviceNames, inputDeviceNames,
  794. outputDeviceIds, inputDeviceIds);
  795. }
  796. StringArray getDeviceNames (bool wantInputNames) const
  797. {
  798. jassert (hasScanned); // need to call scanForDevices() before doing this
  799. return wantInputNames ? inputDeviceNames
  800. : outputDeviceNames;
  801. }
  802. int getDefaultDeviceIndex (bool /*forInput*/) const
  803. {
  804. jassert (hasScanned); // need to call scanForDevices() before doing this
  805. return 0;
  806. }
  807. int getIndexOfDevice (AudioIODevice* device, bool asInput) const
  808. {
  809. jassert (hasScanned); // need to call scanForDevices() before doing this
  810. WASAPIAudioIODevice* const d = dynamic_cast <WASAPIAudioIODevice*> (device);
  811. return d == nullptr ? -1 : (asInput ? inputDeviceIds.indexOf (d->inputDeviceId)
  812. : outputDeviceIds.indexOf (d->outputDeviceId));
  813. }
  814. bool hasSeparateInputsAndOutputs() const { return true; }
  815. AudioIODevice* createDevice (const String& outputDeviceName,
  816. const String& inputDeviceName)
  817. {
  818. jassert (hasScanned); // need to call scanForDevices() before doing this
  819. const bool useExclusiveMode = false;
  820. ScopedPointer<WASAPIAudioIODevice> device;
  821. const int outputIndex = outputDeviceNames.indexOf (outputDeviceName);
  822. const int inputIndex = inputDeviceNames.indexOf (inputDeviceName);
  823. if (outputIndex >= 0 || inputIndex >= 0)
  824. {
  825. device = new WASAPIAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  826. : inputDeviceName,
  827. outputDeviceIds [outputIndex],
  828. inputDeviceIds [inputIndex],
  829. useExclusiveMode);
  830. if (! device->initialise())
  831. device = nullptr;
  832. }
  833. return device.release();
  834. }
  835. //==============================================================================
  836. StringArray outputDeviceNames, outputDeviceIds;
  837. StringArray inputDeviceNames, inputDeviceIds;
  838. private:
  839. bool hasScanned;
  840. //==============================================================================
  841. static String getDefaultEndpoint (IMMDeviceEnumerator* const enumerator, const bool forCapture)
  842. {
  843. String s;
  844. IMMDevice* dev = nullptr;
  845. if (check (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
  846. eMultimedia, &dev)))
  847. {
  848. WCHAR* deviceId = nullptr;
  849. if (check (dev->GetId (&deviceId)))
  850. {
  851. s = deviceId;
  852. CoTaskMemFree (deviceId);
  853. }
  854. dev->Release();
  855. }
  856. return s;
  857. }
  858. //==============================================================================
  859. void scan (StringArray& outputDeviceNames,
  860. StringArray& inputDeviceNames,
  861. StringArray& outputDeviceIds,
  862. StringArray& inputDeviceIds)
  863. {
  864. ComSmartPtr <IMMDeviceEnumerator> enumerator;
  865. if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
  866. return;
  867. const String defaultRenderer (getDefaultEndpoint (enumerator, false));
  868. const String defaultCapture (getDefaultEndpoint (enumerator, true));
  869. ComSmartPtr <IMMDeviceCollection> deviceCollection;
  870. UINT32 numDevices = 0;
  871. if (! (check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, deviceCollection.resetAndGetPointerAddress()))
  872. && check (deviceCollection->GetCount (&numDevices))))
  873. return;
  874. for (UINT32 i = 0; i < numDevices; ++i)
  875. {
  876. ComSmartPtr <IMMDevice> device;
  877. if (! check (deviceCollection->Item (i, device.resetAndGetPointerAddress())))
  878. continue;
  879. DWORD state = 0;
  880. if (! (check (device->GetState (&state)) && state == DEVICE_STATE_ACTIVE))
  881. continue;
  882. const String deviceId (getDeviceID (device));
  883. String name;
  884. {
  885. ComSmartPtr <IPropertyStore> properties;
  886. if (! check (device->OpenPropertyStore (STGM_READ, properties.resetAndGetPointerAddress())))
  887. continue;
  888. PROPVARIANT value;
  889. PropVariantInit (&value);
  890. if (check (properties->GetValue (PKEY_Device_FriendlyName, &value)))
  891. name = value.pwszVal;
  892. PropVariantClear (&value);
  893. }
  894. const EDataFlow flow = getDataFlow (device);
  895. if (flow == eRender)
  896. {
  897. const int index = (deviceId == defaultRenderer) ? 0 : -1;
  898. outputDeviceIds.insert (index, deviceId);
  899. outputDeviceNames.insert (index, name);
  900. }
  901. else if (flow == eCapture)
  902. {
  903. const int index = (deviceId == defaultCapture) ? 0 : -1;
  904. inputDeviceIds.insert (index, deviceId);
  905. inputDeviceNames.insert (index, name);
  906. }
  907. }
  908. inputDeviceNames.appendNumbersToDuplicates (false, false);
  909. outputDeviceNames.appendNumbersToDuplicates (false, false);
  910. }
  911. //==============================================================================
  912. void systemDeviceChanged()
  913. {
  914. StringArray newOutNames, newInNames, newOutIds, newInIds;
  915. scan (newOutNames, newInNames, newOutIds, newInIds);
  916. if (newOutNames != outputDeviceNames
  917. || newInNames != inputDeviceNames
  918. || newOutIds != outputDeviceIds
  919. || newInIds != inputDeviceIds)
  920. {
  921. hasScanned = true;
  922. outputDeviceNames = newOutNames;
  923. inputDeviceNames = newInNames;
  924. outputDeviceIds = newOutIds;
  925. inputDeviceIds = newInIds;
  926. callDeviceChangeListeners();
  927. }
  928. }
  929. //==============================================================================
  930. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (WASAPIAudioIODeviceType);
  931. };
  932. }
  933. //==============================================================================
  934. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_WASAPI()
  935. {
  936. if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
  937. return new WasapiClasses::WASAPIAudioIODeviceType();
  938. return nullptr;
  939. }
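// Editor's note: a sketch of how application code might drive this backend directly through the
// AudioIODeviceType / AudioIODevice interface implemented above. The function name, channel count,
// sample rate and buffer size are illustrative choices; most JUCE applications would go through
// AudioDeviceManager instead of doing this by hand.
static AudioIODevice* exampleCreateAndOpenWasapiDevice (AudioIODeviceCallback* callbackToUse)
{
    ScopedPointer<AudioIODeviceType> type (AudioIODeviceType::createAudioIODeviceType_WASAPI());
    if (type == nullptr)
        return nullptr;   // pre-Vista system: WASAPI not available

    type->scanForDevices();
    const StringArray outputs (type->getDeviceNames (false));   // false == output device names
    if (outputs.size() == 0)
        return nullptr;

    ScopedPointer<AudioIODevice> device (type->createDevice (outputs[0], String::empty));
    if (device == nullptr)
        return nullptr;

    BigInteger outputChannels;
    outputChannels.setRange (0, 2, true);   // ask for the first two output channels
    if (device->open (BigInteger(), outputChannels, 44100.0, 512).isNotEmpty())
        return nullptr;                     // open() returns an error message on failure

    device->start (callbackToUse);
    return device.release();
}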
  940. #endif