The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1128 lines
42KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library - "Jules' Utility Class Extensions"
  4. Copyright 2004-9 by Raw Material Software Ltd.
  5. ------------------------------------------------------------------------------
  6. JUCE can be redistributed and/or modified under the terms of the GNU General
  7. Public License (Version 2), as published by the Free Software Foundation.
  8. A copy of the license is included in the JUCE distribution, or can be found
  9. online at www.gnu.org/licenses.
  10. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  11. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  12. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  13. ------------------------------------------------------------------------------
  14. To release a closed-source product which uses JUCE, commercial licenses are
  15. available: visit www.rawmaterialsoftware.com/juce for more information.
  16. ==============================================================================
  17. */
  18. // (This file gets included by juce_win32_NativeCode.cpp, rather than being
  19. // compiled on its own).
  20. #if JUCE_INCLUDED_FILE && JUCE_WASAPI
  21. //==============================================================================
  22. #if 1
  23. const String getAudioErrorDesc (HRESULT hr)
  24. {
  25. const char* e = 0;
  26. switch (hr)
  27. {
  28. case E_POINTER: e = "E_POINTER"; break;
  29. case E_INVALIDARG: e = "E_INVALIDARG"; break;
  30. case AUDCLNT_E_NOT_INITIALIZED: e = "AUDCLNT_E_NOT_INITIALIZED"; break;
  31. case AUDCLNT_E_ALREADY_INITIALIZED: e = "AUDCLNT_E_ALREADY_INITIALIZED"; break;
  32. case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e = "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
  33. case AUDCLNT_E_DEVICE_INVALIDATED: e = "AUDCLNT_E_DEVICE_INVALIDATED"; break;
  34. case AUDCLNT_E_NOT_STOPPED: e = "AUDCLNT_E_NOT_STOPPED"; break;
  35. case AUDCLNT_E_BUFFER_TOO_LARGE: e = "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
  36. case AUDCLNT_E_OUT_OF_ORDER: e = "AUDCLNT_E_OUT_OF_ORDER"; break;
  37. case AUDCLNT_E_UNSUPPORTED_FORMAT: e = "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
  38. case AUDCLNT_E_INVALID_SIZE: e = "AUDCLNT_E_INVALID_SIZE"; break;
  39. case AUDCLNT_E_DEVICE_IN_USE: e = "AUDCLNT_E_DEVICE_IN_USE"; break;
  40. case AUDCLNT_E_BUFFER_OPERATION_PENDING: e = "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
  41. case AUDCLNT_E_THREAD_NOT_REGISTERED: e = "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
  42. case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e = "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
  43. case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e = "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
  44. case AUDCLNT_E_SERVICE_NOT_RUNNING: e = "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
  45. case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e = "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
  46. case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e = "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
  47. case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e = "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
  48. case AUDCLNT_E_EVENTHANDLE_NOT_SET: e = "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
  49. case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e = "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
  50. case AUDCLNT_E_BUFFER_SIZE_ERROR: e = "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
  51. case AUDCLNT_S_BUFFER_EMPTY: e = "AUDCLNT_S_BUFFER_EMPTY"; break;
  52. case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e = "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
  53. default: return String::toHexString ((int) hr);
  54. }
  55. return e;
  56. }
// Debug-build helper: logs any failed HRESULT (with its symbolic name) and
// asserts. Kept as a macro so the DBG/jassertfalse fire at the call site.
#define logFailure(hr) { if (FAILED (hr)) { DBG ("WASAPI FAIL! " + getAudioErrorDesc (hr)); jassertfalse } }

// Wraps a COM call: logs a failure and evaluates to true on success.
#define OK(a) wasapi_checkResult(a)

static bool wasapi_checkResult (HRESULT hr)
{
    logFailure (hr);
    return SUCCEEDED (hr);
}

#else
// Non-logging variants: just test the HRESULT.
#define logFailure(hr) {}
#define OK(a) SUCCEEDED(a)
#endif
  68. //==============================================================================
  69. static const String wasapi_getDeviceID (IMMDevice* const device)
  70. {
  71. String s;
  72. WCHAR* deviceId = 0;
  73. if (OK (device->GetId (&deviceId)))
  74. {
  75. s = String (deviceId);
  76. CoTaskMemFree (deviceId);
  77. }
  78. return s;
  79. }
  80. static EDataFlow wasapi_getDataFlow (IMMDevice* const device)
  81. {
  82. EDataFlow flow = eRender;
  83. ComSmartPtr <IMMEndpoint> endPoint;
  84. if (OK (device->QueryInterface (__uuidof (IMMEndpoint), (void**) &endPoint)))
  85. (void) OK (endPoint->GetDataFlow (&flow));
  86. return flow;
  87. }
  88. static int wasapi_refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) throw()
  89. {
  90. return roundDoubleToInt (sampleRate * ((double) t) * 0.0000001);
  91. }
  92. static void wasapi_copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) throw()
  93. {
  94. memcpy (&dest, src, src->wFormatTag == WAVE_FORMAT_EXTENSIBLE ? sizeof (WAVEFORMATEXTENSIBLE)
  95. : sizeof (WAVEFORMATEX));
  96. }
  97. //==============================================================================
//==============================================================================
/** Shared base for WASAPI input/output endpoints.

    On construction it probes the device (mix format, default/minimum periods,
    supported sample rates); openClient() then creates and initialises the
    IAudioClient for a specific rate and channel set.
*/
class WASAPIDeviceBase
{
public:
    WASAPIDeviceBase (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : device (device_),
          sampleRate (0),
          numChannels (0),
          actualNumChannels (0),
          defaultSampleRate (0),
          minBufferSize (0),
          defaultBufferSize (0),
          latencySamples (0),
          useExclusiveMode (useExclusiveMode_)
    {
        // Auto-reset event that the audio client will signal for event-driven IO.
        clientEvent = CreateEvent (0, false, false, _T("JuceWASAPI"));

        // A temporary client is used purely to query the device's capabilities.
        ComSmartPtr <IAudioClient> tempClient (createClient());
        if (tempClient == 0)
            return;

        REFERENCE_TIME defaultPeriod, minPeriod;
        if (! OK (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
            return;

        WAVEFORMATEX* mixFormat = 0;
        if (! OK (tempClient->GetMixFormat (&mixFormat)))
            return;

        WAVEFORMATEXTENSIBLE format;
        wasapi_copyWavFormat (format, mixFormat);
        CoTaskMemFree (mixFormat);  // GetMixFormat's buffer is COM-allocated

        actualNumChannels = numChannels = format.Format.nChannels;
        defaultSampleRate = format.Format.nSamplesPerSec;
        minBufferSize = wasapi_refTimeToSamples (minPeriod, defaultSampleRate);
        defaultBufferSize = wasapi_refTimeToSamples (defaultPeriod, defaultSampleRate);

        // Build the supported-rate list: always include the device's own default,
        // then probe a handful of common rates via IsFormatSupported.
        FloatElementComparator<double> comparator;
        rates.addSorted (comparator, defaultSampleRate);

        static const double ratesToTest[] = { 44100.0, 48000.0, 88200.0, 96000.0 };

        for (int i = 0; i < numElementsInArray (ratesToTest); ++i)
        {
            if (ratesToTest[i] == defaultSampleRate)
                continue;

            format.Format.nSamplesPerSec = roundDoubleToInt (ratesToTest[i]);

            if (SUCCEEDED (tempClient->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                          (WAVEFORMATEX*) &format, 0)))
                if (! rates.contains (ratesToTest[i]))
                    rates.addSorted (comparator, ratesToTest[i]);
        }
    }

    ~WASAPIDeviceBase()
    {
        device = 0;
        CloseHandle (clientEvent);
    }

    /** True if the constructor successfully read sensible defaults from the device. */
    bool isOk() const throw()   { return defaultBufferSize > 0 && defaultSampleRate > 0; }

    /** Creates and initialises the audio client for the given rate/channel set.
        Returns true on success, or trivially if no channels are requested.
    */
    bool openClient (const double newSampleRate, const BigInteger& newChannels)
    {
        sampleRate = newSampleRate;
        channels = newChannels;
        // Clear any requested channel bits beyond what the device actually has.
        channels.setRange (actualNumChannels, channels.getHighestBit() + 1 - actualNumChannels, false);
        numChannels = channels.getHighestBit() + 1;

        if (numChannels == 0)
            return true;

        client = createClient();

        // Try 32-bit float first, then fall back through 32/24/16-bit integer PCM.
        if (client != 0
             && (tryInitialisingWithFormat (true, 4) || tryInitialisingWithFormat (false, 4)
                  || tryInitialisingWithFormat (false, 3) || tryInitialisingWithFormat (false, 2)))
        {
            // Record the device-channel index for each active channel bit.
            channelMaps.clear();
            for (int i = 0; i <= channels.getHighestBit(); ++i)
                if (channels[i])
                    channelMaps.add (i);

            REFERENCE_TIME latency;
            if (OK (client->GetStreamLatency (&latency)))
                latencySamples = wasapi_refTimeToSamples (latency, sampleRate);

            (void) OK (client->GetBufferSize (&actualBufferSize));

            return OK (client->SetEventHandle (clientEvent));
        }

        return false;
    }

    /** Stops and releases the audio client, and resets the client event. */
    void closeClient()
    {
        if (client != 0)
            client->Stop();

        client = 0;
        ResetEvent (clientEvent);
    }

    ComSmartPtr <IMMDevice> device;
    ComSmartPtr <IAudioClient> client;
    double sampleRate, defaultSampleRate;
    int numChannels, actualNumChannels;     // requested (highest bit + 1) vs device channel count
    int minBufferSize, defaultBufferSize, latencySamples;
    const bool useExclusiveMode;
    Array <double> rates;                   // sample rates the device accepted
    HANDLE clientEvent;                     // auto-reset event signalled by the audio client
    BigInteger channels;                    // the active channel set
    AudioDataConverters::DataFormat dataFormat;
    Array <int> channelMaps;                // device channel index for each active channel
    UINT32 actualBufferSize;                // buffer size reported by the initialised client
    int bytesPerSample;                     // bytes per sample of the negotiated format

private:
    /** Activates an IAudioClient on the device; returns a null pointer on failure. */
    const ComSmartPtr <IAudioClient> createClient()
    {
        ComSmartPtr <IAudioClient> client;

        if (device != 0)
        {
            HRESULT hr = device->Activate (__uuidof (IAudioClient), CLSCTX_INPROC_SERVER, 0, (void**) &client);
            logFailure (hr);
        }

        return client;
    }

    /** Builds a format from the current rate/channel count and tries to
        Initialize() the client with it (float or integer, given sample width).
    */
    bool tryInitialisingWithFormat (const bool useFloat, const int bytesPerSampleToTry)
    {
        WAVEFORMATEXTENSIBLE format;
        zerostruct (format);

        // Plain PCM covers <= 2 channels at <= 2 bytes/sample; anything bigger
        // needs the extensible format with a channel mask and sub-format GUID.
        if (numChannels <= 2 && bytesPerSampleToTry <= 2)
        {
            format.Format.wFormatTag = WAVE_FORMAT_PCM;
        }
        else
        {
            format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
            format.Format.cbSize = sizeof (WAVEFORMATEXTENSIBLE) - sizeof (WAVEFORMATEX);
        }

        format.Format.nSamplesPerSec = roundDoubleToInt (sampleRate);
        format.Format.nChannels = (WORD) numChannels;
        format.Format.wBitsPerSample = (WORD) (8 * bytesPerSampleToTry);
        format.Format.nAvgBytesPerSec = (DWORD) (format.Format.nSamplesPerSec * numChannels * bytesPerSampleToTry);
        format.Format.nBlockAlign = (WORD) (numChannels * bytesPerSampleToTry);
        format.SubFormat = useFloat ? KSDATAFORMAT_SUBTYPE_IEEE_FLOAT : KSDATAFORMAT_SUBTYPE_PCM;
        format.Samples.wValidBitsPerSample = format.Format.wBitsPerSample;

        // Standard speaker layouts for the channel counts we expect.
        switch (numChannels)
        {
            case 1: format.dwChannelMask = SPEAKER_FRONT_CENTER; break;
            case 2: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT; break;
            case 4: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 6: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT; break;
            case 8: format.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT | SPEAKER_FRONT_CENTER | SPEAKER_LOW_FREQUENCY | SPEAKER_BACK_LEFT | SPEAKER_BACK_RIGHT | SPEAKER_FRONT_LEFT_OF_CENTER | SPEAKER_FRONT_RIGHT_OF_CENTER; break;
            default: break;
        }

        // In shared mode the driver may return a "closest match" format; in
        // exclusive mode no match pointer is requested.
        WAVEFORMATEXTENSIBLE* nearestFormat = 0;

        HRESULT hr = client->IsFormatSupported (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                                (WAVEFORMATEX*) &format, useExclusiveMode ? 0 : (WAVEFORMATEX**) &nearestFormat);
        logFailure (hr);

        // Accept the driver's suggestion if it keeps the sample rate we asked for.
        if (hr == S_FALSE && format.Format.nSamplesPerSec == nearestFormat->Format.nSamplesPerSec)
        {
            wasapi_copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
            hr = S_OK;
        }

        CoTaskMemFree (nearestFormat);

        // Exclusive mode initialises with the device's default period; shared
        // mode passes zero periods.
        REFERENCE_TIME defaultPeriod = 0, minPeriod = 0;
        if (useExclusiveMode)
            OK (client->GetDevicePeriod (&defaultPeriod, &minPeriod));

        GUID session;
        if (hr == S_OK
             && OK (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
                                        AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                                        defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
        {
            // Remember the format that was actually negotiated.
            actualNumChannels = format.Format.nChannels;
            const bool isFloat = format.Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE && format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
            bytesPerSample = format.Format.wBitsPerSample / 8;

            dataFormat = isFloat ? AudioDataConverters::float32LE
                                 : (bytesPerSample == 4 ? AudioDataConverters::int32LE
                                                        : ((bytesPerSample == 3 ? AudioDataConverters::int24LE
                                                                                : AudioDataConverters::int16LE)));
            return true;
        }

        return false;
    }
};
  265. //==============================================================================
//==============================================================================
/** WASAPI capture endpoint.

    copyBuffers() pulls packets from the IAudioCaptureClient, converting and
    de-interleaving into float channel buffers. Any captured samples beyond
    what the caller asked for are stashed in a "reservoir" and served first
    on the next call.
*/
class WASAPIInputDevice : public WASAPIDeviceBase
{
public:
    WASAPIInputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_),
          reservoir (1, 1)
    {
    }

    ~WASAPIInputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        reservoirSize = 0;
        reservoirCapacity = 16384;  // max held-back samples (per channel)
        reservoir.setSize (actualNumChannels * reservoirCapacity * sizeof (float));
        return openClient (newSampleRate, newChannels)
            && (numChannels == 0 || OK (client->GetService (__uuidof (IAudioCaptureClient), (void**) &captureClient)));
    }

    void close()
    {
        closeClient();
        captureClient = 0;
        reservoir.setSize (0);
    }

    /** Fills bufferSize frames of each dest buffer with converted float data,
        draining the reservoir first, then polling the capture client.
        Sleeps while no packet is available; bails out if the thread is asked
        to exit or a COM call fails.
    */
    void copyBuffers (float** destBuffers, int numDestBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            if (reservoirSize > 0) // There's stuff in the reservoir, so use that...
            {
                // NOTE(review): data is always read from the *front* of the
                // reservoir, but consumed samples are never shifted out - if
                // the reservoir isn't fully drained here, the next pass would
                // appear to re-read the same leading samples. Verify whether
                // a partial drain can actually occur in practice.
                const int samplesToDo = jmin (bufferSize, (int) reservoirSize);

                for (int i = 0; i < numDestBuffers; ++i)
                {
                    float* const dest = destBuffers[i] + offset;
                    const int srcChan = channelMaps.getUnchecked(i);

                    // Stride by the interleaved frame size for the device format.
                    switch (dataFormat)
                    {
                        case AudioDataConverters::float32LE:
                            AudioDataConverters::convertFloat32LEToFloat (((uint8*) reservoir.getData()) + 4 * srcChan, dest, samplesToDo, 4 * actualNumChannels);
                            break;
                        case AudioDataConverters::int32LE:
                            AudioDataConverters::convertInt32LEToFloat (((uint8*) reservoir.getData()) + 4 * srcChan, dest, samplesToDo, 4 * actualNumChannels);
                            break;
                        case AudioDataConverters::int24LE:
                            AudioDataConverters::convertInt24LEToFloat (((uint8*) reservoir.getData()) + 3 * srcChan, dest, samplesToDo, 3 * actualNumChannels);
                            break;
                        case AudioDataConverters::int16LE:
                            AudioDataConverters::convertInt16LEToFloat (((uint8*) reservoir.getData()) + 2 * srcChan, dest, samplesToDo, 2 * actualNumChannels);
                            break;
                        default: jassertfalse; break;
                    }
                }

                bufferSize -= samplesToDo;
                offset += samplesToDo;
                reservoirSize -= samplesToDo;
            }
            else
            {
                UINT32 packetLength = 0;
                if (! OK (captureClient->GetNextPacketSize (&packetLength)))
                    break;

                if (packetLength == 0)
                {
                    // Nothing captured yet - wait briefly unless we're being shut down.
                    if (thread.threadShouldExit())
                        break;

                    Thread::sleep (1);
                    continue;
                }

                uint8* inputData = 0;
                UINT32 numSamplesAvailable;
                DWORD flags;

                if (OK (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
                {
                    const int samplesToDo = jmin (bufferSize, (int) numSamplesAvailable);

                    for (int i = 0; i < numDestBuffers; ++i)
                    {
                        float* const dest = destBuffers[i] + offset;
                        const int srcChan = channelMaps.getUnchecked(i);

                        switch (dataFormat)
                        {
                            case AudioDataConverters::float32LE:
                                AudioDataConverters::convertFloat32LEToFloat (inputData + 4 * srcChan, dest, samplesToDo, 4 * actualNumChannels);
                                break;
                            case AudioDataConverters::int32LE:
                                AudioDataConverters::convertInt32LEToFloat (inputData + 4 * srcChan, dest, samplesToDo, 4 * actualNumChannels);
                                break;
                            case AudioDataConverters::int24LE:
                                AudioDataConverters::convertInt24LEToFloat (inputData + 3 * srcChan, dest, samplesToDo, 3 * actualNumChannels);
                                break;
                            case AudioDataConverters::int16LE:
                                AudioDataConverters::convertInt16LEToFloat (inputData + 2 * srcChan, dest, samplesToDo, 2 * actualNumChannels);
                                break;
                            default: jassertfalse; break;
                        }
                    }

                    bufferSize -= samplesToDo;
                    offset += samplesToDo;

                    // Keep any surplus captured samples for the next call.
                    if (samplesToDo < (int) numSamplesAvailable)
                    {
                        reservoirSize = jmin ((int) (numSamplesAvailable - samplesToDo), reservoirCapacity);
                        memcpy ((uint8*) reservoir.getData(), inputData + bytesPerSample * actualNumChannels * samplesToDo,
                                bytesPerSample * actualNumChannels * reservoirSize);
                    }

                    captureClient->ReleaseBuffer (numSamplesAvailable);
                }
            }
        }
    }

    ComSmartPtr <IAudioCaptureClient> captureClient;
    MemoryBlock reservoir;                     // surplus captured frames, still in the device's interleaved format
    int reservoirSize, reservoirCapacity;      // current and max frame counts in the reservoir
};
  383. //==============================================================================
//==============================================================================
/** WASAPI render endpoint.

    copyBuffers() converts and interleaves float channel buffers into the
    device's format via the IAudioRenderClient.
*/
class WASAPIOutputDevice : public WASAPIDeviceBase
{
public:
    WASAPIOutputDevice (const ComSmartPtr <IMMDevice>& device_, const bool useExclusiveMode_)
        : WASAPIDeviceBase (device_, useExclusiveMode_)
    {
    }

    ~WASAPIOutputDevice()
    {
        close();
    }

    bool open (const double newSampleRate, const BigInteger& newChannels)
    {
        return openClient (newSampleRate, newChannels)
            && (numChannels == 0 || OK (client->GetService (__uuidof (IAudioRenderClient), (void**) &renderClient)));
    }

    void close()
    {
        closeClient();
        renderClient = 0;
    }

    /** Pushes bufferSize frames from the source buffers to the device,
        converting from float and interleaving. Spins (yielding) while the
        shared-mode buffer has no space; bails out on thread exit or failure.
    */
    void copyBuffers (const float** const srcBuffers, const int numSrcBuffers, int bufferSize, Thread& thread)
    {
        if (numChannels <= 0)
            return;

        int offset = 0;

        while (bufferSize > 0)
        {
            UINT32 padding = 0;
            if (! OK (client->GetCurrentPadding (&padding)))
                return;

            // Exclusive mode writes the whole block; shared mode is limited to
            // the free space left in the device buffer.
            int samplesToDo = useExclusiveMode ? bufferSize
                                               : jmin ((int) (actualBufferSize - padding), bufferSize);

            if (samplesToDo <= 0)
            {
                if (thread.threadShouldExit())
                    break;

                Thread::sleep (0);  // yield until space frees up
                continue;
            }

            uint8* outputData = 0;
            if (OK (renderClient->GetBuffer (samplesToDo, &outputData)))
            {
                for (int i = 0; i < numSrcBuffers; ++i)
                {
                    const float* const source = srcBuffers[i] + offset;
                    const int destChan = channelMaps.getUnchecked(i);

                    // Stride by the interleaved frame size for the device format.
                    switch (dataFormat)
                    {
                        case AudioDataConverters::float32LE:
                            AudioDataConverters::convertFloatToFloat32LE (source, outputData + 4 * destChan, samplesToDo, 4 * actualNumChannels);
                            break;
                        case AudioDataConverters::int32LE:
                            AudioDataConverters::convertFloatToInt32LE (source, outputData + 4 * destChan, samplesToDo, 4 * actualNumChannels);
                            break;
                        case AudioDataConverters::int24LE:
                            AudioDataConverters::convertFloatToInt24LE (source, outputData + 3 * destChan, samplesToDo, 3 * actualNumChannels);
                            break;
                        case AudioDataConverters::int16LE:
                            AudioDataConverters::convertFloatToInt16LE (source, outputData + 2 * destChan, samplesToDo, 2 * actualNumChannels);
                            break;
                        default: jassertfalse; break;
                    }
                }

                renderClient->ReleaseBuffer (samplesToDo, 0);

                offset += samplesToDo;
                bufferSize -= samplesToDo;
            }
        }
    }

    ComSmartPtr <IAudioRenderClient> renderClient;
};
  456. //==============================================================================
//==============================================================================
/** An AudioIODevice that drives a WASAPI input and/or output endpoint from
    its own audio thread.

    The thread waits on the endpoints' client events, pulls input, invokes the
    registered callback, and pushes output each cycle.
*/
class WASAPIAudioIODevice : public AudioIODevice,
                            public Thread
{
public:
    WASAPIAudioIODevice (const String& deviceName,
                         const String& outputDeviceId_,
                         const String& inputDeviceId_,
                         const bool useExclusiveMode_)
        : AudioIODevice (deviceName, "Windows Audio"),
          Thread ("Juce WASAPI"),
          isOpen_ (false),
          isStarted (false),
          outputDevice (0),
          outputDeviceId (outputDeviceId_),
          inputDevice (0),
          inputDeviceId (inputDeviceId_),
          useExclusiveMode (useExclusiveMode_),
          currentBufferSizeSamples (0),
          currentSampleRate (0),
          callback (0)
    {
    }

    ~WASAPIAudioIODevice()
    {
        close();

        deleteAndZero (inputDevice);
        deleteAndZero (outputDevice);
    }

    /** Creates the endpoint objects and works out the combined sample-rate and
        buffer-size lists. Returns false if the requested devices can't be set up.
    */
    bool initialise()
    {
        double defaultSampleRateIn = 0, defaultSampleRateOut = 0;
        int minBufferSizeIn = 0, defaultBufferSizeIn = 0, minBufferSizeOut = 0, defaultBufferSizeOut = 0;
        latencyIn = latencyOut = 0;
        Array <double> ratesIn, ratesOut;

        if (createDevices())
        {
            jassert (inputDevice != 0 || outputDevice != 0);

            if (inputDevice != 0 && outputDevice != 0)
            {
                // With both endpoints, use the intersection of their capabilities.
                defaultSampleRate = jmin (inputDevice->defaultSampleRate, outputDevice->defaultSampleRate);
                minBufferSize = jmin (inputDevice->minBufferSize, outputDevice->minBufferSize);
                defaultBufferSize = jmax (inputDevice->defaultBufferSize, outputDevice->defaultBufferSize);
                sampleRates = inputDevice->rates;
                sampleRates.removeValuesNotIn (outputDevice->rates);
            }
            else
            {
                WASAPIDeviceBase* const d = inputDevice != 0 ? (WASAPIDeviceBase*) inputDevice : (WASAPIDeviceBase*) outputDevice;
                defaultSampleRate = d->defaultSampleRate;
                minBufferSize = d->minBufferSize;
                defaultBufferSize = d->defaultBufferSize;
                sampleRates = d->rates;
            }

            // Offer the default and minimum sizes plus a spread of common sizes
            // between 64 and 2048 samples.
            IntegerElementComparator<int> comparator;
            bufferSizes.addSorted (comparator, defaultBufferSize);

            if (minBufferSize != defaultBufferSize)
                bufferSizes.addSorted (comparator, minBufferSize);

            int n = 64;
            for (int i = 0; i < 40; ++i)
            {
                if (n >= minBufferSize && n <= 2048 && ! bufferSizes.contains (n))
                    bufferSizes.addSorted (comparator, n);

                n += (n < 512) ? 32 : (n < 1024 ? 64 : 128);
            }

            return true;
        }

        return false;
    }

    const StringArray getOutputChannelNames()
    {
        StringArray outChannels;

        if (outputDevice != 0)
            for (int i = 1; i <= outputDevice->actualNumChannels; ++i)
                outChannels.add ("Output channel " + String (i));

        return outChannels;
    }

    const StringArray getInputChannelNames()
    {
        StringArray inChannels;

        if (inputDevice != 0)
            for (int i = 1; i <= inputDevice->actualNumChannels; ++i)
                inChannels.add ("Input channel " + String (i));

        return inChannels;
    }

    int getNumSampleRates()                             { return sampleRates.size(); }
    double getSampleRate (int index)                    { return sampleRates [index]; }
    int getNumBufferSizesAvailable()                    { return bufferSizes.size(); }
    int getBufferSizeSamples (int index)                { return bufferSizes [index]; }
    int getDefaultBufferSize()                          { return defaultBufferSize; }

    int getCurrentBufferSizeSamples()                   { return currentBufferSizeSamples; }
    double getCurrentSampleRate()                       { return currentSampleRate; }
    int getCurrentBitDepth()                            { return 32; }
    int getOutputLatencyInSamples()                     { return latencyOut; }
    int getInputLatencyInSamples()                      { return latencyIn; }

    const BigInteger getActiveOutputChannels() const    { return outputDevice != 0 ? outputDevice->channels : BigInteger(); }
    const BigInteger getActiveInputChannels() const     { return inputDevice != 0 ? inputDevice->channels : BigInteger(); }

    const String getLastError()                         { return lastError; }

    /** Opens both endpoints at a common rate/size, starts the audio thread and
        the clients. Returns an empty string on success, an error message otherwise.
    */
    const String open (const BigInteger& inputChannels, const BigInteger& outputChannels,
                       double sampleRate, int bufferSizeSamples)
    {
        close();
        lastError = String::empty;

        if (sampleRates.size() == 0 && inputDevice != 0 && outputDevice != 0)
        {
            lastError = "The input and output devices don't share a common sample rate!";
            return lastError;
        }

        currentBufferSizeSamples = bufferSizeSamples <= 0 ? defaultBufferSize : jmax (bufferSizeSamples, minBufferSize);
        currentSampleRate = sampleRate > 0 ? sampleRate : defaultSampleRate;

        if (inputDevice != 0 && ! inputDevice->open (currentSampleRate, inputChannels))
        {
            lastError = "Couldn't open the input device!";
            return lastError;
        }

        if (outputDevice != 0 && ! outputDevice->open (currentSampleRate, outputChannels))
        {
            close();
            lastError = "Couldn't open the output device!";
            return lastError;
        }

        // Make sure no stale event signals are pending before the thread starts waiting.
        if (inputDevice != 0)
            ResetEvent (inputDevice->clientEvent);
        if (outputDevice != 0)
            ResetEvent (outputDevice->clientEvent);

        startThread (8);
        Thread::sleep (5);  // give the thread a moment to get going

        if (inputDevice != 0 && inputDevice->client != 0)
        {
            latencyIn = inputDevice->latencySamples + inputDevice->actualBufferSize + inputDevice->minBufferSize;
            HRESULT hr = inputDevice->client->Start();
            logFailure (hr); //xxx handle this
        }

        if (outputDevice != 0 && outputDevice->client != 0)
        {
            latencyOut = outputDevice->latencySamples + outputDevice->actualBufferSize + outputDevice->minBufferSize;
            HRESULT hr = outputDevice->client->Start();
            logFailure (hr); //xxx handle this
        }

        isOpen_ = true;
        return lastError;
    }

    void close()
    {
        stop();

        // Signal the events so the audio thread wakes up and can exit promptly.
        if (inputDevice != 0)
            SetEvent (inputDevice->clientEvent);
        if (outputDevice != 0)
            SetEvent (outputDevice->clientEvent);

        stopThread (5000);

        if (inputDevice != 0)
            inputDevice->close();
        if (outputDevice != 0)
            outputDevice->close();

        isOpen_ = false;
    }

    bool isOpen()       { return isOpen_ && isThreadRunning(); }
    bool isPlaying()    { return isStarted && isOpen_ && isThreadRunning(); }

    /** Registers the callback and marks the device as started; the audio
        thread picks the callback up under startStopLock.
    */
    void start (AudioIODeviceCallback* call)
    {
        if (isOpen_ && call != 0 && ! isStarted)
        {
            if (! isThreadRunning())
            {
                // something's gone wrong and the thread's stopped..
                isOpen_ = false;
                return;
            }

            call->audioDeviceAboutToStart (this);

            const ScopedLock sl (startStopLock);
            callback = call;
            isStarted = true;
        }
    }

    void stop()
    {
        if (isStarted)
        {
            // Clear isStarted under the lock, then notify the callback outside it.
            AudioIODeviceCallback* const callbackLocal = callback;

            {
                const ScopedLock sl (startStopLock);
                isStarted = false;
            }

            if (callbackLocal != 0)
                callbackLocal->audioDeviceStopped();
        }
    }

    /** Asks avrt.dll (if available) to give this thread pro-audio MMCSS scheduling. */
    void setMMThreadPriority()
    {
        DynamicLibraryLoader dll ("avrt.dll");
        DynamicLibraryImport (AvSetMmThreadCharacteristics, avSetMmThreadCharacteristics, HANDLE, dll, (LPCTSTR, LPDWORD))
        DynamicLibraryImport (AvSetMmThreadPriority, avSetMmThreadPriority, HANDLE, dll, (HANDLE, AVRT_PRIORITY))

        if (avSetMmThreadCharacteristics != 0 && avSetMmThreadPriority != 0)
        {
            DWORD dummy = 0;
            HANDLE h = avSetMmThreadCharacteristics (_T("Pro Audio"), &dummy);

            if (h != 0)
                avSetMmThreadPriority (h, AVRT_PRIORITY_NORMAL);
        }
    }

    /** The audio thread: wait for the client event(s), pull input, run the
        callback, push output, until asked to exit.
    */
    void run()
    {
        setMMThreadPriority();

        const int bufferSize = currentBufferSizeSamples;

        HANDLE events[2];
        int numEvents = 0;
        if (inputDevice != 0)
            events [numEvents++] = inputDevice->clientEvent;
        if (outputDevice != 0)
            events [numEvents++] = outputDevice->clientEvent;

        const int numInputBuffers = getActiveInputChannels().countNumberOfSetBits();
        const int numOutputBuffers = getActiveOutputChannels().countNumberOfSetBits();

        AudioSampleBuffer ins (jmax (1, numInputBuffers), bufferSize + 32);
        AudioSampleBuffer outs (jmax (1, numOutputBuffers), bufferSize + 32);
        float** const inputBuffers = ins.getArrayOfChannels();
        float** const outputBuffers = outs.getArrayOfChannels();
        ins.clear();

        while (! threadShouldExit())
        {
            // NOTE(review): the exclusive-mode branches dereference inputDevice
            // (and later outputDevice) without a null check - confirm exclusive
            // mode is only ever used with both endpoints present.
            const DWORD result = useExclusiveMode ? WaitForSingleObject (inputDevice->clientEvent, 1000)
                                                  : WaitForMultipleObjects (numEvents, events, true, 1000);
            if (result == WAIT_TIMEOUT)
                continue;

            if (threadShouldExit())
                break;

            if (inputDevice != 0)
                inputDevice->copyBuffers (inputBuffers, numInputBuffers, bufferSize, *this);

            // Make the callback..
            {
                const ScopedLock sl (startStopLock);

                if (isStarted)
                {
                    // callback is non-null whenever isStarted is true (see start()).
                    JUCE_TRY
                    {
                        callback->audioDeviceIOCallback ((const float**) inputBuffers,
                                                         numInputBuffers,
                                                         outputBuffers,
                                                         numOutputBuffers,
                                                         bufferSize);
                    }
                    JUCE_CATCH_EXCEPTION
                }
                else
                {
                    outs.clear();  // not started: emit silence
                }
            }

            if (useExclusiveMode && WaitForSingleObject (outputDevice->clientEvent, 1000) == WAIT_TIMEOUT)
                continue;

            if (outputDevice != 0)
                outputDevice->copyBuffers ((const float**) outputBuffers, numOutputBuffers, bufferSize, *this);
        }
    }

    //==============================================================================
    juce_UseDebuggingNewOperator

    //==============================================================================
    String outputDeviceId, inputDeviceId;
    String lastError;

private:
    // Device stats...
    WASAPIInputDevice* inputDevice;
    WASAPIOutputDevice* outputDevice;
    const bool useExclusiveMode;
    double defaultSampleRate;
    int minBufferSize, defaultBufferSize;
    int latencyIn, latencyOut;
    Array <double> sampleRates;
    Array <int> bufferSizes;

    // Active state...
    bool isOpen_, isStarted;
    int currentBufferSizeSamples;
    double currentSampleRate;
    AudioIODeviceCallback* callback;    // non-null only while isStarted; guarded by startStopLock
    CriticalSection startStopLock;

    //==============================================================================
    /** Enumerates the active endpoints and creates the input/output device
        objects whose IDs match the requested ones.
    */
    bool createDevices()
    {
        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! OK (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator), CLSCTX_INPROC_SERVER)))
            return false;

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        if (! OK (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection)))
            return false;

        UINT32 numDevices = 0;
        if (! OK (deviceCollection->GetCount (&numDevices)))
            return false;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! OK (deviceCollection->Item (i, &device)))
                continue;

            const String deviceId (wasapi_getDeviceID (device));
            if (deviceId.isEmpty())
                continue;

            const EDataFlow flow = wasapi_getDataFlow (device);

            if (deviceId == inputDeviceId && flow == eCapture)
                inputDevice = new WASAPIInputDevice (device, useExclusiveMode);
            else if (deviceId == outputDeviceId && flow == eRender)
                outputDevice = new WASAPIOutputDevice (device, useExclusiveMode);
        }

        // Each requested endpoint must have been found and have probed cleanly.
        return (outputDeviceId.isEmpty() || (outputDevice != 0 && outputDevice->isOk()))
            && (inputDeviceId.isEmpty() || (inputDevice != 0 && inputDevice->isOk()));
    }

    //==============================================================================
    // Non-copyable.
    WASAPIAudioIODevice (const WASAPIAudioIODevice&);
    WASAPIAudioIODevice& operator= (const WASAPIAudioIODevice&);
};
//==============================================================================
/** AudioIODeviceType for the Windows WASAPI backend ("Windows Audio").

    Enumerates the system's active MMDevice render/capture endpoints and
    creates WASAPIAudioIODevice instances for them. scanForDevices() must be
    called before any of the query methods (they jassert on hasScanned).
*/
class WASAPIAudioIODeviceType : public AudioIODeviceType
{
public:
    WASAPIAudioIODeviceType()
        : AudioIODeviceType ("Windows Audio"),
          hasScanned (false)
    {
    }

    ~WASAPIAudioIODeviceType()
    {
    }

    //==============================================================================
    /** Re-enumerates all active endpoints, rebuilding the parallel name/id lists.
        The system default render/capture endpoints are inserted at index 0.
    */
    void scanForDevices()
    {
        hasScanned = true;

        outputDeviceNames.clear();
        inputDeviceNames.clear();
        outputDeviceIds.clear();
        inputDeviceIds.clear();

        ComSmartPtr <IMMDeviceEnumerator> enumerator;
        if (! OK (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator), CLSCTX_INPROC_SERVER)))
            return;

        // Fetch the default endpoint ids first, so they can be recognised
        // during the loop below and placed at the front of the arrays.
        const String defaultRenderer = getDefaultEndpoint (enumerator, false);
        const String defaultCapture = getDefaultEndpoint (enumerator, true);

        ComSmartPtr <IMMDeviceCollection> deviceCollection;
        UINT32 numDevices = 0;

        if (! (OK (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection))
                && OK (deviceCollection->GetCount (&numDevices))))
            return;

        for (UINT32 i = 0; i < numDevices; ++i)
        {
            ComSmartPtr <IMMDevice> device;
            if (! OK (deviceCollection->Item (i, &device)))
                continue;

            const String deviceId (wasapi_getDeviceID (device));

            // Defensive re-check of the state, even though only
            // DEVICE_STATE_ACTIVE endpoints were requested above.
            DWORD state = 0;
            if (! OK (device->GetState (&state)))
                continue;

            if (state != DEVICE_STATE_ACTIVE)
                continue;

            String name;
            {
                ComSmartPtr <IPropertyStore> properties;
                if (! OK (device->OpenPropertyStore (STGM_READ, &properties)))
                    continue;

                // PROPVARIANT protocol: init before GetValue, clear afterwards
                // so the string it owns is released.
                PROPVARIANT value;
                PropVariantInit (&value);
                if (OK (properties->GetValue (PKEY_Device_FriendlyName, &value)))
                    name = value.pwszVal;
                PropVariantClear (&value);
            }

            const EDataFlow flow = wasapi_getDataFlow (device);

            if (flow == eRender)
            {
                // Default device goes to the front; index -1 appends the rest
                // (JUCE's Array/StringArray insert treats out-of-range as append).
                const int index = (deviceId == defaultRenderer) ? 0 : -1;
                outputDeviceIds.insert (index, deviceId);
                outputDeviceNames.insert (index, name);
            }
            else if (flow == eCapture)
            {
                const int index = (deviceId == defaultCapture) ? 0 : -1;
                inputDeviceIds.insert (index, deviceId);
                inputDeviceNames.insert (index, name);
            }
        }

        // Disambiguate endpoints that share the same friendly name.
        inputDeviceNames.appendNumbersToDuplicates (false, false);
        outputDeviceNames.appendNumbersToDuplicates (false, false);
    }

    const StringArray getDeviceNames (bool wantInputNames) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        return wantInputNames ? inputDeviceNames
                              : outputDeviceNames;
    }

    /** The default endpoint is always placed at index 0 by scanForDevices(). */
    int getDefaultDeviceIndex (bool /*forInput*/) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this
        return 0;
    }

    int getIndexOfDevice (AudioIODevice* device, bool asInput) const
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        WASAPIAudioIODevice* const d = dynamic_cast <WASAPIAudioIODevice*> (device);
        return d == 0 ? -1 : (asInput ? inputDeviceIds.indexOf (d->inputDeviceId)
                                      : outputDeviceIds.indexOf (d->outputDeviceId));
    }

    bool hasSeparateInputsAndOutputs() const { return true; }

    /** Creates a device for the given output/input names.

        Returns 0 when neither name is known, or when the device fails to
        initialise. Note that shared mode is hard-coded here
        (useExclusiveMode == false).
    */
    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        jassert (hasScanned); // need to call scanForDevices() before doing this

        const bool useExclusiveMode = false;
        ScopedPointer<WASAPIAudioIODevice> device;

        const int outputIndex = outputDeviceNames.indexOf (outputDeviceName);
        const int inputIndex = inputDeviceNames.indexOf (inputDeviceName);

        if (outputIndex >= 0 || inputIndex >= 0)
        {
            // An index of -1 yields an empty id string (JUCE's StringArray
            // operator[] returns an empty string out-of-range), which the
            // device treats as "no endpoint on that side".
            device = new WASAPIAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                            : inputDeviceName,
                                              outputDeviceIds [outputIndex],
                                              inputDeviceIds [inputIndex],
                                              useExclusiveMode);

            if (! device->initialise())
                device = 0;
        }

        return device.release();
    }

    //==============================================================================
    juce_UseDebuggingNewOperator

    // Parallel arrays: names[i] corresponds to ids[i]; defaults sit at index 0.
    StringArray outputDeviceNames, outputDeviceIds;
    StringArray inputDeviceNames, inputDeviceIds;

private:
    bool hasScanned; // set by scanForDevices(); guarded by jasserts in the query methods

    //==============================================================================
    /** Returns the id of the system default render (forCapture == false) or
        capture (forCapture == true) endpoint, or an empty string on failure.
        The id buffer returned by IMMDevice::GetId() must be freed with
        CoTaskMemFree, and the raw IMMDevice* must be Released manually.
    */
    static const String getDefaultEndpoint (IMMDeviceEnumerator* const enumerator, const bool forCapture)
    {
        String s;
        IMMDevice* dev = 0;

        if (OK (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
                                                     eMultimedia, &dev)))
        {
            WCHAR* deviceId = 0;

            if (OK (dev->GetId (&deviceId)))
            {
                s = String (deviceId);
                CoTaskMemFree (deviceId);
            }

            dev->Release();
        }

        return s;
    }

    //==============================================================================
    // Deliberately declared but never defined: pre-C++11 non-copyable idiom.
    WASAPIAudioIODeviceType (const WASAPIAudioIODeviceType&);
    WASAPIAudioIODeviceType& operator= (const WASAPIAudioIODeviceType&);
};
  899. //==============================================================================
  900. AudioIODeviceType* juce_createAudioIODeviceType_WASAPI()
  901. {
  902. return new WASAPIAudioIODeviceType();
  903. }
  904. #undef logFailure
  905. #undef OK
  906. #endif