You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

8204 lines
257KB

  1. /************************************************************************/
  2. /*! \class RtAudio
  3. \brief Realtime audio i/o C++ classes.
  4. RtAudio provides a common API (Application Programming Interface)
  5. for realtime audio input/output across Linux (native ALSA, Jack,
  6. and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
  7. (DirectSound and ASIO) operating systems.
  8. RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
  9. RtAudio: a realtime audio i/o C++ class
  10. Copyright (c) 2001-2004 Gary P. Scavone
  11. Permission is hereby granted, free of charge, to any person
  12. obtaining a copy of this software and associated documentation files
  13. (the "Software"), to deal in the Software without restriction,
  14. including without limitation the rights to use, copy, modify, merge,
  15. publish, distribute, sublicense, and/or sell copies of the Software,
  16. and to permit persons to whom the Software is furnished to do so,
  17. subject to the following conditions:
  18. The above copyright notice and this permission notice shall be
  19. included in all copies or substantial portions of the Software.
  20. Any person wishing to distribute modifications to the Software is
  21. requested to send the modifications to the original developer so that
  22. they can be incorporated into the canonical version.
  23. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  24. EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  25. MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
  26. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
  27. ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
  28. CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  29. WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  30. */
  31. /************************************************************************/
  32. // RtAudio: Version 3.0, 11 March 2004
  33. #include "RtAudio.h"
  34. #include <iostream>
// Static variable definitions.
// Candidate sample rates probed by the API subclasses.  NOTE: the
// initializer below must contain exactly MAX_SAMPLE_RATES (14) entries;
// keep the two definitions in sync if rates are ever added.
const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
const unsigned int RtApi::SAMPLE_RATES[] = {
  4000, 5512, 8000, 9600, 11025, 16000, 22050,
  32000, 44100, 48000, 88200, 96000, 176400, 192000
};
  41. #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  42. #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  43. #define MUTEX_DESTROY(A) DeleteCriticalSection(A);
  44. #define MUTEX_LOCK(A) EnterCriticalSection(A)
  45. #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
  46. #else // pthread API
  47. #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  48. #define MUTEX_DESTROY(A) pthread_mutex_destroy(A);
  49. #define MUTEX_LOCK(A) pthread_mutex_lock(A)
  50. #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
  51. #endif
  52. // *************************************************** //
  53. //
  54. // Public common (OS-independent) methods.
  55. //
  56. // *************************************************** //
// Default constructor: instantiate a compiled audio API backend (the
// one requested via 'api', or the "best" available when api == 0)
// without opening a stream.
RtAudio :: RtAudio( RtAudioApi api )
{
  initialize( api );
}
  61. RtAudio :: RtAudio( int outputDevice, int outputChannels,
  62. int inputDevice, int inputChannels,
  63. RtAudioFormat format, int sampleRate,
  64. int *bufferSize, int numberOfBuffers, RtAudioApi api )
  65. {
  66. initialize( api );
  67. try {
  68. rtapi_->openStream( outputDevice, outputChannels,
  69. inputDevice, inputChannels,
  70. format, sampleRate,
  71. bufferSize, numberOfBuffers );
  72. }
  73. catch (RtError &exception) {
  74. // Deallocate the RtApi instance.
  75. delete rtapi_;
  76. throw exception;
  77. }
  78. }
// Destructor: release the backend instance.  The RtApi subclass
// destructors visible in this file close any open stream themselves.
RtAudio :: ~RtAudio()
{
  delete rtapi_;
}
// Open a stream on the selected backend.  This is a pure delegation to
// RtApi::openStream(); all parameter validation and device probing
// happens there.  bufferSize is in/out: the backend may adjust it.
void RtAudio :: openStream( int outputDevice, int outputChannels,
                            int inputDevice, int inputChannels,
                            RtAudioFormat format, int sampleRate,
                            int *bufferSize, int numberOfBuffers )
{
  rtapi_->openStream( outputDevice, outputChannels, inputDevice,
                      inputChannels, format, sampleRate,
                      bufferSize, numberOfBuffers );
}
// Select and construct the concrete RtApi backend for this process.
// With api > 0 the exact backend is required (throws INVALID_PARAMETER
// if it was not compiled in); with api == 0 the compiled backends are
// tried in a fixed preference order until one finds devices.  Throws
// NO_DEVICES_FOUND when every compiled backend fails.
void RtAudio::initialize( RtAudioApi api )
{
  // No backend yet; a successful constructor below fills this in.
  rtapi_ = 0;

  // First look for a compiled match to a specified API value. If one
  // of these constructors throws an error, it will be passed up the
  // inheritance chain.
#if defined(__LINUX_JACK__)
  if ( api == LINUX_JACK )
    rtapi_ = new RtApiJack();
#endif
#if defined(__LINUX_ALSA__)
  if ( api == LINUX_ALSA )
    rtapi_ = new RtApiAlsa();
#endif
#if defined(__LINUX_OSS__)
  if ( api == LINUX_OSS )
    rtapi_ = new RtApiOss();
#endif
#if defined(__WINDOWS_ASIO__)
  if ( api == WINDOWS_ASIO )
    rtapi_ = new RtApiAsio();
#endif
#if defined(__WINDOWS_DS__)
  if ( api == WINDOWS_DS )
    rtapi_ = new RtApiDs();
#endif
#if defined(__IRIX_AL__)
  if ( api == IRIX_AL )
    rtapi_ = new RtApiAl();
#endif
#if defined(__MACOSX_CORE__)
  if ( api == MACOSX_CORE )
    rtapi_ = new RtApiCore();
#endif
  if ( rtapi_ ) return;

  if ( api > 0 ) {
    // No compiled support for specified API value.
    throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
  }

  // No specified API ... search for "best" option.
  try {
#if defined(__LINUX_JACK__)
    rtapi_ = new RtApiJack();
#elif defined(__WINDOWS_ASIO__)
    rtapi_ = new RtApiAsio();
#elif defined(__IRIX_AL__)
    rtapi_ = new RtApiAl();
#elif defined(__MACOSX_CORE__)
    rtapi_ = new RtApiCore();
#else
    ;
#endif
  }
  catch (RtError &) {
#if defined(__RTAUDIO_DEBUG__)
    fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
#endif
    rtapi_ = 0;
  }

  if ( rtapi_ ) return;

  // Try second API support.
  // NOTE(review): this 'rtapi_ == 0' guard (and the one below) is
  // always true here because the preceding line already returned when
  // rtapi_ was set; kept unchanged in this documentation-only pass.
  if ( rtapi_ == 0 ) {
    try {
#if defined(__LINUX_ALSA__)
      rtapi_ = new RtApiAlsa();
#elif defined(__WINDOWS_DS__)
      rtapi_ = new RtApiDs();
#else
      ;
#endif
    }
    catch (RtError &) {
#if defined(__RTAUDIO_DEBUG__)
      fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
#endif
      rtapi_ = 0;
    }
  }
  if ( rtapi_ ) return;

  // Try third API support (OSS is the last-resort Linux backend).
  if ( rtapi_ == 0 ) {
#if defined(__LINUX_OSS__)
    try {
      rtapi_ = new RtApiOss();
    }
    catch (RtError &error) {
      rtapi_ = 0;
    }
#else
    ;
#endif
  }

  if ( rtapi_ == 0 ) {
    // No devices found.
    throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
  }
}
// Base-class constructor: mark the stream slot unused and initialize
// the mutex that serializes stream operations on this instance.
RtApi :: RtApi()
{
  stream_.mode = UNINITIALIZED;
  stream_.apiHandle = 0;
  MUTEX_INITIALIZE(&stream_.mutex);
}
// Base-class destructor: release the stream mutex.  Concrete subclass
// destructors close any open stream before this runs.
RtApi :: ~RtApi()
{
  MUTEX_DESTROY(&stream_.mutex);
}
  199. void RtApi :: openStream( int outputDevice, int outputChannels,
  200. int inputDevice, int inputChannels,
  201. RtAudioFormat format, int sampleRate,
  202. int *bufferSize, int numberOfBuffers )
  203. {
  204. if ( stream_.mode != UNINITIALIZED ) {
  205. sprintf(message_, "RtApi: only one open stream allowed per class instance.");
  206. error(RtError::INVALID_STREAM);
  207. }
  208. if (outputChannels < 1 && inputChannels < 1) {
  209. sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero.");
  210. error(RtError::INVALID_PARAMETER);
  211. }
  212. if ( formatBytes(format) == 0 ) {
  213. sprintf(message_,"RtApi: 'format' parameter value is undefined.");
  214. error(RtError::INVALID_PARAMETER);
  215. }
  216. if ( outputChannels > 0 ) {
  217. if (outputDevice > nDevices_ || outputDevice < 0) {
  218. sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
  219. error(RtError::INVALID_PARAMETER);
  220. }
  221. }
  222. if ( inputChannels > 0 ) {
  223. if (inputDevice > nDevices_ || inputDevice < 0) {
  224. sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
  225. error(RtError::INVALID_PARAMETER);
  226. }
  227. }
  228. clearStreamInfo();
  229. bool result = FAILURE;
  230. int device, defaultDevice = 0;
  231. StreamMode mode;
  232. int channels;
  233. if ( outputChannels > 0 ) {
  234. mode = OUTPUT;
  235. channels = outputChannels;
  236. if ( outputDevice == 0 ) { // Try default device first.
  237. defaultDevice = getDefaultOutputDevice();
  238. device = defaultDevice;
  239. }
  240. else
  241. device = outputDevice - 1;
  242. for ( int i=-1; i<nDevices_; i++ ) {
  243. if ( i >= 0 ) {
  244. if ( i == defaultDevice ) continue;
  245. device = i;
  246. }
  247. if (devices_[device].probed == false) {
  248. // If the device wasn't successfully probed before, try it
  249. // (again) now.
  250. clearDeviceInfo(&devices_[device]);
  251. probeDeviceInfo(&devices_[device]);
  252. }
  253. if ( devices_[device].probed )
  254. result = probeDeviceOpen(device, mode, channels, sampleRate,
  255. format, bufferSize, numberOfBuffers);
  256. if ( result == SUCCESS ) break;
  257. if ( outputDevice > 0 ) break;
  258. clearStreamInfo();
  259. }
  260. }
  261. if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {
  262. mode = INPUT;
  263. channels = inputChannels;
  264. if ( inputDevice == 0 ) { // Try default device first.
  265. defaultDevice = getDefaultInputDevice();
  266. device = defaultDevice;
  267. }
  268. else
  269. device = inputDevice - 1;
  270. for (int i=-1; i<nDevices_; i++) {
  271. if (i >= 0 ) {
  272. if ( i == defaultDevice ) continue;
  273. device = i;
  274. }
  275. if (devices_[device].probed == false) {
  276. // If the device wasn't successfully probed before, try it
  277. // (again) now.
  278. clearDeviceInfo(&devices_[device]);
  279. probeDeviceInfo(&devices_[device]);
  280. }
  281. if ( devices_[device].probed )
  282. result = probeDeviceOpen(device, mode, channels, sampleRate,
  283. format, bufferSize, numberOfBuffers);
  284. if (result == SUCCESS) break;
  285. if ( outputDevice > 0 ) break;
  286. }
  287. }
  288. if ( result == SUCCESS )
  289. return;
  290. // If we get here, all attempted probes failed. Close any opened
  291. // devices and clear the stream structure.
  292. if ( stream_.mode != UNINITIALIZED ) closeStream();
  293. clearStreamInfo();
  294. if ( ( outputDevice == 0 && outputChannels > 0 )
  295. || ( inputDevice == 0 && inputChannels > 0 ) )
  296. sprintf(message_,"RtApi: no devices found for given stream parameters.");
  297. else
  298. sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters.");
  299. error(RtError::INVALID_PARAMETER);
  300. return;
  301. }
  302. int RtApi :: getDeviceCount(void)
  303. {
  304. return devices_.size();
  305. }
  306. RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
  307. {
  308. if (device > (int) devices_.size() || device < 1) {
  309. sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
  310. error(RtError::INVALID_DEVICE);
  311. }
  312. RtAudioDeviceInfo info;
  313. int deviceIndex = device - 1;
  314. // If the device wasn't successfully probed before, try it now (or again).
  315. if (devices_[deviceIndex].probed == false) {
  316. clearDeviceInfo(&devices_[deviceIndex]);
  317. probeDeviceInfo(&devices_[deviceIndex]);
  318. }
  319. info.name.append( devices_[deviceIndex].name );
  320. info.probed = devices_[deviceIndex].probed;
  321. if ( info.probed == true ) {
  322. info.outputChannels = devices_[deviceIndex].maxOutputChannels;
  323. info.inputChannels = devices_[deviceIndex].maxInputChannels;
  324. info.duplexChannels = devices_[deviceIndex].maxDuplexChannels;
  325. for (unsigned int i=0; i<devices_[deviceIndex].sampleRates.size(); i++)
  326. info.sampleRates.push_back( devices_[deviceIndex].sampleRates[i] );
  327. info.nativeFormats = devices_[deviceIndex].nativeFormats;
  328. if ( (deviceIndex == getDefaultOutputDevice()) ||
  329. (deviceIndex == getDefaultInputDevice()) )
  330. info.isDefault = true;
  331. }
  332. return info;
  333. }
// Expose the interleaved user I/O buffer of the open stream.
// verifyStream() raises an error if no stream is currently open.
char * const RtApi :: getStreamBuffer(void)
{
  verifyStream();
  return stream_.userBuffer;
}
// Default input device index (0-based).  Base implementation returns 0;
// subclasses with a real notion of "default device" override this.
int RtApi :: getDefaultInputDevice(void)
{
  // Should be implemented in subclasses if appropriate.
  return 0;
}
// Default output device index (0-based).  Base implementation returns 0;
// subclasses with a real notion of "default device" override this.
int RtApi :: getDefaultOutputDevice(void)
{
  // Should be implemented in subclasses if appropriate.
  return 0;
}
// Placeholder: every concrete API subclass must override this to
// release its device handles and stream buffers.
void RtApi :: closeStream(void)
{
  // MUST be implemented in subclasses!
}
// Placeholder: subclasses fill in the RtApiDevice capability fields
// (channel counts, formats, sample rates) for one device.
void RtApi :: probeDeviceInfo( RtApiDevice *info )
{
  // MUST be implemented in subclasses!
}
// Placeholder: subclasses attempt to open 'device' for the given mode
// and parameters, returning SUCCESS or FAILURE.  The base class always
// fails so an unimplemented backend can never appear to open a stream.
bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
                               int sampleRate, RtAudioFormat format,
                               int *bufferSize, int numberOfBuffers )
{
  // MUST be implemented in subclasses!
  return FAILURE;
}
  364. // *************************************************** //
  365. //
  366. // OS/API-specific methods.
  367. //
  368. // *************************************************** //
  369. #if defined(__LINUX_OSS__)
  370. #include <unistd.h>
  371. #include <sys/stat.h>
  372. #include <sys/types.h>
  373. #include <sys/ioctl.h>
  374. #include <unistd.h>
  375. #include <fcntl.h>
  376. #include <sys/soundcard.h>
  377. #include <errno.h>
  378. #include <math.h>
  379. #define DAC_NAME "/dev/dsp"
  380. #define MAX_DEVICES 16
  381. #define MAX_CHANNELS 16
  382. extern "C" void *ossCallbackHandler(void * ptr);
// Constructor: enumerate the OSS device files; raise NO_DEVICES_FOUND
// if the scan turned up nothing usable.
RtApiOss :: RtApiOss()
{
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}
// Destructor: close any open stream so the OSS device file descriptors
// are released before the base class tears down the mutex.
RtApiOss :: ~RtApiOss()
{
  if ( stream_.mode != UNINITIALIZED )
    closeStream();
}
  396. void RtApiOss :: initialize(void)
  397. {
  398. // Count cards and devices
  399. nDevices_ = 0;
  400. // We check /dev/dsp before probing devices. /dev/dsp is supposed to
  401. // be a link to the "default" audio device, of the form /dev/dsp0,
  402. // /dev/dsp1, etc... However, I've seen many cases where /dev/dsp was a
  403. // real device, so we need to check for that. Also, sometimes the
  404. // link is to /dev/dspx and other times just dspx. I'm not sure how
  405. // the latter works, but it does.
  406. char device_name[16];
  407. struct stat dspstat;
  408. int dsplink = -1;
  409. int i = 0;
  410. if (lstat(DAC_NAME, &dspstat) == 0) {
  411. if (S_ISLNK(dspstat.st_mode)) {
  412. i = readlink(DAC_NAME, device_name, sizeof(device_name));
  413. if (i > 0) {
  414. device_name[i] = '\0';
  415. if (i > 8) { // check for "/dev/dspx"
  416. if (!strncmp(DAC_NAME, device_name, 8))
  417. dsplink = atoi(&device_name[8]);
  418. }
  419. else if (i > 3) { // check for "dspx"
  420. if (!strncmp("dsp", device_name, 3))
  421. dsplink = atoi(&device_name[3]);
  422. }
  423. }
  424. else {
  425. sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
  426. error(RtError::SYSTEM_ERROR);
  427. }
  428. }
  429. }
  430. else {
  431. sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
  432. error(RtError::SYSTEM_ERROR);
  433. }
  434. // The OSS API doesn't provide a routine for determining the number
  435. // of devices. Thus, we'll just pursue a brute force method. The
  436. // idea is to start with /dev/dsp(0) and continue with higher device
  437. // numbers until we reach MAX_DSP_DEVICES. This should tell us how
  438. // many devices we have ... it is not a fullproof scheme, but hopefully
  439. // it will work most of the time.
  440. int fd = 0;
  441. RtApiDevice device;
  442. for (i=-1; i<MAX_DEVICES; i++) {
  443. // Probe /dev/dsp first, since it is supposed to be the default device.
  444. if (i == -1)
  445. sprintf(device_name, "%s", DAC_NAME);
  446. else if (i == dsplink)
  447. continue; // We've aready probed this device via /dev/dsp link ... try next device.
  448. else
  449. sprintf(device_name, "%s%d", DAC_NAME, i);
  450. // First try to open the device for playback, then record mode.
  451. fd = open(device_name, O_WRONLY | O_NONBLOCK);
  452. if (fd == -1) {
  453. // Open device for playback failed ... either busy or doesn't exist.
  454. if (errno != EBUSY && errno != EAGAIN) {
  455. // Try to open for capture
  456. fd = open(device_name, O_RDONLY | O_NONBLOCK);
  457. if (fd == -1) {
  458. // Open device for record failed.
  459. if (errno != EBUSY && errno != EAGAIN)
  460. continue;
  461. else {
  462. sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
  463. error(RtError::WARNING);
  464. // still count it for now
  465. }
  466. }
  467. }
  468. else {
  469. sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
  470. error(RtError::WARNING);
  471. // still count it for now
  472. }
  473. }
  474. if (fd >= 0) close(fd);
  475. device.name.erase();
  476. device.name.append( (const char *)device_name, strlen(device_name)+1);
  477. devices_.push_back(device);
  478. nDevices_++;
  479. }
  480. }
  481. void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
  482. {
  483. int i, fd, channels, mask;
  484. // The OSS API doesn't provide a means for probing the capabilities
  485. // of devices. Thus, we'll just pursue a brute force method.
  486. // First try for playback
  487. fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
  488. if (fd == -1) {
  489. // Open device failed ... either busy or doesn't exist
  490. if (errno == EBUSY || errno == EAGAIN)
  491. sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
  492. info->name.c_str());
  493. else
  494. sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
  495. error(RtError::DEBUG_WARNING);
  496. goto capture_probe;
  497. }
  498. // We have an open device ... see how many channels it can handle
  499. for (i=MAX_CHANNELS; i>0; i--) {
  500. channels = i;
  501. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
  502. // This would normally indicate some sort of hardware error, but under ALSA's
  503. // OSS emulation, it sometimes indicates an invalid channel value. Further,
  504. // the returned channel value is not changed. So, we'll ignore the possible
  505. // hardware error.
  506. continue; // try next channel number
  507. }
  508. // Check to see whether the device supports the requested number of channels
  509. if (channels != i ) continue; // try next channel number
  510. // If here, we found the largest working channel value
  511. break;
  512. }
  513. info->maxOutputChannels = i;
  514. // Now find the minimum number of channels it can handle
  515. for (i=1; i<=info->maxOutputChannels; i++) {
  516. channels = i;
  517. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
  518. continue; // try next channel number
  519. // If here, we found the smallest working channel value
  520. break;
  521. }
  522. info->minOutputChannels = i;
  523. close(fd);
  524. capture_probe:
  525. // Now try for capture
  526. fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
  527. if (fd == -1) {
  528. // Open device for capture failed ... either busy or doesn't exist
  529. if (errno == EBUSY || errno == EAGAIN)
  530. sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
  531. info->name.c_str());
  532. else
  533. sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
  534. error(RtError::DEBUG_WARNING);
  535. if (info->maxOutputChannels == 0)
  536. // didn't open for playback either ... device invalid
  537. return;
  538. goto probe_parameters;
  539. }
  540. // We have the device open for capture ... see how many channels it can handle
  541. for (i=MAX_CHANNELS; i>0; i--) {
  542. channels = i;
  543. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
  544. continue; // as above
  545. }
  546. // If here, we found a working channel value
  547. break;
  548. }
  549. info->maxInputChannels = i;
  550. // Now find the minimum number of channels it can handle
  551. for (i=1; i<=info->maxInputChannels; i++) {
  552. channels = i;
  553. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
  554. continue; // try next channel number
  555. // If here, we found the smallest working channel value
  556. break;
  557. }
  558. info->minInputChannels = i;
  559. close(fd);
  560. if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
  561. sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
  562. info->name.c_str());
  563. error(RtError::DEBUG_WARNING);
  564. return;
  565. }
  566. // If device opens for both playback and capture, we determine the channels.
  567. if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
  568. goto probe_parameters;
  569. fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
  570. if (fd == -1)
  571. goto probe_parameters;
  572. ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
  573. ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
  574. if (mask & DSP_CAP_DUPLEX) {
  575. info->hasDuplexSupport = true;
  576. // We have the device open for duplex ... see how many channels it can handle
  577. for (i=MAX_CHANNELS; i>0; i--) {
  578. channels = i;
  579. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
  580. continue; // as above
  581. // If here, we found a working channel value
  582. break;
  583. }
  584. info->maxDuplexChannels = i;
  585. // Now find the minimum number of channels it can handle
  586. for (i=1; i<=info->maxDuplexChannels; i++) {
  587. channels = i;
  588. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
  589. continue; // try next channel number
  590. // If here, we found the smallest working channel value
  591. break;
  592. }
  593. info->minDuplexChannels = i;
  594. }
  595. close(fd);
  596. probe_parameters:
  597. // At this point, we need to figure out the supported data formats
  598. // and sample rates. We'll proceed by openning the device in the
  599. // direction with the maximum number of channels, or playback if
  600. // they are equal. This might limit our sample rate options, but so
  601. // be it.
  602. if (info->maxOutputChannels >= info->maxInputChannels) {
  603. fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
  604. channels = info->maxOutputChannels;
  605. }
  606. else {
  607. fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
  608. channels = info->maxInputChannels;
  609. }
  610. if (fd == -1) {
  611. // We've got some sort of conflict ... abort
  612. sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
  613. info->name.c_str());
  614. error(RtError::DEBUG_WARNING);
  615. return;
  616. }
  617. // We have an open device ... set to maximum channels.
  618. i = channels;
  619. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
  620. // We've got some sort of conflict ... abort
  621. close(fd);
  622. sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
  623. info->name.c_str());
  624. error(RtError::DEBUG_WARNING);
  625. return;
  626. }
  627. if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
  628. close(fd);
  629. sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
  630. info->name.c_str());
  631. error(RtError::DEBUG_WARNING);
  632. return;
  633. }
  634. // Probe the supported data formats ... we don't care about endian-ness just yet.
  635. int format;
  636. info->nativeFormats = 0;
  637. #if defined (AFMT_S32_BE)
  638. // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
  639. if (mask & AFMT_S32_BE) {
  640. format = AFMT_S32_BE;
  641. info->nativeFormats |= RTAUDIO_SINT32;
  642. }
  643. #endif
  644. #if defined (AFMT_S32_LE)
  645. /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
  646. if (mask & AFMT_S32_LE) {
  647. format = AFMT_S32_LE;
  648. info->nativeFormats |= RTAUDIO_SINT32;
  649. }
  650. #endif
  651. if (mask & AFMT_S8) {
  652. format = AFMT_S8;
  653. info->nativeFormats |= RTAUDIO_SINT8;
  654. }
  655. if (mask & AFMT_S16_BE) {
  656. format = AFMT_S16_BE;
  657. info->nativeFormats |= RTAUDIO_SINT16;
  658. }
  659. if (mask & AFMT_S16_LE) {
  660. format = AFMT_S16_LE;
  661. info->nativeFormats |= RTAUDIO_SINT16;
  662. }
  663. // Check that we have at least one supported format
  664. if (info->nativeFormats == 0) {
  665. close(fd);
  666. sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
  667. info->name.c_str());
  668. error(RtError::DEBUG_WARNING);
  669. return;
  670. }
  671. // Set the format
  672. i = format;
  673. if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
  674. close(fd);
  675. sprintf(message_, "RtApiOss: device (%s) error setting data format.",
  676. info->name.c_str());
  677. error(RtError::DEBUG_WARNING);
  678. return;
  679. }
  680. // Probe the supported sample rates.
  681. info->sampleRates.clear();
  682. for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
  683. int speed = SAMPLE_RATES[k];
  684. if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
  685. info->sampleRates.push_back(speed);
  686. }
  687. if (info->sampleRates.size() == 0) {
  688. close(fd);
  689. sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
  690. info->name.c_str());
  691. error(RtError::DEBUG_WARNING);
  692. return;
  693. }
  694. // That's all ... close the device and return
  695. close(fd);
  696. info->probed = true;
  697. return;
  698. }
  699. bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
  700. int sampleRate, RtAudioFormat format,
  701. int *bufferSize, int numberOfBuffers)
  702. {
  703. int buffers, buffer_bytes, device_channels, device_format;
  704. int srate, temp, fd;
  705. int *handle = (int *) stream_.apiHandle;
  706. const char *name = devices_[device].name.c_str();
  707. if (mode == OUTPUT)
  708. fd = open(name, O_WRONLY | O_NONBLOCK);
  709. else { // mode == INPUT
  710. if (stream_.mode == OUTPUT && stream_.device[0] == device) {
  711. // We just set the same device for playback ... close and reopen for duplex (OSS only).
  712. close(handle[0]);
  713. handle[0] = 0;
  714. // First check that the number previously set channels is the same.
  715. if (stream_.nUserChannels[0] != channels) {
  716. sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
  717. goto error;
  718. }
  719. fd = open(name, O_RDWR | O_NONBLOCK);
  720. }
  721. else
  722. fd = open(name, O_RDONLY | O_NONBLOCK);
  723. }
  724. if (fd == -1) {
  725. if (errno == EBUSY || errno == EAGAIN)
  726. sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
  727. name);
  728. else
  729. sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
  730. goto error;
  731. }
  732. // Now reopen in blocking mode.
  733. close(fd);
  734. if (mode == OUTPUT)
  735. fd = open(name, O_WRONLY | O_SYNC);
  736. else { // mode == INPUT
  737. if (stream_.mode == OUTPUT && stream_.device[0] == device)
  738. fd = open(name, O_RDWR | O_SYNC);
  739. else
  740. fd = open(name, O_RDONLY | O_SYNC);
  741. }
  742. if (fd == -1) {
  743. sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
  744. goto error;
  745. }
  746. // Get the sample format mask
  747. int mask;
  748. if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
  749. close(fd);
  750. sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
  751. name);
  752. goto error;
  753. }
  754. // Determine how to set the device format.
  755. stream_.userFormat = format;
  756. device_format = -1;
  757. stream_.doByteSwap[mode] = false;
  758. if (format == RTAUDIO_SINT8) {
  759. if (mask & AFMT_S8) {
  760. device_format = AFMT_S8;
  761. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  762. }
  763. }
  764. else if (format == RTAUDIO_SINT16) {
  765. if (mask & AFMT_S16_NE) {
  766. device_format = AFMT_S16_NE;
  767. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  768. }
  769. #if BYTE_ORDER == LITTLE_ENDIAN
  770. else if (mask & AFMT_S16_BE) {
  771. device_format = AFMT_S16_BE;
  772. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  773. stream_.doByteSwap[mode] = true;
  774. }
  775. #else
  776. else if (mask & AFMT_S16_LE) {
  777. device_format = AFMT_S16_LE;
  778. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  779. stream_.doByteSwap[mode] = true;
  780. }
  781. #endif
  782. }
  783. #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  784. else if (format == RTAUDIO_SINT32) {
  785. if (mask & AFMT_S32_NE) {
  786. device_format = AFMT_S32_NE;
  787. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  788. }
  789. #if BYTE_ORDER == LITTLE_ENDIAN
  790. else if (mask & AFMT_S32_BE) {
  791. device_format = AFMT_S32_BE;
  792. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  793. stream_.doByteSwap[mode] = true;
  794. }
  795. #else
  796. else if (mask & AFMT_S32_LE) {
  797. device_format = AFMT_S32_LE;
  798. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  799. stream_.doByteSwap[mode] = true;
  800. }
  801. #endif
  802. }
  803. #endif
  804. if (device_format == -1) {
  805. // The user requested format is not natively supported by the device.
  806. if (mask & AFMT_S16_NE) {
  807. device_format = AFMT_S16_NE;
  808. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  809. }
  810. #if BYTE_ORDER == LITTLE_ENDIAN
  811. else if (mask & AFMT_S16_BE) {
  812. device_format = AFMT_S16_BE;
  813. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  814. stream_.doByteSwap[mode] = true;
  815. }
  816. #else
  817. else if (mask & AFMT_S16_LE) {
  818. device_format = AFMT_S16_LE;
  819. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  820. stream_.doByteSwap[mode] = true;
  821. }
  822. #endif
  823. #if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  824. else if (mask & AFMT_S32_NE) {
  825. device_format = AFMT_S32_NE;
  826. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  827. }
  828. #if BYTE_ORDER == LITTLE_ENDIAN
  829. else if (mask & AFMT_S32_BE) {
  830. device_format = AFMT_S32_BE;
  831. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  832. stream_.doByteSwap[mode] = true;
  833. }
  834. #else
  835. else if (mask & AFMT_S32_LE) {
  836. device_format = AFMT_S32_LE;
  837. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  838. stream_.doByteSwap[mode] = true;
  839. }
  840. #endif
  841. #endif
  842. else if (mask & AFMT_S8) {
  843. device_format = AFMT_S8;
  844. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  845. }
  846. }
  847. if (stream_.deviceFormat[mode] == 0) {
  848. // This really shouldn't happen ...
  849. close(fd);
  850. sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
  851. name);
  852. goto error;
  853. }
  854. // Determine the number of channels for this device. Note that the
  855. // channel value requested by the user might be < min_X_Channels.
  856. stream_.nUserChannels[mode] = channels;
  857. device_channels = channels;
  858. if (mode == OUTPUT) {
  859. if (channels < devices_[device].minOutputChannels)
  860. device_channels = devices_[device].minOutputChannels;
  861. }
  862. else { // mode == INPUT
  863. if (stream_.mode == OUTPUT && stream_.device[0] == device) {
  864. // We're doing duplex setup here.
  865. if (channels < devices_[device].minDuplexChannels)
  866. device_channels = devices_[device].minDuplexChannels;
  867. }
  868. else {
  869. if (channels < devices_[device].minInputChannels)
  870. device_channels = devices_[device].minInputChannels;
  871. }
  872. }
  873. stream_.nDeviceChannels[mode] = device_channels;
  874. // Attempt to set the buffer size. According to OSS, the minimum
  875. // number of buffers is two. The supposed minimum buffer size is 16
  876. // bytes, so that will be our lower bound. The argument to this
  877. // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
  878. // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
  879. // We'll check the actual value used near the end of the setup
  880. // procedure.
  881. buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
  882. if (buffer_bytes < 16) buffer_bytes = 16;
  883. buffers = numberOfBuffers;
  884. if (buffers < 2) buffers = 2;
  885. temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
  886. if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
  887. close(fd);
  888. sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
  889. name);
  890. goto error;
  891. }
  892. stream_.nBuffers = buffers;
  893. // Set the data format.
  894. temp = device_format;
  895. if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
  896. close(fd);
  897. sprintf(message_, "RtApiOss: error setting data format for device (%s).",
  898. name);
  899. goto error;
  900. }
  901. // Set the number of channels.
  902. temp = device_channels;
  903. if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
  904. close(fd);
  905. sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
  906. temp, name);
  907. goto error;
  908. }
  909. // Set the sample rate.
  910. srate = sampleRate;
  911. temp = srate;
  912. if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
  913. close(fd);
  914. sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
  915. temp, name);
  916. goto error;
  917. }
  918. // Verify the sample rate setup worked.
  919. if (abs(srate - temp) > 100) {
  920. close(fd);
  921. sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
  922. name, temp);
  923. goto error;
  924. }
  925. stream_.sampleRate = sampleRate;
  926. if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
  927. close(fd);
  928. sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
  929. name);
  930. goto error;
  931. }
  932. // Save buffer size (in sample frames).
  933. *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
  934. stream_.bufferSize = *bufferSize;
  935. if (mode == INPUT && stream_.mode == OUTPUT &&
  936. stream_.device[0] == device) {
  937. // We're doing duplex setup here.
  938. stream_.deviceFormat[0] = stream_.deviceFormat[1];
  939. stream_.nDeviceChannels[0] = device_channels;
  940. }
  941. // Allocate the stream handles if necessary and then save.
  942. if ( stream_.apiHandle == 0 ) {
  943. handle = (int *) calloc(2, sizeof(int));
  944. stream_.apiHandle = (void *) handle;
  945. handle[0] = 0;
  946. handle[1] = 0;
  947. }
  948. else {
  949. handle = (int *) stream_.apiHandle;
  950. }
  951. handle[mode] = fd;
  952. // Set flags for buffer conversion
  953. stream_.doConvertBuffer[mode] = false;
  954. if (stream_.userFormat != stream_.deviceFormat[mode])
  955. stream_.doConvertBuffer[mode] = true;
  956. if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
  957. stream_.doConvertBuffer[mode] = true;
  958. // Allocate necessary internal buffers
  959. if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
  960. long buffer_bytes;
  961. if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
  962. buffer_bytes = stream_.nUserChannels[0];
  963. else
  964. buffer_bytes = stream_.nUserChannels[1];
  965. buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  966. if (stream_.userBuffer) free(stream_.userBuffer);
  967. stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  968. if (stream_.userBuffer == NULL) {
  969. close(fd);
  970. sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
  971. name);
  972. goto error;
  973. }
  974. }
  975. if ( stream_.doConvertBuffer[mode] ) {
  976. long buffer_bytes;
  977. bool makeBuffer = true;
  978. if ( mode == OUTPUT )
  979. buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  980. else { // mode == INPUT
  981. buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
  982. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  983. long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  984. if ( buffer_bytes < bytes_out ) makeBuffer = false;
  985. }
  986. }
  987. if ( makeBuffer ) {
  988. buffer_bytes *= *bufferSize;
  989. if (stream_.deviceBuffer) free(stream_.deviceBuffer);
  990. stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
  991. if (stream_.deviceBuffer == NULL) {
  992. close(fd);
  993. sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
  994. name);
  995. goto error;
  996. }
  997. }
  998. }
  999. stream_.device[mode] = device;
  1000. stream_.state = STREAM_STOPPED;
  1001. if ( stream_.mode == OUTPUT && mode == INPUT ) {
  1002. stream_.mode = DUPLEX;
  1003. if (stream_.device[0] == device)
  1004. handle[0] = fd;
  1005. }
  1006. else
  1007. stream_.mode = mode;
  1008. return SUCCESS;
  1009. error:
  1010. if (handle) {
  1011. if (handle[0])
  1012. close(handle[0]);
  1013. free(handle);
  1014. stream_.apiHandle = 0;
  1015. }
  1016. if (stream_.userBuffer) {
  1017. free(stream_.userBuffer);
  1018. stream_.userBuffer = 0;
  1019. }
  1020. error(RtError::WARNING);
  1021. return FAILURE;
  1022. }
void RtApiOss :: closeStream()
{
  // Shut down an open stream and release all OSS resources.
  //
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check instead of calling verifyStream().
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  int *handle = (int *) stream_.apiHandle;

  // If still running, flush the active device(s) before closing.
  if (stream_.state == STREAM_RUNNING) {
    if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
      ioctl(handle[0], SNDCTL_DSP_RESET, 0);
    else
      ioctl(handle[1], SNDCTL_DSP_RESET, 0);
    stream_.state = STREAM_STOPPED;
  }

  // Ask the callback thread to exit and wait for it to finish.
  if (stream_.callbackInfo.usingCallback) {
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
  }

  // Close the device descriptors (0 = playback, 1 = capture) and
  // release the handle structure.
  if (handle) {
    if (handle[0]) close(handle[0]);
    if (handle[1]) close(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  // Free the internal conversion buffers.
  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  // Mark the stream closed.
  stream_.mode = UNINITIALIZED;
}
  1061. void RtApiOss :: startStream()
  1062. {
  1063. verifyStream();
  1064. if (stream_.state == STREAM_RUNNING) return;
  1065. MUTEX_LOCK(&stream_.mutex);
  1066. stream_.state = STREAM_RUNNING;
  1067. // No need to do anything else here ... OSS automatically starts
  1068. // when fed samples.
  1069. MUTEX_UNLOCK(&stream_.mutex);
  1070. }
  1071. void RtApiOss :: stopStream()
  1072. {
  1073. verifyStream();
  1074. if (stream_.state == STREAM_STOPPED) return;
  1075. // Change the state before the lock to improve shutdown response
  1076. // when using a callback.
  1077. stream_.state = STREAM_STOPPED;
  1078. MUTEX_LOCK(&stream_.mutex);
  1079. int err;
  1080. int *handle = (int *) stream_.apiHandle;
  1081. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  1082. err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
  1083. //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
  1084. if (err < -1) {
  1085. sprintf(message_, "RtApiOss: error stopping device (%s).",
  1086. devices_[stream_.device[0]].name.c_str());
  1087. error(RtError::DRIVER_ERROR);
  1088. }
  1089. }
  1090. else {
  1091. err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
  1092. //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
  1093. if (err < -1) {
  1094. sprintf(message_, "RtApiOss: error stopping device (%s).",
  1095. devices_[stream_.device[1]].name.c_str());
  1096. error(RtError::DRIVER_ERROR);
  1097. }
  1098. }
  1099. MUTEX_UNLOCK(&stream_.mutex);
  1100. }
void RtApiOss :: abortStream()
{
  // The basic OSS API provides no separate "discard pending samples"
  // operation here, so aborting simply delegates to stopStream().
  stopStream();
}
  1105. int RtApiOss :: streamWillBlock()
  1106. {
  1107. verifyStream();
  1108. if (stream_.state == STREAM_STOPPED) return 0;
  1109. MUTEX_LOCK(&stream_.mutex);
  1110. int bytes = 0, channels = 0, frames = 0;
  1111. audio_buf_info info;
  1112. int *handle = (int *) stream_.apiHandle;
  1113. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  1114. ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
  1115. bytes = info.bytes;
  1116. channels = stream_.nDeviceChannels[0];
  1117. }
  1118. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  1119. ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
  1120. if (stream_.mode == DUPLEX ) {
  1121. bytes = (bytes < info.bytes) ? bytes : info.bytes;
  1122. channels = stream_.nDeviceChannels[0];
  1123. }
  1124. else {
  1125. bytes = info.bytes;
  1126. channels = stream_.nDeviceChannels[1];
  1127. }
  1128. }
  1129. frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
  1130. frames -= stream_.bufferSize;
  1131. if (frames < 0) frames = 0;
  1132. MUTEX_UNLOCK(&stream_.mutex);
  1133. return frames;
  1134. }
void RtApiOss :: tickStream()
{
  // Process one buffer of audio: run the user callback (in callback
  // mode), then write to and/or read from the OSS device(s), doing
  // format conversion and byte swapping as required.
  verifyStream();

  int stopStream = 0;
  if (stream_.state == STREAM_STOPPED) {
    // In callback mode, sleep so the polling thread doesn't spin
    // while the stream is stopped.
    if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
    return;
  }
  else if (stream_.callbackInfo.usingCallback) {
    // A non-zero callback return value requests that the stream stop.
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  }

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED)
    goto unlock;

  int result, *handle;
  char *buffer;
  int samples;
  RtAudioFormat format;
  handle = (int *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    // Setup parameters and do buffer conversion if necessary.
    if (stream_.doConvertBuffer[0]) {
      // User data is converted into the (wider/other-format) device buffer.
      convertStreamBuffer(OUTPUT);
      buffer = stream_.deviceBuffer;
      samples = stream_.bufferSize * stream_.nDeviceChannels[0];
      format = stream_.deviceFormat[0];
    }
    else {
      buffer = stream_.userBuffer;
      samples = stream_.bufferSize * stream_.nUserChannels[0];
      format = stream_.userFormat;
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[0])
      byteSwapBuffer(buffer, samples, format);

    // Write samples to device.
    result = write(handle[0], buffer, samples * formatBytes(format));

    if (result == -1) {
      // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
      sprintf(message_, "RtApiOss: audio write error for device (%s).",
              devices_[stream_.device[0]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    // Setup parameters.
    if (stream_.doConvertBuffer[1]) {
      buffer = stream_.deviceBuffer;
      samples = stream_.bufferSize * stream_.nDeviceChannels[1];
      format = stream_.deviceFormat[1];
    }
    else {
      buffer = stream_.userBuffer;
      samples = stream_.bufferSize * stream_.nUserChannels[1];
      format = stream_.userFormat;
    }

    // Read samples from device.
    result = read(handle[1], buffer, samples * formatBytes(format));

    if (result == -1) {
      // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
      sprintf(message_, "RtApiOss: audio read error for device (%s).",
              devices_[stream_.device[1]].name.c_str());
      error(RtError::DRIVER_ERROR);
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[1])
      byteSwapBuffer(buffer, samples, format);

    // Do buffer conversion if necessary (device -> user layout).
    if (stream_.doConvertBuffer[1])
      convertStreamBuffer(INPUT);
  }

 unlock:
  MUTEX_UNLOCK(&stream_.mutex);

  // Honor the callback's stop request outside the lock, since
  // stopStream() acquires the same mutex.
  if (stream_.callbackInfo.usingCallback && stopStream)
    this->stopStream();
}
  1213. void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
  1214. {
  1215. verifyStream();
  1216. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  1217. if ( info->usingCallback ) {
  1218. sprintf(message_, "RtApiOss: A callback is already set for this stream!");
  1219. error(RtError::WARNING);
  1220. return;
  1221. }
  1222. info->callback = (void *) callback;
  1223. info->userData = userData;
  1224. info->usingCallback = true;
  1225. info->object = (void *) this;
  1226. // Set the thread attributes for joinable and realtime scheduling
  1227. // priority. The higher priority will only take affect if the
  1228. // program is run as root or suid.
  1229. pthread_attr_t attr;
  1230. pthread_attr_init(&attr);
  1231. pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  1232. pthread_attr_setschedpolicy(&attr, SCHED_RR);
  1233. int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
  1234. pthread_attr_destroy(&attr);
  1235. if (err) {
  1236. info->usingCallback = false;
  1237. sprintf(message_, "RtApiOss: error starting callback thread!");
  1238. error(RtError::THREAD_ERROR);
  1239. }
  1240. }
void RtApiOss :: cancelStreamCallback()
{
  // Stop the stream if running, then tear down the callback thread
  // and clear the callback registration.
  verifyStream();

  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    // NOTE(review): pthread_join() is called while holding the stream
    // mutex.  If the callback thread is currently blocked inside
    // tickStream() waiting on this same mutex, this looks like it
    // could deadlock -- confirm the intended lock ordering.
    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
  1256. extern "C" void *ossCallbackHandler(void *ptr)
  1257. {
  1258. CallbackInfo *info = (CallbackInfo *) ptr;
  1259. RtApiOss *object = (RtApiOss *) info->object;
  1260. bool *usingCallback = &info->usingCallback;
  1261. while ( *usingCallback ) {
  1262. pthread_testcancel();
  1263. try {
  1264. object->tickStream();
  1265. }
  1266. catch (RtError &exception) {
  1267. fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
  1268. exception.getMessageString());
  1269. break;
  1270. }
  1271. }
  1272. return 0;
  1273. }
  1274. //******************** End of __LINUX_OSS__ *********************//
  1275. #endif
  1276. #if defined(__MACOSX_CORE__)
  1277. // The OS X CoreAudio API is designed to use a separate callback
  1278. // procedure for each of its audio devices. A single RtAudio duplex
  1279. // stream using two different devices is supported here, though it
  1280. // cannot be guaranteed to always behave correctly because we cannot
  1281. // synchronize these two callbacks. This same functionality can be
  1282. // achieved with better synchrony by opening two separate streams for
  1283. // the devices and using RtAudio blocking calls (i.e. tickStream()).
  1284. //
  1285. // A property listener is installed for over/underrun information.
  1286. // However, no functionality is currently provided to allow property
  1287. // listeners to trigger user handlers because it is unclear what could
  1288. // be done if a critical stream parameter (buffer size, sample rate,
  1289. // device disconnect) notification arrived. The listeners entail
  1290. // quite a bit of extra code and most likely, a user program wouldn't
  1291. // be prepared for the result anyway.
// A structure to hold various information related to the CoreAudio API
// implementation.
struct CoreHandle {
  UInt32 index[2];           // per-direction stream indices -- presumably set during device open; confirm against probeDeviceOpen
  bool stopStream;           // flag requesting that the stream stop
  bool xrun;                 // set by deviceListener() on a processor-overload (over/underrun) notification
  char *deviceBuffer;        // device-side buffer pointer -- usage not visible in this section
  pthread_cond_t condition;  // NOTE(review): not initialized by the constructor -- presumably pthread_cond_init'ed at stream open; confirm
  CoreHandle()
    :stopStream(false), xrun(false), deviceBuffer(0) {}
};
  1303. RtApiCore :: RtApiCore()
  1304. {
  1305. this->initialize();
  1306. if (nDevices_ <= 0) {
  1307. sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
  1308. error(RtError::NO_DEVICES_FOUND);
  1309. }
  1310. }
  1311. RtApiCore :: ~RtApiCore()
  1312. {
  1313. // The subclass destructor gets called before the base class
  1314. // destructor, so close an existing stream before deallocating
  1315. // apiDeviceId memory.
  1316. if ( stream_.mode != UNINITIALIZED ) closeStream();
  1317. // Free our allocated apiDeviceId memory.
  1318. AudioDeviceID *id;
  1319. for ( unsigned int i=0; i<devices_.size(); i++ ) {
  1320. id = (AudioDeviceID *) devices_[i].apiDeviceId;
  1321. if (id) free(id);
  1322. }
  1323. }
void RtApiCore :: initialize(void)
{
  // Enumerate all CoreAudio devices and store a heap-allocated
  // AudioDeviceID in each device record (freed in the destructor).
  OSStatus err = noErr;
  UInt32 dataSize;
  AudioDeviceID *deviceList = NULL;
  nDevices_ = 0;

  // Find out how many audio devices there are, if any.
  err = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &dataSize, NULL);
  if (err != noErr) {
    sprintf(message_, "RtApiCore: OS-X error getting device info!");
    error(RtError::SYSTEM_ERROR);
  }

  nDevices_ = dataSize / sizeof(AudioDeviceID);
  if (nDevices_ == 0) return;

  // Make space for the devices we are about to get.
  deviceList = (AudioDeviceID *) malloc( dataSize );
  if (deviceList == NULL) {
    sprintf(message_, "RtApiCore: memory allocation error during initialization!");
    error(RtError::MEMORY_ERROR);
  }

  // Get the array of AudioDeviceIDs.
  err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &dataSize, (void *) deviceList);
  if (err != noErr) {
    // Free the list before error() raises SYSTEM_ERROR.
    free(deviceList);
    sprintf(message_, "RtApiCore: OS-X error getting device properties!");
    error(RtError::SYSTEM_ERROR);
  }

  // Create list of device structures and write device identifiers.
  RtApiDevice device;
  AudioDeviceID *id;
  for (int i=0; i<nDevices_; i++) {
    devices_.push_back(device);
    id = (AudioDeviceID *) malloc( sizeof(AudioDeviceID) );
    *id = deviceList[i];
    devices_[i].apiDeviceId = (void *) id;
  }

  free(deviceList);
}
  1362. int RtApiCore :: getDefaultInputDevice(void)
  1363. {
  1364. AudioDeviceID id, *deviceId;
  1365. UInt32 dataSize = sizeof( AudioDeviceID );
  1366. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
  1367. &dataSize, &id );
  1368. if (result != noErr) {
  1369. sprintf( message_, "RtApiCore: OS-X error getting default input device." );
  1370. error(RtError::WARNING);
  1371. return 0;
  1372. }
  1373. for ( int i=0; i<nDevices_; i++ ) {
  1374. deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
  1375. if ( id == *deviceId ) return i;
  1376. }
  1377. return 0;
  1378. }
  1379. int RtApiCore :: getDefaultOutputDevice(void)
  1380. {
  1381. AudioDeviceID id, *deviceId;
  1382. UInt32 dataSize = sizeof( AudioDeviceID );
  1383. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
  1384. &dataSize, &id );
  1385. if (result != noErr) {
  1386. sprintf( message_, "RtApiCore: OS-X error getting default output device." );
  1387. error(RtError::WARNING);
  1388. return 0;
  1389. }
  1390. for ( int i=0; i<nDevices_; i++ ) {
  1391. deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
  1392. if ( id == *deviceId ) return i;
  1393. }
  1394. return 0;
  1395. }
  1396. static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
  1397. AudioStreamBasicDescription *desc, bool isDuplex )
  1398. {
  1399. OSStatus result = noErr;
  1400. UInt32 dataSize = sizeof( AudioStreamBasicDescription );
  1401. result = AudioDeviceGetProperty( id, 0, isInput,
  1402. kAudioDevicePropertyStreamFormatSupported,
  1403. &dataSize, desc );
  1404. if (result == kAudioHardwareNoError) {
  1405. if ( isDuplex ) {
  1406. result = AudioDeviceGetProperty( id, 0, true,
  1407. kAudioDevicePropertyStreamFormatSupported,
  1408. &dataSize, desc );
  1409. if (result != kAudioHardwareNoError)
  1410. return false;
  1411. }
  1412. return true;
  1413. }
  1414. return false;
  1415. }
  1416. void RtApiCore :: probeDeviceInfo( RtApiDevice *info )
  1417. {
  1418. OSStatus err = noErr;
  1419. // Get the device manufacturer and name.
  1420. char name[256];
  1421. char fullname[512];
  1422. UInt32 dataSize = 256;
  1423. AudioDeviceID *id = (AudioDeviceID *) info->apiDeviceId;
  1424. err = AudioDeviceGetProperty( *id, 0, false,
  1425. kAudioDevicePropertyDeviceManufacturer,
  1426. &dataSize, name );
  1427. if (err != noErr) {
  1428. sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." );
  1429. error(RtError::DEBUG_WARNING);
  1430. return;
  1431. }
  1432. strncpy(fullname, name, 256);
  1433. strcat(fullname, ": " );
  1434. dataSize = 256;
  1435. err = AudioDeviceGetProperty( *id, 0, false,
  1436. kAudioDevicePropertyDeviceName,
  1437. &dataSize, name );
  1438. if (err != noErr) {
  1439. sprintf( message_, "RtApiCore: OS-X error getting device name." );
  1440. error(RtError::DEBUG_WARNING);
  1441. return;
  1442. }
  1443. strncat(fullname, name, 254);
  1444. info->name.erase();
  1445. info->name.append( (const char *)fullname, strlen(fullname)+1);
  1446. // Get output channel information.
  1447. unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0;
  1448. AudioBufferList *bufferList = nil;
  1449. err = AudioDeviceGetPropertyInfo( *id, 0, false,
  1450. kAudioDevicePropertyStreamConfiguration,
  1451. &dataSize, NULL );
  1452. if (err == noErr && dataSize > 0) {
  1453. bufferList = (AudioBufferList *) malloc( dataSize );
  1454. if (bufferList == NULL) {
  1455. sprintf(message_, "RtApiCore: memory allocation error!");
  1456. error(RtError::DEBUG_WARNING);
  1457. return;
  1458. }
  1459. err = AudioDeviceGetProperty( *id, 0, false,
  1460. kAudioDevicePropertyStreamConfiguration,
  1461. &dataSize, bufferList );
  1462. if (err == noErr) {
  1463. maxChannels = 0;
  1464. minChannels = 1000;
  1465. nStreams = bufferList->mNumberBuffers;
  1466. for ( i=0; i<nStreams; i++ ) {
  1467. maxChannels += bufferList->mBuffers[i].mNumberChannels;
  1468. if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
  1469. minChannels = bufferList->mBuffers[i].mNumberChannels;
  1470. }
  1471. }
  1472. }
  1473. free (bufferList);
  1474. if (err != noErr || dataSize <= 0) {
  1475. sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).",
  1476. info->name.c_str() );
  1477. error(RtError::DEBUG_WARNING);
  1478. return;
  1479. }
  1480. if ( nStreams ) {
  1481. if ( maxChannels > 0 )
  1482. info->maxOutputChannels = maxChannels;
  1483. if ( minChannels > 0 )
  1484. info->minOutputChannels = minChannels;
  1485. }
  1486. // Get input channel information.
  1487. bufferList = nil;
  1488. err = AudioDeviceGetPropertyInfo( *id, 0, true,
  1489. kAudioDevicePropertyStreamConfiguration,
  1490. &dataSize, NULL );
  1491. if (err == noErr && dataSize > 0) {
  1492. bufferList = (AudioBufferList *) malloc( dataSize );
  1493. if (bufferList == NULL) {
  1494. sprintf(message_, "RtApiCore: memory allocation error!");
  1495. error(RtError::DEBUG_WARNING);
  1496. return;
  1497. }
  1498. err = AudioDeviceGetProperty( *id, 0, true,
  1499. kAudioDevicePropertyStreamConfiguration,
  1500. &dataSize, bufferList );
  1501. if (err == noErr) {
  1502. maxChannels = 0;
  1503. minChannels = 1000;
  1504. nStreams = bufferList->mNumberBuffers;
  1505. for ( i=0; i<nStreams; i++ ) {
  1506. if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
  1507. minChannels = bufferList->mBuffers[i].mNumberChannels;
  1508. maxChannels += bufferList->mBuffers[i].mNumberChannels;
  1509. }
  1510. }
  1511. }
  1512. free (bufferList);
  1513. if (err != noErr || dataSize <= 0) {
  1514. sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).",
  1515. info->name.c_str() );
  1516. error(RtError::DEBUG_WARNING);
  1517. return;
  1518. }
  1519. if ( nStreams ) {
  1520. if ( maxChannels > 0 )
  1521. info->maxInputChannels = maxChannels;
  1522. if ( minChannels > 0 )
  1523. info->minInputChannels = minChannels;
  1524. }
  1525. // If device opens for both playback and capture, we determine the channels.
  1526. if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
  1527. info->hasDuplexSupport = true;
  1528. info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
  1529. info->maxInputChannels : info->maxOutputChannels;
  1530. info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
  1531. info->minInputChannels : info->minOutputChannels;
  1532. }
  1533. // Probe the device sample rate and data format parameters. The
  1534. // core audio query mechanism is performed on a "stream"
  1535. // description, which can have a variable number of channels and
  1536. // apply to input or output only.
  1537. // Create a stream description structure.
  1538. AudioStreamBasicDescription description;
  1539. dataSize = sizeof( AudioStreamBasicDescription );
  1540. memset(&description, 0, sizeof(AudioStreamBasicDescription));
  1541. bool isInput = false;
  1542. if ( info->maxOutputChannels == 0 ) isInput = true;
  1543. bool isDuplex = false;
  1544. if ( info->maxDuplexChannels > 0 ) isDuplex = true;
  1545. // Determine the supported sample rates.
  1546. info->sampleRates.clear();
  1547. for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
  1548. description.mSampleRate = (double) SAMPLE_RATES[k];
  1549. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1550. info->sampleRates.push_back( SAMPLE_RATES[k] );
  1551. }
  1552. if (info->sampleRates.size() == 0) {
  1553. sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).",
  1554. info->name.c_str() );
  1555. error(RtError::DEBUG_WARNING);
  1556. return;
  1557. }
  1558. // Determine the supported data formats.
  1559. info->nativeFormats = 0;
  1560. description.mFormatID = kAudioFormatLinearPCM;
  1561. description.mBitsPerChannel = 8;
  1562. description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
  1563. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1564. info->nativeFormats |= RTAUDIO_SINT8;
  1565. else {
  1566. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1567. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1568. info->nativeFormats |= RTAUDIO_SINT8;
  1569. }
  1570. description.mBitsPerChannel = 16;
  1571. description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
  1572. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1573. info->nativeFormats |= RTAUDIO_SINT16;
  1574. else {
  1575. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1576. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1577. info->nativeFormats |= RTAUDIO_SINT16;
  1578. }
  1579. description.mBitsPerChannel = 32;
  1580. description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
  1581. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1582. info->nativeFormats |= RTAUDIO_SINT32;
  1583. else {
  1584. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1585. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1586. info->nativeFormats |= RTAUDIO_SINT32;
  1587. }
  1588. description.mBitsPerChannel = 24;
  1589. description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian;
  1590. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1591. info->nativeFormats |= RTAUDIO_SINT24;
  1592. else {
  1593. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1594. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1595. info->nativeFormats |= RTAUDIO_SINT24;
  1596. }
  1597. description.mBitsPerChannel = 32;
  1598. description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
  1599. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1600. info->nativeFormats |= RTAUDIO_FLOAT32;
  1601. else {
  1602. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1603. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1604. info->nativeFormats |= RTAUDIO_FLOAT32;
  1605. }
  1606. description.mBitsPerChannel = 64;
  1607. description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
  1608. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1609. info->nativeFormats |= RTAUDIO_FLOAT64;
  1610. else {
  1611. description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
  1612. if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
  1613. info->nativeFormats |= RTAUDIO_FLOAT64;
  1614. }
  1615. // Check that we have at least one supported format.
  1616. if (info->nativeFormats == 0) {
  1617. sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
  1618. info->name.c_str());
  1619. error(RtError::DEBUG_WARNING);
  1620. return;
  1621. }
  1622. info->probed = true;
  1623. }
  1624. OSStatus callbackHandler(AudioDeviceID inDevice,
  1625. const AudioTimeStamp* inNow,
  1626. const AudioBufferList* inInputData,
  1627. const AudioTimeStamp* inInputTime,
  1628. AudioBufferList* outOutputData,
  1629. const AudioTimeStamp* inOutputTime,
  1630. void* infoPointer)
  1631. {
  1632. CallbackInfo *info = (CallbackInfo *) infoPointer;
  1633. RtApiCore *object = (RtApiCore *) info->object;
  1634. try {
  1635. object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
  1636. }
  1637. catch (RtError &exception) {
  1638. fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
  1639. return kAudioHardwareUnspecifiedError;
  1640. }
  1641. return kAudioHardwareNoError;
  1642. }
  1643. OSStatus deviceListener(AudioDeviceID inDevice,
  1644. UInt32 channel,
  1645. Boolean isInput,
  1646. AudioDevicePropertyID propertyID,
  1647. void* handlePointer)
  1648. {
  1649. CoreHandle *handle = (CoreHandle *) handlePointer;
  1650. if ( propertyID == kAudioDeviceProcessorOverload ) {
  1651. if ( isInput )
  1652. fprintf(stderr, "\nRtApiCore: OS-X audio input overrun detected!\n");
  1653. else
  1654. fprintf(stderr, "\nRtApiCore: OS-X audio output underrun detected!\n");
  1655. handle->xrun = true;
  1656. }
  1657. return kAudioHardwareNoError;
  1658. }
  1659. bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels,
  1660. int sampleRate, RtAudioFormat format,
  1661. int *bufferSize, int numberOfBuffers )
  1662. {
  1663. // Setup for stream mode.
  1664. bool isInput = false;
  1665. AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId);
  1666. if ( mode == INPUT ) isInput = true;
  1667. // Search for a stream which contains the desired number of channels.
  1668. OSStatus err = noErr;
  1669. UInt32 dataSize;
  1670. unsigned int deviceChannels, nStreams = 0;
  1671. UInt32 iChannel = 0, iStream = 0;
  1672. AudioBufferList *bufferList = nil;
  1673. err = AudioDeviceGetPropertyInfo( id, 0, isInput,
  1674. kAudioDevicePropertyStreamConfiguration,
  1675. &dataSize, NULL );
  1676. if (err == noErr && dataSize > 0) {
  1677. bufferList = (AudioBufferList *) malloc( dataSize );
  1678. if (bufferList == NULL) {
  1679. sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!");
  1680. error(RtError::DEBUG_WARNING);
  1681. return FAILURE;
  1682. }
  1683. err = AudioDeviceGetProperty( id, 0, isInput,
  1684. kAudioDevicePropertyStreamConfiguration,
  1685. &dataSize, bufferList );
  1686. if (err == noErr) {
  1687. stream_.deInterleave[mode] = false;
  1688. nStreams = bufferList->mNumberBuffers;
  1689. for ( iStream=0; iStream<nStreams; iStream++ ) {
  1690. if ( bufferList->mBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break;
  1691. iChannel += bufferList->mBuffers[iStream].mNumberChannels;
  1692. }
  1693. // If we didn't find a single stream above, see if we can meet
  1694. // the channel specification in mono mode (i.e. using separate
  1695. // non-interleaved buffers). This can only work if there are N
  1696. // consecutive one-channel streams, where N is the number of
  1697. // desired channels.
  1698. iChannel = 0;
  1699. if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) {
  1700. int counter = 0;
  1701. for ( iStream=0; iStream<nStreams; iStream++ ) {
  1702. if ( bufferList->mBuffers[iStream].mNumberChannels == 1 )
  1703. counter++;
  1704. else
  1705. counter = 0;
  1706. if ( counter == channels ) {
  1707. iStream -= channels - 1;
  1708. iChannel -= channels - 1;
  1709. stream_.deInterleave[mode] = true;
  1710. break;
  1711. }
  1712. iChannel += bufferList->mBuffers[iStream].mNumberChannels;
  1713. }
  1714. }
  1715. }
  1716. }
  1717. if (err != noErr || dataSize <= 0) {
  1718. if ( bufferList ) free( bufferList );
  1719. sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).",
  1720. devices_[device].name.c_str() );
  1721. error(RtError::DEBUG_WARNING);
  1722. return FAILURE;
  1723. }
  1724. if (iStream >= nStreams) {
  1725. free (bufferList);
  1726. sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
  1727. devices_[device].name.c_str(), channels );
  1728. error(RtError::DEBUG_WARNING);
  1729. return FAILURE;
  1730. }
  1731. // This is ok even for mono mode ... it gets updated later.
  1732. deviceChannels = bufferList->mBuffers[iStream].mNumberChannels;
  1733. free (bufferList);
  1734. // Determine the buffer size.
  1735. AudioValueRange bufferRange;
  1736. dataSize = sizeof(AudioValueRange);
  1737. err = AudioDeviceGetProperty( id, 0, isInput,
  1738. kAudioDevicePropertyBufferSizeRange,
  1739. &dataSize, &bufferRange);
  1740. if (err != noErr) {
  1741. sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).",
  1742. devices_[device].name.c_str() );
  1743. error(RtError::DEBUG_WARNING);
  1744. return FAILURE;
  1745. }
  1746. long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32);
  1747. if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum;
  1748. else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum;
  1749. // Set the buffer size. For mono mode, I'm assuming we only need to
  1750. // make this setting for the first channel.
  1751. UInt32 theSize = (UInt32) bufferBytes;
  1752. dataSize = sizeof( UInt32);
  1753. err = AudioDeviceSetProperty(id, NULL, 0, isInput,
  1754. kAudioDevicePropertyBufferSize,
  1755. dataSize, &theSize);
  1756. if (err != noErr) {
  1757. sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
  1758. devices_[device].name.c_str() );
  1759. error(RtError::DEBUG_WARNING);
  1760. return FAILURE;
  1761. }
  1762. // If attempting to setup a duplex stream, the bufferSize parameter
  1763. // MUST be the same in both directions!
  1764. *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
  1765. if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
  1766. sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
  1767. devices_[device].name.c_str() );
  1768. error(RtError::DEBUG_WARNING);
  1769. return FAILURE;
  1770. }
  1771. stream_.bufferSize = *bufferSize;
  1772. stream_.nBuffers = 1;
  1773. // Set the stream format description. Do for each channel in mono mode.
  1774. AudioStreamBasicDescription description;
  1775. dataSize = sizeof( AudioStreamBasicDescription );
  1776. if ( stream_.deInterleave[mode] ) nStreams = channels;
  1777. else nStreams = 1;
  1778. for ( unsigned int i=0; i<nStreams; i++, iChannel++ ) {
  1779. err = AudioDeviceGetProperty( id, iChannel, isInput,
  1780. kAudioDevicePropertyStreamFormat,
  1781. &dataSize, &description );
  1782. if (err != noErr) {
  1783. sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).",
  1784. devices_[device].name.c_str() );
  1785. error(RtError::DEBUG_WARNING);
  1786. return FAILURE;
  1787. }
  1788. // Set the sample rate and data format id.
  1789. description.mSampleRate = (double) sampleRate;
  1790. description.mFormatID = kAudioFormatLinearPCM;
  1791. err = AudioDeviceSetProperty( id, NULL, iChannel, isInput,
  1792. kAudioDevicePropertyStreamFormat,
  1793. dataSize, &description );
  1794. if (err != noErr) {
  1795. sprintf( message_, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
  1796. devices_[device].name.c_str() );
  1797. error(RtError::DEBUG_WARNING);
  1798. return FAILURE;
  1799. }
  1800. }
  1801. // Check whether we need byte-swapping (assuming OS-X host is big-endian).
  1802. iChannel -= nStreams;
  1803. err = AudioDeviceGetProperty( id, iChannel, isInput,
  1804. kAudioDevicePropertyStreamFormat,
  1805. &dataSize, &description );
  1806. if (err != noErr) {
  1807. sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).", devices_[device].name.c_str() );
  1808. error(RtError::DEBUG_WARNING);
  1809. return FAILURE;
  1810. }
  1811. stream_.doByteSwap[mode] = false;
  1812. if ( !description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian )
  1813. stream_.doByteSwap[mode] = true;
  1814. // From the CoreAudio documentation, PCM data must be supplied as
  1815. // 32-bit floats.
  1816. stream_.userFormat = format;
  1817. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  1818. if ( stream_.deInterleave[mode] ) // mono mode
  1819. stream_.nDeviceChannels[mode] = channels;
  1820. else
  1821. stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
  1822. stream_.nUserChannels[mode] = channels;
  1823. // Set flags for buffer conversion.
  1824. stream_.doConvertBuffer[mode] = false;
  1825. if (stream_.userFormat != stream_.deviceFormat[mode])
  1826. stream_.doConvertBuffer[mode] = true;
  1827. if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
  1828. stream_.doConvertBuffer[mode] = true;
  1829. if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
  1830. stream_.doConvertBuffer[mode] = true;
  1831. // Allocate our CoreHandle structure for the stream.
  1832. CoreHandle *handle;
  1833. if ( stream_.apiHandle == 0 ) {
  1834. handle = (CoreHandle *) calloc(1, sizeof(CoreHandle));
  1835. if ( handle == NULL ) {
  1836. sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
  1837. devices_[device].name.c_str());
  1838. goto error;
  1839. }
  1840. handle->index[0] = 0;
  1841. handle->index[1] = 0;
  1842. if ( pthread_cond_init(&handle->condition, NULL) ) {
  1843. sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).",
  1844. devices_[device].name.c_str());
  1845. goto error;
  1846. }
  1847. stream_.apiHandle = (void *) handle;
  1848. }
  1849. else
  1850. handle = (CoreHandle *) stream_.apiHandle;
  1851. handle->index[mode] = iStream;
  1852. // Allocate necessary internal buffers.
  1853. if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
  1854. long buffer_bytes;
  1855. if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
  1856. buffer_bytes = stream_.nUserChannels[0];
  1857. else
  1858. buffer_bytes = stream_.nUserChannels[1];
  1859. buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  1860. if (stream_.userBuffer) free(stream_.userBuffer);
  1861. stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  1862. if (stream_.userBuffer == NULL) {
  1863. sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).",
  1864. devices_[device].name.c_str());
  1865. goto error;
  1866. }
  1867. }
  1868. if ( stream_.deInterleave[mode] ) {
  1869. long buffer_bytes;
  1870. bool makeBuffer = true;
  1871. if ( mode == OUTPUT )
  1872. buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  1873. else { // mode == INPUT
  1874. buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
  1875. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  1876. long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  1877. if ( buffer_bytes < bytes_out ) makeBuffer = false;
  1878. }
  1879. }
  1880. if ( makeBuffer ) {
  1881. buffer_bytes *= *bufferSize;
  1882. if (stream_.deviceBuffer) free(stream_.deviceBuffer);
  1883. stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
  1884. if (stream_.deviceBuffer == NULL) {
  1885. sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).",
  1886. devices_[device].name.c_str());
  1887. goto error;
  1888. }
  1889. // If not de-interleaving, we point stream_.deviceBuffer to the
  1890. // OS X supplied device buffer before doing any necessary data
  1891. // conversions. This presents a problem if we have a duplex
  1892. // stream using one device which needs de-interleaving and
  1893. // another device which doesn't. So, save a pointer to our own
  1894. // device buffer in the CallbackInfo structure.
  1895. handle->deviceBuffer = stream_.deviceBuffer;
  1896. }
  1897. }
  1898. stream_.sampleRate = sampleRate;
  1899. stream_.device[mode] = device;
  1900. stream_.state = STREAM_STOPPED;
  1901. stream_.callbackInfo.object = (void *) this;
  1902. if ( stream_.mode == OUTPUT && mode == INPUT && stream_.device[0] == device )
  1903. // Only one callback procedure per device.
  1904. stream_.mode = DUPLEX;
  1905. else {
  1906. err = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
  1907. if (err != noErr) {
  1908. sprintf( message_, "RtApiCore: OS-X error setting callback for device (%s).", devices_[device].name.c_str() );
  1909. error(RtError::DEBUG_WARNING);
  1910. return FAILURE;
  1911. }
  1912. if ( stream_.mode == OUTPUT && mode == INPUT )
  1913. stream_.mode = DUPLEX;
  1914. else
  1915. stream_.mode = mode;
  1916. }
  1917. // Setup the device property listener for over/underload.
  1918. err = AudioDeviceAddPropertyListener( id, iChannel, isInput,
  1919. kAudioDeviceProcessorOverload,
  1920. deviceListener, (void *) handle );
  1921. return SUCCESS;
  1922. error:
  1923. if ( handle ) {
  1924. pthread_cond_destroy(&handle->condition);
  1925. free(handle);
  1926. stream_.apiHandle = 0;
  1927. }
  1928. if (stream_.userBuffer) {
  1929. free(stream_.userBuffer);
  1930. stream_.userBuffer = 0;
  1931. }
  1932. error(RtError::WARNING);
  1933. return FAILURE;
  1934. }
// Close the open stream: stop and remove the CoreAudio IOProc(s),
// free the internal conversion buffers and the CoreHandle, and mark
// the stream uninitialized.  Safe to call from the destructor.
void RtApiCore :: closeStream()
{
  // We don't want an exception to be thrown here because this
  // function is called by our class destructor.  So, do our own
  // stream check.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiCore::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  // Stop (if still running) and deregister the IOProc on the output
  // device (stream_.device[0]).
  AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    if (stream_.state == STREAM_RUNNING)
      AudioDeviceStop( id, callbackHandler );
    AudioDeviceRemoveIOProc( id, callbackHandler );
  }

  // Same for the input device (stream_.device[1]); in DUPLEX mode
  // this is skipped when input and output share one device, since
  // that device was already handled above.
  id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
  if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
    if (stream_.state == STREAM_RUNNING)
      AudioDeviceStop( id, callbackHandler );
    AudioDeviceRemoveIOProc( id, callbackHandler );
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  // Only free deviceBuffer when a de-interleaving buffer was
  // allocated by us; otherwise it may alias an OS-supplied buffer
  // (see the note in probeDeviceOpen) and must not be freed.
  if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;

  // Destroy pthread condition variable and free the CoreHandle structure.
  if ( handle ) {
    pthread_cond_destroy(&handle->condition);
    free( handle );
    stream_.apiHandle = 0;
  }

  stream_.mode = UNINITIALIZED;
}
  1974. void RtApiCore :: startStream()
  1975. {
  1976. verifyStream();
  1977. if (stream_.state == STREAM_RUNNING) return;
  1978. MUTEX_LOCK(&stream_.mutex);
  1979. OSStatus err;
  1980. AudioDeviceID id;
  1981. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  1982. id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
  1983. err = AudioDeviceStart(id, callbackHandler);
  1984. if (err != noErr) {
  1985. sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).",
  1986. devices_[stream_.device[0]].name.c_str());
  1987. MUTEX_UNLOCK(&stream_.mutex);
  1988. error(RtError::DRIVER_ERROR);
  1989. }
  1990. }
  1991. if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
  1992. id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
  1993. err = AudioDeviceStart(id, callbackHandler);
  1994. if (err != noErr) {
  1995. sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
  1996. devices_[stream_.device[0]].name.c_str());
  1997. MUTEX_UNLOCK(&stream_.mutex);
  1998. error(RtError::DRIVER_ERROR);
  1999. }
  2000. }
  2001. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  2002. handle->stopStream = false;
  2003. stream_.state = STREAM_RUNNING;
  2004. MUTEX_UNLOCK(&stream_.mutex);
  2005. }
  2006. void RtApiCore :: stopStream()
  2007. {
  2008. verifyStream();
  2009. if (stream_.state == STREAM_STOPPED) return;
  2010. // Change the state before the lock to improve shutdown response
  2011. // when using a callback.
  2012. stream_.state = STREAM_STOPPED;
  2013. MUTEX_LOCK(&stream_.mutex);
  2014. OSStatus err;
  2015. AudioDeviceID id;
  2016. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  2017. id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
  2018. err = AudioDeviceStop(id, callbackHandler);
  2019. if (err != noErr) {
  2020. sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
  2021. devices_[stream_.device[0]].name.c_str());
  2022. MUTEX_UNLOCK(&stream_.mutex);
  2023. error(RtError::DRIVER_ERROR);
  2024. }
  2025. }
  2026. if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
  2027. id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
  2028. err = AudioDeviceStop(id, callbackHandler);
  2029. if (err != noErr) {
  2030. sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
  2031. devices_[stream_.device[0]].name.c_str());
  2032. MUTEX_UNLOCK(&stream_.mutex);
  2033. error(RtError::DRIVER_ERROR);
  2034. }
  2035. }
  2036. MUTEX_UNLOCK(&stream_.mutex);
  2037. }
  2038. void RtApiCore :: abortStream()
  2039. {
  2040. stopStream();
  2041. }
  2042. void RtApiCore :: tickStream()
  2043. {
  2044. verifyStream();
  2045. if (stream_.state == STREAM_STOPPED) return;
  2046. if (stream_.callbackInfo.usingCallback) {
  2047. sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!");
  2048. error(RtError::WARNING);
  2049. return;
  2050. }
  2051. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  2052. MUTEX_LOCK(&stream_.mutex);
  2053. pthread_cond_wait(&handle->condition, &stream_.mutex);
  2054. MUTEX_UNLOCK(&stream_.mutex);
  2055. }
// Per-cycle stream processing, invoked from callbackHandler() for
// each CoreAudio device I/O cycle.  Runs the user callback (callback
// mode), converts/byte-swaps/copies data between the user buffer and
// the OS-supplied AudioBufferLists, and signals tickStream() in
// blocking mode.
//
// deviceId  the device for which this cycle fired (distinguishes the
//           two devices of a split duplex stream)
// inData    incoming AudioBufferList (capture side), may be untouched
// outData   outgoing AudioBufferList (playback side)
void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData )
{
  verifyStream();

  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  AudioBufferList *inBufferList = (AudioBufferList *) inData;
  AudioBufferList *outBufferList = (AudioBufferList *) outData;

  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data.  Don't
  // invoke the user callback if duplex mode AND the input/output devices
  // are different AND this function is called for the input device.
  AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
  if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
    // After an xrun (flagged by deviceListener), clear the flag and
    // skip this cycle's data shuffling entirely.
    if ( handle->xrun == true ) {
      handle->xrun = false;
      MUTEX_UNLOCK(&stream_.mutex);
      return;
    }
  }

  // ---- Output (playback) side ----
  if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {

    if (stream_.doConvertBuffer[0]) {

      // When interleaved, convert directly into the OS buffer;
      // otherwise convert into our own buffer and then scatter the
      // channels to consecutive mono streams below.
      if ( !stream_.deInterleave[0] )
        stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData;
      else
        stream_.deviceBuffer = handle->deviceBuffer;

      convertStreamBuffer(OUTPUT);
      if ( stream_.doByteSwap[0] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[0],
                       stream_.deviceFormat[0]);

      if ( stream_.deInterleave[0] ) {
        // Copy each channel's block to its own (mono) stream buffer,
        // starting at stream index handle->index[0].
        int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize;
        for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
          memcpy(outBufferList->mBuffers[handle->index[0]+i].mData,
                 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
        }
      }

    }
    else {
      // No conversion needed: byte-swap (if required) in place and
      // copy the user buffer straight to the OS buffer.
      if (stream_.doByteSwap[0])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[0],
                       stream_.userFormat);

      memcpy(outBufferList->mBuffers[handle->index[0]].mData,
             stream_.userBuffer,
             outBufferList->mBuffers[handle->index[0]].mDataByteSize );
    }
  }

  // ---- Input (capture) side ----
  // NOTE(review): in DUPLEX mode this test compares against the
  // OUTPUT device's id, so split-device duplex capture appears to be
  // processed only during the output device's cycle — confirm against
  // the duplex device-pairing logic in probeDeviceOpen.
  if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {

    if (stream_.doConvertBuffer[1]) {

      if ( stream_.deInterleave[1] ) {
        // Gather consecutive mono stream buffers into our own
        // interleaving workspace before conversion.
        stream_.deviceBuffer = (char *) handle->deviceBuffer;
        int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize;
        for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
          memcpy(&stream_.deviceBuffer[i*bufferBytes],
                 inBufferList->mBuffers[handle->index[1]+i].mData, bufferBytes );
        }
      }
      else
        stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData;

      if ( stream_.doByteSwap[1] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[1],
                       stream_.deviceFormat[1]);

      convertStreamBuffer(INPUT);

    }
    else {
      memcpy(stream_.userBuffer,
             inBufferList->mBuffers[handle->index[1]].mData,
             inBufferList->mBuffers[handle->index[1]].mDataByteSize );

      if (stream_.doByteSwap[1])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[1],
                       stream_.userFormat);
    }
  }

  // In blocking mode, wake up tickStream() once the cycle for the
  // governing device has been handled.
  if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) )
    pthread_cond_signal(&handle->condition);

  MUTEX_UNLOCK(&stream_.mutex);
}
  2147. void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData)
  2148. {
  2149. verifyStream();
  2150. if ( stream_.callbackInfo.usingCallback ) {
  2151. sprintf(message_, "RtApiCore: A callback is already set for this stream!");
  2152. error(RtError::WARNING);
  2153. return;
  2154. }
  2155. stream_.callbackInfo.callback = (void *) callback;
  2156. stream_.callbackInfo.userData = userData;
  2157. stream_.callbackInfo.usingCallback = true;
  2158. }
  2159. void RtApiCore :: cancelStreamCallback()
  2160. {
  2161. verifyStream();
  2162. if (stream_.callbackInfo.usingCallback) {
  2163. if (stream_.state == STREAM_RUNNING)
  2164. stopStream();
  2165. MUTEX_LOCK(&stream_.mutex);
  2166. stream_.callbackInfo.usingCallback = false;
  2167. stream_.callbackInfo.userData = NULL;
  2168. stream_.state = STREAM_STOPPED;
  2169. stream_.callbackInfo.callback = NULL;
  2170. MUTEX_UNLOCK(&stream_.mutex);
  2171. }
  2172. }
  2173. //******************** End of __MACOSX_CORE__ *********************//
  2174. #endif
  2175. #if defined(__LINUX_JACK__)
  2176. // JACK is a low-latency audio server, written primarily for the
  2177. // GNU/Linux operating system. It can connect a number of different
  2178. // applications to an audio device, as well as allowing them to share
  2179. // audio between themselves.
  2180. //
  2181. // The JACK server must be running before RtApiJack can be instantiated.
  2182. // RtAudio will report just a single "device", which is the JACK audio
  2183. // server. The JACK server is typically started in a terminal as follows:
  2184. //
// jackd -d alsa -d hw:0
  2186. //
  2187. // Many of the parameters normally set for a stream are fixed by the
  2188. // JACK server and can be specified when the JACK server is started.
  2189. // In particular,
  2190. //
// jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
  2192. //
  2193. // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
  2194. // frames, and number of buffers = 4. Once the server is running, it
  2195. // is not possible to override these values. If the values are not
  2196. // specified in the command-line, the JACK server uses default values.
  2197. #include <jack/jack.h>
  2198. #include <unistd.h>
  2199. // A structure to hold various information related to the Jack API
  2200. // implementation.
struct JackHandle {
  jack_client_t *client;     // connection to the Jack server for this stream
  jack_port_t **ports[2];    // per-direction port arrays; index matches StreamMode (presumably 0 = output, 1 = input, as elsewhere in stream_ — confirm)
  bool clientOpen;           // true while the client connection is alive (cleared by jackShutdown)
  bool stopStream;           // request flag telling the process callback to stop the stream
  pthread_cond_t condition;  // wakes blocking-mode tickStream() after each buffer
  JackHandle()
    :client(0), clientOpen(false), stopStream(false) {}
};
  2210. std::string jackmsg;
  2211. static void jackerror (const char *desc)
  2212. {
  2213. jackmsg.erase();
  2214. jackmsg.append( desc, strlen(desc)+1 );
  2215. }
  2216. RtApiJack :: RtApiJack()
  2217. {
  2218. this->initialize();
  2219. if (nDevices_ <= 0) {
  2220. sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
  2221. jackmsg.c_str());
  2222. error(RtError::NO_DEVICES_FOUND);
  2223. }
  2224. }
  2225. RtApiJack :: ~RtApiJack()
  2226. {
  2227. if ( stream_.mode != UNINITIALIZED ) closeStream();
  2228. }
  2229. void RtApiJack :: initialize(void)
  2230. {
  2231. nDevices_ = 0;
  2232. // Tell the jack server to call jackerror() when it experiences an
  2233. // error. This function saves the error message for subsequent
  2234. // reporting via the normal RtAudio error function.
  2235. jack_set_error_function( jackerror );
  2236. // Look for jack server and try to become a client.
  2237. jack_client_t *client;
  2238. if ( (client = jack_client_new( "RtApiJack" )) == 0)
  2239. return;
  2240. RtApiDevice device;
  2241. // Determine the name of the device.
  2242. device.name = "Jack Server";
  2243. devices_.push_back(device);
  2244. nDevices_++;
  2245. jack_client_close(client);
  2246. }
// Probe the single Jack "device" (the server): connect as a temporary
// client, record the fixed server sample rate, count ports to derive
// channel limits, determine the native sample format, then disconnect.
// On any failure a WARNING is raised and info->probed stays false.
void RtApiJack :: probeDeviceInfo(RtApiDevice *info)
{
  // Look for jack server and try to become a client.
  jack_client_t *client;
  if ( (client = jack_client_new( "RtApiJack" )) == 0) {
    sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
            jackmsg.c_str());
    error(RtError::WARNING);
    return;
  }

  // Get the current jack server sample rate.  The rate is fixed by
  // the server, so it is the only rate reported.
  info->sampleRates.clear();
  info->sampleRates.push_back( jack_get_sample_rate(client) );

  // Count the available ports as device channels.  Jack "input ports"
  // equal RtAudio output channels.
  const char **ports;
  char *port;
  unsigned int nChannels = 0;
  ports = jack_get_ports( client, NULL, NULL, JackPortIsInput );
  if ( ports ) {
    // jack_get_ports() returns a NULL-terminated array of port names;
    // walk it to count the entries.
    port = (char *) ports[nChannels];
    while ( port )
      port = (char *) ports[++nChannels];
    free( ports );
    info->maxOutputChannels = nChannels;
    info->minOutputChannels = 1;
  }

  // Jack "output ports" equal RtAudio input channels.
  nChannels = 0;
  ports = jack_get_ports( client, NULL, NULL, JackPortIsOutput );
  if ( ports ) {
    port = (char *) ports[nChannels];
    while ( port )
      port = (char *) ports[++nChannels];
    free( ports );
    info->maxInputChannels = nChannels;
    info->minInputChannels = 1;
  }

  if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
    jack_client_close(client);
    sprintf(message_, "RtApiJack: error determining jack input/output channels!");
    error(RtError::WARNING);
    return;
  }

  // Duplex requires ports in both directions; the duplex channel
  // limits are the smaller of the two directions' limits.
  if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
    info->hasDuplexSupport = true;
    info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
      info->maxInputChannels : info->maxOutputChannels;
    info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
      info->minInputChannels : info->minOutputChannels;
  }

  // Get the jack data format type.  There isn't much documentation
  // regarding supported data formats in jack.  I'm assuming here that
  // the default type will always be a floating-point type, of length
  // equal to either 4 or 8 bytes.
  int sample_size = sizeof( jack_default_audio_sample_t );
  if ( sample_size == 4 )
    info->nativeFormats = RTAUDIO_FLOAT32;
  else if ( sample_size == 8 )
    info->nativeFormats = RTAUDIO_FLOAT64;

  // Check that we have a supported format
  if (info->nativeFormats == 0) {
    jack_client_close(client);
    sprintf(message_, "RtApiJack: error determining jack server data format!");
    error(RtError::WARNING);
    return;
  }

  jack_client_close(client);
  info->probed = true;
}
  2317. int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer)
  2318. {
  2319. CallbackInfo *info = (CallbackInfo *) infoPointer;
  2320. RtApiJack *object = (RtApiJack *) info->object;
  2321. try {
  2322. object->callbackEvent( (unsigned long) nframes );
  2323. }
  2324. catch (RtError &exception) {
  2325. fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString());
  2326. return 0;
  2327. }
  2328. return 0;
  2329. }
  2330. void jackShutdown(void *infoPointer)
  2331. {
  2332. CallbackInfo *info = (CallbackInfo *) infoPointer;
  2333. JackHandle *handle = (JackHandle *) info->apiInfo;
  2334. handle->clientOpen = false;
  2335. RtApiJack *object = (RtApiJack *) info->object;
  2336. try {
  2337. object->closeStream();
  2338. }
  2339. catch (RtError &exception) {
  2340. fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString());
  2341. return;
  2342. }
  2343. fprintf(stderr, "\nRtApiJack: the Jack server is shutting down ... stream stopped and closed!!!\n\n");
  2344. }
  2345. int jackXrun( void * )
  2346. {
  2347. fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n");
  2348. return 0;
  2349. }
  2350. bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels,
  2351. int sampleRate, RtAudioFormat format,
  2352. int *bufferSize, int numberOfBuffers)
  2353. {
  2354. // Compare the jack server channels to the requested number of channels.
  2355. if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) ||
  2356. (mode == INPUT && devices_[device].maxInputChannels < channels ) ) {
  2357. sprintf(message_, "RtApiJack: the Jack server does not support requested channels!");
  2358. error(RtError::DEBUG_WARNING);
  2359. return FAILURE;
  2360. }
  2361. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  2362. // Look for jack server and try to become a client (only do once per stream).
  2363. char label[32];
  2364. jack_client_t *client = 0;
  2365. if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) {
  2366. snprintf(label, 32, "RtApiJack");
  2367. if ( (client = jack_client_new( (const char *) label )) == 0) {
  2368. sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
  2369. jackmsg.c_str());
  2370. error(RtError::DEBUG_WARNING);
  2371. return FAILURE;
  2372. }
  2373. }
  2374. else {
  2375. // The handle must have been created on an earlier pass.
  2376. client = handle->client;
  2377. }
  2378. // First, check the jack server sample rate.
  2379. int jack_rate;
  2380. jack_rate = (int) jack_get_sample_rate(client);
  2381. if ( sampleRate != jack_rate ) {
  2382. jack_client_close(client);
  2383. sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
  2384. sampleRate, jack_rate );
  2385. error(RtError::DEBUG_WARNING);
  2386. return FAILURE;
  2387. }
  2388. stream_.sampleRate = jack_rate;
  2389. // The jack server seems to support just a single floating-point
  2390. // data type. Since we already checked it before, just use what we
  2391. // found then.
  2392. stream_.deviceFormat[mode] = devices_[device].nativeFormats;
  2393. stream_.userFormat = format;
  2394. // Jack always uses non-interleaved buffers. We'll need to
  2395. // de-interleave if we have more than one channel.
  2396. stream_.deInterleave[mode] = false;
  2397. if ( channels > 1 )
  2398. stream_.deInterleave[mode] = true;
  2399. // Jack always provides host byte-ordered data.
  2400. stream_.doByteSwap[mode] = false;
  2401. // Get the buffer size. The buffer size and number of buffers
  2402. // (periods) is set when the jack server is started.
  2403. stream_.bufferSize = (int) jack_get_buffer_size(client);
  2404. *bufferSize = stream_.bufferSize;
  2405. stream_.nDeviceChannels[mode] = channels;
  2406. stream_.nUserChannels[mode] = channels;
  2407. stream_.doConvertBuffer[mode] = false;
  2408. if (stream_.userFormat != stream_.deviceFormat[mode])
  2409. stream_.doConvertBuffer[mode] = true;
  2410. if (stream_.deInterleave[mode])
  2411. stream_.doConvertBuffer[mode] = true;
  2412. // Allocate our JackHandle structure for the stream.
  2413. if ( handle == 0 ) {
  2414. handle = (JackHandle *) calloc(1, sizeof(JackHandle));
  2415. if ( handle == NULL ) {
  2416. sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).",
  2417. devices_[device].name.c_str());
  2418. goto error;
  2419. }
  2420. handle->ports[0] = 0;
  2421. handle->ports[1] = 0;
  2422. if ( pthread_cond_init(&handle->condition, NULL) ) {
  2423. sprintf(message_, "RtApiJack: error initializing pthread condition variable!");
  2424. goto error;
  2425. }
  2426. stream_.apiHandle = (void *) handle;
  2427. handle->client = client;
  2428. handle->clientOpen = true;
  2429. }
  2430. // Allocate necessary internal buffers.
  2431. if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
  2432. long buffer_bytes;
  2433. if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
  2434. buffer_bytes = stream_.nUserChannels[0];
  2435. else
  2436. buffer_bytes = stream_.nUserChannels[1];
  2437. buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  2438. if (stream_.userBuffer) free(stream_.userBuffer);
  2439. stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  2440. if (stream_.userBuffer == NULL) {
  2441. sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).",
  2442. devices_[device].name.c_str());
  2443. goto error;
  2444. }
  2445. }
  2446. if ( stream_.doConvertBuffer[mode] ) {
  2447. long buffer_bytes;
  2448. bool makeBuffer = true;
  2449. if ( mode == OUTPUT )
  2450. buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  2451. else { // mode == INPUT
  2452. buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
  2453. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  2454. long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  2455. if ( buffer_bytes < bytes_out ) makeBuffer = false;
  2456. }
  2457. }
  2458. if ( makeBuffer ) {
  2459. buffer_bytes *= *bufferSize;
  2460. if (stream_.deviceBuffer) free(stream_.deviceBuffer);
  2461. stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
  2462. if (stream_.deviceBuffer == NULL) {
  2463. sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).",
  2464. devices_[device].name.c_str());
  2465. goto error;
  2466. }
  2467. }
  2468. }
  2469. // Allocate memory for the Jack ports (channels) identifiers.
  2470. handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
  2471. if ( handle->ports[mode] == NULL ) {
  2472. sprintf(message_, "RtApiJack: error allocating port handle memory (%s).",
  2473. devices_[device].name.c_str());
  2474. goto error;
  2475. }
  2476. stream_.device[mode] = device;
  2477. stream_.state = STREAM_STOPPED;
  2478. stream_.callbackInfo.usingCallback = false;
  2479. stream_.callbackInfo.object = (void *) this;
  2480. stream_.callbackInfo.apiInfo = (void *) handle;
  2481. if ( stream_.mode == OUTPUT && mode == INPUT )
  2482. // We had already set up the stream for output.
  2483. stream_.mode = DUPLEX;
  2484. else {
  2485. stream_.mode = mode;
  2486. jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
  2487. jack_set_xrun_callback( handle->client, jackXrun, NULL );
  2488. jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
  2489. }
  2490. return SUCCESS;
  2491. error:
  2492. if ( handle ) {
  2493. pthread_cond_destroy(&handle->condition);
  2494. if ( handle->clientOpen == true )
  2495. jack_client_close(handle->client);
  2496. if ( handle->ports[0] ) free(handle->ports[0]);
  2497. if ( handle->ports[1] ) free(handle->ports[1]);
  2498. free( handle );
  2499. stream_.apiHandle = 0;
  2500. }
  2501. if (stream_.userBuffer) {
  2502. free(stream_.userBuffer);
  2503. stream_.userBuffer = 0;
  2504. }
  2505. error(RtError::WARNING);
  2506. return FAILURE;
  2507. }
  2508. void RtApiJack :: closeStream()
  2509. {
  2510. // We don't want an exception to be thrown here because this
  2511. // function is called by our class destructor. So, do our own
  2512. // stream check.
  2513. if ( stream_.mode == UNINITIALIZED ) {
  2514. sprintf(message_, "RtApiJack::closeStream(): no open stream to close!");
  2515. error(RtError::WARNING);
  2516. return;
  2517. }
  2518. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  2519. if ( handle && handle->clientOpen == true ) {
  2520. if (stream_.state == STREAM_RUNNING)
  2521. jack_deactivate(handle->client);
  2522. jack_client_close(handle->client);
  2523. }
  2524. if ( handle ) {
  2525. if ( handle->ports[0] ) free(handle->ports[0]);
  2526. if ( handle->ports[1] ) free(handle->ports[1]);
  2527. pthread_cond_destroy(&handle->condition);
  2528. free( handle );
  2529. stream_.apiHandle = 0;
  2530. }
  2531. if (stream_.userBuffer) {
  2532. free(stream_.userBuffer);
  2533. stream_.userBuffer = 0;
  2534. }
  2535. if (stream_.deviceBuffer) {
  2536. free(stream_.deviceBuffer);
  2537. stream_.deviceBuffer = 0;
  2538. }
  2539. stream_.mode = UNINITIALIZED;
  2540. }
  2541. void RtApiJack :: startStream()
  2542. {
  2543. verifyStream();
  2544. if (stream_.state == STREAM_RUNNING) return;
  2545. MUTEX_LOCK(&stream_.mutex);
  2546. char label[64];
  2547. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  2548. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  2549. for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
  2550. snprintf(label, 64, "outport %d", i);
  2551. handle->ports[0][i] = jack_port_register(handle->client, (const char *)label,
  2552. JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
  2553. }
  2554. }
  2555. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  2556. for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
  2557. snprintf(label, 64, "inport %d", i);
  2558. handle->ports[1][i] = jack_port_register(handle->client, (const char *)label,
  2559. JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0);
  2560. }
  2561. }
  2562. if (jack_activate(handle->client)) {
  2563. sprintf(message_, "RtApiJack: unable to activate JACK client!");
  2564. error(RtError::SYSTEM_ERROR);
  2565. }
  2566. const char **ports;
  2567. int result;
  2568. // Get the list of available ports.
  2569. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  2570. ports = jack_get_ports(handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsInput);
  2571. if ( ports == NULL) {
  2572. sprintf(message_, "RtApiJack: error determining available jack input ports!");
  2573. error(RtError::SYSTEM_ERROR);
  2574. }
  2575. // Now make the port connections. Since RtAudio wasn't designed to
  2576. // allow the user to select particular channels of a device, we'll
  2577. // just open the first "nChannels" ports.
  2578. for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
  2579. result = 1;
  2580. if ( ports[i] )
  2581. result = jack_connect( handle->client, jack_port_name(handle->ports[0][i]), ports[i] );
  2582. if ( result ) {
  2583. free(ports);
  2584. sprintf(message_, "RtApiJack: error connecting output ports!");
  2585. error(RtError::SYSTEM_ERROR);
  2586. }
  2587. }
  2588. free(ports);
  2589. }
  2590. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  2591. ports = jack_get_ports( handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsOutput );
  2592. if ( ports == NULL) {
  2593. sprintf(message_, "RtApiJack: error determining available jack output ports!");
  2594. error(RtError::SYSTEM_ERROR);
  2595. }
  2596. // Now make the port connections. See note above.
  2597. for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
  2598. result = 1;
  2599. if ( ports[i] )
  2600. result = jack_connect( handle->client, ports[i], jack_port_name(handle->ports[1][i]) );
  2601. if ( result ) {
  2602. free(ports);
  2603. sprintf(message_, "RtApiJack: error connecting input ports!");
  2604. error(RtError::SYSTEM_ERROR);
  2605. }
  2606. }
  2607. free(ports);
  2608. }
  2609. handle->stopStream = false;
  2610. stream_.state = STREAM_RUNNING;
  2611. MUTEX_UNLOCK(&stream_.mutex);
  2612. }
  2613. void RtApiJack :: stopStream()
  2614. {
  2615. verifyStream();
  2616. if (stream_.state == STREAM_STOPPED) return;
  2617. // Change the state before the lock to improve shutdown response
  2618. // when using a callback.
  2619. stream_.state = STREAM_STOPPED;
  2620. MUTEX_LOCK(&stream_.mutex);
  2621. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  2622. jack_deactivate(handle->client);
  2623. MUTEX_UNLOCK(&stream_.mutex);
  2624. }
void RtApiJack :: abortStream()
{
  // Jack offers no faster teardown than a normal stop, so abort simply
  // delegates to stopStream().
  stopStream();
}
void RtApiJack :: tickStream()
{
  // Blocking "tick" for the non-callback API: waits until the jack
  // process thread (callbackEvent) signals handle->condition after
  // moving one buffer of audio.
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  // Ticking is meaningless when a user callback drives the stream.
  if (stream_.callbackInfo.usingCallback) {
    sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!");
    error(RtError::WARNING);
    return;
  }

  JackHandle *handle = (JackHandle *) stream_.apiHandle;

  MUTEX_LOCK(&stream_.mutex);

  // NOTE(review): the wait has no predicate re-check, so a POSIX
  // spurious wakeup would return one tick early — confirm acceptable.
  pthread_cond_wait(&handle->condition, &stream_.mutex);

  MUTEX_UNLOCK(&stream_.mutex);
}
void RtApiJack :: callbackEvent( unsigned long nframes )
{
  // Per-cycle audio transfer, invoked from the jack process thread
  // (via jackCallbackHandler) with the frame count for this cycle.
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  JackHandle *handle = (JackHandle *) stream_.apiHandle;
  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data.
  if ( info->usingCallback ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    // A true return from the callback requests a stop on the NEXT cycle.
    handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
  }

  jack_default_audio_sample_t *jackbuffer;
  // Bytes per channel this cycle; jack buffers are planar (one per port).
  long bufferBytes = nframes * sizeof (jack_default_audio_sample_t);
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    if (stream_.doConvertBuffer[0]) {
      // Convert/de-interleave user data into deviceBuffer, then copy
      // each channel's plane into its jack port buffer.
      convertStreamBuffer(OUTPUT);

      for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i],
                                                                          (jack_nframes_t) nframes);
        memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
      }
    }
    else { // single channel only
      // No conversion needed; userBuffer is already one float plane.
      jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0],
                                                                        (jack_nframes_t) nframes);
      memcpy(jackbuffer, stream_.userBuffer, bufferBytes );
    }
  }

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    if (stream_.doConvertBuffer[1]) {
      // Gather each port's plane into deviceBuffer, then convert
      // (interleave/format) into the user buffer.
      for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
        jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i],
                                                                          (jack_nframes_t) nframes);
        memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
      }
      convertStreamBuffer(INPUT);
    }
    else { // single channel only
      jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0],
                                                                        (jack_nframes_t) nframes);
      memcpy(stream_.userBuffer, jackbuffer, bufferBytes );
    }
  }

  // Blocking (tickStream) mode: wake the waiting application thread.
  if ( !info->usingCallback )
    pthread_cond_signal(&handle->condition);

  MUTEX_UNLOCK(&stream_.mutex);
}
  2699. void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData)
  2700. {
  2701. verifyStream();
  2702. if ( stream_.callbackInfo.usingCallback ) {
  2703. sprintf(message_, "RtApiJack: A callback is already set for this stream!");
  2704. error(RtError::WARNING);
  2705. return;
  2706. }
  2707. stream_.callbackInfo.callback = (void *) callback;
  2708. stream_.callbackInfo.userData = userData;
  2709. stream_.callbackInfo.usingCallback = true;
  2710. }
  2711. void RtApiJack :: cancelStreamCallback()
  2712. {
  2713. verifyStream();
  2714. if (stream_.callbackInfo.usingCallback) {
  2715. if (stream_.state == STREAM_RUNNING)
  2716. stopStream();
  2717. MUTEX_LOCK(&stream_.mutex);
  2718. stream_.callbackInfo.usingCallback = false;
  2719. stream_.callbackInfo.userData = NULL;
  2720. stream_.state = STREAM_STOPPED;
  2721. stream_.callbackInfo.callback = NULL;
  2722. MUTEX_UNLOCK(&stream_.mutex);
  2723. }
  2724. }
  2725. #endif
  2726. #if defined(__LINUX_ALSA__)
  2727. #include <alsa/asoundlib.h>
  2728. #include <unistd.h>
  2729. #include <ctype.h>
  2730. extern "C" void *alsaCallbackHandler(void * ptr);
  2731. RtApiAlsa :: RtApiAlsa()
  2732. {
  2733. this->initialize();
  2734. if (nDevices_ <= 0) {
  2735. sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!");
  2736. error(RtError::NO_DEVICES_FOUND);
  2737. }
  2738. }
  2739. RtApiAlsa :: ~RtApiAlsa()
  2740. {
  2741. if ( stream_.mode != UNINITIALIZED )
  2742. closeStream();
  2743. }
  2744. void RtApiAlsa :: initialize(void)
  2745. {
  2746. int card, subdevice, result;
  2747. char name[64];
  2748. const char *cardId;
  2749. snd_ctl_t *handle;
  2750. snd_ctl_card_info_t *info;
  2751. snd_ctl_card_info_alloca(&info);
  2752. RtApiDevice device;
  2753. // Count cards and devices
  2754. nDevices_ = 0;
  2755. card = -1;
  2756. snd_card_next(&card);
  2757. while ( card >= 0 ) {
  2758. sprintf(name, "hw:%d", card);
  2759. result = snd_ctl_open(&handle, name, 0);
  2760. if (result < 0) {
  2761. sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result));
  2762. error(RtError::DEBUG_WARNING);
  2763. goto next_card;
  2764. }
  2765. result = snd_ctl_card_info(handle, info);
  2766. if (result < 0) {
  2767. sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result));
  2768. error(RtError::DEBUG_WARNING);
  2769. goto next_card;
  2770. }
  2771. cardId = snd_ctl_card_info_get_id(info);
  2772. subdevice = -1;
  2773. while (1) {
  2774. result = snd_ctl_pcm_next_device(handle, &subdevice);
  2775. if (result < 0) {
  2776. sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result));
  2777. error(RtError::DEBUG_WARNING);
  2778. break;
  2779. }
  2780. if (subdevice < 0)
  2781. break;
  2782. sprintf( name, "hw:%d,%d", card, subdevice );
  2783. // If a cardId exists and it contains at least one non-numeric
  2784. // character, use it to identify the device. This avoids a bug
  2785. // in ALSA such that a numeric string is interpreted as a device
  2786. // number.
  2787. for ( unsigned int i=0; i<strlen(cardId); i++ ) {
  2788. if ( !isdigit( cardId[i] ) ) {
  2789. sprintf( name, "hw:%s,%d", cardId, subdevice );
  2790. break;
  2791. }
  2792. }
  2793. device.name.erase();
  2794. device.name.append( (const char *)name, strlen(name)+1 );
  2795. devices_.push_back(device);
  2796. nDevices_++;
  2797. }
  2798. next_card:
  2799. snd_ctl_close(handle);
  2800. snd_card_next(&card);
  2801. }
  2802. }
  2803. void RtApiAlsa :: probeDeviceInfo(RtApiDevice *info)
  2804. {
  2805. int err;
  2806. int open_mode = SND_PCM_ASYNC;
  2807. snd_pcm_t *handle;
  2808. snd_ctl_t *chandle;
  2809. snd_pcm_stream_t stream;
  2810. snd_pcm_info_t *pcminfo;
  2811. snd_pcm_info_alloca(&pcminfo);
  2812. snd_pcm_hw_params_t *params;
  2813. snd_pcm_hw_params_alloca(&params);
  2814. char name[64];
  2815. char *card;
  2816. // Open the control interface for this card.
  2817. strncpy( name, info->name.c_str(), 64 );
  2818. card = strtok(name, ",");
  2819. err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK);
  2820. if (err < 0) {
  2821. sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err));
  2822. error(RtError::DEBUG_WARNING);
  2823. return;
  2824. }
  2825. unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") );
  2826. // First try for playback
  2827. stream = SND_PCM_STREAM_PLAYBACK;
  2828. snd_pcm_info_set_device(pcminfo, dev);
  2829. snd_pcm_info_set_subdevice(pcminfo, 0);
  2830. snd_pcm_info_set_stream(pcminfo, stream);
  2831. if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) {
  2832. if (err == -ENOENT) {
  2833. sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str());
  2834. error(RtError::DEBUG_WARNING);
  2835. }
  2836. else {
  2837. sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
  2838. info->name.c_str(), snd_strerror(err));
  2839. error(RtError::DEBUG_WARNING);
  2840. }
  2841. goto capture_probe;
  2842. }
  2843. err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK );
  2844. if (err < 0) {
  2845. if ( err == EBUSY )
  2846. sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
  2847. info->name.c_str(), snd_strerror(err));
  2848. else
  2849. sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.",
  2850. info->name.c_str(), snd_strerror(err));
  2851. error(RtError::DEBUG_WARNING);
  2852. goto capture_probe;
  2853. }
  2854. // We have an open device ... allocate the parameter structure.
  2855. err = snd_pcm_hw_params_any(handle, params);
  2856. if (err < 0) {
  2857. snd_pcm_close(handle);
  2858. sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
  2859. info->name.c_str(), snd_strerror(err));
  2860. error(RtError::WARNING);
  2861. goto capture_probe;
  2862. }
  2863. // Get output channel information.
  2864. unsigned int value;
  2865. err = snd_pcm_hw_params_get_channels_min(params, &value);
  2866. if (err < 0) {
  2867. snd_pcm_close(handle);
  2868. sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
  2869. info->name.c_str(), snd_strerror(err));
  2870. error(RtError::WARNING);
  2871. goto capture_probe;
  2872. }
  2873. info->minOutputChannels = value;
  2874. err = snd_pcm_hw_params_get_channels_max(params, &value);
  2875. if (err < 0) {
  2876. snd_pcm_close(handle);
  2877. sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
  2878. info->name.c_str(), snd_strerror(err));
  2879. error(RtError::WARNING);
  2880. goto capture_probe;
  2881. }
  2882. info->maxOutputChannels = value;
  2883. snd_pcm_close(handle);
  2884. capture_probe:
  2885. // Now try for capture
  2886. stream = SND_PCM_STREAM_CAPTURE;
  2887. snd_pcm_info_set_stream(pcminfo, stream);
  2888. err = snd_ctl_pcm_info(chandle, pcminfo);
  2889. snd_ctl_close(chandle);
  2890. if ( err < 0 ) {
  2891. if (err == -ENOENT) {
  2892. sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str());
  2893. error(RtError::DEBUG_WARNING);
  2894. }
  2895. else {
  2896. sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
  2897. info->name.c_str(), snd_strerror(err));
  2898. error(RtError::DEBUG_WARNING);
  2899. }
  2900. if (info->maxOutputChannels == 0)
  2901. // didn't open for playback either ... device invalid
  2902. return;
  2903. goto probe_parameters;
  2904. }
  2905. err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK);
  2906. if (err < 0) {
  2907. if ( err == EBUSY )
  2908. sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
  2909. info->name.c_str(), snd_strerror(err));
  2910. else
  2911. sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.",
  2912. info->name.c_str(), snd_strerror(err));
  2913. error(RtError::DEBUG_WARNING);
  2914. if (info->maxOutputChannels == 0)
  2915. // didn't open for playback either ... device invalid
  2916. return;
  2917. goto probe_parameters;
  2918. }
  2919. // We have an open capture device ... allocate the parameter structure.
  2920. err = snd_pcm_hw_params_any(handle, params);
  2921. if (err < 0) {
  2922. snd_pcm_close(handle);
  2923. sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
  2924. info->name.c_str(), snd_strerror(err));
  2925. error(RtError::WARNING);
  2926. if (info->maxOutputChannels > 0)
  2927. goto probe_parameters;
  2928. else
  2929. return;
  2930. }
  2931. // Get input channel information.
  2932. err = snd_pcm_hw_params_get_channels_min(params, &value);
  2933. if (err < 0) {
  2934. snd_pcm_close(handle);
  2935. sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
  2936. info->name.c_str(), snd_strerror(err));
  2937. error(RtError::WARNING);
  2938. if (info->maxOutputChannels > 0)
  2939. goto probe_parameters;
  2940. else
  2941. return;
  2942. }
  2943. info->minInputChannels = value;
  2944. err = snd_pcm_hw_params_get_channels_max(params, &value);
  2945. if (err < 0) {
  2946. snd_pcm_close(handle);
  2947. sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
  2948. info->name.c_str(), snd_strerror(err));
  2949. error(RtError::WARNING);
  2950. if (info->maxOutputChannels > 0)
  2951. goto probe_parameters;
  2952. else
  2953. return;
  2954. }
  2955. info->maxInputChannels = value;
  2956. snd_pcm_close(handle);
  2957. // If device opens for both playback and capture, we determine the channels.
  2958. if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
  2959. goto probe_parameters;
  2960. info->hasDuplexSupport = true;
  2961. info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
  2962. info->maxInputChannels : info->maxOutputChannels;
  2963. info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
  2964. info->minInputChannels : info->minOutputChannels;
  2965. probe_parameters:
  2966. // At this point, we just need to figure out the supported data
  2967. // formats and sample rates. We'll proceed by opening the device in
  2968. // the direction with the maximum number of channels, or playback if
  2969. // they are equal. This might limit our sample rate options, but so
  2970. // be it.
  2971. if (info->maxOutputChannels >= info->maxInputChannels)
  2972. stream = SND_PCM_STREAM_PLAYBACK;
  2973. else
  2974. stream = SND_PCM_STREAM_CAPTURE;
  2975. err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode);
  2976. if (err < 0) {
  2977. sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
  2978. info->name.c_str(), snd_strerror(err));
  2979. error(RtError::WARNING);
  2980. return;
  2981. }
  2982. // We have an open device ... allocate the parameter structure.
  2983. err = snd_pcm_hw_params_any(handle, params);
  2984. if (err < 0) {
  2985. snd_pcm_close(handle);
  2986. sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.",
  2987. info->name.c_str(), snd_strerror(err));
  2988. error(RtError::WARNING);
  2989. return;
  2990. }
  2991. // Test our discrete set of sample rate values.
  2992. int dir = 0;
  2993. info->sampleRates.clear();
  2994. for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
  2995. if (snd_pcm_hw_params_test_rate(handle, params, SAMPLE_RATES[i], dir) == 0)
  2996. info->sampleRates.push_back(SAMPLE_RATES[i]);
  2997. }
  2998. if (info->sampleRates.size() == 0) {
  2999. snd_pcm_close(handle);
  3000. sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).",
  3001. info->name.c_str());
  3002. error(RtError::DEBUG_WARNING);
  3003. return;
  3004. }
  3005. // Probe the supported data formats ... we don't care about endian-ness just yet
  3006. snd_pcm_format_t format;
  3007. info->nativeFormats = 0;
  3008. format = SND_PCM_FORMAT_S8;
  3009. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3010. info->nativeFormats |= RTAUDIO_SINT8;
  3011. format = SND_PCM_FORMAT_S16;
  3012. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3013. info->nativeFormats |= RTAUDIO_SINT16;
  3014. format = SND_PCM_FORMAT_S24;
  3015. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3016. info->nativeFormats |= RTAUDIO_SINT24;
  3017. format = SND_PCM_FORMAT_S32;
  3018. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3019. info->nativeFormats |= RTAUDIO_SINT32;
  3020. format = SND_PCM_FORMAT_FLOAT;
  3021. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3022. info->nativeFormats |= RTAUDIO_FLOAT32;
  3023. format = SND_PCM_FORMAT_FLOAT64;
  3024. if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
  3025. info->nativeFormats |= RTAUDIO_FLOAT64;
  3026. // Check that we have at least one supported format
  3027. if (info->nativeFormats == 0) {
  3028. snd_pcm_close(handle);
  3029. sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
  3030. info->name.c_str());
  3031. error(RtError::WARNING);
  3032. return;
  3033. }
  3034. // That's all ... close the device and return
  3035. snd_pcm_close(handle);
  3036. info->probed = true;
  3037. return;
  3038. }
  3039. bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels,
  3040. int sampleRate, RtAudioFormat format,
  3041. int *bufferSize, int numberOfBuffers )
  3042. {
  3043. #if defined(__RTAUDIO_DEBUG__)
  3044. snd_output_t *out;
  3045. snd_output_stdio_attach(&out, stderr, 0);
  3046. #endif
  3047. // I'm not using the "plug" interface ... too much inconsistent behavior.
  3048. const char *name = devices_[device].name.c_str();
  3049. snd_pcm_stream_t alsa_stream;
  3050. if (mode == OUTPUT)
  3051. alsa_stream = SND_PCM_STREAM_PLAYBACK;
  3052. else
  3053. alsa_stream = SND_PCM_STREAM_CAPTURE;
  3054. int err;
  3055. snd_pcm_t *handle;
  3056. int alsa_open_mode = SND_PCM_ASYNC;
  3057. err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode);
  3058. if (err < 0) {
  3059. sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.",
  3060. name, snd_strerror(err));
  3061. error(RtError::WARNING);
  3062. return FAILURE;
  3063. }
  3064. // Fill the parameter structure.
  3065. snd_pcm_hw_params_t *hw_params;
  3066. snd_pcm_hw_params_alloca(&hw_params);
  3067. err = snd_pcm_hw_params_any(handle, hw_params);
  3068. if (err < 0) {
  3069. snd_pcm_close(handle);
  3070. sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.",
  3071. name, snd_strerror(err));
  3072. error(RtError::WARNING);
  3073. return FAILURE;
  3074. }
  3075. #if defined(__RTAUDIO_DEBUG__)
  3076. fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
  3077. snd_pcm_hw_params_dump(hw_params, out);
  3078. #endif
  3079. // Set access ... try interleaved access first, then non-interleaved
  3080. if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) {
  3081. err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED);
  3082. }
  3083. else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) {
  3084. err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED);
  3085. stream_.deInterleave[mode] = true;
  3086. }
  3087. else {
  3088. snd_pcm_close(handle);
  3089. sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name);
  3090. error(RtError::WARNING);
  3091. return FAILURE;
  3092. }
  3093. if (err < 0) {
  3094. snd_pcm_close(handle);
  3095. sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err));
  3096. error(RtError::WARNING);
  3097. return FAILURE;
  3098. }
  3099. // Determine how to set the device format.
  3100. stream_.userFormat = format;
  3101. snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN;
  3102. if (format == RTAUDIO_SINT8)
  3103. device_format = SND_PCM_FORMAT_S8;
  3104. else if (format == RTAUDIO_SINT16)
  3105. device_format = SND_PCM_FORMAT_S16;
  3106. else if (format == RTAUDIO_SINT24)
  3107. device_format = SND_PCM_FORMAT_S24;
  3108. else if (format == RTAUDIO_SINT32)
  3109. device_format = SND_PCM_FORMAT_S32;
  3110. else if (format == RTAUDIO_FLOAT32)
  3111. device_format = SND_PCM_FORMAT_FLOAT;
  3112. else if (format == RTAUDIO_FLOAT64)
  3113. device_format = SND_PCM_FORMAT_FLOAT64;
  3114. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3115. stream_.deviceFormat[mode] = format;
  3116. goto set_format;
  3117. }
  3118. // The user requested format is not natively supported by the device.
  3119. device_format = SND_PCM_FORMAT_FLOAT64;
  3120. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3121. stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
  3122. goto set_format;
  3123. }
  3124. device_format = SND_PCM_FORMAT_FLOAT;
  3125. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3126. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  3127. goto set_format;
  3128. }
  3129. device_format = SND_PCM_FORMAT_S32;
  3130. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3131. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  3132. goto set_format;
  3133. }
  3134. device_format = SND_PCM_FORMAT_S24;
  3135. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3136. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  3137. goto set_format;
  3138. }
  3139. device_format = SND_PCM_FORMAT_S16;
  3140. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3141. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  3142. goto set_format;
  3143. }
  3144. device_format = SND_PCM_FORMAT_S8;
  3145. if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
  3146. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  3147. goto set_format;
  3148. }
  3149. // If we get here, no supported format was found.
  3150. sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name);
  3151. snd_pcm_close(handle);
  3152. error(RtError::WARNING);
  3153. return FAILURE;
  3154. set_format:
  3155. err = snd_pcm_hw_params_set_format(handle, hw_params, device_format);
  3156. if (err < 0) {
  3157. snd_pcm_close(handle);
  3158. sprintf(message_, "RtApiAlsa: error setting format (%s): %s.",
  3159. name, snd_strerror(err));
  3160. error(RtError::WARNING);
  3161. return FAILURE;
  3162. }
  3163. // Determine whether byte-swaping is necessary.
  3164. stream_.doByteSwap[mode] = false;
  3165. if (device_format != SND_PCM_FORMAT_S8) {
  3166. err = snd_pcm_format_cpu_endian(device_format);
  3167. if (err == 0)
  3168. stream_.doByteSwap[mode] = true;
  3169. else if (err < 0) {
  3170. snd_pcm_close(handle);
  3171. sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.",
  3172. name, snd_strerror(err));
  3173. error(RtError::WARNING);
  3174. return FAILURE;
  3175. }
  3176. }
  3177. // Set the sample rate.
  3178. err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0);
  3179. if (err < 0) {
  3180. snd_pcm_close(handle);
  3181. sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
  3182. sampleRate, name, snd_strerror(err));
  3183. error(RtError::WARNING);
  3184. return FAILURE;
  3185. }
  3186. // Determine the number of channels for this device. We support a possible
  3187. // minimum device channel number > than the value requested by the user.
  3188. stream_.nUserChannels[mode] = channels;
  3189. unsigned int value;
  3190. err = snd_pcm_hw_params_get_channels_max(hw_params, &value);
  3191. int device_channels = value;
  3192. if (err < 0 || device_channels < channels) {
  3193. snd_pcm_close(handle);
  3194. sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).",
  3195. channels, name);
  3196. error(RtError::WARNING);
  3197. return FAILURE;
  3198. }
  3199. err = snd_pcm_hw_params_get_channels_min(hw_params, &value);
  3200. if (err < 0 ) {
  3201. snd_pcm_close(handle);
  3202. sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name);
  3203. error(RtError::WARNING);
  3204. return FAILURE;
  3205. }
  3206. device_channels = value;
  3207. if (device_channels < channels) device_channels = channels;
  3208. stream_.nDeviceChannels[mode] = device_channels;
  3209. // Set the device channels.
  3210. err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels);
  3211. if (err < 0) {
  3212. snd_pcm_close(handle);
  3213. sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
  3214. device_channels, name, snd_strerror(err));
  3215. error(RtError::WARNING);
  3216. return FAILURE;
  3217. }
  3218. // Set the buffer number, which in ALSA is referred to as the "period".
  3219. int dir;
  3220. unsigned int periods = numberOfBuffers;
  3221. // Even though the hardware might allow 1 buffer, it won't work reliably.
  3222. if (periods < 2) periods = 2;
  3223. err = snd_pcm_hw_params_get_periods_min(hw_params, &value, &dir);
  3224. if (err < 0) {
  3225. snd_pcm_close(handle);
  3226. sprintf(message_, "RtApiAlsa: error getting min periods on device (%s): %s.",
  3227. name, snd_strerror(err));
  3228. error(RtError::WARNING);
  3229. return FAILURE;
  3230. }
  3231. if (value > periods) periods = value;
  3232. err = snd_pcm_hw_params_get_periods_max(hw_params, &value, &dir);
  3233. if (err < 0) {
  3234. snd_pcm_close(handle);
  3235. sprintf(message_, "RtApiAlsa: error getting max periods on device (%s): %s.",
  3236. name, snd_strerror(err));
  3237. error(RtError::WARNING);
  3238. return FAILURE;
  3239. }
  3240. if (value < periods) periods = value;
  3241. err = snd_pcm_hw_params_set_periods(handle, hw_params, periods, 0);
  3242. if (err < 0) {
  3243. snd_pcm_close(handle);
  3244. sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.",
  3245. name, snd_strerror(err));
  3246. error(RtError::WARNING);
  3247. return FAILURE;
  3248. }
  3249. // Set the buffer (or period) size.
  3250. snd_pcm_uframes_t period_size;
  3251. err = snd_pcm_hw_params_get_period_size_min(hw_params, &period_size, &dir);
  3252. if (err < 0) {
  3253. snd_pcm_close(handle);
  3254. sprintf(message_, "RtApiAlsa: error getting period size (%s): %s.",
  3255. name, snd_strerror(err));
  3256. error(RtError::WARNING);
  3257. return FAILURE;
  3258. }
  3259. if (*bufferSize < (int) period_size) *bufferSize = (int) period_size;
  3260. err = snd_pcm_hw_params_set_period_size(handle, hw_params, *bufferSize, 0);
  3261. if (err < 0) {
  3262. snd_pcm_close(handle);
  3263. sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.",
  3264. name, snd_strerror(err));
  3265. error(RtError::WARNING);
  3266. return FAILURE;
  3267. }
  3268. // If attempting to setup a duplex stream, the bufferSize parameter
  3269. // MUST be the same in both directions!
  3270. if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
  3271. sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
  3272. name );
  3273. error(RtError::DEBUG_WARNING);
  3274. return FAILURE;
  3275. }
  3276. stream_.bufferSize = *bufferSize;
  3277. // Install the hardware configuration
  3278. err = snd_pcm_hw_params(handle, hw_params);
  3279. if (err < 0) {
  3280. snd_pcm_close(handle);
  3281. sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.",
  3282. name, snd_strerror(err));
  3283. error(RtError::WARNING);
  3284. return FAILURE;
  3285. }
  3286. #if defined(__RTAUDIO_DEBUG__)
  3287. fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
  3288. snd_pcm_hw_params_dump(hw_params, out);
  3289. #endif
  3290. // Allocate the stream handle if necessary and then save.
  3291. snd_pcm_t **handles;
  3292. if ( stream_.apiHandle == 0 ) {
  3293. handles = (snd_pcm_t **) calloc(2, sizeof(snd_pcm_t *));
  3294. if ( handle == NULL ) {
  3295. sprintf(message_, "RtApiAlsa: error allocating handle memory (%s).",
  3296. devices_[device].name.c_str());
  3297. goto error;
  3298. }
  3299. stream_.apiHandle = (void *) handles;
  3300. handles[0] = 0;
  3301. handles[1] = 0;
  3302. }
  3303. else {
  3304. handles = (snd_pcm_t **) stream_.apiHandle;
  3305. }
  3306. handles[mode] = handle;
  3307. // Set flags for buffer conversion
  3308. stream_.doConvertBuffer[mode] = false;
  3309. if (stream_.userFormat != stream_.deviceFormat[mode])
  3310. stream_.doConvertBuffer[mode] = true;
  3311. if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
  3312. stream_.doConvertBuffer[mode] = true;
  3313. if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
  3314. stream_.doConvertBuffer[mode] = true;
  3315. // Allocate necessary internal buffers
  3316. if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
  3317. long buffer_bytes;
  3318. if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
  3319. buffer_bytes = stream_.nUserChannels[0];
  3320. else
  3321. buffer_bytes = stream_.nUserChannels[1];
  3322. buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  3323. if (stream_.userBuffer) free(stream_.userBuffer);
  3324. stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  3325. if (stream_.userBuffer == NULL) {
  3326. sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).",
  3327. devices_[device].name.c_str());
  3328. goto error;
  3329. }
  3330. }
  3331. if ( stream_.doConvertBuffer[mode] ) {
  3332. long buffer_bytes;
  3333. bool makeBuffer = true;
  3334. if ( mode == OUTPUT )
  3335. buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  3336. else { // mode == INPUT
  3337. buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
  3338. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  3339. long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  3340. if ( buffer_bytes < bytes_out ) makeBuffer = false;
  3341. }
  3342. }
  3343. if ( makeBuffer ) {
  3344. buffer_bytes *= *bufferSize;
  3345. if (stream_.deviceBuffer) free(stream_.deviceBuffer);
  3346. stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
  3347. if (stream_.deviceBuffer == NULL) {
  3348. sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).",
  3349. devices_[device].name.c_str());
  3350. goto error;
  3351. }
  3352. }
  3353. }
  3354. stream_.device[mode] = device;
  3355. stream_.state = STREAM_STOPPED;
  3356. if ( stream_.mode == OUTPUT && mode == INPUT )
  3357. // We had already set up an output stream.
  3358. stream_.mode = DUPLEX;
  3359. else
  3360. stream_.mode = mode;
  3361. stream_.nBuffers = periods;
  3362. stream_.sampleRate = sampleRate;
  3363. return SUCCESS;
  3364. error:
  3365. if (handles) {
  3366. if (handles[0])
  3367. snd_pcm_close(handles[0]);
  3368. if (handles[1])
  3369. snd_pcm_close(handles[1]);
  3370. free(handles);
  3371. stream_.apiHandle = 0;
  3372. }
  3373. if (stream_.userBuffer) {
  3374. free(stream_.userBuffer);
  3375. stream_.userBuffer = 0;
  3376. }
  3377. error(RtError::WARNING);
  3378. return FAILURE;
  3379. }
  3380. void RtApiAlsa :: closeStream()
  3381. {
  3382. // We don't want an exception to be thrown here because this
  3383. // function is called by our class destructor. So, do our own
  3384. // stream check.
  3385. if ( stream_.mode == UNINITIALIZED ) {
  3386. sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
  3387. error(RtError::WARNING);
  3388. return;
  3389. }
  3390. snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  3391. if (stream_.state == STREAM_RUNNING) {
  3392. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
  3393. snd_pcm_drop(handle[0]);
  3394. if (stream_.mode == INPUT || stream_.mode == DUPLEX)
  3395. snd_pcm_drop(handle[1]);
  3396. stream_.state = STREAM_STOPPED;
  3397. }
  3398. if (stream_.callbackInfo.usingCallback) {
  3399. stream_.callbackInfo.usingCallback = false;
  3400. pthread_join(stream_.callbackInfo.thread, NULL);
  3401. }
  3402. if (handle) {
  3403. if (handle[0]) snd_pcm_close(handle[0]);
  3404. if (handle[1]) snd_pcm_close(handle[1]);
  3405. free(handle);
  3406. handle = 0;
  3407. }
  3408. if (stream_.userBuffer) {
  3409. free(stream_.userBuffer);
  3410. stream_.userBuffer = 0;
  3411. }
  3412. if (stream_.deviceBuffer) {
  3413. free(stream_.deviceBuffer);
  3414. stream_.deviceBuffer = 0;
  3415. }
  3416. stream_.mode = UNINITIALIZED;
  3417. }
  3418. void RtApiAlsa :: startStream()
  3419. {
  3420. // This method calls snd_pcm_prepare if the device isn't already in that state.
  3421. verifyStream();
  3422. if (stream_.state == STREAM_RUNNING) return;
  3423. MUTEX_LOCK(&stream_.mutex);
  3424. int err;
  3425. snd_pcm_state_t state;
  3426. snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  3427. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  3428. state = snd_pcm_state(handle[0]);
  3429. if (state != SND_PCM_STATE_PREPARED) {
  3430. err = snd_pcm_prepare(handle[0]);
  3431. if (err < 0) {
  3432. sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
  3433. devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
  3434. MUTEX_UNLOCK(&stream_.mutex);
  3435. error(RtError::DRIVER_ERROR);
  3436. }
  3437. }
  3438. }
  3439. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  3440. state = snd_pcm_state(handle[1]);
  3441. if (state != SND_PCM_STATE_PREPARED) {
  3442. err = snd_pcm_prepare(handle[1]);
  3443. if (err < 0) {
  3444. sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
  3445. devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
  3446. MUTEX_UNLOCK(&stream_.mutex);
  3447. error(RtError::DRIVER_ERROR);
  3448. }
  3449. }
  3450. }
  3451. stream_.state = STREAM_RUNNING;
  3452. MUTEX_UNLOCK(&stream_.mutex);
  3453. }
  3454. void RtApiAlsa :: stopStream()
  3455. {
  3456. verifyStream();
  3457. if (stream_.state == STREAM_STOPPED) return;
  3458. // Change the state before the lock to improve shutdown response
  3459. // when using a callback.
  3460. stream_.state = STREAM_STOPPED;
  3461. MUTEX_LOCK(&stream_.mutex);
  3462. int err;
  3463. snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  3464. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  3465. err = snd_pcm_drain(handle[0]);
  3466. if (err < 0) {
  3467. sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
  3468. devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
  3469. MUTEX_UNLOCK(&stream_.mutex);
  3470. error(RtError::DRIVER_ERROR);
  3471. }
  3472. }
  3473. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  3474. err = snd_pcm_drain(handle[1]);
  3475. if (err < 0) {
  3476. sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
  3477. devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
  3478. MUTEX_UNLOCK(&stream_.mutex);
  3479. error(RtError::DRIVER_ERROR);
  3480. }
  3481. }
  3482. MUTEX_UNLOCK(&stream_.mutex);
  3483. }
  3484. void RtApiAlsa :: abortStream()
  3485. {
  3486. verifyStream();
  3487. if (stream_.state == STREAM_STOPPED) return;
  3488. // Change the state before the lock to improve shutdown response
  3489. // when using a callback.
  3490. stream_.state = STREAM_STOPPED;
  3491. MUTEX_LOCK(&stream_.mutex);
  3492. int err;
  3493. snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  3494. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  3495. err = snd_pcm_drop(handle[0]);
  3496. if (err < 0) {
  3497. sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
  3498. devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
  3499. MUTEX_UNLOCK(&stream_.mutex);
  3500. error(RtError::DRIVER_ERROR);
  3501. }
  3502. }
  3503. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  3504. err = snd_pcm_drop(handle[1]);
  3505. if (err < 0) {
  3506. sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
  3507. devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
  3508. MUTEX_UNLOCK(&stream_.mutex);
  3509. error(RtError::DRIVER_ERROR);
  3510. }
  3511. }
  3512. MUTEX_UNLOCK(&stream_.mutex);
  3513. }
  3514. int RtApiAlsa :: streamWillBlock()
  3515. {
  3516. verifyStream();
  3517. if (stream_.state == STREAM_STOPPED) return 0;
  3518. MUTEX_LOCK(&stream_.mutex);
  3519. int err = 0, frames = 0;
  3520. snd_pcm_t **handle = (snd_pcm_t **) stream_.apiHandle;
  3521. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  3522. err = snd_pcm_avail_update(handle[0]);
  3523. if (err < 0) {
  3524. sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
  3525. devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
  3526. MUTEX_UNLOCK(&stream_.mutex);
  3527. error(RtError::DRIVER_ERROR);
  3528. }
  3529. }
  3530. frames = err;
  3531. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  3532. err = snd_pcm_avail_update(handle[1]);
  3533. if (err < 0) {
  3534. sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
  3535. devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
  3536. MUTEX_UNLOCK(&stream_.mutex);
  3537. error(RtError::DRIVER_ERROR);
  3538. }
  3539. if (frames > err) frames = err;
  3540. }
  3541. frames = stream_.bufferSize - frames;
  3542. if (frames < 0) frames = 0;
  3543. MUTEX_UNLOCK(&stream_.mutex);
  3544. return frames;
  3545. }
void RtApiAlsa :: tickStream()
{
  // Process one buffer of audio: invoke the user callback (if one is
  // registered), then write the output buffer to and/or read the input
  // buffer from the ALSA device(s), applying format conversion,
  // byte-swapping and (de)interleaving as configured at open time.
  verifyStream();

  int stopStream = 0;
  if (stream_.state == STREAM_STOPPED) {
    // Stream is stopped: when a callback thread drives us, sleep so the
    // thread doesn't busy-spin while waiting for a restart.
    if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
    return;
  }
  else if (stream_.callbackInfo.usingCallback) {
    // Let the user callback fill userBuffer (output) and/or consume the
    // previous input.  A non-zero return requests a stream stop (below).
    RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
    stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  }

  MUTEX_LOCK(&stream_.mutex);

  // The state might change while waiting on a mutex.
  if (stream_.state == STREAM_STOPPED)
    goto unlock;

  int err;
  char *buffer;
  int channels;
  snd_pcm_t **handle;
  RtAudioFormat format;
  handle = (snd_pcm_t **) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {

    // Setup parameters and do buffer conversion if necessary.
    if (stream_.doConvertBuffer[0]) {
      convertStreamBuffer(OUTPUT);
      buffer = stream_.deviceBuffer;
      channels = stream_.nDeviceChannels[0];
      format = stream_.deviceFormat[0];
    }
    else {
      buffer = stream_.userBuffer;
      channels = stream_.nUserChannels[0];
      format = stream_.userFormat;
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[0])
      byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

    // Write samples to device in interleaved/non-interleaved format.
    if (stream_.deInterleave[0]) {
      // Non-interleaved write: build per-channel pointers into the single
      // buffer, where each channel's frames are stored back to back.
      void *bufs[channels];
      size_t offset = stream_.bufferSize * formatBytes(format);
      for (int i=0; i<channels; i++)
        bufs[i] = (void *) (buffer + (i * offset));
      err = snd_pcm_writen(handle[0], bufs, stream_.bufferSize);
    }
    else
      err = snd_pcm_writei(handle[0], buffer, stream_.bufferSize);

    if (err < stream_.bufferSize) {
      // Either an error or underrun occurred.
      if (err == -EPIPE) {
        snd_pcm_state_t state = snd_pcm_state(handle[0]);
        if (state == SND_PCM_STATE_XRUN) {
          // Underrun: warn and re-prepare the device so output can resume.
          sprintf(message_, "RtApiAlsa: underrun detected.");
          error(RtError::WARNING);
          err = snd_pcm_prepare(handle[0]);
          if (err < 0) {
            sprintf(message_, "RtApiAlsa: error preparing handle after underrun: %s.",
                    snd_strerror(err));
            MUTEX_UNLOCK(&stream_.mutex);
            error(RtError::DRIVER_ERROR);
          }
        }
        else {
          sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
                  snd_pcm_state_name(state));
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
        }
        // Skip the input half of a duplex tick after an xrun recovery.
        goto unlock;
      }
      else {
        sprintf(message_, "RtApiAlsa: audio write error for device (%s): %s.",
                devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);
      }
    }
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {

    // Setup parameters.
    if (stream_.doConvertBuffer[1]) {
      // Capture into the device buffer; converted to userBuffer below.
      buffer = stream_.deviceBuffer;
      channels = stream_.nDeviceChannels[1];
      format = stream_.deviceFormat[1];
    }
    else {
      buffer = stream_.userBuffer;
      channels = stream_.nUserChannels[1];
      format = stream_.userFormat;
    }

    // Read samples from device in interleaved/non-interleaved format.
    if (stream_.deInterleave[1]) {
      void *bufs[channels];
      size_t offset = stream_.bufferSize * formatBytes(format);
      for (int i=0; i<channels; i++)
        bufs[i] = (void *) (buffer + (i * offset));
      err = snd_pcm_readn(handle[1], bufs, stream_.bufferSize);
    }
    else
      err = snd_pcm_readi(handle[1], buffer, stream_.bufferSize);

    if (err < stream_.bufferSize) {
      // Either an error or overrun occurred.
      if (err == -EPIPE) {
        snd_pcm_state_t state = snd_pcm_state(handle[1]);
        if (state == SND_PCM_STATE_XRUN) {
          // Overrun: warn and re-prepare the capture device.
          sprintf(message_, "RtApiAlsa: overrun detected.");
          error(RtError::WARNING);
          err = snd_pcm_prepare(handle[1]);
          if (err < 0) {
            sprintf(message_, "RtApiAlsa: error preparing handle after overrun: %s.",
                    snd_strerror(err));
            MUTEX_UNLOCK(&stream_.mutex);
            error(RtError::DRIVER_ERROR);
          }
        }
        else {
          sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
                  snd_pcm_state_name(state));
          MUTEX_UNLOCK(&stream_.mutex);
          error(RtError::DRIVER_ERROR);
        }
        goto unlock;
      }
      else {
        sprintf(message_, "RtApiAlsa: audio read error for device (%s): %s.",
                devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
        MUTEX_UNLOCK(&stream_.mutex);
        error(RtError::DRIVER_ERROR);
      }
    }

    // Do byte swapping if necessary.
    if (stream_.doByteSwap[1])
      byteSwapBuffer(buffer, stream_.bufferSize * channels, format);

    // Do buffer conversion if necessary.
    if (stream_.doConvertBuffer[1])
      convertStreamBuffer(INPUT);
  }

 unlock:
  MUTEX_UNLOCK(&stream_.mutex);

  // Honor a stop request returned by the user callback.
  if (stream_.callbackInfo.usingCallback && stopStream)
    this->stopStream();
}
  3689. void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData)
  3690. {
  3691. verifyStream();
  3692. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  3693. if ( info->usingCallback ) {
  3694. sprintf(message_, "RtApiAlsa: A callback is already set for this stream!");
  3695. error(RtError::WARNING);
  3696. return;
  3697. }
  3698. info->callback = (void *) callback;
  3699. info->userData = userData;
  3700. info->usingCallback = true;
  3701. info->object = (void *) this;
  3702. // Set the thread attributes for joinable and realtime scheduling
  3703. // priority. The higher priority will only take affect if the
  3704. // program is run as root or suid.
  3705. pthread_attr_t attr;
  3706. pthread_attr_init(&attr);
  3707. pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  3708. pthread_attr_setschedpolicy(&attr, SCHED_RR);
  3709. int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo);
  3710. pthread_attr_destroy(&attr);
  3711. if (err) {
  3712. info->usingCallback = false;
  3713. sprintf(message_, "RtApiAlsa: error starting callback thread!");
  3714. error(RtError::THREAD_ERROR);
  3715. }
  3716. }
  3717. void RtApiAlsa :: cancelStreamCallback()
  3718. {
  3719. verifyStream();
  3720. if (stream_.callbackInfo.usingCallback) {
  3721. if (stream_.state == STREAM_RUNNING)
  3722. stopStream();
  3723. MUTEX_LOCK(&stream_.mutex);
  3724. stream_.callbackInfo.usingCallback = false;
  3725. pthread_join(stream_.callbackInfo.thread, NULL);
  3726. stream_.callbackInfo.thread = 0;
  3727. stream_.callbackInfo.callback = NULL;
  3728. stream_.callbackInfo.userData = NULL;
  3729. MUTEX_UNLOCK(&stream_.mutex);
  3730. }
  3731. }
  3732. extern "C" void *alsaCallbackHandler(void *ptr)
  3733. {
  3734. CallbackInfo *info = (CallbackInfo *) ptr;
  3735. RtApiAlsa *object = (RtApiAlsa *) info->object;
  3736. bool *usingCallback = &info->usingCallback;
  3737. while ( *usingCallback ) {
  3738. try {
  3739. object->tickStream();
  3740. }
  3741. catch (RtError &exception) {
  3742. fprintf(stderr, "\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n",
  3743. exception.getMessageString());
  3744. break;
  3745. }
  3746. }
  3747. pthread_exit(NULL);
  3748. }
  3749. //******************** End of __LINUX_ALSA__ *********************//
  3750. #endif
  3751. #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
  3752. // The ASIO API is designed around a callback scheme, so this
  3753. // implementation is similar to that used for OS-X CoreAudio and Linux
  3754. // Jack. The primary constraint with ASIO is that it only allows
  3755. // access to a single driver at a time. Thus, it is not possible to
  3756. // have more than one simultaneous RtAudio stream.
  3757. //
  3758. // This implementation also requires a number of external ASIO files
  3759. // and a few global variables. The ASIO callback scheme does not
  3760. // allow for the passing of user data, so we must create a global
  3761. // pointer to our callbackInfo structure.
  3762. //
  3763. // On unix systems, we make use of a pthread condition variable.
  3764. // Since there is no equivalent in Windows, I hacked something based
  3765. // on information found in
  3766. // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
  3767. #include "asio/asiosys.h"
  3768. #include "asio/asio.h"
  3769. #include "asio/asiodrivers.h"
  3770. #include <math.h>
// ASIO's callback scheme passes no user-data pointer, so the driver
// state and the active stream's callback info must live at file scope.
AsioDrivers drivers;              // ASIO driver enumerator/loader
ASIOCallbacks asioCallbacks;      // callback table handed to the driver
ASIODriverInfo driverInfo;        // filled in by ASIOInit()
CallbackInfo *asioCallbackInfo;   // global pointer to the open stream's callback info
  3775. struct AsioHandle {
  3776. bool stopStream;
  3777. ASIOBufferInfo *bufferInfos;
  3778. HANDLE condition;
  3779. AsioHandle()
  3780. :stopStream(false), bufferInfos(0) {}
  3781. };
RtApiAsio :: RtApiAsio()
{
  // Enumerate the installed ASIO drivers; having none is a fatal error.
  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}
RtApiAsio :: ~RtApiAsio()
{
  // Close any stream still open so driver and buffer resources are released.
  if ( stream_.mode != UNINITIALIZED ) closeStream();
}
  3794. void RtApiAsio :: initialize(void)
  3795. {
  3796. nDevices_ = drivers.asioGetNumDev();
  3797. if (nDevices_ <= 0) return;
  3798. // Create device structures and write device driver names to each.
  3799. RtApiDevice device;
  3800. char name[128];
  3801. for (int i=0; i<nDevices_; i++) {
  3802. if ( drivers.asioGetDriverName( i, name, 128 ) == 0 ) {
  3803. device.name.erase();
  3804. device.name.append( (const char *)name, strlen(name)+1);
  3805. devices_.push_back(device);
  3806. }
  3807. else {
  3808. sprintf(message_, "RtApiAsio: error getting driver name for device index %d!", i);
  3809. error(RtError::WARNING);
  3810. }
  3811. }
  3812. nDevices_ = (int) devices_.size();
  3813. drivers.removeCurrentDriver();
  3814. driverInfo.asioVersion = 2;
  3815. // See note in DirectSound implementation about GetDesktopWindow().
  3816. driverInfo.sysRef = GetForegroundWindow();
  3817. }
  3818. void RtApiAsio :: probeDeviceInfo(RtApiDevice *info)
  3819. {
  3820. // Don't probe if a stream is already open.
  3821. if ( stream_.mode != UNINITIALIZED ) {
  3822. sprintf(message_, "RtApiAsio: unable to probe driver while a stream is open.");
  3823. error(RtError::DEBUG_WARNING);
  3824. return;
  3825. }
  3826. if ( !drivers.loadDriver( (char *)info->name.c_str() ) ) {
  3827. sprintf(message_, "RtApiAsio: error loading driver (%s).", info->name.c_str());
  3828. error(RtError::DEBUG_WARNING);
  3829. return;
  3830. }
  3831. ASIOError result = ASIOInit( &driverInfo );
  3832. if ( result != ASE_OK ) {
  3833. char details[32];
  3834. if ( result == ASE_HWMalfunction )
  3835. sprintf(details, "hardware malfunction");
  3836. else if ( result == ASE_NoMemory )
  3837. sprintf(details, "no memory");
  3838. else if ( result == ASE_NotPresent )
  3839. sprintf(details, "driver/hardware not present");
  3840. else
  3841. sprintf(details, "unspecified");
  3842. sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", details, info->name.c_str());
  3843. error(RtError::DEBUG_WARNING);
  3844. return;
  3845. }
  3846. // Determine the device channel information.
  3847. long inputChannels, outputChannels;
  3848. result = ASIOGetChannels( &inputChannels, &outputChannels );
  3849. if ( result != ASE_OK ) {
  3850. drivers.removeCurrentDriver();
  3851. sprintf(message_, "RtApiAsio: error getting input/output channel count (%s).", info->name.c_str());
  3852. error(RtError::DEBUG_WARNING);
  3853. return;
  3854. }
  3855. info->maxOutputChannels = outputChannels;
  3856. if ( outputChannels > 0 ) info->minOutputChannels = 1;
  3857. info->maxInputChannels = inputChannels;
  3858. if ( inputChannels > 0 ) info->minInputChannels = 1;
  3859. // If device opens for both playback and capture, we determine the channels.
  3860. if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
  3861. info->hasDuplexSupport = true;
  3862. info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
  3863. info->maxInputChannels : info->maxOutputChannels;
  3864. info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
  3865. info->minInputChannels : info->minOutputChannels;
  3866. }
  3867. // Determine the supported sample rates.
  3868. info->sampleRates.clear();
  3869. for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
  3870. result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
  3871. if ( result == ASE_OK )
  3872. info->sampleRates.push_back( SAMPLE_RATES[i] );
  3873. }
  3874. if (info->sampleRates.size() == 0) {
  3875. drivers.removeCurrentDriver();
  3876. sprintf( message_, "RtApiAsio: No supported sample rates found for driver (%s).", info->name.c_str() );
  3877. error(RtError::DEBUG_WARNING);
  3878. return;
  3879. }
  3880. // Determine supported data types ... just check first channel and assume rest are the same.
  3881. ASIOChannelInfo channelInfo;
  3882. channelInfo.channel = 0;
  3883. channelInfo.isInput = true;
  3884. if ( info->maxInputChannels <= 0 ) channelInfo.isInput = false;
  3885. result = ASIOGetChannelInfo( &channelInfo );
  3886. if ( result != ASE_OK ) {
  3887. drivers.removeCurrentDriver();
  3888. sprintf(message_, "RtApiAsio: error getting driver (%s) channel information.", info->name.c_str());
  3889. error(RtError::DEBUG_WARNING);
  3890. return;
  3891. }
  3892. if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
  3893. info->nativeFormats |= RTAUDIO_SINT16;
  3894. else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
  3895. info->nativeFormats |= RTAUDIO_SINT32;
  3896. else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
  3897. info->nativeFormats |= RTAUDIO_FLOAT32;
  3898. else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
  3899. info->nativeFormats |= RTAUDIO_FLOAT64;
  3900. // Check that we have at least one supported format.
  3901. if (info->nativeFormats == 0) {
  3902. drivers.removeCurrentDriver();
  3903. sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
  3904. info->name.c_str());
  3905. error(RtError::DEBUG_WARNING);
  3906. return;
  3907. }
  3908. info->probed = true;
  3909. drivers.removeCurrentDriver();
  3910. }
  3911. void bufferSwitch(long index, ASIOBool processNow)
  3912. {
  3913. RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
  3914. try {
  3915. object->callbackEvent( index );
  3916. }
  3917. catch (RtError &exception) {
  3918. fprintf(stderr, "\nRtApiAsio: callback handler error (%s)!\n\n", exception.getMessageString());
  3919. return;
  3920. }
  3921. return;
  3922. }
  3923. void sampleRateChanged(ASIOSampleRate sRate)
  3924. {
  3925. // The ASIO documentation says that this usually only happens during
  3926. // external sync. Audio processing is not stopped by the driver,
  3927. // actual sample rate might not have even changed, maybe only the
  3928. // sample rate status of an AES/EBU or S/PDIF digital input at the
  3929. // audio device.
  3930. RtAudio *object = (RtAudio *) asioCallbackInfo->object;
  3931. try {
  3932. object->stopStream();
  3933. }
  3934. catch (RtError &exception) {
  3935. fprintf(stderr, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception.getMessageString());
  3936. return;
  3937. }
  3938. fprintf(stderr, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate);
  3939. }
  3940. long asioMessages(long selector, long value, void* message, double* opt)
  3941. {
  3942. long ret = 0;
  3943. switch(selector) {
  3944. case kAsioSelectorSupported:
  3945. if(value == kAsioResetRequest
  3946. || value == kAsioEngineVersion
  3947. || value == kAsioResyncRequest
  3948. || value == kAsioLatenciesChanged
  3949. // The following three were added for ASIO 2.0, you don't
  3950. // necessarily have to support them.
  3951. || value == kAsioSupportsTimeInfo
  3952. || value == kAsioSupportsTimeCode
  3953. || value == kAsioSupportsInputMonitor)
  3954. ret = 1L;
  3955. break;
  3956. case kAsioResetRequest:
    // Defer the task and perform the reset of the driver during the
    // next "safe" situation.  You cannot reset the driver right now,
    // as this code is called from the driver.  Resetting the driver is
    // done by completely destructing it, i.e. ASIOStop(),
    // ASIODisposeBuffers(), destruction.  Afterwards you initialize the
    // driver again.
  3963. fprintf(stderr, "\nRtApiAsio: driver reset requested!!!");
  3964. ret = 1L;
  3965. break;
  3966. case kAsioResyncRequest:
  3967. // This informs the application that the driver encountered some
  3968. // non-fatal data loss. It is used for synchronization purposes
  3969. // of different media. Added mainly to work around the Win16Mutex
  3970. // problems in Windows 95/98 with the Windows Multimedia system,
  3971. // which could lose data because the Mutex was held too long by
  3972. // another thread. However a driver can issue it in other
  3973. // situations, too.
  3974. fprintf(stderr, "\nRtApiAsio: driver resync requested!!!");
  3975. ret = 1L;
  3976. break;
  3977. case kAsioLatenciesChanged:
  3978. // This will inform the host application that the drivers were
  3979. // latencies changed. Beware, it this does not mean that the
  3980. // buffer sizes have changed! You might need to update internal
  3981. // delay data.
  3982. fprintf(stderr, "\nRtApiAsio: driver latency may have changed!!!");
  3983. ret = 1L;
  3984. break;
  3985. case kAsioEngineVersion:
  3986. // Return the supported ASIO version of the host application. If
  3987. // a host application does not implement this selector, ASIO 1.0
  3988. // is assumed by the driver.
  3989. ret = 2L;
  3990. break;
  3991. case kAsioSupportsTimeInfo:
  3992. // Informs the driver whether the
  3993. // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
  3994. // For compatibility with ASIO 1.0 drivers the host application
  3995. // should always support the "old" bufferSwitch method, too.
  3996. ret = 0;
  3997. break;
  3998. case kAsioSupportsTimeCode:
  3999. // Informs the driver wether application is interested in time
  4000. // code info. If an application does not need to know about time
  4001. // code, the driver has less work to do.
  4002. ret = 0;
  4003. break;
  4004. }
  4005. return ret;
  4006. }
bool RtApiAsio :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                  int sampleRate, RtAudioFormat format,
                                  int *bufferSize, int numberOfBuffers)
{
  // Open (or extend to duplex) an ASIO stream on the given device.
  //
  // Parameters:
  //   device          index into devices_ of the ASIO driver to use
  //   mode            OUTPUT or INPUT; the second call of a duplex setup
  //                   passes INPUT while stream_.mode is already OUTPUT
  //   channels        channel count requested for this direction
  //   sampleRate      requested sample rate (Hz)
  //   format          the user's RtAudio data format
  //   bufferSize      in/out: requested buffer size in frames, adjusted
  //                   below to the driver's min/max/granularity limits
  //   numberOfBuffers unused here — ASIO always double-buffers (nBuffers=2)
  //
  // Returns SUCCESS, or FAILURE after recording a warning/debug warning.

  // For ASIO, a duplex stream MUST use the same driver.
  if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
    sprintf(message_, "RtApiAsio: duplex stream must use the same device for input and output.");
    error(RtError::WARNING);
    return FAILURE;
  }

  // Only load the driver once for duplex stream (the OUTPUT half of a
  // duplex setup already loaded and initialized it).
  ASIOError result;
  if ( mode != INPUT || stream_.mode != OUTPUT ) {
    if ( !drivers.loadDriver( (char *)devices_[device].name.c_str() ) ) {
      sprintf(message_, "RtApiAsio: error loading driver (%s).", devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }
    result = ASIOInit( &driverInfo );
    if ( result != ASE_OK ) {
      // Translate the ASE_* code into a human-readable string.
      char details[32];
      if ( result == ASE_HWMalfunction )
        sprintf(details, "hardware malfunction");
      else if ( result == ASE_NoMemory )
        sprintf(details, "no memory");
      else if ( result == ASE_NotPresent )
        sprintf(details, "driver/hardware not present");
      else
        sprintf(details, "unspecified");
      sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", details, devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }
  }

  // Check the device channel count.
  long inputChannels, outputChannels;
  result = ASIOGetChannels( &inputChannels, &outputChannels );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: error getting input/output channel count (%s).",
            devices_[device].name.c_str());
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }
  if ( ( mode == OUTPUT && channels > outputChannels) ||
       ( mode == INPUT && channels > inputChannels) ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
            devices_[device].name.c_str(), channels);
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }
  // ASIO streams use the requested channel count directly for both the
  // device and user sides of this direction.
  stream_.nDeviceChannels[mode] = channels;
  stream_.nUserChannels[mode] = channels;

  // Verify the sample rate is supported.
  result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
            devices_[device].name.c_str(), sampleRate);
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  // Set the sample rate.
  result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) error setting sample rate (%d).",
            devices_[device].name.c_str(), sampleRate);
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  // Determine the driver data type (queried from channel 0 of the
  // relevant direction; all channels are assumed to share one format).
  ASIOChannelInfo channelInfo;
  channelInfo.channel = 0;
  if ( mode == OUTPUT ) channelInfo.isInput = false;
  else channelInfo.isInput = true;
  result = ASIOGetChannelInfo( &channelInfo );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) error getting data format.",
            devices_[device].name.c_str());
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  // Assuming WINDOWS host is always little-endian, so any MSB driver
  // format requires a byte swap.
  stream_.doByteSwap[mode] = false;
  stream_.userFormat = format;
  stream_.deviceFormat[mode] = 0;   // 0 marks "unsupported" until matched below
  if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
  }
  else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
    stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
  }
  else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
    if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
  }
  else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
    if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
  }
  if ( stream_.deviceFormat[mode] == 0 ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
            devices_[device].name.c_str());
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  // Set the buffer size.  For a duplex stream, this will end up
  // setting the buffer size based on the input constraints, which
  // should be ok.
  long minSize, maxSize, preferSize, granularity;
  result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    sprintf(message_, "RtApiAsio: driver (%s) error getting buffer size.",
            devices_[device].name.c_str());
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }
  if ( *bufferSize < minSize ) *bufferSize = minSize;
  else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
  else if ( granularity == -1 ) {
    // granularity == -1 means the driver only accepts power-of-two sizes.
    // Make sure bufferSize is a power of two.
    double power = log10( (double) *bufferSize ) / log10( 2.0 );
    *bufferSize = (int) pow( 2.0, floor(power+0.5) );
    if ( *bufferSize < minSize ) *bufferSize = minSize;
    else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
    // NOTE(review): this final branch overwrites the rounded power-of-two
    // value with preferSize whenever it is already within [min,max] —
    // looks suspicious; confirm the intended behavior.
    else *bufferSize = preferSize;
  }

  // A duplex stream must use one buffer size for both directions; warn if
  // the input constraints disagree with the already-opened output.
  if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize )
    std::cerr << "Possible input/output buffersize discrepancy!" << std::endl;

  stream_.bufferSize = *bufferSize;
  stream_.nBuffers = 2;   // ASIO always double-buffers

  // ASIO always uses deinterleaved channels.
  stream_.deInterleave[mode] = true;

  // Allocate, if necessary, our AsioHandle structure for the stream.
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( handle == 0 ) {
    handle = (AsioHandle *) calloc(1, sizeof(AsioHandle));
    if ( handle == NULL ) {
      drivers.removeCurrentDriver();
      sprintf(message_, "RtApiAsio: error allocating AsioHandle memory (%s).",
              devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }
    handle->bufferInfos = 0;
    // Create a manual-reset event (used by the blocking tickStream() API).
    // NOTE(review): the CreateEvent return value is not checked — confirm
    // whether a NULL handle should be treated as an error here.
    handle->condition = CreateEvent(NULL, // no security
                                    TRUE, // manual-reset
                                    FALSE, // non-signaled initially
                                    NULL); // unnamed
    stream_.apiHandle = (void *) handle;
  }

  // Create the ASIO internal buffers.  Since RtAudio sets up input
  // and output separately, we'll have to dispose of previously
  // created output buffers for a duplex stream.
  if ( mode == INPUT && stream_.mode == OUTPUT ) {
    ASIODisposeBuffers();
    if ( handle->bufferInfos ) free( handle->bufferInfos );
  }

  // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
  int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
  handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
  if (handle->bufferInfos == NULL) {
    sprintf(message_, "RtApiAsio: error allocating bufferInfo memory (%s).",
            devices_[device].name.c_str());
    goto error;
  }
  // Outputs first (nDeviceChannels[0]), then inputs (nDeviceChannels[1]);
  // 'infos' is declared (not initialized) so the goto above stays legal.
  ASIOBufferInfo *infos;
  infos = handle->bufferInfos;
  for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
    infos->isInput = ASIOFalse;
    infos->channelNum = i;
    infos->buffers[0] = infos->buffers[1] = 0;
  }
  for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
    infos->isInput = ASIOTrue;
    infos->channelNum = i;
    infos->buffers[0] = infos->buffers[1] = 0;
  }

  // Set up the ASIO callback structure and create the ASIO data buffers.
  asioCallbacks.bufferSwitch = &bufferSwitch;
  asioCallbacks.sampleRateDidChange = &sampleRateChanged;
  asioCallbacks.asioMessage = &asioMessages;
  asioCallbacks.bufferSwitchTimeInfo = NULL;   // see kAsioSupportsTimeInfo above
  result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks);
  if ( result != ASE_OK ) {
    sprintf(message_, "RtApiAsio: driver (%s) error creating buffers.",
            devices_[device].name.c_str());
    goto error;
  }

  // Set flags for buffer conversion: needed when the user format differs
  // from the device format, when channels are dropped, or to deinterleave
  // multi-channel user data (ASIO is always deinterleaved).
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers: the user buffer is sized for the
  // larger of the two directions.
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];
    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiAsio: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  // Device-side conversion buffer, reused between directions when the
  // existing (output) buffer is already large enough.
  if ( stream_.doConvertBuffer[mode] ) {
    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }
    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiAsio: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }

  // Commit the new stream state.
  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
  else
    stream_.mode = mode;
  stream_.sampleRate = sampleRate;
  // Point the file-scope callback info pointer at this stream so the
  // static ASIO callbacks can find us.
  asioCallbackInfo = &stream_.callbackInfo;
  stream_.callbackInfo.object = (void *) this;
  return SUCCESS;

 error:
  // Shared cleanup for all post-driver-load failures: drop the ASIO
  // buffers, unload the driver, and free our handle/user buffer.
  // message_ was already set by the jump site; report it as a warning.
  ASIODisposeBuffers();
  drivers.removeCurrentDriver();
  if ( handle ) {
    CloseHandle( handle->condition );
    if ( handle->bufferInfos )
      free( handle->bufferInfos );
    free( handle );
    stream_.apiHandle = 0;
  }
  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }
  error(RtError::WARNING);
  return FAILURE;
}
  4279. void RtApiAsio :: closeStream()
  4280. {
  4281. // We don't want an exception to be thrown here because this
  4282. // function is called by our class destructor. So, do our own
  4283. // streamId check.
  4284. if ( stream_.mode == UNINITIALIZED ) {
  4285. sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
  4286. error(RtError::WARNING);
  4287. return;
  4288. }
  4289. if (stream_.state == STREAM_RUNNING)
  4290. ASIOStop();
  4291. ASIODisposeBuffers();
  4292. drivers.removeCurrentDriver();
  4293. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  4294. if ( handle ) {
  4295. CloseHandle( handle->condition );
  4296. if ( handle->bufferInfos )
  4297. free( handle->bufferInfos );
  4298. free( handle );
  4299. stream_.apiHandle = 0;
  4300. }
  4301. if (stream_.userBuffer) {
  4302. free(stream_.userBuffer);
  4303. stream_.userBuffer = 0;
  4304. }
  4305. if (stream_.deviceBuffer) {
  4306. free(stream_.deviceBuffer);
  4307. stream_.deviceBuffer = 0;
  4308. }
  4309. stream_.mode = UNINITIALIZED;
  4310. }
  4311. void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
  4312. {
  4313. verifyStream();
  4314. if ( stream_.callbackInfo.usingCallback ) {
  4315. sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
  4316. error(RtError::WARNING);
  4317. return;
  4318. }
  4319. stream_.callbackInfo.callback = (void *) callback;
  4320. stream_.callbackInfo.userData = userData;
  4321. stream_.callbackInfo.usingCallback = true;
  4322. }
  4323. void RtApiAsio :: cancelStreamCallback()
  4324. {
  4325. verifyStream();
  4326. if (stream_.callbackInfo.usingCallback) {
  4327. if (stream_.state == STREAM_RUNNING)
  4328. stopStream();
  4329. MUTEX_LOCK(&stream_.mutex);
  4330. stream_.callbackInfo.usingCallback = false;
  4331. stream_.callbackInfo.userData = NULL;
  4332. stream_.state = STREAM_STOPPED;
  4333. stream_.callbackInfo.callback = NULL;
  4334. MUTEX_UNLOCK(&stream_.mutex);
  4335. }
  4336. }
  4337. void RtApiAsio :: startStream()
  4338. {
  4339. verifyStream();
  4340. if (stream_.state == STREAM_RUNNING) return;
  4341. MUTEX_LOCK(&stream_.mutex);
  4342. ASIOError result = ASIOStart();
  4343. if ( result != ASE_OK ) {
  4344. sprintf(message_, "RtApiAsio: error starting device (%s).",
  4345. devices_[stream_.device[0]].name.c_str());
  4346. MUTEX_UNLOCK(&stream_.mutex);
  4347. error(RtError::DRIVER_ERROR);
  4348. }
  4349. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  4350. handle->stopStream = false;
  4351. stream_.state = STREAM_RUNNING;
  4352. MUTEX_UNLOCK(&stream_.mutex);
  4353. }
  4354. void RtApiAsio :: stopStream()
  4355. {
  4356. verifyStream();
  4357. if (stream_.state == STREAM_STOPPED) return;
  4358. // Change the state before the lock to improve shutdown response
  4359. // when using a callback.
  4360. stream_.state = STREAM_STOPPED;
  4361. MUTEX_LOCK(&stream_.mutex);
  4362. ASIOError result = ASIOStop();
  4363. if ( result != ASE_OK ) {
  4364. sprintf(message_, "RtApiAsio: error stopping device (%s).",
  4365. devices_[stream_.device[0]].name.c_str());
  4366. MUTEX_UNLOCK(&stream_.mutex);
  4367. error(RtError::DRIVER_ERROR);
  4368. }
  4369. MUTEX_UNLOCK(&stream_.mutex);
  4370. }
  4371. void RtApiAsio :: abortStream()
  4372. {
  4373. stopStream();
  4374. }
  4375. void RtApiAsio :: tickStream()
  4376. {
  4377. verifyStream();
  4378. if (stream_.state == STREAM_STOPPED)
  4379. return;
  4380. if (stream_.callbackInfo.usingCallback) {
  4381. sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
  4382. error(RtError::WARNING);
  4383. return;
  4384. }
  4385. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  4386. MUTEX_LOCK(&stream_.mutex);
  4387. // Release the stream_mutex here and wait for the event
  4388. // to become signaled by the callback process.
  4389. MUTEX_UNLOCK(&stream_.mutex);
  4390. WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
  4391. ResetEvent( handle->condition );
  4392. }
void RtApiAsio :: callbackEvent(long bufferIndex)
{
  // Per-buffer processing, invoked from the static ASIO bufferSwitch
  // callback.  'bufferIndex' selects which half of each channel's
  // double buffer is ready to be filled (output) and/or read (input).
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data.
  if ( info->usingCallback ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    // A non-zero callback return requests a stop on the NEXT event
    // (handled at the top of this function).
    if ( callback(stream_.userBuffer, stream_.bufferSize, info->userData) )
      handle->stopStream = true;
  }

  int bufferBytes, j;
  // bufferInfos holds output channels first, then input channels.
  int nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];

  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    // Bytes per channel for one buffer of output data.
    bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[0]);
    if (stream_.doConvertBuffer[0]) {
      // Convert user data into the device buffer, byte-swap if the driver
      // is big-endian, then copy channel-by-channel into the ASIO buffers.
      convertStreamBuffer(OUTPUT);
      if ( stream_.doByteSwap[0] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[0],
                       stream_.deviceFormat[0]);
      // Always de-interleave ASIO output data.
      j = 0;
      for ( int i=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput != ASIOTrue )
          memcpy(handle->bufferInfos[i].buffers[bufferIndex],
                 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
      }
    }
    else { // single channel only
      // No conversion needed: swap in place (if required) and copy the
      // user buffer straight into the first output channel.
      if (stream_.doByteSwap[0])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[0],
                       stream_.userFormat);
      for ( int i=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput != ASIOTrue ) {
          memcpy(handle->bufferInfos[i].buffers[bufferIndex], stream_.userBuffer, bufferBytes );
          break;
        }
      }
    }
  }

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    // Bytes per channel for one buffer of input data.
    bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
    if (stream_.doConvertBuffer[1]) {
      // Gather the ASIO input channels into the device buffer, swap if
      // needed, then convert into the user's format/interleaving.
      // Always interleave ASIO input data.
      j = 0;
      for ( int i=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput == ASIOTrue )
          memcpy(&stream_.deviceBuffer[j++*bufferBytes],
                 handle->bufferInfos[i].buffers[bufferIndex],
                 bufferBytes );
      }
      if ( stream_.doByteSwap[1] )
        byteSwapBuffer(stream_.deviceBuffer,
                       stream_.bufferSize * stream_.nDeviceChannels[1],
                       stream_.deviceFormat[1]);
      convertStreamBuffer(INPUT);
    }
    else { // single channel only
      // Copy the first input channel directly to the user buffer and swap
      // in place if required.
      for ( int i=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
          memcpy(stream_.userBuffer,
                 handle->bufferInfos[i].buffers[bufferIndex],
                 bufferBytes );
          break;
        }
      }
      if (stream_.doByteSwap[1])
        byteSwapBuffer(stream_.userBuffer,
                       stream_.bufferSize * stream_.nUserChannels[1],
                       stream_.userFormat);
    }
  }

  // Blocking API: wake the thread waiting in tickStream().
  if ( !info->usingCallback )
    SetEvent( handle->condition );

  MUTEX_UNLOCK(&stream_.mutex);
}
  4481. //******************** End of __WINDOWS_ASIO__ *********************//
  4482. #endif
  4483. #if defined(__WINDOWS_DS__) // Windows DirectSound API
  4484. #include <dsound.h>
  4485. // A structure to hold various information related to the DirectSound
  4486. // API implementation.
struct DsHandle {
  void *object;        // opaque DirectSound interface pointer — exact type not visible in this chunk; TODO confirm against the DS open/close code
  void *buffer;        // presumably the DirectSound playback/capture buffer — verify where it is assigned
  UINT bufferPointer;  // current offset within 'buffer' (presumably bytes — confirm in the buffer-servicing code)
};
  4492. // Declarations for utility functions, callbacks, and structures
  4493. // specific to the DirectSound implementation.
  4494. static bool CALLBACK deviceCountCallback(LPGUID lpguid,
  4495. LPCSTR lpcstrDescription,
  4496. LPCSTR lpcstrModule,
  4497. LPVOID lpContext);
  4498. static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
  4499. LPCSTR lpcstrDescription,
  4500. LPCSTR lpcstrModule,
  4501. LPVOID lpContext);
  4502. static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
  4503. LPCSTR lpcstrDescription,
  4504. LPCSTR lpcstrModule,
  4505. LPVOID lpContext);
  4506. static bool CALLBACK deviceIdCallback(LPGUID lpguid,
  4507. LPCSTR lpcstrDescription,
  4508. LPCSTR lpcstrModule,
  4509. LPVOID lpContext);
  4510. static char* getErrorString(int code);
  4511. extern "C" unsigned __stdcall callbackHandler(void *ptr);
struct enum_info {
  char name[64];   // device name as filled in by the DS enumeration callbacks (compared with strncmp(..., 64) elsewhere)
  LPGUID id;       // device GUID from the enumerator; passed to DirectSound(Capture)Create in probeDeviceInfo
  bool isInput;    // true for capture devices, false for playback (set in RtApiDs::initialize)
  bool isValid;    // set by the enumeration callbacks when the device is usable — presumably; callback bodies are outside this chunk
};
  4518. RtApiDs :: RtApiDs()
  4519. {
  4520. this->initialize();
  4521. if (nDevices_ <= 0) {
  4522. sprintf(message_, "RtApiDs: no Windows DirectSound audio devices found!");
  4523. error(RtError::NO_DEVICES_FOUND);
  4524. }
  4525. }
  4526. RtApiDs :: ~RtApiDs()
  4527. {
  4528. if ( stream_.mode != UNINITIALIZED ) closeStream();
  4529. }
  4530. int RtApiDs :: getDefaultInputDevice(void)
  4531. {
  4532. enum_info info;
  4533. info.name[0] = '\0';
  4534. // Enumerate through devices to find the default output.
  4535. HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
  4536. if ( FAILED(result) ) {
  4537. sprintf(message_, "RtApiDs: Error performing default input device enumeration: %s.",
  4538. getErrorString(result));
  4539. error(RtError::WARNING);
  4540. return 0;
  4541. }
  4542. for ( int i=0; i<nDevices_; i++ ) {
  4543. if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
  4544. }
  4545. return 0;
  4546. }
  4547. int RtApiDs :: getDefaultOutputDevice(void)
  4548. {
  4549. enum_info info;
  4550. info.name[0] = '\0';
  4551. // Enumerate through devices to find the default output.
  4552. HRESULT result = DirectSoundEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
  4553. if ( FAILED(result) ) {
  4554. sprintf(message_, "RtApiDs: Error performing default output device enumeration: %s.",
  4555. getErrorString(result));
  4556. error(RtError::WARNING);
  4557. return 0;
  4558. }
  4559. for ( int i=0; i<nDevices_; i++ )
  4560. if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
  4561. return 0;
  4562. }
  4563. void RtApiDs :: initialize(void)
  4564. {
  4565. int i, ins = 0, outs = 0, count = 0;
  4566. HRESULT result;
  4567. nDevices_ = 0;
  4568. // Count DirectSound devices.
  4569. result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &outs);
  4570. if ( FAILED(result) ) {
  4571. sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
  4572. getErrorString(result));
  4573. error(RtError::DRIVER_ERROR);
  4574. }
  4575. // Count DirectSoundCapture devices.
  4576. result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &ins);
  4577. if ( FAILED(result) ) {
  4578. sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
  4579. getErrorString(result));
  4580. error(RtError::DRIVER_ERROR);
  4581. }
  4582. count = ins + outs;
  4583. if (count == 0) return;
  4584. std::vector<enum_info> info(count);
  4585. for (i=0; i<count; i++) {
  4586. info[i].name[0] = '\0';
  4587. if (i < outs) info[i].isInput = false;
  4588. else info[i].isInput = true;
  4589. }
  4590. // Get playback device info and check capabilities.
  4591. result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
  4592. if ( FAILED(result) ) {
  4593. sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
  4594. getErrorString(result));
  4595. error(RtError::DRIVER_ERROR);
  4596. }
  4597. // Get capture device info and check capabilities.
  4598. result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
  4599. if ( FAILED(result) ) {
  4600. sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
  4601. getErrorString(result));
  4602. error(RtError::DRIVER_ERROR);
  4603. }
  4604. // Create device structures for valid devices and write device names
  4605. // to each. Devices are considered invalid if they cannot be
  4606. // opened, they report < 1 supported channels, or they report no
  4607. // supported data (capture only).
  4608. RtApiDevice device;
  4609. int index = 0;
  4610. for (i=0; i<count; i++) {
  4611. if ( info[i].isValid ) {
  4612. device.name.erase();
  4613. device.name.append( (const char *)info[i].name, strlen(info[i].name)+1);
  4614. devices_.push_back(device);
  4615. }
  4616. }
  4617. nDevices_ = devices_.size();
  4618. return;
  4619. }
  4620. void RtApiDs :: probeDeviceInfo(RtApiDevice *info)
  4621. {
  4622. enum_info dsinfo;
  4623. strncpy( dsinfo.name, info->name.c_str(), 64 );
  4624. dsinfo.isValid = false;
  4625. // Enumerate through input devices to find the id (if it exists).
  4626. HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  4627. if ( FAILED(result) ) {
  4628. sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
  4629. getErrorString(result));
  4630. error(RtError::WARNING);
  4631. return;
  4632. }
  4633. // Do capture probe first.
  4634. if ( dsinfo.isValid == false )
  4635. goto playback_probe;
  4636. LPDIRECTSOUNDCAPTURE input;
  4637. result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
  4638. if ( FAILED(result) ) {
  4639. sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
  4640. info->name.c_str(), getErrorString(result));
  4641. error(RtError::WARNING);
  4642. goto playback_probe;
  4643. }
  4644. DSCCAPS in_caps;
  4645. in_caps.dwSize = sizeof(in_caps);
  4646. result = input->GetCaps( &in_caps );
  4647. if ( FAILED(result) ) {
  4648. input->Release();
  4649. sprintf(message_, "RtApiDs: Could not get capture capabilities (%s): %s.",
  4650. info->name.c_str(), getErrorString(result));
  4651. error(RtError::WARNING);
  4652. goto playback_probe;
  4653. }
  4654. // Get input channel information.
  4655. info->minInputChannels = 1;
  4656. info->maxInputChannels = in_caps.dwChannels;
  4657. // Get sample rate and format information.
  4658. info->sampleRates.clear();
  4659. if( in_caps.dwChannels == 2 ) {
  4660. if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4661. if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4662. if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4663. if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4664. if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4665. if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4666. if ( info->nativeFormats & RTAUDIO_SINT16 ) {
  4667. if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->sampleRates.push_back( 11025 );
  4668. if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->sampleRates.push_back( 22050 );
  4669. if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->sampleRates.push_back( 44100 );
  4670. }
  4671. else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
  4672. if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->sampleRates.push_back( 11025 );
  4673. if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->sampleRates.push_back( 22050 );
  4674. if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->sampleRates.push_back( 44100 );
  4675. }
  4676. }
  4677. else if ( in_caps.dwChannels == 1 ) {
  4678. if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4679. if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4680. if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->nativeFormats |= RTAUDIO_SINT16;
  4681. if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4682. if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4683. if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->nativeFormats |= RTAUDIO_SINT8;
  4684. if ( info->nativeFormats & RTAUDIO_SINT16 ) {
  4685. if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->sampleRates.push_back( 11025 );
  4686. if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->sampleRates.push_back( 22050 );
  4687. if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->sampleRates.push_back( 44100 );
  4688. }
  4689. else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
  4690. if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->sampleRates.push_back( 11025 );
  4691. if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->sampleRates.push_back( 22050 );
  4692. if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->sampleRates.push_back( 44100 );
  4693. }
  4694. }
  4695. else info->minInputChannels = 0; // technically, this would be an error
  4696. input->Release();
  4697. playback_probe:
  4698. dsinfo.isValid = false;
  4699. // Enumerate through output devices to find the id (if it exists).
  4700. result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  4701. if ( FAILED(result) ) {
  4702. sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
  4703. getErrorString(result));
  4704. error(RtError::WARNING);
  4705. return;
  4706. }
  4707. // Now do playback probe.
  4708. if ( dsinfo.isValid == false )
  4709. goto check_parameters;
  4710. LPDIRECTSOUND output;
  4711. DSCAPS out_caps;
  4712. result = DirectSoundCreate( dsinfo.id, &output, NULL );
  4713. if ( FAILED(result) ) {
  4714. sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
  4715. info->name.c_str(), getErrorString(result));
  4716. error(RtError::WARNING);
  4717. goto check_parameters;
  4718. }
  4719. out_caps.dwSize = sizeof(out_caps);
  4720. result = output->GetCaps( &out_caps );
  4721. if ( FAILED(result) ) {
  4722. output->Release();
  4723. sprintf(message_, "RtApiDs: Could not get playback capabilities (%s): %s.",
  4724. info->name.c_str(), getErrorString(result));
  4725. error(RtError::WARNING);
  4726. goto check_parameters;
  4727. }
  4728. // Get output channel information.
  4729. info->minOutputChannels = 1;
  4730. info->maxOutputChannels = ( out_caps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
  4731. // Get sample rate information. Use capture device rate information
  4732. // if it exists.
  4733. if ( info->sampleRates.size() == 0 ) {
  4734. info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
  4735. info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
  4736. }
  4737. else {
  4738. // Check input rates against output rate range.
  4739. for ( unsigned int i=info->sampleRates.size()-1; i>=0; i-- ) {
  4740. if ( (unsigned int) info->sampleRates[i] > out_caps.dwMaxSecondarySampleRate )
  4741. info->sampleRates.erase( info->sampleRates.begin() + i );
  4742. }
  4743. while ( info->sampleRates.size() > 0 &&
  4744. ((unsigned int) info->sampleRates[0] < out_caps.dwMinSecondarySampleRate) ) {
  4745. info->sampleRates.erase( info->sampleRates.begin() );
  4746. }
  4747. }
  4748. // Get format information.
  4749. if ( out_caps.dwFlags & DSCAPS_PRIMARY16BIT ) info->nativeFormats |= RTAUDIO_SINT16;
  4750. if ( out_caps.dwFlags & DSCAPS_PRIMARY8BIT ) info->nativeFormats |= RTAUDIO_SINT8;
  4751. output->Release();
  4752. check_parameters:
  4753. if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) {
  4754. sprintf(message_, "RtApiDs: no reported input or output channels for device (%s).",
  4755. info->name.c_str());
  4756. error(RtError::DEBUG_WARNING);
  4757. return;
  4758. }
  4759. if ( info->sampleRates.size() == 0 || info->nativeFormats == 0 ) {
  4760. sprintf(message_, "RtApiDs: no reported sample rates or data formats for device (%s).",
  4761. info->name.c_str());
  4762. error(RtError::DEBUG_WARNING);
  4763. return;
  4764. }
  4765. // Determine duplex status.
  4766. if (info->maxInputChannels < info->maxOutputChannels)
  4767. info->maxDuplexChannels = info->maxInputChannels;
  4768. else
  4769. info->maxDuplexChannels = info->maxOutputChannels;
  4770. if (info->minInputChannels < info->minOutputChannels)
  4771. info->minDuplexChannels = info->minInputChannels;
  4772. else
  4773. info->minDuplexChannels = info->minOutputChannels;
  4774. if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
  4775. else info->hasDuplexSupport = false;
  4776. info->probed = true;
  4777. return;
  4778. }
  4779. bool RtApiDs :: probeDeviceOpen( int device, StreamMode mode, int channels,
  4780. int sampleRate, RtAudioFormat format,
  4781. int *bufferSize, int numberOfBuffers)
  4782. {
  4783. HRESULT result;
  4784. HWND hWnd = GetForegroundWindow();
  4785. // According to a note in PortAudio, using GetDesktopWindow()
  4786. // instead of GetForegroundWindow() is supposed to avoid problems
  4787. // that occur when the application's window is not the foreground
  4788. // window. Also, if the application window closes before the
  4789. // DirectSound buffer, DirectSound can crash. However, for console
  4790. // applications, no sound was produced when using GetDesktopWindow().
  4791. long buffer_size;
  4792. LPVOID audioPtr;
  4793. DWORD dataLen;
  4794. int nBuffers;
  4795. // Check the numberOfBuffers parameter and limit the lowest value to
  4796. // two. This is a judgement call and a value of two is probably too
  4797. // low for capture, but it should work for playback.
  4798. if (numberOfBuffers < 2)
  4799. nBuffers = 2;
  4800. else
  4801. nBuffers = numberOfBuffers;
  4802. // Define the wave format structure (16-bit PCM, srate, channels)
  4803. WAVEFORMATEX waveFormat;
  4804. ZeroMemory(&waveFormat, sizeof(WAVEFORMATEX));
  4805. waveFormat.wFormatTag = WAVE_FORMAT_PCM;
  4806. waveFormat.nChannels = channels;
  4807. waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
  4808. // Determine the data format.
  4809. if ( devices_[device].nativeFormats ) { // 8-bit and/or 16-bit support
  4810. if ( format == RTAUDIO_SINT8 ) {
  4811. if ( devices_[device].nativeFormats & RTAUDIO_SINT8 )
  4812. waveFormat.wBitsPerSample = 8;
  4813. else
  4814. waveFormat.wBitsPerSample = 16;
  4815. }
  4816. else {
  4817. if ( devices_[device].nativeFormats & RTAUDIO_SINT16 )
  4818. waveFormat.wBitsPerSample = 16;
  4819. else
  4820. waveFormat.wBitsPerSample = 8;
  4821. }
  4822. }
  4823. else {
  4824. sprintf(message_, "RtApiDs: no reported data formats for device (%s).",
  4825. devices_[device].name.c_str());
  4826. error(RtError::DEBUG_WARNING);
  4827. return FAILURE;
  4828. }
  4829. waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
  4830. waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
  4831. enum_info dsinfo;
  4832. void *ohandle = 0, *bhandle = 0;
  4833. strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
  4834. dsinfo.isValid = false;
  4835. if ( mode == OUTPUT ) {
  4836. if ( devices_[device].maxOutputChannels < channels ) {
  4837. sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
  4838. channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
  4839. error(RtError::DEBUG_WARNING);
  4840. return FAILURE;
  4841. }
  4842. // Enumerate through output devices to find the id (if it exists).
  4843. result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  4844. if ( FAILED(result) ) {
  4845. sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
  4846. getErrorString(result));
  4847. error(RtError::DEBUG_WARNING);
  4848. return FAILURE;
  4849. }
  4850. if ( dsinfo.isValid == false ) {
  4851. sprintf(message_, "RtApiDs: output device (%s) id not found!", devices_[device].name.c_str());
  4852. error(RtError::DEBUG_WARNING);
  4853. return FAILURE;
  4854. }
  4855. LPGUID id = dsinfo.id;
  4856. LPDIRECTSOUND object;
  4857. LPDIRECTSOUNDBUFFER buffer;
  4858. DSBUFFERDESC bufferDescription;
  4859. result = DirectSoundCreate( id, &object, NULL );
  4860. if ( FAILED(result) ) {
  4861. sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
  4862. devices_[device].name.c_str(), getErrorString(result));
  4863. error(RtError::DEBUG_WARNING);
  4864. return FAILURE;
  4865. }
  4866. // Set cooperative level to DSSCL_EXCLUSIVE
  4867. result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
  4868. if ( FAILED(result) ) {
  4869. object->Release();
  4870. sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
  4871. devices_[device].name.c_str(), getErrorString(result));
  4872. error(RtError::WARNING);
  4873. return FAILURE;
  4874. }
  4875. // Even though we will write to the secondary buffer, we need to
  4876. // access the primary buffer to set the correct output format
  4877. // (since the default is 8-bit, 22 kHz!). Setup the DS primary
  4878. // buffer description.
  4879. ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
  4880. bufferDescription.dwSize = sizeof(DSBUFFERDESC);
  4881. bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
  4882. // Obtain the primary buffer
  4883. result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
  4884. if ( FAILED(result) ) {
  4885. object->Release();
  4886. sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
  4887. devices_[device].name.c_str(), getErrorString(result));
  4888. error(RtError::WARNING);
  4889. return FAILURE;
  4890. }
  4891. // Set the primary DS buffer sound format.
  4892. result = buffer->SetFormat(&waveFormat);
  4893. if ( FAILED(result) ) {
  4894. object->Release();
  4895. sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
  4896. devices_[device].name.c_str(), getErrorString(result));
  4897. error(RtError::WARNING);
  4898. return FAILURE;
  4899. }
  4900. // Setup the secondary DS buffer description.
  4901. buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
  4902. ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
  4903. bufferDescription.dwSize = sizeof(DSBUFFERDESC);
  4904. bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
  4905. DSBCAPS_GETCURRENTPOSITION2 |
  4906. DSBCAPS_LOCHARDWARE ); // Force hardware mixing
  4907. bufferDescription.dwBufferBytes = buffer_size;
  4908. bufferDescription.lpwfxFormat = &waveFormat;
  4909. // Try to create the secondary DS buffer. If that doesn't work,
  4910. // try to use software mixing. Otherwise, there's a problem.
  4911. result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
  4912. if ( FAILED(result) ) {
  4913. bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
  4914. DSBCAPS_GETCURRENTPOSITION2 |
  4915. DSBCAPS_LOCSOFTWARE ); // Force software mixing
  4916. result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
  4917. if ( FAILED(result) ) {
  4918. object->Release();
  4919. sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
  4920. devices_[device].name.c_str(), getErrorString(result));
  4921. error(RtError::WARNING);
  4922. return FAILURE;
  4923. }
  4924. }
  4925. // Get the buffer size ... might be different from what we specified.
  4926. DSBCAPS dsbcaps;
  4927. dsbcaps.dwSize = sizeof(DSBCAPS);
  4928. buffer->GetCaps(&dsbcaps);
  4929. buffer_size = dsbcaps.dwBufferBytes;
  4930. // Lock the DS buffer
  4931. result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
  4932. if ( FAILED(result) ) {
  4933. object->Release();
  4934. buffer->Release();
  4935. sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
  4936. devices_[device].name.c_str(), getErrorString(result));
  4937. error(RtError::WARNING);
  4938. return FAILURE;
  4939. }
  4940. // Zero the DS buffer
  4941. ZeroMemory(audioPtr, dataLen);
  4942. // Unlock the DS buffer
  4943. result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
  4944. if ( FAILED(result) ) {
  4945. object->Release();
  4946. buffer->Release();
  4947. sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
  4948. devices_[device].name.c_str(), getErrorString(result));
  4949. error(RtError::WARNING);
  4950. return FAILURE;
  4951. }
  4952. ohandle = (void *) object;
  4953. bhandle = (void *) buffer;
  4954. stream_.nDeviceChannels[0] = channels;
  4955. }
  4956. if ( mode == INPUT ) {
  4957. if ( devices_[device].maxInputChannels < channels )
  4958. return FAILURE;
  4959. // Enumerate through input devices to find the id (if it exists).
  4960. result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  4961. if ( FAILED(result) ) {
  4962. sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
  4963. getErrorString(result));
  4964. error(RtError::DEBUG_WARNING);
  4965. return FAILURE;
  4966. }
  4967. if ( dsinfo.isValid == false ) {
  4968. sprintf(message_, "RtAudioDS: input device (%s) id not found!", devices_[device].name.c_str());
  4969. error(RtError::DEBUG_WARNING);
  4970. return FAILURE;
  4971. }
  4972. LPGUID id = dsinfo.id;
  4973. LPDIRECTSOUNDCAPTURE object;
  4974. LPDIRECTSOUNDCAPTUREBUFFER buffer;
  4975. DSCBUFFERDESC bufferDescription;
  4976. result = DirectSoundCaptureCreate( id, &object, NULL );
  4977. if ( FAILED(result) ) {
  4978. sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
  4979. devices_[device].name.c_str(), getErrorString(result));
  4980. error(RtError::WARNING);
  4981. return FAILURE;
  4982. }
  4983. // Setup the secondary DS buffer description.
  4984. buffer_size = channels * *bufferSize * nBuffers * waveFormat.wBitsPerSample / 8;
  4985. ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
  4986. bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
  4987. bufferDescription.dwFlags = 0;
  4988. bufferDescription.dwReserved = 0;
  4989. bufferDescription.dwBufferBytes = buffer_size;
  4990. bufferDescription.lpwfxFormat = &waveFormat;
  4991. // Create the capture buffer.
  4992. result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
  4993. if ( FAILED(result) ) {
  4994. object->Release();
  4995. sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
  4996. devices_[device].name.c_str(), getErrorString(result));
  4997. error(RtError::WARNING);
  4998. return FAILURE;
  4999. }
  5000. // Lock the capture buffer
  5001. result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
  5002. if ( FAILED(result) ) {
  5003. object->Release();
  5004. buffer->Release();
  5005. sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
  5006. devices_[device].name.c_str(), getErrorString(result));
  5007. error(RtError::WARNING);
  5008. return FAILURE;
  5009. }
  5010. // Zero the buffer
  5011. ZeroMemory(audioPtr, dataLen);
  5012. // Unlock the buffer
  5013. result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
  5014. if ( FAILED(result) ) {
  5015. object->Release();
  5016. buffer->Release();
  5017. sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
  5018. devices_[device].name.c_str(), getErrorString(result));
  5019. error(RtError::WARNING);
  5020. return FAILURE;
  5021. }
  5022. ohandle = (void *) object;
  5023. bhandle = (void *) buffer;
  5024. stream_.nDeviceChannels[1] = channels;
  5025. }
  5026. stream_.userFormat = format;
  5027. if ( waveFormat.wBitsPerSample == 8 )
  5028. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  5029. else
  5030. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  5031. stream_.nUserChannels[mode] = channels;
  5032. *bufferSize = buffer_size / (channels * nBuffers * waveFormat.wBitsPerSample / 8);
  5033. stream_.bufferSize = *bufferSize;
  5034. // Set flags for buffer conversion
  5035. stream_.doConvertBuffer[mode] = false;
  5036. if (stream_.userFormat != stream_.deviceFormat[mode])
  5037. stream_.doConvertBuffer[mode] = true;
  5038. if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
  5039. stream_.doConvertBuffer[mode] = true;
  5040. // Allocate necessary internal buffers
  5041. if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
  5042. long buffer_bytes;
  5043. if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
  5044. buffer_bytes = stream_.nUserChannels[0];
  5045. else
  5046. buffer_bytes = stream_.nUserChannels[1];
  5047. buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  5048. if (stream_.userBuffer) free(stream_.userBuffer);
  5049. stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  5050. if (stream_.userBuffer == NULL) {
  5051. sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
  5052. devices_[device].name.c_str());
  5053. goto error;
  5054. }
  5055. }
  5056. if ( stream_.doConvertBuffer[mode] ) {
  5057. long buffer_bytes;
  5058. bool makeBuffer = true;
  5059. if ( mode == OUTPUT )
  5060. buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  5061. else { // mode == INPUT
  5062. buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
  5063. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  5064. long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  5065. if ( buffer_bytes < bytes_out ) makeBuffer = false;
  5066. }
  5067. }
  5068. if ( makeBuffer ) {
  5069. buffer_bytes *= *bufferSize;
  5070. if (stream_.deviceBuffer) free(stream_.deviceBuffer);
  5071. stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
  5072. if (stream_.deviceBuffer == NULL) {
  5073. sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
  5074. devices_[device].name.c_str());
  5075. goto error;
  5076. }
  5077. }
  5078. }
  5079. // Allocate our DsHandle structures for the stream.
  5080. DsHandle *handles;
  5081. if ( stream_.apiHandle == 0 ) {
  5082. handles = (DsHandle *) calloc(2, sizeof(DsHandle));
  5083. if ( handles == NULL ) {
  5084. sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
  5085. devices_[device].name.c_str());
  5086. goto error;
  5087. }
  5088. handles[0].object = 0;
  5089. handles[1].object = 0;
  5090. stream_.apiHandle = (void *) handles;
  5091. }
  5092. else
  5093. handles = (DsHandle *) stream_.apiHandle;
  5094. handles[mode].object = ohandle;
  5095. handles[mode].buffer = bhandle;
  5096. stream_.device[mode] = device;
  5097. stream_.state = STREAM_STOPPED;
  5098. if ( stream_.mode == OUTPUT && mode == INPUT )
  5099. // We had already set up an output stream.
  5100. stream_.mode = DUPLEX;
  5101. else
  5102. stream_.mode = mode;
  5103. stream_.nBuffers = nBuffers;
  5104. stream_.sampleRate = sampleRate;
  5105. return SUCCESS;
  5106. error:
  5107. if (handles) {
  5108. if (handles[0].object) {
  5109. LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
  5110. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5111. if (buffer) buffer->Release();
  5112. object->Release();
  5113. }
  5114. if (handles[1].object) {
  5115. LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
  5116. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5117. if (buffer) buffer->Release();
  5118. object->Release();
  5119. }
  5120. free(handles);
  5121. stream_.apiHandle = 0;
  5122. }
  5123. if (stream_.userBuffer) {
  5124. free(stream_.userBuffer);
  5125. stream_.userBuffer = 0;
  5126. }
  5127. error(RtError::WARNING);
  5128. return FAILURE;
  5129. }
  5130. void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData)
  5131. {
  5132. verifyStream();
  5133. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  5134. if ( info->usingCallback ) {
  5135. sprintf(message_, "RtApiDs: A callback is already set for this stream!");
  5136. error(RtError::WARNING);
  5137. return;
  5138. }
  5139. info->callback = (void *) callback;
  5140. info->userData = userData;
  5141. info->usingCallback = true;
  5142. info->object = (void *) this;
  5143. unsigned thread_id;
  5144. info->thread = _beginthreadex(NULL, 0, &callbackHandler,
  5145. &stream_.callbackInfo, 0, &thread_id);
  5146. if (info->thread == 0) {
  5147. info->usingCallback = false;
  5148. sprintf(message_, "RtApiDs: error starting callback thread!");
  5149. error(RtError::THREAD_ERROR);
  5150. }
  5151. // When spawning multiple threads in quick succession, it appears to be
  5152. // necessary to wait a bit for each to initialize ... another windoism!
  5153. Sleep(1);
  5154. }
// Detach the user callback from the stream: stop the stream if it is
// running, then join and dispose of the callback thread and clear all
// callback bookkeeping.  No-op when no callback is installed.
void RtApiDs :: cancelStreamCallback()
{
  verifyStream();

  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    // Signal the callback thread to exit, then wait for it to finish
    // and release its handle.
    // NOTE(review): we wait for the thread while holding the stream
    // mutex -- if callbackHandler also acquires this mutex before
    // checking usingCallback, this could deadlock; confirm against
    // the handler's locking order.
    stream_.callbackInfo.usingCallback = false;
    WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
    CloseHandle( (HANDLE)stream_.callbackInfo.thread );
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}
  5171. void RtApiDs :: closeStream()
  5172. {
  5173. // We don't want an exception to be thrown here because this
  5174. // function is called by our class destructor. So, do our own
  5175. // streamId check.
  5176. if ( stream_.mode == UNINITIALIZED ) {
  5177. sprintf(message_, "RtApiDs::closeStream(): no open stream to close!");
  5178. error(RtError::WARNING);
  5179. return;
  5180. }
  5181. if (stream_.callbackInfo.usingCallback) {
  5182. stream_.callbackInfo.usingCallback = false;
  5183. WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
  5184. CloseHandle( (HANDLE)stream_.callbackInfo.thread );
  5185. }
  5186. DsHandle *handles = (DsHandle *) stream_.apiHandle;
  5187. if (handles) {
  5188. if (handles[0].object) {
  5189. LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
  5190. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5191. if (buffer) {
  5192. buffer->Stop();
  5193. buffer->Release();
  5194. }
  5195. object->Release();
  5196. }
  5197. if (handles[1].object) {
  5198. LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
  5199. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5200. if (buffer) {
  5201. buffer->Stop();
  5202. buffer->Release();
  5203. }
  5204. object->Release();
  5205. }
  5206. free(handles);
  5207. stream_.apiHandle = 0;
  5208. }
  5209. if (stream_.userBuffer) {
  5210. free(stream_.userBuffer);
  5211. stream_.userBuffer = 0;
  5212. }
  5213. if (stream_.deviceBuffer) {
  5214. free(stream_.deviceBuffer);
  5215. stream_.deviceBuffer = 0;
  5216. }
  5217. stream_.mode = UNINITIALIZED;
  5218. }
  5219. void RtApiDs :: startStream()
  5220. {
  5221. verifyStream();
  5222. if (stream_.state == STREAM_RUNNING) return;
  5223. MUTEX_LOCK(&stream_.mutex);
  5224. HRESULT result;
  5225. DsHandle *handles = (DsHandle *) stream_.apiHandle;
  5226. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  5227. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5228. result = buffer->Play(0, 0, DSBPLAY_LOOPING );
  5229. if ( FAILED(result) ) {
  5230. sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.",
  5231. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5232. error(RtError::DRIVER_ERROR);
  5233. }
  5234. }
  5235. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  5236. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5237. result = buffer->Start(DSCBSTART_LOOPING );
  5238. if ( FAILED(result) ) {
  5239. sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.",
  5240. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5241. error(RtError::DRIVER_ERROR);
  5242. }
  5243. }
  5244. stream_.state = STREAM_RUNNING;
  5245. MUTEX_UNLOCK(&stream_.mutex);
  5246. }
  5247. void RtApiDs :: stopStream()
  5248. {
  5249. verifyStream();
  5250. if (stream_.state == STREAM_STOPPED) return;
  5251. // Change the state before the lock to improve shutdown response
  5252. // when using a callback.
  5253. stream_.state = STREAM_STOPPED;
  5254. MUTEX_LOCK(&stream_.mutex);
  5255. // There is no specific DirectSound API call to "drain" a buffer
  5256. // before stopping. We can hack this for playback by writing zeroes
  5257. // for another bufferSize * nBuffers frames. For capture, the
  5258. // concept is less clear so we'll repeat what we do in the
  5259. // abortStream() case.
  5260. HRESULT result;
  5261. DWORD dsBufferSize;
  5262. LPVOID buffer1 = NULL;
  5263. LPVOID buffer2 = NULL;
  5264. DWORD bufferSize1 = 0;
  5265. DWORD bufferSize2 = 0;
  5266. DsHandle *handles = (DsHandle *) stream_.apiHandle;
  5267. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  5268. DWORD currentPos, safePos;
  5269. long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
  5270. buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
  5271. LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5272. UINT nextWritePos = handles[0].bufferPointer;
  5273. dsBufferSize = buffer_bytes * stream_.nBuffers;
  5274. // Write zeroes for nBuffer counts.
  5275. for (int i=0; i<stream_.nBuffers; i++) {
  5276. // Find out where the read and "safe write" pointers are.
  5277. result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
  5278. if ( FAILED(result) ) {
  5279. sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
  5280. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5281. error(RtError::DRIVER_ERROR);
  5282. }
  5283. if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
  5284. DWORD endWrite = nextWritePos + buffer_bytes;
  5285. // Check whether the entire write region is behind the play pointer.
  5286. while ( currentPos < endWrite ) {
  5287. double millis = (endWrite - currentPos) * 900.0;
  5288. millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
  5289. if ( millis < 1.0 ) millis = 1.0;
  5290. Sleep( (DWORD) millis );
  5291. // Wake up, find out where we are now
  5292. result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
  5293. if ( FAILED(result) ) {
  5294. sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
  5295. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5296. error(RtError::DRIVER_ERROR);
  5297. }
  5298. if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
  5299. }
  5300. // Lock free space in the buffer
  5301. result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
  5302. &bufferSize1, &buffer2, &bufferSize2, 0);
  5303. if ( FAILED(result) ) {
  5304. sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
  5305. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5306. error(RtError::DRIVER_ERROR);
  5307. }
  5308. // Zero the free space
  5309. ZeroMemory(buffer1, bufferSize1);
  5310. if (buffer2 != NULL) ZeroMemory(buffer2, bufferSize2);
  5311. // Update our buffer offset and unlock sound buffer
  5312. dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
  5313. if ( FAILED(result) ) {
  5314. sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
  5315. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5316. error(RtError::DRIVER_ERROR);
  5317. }
  5318. nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
  5319. handles[0].bufferPointer = nextWritePos;
  5320. }
  5321. // If we play again, start at the beginning of the buffer.
  5322. handles[0].bufferPointer = 0;
  5323. }
  5324. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  5325. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5326. buffer1 = NULL;
  5327. bufferSize1 = 0;
  5328. result = buffer->Stop();
  5329. if ( FAILED(result) ) {
  5330. sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
  5331. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5332. error(RtError::DRIVER_ERROR);
  5333. }
  5334. dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
  5335. dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
  5336. // Lock the buffer and clear it so that if we start to play again,
  5337. // we won't have old data playing.
  5338. result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0);
  5339. if ( FAILED(result) ) {
  5340. sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
  5341. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5342. error(RtError::DRIVER_ERROR);
  5343. }
  5344. // Zero the DS buffer
  5345. ZeroMemory(buffer1, bufferSize1);
  5346. // Unlock the DS buffer
  5347. result = buffer->Unlock(buffer1, bufferSize1, NULL, 0);
  5348. if ( FAILED(result) ) {
  5349. sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
  5350. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5351. error(RtError::DRIVER_ERROR);
  5352. }
  5353. // If we start recording again, we must begin at beginning of buffer.
  5354. handles[1].bufferPointer = 0;
  5355. }
  5356. MUTEX_UNLOCK(&stream_.mutex);
  5357. }
  5358. void RtApiDs :: abortStream()
  5359. {
  5360. verifyStream();
  5361. if (stream_.state == STREAM_STOPPED) return;
  5362. // Change the state before the lock to improve shutdown response
  5363. // when using a callback.
  5364. stream_.state = STREAM_STOPPED;
  5365. MUTEX_LOCK(&stream_.mutex);
  5366. HRESULT result;
  5367. long dsBufferSize;
  5368. LPVOID audioPtr;
  5369. DWORD dataLen;
  5370. DsHandle *handles = (DsHandle *) stream_.apiHandle;
  5371. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  5372. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5373. result = buffer->Stop();
  5374. if ( FAILED(result) ) {
  5375. sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
  5376. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5377. error(RtError::DRIVER_ERROR);
  5378. }
  5379. dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[0];
  5380. dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;
  5381. // Lock the buffer and clear it so that if we start to play again,
  5382. // we won't have old data playing.
  5383. result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
  5384. if ( FAILED(result) ) {
  5385. sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
  5386. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5387. error(RtError::DRIVER_ERROR);
  5388. }
  5389. // Zero the DS buffer
  5390. ZeroMemory(audioPtr, dataLen);
  5391. // Unlock the DS buffer
  5392. result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
  5393. if ( FAILED(result) ) {
  5394. sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.",
  5395. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5396. error(RtError::DRIVER_ERROR);
  5397. }
  5398. // If we start playing again, we must begin at beginning of buffer.
  5399. handles[0].bufferPointer = 0;
  5400. }
  5401. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  5402. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5403. audioPtr = NULL;
  5404. dataLen = 0;
  5405. result = buffer->Stop();
  5406. if ( FAILED(result) ) {
  5407. sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
  5408. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5409. error(RtError::DRIVER_ERROR);
  5410. }
  5411. dsBufferSize = stream_.bufferSize * stream_.nDeviceChannels[1];
  5412. dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
  5413. // Lock the buffer and clear it so that if we start to play again,
  5414. // we won't have old data playing.
  5415. result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
  5416. if ( FAILED(result) ) {
  5417. sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
  5418. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5419. error(RtError::DRIVER_ERROR);
  5420. }
  5421. // Zero the DS buffer
  5422. ZeroMemory(audioPtr, dataLen);
  5423. // Unlock the DS buffer
  5424. result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
  5425. if ( FAILED(result) ) {
  5426. sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
  5427. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5428. error(RtError::DRIVER_ERROR);
  5429. }
  5430. // If we start recording again, we must begin at beginning of buffer.
  5431. handles[1].bufferPointer = 0;
  5432. }
  5433. MUTEX_UNLOCK(&stream_.mutex);
  5434. }
// Estimate how many sample frames can be processed by the next tickStream()
// call without blocking.  Returns 0 for a stopped stream.  For DUPLEX mode the
// larger of the playback/capture estimates is used (the more constrained side
// dominates the wait).
int RtApiDs :: streamWillBlock()
{
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return 0;

  MUTEX_LOCK(&stream_.mutex);

  int channels;
  int frames = 0;
  HRESULT result;
  DWORD currentPos, safePos;
  channels = 1;
  DsHandle *handles = (DsHandle *) stream_.apiHandle;
  if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
    UINT nextWritePos = handles[0].bufferPointer;
    channels = stream_.nDeviceChannels[0];
    // Total DS ring-buffer size in bytes: frames * channels * bytes/sample * nBuffers.
    DWORD dsBufferSize = stream_.bufferSize * channels;
    dsBufferSize *= formatBytes(stream_.deviceFormat[0]) * stream_.nBuffers;
    // Find out where the read and "safe write" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      // NOTE(review): error(DRIVER_ERROR) appears to throw while the stream
      // mutex is still held — confirm against RtError handling elsewhere.
      sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
              devices_[stream_.device[0]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
    if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
    // Bytes between our write cursor and the play cursor, converted to frames.
    frames = currentPos - nextWritePos;
    frames /= channels * formatBytes(stream_.deviceFormat[0]);
  }

  if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
    LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
    UINT nextReadPos = handles[1].bufferPointer;
    channels = stream_.nDeviceChannels[1];
    DWORD dsBufferSize = stream_.bufferSize * channels;
    dsBufferSize *= formatBytes(stream_.deviceFormat[1]) * stream_.nBuffers;
    // Find out where the write and "safe read" pointers are.
    result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
              devices_[stream_.device[1]].name.c_str(), getErrorString(result));
      error(RtError::DRIVER_ERROR);
    }
    if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset

    if (stream_.mode == DUPLEX ) {
      // Take largest value of the two.
      int temp = safePos - nextReadPos;
      temp /= channels * formatBytes(stream_.deviceFormat[1]);
      frames = ( temp > frames ) ? temp : frames;
    }
    else {
      frames = safePos - nextReadPos;
      frames /= channels * formatBytes(stream_.deviceFormat[1]);
    }
  }

  // Convert "bytes already consumable" into "frames still to wait for".
  frames = stream_.bufferSize - frames;
  if (frames < 0) frames = 0;

  MUTEX_UNLOCK(&stream_.mutex);
  return frames;
}
  5493. void RtApiDs :: tickStream()
  5494. {
  5495. verifyStream();
  5496. int stopStream = 0;
  5497. if (stream_.state == STREAM_STOPPED) {
  5498. if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds
  5499. return;
  5500. }
  5501. else if (stream_.callbackInfo.usingCallback) {
  5502. RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
  5503. stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  5504. }
  5505. MUTEX_LOCK(&stream_.mutex);
  5506. // The state might change while waiting on a mutex.
  5507. if (stream_.state == STREAM_STOPPED) {
  5508. MUTEX_UNLOCK(&stream_.mutex);
  5509. return;
  5510. }
  5511. HRESULT result;
  5512. DWORD currentPos, safePos;
  5513. LPVOID buffer1 = NULL;
  5514. LPVOID buffer2 = NULL;
  5515. DWORD bufferSize1 = 0;
  5516. DWORD bufferSize2 = 0;
  5517. char *buffer;
  5518. long buffer_bytes;
  5519. DsHandle *handles = (DsHandle *) stream_.apiHandle;
  5520. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  5521. // Setup parameters and do buffer conversion if necessary.
  5522. if (stream_.doConvertBuffer[0]) {
  5523. convertStreamBuffer(OUTPUT);
  5524. buffer = stream_.deviceBuffer;
  5525. buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
  5526. buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
  5527. }
  5528. else {
  5529. buffer = stream_.userBuffer;
  5530. buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0];
  5531. buffer_bytes *= formatBytes(stream_.userFormat);
  5532. }
  5533. // No byte swapping necessary in DirectSound implementation.
  5534. LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
  5535. UINT nextWritePos = handles[0].bufferPointer;
  5536. DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;
  5537. // Find out where the read and "safe write" pointers are.
  5538. result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
  5539. if ( FAILED(result) ) {
  5540. sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
  5541. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5542. error(RtError::DRIVER_ERROR);
  5543. }
  5544. if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
  5545. DWORD endWrite = nextWritePos + buffer_bytes;
  5546. // Check whether the entire write region is behind the play pointer.
  5547. while ( currentPos < endWrite ) {
  5548. // If we are here, then we must wait until the play pointer gets
  5549. // beyond the write region. The approach here is to use the
  5550. // Sleep() function to suspend operation until safePos catches
  5551. // up. Calculate number of milliseconds to wait as:
  5552. // time = distance * (milliseconds/second) * fudgefactor /
  5553. // ((bytes/sample) * (samples/second))
  5554. // A "fudgefactor" less than 1 is used because it was found
  5555. // that sleeping too long was MUCH worse than sleeping for
  5556. // several shorter periods.
  5557. double millis = (endWrite - currentPos) * 900.0;
  5558. millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.sampleRate);
  5559. if ( millis < 1.0 ) millis = 1.0;
  5560. Sleep( (DWORD) millis );
  5561. // Wake up, find out where we are now
  5562. result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
  5563. if ( FAILED(result) ) {
  5564. sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
  5565. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5566. error(RtError::DRIVER_ERROR);
  5567. }
  5568. if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset
  5569. }
  5570. // Lock free space in the buffer
  5571. result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
  5572. &bufferSize1, &buffer2, &bufferSize2, 0);
  5573. if ( FAILED(result) ) {
  5574. sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
  5575. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5576. error(RtError::DRIVER_ERROR);
  5577. }
  5578. // Copy our buffer into the DS buffer
  5579. CopyMemory(buffer1, buffer, bufferSize1);
  5580. if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2);
  5581. // Update our buffer offset and unlock sound buffer
  5582. dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
  5583. if ( FAILED(result) ) {
  5584. sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
  5585. devices_[stream_.device[0]].name.c_str(), getErrorString(result));
  5586. error(RtError::DRIVER_ERROR);
  5587. }
  5588. nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
  5589. handles[0].bufferPointer = nextWritePos;
  5590. }
  5591. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  5592. // Setup parameters.
  5593. if (stream_.doConvertBuffer[1]) {
  5594. buffer = stream_.deviceBuffer;
  5595. buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1];
  5596. buffer_bytes *= formatBytes(stream_.deviceFormat[1]);
  5597. }
  5598. else {
  5599. buffer = stream_.userBuffer;
  5600. buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1];
  5601. buffer_bytes *= formatBytes(stream_.userFormat);
  5602. }
  5603. LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
  5604. UINT nextReadPos = handles[1].bufferPointer;
  5605. DWORD dsBufferSize = buffer_bytes * stream_.nBuffers;
  5606. // Find out where the write and "safe read" pointers are.
  5607. result = dsBuffer->GetCurrentPosition(&currentPos, &safePos);
  5608. if ( FAILED(result) ) {
  5609. sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
  5610. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5611. error(RtError::DRIVER_ERROR);
  5612. }
  5613. if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
  5614. DWORD endRead = nextReadPos + buffer_bytes;
  5615. // Check whether the entire write region is behind the play pointer.
  5616. while ( safePos < endRead ) {
  5617. // See comments for playback.
  5618. double millis = (endRead - safePos) * 900.0;
  5619. millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.sampleRate);
  5620. if ( millis < 1.0 ) millis = 1.0;
  5621. Sleep( (DWORD) millis );
  5622. // Wake up, find out where we are now
  5623. result = dsBuffer->GetCurrentPosition( &currentPos, &safePos );
  5624. if ( FAILED(result) ) {
  5625. sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
  5626. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5627. error(RtError::DRIVER_ERROR);
  5628. }
  5629. if ( safePos < nextReadPos ) safePos += dsBufferSize; // unwrap offset
  5630. }
  5631. // Lock free space in the buffer
  5632. result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1,
  5633. &bufferSize1, &buffer2, &bufferSize2, 0);
  5634. if ( FAILED(result) ) {
  5635. sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
  5636. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5637. error(RtError::DRIVER_ERROR);
  5638. }
  5639. // Copy our buffer into the DS buffer
  5640. CopyMemory(buffer, buffer1, bufferSize1);
  5641. if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2);
  5642. // Update our buffer offset and unlock sound buffer
  5643. nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize;
  5644. dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
  5645. if ( FAILED(result) ) {
  5646. sprintf(message_, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
  5647. devices_[stream_.device[1]].name.c_str(), getErrorString(result));
  5648. error(RtError::DRIVER_ERROR);
  5649. }
  5650. handles[1].bufferPointer = nextReadPos;
  5651. // No byte swapping necessary in DirectSound implementation.
  5652. // Do buffer conversion if necessary.
  5653. if (stream_.doConvertBuffer[1])
  5654. convertStreamBuffer(INPUT);
  5655. }
  5656. MUTEX_UNLOCK(&stream_.mutex);
  5657. if (stream_.callbackInfo.usingCallback && stopStream)
  5658. this->stopStream();
  5659. }
  5660. // Definitions for utility functions and callbacks
  5661. // specific to the DirectSound implementation.
  5662. extern "C" unsigned __stdcall callbackHandler(void *ptr)
  5663. {
  5664. CallbackInfo *info = (CallbackInfo *) ptr;
  5665. RtApiDs *object = (RtApiDs *) info->object;
  5666. bool *usingCallback = &info->usingCallback;
  5667. while ( *usingCallback ) {
  5668. try {
  5669. object->tickStream();
  5670. }
  5671. catch (RtError &exception) {
  5672. fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
  5673. exception.getMessageString());
  5674. break;
  5675. }
  5676. }
  5677. _endthreadex( 0 );
  5678. return 0;
  5679. }
  5680. static bool CALLBACK deviceCountCallback(LPGUID lpguid,
  5681. LPCSTR lpcstrDescription,
  5682. LPCSTR lpcstrModule,
  5683. LPVOID lpContext)
  5684. {
  5685. int *pointer = ((int *) lpContext);
  5686. (*pointer)++;
  5687. return true;
  5688. }
  5689. static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
  5690. LPCSTR lpcstrDescription,
  5691. LPCSTR lpcstrModule,
  5692. LPVOID lpContext)
  5693. {
  5694. enum_info *info = ((enum_info *) lpContext);
  5695. while (strlen(info->name) > 0) info++;
  5696. strncpy(info->name, lpcstrDescription, 64);
  5697. info->id = lpguid;
  5698. HRESULT hr;
  5699. info->isValid = false;
  5700. if (info->isInput == true) {
  5701. DSCCAPS caps;
  5702. LPDIRECTSOUNDCAPTURE object;
  5703. hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
  5704. if( hr != DS_OK ) return true;
  5705. caps.dwSize = sizeof(caps);
  5706. hr = object->GetCaps( &caps );
  5707. if( hr == DS_OK ) {
  5708. if (caps.dwChannels > 0 && caps.dwFormats > 0)
  5709. info->isValid = true;
  5710. }
  5711. object->Release();
  5712. }
  5713. else {
  5714. DSCAPS caps;
  5715. LPDIRECTSOUND object;
  5716. hr = DirectSoundCreate( lpguid, &object, NULL );
  5717. if( hr != DS_OK ) return true;
  5718. caps.dwSize = sizeof(caps);
  5719. hr = object->GetCaps( &caps );
  5720. if( hr == DS_OK ) {
  5721. if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
  5722. info->isValid = true;
  5723. }
  5724. object->Release();
  5725. }
  5726. return true;
  5727. }
  5728. static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
  5729. LPCSTR lpcstrDescription,
  5730. LPCSTR lpcstrModule,
  5731. LPVOID lpContext)
  5732. {
  5733. enum_info *info = ((enum_info *) lpContext);
  5734. if ( lpguid == NULL ) {
  5735. strncpy(info->name, lpcstrDescription, 64);
  5736. return false;
  5737. }
  5738. return true;
  5739. }
  5740. static bool CALLBACK deviceIdCallback(LPGUID lpguid,
  5741. LPCSTR lpcstrDescription,
  5742. LPCSTR lpcstrModule,
  5743. LPVOID lpContext)
  5744. {
  5745. enum_info *info = ((enum_info *) lpContext);
  5746. if ( strncmp( info->name, lpcstrDescription, 64 ) == 0 ) {
  5747. info->id = lpguid;
  5748. info->isValid = true;
  5749. return false;
  5750. }
  5751. return true;
  5752. }
  5753. static char* getErrorString(int code)
  5754. {
  5755. switch (code) {
  5756. case DSERR_ALLOCATED:
  5757. return "Direct Sound already allocated";
  5758. case DSERR_CONTROLUNAVAIL:
  5759. return "Direct Sound control unavailable";
  5760. case DSERR_INVALIDPARAM:
  5761. return "Direct Sound invalid parameter";
  5762. case DSERR_INVALIDCALL:
  5763. return "Direct Sound invalid call";
  5764. case DSERR_GENERIC:
  5765. return "Direct Sound generic error";
  5766. case DSERR_PRIOLEVELNEEDED:
  5767. return "Direct Sound Priority level needed";
  5768. case DSERR_OUTOFMEMORY:
  5769. return "Direct Sound out of memory";
  5770. case DSERR_BADFORMAT:
  5771. return "Direct Sound bad format";
  5772. case DSERR_UNSUPPORTED:
  5773. return "Direct Sound unsupported error";
  5774. case DSERR_NODRIVER:
  5775. return "Direct Sound no driver error";
  5776. case DSERR_ALREADYINITIALIZED:
  5777. return "Direct Sound already initialized";
  5778. case DSERR_NOAGGREGATION:
  5779. return "Direct Sound no aggregation";
  5780. case DSERR_BUFFERLOST:
  5781. return "Direct Sound buffer lost";
  5782. case DSERR_OTHERAPPHASPRIO:
  5783. return "Direct Sound other app has priority";
  5784. case DSERR_UNINITIALIZED:
  5785. return "Direct Sound uninitialized";
  5786. default:
  5787. return "Direct Sound unknown error";
  5788. }
  5789. }
  5790. //******************** End of __WINDOWS_DS__ *********************//
  5791. #endif
  5792. #if defined(__IRIX_AL__) // SGI's AL API for IRIX
  5793. #include <dmedia/audio.h>
  5794. #include <unistd.h>
  5795. #include <errno.h>
  5796. extern "C" void *callbackHandler(void * ptr);
  5797. RtApiAl :: RtApiAl()
  5798. {
  5799. this->initialize();
  5800. if (nDevices_ <= 0) {
  5801. sprintf(message_, "RtApiAl: no Irix AL audio devices found!");
  5802. error(RtError::NO_DEVICES_FOUND);
  5803. }
  5804. }
  5805. RtApiAl :: ~RtApiAl()
  5806. {
  5807. // The subclass destructor gets called before the base class
  5808. // destructor, so close any existing streams before deallocating
  5809. // apiDeviceId memory.
  5810. if ( stream_.mode != UNINITIALIZED ) closeStream();
  5811. // Free our allocated apiDeviceId memory.
  5812. long *id;
  5813. for ( unsigned int i=0; i<devices_.size(); i++ ) {
  5814. id = (long *) devices_[i].apiDeviceId;
  5815. if (id) free(id);
  5816. }
  5817. }
// Enumerate IRIX AL devices and populate devices_.  Each device gets a
// calloc'd two-long apiDeviceId: id[0] holds the output resource id (outputs)
// and id[1] the input resource id (inputs); the unused slot stays 0.
void RtApiAl :: initialize(void)
{
  // Count cards and devices
  nDevices_ = 0;

  // Determine the total number of input and output devices.
  nDevices_ = alQueryValues(AL_SYSTEM, AL_DEVICES, 0, 0, 0, 0);
  if (nDevices_ < 0) {
    sprintf(message_, "RtApiAl: error counting devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  if (nDevices_ <= 0) return;

  ALvalue *vls = (ALvalue *) new ALvalue[nDevices_];

  // Create our list of devices and write their ascii identifiers and resource ids.
  char name[64];
  int outs, ins, i;
  ALpv pvs[1];
  pvs[0].param = AL_NAME;        // each alGetParams call writes the device name here
  pvs[0].value.ptr = name;
  pvs[0].sizeIn = 64;
  RtApiDevice device;
  long *id;

  // NOTE(review): AL_DEFAULT_OUTPUT/AL_DEFAULT_INPUT are used as the "what to
  // enumerate" parameter here — verify against the AL docs that this returns
  // the full output/input device lists rather than just the defaults.
  outs = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, vls, nDevices_, 0, 0);
  if (outs < 0) {
    delete [] vls;  // free before error() throws
    sprintf(message_, "RtApiAl: error getting output devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  for (i=0; i<outs; i++) {
    if (alGetParams(vls[i].i, pvs, 1) < 0) {
      delete [] vls;
      sprintf(message_, "RtApiAl: error querying output devices: %s.",
              alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    device.name.erase();
    // NOTE(review): strlen(name)+1 copies the trailing NUL into the
    // std::string, so name.size() includes it — confirm this is intended
    // before "fixing"; other code may rely on the exact bytes.
    device.name.append( (const char *)name, strlen(name)+1);
    devices_.push_back(device);
    id = (long *) calloc(2, sizeof(long));
    id[0] = vls[i].i;                       // output resource id
    devices_[i].apiDeviceId = (void *) id;
  }

  // Input devices are appended after the outputs, reusing the tail of vls.
  ins = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &vls[outs], nDevices_-outs, 0, 0);
  if (ins < 0) {
    delete [] vls;
    sprintf(message_, "RtApiAl: error getting input devices: %s.",
            alGetErrorString(oserror()));
    error(RtError::DRIVER_ERROR);
  }

  for (i=outs; i<ins+outs; i++) {
    if (alGetParams(vls[i].i, pvs, 1) < 0) {
      delete [] vls;
      sprintf(message_, "RtApiAl: error querying input devices: %s.",
              alGetErrorString(oserror()));
      error(RtError::DRIVER_ERROR);
    }
    device.name.erase();
    device.name.append( (const char *)name, strlen(name)+1);
    devices_.push_back(device);             // index i matches push order: outs entries precede
    id = (long *) calloc(2, sizeof(long));
    id[1] = vls[i].i;                       // input resource id (id[0] stays 0)
    devices_[i].apiDeviceId = (void *) id;
  }

  delete [] vls;
}
  5884. int RtApiAl :: getDefaultInputDevice(void)
  5885. {
  5886. ALvalue value;
  5887. long *id;
  5888. int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &value, 1, 0, 0);
  5889. if (result < 0) {
  5890. sprintf(message_, "RtApiAl: error getting default input device id: %s.",
  5891. alGetErrorString(oserror()));
  5892. error(RtError::WARNING);
  5893. }
  5894. else {
  5895. for ( unsigned int i=0; i<devices_.size(); i++ ) {
  5896. id = (long *) devices_[i].apiDeviceId;
  5897. if ( id[1] == value.i ) return i;
  5898. }
  5899. }
  5900. return 0;
  5901. }
  5902. int RtApiAl :: getDefaultOutputDevice(void)
  5903. {
  5904. ALvalue value;
  5905. long *id;
  5906. int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, &value, 1, 0, 0);
  5907. if (result < 0) {
  5908. sprintf(message_, "RtApiAl: error getting default output device id: %s.",
  5909. alGetErrorString(oserror()));
  5910. error(RtError::WARNING);
  5911. }
  5912. else {
  5913. for ( unsigned int i=0; i<devices_.size(); i++ ) {
  5914. id = (long *) devices_[i].apiDeviceId;
  5915. if ( id[0] == value.i ) return i;
  5916. }
  5917. }
  5918. return 0;
  5919. }
// Probe an AL device's capabilities: channel counts, supported sample rates,
// native formats, and duplex support.  apiDeviceId is the {output, input}
// resource-id pair written by initialize(); a zero entry means that direction
// does not exist.  info->probed is set only when the probe yields a usable
// device.
void RtApiAl :: probeDeviceInfo(RtApiDevice *info)
{
  int result;
  long resource;
  ALvalue value;
  ALparamInfo pinfo;

  // Get output resource ID if it exists.
  long *id = (long *) info->apiDeviceId;
  resource = id[0];
  if (resource > 0) {

    // Probe output device parameters.
    result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);  // non-fatal: continue probing
    }
    else {
      info->maxOutputChannels = value.i;
      info->minOutputChannels = 1;
    }

    result = alGetParamInfo(resource, AL_RATE, &pinfo);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      // Keep only the standard rates that fall inside the device's range.
      info->sampleRates.clear();
      for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
        if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
          info->sampleRates.push_back( SAMPLE_RATES[k] );
      }
    }

    // The AL library supports all our formats, except 24-bit and 32-bit ints.
    info->nativeFormats = (RtAudioFormat) 51;
  }

  // Now get input resource ID if it exists.
  resource = id[1];
  if (resource > 0) {

    // Probe input device parameters.
    result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      info->maxInputChannels = value.i;
      info->minInputChannels = 1;
    }

    result = alGetParamInfo(resource, AL_RATE, &pinfo);
    if (result < 0) {
      sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
              info->name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
    }
    else {
      // In the case of the default device, these values will
      // overwrite the rates determined for the output device.  Since
      // the input device is most likely to be more limited than the
      // output device, this is ok.
      info->sampleRates.clear();
      for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
        if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
          info->sampleRates.push_back( SAMPLE_RATES[k] );
      }
    }

    // The AL library supports all our formats, except 24-bit and 32-bit ints.
    info->nativeFormats = (RtAudioFormat) 51;
  }

  // Bail out (without setting probed) when nothing usable was found.
  if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 )
    return;
  if ( info->sampleRates.size() == 0 )
    return;

  // Determine duplex status: limited by the smaller direction.
  if (info->maxInputChannels < info->maxOutputChannels)
    info->maxDuplexChannels = info->maxInputChannels;
  else
    info->maxDuplexChannels = info->maxOutputChannels;
  if (info->minInputChannels < info->minOutputChannels)
    info->minDuplexChannels = info->minInputChannels;
  else
    info->minDuplexChannels = info->minOutputChannels;

  if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
  else info->hasDuplexSupport = false;

  info->probed = true;

  return;
}
// Open one direction (OUTPUT or INPUT) of a stream on the given AL device.
// Configures channels, queue size, sample format, and sample rate, opens the
// ALport, and fills in the corresponding stream_ fields.  *bufferSize may be
// adjusted if the requested queue size is rejected.  Returns SUCCESS/FAILURE;
// all failure paths release whatever was acquired.
bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                int sampleRate, RtAudioFormat format,
                                int *bufferSize, int numberOfBuffers)
{
  int result, nBuffers;
  long resource;
  ALconfig al_config;
  ALport port;
  ALpv pvs[2];
  long *id = (long *) devices_[device].apiDeviceId;

  // Get a new ALconfig structure.
  al_config = alNewConfig();
  if ( !al_config ) {
    sprintf(message_,"RtApiAl: can't get AL config: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  // Set the channels.
  result = alSetChannels(al_config, channels);
  if ( result < 0 ) {
    alFreeConfig(al_config);
    sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.",
            channels, alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  // Attempt to set the queue size.  The al API doesn't provide a
  // means for querying the minimum/maximum buffer size of a device,
  // so if the specified size doesn't work, take whatever the
  // al_config structure returns.
  if ( numberOfBuffers < 1 )
    nBuffers = 1;
  else
    nBuffers = numberOfBuffers;
  long buffer_size = *bufferSize * nBuffers;
  result = alSetQueueSize(al_config, buffer_size); // in sample frames
  if ( result < 0 ) {
    // Get the buffer size specified by the al_config and try that.
    buffer_size = alGetQueueSize(al_config);
    result = alSetQueueSize(al_config, buffer_size);
    if ( result < 0 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
              buffer_size, alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
    // Report the per-buffer size actually granted back to the caller.
    *bufferSize = buffer_size / nBuffers;
  }

  // Set the data format.  Formats AL cannot represent natively (24/32-bit
  // ints) are mapped to float and converted by us later.
  stream_.userFormat = format;
  stream_.deviceFormat[mode] = format;
  if (format == RTAUDIO_SINT8) {
    result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
    result = alSetWidth(al_config, AL_SAMPLE_8);
  }
  else if (format == RTAUDIO_SINT16) {
    result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
    result = alSetWidth(al_config, AL_SAMPLE_16);
  }
  else if (format == RTAUDIO_SINT24) {
    // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
    // The AL library uses the lower 3 bytes, so we'll need to do our
    // own conversion.
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  }
  else if (format == RTAUDIO_SINT32) {
    // The AL library doesn't seem to support the 32-bit integer
    // format, so we'll need to do our own conversion.
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
    stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  }
  else if (format == RTAUDIO_FLOAT32)
    result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
  else if (format == RTAUDIO_FLOAT64)
    result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE);

  // NOTE(review): only the most recent result is checked here; an earlier
  // alSetSampFmt failure followed by a successful alSetWidth would slip
  // through — confirm whether that can happen in practice.
  if ( result == -1 ) {
    alFreeConfig(al_config);
    sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.",
            alGetErrorString(oserror()));
    error(RtError::WARNING);
    return FAILURE;
  }

  if (mode == OUTPUT) {

    // Set our device.  Device index 0 is treated as "use the default".
    if (device == 0)
      resource = AL_DEFAULT_OUTPUT;
    else
      resource = id[0];
    result = alSetDevice(al_config, resource);
    if ( result == -1 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
              devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Open the port.
    port = alOpenPort("RtApiAl Output Port", "w", al_config);
    if( !port ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error opening output port: %s.",
              alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Set the sample rate
    pvs[0].param = AL_MASTER_CLOCK;
    pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
    pvs[1].param = AL_RATE;
    pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
    result = alSetParams(resource, pvs, 2);
    if ( result < 0 ) {
      alClosePort(port);
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
              sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
  }
  else { // mode == INPUT

    // Set our device.
    if (device == 0)
      resource = AL_DEFAULT_INPUT;
    else
      resource = id[1];
    result = alSetDevice(al_config, resource);
    if ( result == -1 ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
              devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Open the port.
    port = alOpenPort("RtApiAl Input Port", "r", al_config);
    if( !port ) {
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error opening input port: %s.",
              alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }

    // Set the sample rate
    pvs[0].param = AL_MASTER_CLOCK;
    pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
    pvs[1].param = AL_RATE;
    pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
    result = alSetParams(resource, pvs, 2);
    if ( result < 0 ) {
      alClosePort(port);
      alFreeConfig(al_config);
      sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
              sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
      error(RtError::WARNING);
      return FAILURE;
    }
  }

  alFreeConfig(al_config);

  stream_.nUserChannels[mode] = channels;
  stream_.nDeviceChannels[mode] = channels;

  // Save stream handle: a calloc'd pair of ALports, [0]=output, [1]=input.
  ALport *handle = (ALport *) stream_.apiHandle;
  if ( handle == 0 ) {
    handle = (ALport *) calloc(2, sizeof(ALport));
    if ( handle == NULL ) {
      sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
    stream_.apiHandle = (void *) handle;
    handle[0] = 0;
    handle[1] = 0;
  }
  handle[mode] = port;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers.  The user buffer is sized for the
  // larger of the two directions so DUPLEX streams can share it.
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        // Reuse an existing (larger) device buffer from the output setup.
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
  else
    stream_.mode = mode;
  stream_.nBuffers = nBuffers;
  stream_.bufferSize = *bufferSize;
  stream_.sampleRate = sampleRate;

  return SUCCESS;

 error:
  // Failure after port/handle setup: close any open ports and release
  // everything acquired so far, then report as a warning.
  if (handle) {
    if (handle[0])
      alClosePort(handle[0]);
    if (handle[1])
      alClosePort(handle[1]);
    free(handle);
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  error(RtError::WARNING);
  return FAILURE;
}
  6257. void RtApiAl :: closeStream()
  6258. {
  6259. // We don't want an exception to be thrown here because this
  6260. // function is called by our class destructor. So, do our own
  6261. // streamId check.
  6262. if ( stream_.mode == UNINITIALIZED ) {
  6263. sprintf(message_, "RtApiAl::closeStream(): no open stream to close!");
  6264. error(RtError::WARNING);
  6265. return;
  6266. }
  6267. ALport *handle = (ALport *) stream_.apiHandle;
  6268. if (stream_.state == STREAM_RUNNING) {
  6269. int buffer_size = stream_.bufferSize * stream_.nBuffers;
  6270. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
  6271. alDiscardFrames(handle[0], buffer_size);
  6272. if (stream_.mode == INPUT || stream_.mode == DUPLEX)
  6273. alDiscardFrames(handle[1], buffer_size);
  6274. stream_.state = STREAM_STOPPED;
  6275. }
  6276. if (stream_.callbackInfo.usingCallback) {
  6277. stream_.callbackInfo.usingCallback = false;
  6278. pthread_join(stream_.callbackInfo.thread, NULL);
  6279. }
  6280. if (handle) {
  6281. if (handle[0]) alClosePort(handle[0]);
  6282. if (handle[1]) alClosePort(handle[1]);
  6283. free(handle);
  6284. stream_.apiHandle = 0;
  6285. }
  6286. if (stream_.userBuffer) {
  6287. free(stream_.userBuffer);
  6288. stream_.userBuffer = 0;
  6289. }
  6290. if (stream_.deviceBuffer) {
  6291. free(stream_.deviceBuffer);
  6292. stream_.deviceBuffer = 0;
  6293. }
  6294. stream_.mode = UNINITIALIZED;
  6295. }
  6296. void RtApiAl :: startStream()
  6297. {
  6298. verifyStream();
  6299. if (stream_.state == STREAM_RUNNING) return;
  6300. MUTEX_LOCK(&stream_.mutex);
  6301. // The AL port is ready as soon as it is opened.
  6302. stream_.state = STREAM_RUNNING;
  6303. MUTEX_UNLOCK(&stream_.mutex);
  6304. }
  6305. void RtApiAl :: stopStream()
  6306. {
  6307. verifyStream();
  6308. if (stream_.state == STREAM_STOPPED) return;
  6309. // Change the state before the lock to improve shutdown response
  6310. // when using a callback.
  6311. stream_.state = STREAM_STOPPED;
  6312. MUTEX_LOCK(&stream_.mutex);
  6313. int result, buffer_size = stream_.bufferSize * stream_.nBuffers;
  6314. ALport *handle = (ALport *) stream_.apiHandle;
  6315. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
  6316. alZeroFrames(handle[0], buffer_size);
  6317. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  6318. result = alDiscardFrames(handle[1], buffer_size);
  6319. if (result == -1) {
  6320. sprintf(message_, "RtApiAl: error draining stream device (%s): %s.",
  6321. devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
  6322. error(RtError::DRIVER_ERROR);
  6323. }
  6324. }
  6325. MUTEX_UNLOCK(&stream_.mutex);
  6326. }
  6327. void RtApiAl :: abortStream()
  6328. {
  6329. verifyStream();
  6330. if (stream_.state == STREAM_STOPPED) return;
  6331. // Change the state before the lock to improve shutdown response
  6332. // when using a callback.
  6333. stream_.state = STREAM_STOPPED;
  6334. MUTEX_LOCK(&stream_.mutex);
  6335. ALport *handle = (ALport *) stream_.apiHandle;
  6336. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  6337. int buffer_size = stream_.bufferSize * stream_.nBuffers;
  6338. int result = alDiscardFrames(handle[0], buffer_size);
  6339. if (result == -1) {
  6340. sprintf(message_, "RtApiAl: error aborting stream device (%s): %s.",
  6341. devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
  6342. error(RtError::DRIVER_ERROR);
  6343. }
  6344. }
  6345. // There is no clear action to take on the input stream, since the
  6346. // port will continue to run in any event.
  6347. MUTEX_UNLOCK(&stream_.mutex);
  6348. }
  6349. int RtApiAl :: streamWillBlock()
  6350. {
  6351. verifyStream();
  6352. if (stream_.state == STREAM_STOPPED) return 0;
  6353. MUTEX_LOCK(&stream_.mutex);
  6354. int frames = 0;
  6355. int err = 0;
  6356. ALport *handle = (ALport *) stream_.apiHandle;
  6357. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  6358. err = alGetFillable(handle[0]);
  6359. if (err < 0) {
  6360. sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
  6361. devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
  6362. error(RtError::DRIVER_ERROR);
  6363. }
  6364. }
  6365. frames = err;
  6366. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  6367. err = alGetFilled(handle[1]);
  6368. if (err < 0) {
  6369. sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
  6370. devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
  6371. error(RtError::DRIVER_ERROR);
  6372. }
  6373. if (frames > err) frames = err;
  6374. }
  6375. frames = stream_.bufferSize - frames;
  6376. if (frames < 0) frames = 0;
  6377. MUTEX_UNLOCK(&stream_.mutex);
  6378. return frames;
  6379. }
  6380. void RtApiAl :: tickStream()
  6381. {
  6382. verifyStream();
  6383. int stopStream = 0;
  6384. if (stream_.state == STREAM_STOPPED) {
  6385. if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
  6386. return;
  6387. }
  6388. else if (stream_.callbackInfo.usingCallback) {
  6389. RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
  6390. stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
  6391. }
  6392. MUTEX_LOCK(&stream_.mutex);
  6393. // The state might change while waiting on a mutex.
  6394. if (stream_.state == STREAM_STOPPED)
  6395. goto unlock;
  6396. char *buffer;
  6397. int channels;
  6398. RtAudioFormat format;
  6399. ALport *handle = (ALport *) stream_.apiHandle;
  6400. if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
  6401. // Setup parameters and do buffer conversion if necessary.
  6402. if (stream_.doConvertBuffer[0]) {
  6403. convertStreamBuffer(OUTPUT);
  6404. buffer = stream_.deviceBuffer;
  6405. channels = stream_.nDeviceChannels[0];
  6406. format = stream_.deviceFormat[0];
  6407. }
  6408. else {
  6409. buffer = stream_.userBuffer;
  6410. channels = stream_.nUserChannels[0];
  6411. format = stream_.userFormat;
  6412. }
  6413. // Do byte swapping if necessary.
  6414. if (stream_.doByteSwap[0])
  6415. byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
  6416. // Write interleaved samples to device.
  6417. alWriteFrames(handle[0], buffer, stream_.bufferSize);
  6418. }
  6419. if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
  6420. // Setup parameters.
  6421. if (stream_.doConvertBuffer[1]) {
  6422. buffer = stream_.deviceBuffer;
  6423. channels = stream_.nDeviceChannels[1];
  6424. format = stream_.deviceFormat[1];
  6425. }
  6426. else {
  6427. buffer = stream_.userBuffer;
  6428. channels = stream_.nUserChannels[1];
  6429. format = stream_.userFormat;
  6430. }
  6431. // Read interleaved samples from device.
  6432. alReadFrames(handle[1], buffer, stream_.bufferSize);
  6433. // Do byte swapping if necessary.
  6434. if (stream_.doByteSwap[1])
  6435. byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
  6436. // Do buffer conversion if necessary.
  6437. if (stream_.doConvertBuffer[1])
  6438. convertStreamBuffer(INPUT);
  6439. }
  6440. unlock:
  6441. MUTEX_UNLOCK(&stream_.mutex);
  6442. if (stream_.callbackInfo.usingCallback && stopStream)
  6443. this->stopStream();
  6444. }
  6445. void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData)
  6446. {
  6447. verifyStream();
  6448. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  6449. if ( info->usingCallback ) {
  6450. sprintf(message_, "RtApiAl: A callback is already set for this stream!");
  6451. error(RtError::WARNING);
  6452. return;
  6453. }
  6454. info->callback = (void *) callback;
  6455. info->userData = userData;
  6456. info->usingCallback = true;
  6457. info->object = (void *) this;
  6458. // Set the thread attributes for joinable and realtime scheduling
  6459. // priority. The higher priority will only take affect if the
  6460. // program is run as root or suid.
  6461. pthread_attr_t attr;
  6462. pthread_attr_init(&attr);
  6463. pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  6464. pthread_attr_setschedpolicy(&attr, SCHED_RR);
  6465. int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
  6466. pthread_attr_destroy(&attr);
  6467. if (err) {
  6468. info->usingCallback = false;
  6469. sprintf(message_, "RtApiAl: error starting callback thread!");
  6470. error(RtError::THREAD_ERROR);
  6471. }
  6472. }
  6473. void RtApiAl :: cancelStreamCallback()
  6474. {
  6475. verifyStream();
  6476. if (stream_.callbackInfo.usingCallback) {
  6477. if (stream_.state == STREAM_RUNNING)
  6478. stopStream();
  6479. MUTEX_LOCK(&stream_.mutex);
  6480. stream_.callbackInfo.usingCallback = false;
  6481. pthread_join(stream_.callbackInfo.thread, NULL);
  6482. stream_.callbackInfo.thread = 0;
  6483. stream_.callbackInfo.callback = NULL;
  6484. stream_.callbackInfo.userData = NULL;
  6485. MUTEX_UNLOCK(&stream_.mutex);
  6486. }
  6487. }
  6488. extern "C" void *callbackHandler(void *ptr)
  6489. {
  6490. CallbackInfo *info = (CallbackInfo *) ptr;
  6491. RtApiAl *object = (RtApiAl *) info->object;
  6492. bool *usingCallback = &info->usingCallback;
  6493. while ( *usingCallback ) {
  6494. try {
  6495. object->tickStream();
  6496. }
  6497. catch (RtError &exception) {
  6498. fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... closing thread.\n\n",
  6499. exception.getMessageString());
  6500. break;
  6501. }
  6502. }
  6503. return 0;
  6504. }
  6505. //******************** End of __IRIX_AL__ *********************//
  6506. #endif
  6507. // *************************************************** //
  6508. //
  6509. // Protected common (OS-independent) RtAudio methods.
  6510. //
  6511. // *************************************************** //
  6512. // This method can be modified to control the behavior of error
  6513. // message reporting and throwing.
  6514. void RtApi :: error(RtError::Type type)
  6515. {
  6516. if (type == RtError::WARNING) {
  6517. fprintf(stderr, "\n%s\n\n", message_);
  6518. }
  6519. else if (type == RtError::DEBUG_WARNING) {
  6520. #if defined(__RTAUDIO_DEBUG__)
  6521. fprintf(stderr, "\n%s\n\n", message_);
  6522. #endif
  6523. }
  6524. else {
  6525. #if defined(__RTAUDIO_DEBUG__)
  6526. fprintf(stderr, "\n%s\n\n", message_);
  6527. #endif
  6528. throw RtError(std::string(message_), type);
  6529. }
  6530. }
  6531. void RtApi :: verifyStream()
  6532. {
  6533. if ( stream_.mode == UNINITIALIZED ) {
  6534. sprintf(message_, "RtAudio: a stream was not previously opened!");
  6535. error(RtError::INVALID_STREAM);
  6536. }
  6537. }
  6538. void RtApi :: clearDeviceInfo(RtApiDevice *info)
  6539. {
  6540. // Don't clear the name or DEVICE_ID fields here ... they are
  6541. // typically set prior to a call of this function.
  6542. info->probed = false;
  6543. info->maxOutputChannels = 0;
  6544. info->maxInputChannels = 0;
  6545. info->maxDuplexChannels = 0;
  6546. info->minOutputChannels = 0;
  6547. info->minInputChannels = 0;
  6548. info->minDuplexChannels = 0;
  6549. info->hasDuplexSupport = false;
  6550. info->sampleRates.clear();
  6551. info->nativeFormats = 0;
  6552. }
  6553. void RtApi :: clearStreamInfo()
  6554. {
  6555. stream_.mode = UNINITIALIZED;
  6556. stream_.state = STREAM_STOPPED;
  6557. stream_.sampleRate = 0;
  6558. stream_.bufferSize = 0;
  6559. stream_.nBuffers = 0;
  6560. stream_.userFormat = 0;
  6561. for ( int i=0; i<2; i++ ) {
  6562. stream_.device[i] = 0;
  6563. stream_.doConvertBuffer[i] = false;
  6564. stream_.deInterleave[i] = false;
  6565. stream_.doByteSwap[i] = false;
  6566. stream_.nUserChannels[i] = 0;
  6567. stream_.nDeviceChannels[i] = 0;
  6568. stream_.deviceFormat[i] = 0;
  6569. }
  6570. }
  6571. int RtApi :: formatBytes(RtAudioFormat format)
  6572. {
  6573. if (format == RTAUDIO_SINT16)
  6574. return 2;
  6575. else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
  6576. format == RTAUDIO_FLOAT32)
  6577. return 4;
  6578. else if (format == RTAUDIO_FLOAT64)
  6579. return 8;
  6580. else if (format == RTAUDIO_SINT8)
  6581. return 1;
  6582. sprintf(message_,"RtApi: undefined format in formatBytes().");
  6583. error(RtError::WARNING);
  6584. return 0;
  6585. }
  6586. void RtApi :: convertStreamBuffer( StreamMode mode )
  6587. {
  6588. // This method does format conversion, input/output channel compensation, and
  6589. // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
  6590. // the upper three bytes of a 32-bit integer.
  6591. int j, jump_in, jump_out, channels;
  6592. RtAudioFormat format_in, format_out;
  6593. char *input, *output;
  6594. if (mode == INPUT) { // convert device to user buffer
  6595. input = stream_.deviceBuffer;
  6596. output = stream_.userBuffer;
  6597. jump_in = stream_.nDeviceChannels[1];
  6598. jump_out = stream_.nUserChannels[1];
  6599. format_in = stream_.deviceFormat[1];
  6600. format_out = stream_.userFormat;
  6601. }
  6602. else { // convert user to device buffer
  6603. input = stream_.userBuffer;
  6604. output = stream_.deviceBuffer;
  6605. jump_in = stream_.nUserChannels[0];
  6606. jump_out = stream_.nDeviceChannels[0];
  6607. format_in = stream_.userFormat;
  6608. format_out = stream_.deviceFormat[0];
  6609. // clear our device buffer when in/out duplex device channels are different
  6610. if ( stream_.mode == DUPLEX &&
  6611. stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] )
  6612. memset(output, 0, stream_.bufferSize * jump_out * formatBytes(format_out));
  6613. }
  6614. channels = (jump_in < jump_out) ? jump_in : jump_out;
  6615. // Set up the interleave/deinterleave offsets
  6616. std::vector<int> offset_in(channels);
  6617. std::vector<int> offset_out(channels);
  6618. if (mode == INPUT && stream_.deInterleave[1]) {
  6619. for (int k=0; k<channels; k++) {
  6620. offset_in[k] = k * stream_.bufferSize;
  6621. offset_out[k] = k;
  6622. jump_in = 1;
  6623. }
  6624. }
  6625. else if (mode == OUTPUT && stream_.deInterleave[0]) {
  6626. for (int k=0; k<channels; k++) {
  6627. offset_in[k] = k;
  6628. offset_out[k] = k * stream_.bufferSize;
  6629. jump_out = 1;
  6630. }
  6631. }
  6632. else {
  6633. for (int k=0; k<channels; k++) {
  6634. offset_in[k] = k;
  6635. offset_out[k] = k;
  6636. }
  6637. }
  6638. if (format_out == RTAUDIO_FLOAT64) {
  6639. Float64 scale;
  6640. Float64 *out = (Float64 *)output;
  6641. if (format_in == RTAUDIO_SINT8) {
  6642. signed char *in = (signed char *)input;
  6643. scale = 1.0 / 128.0;
  6644. for (int i=0; i<stream_.bufferSize; i++) {
  6645. for (j=0; j<channels; j++) {
  6646. out[offset_out[j]] = (Float64) in[offset_in[j]];
  6647. out[offset_out[j]] *= scale;
  6648. }
  6649. in += jump_in;
  6650. out += jump_out;
  6651. }
  6652. }
  6653. else if (format_in == RTAUDIO_SINT16) {
  6654. Int16 *in = (Int16 *)input;
  6655. scale = 1.0 / 32768.0;
  6656. for (int i=0; i<stream_.bufferSize; i++) {
  6657. for (j=0; j<channels; j++) {
  6658. out[offset_out[j]] = (Float64) in[offset_in[j]];
  6659. out[offset_out[j]] *= scale;
  6660. }
  6661. in += jump_in;
  6662. out += jump_out;
  6663. }
  6664. }
  6665. else if (format_in == RTAUDIO_SINT24) {
  6666. Int32 *in = (Int32 *)input;
  6667. scale = 1.0 / 2147483648.0;
  6668. for (int i=0; i<stream_.bufferSize; i++) {
  6669. for (j=0; j<channels; j++) {
  6670. out[offset_out[j]] = (Float64) (in[offset_in[j]] & 0xffffff00);
  6671. out[offset_out[j]] *= scale;
  6672. }
  6673. in += jump_in;
  6674. out += jump_out;
  6675. }
  6676. }
  6677. else if (format_in == RTAUDIO_SINT32) {
  6678. Int32 *in = (Int32 *)input;
  6679. scale = 1.0 / 2147483648.0;
  6680. for (int i=0; i<stream_.bufferSize; i++) {
  6681. for (j=0; j<channels; j++) {
  6682. out[offset_out[j]] = (Float64) in[offset_in[j]];
  6683. out[offset_out[j]] *= scale;
  6684. }
  6685. in += jump_in;
  6686. out += jump_out;
  6687. }
  6688. }
  6689. else if (format_in == RTAUDIO_FLOAT32) {
  6690. Float32 *in = (Float32 *)input;
  6691. for (int i=0; i<stream_.bufferSize; i++) {
  6692. for (j=0; j<channels; j++) {
  6693. out[offset_out[j]] = (Float64) in[offset_in[j]];
  6694. }
  6695. in += jump_in;
  6696. out += jump_out;
  6697. }
  6698. }
  6699. else if (format_in == RTAUDIO_FLOAT64) {
  6700. // Channel compensation and/or (de)interleaving only.
  6701. Float64 *in = (Float64 *)input;
  6702. for (int i=0; i<stream_.bufferSize; i++) {
  6703. for (j=0; j<channels; j++) {
  6704. out[offset_out[j]] = in[offset_in[j]];
  6705. }
  6706. in += jump_in;
  6707. out += jump_out;
  6708. }
  6709. }
  6710. }
  6711. else if (format_out == RTAUDIO_FLOAT32) {
  6712. Float32 scale;
  6713. Float32 *out = (Float32 *)output;
  6714. if (format_in == RTAUDIO_SINT8) {
  6715. signed char *in = (signed char *)input;
  6716. scale = 1.0 / 128.0;
  6717. for (int i=0; i<stream_.bufferSize; i++) {
  6718. for (j=0; j<channels; j++) {
  6719. out[offset_out[j]] = (Float32) in[offset_in[j]];
  6720. out[offset_out[j]] *= scale;
  6721. }
  6722. in += jump_in;
  6723. out += jump_out;
  6724. }
  6725. }
  6726. else if (format_in == RTAUDIO_SINT16) {
  6727. Int16 *in = (Int16 *)input;
  6728. scale = 1.0 / 32768.0;
  6729. for (int i=0; i<stream_.bufferSize; i++) {
  6730. for (j=0; j<channels; j++) {
  6731. out[offset_out[j]] = (Float32) in[offset_in[j]];
  6732. out[offset_out[j]] *= scale;
  6733. }
  6734. in += jump_in;
  6735. out += jump_out;
  6736. }
  6737. }
  6738. else if (format_in == RTAUDIO_SINT24) {
  6739. Int32 *in = (Int32 *)input;
  6740. scale = 1.0 / 2147483648.0;
  6741. for (int i=0; i<stream_.bufferSize; i++) {
  6742. for (j=0; j<channels; j++) {
  6743. out[offset_out[j]] = (Float32) (in[offset_in[j]] & 0xffffff00);
  6744. out[offset_out[j]] *= scale;
  6745. }
  6746. in += jump_in;
  6747. out += jump_out;
  6748. }
  6749. }
  6750. else if (format_in == RTAUDIO_SINT32) {
  6751. Int32 *in = (Int32 *)input;
  6752. scale = 1.0 / 2147483648.0;
  6753. for (int i=0; i<stream_.bufferSize; i++) {
  6754. for (j=0; j<channels; j++) {
  6755. out[offset_out[j]] = (Float32) in[offset_in[j]];
  6756. out[offset_out[j]] *= scale;
  6757. }
  6758. in += jump_in;
  6759. out += jump_out;
  6760. }
  6761. }
  6762. else if (format_in == RTAUDIO_FLOAT32) {
  6763. // Channel compensation and/or (de)interleaving only.
  6764. Float32 *in = (Float32 *)input;
  6765. for (int i=0; i<stream_.bufferSize; i++) {
  6766. for (j=0; j<channels; j++) {
  6767. out[offset_out[j]] = in[offset_in[j]];
  6768. }
  6769. in += jump_in;
  6770. out += jump_out;
  6771. }
  6772. }
  6773. else if (format_in == RTAUDIO_FLOAT64) {
  6774. Float64 *in = (Float64 *)input;
  6775. for (int i=0; i<stream_.bufferSize; i++) {
  6776. for (j=0; j<channels; j++) {
  6777. out[offset_out[j]] = (Float32) in[offset_in[j]];
  6778. }
  6779. in += jump_in;
  6780. out += jump_out;
  6781. }
  6782. }
  6783. }
  6784. else if (format_out == RTAUDIO_SINT32) {
  6785. Int32 *out = (Int32 *)output;
  6786. if (format_in == RTAUDIO_SINT8) {
  6787. signed char *in = (signed char *)input;
  6788. for (int i=0; i<stream_.bufferSize; i++) {
  6789. for (j=0; j<channels; j++) {
  6790. out[offset_out[j]] = (Int32) in[offset_in[j]];
  6791. out[offset_out[j]] <<= 24;
  6792. }
  6793. in += jump_in;
  6794. out += jump_out;
  6795. }
  6796. }
  6797. else if (format_in == RTAUDIO_SINT16) {
  6798. Int16 *in = (Int16 *)input;
  6799. for (int i=0; i<stream_.bufferSize; i++) {
  6800. for (j=0; j<channels; j++) {
  6801. out[offset_out[j]] = (Int32) in[offset_in[j]];
  6802. out[offset_out[j]] <<= 16;
  6803. }
  6804. in += jump_in;
  6805. out += jump_out;
  6806. }
  6807. }
  6808. else if (format_in == RTAUDIO_SINT24) {
  6809. Int32 *in = (Int32 *)input;
  6810. for (int i=0; i<stream_.bufferSize; i++) {
  6811. for (j=0; j<channels; j++) {
  6812. out[offset_out[j]] = (Int32) in[offset_in[j]];
  6813. }
  6814. in += jump_in;
  6815. out += jump_out;
  6816. }
  6817. }
  6818. else if (format_in == RTAUDIO_SINT32) {
  6819. // Channel compensation and/or (de)interleaving only.
  6820. Int32 *in = (Int32 *)input;
  6821. for (int i=0; i<stream_.bufferSize; i++) {
  6822. for (j=0; j<channels; j++) {
  6823. out[offset_out[j]] = in[offset_in[j]];
  6824. }
  6825. in += jump_in;
  6826. out += jump_out;
  6827. }
  6828. }
  6829. else if (format_in == RTAUDIO_FLOAT32) {
  6830. Float32 *in = (Float32 *)input;
  6831. for (int i=0; i<stream_.bufferSize; i++) {
  6832. for (j=0; j<channels; j++) {
  6833. out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
  6834. }
  6835. in += jump_in;
  6836. out += jump_out;
  6837. }
  6838. }
  6839. else if (format_in == RTAUDIO_FLOAT64) {
  6840. Float64 *in = (Float64 *)input;
  6841. for (int i=0; i<stream_.bufferSize; i++) {
  6842. for (j=0; j<channels; j++) {
  6843. out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
  6844. }
  6845. in += jump_in;
  6846. out += jump_out;
  6847. }
  6848. }
  6849. }
  6850. else if (format_out == RTAUDIO_SINT24) {
  6851. Int32 *out = (Int32 *)output;
  6852. if (format_in == RTAUDIO_SINT8) {
  6853. signed char *in = (signed char *)input;
  6854. for (int i=0; i<stream_.bufferSize; i++) {
  6855. for (j=0; j<channels; j++) {
  6856. out[offset_out[j]] = (Int32) in[offset_in[j]];
  6857. out[offset_out[j]] <<= 24;
  6858. }
  6859. in += jump_in;
  6860. out += jump_out;
  6861. }
  6862. }
  6863. else if (format_in == RTAUDIO_SINT16) {
  6864. Int16 *in = (Int16 *)input;
  6865. for (int i=0; i<stream_.bufferSize; i++) {
  6866. for (j=0; j<channels; j++) {
  6867. out[offset_out[j]] = (Int32) in[offset_in[j]];
  6868. out[offset_out[j]] <<= 16;
  6869. }
  6870. in += jump_in;
  6871. out += jump_out;
  6872. }
  6873. }
  6874. else if (format_in == RTAUDIO_SINT24) {
  6875. // Channel compensation and/or (de)interleaving only.
  6876. Int32 *in = (Int32 *)input;
  6877. for (int i=0; i<stream_.bufferSize; i++) {
  6878. for (j=0; j<channels; j++) {
  6879. out[offset_out[j]] = in[offset_in[j]];
  6880. }
  6881. in += jump_in;
  6882. out += jump_out;
  6883. }
  6884. }
  6885. else if (format_in == RTAUDIO_SINT32) {
  6886. Int32 *in = (Int32 *)input;
  6887. for (int i=0; i<stream_.bufferSize; i++) {
  6888. for (j=0; j<channels; j++) {
  6889. out[offset_out[j]] = (Int32) (in[offset_in[j]] & 0xffffff00);
  6890. }
  6891. in += jump_in;
  6892. out += jump_out;
  6893. }
  6894. }
  6895. else if (format_in == RTAUDIO_FLOAT32) {
  6896. Float32 *in = (Float32 *)input;
  6897. for (int i=0; i<stream_.bufferSize; i++) {
  6898. for (j=0; j<channels; j++) {
  6899. out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
  6900. }
  6901. in += jump_in;
  6902. out += jump_out;
  6903. }
  6904. }
  6905. else if (format_in == RTAUDIO_FLOAT64) {
  6906. Float64 *in = (Float64 *)input;
  6907. for (int i=0; i<stream_.bufferSize; i++) {
  6908. for (j=0; j<channels; j++) {
  6909. out[offset_out[j]] = (Int32) (in[offset_in[j]] * 2147483647.0);
  6910. }
  6911. in += jump_in;
  6912. out += jump_out;
  6913. }
  6914. }
  6915. }
  6916. else if (format_out == RTAUDIO_SINT16) {
  6917. Int16 *out = (Int16 *)output;
  6918. if (format_in == RTAUDIO_SINT8) {
  6919. signed char *in = (signed char *)input;
  6920. for (int i=0; i<stream_.bufferSize; i++) {
  6921. for (j=0; j<channels; j++) {
  6922. out[offset_out[j]] = (Int16) in[offset_in[j]];
  6923. out[offset_out[j]] <<= 8;
  6924. }
  6925. in += jump_in;
  6926. out += jump_out;
  6927. }
  6928. }
  6929. else if (format_in == RTAUDIO_SINT16) {
  6930. // Channel compensation and/or (de)interleaving only.
  6931. Int16 *in = (Int16 *)input;
  6932. for (int i=0; i<stream_.bufferSize; i++) {
  6933. for (j=0; j<channels; j++) {
  6934. out[offset_out[j]] = in[offset_in[j]];
  6935. }
  6936. in += jump_in;
  6937. out += jump_out;
  6938. }
  6939. }
  6940. else if (format_in == RTAUDIO_SINT24) {
  6941. Int32 *in = (Int32 *)input;
  6942. for (int i=0; i<stream_.bufferSize; i++) {
  6943. for (j=0; j<channels; j++) {
  6944. out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
  6945. }
  6946. in += jump_in;
  6947. out += jump_out;
  6948. }
  6949. }
  6950. else if (format_in == RTAUDIO_SINT32) {
  6951. Int32 *in = (Int32 *)input;
  6952. for (int i=0; i<stream_.bufferSize; i++) {
  6953. for (j=0; j<channels; j++) {
  6954. out[offset_out[j]] = (Int16) ((in[offset_in[j]] >> 16) & 0x0000ffff);
  6955. }
  6956. in += jump_in;
  6957. out += jump_out;
  6958. }
  6959. }
  6960. else if (format_in == RTAUDIO_FLOAT32) {
  6961. Float32 *in = (Float32 *)input;
  6962. for (int i=0; i<stream_.bufferSize; i++) {
  6963. for (j=0; j<channels; j++) {
  6964. out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
  6965. }
  6966. in += jump_in;
  6967. out += jump_out;
  6968. }
  6969. }
  6970. else if (format_in == RTAUDIO_FLOAT64) {
  6971. Float64 *in = (Float64 *)input;
  6972. for (int i=0; i<stream_.bufferSize; i++) {
  6973. for (j=0; j<channels; j++) {
  6974. out[offset_out[j]] = (Int16) (in[offset_in[j]] * 32767.0);
  6975. }
  6976. in += jump_in;
  6977. out += jump_out;
  6978. }
  6979. }
  6980. }
  6981. else if (format_out == RTAUDIO_SINT8) {
  6982. signed char *out = (signed char *)output;
  6983. if (format_in == RTAUDIO_SINT8) {
  6984. // Channel compensation and/or (de)interleaving only.
  6985. signed char *in = (signed char *)input;
  6986. for (int i=0; i<stream_.bufferSize; i++) {
  6987. for (j=0; j<channels; j++) {
  6988. out[offset_out[j]] = in[offset_in[j]];
  6989. }
  6990. in += jump_in;
  6991. out += jump_out;
  6992. }
  6993. }
  6994. if (format_in == RTAUDIO_SINT16) {
  6995. Int16 *in = (Int16 *)input;
  6996. for (int i=0; i<stream_.bufferSize; i++) {
  6997. for (j=0; j<channels; j++) {
  6998. out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 8) & 0x00ff);
  6999. }
  7000. in += jump_in;
  7001. out += jump_out;
  7002. }
  7003. }
  7004. else if (format_in == RTAUDIO_SINT24) {
  7005. Int32 *in = (Int32 *)input;
  7006. for (int i=0; i<stream_.bufferSize; i++) {
  7007. for (j=0; j<channels; j++) {
  7008. out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
  7009. }
  7010. in += jump_in;
  7011. out += jump_out;
  7012. }
  7013. }
  7014. else if (format_in == RTAUDIO_SINT32) {
  7015. Int32 *in = (Int32 *)input;
  7016. for (int i=0; i<stream_.bufferSize; i++) {
  7017. for (j=0; j<channels; j++) {
  7018. out[offset_out[j]] = (signed char) ((in[offset_in[j]] >> 24) & 0x000000ff);
  7019. }
  7020. in += jump_in;
  7021. out += jump_out;
  7022. }
  7023. }
  7024. else if (format_in == RTAUDIO_FLOAT32) {
  7025. Float32 *in = (Float32 *)input;
  7026. for (int i=0; i<stream_.bufferSize; i++) {
  7027. for (j=0; j<channels; j++) {
  7028. out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
  7029. }
  7030. in += jump_in;
  7031. out += jump_out;
  7032. }
  7033. }
  7034. else if (format_in == RTAUDIO_FLOAT64) {
  7035. Float64 *in = (Float64 *)input;
  7036. for (int i=0; i<stream_.bufferSize; i++) {
  7037. for (j=0; j<channels; j++) {
  7038. out[offset_out[j]] = (signed char) (in[offset_in[j]] * 127.0);
  7039. }
  7040. in += jump_in;
  7041. out += jump_out;
  7042. }
  7043. }
  7044. }
  7045. }
  7046. void RtApi :: byteSwapBuffer( char *buffer, int samples, RtAudioFormat format )
  7047. {
  7048. register char val;
  7049. register char *ptr;
  7050. ptr = buffer;
  7051. if (format == RTAUDIO_SINT16) {
  7052. for (int i=0; i<samples; i++) {
  7053. // Swap 1st and 2nd bytes.
  7054. val = *(ptr);
  7055. *(ptr) = *(ptr+1);
  7056. *(ptr+1) = val;
  7057. // Increment 2 bytes.
  7058. ptr += 2;
  7059. }
  7060. }
  7061. else if (format == RTAUDIO_SINT24 ||
  7062. format == RTAUDIO_SINT32 ||
  7063. format == RTAUDIO_FLOAT32) {
  7064. for (int i=0; i<samples; i++) {
  7065. // Swap 1st and 4th bytes.
  7066. val = *(ptr);
  7067. *(ptr) = *(ptr+3);
  7068. *(ptr+3) = val;
  7069. // Swap 2nd and 3rd bytes.
  7070. ptr += 1;
  7071. val = *(ptr);
  7072. *(ptr) = *(ptr+1);
  7073. *(ptr+1) = val;
  7074. // Increment 4 bytes.
  7075. ptr += 4;
  7076. }
  7077. }
  7078. else if (format == RTAUDIO_FLOAT64) {
  7079. for (int i=0; i<samples; i++) {
  7080. // Swap 1st and 8th bytes
  7081. val = *(ptr);
  7082. *(ptr) = *(ptr+7);
  7083. *(ptr+7) = val;
  7084. // Swap 2nd and 7th bytes
  7085. ptr += 1;
  7086. val = *(ptr);
  7087. *(ptr) = *(ptr+5);
  7088. *(ptr+5) = val;
  7089. // Swap 3rd and 6th bytes
  7090. ptr += 1;
  7091. val = *(ptr);
  7092. *(ptr) = *(ptr+3);
  7093. *(ptr+3) = val;
  7094. // Swap 4th and 5th bytes
  7095. ptr += 1;
  7096. val = *(ptr);
  7097. *(ptr) = *(ptr+1);
  7098. *(ptr+1) = val;
  7099. // Increment 8 bytes.
  7100. ptr += 8;
  7101. }
  7102. }
  7103. }