  1. /************************************************************************/
  2. /*! \class RtAudio
  3. \brief Realtime audio i/o C++ classes.
  4. RtAudio provides a common API (Application Programming Interface)
  5. for realtime audio input/output across Linux (native ALSA, Jack,
  6. and OSS), SGI, Macintosh OS X (CoreAudio and Jack), and Windows
  7. (DirectSound and ASIO) operating systems.
  8. RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
  9. RtAudio: realtime audio i/o C++ classes
  10. Copyright (c) 2001-2007 Gary P. Scavone
  11. Permission is hereby granted, free of charge, to any person
  12. obtaining a copy of this software and associated documentation files
  13. (the "Software"), to deal in the Software without restriction,
  14. including without limitation the rights to use, copy, modify, merge,
  15. publish, distribute, sublicense, and/or sell copies of the Software,
  16. and to permit persons to whom the Software is furnished to do so,
  17. subject to the following conditions:
  18. The above copyright notice and this permission notice shall be
  19. included in all copies or substantial portions of the Software.
  20. Any person wishing to distribute modifications to the Software is
  21. asked to send the modifications to the original developer so that
  22. they can be incorporated into the canonical version. This is,
  23. however, not a binding provision of this license.
  24. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  25. EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  26. MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
  27. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
  28. ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
  29. CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  30. WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  31. */
  32. /************************************************************************/
  33. // RtAudio: Version 4.0.3
  34. #include "RtAudio.h"
  35. #include <iostream>
  36. // Static variable definitions.
  37. const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
  38. const unsigned int RtApi::SAMPLE_RATES[] = {
  39. 4000, 5512, 8000, 9600, 11025, 16000, 22050,
  40. 32000, 44100, 48000, 88200, 96000, 176400, 192000
  41. };
  42. #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  43. #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  44. #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
  45. #define MUTEX_LOCK(A) EnterCriticalSection(A)
  46. #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
  47. #elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
  48. // pthread API
  49. #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  50. #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
  51. #define MUTEX_LOCK(A) pthread_mutex_lock(A)
  52. #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
  53. #else
  54. #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
  55. #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
  56. #endif
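// The MUTEX_* macros above hide the difference between the Win32
// critical-section API and the pthread mutex API behind a single locking
// vocabulary.  A minimal sketch of the lock/unlock pattern used by the
// stream control methods later in this file (illustrative only, not part
// of RtAudio; assumes the StreamMutex typedef from RtAudio.h):
#if 0
static void exampleGuardedUpdate( StreamMutex &mutex, double &sharedValue )
{
  MUTEX_LOCK( &mutex );    // EnterCriticalSection() or pthread_mutex_lock()
  sharedValue += 1.0;      // touch state shared with the audio callback
  MUTEX_UNLOCK( &mutex );  // LeaveCriticalSection() or pthread_mutex_unlock()
}
#endif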
  57. // *************************************************** //
  58. //
  59. // RtAudio definitions.
  60. //
  61. // *************************************************** //
  62. void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
  63. {
  64. apis.clear();
  65. // The order here will control the order of RtAudio's API search in
  66. // the constructor.
  67. #if defined(__UNIX_JACK__)
  68. apis.push_back( UNIX_JACK );
  69. #endif
  70. #if defined(__LINUX_ALSA__)
  71. apis.push_back( LINUX_ALSA );
  72. #endif
  73. #if defined(__LINUX_OSS__)
  74. apis.push_back( LINUX_OSS );
  75. #endif
  76. #if defined(__WINDOWS_ASIO__)
  77. apis.push_back( WINDOWS_ASIO );
  78. #endif
  79. #if defined(__WINDOWS_DS__)
  80. apis.push_back( WINDOWS_DS );
  81. #endif
  82. #if defined(__MACOSX_CORE__)
  83. apis.push_back( MACOSX_CORE );
  84. #endif
  85. #if defined(__RTAUDIO_DUMMY__)
  86. apis.push_back( RTAUDIO_DUMMY );
  87. #endif
  88. }
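// A short client-side sketch (not part of RtAudio itself) showing how the
// list built above is typically used: query the compiled APIs, then either
// request one explicitly or let the constructor below run its own search.
#if 0
#include "RtAudio.h"
#include <iostream>
#include <vector>

int exampleListApis()
{
  std::vector<RtAudio::Api> apis;
  RtAudio::getCompiledApi( apis );   // filled in the search order shown above
  std::cout << apis.size() << " compiled API(s) found." << std::endl;

  // Request a specific API; if it was not compiled in, the constructor
  // falls back to the first compiled API with at least one device.
  RtAudio audio( apis.empty() ? RtAudio::UNSPECIFIED : apis[0] );
  std::cout << "Device count: " << audio.getDeviceCount() << std::endl;
  return 0;
}
#endif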
  89. void RtAudio :: openRtApi( RtAudio::Api api )
  90. {
  91. #if defined(__UNIX_JACK__)
  92. if ( api == UNIX_JACK )
  93. rtapi_ = new RtApiJack();
  94. #endif
  95. #if defined(__LINUX_ALSA__)
  96. if ( api == LINUX_ALSA )
  97. rtapi_ = new RtApiAlsa();
  98. #endif
  99. #if defined(__LINUX_OSS__)
  100. if ( api == LINUX_OSS )
  101. rtapi_ = new RtApiOss();
  102. #endif
  103. #if defined(__WINDOWS_ASIO__)
  104. if ( api == WINDOWS_ASIO )
  105. rtapi_ = new RtApiAsio();
  106. #endif
  107. #if defined(__WINDOWS_DS__)
  108. if ( api == WINDOWS_DS )
  109. rtapi_ = new RtApiDs();
  110. #endif
  111. #if defined(__MACOSX_CORE__)
  112. if ( api == MACOSX_CORE )
  113. rtapi_ = new RtApiCore();
  114. #endif
  115. #if defined(__RTAUDIO_DUMMY__)
  116. if ( api == RTAUDIO_DUMMY )
  117. rtapi_ = new RtApiDummy();
  118. #endif
  119. }
  120. RtAudio :: RtAudio( RtAudio::Api api ) throw()
  121. {
  122. rtapi_ = 0;
  123. if ( api != UNSPECIFIED ) {
  124. // Attempt to open the specified API.
  125. openRtApi( api );
  126. if ( rtapi_ ) return;
  127. // No compiled support for specified API value. Issue a debug
  128. // warning and continue as if no API was specified.
  129. std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
  130. }
  131. // Iterate through the compiled APIs and return as soon as we find
  132. // one with at least one device or we reach the end of the list.
  133. std::vector< RtAudio::Api > apis;
  134. getCompiledApi( apis );
  135. for ( unsigned int i=0; i<apis.size(); i++ ) {
  136. openRtApi( apis[i] );
  137. if ( rtapi_->getDeviceCount() ) break;
  138. }
  139. if ( rtapi_ ) return;
  140. // It should not be possible to get here because the preprocessor
  141. // definition __RTAUDIO_DUMMY__ is automatically defined if no
  142. // API-specific definitions are passed to the compiler. But just in
  143. // case something weird happens, we'll print out an error message.
  144. std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
  145. }
  146. RtAudio :: ~RtAudio() throw()
  147. {
  148. delete rtapi_;
  149. }
  150. void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
  151. RtAudio::StreamParameters *inputParameters,
  152. RtAudioFormat format, unsigned int sampleRate,
  153. unsigned int *bufferFrames,
  154. RtAudioCallback callback, void *userData,
  155. RtAudio::StreamOptions *options )
  156. {
  157. return rtapi_->openStream( outputParameters, inputParameters, format,
  158. sampleRate, bufferFrames, callback,
  159. userData, options );
  160. }
  161. // *************************************************** //
  162. //
  163. // Public RtApi definitions (see end of file for
  164. // private or protected utility functions).
  165. //
  166. // *************************************************** //
  167. RtApi :: RtApi()
  168. {
  169. stream_.state = STREAM_CLOSED;
  170. stream_.mode = UNINITIALIZED;
  171. stream_.apiHandle = 0;
  172. stream_.userBuffer[0] = 0;
  173. stream_.userBuffer[1] = 0;
  174. MUTEX_INITIALIZE( &stream_.mutex );
  175. showWarnings_ = true;
  176. }
  177. RtApi :: ~RtApi()
  178. {
  179. MUTEX_DESTROY( &stream_.mutex );
  180. }
  181. void RtApi :: openStream( RtAudio::StreamParameters *oParams,
  182. RtAudio::StreamParameters *iParams,
  183. RtAudioFormat format, unsigned int sampleRate,
  184. unsigned int *bufferFrames,
  185. RtAudioCallback callback, void *userData,
  186. RtAudio::StreamOptions *options )
  187. {
  188. if ( stream_.state != STREAM_CLOSED ) {
  189. errorText_ = "RtApi::openStream: a stream is already open!";
  190. error( RtError::INVALID_USE );
  191. }
  192. if ( oParams && oParams->nChannels < 1 ) {
  193. errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
  194. error( RtError::INVALID_USE );
  195. }
  196. if ( iParams && iParams->nChannels < 1 ) {
  197. errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
  198. error( RtError::INVALID_USE );
  199. }
  200. if ( oParams == NULL && iParams == NULL ) {
  201. errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
  202. error( RtError::INVALID_USE );
  203. }
  204. if ( formatBytes(format) == 0 ) {
  205. errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
  206. error( RtError::INVALID_USE );
  207. }
  208. unsigned int nDevices = getDeviceCount();
  209. unsigned int oChannels = 0;
  210. if ( oParams ) {
  211. oChannels = oParams->nChannels;
  212. if ( oParams->deviceId >= nDevices ) {
  213. errorText_ = "RtApi::openStream: output device parameter value is invalid.";
  214. error( RtError::INVALID_USE );
  215. }
  216. }
  217. unsigned int iChannels = 0;
  218. if ( iParams ) {
  219. iChannels = iParams->nChannels;
  220. if ( iParams->deviceId >= nDevices ) {
  221. errorText_ = "RtApi::openStream: input device parameter value is invalid.";
  222. error( RtError::INVALID_USE );
  223. }
  224. }
  225. clearStreamInfo();
  226. bool result;
  227. if ( oChannels > 0 ) {
  228. result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
  229. sampleRate, format, bufferFrames, options );
  230. if ( result == false ) error( RtError::SYSTEM_ERROR );
  231. }
  232. if ( iChannels > 0 ) {
  233. result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
  234. sampleRate, format, bufferFrames, options );
  235. if ( result == false ) {
  236. if ( oChannels > 0 ) closeStream();
  237. error( RtError::SYSTEM_ERROR );
  238. }
  239. }
  240. stream_.callbackInfo.callback = (void *) callback;
  241. stream_.callbackInfo.userData = userData;
  242. if ( options ) options->numberOfBuffers = stream_.nBuffers;
  243. stream_.state = STREAM_STOPPED;
  244. }
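// A minimal client-side sketch (not part of RtAudio itself) of a stream
// open that satisfies the checks above: a non-NULL StreamParameters with
// nChannels >= 1, a valid deviceId, and a defined sample format.  The
// callback return value follows the convention used by the API-specific
// callbackEvent() implementations below: 0 = continue, 1 = drain and stop,
// 2 = abort the stream.
#if 0
#include "RtAudio.h"
#include <cstring>
#include <iostream>

static int silenceCallback( void *outputBuffer, void * /*inputBuffer*/,
                            unsigned int nFrames, double /*streamTime*/,
                            RtAudioStreamStatus status, void * /*userData*/ )
{
  if ( status & RTAUDIO_OUTPUT_UNDERFLOW )
    std::cerr << "Output underflow!" << std::endl;
  std::memset( outputBuffer, 0, nFrames * 2 /*channels*/ * sizeof( float ) );
  return 0;
}

int exampleOpenStream()
{
  RtAudio audio;
  RtAudio::StreamParameters oParams;
  oParams.deviceId = audio.getDefaultOutputDevice();
  oParams.nChannels = 2;            // must be >= 1
  oParams.firstChannel = 0;
  unsigned int bufferFrames = 512;  // may be adjusted by the API

  try {
    audio.openStream( &oParams, NULL, RTAUDIO_FLOAT32, 44100,
                      &bufferFrames, &silenceCallback, NULL, NULL );
    audio.startStream();
    // ... later: audio.stopStream(); audio.closeStream();
  }
  catch ( RtError &e ) {
    e.printMessage();
    return 1;
  }
  return 0;
}
#endif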
  245. unsigned int RtApi :: getDefaultInputDevice( void )
  246. {
  247. // Should be implemented in subclasses if possible.
  248. return 0;
  249. }
  250. unsigned int RtApi :: getDefaultOutputDevice( void )
  251. {
  252. // Should be implemented in subclasses if possible.
  253. return 0;
  254. }
  255. void RtApi :: closeStream( void )
  256. {
  257. // MUST be implemented in subclasses!
  258. return;
  259. }
  260. bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  261. unsigned int firstChannel, unsigned int sampleRate,
  262. RtAudioFormat format, unsigned int *bufferSize,
  263. RtAudio::StreamOptions *options )
  264. {
  265. // MUST be implemented in subclasses!
  266. return FAILURE;
  267. }
  268. void RtApi :: tickStreamTime( void )
  269. {
  270. // Subclasses that do not provide their own implementation of
  271. // getStreamTime should call this function once per buffer I/O to
  272. // provide basic stream time support.
  273. stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
  274. #if defined( HAVE_GETTIMEOFDAY )
  275. gettimeofday( &stream_.lastTickTimestamp, NULL );
  276. #endif
  277. }
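// For example, with bufferSize = 512 frames and sampleRate = 44100 Hz, each
// call above advances streamTime by 512 / 44100, i.e. roughly 11.6 ms per
// buffer of audio I/O.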
  278. long RtApi :: getStreamLatency( void )
  279. {
  280. verifyStream();
  281. long totalLatency = 0;
  282. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
  283. totalLatency = stream_.latency[0];
  284. if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
  285. totalLatency += stream_.latency[1];
  286. return totalLatency;
  287. }
  288. double RtApi :: getStreamTime( void )
  289. {
  290. verifyStream();
  291. #if defined( HAVE_GETTIMEOFDAY )
  292. // Return a very accurate estimate of the stream time by
  293. // adding in the elapsed time since the last tick.
  294. struct timeval then;
  295. struct timeval now;
  296. if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
  297. return stream_.streamTime;
  298. gettimeofday( &now, NULL );
  299. then = stream_.lastTickTimestamp;
  300. return stream_.streamTime +
  301. ((now.tv_sec + 0.000001 * now.tv_usec) -
  302. (then.tv_sec + 0.000001 * then.tv_usec));
  303. #else
  304. return stream_.streamTime;
  305. #endif
  306. }
  307. // *************************************************** //
  308. //
  309. // OS/API-specific methods.
  310. //
  311. // *************************************************** //
  312. #if defined(__MACOSX_CORE__)
  313. // The OS X CoreAudio API is designed to use a separate callback
  314. // procedure for each of its audio devices. A single RtAudio duplex
  315. // stream using two different devices is supported here, though it
  316. // cannot be guaranteed to always behave correctly because we cannot
  317. // synchronize these two callbacks.
  318. //
  319. // A property listener is installed for over/underrun information.
  320. // However, no functionality is currently provided to allow property
  321. // listeners to trigger user handlers because it is unclear what could
  322. // be done if a critical stream parameter (buffer size, sample rate,
  323. // device disconnect) notification arrived. The listeners entail
  324. // quite a bit of extra code and most likely, a user program wouldn't
  325. // be prepared for the result anyway. However, we do provide a flag
  326. // to the client callback function to inform of an over/underrun.
  327. //
  328. // The mechanism for querying and setting system parameters was
  329. // updated (and perhaps simplified) in OS-X version 10.4. However,
  330. // since 10.4 support is not necessarily available to all users, I've
  331. // decided not to update the respective code at this time. Perhaps
  332. // this will happen when Apple makes 10.4 free for everyone. :-)
  333. // A structure to hold various information related to the CoreAudio API
  334. // implementation.
  335. struct CoreHandle {
  336. AudioDeviceID id[2]; // device ids
  337. UInt32 iStream[2]; // device stream index (first for mono mode)
  338. bool xrun[2];
  339. char *deviceBuffer;
  340. pthread_cond_t condition;
  341. int drainCounter; // Tracks callback counts when draining
  342. bool internalDrain; // Indicates if stop is initiated from callback or not.
  343. CoreHandle()
  344. :deviceBuffer(0), drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
  345. };
  346. RtApiCore :: RtApiCore()
  347. {
  348. // Nothing to do here.
  349. }
  350. RtApiCore :: ~RtApiCore()
  351. {
  352. // The subclass destructor gets called before the base class
  353. // destructor, so close an existing stream before deallocating
  354. // apiDeviceId memory.
  355. if ( stream_.state != STREAM_CLOSED ) closeStream();
  356. }
  357. unsigned int RtApiCore :: getDeviceCount( void )
  358. {
  359. // Find out how many audio devices there are, if any.
  360. UInt32 dataSize;
  361. OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
  362. if ( result != noErr ) {
  363. errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
  364. error( RtError::WARNING );
  365. return 0;
  366. }
  367. return dataSize / sizeof( AudioDeviceID );
  368. }
  369. unsigned int RtApiCore :: getDefaultInputDevice( void )
  370. {
  371. unsigned int nDevices = getDeviceCount();
  372. if ( nDevices <= 1 ) return 0;
  373. AudioDeviceID id;
  374. UInt32 dataSize = sizeof( AudioDeviceID );
  375. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
  376. &dataSize, &id );
  377. if ( result != noErr ) {
  378. errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
  379. error( RtError::WARNING );
  380. return 0;
  381. }
  382. dataSize *= nDevices;
  383. AudioDeviceID deviceList[ nDevices ];
  384. result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
  385. if ( result != noErr ) {
  386. errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
  387. error( RtError::WARNING );
  388. return 0;
  389. }
  390. for ( unsigned int i=0; i<nDevices; i++ )
  391. if ( id == deviceList[i] ) return i;
  392. errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
  393. error( RtError::WARNING );
  394. return 0;
  395. }
  396. unsigned int RtApiCore :: getDefaultOutputDevice( void )
  397. {
  398. unsigned int nDevices = getDeviceCount();
  399. if ( nDevices <= 1 ) return 0;
  400. AudioDeviceID id;
  401. UInt32 dataSize = sizeof( AudioDeviceID );
  402. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
  403. &dataSize, &id );
  404. if ( result != noErr ) {
  405. errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
  406. error( RtError::WARNING );
  407. return 0;
  408. }
  409. dataSize *= nDevices;
  410. AudioDeviceID deviceList[ nDevices ];
  411. result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
  412. if ( result != noErr ) {
  413. errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
  414. error( RtError::WARNING );
  415. return 0;
  416. }
  417. for ( unsigned int i=0; i<nDevices; i++ )
  418. if ( id == deviceList[i] ) return i;
  419. errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
  420. error( RtError::WARNING );
  421. return 0;
  422. }
  423. RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
  424. {
  425. RtAudio::DeviceInfo info;
  426. info.probed = false;
  427. // Get device ID
  428. unsigned int nDevices = getDeviceCount();
  429. if ( nDevices == 0 ) {
  430. errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
  431. error( RtError::INVALID_USE );
  432. }
  433. if ( device >= nDevices ) {
  434. errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
  435. error( RtError::INVALID_USE );
  436. }
  437. AudioDeviceID deviceList[ nDevices ];
  438. UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
  439. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
  440. if ( result != noErr ) {
  441. errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
  442. error( RtError::WARNING );
  443. return info;
  444. }
  445. AudioDeviceID id = deviceList[ device ];
  446. // Get the device name.
  447. info.name.erase();
  448. char name[256];
  449. dataSize = 256;
  450. result = AudioDeviceGetProperty( id, 0, false,
  451. kAudioDevicePropertyDeviceManufacturer,
  452. &dataSize, name );
  453. if ( result != noErr ) {
  454. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
  455. errorText_ = errorStream_.str();
  456. error( RtError::WARNING );
  457. return info;
  458. }
  459. info.name.append( (const char *)name, strlen(name) );
  460. info.name.append( ": " );
  461. dataSize = 256;
  462. result = AudioDeviceGetProperty( id, 0, false,
  463. kAudioDevicePropertyDeviceName,
  464. &dataSize, name );
  465. if ( result != noErr ) {
  466. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
  467. errorText_ = errorStream_.str();
  468. error( RtError::WARNING );
  469. return info;
  470. }
  471. info.name.append( (const char *)name, strlen(name) );
  472. // Get the output stream "configuration".
  473. AudioBufferList *bufferList = nil;
  474. result = AudioDeviceGetPropertyInfo( id, 0, false,
  475. kAudioDevicePropertyStreamConfiguration,
  476. &dataSize, NULL );
  477. if (result != noErr || dataSize == 0) {
  478. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
  479. errorText_ = errorStream_.str();
  480. error( RtError::WARNING );
  481. return info;
  482. }
  483. // Allocate the AudioBufferList.
  484. bufferList = (AudioBufferList *) malloc( dataSize );
  485. if ( bufferList == NULL ) {
  486. errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
  487. error( RtError::WARNING );
  488. return info;
  489. }
  490. result = AudioDeviceGetProperty( id, 0, false,
  491. kAudioDevicePropertyStreamConfiguration,
  492. &dataSize, bufferList );
  493. if ( result != noErr ) {
  494. free( bufferList );
  495. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
  496. errorText_ = errorStream_.str();
  497. error( RtError::WARNING );
  498. return info;
  499. }
  500. // Get output channel information.
  501. unsigned int i, nStreams = bufferList->mNumberBuffers;
  502. for ( i=0; i<nStreams; i++ )
  503. info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
  504. free( bufferList );
  505. // Get the input stream "configuration".
  506. result = AudioDeviceGetPropertyInfo( id, 0, true,
  507. kAudioDevicePropertyStreamConfiguration,
  508. &dataSize, NULL );
  509. if (result != noErr || dataSize == 0) {
  510. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
  511. errorText_ = errorStream_.str();
  512. error( RtError::WARNING );
  513. return info;
  514. }
  515. // Allocate the AudioBufferList.
  516. bufferList = (AudioBufferList *) malloc( dataSize );
  517. if ( bufferList == NULL ) {
  518. errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
  519. error( RtError::WARNING );
  520. return info;
  521. }
  522. result = AudioDeviceGetProperty( id, 0, true,
  523. kAudioDevicePropertyStreamConfiguration,
  524. &dataSize, bufferList );
  525. if ( result != noErr ) {
  526. free( bufferList );
  527. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
  528. errorText_ = errorStream_.str();
  529. error( RtError::WARNING );
  530. return info;
  531. }
  532. // Get input channel information.
  533. nStreams = bufferList->mNumberBuffers;
  534. for ( i=0; i<nStreams; i++ )
  535. info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
  536. free( bufferList );
  537. // If device opens for both playback and capture, we determine the channels.
  538. if ( info.outputChannels > 0 && info.inputChannels > 0 )
  539. info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  540. // Probe the device sample rates.
  541. bool isInput = false;
  542. if ( info.outputChannels == 0 ) isInput = true;
  543. // Determine the supported sample rates.
  544. result = AudioDeviceGetPropertyInfo( id, 0, isInput,
  545. kAudioDevicePropertyAvailableNominalSampleRates,
  546. &dataSize, NULL );
  547. if ( result != kAudioHardwareNoError || dataSize == 0 ) {
  548. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
  549. errorText_ = errorStream_.str();
  550. error( RtError::WARNING );
  551. return info;
  552. }
  553. UInt32 nRanges = dataSize / sizeof( AudioValueRange );
  554. AudioValueRange rangeList[ nRanges ];
  555. result = AudioDeviceGetProperty( id, 0, isInput,
  556. kAudioDevicePropertyAvailableNominalSampleRates,
  557. &dataSize, &rangeList );
  558. if ( result != kAudioHardwareNoError ) {
  559. errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
  560. errorText_ = errorStream_.str();
  561. error( RtError::WARNING );
  562. return info;
  563. }
  564. Float64 minimumRate = 100000000.0, maximumRate = 0.0;
  565. for ( UInt32 i=0; i<nRanges; i++ ) {
  566. if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
  567. if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
  568. }
  569. info.sampleRates.clear();
  570. for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
  571. if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
  572. info.sampleRates.push_back( SAMPLE_RATES[k] );
  573. }
  574. if ( info.sampleRates.size() == 0 ) {
  575. errorStream_ << "RtApiCore::getDeviceInfo: No supported sample rates found for device (" << device << ").";
  576. errorText_ = errorStream_.str();
  577. error( RtError::WARNING );
  578. return info;
  579. }
  580. // CoreAudio always uses 32-bit floating point data for PCM streams.
  581. // Thus, any other "physical" formats supported by the device are of
  582. // no interest to the client.
  583. info.nativeFormats = RTAUDIO_FLOAT32;
  584. if ( getDefaultOutputDevice() == device )
  585. info.isDefaultOutput = true;
  586. if ( getDefaultInputDevice() == device )
  587. info.isDefaultInput = true;
  588. info.probed = true;
  589. return info;
  590. }
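// A client-side sketch (not part of RtAudio itself) showing how the
// DeviceInfo filled in above is typically consumed when enumerating devices.
#if 0
#include "RtAudio.h"
#include <iostream>

void exampleProbeDevices( RtAudio &audio )
{
  unsigned int n = audio.getDeviceCount();
  for ( unsigned int i=0; i<n; i++ ) {
    RtAudio::DeviceInfo info = audio.getDeviceInfo( i );
    if ( info.probed == false ) continue;  // probe failed; a warning was issued
    std::cout << i << ": " << info.name
              << " (outputs = " << info.outputChannels
              << ", inputs = " << info.inputChannels << "), rates:";
    for ( unsigned int k=0; k<info.sampleRates.size(); k++ )
      std::cout << " " << info.sampleRates[k];
    std::cout << std::endl;
  }
}
#endif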
  591. OSStatus callbackHandler( AudioDeviceID inDevice,
  592. const AudioTimeStamp* inNow,
  593. const AudioBufferList* inInputData,
  594. const AudioTimeStamp* inInputTime,
  595. AudioBufferList* outOutputData,
  596. const AudioTimeStamp* inOutputTime,
  597. void* infoPointer )
  598. {
  599. CallbackInfo *info = (CallbackInfo *) infoPointer;
  600. RtApiCore *object = (RtApiCore *) info->object;
  601. if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
  602. return kAudioHardwareUnspecifiedError;
  603. else
  604. return kAudioHardwareNoError;
  605. }
  606. OSStatus deviceListener( AudioDeviceID inDevice,
  607. UInt32 channel,
  608. Boolean isInput,
  609. AudioDevicePropertyID propertyID,
  610. void* handlePointer )
  611. {
  612. CoreHandle *handle = (CoreHandle *) handlePointer;
  613. if ( propertyID == kAudioDeviceProcessorOverload ) {
  614. if ( isInput )
  615. handle->xrun[1] = true;
  616. else
  617. handle->xrun[0] = true;
  618. }
  619. return kAudioHardwareNoError;
  620. }
  621. static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
  622. {
  623. OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
  624. return result == 0;
  625. }
  626. bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  627. unsigned int firstChannel, unsigned int sampleRate,
  628. RtAudioFormat format, unsigned int *bufferSize,
  629. RtAudio::StreamOptions *options )
  630. {
  631. // Get device ID
  632. unsigned int nDevices = getDeviceCount();
  633. if ( nDevices == 0 ) {
  634. // This should not happen because a check is made before this function is called.
  635. errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
  636. return FAILURE;
  637. }
  638. if ( device >= nDevices ) {
  639. // This should not happen because a check is made before this function is called.
  640. errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
  641. return FAILURE;
  642. }
  643. AudioDeviceID deviceList[ nDevices ];
  644. UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
  645. OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
  646. if ( result != noErr ) {
  647. errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
  648. return FAILURE;
  649. }
  650. AudioDeviceID id = deviceList[ device ];
  651. // Setup for stream mode.
  652. bool isInput = false;
  653. if ( mode == INPUT ) isInput = true;
  654. // Set or disable "hog" mode.
  655. dataSize = sizeof( UInt32 );
  656. UInt32 doHog = 0;
  657. if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
  658. result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
  659. if ( result != noErr ) {
  660. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
  661. errorText_ = errorStream_.str();
  662. return FAILURE;
  663. }
  664. // Get the stream "configuration".
  665. AudioBufferList *bufferList;
  666. result = AudioDeviceGetPropertyInfo( id, 0, isInput,
  667. kAudioDevicePropertyStreamConfiguration,
  668. &dataSize, NULL );
  669. if (result != noErr || dataSize == 0) {
  670. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
  671. errorText_ = errorStream_.str();
  672. return FAILURE;
  673. }
  674. // Allocate the AudioBufferList.
  675. bufferList = (AudioBufferList *) malloc( dataSize );
  676. if ( bufferList == NULL ) {
  677. errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
  678. return FAILURE;
  679. }
  680. result = AudioDeviceGetProperty( id, 0, isInput,
  681. kAudioDevicePropertyStreamConfiguration,
  682. &dataSize, bufferList );
  683. if ( result != noErr ) {
  684. free( bufferList );
  685. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
  686. errorText_ = errorStream_.str();
  687. return FAILURE;
  688. }
  689. // Search for a stream that contains the desired number of
  690. // channels. CoreAudio devices can have an arbitrary number of
  691. // streams and each stream can have an arbitrary number of channels.
  692. // For each stream, a single buffer of interleaved samples is
  693. // provided. RtAudio currently only supports the use of one stream
  694. // of interleaved data or multiple consecutive single-channel
  695. // streams. Thus, our search below is limited to these two
  696. // contexts.
  697. unsigned int streamChannels = 0, nStreams = 0;
  698. UInt32 iChannel = 0, iStream = 0;
  699. unsigned int offsetCounter = firstChannel;
  700. stream_.deviceInterleaved[mode] = true;
  701. nStreams = bufferList->mNumberBuffers;
  702. bool foundStream = false;
  703. for ( iStream=0; iStream<nStreams; iStream++ ) {
  704. streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
  705. if ( streamChannels >= channels + offsetCounter ) {
  706. iChannel += offsetCounter;
  707. foundStream = true;
  708. break;
  709. }
  710. if ( streamChannels > offsetCounter ) break;
  711. offsetCounter -= streamChannels;
  712. iChannel += streamChannels;
  713. }
  714. // If we didn't find a single stream above, see if we can meet
  715. // the channel specification in mono mode (i.e. using separate
  716. // non-interleaved buffers). This can only work if there are N
  717. // consecutive one-channel streams, where N is the number of
  718. // desired channels (+ channel offset).
  719. if ( foundStream == false ) {
  720. unsigned int counter = 0;
  721. offsetCounter = firstChannel;
  722. iChannel = 0;
  723. for ( iStream=0; iStream<nStreams; iStream++ ) {
  724. streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
  725. if ( offsetCounter ) {
  726. if ( streamChannels > offsetCounter ) break;
  727. offsetCounter -= streamChannels;
  728. }
  729. else if ( streamChannels == 1 )
  730. counter++;
  731. else
  732. counter = 0;
  733. if ( counter == channels ) {
  734. iStream -= channels - 1;
  735. iChannel -= channels - 1;
  736. stream_.deviceInterleaved[mode] = false;
  737. foundStream = true;
  738. break;
  739. }
  740. iChannel += streamChannels;
  741. }
  742. }
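  // Worked example of the two searches above: for a device exposing streams
  // with { 2, 1, 1, 1 } channels, a request for 2 channels at firstChannel 0
  // is satisfied by the first (interleaved) 2-channel stream, while a request
  // for 2 channels at firstChannel 2 falls through to the mono-mode search,
  // which selects the consecutive single-channel streams at indices 1 and 2
  // (device channels 2 and 3) and sets deviceInterleaved to false.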
  743. free( bufferList );
  744. if ( foundStream == false ) {
  745. errorStream_ << "RtApiCore::probeDeviceOpen: unable to find OS-X stream on device (" << device << ") for requested channels.";
  746. errorText_ = errorStream_.str();
  747. return FAILURE;
  748. }
  749. // Determine the buffer size.
  750. AudioValueRange bufferRange;
  751. dataSize = sizeof( AudioValueRange );
  752. result = AudioDeviceGetProperty( id, 0, isInput,
  753. kAudioDevicePropertyBufferFrameSizeRange,
  754. &dataSize, &bufferRange );
  755. if ( result != noErr ) {
  756. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
  757. errorText_ = errorStream_.str();
  758. return FAILURE;
  759. }
  760. if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
  761. else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
  762. if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
  763. // Set the buffer size. For mono mode, I'm assuming we only need to
  764. // make this setting for the master channel.
  765. UInt32 theSize = (UInt32) *bufferSize;
  766. dataSize = sizeof( UInt32 );
  767. result = AudioDeviceSetProperty( id, NULL, 0, isInput,
  768. kAudioDevicePropertyBufferFrameSize,
  769. dataSize, &theSize );
  770. if ( result != noErr ) {
  771. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
  772. errorText_ = errorStream_.str();
  773. return FAILURE;
  774. }
  775. // If attempting to setup a duplex stream, the bufferSize parameter
  776. // MUST be the same in both directions!
  777. *bufferSize = theSize;
  778. if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
  779. errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
  780. errorText_ = errorStream_.str();
  781. return FAILURE;
  782. }
  783. stream_.bufferSize = *bufferSize;
  784. stream_.nBuffers = 1;
  785. // Get the stream ID(s) so we can set the stream format. In mono
  786. // mode, we'll have to do this for each stream (channel).
  787. AudioStreamID streamIDs[ nStreams ];
  788. dataSize = nStreams * sizeof( AudioStreamID );
  789. result = AudioDeviceGetProperty( id, 0, isInput,
  790. kAudioDevicePropertyStreams,
  791. &dataSize, &streamIDs );
  792. if ( result != noErr ) {
  793. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
  794. errorText_ = errorStream_.str();
  795. return FAILURE;
  796. }
  797. // Now set the stream format. Also, check the physical format of the
  798. // device and change that if necessary.
  799. AudioStreamBasicDescription description;
  800. dataSize = sizeof( AudioStreamBasicDescription );
  801. if ( stream_.deviceInterleaved[mode] ) nStreams = 1;
  802. else nStreams = channels;
  803. bool updateFormat;
  804. for ( unsigned int i=0; i<nStreams; i++ ) {
  805. result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
  806. kAudioStreamPropertyVirtualFormat,
  807. &dataSize, &description );
  808. if ( result != noErr ) {
  809. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
  810. errorText_ = errorStream_.str();
  811. return FAILURE;
  812. }
  813. // Set the sample rate and data format id. However, only make the
  814. // change if the sample rate is not within 1.0 of the desired
  815. // rate and the format is not linear pcm.
  816. updateFormat = false;
  817. if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
  818. description.mSampleRate = (double) sampleRate;
  819. updateFormat = true;
  820. }
  821. if ( description.mFormatID != kAudioFormatLinearPCM ) {
  822. description.mFormatID = kAudioFormatLinearPCM;
  823. updateFormat = true;
  824. }
  825. if ( updateFormat ) {
  826. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0,
  827. kAudioStreamPropertyVirtualFormat,
  828. dataSize, &description );
  829. if ( result != noErr ) {
  830. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
  831. errorText_ = errorStream_.str();
  832. return FAILURE;
  833. }
  834. }
  835. // Now check the physical format.
  836. result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
  837. kAudioStreamPropertyPhysicalFormat,
  838. &dataSize, &description );
  839. if ( result != noErr ) {
  840. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
  841. errorText_ = errorStream_.str();
  842. return FAILURE;
  843. }
  844. if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
  845. description.mFormatID = kAudioFormatLinearPCM;
  846. AudioStreamBasicDescription testDescription = description;
  847. unsigned long formatFlags;
  848. // We'll try higher bit rates first and then work our way down.
  849. testDescription.mBitsPerChannel = 32;
  850. formatFlags = ( description.mFormatFlags | kLinearPCMFormatFlagIsFloat ) & ~kLinearPCMFormatFlagIsSignedInteger; // parentheses added: set the float flag and clear the signed-integer flag
  851. testDescription.mFormatFlags = formatFlags;
  852. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
  853. if ( result == noErr ) continue;
  854. testDescription = description;
  855. testDescription.mBitsPerChannel = 32;
  856. formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
  857. testDescription.mFormatFlags = formatFlags;
  858. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
  859. if ( result == noErr ) continue;
  860. testDescription = description;
  861. testDescription.mBitsPerChannel = 24;
  862. testDescription.mFormatFlags = formatFlags;
  863. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
  864. if ( result == noErr ) continue;
  865. testDescription = description;
  866. testDescription.mBitsPerChannel = 16;
  867. testDescription.mFormatFlags = formatFlags;
  868. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
  869. if ( result == noErr ) continue;
  870. testDescription = description;
  871. testDescription.mBitsPerChannel = 8;
  872. testDescription.mFormatFlags = formatFlags;
  873. result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
  874. if ( result != noErr ) {
  875. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
  876. errorText_ = errorStream_.str();
  877. return FAILURE;
  878. }
  879. }
  880. }
  881. // Get the stream latency. There can be latency in both the device
  882. // and the stream. First, attempt to get the device latency on the
  883. // master channel or the first open channel. Errors that might
  884. // occur here are not deemed critical.
  885. UInt32 latency, channel = 0;
  886. dataSize = sizeof( UInt32 );
  887. AudioDevicePropertyID property = kAudioDevicePropertyLatency;
  888. for ( int i=0; i<2; i++ ) {
  889. if ( hasProperty( id, channel, isInput, property ) == true ) break;
  890. channel = iChannel + 1 + i;
  891. }
  892. if ( channel <= iChannel + 1 ) {
  893. result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
  894. if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
  895. else {
  896. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
  897. errorText_ = errorStream_.str();
  898. error( RtError::WARNING );
  899. }
  900. }
  901. // Now try to get the stream latency. For "mono" mode, I assume the
  902. // latency is equal for all single-channel streams.
  903. result = AudioStreamGetProperty( streamIDs[iStream], 0, property, &dataSize, &latency );
  904. if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
  905. else {
  906. errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
  907. errorText_ = errorStream_.str();
  908. error( RtError::WARNING );
  909. }
  910. // Byte-swapping: According to AudioHardware.h, the stream data will
  911. // always be presented in native-endian format, so we should never
  912. // need to byte swap.
  913. stream_.doByteSwap[mode] = false;
  914. // From the CoreAudio documentation, PCM data must be supplied as
  915. // 32-bit floats.
  916. stream_.userFormat = format;
  917. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  918. if ( stream_.deviceInterleaved[mode] )
  919. stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
  920. else // mono mode
  921. stream_.nDeviceChannels[mode] = channels;
  922. stream_.nUserChannels[mode] = channels;
  923. stream_.channelOffset[mode] = iChannel; // offset within a CoreAudio stream
  924. if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
  925. else stream_.userInterleaved = true;
  926. // Set flags for buffer conversion.
  927. stream_.doConvertBuffer[mode] = false;
  928. if ( stream_.userFormat != stream_.deviceFormat[mode] )
  929. stream_.doConvertBuffer[mode] = true;
  930. if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
  931. stream_.doConvertBuffer[mode] = true;
  932. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  933. stream_.nUserChannels[mode] > 1 )
  934. stream_.doConvertBuffer[mode] = true;
  935. // Allocate our CoreHandle structure for the stream.
  936. CoreHandle *handle = 0;
  937. if ( stream_.apiHandle == 0 ) {
  938. try {
  939. handle = new CoreHandle;
  940. }
  941. catch ( std::bad_alloc& ) {
  942. errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
  943. goto error;
  944. }
  945. if ( pthread_cond_init( &handle->condition, NULL ) ) {
  946. errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
  947. goto error;
  948. }
  949. stream_.apiHandle = (void *) handle;
  950. }
  951. else
  952. handle = (CoreHandle *) stream_.apiHandle;
  953. handle->iStream[mode] = iStream;
  954. handle->id[mode] = id;
  955. // Allocate necessary internal buffers.
  956. unsigned long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  957. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  958. if ( stream_.userBuffer[mode] == NULL ) {
  959. errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
  960. goto error;
  961. }
  962. // If possible, we will make use of the CoreAudio stream buffers as
  963. // "device buffers". However, we can't do this if the device
  964. // buffers are non-interleaved ("mono" mode).
  965. if ( !stream_.deviceInterleaved[mode] && stream_.doConvertBuffer[mode] ) {
  966. bool makeBuffer = true;
  967. bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
  968. if ( mode == INPUT ) {
  969. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  970. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  971. if ( bufferBytes <= bytesOut ) makeBuffer = false;
  972. }
  973. }
  974. if ( makeBuffer ) {
  975. bufferBytes *= *bufferSize;
  976. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  977. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  978. if ( stream_.deviceBuffer == NULL ) {
  979. errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
  980. goto error;
  981. }
  982. // Save a pointer to our own device buffer in the CoreHandle
  983. // structure because we may need to use the stream_.deviceBuffer
  984. // variable to point to the CoreAudio buffer before buffer
  985. // conversion (if we have a duplex stream with two different
  986. // conversion schemes).
  987. handle->deviceBuffer = stream_.deviceBuffer;
  988. }
  989. }
  990. stream_.sampleRate = sampleRate;
  991. stream_.device[mode] = device;
  992. stream_.state = STREAM_STOPPED;
  993. stream_.callbackInfo.object = (void *) this;
  994. // Setup the buffer conversion information structure. We override
  995. // the channel offset value and perform our own setting for that
  996. // here.
  997. if ( stream_.doConvertBuffer[mode] ) {
  998. setConvertInfo( mode, 0 );
  999. // Add channel offset for interleaved channels.
  1000. if ( firstChannel > 0 && stream_.deviceInterleaved[mode] ) {
  1001. if ( mode == OUTPUT ) {
  1002. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  1003. stream_.convertInfo[mode].outOffset[k] += firstChannel;
  1004. }
  1005. else {
  1006. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  1007. stream_.convertInfo[mode].inOffset[k] += firstChannel;
  1008. }
  1009. }
  1010. }
  1011. if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
  1012. // Only one callback procedure per device.
  1013. stream_.mode = DUPLEX;
  1014. else {
  1015. result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
  1016. if ( result != noErr ) {
  1017. errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
  1018. errorText_ = errorStream_.str();
  1019. goto error;
  1020. }
  1021. if ( stream_.mode == OUTPUT && mode == INPUT )
  1022. stream_.mode = DUPLEX;
  1023. else
  1024. stream_.mode = mode;
  1025. }
  1026. // Setup the device property listener for over/underload.
  1027. result = AudioDeviceAddPropertyListener( id, 0, isInput,
  1028. kAudioDeviceProcessorOverload,
  1029. deviceListener, (void *) handle );
  1030. return SUCCESS;
  1031. error:
  1032. if ( handle ) {
  1033. pthread_cond_destroy( &handle->condition );
  1034. delete handle;
  1035. stream_.apiHandle = 0;
  1036. }
  1037. for ( int i=0; i<2; i++ ) {
  1038. if ( stream_.userBuffer[i] ) {
  1039. free( stream_.userBuffer[i] );
  1040. stream_.userBuffer[i] = 0;
  1041. }
  1042. }
  1043. if ( stream_.deviceBuffer ) {
  1044. free( stream_.deviceBuffer );
  1045. stream_.deviceBuffer = 0;
  1046. }
  1047. return FAILURE;
  1048. }
  1049. void RtApiCore :: closeStream( void )
  1050. {
  1051. if ( stream_.state == STREAM_CLOSED ) {
  1052. errorText_ = "RtApiCore::closeStream(): no open stream to close!";
  1053. error( RtError::WARNING );
  1054. return;
  1055. }
  1056. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  1057. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1058. if ( stream_.state == STREAM_RUNNING )
  1059. AudioDeviceStop( handle->id[0], callbackHandler );
  1060. AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
  1061. }
  1062. if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
  1063. if ( stream_.state == STREAM_RUNNING )
  1064. AudioDeviceStop( handle->id[1], callbackHandler );
  1065. AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
  1066. }
  1067. for ( int i=0; i<2; i++ ) {
  1068. if ( stream_.userBuffer[i] ) {
  1069. free( stream_.userBuffer[i] );
  1070. stream_.userBuffer[i] = 0;
  1071. }
  1072. }
  1073. if ( handle->deviceBuffer ) {
  1074. free( handle->deviceBuffer );
  1075. stream_.deviceBuffer = 0;
  1076. }
  1077. // Destroy pthread condition variable.
  1078. pthread_cond_destroy( &handle->condition );
  1079. delete handle;
  1080. stream_.apiHandle = 0;
  1081. stream_.mode = UNINITIALIZED;
  1082. stream_.state = STREAM_CLOSED;
  1083. }
  1084. void RtApiCore :: startStream( void )
  1085. {
  1086. verifyStream();
  1087. if ( stream_.state == STREAM_RUNNING ) {
  1088. errorText_ = "RtApiCore::startStream(): the stream is already running!";
  1089. error( RtError::WARNING );
  1090. return;
  1091. }
  1092. MUTEX_LOCK( &stream_.mutex );
  1093. OSStatus result = noErr;
  1094. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  1095. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1096. result = AudioDeviceStart( handle->id[0], callbackHandler );
  1097. if ( result != noErr ) {
  1098. errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
  1099. errorText_ = errorStream_.str();
  1100. goto unlock;
  1101. }
  1102. }
  1103. if ( stream_.mode == INPUT ||
  1104. ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
  1105. result = AudioDeviceStart( handle->id[1], callbackHandler );
  1106. if ( result != noErr ) {
  1107. errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
  1108. errorText_ = errorStream_.str();
  1109. goto unlock;
  1110. }
  1111. }
  1112. handle->drainCounter = 0;
  1113. handle->internalDrain = false;
  1114. stream_.state = STREAM_RUNNING;
  1115. unlock:
  1116. MUTEX_UNLOCK( &stream_.mutex );
  1117. if ( result == noErr ) return;
  1118. error( RtError::SYSTEM_ERROR );
  1119. }
  1120. void RtApiCore :: stopStream( void )
  1121. {
  1122. verifyStream();
  1123. if ( stream_.state == STREAM_STOPPED ) {
  1124. errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
  1125. error( RtError::WARNING );
  1126. return;
  1127. }
  1128. MUTEX_LOCK( &stream_.mutex );
  1129. OSStatus result = noErr;
  1130. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  1131. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1132. if ( handle->drainCounter == 0 ) {
  1133. handle->drainCounter = 1;
  1134. pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
  1135. }
  1136. result = AudioDeviceStop( handle->id[0], callbackHandler );
  1137. if ( result != noErr ) {
  1138. errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
  1139. errorText_ = errorStream_.str();
  1140. goto unlock;
  1141. }
  1142. }
  1143. if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
  1144. result = AudioDeviceStop( handle->id[1], callbackHandler );
  1145. if ( result != noErr ) {
  1146. errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
  1147. errorText_ = errorStream_.str();
  1148. goto unlock;
  1149. }
  1150. }
  1151. unlock:
  1152. MUTEX_UNLOCK( &stream_.mutex );
  1153. stream_.state = STREAM_STOPPED;
  1154. if ( result == noErr ) return;
  1155. error( RtError::SYSTEM_ERROR );
  1156. }
  1157. void RtApiCore :: abortStream( void )
  1158. {
  1159. verifyStream();
  1160. if ( stream_.state == STREAM_STOPPED ) {
  1161. errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
  1162. error( RtError::WARNING );
  1163. return;
  1164. }
  1165. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  1166. handle->drainCounter = 1;
  1167. stopStream();
  1168. }
  1169. bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
  1170. const AudioBufferList *inBufferList,
  1171. const AudioBufferList *outBufferList )
  1172. {
  1173. if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
  1174. if ( stream_.state == STREAM_CLOSED ) {
  1175. errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
  1176. error( RtError::WARNING );
  1177. return FAILURE;
  1178. }
  1179. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  1180. CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
  1181. // Check if we were draining the stream and, if so, signal that the drain is finished.
  1182. if ( handle->drainCounter > 3 ) {
  1183. if ( handle->internalDrain == false )
  1184. pthread_cond_signal( &handle->condition );
  1185. else
  1186. stopStream();
  1187. return SUCCESS;
  1188. }
  1189. MUTEX_LOCK( &stream_.mutex );
  1190. AudioDeviceID outputDevice = handle->id[0];
  1191. // Invoke user callback to get fresh output data UNLESS we are
  1192. // draining stream or duplex mode AND the input/output devices are
  1193. // different AND this function is called for the input device.
  1194. if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
  1195. RtAudioCallback callback = (RtAudioCallback) info->callback;
  1196. double streamTime = getStreamTime();
  1197. RtAudioStreamStatus status = 0;
  1198. if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
  1199. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  1200. handle->xrun[0] = false;
  1201. }
  1202. if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
  1203. status |= RTAUDIO_INPUT_OVERFLOW;
  1204. handle->xrun[1] = false;
  1205. }
  1206. handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  1207. stream_.bufferSize, streamTime, status, info->userData );
  1208. if ( handle->drainCounter == 2 ) {
  1209. MUTEX_UNLOCK( &stream_.mutex );
  1210. abortStream();
  1211. return SUCCESS;
  1212. }
  1213. else if ( handle->drainCounter == 1 )
  1214. handle->internalDrain = true;
  1215. }
  1216. if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
  1217. if ( handle->drainCounter > 1 ) { // write zeros to the output stream
  1218. if ( stream_.deviceInterleaved[0] ) {
  1219. memset( outBufferList->mBuffers[handle->iStream[0]].mData,
  1220. 0,
  1221. outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
  1222. }
  1223. else {
  1224. for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
  1225. memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
  1226. 0,
  1227. outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
  1228. }
  1229. }
  1230. }
  1231. else if ( stream_.doConvertBuffer[0] ) {
  1232. if ( stream_.deviceInterleaved[0] )
  1233. stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->iStream[0]].mData;
  1234. else
  1235. stream_.deviceBuffer = handle->deviceBuffer;
  1236. convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  1237. if ( !stream_.deviceInterleaved[0] ) {
  1238. UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
  1239. for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
  1240. memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
  1241. &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
  1242. }
  1243. }
  1244. }
  1245. else {
  1246. if ( stream_.deviceInterleaved[0] ) {
  1247. memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
  1248. stream_.userBuffer[0],
  1249. outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
  1250. }
  1251. else {
  1252. UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
  1253. for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
  1254. memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
  1255. &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
  1256. }
  1257. }
  1258. }
  1259. if ( handle->drainCounter ) {
  1260. handle->drainCounter++;
  1261. goto unlock;
  1262. }
  1263. }
  1264. AudioDeviceID inputDevice = handle->id[1];
  1265. if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
  1266. if ( stream_.doConvertBuffer[1] ) {
  1267. if ( stream_.deviceInterleaved[1] )
  1268. stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->iStream[1]].mData;
  1269. else {
  1270. stream_.deviceBuffer = (char *) handle->deviceBuffer;
  1271. UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
  1272. for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
  1273. memcpy( &stream_.deviceBuffer[i*bufferBytes],
  1274. inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
  1275. }
  1276. }
  1277. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  1278. }
  1279. else {
  1280. memcpy( stream_.userBuffer[1],
  1281. inBufferList->mBuffers[handle->iStream[1]].mData,
  1282. inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
  1283. }
  1284. }
  1285. unlock:
  1286. MUTEX_UNLOCK( &stream_.mutex );
  1287. RtApi::tickStreamTime();
  1288. return SUCCESS;
  1289. }
  1290. const char* RtApiCore :: getErrorCode( OSStatus code )
  1291. {
  1292. switch( code ) {
  1293. case kAudioHardwareNotRunningError:
  1294. return "kAudioHardwareNotRunningError";
  1295. case kAudioHardwareUnspecifiedError:
  1296. return "kAudioHardwareUnspecifiedError";
  1297. case kAudioHardwareUnknownPropertyError:
  1298. return "kAudioHardwareUnknownPropertyError";
  1299. case kAudioHardwareBadPropertySizeError:
  1300. return "kAudioHardwareBadPropertySizeError";
  1301. case kAudioHardwareIllegalOperationError:
  1302. return "kAudioHardwareIllegalOperationError";
  1303. case kAudioHardwareBadObjectError:
  1304. return "kAudioHardwareBadObjectError";
  1305. case kAudioHardwareBadDeviceError:
  1306. return "kAudioHardwareBadDeviceError";
  1307. case kAudioHardwareBadStreamError:
  1308. return "kAudioHardwareBadStreamError";
  1309. case kAudioHardwareUnsupportedOperationError:
  1310. return "kAudioHardwareUnsupportedOperationError";
  1311. case kAudioDeviceUnsupportedFormatError:
  1312. return "kAudioDeviceUnsupportedFormatError";
  1313. case kAudioDevicePermissionsError:
  1314. return "kAudioDevicePermissionsError";
  1315. default:
  1316. return "CoreAudio unknown error";
  1317. }
  1318. }
  1319. //******************** End of __MACOSX_CORE__ *********************//
  1320. #endif
  1321. #if defined(__UNIX_JACK__)
  1322. // JACK is a low-latency audio server, originally written for the
  1323. // GNU/Linux operating system and now also ported to OS-X. It can
  1324. // connect a number of different applications to an audio device, as
  1325. // well as allowing them to share audio between themselves.
  1326. //
  1327. // When using JACK with RtAudio, "devices" refer to JACK clients that
  1328. // have ports connected to the server. The JACK server is typically
  1329. // started in a terminal as follows:
  1330. //
1331. // jackd -d alsa -d hw:0
  1332. //
  1333. // or through an interface program such as qjackctl. Many of the
  1334. // parameters normally set for a stream are fixed by the JACK server
  1335. // and can be specified when the JACK server is started. In
  1336. // particular,
  1337. //
1338. // jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
  1339. //
1340. // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1341. // frames, and 4 periods (buffers). Once the server is running, it
1342. // is not possible to override these values. If the values are not
1343. // specified on the command line, the JACK server uses default values.
  1344. //
  1345. // The JACK server does not have to be running when an instance of
  1346. // RtApiJack is created, though the function getDeviceCount() will
  1347. // report 0 devices found until JACK has been started. When no
  1348. // devices are available (i.e., the JACK server is not running), a
  1349. // stream cannot be opened.
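// As a rough, illustrative sketch of how a host program might use this
// backend (kept inside a comment so it is not compiled into the library):
// the callback body, channel count and sample rate below are example
// values only, and the rate must match the running jackd rate noted above.
/*
#include "RtAudio.h"
#include <cstdlib>

int silenceCallback( void *outputBuffer, void *inputBuffer, unsigned int nFrames,
                     double streamTime, RtAudioStreamStatus status, void *userData )
{
  // Write interleaved stereo silence; a real program would synthesize audio here.
  float *out = (float *) outputBuffer;
  for ( unsigned int i=0; i<nFrames*2; i++ ) out[i] = 0.0f;
  return 0; // 0 = keep running, 1 = drain and stop, 2 = abort immediately
}

int main()
{
  RtAudio audio( RtAudio::UNIX_JACK );
  if ( audio.getDeviceCount() == 0 ) return EXIT_FAILURE; // jackd is not running

  RtAudio::StreamParameters oParams;
  oParams.deviceId = audio.getDefaultOutputDevice();
  oParams.nChannels = 2;
  unsigned int bufferFrames = 512; // replaced by the running jackd period size

  try {
    audio.openStream( &oParams, NULL, RTAUDIO_FLOAT32, 44100, &bufferFrames,
                      &silenceCallback );
    audio.startStream();
  }
  catch ( RtError &e ) {
    e.printMessage();
    return EXIT_FAILURE;
  }
  // ... run until done, then audio.stopStream() and audio.closeStream() ...
  return EXIT_SUCCESS;
}
*/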
  1350. #include <jack/jack.h>
  1351. #include <unistd.h>
  1352. // A structure to hold various information related to the Jack API
  1353. // implementation.
  1354. struct JackHandle {
  1355. jack_client_t *client;
  1356. jack_port_t **ports[2];
  1357. std::string deviceName[2];
  1358. bool xrun[2];
  1359. pthread_cond_t condition;
  1360. int drainCounter; // Tracks callback counts when draining
  1361. bool internalDrain; // Indicates if stop is initiated from callback or not.
  1362. JackHandle()
  1363. :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
  1364. };
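// A note on the two drain members above: callbackEvent() stores the user
// callback's return value in drainCounter. A return of 0 keeps the stream
// running, 1 asks the stream to drain and then stop, and 2 aborts it
// immediately. internalDrain records whether the stop was requested from
// inside the callback (in which case callbackEvent() calls stopStream()
// itself once the drain completes) or from stopStream(), which instead
// blocks on 'condition' until the callback signals it.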
  1365. RtApiJack :: RtApiJack()
  1366. {
  1367. // Nothing to do here.
  1368. }
  1369. RtApiJack :: ~RtApiJack()
  1370. {
  1371. if ( stream_.state != STREAM_CLOSED ) closeStream();
  1372. }
  1373. unsigned int RtApiJack :: getDeviceCount( void )
  1374. {
  1375. // See if we can become a jack client.
  1376. jack_client_t *client = jack_client_new( "RtApiJackCount" );
  1377. if ( client == 0 ) return 0;
  1378. const char **ports;
  1379. std::string port, previousPort;
  1380. unsigned int nChannels = 0, nDevices = 0;
  1381. ports = jack_get_ports( client, NULL, NULL, 0 );
  1382. if ( ports ) {
  1383. // Parse the port names up to the first colon (:).
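// (JACK port names have the form "client:port", e.g. "system:playback_1";
// each distinct client prefix is counted as one RtAudio device.)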
  1384. unsigned int iColon = 0;
  1385. do {
  1386. port = (char *) ports[ nChannels ];
  1387. iColon = port.find(":");
  1388. if ( iColon != std::string::npos ) {
  1389. port = port.substr( 0, iColon + 1 );
  1390. if ( port != previousPort ) {
  1391. nDevices++;
  1392. previousPort = port;
  1393. }
  1394. }
  1395. } while ( ports[++nChannels] );
  1396. free( ports );
  1397. }
  1398. jack_client_close( client );
  1399. return nDevices;
  1400. }
  1401. RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
  1402. {
  1403. RtAudio::DeviceInfo info;
  1404. info.probed = false;
  1405. jack_client_t *client = jack_client_new( "RtApiJackInfo" );
  1406. if ( client == 0 ) {
  1407. errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
  1408. error( RtError::WARNING );
  1409. return info;
  1410. }
  1411. const char **ports;
  1412. std::string port, previousPort;
  1413. unsigned int nPorts = 0, nDevices = 0;
  1414. ports = jack_get_ports( client, NULL, NULL, 0 );
  1415. if ( ports ) {
  1416. // Parse the port names up to the first colon (:).
  1417. unsigned int iColon = 0;
  1418. do {
  1419. port = (char *) ports[ nPorts ];
  1420. iColon = port.find(":");
  1421. if ( iColon != std::string::npos ) {
  1422. port = port.substr( 0, iColon );
  1423. if ( port != previousPort ) {
  1424. if ( nDevices == device ) info.name = port;
  1425. nDevices++;
  1426. previousPort = port;
  1427. }
  1428. }
  1429. } while ( ports[++nPorts] );
  1430. free( ports );
  1431. }
  1432. if ( device >= nDevices ) {
  1433. errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
  1434. error( RtError::INVALID_USE );
  1435. }
  1436. // Get the current jack server sample rate.
  1437. info.sampleRates.clear();
  1438. info.sampleRates.push_back( jack_get_sample_rate( client ) );
  1439. // Count the available ports containing the client name as device
  1440. // channels. Jack "input ports" equal RtAudio output channels.
  1441. unsigned int nChannels = 0;
  1442. ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
  1443. if ( ports ) {
  1444. while ( ports[ nChannels ] ) nChannels++;
  1445. free( ports );
  1446. info.outputChannels = nChannels;
  1447. }
  1448. // Jack "output ports" equal RtAudio input channels.
  1449. nChannels = 0;
  1450. ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
  1451. if ( ports ) {
  1452. while ( ports[ nChannels ] ) nChannels++;
  1453. free( ports );
  1454. info.inputChannels = nChannels;
  1455. }
  1456. if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
  1457. jack_client_close(client);
  1458. errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
  1459. error( RtError::WARNING );
  1460. return info;
  1461. }
1462. // If the device supports both playback and capture, determine the available duplex channels.
  1463. if ( info.outputChannels > 0 && info.inputChannels > 0 )
  1464. info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  1465. // Jack always uses 32-bit floats.
  1466. info.nativeFormats = RTAUDIO_FLOAT32;
  1467. // Jack doesn't provide default devices so we'll use the first available one.
  1468. if ( device == 0 && info.outputChannels > 0 )
  1469. info.isDefaultOutput = true;
  1470. if ( device == 0 && info.inputChannels > 0 )
  1471. info.isDefaultInput = true;
  1472. jack_client_close(client);
  1473. info.probed = true;
  1474. return info;
  1475. }
  1476. int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
  1477. {
  1478. CallbackInfo *info = (CallbackInfo *) infoPointer;
  1479. RtApiJack *object = (RtApiJack *) info->object;
  1480. if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
  1481. return 0;
  1482. }
  1483. void jackShutdown( void *infoPointer )
  1484. {
  1485. CallbackInfo *info = (CallbackInfo *) infoPointer;
  1486. RtApiJack *object = (RtApiJack *) info->object;
  1487. // Check current stream state. If stopped, then we'll assume this
  1488. // was called as a result of a call to RtApiJack::stopStream (the
  1489. // deactivation of a client handle causes this function to be called).
  1490. // If not, we'll assume the Jack server is shutting down or some
  1491. // other problem occurred and we should close the stream.
  1492. if ( object->isStreamRunning() == false ) return;
  1493. object->closeStream();
  1494. std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
  1495. }
  1496. int jackXrun( void *infoPointer )
  1497. {
  1498. JackHandle *handle = (JackHandle *) infoPointer;
  1499. if ( handle->ports[0] ) handle->xrun[0] = true;
  1500. if ( handle->ports[1] ) handle->xrun[1] = true;
  1501. return 0;
  1502. }
  1503. bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  1504. unsigned int firstChannel, unsigned int sampleRate,
  1505. RtAudioFormat format, unsigned int *bufferSize,
  1506. RtAudio::StreamOptions *options )
  1507. {
  1508. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  1509. // Look for jack server and try to become a client (only do once per stream).
  1510. jack_client_t *client = 0;
  1511. if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
  1512. if ( options && !options->streamName.empty() )
  1513. client = jack_client_new( options->streamName.c_str() );
  1514. else
  1515. client = jack_client_new( "RtApiJack" );
  1516. if ( client == 0 ) {
  1517. errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
  1518. error( RtError::WARNING );
  1519. return FAILURE;
  1520. }
  1521. }
  1522. else {
  1523. // The handle must have been created on an earlier pass.
  1524. client = handle->client;
  1525. }
  1526. const char **ports;
  1527. std::string port, previousPort, deviceName;
  1528. unsigned int nPorts = 0, nDevices = 0;
  1529. ports = jack_get_ports( client, NULL, NULL, 0 );
  1530. if ( ports ) {
  1531. // Parse the port names up to the first colon (:).
  1532. unsigned int iColon = 0;
  1533. do {
  1534. port = (char *) ports[ nPorts ];
  1535. iColon = port.find(":");
  1536. if ( iColon != std::string::npos ) {
  1537. port = port.substr( 0, iColon );
  1538. if ( port != previousPort ) {
  1539. if ( nDevices == device ) deviceName = port;
  1540. nDevices++;
  1541. previousPort = port;
  1542. }
  1543. }
  1544. } while ( ports[++nPorts] );
  1545. free( ports );
  1546. }
  1547. if ( device >= nDevices ) {
  1548. errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
  1549. return FAILURE;
  1550. }
  1551. // Count the available ports containing the client name as device
  1552. // channels. Jack "input ports" equal RtAudio output channels.
  1553. unsigned int nChannels = 0;
  1554. unsigned long flag = JackPortIsOutput;
  1555. if ( mode == INPUT ) flag = JackPortIsInput;
  1556. ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
  1557. if ( ports ) {
  1558. while ( ports[ nChannels ] ) nChannels++;
  1559. free( ports );
  1560. }
  1561. // Compare the jack ports for specified client to the requested number of channels.
  1562. if ( nChannels < (channels + firstChannel) ) {
  1563. errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
  1564. errorText_ = errorStream_.str();
  1565. return FAILURE;
  1566. }
  1567. // Check the jack server sample rate.
  1568. unsigned int jackRate = jack_get_sample_rate( client );
  1569. if ( sampleRate != jackRate ) {
  1570. jack_client_close( client );
  1571. errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
  1572. errorText_ = errorStream_.str();
  1573. return FAILURE;
  1574. }
  1575. stream_.sampleRate = jackRate;
  1576. // Get the latency of the JACK port.
  1577. ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1578. if ( ports && ports[ firstChannel ] ) // guard against a null port list
  1579. stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
  1580. free( ports );
  1581. // The jack server always uses 32-bit floating-point data.
  1582. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  1583. stream_.userFormat = format;
  1584. if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
  1585. else stream_.userInterleaved = true;
  1586. // Jack always uses non-interleaved buffers.
  1587. stream_.deviceInterleaved[mode] = false;
  1588. // Jack always provides host byte-ordered data.
  1589. stream_.doByteSwap[mode] = false;
  1590. // Get the buffer size. The buffer size and number of buffers
  1591. // (periods) is set when the jack server is started.
  1592. stream_.bufferSize = (int) jack_get_buffer_size( client );
  1593. *bufferSize = stream_.bufferSize;
  1594. stream_.nDeviceChannels[mode] = channels;
  1595. stream_.nUserChannels[mode] = channels;
  1596. // Set flags for buffer conversion.
  1597. stream_.doConvertBuffer[mode] = false;
  1598. if ( stream_.userFormat != stream_.deviceFormat[mode] )
  1599. stream_.doConvertBuffer[mode] = true;
  1600. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  1601. stream_.nUserChannels[mode] > 1 )
  1602. stream_.doConvertBuffer[mode] = true;
  1603. // Allocate our JackHandle structure for the stream.
  1604. if ( handle == 0 ) {
  1605. try {
  1606. handle = new JackHandle;
  1607. }
  1608. catch ( std::bad_alloc& ) {
  1609. errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
  1610. goto error;
  1611. }
  1612. if ( pthread_cond_init(&handle->condition, NULL) ) {
  1613. errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
  1614. goto error;
  1615. }
  1616. stream_.apiHandle = (void *) handle;
  1617. handle->client = client;
  1618. }
  1619. handle->deviceName[mode] = deviceName;
  1620. // Allocate necessary internal buffers.
  1621. unsigned long bufferBytes;
  1622. bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  1623. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  1624. if ( stream_.userBuffer[mode] == NULL ) {
  1625. errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
  1626. goto error;
  1627. }
  1628. if ( stream_.doConvertBuffer[mode] ) {
  1629. bool makeBuffer = true;
  1630. if ( mode == OUTPUT )
  1631. bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  1632. else { // mode == INPUT
  1633. bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
  1634. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  1635. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  1636. if ( bufferBytes < bytesOut ) makeBuffer = false;
  1637. }
  1638. }
  1639. if ( makeBuffer ) {
  1640. bufferBytes *= *bufferSize;
  1641. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  1642. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  1643. if ( stream_.deviceBuffer == NULL ) {
  1644. errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
  1645. goto error;
  1646. }
  1647. }
  1648. }
  1649. // Allocate memory for the Jack ports (channels) identifiers.
  1650. handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
  1651. if ( handle->ports[mode] == NULL ) {
  1652. errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
  1653. goto error;
  1654. }
  1655. stream_.device[mode] = device;
  1656. stream_.channelOffset[mode] = firstChannel;
  1657. stream_.state = STREAM_STOPPED;
  1658. stream_.callbackInfo.object = (void *) this;
  1659. if ( stream_.mode == OUTPUT && mode == INPUT )
  1660. // We had already set up the stream for output.
  1661. stream_.mode = DUPLEX;
  1662. else {
  1663. stream_.mode = mode;
  1664. jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
1665. jack_set_xrun_callback( handle->client, jackXrun, (void *) handle ); // jackXrun() expects the JackHandle itself, not the address of this local pointer
  1666. jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
  1667. }
  1668. // Register our ports.
  1669. char label[64];
  1670. if ( mode == OUTPUT ) {
  1671. for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
  1672. snprintf( label, 64, "outport %d", i );
  1673. handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
  1674. JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
  1675. }
  1676. }
  1677. else {
  1678. for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
  1679. snprintf( label, 64, "inport %d", i );
  1680. handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
  1681. JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
  1682. }
  1683. }
1684. // Set up the buffer conversion information structure. We don't use
  1685. // buffers to do channel offsets, so we override that parameter
  1686. // here.
  1687. if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
  1688. return SUCCESS;
  1689. error:
  1690. if ( handle ) {
  1691. pthread_cond_destroy( &handle->condition );
1692. if ( handle->client ) jack_client_close( handle->client );
  1693. if ( handle->ports[0] ) free( handle->ports[0] );
  1694. if ( handle->ports[1] ) free( handle->ports[1] );
  1695. delete handle;
  1696. stream_.apiHandle = 0;
  1697. }
  1698. for ( int i=0; i<2; i++ ) {
  1699. if ( stream_.userBuffer[i] ) {
  1700. free( stream_.userBuffer[i] );
  1701. stream_.userBuffer[i] = 0;
  1702. }
  1703. }
  1704. if ( stream_.deviceBuffer ) {
  1705. free( stream_.deviceBuffer );
  1706. stream_.deviceBuffer = 0;
  1707. }
  1708. return FAILURE;
  1709. }
  1710. void RtApiJack :: closeStream( void )
  1711. {
  1712. if ( stream_.state == STREAM_CLOSED ) {
  1713. errorText_ = "RtApiJack::closeStream(): no open stream to close!";
  1714. error( RtError::WARNING );
  1715. return;
  1716. }
  1717. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  1718. if ( handle ) {
  1719. if ( stream_.state == STREAM_RUNNING )
  1720. jack_deactivate( handle->client );
  1721. jack_client_close( handle->client );
  1722. }
  1723. if ( handle ) {
  1724. if ( handle->ports[0] ) free( handle->ports[0] );
  1725. if ( handle->ports[1] ) free( handle->ports[1] );
  1726. pthread_cond_destroy( &handle->condition );
  1727. delete handle;
  1728. stream_.apiHandle = 0;
  1729. }
  1730. for ( int i=0; i<2; i++ ) {
  1731. if ( stream_.userBuffer[i] ) {
  1732. free( stream_.userBuffer[i] );
  1733. stream_.userBuffer[i] = 0;
  1734. }
  1735. }
  1736. if ( stream_.deviceBuffer ) {
  1737. free( stream_.deviceBuffer );
  1738. stream_.deviceBuffer = 0;
  1739. }
  1740. stream_.mode = UNINITIALIZED;
  1741. stream_.state = STREAM_CLOSED;
  1742. }
  1743. void RtApiJack :: startStream( void )
  1744. {
  1745. verifyStream();
  1746. if ( stream_.state == STREAM_RUNNING ) {
  1747. errorText_ = "RtApiJack::startStream(): the stream is already running!";
  1748. error( RtError::WARNING );
  1749. return;
  1750. }
  1751. MUTEX_LOCK(&stream_.mutex);
  1752. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  1753. int result = jack_activate( handle->client );
  1754. if ( result ) {
  1755. errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
  1756. goto unlock;
  1757. }
  1758. const char **ports;
  1759. // Get the list of available ports.
  1760. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1761. result = 1;
  1762. ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
  1763. if ( ports == NULL) {
  1764. errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
  1765. goto unlock;
  1766. }
1767. // Now make the port connections. Since RtAudio wasn't designed to
1768. // allow the user to select particular channels of a device, we simply
1769. // connect to the first "nChannels" ports, starting at the channel offset.
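// (For example, with the port labels registered in probeDeviceOpen(),
// channel 0 of an output stream on the "system" device typically ends up
// connected as "RtApiJack:outport 0" -> "system:playback_1".)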
  1770. for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
  1771. result = 1;
  1772. if ( ports[ stream_.channelOffset[0] + i ] )
  1773. result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
  1774. if ( result ) {
  1775. free( ports );
  1776. errorText_ = "RtApiJack::startStream(): error connecting output ports!";
  1777. goto unlock;
  1778. }
  1779. }
  1780. free(ports);
  1781. }
  1782. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  1783. result = 1;
  1784. ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
  1785. if ( ports == NULL) {
  1786. errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
  1787. goto unlock;
  1788. }
  1789. // Now make the port connections. See note above.
  1790. for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
  1791. result = 1;
  1792. if ( ports[ stream_.channelOffset[1] + i ] )
  1793. result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
  1794. if ( result ) {
  1795. free( ports );
  1796. errorText_ = "RtApiJack::startStream(): error connecting input ports!";
  1797. goto unlock;
  1798. }
  1799. }
  1800. free(ports);
  1801. }
  1802. handle->drainCounter = 0;
  1803. handle->internalDrain = false;
  1804. stream_.state = STREAM_RUNNING;
  1805. unlock:
  1806. MUTEX_UNLOCK(&stream_.mutex);
  1807. if ( result == 0 ) return;
  1808. error( RtError::SYSTEM_ERROR );
  1809. }
  1810. void RtApiJack :: stopStream( void )
  1811. {
  1812. verifyStream();
  1813. if ( stream_.state == STREAM_STOPPED ) {
  1814. errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
  1815. error( RtError::WARNING );
  1816. return;
  1817. }
  1818. MUTEX_LOCK( &stream_.mutex );
  1819. JackHandle *handle = (JackHandle *) stream_.apiHandle;
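// When the stop is requested from outside the process callback, ask the
// callback to drain: setting drainCounter to 1 makes callbackEvent() write
// zeros to the outputs and, once drainCounter has advanced past 3, signal
// the condition variable waited on below.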
  1820. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1821. if ( handle->drainCounter == 0 ) {
  1822. handle->drainCounter = 1;
  1823. pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
  1824. }
  1825. }
  1826. jack_deactivate( handle->client );
  1827. stream_.state = STREAM_STOPPED;
  1828. MUTEX_UNLOCK( &stream_.mutex );
  1829. }
  1830. void RtApiJack :: abortStream( void )
  1831. {
  1832. verifyStream();
  1833. if ( stream_.state == STREAM_STOPPED ) {
  1834. errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
  1835. error( RtError::WARNING );
  1836. return;
  1837. }
  1838. JackHandle *handle = (JackHandle *) stream_.apiHandle;
  1839. handle->drainCounter = 1;
  1840. stopStream();
  1841. }
  1842. bool RtApiJack :: callbackEvent( unsigned long nframes )
  1843. {
  1844. if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
  1845. if ( stream_.state == STREAM_CLOSED ) {
  1846. errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
  1847. error( RtError::WARNING );
  1848. return FAILURE;
  1849. }
  1850. if ( stream_.bufferSize != nframes ) {
  1851. errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
  1852. error( RtError::WARNING );
  1853. return FAILURE;
  1854. }
  1855. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  1856. JackHandle *handle = (JackHandle *) stream_.apiHandle;
1857. // Check if we were draining the stream and, if so, signal that we are finished.
  1858. if ( handle->drainCounter > 3 ) {
  1859. if ( handle->internalDrain == false )
  1860. pthread_cond_signal( &handle->condition );
  1861. else
  1862. stopStream();
  1863. return SUCCESS;
  1864. }
  1865. MUTEX_LOCK( &stream_.mutex );
  1866. // Invoke user callback first, to get fresh output data.
  1867. if ( handle->drainCounter == 0 ) {
  1868. RtAudioCallback callback = (RtAudioCallback) info->callback;
  1869. double streamTime = getStreamTime();
  1870. RtAudioStreamStatus status = 0;
  1871. if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
  1872. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  1873. handle->xrun[0] = false;
  1874. }
  1875. if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
  1876. status |= RTAUDIO_INPUT_OVERFLOW;
  1877. handle->xrun[1] = false;
  1878. }
  1879. handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  1880. stream_.bufferSize, streamTime, status, info->userData );
  1881. if ( handle->drainCounter == 2 ) {
  1882. MUTEX_UNLOCK( &stream_.mutex );
  1883. abortStream();
  1884. return SUCCESS;
  1885. }
  1886. else if ( handle->drainCounter == 1 )
  1887. handle->internalDrain = true;
  1888. }
  1889. jack_default_audio_sample_t *jackbuffer;
  1890. unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
  1891. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  1892. if ( handle->drainCounter > 0 ) { // write zeros to the output stream
  1893. for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
  1894. jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
  1895. memset( jackbuffer, 0, bufferBytes );
  1896. }
  1897. }
  1898. else if ( stream_.doConvertBuffer[0] ) {
  1899. convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  1900. for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
  1901. jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
  1902. memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
  1903. }
  1904. }
  1905. else { // no buffer conversion
  1906. for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
  1907. jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
  1908. memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
  1909. }
  1910. }
  1911. if ( handle->drainCounter ) {
  1912. handle->drainCounter++;
  1913. goto unlock;
  1914. }
  1915. }
  1916. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  1917. if ( stream_.doConvertBuffer[1] ) {
  1918. for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
  1919. jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
  1920. memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
  1921. }
  1922. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  1923. }
  1924. else { // no buffer conversion
  1925. for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
  1926. jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
  1927. memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
  1928. }
  1929. }
  1930. }
  1931. unlock:
  1932. MUTEX_UNLOCK(&stream_.mutex);
  1933. RtApi::tickStreamTime();
  1934. return SUCCESS;
  1935. }
  1936. //******************** End of __UNIX_JACK__ *********************//
  1937. #endif
  1938. #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
  1939. // The ASIO API is designed around a callback scheme, so this
  1940. // implementation is similar to that used for OS-X CoreAudio and Linux
  1941. // Jack. The primary constraint with ASIO is that it only allows
  1942. // access to a single driver at a time. Thus, it is not possible to
  1943. // have more than one simultaneous RtAudio stream.
  1944. //
  1945. // This implementation also requires a number of external ASIO files
  1946. // and a few global variables. The ASIO callback scheme does not
  1947. // allow for the passing of user data, so we must create a global
  1948. // pointer to our callbackInfo structure.
  1949. //
  1950. // On unix systems, we make use of a pthread condition variable.
  1951. // Since there is no equivalent in Windows, I hacked something based
  1952. // on information found in
  1953. // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
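// The "something" hacked together for Windows is the manual-reset event
// stored in AsioHandle::condition below: stopStream() waits on it while
// the callback drains the output, and callbackEvent() sets it when the
// drain is complete. A minimal sketch of that handshake pattern (kept in
// a comment, illustrative only; the event name is made up):
/*
#include <windows.h>

int main()
{
  // Manual-reset event, initially non-signaled (cf. AsioHandle::condition).
  HANDLE drainDone = CreateEvent( NULL, TRUE, FALSE, NULL );

  // Signaling side (cf. callbackEvent() once drainCounter passes 3):
  SetEvent( drainDone );

  // Waiting side (cf. stopStream()); in the real code the wait happens
  // first and blocks until the callback signals.
  WaitForSingleObject( drainDone, INFINITE );
  ResetEvent( drainDone ); // re-arm for a later stop request

  CloseHandle( drainDone );
  return 0;
}
*/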
  1954. #include "asiosys.h"
  1955. #include "asio.h"
  1956. #include "iasiothiscallresolver.h"
  1957. #include "asiodrivers.h"
  1958. #include <cmath>
  1959. AsioDrivers drivers;
  1960. ASIOCallbacks asioCallbacks;
  1961. ASIODriverInfo driverInfo;
  1962. CallbackInfo *asioCallbackInfo;
  1963. bool asioXRun;
  1964. struct AsioHandle {
  1965. int drainCounter; // Tracks callback counts when draining
  1966. bool internalDrain; // Indicates if stop is initiated from callback or not.
  1967. ASIOBufferInfo *bufferInfos;
  1968. HANDLE condition;
  1969. AsioHandle()
  1970. :drainCounter(0), internalDrain(false), bufferInfos(0) {}
  1971. };
  1972. // Function declarations (definitions at end of section)
  1973. static const char* getAsioErrorString( ASIOError result );
  1974. void sampleRateChanged( ASIOSampleRate sRate );
  1975. long asioMessages( long selector, long value, void* message, double* opt );
  1976. RtApiAsio :: RtApiAsio()
  1977. {
1978. // ASIO cannot run in a multi-threaded apartment. You can call
1979. // CoInitialize beforehand, but it must be for apartment threading
1980. // (in which case, CoInitialize will return S_FALSE here).
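// (A host application that needs COM initialized with a specific model can
// call CoInitializeEx( NULL, COINIT_APARTMENTTHREADED ) before constructing
// RtAudio and balance it with CoUninitialize() after the object is destroyed.)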
  1981. coInitialized_ = false;
  1982. HRESULT hr = CoInitialize( NULL );
  1983. if ( FAILED(hr) ) {
  1984. errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
  1985. error( RtError::WARNING );
  1986. }
1987. else coInitialized_ = true; // only mark COM as initialized if CoInitialize() succeeded
  1988. drivers.removeCurrentDriver();
  1989. driverInfo.asioVersion = 2;
  1990. // See note in DirectSound implementation about GetDesktopWindow().
  1991. driverInfo.sysRef = GetForegroundWindow();
  1992. }
  1993. RtApiAsio :: ~RtApiAsio()
  1994. {
  1995. if ( stream_.state != STREAM_CLOSED ) closeStream();
  1996. if ( coInitialized_ ) CoUninitialize();
  1997. }
  1998. unsigned int RtApiAsio :: getDeviceCount( void )
  1999. {
  2000. return (unsigned int) drivers.asioGetNumDev();
  2001. }
  2002. RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
  2003. {
  2004. RtAudio::DeviceInfo info;
  2005. info.probed = false;
  2006. // Get device ID
  2007. unsigned int nDevices = getDeviceCount();
  2008. if ( nDevices == 0 ) {
  2009. errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
  2010. error( RtError::INVALID_USE );
  2011. }
  2012. if ( device >= nDevices ) {
  2013. errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
  2014. error( RtError::INVALID_USE );
  2015. }
  2016. // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
  2017. if ( stream_.state != STREAM_CLOSED ) {
  2018. if ( device >= devices_.size() ) {
  2019. errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
  2020. error( RtError::WARNING );
  2021. return info;
  2022. }
  2023. return devices_[ device ];
  2024. }
  2025. char driverName[32];
  2026. ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
  2027. if ( result != ASE_OK ) {
  2028. errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
  2029. errorText_ = errorStream_.str();
  2030. error( RtError::WARNING );
  2031. return info;
  2032. }
  2033. info.name = driverName;
  2034. if ( !drivers.loadDriver( driverName ) ) {
  2035. errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
  2036. errorText_ = errorStream_.str();
  2037. error( RtError::WARNING );
  2038. return info;
  2039. }
  2040. result = ASIOInit( &driverInfo );
  2041. if ( result != ASE_OK ) {
  2042. errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
  2043. errorText_ = errorStream_.str();
  2044. error( RtError::WARNING );
  2045. return info;
  2046. }
  2047. // Determine the device channel information.
  2048. long inputChannels, outputChannels;
  2049. result = ASIOGetChannels( &inputChannels, &outputChannels );
  2050. if ( result != ASE_OK ) {
  2051. drivers.removeCurrentDriver();
  2052. errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
  2053. errorText_ = errorStream_.str();
  2054. error( RtError::WARNING );
  2055. return info;
  2056. }
  2057. info.outputChannels = outputChannels;
  2058. info.inputChannels = inputChannels;
  2059. if ( info.outputChannels > 0 && info.inputChannels > 0 )
  2060. info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  2061. // Determine the supported sample rates.
  2062. info.sampleRates.clear();
  2063. for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
  2064. result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
  2065. if ( result == ASE_OK )
  2066. info.sampleRates.push_back( SAMPLE_RATES[i] );
  2067. }
  2068. // Determine supported data types ... just check first channel and assume rest are the same.
  2069. ASIOChannelInfo channelInfo;
  2070. channelInfo.channel = 0;
  2071. channelInfo.isInput = true;
  2072. if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
  2073. result = ASIOGetChannelInfo( &channelInfo );
  2074. if ( result != ASE_OK ) {
  2075. drivers.removeCurrentDriver();
  2076. errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
  2077. errorText_ = errorStream_.str();
  2078. error( RtError::WARNING );
  2079. return info;
  2080. }
  2081. info.nativeFormats = 0;
  2082. if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
  2083. info.nativeFormats |= RTAUDIO_SINT16;
  2084. else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
  2085. info.nativeFormats |= RTAUDIO_SINT32;
  2086. else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
  2087. info.nativeFormats |= RTAUDIO_FLOAT32;
  2088. else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
  2089. info.nativeFormats |= RTAUDIO_FLOAT64;
  2090. if ( getDefaultOutputDevice() == device )
  2091. info.isDefaultOutput = true;
  2092. if ( getDefaultInputDevice() == device )
  2093. info.isDefaultInput = true;
  2094. info.probed = true;
  2095. drivers.removeCurrentDriver();
  2096. return info;
  2097. }
  2098. void bufferSwitch( long index, ASIOBool processNow )
  2099. {
  2100. RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
  2101. object->callbackEvent( index );
  2102. }
  2103. void RtApiAsio :: saveDeviceInfo( void )
  2104. {
  2105. devices_.clear();
  2106. unsigned int nDevices = getDeviceCount();
  2107. devices_.resize( nDevices );
  2108. for ( unsigned int i=0; i<nDevices; i++ )
  2109. devices_[i] = getDeviceInfo( i );
  2110. }
  2111. bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  2112. unsigned int firstChannel, unsigned int sampleRate,
  2113. RtAudioFormat format, unsigned int *bufferSize,
  2114. RtAudio::StreamOptions *options )
  2115. {
  2116. // For ASIO, a duplex stream MUST use the same driver.
  2117. if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
  2118. errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
  2119. return FAILURE;
  2120. }
  2121. char driverName[32];
  2122. ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
  2123. if ( result != ASE_OK ) {
  2124. errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
  2125. errorText_ = errorStream_.str();
  2126. return FAILURE;
  2127. }
  2128. // The getDeviceInfo() function will not work when a stream is open
  2129. // because ASIO does not allow multiple devices to run at the same
  2130. // time. Thus, we'll probe the system before opening a stream and
  2131. // save the results for use by getDeviceInfo().
  2132. this->saveDeviceInfo();
  2133. // Only load the driver once for duplex stream.
  2134. if ( mode != INPUT || stream_.mode != OUTPUT ) {
  2135. if ( !drivers.loadDriver( driverName ) ) {
  2136. errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
  2137. errorText_ = errorStream_.str();
  2138. return FAILURE;
  2139. }
  2140. result = ASIOInit( &driverInfo );
  2141. if ( result != ASE_OK ) {
  2142. errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
  2143. errorText_ = errorStream_.str();
  2144. return FAILURE;
  2145. }
  2146. }
  2147. // Check the device channel count.
  2148. long inputChannels, outputChannels;
  2149. result = ASIOGetChannels( &inputChannels, &outputChannels );
  2150. if ( result != ASE_OK ) {
  2151. drivers.removeCurrentDriver();
  2152. errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
  2153. errorText_ = errorStream_.str();
  2154. return FAILURE;
  2155. }
  2156. if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
  2157. ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
  2158. drivers.removeCurrentDriver();
  2159. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
  2160. errorText_ = errorStream_.str();
  2161. return FAILURE;
  2162. }
  2163. stream_.nDeviceChannels[mode] = channels;
  2164. stream_.nUserChannels[mode] = channels;
  2165. stream_.channelOffset[mode] = firstChannel;
  2166. // Verify the sample rate is supported.
  2167. result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
  2168. if ( result != ASE_OK ) {
  2169. drivers.removeCurrentDriver();
  2170. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
  2171. errorText_ = errorStream_.str();
  2172. return FAILURE;
  2173. }
  2174. // Get the current sample rate
  2175. ASIOSampleRate currentRate;
  2176. result = ASIOGetSampleRate( &currentRate );
  2177. if ( result != ASE_OK ) {
  2178. drivers.removeCurrentDriver();
  2179. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
  2180. errorText_ = errorStream_.str();
  2181. return FAILURE;
  2182. }
  2183. // Set the sample rate only if necessary
  2184. if ( currentRate != sampleRate ) {
  2185. result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
  2186. if ( result != ASE_OK ) {
  2187. drivers.removeCurrentDriver();
  2188. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
  2189. errorText_ = errorStream_.str();
  2190. return FAILURE;
  2191. }
  2192. }
  2193. // Determine the driver data type.
  2194. ASIOChannelInfo channelInfo;
  2195. channelInfo.channel = 0;
  2196. if ( mode == OUTPUT ) channelInfo.isInput = false;
  2197. else channelInfo.isInput = true;
  2198. result = ASIOGetChannelInfo( &channelInfo );
  2199. if ( result != ASE_OK ) {
  2200. drivers.removeCurrentDriver();
  2201. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
  2202. errorText_ = errorStream_.str();
  2203. return FAILURE;
  2204. }
  2205. // Assuming WINDOWS host is always little-endian.
  2206. stream_.doByteSwap[mode] = false;
  2207. stream_.userFormat = format;
  2208. stream_.deviceFormat[mode] = 0;
  2209. if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
  2210. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  2211. if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
  2212. }
  2213. else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
  2214. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  2215. if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
  2216. }
  2217. else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
  2218. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  2219. if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
  2220. }
  2221. else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
  2222. stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
  2223. if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
  2224. }
  2225. if ( stream_.deviceFormat[mode] == 0 ) {
  2226. drivers.removeCurrentDriver();
  2227. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
  2228. errorText_ = errorStream_.str();
  2229. return FAILURE;
  2230. }
  2231. // Set the buffer size. For a duplex stream, this will end up
  2232. // setting the buffer size based on the input constraints, which
  2233. // should be ok.
  2234. long minSize, maxSize, preferSize, granularity;
  2235. result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
  2236. if ( result != ASE_OK ) {
  2237. drivers.removeCurrentDriver();
  2238. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
  2239. errorText_ = errorStream_.str();
  2240. return FAILURE;
  2241. }
  2242. if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
  2243. else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
  2244. else if ( granularity == -1 ) {
  2245. // Make sure bufferSize is a power of two.
  2246. double power = std::log10( (double) *bufferSize ) / log10( 2.0 );
  2247. *bufferSize = (int) pow( 2.0, floor(power+0.5) );
  2248. if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
  2249. else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
  2250. else *bufferSize = preferSize;
  2251. }
  2252. else if ( granularity != 0 ) {
  2253. // Set to an even multiple of granularity, rounding up.
  2254. *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
  2255. }
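// For example, with hypothetical driver values minSize = 64, maxSize = 2048,
// preferSize = 256 and granularity = 64, a requested *bufferSize of 500 is
// rounded up to 512. With granularity == -1, the request is snapped to the
// nearest power of two and clamped to [minSize, maxSize], falling back to
// preferSize when it already lies inside that range.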
  2256. if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
  2257. drivers.removeCurrentDriver();
  2258. errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
  2259. return FAILURE;
  2260. }
  2261. stream_.bufferSize = *bufferSize;
  2262. stream_.nBuffers = 2;
  2263. if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
  2264. else stream_.userInterleaved = true;
  2265. // ASIO always uses non-interleaved buffers.
  2266. stream_.deviceInterleaved[mode] = false;
  2267. // Allocate, if necessary, our AsioHandle structure for the stream.
  2268. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  2269. if ( handle == 0 ) {
  2270. try {
  2271. handle = new AsioHandle;
  2272. }
  2273. catch ( std::bad_alloc& ) {
  2274. //if ( handle == NULL ) {
  2275. drivers.removeCurrentDriver();
  2276. errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
  2277. return FAILURE;
  2278. }
  2279. handle->bufferInfos = 0;
  2280. // Create a manual-reset event.
  2281. handle->condition = CreateEvent( NULL, // no security
  2282. TRUE, // manual-reset
  2283. FALSE, // non-signaled initially
  2284. NULL ); // unnamed
  2285. stream_.apiHandle = (void *) handle;
  2286. }
  2287. // Create the ASIO internal buffers. Since RtAudio sets up input
  2288. // and output separately, we'll have to dispose of previously
  2289. // created output buffers for a duplex stream.
  2290. long inputLatency, outputLatency;
  2291. if ( mode == INPUT && stream_.mode == OUTPUT ) {
  2292. ASIODisposeBuffers();
  2293. if ( handle->bufferInfos ) free( handle->bufferInfos );
  2294. }
  2295. // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
  2296. bool buffersAllocated = false;
  2297. unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
  2298. handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
  2299. if ( handle->bufferInfos == NULL ) {
  2300. errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
  2301. errorText_ = errorStream_.str();
  2302. goto error;
  2303. }
  2304. ASIOBufferInfo *infos;
  2305. infos = handle->bufferInfos;
  2306. for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
  2307. infos->isInput = ASIOFalse;
  2308. infos->channelNum = i + stream_.channelOffset[0];
  2309. infos->buffers[0] = infos->buffers[1] = 0;
  2310. }
  2311. for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
  2312. infos->isInput = ASIOTrue;
  2313. infos->channelNum = i + stream_.channelOffset[1];
  2314. infos->buffers[0] = infos->buffers[1] = 0;
  2315. }
  2316. // Set up the ASIO callback structure and create the ASIO data buffers.
  2317. asioCallbacks.bufferSwitch = &bufferSwitch;
  2318. asioCallbacks.sampleRateDidChange = &sampleRateChanged;
  2319. asioCallbacks.asioMessage = &asioMessages;
  2320. asioCallbacks.bufferSwitchTimeInfo = NULL;
  2321. result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
  2322. if ( result != ASE_OK ) {
  2323. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
  2324. errorText_ = errorStream_.str();
  2325. goto error;
  2326. }
  2327. buffersAllocated = true;
  2328. // Set flags for buffer conversion.
  2329. stream_.doConvertBuffer[mode] = false;
  2330. if ( stream_.userFormat != stream_.deviceFormat[mode] )
  2331. stream_.doConvertBuffer[mode] = true;
  2332. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  2333. stream_.nUserChannels[mode] > 1 )
  2334. stream_.doConvertBuffer[mode] = true;
  2335. // Allocate necessary internal buffers
  2336. unsigned long bufferBytes;
  2337. bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  2338. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  2339. if ( stream_.userBuffer[mode] == NULL ) {
  2340. errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
  2341. goto error;
  2342. }
  2343. if ( stream_.doConvertBuffer[mode] ) {
  2344. bool makeBuffer = true;
  2345. bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
  2346. if ( mode == INPUT ) {
  2347. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  2348. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  2349. if ( bufferBytes <= bytesOut ) makeBuffer = false;
  2350. }
  2351. }
  2352. if ( makeBuffer ) {
  2353. bufferBytes *= *bufferSize;
  2354. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  2355. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  2356. if ( stream_.deviceBuffer == NULL ) {
  2357. errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
  2358. goto error;
  2359. }
  2360. }
  2361. }
  2362. stream_.sampleRate = sampleRate;
  2363. stream_.device[mode] = device;
  2364. stream_.state = STREAM_STOPPED;
  2365. asioCallbackInfo = &stream_.callbackInfo;
  2366. stream_.callbackInfo.object = (void *) this;
  2367. if ( stream_.mode == OUTPUT && mode == INPUT )
  2368. // We had already set up an output stream.
  2369. stream_.mode = DUPLEX;
  2370. else
  2371. stream_.mode = mode;
  2372. // Determine device latencies
  2373. result = ASIOGetLatencies( &inputLatency, &outputLatency );
  2374. if ( result != ASE_OK ) {
  2375. errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
  2376. errorText_ = errorStream_.str();
  2377. error( RtError::WARNING); // warn but don't fail
  2378. }
  2379. else {
  2380. stream_.latency[0] = outputLatency;
  2381. stream_.latency[1] = inputLatency;
  2382. }
2383. // Set up the buffer conversion information structure. We don't use
  2384. // buffers to do channel offsets, so we override that parameter
  2385. // here.
  2386. if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
  2387. return SUCCESS;
  2388. error:
  2389. if ( buffersAllocated )
  2390. ASIODisposeBuffers();
  2391. drivers.removeCurrentDriver();
  2392. if ( handle ) {
  2393. CloseHandle( handle->condition );
  2394. if ( handle->bufferInfos )
  2395. free( handle->bufferInfos );
  2396. delete handle;
  2397. stream_.apiHandle = 0;
  2398. }
  2399. for ( int i=0; i<2; i++ ) {
  2400. if ( stream_.userBuffer[i] ) {
  2401. free( stream_.userBuffer[i] );
  2402. stream_.userBuffer[i] = 0;
  2403. }
  2404. }
  2405. if ( stream_.deviceBuffer ) {
  2406. free( stream_.deviceBuffer );
  2407. stream_.deviceBuffer = 0;
  2408. }
  2409. return FAILURE;
  2410. }
  2411. void RtApiAsio :: closeStream()
  2412. {
  2413. if ( stream_.state == STREAM_CLOSED ) {
  2414. errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
  2415. error( RtError::WARNING );
  2416. return;
  2417. }
  2418. if ( stream_.state == STREAM_RUNNING ) {
  2419. stream_.state = STREAM_STOPPED;
  2420. ASIOStop();
  2421. }
  2422. ASIODisposeBuffers();
  2423. drivers.removeCurrentDriver();
  2424. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  2425. if ( handle ) {
  2426. CloseHandle( handle->condition );
  2427. if ( handle->bufferInfos )
  2428. free( handle->bufferInfos );
  2429. delete handle;
  2430. stream_.apiHandle = 0;
  2431. }
  2432. for ( int i=0; i<2; i++ ) {
  2433. if ( stream_.userBuffer[i] ) {
  2434. free( stream_.userBuffer[i] );
  2435. stream_.userBuffer[i] = 0;
  2436. }
  2437. }
  2438. if ( stream_.deviceBuffer ) {
  2439. free( stream_.deviceBuffer );
  2440. stream_.deviceBuffer = 0;
  2441. }
  2442. stream_.mode = UNINITIALIZED;
  2443. stream_.state = STREAM_CLOSED;
  2444. }
  2445. void RtApiAsio :: startStream()
  2446. {
  2447. verifyStream();
  2448. if ( stream_.state == STREAM_RUNNING ) {
  2449. errorText_ = "RtApiAsio::startStream(): the stream is already running!";
  2450. error( RtError::WARNING );
  2451. return;
  2452. }
  2453. MUTEX_LOCK( &stream_.mutex );
  2454. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  2455. ASIOError result = ASIOStart();
  2456. if ( result != ASE_OK ) {
  2457. errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
  2458. errorText_ = errorStream_.str();
  2459. goto unlock;
  2460. }
  2461. handle->drainCounter = 0;
  2462. handle->internalDrain = false;
  2463. stream_.state = STREAM_RUNNING;
  2464. asioXRun = false;
  2465. unlock:
  2466. MUTEX_UNLOCK( &stream_.mutex );
  2467. if ( result == ASE_OK ) return;
  2468. error( RtError::SYSTEM_ERROR );
  2469. }
  2470. void RtApiAsio :: stopStream()
  2471. {
  2472. verifyStream();
  2473. if ( stream_.state == STREAM_STOPPED ) {
  2474. errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
  2475. error( RtError::WARNING );
  2476. return;
  2477. }
  2478. MUTEX_LOCK( &stream_.mutex );
  2479. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
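// Same drain handshake as the other callback APIs: setting drainCounter to 1
// makes callbackEvent() stop invoking the user callback, flush and then zero
// the outputs, and finally set the Win32 event waited on below once
// drainCounter has advanced past 3.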
  2480. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  2481. if ( handle->drainCounter == 0 ) {
  2482. handle->drainCounter = 1;
  2483. MUTEX_UNLOCK( &stream_.mutex );
  2484. WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
  2485. ResetEvent( handle->condition );
  2486. MUTEX_LOCK( &stream_.mutex );
  2487. }
  2488. }
  2489. ASIOError result = ASIOStop();
  2490. if ( result != ASE_OK ) {
  2491. errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
  2492. errorText_ = errorStream_.str();
  2493. }
  2494. stream_.state = STREAM_STOPPED;
  2495. MUTEX_UNLOCK( &stream_.mutex );
  2496. if ( result == ASE_OK ) return;
  2497. error( RtError::SYSTEM_ERROR );
  2498. }
  2499. void RtApiAsio :: abortStream()
  2500. {
  2501. verifyStream();
  2502. if ( stream_.state == STREAM_STOPPED ) {
  2503. errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
  2504. error( RtError::WARNING );
  2505. return;
  2506. }
2507. // The following lines were commented out because some behavior was
2508. // noted where the device buffers need to be zeroed to avoid
2509. // continuing sound, even when the device buffers are completely
2510. // disposed. So now, calling abort is the same as calling stop.
  2511. //AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  2512. //handle->drainCounter = 1;
  2513. stopStream();
  2514. }
  2515. bool RtApiAsio :: callbackEvent( long bufferIndex )
  2516. {
  2517. if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
  2518. if ( stream_.state == STREAM_CLOSED ) {
  2519. errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
  2520. error( RtError::WARNING );
  2521. return FAILURE;
  2522. }
  2523. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  2524. AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2525. // Check if we were draining the stream and, if so, signal that we are finished.
  2526. if ( handle->drainCounter > 3 ) {
  2527. if ( handle->internalDrain == false )
  2528. SetEvent( handle->condition );
  2529. else
  2530. stopStream();
  2531. return SUCCESS;
  2532. }
  2533. MUTEX_LOCK( &stream_.mutex );
  2534. // The state might change while waiting on a mutex.
  2535. if ( stream_.state == STREAM_STOPPED ) goto unlock;
  2536. // Invoke user callback to get fresh output data UNLESS we are
  2537. // draining stream.
  2538. if ( handle->drainCounter == 0 ) {
  2539. RtAudioCallback callback = (RtAudioCallback) info->callback;
  2540. double streamTime = getStreamTime();
  2541. RtAudioStreamStatus status = 0;
  2542. if ( stream_.mode != INPUT && asioXRun == true ) {
  2543. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  2544. asioXRun = false;
  2545. }
  2546. if ( stream_.mode != OUTPUT && asioXRun == true ) {
  2547. status |= RTAUDIO_INPUT_OVERFLOW;
  2548. asioXRun = false;
  2549. }
  2550. handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  2551. stream_.bufferSize, streamTime, status, info->userData );
  2552. if ( handle->drainCounter == 2 ) {
  2553. MUTEX_UNLOCK( &stream_.mutex );
  2554. abortStream();
  2555. return SUCCESS;
  2556. }
  2557. else if ( handle->drainCounter == 1 )
  2558. handle->internalDrain = true;
  2559. }
  2560. unsigned int nChannels, bufferBytes, i, j;
  2561. nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
  2562. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  2563. bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
  2564. if ( handle->drainCounter > 1 ) { // write zeros to the output stream
  2565. for ( i=0, j=0; i<nChannels; i++ ) {
  2566. if ( handle->bufferInfos[i].isInput != ASIOTrue )
  2567. memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
  2568. }
  2569. }
  2570. else if ( stream_.doConvertBuffer[0] ) {
  2571. convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  2572. if ( stream_.doByteSwap[0] )
  2573. byteSwapBuffer( stream_.deviceBuffer,
  2574. stream_.bufferSize * stream_.nDeviceChannels[0],
  2575. stream_.deviceFormat[0] );
  2576. for ( i=0, j=0; i<nChannels; i++ ) {
  2577. if ( handle->bufferInfos[i].isInput != ASIOTrue )
  2578. memcpy( handle->bufferInfos[i].buffers[bufferIndex],
  2579. &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
  2580. }
  2581. }
  2582. else {
  2583. if ( stream_.doByteSwap[0] )
  2584. byteSwapBuffer( stream_.userBuffer[0],
  2585. stream_.bufferSize * stream_.nUserChannels[0],
  2586. stream_.userFormat );
  2587. for ( i=0, j=0; i<nChannels; i++ ) {
  2588. if ( handle->bufferInfos[i].isInput != ASIOTrue )
  2589. memcpy( handle->bufferInfos[i].buffers[bufferIndex],
  2590. &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
  2591. }
  2592. }
  2593. if ( handle->drainCounter ) {
  2594. handle->drainCounter++;
  2595. goto unlock;
  2596. }
  2597. }
  2598. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  2599. bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
  2600. if (stream_.doConvertBuffer[1]) {
  2601. // Always interleave ASIO input data.
  2602. for ( i=0, j=0; i<nChannels; i++ ) {
  2603. if ( handle->bufferInfos[i].isInput == ASIOTrue )
  2604. memcpy( &stream_.deviceBuffer[j++*bufferBytes],
  2605. handle->bufferInfos[i].buffers[bufferIndex],
  2606. bufferBytes );
  2607. }
  2608. if ( stream_.doByteSwap[1] )
  2609. byteSwapBuffer( stream_.deviceBuffer,
  2610. stream_.bufferSize * stream_.nDeviceChannels[1],
  2611. stream_.deviceFormat[1] );
  2612. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  2613. }
  2614. else {
  2615. for ( i=0, j=0; i<nChannels; i++ ) {
  2616. if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
  2617. memcpy( &stream_.userBuffer[1][bufferBytes*j++],
  2618. handle->bufferInfos[i].buffers[bufferIndex],
  2619. bufferBytes );
  2620. }
  2621. }
  2622. if ( stream_.doByteSwap[1] )
  2623. byteSwapBuffer( stream_.userBuffer[1],
  2624. stream_.bufferSize * stream_.nUserChannels[1],
  2625. stream_.userFormat );
  2626. }
  2627. }
  2628. unlock:
  2629. // The following call was suggested by Malte Clasen. While the API
  2630. // documentation indicates it should not be required, some device
  2631. // drivers apparently do not function correctly without it.
  2632. ASIOOutputReady();
  2633. MUTEX_UNLOCK( &stream_.mutex );
  2634. RtApi::tickStreamTime();
  2635. return SUCCESS;
  2636. }
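// A note on the drainCounter protocol used above: the value returned by the
// user callback is stored in handle->drainCounter (0 = continue streaming,
// 1 = play out the last buffer and then stop, 2 = abort immediately).  The
// commented-out sketch below is not part of RtAudio; it only illustrates a
// user callback driving this protocol.  The "framesLeft" counter passed via
// userData is hypothetical.
/*
int exampleCallback( void *outputBuffer, void *inputBuffer, unsigned int nFrames,
                     double streamTime, RtAudioStreamStatus status, void *userData )
{
  if ( status & RTAUDIO_OUTPUT_UNDERFLOW )
    std::cerr << "Output underflow detected." << std::endl;

  // ... fill outputBuffer with nFrames frames of audio here ...

  unsigned long *framesLeft = (unsigned long *) userData;
  if ( *framesLeft <= nFrames ) return 1;  // done: drain the last buffer, then stop
  *framesLeft -= nFrames;
  return 0;  // keep streaming (returning 2 would abort without draining)
}
*/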
  2637. void sampleRateChanged( ASIOSampleRate sRate )
  2638. {
2639. // The ASIO documentation says that this usually only happens during
2640. // external sync. Audio processing is not stopped by the driver and the
2641. // actual sample rate might not even have changed; perhaps only the
2642. // sample rate status of an AES/EBU or S/PDIF digital input at the
2643. // audio device changed.
  2644. RtApi *object = (RtApi *) asioCallbackInfo->object;
  2645. try {
  2646. object->stopStream();
  2647. }
  2648. catch ( RtError &exception ) {
  2649. std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
  2650. return;
  2651. }
  2652. std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
  2653. }
  2654. long asioMessages( long selector, long value, void* message, double* opt )
  2655. {
  2656. long ret = 0;
  2657. switch( selector ) {
  2658. case kAsioSelectorSupported:
  2659. if ( value == kAsioResetRequest
  2660. || value == kAsioEngineVersion
  2661. || value == kAsioResyncRequest
  2662. || value == kAsioLatenciesChanged
  2663. // The following three were added for ASIO 2.0, you don't
  2664. // necessarily have to support them.
  2665. || value == kAsioSupportsTimeInfo
  2666. || value == kAsioSupportsTimeCode
  2667. || value == kAsioSupportsInputMonitor)
  2668. ret = 1L;
  2669. break;
  2670. case kAsioResetRequest:
2671. // Defer the task and perform the reset of the driver during the
2672. // next "safe" situation. You cannot reset the driver right now,
2673. // as this code is called from the driver. Resetting the driver is
2674. // done by completely destructing it, i.e. ASIOStop(),
2675. // ASIODisposeBuffers(), destruction. Afterwards you initialize the
2676. // driver again.
  2677. std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
  2678. ret = 1L;
  2679. break;
  2680. case kAsioResyncRequest:
  2681. // This informs the application that the driver encountered some
  2682. // non-fatal data loss. It is used for synchronization purposes
  2683. // of different media. Added mainly to work around the Win16Mutex
  2684. // problems in Windows 95/98 with the Windows Multimedia system,
  2685. // which could lose data because the Mutex was held too long by
  2686. // another thread. However a driver can issue it in other
  2687. // situations, too.
  2688. // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
  2689. asioXRun = true;
  2690. ret = 1L;
  2691. break;
  2692. case kAsioLatenciesChanged:
2693. // This will inform the host application that the driver's
2694. // latencies have changed. Beware, this does not mean that the
2695. // buffer sizes have changed! You might need to update internal
2696. // delay data.
  2697. std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
  2698. ret = 1L;
  2699. break;
  2700. case kAsioEngineVersion:
  2701. // Return the supported ASIO version of the host application. If
  2702. // a host application does not implement this selector, ASIO 1.0
  2703. // is assumed by the driver.
  2704. ret = 2L;
  2705. break;
  2706. case kAsioSupportsTimeInfo:
  2707. // Informs the driver whether the
  2708. // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
  2709. // For compatibility with ASIO 1.0 drivers the host application
  2710. // should always support the "old" bufferSwitch method, too.
  2711. ret = 0;
  2712. break;
  2713. case kAsioSupportsTimeCode:
  2714. // Informs the driver whether application is interested in time
  2715. // code info. If an application does not need to know about time
  2716. // code, the driver has less work to do.
  2717. ret = 0;
  2718. break;
  2719. }
  2720. return ret;
  2721. }
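// For reference, sampleRateChanged() and asioMessages() are not invoked by
// RtAudio itself; they are handed to the driver through the ASIO SDK's
// ASIOCallbacks structure when the buffers are created (see the ASIO
// probeDeviceOpen() earlier in this file).  The commented-out sketch below
// shows the general shape of such a registration; "myBufferSwitch" and the
// buffer parameters are placeholders, not RtAudio's actual names.
/*
ASIOCallbacks callbacks;
callbacks.bufferSwitch = &myBufferSwitch;            // per-buffer processing callback
callbacks.sampleRateDidChange = &sampleRateChanged;  // handler defined above
callbacks.asioMessage = &asioMessages;               // handler defined above
callbacks.bufferSwitchTimeInfo = NULL;               // optional ASIO 2.0 variant
ASIOError result = ASIOCreateBuffers( bufferInfos, nChannels, bufferSize, &callbacks );
*/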
  2722. static const char* getAsioErrorString( ASIOError result )
  2723. {
  2724. struct Messages
  2725. {
  2726. ASIOError value;
  2727. const char*message;
  2728. };
  2729. static Messages m[] =
  2730. {
  2731. { ASE_NotPresent, "Hardware input or output is not present or available." },
  2732. { ASE_HWMalfunction, "Hardware is malfunctioning." },
  2733. { ASE_InvalidParameter, "Invalid input parameter." },
  2734. { ASE_InvalidMode, "Invalid mode." },
  2735. { ASE_SPNotAdvancing, "Sample position not advancing." },
  2736. { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
  2737. { ASE_NoMemory, "Not enough memory to complete the request." }
  2738. };
  2739. for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
  2740. if ( m[i].value == result ) return m[i].message;
  2741. return "Unknown error.";
  2742. }
  2743. //******************** End of __WINDOWS_ASIO__ *********************//
  2744. #endif
  2745. #if defined(__WINDOWS_DS__) // Windows DirectSound API
  2746. // Modified by Robin Davies, October 2005
  2747. // - Improvements to DirectX pointer chasing.
  2748. // - Backdoor RtDsStatistics hook provides DirectX performance information.
  2749. // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
  2750. // - Auto-call CoInitialize for DSOUND and ASIO platforms.
  2751. // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
  2752. #include <dsound.h>
  2753. #include <assert.h>
  2754. #if defined(__MINGW32__)
  2755. // missing from latest mingw winapi
  2756. #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
  2757. #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
  2758. #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
  2759. #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
  2760. #endif
  2761. #define MINIMUM_DEVICE_BUFFER_SIZE 32768
  2762. #ifdef _MSC_VER // if Microsoft Visual C++
  2763. #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
  2764. #endif
  2765. static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
  2766. {
  2767. if (laterPointer > earlierPointer)
  2768. return laterPointer - earlierPointer;
  2769. else
  2770. return laterPointer - earlierPointer + bufferSize;
  2771. }
  2772. static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
  2773. {
  2774. if ( pointer > bufferSize ) pointer -= bufferSize;
  2775. if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
  2776. if ( pointer < earlierPointer ) pointer += bufferSize;
  2777. return pointer >= earlierPointer && pointer < laterPointer;
  2778. }
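// Both helpers above treat the DirectSound buffer as circular.  A worked
// example (illustrative numbers only): with bufferSize = 32768, a "later"
// pointer of 1000 that has wrapped past an "earlier" pointer of 30000 has
// really advanced 1000 - 30000 + 32768 = 3768 bytes, which is what
// dsPointerDifference() returns.  Likewise dsPointerBetween( 31000, 1000,
// 30000, 32768 ) is true, since 31000 lies in the wrapped region running
// from 30000 around to 1000.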
  2779. // A structure to hold various information related to the DirectSound
  2780. // API implementation.
  2781. struct DsHandle {
  2782. unsigned int drainCounter; // Tracks callback counts when draining
  2783. bool internalDrain; // Indicates if stop is initiated from callback or not.
  2784. void *id[2];
  2785. void *buffer[2];
  2786. bool xrun[2];
  2787. UINT bufferPointer[2];
  2788. DWORD dsBufferSize[2];
  2789. DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
  2790. HANDLE condition;
  2791. DsHandle()
  2792. :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
  2793. };
  2794. /*
  2795. RtApiDs::RtDsStatistics RtApiDs::statistics;
  2796. // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
  2797. RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
  2798. {
  2799. RtDsStatistics s = statistics;
  2800. // update the calculated fields.
  2801. if ( s.inputFrameSize != 0 )
  2802. s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
  2803. if ( s.outputFrameSize != 0 )
  2804. s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
  2805. return s;
  2806. }
  2807. */
  2808. // Declarations for utility functions, callbacks, and structures
  2809. // specific to the DirectSound implementation.
  2810. static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
  2811. LPCTSTR description,
  2812. LPCTSTR module,
  2813. LPVOID lpContext );
  2814. static char* getErrorString( int code );
  2815. extern "C" unsigned __stdcall callbackHandler( void *ptr );
  2816. struct EnumInfo {
  2817. bool isInput;
  2818. bool getDefault;
  2819. bool findIndex;
  2820. unsigned int counter;
  2821. unsigned int index;
  2822. LPGUID id;
  2823. std::string name;
  2824. EnumInfo()
  2825. : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
  2826. };
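// EnumInfo is filled in by deviceQueryCallback() (declared above and defined
// later in this section) each time DirectSoundEnumerate() or
// DirectSoundCaptureEnumerate() reports a device.  The commented-out sketch
// below is not RtAudio's callback; it is only a minimal illustration of the
// counting logic such a callback performs, assuming the default device is
// the one reported with a NULL GUID.
/*
static BOOL CALLBACK exampleCountingCallback( LPGUID lpguid, LPCTSTR description,
                                              LPCTSTR module, LPVOID lpContext )
{
  EnumInfo *info = (EnumInfo *) lpContext;
  info->counter++;                           // every reported device bumps the count
  if ( info->getDefault && lpguid == NULL )  // default device carries a NULL GUID
    return FALSE;                            // stop here so (counter - 1) is its index
  return TRUE;                               // otherwise keep enumerating
}
*/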
  2827. RtApiDs :: RtApiDs()
  2828. {
2829. // DirectSound will run under either COM threading model. If CoInitialize
2830. // fails, just accept whatever the mainline chose for a threading model.
  2831. coInitialized_ = false;
  2832. HRESULT hr = CoInitialize( NULL );
  2833. if ( !FAILED( hr ) ) coInitialized_ = true;
  2834. }
  2835. RtApiDs :: ~RtApiDs()
  2836. {
  2837. if ( coInitialized_ ) CoUninitialize(); // balanced call.
  2838. if ( stream_.state != STREAM_CLOSED ) closeStream();
  2839. }
  2840. unsigned int RtApiDs :: getDefaultInputDevice( void )
  2841. {
  2842. // Count output devices.
  2843. EnumInfo info;
  2844. HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
  2845. if ( FAILED( result ) ) {
2846. errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") counting output devices!";
  2847. errorText_ = errorStream_.str();
  2848. error( RtError::WARNING );
  2849. return 0;
  2850. }
  2851. // Now enumerate input devices until we find the id = NULL.
  2852. info.isInput = true;
  2853. info.getDefault = true;
  2854. result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
  2855. if ( FAILED( result ) ) {
  2856. errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
  2857. errorText_ = errorStream_.str();
  2858. error( RtError::WARNING );
  2859. return 0;
  2860. }
  2861. if ( info.counter > 0 ) return info.counter - 1;
  2862. return 0;
  2863. }
  2864. unsigned int RtApiDs :: getDefaultOutputDevice( void )
  2865. {
  2866. // Enumerate output devices until we find the id = NULL.
  2867. EnumInfo info;
  2868. info.getDefault = true;
  2869. HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
  2870. if ( FAILED( result ) ) {
  2871. errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
  2872. errorText_ = errorStream_.str();
  2873. error( RtError::WARNING );
  2874. return 0;
  2875. }
  2876. if ( info.counter > 0 ) return info.counter - 1;
  2877. return 0;
  2878. }
  2879. unsigned int RtApiDs :: getDeviceCount( void )
  2880. {
  2881. // Count DirectSound devices.
  2882. EnumInfo info;
  2883. HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
  2884. if ( FAILED( result ) ) {
  2885. errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
  2886. errorText_ = errorStream_.str();
  2887. error( RtError::WARNING );
  2888. }
  2889. // Count DirectSoundCapture devices.
  2890. info.isInput = true;
  2891. result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
  2892. if ( FAILED( result ) ) {
  2893. errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
  2894. errorText_ = errorStream_.str();
  2895. error( RtError::WARNING );
  2896. }
  2897. return info.counter;
  2898. }
  2899. RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
  2900. {
  2901. // Because DirectSound always enumerates input and output devices
  2902. // separately (and because we don't attempt to combine devices
  2903. // internally), none of our "devices" will ever be duplex.
  2904. RtAudio::DeviceInfo info;
  2905. info.probed = false;
  2906. // Enumerate through devices to find the id (if it exists). Note
  2907. // that we have to do the output enumeration first, even if this is
  2908. // an input device, in order for the device counter to be correct.
  2909. EnumInfo dsinfo;
  2910. dsinfo.findIndex = true;
  2911. dsinfo.index = device;
  2912. HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  2913. if ( FAILED( result ) ) {
  2914. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
  2915. errorText_ = errorStream_.str();
  2916. error( RtError::WARNING );
  2917. }
  2918. if ( dsinfo.name.empty() ) goto probeInput;
  2919. LPDIRECTSOUND output;
  2920. DSCAPS outCaps;
  2921. result = DirectSoundCreate( dsinfo.id, &output, NULL );
  2922. if ( FAILED( result ) ) {
  2923. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
  2924. errorText_ = errorStream_.str();
  2925. error( RtError::WARNING );
  2926. return info;
  2927. }
  2928. outCaps.dwSize = sizeof( outCaps );
  2929. result = output->GetCaps( &outCaps );
  2930. if ( FAILED( result ) ) {
  2931. output->Release();
  2932. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
  2933. errorText_ = errorStream_.str();
  2934. error( RtError::WARNING );
  2935. return info;
  2936. }
  2937. // Get output channel information.
  2938. info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
  2939. // Get sample rate information.
  2940. info.sampleRates.clear();
  2941. for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
  2942. if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
  2943. SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
  2944. info.sampleRates.push_back( SAMPLE_RATES[k] );
  2945. }
  2946. // Get format information.
  2947. if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
  2948. if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
  2949. output->Release();
  2950. if ( getDefaultOutputDevice() == device )
  2951. info.isDefaultOutput = true;
  2952. // Copy name and return.
  2953. info.name = dsinfo.name;
  2954. info.probed = true;
  2955. return info;
  2956. probeInput:
  2957. dsinfo.isInput = true;
  2958. result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  2959. if ( FAILED( result ) ) {
  2960. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
  2961. errorText_ = errorStream_.str();
  2962. error( RtError::WARNING );
  2963. }
  2964. if ( dsinfo.name.empty() ) return info;
  2965. LPDIRECTSOUNDCAPTURE input;
  2966. result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
  2967. if ( FAILED( result ) ) {
  2968. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
  2969. errorText_ = errorStream_.str();
  2970. error( RtError::WARNING );
  2971. return info;
  2972. }
  2973. DSCCAPS inCaps;
  2974. inCaps.dwSize = sizeof( inCaps );
  2975. result = input->GetCaps( &inCaps );
  2976. if ( FAILED( result ) ) {
  2977. input->Release();
  2978. errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
  2979. errorText_ = errorStream_.str();
  2980. error( RtError::WARNING );
  2981. return info;
  2982. }
  2983. // Get input channel information.
  2984. info.inputChannels = inCaps.dwChannels;
  2985. // Get sample rate and format information.
  2986. if ( inCaps.dwChannels == 2 ) {
  2987. if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  2988. if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  2989. if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  2990. if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  2991. if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  2992. if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  2993. if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  2994. if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  2995. if ( info.nativeFormats & RTAUDIO_SINT16 ) {
  2996. if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
  2997. if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
  2998. if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
  2999. if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
  3000. }
  3001. else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
  3002. if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
  3003. if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
  3004. if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
  3005. if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );
  3006. }
  3007. }
  3008. else if ( inCaps.dwChannels == 1 ) {
  3009. if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  3010. if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  3011. if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  3012. if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  3013. if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  3014. if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  3015. if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  3016. if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  3017. if ( info.nativeFormats & RTAUDIO_SINT16 ) {
  3018. if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
  3019. if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
  3020. if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
  3021. if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
  3022. }
  3023. else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
  3024. if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
  3025. if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
  3026. if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
  3027. if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
  3028. }
  3029. }
  3030. else info.inputChannels = 0; // technically, this would be an error
  3031. input->Release();
  3032. if ( info.inputChannels == 0 ) return info;
  3033. if ( getDefaultInputDevice() == device )
  3034. info.isDefaultInput = true;
  3035. // Copy name and return.
  3036. info.name = dsinfo.name;
  3037. info.probed = true;
  3038. return info;
  3039. }
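// From the user's side, the probing above is reached through the public
// RtAudio device-query interface.  A commented-out usage sketch (assuming a
// DirectSound build of the library; error handling omitted):
/*
RtAudio audio;
unsigned int devices = audio.getDeviceCount();
for ( unsigned int i=0; i<devices; i++ ) {
  RtAudio::DeviceInfo info = audio.getDeviceInfo( i );
  if ( info.probed == false ) continue;  // device could not be fully probed
  std::cout << "Device " << i << ": " << info.name
            << " (outputs = " << info.outputChannels
            << ", inputs = " << info.inputChannels << ")" << std::endl;
}
*/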
  3040. bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  3041. unsigned int firstChannel, unsigned int sampleRate,
  3042. RtAudioFormat format, unsigned int *bufferSize,
  3043. RtAudio::StreamOptions *options )
  3044. {
  3045. if ( channels + firstChannel > 2 ) {
  3046. errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
  3047. return FAILURE;
  3048. }
  3049. // Enumerate through devices to find the id (if it exists). Note
  3050. // that we have to do the output enumeration first, even if this is
  3051. // an input device, in order for the device counter to be correct.
  3052. EnumInfo dsinfo;
  3053. dsinfo.findIndex = true;
  3054. dsinfo.index = device;
  3055. HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  3056. if ( FAILED( result ) ) {
  3057. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
  3058. errorText_ = errorStream_.str();
  3059. return FAILURE;
  3060. }
  3061. if ( mode == OUTPUT ) {
  3062. if ( dsinfo.name.empty() ) {
  3063. errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
  3064. errorText_ = errorStream_.str();
  3065. return FAILURE;
  3066. }
  3067. }
  3068. else { // mode == INPUT
  3069. dsinfo.isInput = true;
  3070. HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  3071. if ( FAILED( result ) ) {
  3072. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
  3073. errorText_ = errorStream_.str();
  3074. return FAILURE;
  3075. }
  3076. if ( dsinfo.name.empty() ) {
  3077. errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
  3078. errorText_ = errorStream_.str();
  3079. return FAILURE;
  3080. }
  3081. }
  3082. // According to a note in PortAudio, using GetDesktopWindow()
  3083. // instead of GetForegroundWindow() is supposed to avoid problems
  3084. // that occur when the application's window is not the foreground
  3085. // window. Also, if the application window closes before the
  3086. // DirectSound buffer, DirectSound can crash. However, for console
  3087. // applications, no sound was produced when using GetDesktopWindow().
  3088. HWND hWnd = GetForegroundWindow();
3089. // Check the numberOfBuffers parameter: two is used if the user requests
3090. // RTAUDIO_MINIMIZE_LATENCY; otherwise values below two fall back to three.
3091. // Two is probably too low for capture, but it should work for playback.
  3092. int nBuffers = 0;
  3093. if ( options ) nBuffers = options->numberOfBuffers;
  3094. if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
  3095. if ( nBuffers < 2 ) nBuffers = 3;
  3096. // Create the wave format structure. The data format setting will
  3097. // be determined later.
  3098. WAVEFORMATEX waveFormat;
  3099. ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
  3100. waveFormat.wFormatTag = WAVE_FORMAT_PCM;
  3101. waveFormat.nChannels = channels + firstChannel;
  3102. waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
  3103. // Determine the device buffer size. By default, 32k, but we will
  3104. // grow it to make allowances for very large software buffer sizes.
  3105. DWORD dsBufferSize = 0;
  3106. DWORD dsPointerLeadTime = 0;
  3107. long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
  3108. void *ohandle = 0, *bhandle = 0;
  3109. if ( mode == OUTPUT ) {
  3110. LPDIRECTSOUND output;
  3111. result = DirectSoundCreate( dsinfo.id, &output, NULL );
  3112. if ( FAILED( result ) ) {
  3113. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
  3114. errorText_ = errorStream_.str();
  3115. return FAILURE;
  3116. }
  3117. DSCAPS outCaps;
  3118. outCaps.dwSize = sizeof( outCaps );
  3119. result = output->GetCaps( &outCaps );
  3120. if ( FAILED( result ) ) {
  3121. output->Release();
  3122. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
  3123. errorText_ = errorStream_.str();
  3124. return FAILURE;
  3125. }
  3126. // Check channel information.
  3127. if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3128. errorStream_ << "RtApiDs::probeDeviceOpen: the output device (" << dsinfo.name << ") does not support stereo playback.";
  3129. errorText_ = errorStream_.str();
  3130. return FAILURE;
  3131. }
  3132. // Check format information. Use 16-bit format unless not
  3133. // supported or user requests 8-bit.
  3134. if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
  3135. !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
  3136. waveFormat.wBitsPerSample = 16;
  3137. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  3138. }
  3139. else {
  3140. waveFormat.wBitsPerSample = 8;
  3141. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  3142. }
  3143. stream_.userFormat = format;
  3144. // Update wave format structure and buffer information.
  3145. waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
  3146. waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
  3147. dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
  3148. // If the user wants an even bigger buffer, increase the device buffer size accordingly.
  3149. while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
  3150. bufferBytes *= 2;
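// Worked example (illustrative numbers only): with nBuffers = 4, a 512-frame
// user buffer, 16-bit samples and 2 channels, dsPointerLeadTime = 4 * 512 *
// 2 * 2 = 8192 bytes; 2 * 8192 does not exceed 32768, so the device buffer
// stays at MINIMUM_DEVICE_BUFFER_SIZE.  A 4096-frame user buffer would give
// a lead time of 65536 bytes and grow the device buffer to 131072 bytes.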
  3151. // Set cooperative level to DSSCL_EXCLUSIVE
  3152. result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
  3153. if ( FAILED( result ) ) {
  3154. output->Release();
  3155. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
  3156. errorText_ = errorStream_.str();
  3157. return FAILURE;
  3158. }
  3159. // Even though we will write to the secondary buffer, we need to
  3160. // access the primary buffer to set the correct output format
  3161. // (since the default is 8-bit, 22 kHz!). Setup the DS primary
  3162. // buffer description.
  3163. DSBUFFERDESC bufferDescription;
  3164. ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
  3165. bufferDescription.dwSize = sizeof( DSBUFFERDESC );
  3166. bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
  3167. // Obtain the primary buffer
  3168. LPDIRECTSOUNDBUFFER buffer;
  3169. result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
  3170. if ( FAILED( result ) ) {
  3171. output->Release();
  3172. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
  3173. errorText_ = errorStream_.str();
  3174. return FAILURE;
  3175. }
  3176. // Set the primary DS buffer sound format.
  3177. result = buffer->SetFormat( &waveFormat );
  3178. if ( FAILED( result ) ) {
  3179. output->Release();
  3180. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
  3181. errorText_ = errorStream_.str();
  3182. return FAILURE;
  3183. }
  3184. // Setup the secondary DS buffer description.
  3185. dsBufferSize = (DWORD) bufferBytes;
  3186. ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
  3187. bufferDescription.dwSize = sizeof( DSBUFFERDESC );
  3188. bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
  3189. DSBCAPS_GETCURRENTPOSITION2 |
  3190. DSBCAPS_LOCHARDWARE ); // Force hardware mixing
  3191. bufferDescription.dwBufferBytes = bufferBytes;
  3192. bufferDescription.lpwfxFormat = &waveFormat;
  3193. // Try to create the secondary DS buffer. If that doesn't work,
  3194. // try to use software mixing. Otherwise, there's a problem.
  3195. result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
  3196. if ( FAILED( result ) ) {
  3197. bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
  3198. DSBCAPS_GETCURRENTPOSITION2 |
  3199. DSBCAPS_LOCSOFTWARE ); // Force software mixing
  3200. result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
  3201. if ( FAILED( result ) ) {
  3202. output->Release();
  3203. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
  3204. errorText_ = errorStream_.str();
  3205. return FAILURE;
  3206. }
  3207. }
  3208. // Get the buffer size ... might be different from what we specified.
  3209. DSBCAPS dsbcaps;
  3210. dsbcaps.dwSize = sizeof( DSBCAPS );
  3211. result = buffer->GetCaps( &dsbcaps );
  3212. if ( FAILED( result ) ) {
  3213. output->Release();
  3214. buffer->Release();
  3215. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
  3216. errorText_ = errorStream_.str();
  3217. return FAILURE;
  3218. }
  3219. bufferBytes = dsbcaps.dwBufferBytes;
  3220. // Lock the DS buffer
  3221. LPVOID audioPtr;
  3222. DWORD dataLen;
  3223. result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
  3224. if ( FAILED( result ) ) {
  3225. output->Release();
  3226. buffer->Release();
  3227. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
  3228. errorText_ = errorStream_.str();
  3229. return FAILURE;
  3230. }
  3231. // Zero the DS buffer
  3232. ZeroMemory( audioPtr, dataLen );
  3233. // Unlock the DS buffer
  3234. result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
  3235. if ( FAILED( result ) ) {
  3236. output->Release();
  3237. buffer->Release();
  3238. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
  3239. errorText_ = errorStream_.str();
  3240. return FAILURE;
  3241. }
  3242. dsBufferSize = bufferBytes;
  3243. ohandle = (void *) output;
  3244. bhandle = (void *) buffer;
  3245. }
  3246. if ( mode == INPUT ) {
  3247. LPDIRECTSOUNDCAPTURE input;
  3248. result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
  3249. if ( FAILED( result ) ) {
  3250. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
  3251. errorText_ = errorStream_.str();
  3252. return FAILURE;
  3253. }
  3254. DSCCAPS inCaps;
  3255. inCaps.dwSize = sizeof( inCaps );
  3256. result = input->GetCaps( &inCaps );
  3257. if ( FAILED( result ) ) {
  3258. input->Release();
  3259. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
  3260. errorText_ = errorStream_.str();
  3261. return FAILURE;
  3262. }
  3263. // Check channel information.
  3264. if ( inCaps.dwChannels < channels + firstChannel ) {
3265. errorText_ = "RtApiDs::probeDeviceOpen: the input device does not support the requested input channels.";
  3266. return FAILURE;
  3267. }
  3268. // Check format information. Use 16-bit format unless user
  3269. // requests 8-bit.
  3270. DWORD deviceFormats;
  3271. if ( channels + firstChannel == 2 ) {
  3272. deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
  3273. if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
  3274. waveFormat.wBitsPerSample = 8;
  3275. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  3276. }
  3277. else { // assume 16-bit is supported
  3278. waveFormat.wBitsPerSample = 16;
  3279. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  3280. }
  3281. }
  3282. else { // channel == 1
  3283. deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
  3284. if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
  3285. waveFormat.wBitsPerSample = 8;
  3286. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  3287. }
  3288. else { // assume 16-bit is supported
  3289. waveFormat.wBitsPerSample = 16;
  3290. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  3291. }
  3292. }
  3293. stream_.userFormat = format;
  3294. // Update wave format structure and buffer information.
  3295. waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
  3296. waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
  3297. // Setup the secondary DS buffer description.
  3298. dsBufferSize = bufferBytes;
  3299. DSCBUFFERDESC bufferDescription;
  3300. ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
  3301. bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
  3302. bufferDescription.dwFlags = 0;
  3303. bufferDescription.dwReserved = 0;
  3304. bufferDescription.dwBufferBytes = bufferBytes;
  3305. bufferDescription.lpwfxFormat = &waveFormat;
  3306. // Create the capture buffer.
  3307. LPDIRECTSOUNDCAPTUREBUFFER buffer;
  3308. result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
  3309. if ( FAILED( result ) ) {
  3310. input->Release();
  3311. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
  3312. errorText_ = errorStream_.str();
  3313. return FAILURE;
  3314. }
  3315. // Lock the capture buffer
  3316. LPVOID audioPtr;
  3317. DWORD dataLen;
  3318. result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
  3319. if ( FAILED( result ) ) {
  3320. input->Release();
  3321. buffer->Release();
  3322. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
  3323. errorText_ = errorStream_.str();
  3324. return FAILURE;
  3325. }
  3326. // Zero the buffer
  3327. ZeroMemory( audioPtr, dataLen );
  3328. // Unlock the buffer
  3329. result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
  3330. if ( FAILED( result ) ) {
  3331. input->Release();
  3332. buffer->Release();
  3333. errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
  3334. errorText_ = errorStream_.str();
  3335. return FAILURE;
  3336. }
  3337. dsBufferSize = bufferBytes;
  3338. ohandle = (void *) input;
  3339. bhandle = (void *) buffer;
  3340. }
  3341. // Set various stream parameters
  3342. DsHandle *handle = 0;
  3343. stream_.nDeviceChannels[mode] = channels + firstChannel;
  3344. stream_.nUserChannels[mode] = channels;
  3345. stream_.bufferSize = *bufferSize;
  3346. stream_.channelOffset[mode] = firstChannel;
  3347. stream_.deviceInterleaved[mode] = true;
  3348. if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
  3349. else stream_.userInterleaved = true;
  3350. // Set flag for buffer conversion
  3351. stream_.doConvertBuffer[mode] = false;
  3352. if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
  3353. stream_.doConvertBuffer[mode] = true;
  3354. if (stream_.userFormat != stream_.deviceFormat[mode])
  3355. stream_.doConvertBuffer[mode] = true;
  3356. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  3357. stream_.nUserChannels[mode] > 1 )
  3358. stream_.doConvertBuffer[mode] = true;
  3359. // Allocate necessary internal buffers
  3360. bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  3361. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  3362. if ( stream_.userBuffer[mode] == NULL ) {
  3363. errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
  3364. goto error;
  3365. }
  3366. if ( stream_.doConvertBuffer[mode] ) {
  3367. bool makeBuffer = true;
  3368. bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
  3369. if ( mode == INPUT ) {
  3370. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  3371. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  3372. if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
  3373. }
  3374. }
  3375. if ( makeBuffer ) {
  3376. bufferBytes *= *bufferSize;
  3377. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  3378. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  3379. if ( stream_.deviceBuffer == NULL ) {
  3380. errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
  3381. goto error;
  3382. }
  3383. }
  3384. }
  3385. // Allocate our DsHandle structures for the stream.
  3386. if ( stream_.apiHandle == 0 ) {
  3387. try {
  3388. handle = new DsHandle;
  3389. }
  3390. catch ( std::bad_alloc& ) {
3391. errorText_ = "RtApiDs::probeDeviceOpen: error allocating DsHandle memory.";
  3392. goto error;
  3393. }
  3394. // Create a manual-reset event.
  3395. handle->condition = CreateEvent( NULL, // no security
  3396. TRUE, // manual-reset
  3397. FALSE, // non-signaled initially
  3398. NULL ); // unnamed
  3399. stream_.apiHandle = (void *) handle;
  3400. }
  3401. else
  3402. handle = (DsHandle *) stream_.apiHandle;
  3403. handle->id[mode] = ohandle;
  3404. handle->buffer[mode] = bhandle;
  3405. handle->dsBufferSize[mode] = dsBufferSize;
  3406. handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
  3407. stream_.device[mode] = device;
  3408. stream_.state = STREAM_STOPPED;
  3409. if ( stream_.mode == OUTPUT && mode == INPUT )
  3410. // We had already set up an output stream.
  3411. stream_.mode = DUPLEX;
  3412. else
  3413. stream_.mode = mode;
  3414. stream_.nBuffers = nBuffers;
  3415. stream_.sampleRate = sampleRate;
  3416. // Setup the buffer conversion information structure.
  3417. if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
  3418. // Setup the callback thread.
  3419. unsigned threadId;
  3420. stream_.callbackInfo.object = (void *) this;
  3421. stream_.callbackInfo.isRunning = true;
  3422. stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
  3423. &stream_.callbackInfo, 0, &threadId );
  3424. if ( stream_.callbackInfo.thread == 0 ) {
  3425. errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
  3426. goto error;
  3427. }
  3428. return SUCCESS;
  3429. error:
  3430. if ( handle ) {
  3431. if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
  3432. LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
  3433. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3434. if ( buffer ) buffer->Release();
  3435. object->Release();
  3436. }
  3437. if ( handle->buffer[1] ) {
  3438. LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
  3439. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3440. if ( buffer ) buffer->Release();
  3441. object->Release();
  3442. }
  3443. CloseHandle( handle->condition );
  3444. delete handle;
  3445. stream_.apiHandle = 0;
  3446. }
  3447. for ( int i=0; i<2; i++ ) {
  3448. if ( stream_.userBuffer[i] ) {
  3449. free( stream_.userBuffer[i] );
  3450. stream_.userBuffer[i] = 0;
  3451. }
  3452. }
  3453. if ( stream_.deviceBuffer ) {
  3454. free( stream_.deviceBuffer );
  3455. stream_.deviceBuffer = 0;
  3456. }
  3457. return FAILURE;
  3458. }
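// The parameters handled above arrive through the public RtAudio::openStream()
// call.  The commented-out sketch below shows a typical playback setup;
// "exampleCallback" is a hypothetical user callback and error handling
// (RtError exceptions) is omitted.
/*
RtAudio audio;
RtAudio::StreamParameters oParams;
oParams.deviceId = audio.getDefaultOutputDevice();
oParams.nChannels = 2;
oParams.firstChannel = 0;
RtAudio::StreamOptions options;
options.numberOfBuffers = 4;                 // becomes nBuffers in probeDeviceOpen()
// options.flags = RTAUDIO_MINIMIZE_LATENCY; // would force nBuffers back to two
unsigned int bufferFrames = 512;
audio.openStream( &oParams, NULL, RTAUDIO_SINT16, 44100, &bufferFrames,
                  &exampleCallback, NULL, &options );
audio.startStream();
*/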
  3459. void RtApiDs :: closeStream()
  3460. {
  3461. if ( stream_.state == STREAM_CLOSED ) {
  3462. errorText_ = "RtApiDs::closeStream(): no open stream to close!";
  3463. error( RtError::WARNING );
  3464. return;
  3465. }
  3466. // Stop the callback thread.
  3467. stream_.callbackInfo.isRunning = false;
  3468. WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
  3469. CloseHandle( (HANDLE) stream_.callbackInfo.thread );
  3470. DsHandle *handle = (DsHandle *) stream_.apiHandle;
  3471. if ( handle ) {
  3472. if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
  3473. LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
  3474. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3475. if ( buffer ) {
  3476. buffer->Stop();
  3477. buffer->Release();
  3478. }
  3479. object->Release();
  3480. }
  3481. if ( handle->buffer[1] ) {
  3482. LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
  3483. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3484. if ( buffer ) {
  3485. buffer->Stop();
  3486. buffer->Release();
  3487. }
  3488. object->Release();
  3489. }
  3490. CloseHandle( handle->condition );
  3491. delete handle;
  3492. stream_.apiHandle = 0;
  3493. }
  3494. for ( int i=0; i<2; i++ ) {
  3495. if ( stream_.userBuffer[i] ) {
  3496. free( stream_.userBuffer[i] );
  3497. stream_.userBuffer[i] = 0;
  3498. }
  3499. }
  3500. if ( stream_.deviceBuffer ) {
  3501. free( stream_.deviceBuffer );
  3502. stream_.deviceBuffer = 0;
  3503. }
  3504. stream_.mode = UNINITIALIZED;
  3505. stream_.state = STREAM_CLOSED;
  3506. }
  3507. void RtApiDs :: startStream()
  3508. {
  3509. verifyStream();
  3510. if ( stream_.state == STREAM_RUNNING ) {
  3511. errorText_ = "RtApiDs::startStream(): the stream is already running!";
  3512. error( RtError::WARNING );
  3513. return;
  3514. }
  3515. // Increase scheduler frequency on lesser windows (a side-effect of
  3516. // increasing timer accuracy). On greater windows (Win2K or later),
  3517. // this is already in effect.
  3518. MUTEX_LOCK( &stream_.mutex );
  3519. DsHandle *handle = (DsHandle *) stream_.apiHandle;
  3520. timeBeginPeriod( 1 );
  3521. /*
  3522. memset( &statistics, 0, sizeof( statistics ) );
  3523. statistics.sampleRate = stream_.sampleRate;
  3524. statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
  3525. */
  3526. buffersRolling = false;
  3527. duplexPrerollBytes = 0;
  3528. if ( stream_.mode == DUPLEX ) {
  3529. // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
  3530. duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
  3531. }
  3532. HRESULT result = 0;
  3533. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  3534. //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
  3535. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3536. result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
  3537. if ( FAILED( result ) ) {
  3538. errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
  3539. errorText_ = errorStream_.str();
  3540. goto unlock;
  3541. }
  3542. }
  3543. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  3544. //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
  3545. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3546. result = buffer->Start( DSCBSTART_LOOPING );
  3547. if ( FAILED( result ) ) {
  3548. errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
  3549. errorText_ = errorStream_.str();
  3550. goto unlock;
  3551. }
  3552. }
  3553. handle->drainCounter = 0;
  3554. handle->internalDrain = false;
  3555. stream_.state = STREAM_RUNNING;
  3556. unlock:
  3557. MUTEX_UNLOCK( &stream_.mutex );
  3558. if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
  3559. }
  3560. void RtApiDs :: stopStream()
  3561. {
  3562. verifyStream();
  3563. if ( stream_.state == STREAM_STOPPED ) {
  3564. errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
  3565. error( RtError::WARNING );
  3566. return;
  3567. }
  3568. MUTEX_LOCK( &stream_.mutex );
  3569. HRESULT result = 0;
  3570. LPVOID audioPtr;
  3571. DWORD dataLen;
  3572. DsHandle *handle = (DsHandle *) stream_.apiHandle;
  3573. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  3574. if ( handle->drainCounter == 0 ) {
  3575. handle->drainCounter = 1;
  3576. MUTEX_UNLOCK( &stream_.mutex );
  3577. WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
  3578. ResetEvent( handle->condition );
  3579. MUTEX_LOCK( &stream_.mutex );
  3580. }
  3581. // Stop the buffer and clear memory
  3582. LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3583. result = buffer->Stop();
  3584. if ( FAILED( result ) ) {
3585. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
  3586. errorText_ = errorStream_.str();
  3587. goto unlock;
  3588. }
  3589. // Lock the buffer and clear it so that if we start to play again,
  3590. // we won't have old data playing.
  3591. result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
  3592. if ( FAILED( result ) ) {
3593. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
  3594. errorText_ = errorStream_.str();
  3595. goto unlock;
  3596. }
  3597. // Zero the DS buffer
  3598. ZeroMemory( audioPtr, dataLen );
  3599. // Unlock the DS buffer
  3600. result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
  3601. if ( FAILED( result ) ) {
3602. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
  3603. errorText_ = errorStream_.str();
  3604. goto unlock;
  3605. }
3606. // If we start playing again, we must begin at the beginning of the buffer.
  3607. handle->bufferPointer[0] = 0;
  3608. }
  3609. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  3610. LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3611. audioPtr = NULL;
  3612. dataLen = 0;
  3613. result = buffer->Stop();
  3614. if ( FAILED( result ) ) {
3615. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
  3616. errorText_ = errorStream_.str();
  3617. goto unlock;
  3618. }
  3619. // Lock the buffer and clear it so that if we start to play again,
  3620. // we won't have old data playing.
  3621. result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
  3622. if ( FAILED( result ) ) {
3623. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
  3624. errorText_ = errorStream_.str();
  3625. goto unlock;
  3626. }
  3627. // Zero the DS buffer
  3628. ZeroMemory( audioPtr, dataLen );
  3629. // Unlock the DS buffer
  3630. result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
  3631. if ( FAILED( result ) ) {
3632. errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
  3633. errorText_ = errorStream_.str();
  3634. goto unlock;
  3635. }
3636. // If we start recording again, we must begin at the beginning of the buffer.
  3637. handle->bufferPointer[1] = 0;
  3638. }
  3639. unlock:
  3640. timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
  3641. stream_.state = STREAM_STOPPED;
  3642. MUTEX_UNLOCK( &stream_.mutex );
  3643. if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
  3644. }
  3645. void RtApiDs :: abortStream()
  3646. {
  3647. verifyStream();
  3648. if ( stream_.state == STREAM_STOPPED ) {
  3649. errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
  3650. error( RtError::WARNING );
  3651. return;
  3652. }
  3653. DsHandle *handle = (DsHandle *) stream_.apiHandle;
  3654. handle->drainCounter = 1;
  3655. stopStream();
  3656. }
  3657. void RtApiDs :: callbackEvent()
  3658. {
  3659. if ( stream_.state == STREAM_STOPPED ) {
  3660. Sleep(50); // sleep 50 milliseconds
  3661. return;
  3662. }
  3663. if ( stream_.state == STREAM_CLOSED ) {
  3664. errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
  3665. error( RtError::WARNING );
  3666. return;
  3667. }
  3668. CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  3669. DsHandle *handle = (DsHandle *) stream_.apiHandle;
3670. // Check if we were draining the stream and signal that we are finished.
  3671. if ( handle->drainCounter > stream_.nBuffers + 2 ) {
  3672. if ( handle->internalDrain == false )
  3673. SetEvent( handle->condition );
  3674. else
  3675. stopStream();
  3676. return;
  3677. }
  3678. MUTEX_LOCK( &stream_.mutex );
  3679. // Invoke user callback to get fresh output data UNLESS we are
  3680. // draining stream.
  3681. if ( handle->drainCounter == 0 ) {
  3682. RtAudioCallback callback = (RtAudioCallback) info->callback;
  3683. double streamTime = getStreamTime();
  3684. RtAudioStreamStatus status = 0;
  3685. if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
  3686. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  3687. handle->xrun[0] = false;
  3688. }
  3689. if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
  3690. status |= RTAUDIO_INPUT_OVERFLOW;
  3691. handle->xrun[1] = false;
  3692. }
  3693. handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  3694. stream_.bufferSize, streamTime, status, info->userData );
  3695. if ( handle->drainCounter == 2 ) {
  3696. MUTEX_UNLOCK( &stream_.mutex );
  3697. abortStream();
  3698. return;
  3699. }
  3700. else if ( handle->drainCounter == 1 )
  3701. handle->internalDrain = true;
  3702. }
  3703. HRESULT result;
  3704. DWORD currentWritePos, safeWritePos;
  3705. DWORD currentReadPos, safeReadPos;
  3706. DWORD leadPos;
  3707. UINT nextWritePos;
  3708. #ifdef GENERATE_DEBUG_LOG
  3709. DWORD writeTime, readTime;
  3710. #endif
  3711. LPVOID buffer1 = NULL;
  3712. LPVOID buffer2 = NULL;
  3713. DWORD bufferSize1 = 0;
  3714. DWORD bufferSize2 = 0;
  3715. char *buffer;
  3716. long bufferBytes;
  3717. if ( stream_.mode == DUPLEX && !buffersRolling ) {
  3718. assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
  3719. // It takes a while for the devices to get rolling. As a result,
  3720. // there's no guarantee that the capture and write device pointers
  3721. // will move in lockstep. Wait here for both devices to start
  3722. // rolling, and then set our buffer pointers accordingly.
  3723. // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
  3724. // bytes later than the write buffer.
  3725. // Stub: a serious risk of having a pre-emptive scheduling round
  3726. // take place between the two GetCurrentPosition calls... but I'm
  3727. // really not sure how to solve the problem. Temporarily boost to
  3728. // Realtime priority, maybe; but I'm not sure what priority the
  3729. // DirectSound service threads run at. We *should* be roughly
  3730. // within a ms or so of correct.
  3731. LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3732. LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3733. DWORD initialWritePos, initialSafeWritePos;
  3734. DWORD initialReadPos, initialSafeReadPos;
  3735. result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
  3736. if ( FAILED( result ) ) {
  3737. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
  3738. errorText_ = errorStream_.str();
  3739. error( RtError::SYSTEM_ERROR );
  3740. }
  3741. result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
  3742. if ( FAILED( result ) ) {
  3743. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
  3744. errorText_ = errorStream_.str();
  3745. error( RtError::SYSTEM_ERROR );
  3746. }
  3747. while ( true ) {
  3748. result = dsWriteBuffer->GetCurrentPosition( &currentWritePos, &safeWritePos );
  3749. if ( FAILED( result ) ) {
  3750. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
  3751. errorText_ = errorStream_.str();
  3752. error( RtError::SYSTEM_ERROR );
  3753. }
  3754. result = dsCaptureBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
  3755. if ( FAILED( result ) ) {
  3756. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
  3757. errorText_ = errorStream_.str();
  3758. error( RtError::SYSTEM_ERROR );
  3759. }
  3760. if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
  3761. Sleep( 1 );
  3762. }
  3763. assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
  3764. buffersRolling = true;
  3765. handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
  3766. handle->bufferPointer[1] = safeReadPos;
  3767. }
  3768. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  3769. LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
  3770. if ( handle->drainCounter > 1 ) { // write zeros to the output stream
  3771. bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
  3772. bufferBytes *= formatBytes( stream_.userFormat );
  3773. memset( stream_.userBuffer[0], 0, bufferBytes );
  3774. }
  3775. // Setup parameters and do buffer conversion if necessary.
  3776. if ( stream_.doConvertBuffer[0] ) {
  3777. buffer = stream_.deviceBuffer;
  3778. convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  3779. bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
  3780. bufferBytes *= formatBytes( stream_.deviceFormat[0] );
  3781. }
  3782. else {
  3783. buffer = stream_.userBuffer[0];
  3784. bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
  3785. bufferBytes *= formatBytes( stream_.userFormat );
  3786. }
  3787. // No byte swapping necessary in DirectSound implementation.
  3788. // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
  3789. // unsigned. So, we need to convert our signed 8-bit data here to
  3790. // unsigned.
  3791. if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
  3792. for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
  3793. DWORD dsBufferSize = handle->dsBufferSize[0];
  3794. nextWritePos = handle->bufferPointer[0];
  3795. DWORD endWrite;
  3796. while ( true ) {
  3797. // Find out where the read and "safe write" pointers are.
  3798. result = dsBuffer->GetCurrentPosition( &currentWritePos, &safeWritePos );
  3799. if ( FAILED( result ) ) {
  3800. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
  3801. errorText_ = errorStream_.str();
  3802. error( RtError::SYSTEM_ERROR );
  3803. }
  3804. leadPos = safeWritePos + handle->dsPointerLeadTime[0];
  3805. if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
  3806. if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
  3807. endWrite = nextWritePos + bufferBytes;
  3808. // Check whether the entire write region is behind the play pointer.
  3809. if ( leadPos >= endWrite ) break;
  3810. // If we are here, then we must wait until the play pointer gets
  3811. // beyond the write region. The approach here is to use the
  3812. // Sleep() function to suspend operation until safePos catches
  3813. // up. Calculate number of milliseconds to wait as:
  3814. // time = distance * (milliseconds/second) * fudgefactor /
  3815. // ((bytes/sample) * (samples/second))
  3816. // A "fudgefactor" less than 1 is used because it was found
  3817. // that sleeping too long was MUCH worse than sleeping for
  3818. // several shorter periods.
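// Worked example (assuming 16-bit stereo data at 44100 Hz): a gap of
// 4096 bytes gives roughly 4096 * 900 / (2 * 2 * 44100) ~= 20.9 ms of sleep.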
  3819. double millis = ( endWrite - leadPos ) * 900.0;
  3820. millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
  3821. if ( millis < 1.0 ) millis = 1.0;
  3822. if ( millis > 50.0 ) {
  3823. static int nOverruns = 0;
  3824. ++nOverruns;
  3825. }
  3826. Sleep( (DWORD) millis );
  3827. }
  3828. //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
  3829. // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
  3830. //}
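// The "forbidden zone" mentioned below is, roughly, the region between the
// play cursor (currentWritePos) and the write cursor (safeWritePos) reported
// by GetCurrentPosition(); data in that span may already be committed to the
// hardware, so landing there means we have fallen behind.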
  3831. if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
  3832. || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
  3833. // We've strayed into the forbidden zone ... resync the read pointer.
  3834. //++statistics.numberOfWriteUnderruns;
  3835. handle->xrun[0] = true;
  3836. nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
  3837. while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
  3838. handle->bufferPointer[0] = nextWritePos;
  3839. endWrite = nextWritePos + bufferBytes;
  3840. }
  3841. // Lock free space in the buffer
  3842. result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
  3843. &bufferSize1, &buffer2, &bufferSize2, 0 );
  3844. if ( FAILED( result ) ) {
  3845. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
  3846. errorText_ = errorStream_.str();
  3847. error( RtError::SYSTEM_ERROR );
  3848. }
  3849. // Copy our buffer into the DS buffer
  3850. CopyMemory( buffer1, buffer, bufferSize1 );
  3851. if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
  3852. // Update our buffer offset and unlock sound buffer
3853. result = dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
  3854. if ( FAILED( result ) ) {
  3855. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
  3856. errorText_ = errorStream_.str();
  3857. error( RtError::SYSTEM_ERROR );
  3858. }
  3859. nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
  3860. handle->bufferPointer[0] = nextWritePos;
  3861. if ( handle->drainCounter ) {
  3862. handle->drainCounter++;
  3863. goto unlock;
  3864. }
  3865. }
  3866. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  3867. // Setup parameters.
  3868. if ( stream_.doConvertBuffer[1] ) {
  3869. buffer = stream_.deviceBuffer;
  3870. bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
  3871. bufferBytes *= formatBytes( stream_.deviceFormat[1] );
  3872. }
  3873. else {
  3874. buffer = stream_.userBuffer[1];
  3875. bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
  3876. bufferBytes *= formatBytes( stream_.userFormat );
  3877. }
  3878. LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
  3879. long nextReadPos = handle->bufferPointer[1];
  3880. DWORD dsBufferSize = handle->dsBufferSize[1];
  3881. // Find out where the write and "safe read" pointers are.
  3882. result = dsBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
  3883. if ( FAILED( result ) ) {
  3884. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
  3885. errorText_ = errorStream_.str();
  3886. error( RtError::SYSTEM_ERROR );
  3887. }
  3888. if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
  3889. DWORD endRead = nextReadPos + bufferBytes;
  3890. // Handling depends on whether we are INPUT or DUPLEX.
  3891. // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
  3892. // then a wait here will drag the write pointers into the forbidden zone.
  3893. //
  3894. // In DUPLEX mode, rather than wait, we will back off the read pointer until
  3895. // it's in a safe position. This causes dropouts, but it seems to be the only
3896. // practical way to sync up the read and write pointers reliably, given the
3897. // very complex relationship between phase and increment of the read and write
  3898. // pointers.
  3899. //
  3900. // In order to minimize audible dropouts in DUPLEX mode, we will
  3901. // provide a pre-roll period of 0.5 seconds in which we return
  3902. // zeros from the read buffer while the pointers sync up.
  3903. if ( stream_.mode == DUPLEX ) {
  3904. if ( safeReadPos < endRead ) {
  3905. if ( duplexPrerollBytes <= 0 ) {
3906. // Pre-roll time over. Be more aggressive.
  3907. int adjustment = endRead-safeReadPos;
  3908. handle->xrun[1] = true;
  3909. //++statistics.numberOfReadOverruns;
  3910. // Two cases:
  3911. // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
  3912. // and perform fine adjustments later.
  3913. // - small adjustments: back off by twice as much.
  3914. if ( adjustment >= 2*bufferBytes )
  3915. nextReadPos = safeReadPos-2*bufferBytes;
  3916. else
  3917. nextReadPos = safeReadPos-bufferBytes-adjustment;
  3918. //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
  3919. //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
  3920. if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
  3921. }
  3922. else {
3923. // In pre-roll time. Just do it.
  3924. nextReadPos = safeReadPos-bufferBytes;
  3925. while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
  3926. }
  3927. endRead = nextReadPos + bufferBytes;
  3928. }
  3929. }
  3930. else { // mode == INPUT
  3931. while ( safeReadPos < endRead ) {
  3932. // See comments for playback.
  3933. double millis = (endRead - safeReadPos) * 900.0;
  3934. millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
  3935. if ( millis < 1.0 ) millis = 1.0;
  3936. Sleep( (DWORD) millis );
  3937. // Wake up, find out where we are now
  3938. result = dsBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
  3939. if ( FAILED( result ) ) {
  3940. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
  3941. errorText_ = errorStream_.str();
  3942. error( RtError::SYSTEM_ERROR );
  3943. }
  3944. if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
  3945. }
  3946. }
  3947. //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
  3948. // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
3949. // Lock the capture buffer region we are about to read
  3950. result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
  3951. &bufferSize1, &buffer2, &bufferSize2, 0 );
  3952. if ( FAILED( result ) ) {
  3953. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
  3954. errorText_ = errorStream_.str();
  3955. error( RtError::SYSTEM_ERROR );
  3956. }
  3957. if ( duplexPrerollBytes <= 0 ) {
3958. // Copy the DS capture buffer into our buffer
  3959. CopyMemory( buffer, buffer1, bufferSize1 );
  3960. if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
  3961. }
  3962. else {
  3963. memset( buffer, 0, bufferSize1 );
  3964. if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
  3965. duplexPrerollBytes -= bufferSize1 + bufferSize2;
  3966. }
  3967. // Update our buffer offset and unlock sound buffer
  3968. nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
3969. result = dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
  3970. if ( FAILED( result ) ) {
  3971. errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
  3972. errorText_ = errorStream_.str();
  3973. error( RtError::SYSTEM_ERROR );
  3974. }
  3975. handle->bufferPointer[1] = nextReadPos;
  3976. // No byte swapping necessary in DirectSound implementation.
  3977. // If necessary, convert 8-bit data from unsigned to signed.
  3978. if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
  3979. for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
  3980. // Do buffer conversion if necessary.
  3981. if ( stream_.doConvertBuffer[1] )
  3982. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  3983. }
  3984. #ifdef GENERATE_DEBUG_LOG
  3985. if ( currentDebugLogEntry < debugLog.size() )
  3986. {
  3987. TTickRecord &r = debugLog[currentDebugLogEntry++];
  3988. r.currentReadPointer = currentReadPos;
  3989. r.safeReadPointer = safeReadPos;
  3990. r.currentWritePointer = currentWritePos;
  3991. r.safeWritePointer = safeWritePos;
  3992. r.readTime = readTime;
  3993. r.writeTime = writeTime;
3994. r.nextReadPointer = handle->bufferPointer[1];
3995. r.nextWritePointer = handle->bufferPointer[0];
  3996. }
  3997. #endif
  3998. unlock:
  3999. MUTEX_UNLOCK( &stream_.mutex );
  4000. RtApi::tickStreamTime();
  4001. }
  4002. // Definitions for utility functions and callbacks
  4003. // specific to the DirectSound implementation.
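// Thread entry point for the DirectSound callback thread: it simply spins on
// callbackEvent() until the stream clears info->isRunning, then exits via
// _endthreadex().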
  4004. extern "C" unsigned __stdcall callbackHandler( void *ptr )
  4005. {
  4006. CallbackInfo *info = (CallbackInfo *) ptr;
  4007. RtApiDs *object = (RtApiDs *) info->object;
  4008. bool* isRunning = &info->isRunning;
  4009. while ( *isRunning == true ) {
  4010. object->callbackEvent();
  4011. }
  4012. _endthreadex( 0 );
  4013. return 0;
  4014. }
  4015. #include "tchar.h"
  4016. std::string convertTChar( LPCTSTR name )
  4017. {
  4018. std::string s;
  4019. #if defined( UNICODE ) || defined( _UNICODE )
  4020. // Yes, this conversion doesn't make sense for two-byte characters
  4021. // but RtAudio is currently written to return an std::string of
  4022. // one-byte chars for the device name.
  4023. for ( unsigned int i=0; i<wcslen( name ); i++ )
  4024. s.push_back( name[i] );
  4025. #else
  4026. s.append( std::string( name ) );
  4027. #endif
  4028. return s;
  4029. }
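// Enumeration callback (presumably handed to DirectSoundEnumerate /
// DirectSoundCaptureEnumerate elsewhere in this file). It counts usable
// devices and, when info->findIndex is set, records the GUID and name of the
// device whose enumeration index matches info->index; returning FALSE stops
// the enumeration early.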
  4030. static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
  4031. LPCTSTR description,
  4032. LPCTSTR module,
  4033. LPVOID lpContext )
  4034. {
  4035. EnumInfo *info = (EnumInfo *) lpContext;
  4036. HRESULT hr;
  4037. if ( info->isInput == true ) {
  4038. DSCCAPS caps;
  4039. LPDIRECTSOUNDCAPTURE object;
  4040. hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
  4041. if ( hr != DS_OK ) return TRUE;
  4042. caps.dwSize = sizeof(caps);
  4043. hr = object->GetCaps( &caps );
  4044. if ( hr == DS_OK ) {
  4045. if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
  4046. info->counter++;
  4047. }
  4048. object->Release();
  4049. }
  4050. else {
  4051. DSCAPS caps;
  4052. LPDIRECTSOUND object;
  4053. hr = DirectSoundCreate( lpguid, &object, NULL );
  4054. if ( hr != DS_OK ) return TRUE;
  4055. caps.dwSize = sizeof(caps);
  4056. hr = object->GetCaps( &caps );
  4057. if ( hr == DS_OK ) {
  4058. if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
  4059. info->counter++;
  4060. }
  4061. object->Release();
  4062. }
  4063. if ( info->getDefault && lpguid == NULL ) return FALSE;
  4064. if ( info->findIndex && info->counter > info->index ) {
  4065. info->id = lpguid;
  4066. info->name = convertTChar( description );
  4067. return FALSE;
  4068. }
  4069. return TRUE;
  4070. }
  4071. static char* getErrorString( int code )
  4072. {
  4073. switch ( code ) {
  4074. case DSERR_ALLOCATED:
  4075. return "Already allocated";
  4076. case DSERR_CONTROLUNAVAIL:
  4077. return "Control unavailable";
  4078. case DSERR_INVALIDPARAM:
  4079. return "Invalid parameter";
  4080. case DSERR_INVALIDCALL:
  4081. return "Invalid call";
  4082. case DSERR_GENERIC:
  4083. return "Generic error";
  4084. case DSERR_PRIOLEVELNEEDED:
  4085. return "Priority level needed";
  4086. case DSERR_OUTOFMEMORY:
  4087. return "Out of memory";
  4088. case DSERR_BADFORMAT:
  4089. return "The sample rate or the channel format is not supported";
  4090. case DSERR_UNSUPPORTED:
  4091. return "Not supported";
  4092. case DSERR_NODRIVER:
  4093. return "No driver";
  4094. case DSERR_ALREADYINITIALIZED:
  4095. return "Already initialized";
  4096. case DSERR_NOAGGREGATION:
  4097. return "No aggregation";
  4098. case DSERR_BUFFERLOST:
  4099. return "Buffer lost";
  4100. case DSERR_OTHERAPPHASPRIO:
  4101. return "Another application already has priority";
  4102. case DSERR_UNINITIALIZED:
  4103. return "Uninitialized";
  4104. default:
  4105. return "DirectSound unknown error";
  4106. }
  4107. }
  4108. //******************** End of __WINDOWS_DS__ *********************//
  4109. #endif
  4110. #if defined(__LINUX_ALSA__)
  4111. #include <alsa/asoundlib.h>
  4112. #include <unistd.h>
  4113. // A structure to hold various information related to the ALSA API
  4114. // implementation.
  4115. struct AlsaHandle {
  4116. snd_pcm_t *handles[2];
  4117. bool synchronized;
  4118. bool xrun[2];
  4119. AlsaHandle()
  4120. :synchronized(false) { xrun[0] = false; xrun[1] = false; }
  4121. };
  4122. extern "C" void *alsaCallbackHandler( void * ptr );
  4123. RtApiAlsa :: RtApiAlsa()
  4124. {
  4125. // Nothing to do here.
  4126. }
  4127. RtApiAlsa :: ~RtApiAlsa()
  4128. {
  4129. if ( stream_.state != STREAM_CLOSED ) closeStream();
  4130. }
  4131. unsigned int RtApiAlsa :: getDeviceCount( void )
  4132. {
  4133. unsigned nDevices = 0;
  4134. int result, subdevice, card;
  4135. char name[64];
  4136. snd_ctl_t *handle;
  4137. // Count cards and devices
  4138. card = -1;
  4139. snd_card_next( &card );
  4140. while ( card >= 0 ) {
  4141. sprintf( name, "hw:%d", card );
  4142. result = snd_ctl_open( &handle, name, 0 );
  4143. if ( result < 0 ) {
  4144. errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
  4145. errorText_ = errorStream_.str();
  4146. error( RtError::WARNING );
  4147. goto nextcard;
  4148. }
  4149. subdevice = -1;
  4150. while( 1 ) {
  4151. result = snd_ctl_pcm_next_device( handle, &subdevice );
  4152. if ( result < 0 ) {
  4153. errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
  4154. errorText_ = errorStream_.str();
  4155. error( RtError::WARNING );
  4156. break;
  4157. }
  4158. if ( subdevice < 0 )
  4159. break;
  4160. nDevices++;
  4161. }
  4162. nextcard:
  4163. snd_ctl_close( handle );
  4164. snd_card_next( &card );
  4165. }
  4166. return nDevices;
  4167. }
  4168. RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
  4169. {
  4170. RtAudio::DeviceInfo info;
  4171. info.probed = false;
  4172. unsigned nDevices = 0;
  4173. int result, subdevice, card;
  4174. char name[64];
  4175. snd_ctl_t *chandle;
  4176. // Count cards and devices
  4177. card = -1;
  4178. snd_card_next( &card );
  4179. while ( card >= 0 ) {
  4180. sprintf( name, "hw:%d", card );
  4181. result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
  4182. if ( result < 0 ) {
  4183. errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
  4184. errorText_ = errorStream_.str();
  4185. error( RtError::WARNING );
  4186. goto nextcard;
  4187. }
  4188. subdevice = -1;
  4189. while( 1 ) {
  4190. result = snd_ctl_pcm_next_device( chandle, &subdevice );
  4191. if ( result < 0 ) {
  4192. errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
  4193. errorText_ = errorStream_.str();
  4194. error( RtError::WARNING );
  4195. break;
  4196. }
  4197. if ( subdevice < 0 ) break;
  4198. if ( nDevices == device ) {
  4199. sprintf( name, "hw:%d,%d", card, subdevice );
  4200. goto foundDevice;
  4201. }
  4202. nDevices++;
  4203. }
  4204. nextcard:
  4205. snd_ctl_close( chandle );
  4206. snd_card_next( &card );
  4207. }
  4208. if ( nDevices == 0 ) {
  4209. errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
  4210. error( RtError::INVALID_USE );
  4211. }
  4212. if ( device >= nDevices ) {
  4213. errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
  4214. error( RtError::INVALID_USE );
  4215. }
  4216. foundDevice:
  4217. int openMode = SND_PCM_ASYNC;
  4218. snd_pcm_stream_t stream;
  4219. snd_pcm_info_t *pcminfo;
  4220. snd_pcm_info_alloca( &pcminfo );
  4221. snd_pcm_t *phandle;
  4222. snd_pcm_hw_params_t *params;
  4223. snd_pcm_hw_params_alloca( &params );
  4224. // First try for playback
  4225. stream = SND_PCM_STREAM_PLAYBACK;
  4226. snd_pcm_info_set_device( pcminfo, subdevice );
  4227. snd_pcm_info_set_subdevice( pcminfo, 0 );
  4228. snd_pcm_info_set_stream( pcminfo, stream );
  4229. result = snd_ctl_pcm_info( chandle, pcminfo );
  4230. if ( result < 0 ) {
  4231. // Device probably doesn't support playback.
  4232. goto captureProbe;
  4233. }
  4234. result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
  4235. if ( result < 0 ) {
  4236. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
  4237. errorText_ = errorStream_.str();
  4238. error( RtError::WARNING );
  4239. goto captureProbe;
  4240. }
  4241. // The device is open ... fill the parameter structure.
  4242. result = snd_pcm_hw_params_any( phandle, params );
  4243. if ( result < 0 ) {
  4244. snd_pcm_close( phandle );
  4245. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
  4246. errorText_ = errorStream_.str();
  4247. error( RtError::WARNING );
  4248. goto captureProbe;
  4249. }
  4250. // Get output channel information.
  4251. unsigned int value;
  4252. result = snd_pcm_hw_params_get_channels_max( params, &value );
  4253. if ( result < 0 ) {
  4254. snd_pcm_close( phandle );
  4255. errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
  4256. errorText_ = errorStream_.str();
  4257. error( RtError::WARNING );
  4258. goto captureProbe;
  4259. }
  4260. info.outputChannels = value;
  4261. snd_pcm_close( phandle );
  4262. captureProbe:
  4263. // Now try for capture
  4264. stream = SND_PCM_STREAM_CAPTURE;
  4265. snd_pcm_info_set_stream( pcminfo, stream );
  4266. result = snd_ctl_pcm_info( chandle, pcminfo );
  4267. snd_ctl_close( chandle );
  4268. if ( result < 0 ) {
  4269. // Device probably doesn't support capture.
  4270. if ( info.outputChannels == 0 ) return info;
  4271. goto probeParameters;
  4272. }
  4273. result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
  4274. if ( result < 0 ) {
  4275. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
  4276. errorText_ = errorStream_.str();
  4277. error( RtError::WARNING );
  4278. if ( info.outputChannels == 0 ) return info;
  4279. goto probeParameters;
  4280. }
  4281. // The device is open ... fill the parameter structure.
  4282. result = snd_pcm_hw_params_any( phandle, params );
  4283. if ( result < 0 ) {
  4284. snd_pcm_close( phandle );
  4285. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
  4286. errorText_ = errorStream_.str();
  4287. error( RtError::WARNING );
  4288. if ( info.outputChannels == 0 ) return info;
  4289. goto probeParameters;
  4290. }
  4291. result = snd_pcm_hw_params_get_channels_max( params, &value );
  4292. if ( result < 0 ) {
  4293. snd_pcm_close( phandle );
  4294. errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
  4295. errorText_ = errorStream_.str();
  4296. error( RtError::WARNING );
  4297. if ( info.outputChannels == 0 ) return info;
  4298. goto probeParameters;
  4299. }
  4300. info.inputChannels = value;
  4301. snd_pcm_close( phandle );
4302. // If the device opens for both playback and capture, determine the duplex channel count.
  4303. if ( info.outputChannels > 0 && info.inputChannels > 0 )
  4304. info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  4305. // ALSA doesn't provide default devices so we'll use the first available one.
  4306. if ( device == 0 && info.outputChannels > 0 )
  4307. info.isDefaultOutput = true;
  4308. if ( device == 0 && info.inputChannels > 0 )
  4309. info.isDefaultInput = true;
  4310. probeParameters:
  4311. // At this point, we just need to figure out the supported data
  4312. // formats and sample rates. We'll proceed by opening the device in
  4313. // the direction with the maximum number of channels, or playback if
  4314. // they are equal. This might limit our sample rate options, but so
  4315. // be it.
  4316. if ( info.outputChannels >= info.inputChannels )
  4317. stream = SND_PCM_STREAM_PLAYBACK;
  4318. else
  4319. stream = SND_PCM_STREAM_CAPTURE;
  4320. snd_pcm_info_set_stream( pcminfo, stream );
  4321. result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
  4322. if ( result < 0 ) {
  4323. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
  4324. errorText_ = errorStream_.str();
  4325. error( RtError::WARNING );
  4326. return info;
  4327. }
  4328. // The device is open ... fill the parameter structure.
  4329. result = snd_pcm_hw_params_any( phandle, params );
  4330. if ( result < 0 ) {
  4331. snd_pcm_close( phandle );
  4332. errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
  4333. errorText_ = errorStream_.str();
  4334. error( RtError::WARNING );
  4335. return info;
  4336. }
  4337. // Test our discrete set of sample rate values.
  4338. info.sampleRates.clear();
  4339. for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
  4340. if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
  4341. info.sampleRates.push_back( SAMPLE_RATES[i] );
  4342. }
  4343. if ( info.sampleRates.size() == 0 ) {
  4344. snd_pcm_close( phandle );
  4345. errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
  4346. errorText_ = errorStream_.str();
  4347. error( RtError::WARNING );
  4348. return info;
  4349. }
  4350. // Probe the supported data formats ... we don't care about endian-ness just yet
  4351. snd_pcm_format_t format;
  4352. info.nativeFormats = 0;
  4353. format = SND_PCM_FORMAT_S8;
  4354. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4355. info.nativeFormats |= RTAUDIO_SINT8;
  4356. format = SND_PCM_FORMAT_S16;
  4357. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4358. info.nativeFormats |= RTAUDIO_SINT16;
  4359. format = SND_PCM_FORMAT_S24;
  4360. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4361. info.nativeFormats |= RTAUDIO_SINT24;
  4362. format = SND_PCM_FORMAT_S32;
  4363. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4364. info.nativeFormats |= RTAUDIO_SINT32;
  4365. format = SND_PCM_FORMAT_FLOAT;
  4366. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4367. info.nativeFormats |= RTAUDIO_FLOAT32;
  4368. format = SND_PCM_FORMAT_FLOAT64;
  4369. if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
  4370. info.nativeFormats |= RTAUDIO_FLOAT64;
  4371. // Check that we have at least one supported format
  4372. if ( info.nativeFormats == 0 ) {
  4373. errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
  4374. errorText_ = errorStream_.str();
  4375. error( RtError::WARNING );
  4376. return info;
  4377. }
  4378. // Get the device name
  4379. char *cardname;
  4380. result = snd_card_get_name( card, &cardname );
  4381. if ( result >= 0 )
  4382. sprintf( name, "hw:%s,%d", cardname, subdevice );
  4383. info.name = name;
  4384. // That's all ... close the device and return
  4385. snd_pcm_close( phandle );
  4386. info.probed = true;
  4387. return info;
  4388. }
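// A minimal usage sketch (not part of the implementation): applications
// normally reach this probing code through the public RtAudio wrapper, e.g.
//
//   RtAudio audio;
//   for ( unsigned int i=0; i<audio.getDeviceCount(); i++ ) {
//     RtAudio::DeviceInfo info = audio.getDeviceInfo( i );
//     if ( info.probed )
//       std::cout << info.name << ": " << info.outputChannels
//                 << " out, " << info.inputChannels << " in" << std::endl;
//   }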
  4389. bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  4390. unsigned int firstChannel, unsigned int sampleRate,
  4391. RtAudioFormat format, unsigned int *bufferSize,
  4392. RtAudio::StreamOptions *options )
  4393. {
  4394. #if defined(__RTAUDIO_DEBUG__)
  4395. snd_output_t *out;
  4396. snd_output_stdio_attach(&out, stderr, 0);
  4397. #endif
  4398. // I'm not using the "plug" interface ... too much inconsistent behavior.
  4399. unsigned nDevices = 0;
  4400. int result, subdevice, card;
  4401. char name[64];
  4402. snd_ctl_t *chandle;
  4403. // Count cards and devices
  4404. card = -1;
  4405. snd_card_next( &card );
  4406. while ( card >= 0 ) {
  4407. sprintf( name, "hw:%d", card );
  4408. result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
  4409. if ( result < 0 ) {
  4410. errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
  4411. errorText_ = errorStream_.str();
  4412. return FAILURE;
  4413. }
  4414. subdevice = -1;
  4415. while( 1 ) {
  4416. result = snd_ctl_pcm_next_device( chandle, &subdevice );
  4417. if ( result < 0 ) break;
  4418. if ( subdevice < 0 ) break;
  4419. if ( nDevices == device ) {
  4420. sprintf( name, "hw:%d,%d", card, subdevice );
  4421. goto foundDevice;
  4422. }
  4423. nDevices++;
  4424. }
  4425. snd_ctl_close( chandle );
  4426. snd_card_next( &card );
  4427. }
  4428. if ( nDevices == 0 ) {
  4429. // This should not happen because a check is made before this function is called.
  4430. errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
  4431. return FAILURE;
  4432. }
  4433. if ( device >= nDevices ) {
  4434. // This should not happen because a check is made before this function is called.
  4435. errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
  4436. return FAILURE;
  4437. }
  4438. foundDevice:
  4439. snd_pcm_stream_t stream;
  4440. if ( mode == OUTPUT )
  4441. stream = SND_PCM_STREAM_PLAYBACK;
  4442. else
  4443. stream = SND_PCM_STREAM_CAPTURE;
  4444. snd_pcm_t *phandle;
  4445. int openMode = SND_PCM_ASYNC;
  4446. result = snd_pcm_open( &phandle, name, stream, openMode );
  4447. if ( result < 0 ) {
  4448. if ( mode == OUTPUT )
  4449. errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
  4450. else
  4451. errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
  4452. errorText_ = errorStream_.str();
  4453. return FAILURE;
  4454. }
  4455. // Fill the parameter structure.
  4456. snd_pcm_hw_params_t *hw_params;
  4457. snd_pcm_hw_params_alloca( &hw_params );
  4458. result = snd_pcm_hw_params_any( phandle, hw_params );
  4459. if ( result < 0 ) {
  4460. snd_pcm_close( phandle );
  4461. errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
  4462. errorText_ = errorStream_.str();
  4463. return FAILURE;
  4464. }
  4465. #if defined(__RTAUDIO_DEBUG__)
  4466. fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
  4467. snd_pcm_hw_params_dump( hw_params, out );
  4468. #endif
  4469. // Set access ... check user preference.
  4470. if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
  4471. stream_.userInterleaved = false;
  4472. result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
  4473. if ( result < 0 ) {
  4474. result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
  4475. stream_.deviceInterleaved[mode] = true;
  4476. }
  4477. else
  4478. stream_.deviceInterleaved[mode] = false;
  4479. }
  4480. else {
  4481. stream_.userInterleaved = true;
  4482. result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
  4483. if ( result < 0 ) {
  4484. result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
  4485. stream_.deviceInterleaved[mode] = false;
  4486. }
  4487. else
  4488. stream_.deviceInterleaved[mode] = true;
  4489. }
  4490. if ( result < 0 ) {
  4491. snd_pcm_close( phandle );
  4492. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
  4493. errorText_ = errorStream_.str();
  4494. return FAILURE;
  4495. }
  4496. // Determine how to set the device format.
  4497. stream_.userFormat = format;
  4498. snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
  4499. if ( format == RTAUDIO_SINT8 )
  4500. deviceFormat = SND_PCM_FORMAT_S8;
  4501. else if ( format == RTAUDIO_SINT16 )
  4502. deviceFormat = SND_PCM_FORMAT_S16;
  4503. else if ( format == RTAUDIO_SINT24 )
  4504. deviceFormat = SND_PCM_FORMAT_S24;
  4505. else if ( format == RTAUDIO_SINT32 )
  4506. deviceFormat = SND_PCM_FORMAT_S32;
  4507. else if ( format == RTAUDIO_FLOAT32 )
  4508. deviceFormat = SND_PCM_FORMAT_FLOAT;
  4509. else if ( format == RTAUDIO_FLOAT64 )
  4510. deviceFormat = SND_PCM_FORMAT_FLOAT64;
  4511. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
  4512. stream_.deviceFormat[mode] = format;
  4513. goto setFormat;
  4514. }
  4515. // The user requested format is not natively supported by the device.
  4516. deviceFormat = SND_PCM_FORMAT_FLOAT64;
  4517. if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
  4518. stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
  4519. goto setFormat;
  4520. }
  4521. deviceFormat = SND_PCM_FORMAT_FLOAT;
  4522. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
  4523. stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
  4524. goto setFormat;
  4525. }
  4526. deviceFormat = SND_PCM_FORMAT_S32;
  4527. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
  4528. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  4529. goto setFormat;
  4530. }
  4531. deviceFormat = SND_PCM_FORMAT_S24;
  4532. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
  4533. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  4534. goto setFormat;
  4535. }
  4536. deviceFormat = SND_PCM_FORMAT_S16;
  4537. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
  4538. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  4539. goto setFormat;
  4540. }
  4541. deviceFormat = SND_PCM_FORMAT_S8;
  4542. if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
  4543. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  4544. goto setFormat;
  4545. }
  4546. // If we get here, no supported format was found.
  4547. errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
  4548. errorText_ = errorStream_.str();
  4549. return FAILURE;
  4550. setFormat:
  4551. result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
  4552. if ( result < 0 ) {
  4553. snd_pcm_close( phandle );
  4554. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
  4555. errorText_ = errorStream_.str();
  4556. return FAILURE;
  4557. }
4558. // Determine whether byte-swapping is necessary.
  4559. stream_.doByteSwap[mode] = false;
  4560. if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
  4561. result = snd_pcm_format_cpu_endian( deviceFormat );
  4562. if ( result == 0 )
  4563. stream_.doByteSwap[mode] = true;
  4564. else if (result < 0) {
  4565. snd_pcm_close( phandle );
  4566. errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
  4567. errorText_ = errorStream_.str();
  4568. return FAILURE;
  4569. }
  4570. }
  4571. // Set the sample rate.
  4572. result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
  4573. if ( result < 0 ) {
  4574. snd_pcm_close( phandle );
  4575. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
  4576. errorText_ = errorStream_.str();
  4577. return FAILURE;
  4578. }
4579. // Determine the number of channels for this device. We support a possible
4580. // minimum device channel count greater than the value requested by the user.
  4581. stream_.nUserChannels[mode] = channels;
  4582. unsigned int value;
  4583. result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
  4584. unsigned int deviceChannels = value;
  4585. if ( result < 0 || deviceChannels < channels + firstChannel ) {
  4586. snd_pcm_close( phandle );
  4587. errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
  4588. errorText_ = errorStream_.str();
  4589. return FAILURE;
  4590. }
  4591. result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
  4592. if ( result < 0 ) {
  4593. snd_pcm_close( phandle );
  4594. errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
  4595. errorText_ = errorStream_.str();
  4596. return FAILURE;
  4597. }
  4598. deviceChannels = value;
  4599. if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
  4600. stream_.nDeviceChannels[mode] = deviceChannels;
  4601. // Set the device channels.
  4602. result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
  4603. if ( result < 0 ) {
  4604. snd_pcm_close( phandle );
  4605. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
  4606. errorText_ = errorStream_.str();
  4607. return FAILURE;
  4608. }
4609. // Set the number of buffers, which in ALSA are referred to as "periods".
  4610. int dir;
  4611. unsigned int periods = 0;
  4612. if ( options ) periods = options->numberOfBuffers;
  4613. if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
  4614. // Even though the hardware might allow 1 buffer, it won't work reliably.
  4615. if ( periods < 2 ) periods = 2;
  4616. result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
  4617. if ( result < 0 ) {
  4618. snd_pcm_close( phandle );
  4619. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
  4620. errorText_ = errorStream_.str();
  4621. return FAILURE;
  4622. }
  4623. // Set the buffer (or period) size.
  4624. snd_pcm_uframes_t periodSize = *bufferSize;
  4625. result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
  4626. if ( result < 0 ) {
  4627. snd_pcm_close( phandle );
  4628. errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
  4629. errorText_ = errorStream_.str();
  4630. return FAILURE;
  4631. }
  4632. *bufferSize = periodSize;
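// At this point the total ALSA ring buffer is roughly periods * periodSize
// frames, i.e. about (periods * periodSize) / sampleRate seconds of buffering
// in this direction (the driver may have adjusted both values).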
  4633. // If attempting to setup a duplex stream, the bufferSize parameter
  4634. // MUST be the same in both directions!
  4635. if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
  4636. errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
  4637. errorText_ = errorStream_.str();
  4638. return FAILURE;
  4639. }
  4640. stream_.bufferSize = *bufferSize;
  4641. // Install the hardware configuration
  4642. result = snd_pcm_hw_params( phandle, hw_params );
  4643. if ( result < 0 ) {
  4644. snd_pcm_close( phandle );
  4645. errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
  4646. errorText_ = errorStream_.str();
  4647. return FAILURE;
  4648. }
  4649. #if defined(__RTAUDIO_DEBUG__)
  4650. fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
  4651. snd_pcm_hw_params_dump( hw_params, out );
  4652. #endif
  4653. // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
  4654. snd_pcm_sw_params_t *sw_params = NULL;
  4655. snd_pcm_sw_params_alloca( &sw_params );
  4656. snd_pcm_sw_params_current( phandle, sw_params );
  4657. snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
  4658. snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, 0x7fffffff );
  4659. snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
  4660. snd_pcm_sw_params_set_silence_size( phandle, sw_params, INT_MAX );
  4661. result = snd_pcm_sw_params( phandle, sw_params );
  4662. if ( result < 0 ) {
  4663. snd_pcm_close( phandle );
  4664. errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
  4665. errorText_ = errorStream_.str();
  4666. return FAILURE;
  4667. }
  4668. #if defined(__RTAUDIO_DEBUG__)
  4669. fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
  4670. snd_pcm_sw_params_dump( sw_params, out );
  4671. #endif
  4672. // Set flags for buffer conversion
  4673. stream_.doConvertBuffer[mode] = false;
  4674. if ( stream_.userFormat != stream_.deviceFormat[mode] )
  4675. stream_.doConvertBuffer[mode] = true;
  4676. if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
  4677. stream_.doConvertBuffer[mode] = true;
  4678. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  4679. stream_.nUserChannels[mode] > 1 )
  4680. stream_.doConvertBuffer[mode] = true;
  4681. // Allocate the ApiHandle if necessary and then save.
  4682. AlsaHandle *apiInfo = 0;
  4683. if ( stream_.apiHandle == 0 ) {
  4684. try {
  4685. apiInfo = (AlsaHandle *) new AlsaHandle;
  4686. }
  4687. catch ( std::bad_alloc& ) {
  4688. errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
  4689. goto error;
  4690. }
  4691. stream_.apiHandle = (void *) apiInfo;
  4692. apiInfo->handles[0] = 0;
  4693. apiInfo->handles[1] = 0;
  4694. }
  4695. else {
  4696. apiInfo = (AlsaHandle *) stream_.apiHandle;
  4697. }
  4698. apiInfo->handles[mode] = phandle;
  4699. // Allocate necessary internal buffers.
  4700. unsigned long bufferBytes;
  4701. bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  4702. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  4703. if ( stream_.userBuffer[mode] == NULL ) {
  4704. errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
  4705. goto error;
  4706. }
  4707. if ( stream_.doConvertBuffer[mode] ) {
  4708. bool makeBuffer = true;
  4709. bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
  4710. if ( mode == INPUT ) {
  4711. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  4712. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  4713. if ( bufferBytes <= bytesOut ) makeBuffer = false;
  4714. }
  4715. }
  4716. if ( makeBuffer ) {
  4717. bufferBytes *= *bufferSize;
  4718. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  4719. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  4720. if ( stream_.deviceBuffer == NULL ) {
  4721. errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
  4722. goto error;
  4723. }
  4724. }
  4725. }
  4726. stream_.sampleRate = sampleRate;
  4727. stream_.nBuffers = periods;
  4728. stream_.device[mode] = device;
  4729. stream_.state = STREAM_STOPPED;
  4730. // Setup the buffer conversion information structure.
  4731. if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
  4732. // Setup thread if necessary.
  4733. if ( stream_.mode == OUTPUT && mode == INPUT ) {
  4734. // We had already set up an output stream.
  4735. stream_.mode = DUPLEX;
  4736. // Link the streams if possible.
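// Linked pcm handles start and stop together, which is why the capture handle
// is not separately prepared or dropped elsewhere when apiInfo->synchronized
// is true.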
  4737. apiInfo->synchronized = false;
  4738. if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
  4739. apiInfo->synchronized = true;
  4740. else {
  4741. errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
  4742. error( RtError::WARNING );
  4743. }
  4744. }
  4745. else {
  4746. stream_.mode = mode;
  4747. // Setup callback thread.
  4748. stream_.callbackInfo.object = (void *) this;
  4749. // Set the thread attributes for joinable and realtime scheduling
4750. // priority. The higher priority will only take effect if the
  4751. // program is run as root or suid.
  4752. pthread_attr_t attr;
  4753. pthread_attr_init( &attr );
  4754. pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
  4755. #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
  4756. pthread_attr_setschedpolicy( &attr, SCHED_RR );
  4757. #else
  4758. pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
  4759. #endif
  4760. stream_.callbackInfo.isRunning = true;
  4761. result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
  4762. pthread_attr_destroy( &attr );
  4763. if ( result ) {
  4764. stream_.callbackInfo.isRunning = false;
4765. errorText_ = "RtApiAlsa::probeDeviceOpen: error creating callback thread!";
  4766. goto error;
  4767. }
  4768. }
  4769. return SUCCESS;
  4770. error:
  4771. if ( apiInfo ) {
  4772. if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
  4773. if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
  4774. delete apiInfo;
  4775. stream_.apiHandle = 0;
  4776. }
  4777. for ( int i=0; i<2; i++ ) {
  4778. if ( stream_.userBuffer[i] ) {
  4779. free( stream_.userBuffer[i] );
  4780. stream_.userBuffer[i] = 0;
  4781. }
  4782. }
  4783. if ( stream_.deviceBuffer ) {
  4784. free( stream_.deviceBuffer );
  4785. stream_.deviceBuffer = 0;
  4786. }
  4787. return FAILURE;
  4788. }
  4789. void RtApiAlsa :: closeStream()
  4790. {
  4791. if ( stream_.state == STREAM_CLOSED ) {
  4792. errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
  4793. error( RtError::WARNING );
  4794. return;
  4795. }
  4796. stream_.callbackInfo.isRunning = false;
  4797. pthread_join( stream_.callbackInfo.thread, NULL );
  4798. AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
  4799. if ( stream_.state == STREAM_RUNNING ) {
  4800. stream_.state = STREAM_STOPPED;
  4801. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
  4802. snd_pcm_drop( apiInfo->handles[0] );
  4803. if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
  4804. snd_pcm_drop( apiInfo->handles[1] );
  4805. }
  4806. if ( apiInfo ) {
  4807. if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
  4808. if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
  4809. delete apiInfo;
  4810. stream_.apiHandle = 0;
  4811. }
  4812. for ( int i=0; i<2; i++ ) {
  4813. if ( stream_.userBuffer[i] ) {
  4814. free( stream_.userBuffer[i] );
  4815. stream_.userBuffer[i] = 0;
  4816. }
  4817. }
  4818. if ( stream_.deviceBuffer ) {
  4819. free( stream_.deviceBuffer );
  4820. stream_.deviceBuffer = 0;
  4821. }
  4822. stream_.mode = UNINITIALIZED;
  4823. stream_.state = STREAM_CLOSED;
  4824. }
  4825. void RtApiAlsa :: startStream()
  4826. {
4827. // This method calls snd_pcm_prepare if the device isn't already in the PREPARED state.
  4828. verifyStream();
  4829. if ( stream_.state == STREAM_RUNNING ) {
  4830. errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
  4831. error( RtError::WARNING );
  4832. return;
  4833. }
  4834. MUTEX_LOCK( &stream_.mutex );
  4835. int result = 0;
  4836. snd_pcm_state_t state;
  4837. AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
  4838. snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
  4839. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  4840. state = snd_pcm_state( handle[0] );
  4841. if ( state != SND_PCM_STATE_PREPARED ) {
  4842. result = snd_pcm_prepare( handle[0] );
  4843. if ( result < 0 ) {
  4844. errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
  4845. errorText_ = errorStream_.str();
  4846. goto unlock;
  4847. }
  4848. }
  4849. }
  4850. if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
  4851. state = snd_pcm_state( handle[1] );
  4852. if ( state != SND_PCM_STATE_PREPARED ) {
  4853. result = snd_pcm_prepare( handle[1] );
  4854. if ( result < 0 ) {
  4855. errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
  4856. errorText_ = errorStream_.str();
  4857. goto unlock;
  4858. }
  4859. }
  4860. }
  4861. stream_.state = STREAM_RUNNING;
  4862. unlock:
  4863. MUTEX_UNLOCK( &stream_.mutex );
  4864. if ( result >= 0 ) return;
  4865. error( RtError::SYSTEM_ERROR );
  4866. }
  4867. void RtApiAlsa :: stopStream()
  4868. {
  4869. verifyStream();
  4870. if ( stream_.state == STREAM_STOPPED ) {
  4871. errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
  4872. error( RtError::WARNING );
  4873. return;
  4874. }
  4875. // Change the state before the lock to improve shutdown response
  4876. // when using a callback.
  4877. stream_.state = STREAM_STOPPED;
  4878. MUTEX_LOCK( &stream_.mutex );
  4879. int result = 0;
  4880. AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
  4881. snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
  4882. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  4883. if ( apiInfo->synchronized )
  4884. result = snd_pcm_drop( handle[0] );
  4885. else
  4886. result = snd_pcm_drain( handle[0] );
  4887. if ( result < 0 ) {
  4888. errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
  4889. errorText_ = errorStream_.str();
  4890. goto unlock;
  4891. }
  4892. }
  4893. if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
  4894. result = snd_pcm_drop( handle[1] );
  4895. if ( result < 0 ) {
  4896. errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
  4897. errorText_ = errorStream_.str();
  4898. goto unlock;
  4899. }
  4900. }
  4901. unlock:
  4902. MUTEX_UNLOCK( &stream_.mutex );
  4903. if ( result >= 0 ) return;
  4904. error( RtError::SYSTEM_ERROR );
  4905. }
  4906. void RtApiAlsa :: abortStream()
  4907. {
  4908. verifyStream();
  4909. if ( stream_.state == STREAM_STOPPED ) {
  4910. errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
  4911. error( RtError::WARNING );
  4912. return;
  4913. }
  4914. // Change the state before the lock to improve shutdown response
  4915. // when using a callback.
  4916. stream_.state = STREAM_STOPPED;
  4917. MUTEX_LOCK( &stream_.mutex );
  4918. int result = 0;
  4919. AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
  4920. snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
  4921. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  4922. result = snd_pcm_drop( handle[0] );
  4923. if ( result < 0 ) {
  4924. errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
  4925. errorText_ = errorStream_.str();
  4926. goto unlock;
  4927. }
  4928. }
  4929. if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
  4930. result = snd_pcm_drop( handle[1] );
  4931. if ( result < 0 ) {
  4932. errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
  4933. errorText_ = errorStream_.str();
  4934. goto unlock;
  4935. }
  4936. }
  4937. unlock:
  4938. MUTEX_UNLOCK( &stream_.mutex );
  4939. stream_.state = STREAM_STOPPED;
  4940. if ( result >= 0 ) return;
  4941. error( RtError::SYSTEM_ERROR );
  4942. }
  4943. void RtApiAlsa :: callbackEvent()
  4944. {
  4945. if ( stream_.state == STREAM_STOPPED ) {
  4946. if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
  4947. return;
  4948. }
  4949. if ( stream_.state == STREAM_CLOSED ) {
  4950. errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
  4951. error( RtError::WARNING );
  4952. return;
  4953. }
  4954. int doStopStream = 0;
  4955. AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
  4956. RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
  4957. double streamTime = getStreamTime();
  4958. RtAudioStreamStatus status = 0;
  4959. if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
  4960. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  4961. apiInfo->xrun[0] = false;
  4962. }
  4963. if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
  4964. status |= RTAUDIO_INPUT_OVERFLOW;
  4965. apiInfo->xrun[1] = false;
  4966. }
  4967. doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  4968. stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
  4969. MUTEX_LOCK( &stream_.mutex );
  4970. // The state might change while waiting on a mutex.
  4971. if ( stream_.state == STREAM_STOPPED ) goto unlock;
  4972. int result;
  4973. char *buffer;
  4974. int channels;
  4975. snd_pcm_t **handle;
  4976. snd_pcm_sframes_t frames;
  4977. RtAudioFormat format;
  4978. handle = (snd_pcm_t **) apiInfo->handles;
  4979. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  4980. // Setup parameters.
  4981. if ( stream_.doConvertBuffer[1] ) {
  4982. buffer = stream_.deviceBuffer;
  4983. channels = stream_.nDeviceChannels[1];
  4984. format = stream_.deviceFormat[1];
  4985. }
  4986. else {
  4987. buffer = stream_.userBuffer[1];
  4988. channels = stream_.nUserChannels[1];
  4989. format = stream_.userFormat;
  4990. }
  4991. // Read samples from device in interleaved/non-interleaved format.
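// In the non-interleaved case the buffer holds each channel as a contiguous
// block of bufferSize frames, so channel i starts at
// buffer + i * stream_.bufferSize * formatBytes( format ).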
  4992. if ( stream_.deviceInterleaved[1] )
  4993. result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
  4994. else {
  4995. void *bufs[channels];
  4996. size_t offset = stream_.bufferSize * formatBytes( format );
  4997. for ( int i=0; i<channels; i++ )
  4998. bufs[i] = (void *) (buffer + (i * offset));
  4999. result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
  5000. }
  5001. if ( result < (int) stream_.bufferSize ) {
5002. // Either an error or overrun occurred.
  5003. if ( result == -EPIPE ) {
  5004. snd_pcm_state_t state = snd_pcm_state( handle[1] );
  5005. if ( state == SND_PCM_STATE_XRUN ) {
  5006. apiInfo->xrun[1] = true;
  5007. result = snd_pcm_prepare( handle[1] );
  5008. if ( result < 0 ) {
  5009. errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
  5010. errorText_ = errorStream_.str();
  5011. }
  5012. }
  5013. else {
  5014. errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
  5015. errorText_ = errorStream_.str();
  5016. }
  5017. }
  5018. else {
  5019. errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
  5020. errorText_ = errorStream_.str();
  5021. }
  5022. error( RtError::WARNING );
  5023. goto unlock;
  5024. }
  5025. // Do byte swapping if necessary.
  5026. if ( stream_.doByteSwap[1] )
  5027. byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
  5028. // Do buffer conversion if necessary.
  5029. if ( stream_.doConvertBuffer[1] )
  5030. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  5031. // Check stream latency
  5032. result = snd_pcm_delay( handle[1], &frames );
  5033. if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
  5034. }
  5035. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  5036. // Setup parameters and do buffer conversion if necessary.
  5037. if ( stream_.doConvertBuffer[0] ) {
  5038. buffer = stream_.deviceBuffer;
  5039. convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  5040. channels = stream_.nDeviceChannels[0];
  5041. format = stream_.deviceFormat[0];
  5042. }
  5043. else {
  5044. buffer = stream_.userBuffer[0];
  5045. channels = stream_.nUserChannels[0];
  5046. format = stream_.userFormat;
  5047. }
  5048. // Do byte swapping if necessary.
  5049. if ( stream_.doByteSwap[0] )
  5050. byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
  5051. // Write samples to device in interleaved/non-interleaved format.
  5052. if ( stream_.deviceInterleaved[0] )
  5053. result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
  5054. else {
  5055. void *bufs[channels];
  5056. size_t offset = stream_.bufferSize * formatBytes( format );
  5057. for ( int i=0; i<channels; i++ )
  5058. bufs[i] = (void *) (buffer + (i * offset));
  5059. result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
  5060. }
  5061. if ( result < (int) stream_.bufferSize ) {
5062. // Either an error or underrun occurred.
  5063. if ( result == -EPIPE ) {
  5064. snd_pcm_state_t state = snd_pcm_state( handle[0] );
  5065. if ( state == SND_PCM_STATE_XRUN ) {
  5066. apiInfo->xrun[0] = true;
  5067. result = snd_pcm_prepare( handle[0] );
  5068. if ( result < 0 ) {
  5069. errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
  5070. errorText_ = errorStream_.str();
  5071. }
  5072. }
  5073. else {
  5074. errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
  5075. errorText_ = errorStream_.str();
  5076. }
  5077. }
  5078. else {
  5079. errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
  5080. errorText_ = errorStream_.str();
  5081. }
  5082. error( RtError::WARNING );
  5083. goto unlock;
  5084. }
  5085. // Check stream latency
  5086. result = snd_pcm_delay( handle[0], &frames );
  5087. if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
  5088. }
  5089. unlock:
  5090. MUTEX_UNLOCK( &stream_.mutex );
  5091. RtApi::tickStreamTime();
  5092. if ( doStopStream == 1 ) this->stopStream();
  5093. else if ( doStopStream == 2 ) this->abortStream();
  5094. }
  5095. extern "C" void *alsaCallbackHandler( void *ptr )
  5096. {
  5097. CallbackInfo *info = (CallbackInfo *) ptr;
  5098. RtApiAlsa *object = (RtApiAlsa *) info->object;
  5099. bool *isRunning = &info->isRunning;
  5100. #ifdef SCHED_RR
  5101. // Set a higher scheduler priority (P.J. Leonard)
  5102. struct sched_param param;
  5103. int min = sched_get_priority_min( SCHED_RR );
  5104. int max = sched_get_priority_max( SCHED_RR );
  5105. param.sched_priority = min + ( max - min ) / 2; // Is this the best number?
  5106. sched_setscheduler( 0, SCHED_RR, &param );
  5107. #endif
  5108. while ( *isRunning == true ) {
  5109. pthread_testcancel();
  5110. object->callbackEvent();
  5111. }
  5112. pthread_exit( NULL );
  5113. }
  5114. //******************** End of __LINUX_ALSA__ *********************//
  5115. #endif
  5116. #if defined(__LINUX_OSS__)
  5117. #include <unistd.h>
  5118. #include <sys/ioctl.h>
  5120. #include <fcntl.h>
  5121. #include "soundcard.h"
  5122. #include <errno.h>
  5123. #include <math.h>
  5124. extern "C" void *ossCallbackHandler(void * ptr);
  5125. // A structure to hold various information related to the OSS API
  5126. // implementation.
  5127. struct OssHandle {
  5128. int id[2]; // device ids
  5129. bool xrun[2];
  5130. bool triggered;
  5131. OssHandle()
  5132. :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
  5133. };
  5134. RtApiOss :: RtApiOss()
  5135. {
  5136. // Nothing to do here.
  5137. }
  5138. RtApiOss :: ~RtApiOss()
  5139. {
  5140. if ( stream_.state != STREAM_CLOSED ) closeStream();
  5141. }
  5142. unsigned int RtApiOss :: getDeviceCount( void )
  5143. {
  5144. int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
  5145. if ( mixerfd == -1 ) {
  5146. errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
  5147. error( RtError::WARNING );
  5148. return 0;
  5149. }
  5150. oss_sysinfo sysinfo;
  5151. if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
  5152. close( mixerfd );
  5153. errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
  5154. error( RtError::WARNING );
  5155. return 0;
  5156. }
  5157. return sysinfo.numaudios;
  5158. }
  5159. RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
  5160. {
  5161. RtAudio::DeviceInfo info;
  5162. info.probed = false;
  5163. int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
  5164. if ( mixerfd == -1 ) {
  5165. errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
  5166. error( RtError::WARNING );
  5167. return info;
  5168. }
  5169. oss_sysinfo sysinfo;
  5170. int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
  5171. if ( result == -1 ) {
  5172. close( mixerfd );
  5173. errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
  5174. error( RtError::WARNING );
  5175. return info;
  5176. }
  5177. unsigned nDevices = sysinfo.numaudios;
  5178. if ( nDevices == 0 ) {
  5179. close( mixerfd );
  5180. errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
  5181. error( RtError::INVALID_USE );
  5182. }
  5183. if ( device >= nDevices ) {
  5184. close( mixerfd );
  5185. errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
  5186. error( RtError::INVALID_USE );
  5187. }
  5188. oss_audioinfo ainfo;
  5189. ainfo.dev = device;
  5190. result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
  5191. close( mixerfd );
  5192. if ( result == -1 ) {
  5193. errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
  5194. errorText_ = errorStream_.str();
  5195. error( RtError::WARNING );
  5196. return info;
  5197. }
  5198. // Probe channels
  5199. if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
  5200. if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
  5201. if ( ainfo.caps & PCM_CAP_DUPLEX ) {
5202. if ( info.outputChannels > 0 && info.inputChannels > 0 )
  5203. info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  5204. }
5205. // Probe data formats ... check the device's input format mask (iformats).
  5206. unsigned long mask = ainfo.iformats;
  5207. if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
  5208. info.nativeFormats |= RTAUDIO_SINT16;
  5209. if ( mask & AFMT_S8 )
  5210. info.nativeFormats |= RTAUDIO_SINT8;
  5211. if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
  5212. info.nativeFormats |= RTAUDIO_SINT32;
  5213. if ( mask & AFMT_FLOAT )
  5214. info.nativeFormats |= RTAUDIO_FLOAT32;
  5215. if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
  5216. info.nativeFormats |= RTAUDIO_SINT24;
  5217. // Check that we have at least one supported format
  5218. if ( info.nativeFormats == 0 ) {
  5219. errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
  5220. errorText_ = errorStream_.str();
  5221. error( RtError::WARNING );
  5222. return info;
  5223. }
  5224. // Probe the supported sample rates.
  5225. info.sampleRates.clear();
  5226. if ( ainfo.nrates ) {
  5227. for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
  5228. for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
  5229. if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
  5230. info.sampleRates.push_back( SAMPLE_RATES[k] );
  5231. break;
  5232. }
  5233. }
  5234. }
  5235. }
  5236. else {
5237. // Check the min and max rate values.
  5238. for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
  5239. if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
  5240. info.sampleRates.push_back( SAMPLE_RATES[k] );
  5241. }
  5242. }
  5243. if ( info.sampleRates.size() == 0 ) {
  5244. errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
  5245. errorText_ = errorStream_.str();
  5246. error( RtError::WARNING );
  5247. }
  5248. else {
  5249. info.probed = true;
  5250. info.name = ainfo.name;
  5251. }
  5252. return info;
  5253. }
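// A minimal, illustrative sketch (not part of this file) showing how an
// application might exercise the probing code above through the public
// RtAudio wrapper; the main() scaffolding is example-only.
/*
#include "RtAudio.h"
#include <iostream>

int main()
{
  RtAudio audio;
  unsigned int devices = audio.getDeviceCount();
  for ( unsigned int i=0; i<devices; i++ ) {
    RtAudio::DeviceInfo info = audio.getDeviceInfo( i );
    if ( info.probed )
      std::cout << "Device " << i << ": " << info.name
                << " (" << info.outputChannels << " out, "
                << info.inputChannels << " in)" << std::endl;
  }
  return 0;
}
*/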
  5254. bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
  5255. unsigned int firstChannel, unsigned int sampleRate,
  5256. RtAudioFormat format, unsigned int *bufferSize,
  5257. RtAudio::StreamOptions *options )
  5258. {
  5259. int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
  5260. if ( mixerfd == -1 ) {
  5261. errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
  5262. return FAILURE;
  5263. }
  5264. oss_sysinfo sysinfo;
  5265. int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
  5266. if ( result == -1 ) {
  5267. close( mixerfd );
  5268. errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
  5269. return FAILURE;
  5270. }
  5271. unsigned nDevices = sysinfo.numaudios;
  5272. if ( nDevices == 0 ) {
  5273. // This should not happen because a check is made before this function is called.
  5274. close( mixerfd );
  5275. errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
  5276. return FAILURE;
  5277. }
  5278. if ( device >= nDevices ) {
  5279. // This should not happen because a check is made before this function is called.
  5280. close( mixerfd );
  5281. errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
  5282. return FAILURE;
  5283. }
  5284. oss_audioinfo ainfo;
  5285. ainfo.dev = device;
  5286. result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
  5287. close( mixerfd );
  5288. if ( result == -1 ) {
  5289. errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
  5290. errorText_ = errorStream_.str();
  5291. return FAILURE;
  5292. }
  5293. // Check if device supports input or output
  5294. if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
  5295. ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
  5296. if ( mode == OUTPUT )
  5297. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
  5298. else
  5299. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
  5300. errorText_ = errorStream_.str();
  5301. return FAILURE;
  5302. }
  5303. int flags = 0;
  5304. OssHandle *handle = (OssHandle *) stream_.apiHandle;
  5305. if ( mode == OUTPUT )
  5306. flags |= O_WRONLY;
  5307. else { // mode == INPUT
  5308. if (stream_.mode == OUTPUT && stream_.device[0] == device) {
  5309. // We just set the same device for playback ... close and reopen for duplex (OSS only).
  5310. close( handle->id[0] );
  5311. handle->id[0] = 0;
  5312. if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
  5313. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
  5314. errorText_ = errorStream_.str();
  5315. return FAILURE;
  5316. }
5317. // Check that the number of channels previously set is the same.
  5318. if ( stream_.nUserChannels[0] != channels ) {
  5319. errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
  5320. errorText_ = errorStream_.str();
  5321. return FAILURE;
  5322. }
  5323. flags |= O_RDWR;
  5324. }
  5325. else
  5326. flags |= O_RDONLY;
  5327. }
  5328. // Set exclusive access if specified.
  5329. if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
  5330. // Try to open the device.
  5331. int fd;
  5332. fd = open( ainfo.devnode, flags, 0 );
  5333. if ( fd == -1 ) {
  5334. if ( errno == EBUSY )
  5335. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
  5336. else
  5337. errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
  5338. errorText_ = errorStream_.str();
  5339. return FAILURE;
  5340. }
  5341. // For duplex operation, specifically set this mode (this doesn't seem to work).
  5342. /*
  5343. if ( flags | O_RDWR ) {
  5344. result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
  5345. if ( result == -1) {
  5346. errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
  5347. errorText_ = errorStream_.str();
  5348. return FAILURE;
  5349. }
  5350. }
  5351. */
  5352. // Check the device channel support.
  5353. stream_.nUserChannels[mode] = channels;
  5354. if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
  5355. close( fd );
  5356. errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
  5357. errorText_ = errorStream_.str();
  5358. return FAILURE;
  5359. }
  5360. // Set the number of channels.
  5361. int deviceChannels = channels + firstChannel;
  5362. result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
  5363. if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
  5364. close( fd );
  5365. errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
  5366. errorText_ = errorStream_.str();
  5367. return FAILURE;
  5368. }
  5369. stream_.nDeviceChannels[mode] = deviceChannels;
  5370. // Get the data format mask
  5371. int mask;
  5372. result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
  5373. if ( result == -1 ) {
  5374. close( fd );
  5375. errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
  5376. errorText_ = errorStream_.str();
  5377. return FAILURE;
  5378. }
  5379. // Determine how to set the device format.
  5380. stream_.userFormat = format;
  5381. int deviceFormat = -1;
  5382. stream_.doByteSwap[mode] = false;
  5383. if ( format == RTAUDIO_SINT8 ) {
  5384. if ( mask & AFMT_S8 ) {
  5385. deviceFormat = AFMT_S8;
  5386. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  5387. }
  5388. }
  5389. else if ( format == RTAUDIO_SINT16 ) {
  5390. if ( mask & AFMT_S16_NE ) {
  5391. deviceFormat = AFMT_S16_NE;
  5392. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  5393. }
  5394. else if ( mask & AFMT_S16_OE ) {
  5395. deviceFormat = AFMT_S16_OE;
  5396. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  5397. stream_.doByteSwap[mode] = true;
  5398. }
  5399. }
  5400. else if ( format == RTAUDIO_SINT24 ) {
  5401. if ( mask & AFMT_S24_NE ) {
  5402. deviceFormat = AFMT_S24_NE;
  5403. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  5404. }
  5405. else if ( mask & AFMT_S24_OE ) {
  5406. deviceFormat = AFMT_S24_OE;
  5407. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  5408. stream_.doByteSwap[mode] = true;
  5409. }
  5410. }
  5411. else if ( format == RTAUDIO_SINT32 ) {
  5412. if ( mask & AFMT_S32_NE ) {
  5413. deviceFormat = AFMT_S32_NE;
  5414. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  5415. }
  5416. else if ( mask & AFMT_S32_OE ) {
  5417. deviceFormat = AFMT_S32_OE;
  5418. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  5419. stream_.doByteSwap[mode] = true;
  5420. }
  5421. }
  5422. if ( deviceFormat == -1 ) {
  5423. // The user requested format is not natively supported by the device.
  5424. if ( mask & AFMT_S16_NE ) {
  5425. deviceFormat = AFMT_S16_NE;
  5426. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  5427. }
  5428. else if ( mask & AFMT_S32_NE ) {
  5429. deviceFormat = AFMT_S32_NE;
  5430. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  5431. }
  5432. else if ( mask & AFMT_S24_NE ) {
  5433. deviceFormat = AFMT_S24_NE;
  5434. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  5435. }
  5436. else if ( mask & AFMT_S16_OE ) {
  5437. deviceFormat = AFMT_S16_OE;
  5438. stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  5439. stream_.doByteSwap[mode] = true;
  5440. }
  5441. else if ( mask & AFMT_S32_OE ) {
  5442. deviceFormat = AFMT_S32_OE;
  5443. stream_.deviceFormat[mode] = RTAUDIO_SINT32;
  5444. stream_.doByteSwap[mode] = true;
  5445. }
  5446. else if ( mask & AFMT_S24_OE ) {
  5447. deviceFormat = AFMT_S24_OE;
  5448. stream_.deviceFormat[mode] = RTAUDIO_SINT24;
  5449. stream_.doByteSwap[mode] = true;
  5450. }
  5451. else if ( mask & AFMT_S8) {
  5452. deviceFormat = AFMT_S8;
  5453. stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  5454. }
  5455. }
  5456. if ( stream_.deviceFormat[mode] == 0 ) {
  5457. // This really shouldn't happen ...
  5458. close( fd );
  5459. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
  5460. errorText_ = errorStream_.str();
  5461. return FAILURE;
  5462. }
  5463. // Set the data format.
  5464. int temp = deviceFormat;
  5465. result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
  5466. if ( result == -1 || deviceFormat != temp ) {
  5467. close( fd );
  5468. errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
  5469. errorText_ = errorStream_.str();
  5470. return FAILURE;
  5471. }
  5472. // Attempt to set the buffer size. According to OSS, the minimum
  5473. // number of buffers is two. The supposed minimum buffer size is 16
  5474. // bytes, so that will be our lower bound. The argument to this
  5475. // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
5476. // bytes) is given as 2^SSSS and MMMM is the requested number of buffers.
  5477. // We'll check the actual value used near the end of the setup
  5478. // procedure.
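// Worked example (illustrative values only): a requested *bufferSize of
// 512 frames with 2 device channels of 16-bit samples gives
// ossBufferBytes = 512 * 2 * 2 = 2048 = 2^11, so SSSS = 0x000B; with the
// default of 3 buffers, MMMM = 0x0003 and the resulting ioctl argument is
// 0x0003000B.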
  5479. int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
  5480. if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
  5481. int buffers = 0;
  5482. if ( options ) buffers = options->numberOfBuffers;
  5483. if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
  5484. if ( buffers < 2 ) buffers = 3;
  5485. temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
  5486. result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
  5487. if ( result == -1 ) {
  5488. close( fd );
  5489. errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
  5490. errorText_ = errorStream_.str();
  5491. return FAILURE;
  5492. }
  5493. stream_.nBuffers = buffers;
  5494. // Save buffer size (in sample frames).
  5495. *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
  5496. stream_.bufferSize = *bufferSize;
  5497. // Set the sample rate.
  5498. int srate = sampleRate;
  5499. result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
  5500. if ( result == -1 ) {
  5501. close( fd );
  5502. errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
  5503. errorText_ = errorStream_.str();
  5504. return FAILURE;
  5505. }
  5506. // Verify the sample rate setup worked.
  5507. if ( abs( srate - sampleRate ) > 100 ) {
  5508. close( fd );
  5509. errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
  5510. errorText_ = errorStream_.str();
  5511. return FAILURE;
  5512. }
  5513. stream_.sampleRate = sampleRate;
  5514. if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
  5515. // We're doing duplex setup here.
  5516. stream_.deviceFormat[0] = stream_.deviceFormat[1];
  5517. stream_.nDeviceChannels[0] = deviceChannels;
  5518. }
  5519. // Set interleaving parameters.
  5520. stream_.userInterleaved = true;
  5521. stream_.deviceInterleaved[mode] = true;
  5522. if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
  5523. stream_.userInterleaved = false;
  5524. // Set flags for buffer conversion
  5525. stream_.doConvertBuffer[mode] = false;
  5526. if ( stream_.userFormat != stream_.deviceFormat[mode] )
  5527. stream_.doConvertBuffer[mode] = true;
  5528. if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
  5529. stream_.doConvertBuffer[mode] = true;
  5530. if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
  5531. stream_.nUserChannels[mode] > 1 )
  5532. stream_.doConvertBuffer[mode] = true;
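// For example (illustrative case only): if the caller asked for
// RTAUDIO_FLOAT32 but the device only advertises AFMT_S16_NE, the format
// fallback above selects RTAUDIO_SINT16 for the device, so
// userFormat != deviceFormat[mode] and convertBuffer() will run on every
// callback.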
  5533. // Allocate the stream handles if necessary and then save.
  5534. if ( stream_.apiHandle == 0 ) {
  5535. try {
  5536. handle = new OssHandle;
  5537. }
  5538. catch ( std::bad_alloc& ) {
  5539. errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
  5540. goto error;
  5541. }
  5542. stream_.apiHandle = (void *) handle;
  5543. }
  5544. else {
  5545. handle = (OssHandle *) stream_.apiHandle;
  5546. }
  5547. handle->id[mode] = fd;
  5548. // Allocate necessary internal buffers.
  5549. unsigned long bufferBytes;
  5550. bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
  5551. stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
  5552. if ( stream_.userBuffer[mode] == NULL ) {
  5553. errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
  5554. goto error;
  5555. }
  5556. if ( stream_.doConvertBuffer[mode] ) {
  5557. bool makeBuffer = true;
  5558. bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
  5559. if ( mode == INPUT ) {
  5560. if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
  5561. unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
  5562. if ( bufferBytes <= bytesOut ) makeBuffer = false;
  5563. }
  5564. }
  5565. if ( makeBuffer ) {
  5566. bufferBytes *= *bufferSize;
  5567. if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
  5568. stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
  5569. if ( stream_.deviceBuffer == NULL ) {
  5570. errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
  5571. goto error;
  5572. }
  5573. }
  5574. }
  5575. stream_.device[mode] = device;
  5576. stream_.state = STREAM_STOPPED;
  5577. // Setup the buffer conversion information structure.
  5578. if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
  5579. // Setup thread if necessary.
  5580. if ( stream_.mode == OUTPUT && mode == INPUT ) {
  5581. // We had already set up an output stream.
  5582. stream_.mode = DUPLEX;
  5583. if ( stream_.device[0] == device ) handle->id[0] = fd;
  5584. }
  5585. else {
  5586. stream_.mode = mode;
  5587. // Setup callback thread.
  5588. stream_.callbackInfo.object = (void *) this;
  5589. // Set the thread attributes for joinable and realtime scheduling
5590. // priority. The higher priority will only take effect if the
  5591. // program is run as root or suid.
  5592. pthread_attr_t attr;
  5593. pthread_attr_init( &attr );
  5594. pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
  5595. #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
  5596. pthread_attr_setschedpolicy( &attr, SCHED_RR );
  5597. #else
  5598. pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
  5599. #endif
  5600. stream_.callbackInfo.isRunning = true;
  5601. result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
  5602. pthread_attr_destroy( &attr );
  5603. if ( result ) {
  5604. stream_.callbackInfo.isRunning = false;
  5605. errorText_ = "RtApiOss::error creating callback thread!";
  5606. goto error;
  5607. }
  5608. }
  5609. return SUCCESS;
  5610. error:
  5611. if ( handle ) {
  5612. if ( handle->id[0] ) close( handle->id[0] );
  5613. if ( handle->id[1] ) close( handle->id[1] );
  5614. delete handle;
  5615. stream_.apiHandle = 0;
  5616. }
  5617. for ( int i=0; i<2; i++ ) {
  5618. if ( stream_.userBuffer[i] ) {
  5619. free( stream_.userBuffer[i] );
  5620. stream_.userBuffer[i] = 0;
  5621. }
  5622. }
  5623. if ( stream_.deviceBuffer ) {
  5624. free( stream_.deviceBuffer );
  5625. stream_.deviceBuffer = 0;
  5626. }
  5627. return FAILURE;
  5628. }
  5629. void RtApiOss :: closeStream()
  5630. {
  5631. if ( stream_.state == STREAM_CLOSED ) {
  5632. errorText_ = "RtApiOss::closeStream(): no open stream to close!";
  5633. error( RtError::WARNING );
  5634. return;
  5635. }
  5636. stream_.callbackInfo.isRunning = false;
  5637. pthread_join( stream_.callbackInfo.thread, NULL );
  5638. OssHandle *handle = (OssHandle *) stream_.apiHandle;
  5639. if ( stream_.state == STREAM_RUNNING ) {
  5640. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
  5641. ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
  5642. else
  5643. ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
  5644. stream_.state = STREAM_STOPPED;
  5645. }
  5646. if ( handle ) {
  5647. if ( handle->id[0] ) close( handle->id[0] );
  5648. if ( handle->id[1] ) close( handle->id[1] );
  5649. delete handle;
  5650. stream_.apiHandle = 0;
  5651. }
  5652. for ( int i=0; i<2; i++ ) {
  5653. if ( stream_.userBuffer[i] ) {
  5654. free( stream_.userBuffer[i] );
  5655. stream_.userBuffer[i] = 0;
  5656. }
  5657. }
  5658. if ( stream_.deviceBuffer ) {
  5659. free( stream_.deviceBuffer );
  5660. stream_.deviceBuffer = 0;
  5661. }
  5662. stream_.mode = UNINITIALIZED;
  5663. stream_.state = STREAM_CLOSED;
  5664. }
  5665. void RtApiOss :: startStream()
  5666. {
  5667. verifyStream();
  5668. if ( stream_.state == STREAM_RUNNING ) {
  5669. errorText_ = "RtApiOss::startStream(): the stream is already running!";
  5670. error( RtError::WARNING );
  5671. return;
  5672. }
  5673. MUTEX_LOCK( &stream_.mutex );
  5674. stream_.state = STREAM_RUNNING;
  5675. // No need to do anything else here ... OSS automatically starts
  5676. // when fed samples.
  5677. MUTEX_UNLOCK( &stream_.mutex );
  5678. }
  5679. void RtApiOss :: stopStream()
  5680. {
  5681. verifyStream();
  5682. if ( stream_.state == STREAM_STOPPED ) {
  5683. errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
  5684. error( RtError::WARNING );
  5685. return;
  5686. }
  5687. // Change the state before the lock to improve shutdown response
  5688. // when using a callback.
  5689. stream_.state = STREAM_STOPPED;
  5690. MUTEX_LOCK( &stream_.mutex );
  5691. int result = 0;
  5692. OssHandle *handle = (OssHandle *) stream_.apiHandle;
  5693. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  5694. // Flush the output with zeros a few times.
  5695. char *buffer;
  5696. int samples;
  5697. RtAudioFormat format;
  5698. if ( stream_.doConvertBuffer[0] ) {
  5699. buffer = stream_.deviceBuffer;
  5700. samples = stream_.bufferSize * stream_.nDeviceChannels[0];
  5701. format = stream_.deviceFormat[0];
  5702. }
  5703. else {
  5704. buffer = stream_.userBuffer[0];
  5705. samples = stream_.bufferSize * stream_.nUserChannels[0];
  5706. format = stream_.userFormat;
  5707. }
  5708. memset( buffer, 0, samples * formatBytes(format) );
  5709. for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
  5710. result = write( handle->id[0], buffer, samples * formatBytes(format) );
  5711. if ( result == -1 ) {
  5712. errorText_ = "RtApiOss::stopStream: audio write error.";
  5713. error( RtError::WARNING );
  5714. }
  5715. }
  5716. result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
  5717. if ( result == -1 ) {
  5718. errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
  5719. errorText_ = errorStream_.str();
  5720. goto unlock;
  5721. }
  5722. handle->triggered = false;
  5723. }
  5724. if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
  5725. result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
  5726. if ( result == -1 ) {
  5727. errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
  5728. errorText_ = errorStream_.str();
  5729. goto unlock;
  5730. }
  5731. }
  5732. unlock:
  5733. MUTEX_UNLOCK( &stream_.mutex );
  5734. stream_.state = STREAM_STOPPED;
  5735. if ( result != -1 ) return;
  5736. error( RtError::SYSTEM_ERROR );
  5737. }
  5738. void RtApiOss :: abortStream()
  5739. {
  5740. verifyStream();
  5741. if ( stream_.state == STREAM_STOPPED ) {
  5742. errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
  5743. error( RtError::WARNING );
  5744. return;
  5745. }
  5746. // Change the state before the lock to improve shutdown response
  5747. // when using a callback.
  5748. stream_.state = STREAM_STOPPED;
  5749. MUTEX_LOCK( &stream_.mutex );
  5750. int result = 0;
  5751. OssHandle *handle = (OssHandle *) stream_.apiHandle;
  5752. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  5753. result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
  5754. if ( result == -1 ) {
  5755. errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
  5756. errorText_ = errorStream_.str();
  5757. goto unlock;
  5758. }
  5759. handle->triggered = false;
  5760. }
  5761. if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
  5762. result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
  5763. if ( result == -1 ) {
  5764. errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
  5765. errorText_ = errorStream_.str();
  5766. goto unlock;
  5767. }
  5768. }
  5769. unlock:
  5770. MUTEX_UNLOCK( &stream_.mutex );
  5771. stream_.state = STREAM_STOPPED;
  5772. if ( result != -1 ) return;
  5773. error( RtError::SYSTEM_ERROR );
  5774. }
  5775. void RtApiOss :: callbackEvent()
  5776. {
  5777. if ( stream_.state == STREAM_STOPPED ) {
  5778. if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
  5779. return;
  5780. }
  5781. if ( stream_.state == STREAM_CLOSED ) {
  5782. errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
  5783. error( RtError::WARNING );
  5784. return;
  5785. }
  5786. // Invoke user callback to get fresh output data.
  5787. int doStopStream = 0;
  5788. RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
  5789. double streamTime = getStreamTime();
  5790. RtAudioStreamStatus status = 0;
  5791. OssHandle *handle = (OssHandle *) stream_.apiHandle;
  5792. if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
  5793. status |= RTAUDIO_OUTPUT_UNDERFLOW;
  5794. handle->xrun[0] = false;
  5795. }
  5796. if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
  5797. status |= RTAUDIO_INPUT_OVERFLOW;
  5798. handle->xrun[1] = false;
  5799. }
  5800. doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
  5801. stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
  5802. MUTEX_LOCK( &stream_.mutex );
  5803. // The state might change while waiting on a mutex.
  5804. if ( stream_.state == STREAM_STOPPED ) goto unlock;
  5805. int result;
  5806. char *buffer;
  5807. int samples;
  5808. RtAudioFormat format;
  5809. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  5810. // Setup parameters and do buffer conversion if necessary.
  5811. if ( stream_.doConvertBuffer[0] ) {
  5812. buffer = stream_.deviceBuffer;
  5813. convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
  5814. samples = stream_.bufferSize * stream_.nDeviceChannels[0];
  5815. format = stream_.deviceFormat[0];
  5816. }
  5817. else {
  5818. buffer = stream_.userBuffer[0];
  5819. samples = stream_.bufferSize * stream_.nUserChannels[0];
  5820. format = stream_.userFormat;
  5821. }
  5822. // Do byte swapping if necessary.
  5823. if ( stream_.doByteSwap[0] )
  5824. byteSwapBuffer( buffer, samples, format );
  5825. if ( stream_.mode == DUPLEX && handle->triggered == false ) {
  5826. int trig = 0;
  5827. ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
  5828. result = write( handle->id[0], buffer, samples * formatBytes(format) );
  5829. trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
  5830. ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
  5831. handle->triggered = true;
  5832. }
  5833. else
  5834. // Write samples to device.
  5835. result = write( handle->id[0], buffer, samples * formatBytes(format) );
  5836. if ( result == -1 ) {
  5837. // We'll assume this is an underrun, though there isn't a
  5838. // specific means for determining that.
  5839. handle->xrun[0] = true;
  5840. errorText_ = "RtApiOss::callbackEvent: audio write error.";
  5841. error( RtError::WARNING );
  5842. goto unlock;
  5843. }
  5844. }
  5845. if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
  5846. // Setup parameters.
  5847. if ( stream_.doConvertBuffer[1] ) {
  5848. buffer = stream_.deviceBuffer;
  5849. samples = stream_.bufferSize * stream_.nDeviceChannels[1];
  5850. format = stream_.deviceFormat[1];
  5851. }
  5852. else {
  5853. buffer = stream_.userBuffer[1];
  5854. samples = stream_.bufferSize * stream_.nUserChannels[1];
  5855. format = stream_.userFormat;
  5856. }
  5857. // Read samples from device.
  5858. result = read( handle->id[1], buffer, samples * formatBytes(format) );
  5859. if ( result == -1 ) {
  5860. // We'll assume this is an overrun, though there isn't a
  5861. // specific means for determining that.
  5862. handle->xrun[1] = true;
  5863. errorText_ = "RtApiOss::callbackEvent: audio read error.";
  5864. error( RtError::WARNING );
  5865. goto unlock;
  5866. }
  5867. // Do byte swapping if necessary.
  5868. if ( stream_.doByteSwap[1] )
  5869. byteSwapBuffer( buffer, samples, format );
  5870. // Do buffer conversion if necessary.
  5871. if ( stream_.doConvertBuffer[1] )
  5872. convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
  5873. }
  5874. unlock:
  5875. MUTEX_UNLOCK( &stream_.mutex );
  5876. RtApi::tickStreamTime();
  5877. if ( doStopStream == 1 ) this->stopStream();
  5878. else if ( doStopStream == 2 ) this->abortStream();
  5879. }
  5880. extern "C" void *ossCallbackHandler( void *ptr )
  5881. {
  5882. CallbackInfo *info = (CallbackInfo *) ptr;
  5883. RtApiOss *object = (RtApiOss *) info->object;
  5884. bool *isRunning = &info->isRunning;
  5885. #ifdef SCHED_RR
  5886. // Set a higher scheduler priority (P.J. Leonard)
  5887. struct sched_param param;
  5888. param.sched_priority = 39; // Is this the best number?
  5889. sched_setscheduler( 0, SCHED_RR, &param );
  5890. #endif
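// The hard-coded priority above is flagged as questionable in the original
// comment. Purely as a sketch, the run-time midpoint computed by the ALSA
// handler earlier in this file could be reused here instead:
/*
int min = sched_get_priority_min( SCHED_RR );
int max = sched_get_priority_max( SCHED_RR );
param.sched_priority = min + ( max - min ) / 2;
*/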
  5891. while ( *isRunning == true ) {
  5892. pthread_testcancel();
  5893. object->callbackEvent();
  5894. }
  5895. pthread_exit( NULL );
  5896. }
  5897. //******************** End of __LINUX_OSS__ *********************//
  5898. #endif
  5899. // *************************************************** //
  5900. //
  5901. // Protected common (OS-independent) RtAudio methods.
  5902. //
  5903. // *************************************************** //
  5904. // This method can be modified to control the behavior of error
  5905. // message printing.
  5906. void RtApi :: error( RtError::Type type )
  5907. {
  5908. errorStream_.str(""); // clear the ostringstream
  5909. if ( type == RtError::WARNING && showWarnings_ == true )
  5910. std::cerr << '\n' << errorText_ << "\n\n";
  5911. else
  5912. throw( RtError( errorText_, type ) );
  5913. }
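// As the comment above notes, this method is the single hook for changing
// how errors are reported. The following is an illustrative sketch only
// (not part of RtAudio): a variant that appends warnings to a log file
// (arbitrary file name; requires #include <fstream>) while still throwing
// on non-warning errors.
/*
void RtApi :: error( RtError::Type type )
{
  errorStream_.str(""); // clear the ostringstream
  if ( type == RtError::WARNING && showWarnings_ == true ) {
    std::ofstream log( "rtaudio_warnings.log", std::ios::app );
    if ( log.is_open() ) log << errorText_ << std::endl;
    else std::cerr << '\n' << errorText_ << "\n\n";
  }
  else
    throw( RtError( errorText_, type ) );
}
*/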
  5914. void RtApi :: verifyStream()
  5915. {
  5916. if ( stream_.state == STREAM_CLOSED ) {
  5917. errorText_ = "RtApi:: a stream is not open!";
  5918. error( RtError::INVALID_USE );
  5919. }
  5920. }
  5921. void RtApi :: clearStreamInfo()
  5922. {
  5923. stream_.mode = UNINITIALIZED;
  5924. stream_.state = STREAM_CLOSED;
  5925. stream_.sampleRate = 0;
  5926. stream_.bufferSize = 0;
  5927. stream_.nBuffers = 0;
  5928. stream_.userFormat = 0;
  5929. stream_.userInterleaved = true;
  5930. stream_.streamTime = 0.0;
  5931. stream_.apiHandle = 0;
  5932. stream_.deviceBuffer = 0;
  5933. stream_.callbackInfo.callback = 0;
  5934. stream_.callbackInfo.userData = 0;
  5935. stream_.callbackInfo.isRunning = false;
  5936. for ( int i=0; i<2; i++ ) {
  5937. stream_.device[i] = 0;
  5938. stream_.doConvertBuffer[i] = false;
  5939. stream_.deviceInterleaved[i] = true;
  5940. stream_.doByteSwap[i] = false;
  5941. stream_.nUserChannels[i] = 0;
  5942. stream_.nDeviceChannels[i] = 0;
  5943. stream_.channelOffset[i] = 0;
  5944. stream_.deviceFormat[i] = 0;
  5945. stream_.latency[i] = 0;
  5946. stream_.userBuffer[i] = 0;
  5947. stream_.convertInfo[i].channels = 0;
  5948. stream_.convertInfo[i].inJump = 0;
  5949. stream_.convertInfo[i].outJump = 0;
  5950. stream_.convertInfo[i].inFormat = 0;
  5951. stream_.convertInfo[i].outFormat = 0;
  5952. stream_.convertInfo[i].inOffset.clear();
  5953. stream_.convertInfo[i].outOffset.clear();
  5954. }
  5955. }
  5956. unsigned int RtApi :: formatBytes( RtAudioFormat format )
  5957. {
  5958. if ( format == RTAUDIO_SINT16 )
  5959. return 2;
  5960. else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
  5961. format == RTAUDIO_FLOAT32 )
  5962. return 4;
  5963. else if ( format == RTAUDIO_FLOAT64 )
  5964. return 8;
  5965. else if ( format == RTAUDIO_SINT8 )
  5966. return 1;
  5967. errorText_ = "RtApi::formatBytes: undefined format.";
  5968. error( RtError::WARNING );
  5969. return 0;
  5970. }
  5971. void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
  5972. {
  5973. if ( mode == INPUT ) { // convert device to user buffer
  5974. stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
  5975. stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
  5976. stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
  5977. stream_.convertInfo[mode].outFormat = stream_.userFormat;
  5978. }
  5979. else { // convert user to device buffer
  5980. stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
  5981. stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
  5982. stream_.convertInfo[mode].inFormat = stream_.userFormat;
  5983. stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
  5984. }
  5985. if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
  5986. stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
  5987. else
  5988. stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
  5989. // Set up the interleave/deinterleave offsets.
  5990. if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
  5991. if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
  5992. ( mode == INPUT && stream_.userInterleaved ) ) {
  5993. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
  5994. stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
  5995. stream_.convertInfo[mode].outOffset.push_back( k );
  5996. stream_.convertInfo[mode].inJump = 1;
  5997. }
  5998. }
  5999. else {
  6000. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
  6001. stream_.convertInfo[mode].inOffset.push_back( k );
  6002. stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
  6003. stream_.convertInfo[mode].outJump = 1;
  6004. }
  6005. }
  6006. }
  6007. else { // no (de)interleaving
  6008. if ( stream_.userInterleaved ) {
  6009. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
  6010. stream_.convertInfo[mode].inOffset.push_back( k );
  6011. stream_.convertInfo[mode].outOffset.push_back( k );
  6012. }
  6013. }
  6014. else {
  6015. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
  6016. stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
  6017. stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
  6018. stream_.convertInfo[mode].inJump = 1;
  6019. stream_.convertInfo[mode].outJump = 1;
  6020. }
  6021. }
  6022. }
  6023. // Add channel offset.
  6024. if ( firstChannel > 0 ) {
  6025. if ( stream_.deviceInterleaved[mode] ) {
  6026. if ( mode == OUTPUT ) {
  6027. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  6028. stream_.convertInfo[mode].outOffset[k] += firstChannel;
  6029. }
  6030. else {
  6031. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  6032. stream_.convertInfo[mode].inOffset[k] += firstChannel;
  6033. }
  6034. }
  6035. else {
  6036. if ( mode == OUTPUT ) {
  6037. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  6038. stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
  6039. }
  6040. else {
  6041. for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
  6042. stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
  6043. }
  6044. }
  6045. }
  6046. }
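// Illustrative example (not from the original source): for an OUTPUT
// stream with an interleaved stereo user buffer and a non-interleaved
// two-channel device buffer (firstChannel = 0), the code above yields
//   channels = 2, inJump = 2, outJump = 1,
//   inOffset = { 0, 1 }, outOffset = { 0, bufferSize },
// so convertBuffer() below writes user frame i, channel k to device
// buffer position k * bufferSize + i, i.e. it deinterleaves the data.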
  6047. void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
  6048. {
  6049. // This function does format conversion, input/output channel compensation, and
  6050. // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
6051. // the lower three bytes of a 32-bit integer, as the masks and shifts below assume.
  6052. // Clear our device buffer when in/out duplex device channels are different
  6053. if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
  6054. ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
  6055. memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
  6056. int j;
  6057. if (info.outFormat == RTAUDIO_FLOAT64) {
  6058. Float64 scale;
  6059. Float64 *out = (Float64 *)outBuffer;
  6060. if (info.inFormat == RTAUDIO_SINT8) {
  6061. signed char *in = (signed char *)inBuffer;
  6062. scale = 1.0 / 128.0;
  6063. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6064. for (j=0; j<info.channels; j++) {
  6065. out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
  6066. out[info.outOffset[j]] *= scale;
  6067. }
  6068. in += info.inJump;
  6069. out += info.outJump;
  6070. }
  6071. }
  6072. else if (info.inFormat == RTAUDIO_SINT16) {
  6073. Int16 *in = (Int16 *)inBuffer;
  6074. scale = 1.0 / 32768.0;
  6075. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6076. for (j=0; j<info.channels; j++) {
  6077. out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
  6078. out[info.outOffset[j]] *= scale;
  6079. }
  6080. in += info.inJump;
  6081. out += info.outJump;
  6082. }
  6083. }
  6084. else if (info.inFormat == RTAUDIO_SINT24) {
  6085. Int32 *in = (Int32 *)inBuffer;
  6086. scale = 1.0 / 8388608.0;
  6087. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6088. for (j=0; j<info.channels; j++) {
  6089. out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
  6090. out[info.outOffset[j]] *= scale;
  6091. }
  6092. in += info.inJump;
  6093. out += info.outJump;
  6094. }
  6095. }
  6096. else if (info.inFormat == RTAUDIO_SINT32) {
  6097. Int32 *in = (Int32 *)inBuffer;
  6098. scale = 1.0 / 2147483648.0;
  6099. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6100. for (j=0; j<info.channels; j++) {
  6101. out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
  6102. out[info.outOffset[j]] *= scale;
  6103. }
  6104. in += info.inJump;
  6105. out += info.outJump;
  6106. }
  6107. }
  6108. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6109. Float32 *in = (Float32 *)inBuffer;
  6110. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6111. for (j=0; j<info.channels; j++) {
  6112. out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
  6113. }
  6114. in += info.inJump;
  6115. out += info.outJump;
  6116. }
  6117. }
  6118. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6119. // Channel compensation and/or (de)interleaving only.
  6120. Float64 *in = (Float64 *)inBuffer;
  6121. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6122. for (j=0; j<info.channels; j++) {
  6123. out[info.outOffset[j]] = in[info.inOffset[j]];
  6124. }
  6125. in += info.inJump;
  6126. out += info.outJump;
  6127. }
  6128. }
  6129. }
  6130. else if (info.outFormat == RTAUDIO_FLOAT32) {
  6131. Float32 scale;
  6132. Float32 *out = (Float32 *)outBuffer;
  6133. if (info.inFormat == RTAUDIO_SINT8) {
  6134. signed char *in = (signed char *)inBuffer;
  6135. scale = 1.0 / 128.0;
  6136. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6137. for (j=0; j<info.channels; j++) {
  6138. out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
  6139. out[info.outOffset[j]] *= scale;
  6140. }
  6141. in += info.inJump;
  6142. out += info.outJump;
  6143. }
  6144. }
  6145. else if (info.inFormat == RTAUDIO_SINT16) {
  6146. Int16 *in = (Int16 *)inBuffer;
  6147. scale = 1.0 / 32768.0;
  6148. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6149. for (j=0; j<info.channels; j++) {
  6150. out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
  6151. out[info.outOffset[j]] *= scale;
  6152. }
  6153. in += info.inJump;
  6154. out += info.outJump;
  6155. }
  6156. }
  6157. else if (info.inFormat == RTAUDIO_SINT24) {
  6158. Int32 *in = (Int32 *)inBuffer;
  6159. scale = 1.0 / 8388608.0;
  6160. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6161. for (j=0; j<info.channels; j++) {
  6162. out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
  6163. out[info.outOffset[j]] *= scale;
  6164. }
  6165. in += info.inJump;
  6166. out += info.outJump;
  6167. }
  6168. }
  6169. else if (info.inFormat == RTAUDIO_SINT32) {
  6170. Int32 *in = (Int32 *)inBuffer;
  6171. scale = 1.0 / 2147483648.0;
  6172. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6173. for (j=0; j<info.channels; j++) {
  6174. out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
  6175. out[info.outOffset[j]] *= scale;
  6176. }
  6177. in += info.inJump;
  6178. out += info.outJump;
  6179. }
  6180. }
  6181. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6182. // Channel compensation and/or (de)interleaving only.
  6183. Float32 *in = (Float32 *)inBuffer;
  6184. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6185. for (j=0; j<info.channels; j++) {
  6186. out[info.outOffset[j]] = in[info.inOffset[j]];
  6187. }
  6188. in += info.inJump;
  6189. out += info.outJump;
  6190. }
  6191. }
  6192. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6193. Float64 *in = (Float64 *)inBuffer;
  6194. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6195. for (j=0; j<info.channels; j++) {
  6196. out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
  6197. }
  6198. in += info.inJump;
  6199. out += info.outJump;
  6200. }
  6201. }
  6202. }
  6203. else if (info.outFormat == RTAUDIO_SINT32) {
  6204. Int32 *out = (Int32 *)outBuffer;
  6205. if (info.inFormat == RTAUDIO_SINT8) {
  6206. signed char *in = (signed char *)inBuffer;
  6207. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6208. for (j=0; j<info.channels; j++) {
  6209. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6210. out[info.outOffset[j]] <<= 24;
  6211. }
  6212. in += info.inJump;
  6213. out += info.outJump;
  6214. }
  6215. }
  6216. else if (info.inFormat == RTAUDIO_SINT16) {
  6217. Int16 *in = (Int16 *)inBuffer;
  6218. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6219. for (j=0; j<info.channels; j++) {
  6220. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6221. out[info.outOffset[j]] <<= 16;
  6222. }
  6223. in += info.inJump;
  6224. out += info.outJump;
  6225. }
  6226. }
  6227. else if (info.inFormat == RTAUDIO_SINT24) {
  6228. Int32 *in = (Int32 *)inBuffer;
  6229. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6230. for (j=0; j<info.channels; j++) {
  6231. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6232. out[info.outOffset[j]] <<= 8;
  6233. }
  6234. in += info.inJump;
  6235. out += info.outJump;
  6236. }
  6237. }
  6238. else if (info.inFormat == RTAUDIO_SINT32) {
  6239. // Channel compensation and/or (de)interleaving only.
  6240. Int32 *in = (Int32 *)inBuffer;
  6241. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6242. for (j=0; j<info.channels; j++) {
  6243. out[info.outOffset[j]] = in[info.inOffset[j]];
  6244. }
  6245. in += info.inJump;
  6246. out += info.outJump;
  6247. }
  6248. }
  6249. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6250. Float32 *in = (Float32 *)inBuffer;
  6251. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6252. for (j=0; j<info.channels; j++) {
  6253. out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
  6254. }
  6255. in += info.inJump;
  6256. out += info.outJump;
  6257. }
  6258. }
  6259. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6260. Float64 *in = (Float64 *)inBuffer;
  6261. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6262. for (j=0; j<info.channels; j++) {
  6263. out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
  6264. }
  6265. in += info.inJump;
  6266. out += info.outJump;
  6267. }
  6268. }
  6269. }
  6270. else if (info.outFormat == RTAUDIO_SINT24) {
  6271. Int32 *out = (Int32 *)outBuffer;
  6272. if (info.inFormat == RTAUDIO_SINT8) {
  6273. signed char *in = (signed char *)inBuffer;
  6274. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6275. for (j=0; j<info.channels; j++) {
  6276. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6277. out[info.outOffset[j]] <<= 16;
  6278. }
  6279. in += info.inJump;
  6280. out += info.outJump;
  6281. }
  6282. }
  6283. else if (info.inFormat == RTAUDIO_SINT16) {
  6284. Int16 *in = (Int16 *)inBuffer;
  6285. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6286. for (j=0; j<info.channels; j++) {
  6287. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6288. out[info.outOffset[j]] <<= 8;
  6289. }
  6290. in += info.inJump;
  6291. out += info.outJump;
  6292. }
  6293. }
  6294. else if (info.inFormat == RTAUDIO_SINT24) {
  6295. // Channel compensation and/or (de)interleaving only.
  6296. Int32 *in = (Int32 *)inBuffer;
  6297. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6298. for (j=0; j<info.channels; j++) {
  6299. out[info.outOffset[j]] = in[info.inOffset[j]];
  6300. }
  6301. in += info.inJump;
  6302. out += info.outJump;
  6303. }
  6304. }
  6305. else if (info.inFormat == RTAUDIO_SINT32) {
  6306. Int32 *in = (Int32 *)inBuffer;
  6307. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6308. for (j=0; j<info.channels; j++) {
  6309. out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
  6310. out[info.outOffset[j]] >>= 8;
  6311. }
  6312. in += info.inJump;
  6313. out += info.outJump;
  6314. }
  6315. }
  6316. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6317. Float32 *in = (Float32 *)inBuffer;
  6318. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6319. for (j=0; j<info.channels; j++) {
  6320. out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388608.0);
  6321. }
  6322. in += info.inJump;
  6323. out += info.outJump;
  6324. }
  6325. }
  6326. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6327. Float64 *in = (Float64 *)inBuffer;
  6328. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6329. for (j=0; j<info.channels; j++) {
  6330. out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
  6331. }
  6332. in += info.inJump;
  6333. out += info.outJump;
  6334. }
  6335. }
  6336. }
  6337. else if (info.outFormat == RTAUDIO_SINT16) {
  6338. Int16 *out = (Int16 *)outBuffer;
  6339. if (info.inFormat == RTAUDIO_SINT8) {
  6340. signed char *in = (signed char *)inBuffer;
  6341. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6342. for (j=0; j<info.channels; j++) {
  6343. out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
  6344. out[info.outOffset[j]] <<= 8;
  6345. }
  6346. in += info.inJump;
  6347. out += info.outJump;
  6348. }
  6349. }
  6350. else if (info.inFormat == RTAUDIO_SINT16) {
  6351. // Channel compensation and/or (de)interleaving only.
  6352. Int16 *in = (Int16 *)inBuffer;
  6353. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6354. for (j=0; j<info.channels; j++) {
  6355. out[info.outOffset[j]] = in[info.inOffset[j]];
  6356. }
  6357. in += info.inJump;
  6358. out += info.outJump;
  6359. }
  6360. }
  6361. else if (info.inFormat == RTAUDIO_SINT24) {
  6362. Int32 *in = (Int32 *)inBuffer;
  6363. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6364. for (j=0; j<info.channels; j++) {
  6365. out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
  6366. }
  6367. in += info.inJump;
  6368. out += info.outJump;
  6369. }
  6370. }
  6371. else if (info.inFormat == RTAUDIO_SINT32) {
  6372. Int32 *in = (Int32 *)inBuffer;
  6373. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6374. for (j=0; j<info.channels; j++) {
  6375. out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
  6376. }
  6377. in += info.inJump;
  6378. out += info.outJump;
  6379. }
  6380. }
  6381. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6382. Float32 *in = (Float32 *)inBuffer;
  6383. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6384. for (j=0; j<info.channels; j++) {
  6385. out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
  6386. }
  6387. in += info.inJump;
  6388. out += info.outJump;
  6389. }
  6390. }
  6391. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6392. Float64 *in = (Float64 *)inBuffer;
  6393. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6394. for (j=0; j<info.channels; j++) {
  6395. out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
  6396. }
  6397. in += info.inJump;
  6398. out += info.outJump;
  6399. }
  6400. }
  6401. }
  6402. else if (info.outFormat == RTAUDIO_SINT8) {
  6403. signed char *out = (signed char *)outBuffer;
  6404. if (info.inFormat == RTAUDIO_SINT8) {
  6405. // Channel compensation and/or (de)interleaving only.
  6406. signed char *in = (signed char *)inBuffer;
  6407. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6408. for (j=0; j<info.channels; j++) {
  6409. out[info.outOffset[j]] = in[info.inOffset[j]];
  6410. }
  6411. in += info.inJump;
  6412. out += info.outJump;
  6413. }
  6414. }
6415. else if (info.inFormat == RTAUDIO_SINT16) {
  6416. Int16 *in = (Int16 *)inBuffer;
  6417. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6418. for (j=0; j<info.channels; j++) {
  6419. out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
  6420. }
  6421. in += info.inJump;
  6422. out += info.outJump;
  6423. }
  6424. }
  6425. else if (info.inFormat == RTAUDIO_SINT24) {
  6426. Int32 *in = (Int32 *)inBuffer;
  6427. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6428. for (j=0; j<info.channels; j++) {
  6429. out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
  6430. }
  6431. in += info.inJump;
  6432. out += info.outJump;
  6433. }
  6434. }
  6435. else if (info.inFormat == RTAUDIO_SINT32) {
  6436. Int32 *in = (Int32 *)inBuffer;
  6437. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6438. for (j=0; j<info.channels; j++) {
  6439. out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
  6440. }
  6441. in += info.inJump;
  6442. out += info.outJump;
  6443. }
  6444. }
  6445. else if (info.inFormat == RTAUDIO_FLOAT32) {
  6446. Float32 *in = (Float32 *)inBuffer;
  6447. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6448. for (j=0; j<info.channels; j++) {
  6449. out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
  6450. }
  6451. in += info.inJump;
  6452. out += info.outJump;
  6453. }
  6454. }
  6455. else if (info.inFormat == RTAUDIO_FLOAT64) {
  6456. Float64 *in = (Float64 *)inBuffer;
  6457. for (unsigned int i=0; i<stream_.bufferSize; i++) {
  6458. for (j=0; j<info.channels; j++) {
  6459. out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
  6460. }
  6461. in += info.inJump;
  6462. out += info.outJump;
  6463. }
  6464. }
  6465. }
  6466. }
  6467. void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
  6468. {
  6469. register char val;
  6470. register char *ptr;
  6471. ptr = buffer;
  6472. if ( format == RTAUDIO_SINT16 ) {
  6473. for ( unsigned int i=0; i<samples; i++ ) {
  6474. // Swap 1st and 2nd bytes.
  6475. val = *(ptr);
  6476. *(ptr) = *(ptr+1);
  6477. *(ptr+1) = val;
  6478. // Increment 2 bytes.
  6479. ptr += 2;
  6480. }
  6481. }
  6482. else if ( format == RTAUDIO_SINT24 ||
  6483. format == RTAUDIO_SINT32 ||
  6484. format == RTAUDIO_FLOAT32 ) {
  6485. for ( unsigned int i=0; i<samples; i++ ) {
  6486. // Swap 1st and 4th bytes.
  6487. val = *(ptr);
  6488. *(ptr) = *(ptr+3);
  6489. *(ptr+3) = val;
  6490. // Swap 2nd and 3rd bytes.
  6491. ptr += 1;
  6492. val = *(ptr);
  6493. *(ptr) = *(ptr+1);
  6494. *(ptr+1) = val;
  6495. // Increment 4 bytes.
  6496. ptr += 4;
  6497. }
  6498. }
  6499. else if ( format == RTAUDIO_FLOAT64 ) {
  6500. for ( unsigned int i=0; i<samples; i++ ) {
  6501. // Swap 1st and 8th bytes
  6502. val = *(ptr);
  6503. *(ptr) = *(ptr+7);
  6504. *(ptr+7) = val;
  6505. // Swap 2nd and 7th bytes
  6506. ptr += 1;
  6507. val = *(ptr);
  6508. *(ptr) = *(ptr+5);
  6509. *(ptr+5) = val;
  6510. // Swap 3rd and 6th bytes
  6511. ptr += 1;
  6512. val = *(ptr);
  6513. *(ptr) = *(ptr+3);
  6514. *(ptr+3) = val;
  6515. // Swap 4th and 5th bytes
  6516. ptr += 1;
  6517. val = *(ptr);
  6518. *(ptr) = *(ptr+1);
  6519. *(ptr+1) = val;
  6520. // Increment 8 bytes.
  6521. ptr += 8;
  6522. }
  6523. }
  6524. }
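// Illustrative example (not from the original source): a single
// RTAUDIO_SINT16 sample stored as the byte pair { 0x12, 0x34 } leaves
// byteSwapBuffer() as { 0x34, 0x12 }; the routine simply flips each
// sample's byte order in place to convert between little- and big-endian
// data.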
  6525. // Indentation settings for Vim and Emacs
  6526. //
  6527. // Local Variables:
  6528. // c-basic-offset: 2
  6529. // indent-tabs-mode: nil
  6530. // End:
  6531. //
  6532. // vim: et sts=2 sw=2