The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. The code included in this file is provided under the terms of the ISC license
  8. http://www.isc.org/downloads/software-support-policy/isc-license. Permission
  9. to use, copy, modify, and/or distribute this software for any purpose with or
  10. without fee is hereby granted, provided that the above copyright notice and
  11. this permission notice appear in all copies.
  12. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  13. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  14. DISCLAIMED.
  15. ==============================================================================
  16. */
  17. namespace juce
  18. {
  19. //==============================================================================
  20. #ifndef SL_ANDROID_DATAFORMAT_PCM_EX
  21. #define SL_ANDROID_DATAFORMAT_PCM_EX ((SLuint32) 0x00000004)
  22. #endif
  23. #ifndef SL_ANDROID_PCM_REPRESENTATION_FLOAT
  24. #define SL_ANDROID_PCM_REPRESENTATION_FLOAT ((SLuint32) 0x00000003)
  25. #endif
  26. #ifndef SL_ANDROID_RECORDING_PRESET_UNPROCESSED
  27. #define SL_ANDROID_RECORDING_PRESET_UNPROCESSED ((SLuint32) 0x00000005)
  28. #endif
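// These SL_ANDROID_* constants are only declared in newer Android NDK headers; the fallback
// definitions above (using the same numeric values) are presumably there so that this file
// still builds against older NDKs that lack them.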
  29. //==============================================================================
  30. struct PCMDataFormatEx : SLDataFormat_PCM
  31. {
  32. SLuint32 representation;
  33. };
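// PCMDataFormatEx mirrors the layout of the NDK's SLAndroidDataFormat_PCM_EX: a plain
// SLDataFormat_PCM with one extra 'representation' word appended, which is what makes it
// possible to request 32-bit float PCM (see BufferHelpers<float> below).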
  34. //==============================================================================
  35. template <typename T> struct IntfIID;
  36. template <> struct IntfIID<SLObjectItf_> { static SLInterfaceID_ iid; };
  37. template <> struct IntfIID<SLEngineItf_> { static SLInterfaceID_ iid; };
  38. template <> struct IntfIID<SLOutputMixItf_> { static SLInterfaceID_ iid; };
  39. template <> struct IntfIID<SLPlayItf_> { static SLInterfaceID_ iid; };
  40. template <> struct IntfIID<SLRecordItf_> { static SLInterfaceID_ iid; };
  41. template <> struct IntfIID<SLAndroidSimpleBufferQueueItf_> { static SLInterfaceID_ iid; };
  42. template <> struct IntfIID<SLAndroidConfigurationItf_> { static SLInterfaceID_ iid; };
  43. SLInterfaceID_ IntfIID<SLObjectItf_>::iid = { 0x79216360, 0xddd7, 0x11db, 0xac16, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  44. SLInterfaceID_ IntfIID<SLEngineItf_>::iid = { 0x8d97c260, 0xddd4, 0x11db, 0x958f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  45. SLInterfaceID_ IntfIID<SLOutputMixItf_>::iid = { 0x97750f60, 0xddd7, 0x11db, 0x92b1, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  46. SLInterfaceID_ IntfIID<SLPlayItf_>::iid = { 0xef0bd9c0, 0xddd7, 0x11db, 0xbf49, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  47. SLInterfaceID_ IntfIID<SLRecordItf_>::iid = { 0xc5657aa0, 0xdddb, 0x11db, 0x82f7, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  48. SLInterfaceID_ IntfIID<SLAndroidSimpleBufferQueueItf_>::iid = { 0x198e4940, 0xc5d7, 0x11df, 0xa2a6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  49. SLInterfaceID_ IntfIID<SLAndroidConfigurationItf_>::iid = { 0x89f6a7e0, 0xbeac, 0x11df, 0x8b5c, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
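// These interface IDs duplicate the GUIDs that libOpenSLES exports as its SL_IID_* symbols.
// Keeping local copies appears to be what allows the library to be opened at runtime with
// DynamicLibrary ("libOpenSLES.so") instead of being linked at build time.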
  50. //==============================================================================
  51. // Some lifetime and type management of OpenSL objects
  52. class SlObjectRef
  53. {
  54. public:
  55. //==============================================================================
  56. SlObjectRef() noexcept {}
  57. SlObjectRef (const SlObjectRef& obj) noexcept : cb (obj.cb) {}
  58. SlObjectRef (SlObjectRef&& obj) noexcept : cb (static_cast<ReferenceCountedObjectPtr<ControlBlock>&&> (obj.cb)) { obj.cb = nullptr; }
  59. explicit SlObjectRef (SLObjectItf o) : cb (new ControlBlock (o)) {}
  60. //==============================================================================
  61. SlObjectRef& operator=(const SlObjectRef& r) noexcept { cb = r.cb; return *this; }
  62. SlObjectRef& operator=(SlObjectRef&& r) noexcept { cb = static_cast<ReferenceCountedObjectPtr<ControlBlock>&&> (r.cb); r.cb = nullptr; return *this; }
  63. SlObjectRef& operator=(std::nullptr_t) noexcept { cb = nullptr; return *this; }
  64. //==============================================================================
  65. const SLObjectItf_* const operator*() noexcept { return *cb->ptr.get(); }
  66. SLObjectItf operator->() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); }
  67. operator SLObjectItf() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); }
  68. //==============================================================================
  69. bool operator== (nullptr_t) const noexcept { return (cb == nullptr || cb->ptr == nullptr); }
  70. bool operator!= (nullptr_t) const noexcept { return (cb != nullptr && cb->ptr != nullptr); }
  71. private:
  72. //==============================================================================
  73. struct ControlBlock : ReferenceCountedObject { std::unique_ptr<const SLObjectItf_* const> ptr; ControlBlock() {} ControlBlock (SLObjectItf o) : ptr (o) {} };
  74. ReferenceCountedObjectPtr<ControlBlock> cb;
  75. };
  76. template <typename T>
  77. class SlRef : public SlObjectRef
  78. {
  79. public:
  80. //==============================================================================
  81. SlRef() noexcept : type (nullptr) {}
  82. SlRef (SlRef& r) noexcept : SlObjectRef (r), type (r.type) {}
  83. SlRef (SlRef&& r) noexcept : SlObjectRef (static_cast<SlRef&&> (r)), type (r.type) { r.type = nullptr; }
  84. //==============================================================================
  85. SlRef& operator= (const SlRef& r) noexcept { SlObjectRef::operator= (r); type = r.type; return *this; }
  86. SlRef& operator= (SlRef&& r) noexcept { SlObjectRef::operator= (static_cast<SlObjectRef&&> (r)); type = r.type; r.type = nullptr; return *this; }
  87. SlRef& operator= (std::nullptr_t) noexcept { SlObjectRef::operator= (nullptr); type = nullptr; return *this; }
  88. //==============================================================================
  89. T* const operator*() noexcept { return *type; }
  90. T* const * operator->() noexcept { return type; }
  91. operator T* const *() noexcept { return type; }
  92. //==============================================================================
  93. static SlRef cast (SlObjectRef& base) { return SlRef (base); }
  94. static SlRef cast (SlObjectRef&& base) { return SlRef (static_cast<SlObjectRef&&> (base)); }
  95. private:
  96. //==============================================================================
  97. SlRef (SlObjectRef& base) : SlObjectRef (base)
  98. {
  99. SLObjectItf obj = SlObjectRef::operator->();
  100. SLresult err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);
  101. if (type == nullptr || err != SL_RESULT_SUCCESS)
  102. *this = nullptr;
  103. }
  104. SlRef (SlObjectRef&& base) : SlObjectRef (static_cast<SlObjectRef&&> (base))
  105. {
  106. SLObjectItf obj = SlObjectRef::operator->();
  107. SLresult err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);
  108. base = nullptr;
  109. if (type == nullptr || err != SL_RESULT_SUCCESS)
  110. *this = nullptr;
  111. }
  112. T* const * type;
  113. };
  114. template <>
  115. struct ContainerDeletePolicy<const SLObjectItf_* const>
  116. {
  117. static void destroy (SLObjectItf object)
  118. {
  119. if (object != nullptr)
  120. (*object)->Destroy (object);
  121. }
  122. };
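// The intent of this specialization is that when the smart pointer inside SlObjectRef::ControlBlock
// releases an OpenSL object, the object is destroyed via (*object)->Destroy() rather than a plain
// delete, so dropping the last SlObjectRef that shares a control block tears the object down cleanly.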
  123. //==============================================================================
  124. template <typename T> struct BufferHelpers {};
  125. template <>
  126. struct BufferHelpers<int16>
  127. {
  128. enum { isFloatingPoint = 0 };
  129. static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
  130. {
  131. dataFormat.formatType = SL_DATAFORMAT_PCM;
  132. dataFormat.numChannels = (SLuint32) numChannels;
  133. dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000);
  134. dataFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
  135. dataFormat.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
  136. dataFormat.channelMask = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER :
  137. (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
  138. dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
  139. dataFormat.representation = 0;
  140. }
  141. static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}
  142. static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
  143. {
  144. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  145. {
  146. using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
  147. using SrcSampleType = AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>;
  148. DstSampleType dstData (audioBuffer.getWritePointer (i));
  149. SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
  150. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  151. }
  152. }
  153. static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
  154. {
  155. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  156. {
  157. using DstSampleType = AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst>;
  158. using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
  159. DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
  160. SrcSampleType srcData (audioBuffer.getReadPointer (i));
  161. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  162. }
  163. }
  164. };
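// A minimal sketch of what convertFromOpenSL above amounts to, with the AudioData::Pointer
// machinery unrolled (dest, numChannels and numSamples are illustrative names, not part of this file):
//
//     for (int ch = 0; ch < numChannels; ++ch)
//         for (int n = 0; n < numSamples; ++n)
//             dest[ch][n] = srcInterleaved[n * numChannels + ch] * (1.0f / 32768.0f);
//
// i.e. the interleaved little-endian 16-bit stream delivered by OpenSL is de-interleaved into JUCE's
// planar float buffers, and convertToOpenSL performs the roughly inverse scale-and-interleave step.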
  165. template <>
  166. struct BufferHelpers<float>
  167. {
  168. enum { isFloatingPoint = 1 };
  169. static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
  170. {
  171. dataFormat.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
  172. dataFormat.numChannels = (SLuint32) numChannels;
  173. dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000);
  174. dataFormat.bitsPerSample = 32;
  175. dataFormat.containerSize = 32;
  176. dataFormat.channelMask = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER :
  177. (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
  178. dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
  179. dataFormat.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
  180. }
  181. static void prepareCallbackBuffer (AudioBuffer<float>& audioBuffer, float* native)
  182. {
  183. if (audioBuffer.getNumChannels() == 1)
  184. audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
  185. }
  186. static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
  187. {
  188. if (audioBuffer.getNumChannels() == 1)
  189. {
  190. jassert (srcInterleaved == audioBuffer.getWritePointer (0));
  191. return;
  192. }
  193. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  194. {
  195. using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;
  196. using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const>;
  197. DstSampleType dstData (audioBuffer.getWritePointer (i));
  198. SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
  199. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  200. }
  201. }
  202. static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
  203. {
  204. if (audioBuffer.getNumChannels() == 1)
  205. {
  206. jassert (dstInterleaved == audioBuffer.getReadPointer (0));
  207. return;
  208. }
  209. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  210. {
  211. using DstSampleType = AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst>;
  212. using SrcSampleType = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>;
  213. DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
  214. SrcSampleType srcData (audioBuffer.getReadPointer (i));
  215. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  216. }
  217. }
  218. };
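// In the float specialization, prepareCallbackBuffer() points the JUCE buffer straight at the native
// OpenSL buffer when there is only one channel, so the convert functions become no-ops (the jasserts
// merely check that this aliasing really happened). For two channels the same interleave/de-interleave
// copy as the int16 case is performed, just without any change of bit depth.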
  219. class SLRealtimeThread;
  220. //==============================================================================
  221. class OpenSLAudioIODevice : public AudioIODevice
  222. {
  223. public:
  224. //==============================================================================
  225. template <typename T>
  226. class OpenSLSessionT;
  227. //==============================================================================
  228. // CRTP
  229. template <typename T, class Child, typename RunnerObjectType>
  230. struct OpenSLQueueRunner
  231. {
  232. OpenSLQueueRunner (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
  233. : owner (sessionToUse),
  234. numChannels (numChannelsToUse),
  235. nativeBuffer (static_cast<size_t> (numChannels * owner.bufferSize * owner.numBuffers)),
  236. scratchBuffer (numChannelsToUse, owner.bufferSize),
  237. sampleBuffer (scratchBuffer.getArrayOfWritePointers(), numChannelsToUse, owner.bufferSize),
  238. nextBlock (0), numBlocksOut (0)
  239. {}
  240. ~OpenSLQueueRunner()
  241. {
  242. if (config != nullptr && javaProxy != nullptr)
  243. {
  244. javaProxy.clear();
  245. (*config)->ReleaseJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/1);
  246. }
  247. }
  248. bool init()
  249. {
  250. runner = crtp().createPlayerOrRecorder();
  251. if (runner == nullptr)
  252. return false;
  253. const bool supportsJavaProxy = (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 24);
  254. if (supportsJavaProxy)
  255. {
  256. // may return nullptr on some platforms - that's ok
  257. config = SlRef<SLAndroidConfigurationItf_>::cast (runner);
  258. if (config != nullptr)
  259. {
  260. jobject audioRoutingJni;
  261. auto status = (*config)->AcquireJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/1,
  262. &audioRoutingJni);
  263. if (status == SL_RESULT_SUCCESS && audioRoutingJni != 0)
  264. javaProxy = GlobalRef (audioRoutingJni);
  265. }
  266. }
  267. queue = SlRef<SLAndroidSimpleBufferQueueItf_>::cast (runner);
  268. if (queue == nullptr)
  269. return false;
  270. return ((*queue)->RegisterCallback (queue, staticFinished, this) == SL_RESULT_SUCCESS);
  271. }
  272. void clear()
  273. {
  274. nextBlock.set (0);
  275. numBlocksOut.set (0);
  276. zeromem (nativeBuffer.get(), static_cast<size_t> (owner.bufferSize * numChannels * owner.numBuffers) * sizeof (T));
  277. scratchBuffer.clear();
  278. (*queue)->Clear (queue);
  279. }
  280. void enqueueBuffer()
  281. {
  282. (*queue)->Enqueue (queue, getCurrentBuffer(), static_cast<SLuint32> (getBufferSizeInSamples() * sizeof (T)));
  283. ++numBlocksOut;
  284. }
  285. bool isBufferAvailable() const { return (numBlocksOut.get() < owner.numBuffers); }
  286. T* getNextBuffer() { nextBlock.set((nextBlock.get() + 1) % owner.numBuffers); return getCurrentBuffer(); }
  287. T* getCurrentBuffer() { return nativeBuffer.get() + (static_cast<size_t> (nextBlock.get()) * getBufferSizeInSamples()); }
  288. size_t getBufferSizeInSamples() const { return static_cast<size_t> (owner.bufferSize * numChannels); }
  289. void finished (SLAndroidSimpleBufferQueueItf)
  290. {
  291. attachAndroidJNI();
  292. --numBlocksOut;
  293. owner.doSomeWorkOnAudioThread();
  294. }
  295. static void staticFinished (SLAndroidSimpleBufferQueueItf caller, void *pContext)
  296. {
  297. reinterpret_cast<OpenSLQueueRunner*> (pContext)->finished (caller);
  298. }
  299. // get the "this" pointer for CRTP
  300. Child& crtp() { return * ((Child*) this); }
  301. const Child& crtp() const { return * ((Child*) this); }
  302. OpenSLSessionT<T>& owner;
  303. SlRef<RunnerObjectType> runner;
  304. SlRef<SLAndroidSimpleBufferQueueItf_> queue;
  305. SlRef<SLAndroidConfigurationItf_> config;
  306. GlobalRef javaProxy;
  307. int numChannels;
  308. HeapBlock<T> nativeBuffer;
  309. AudioBuffer<float> scratchBuffer, sampleBuffer;
  310. Atomic<int> nextBlock, numBlocksOut;
  311. };
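// OpenSLQueueRunner owns a ring of owner.numBuffers blocks inside a single HeapBlock: getNextBuffer()
// advances nextBlock modulo numBuffers, and numBlocksOut counts how many blocks are currently queued
// with OpenSL, so isBufferAvailable() tells the audio thread whether another block may be enqueued.
// The CRTP Child parameter lets createPlayerOrRecorder() and setState() differ between the player and
// recorder variants below without any virtual dispatch on the audio path.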
  312. //==============================================================================
  313. template <typename T>
  314. struct OpenSLQueueRunnerPlayer : OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_>
  315. {
  316. using Base = OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_>;
  317. enum { isPlayer = 1 };
  318. OpenSLQueueRunnerPlayer (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
  319. : Base (sessionToUse, numChannelsToUse)
  320. {}
  321. SlRef<SLPlayItf_> createPlayerOrRecorder()
  322. {
  323. SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers)};
  324. SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, Base::owner.outputMix};
  325. PCMDataFormatEx dataFormat;
  326. BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);
  327. SLDataSource source = {&queueLocator, &dataFormat};
  328. SLDataSink sink = {&outputMix, nullptr};
  329. SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid, &IntfIID<SLAndroidConfigurationItf_>::iid };
  330. SLboolean interfaceRequired[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE};
  331. SLObjectItf obj = nullptr;
  332. SLresult status = (*Base::owner.engine)->CreateAudioPlayer (Base::owner.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired);
  333. if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  334. {
  335. if (obj != nullptr)
  336. (*obj)->Destroy (obj);
  337. return SlRef<SLPlayItf_>();
  338. }
  339. return SlRef<SLPlayItf_>::cast (SlObjectRef (obj));
  340. }
  341. void setState (bool running) { (*Base::runner)->SetPlayState (Base::runner, running ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_STOPPED); }
  342. };
  343. template <typename T>
  344. struct OpenSLQueueRunnerRecorder : OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_>
  345. {
  346. using Base = OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_>;
  347. enum { isPlayer = 0 };
  348. OpenSLQueueRunnerRecorder (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
  349. : Base (sessionToUse, numChannelsToUse)
  350. {}
  351. SlRef<SLRecordItf_> createPlayerOrRecorder()
  352. {
  353. SLDataLocator_IODevice ioDeviceLocator = {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr};
  354. SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers)};
  355. PCMDataFormatEx dataFormat;
  356. BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);
  357. SLDataSource source = {&ioDeviceLocator, nullptr};
  358. SLDataSink sink = {&queueLocator, &dataFormat};
  359. SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid, &IntfIID<SLAndroidConfigurationItf_>::iid };
  360. SLboolean interfaceRequired[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE};
  361. SLObjectItf obj = nullptr;
  362. SLresult status = (*Base::owner.engine)->CreateAudioRecorder (Base::owner.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired);
  363. if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  364. {
  365. if (obj != nullptr)
  366. (*obj)->Destroy (obj);
  367. return SlRef<SLRecordItf_>();
  368. }
  369. SlRef<SLRecordItf_> recorder = SlRef<SLRecordItf_>::cast (SlObjectRef (obj));
  370. return recorder;
  371. }
  372. bool setAudioPreprocessingEnabled (bool shouldEnable)
  373. {
  374. if (Base::config != nullptr)
  375. {
  376. const bool supportsUnprocessed = (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 25);
  377. const SLuint32 recordingPresetValue
  378. = (shouldEnable ? SL_ANDROID_RECORDING_PRESET_GENERIC
  379. : (supportsUnprocessed ? SL_ANDROID_RECORDING_PRESET_UNPROCESSED
  380. : SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION));
  381. SLresult status = (*Base::config)->SetConfiguration (Base::config, SL_ANDROID_KEY_RECORDING_PRESET,
  382. &recordingPresetValue, sizeof (recordingPresetValue));
  383. return (status == SL_RESULT_SUCCESS);
  384. }
  385. return false;
  386. }
  387. void setState (bool running) { (*Base::runner)->SetRecordState (Base::runner, running ? SL_RECORDSTATE_RECORDING : SL_RECORDSTATE_STOPPED); }
  388. };
  389. //==============================================================================
  390. class OpenSLSession
  391. {
  392. public:
  393. OpenSLSession (DynamicLibrary& slLibraryToUse,
  394. int numInputChannels, int numOutputChannels,
  395. double sampleRateToUse, int bufferSizeToUse,
  396. int numBuffersToUse)
  397. : inputChannels (numInputChannels), outputChannels (numOutputChannels),
  398. sampleRate (sampleRateToUse), bufferSize (bufferSizeToUse), numBuffers (numBuffersToUse),
  399. running (false), audioProcessingEnabled (true), callback (nullptr)
  400. {
  401. jassert (numInputChannels > 0 || numOutputChannels > 0);
  402. if (CreateEngineFunc createEngine = (CreateEngineFunc) slLibraryToUse.getFunction ("slCreateEngine"))
  403. {
  404. SLObjectItf obj = nullptr;
  405. SLresult err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);
  406. if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  407. {
  408. if (obj != nullptr)
  409. (*obj)->Destroy (obj);
  410. return;
  411. }
  412. engine = SlRef<SLEngineItf_>::cast (SlObjectRef (obj));
  413. }
  414. if (outputChannels > 0)
  415. {
  416. SLObjectItf obj = nullptr;
  417. SLresult err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr);
  418. if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  419. {
  420. if (obj != nullptr)
  421. (*obj)->Destroy (obj);
  422. return;
  423. }
  424. outputMix = SlRef<SLOutputMixItf_>::cast (SlObjectRef (obj));
  425. }
  426. }
  427. virtual ~OpenSLSession() {}
  428. virtual bool openedOK() const { return (engine != nullptr && (outputChannels == 0 || (outputMix != nullptr))); }
  429. virtual void start() { stop(); jassert (callback.get() != nullptr); running = true; }
  430. virtual void stop() { running = false; }
  431. virtual bool setAudioPreprocessingEnabled (bool shouldEnable) = 0;
  432. virtual bool supportsFloatingPoint() const noexcept = 0;
  433. virtual int getXRunCount() const noexcept = 0;
  434. void setCallback (AudioIODeviceCallback* callbackToUse)
  435. {
  436. if (! running)
  437. {
  438. callback.set (callbackToUse);
  439. return;
  440. }
  441. // don't set callback to null! stop the playback instead!
  442. jassert (callbackToUse != nullptr);
  443. // spin-lock until we can set the callback
  444. while (true)
  445. {
  446. AudioIODeviceCallback* old = callback.get();
  447. if (old == callbackToUse)
  448. break;
  449. if (callback.compareAndSetBool (callbackToUse, old))
  450. break;
  451. Thread::sleep (1);
  452. }
  453. }
  454. void process (const float** inputChannelData, float** outputChannelData)
  455. {
  456. if (AudioIODeviceCallback* cb = callback.exchange(nullptr))
  457. {
  458. cb->audioDeviceIOCallback (inputChannelData, inputChannels, outputChannelData, outputChannels, bufferSize);
  459. callback.set (cb);
  460. }
  461. else
  462. {
  463. for (int i = 0; i < outputChannels; ++i)
  464. zeromem (outputChannelData[i], sizeof(float) * static_cast<size_t> (bufferSize));
  465. }
  466. }
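// Note the lock-free handoff above: process() temporarily exchanges the callback pointer with nullptr
// while the user callback runs, and setCallback() on another thread spins until its compare-and-set
// succeeds. If no callback is installed, the output buffers are simply cleared.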
  467. static OpenSLSession* create (DynamicLibrary& slLibrary,
  468. int numInputChannels, int numOutputChannels,
  469. double sampleRateToUse, int bufferSizeToUse,
  470. int numBuffersToUse);
  471. //==============================================================================
  472. typedef SLresult (*CreateEngineFunc)(SLObjectItf*, SLuint32, const SLEngineOption*, SLuint32, const SLInterfaceID*, const SLboolean*);
  473. //==============================================================================
  474. int inputChannels, outputChannels;
  475. double sampleRate;
  476. int bufferSize, numBuffers;
  477. bool running, audioProcessingEnabled;
  478. SlRef<SLEngineItf_> engine;
  479. SlRef<SLOutputMixItf_> outputMix;
  480. Atomic<AudioIODeviceCallback*> callback;
  481. };
  482. template <typename T>
  483. class OpenSLSessionT : public OpenSLSession
  484. {
  485. public:
  486. OpenSLSessionT (DynamicLibrary& slLibraryToUse,
  487. int numInputChannels, int numOutputChannels,
  488. double sampleRateToUse, int bufferSizeToUse,
  489. int numBuffersToUse)
  490. : OpenSLSession (slLibraryToUse, numInputChannels, numOutputChannels, sampleRateToUse, bufferSizeToUse, numBuffersToUse)
  491. {
  492. jassert (numInputChannels > 0 || numOutputChannels > 0);
  493. if (OpenSLSession::openedOK())
  494. {
  495. if (inputChannels > 0)
  496. {
  497. recorder.reset (new OpenSLQueueRunnerRecorder<T> (*this, inputChannels));
  498. if (! recorder->init())
  499. {
  500. recorder = nullptr;
  501. return;
  502. }
  503. }
  504. if (outputChannels > 0)
  505. {
  506. player.reset (new OpenSLQueueRunnerPlayer<T> (*this, outputChannels));
  507. if (! player->init())
  508. {
  509. player = nullptr;
  510. return;
  511. }
  512. const bool supportsUnderrunCount = (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 24);
  513. getUnderrunCount = supportsUnderrunCount ? getEnv()->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : 0;
  514. }
  515. }
  516. }
  517. bool openedOK() const override
  518. {
  519. return (OpenSLSession::openedOK() && (inputChannels == 0 || recorder != nullptr)
  520. && (outputChannels == 0 || player != nullptr));
  521. }
  522. void start() override
  523. {
  524. OpenSLSession::start();
  525. guard.set (0);
  526. if (inputChannels > 0)
  527. recorder->clear();
  528. if (outputChannels > 0)
  529. player->clear();
  530. // first enqueue all buffers
  531. for (int i = 0; i < numBuffers; ++i)
  532. doSomeWorkOnAudioThread();
  533. if (inputChannels > 0)
  534. recorder->setState (true);
  535. if (outputChannels > 0)
  536. player->setState (true);
  537. }
  538. void stop() override
  539. {
  540. OpenSLSession::stop();
  541. while (! guard.compareAndSetBool (1, 0))
  542. Thread::sleep (1);
  543. if (inputChannels > 0)
  544. recorder->setState (false);
  545. if (outputChannels > 0)
  546. player->setState (false);
  547. guard.set (0);
  548. }
  549. bool setAudioPreprocessingEnabled (bool shouldEnable) override
  550. {
  551. if (shouldEnable != audioProcessingEnabled)
  552. {
  553. audioProcessingEnabled = shouldEnable;
  554. if (recorder != nullptr)
  555. return recorder->setAudioPreprocessingEnabled (audioProcessingEnabled);
  556. }
  557. return true;
  558. }
  559. int getXRunCount() const noexcept override
  560. {
  561. if (player != nullptr && player->javaProxy != nullptr && getUnderrunCount != 0)
  562. return getEnv()->CallIntMethod (player->javaProxy, getUnderrunCount);
  563. return -1;
  564. }
  565. bool supportsFloatingPoint() const noexcept override { return (BufferHelpers<T>::isFloatingPoint != 0); }
  566. void doSomeWorkOnAudioThread()
  567. {
  568. // only the player or the recorder should enter this section at any time
  569. if (guard.compareAndSetBool (1, 0))
  570. {
  571. // are there enough buffers available to process some audio?
  572. if ((inputChannels == 0 || recorder->isBufferAvailable()) && (outputChannels == 0 || player->isBufferAvailable()))
  573. {
  574. T* recorderBuffer = (inputChannels > 0 ? recorder->getNextBuffer() : nullptr);
  575. T* playerBuffer = (outputChannels > 0 ? player->getNextBuffer() : nullptr);
  576. const float** inputChannelData = nullptr;
  577. float** outputChannelData = nullptr;
  578. if (recorderBuffer != nullptr)
  579. {
  580. BufferHelpers<T>::prepareCallbackBuffer (recorder->sampleBuffer, recorderBuffer);
  581. BufferHelpers<T>::convertFromOpenSL (recorderBuffer, recorder->sampleBuffer);
  582. inputChannelData = recorder->sampleBuffer.getArrayOfReadPointers();
  583. }
  584. if (playerBuffer != nullptr)
  585. {
  586. BufferHelpers<T>::prepareCallbackBuffer (player->sampleBuffer, playerBuffer);
  587. outputChannelData = player->sampleBuffer.getArrayOfWritePointers();
  588. }
  589. process (inputChannelData, outputChannelData);
  590. if (recorderBuffer != nullptr)
  591. recorder->enqueueBuffer();
  592. if (playerBuffer != nullptr)
  593. {
  594. BufferHelpers<T>::convertToOpenSL (player->sampleBuffer, playerBuffer);
  595. player->enqueueBuffer();
  596. }
  597. }
  598. guard.set (0);
  599. }
  600. }
  601. //==============================================================================
  602. std::unique_ptr<OpenSLQueueRunnerPlayer<T>> player;
  603. std::unique_ptr<OpenSLQueueRunnerRecorder<T>> recorder;
  604. Atomic<int> guard;
  605. jmethodID getUnderrunCount = 0;
  606. };
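// start() primes the queues by running doSomeWorkOnAudioThread() once per buffer before playback
// begins, so OpenSL always has numBuffers blocks in flight; afterwards every buffer-completion
// callback (player or recorder) re-enters doSomeWorkOnAudioThread(), and the 'guard' atomic ensures
// that only one of the two callbacks processes audio at any given moment.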
  607. //==============================================================================
  608. OpenSLAudioIODevice (const String& deviceName)
  609. : AudioIODevice (deviceName, openSLTypeName),
  610. actualBufferSize (0), sampleRate (0), audioBuffersToEnqueue (0),
  611. audioProcessingEnabled (true),
  612. callback (nullptr)
  613. {
  614. // OpenSL has piss-poor support for determining latency, so the only way I can find to
  615. // get a number for this is by asking the AudioTrack/AudioRecord classes..
  616. AndroidAudioIODevice javaDevice (deviceName);
  617. // this is a total guess about how to calculate the latency, but seems to vaguely agree
  618. // with the devices I've tested.. YMMV
  619. inputLatency = (javaDevice.minBufferSizeIn * 2) / 3;
  620. outputLatency = (javaDevice.minBufferSizeOut * 2) / 3;
  621. const int64 longestLatency = jmax (inputLatency, outputLatency);
  622. const int64 totalLatency = inputLatency + outputLatency;
  623. inputLatency = (int) ((longestLatency * inputLatency) / totalLatency) & ~15;
  624. outputLatency = (int) ((longestLatency * outputLatency) / totalLatency) & ~15;
  625. bool success = slLibrary.open ("libOpenSLES.so");
  626. // You can only create this class if you are sure that your hardware supports OpenSL
  627. jassert (success);
  628. ignoreUnused (success);
  629. }
  630. ~OpenSLAudioIODevice()
  631. {
  632. close();
  633. }
  634. bool openedOk() const { return session != nullptr; }
  635. StringArray getOutputChannelNames() override
  636. {
  637. StringArray s;
  638. s.add ("Left");
  639. s.add ("Right");
  640. return s;
  641. }
  642. StringArray getInputChannelNames() override
  643. {
  644. StringArray s;
  645. s.add ("Audio Input");
  646. return s;
  647. }
  648. Array<double> getAvailableSampleRates() override
  649. {
  650. // see https://developer.android.com/ndk/guides/audio/opensl-for-android.html
  651. static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0,
  652. 22050.0, 24000.0, 32000.0, 44100.0, 48000.0 };
  653. Array<double> retval (rates, numElementsInArray (rates));
  654. // make sure the native sample rate is part of the list
  655. double native = getNativeSampleRate();
  656. if (native != 0.0 && ! retval.contains (native))
  657. retval.add (native);
  658. return retval;
  659. }
  660. Array<int> getAvailableBufferSizes() override
  661. {
  662. // we need to offer the lowest possible buffer size which
  663. // is the native buffer size
  664. auto nativeBufferSize = getNativeBufferSize();
  665. auto minBuffersToQueue = getMinimumBuffersToEnqueue();
  666. auto maxBuffersToQueue = getMaximumBuffersToEnqueue();
  667. Array<int> retval;
  668. for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i)
  669. retval.add (i * nativeBufferSize);
  670. return retval;
  671. }
  672. String open (const BigInteger& inputChannels,
  673. const BigInteger& outputChannels,
  674. double requestedSampleRate,
  675. int bufferSize) override
  676. {
  677. close();
  678. lastError.clear();
  679. sampleRate = (int) requestedSampleRate;
  680. auto totalPreferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
  681. auto nativeBufferSize = getNativeBufferSize();
  682. bool useHighPerformanceAudioPath = canUseHighPerformanceAudioPath (totalPreferredBufferSize, sampleRate);
  683. audioBuffersToEnqueue = useHighPerformanceAudioPath ? (totalPreferredBufferSize / nativeBufferSize) : 1;
  684. actualBufferSize = totalPreferredBufferSize / audioBuffersToEnqueue;
  685. jassert ((actualBufferSize * audioBuffersToEnqueue) == totalPreferredBufferSize);
  686. activeOutputChans = outputChannels;
  687. activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
  688. int numOutputChannels = activeOutputChans.countNumberOfSetBits();
  689. activeInputChans = inputChannels;
  690. activeInputChans.setRange (1, activeInputChans.getHighestBit(), false);
  691. int numInputChannels = activeInputChans.countNumberOfSetBits();
  692. if (numInputChannels > 0 && (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio)))
  693. {
  694. // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
  695. // before trying to open an audio input device. This is not going to work!
  696. jassertfalse;
  697. lastError = "Error opening OpenSL input device: the app was not granted android.permission.RECORD_AUDIO";
  698. }
  699. session.reset (OpenSLSession::create (slLibrary, numInputChannels, numOutputChannels,
  700. sampleRate, actualBufferSize, audioBuffersToEnqueue));
  701. if (session != nullptr)
  702. session->setAudioPreprocessingEnabled (audioProcessingEnabled);
  703. else
  704. {
  705. if (numInputChannels > 0 && numOutputChannels > 0 && RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
  706. {
  707. // New versions of the Android emulator do not seem to support audio input anymore on OS X
  708. activeInputChans = BigInteger(0);
  709. numInputChannels = 0;
  710. session.reset (OpenSLSession::create (slLibrary, numInputChannels, numOutputChannels,
  711. sampleRate, actualBufferSize, audioBuffersToEnqueue));
  712. }
  713. }
  714. DBG ("OpenSL: numInputChannels = " << numInputChannels
  715. << ", numOutputChannels = " << numOutputChannels
  716. << ", nativeBufferSize = " << getNativeBufferSize()
  717. << ", nativeSampleRate = " << getNativeSampleRate()
  718. << ", actualBufferSize = " << actualBufferSize
  719. << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue
  720. << ", sampleRate = " << sampleRate
  721. << ", supportsFloatingPoint = " << (session != nullptr && session->supportsFloatingPoint() ? "true" : "false"));
  722. if (session == nullptr)
  723. lastError = "Unknown error initializing opensl session";
  724. deviceOpen = (session != nullptr);
  725. return lastError;
  726. }
  727. void close() override
  728. {
  729. stop();
  730. session = nullptr;
  731. callback = nullptr;
  732. }
  733. int getOutputLatencyInSamples() override { return outputLatency; }
  734. int getInputLatencyInSamples() override { return inputLatency; }
  735. bool isOpen() override { return deviceOpen; }
  736. int getCurrentBufferSizeSamples() override { return actualBufferSize * audioBuffersToEnqueue; }
  737. int getCurrentBitDepth() override { return (session != nullptr && session->supportsFloatingPoint() ? 32 : 16); }
  738. BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
  739. BigInteger getActiveInputChannels() const override { return activeInputChans; }
  740. String getLastError() override { return lastError; }
  741. bool isPlaying() override { return callback != nullptr; }
  742. int getXRunCount() const noexcept override { return (session != nullptr ? session->getXRunCount() : -1); }
  743. int getDefaultBufferSize() override
  744. {
  745. auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs
  746. : defaultBufferSizeForStandardLatencyDeviceMs);
  747. auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (defaultBufferLength, getCurrentSampleRate());
  748. return defaultBuffersToEnqueue * getNativeBufferSize();
  749. }
  750. double getCurrentSampleRate() override
  751. {
  752. return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate);
  753. }
  754. void start (AudioIODeviceCallback* newCallback) override
  755. {
  756. if (session != nullptr && callback != newCallback)
  757. {
  758. AudioIODeviceCallback* oldCallback = callback;
  759. if (newCallback != nullptr)
  760. newCallback->audioDeviceAboutToStart (this);
  761. if (oldCallback != nullptr)
  762. {
  763. // already running
  764. if (newCallback == nullptr)
  765. stop();
  766. else
  767. session->setCallback (newCallback);
  768. oldCallback->audioDeviceStopped();
  769. }
  770. else
  771. {
  772. jassert (newCallback != nullptr);
  773. // session hasn't started yet
  774. session->setCallback (newCallback);
  775. session->start();
  776. }
  777. callback = newCallback;
  778. }
  779. }
  780. void stop() override
  781. {
  782. if (session != nullptr && callback != nullptr)
  783. {
  784. callback = nullptr;
  785. session->stop();
  786. session->setCallback (nullptr);
  787. }
  788. }
  789. bool setAudioPreprocessingEnabled (bool shouldAudioProcessingBeEnabled) override
  790. {
  791. audioProcessingEnabled = shouldAudioProcessingBeEnabled;
  792. if (session != nullptr)
  793. session->setAudioPreprocessingEnabled (audioProcessingEnabled);
  794. return true;
  795. }
  796. static const char* const openSLTypeName;
  797. private:
  798. //==============================================================================
  799. friend class SLRealtimeThread;
  800. //==============================================================================
  801. DynamicLibrary slLibrary;
  802. int actualBufferSize, sampleRate, audioBuffersToEnqueue;
  803. int inputLatency, outputLatency;
  804. bool deviceOpen, audioProcessingEnabled;
  805. String lastError;
  806. BigInteger activeOutputChans, activeInputChans;
  807. AudioIODeviceCallback* callback;
  808. std::unique_ptr<OpenSLSession> session;
  809. enum
  810. {
  811. defaultBufferSizeForLowLatencyDeviceMs = 40,
  812. defaultBufferSizeForStandardLatencyDeviceMs = 100
  813. };
  814. static int getMinimumBuffersToEnqueue (double sampleRateToCheck = getNativeSampleRate())
  815. {
  816. if (canUseHighPerformanceAudioPath (getNativeBufferSize(), (int) sampleRateToCheck))
  817. {
  818. // see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp
  819. // "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required
  820. // for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one
  821. // is sufficient for lower latency."
  822. auto sdkVersion = getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT);
  823. return (sdkVersion >= 18 ? 1 : 2);
  824. }
  825. // we will not use the low-latency path so we can use the absolute minimum number of buffers
  826. // to queue
  827. return 1;
  828. }
  829. int getMaximumBuffersToEnqueue() noexcept
  830. {
  831. constexpr auto maxBufferSizeMs = 200;
  832. auto availableSampleRates = getAvailableSampleRates();
  833. auto maximumSampleRate = findMaximum(availableSampleRates.getRawDataPointer(), availableSampleRates.size());
  834. // ensure we don't return something crazy small
  835. return jmax (8, buffersToQueueForBufferDuration (maxBufferSizeMs, maximumSampleRate));
  836. }
  837. static int buffersToQueueForBufferDuration (int bufferDurationInMs, double sampleRate) noexcept
  838. {
  839. auto maxBufferFrames = static_cast<int> (std::ceil (bufferDurationInMs * sampleRate / 1000.0));
  840. auto maxNumBuffers = static_cast<int> (std::ceil (static_cast<double> (maxBufferFrames)
  841. / static_cast<double> (getNativeBufferSize())));
  842. return jmax (getMinimumBuffersToEnqueue (sampleRate), maxNumBuffers);
  843. }
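// Worked example (illustrative figures only): for a 40 ms target at 48 kHz with a native buffer of
// 192 frames, maxBufferFrames = ceil (40 * 48000 / 1000) = 1920 and maxNumBuffers = ceil (1920 / 192) = 10,
// so ten native-sized buffers would be queued (but never fewer than getMinimumBuffersToEnqueue()).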
  844. //==============================================================================
  845. static String audioManagerGetProperty (const String& property)
  846. {
  847. const LocalRef<jstring> jProperty (javaString (property));
  848. const LocalRef<jstring> text ((jstring) android.activity.callObjectMethod (JuceAppActivity.audioManagerGetProperty,
  849. jProperty.get()));
  850. if (text.get() != 0)
  851. return juceString (text);
  852. return {};
  853. }
  854. static bool androidHasSystemFeature (const String& property)
  855. {
  856. const LocalRef<jstring> jProperty (javaString (property));
  857. return android.activity.callBooleanMethod (JuceAppActivity.hasSystemFeature, jProperty.get());
  858. }
  859. static double getNativeSampleRate()
  860. {
  861. return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
  862. }
  863. static int getNativeBufferSize()
  864. {
  865. const int val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
  866. return val > 0 ? val : 512;
  867. }
  868. static bool isProAudioDevice()
  869. {
  870. return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported();
  871. }
  872. static bool hasLowLatencyAudioPath()
  873. {
  874. return androidHasSystemFeature ("android.hardware.audio.low_latency");
  875. }
  876. static bool canUseHighPerformanceAudioPath (int requestedBufferSize, int requestedSampleRate)
  877. {
  878. return ((requestedBufferSize % getNativeBufferSize()) == 0)
  879. && (requestedSampleRate == getNativeSampleRate())
  880. && isProAudioDevice();
  881. }
  882. //==============================================================================
  883. // Some minimum Sapa support to check if this device supports pro audio
  884. static bool isSamsungDevice()
  885. {
  886. return SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG");
  887. }
  888. static bool isSapaSupported()
  889. {
  890. static bool supported = isSamsungDevice() && DynamicLibrary().open ("libapa_jni.so");
  891. return supported;
  892. }
  893. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
  894. };
  895. OpenSLAudioIODevice::OpenSLSession* OpenSLAudioIODevice::OpenSLSession::create (DynamicLibrary& slLibrary,
  896. int numInputChannels, int numOutputChannels,
  897. double sampleRateToUse, int bufferSizeToUse,
  898. int numBuffersToUse)
  899. {
  900. std::unique_ptr<OpenSLSession> retval;
  901. auto sdkVersion = getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT);
  902. // SDK versions 21 and higher should natively support floating point...
  903. if (sdkVersion >= 21)
  904. {
  905. retval.reset (new OpenSLSessionT<float> (slLibrary, numInputChannels, numOutputChannels, sampleRateToUse,
  906. bufferSizeToUse, numBuffersToUse));
  907. // ...however, some devices lie, so retry without floating point
  908. if (retval != nullptr && (! retval->openedOK()))
  909. retval = nullptr;
  910. }
  911. if (retval == nullptr)
  912. {
  913. retval.reset (new OpenSLSessionT<int16> (slLibrary, numInputChannels, numOutputChannels, sampleRateToUse,
  914. bufferSizeToUse, numBuffersToUse));
  915. if (retval != nullptr && (! retval->openedOK()))
  916. retval = nullptr;
  917. }
  918. return retval.release();
  919. }
  920. //==============================================================================
  921. class OpenSLAudioDeviceType : public AudioIODeviceType
  922. {
  923. public:
  924. OpenSLAudioDeviceType() : AudioIODeviceType (OpenSLAudioIODevice::openSLTypeName) {}
  925. //==============================================================================
  926. void scanForDevices() override {}
  927. StringArray getDeviceNames (bool) const override { return StringArray (OpenSLAudioIODevice::openSLTypeName); }
  928. int getDefaultDeviceIndex (bool) const override { return 0; }
  929. int getIndexOfDevice (AudioIODevice* device, bool) const override { return device != nullptr ? 0 : -1; }
  930. bool hasSeparateInputsAndOutputs() const override { return false; }
  931. AudioIODevice* createDevice (const String& outputDeviceName,
  932. const String& inputDeviceName) override
  933. {
  934. std::unique_ptr<OpenSLAudioIODevice> dev;
  935. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  936. dev.reset (new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  937. : inputDeviceName));
  938. return dev.release();
  939. }
  940. static bool isOpenSLAvailable()
  941. {
  942. DynamicLibrary library;
  943. return library.open ("libOpenSLES.so");
  944. }
  945. private:
  946. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
  947. };
  948. const char* const OpenSLAudioIODevice::openSLTypeName = "Android OpenSL";
  949. //==============================================================================
  950. bool isOpenSLAvailable() { return OpenSLAudioDeviceType::isOpenSLAvailable(); }
  951. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES()
  952. {
  953. return isOpenSLAvailable() ? new OpenSLAudioDeviceType() : nullptr;
  954. }
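// A minimal application-side usage sketch (not part of this file; myCallback stands for whatever
// AudioIODeviceCallback the application provides). On Android, AudioDeviceManager creates this device
// type automatically when OpenSL is available, so the usual JUCE setup is enough to reach it:
//
//     juce::AudioDeviceManager manager;
//     manager.initialiseWithDefaultDevices (1, 2);   // one input channel, two output channels
//     manager.addAudioCallback (&myCallback);        // audio now runs through OpenSLAudioIODevice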
  955. //==============================================================================
  956. class SLRealtimeThread
  957. {
  958. public:
  959. static constexpr int numBuffers = 4;
  960. SLRealtimeThread()
  961. {
  962. if (auto createEngine = (OpenSLAudioIODevice::OpenSLSession::CreateEngineFunc) slLibrary.getFunction ("slCreateEngine"))
  963. {
  964. SLObjectItf obj = nullptr;
  965. auto err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);
  966. if (err != SL_RESULT_SUCCESS || obj == nullptr)
  967. return;
  968. if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  969. {
  970. (*obj)->Destroy (obj);
  971. return;
  972. }
  973. engine = SlRef<SLEngineItf_>::cast (SlObjectRef (obj));
  974. if (engine == nullptr)
  975. {
  976. (*obj)->Destroy (obj);
  977. return;
  978. }
  979. obj = nullptr;
  980. err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr);
  981. if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  982. {
  983. (*obj)->Destroy (obj);
  984. return;
  985. }
  986. outputMix = SlRef<SLOutputMixItf_>::cast (SlObjectRef (obj));
  987. if (outputMix == nullptr)
  988. {
  989. (*obj)->Destroy (obj);
  990. return;
  991. }
  992. SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (numBuffers)};
  993. SLDataLocator_OutputMix outputMixLocator = {SL_DATALOCATOR_OUTPUTMIX, outputMix};
  994. PCMDataFormatEx dataFormat;
  995. BufferHelpers<int16>::initPCMDataFormat (dataFormat, 1, OpenSLAudioIODevice::getNativeSampleRate());
  996. SLDataSource source = { &queueLocator, &dataFormat };
  997. SLDataSink sink = { &outputMixLocator, nullptr };
  998. SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid };
  999. SLboolean trueFlag = SL_BOOLEAN_TRUE;
  1000. obj = nullptr;
  1001. err = (*engine)->CreateAudioPlayer (engine, &obj, &source, &sink, 1, queueInterfaces, &trueFlag);
  1002. if (err != SL_RESULT_SUCCESS || obj == nullptr)
  1003. return;
  1004. if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  1005. {
  1006. (*obj)->Destroy (obj);
  1007. return;
  1008. }
  1009. player = SlRef<SLPlayItf_>::cast (SlObjectRef (obj));
  1010. if (player == nullptr)
  1011. {
  1012. (*obj)->Destroy (obj);
  1013. return;
  1014. }
  1015. queue = SlRef<SLAndroidSimpleBufferQueueItf_>::cast (player);
  1016. if (queue == nullptr)
  1017. return;
  1018. if ((*queue)->RegisterCallback (queue, staticFinished, this) != SL_RESULT_SUCCESS)
  1019. {
  1020. queue = nullptr;
  1021. return;
  1022. }
  1023. pthread_cond_init (&threadReady, nullptr);
  1024. pthread_mutex_init (&threadReadyMutex, nullptr);
  1025. }
  1026. }
  1027. bool isOK() const { return queue != nullptr; }
  1028. pthread_t startThread (void* (*entry) (void*), void* userPtr)
  1029. {
  1030. memset (buffer.get(), 0, static_cast<size_t> (sizeof (int16) * static_cast<size_t> (bufferSize * numBuffers)));
  1031. for (int i = 0; i < numBuffers; ++i)
  1032. {
  1033. int16* dst = buffer.get() + (bufferSize * i);
  1034. (*queue)->Enqueue (queue, dst, static_cast<SLuint32> (static_cast<size_t> (bufferSize) * sizeof (int16)));
  1035. }
  1036. pthread_mutex_lock (&threadReadyMutex);
  1037. threadEntryProc = entry;
  1038. threadUserPtr = userPtr;
  1039. (*player)->SetPlayState (player, SL_PLAYSTATE_PLAYING);
  1040. pthread_cond_wait (&threadReady, &threadReadyMutex);
  1041. pthread_mutex_unlock (&threadReadyMutex);
  1042. return threadID;
  1043. }
  1044. void finished()
  1045. {
  1046. if (threadEntryProc != nullptr)
  1047. {
  1048. pthread_mutex_lock (&threadReadyMutex);
  1049. threadID = pthread_self();
  1050. pthread_cond_signal (&threadReady);
  1051. pthread_mutex_unlock (&threadReadyMutex);
  1052. threadEntryProc (threadUserPtr);
  1053. threadEntryProc = nullptr;
  1054. (*player)->SetPlayState (player, SL_PLAYSTATE_STOPPED);
  1055. MessageManager::callAsync ([this] () { delete this; });
  1056. }
  1057. }
  1058. private:
  1059. //=============================================================================
  1060. static void staticFinished (SLAndroidSimpleBufferQueueItf, void* context)
  1061. {
  1062. static_cast<SLRealtimeThread*> (context)->finished();
  1063. }
  1064. //=============================================================================
  1065. DynamicLibrary slLibrary { "libOpenSLES.so" };
  1066. SlRef<SLEngineItf_> engine;
  1067. SlRef<SLOutputMixItf_> outputMix;
  1068. SlRef<SLPlayItf_> player;
  1069. SlRef<SLAndroidSimpleBufferQueueItf_> queue;
  1070. int bufferSize = OpenSLAudioIODevice::getNativeBufferSize();
  1071. HeapBlock<int16> buffer { HeapBlock<int16> (static_cast<size_t> (1 * bufferSize * numBuffers)) };
  1072. void* (*threadEntryProc) (void*) = nullptr;
  1073. void* threadUserPtr = nullptr;
  1074. pthread_cond_t threadReady;
  1075. pthread_mutex_t threadReadyMutex;
  1076. pthread_t threadID;
  1077. };
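// SLRealtimeThread creates a silent mono player whose buffer-completion callbacks arrive on OpenSL's
// high-priority audio thread. startThread() enqueues the initial buffers, starts playback and blocks on
// a condition variable until the first callback fires; finished() then records the callback thread's
// pthread id, signals the waiter and runs the user's entry function directly on that thread, which is
// how juce_createRealtimeAudioThread() below hands out a realtime-scheduled thread.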
  1078. pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr)
  1079. {
  1080. std::unique_ptr<SLRealtimeThread> thread (new SLRealtimeThread);
  1081. if (! thread->isOK())
  1082. return 0;
  1083. pthread_t threadID = thread->startThread (entry, userPtr);
  1084. // the thread will de-allocate itself
  1085. thread.release();
  1086. return threadID;
  1087. }
  1088. } // namespace juce