/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2022 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   The code included in this file is provided under the terms of the ISC license
   http://www.isc.org/downloads/software-support-policy/isc-license. Permission
   To use, copy, modify, and/or distribute this software for any purpose with or
   without fee is hereby granted provided that the above copyright notice and
   this permission notice appear in all copies.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

namespace juce
{
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK)
DECLARE_JNI_CLASS (AndroidAudioManager, "android/media/AudioManager")
#undef JNI_CLASS_MEMBERS
//==============================================================================
#ifndef SL_ANDROID_DATAFORMAT_PCM_EX
 #define SL_ANDROID_DATAFORMAT_PCM_EX             ((SLuint32) 0x00000004)
#endif

#ifndef SL_ANDROID_PCM_REPRESENTATION_FLOAT
 #define SL_ANDROID_PCM_REPRESENTATION_FLOAT      ((SLuint32) 0x00000003)
#endif

#ifndef SL_ANDROID_RECORDING_PRESET_UNPROCESSED
 #define SL_ANDROID_RECORDING_PRESET_UNPROCESSED  ((SLuint32) 0x00000005)
#endif

//==============================================================================
struct PCMDataFormatEx : SLDataFormat_PCM
{
    SLuint32 representation;
};

//==============================================================================
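// Interface IDs for the OpenSL types used below. They are spelled out here instead of
// referencing the SL_IID_* globals because libOpenSLES.so is only opened at runtime via
// DynamicLibrary, which avoids any link-time dependency on the system OpenSL ES library.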
template <typename T> struct IntfIID;

template <> struct IntfIID<SLObjectItf_>                    { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLEngineItf_>                    { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLOutputMixItf_>                 { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLPlayItf_>                      { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLRecordItf_>                    { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLAndroidSimpleBufferQueueItf_>  { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLAndroidConfigurationItf_>      { static SLInterfaceID_ iid; };

SLInterfaceID_ IntfIID<SLObjectItf_>::iid                   = { 0x79216360, 0xddd7, 0x11db, 0xac16, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLEngineItf_>::iid                   = { 0x8d97c260, 0xddd4, 0x11db, 0x958f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLOutputMixItf_>::iid                = { 0x97750f60, 0xddd7, 0x11db, 0x92b1, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLPlayItf_>::iid                     = { 0xef0bd9c0, 0xddd7, 0x11db, 0xbf49, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLRecordItf_>::iid                   = { 0xc5657aa0, 0xdddb, 0x11db, 0x82f7, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLAndroidSimpleBufferQueueItf_>::iid = { 0x198e4940, 0xc5d7, 0x11df, 0xa2a6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLAndroidConfigurationItf_>::iid     = { 0x89f6a7e0, 0xbeac, 0x11df, 0x8b5c, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
template <typename SLObjectType>
static void destroyObject (SLObjectType object)
{
    if (object != nullptr && *object != nullptr)
        (*object)->Destroy (object);
}

struct SLObjectItfFree
{
    void operator() (SLObjectItf obj) const noexcept
    {
        destroyObject (obj);
    }
};
//==============================================================================
// Some life-time and type management of OpenSL objects
class SlObjectRef
{
public:
    //==============================================================================
    SlObjectRef() noexcept {}
    SlObjectRef (const SlObjectRef& obj) noexcept : cb (obj.cb) {}
    SlObjectRef (SlObjectRef&& obj) noexcept : cb (std::move (obj.cb)) { obj.cb = nullptr; }
    explicit SlObjectRef (SLObjectItf o) : cb (new ControlBlock (o)) {}

    //==============================================================================
    SlObjectRef& operator= (const SlObjectRef& r) noexcept  { cb = r.cb; return *this; }
    SlObjectRef& operator= (SlObjectRef&& r) noexcept       { cb = std::move (r.cb); r.cb = nullptr; return *this; }
    SlObjectRef& operator= (std::nullptr_t) noexcept        { cb = nullptr; return *this; }

    //==============================================================================
    const SLObjectItf_* operator*() noexcept  { return *cb->ptr.get(); }
    SLObjectItf operator->() noexcept         { return (cb == nullptr ? nullptr : cb->ptr.get()); }
    operator SLObjectItf() noexcept           { return (cb == nullptr ? nullptr : cb->ptr.get()); }

    //==============================================================================
    bool operator== (nullptr_t) const noexcept  { return (cb == nullptr || cb->ptr == nullptr); }
    bool operator!= (nullptr_t) const noexcept  { return (cb != nullptr && cb->ptr != nullptr); }

private:
    //==============================================================================
    struct ControlBlock : ReferenceCountedObject
    {
        ControlBlock() = default;
        ControlBlock (SLObjectItf o) : ptr (o) {}

        std::unique_ptr<const SLObjectItf_* const, SLObjectItfFree> ptr;
    };

    ReferenceCountedObjectPtr<ControlBlock> cb;
};
template <typename T>
class SlRef : public SlObjectRef
{
public:
    //==============================================================================
    SlRef() noexcept {}
    SlRef (const SlRef& r) noexcept : SlObjectRef (r), type (r.type) {}
    SlRef (SlRef&& r) noexcept : SlObjectRef (std::move (r)), type (r.type) { r.type = nullptr; }

    //==============================================================================
    SlRef& operator= (const SlRef& r) noexcept   { SlObjectRef::operator= (r); type = r.type; return *this; }
    SlRef& operator= (SlRef&& r) noexcept        { SlObjectRef::operator= (std::move (r)); type = r.type; r.type = nullptr; return *this; }
    SlRef& operator= (std::nullptr_t) noexcept   { SlObjectRef::operator= (nullptr); type = nullptr; return *this; }

    //==============================================================================
    T* const operator*() noexcept    { return *type; }
    T* const* operator->() noexcept  { return type; }
    operator T* const*() noexcept    { return type; }

    //==============================================================================
    static SlRef cast (SlObjectRef& base)   { return SlRef (base); }
    static SlRef cast (SlObjectRef&& base)  { return SlRef (std::move (base)); }

private:
    SlRef (SlObjectRef& base) : SlObjectRef (base)
    {
        if (auto obj = SlObjectRef::operator->())
        {
            auto err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);

            if (type != nullptr && err == SL_RESULT_SUCCESS)
                return;
        }

        *this = nullptr;
    }

    SlRef (SlObjectRef&& base) : SlObjectRef (std::move (base))
    {
        if (auto obj = SlObjectRef::operator->())
        {
            auto err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);
            base = nullptr;

            if (type != nullptr && err == SL_RESULT_SUCCESS)
                return;
        }

        *this = nullptr;
    }

    T* const* type = nullptr;
};

//==============================================================================
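// BufferHelpers convert between OpenSL's interleaved native buffers and JUCE's
// non-interleaved float AudioBuffers. The int16 specialisation always converts;
// the float specialisation can point the callback buffer straight at the native
// buffer in the mono case, avoiding a copy.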
template <typename T> struct BufferHelpers {};

template <>
struct BufferHelpers<int16>
{
    enum { isFloatingPoint = 0 };

    static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
    {
        dataFormat.formatType     = SL_DATAFORMAT_PCM;
        dataFormat.numChannels    = (SLuint32) numChannels;
        dataFormat.samplesPerSec  = (SLuint32) (sampleRate * 1000);
        dataFormat.bitsPerSample  = SL_PCMSAMPLEFORMAT_FIXED_16;
        dataFormat.containerSize  = SL_PCMSAMPLEFORMAT_FIXED_16;
        dataFormat.channelMask    = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER
                                                       : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
        dataFormat.endianness     = SL_BYTEORDER_LITTLEENDIAN;
        dataFormat.representation = 0;
    }

    static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}

    using LittleEndianInt16 = AudioData::Format<AudioData::Int16,   AudioData::LittleEndian>;
    using NativeFloat32     = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

    static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
    {
        const auto numChannels = audioBuffer.getNumChannels();

        AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
                                        AudioData::NonInterleavedDest<NativeFloat32>    { audioBuffer.getArrayOfWritePointers(), numChannels },
                                        audioBuffer.getNumSamples());
    }

    static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
    {
        const auto numChannels = audioBuffer.getNumChannels();

        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
                                      AudioData::InterleavedDest<LittleEndianInt16>  { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
                                      audioBuffer.getNumSamples());
    }
};

template <>
struct BufferHelpers<float>
{
    enum { isFloatingPoint = 1 };

    static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
    {
        dataFormat.formatType     = SL_ANDROID_DATAFORMAT_PCM_EX;
        dataFormat.numChannels    = (SLuint32) numChannels;
        dataFormat.samplesPerSec  = (SLuint32) (sampleRate * 1000);
        dataFormat.bitsPerSample  = 32;
        dataFormat.containerSize  = 32;
        dataFormat.channelMask    = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER
                                                       : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
        dataFormat.endianness     = SL_BYTEORDER_LITTLEENDIAN;
        dataFormat.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
    }

    static void prepareCallbackBuffer (AudioBuffer<float>& audioBuffer, float* native)
    {
        if (audioBuffer.getNumChannels() == 1)
            audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
    }

    using LittleEndianFloat32 = AudioData::Format<AudioData::Float32, AudioData::LittleEndian>;
    using NativeFloat32       = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

    static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
    {
        const auto numChannels = audioBuffer.getNumChannels();

        if (numChannels == 1)
        {
            jassert (srcInterleaved == audioBuffer.getWritePointer (0));
            return;
        }

        AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianFloat32> { srcInterleaved, numChannels },
                                        AudioData::NonInterleavedDest<NativeFloat32>      { audioBuffer.getArrayOfWritePointers(), numChannels },
                                        audioBuffer.getNumSamples());
    }

    static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
    {
        const auto numChannels = audioBuffer.getNumChannels();

        if (numChannels == 1)
        {
            jassert (dstInterleaved == audioBuffer.getReadPointer (0));
            return;
        }

        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32>  { audioBuffer.getArrayOfReadPointers(), numChannels },
                                      AudioData::InterleavedDest<LittleEndianFloat32> { dstInterleaved, numChannels },
                                      audioBuffer.getNumSamples());
    }
};
//==============================================================================
using CreateEngineFunc = SLresult (*) (SLObjectItf*, SLuint32, const SLEngineOption*,
                                       SLuint32, const SLInterfaceID*, const SLboolean*);
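// A single OpenSL engine is created lazily on first use and shared by all sessions.
// slCreateEngine is looked up at runtime from libOpenSLES.so via DynamicLibrary rather
// than being linked against directly.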
struct OpenSLEngineHolder
{
    OpenSLEngineHolder()
    {
        if (auto createEngine = (CreateEngineFunc) slLibrary.getFunction ("slCreateEngine"))
        {
            SLObjectItf obj = nullptr;
            auto err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);

            if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr
                 || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            engine = SlRef<SLEngineItf_>::cast (SlObjectRef (obj));
        }
    }

    DynamicLibrary slLibrary { "libOpenSLES.so" };
    SlRef<SLEngineItf_> engine;
};

OpenSLEngineHolder& getEngineHolder()
{
    static OpenSLEngineHolder holder;
    return holder;
}
//==============================================================================
class SLRealtimeThread;

//==============================================================================
class OpenSLAudioIODevice : public AudioIODevice
{
public:
    //==============================================================================
    template <typename T>
    class OpenSLSessionT;

    //==============================================================================
    // CRTP
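    // OpenSLQueueRunner owns an Android simple buffer queue and a ring of native
    // buffers (owner.numBuffers blocks of owner.bufferSize frames per channel).
    // The derived Child class (player or recorder) supplies createPlayerOrRecorder()
    // and setState(), which the base reaches through crtp() without virtual dispatch.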
    template <typename T, class Child, typename RunnerObjectType>
    struct OpenSLQueueRunner
    {
        OpenSLQueueRunner (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : owner (sessionToUse),
              numChannels (numChannelsToUse),
              nativeBuffer (static_cast<size_t> (numChannels * owner.bufferSize * owner.numBuffers)),
              scratchBuffer (numChannelsToUse, owner.bufferSize),
              sampleBuffer (scratchBuffer.getArrayOfWritePointers(), numChannelsToUse, owner.bufferSize)
        {}

        ~OpenSLQueueRunner()
        {
            if (config != nullptr && javaProxy != nullptr)
            {
                javaProxy.clear();
                (*config)->ReleaseJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/ 1);
            }
        }

        bool init()
        {
            runner = crtp().createPlayerOrRecorder();

            if (runner == nullptr)
                return false;

            const bool supportsJavaProxy = (getAndroidSDKVersion() >= 24);

            if (supportsJavaProxy)
            {
                // may return nullptr on some platforms - that's ok
                config = SlRef<SLAndroidConfigurationItf_>::cast (runner);

                if (config != nullptr)
                {
                    jobject audioRoutingJni;
                    auto status = (*config)->AcquireJavaProxy (config, /*SL_ANDROID_JAVA_PROXY_ROUTING*/ 1,
                                                               &audioRoutingJni);

                    if (status == SL_RESULT_SUCCESS && audioRoutingJni != nullptr)
                        javaProxy = GlobalRef (LocalRef<jobject> (getEnv()->NewLocalRef (audioRoutingJni)));
                }
            }

            queue = SlRef<SLAndroidSimpleBufferQueueItf_>::cast (runner);

            if (queue == nullptr)
                return false;

            return ((*queue)->RegisterCallback (queue, staticFinished, this) == SL_RESULT_SUCCESS);
        }

        void clear()
        {
            nextBlock.set (0);
            numBlocksOut.set (0);

            zeromem (nativeBuffer.get(), static_cast<size_t> (owner.bufferSize * numChannels * owner.numBuffers) * sizeof (T));
            scratchBuffer.clear();
            (*queue)->Clear (queue);
        }

        void enqueueBuffer()
        {
            (*queue)->Enqueue (queue, getCurrentBuffer(), static_cast<SLuint32> (getBufferSizeInSamples() * sizeof (T)));
            ++numBlocksOut;
        }

        bool isBufferAvailable() const         { return (numBlocksOut.get() < owner.numBuffers); }
        T* getNextBuffer()                     { nextBlock.set ((nextBlock.get() + 1) % owner.numBuffers); return getCurrentBuffer(); }
        T* getCurrentBuffer()                  { return nativeBuffer.get() + (static_cast<size_t> (nextBlock.get()) * getBufferSizeInSamples()); }
        size_t getBufferSizeInSamples() const  { return static_cast<size_t> (owner.bufferSize * numChannels); }

        void finished (SLAndroidSimpleBufferQueueItf)
        {
            --numBlocksOut;
            owner.doSomeWorkOnAudioThread();
        }

        static void staticFinished (SLAndroidSimpleBufferQueueItf caller, void* pContext)
        {
            reinterpret_cast<OpenSLQueueRunner*> (pContext)->finished (caller);
        }

        // get the "this" pointer for CRTP
        Child& crtp()              { return * ((Child*) this); }
        const Child& crtp() const  { return * ((Child*) this); }

        OpenSLSessionT<T>& owner;

        SlRef<RunnerObjectType> runner;
        SlRef<SLAndroidSimpleBufferQueueItf_> queue;
        SlRef<SLAndroidConfigurationItf_> config;
        GlobalRef javaProxy;

        int numChannels;

        HeapBlock<T> nativeBuffer;
        AudioBuffer<float> scratchBuffer, sampleBuffer;

        Atomic<int> nextBlock { 0 }, numBlocksOut { 0 };
    };

    //==============================================================================
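    // Concrete runners: the player renders the queue into an output mix, the recorder
    // fills it from the default audio input. Both request only the buffer-queue
    // interface as required; the Android configuration interface is optional.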
    template <typename T>
    struct OpenSLQueueRunnerPlayer : OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_>
    {
        using Base = OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_>;

        OpenSLQueueRunnerPlayer (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : Base (sessionToUse, numChannelsToUse)
        {}

        SlRef<SLPlayItf_> createPlayerOrRecorder()
        {
            SLDataLocator_AndroidSimpleBufferQueue queueLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers) };
            SLDataLocator_OutputMix outputMix = { SL_DATALOCATOR_OUTPUTMIX, Base::owner.outputMix };

            PCMDataFormatEx dataFormat;
            BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);

            SLDataSource source = { &queueLocator, &dataFormat };
            SLDataSink   sink   = { &outputMix, nullptr };

            SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid, &IntfIID<SLAndroidConfigurationItf_>::iid };
            SLboolean interfaceRequired[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE };

            SLObjectItf obj = nullptr;

            auto& holder = getEngineHolder();

            if (auto e = *holder.engine)
            {
                auto status = e->CreateAudioPlayer (holder.engine, &obj, &source, &sink, 2,
                                                    queueInterfaces, interfaceRequired);

                if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
                {
                    destroyObject (obj);
                    return {};
                }
            }

            return SlRef<SLPlayItf_>::cast (SlObjectRef (obj));
        }

        void setState (bool running)  { (*Base::runner)->SetPlayState (Base::runner, running ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_STOPPED); }
    };
    template <typename T>
    struct OpenSLQueueRunnerRecorder : public OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_>
    {
        using Base = OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_>;

        OpenSLQueueRunnerRecorder (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : Base (sessionToUse, numChannelsToUse)
        {}

        SlRef<SLRecordItf_> createPlayerOrRecorder()
        {
            SLDataLocator_IODevice ioDeviceLocator = { SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr };
            SLDataLocator_AndroidSimpleBufferQueue queueLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers) };

            PCMDataFormatEx dataFormat;
            BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);

            SLDataSource source = { &ioDeviceLocator, nullptr };
            SLDataSink   sink   = { &queueLocator, &dataFormat };

            SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid, &IntfIID<SLAndroidConfigurationItf_>::iid };
            SLboolean interfaceRequired[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE };

            SLObjectItf obj = nullptr;

            auto& holder = getEngineHolder();

            if (auto e = *holder.engine)
            {
                auto status = e->CreateAudioRecorder (holder.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired);

                if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
                {
                    destroyObject (obj);
                    return {};
                }
            }

            return SlRef<SLRecordItf_>::cast (SlObjectRef (obj));
        }
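        // Maps the "audio preprocessing" flag onto an Android recording preset: GENERIC
        // leaves the platform's input processing (AGC, echo cancellation) in place, while
        // UNPROCESSED (or VOICE_RECOGNITION on SDKs older than 25) requests a rawer signal.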
        bool setAudioPreprocessingEnabled (bool shouldEnable)
        {
            if (Base::config != nullptr)
            {
                const bool supportsUnprocessed = (getAndroidSDKVersion() >= 25);
                const SLuint32 recordingPresetValue
                    = (shouldEnable ? SL_ANDROID_RECORDING_PRESET_GENERIC
                                    : (supportsUnprocessed ? SL_ANDROID_RECORDING_PRESET_UNPROCESSED
                                                           : SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION));

                auto status = (*Base::config)->SetConfiguration (Base::config, SL_ANDROID_KEY_RECORDING_PRESET,
                                                                 &recordingPresetValue, sizeof (recordingPresetValue));

                return (status == SL_RESULT_SUCCESS);
            }

            return false;
        }

        void setState (bool running)  { (*Base::runner)->SetRecordState (Base::runner, running ? SL_RECORDSTATE_RECORDING
                                                                                               : SL_RECORDSTATE_STOPPED); }
    };
    //==============================================================================
    class OpenSLSession
    {
    public:
        OpenSLSession (int numInputChannels, int numOutputChannels,
                       double sampleRateToUse, int bufferSizeToUse,
                       int numBuffersToUse)
            : inputChannels (numInputChannels), outputChannels (numOutputChannels),
              sampleRate (sampleRateToUse), bufferSize (bufferSizeToUse), numBuffers (numBuffersToUse)
        {
            jassert (numInputChannels > 0 || numOutputChannels > 0);

            if (outputChannels > 0)
            {
                auto& holder = getEngineHolder();
                SLObjectItf obj = nullptr;

                auto err = (*holder.engine)->CreateOutputMix (holder.engine, &obj, 0, nullptr, nullptr);

                if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr
                     || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
                {
                    destroyObject (obj);
                    return;
                }

                outputMix = SlRef<SLOutputMixItf_>::cast (SlObjectRef (obj));
            }
        }

        virtual ~OpenSLSession() {}

        virtual bool openedOK() const  { return (outputChannels == 0 || outputMix != nullptr); }
        virtual void start()           { stop(); jassert (callback.get() != nullptr); running = true; }
        virtual void stop()            { running = false; }

        virtual bool setAudioPreprocessingEnabled (bool shouldEnable) = 0;
        virtual bool supportsFloatingPoint() const noexcept = 0;
        virtual int getXRunCount() const noexcept = 0;

        void setCallback (AudioIODeviceCallback* callbackToUse)
        {
            if (! running)
            {
                callback.set (callbackToUse);
                return;
            }

            // don't set callback to null! stop the playback instead!
            jassert (callbackToUse != nullptr);

            // spin-lock until we can set the callback
            for (;;)
            {
                auto old = callback.get();

                if (old == callbackToUse)
                    break;

                if (callback.compareAndSetBool (callbackToUse, old))
                    break;

                Thread::sleep (1);
            }
        }
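        // The audio thread takes the callback pointer atomically, swapping in nullptr
        // while it runs, so setCallback() can never hand over a callback that is still
        // mid-flight; if no callback is installed the output is simply silenced.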
        void process (const float** inputChannelData, float** outputChannelData)
        {
            if (auto* cb = callback.exchange (nullptr))
            {
                cb->audioDeviceIOCallbackWithContext (inputChannelData, inputChannels, outputChannelData, outputChannels, bufferSize, {});
                callback.set (cb);
            }
            else
            {
                for (int i = 0; i < outputChannels; ++i)
                    zeromem (outputChannelData[i], sizeof (float) * static_cast<size_t> (bufferSize));
            }
        }

        static OpenSLSession* create (int numInputChannels, int numOutputChannels,
                                      double sampleRateToUse, int bufferSizeToUse,
                                      int numBuffersToUse);

        //==============================================================================
        int inputChannels, outputChannels;
        double sampleRate;
        int bufferSize, numBuffers;
        bool running = false, audioProcessingEnabled = true;

        SlRef<SLOutputMixItf_> outputMix;

        Atomic<AudioIODeviceCallback*> callback { nullptr };
    };
    template <typename T>
    class OpenSLSessionT : public OpenSLSession
    {
    public:
        OpenSLSessionT (int numInputChannels, int numOutputChannels,
                        double sampleRateToUse, int bufferSizeToUse,
                        int numBuffersToUse)
            : OpenSLSession (numInputChannels, numOutputChannels,
                             sampleRateToUse, bufferSizeToUse, numBuffersToUse)
        {
            jassert (numInputChannels > 0 || numOutputChannels > 0);

            if (OpenSLSession::openedOK())
            {
                if (inputChannels > 0)
                {
                    recorder.reset (new OpenSLQueueRunnerRecorder<T> (*this, inputChannels));

                    if (! recorder->init())
                    {
                        recorder = nullptr;
                        return;
                    }
                }

                if (outputChannels > 0)
                {
                    player.reset (new OpenSLQueueRunnerPlayer<T> (*this, outputChannels));

                    if (! player->init())
                    {
                        player = nullptr;
                        return;
                    }

                    const bool supportsUnderrunCount = (getAndroidSDKVersion() >= 24);
                    getUnderrunCount = supportsUnderrunCount ? getEnv()->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : nullptr;
                }
            }
        }

        bool openedOK() const override
        {
            return OpenSLSession::openedOK() && (inputChannels == 0 || recorder != nullptr)
                                             && (outputChannels == 0 || player != nullptr);
        }

        void start() override
        {
            OpenSLSession::start();

            guard.set (0);

            if (inputChannels > 0)
                recorder->clear();

            if (outputChannels > 0)
                player->clear();

            // first enqueue all buffers
            for (int i = 0; i < numBuffers; ++i)
                doSomeWorkOnAudioThread();

            if (inputChannels > 0)
                recorder->setState (true);

            if (outputChannels > 0)
                player->setState (true);
        }

        void stop() override
        {
            OpenSLSession::stop();

            while (! guard.compareAndSetBool (1, 0))
                Thread::sleep (1);

            if (inputChannels > 0)
                recorder->setState (false);

            if (outputChannels > 0)
                player->setState (false);

            guard.set (0);
        }

        bool setAudioPreprocessingEnabled (bool shouldEnable) override
        {
            if (shouldEnable != audioProcessingEnabled)
            {
                audioProcessingEnabled = shouldEnable;

                if (recorder != nullptr)
                    return recorder->setAudioPreprocessingEnabled (audioProcessingEnabled);
            }

            return true;
        }
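        // On SDK 24+ the routing Java proxy obtained from SLAndroidConfigurationItf is
        // treated as an android.media.AudioTrack, whose getUnderrunCount() reports xruns;
        // -1 means the information is not available.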
        int getXRunCount() const noexcept override
        {
            if (player != nullptr && player->javaProxy != nullptr && getUnderrunCount != nullptr)
                return getEnv()->CallIntMethod (player->javaProxy, getUnderrunCount);

            return -1;
        }

        bool supportsFloatingPoint() const noexcept override  { return (BufferHelpers<T>::isFloatingPoint != 0); }

        void doSomeWorkOnAudioThread()
        {
            // only the player or the recorder should enter this section at any time
            if (guard.compareAndSetBool (1, 0))
            {
                // are there enough buffers available to process some audio
                if ((inputChannels == 0 || recorder->isBufferAvailable()) && (outputChannels == 0 || player->isBufferAvailable()))
                {
                    T* recorderBuffer = (inputChannels  > 0 ? recorder->getNextBuffer() : nullptr);
                    T* playerBuffer   = (outputChannels > 0 ? player->getNextBuffer()   : nullptr);

                    const float** inputChannelData = nullptr;
                    float** outputChannelData = nullptr;

                    if (recorderBuffer != nullptr)
                    {
                        BufferHelpers<T>::prepareCallbackBuffer (recorder->sampleBuffer, recorderBuffer);
                        BufferHelpers<T>::convertFromOpenSL (recorderBuffer, recorder->sampleBuffer);

                        inputChannelData = recorder->sampleBuffer.getArrayOfReadPointers();
                    }

                    if (playerBuffer != nullptr)
                    {
                        BufferHelpers<T>::prepareCallbackBuffer (player->sampleBuffer, playerBuffer);
                        outputChannelData = player->sampleBuffer.getArrayOfWritePointers();
                    }

                    process (inputChannelData, outputChannelData);

                    if (recorderBuffer != nullptr)
                        recorder->enqueueBuffer();

                    if (playerBuffer != nullptr)
                    {
                        BufferHelpers<T>::convertToOpenSL (player->sampleBuffer, playerBuffer);
                        player->enqueueBuffer();
                    }
                }

                guard.set (0);
            }
        }

        //==============================================================================
        std::unique_ptr<OpenSLQueueRunnerPlayer<T>> player;
        std::unique_ptr<OpenSLQueueRunnerRecorder<T>> recorder;
        Atomic<int> guard;
        jmethodID getUnderrunCount = nullptr;
    };
    //==============================================================================
    OpenSLAudioIODevice (const String& deviceName) : AudioIODevice (deviceName, openSLTypeName)
    {
        // OpenSL has piss-poor support for determining latency, so the only way I can find to
        // get a number for this is by asking the AudioTrack/AudioRecord classes..
        AndroidAudioIODevice javaDevice (deviceName);

        // this is a total guess about how to calculate the latency, but seems to vaguely agree
        // with the devices I've tested.. YMMV
        inputLatency  = (javaDevice.minBufferSizeIn  * 2) / 3;
        outputLatency = (javaDevice.minBufferSizeOut * 2) / 3;

        const int64 longestLatency = jmax (inputLatency, outputLatency);
        const int64 totalLatency = inputLatency + outputLatency;

        inputLatency  = (int) ((longestLatency * inputLatency)  / totalLatency) & ~15;
        outputLatency = (int) ((longestLatency * outputLatency) / totalLatency) & ~15;

        // You can only create this class if you are sure that your hardware supports OpenSL
        jassert (getEngineHolder().slLibrary.getNativeHandle() != nullptr);
    }

    ~OpenSLAudioIODevice() override
    {
        close();
    }
    bool openedOk() const  { return session != nullptr; }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;
        s.add ("Audio Input");
        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        // see https://developer.android.com/ndk/guides/audio/opensl-for-android.html
        static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0,
                                        22050.0, 24000.0, 32000.0, 44100.0, 48000.0 };

        Array<double> retval (rates, numElementsInArray (rates));

        // make sure the native sample rate is part of the list
        double native = AndroidHighPerformanceAudioHelpers::getNativeSampleRate();

        if (native != 0.0 && ! retval.contains (native))
            retval.add (native);

        return retval;
    }

    Array<int> getAvailableBufferSizes() override
    {
        return AndroidHighPerformanceAudioHelpers::getAvailableBufferSizes (AndroidHighPerformanceAudioHelpers::getNativeBufferSizeHint(),
                                                                            getAvailableSampleRates());
    }
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        lastError.clear();

        sampleRate = (int) (requestedSampleRate > 0 ? requestedSampleRate : AndroidHighPerformanceAudioHelpers::getNativeSampleRate());
        auto preferredBufferSize = (bufferSize > 0) ? bufferSize : getDefaultBufferSize();
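        // If the requested block size lines up with the device's native burst size, the
        // block is split into several native-sized buffers which are all kept enqueued,
        // letting Android route the stream through its low-latency path.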
        audioBuffersToEnqueue = [this, preferredBufferSize]
        {
            using namespace AndroidHighPerformanceAudioHelpers;

            auto nativeBufferSize = getNativeBufferSizeHint();

            if (canUseHighPerformanceAudioPath (nativeBufferSize, preferredBufferSize, sampleRate))
                return preferredBufferSize / nativeBufferSize;

            return 1;
        }();

        actualBufferSize = preferredBufferSize / audioBuffersToEnqueue;
        jassert ((actualBufferSize * audioBuffersToEnqueue) == preferredBufferSize);

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        auto numOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (1, activeInputChans.getHighestBit(), false);
        auto numInputChannels = activeInputChans.countNumberOfSetBits();

        if (numInputChannels > 0 && (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio)))
        {
            // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
            // before trying to open an audio input device. This is not going to work!
            jassertfalse;
            lastError = "Error opening OpenSL input device: the app was not granted android.permission.RECORD_AUDIO";
        }

        session.reset (OpenSLSession::create (numInputChannels, numOutputChannels,
                                              sampleRate, actualBufferSize, audioBuffersToEnqueue));
        if (session != nullptr)
        {
            session->setAudioPreprocessingEnabled (audioProcessingEnabled);
        }
        else
        {
            if (numInputChannels > 0 && numOutputChannels > 0 && RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // New versions of the Android emulator do not seem to support audio input anymore on OS X
                activeInputChans = BigInteger (0);
                numInputChannels = 0;

                session.reset (OpenSLSession::create (numInputChannels, numOutputChannels,
                                                      sampleRate, actualBufferSize, audioBuffersToEnqueue));
            }
        }

        DBG ("OpenSL: numInputChannels = " << numInputChannels
             << ", numOutputChannels = " << numOutputChannels
             << ", nativeBufferSize = " << AndroidHighPerformanceAudioHelpers::getNativeBufferSizeHint()
             << ", nativeSampleRate = " << AndroidHighPerformanceAudioHelpers::getNativeSampleRate()
             << ", actualBufferSize = " << actualBufferSize
             << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue
             << ", sampleRate = " << sampleRate
             << ", supportsFloatingPoint = " << (session != nullptr && session->supportsFloatingPoint() ? "true" : "false"));

        if (session == nullptr)
            lastError = "Unknown error initializing opensl session";

        deviceOpen = (session != nullptr);
        return lastError;
    }
    void close() override
    {
        stop();
        session = nullptr;
        callback = nullptr;
    }

    int getOutputLatencyInSamples() override             { return outputLatency; }
    int getInputLatencyInSamples() override              { return inputLatency; }
    bool isOpen() override                               { return deviceOpen; }
    int getCurrentBufferSizeSamples() override           { return actualBufferSize * audioBuffersToEnqueue; }
    int getCurrentBitDepth() override                    { return (session != nullptr && session->supportsFloatingPoint() ? 32 : 16); }
    BigInteger getActiveOutputChannels() const override  { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override   { return activeInputChans; }
    String getLastError() override                       { return lastError; }
    bool isPlaying() override                            { return callback != nullptr; }
    int getXRunCount() const noexcept override           { return (session != nullptr ? session->getXRunCount() : -1); }

    int getDefaultBufferSize() override
    {
        return AndroidHighPerformanceAudioHelpers::getDefaultBufferSize (AndroidHighPerformanceAudioHelpers::getNativeBufferSizeHint(),
                                                                         getCurrentSampleRate());
    }

    double getCurrentSampleRate() override
    {
        return (sampleRate == 0.0 ? AndroidHighPerformanceAudioHelpers::getNativeSampleRate() : sampleRate);
    }

    void start (AudioIODeviceCallback* newCallback) override
    {
        if (session != nullptr && callback != newCallback)
        {
            auto oldCallback = callback;

            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            if (oldCallback != nullptr)
            {
                // already running
                if (newCallback == nullptr)
                    stop();
                else
                    session->setCallback (newCallback);

                oldCallback->audioDeviceStopped();
            }
            else
            {
                jassert (newCallback != nullptr);

                // session hasn't started yet
                session->setCallback (newCallback);
                session->start();
            }

            callback = newCallback;
        }
    }

    void stop() override
    {
        if (session != nullptr && callback != nullptr)
        {
            callback = nullptr;
            session->stop();
            session->setCallback (nullptr);
        }
    }

    bool setAudioPreprocessingEnabled (bool shouldAudioProcessingBeEnabled) override
    {
        audioProcessingEnabled = shouldAudioProcessingBeEnabled;

        if (session != nullptr)
            session->setAudioPreprocessingEnabled (audioProcessingEnabled);

        return true;
    }
    static const char* const openSLTypeName;

private:
    //==============================================================================
    friend class SLRealtimeThread;

    //==============================================================================
    int actualBufferSize = 0, sampleRate = 0, audioBuffersToEnqueue = 0;
    int inputLatency, outputLatency;
    bool deviceOpen = false, audioProcessingEnabled = true;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    AudioIODeviceCallback* callback = nullptr;

    std::unique_ptr<OpenSLSession> session;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
};
OpenSLAudioIODevice::OpenSLSession* OpenSLAudioIODevice::OpenSLSession::create (int numInputChannels, int numOutputChannels,
                                                                                double sampleRateToUse, int bufferSizeToUse,
                                                                                int numBuffersToUse)
{
    std::unique_ptr<OpenSLSession> retval;
    auto sdkVersion = getAndroidSDKVersion();

    // SDK versions 21 and higher should natively support floating point...
    if (sdkVersion >= 21)
    {
        retval.reset (new OpenSLSessionT<float> (numInputChannels, numOutputChannels, sampleRateToUse,
                                                 bufferSizeToUse, numBuffersToUse));

        // ...however, some devices lie so re-try without floating point
        if (retval != nullptr && (! retval->openedOK()))
            retval = nullptr;
    }

    if (retval == nullptr)
    {
        retval.reset (new OpenSLSessionT<int16> (numInputChannels, numOutputChannels, sampleRateToUse,
                                                 bufferSizeToUse, numBuffersToUse));

        if (retval != nullptr && (! retval->openedOK()))
            retval = nullptr;
    }

    return retval.release();
}
//==============================================================================
class OpenSLAudioDeviceType : public AudioIODeviceType
{
public:
    OpenSLAudioDeviceType() : AudioIODeviceType (OpenSLAudioIODevice::openSLTypeName) {}

    //==============================================================================
    void scanForDevices() override {}

    StringArray getDeviceNames (bool) const override                   { return StringArray (OpenSLAudioIODevice::openSLTypeName); }
    int getDefaultDeviceIndex (bool) const override                    { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool) const override  { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const override                  { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName) override
    {
        std::unique_ptr<OpenSLAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
            dev.reset (new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                              : inputDeviceName));

        return dev.release();
    }

    static bool isOpenSLAvailable()
    {
        DynamicLibrary library;
        return library.open ("libOpenSLES.so");
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
};

const char* const OpenSLAudioIODevice::openSLTypeName = "Android OpenSL";

//==============================================================================
bool isOpenSLAvailable()  { return OpenSLAudioDeviceType::isOpenSLAvailable(); }

//==============================================================================
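// SLRealtimeThread obtains a thread with realtime audio priority by creating a silent
// OpenSL player: buffer-queue callbacks arrive on a high-priority thread owned by the
// audio system, and that thread is handed over to the caller's entry function instead
// of rendering audio.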
class SLRealtimeThread
{
public:
    static constexpr int numBuffers = 4;

    SLRealtimeThread()
    {
        if (auto createEngine = (CreateEngineFunc) slLibrary.getFunction ("slCreateEngine"))
        {
            SLObjectItf obj = nullptr;
            auto err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);

            if (err != SL_RESULT_SUCCESS || obj == nullptr || *obj == nullptr)
                return;

            if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            engine = SlRef<SLEngineItf_>::cast (SlObjectRef (obj));

            if (engine == nullptr)
            {
                destroyObject (obj);
                return;
            }

            obj = nullptr;
            err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr);

            if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            outputMix = SlRef<SLOutputMixItf_>::cast (SlObjectRef (obj));

            if (outputMix == nullptr)
            {
                destroyObject (obj);
                return;
            }

            SLDataLocator_AndroidSimpleBufferQueue queueLocator = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (numBuffers) };
            SLDataLocator_OutputMix outputMixLocator = { SL_DATALOCATOR_OUTPUTMIX, outputMix };

            PCMDataFormatEx dataFormat;
            BufferHelpers<int16>::initPCMDataFormat (dataFormat, 1, AndroidHighPerformanceAudioHelpers::getNativeSampleRate());

            SLDataSource source = { &queueLocator, &dataFormat };
            SLDataSink   sink   = { &outputMixLocator, nullptr };

            SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid };
            SLboolean trueFlag = SL_BOOLEAN_TRUE;

            obj = nullptr;
            err = (*engine)->CreateAudioPlayer (engine, &obj, &source, &sink, 1, queueInterfaces, &trueFlag);

            if (err != SL_RESULT_SUCCESS || obj == nullptr)
                return;

            if ((*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                destroyObject (obj);
                return;
            }

            player = SlRef<SLPlayItf_>::cast (SlObjectRef (obj));

            if (player == nullptr)
            {
                destroyObject (obj);
                return;
            }

            queue = SlRef<SLAndroidSimpleBufferQueueItf_>::cast (player);

            if (queue == nullptr)
                return;

            if ((*queue)->RegisterCallback (queue, staticFinished, this) != SL_RESULT_SUCCESS)
            {
                queue = nullptr;
                return;
            }

            pthread_cond_init (&threadReady, nullptr);
            pthread_mutex_init (&threadReadyMutex, nullptr);
        }
    }

    bool isOk() const  { return queue != nullptr; }
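    // Fills the queue with silent buffers, starts playback, and blocks on a condition
    // variable until the first buffer-queue callback fires; that callback records its
    // own pthread id and signals, so the returned id is the audio thread that will run
    // the supplied entry function.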
    pthread_t startThread (void* (*entry) (void*), void* userPtr)
    {
        memset (buffer.get(), 0, static_cast<size_t> (sizeof (int16) * static_cast<size_t> (bufferSize * numBuffers)));

        for (int i = 0; i < numBuffers; ++i)
        {
            int16* dst = buffer.get() + (bufferSize * i);
            (*queue)->Enqueue (queue, dst, static_cast<SLuint32> (static_cast<size_t> (bufferSize) * sizeof (int16)));
        }

        pthread_mutex_lock (&threadReadyMutex);

        threadEntryProc = entry;
        threadUserPtr   = userPtr;

        (*player)->SetPlayState (player, SL_PLAYSTATE_PLAYING);

        pthread_cond_wait (&threadReady, &threadReadyMutex);
        pthread_mutex_unlock (&threadReadyMutex);

        return threadID;
    }

    void finished()
    {
        if (threadEntryProc != nullptr)
        {
            pthread_mutex_lock (&threadReadyMutex);
            threadID = pthread_self();
            pthread_cond_signal (&threadReady);
            pthread_mutex_unlock (&threadReadyMutex);

            threadEntryProc (threadUserPtr);
            threadEntryProc = nullptr;

            (*player)->SetPlayState (player, SL_PLAYSTATE_STOPPED);
            MessageManager::callAsync ([this]() { delete this; });
        }
    }

private:
    //==============================================================================
    static void staticFinished (SLAndroidSimpleBufferQueueItf, void* context)
    {
        static_cast<SLRealtimeThread*> (context)->finished();
    }

    //==============================================================================
    DynamicLibrary slLibrary { "libOpenSLES.so" };

    SlRef<SLEngineItf_>    engine;
    SlRef<SLOutputMixItf_> outputMix;
    SlRef<SLPlayItf_>      player;
    SlRef<SLAndroidSimpleBufferQueueItf_> queue;

    int bufferSize = AndroidHighPerformanceAudioHelpers::getNativeBufferSizeHint();
    HeapBlock<int16> buffer { HeapBlock<int16> (static_cast<size_t> (1 * bufferSize * numBuffers)) };
    void* (*threadEntryProc) (void*) = nullptr;
    void* threadUserPtr = nullptr;

    pthread_cond_t  threadReady;
    pthread_mutex_t threadReadyMutex;
    pthread_t       threadID;
};
//==============================================================================
pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr);
pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr)
{
    auto thread = std::make_unique<SLRealtimeThread>();

    if (! thread->isOk())
        return {};

    auto threadID = thread->startThread (entry, userPtr);

    // the thread will de-allocate itself
    thread.release();

    return threadID;
}

} // namespace juce