The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1101 lines
45KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. The code included in this file is provided under the terms of the ISC license
  8. http://www.isc.org/downloads/software-support-policy/isc-license. Permission
  9. To use, copy, modify, and/or distribute this software for any purpose with or
  10. without fee is hereby granted provided that the above copyright notice and
  11. this permission notice appear in all copies.
  12. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  13. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  14. DISCLAIMED.
  15. ==============================================================================
  16. */
  17. //==============================================================================
//==============================================================================
// JNI binding for android.os.Build.VERSION: exposes the static SDK_INT field so
// the device's API level can be queried at runtime (used by
// setAudioPreprocessingEnabled below). The blank line after the member list is
// required: the trailing backslash continues the macro definition.
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICFIELD (SDK_INT, "SDK_INT", "I") \

DECLARE_JNI_CLASS (AndroidBuildVersion, "android/os/Build$VERSION");
#undef JNI_CLASS_MEMBERS
  22. //==============================================================================
//==============================================================================
// These OpenSL constants were only added in later Android NDK versions; define
// fallbacks so this file also builds against older NDK headers. The values
// match the official NDK definitions.
#ifndef SL_ANDROID_DATAFORMAT_PCM_EX
 #define SL_ANDROID_DATAFORMAT_PCM_EX                   ((SLuint32) 0x00000004)
#endif

#ifndef SL_ANDROID_PCM_REPRESENTATION_FLOAT
 #define SL_ANDROID_PCM_REPRESENTATION_FLOAT            ((SLuint32) 0x00000003)
#endif

#ifndef SL_ANDROID_RECORDING_PRESET_UNPROCESSED
 #define SL_ANDROID_RECORDING_PRESET_UNPROCESSED        ((SLuint32) 0x00000005)
#endif
  32. //==============================================================================
//==============================================================================
// Extended PCM format descriptor: SLDataFormat_PCM plus the extra field used by
// SL_ANDROID_DATAFORMAT_PCM_EX to select fixed- vs floating-point samples.
struct PCMDataFormatEx : SLDataFormat_PCM
{
    SLuint32 representation;  // e.g. SL_ANDROID_PCM_REPRESENTATION_FLOAT
};
  37. //==============================================================================
//==============================================================================
// Maps each OpenSL interface type to its interface ID (GUID). The GUID values
// are replicated here because libOpenSLES.so is loaded at runtime via
// DynamicLibrary (see below), so the library's own ID symbols can't be used.
template <typename T> struct IntfIID;

template <> struct IntfIID<SLObjectItf_>                   { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLEngineItf_>                   { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLOutputMixItf_>                { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLPlayItf_>                     { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLRecordItf_>                   { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLAndroidSimpleBufferQueueItf_> { static SLInterfaceID_ iid; };
template <> struct IntfIID<SLAndroidConfigurationItf_>     { static SLInterfaceID_ iid; };

// GUIDs as published in the OpenSL ES headers; must not be altered.
SLInterfaceID_ IntfIID<SLObjectItf_>::iid                   = { 0x79216360, 0xddd7, 0x11db, 0xac16, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLEngineItf_>::iid                   = { 0x8d97c260, 0xddd4, 0x11db, 0x958f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLOutputMixItf_>::iid                = { 0x97750f60, 0xddd7, 0x11db, 0x92b1, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLPlayItf_>::iid                     = { 0xef0bd9c0, 0xddd7, 0x11db, 0xbf49, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLRecordItf_>::iid                   = { 0xc5657aa0, 0xdddb, 0x11db, 0x82f7, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLAndroidSimpleBufferQueueItf_>::iid = { 0x198e4940, 0xc5d7, 0x11df, 0xa2a6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
SLInterfaceID_ IntfIID<SLAndroidConfigurationItf_>::iid     = { 0x89f6a7e0, 0xbeac, 0x11df, 0x8b5c, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b} };
  53. //==============================================================================
  54. // Some life-time and type management of OpenSL objects
  55. class SlObjectRef
  56. {
  57. public:
  58. //==============================================================================
  59. SlObjectRef() noexcept {}
  60. SlObjectRef (const SlObjectRef& obj) noexcept : cb (obj.cb) {}
  61. SlObjectRef (SlObjectRef&& obj) noexcept : cb (static_cast<ReferenceCountedObjectPtr<ControlBlock>&&> (obj.cb)) { obj.cb = nullptr; }
  62. explicit SlObjectRef (SLObjectItf o) : cb (new ControlBlock (o)) {}
  63. //==============================================================================
  64. SlObjectRef& operator=(const SlObjectRef& r) noexcept { cb = r.cb; return *this; }
  65. SlObjectRef& operator=(SlObjectRef&& r) noexcept { cb = static_cast<ReferenceCountedObjectPtr<ControlBlock>&&> (r.cb); r.cb = nullptr; return *this; }
  66. SlObjectRef& operator=(std::nullptr_t) noexcept { cb = nullptr; return *this; }
  67. //==============================================================================
  68. const SLObjectItf_* const operator*() noexcept { return *cb->ptr.get(); }
  69. SLObjectItf operator->() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); }
  70. operator SLObjectItf() noexcept { return (cb == nullptr ? nullptr : cb->ptr.get()); }
  71. //==============================================================================
  72. bool operator== (nullptr_t) const noexcept { return (cb == nullptr || cb->ptr == nullptr); }
  73. bool operator!= (nullptr_t) const noexcept { return (cb != nullptr && cb->ptr != nullptr); }
  74. private:
  75. //==============================================================================
  76. struct ControlBlock : ReferenceCountedObject { ScopedPointer<const SLObjectItf_* const> ptr; ControlBlock() {} ControlBlock (SLObjectItf o) : ptr (o) {} };
  77. ReferenceCountedObjectPtr<ControlBlock> cb;
  78. };
  79. template <typename T>
  80. class SlRef : public SlObjectRef
  81. {
  82. public:
  83. //==============================================================================
  84. SlRef() noexcept : type (nullptr) {}
  85. SlRef (SlRef& r) noexcept : SlObjectRef (r), type (r.type) {}
  86. SlRef (SlRef&& r) noexcept : SlObjectRef (static_cast<SlRef&&> (r)), type (r.type) { r.type = nullptr; }
  87. //==============================================================================
  88. SlRef& operator= (const SlRef& r) noexcept { SlObjectRef::operator= (r); type = r.type; return *this; }
  89. SlRef& operator= (SlRef&& r) noexcept { SlObjectRef::operator= (static_cast<SlObjectRef&&> (r)); type = r.type; r.type = nullptr; return *this; }
  90. SlRef& operator= (std::nullptr_t) noexcept { SlObjectRef::operator= (nullptr); type = nullptr; return *this; }
  91. //==============================================================================
  92. T* const operator*() noexcept { return *type; }
  93. T* const * operator->() noexcept { return type; }
  94. operator T* const *() noexcept { return type; }
  95. //==============================================================================
  96. static SlRef cast (SlObjectRef& base) { return SlRef (base); }
  97. static SlRef cast (SlObjectRef&& base) { return SlRef (static_cast<SlObjectRef&&> (base)); }
  98. private:
  99. //==============================================================================
  100. SlRef (SlObjectRef& base) : SlObjectRef (base)
  101. {
  102. SLObjectItf obj = SlObjectRef::operator->();
  103. SLresult err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);
  104. if (type == nullptr || err != SL_RESULT_SUCCESS)
  105. *this = nullptr;
  106. }
  107. SlRef (SlObjectRef&& base) : SlObjectRef (static_cast<SlObjectRef&&> (base))
  108. {
  109. SLObjectItf obj = SlObjectRef::operator->();
  110. SLresult err = (*obj)->GetInterface (obj, &IntfIID<T>::iid, &type);
  111. base = nullptr;
  112. if (type == nullptr || err != SL_RESULT_SUCCESS)
  113. *this = nullptr;
  114. }
  115. T* const * type;
  116. };
// Teaches ScopedPointer how to release an OpenSL object: OpenSL objects are
// destroyed through their own function table rather than with delete.
template <>
struct ContainerDeletePolicy<const SLObjectItf_* const>
{
    static void destroy (SLObjectItf object)
    {
        if (object != nullptr)
            (*object)->Destroy (object);
    }
};
  126. //==============================================================================
//==============================================================================
// Sample-format-specific conversion helpers; specialised below for int16 and float.
template <typename T> struct BufferHelpers {};
  128. template <>
  129. struct BufferHelpers<int16>
  130. {
  131. static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
  132. {
  133. dataFormat.formatType = SL_DATAFORMAT_PCM;
  134. dataFormat.numChannels = (SLuint32) numChannels;
  135. dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000);
  136. dataFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
  137. dataFormat.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
  138. dataFormat.channelMask = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER :
  139. (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
  140. dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
  141. dataFormat.representation = 0;
  142. }
  143. static void prepareCallbackBuffer (AudioSampleBuffer&, int16*) {}
  144. static void convertFromOpenSL (const int16* srcInterleaved, AudioSampleBuffer& audioBuffer)
  145. {
  146. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  147. {
  148. typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> DstSampleType;
  149. typedef AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::Const> SrcSampleType;
  150. DstSampleType dstData (audioBuffer.getWritePointer (i));
  151. SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
  152. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  153. }
  154. }
  155. static void convertToOpenSL (const AudioSampleBuffer& audioBuffer, int16* dstInterleaved)
  156. {
  157. for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
  158. {
  159. typedef AudioData::Pointer<AudioData::Int16, AudioData::LittleEndian, AudioData::Interleaved, AudioData::NonConst> DstSampleType;
  160. typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> SrcSampleType;
  161. DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
  162. SrcSampleType srcData (audioBuffer.getReadPointer (i));
  163. dstData.convertSamples (srcData, audioBuffer.getNumSamples());
  164. }
  165. }
  166. };
// 32-bit float specialisation (requires SL_ANDROID_DATAFORMAT_PCM_EX support).
// For mono streams the native OpenSL buffer already has the right layout, so
// the callback buffer is pointed straight at it and conversion becomes a no-op.
template <>
struct BufferHelpers<float>
{
    // Fills in an OpenSL extended PCM descriptor for 32-bit float samples.
    static void initPCMDataFormat (PCMDataFormatEx& dataFormat, int numChannels, double sampleRate)
    {
        dataFormat.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
        dataFormat.numChannels = (SLuint32) numChannels;
        dataFormat.samplesPerSec = (SLuint32) (sampleRate * 1000);  // OpenSL expects milli-hertz
        dataFormat.bitsPerSample = 32;
        dataFormat.containerSize = 32;
        dataFormat.channelMask = (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER :
                                 (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
        dataFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
        dataFormat.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
    }

    // Mono: make the callback buffer refer directly to the native buffer,
    // avoiding a copy. Multi-channel still needs interleave conversion.
    static void prepareCallbackBuffer (AudioSampleBuffer& audioBuffer, float* native)
    {
        if (audioBuffer.getNumChannels() == 1)
            audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
    }

    // De-interleaves native float samples into the audio buffer
    // (no-op for mono, where the buffer already aliases the native data).
    static void convertFromOpenSL (const float* srcInterleaved, AudioSampleBuffer& audioBuffer)
    {
        if (audioBuffer.getNumChannels() == 1)
        {
            jassert (srcInterleaved == audioBuffer.getWritePointer (0));
            return;
        }

        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
        {
            typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> DstSampleType;
            typedef AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved,    AudioData::Const>    SrcSampleType;

            DstSampleType dstData (audioBuffer.getWritePointer (i));
            SrcSampleType srcData (srcInterleaved + i, audioBuffer.getNumChannels());
            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
        }
    }

    // Interleaves the audio buffer into native float samples
    // (no-op for mono, where the buffer already aliases the native data).
    static void convertToOpenSL (const AudioSampleBuffer& audioBuffer, float* dstInterleaved)
    {
        if (audioBuffer.getNumChannels() == 1)
        {
            jassert (dstInterleaved == audioBuffer.getReadPointer (0));
            return;
        }

        for (int i = 0; i < audioBuffer.getNumChannels(); ++i)
        {
            typedef AudioData::Pointer<AudioData::Float32, AudioData::LittleEndian, AudioData::Interleaved,    AudioData::NonConst> DstSampleType;
            typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>    SrcSampleType;

            DstSampleType dstData (dstInterleaved + i, audioBuffer.getNumChannels());
            SrcSampleType srcData (audioBuffer.getReadPointer (i));
            dstData.convertSamples (srcData, audioBuffer.getNumSamples());
        }
    }
};
  220. //==============================================================================
//==============================================================================
// need this from juce_core without asserting
// Returns the current thread's JNIEnv; may return nullptr if this thread has
// not been attached to the Java VM yet (see finished() below).
struct JniEnvThreadHolder { static JNIEnv* getEnv(); };

extern JavaVM* androidJNIJavaVM;
  224. //==============================================================================
  225. class OpenSLAudioIODevice : public AudioIODevice
  226. {
  227. public:
  228. //==============================================================================
    // Forward declaration: defined further down, but referenced by the queue runners.
    template <typename T>
    class OpenSLSessionT;
  231. //==============================================================================
  232. // CRTP
    // CRTP
    //
    // Base shared by the player and recorder runners. The Child class supplies
    // createPlayerOrRecorder() and setState(); this class owns the native
    // sample storage and the OpenSL simple buffer queue.
    template <typename T, class Child, typename RunnerObjectType>
    struct OpenSLQueueRunner
    {
        OpenSLQueueRunner (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : owner (sessionToUse),
              numChannels (numChannelsToUse),
              // one contiguous allocation holding numBuffers blocks of interleaved samples
              nativeBuffer (static_cast<size_t> (numChannels * owner.bufferSize * owner.numBuffers)),
              scratchBuffer (numChannelsToUse, owner.bufferSize),
              // sampleBuffer aliases scratchBuffer's channels; for mono float it may
              // be re-pointed at the native buffer (see BufferHelpers<float>)
              sampleBuffer (scratchBuffer.getArrayOfWritePointers(), numChannelsToUse, owner.bufferSize),
              nextBlock (0), numBlocksOut (0)
        {}

        // Creates the underlying OpenSL object and registers the buffer-queue
        // completion callback. Returns false on any failure.
        bool init()
        {
            runner = crtp().createPlayerOrRecorder();

            if (runner == nullptr)
                return false;

            queue = SlRef<SLAndroidSimpleBufferQueueItf_>::cast (runner);

            if (queue == nullptr)
                return false;

            return ((*queue)->RegisterCallback (queue, staticFinished, this) == SL_RESULT_SUCCESS);
        }

        // Resets the block counters and silences all native/scratch storage.
        void clear()
        {
            nextBlock.set (0);
            numBlocksOut.set (0);

            zeromem (nativeBuffer.getData(), static_cast<size_t> (owner.bufferSize * numChannels * owner.numBuffers) * sizeof (T));
            scratchBuffer.clear();
            (*queue)->Clear (queue);
        }

        // Hands the current block to OpenSL and counts it as outstanding.
        void enqueueBuffer()
        {
            (*queue)->Enqueue (queue, getCurrentBuffer(), static_cast<SLuint32> (getBufferSizeInSamples() * sizeof (T)));
            ++numBlocksOut;
        }

        bool isBufferAvailable() const         { return (numBlocksOut.get() < owner.numBuffers); }
        T* getNextBuffer()                     { nextBlock.set ((nextBlock.get() + 1) % owner.numBuffers); return getCurrentBuffer(); }
        T* getCurrentBuffer()                  { return nativeBuffer.getData() + (static_cast<size_t> (nextBlock.get()) * getBufferSizeInSamples()); }
        size_t getBufferSizeInSamples() const  { return static_cast<size_t> (owner.bufferSize * numChannels); }

        // Buffer-queue completion callback, invoked on an OpenSL-owned thread:
        // attaches the thread to the Java VM if needed, then asks the session
        // to process more audio.
        void finished (SLAndroidSimpleBufferQueueItf)
        {
            if (JniEnvThreadHolder::getEnv() == nullptr)
            {
                JNIEnv* env;
                androidJNIJavaVM->AttachCurrentThread (&env, nullptr);
                setEnv (env);  // NOTE(review): setEnv is presumably declared elsewhere in juce_core — not visible here
            }

            --numBlocksOut;
            owner.doSomeWorkOnAudioThread();
        }

        static void staticFinished (SLAndroidSimpleBufferQueueItf caller, void* pContext)
        {
            reinterpret_cast<OpenSLQueueRunner*> (pContext)->finished (caller);
        }

        // get the "this" pointer for CRTP
        Child& crtp()              { return * ((Child*) this); }
        const Child& crtp() const  { return * ((Child*) this); }

        OpenSLSessionT<T>& owner;

        SlRef<RunnerObjectType> runner;                 // the player or recorder object
        SlRef<SLAndroidSimpleBufferQueueItf_> queue;    // its buffer queue interface

        int numChannels;
        HeapBlock<T> nativeBuffer;                      // interleaved native-format storage
        AudioSampleBuffer scratchBuffer, sampleBuffer;

        Atomic<int> nextBlock, numBlocksOut;
    };
  297. //==============================================================================
    //==============================================================================
    // Playback side: an OpenSL AudioPlayer fed from a simple buffer queue and
    // routed to the session's output mix.
    template <typename T>
    struct OpenSLQueueRunnerPlayer : OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_>
    {
        typedef OpenSLQueueRunner<T, OpenSLQueueRunnerPlayer<T>, SLPlayItf_> Base;

        enum { isPlayer = 1 };

        OpenSLQueueRunnerPlayer (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : Base (sessionToUse, numChannelsToUse)
        {}

        // Creates the AudioPlayer object; returns a null ref on any failure.
        SlRef<SLPlayItf_> createPlayerOrRecorder()
        {
            SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers)};
            SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, Base::owner.outputMix};

            PCMDataFormatEx dataFormat;
            BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);

            SLDataSource source = {&queueLocator, &dataFormat};
            SLDataSink   sink   = {&outputMix, nullptr};

            // request the buffer-queue interface as a required interface
            SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid };
            SLboolean trueFlag = SL_BOOLEAN_TRUE;

            SLObjectItf obj = nullptr;

            SLresult status = (*Base::owner.engine)->CreateAudioPlayer (Base::owner.engine, &obj, &source, &sink, 1, queueInterfaces, &trueFlag);

            if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                // destroy the half-constructed object before bailing out
                if (obj != nullptr)
                    (*obj)->Destroy (obj);

                return SlRef<SLPlayItf_>();
            }

            return SlRef<SLPlayItf_>::cast (SlObjectRef (obj));
        }

        void setState (bool running)  { (*Base::runner)->SetPlayState (Base::runner, running ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_STOPPED); }
    };
    // Capture side: an OpenSL AudioRecorder reading from the default audio
    // input into a simple buffer queue.
    template <typename T>
    struct OpenSLQueueRunnerRecorder : OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_>
    {
        typedef OpenSLQueueRunner<T, OpenSLQueueRunnerRecorder<T>, SLRecordItf_> Base;

        enum { isPlayer = 0 };

        OpenSLQueueRunnerRecorder (OpenSLSessionT<T>& sessionToUse, int numChannelsToUse)
            : Base (sessionToUse, numChannelsToUse)
        {}

        // Creates the AudioRecorder object; returns a null ref on any failure.
        SlRef<SLRecordItf_> createPlayerOrRecorder()
        {
            SLDataLocator_IODevice ioDeviceLocator = {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr};
            SLDataLocator_AndroidSimpleBufferQueue queueLocator = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, static_cast<SLuint32> (Base::owner.numBuffers)};

            PCMDataFormatEx dataFormat;
            BufferHelpers<T>::initPCMDataFormat (dataFormat, Base::numChannels, Base::owner.sampleRate);

            SLDataSource source = {&ioDeviceLocator, nullptr};
            SLDataSink   sink   = {&queueLocator, &dataFormat};

            // the buffer queue is required; the Android config interface is optional
            SLInterfaceID queueInterfaces[] = { &IntfIID<SLAndroidSimpleBufferQueueItf_>::iid, &IntfIID<SLAndroidConfigurationItf_>::iid };
            SLboolean interfaceRequired[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_FALSE};

            SLObjectItf obj = nullptr;

            SLresult status = (*Base::owner.engine)->CreateAudioRecorder (Base::owner.engine, &obj, &source, &sink, 2, queueInterfaces, interfaceRequired);

            if (status != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
            {
                // destroy the half-constructed object before bailing out
                if (obj != nullptr)
                    (*obj)->Destroy (obj);

                return SlRef<SLRecordItf_>();
            }

            SlRef<SLRecordItf_> recorder = SlRef<SLRecordItf_>::cast (SlObjectRef (obj));

            // may return nullptr on some platforms - that's ok
            config = SlRef<SLAndroidConfigurationItf_>::cast (recorder);

            return recorder;
        }

        // Selects an Android recording preset: GENERIC when preprocessing is
        // wanted; otherwise UNPROCESSED on API >= 25, falling back to
        // VOICE_RECOGNITION on older devices. Returns true on success.
        bool setAudioPreprocessingEnabled (bool shouldEnable)
        {
            if (config != nullptr)
            {
                // SL_ANDROID_RECORDING_PRESET_UNPROCESSED needs API level 25+
                const bool supportsUnprocessed = (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 25);
                const SLuint32 recordingPresetValue
                    = (shouldEnable ? SL_ANDROID_RECORDING_PRESET_GENERIC
                                    : (supportsUnprocessed ? SL_ANDROID_RECORDING_PRESET_UNPROCESSED
                                                           : SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION));

                SLresult status = (*config)->SetConfiguration (config, SL_ANDROID_KEY_RECORDING_PRESET,
                                                               &recordingPresetValue, sizeof (recordingPresetValue));

                return (status == SL_RESULT_SUCCESS);
            }

            return false;
        }

        void setState (bool running)  { (*Base::runner)->SetRecordState (Base::runner, running ? SL_RECORDSTATE_RECORDING : SL_RECORDSTATE_STOPPED); }

        SlRef<SLAndroidConfigurationItf_> config;  // may be null if unsupported
    };
  377. //==============================================================================
  378. class OpenSLSession
  379. {
  380. public:
  381. OpenSLSession (DynamicLibrary& slLibraryToUse,
  382. int numInputChannels, int numOutputChannels,
  383. double samleRateToUse, int bufferSizeToUse,
  384. int numBuffersToUse)
  385. : inputChannels (numInputChannels), outputChannels (numOutputChannels),
  386. sampleRate (samleRateToUse), bufferSize (bufferSizeToUse), numBuffers (numBuffersToUse),
  387. running (false), audioProcessingEnabled (true), callback (nullptr)
  388. {
  389. jassert (numInputChannels > 0 || numOutputChannels > 0);
  390. if (CreateEngineFunc createEngine = (CreateEngineFunc) slLibraryToUse.getFunction ("slCreateEngine"))
  391. {
  392. SLObjectItf obj = nullptr;
  393. SLresult err = createEngine (&obj, 0, nullptr, 0, nullptr, nullptr);
  394. if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  395. {
  396. if (obj != nullptr)
  397. (*obj)->Destroy (obj);
  398. return;
  399. }
  400. engine = SlRef<SLEngineItf_>::cast (SlObjectRef (obj));
  401. }
  402. if (outputChannels > 0)
  403. {
  404. SLObjectItf obj = nullptr;
  405. SLresult err = (*engine)->CreateOutputMix (engine, &obj, 0, nullptr, nullptr);
  406. if (err != SL_RESULT_SUCCESS || obj == nullptr || (*obj)->Realize (obj, 0) != SL_RESULT_SUCCESS)
  407. {
  408. if (obj != nullptr)
  409. (*obj)->Destroy (obj);
  410. return;
  411. }
  412. outputMix = SlRef<SLOutputMixItf_>::cast (SlObjectRef (obj));
  413. }
  414. }
  415. virtual ~OpenSLSession() {}
  416. virtual bool openedOK() const { return (engine != nullptr && (outputChannels == 0 || (outputMix != nullptr))); }
  417. virtual void start() { stop(); jassert (callback.get() != nullptr); running = true; }
  418. virtual void stop() { running = false; }
  419. virtual bool setAudioPreprocessingEnabled (bool shouldEnable) = 0;
  420. void setCallback (AudioIODeviceCallback* callbackToUse)
  421. {
  422. if (! running)
  423. {
  424. callback.set (callbackToUse);
  425. return;
  426. }
  427. // don't set callback to null! stop the playback instead!
  428. jassert (callbackToUse != nullptr);
  429. // spin-lock until we can set the callback
  430. while (true)
  431. {
  432. AudioIODeviceCallback* old = callback.get();
  433. if (old == callbackToUse)
  434. break;
  435. if (callback.compareAndSetBool (callbackToUse, old))
  436. break;
  437. Thread::sleep (1);
  438. }
  439. }
  440. void process (const float** inputChannelData, float** outputChannelData)
  441. {
  442. if (AudioIODeviceCallback* cb = callback.exchange(nullptr))
  443. {
  444. cb->audioDeviceIOCallback (inputChannelData, inputChannels, outputChannelData, outputChannels, bufferSize);
  445. callback.set (cb);
  446. }
  447. else
  448. {
  449. for (int i = 0; i < outputChannels; ++i)
  450. zeromem (outputChannelData[i], sizeof(float) * static_cast<size_t> (bufferSize));
  451. }
  452. }
  453. static OpenSLSession* create (DynamicLibrary& slLibrary,
  454. int numInputChannels, int numOutputChannels,
  455. double samleRateToUse, int bufferSizeToUse,
  456. int numBuffersToUse,
  457. bool floatingPointSupport);
  458. //==============================================================================
  459. typedef SLresult (*CreateEngineFunc)(SLObjectItf*,SLuint32,const SLEngineOption*,SLuint32,const SLInterfaceID*,const SLboolean*);
  460. //==============================================================================
  461. int inputChannels, outputChannels;
  462. double sampleRate;
  463. int bufferSize, numBuffers;
  464. bool running, audioProcessingEnabled;
  465. SlRef<SLEngineItf_> engine;
  466. SlRef<SLOutputMixItf_> outputMix;
  467. Atomic<AudioIODeviceCallback*> callback;
  468. };
    // Sample-type-specific session: owns the player and/or recorder runners and
    // drives the audio callback from the OpenSL buffer-queue threads.
    template <typename T>
    class OpenSLSessionT : public OpenSLSession
    {
    public:
        OpenSLSessionT (DynamicLibrary& slLibraryToUse,
                        int numInputChannels, int numOutputChannels,
                        double samleRateToUse, int bufferSizeToUse,
                        int numBuffersToUse)
            : OpenSLSession (slLibraryToUse, numInputChannels, numOutputChannels, samleRateToUse, bufferSizeToUse, numBuffersToUse)
        {
            jassert (numInputChannels > 0 || numOutputChannels > 0);

            if (OpenSLSession::openedOK())
            {
                if (inputChannels > 0)
                {
                    recorder = new OpenSLQueueRunnerRecorder<T> (*this, inputChannels);

                    // a failed runner is reset to null, which openedOK() reports
                    if (! recorder->init())
                    {
                        recorder = nullptr;
                        return;
                    }
                }

                if (outputChannels > 0)
                {
                    player = new OpenSLQueueRunnerPlayer<T> (*this, outputChannels);

                    if (! player->init())
                    {
                        player = nullptr;
                        return;
                    }
                }
            }
        }

        // OK only if the base session is valid and every requested direction
        // got a working runner.
        bool openedOK() const override
        {
            return (OpenSLSession::openedOK() && (inputChannels == 0 || recorder != nullptr)
                                              && (outputChannels == 0 || player != nullptr));
        }

        void start() override
        {
            OpenSLSession::start();

            guard.set (0);

            if (inputChannels > 0)
                recorder->clear();

            if (outputChannels > 0)
                player->clear();

            // first enqueue all buffers
            for (int i = 0; i < numBuffers; ++i)
                doSomeWorkOnAudioThread();

            if (inputChannels > 0)
                recorder->setState (true);

            if (outputChannels > 0)
                player->setState (true);
        }

        void stop() override
        {
            OpenSLSession::stop();

            if (inputChannels > 0)
                recorder->setState (false);

            if (outputChannels > 0)
                player->setState (false);
        }

        bool setAudioPreprocessingEnabled (bool shouldEnable) override
        {
            if (shouldEnable != audioProcessingEnabled)
            {
                audioProcessingEnabled = shouldEnable;

                // only the recorder has a preprocessing preset to configure
                if (recorder != nullptr)
                    return recorder->setAudioPreprocessingEnabled (audioProcessingEnabled);
            }

            return true;
        }

        // Called from both the player's and the recorder's buffer-queue
        // callbacks; processes one block when both sides have a free buffer.
        void doSomeWorkOnAudioThread()
        {
            // only the player or the recorder should enter this section at any time
            if (guard.compareAndSetBool (1, 0))
            {
                // are there enough buffers available to process some audio
                if ((inputChannels == 0 || recorder->isBufferAvailable()) && (outputChannels == 0 || player->isBufferAvailable()))
                {
                    T* recorderBuffer = (inputChannels > 0 ? recorder->getNextBuffer() : nullptr);
                    T* playerBuffer = (outputChannels > 0 ? player->getNextBuffer() : nullptr);

                    const float** inputChannelData = nullptr;
                    float** outputChannelData = nullptr;

                    if (recorderBuffer != nullptr)
                    {
                        BufferHelpers<T>::prepareCallbackBuffer (recorder->sampleBuffer, recorderBuffer);
                        BufferHelpers<T>::convertFromOpenSL (recorderBuffer, recorder->sampleBuffer);

                        inputChannelData = recorder->sampleBuffer.getArrayOfReadPointers();
                    }

                    if (playerBuffer != nullptr)
                    {
                        BufferHelpers<T>::prepareCallbackBuffer (player->sampleBuffer, playerBuffer);
                        outputChannelData = player->sampleBuffer.getArrayOfWritePointers();
                    }

                    process (inputChannelData, outputChannelData);

                    if (recorderBuffer != nullptr)
                        recorder->enqueueBuffer();

                    if (playerBuffer != nullptr)
                    {
                        BufferHelpers<T>::convertToOpenSL (player->sampleBuffer, playerBuffer);
                        player->enqueueBuffer();
                    }
                }

                guard.set (0);
            }
        }

        //==============================================================================
        ScopedPointer<OpenSLQueueRunnerPlayer<T> > player;
        ScopedPointer<OpenSLQueueRunnerRecorder<T> > recorder;
        Atomic<int> guard;   // re-entrancy guard for doSomeWorkOnAudioThread
    };
  581. //==============================================================================
    //==============================================================================
    /** Opens the OpenSL library and estimates latency. Only construct this if
        the hardware is known to support OpenSL. */
    OpenSLAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, openSLTypeName),
          actualBufferSize (0), sampleRate (0),
          audioProcessingEnabled (true),
          callback (nullptr)
    {
        // OpenSL has very poor support for determining latency, so the only way found to
        // get a number for this is by asking the AudioTrack/AudioRecord classes..
        AndroidAudioIODevice javaDevice (deviceName);

        // this is a total guess about how to calculate the latency, but seems to vaguely agree
        // with the devices tested.. YMMV
        inputLatency  = (javaDevice.minBufferSizeIn  * 2) / 3;
        outputLatency = (javaDevice.minBufferSizeOut * 2) / 3;

        // redistribute the combined latency between input and output,
        // rounding each down to a multiple of 16 samples
        const int64 longestLatency = jmax (inputLatency, outputLatency);
        const int64 totalLatency = inputLatency + outputLatency;
        inputLatency  = (int) ((longestLatency * inputLatency)  / totalLatency) & ~15;
        outputLatency = (int) ((longestLatency * outputLatency) / totalLatency) & ~15;

        supportsFloatingPoint = getSupportsFloatingPoint();

        bool success = slLibrary.open ("libOpenSLES.so");

        // You can only create this class if you are sure that your hardware supports OpenSL
        jassert (success);
        ignoreUnused (success);
    }
    /** Shuts down any open session. */
    ~OpenSLAudioIODevice()
    {
        close();
    }
    // True once open() has successfully created an OpenSL session.
    bool openedOk() const  { return session != nullptr; }
  610. StringArray getOutputChannelNames() override
  611. {
  612. StringArray s;
  613. s.add ("Left");
  614. s.add ("Right");
  615. return s;
  616. }
  617. StringArray getInputChannelNames() override
  618. {
  619. StringArray s;
  620. s.add ("Audio Input");
  621. return s;
  622. }
    // Standard Android rates, plus the device's native rate if it isn't listed.
    Array<double> getAvailableSampleRates() override
    {
        // see https://developer.android.com/ndk/guides/audio/opensl-for-android.html
        static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0,
                                        22050.0, 24000.0, 32000.0, 44100.0, 48000.0 };

        Array<double> retval (rates, numElementsInArray (rates));

        // make sure the native sample rate is part of the list
        double native = getNativeSampleRate();

        if (native != 0.0 && ! retval.contains (native))
            retval.add (native);

        return retval;
    }
  635. Array<int> getAvailableBufferSizes() override
  636. {
  637. // we need to offer the lowest possible buffer size which
  638. // is the native buffer size
  639. const int defaultNumMultiples = 8;
  640. const int nativeBufferSize = getNativeBufferSize();
  641. Array<int> retval;
  642. for (int i = 1; i < defaultNumMultiples; ++i)
  643. retval.add (i * nativeBufferSize);
  644. return retval;
  645. }
// Opens the device: clamps the requested channel sets to what this backend
// supports (2 out / 1 in), then creates an OpenSL session. Returns an empty
// string on success, or an error message (also stored in lastError).
String open (const BigInteger& inputChannels,
             const BigInteger& outputChannels,
             double requestedSampleRate,
             int bufferSize) override
{
    close();

    lastError.clear();
    sampleRate = (int) requestedSampleRate;

    int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

    // Clear any output channel bits above the first two — only stereo out is supported.
    activeOutputChans = outputChannels;
    activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
    int numOutputChannels = activeOutputChans.countNumberOfSetBits();

    // Clear any input channel bits above the first — only mono in is supported.
    activeInputChans = inputChannels;
    activeInputChans.setRange (1, activeInputChans.getHighestBit(), false);
    int numInputChannels = activeInputChans.countNumberOfSetBits();

    actualBufferSize = preferredBufferSize;

    // Fewer buffers on the low-latency path, more to stay glitch-free on slow devices.
    const int audioBuffersToEnqueue = hasLowLatencyAudioPath() ? buffersToEnqueueForLowLatency
                                                               : buffersToEnqueueSlowAudio;

    DBG ("OpenSL: numInputChannels = " << numInputChannels
         << ", numOutputChannels = " << numOutputChannels
         << ", nativeBufferSize = " << getNativeBufferSize()
         << ", nativeSampleRate = " << getNativeSampleRate()
         << ", actualBufferSize = " << actualBufferSize
         << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue
         << ", sampleRate = " << sampleRate
         << ", supportsFloatingPoint = " << (supportsFloatingPoint ? "true" : "false"));

    if (numInputChannels > 0 && (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio)))
    {
        // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
        // before trying to open an audio input device. This is not going to work!
        jassertfalse;
        lastError = "Error opening OpenSL input device: the app was not granted android.permission.RECORD_AUDIO";
    }

    session = OpenSLSession::create (slLibrary, numInputChannels, numOutputChannels,
                                     sampleRate, actualBufferSize, audioBuffersToEnqueue,
                                     supportsFloatingPoint);
    if (session != nullptr)
        session->setAudioPreprocessingEnabled (audioProcessingEnabled);
    else
    {
        // Session creation failed: retry as output-only if input was requested,
        // since some environments can't open an input stream at all.
        if (numInputChannels > 0 && numOutputChannels > 0 && RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
        {
            // New versions of the Android emulator do not seem to support audio input anymore on OS X
            activeInputChans = BigInteger(0);
            numInputChannels = 0;

            session = OpenSLSession::create(slLibrary, numInputChannels, numOutputChannels,
                                            sampleRate, actualBufferSize, audioBuffersToEnqueue,
                                            supportsFloatingPoint);
        }
    }

    if (session == nullptr)
        lastError = "Unknown error initializing opensl session";

    deviceOpen = (session != nullptr);
    return lastError;
}
// Closes the device. Stops the callback first so the session is idle
// before it is destroyed; safe to call when already closed.
void close() override
{
    stop();
    session = nullptr;
    callback = nullptr;
}
int getOutputLatencyInSamples() override            { return outputLatency; }
int getInputLatencyInSamples() override             { return inputLatency; }
bool isOpen() override                              { return deviceOpen; }
int getCurrentBufferSizeSamples() override          { return actualBufferSize; }

// 32-bit when floating-point PCM is supported by the OS, otherwise 16-bit int.
int getCurrentBitDepth() override                   { return supportsFloatingPoint ? 32 : 16; }

BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
BigInteger getActiveInputChannels() const override  { return activeInputChans; }
String getLastError() override                      { return lastError; }

// A non-null callback means start() has been called with a live callback.
bool isPlaying() override                           { return callback != nullptr; }
  716. int getDefaultBufferSize() override
  717. {
  718. // Only on a Pro-Audio device will we set the lowest possible buffer size
  719. // by default. We need to be more conservative on other devices
  720. // as they may be low-latency, but still have a crappy CPU.
  721. return (isProAudioDevice() ? 1 : 6)
  722. * defaultBufferSizeIsMultipleOfNative * getNativeBufferSize();
  723. }
  724. double getCurrentSampleRate() override
  725. {
  726. return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate);
  727. }
// Starts (or swaps) the audio callback. The new callback gets
// audioDeviceAboutToStart() before any audio callbacks, and a previous
// callback gets audioDeviceStopped() after it has been replaced.
void start (AudioIODeviceCallback* newCallback) override
{
    // No-op if the device isn't open or the same callback is already active.
    if (session != nullptr && callback != newCallback)
    {
        AudioIODeviceCallback* oldCallback = callback;

        if (newCallback != nullptr)
            newCallback->audioDeviceAboutToStart (this);

        if (oldCallback != nullptr)
        {
            // already running
            if (newCallback == nullptr)
                stop();
            else
                session->setCallback (newCallback);

            oldCallback->audioDeviceStopped();
        }
        else
        {
            // Starting with a null callback when nothing is running would be a
            // programming error — the branch above already handles stopping.
            jassert (newCallback != nullptr);

            // session hasn't started yet
            session->setCallback (newCallback);
            session->start();
        }

        callback = newCallback;
    }
}
// Stops the session and detaches the callback; no-op when not running.
void stop() override
{
    if (session != nullptr && callback != nullptr)
    {
        // NOTE(review): callback is cleared before session->stop() — the
        // ordering presumably matters for the session's callback dispatch;
        // confirm before reordering.
        callback = nullptr;
        session->stop();
        session->setCallback (nullptr);
    }
}
  763. bool setAudioPreprocessingEnabled (bool shouldAudioProcessingBeEnabled) override
  764. {
  765. audioProcessingEnabled = shouldAudioProcessingBeEnabled;
  766. if (session != nullptr)
  767. session->setAudioPreprocessingEnabled (audioProcessingEnabled);
  768. return true;
  769. }
// Name under which this device type is registered (defined at file scope).
static const char* const openSLTypeName;

private:
    //==============================================================================
    DynamicLibrary slLibrary;                 // handle to libOpenSLES.so
    int actualBufferSize, sampleRate;         // values chosen by open()
    int inputLatency, outputLatency;          // reported latencies in samples
    bool deviceOpen, supportsFloatingPoint, audioProcessingEnabled;
    String lastError;                         // last error message from open()
    BigInteger activeOutputChans, activeInputChans;
    AudioIODeviceCallback* callback;          // non-null while playing
    ScopedPointer<OpenSLSession> session;     // owned; null until open() succeeds

    enum
    {
        // The number of buffers to enqueue needs to be at least two for the audio to use the low-latency
        // audio path (see "Performance" section in ndk/docs/Additional_library_docs/opensles/index.html)
        buffersToEnqueueForLowLatency = 4,
        buffersToEnqueueSlowAudio = 8,
        defaultBufferSizeIsMultipleOfNative = 1
    };
  789. //==============================================================================
// Queries the Java-side AudioManager for a named property via JNI and
// returns its string value, or an empty string if unavailable.
static String audioManagerGetProperty (const String& property)
{
    const LocalRef<jstring> jProperty (javaString (property));
    const LocalRef<jstring> text ((jstring) android.activity.callObjectMethod (JuceAppActivity.audioManagerGetProperty,
                                                                               jProperty.get()));
    if (text.get() != 0)
        return juceString (text);

    return {};
}

// Asks the Java side whether the device declares the given system feature.
static bool androidHasSystemFeature (const String& property)
{
    const LocalRef<jstring> jProperty (javaString (property));
    return android.activity.callBooleanMethod (JuceAppActivity.hasSystemFeature, jProperty.get());
}
// Native output sample rate reported by the OS, or 0.0 when the property
// is missing (callers must handle the 0.0 case).
static double getNativeSampleRate()
{
    return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
}

// Native frames-per-buffer; falls back to 512 when the property is
// missing or non-positive.
static int getNativeBufferSize()
{
    const int val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
    return val > 0 ? val : 512;
}

// True on devices declaring the "android.hardware.audio.pro" feature.
static bool isProAudioDevice()
{
    return androidHasSystemFeature ("android.hardware.audio.pro");
}

// True when the device advertises a low-latency audio path.
static bool hasLowLatencyAudioPath()
{
    return androidHasSystemFeature ("android.hardware.audio.low_latency");
}

// Floating-point PCM requires Android API level 21 or later.
static bool getSupportsFloatingPoint()
{
    return (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 21);
}
  825. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
  826. };
  827. OpenSLAudioIODevice::OpenSLSession* OpenSLAudioIODevice::OpenSLSession::create (DynamicLibrary& slLibrary,
  828. int numInputChannels, int numOutputChannels,
  829. double samleRateToUse, int bufferSizeToUse,
  830. int numBuffersToUse,
  831. bool floatingPointSupport)
  832. {
  833. ScopedPointer<OpenSLSession> retval;
  834. if (floatingPointSupport)
  835. retval = new OpenSLSessionT<float> (slLibrary, numInputChannels, numOutputChannels, samleRateToUse,
  836. bufferSizeToUse, numBuffersToUse);
  837. else
  838. retval = new OpenSLSessionT<int16> (slLibrary, numInputChannels, numOutputChannels, samleRateToUse,
  839. bufferSizeToUse, numBuffersToUse);
  840. if (retval != nullptr && (! retval->openedOK()))
  841. retval = nullptr;
  842. return retval.release();
  843. }
  844. //==============================================================================
  845. class OpenSLAudioDeviceType : public AudioIODeviceType
  846. {
  847. public:
  848. OpenSLAudioDeviceType() : AudioIODeviceType (OpenSLAudioIODevice::openSLTypeName) {}
  849. //==============================================================================
  850. void scanForDevices() override {}
  851. StringArray getDeviceNames (bool) const override { return StringArray (OpenSLAudioIODevice::openSLTypeName); }
  852. int getDefaultDeviceIndex (bool) const override { return 0; }
  853. int getIndexOfDevice (AudioIODevice* device, bool) const override { return device != nullptr ? 0 : -1; }
  854. bool hasSeparateInputsAndOutputs() const override { return false; }
  855. AudioIODevice* createDevice (const String& outputDeviceName,
  856. const String& inputDeviceName) override
  857. {
  858. ScopedPointer<OpenSLAudioIODevice> dev;
  859. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  860. dev = new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  861. : inputDeviceName);
  862. return dev.release();
  863. }
  864. static bool isOpenSLAvailable()
  865. {
  866. DynamicLibrary library;
  867. return library.open ("libOpenSLES.so");
  868. }
  869. private:
  870. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
  871. };
  872. const char* const OpenSLAudioIODevice::openSLTypeName = "Android OpenSL";
  873. //==============================================================================
  874. bool isOpenSLAvailable() { return OpenSLAudioDeviceType::isOpenSLAvailable(); }
  875. AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES()
  876. {
  877. return isOpenSLAvailable() ? new OpenSLAudioDeviceType() : nullptr;
  878. }