/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2015 - ROLI Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
#undef check

const char* const openSLTypeName = "Android OpenSL";

bool isOpenSLAvailable()
{
    DynamicLibrary library;
    return library.open ("libOpenSLES.so");
}

//==============================================================================
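// Single OpenSL ES playback/capture device. It runs its own "OpenSL" thread which
// shuttles fixed-size 16-bit blocks between the JUCE audio callback and the
// OpenSL player/recorder buffer queues created via the nested Engine struct below.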
class OpenSLAudioIODevice  : public AudioIODevice,
                             private Thread
{
public:
    OpenSLAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, openSLTypeName),
          Thread ("OpenSL"),
          callback (nullptr), sampleRate (0), deviceOpen (false),
          inputBuffer (2, 2), outputBuffer (2, 2)
    {
        // OpenSL has piss-poor support for determining latency, so the only way I can find to
        // get a number for this is by asking the AudioTrack/AudioRecord classes..
        AndroidAudioIODevice javaDevice (deviceName);

        // this is a total guess about how to calculate the latency, but seems to vaguely agree
        // with the devices I've tested.. YMMV
        inputLatency  = (javaDevice.minBufferSizeIn  * 2) / 3;
        outputLatency = (javaDevice.minBufferSizeOut * 2) / 3;

        const int64 longestLatency = jmax (inputLatency, outputLatency);
        const int64 totalLatency = inputLatency + outputLatency;
        inputLatency  = (int) ((longestLatency * inputLatency)  / totalLatency) & ~15;
        outputLatency = (int) ((longestLatency * outputLatency) / totalLatency) & ~15;
    }

    ~OpenSLAudioIODevice()
    {
        close();
    }

    bool openedOk() const    { return engine.outputMixObject != nullptr; }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;
        s.add ("Audio Input");
        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        static const double rates[] = { 8000.0, 16000.0, 32000.0, 44100.0, 48000.0 };
        Array<double> retval (rates, numElementsInArray (rates));
        // make sure the native sample rate is part of the list
        double native = getNativeSampleRate();

        if (native != 0.0 && ! retval.contains (native))
            retval.add (native);

        return retval;
    }

    Array<int> getAvailableBufferSizes() override
    {
        // we need to offer the lowest possible buffer size which
        // is the native buffer size
        const int defaultNumMultiples = 8;
        const int nativeBufferSize = getNativeBufferSize();
        Array<int> retval;

        for (int i = 1; i < defaultNumMultiples; ++i)
            retval.add (i * nativeBufferSize);

        return retval;
    }
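
    // Opens the device: the channel masks are clamped to stereo output and mono input,
    // the internal buffers are sized from the requested block size, the OpenSL
    // recorder/player objects are created, their queues pre-filled, and the audio
    // thread started. Returns an empty string on success, or an error message.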
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        lastError.clear();
        sampleRate = (int) requestedSampleRate;

        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (1, activeInputChans.getHighestBit(), false);
        numInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;

        inputBuffer.setSize  (jmax (1, numInputChannels),  actualBufferSize);
        outputBuffer.setSize (jmax (1, numOutputChannels), actualBufferSize);
        outputBuffer.clear();

        const int audioBuffersToEnqueue = hasLowLatencyAudioPath() ? buffersToEnqueueForLowLatency
                                                                   : buffersToEnqueueSlowAudio;

        DBG ("OpenSL: numInputChannels = " << numInputChannels
             << ", numOutputChannels = " << numOutputChannels
             << ", nativeBufferSize = " << getNativeBufferSize()
             << ", nativeSampleRate = " << getNativeSampleRate()
             << ", actualBufferSize = " << actualBufferSize
             << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue
             << ", sampleRate = " << sampleRate);

        if (numInputChannels > 0)
        {
            if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
                // before trying to open an audio input device. This is not going to work!
                jassertfalse;
                lastError = "Error opening OpenSL input device: the app was not granted android.permission.RECORD_AUDIO";
            }
            else
            {
                recorder = engine.createRecorder (numInputChannels, sampleRate,
                                                  audioBuffersToEnqueue, actualBufferSize);

                if (recorder == nullptr)
                    lastError = "Error opening OpenSL input device: creating Recorder failed.";
            }
        }

        if (numOutputChannels > 0)
        {
            player = engine.createPlayer (numOutputChannels, sampleRate,
                                          audioBuffersToEnqueue, actualBufferSize);

            if (player == nullptr)
                lastError = "Error opening OpenSL output device: creating Player failed.";
        }

        // pre-fill buffers
        for (int i = 0; i < audioBuffersToEnqueue; ++i)
            processBuffers();

        startThread (8);

        deviceOpen = true;
        return lastError;
    }
    void close() override
    {
        stop();
        stopThread (6000);
        deviceOpen = false;
        recorder = nullptr;
        player = nullptr;
    }

    int getOutputLatencyInSamples() override            { return outputLatency; }
    int getInputLatencyInSamples() override             { return inputLatency; }
    bool isOpen() override                              { return deviceOpen; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return callback != nullptr; }

    int getDefaultBufferSize() override
    {
        // Only on a Pro-Audio device will we set the lowest possible buffer size
        // by default. We need to be more conservative on other devices
        // as they may be low-latency, but still have a crappy CPU.
        return (isProAudioDevice() ? 1 : 6)
                 * defaultBufferSizeIsMultipleOfNative * getNativeBufferSize();
    }

    double getCurrentSampleRate() override
    {
        return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate);
    }

    void start (AudioIODeviceCallback* newCallback) override
    {
        stop();

        if (deviceOpen && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            setCallback (newCallback);
        }
    }

    void stop() override
    {
        if (AudioIODeviceCallback* const oldCallback = setCallback (nullptr))
            oldCallback->audioDeviceStopped();
    }

    bool setAudioPreprocessingEnabled (bool enable) override
    {
        return recorder != nullptr && recorder->setAudioPreprocessingEnabled (enable);
    }

private:
    //==============================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    int actualBufferSize, sampleRate;
    int inputLatency, outputLatency;
    bool deviceOpen;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    int numInputChannels, numOutputChannels;
    AudioSampleBuffer inputBuffer, outputBuffer;

    struct Player;
    struct Recorder;

    enum
    {
        // The number of buffers to enqueue needs to be at least two for the audio to use the low-latency
        // audio path (see "Performance" section in ndk/docs/Additional_library_docs/opensles/index.html)
        buffersToEnqueueForLowLatency = 2,
        buffersToEnqueueSlowAudio = 4,
        defaultBufferSizeIsMultipleOfNative = 1
    };

    //==============================================================================
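    // These helpers call back into the Java activity through the JuceAppActivity JNI
    // bridge to query AudioManager properties and system features; they are used
    // below to pick the native sample rate, native buffer size and audio path.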
    static String audioManagerGetProperty (const String& property)
    {
        const LocalRef<jstring> jProperty (javaString (property));
        const LocalRef<jstring> text ((jstring) android.activity.callObjectMethod (JuceAppActivity.audioManagerGetProperty,
                                                                                   jProperty.get()));
        if (text.get() != 0)
            return juceString (text);

        return String();
    }

    static bool androidHasSystemFeature (const String& property)
    {
        const LocalRef<jstring> jProperty (javaString (property));
        return android.activity.callBooleanMethod (JuceAppActivity.hasSystemFeature, jProperty.get());
    }

    static double getNativeSampleRate()
    {
        return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue();
    }

    static int getNativeBufferSize()
    {
        const int val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue();
        return val > 0 ? val : 512;
    }

    static bool isProAudioDevice()
    {
        return androidHasSystemFeature ("android.hardware.audio.pro");
    }

    static bool hasLowLatencyAudioPath()
    {
        return androidHasSystemFeature ("android.hardware.audio.low_latency");
    }

    //==============================================================================
    AudioIODeviceCallback* setCallback (AudioIODeviceCallback* const newCallback)
    {
        const ScopedLock sl (callbackLock);
        AudioIODeviceCallback* const oldCallback = callback;
        callback = newCallback;
        return oldCallback;
    }
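
    // Processes one block: reads from the recorder (if any), runs the user callback
    // under the callback lock, then hands the rendered block to the player queue.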
    void processBuffers()
    {
        if (recorder != nullptr)
            recorder->readNextBlock (inputBuffer, *this);

        {
            const ScopedLock sl (callbackLock);

            if (callback != nullptr)
                callback->audioDeviceIOCallback (numInputChannels > 0 ? inputBuffer.getArrayOfReadPointers() : nullptr, numInputChannels,
                                                 numOutputChannels > 0 ? outputBuffer.getArrayOfWritePointers() : nullptr, numOutputChannels,
                                                 actualBufferSize);
            else
                outputBuffer.clear();
        }

        if (player != nullptr)
            player->writeBuffer (outputBuffer, *this);
    }
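
    // Audio thread body: raise the thread to Android's audio priority, start the
    // OpenSL recorder/player, then keep pumping blocks until asked to exit.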
    void run() override
    {
        setThreadToAudioPriority();

        if (recorder != nullptr)  recorder->start();
        if (player != nullptr)    player->start();

        while (! threadShouldExit())
            processBuffers();
    }

    void setThreadToAudioPriority()
    {
        // see android.os.Process.THREAD_PRIORITY_AUDIO
        const int THREAD_PRIORITY_AUDIO = -16;

        jint priority = THREAD_PRIORITY_AUDIO;

        if (priority != android.activity.callIntMethod (JuceAppActivity.setCurrentThreadPriority, (jint) priority))
            DBG ("Unable to set audio thread priority: priority is still " << priority);
    }

    //==============================================================================
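    // Wraps the OpenSL engine and output mix. libOpenSLES.so is opened at runtime
    // and slCreateEngine plus the interface IDs are resolved by name through
    // DynamicLibrary, so nothing has to link against OpenSL at build time.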
    struct Engine
    {
        Engine()
            : engineObject (nullptr), engineInterface (nullptr), outputMixObject (nullptr)
        {
            if (library.open ("libOpenSLES.so"))
            {
                typedef SLresult (*CreateEngineFunc) (SLObjectItf*, SLuint32, const SLEngineOption*,
                                                      SLuint32, const SLInterfaceID*, const SLboolean*);

                if (CreateEngineFunc createEngine = (CreateEngineFunc) library.getFunction ("slCreateEngine"))
                {
                    check (createEngine (&engineObject, 0, nullptr, 0, nullptr, nullptr));

                    SLInterfaceID* SL_IID_ENGINE    = (SLInterfaceID*) library.getFunction ("SL_IID_ENGINE");
                    SL_IID_ANDROIDSIMPLEBUFFERQUEUE = (SLInterfaceID*) library.getFunction ("SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
                    SL_IID_PLAY                     = (SLInterfaceID*) library.getFunction ("SL_IID_PLAY");
                    SL_IID_RECORD                   = (SLInterfaceID*) library.getFunction ("SL_IID_RECORD");
                    SL_IID_ANDROIDCONFIGURATION     = (SLInterfaceID*) library.getFunction ("SL_IID_ANDROIDCONFIGURATION");

                    check ((*engineObject)->Realize (engineObject, SL_BOOLEAN_FALSE));
                    check ((*engineObject)->GetInterface (engineObject, *SL_IID_ENGINE, &engineInterface));
                    check ((*engineInterface)->CreateOutputMix (engineInterface, &outputMixObject, 0, nullptr, nullptr));
                    check ((*outputMixObject)->Realize (outputMixObject, SL_BOOLEAN_FALSE));
                }
            }
        }

        ~Engine()
        {
            if (outputMixObject != nullptr)  (*outputMixObject)->Destroy (outputMixObject);
            if (engineObject != nullptr)     (*engineObject)->Destroy (engineObject);
        }

        Player* createPlayer (const int numChannels, const int sampleRate, const int numBuffers, const int bufferSize)
        {
            if (numChannels <= 0)
                return nullptr;

            ScopedPointer<Player> player (new Player (numChannels, sampleRate, *this, numBuffers, bufferSize));
            return player->openedOk() ? player.release() : nullptr;
        }

        Recorder* createRecorder (const int numChannels, const int sampleRate, const int numBuffers, const int bufferSize)
        {
            if (numChannels <= 0)
                return nullptr;

            ScopedPointer<Recorder> recorder (new Recorder (numChannels, sampleRate, *this, numBuffers, bufferSize));
            return recorder->openedOk() ? recorder.release() : nullptr;
        }

        SLObjectItf engineObject;
        SLEngineItf engineInterface;
        SLObjectItf outputMixObject;

        SLInterfaceID* SL_IID_ANDROIDSIMPLEBUFFERQUEUE;
        SLInterfaceID* SL_IID_PLAY;
        SLInterfaceID* SL_IID_RECORD;
        SLInterfaceID* SL_IID_ANDROIDCONFIGURATION;

    private:
        DynamicLibrary library;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Engine)
    };

    //==============================================================================
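    // A fixed pool of interleaved 16-bit blocks shared between the audio thread and
    // the OpenSL buffer-queue callbacks; numBlocksOut plus a WaitableEvent provide
    // the back-pressure that keeps the two sides in step.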
    struct BufferList
    {
        BufferList (const int numChannels_, const int numBuffers_, const int numSamples_)
            : numChannels (numChannels_), numBuffers (numBuffers_), numSamples (numSamples_),
              bufferSpace (numChannels_ * numSamples * numBuffers), nextBlock (0)
        {
        }

        int16* waitForFreeBuffer (Thread& threadToCheck) noexcept
        {
            while (numBlocksOut.get() == numBuffers)
            {
                dataArrived.wait (1);

                if (threadToCheck.threadShouldExit())
                    return nullptr;
            }

            return getNextBuffer();
        }

        int16* getNextBuffer() noexcept
        {
            if (++nextBlock == numBuffers)
                nextBlock = 0;

            return bufferSpace + nextBlock * numChannels * numSamples;
        }

        void bufferReturned() noexcept          { --numBlocksOut; dataArrived.signal(); }
        void bufferSent() noexcept              { ++numBlocksOut; dataArrived.signal(); }

        int getBufferSizeBytes() const noexcept { return numChannels * numSamples * sizeof (int16); }

        const int numChannels, numBuffers, numSamples;

    private:
        HeapBlock<int16> bufferSpace;
        int nextBlock;
        Atomic<int> numBlocksOut;
        WaitableEvent dataArrived;
    };

    //==============================================================================
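    // Output side: wraps an OpenSL AudioPlayer fed from an Android simple buffer
    // queue, converting JUCE's non-interleaved float blocks into interleaved
    // 16-bit little-endian PCM before enqueueing them.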
    struct Player
    {
        Player (int numChannels, int sampleRate, Engine& engine, int playerNumBuffers, int playerBufferSize)
            : playerObject (nullptr), playerPlay (nullptr), playerBufferQueue (nullptr),
              bufferList (numChannels, playerNumBuffers, playerBufferSize)
        {
            SLDataFormat_PCM pcmFormat =
            {
                SL_DATAFORMAT_PCM,
                (SLuint32) numChannels,
                (SLuint32) (sampleRate * 1000),
                SL_PCMSAMPLEFORMAT_FIXED_16,
                SL_PCMSAMPLEFORMAT_FIXED_16,
                (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT),
                SL_BYTEORDER_LITTLEENDIAN
            };

            SLDataLocator_AndroidSimpleBufferQueue bufferQueue = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
                                                                   static_cast<SLuint32> (bufferList.numBuffers) };
            SLDataSource audioSrc = { &bufferQueue, &pcmFormat };

            SLDataLocator_OutputMix outputMix = { SL_DATALOCATOR_OUTPUTMIX, engine.outputMixObject };
            SLDataSink audioSink = { &outputMix, nullptr };

            // (SL_IID_BUFFERQUEUE is not guaranteed to remain future-proof, so use SL_IID_ANDROIDSIMPLEBUFFERQUEUE)
            const SLInterfaceID interfaceIDs[] = { *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
            const SLboolean flags[] = { SL_BOOLEAN_TRUE };

            check ((*engine.engineInterface)->CreateAudioPlayer (engine.engineInterface, &playerObject, &audioSrc, &audioSink,
                                                                 1, interfaceIDs, flags));

            check ((*playerObject)->Realize (playerObject, SL_BOOLEAN_FALSE));
            check ((*playerObject)->GetInterface (playerObject, *engine.SL_IID_PLAY, &playerPlay));
            check ((*playerObject)->GetInterface (playerObject, *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &playerBufferQueue));
            check ((*playerBufferQueue)->RegisterCallback (playerBufferQueue, staticCallback, this));
        }

        ~Player()
        {
            if (playerPlay != nullptr)
                check ((*playerPlay)->SetPlayState (playerPlay, SL_PLAYSTATE_STOPPED));

            if (playerBufferQueue != nullptr)
                check ((*playerBufferQueue)->Clear (playerBufferQueue));

            if (playerObject != nullptr)
                (*playerObject)->Destroy (playerObject);
        }

        bool openedOk() const noexcept    { return playerBufferQueue != nullptr; }

        void start()
        {
            jassert (openedOk());
            check ((*playerPlay)->SetPlayState (playerPlay, SL_PLAYSTATE_PLAYING));
        }

        void writeBuffer (const AudioSampleBuffer& buffer, Thread& thread) noexcept
        {
            jassert (buffer.getNumChannels() == bufferList.numChannels);
            jassert (buffer.getNumSamples() < bufferList.numSamples * bufferList.numBuffers);

            int offset = 0;
            int numSamples = buffer.getNumSamples();

            while (numSamples > 0)
            {
                if (int16* const destBuffer = bufferList.waitForFreeBuffer (thread))
                {
                    for (int i = 0; i < bufferList.numChannels; ++i)
                    {
                        typedef AudioData::Pointer<AudioData::Int16,   AudioData::LittleEndian, AudioData::Interleaved,    AudioData::NonConst> DstSampleType;
                        typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const>    SrcSampleType;

                        DstSampleType dstData (destBuffer + i, bufferList.numChannels);
                        SrcSampleType srcData (buffer.getReadPointer (i, offset));
                        dstData.convertSamples (srcData, bufferList.numSamples);
                    }

                    enqueueBuffer (destBuffer);

                    numSamples -= bufferList.numSamples;
                    offset += bufferList.numSamples;
                }
                else
                {
                    break;
                }
            }
        }

    private:
        SLObjectItf playerObject;
        SLPlayItf playerPlay;
        SLAndroidSimpleBufferQueueItf playerBufferQueue;

        BufferList bufferList;

        void enqueueBuffer (int16* buffer) noexcept
        {
            check ((*playerBufferQueue)->Enqueue (playerBufferQueue, buffer, bufferList.getBufferSizeBytes()));
            bufferList.bufferSent();
        }

        static void staticCallback (SLAndroidSimpleBufferQueueItf queue, void* context) noexcept
        {
            jassert (queue == static_cast<Player*> (context)->playerBufferQueue); ignoreUnused (queue);

            static_cast<Player*> (context)->bufferList.bufferReturned();
        }

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Player)
    };

    //==============================================================================
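    // Input side: wraps an OpenSL AudioRecorder draining into a simple buffer queue,
    // converting interleaved 16-bit PCM back into JUCE's non-interleaved float
    // blocks. Where the Android configuration interface exists, it is used to switch
    // the recording preset (and with it, audio preprocessing).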
    struct Recorder
    {
        Recorder (int numChannels, int sampleRate, Engine& engine, const int numBuffers, const int numSamples)
            : recorderObject (nullptr), recorderRecord (nullptr),
              recorderBufferQueue (nullptr), configObject (nullptr),
              bufferList (numChannels, numBuffers, numSamples)
        {
            SLDataFormat_PCM pcmFormat =
            {
                SL_DATAFORMAT_PCM,
                (SLuint32) numChannels,
                (SLuint32) (sampleRate * 1000), // (sample rate units are millihertz)
                SL_PCMSAMPLEFORMAT_FIXED_16,
                SL_PCMSAMPLEFORMAT_FIXED_16,
                (numChannels == 1) ? SL_SPEAKER_FRONT_CENTER : (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT),
                SL_BYTEORDER_LITTLEENDIAN
            };

            SLDataLocator_IODevice ioDevice = { SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr };
            SLDataSource audioSrc = { &ioDevice, nullptr };

            SLDataLocator_AndroidSimpleBufferQueue bufferQueue = { SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
                                                                   static_cast<SLuint32> (bufferList.numBuffers) };
            SLDataSink audioSink = { &bufferQueue, &pcmFormat };

            const SLInterfaceID interfaceIDs[] = { *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
            const SLboolean flags[] = { SL_BOOLEAN_TRUE };

            if (check ((*engine.engineInterface)->CreateAudioRecorder (engine.engineInterface, &recorderObject, &audioSrc,
                                                                       &audioSink, 1, interfaceIDs, flags)))
            {
                if (check ((*recorderObject)->Realize (recorderObject, SL_BOOLEAN_FALSE)))
                {
                    check ((*recorderObject)->GetInterface (recorderObject, *engine.SL_IID_RECORD, &recorderRecord));
                    check ((*recorderObject)->GetInterface (recorderObject, *engine.SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &recorderBufferQueue));

                    // not all android versions seem to have a config object
                    SLresult result = (*recorderObject)->GetInterface (recorderObject,
                                                                       *engine.SL_IID_ANDROIDCONFIGURATION, &configObject);
                    if (result != SL_RESULT_SUCCESS)
                        configObject = nullptr;

                    check ((*recorderBufferQueue)->RegisterCallback (recorderBufferQueue, staticCallback, this));
                    check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_STOPPED));
                }
            }
        }

        ~Recorder()
        {
            if (recorderRecord != nullptr)
                check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_STOPPED));

            if (recorderBufferQueue != nullptr)
                check ((*recorderBufferQueue)->Clear (recorderBufferQueue));

            if (recorderObject != nullptr)
                (*recorderObject)->Destroy (recorderObject);
        }

        bool openedOk() const noexcept    { return recorderBufferQueue != nullptr; }

        void start()
        {
            jassert (openedOk());
            check ((*recorderRecord)->SetRecordState (recorderRecord, SL_RECORDSTATE_RECORDING));
        }

        void readNextBlock (AudioSampleBuffer& buffer, Thread& thread)
        {
            jassert (buffer.getNumChannels() == bufferList.numChannels);
            jassert (buffer.getNumSamples() < bufferList.numSamples * bufferList.numBuffers);
            jassert ((buffer.getNumSamples() % bufferList.numSamples) == 0);

            int offset = 0;
            int numSamples = buffer.getNumSamples();

            while (numSamples > 0)
            {
                if (int16* const srcBuffer = bufferList.waitForFreeBuffer (thread))
                {
                    for (int i = 0; i < bufferList.numChannels; ++i)
                    {
                        typedef AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> DstSampleType;
                        typedef AudioData::Pointer<AudioData::Int16,   AudioData::LittleEndian, AudioData::Interleaved,    AudioData::Const>    SrcSampleType;

                        DstSampleType dstData (buffer.getWritePointer (i, offset));
                        SrcSampleType srcData (srcBuffer + i, bufferList.numChannels);
                        dstData.convertSamples (srcData, bufferList.numSamples);
                    }

                    enqueueBuffer (srcBuffer);

                    numSamples -= bufferList.numSamples;
                    offset += bufferList.numSamples;
                }
                else
                {
                    break;
                }
            }
        }

        bool setAudioPreprocessingEnabled (bool enable)
        {
            SLuint32 mode = enable ? SL_ANDROID_RECORDING_PRESET_GENERIC
                                   : SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;

            return configObject != nullptr
                     && check ((*configObject)->SetConfiguration (configObject, SL_ANDROID_KEY_RECORDING_PRESET, &mode, sizeof (mode)));
        }

    private:
        SLObjectItf recorderObject;
        SLRecordItf recorderRecord;
        SLAndroidSimpleBufferQueueItf recorderBufferQueue;
        SLAndroidConfigurationItf configObject;

        BufferList bufferList;

        void enqueueBuffer (int16* buffer) noexcept
        {
            check ((*recorderBufferQueue)->Enqueue (recorderBufferQueue, buffer, bufferList.getBufferSizeBytes()));
            bufferList.bufferSent();
        }

        static void staticCallback (SLAndroidSimpleBufferQueueItf queue, void* context) noexcept
        {
            jassert (queue == static_cast<Recorder*> (context)->recorderBufferQueue); ignoreUnused (queue);

            static_cast<Recorder*> (context)->bufferList.bufferReturned();
        }

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Recorder)
    };
    //==============================================================================
    Engine engine;

    ScopedPointer<Player> player;
    ScopedPointer<Recorder> recorder;

    //==============================================================================
    static bool check (const SLresult result) noexcept
    {
        jassert (result == SL_RESULT_SUCCESS);
        return result == SL_RESULT_SUCCESS;
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice)
};

//==============================================================================
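// The device "type" exposed to the rest of JUCE: it advertises exactly one device,
// named after openSLTypeName, which handles both input and output.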
class OpenSLAudioDeviceType  : public AudioIODeviceType
{
public:
    OpenSLAudioDeviceType()  : AudioIODeviceType (openSLTypeName) {}

    //==============================================================================
    void scanForDevices() override {}
    StringArray getDeviceNames (bool wantInputNames) const override              { return StringArray (openSLTypeName); }
    int getDefaultDeviceIndex (bool forInput) const override                     { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool asInput) const override    { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const override                            { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName) override
    {
        ScopedPointer<OpenSLAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new OpenSLAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                         : inputDeviceName);
            if (! dev->openedOk())
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioDeviceType)
};

//==============================================================================
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_OpenSLES()
{
    return isOpenSLAvailable() ? new OpenSLAudioDeviceType() : nullptr;
}
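
// A rough sketch of how this device type normally ends up in use from application
// code (assuming the standard juce::AudioDeviceManager flow, which picks this type
// up via createAudioIODeviceType_OpenSLES() when running on Android):
//
//     juce::AudioDeviceManager deviceManager;
//
//     // ask for mono in / stereo out; this eventually calls OpenSLAudioIODevice::open()
//     deviceManager.initialiseWithDefaultDevices (1, 2);
//
//     // myCallback is a hypothetical AudioIODeviceCallback implementation; it will be
//     // driven from the "OpenSL" thread created above
//     deviceManager.addAudioCallback (&myCallback);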