/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2015 - ROLI Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE.  See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/

//==============================================================================
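// JNI wrapper for android.media.AudioTrack: each METHOD/STATICMETHOD entry maps a
// C++ name to a Java method name plus its JNI signature, and DECLARE_JNI_CLASS
// (from JUCE's Android JNI helpers) turns the list into a set of cached method IDs.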
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize,          "getMinBufferSize",          "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>",   "(IIIIII)V") \
 METHOD (getState,    "getState", "()I") \
 METHOD (play,        "play",     "()V") \
 METHOD (stop,        "stop",     "()V") \
 METHOD (release,     "release",  "()V") \
 METHOD (flush,       "flush",    "()V") \
 METHOD (write,       "write",    "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS

//==============================================================================
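// Matching JNI wrapper for android.media.AudioRecord (the capture side).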
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor,    "<init>",         "(IIIII)V") \
 METHOD (getState,       "getState",       "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop,           "stop",           "()V") \
 METHOD (read,           "read",           "([SII)I") \
 METHOD (release,        "release",        "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS

//==============================================================================
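// Local copies of the Android SDK constants used below (AudioFormat, AudioManager
// and AudioTrack values), hard-coded so no extra JNI lookups are needed.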
enum
{
    CHANNEL_OUT_STEREO  = 12,
    CHANNEL_IN_STEREO   = 12,
    CHANNEL_IN_MONO     = 16,
    ENCODING_PCM_16BIT  = 2,
    STREAM_MUSIC        = 3,
    MODE_STREAM         = 1,
    STATE_UNINITIALIZED = 0
};

const char* const javaAudioTypeName = "Android Audio";

//==============================================================================
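/** An AudioIODevice that drives android.media.AudioTrack and android.media.AudioRecord
    from its own audio thread, converting between the devices' interleaved 16-bit
    buffers and JUCE's non-interleaved float channel data.
*/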
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
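    // Queries the Java side for the native output sample rate and the minimum
    // AudioTrack/AudioRecord buffer sizes; if no stereo input buffer can be
    // obtained it falls back to mono input, or to no input at all.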
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();

        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        if (minBufferSizeIn <= 0)
        {
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }

    ~AndroidAudioIODevice()
    {
        close();
    }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        Array<double> r;
        r.add ((double) sampleRate);
        return r;
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> b;
        int n = 16;

        for (int i = 0; i < 50; ++i)
        {
            b.add (n);
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));
        }

        return b;
    }

    int getDefaultBufferSize() override                 { return 2048; }
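    // Opens the device: only the native sample rate is accepted, channels beyond the
    // first stereo pair are dropped, and the AudioTrack / AudioRecord objects are
    // created and verified via getState() before playback, recording and the audio
    // thread are started.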
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError.clear();
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;
            outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
                                                      STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                      (jint) (minBufferSizeOut * numDeviceOutputChannels * sizeof (int16)), MODE_STREAM));

            int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);

            if (outputDeviceState > 0)
            {
                isRunning = true;
            }
            else
            {
                // failed to open the device
                outputDevice.clear();
                lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
            }
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
                // before trying to open an audio input device. This is not going to work!
                jassertfalse;

                inputDevice.clear();
                lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
            }
            else
            {
                numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
                inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
                                                         0 /* (default audio source) */, sampleRate,
                                                         numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                         ENCODING_PCM_16BIT,
                                                         (jint) (minBufferSizeIn * numDeviceInputChannels * sizeof (int16))));

                int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);

                if (inputDeviceState > 0)
                {
                    isRunning = true;
                }
                else
                {
                    // failed to open the device
                    inputDevice.clear();
                    lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
                }
            }
        }

        if (isRunning)
        {
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }

    void close() override
    {
        if (isRunning)
        {
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }

    int getOutputLatencyInSamples() override            { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples() override             { return (minBufferSizeIn * 3) / 4; }
    bool isOpen() override                              { return isRunning; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    double getCurrentSampleRate() override              { return sampleRate; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return isRunning && callback != 0; }
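    // start() and stop() only swap the client callback in and out under the lock;
    // the Java devices and the audio thread keep running until close() is called.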
    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }
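    // The audio thread: reads interleaved 16-bit samples from AudioRecord,
    // de-interleaves and converts them to floats for the client callback, then
    // converts the callback's float output back to interleaved 16-bit data and
    // writes it to AudioTrack. A single jshortArray is reused for both directions.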
    void run() override
    {
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
                {
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));

                    if (chan < numDeviceInputChannels)
                    {
                        AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
                        d.convertSamples (s, actualBufferSize);
                    }
                    else
                    {
                        d.clearSamples (actualBufferSize);
                    }
                }

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallback (inputChannelBuffer.getArrayOfReadPointers(), numClientInputChannels,
                                                     outputChannelBuffer.getArrayOfWritePointers(), numClientOutputChannels,
                                                     actualBufferSize);
                }
                else
                {
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
                {
                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);

                    const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
                    d.convertSamples (s, actualBufferSize);
                }

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }

    int minBufferSizeOut, minBufferSizeIn;

private:
    //==============================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    jint sampleRate;
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;
    AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;
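
    // Stops and releases the Java AudioTrack/AudioRecord objects and drops the global refs.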
    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};

//==============================================================================
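/** The AudioIODeviceType that exposes the single "Android Audio" device above;
    inputs and outputs are combined, so there is only ever one device name.
*/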
class AndroidAudioIODeviceType  : public AudioIODeviceType
{
public:
    AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}

    //==============================================================================
    void scanForDevices() {}
    StringArray getDeviceNames (bool wantInputNames) const            { return StringArray (javaAudioTypeName); }
    int getDefaultDeviceIndex (bool forInput) const                   { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool asInput) const  { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                          { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        ScopedPointer<AndroidAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

            if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};

//==============================================================================
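// Factory used by the device manager: when the OpenSL ES backend is compiled in and
// available at runtime, this returns nullptr so the OpenSL device type is used
// instead of this Java-based one.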
extern bool isOpenSLAvailable();

AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
   #if JUCE_USE_ANDROID_OPENSLES
    if (isOpenSLAvailable())
        return nullptr;
   #endif

    return new AndroidAudioIODeviceType();
}
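
//==============================================================================
// Usage sketch: applications normally never touch these classes directly; they let
// AudioDeviceManager pick up the device type returned above, roughly like this
// (myCallback being the app's own AudioIODeviceCallback, shown only for illustration):
//
//     AudioDeviceManager deviceManager;
//     String error = deviceManager.initialise (2, 2, nullptr, true);   // ask for stereo in/out
//
//     if (error.isEmpty())
//         deviceManager.addAudioCallback (&myCallback);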