/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2013 - Raw Material Software Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE.  See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/

//==============================================================================
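// JNI method bindings for android.media.AudioTrack (the Java audio output class).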
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>", "(IIIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (play, "play", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (release, "release", "()V") \
 METHOD (flush, "flush", "()V") \
 METHOD (write, "write", "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS

//==============================================================================
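// JNI method bindings for android.media.AudioRecord (the Java audio input class).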
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor, "<init>", "(IIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (read, "read", "([SII)I") \
 METHOD (release, "release", "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS

//==============================================================================
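// Constants mirroring the values defined by the Android Java audio classes
// (AudioFormat, AudioManager, AudioTrack), so they can be passed across JNI.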
enum
{
    CHANNEL_OUT_STEREO  = 12,
    CHANNEL_IN_STEREO   = 12,
    CHANNEL_IN_MONO     = 16,
    ENCODING_PCM_16BIT  = 2,
    STREAM_MUSIC        = 3,
    MODE_STREAM         = 1,
    STATE_UNINITIALIZED = 0
};

const char* const javaAudioTypeName = "Android Audio";

//==============================================================================
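// AudioIODevice implementation that streams 16-bit PCM to/from the Java
// AudioTrack and AudioRecord objects from a dedicated audio thread.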
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();

        // Query the device's native output rate and the minimum buffer sizes
        // that AudioTrack / AudioRecord will accept at that rate.
        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        if (minBufferSizeIn <= 0)
        {
            // Stereo capture isn't available - fall back to mono, or to no input at all.
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }
    ~AndroidAudioIODevice()
    {
        close();
    }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        Array<double> r;
        r.add ((double) sampleRate);
        return r;
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> b;
        int n = 16;

        for (int i = 0; i < 50; ++i)
        {
            b.add (n);
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));
        }

        return b;
    }

    int getDefaultBufferSize() override                 { return 2048; }
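    // Opens the Java AudioTrack / AudioRecord objects for the requested channel
    // layout; only the device's native sample rate is accepted.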
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError.clear();
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        // The Java device is limited to stereo, so ignore any channels beyond the first two.
        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;
            outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
                                                      STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                      (jint) (minBufferSizeOut * numDeviceOutputChannels * sizeof (int16)), MODE_STREAM));

            if (env->CallIntMethod (outputDevice, AudioTrack.getState) != STATE_UNINITIALIZED)
                isRunning = true;
            else
                outputDevice.clear(); // failed to open the device
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
            inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
                                                     0 /* (default audio source) */, sampleRate,
                                                     numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                     ENCODING_PCM_16BIT,
                                                     (jint) (minBufferSizeIn * numDeviceInputChannels * sizeof (int16))));

            if (env->CallIntMethod (inputDevice, AudioRecord.getState) != STATE_UNINITIALIZED)
                isRunning = true;
            else
                inputDevice.clear(); // failed to open the device
        }

        if (isRunning)
        {
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }
    void close() override
    {
        if (isRunning)
        {
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }

    int getOutputLatencyInSamples() override            { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples() override             { return (minBufferSizeIn * 3) / 4; }
    bool isOpen() override                              { return isRunning; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    double getCurrentSampleRate() override              { return sampleRate; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return isRunning && callback != 0; }

    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }
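    // The audio thread: reads a block from AudioRecord, hands it to the client
    // callback, then writes the rendered block to AudioTrack, converting between
    // interleaved int16 (Java side) and non-interleaved float (JUCE side).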
    void run() override
    {
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

                // De-interleave the captured int16 samples into the float input buffer.
                for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
                {
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getSampleData (chan));

                    if (chan < numDeviceInputChannels)
                    {
                        AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
                        d.convertSamples (s, actualBufferSize);
                    }
                    else
                    {
                        d.clearSamples (actualBufferSize);
                    }
                }

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallback ((const float**) inputChannelBuffer.getArrayOfChannels(), numClientInputChannels,
                                                     outputChannelBuffer.getArrayOfChannels(), numClientOutputChannels,
                                                     actualBufferSize);
                }
                else
                {
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

                // Interleave the rendered float samples back into int16 for AudioTrack.
                for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
                {
                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);

                    const float* const sourceChanData = outputChannelBuffer.getSampleData (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
                    d.convertSamples (s, actualBufferSize);
                }

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);

                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }
    int minBufferSizeOut, minBufferSizeIn;

private:
    //==================================================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    jint sampleRate;
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;
    AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;

    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};

//==============================================================================
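// AudioIODeviceType exposing a single "Android Audio" device.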
class AndroidAudioIODeviceType  : public AudioIODeviceType
{
public:
    AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}

    //==============================================================================
    void scanForDevices() {}
    StringArray getDeviceNames (bool wantInputNames) const            { return StringArray (javaAudioTypeName); }
    int getDefaultDeviceIndex (bool forInput) const                   { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool asInput) const  { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                          { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        ScopedPointer<AndroidAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

            if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};

//==============================================================================
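// Factory used when creating the platform device types: if OpenSL ES support is
// compiled in and available at runtime, this returns nullptr so that the OpenSL
// device type is used instead of the Java AudioTrack/AudioRecord path above.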
extern bool isOpenSLAvailable();

AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
   #if JUCE_USE_ANDROID_OPENSLES
    if (isOpenSLAvailable())
        return nullptr;
   #endif

    return new AndroidAudioIODeviceType();
}