Audio plugin host https://kx.studio/carla

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2013 - Raw Material Software Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>", "(IIIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (play, "play", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (release, "release", "()V") \
 METHOD (flush, "flush", "()V") \
 METHOD (write, "write", "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS

//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor, "<init>", "(IIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (read, "read", "([SII)I") \
 METHOD (release, "release", "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS
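// Note: the quoted strings in the two blocks above are standard JNI type
// signatures, e.g. "(III)I" is a method taking three ints and returning an int,
// "([SII)I" takes a short array plus two ints, and "()V" takes nothing and
// returns void.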
//==============================================================================
enum
{
    CHANNEL_OUT_STEREO  = 12,
    CHANNEL_IN_STEREO   = 12,
    CHANNEL_IN_MONO     = 16,
    ENCODING_PCM_16BIT  = 2,
    STREAM_MUSIC        = 3,
    MODE_STREAM         = 1,
    STATE_UNINITIALIZED = 0
};

const char* const javaAudioTypeName = "Android Audio";
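// The enum values above mirror the corresponding constants in the Android SDK
// (android.media.AudioFormat, AudioManager and AudioTrack), so they can be
// passed straight through JNI without looking up those classes' fields.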
//==============================================================================
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();

        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        if (minBufferSizeIn <= 0)
        {
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }
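    // Note: numDeviceInputChannelsAvailable starts at 2 and is downgraded to mono
    // (or to no input at all) when AudioRecord.getMinBufferSize reports that a
    // stereo input configuration isn't supported on this device.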
    ~AndroidAudioIODevice()
    {
        close();
    }

    StringArray getOutputChannelNames()
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames()
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    int getNumSampleRates()             { return 1; }
    double getSampleRate (int index)    { return sampleRate; }

    int getDefaultBufferSize()          { return 2048; }
    int getNumBufferSizesAvailable()    { return 50; }

    int getBufferSizeSamples (int index)
    {
        int n = 16;
        for (int i = 0; i < index; ++i)
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));

        return n;
    }
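    // The available buffer sizes therefore form an increasing sequence that grows
    // in steps of 16 samples up to 64, then 32 up to 512, 64 up to 1024,
    // 128 up to 2048, and 256 beyond that.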
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize)
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError = String::empty;
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;
            outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
                                                      STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                      (jint) (minBufferSizeOut * numDeviceOutputChannels * sizeof (int16)), MODE_STREAM));

            if (env->CallIntMethod (outputDevice, AudioTrack.getState) != STATE_UNINITIALIZED)
                isRunning = true;
            else
                outputDevice.clear(); // failed to open the device
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
            inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
                                                     0 /* (default audio source) */, sampleRate,
                                                     numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                     ENCODING_PCM_16BIT,
                                                     (jint) (minBufferSizeIn * numDeviceInputChannels * sizeof (int16))));

            if (env->CallIntMethod (inputDevice, AudioRecord.getState) != STATE_UNINITIALIZED)
                isRunning = true;
            else
                inputDevice.clear(); // failed to open the device
        }

        if (isRunning)
        {
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }
    void close()
    {
        if (isRunning)
        {
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }

    int getOutputLatencyInSamples()     { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples()      { return (minBufferSizeIn * 3) / 4; }
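    // The 3/4-of-minimum-buffer values above are heuristic estimates rather than
    // latency figures reported by the device.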
    bool isOpen()                       { return isRunning; }
    int getCurrentBufferSizeSamples()   { return actualBufferSize; }
    int getCurrentBitDepth()            { return 16; }
    double getCurrentSampleRate()       { return sampleRate; }
    BigInteger getActiveOutputChannels() const   { return activeOutputChans; }
    BigInteger getActiveInputChannels() const    { return activeInputChans; }
    String getLastError()               { return lastError; }
    bool isPlaying()                    { return isRunning && callback != 0; }

    void start (AudioIODeviceCallback* newCallback)
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop()
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }
    void run() override
    {
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
                {
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getSampleData (chan));

                    if (chan < numDeviceInputChannels)
                    {
                        AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
                        d.convertSamples (s, actualBufferSize);
                    }
                    else
                    {
                        d.clearSamples (actualBufferSize);
                    }
                }

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallback ((const float**) inputChannelBuffer.getArrayOfChannels(), numClientInputChannels,
                                                     outputChannelBuffer.getArrayOfChannels(), numClientOutputChannels,
                                                     actualBufferSize);
                }
                else
                {
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
                {
                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);

                    const float* const sourceChanData = outputChannelBuffer.getSampleData (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
                    d.convertSamples (s, actualBufferSize);
                }

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }
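    // Each pass of the loop above reads interleaved 16-bit PCM from AudioRecord,
    // converts it to non-interleaved floats, hands both buffers to the client
    // callback, then converts the float output back to interleaved 16-bit samples
    // and writes them to AudioTrack.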
    int minBufferSizeOut, minBufferSizeIn;

private:
    //==================================================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    jint sampleRate;
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;
    AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;

    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};
//==============================================================================
class AndroidAudioIODeviceType  : public AudioIODeviceType
{
public:
    AndroidAudioIODeviceType()  : AudioIODeviceType (javaAudioTypeName) {}

    //==============================================================================
    void scanForDevices() {}
    StringArray getDeviceNames (bool wantInputNames) const              { return StringArray (javaAudioTypeName); }
    int getDefaultDeviceIndex (bool forInput) const                     { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool asInput) const    { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                            { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        ScopedPointer<AndroidAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

            if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};
//==============================================================================
extern bool isOpenSLAvailable();
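// When OpenSL ES support is compiled in and available at runtime, the factory
// below returns nullptr, presumably so that a separate OpenSL-based device type
// is used in preference to this Java AudioTrack/AudioRecord one.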
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
   #if JUCE_USE_ANDROID_OPENSLES
    if (isOpenSLAvailable())
        return nullptr;
   #endif

    return new AndroidAudioIODeviceType();
}