Audio plugin host https://kx.studio/carla
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

471 lines
18KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2022 - Raw Material Software Limited
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. The code included in this file is provided under the terms of the ISC license
  8. http://www.isc.org/downloads/software-support-policy/isc-license. Permission
  9. To use, copy, modify, and/or distribute this software for any purpose with or
  10. without fee is hereby granted provided that the above copyright notice and
  11. this permission notice appear in all copies.
  12. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  13. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  14. DISCLAIMED.
  15. ==============================================================================
  16. */
  17. namespace juce
  18. {
//==============================================================================
// JNI bindings for android.media.AudioTrack (audio playback).
// Each entry maps a C++ identifier to a Java method name and its JNI type
// signature; DECLARE_JNI_CLASS generates the class/method-ID lookup code.
// Note: comments cannot appear inside the backslash-continued macro body.
//   getMinBufferSize (sampleRate, channelConfig, audioFormat) -> int bytes
//   getNativeOutputSampleRate (streamType)                    -> int Hz
//   <init> (streamType, sampleRate, channelConfig, audioFormat,
//           bufferSizeInBytes, mode)
//   write (short[], offsetInShorts, sizeInShorts)             -> shorts written
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>", "(IIIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (play, "play", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (release, "release", "()V") \
 METHOD (flush, "flush", "()V") \
 METHOD (write, "write", "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack")
#undef JNI_CLASS_MEMBERS
  31. //==============================================================================
//==============================================================================
// JNI bindings for android.media.AudioRecord (audio capture).
//   getMinBufferSize (sampleRate, channelConfig, audioFormat) -> int bytes
//   <init> (audioSource, sampleRate, channelConfig, audioFormat,
//           bufferSizeInBytes)
//   read (short[], offsetInShorts, sizeInShorts)              -> shorts read
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD, CALLBACK) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor, "<init>", "(IIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (read, "read", "([SII)I") \
 METHOD (release, "release", "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord")
#undef JNI_CLASS_MEMBERS
  42. //==============================================================================
//==============================================================================
// Mirrors of Android SDK constants, so they can be passed over JNI without
// looking them up reflectively. The numeric values MUST match the ones
// declared in android.media.AudioFormat / AudioManager / AudioTrack /
// AudioRecord - do not change them.
enum
{
    CHANNEL_OUT_STEREO  = 12,   // AudioFormat.CHANNEL_OUT_STEREO
    CHANNEL_IN_STEREO   = 12,   // AudioFormat.CHANNEL_IN_STEREO
    CHANNEL_IN_MONO     = 16,   // AudioFormat.CHANNEL_IN_MONO
    ENCODING_PCM_16BIT  = 2,    // AudioFormat.ENCODING_PCM_16BIT
    STREAM_MUSIC        = 3,    // AudioManager.STREAM_MUSIC
    MODE_STREAM         = 1,    // AudioTrack.MODE_STREAM
    STATE_UNINITIALIZED = 0     // AudioTrack/AudioRecord.STATE_UNINITIALIZED
};

// Name reported for this device type by the device manager.
const char* const javaAudioTypeName = "Android Audio";
  54. //==============================================================================
//==============================================================================
/** An AudioIODevice implementation that streams audio through the Java
    android.media.AudioTrack / AudioRecord classes via JNI.

    A dedicated thread (see run()) moves interleaved 16-bit samples between
    the Java audio objects and the float channel buffers that are handed to
    the registered AudioIODeviceCallback.
*/
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (nullptr), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();

        // Query the hardware's native output rate for the music stream;
        // open() will reject any other requested sample rate.
        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        // If stereo recording isn't available, fall back to mono input;
        // if that also fails, report no input channels at all.
        if (minBufferSizeIn <= 0)
        {
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }

    ~AndroidAudioIODevice() override
    {
        close();
    }

    // Output is always exposed as a stereo pair.
    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    // Input names depend on what the constructor's probing discovered.
    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    // Only the native rate discovered in the constructor is supported.
    Array<double> getAvailableSampleRates() override
    {
        Array<double> r;
        r.add ((double) sampleRate);
        return r;
    }

    // Offers 50 sizes starting at 16 samples, with a step that grows as the
    // size increases (16 up to 64, then 32, 64, 128 and finally 256).
    Array<int> getAvailableBufferSizes() override
    {
        Array<int> b;
        int n = 16;

        for (int i = 0; i < 50; ++i)
        {
            b.add (n);
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));
        }

        return b;
    }

    int getDefaultBufferSize() override                 { return 2048; }

    /** Opens the Java audio objects and starts the streaming thread.

        Returns an empty string on success, or an error message. Only the
        device's native sample rate is accepted; at most two input and two
        output channels are used.
    */
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError.clear();
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        // Clamp the requested channel masks to the first two channels.
        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;

            // The Java buffer size is in bytes: frames * channels * sizeof (int16).
            outputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioTrack, AudioTrack.constructor,
                                                                        STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                                        (jint) (minBufferSizeOut * numDeviceOutputChannels * static_cast<int> (sizeof (int16))), MODE_STREAM)));

            // AudioTrack.getUnderrunCount() only exists from API level 24 (Android 7).
            const bool supportsUnderrunCount = (getAndroidSDKVersion() >= 24);
            getUnderrunCount = supportsUnderrunCount ? env->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : nullptr;

            int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);
            if (outputDeviceState > 0)
            {
                isRunning = true;
            }
            else
            {
                // failed to open the device
                outputDevice.clear();
                lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
            }
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
                // before trying to open an audio input device. This is not going to work!
                jassertfalse;

                inputDevice.clear();
                lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
            }
            else
            {
                numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);

                inputDevice = GlobalRef (LocalRef<jobject>(env->NewObject (AudioRecord, AudioRecord.constructor,
                                                                           0 /* (default audio source) */, sampleRate,
                                                                           numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                                           ENCODING_PCM_16BIT,
                                                                           (jint) (minBufferSizeIn * numDeviceInputChannels * static_cast<int> (sizeof (int16))))));

                int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);
                if (inputDeviceState > 0)
                {
                    isRunning = true;
                }
                else
                {
                    // failed to open the device
                    inputDevice.clear();
                    lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
                }
            }
        }

        if (isRunning)
        {
            // Start the Java-side streaming first, then the worker thread
            // that feeds/drains the buffers.
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }

    void close() override
    {
        if (isRunning)
        {
            // Stop the streaming thread before tearing down the Java objects
            // it uses.
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }

    // Rough latency estimates derived from the minimum Java buffer sizes.
    int getOutputLatencyInSamples() override            { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples() override             { return (minBufferSizeIn * 3) / 4; }
    bool isOpen() override                              { return isRunning; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    double getCurrentSampleRate() override              { return sampleRate; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return isRunning && callback != nullptr; }

    /** Returns the AudioTrack underrun count, or -1 when there is no output
        device or the API level doesn't support getUnderrunCount(). */
    int getXRunCount() const noexcept override
    {
        if (outputDevice != nullptr && getUnderrunCount != nullptr)
            return getEnv()->CallIntMethod (outputDevice, getUnderrunCount);

        return -1;
    }

    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            // Notify the new callback before publishing it to the audio thread.
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            // Swap the callback out under the lock, but call
            // audioDeviceStopped() outside it to avoid holding the lock
            // during client code.
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }

    /** The streaming thread: reads from AudioRecord, invokes the client
        callback, then writes to AudioTrack, converting between interleaved
        int16 (Java side) and non-interleaved float (callback side). */
    void run() override
    {
        JNIEnv* env = getEnv();

        // One shared Java array, big enough for whichever direction needs more.
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
        using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                // De-interleave the captured int16 data into the float buffer.
                jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);

                AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>     { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
                                                AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
                                                actualBufferSize);

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallbackWithContext (inputChannelBuffer.getArrayOfReadPointers(),
                                                                numClientInputChannels,
                                                                outputChannelBuffer.getArrayOfWritePointers(),
                                                                numClientOutputChannels,
                                                                actualBufferSize, {});
                }
                else
                {
                    // No client callback installed - output silence.
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                // Interleave the float output into int16 and hand it to Java.
                jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);

                AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
                                              AudioData::InterleavedDest<NativeInt16>        { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
                                              actualBufferSize);

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }

    // Minimum Java-side buffer sizes (in frames) discovered at construction.
    int minBufferSizeOut, minBufferSizeIn;

private:
    //==============================================================================
    CriticalSection callbackLock;           // guards 'callback' between client and audio threads
    AudioIODeviceCallback* callback;        // only read/written while holding callbackLock
    jint sampleRate;                        // the device's native rate - the only one accepted
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;                   // buffer size in frames per callback
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;    // global refs to the Java AudioTrack/AudioRecord objects
    AudioBuffer<float> inputChannelBuffer, outputChannelBuffer;
    jmethodID getUnderrunCount = nullptr;   // null when API < 24 or no output device

    // Stops, releases and clears both Java audio objects (safe to call when
    // either is already null).
    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};
  356. //==============================================================================
  357. class AndroidAudioIODeviceType : public AudioIODeviceType
  358. {
  359. public:
  360. AndroidAudioIODeviceType() : AudioIODeviceType (javaAudioTypeName) {}
  361. //==============================================================================
  362. void scanForDevices() {}
  363. StringArray getDeviceNames (bool) const { return StringArray (javaAudioTypeName); }
  364. int getDefaultDeviceIndex (bool) const { return 0; }
  365. int getIndexOfDevice (AudioIODevice* device, bool) const { return device != nullptr ? 0 : -1; }
  366. bool hasSeparateInputsAndOutputs() const { return false; }
  367. AudioIODevice* createDevice (const String& outputDeviceName,
  368. const String& inputDeviceName)
  369. {
  370. std::unique_ptr<AndroidAudioIODevice> dev;
  371. if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
  372. {
  373. dev.reset (new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
  374. : inputDeviceName));
  375. if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
  376. dev = nullptr;
  377. }
  378. return dev.release();
  379. }
  380. private:
  381. JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
  382. };
  383. //==============================================================================
  384. extern bool isOboeAvailable();
  385. extern bool isOpenSLAvailable();
  386. } // namespace juce