Audio plugin host https://kx.studio/carla
/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   The code included in this file is provided under the terms of the ISC license
   http://www.isc.org/downloads/software-support-policy/isc-license. Permission
   to use, copy, modify, and/or distribute this software for any purpose with or
   without fee is hereby granted provided that the above copyright notice and
   this permission notice appear in all copies.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

namespace juce
{
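
// The following macro blocks declare JNI bindings for the Java classes this
// device uses (android.media.AudioTrack, android.media.AudioRecord and
// android.os.Build.VERSION). Each METHOD/STATICMETHOD/STATICFIELD entry lists
// a C++ name, the Java member name and its JNI signature; DECLARE_JNI_CLASS
// then creates a helper object that resolves and caches those IDs.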
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>", "(IIIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (play, "play", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (release, "release", "()V") \
 METHOD (flush, "flush", "()V") \
 METHOD (write, "write", "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS

//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor, "<init>", "(IIIII)V") \
 METHOD (getState, "getState", "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop, "stop", "()V") \
 METHOD (read, "read", "([SII)I") \
 METHOD (release, "release", "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS

//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICFIELD (SDK_INT, "SDK_INT", "I") \

DECLARE_JNI_CLASS (AndroidBuildVersion, "android/os/Build$VERSION");
#undef JNI_CLASS_MEMBERS

//==============================================================================
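// Local copies of the Android SDK constants used below; the values match
// android.media.AudioFormat, android.media.AudioManager and
// android.media.AudioTrack, so they can be passed straight to the JNI calls.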
enum
{
    CHANNEL_OUT_STEREO  = 12,
    CHANNEL_IN_STEREO   = 12,
    CHANNEL_IN_MONO     = 16,
    ENCODING_PCM_16BIT  = 2,
    STREAM_MUSIC        = 3,
    MODE_STREAM         = 1,
    STATE_UNINITIALIZED = 0
};

const char* const javaAudioTypeName = "Android Audio";

//==============================================================================
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
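    // The constructor queries the device's native output sample rate and the
    // minimum AudioTrack/AudioRecord buffer sizes via JNI. If a stereo input
    // buffer size cannot be obtained it falls back to mono, and if that also
    // fails the device reports no input channels.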
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();
        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        if (minBufferSizeIn <= 0)
        {
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }

    ~AndroidAudioIODevice()
    {
        close();
    }

    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        Array<double> r;
        r.add ((double) sampleRate);
        return r;
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> b;
        int n = 16;

        for (int i = 0; i < 50; ++i)
        {
            b.add (n);
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));
        }

        return b;
    }

    int getDefaultBufferSize() override                 { return 2048; }
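
    // open() only accepts the native sample rate queried in the constructor.
    // It limits the active channels to the first two, allocates the float
    // conversion buffers, constructs the Java AudioTrack/AudioRecord objects,
    // checks the RECORD_AUDIO runtime permission for input, and on success
    // starts playback/recording and launches the audio thread.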
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError.clear();
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;
            outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
                                                      STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                      (jint) (minBufferSizeOut * numDeviceOutputChannels * static_cast<int> (sizeof (int16))), MODE_STREAM));

            const bool supportsUnderrunCount = (getEnv()->GetStaticIntField (AndroidBuildVersion, AndroidBuildVersion.SDK_INT) >= 24);
            getUnderrunCount = supportsUnderrunCount ? env->GetMethodID (AudioTrack, "getUnderrunCount", "()I") : 0;

            int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);
            if (outputDeviceState > 0)
            {
                isRunning = true;
            }
            else
            {
                // failed to open the device
                outputDevice.clear();
                lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
            }
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
                // before trying to open an audio input device. This is not going to work!
                jassertfalse;

                inputDevice.clear();
                lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
            }
            else
            {
                numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);

                inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
                                                         0 /* (default audio source) */, sampleRate,
                                                         numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                         ENCODING_PCM_16BIT,
                                                         (jint) (minBufferSizeIn * numDeviceInputChannels * static_cast<int> (sizeof (int16)))));

                int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);
                if (inputDeviceState > 0)
                {
                    isRunning = true;
                }
                else
                {
                    // failed to open the device
                    inputDevice.clear();
                    lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
                }
            }
        }

        if (isRunning)
        {
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }

    void close() override
    {
        if (isRunning)
        {
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }
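
    // Latency is estimated as three quarters of the minimum native buffer size.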
    int getOutputLatencyInSamples() override            { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples() override             { return (minBufferSizeIn * 3) / 4; }
    bool isOpen() override                              { return isRunning; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    double getCurrentSampleRate() override              { return sampleRate; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return isRunning && callback != 0; }
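
    // AudioTrack.getUnderrunCount is only available from API level 24, so the
    // method ID is looked up in open() and -1 is returned when it isn't supported.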
    int getXRunCount() const noexcept override
    {
        if (outputDevice != nullptr && getUnderrunCount != 0)
            return getEnv()->CallIntMethod (outputDevice, getUnderrunCount);

        return -1;
    }

    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }
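
    // Audio thread: each iteration reads a block of interleaved 16-bit samples
    // from the AudioRecord object (if any), converts it to non-interleaved
    // floats, hands it to the client callback, then converts the callback's
    // float output back to interleaved 16-bit samples and writes it to the
    // AudioTrack object.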
    void run() override
    {
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
                {
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));

                    if (chan < numDeviceInputChannels)
                    {
                        AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
                        d.convertSamples (s, actualBufferSize);
                    }
                    else
                    {
                        d.clearSamples (actualBufferSize);
                    }
                }

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallback (inputChannelBuffer.getArrayOfReadPointers(), numClientInputChannels,
                                                     outputChannelBuffer.getArrayOfWritePointers(), numClientOutputChannels,
                                                     actualBufferSize);
                }
                else
                {
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
                {
                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);

                    const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
                    d.convertSamples (s, actualBufferSize);
                }

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }

    int minBufferSizeOut, minBufferSizeIn;

private:
    //==============================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    jint sampleRate;
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;
    AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;
    jmethodID getUnderrunCount = 0;
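
    // Stops and releases the Java-side AudioTrack/AudioRecord objects and
    // drops the global references to them.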
    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};

//==============================================================================
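// Device type that exposes a single device named "Android Audio". Inputs and
// outputs are not listed separately, so the same device object handles both.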
class AndroidAudioIODeviceType  : public AudioIODeviceType
{
public:
    AndroidAudioIODeviceType()  : AudioIODeviceType (javaAudioTypeName) {}

    //==============================================================================
    void scanForDevices() {}
    StringArray getDeviceNames (bool) const                  { return StringArray (javaAudioTypeName); }
    int getDefaultDeviceIndex (bool) const                   { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool) const { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                 { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        ScopedPointer<AndroidAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

            if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};

//==============================================================================
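// When OpenSL ES support is compiled in and available at runtime, this factory
// returns nullptr so that the OpenSL-based device type (implemented elsewhere)
// is used instead of this AudioTrack/AudioRecord implementation.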
extern bool isOpenSLAvailable();

AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
   #if JUCE_USE_ANDROID_OPENSLES
    if (isOpenSLAvailable())
        return nullptr;
   #endif

    return new AndroidAudioIODeviceType();
}

} // namespace juce