The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   The code included in this file is provided under the terms of the ISC license
   http://www.isc.org/downloads/software-support-policy/isc-license. Permission
   To use, copy, modify, and/or distribute this software for any purpose with or
   without fee is hereby granted provided that the above copyright notice and
   this permission notice appear in all copies.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
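// This file implements JUCE's audio-device interface on Android by driving the
// Java android.media.AudioTrack (output) and android.media.AudioRecord (input)
// classes through JNI.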
namespace juce
{

//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize,          "getMinBufferSize",          "(III)I") \
 STATICMETHOD (getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
 METHOD (constructor, "<init>",   "(IIIIII)V") \
 METHOD (getState,    "getState", "()I") \
 METHOD (play,        "play",     "()V") \
 METHOD (stop,        "stop",     "()V") \
 METHOD (release,     "release",  "()V") \
 METHOD (flush,       "flush",    "()V") \
 METHOD (write,       "write",    "([SII)I") \

DECLARE_JNI_CLASS (AudioTrack, "android/media/AudioTrack");
#undef JNI_CLASS_MEMBERS
//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (getMinBufferSize, "getMinBufferSize", "(III)I") \
 METHOD (constructor,    "<init>",         "(IIIII)V") \
 METHOD (getState,       "getState",       "()I") \
 METHOD (startRecording, "startRecording", "()V") \
 METHOD (stop,            "stop",           "()V") \
 METHOD (read,            "read",           "([SII)I") \
 METHOD (release,         "release",        "()V") \

DECLARE_JNI_CLASS (AudioRecord, "android/media/AudioRecord");
#undef JNI_CLASS_MEMBERS

//==============================================================================
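// These values mirror the corresponding constants in the Android SDK
// (android.media.AudioFormat, android.media.AudioManager and android.media.AudioTrack).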
enum
{
    CHANNEL_OUT_STEREO  = 12,
    CHANNEL_IN_STEREO   = 12,
    CHANNEL_IN_MONO     = 16,
    ENCODING_PCM_16BIT  = 2,
    STREAM_MUSIC        = 3,
    MODE_STREAM         = 1,
    STATE_UNINITIALIZED = 0
};

const char* const javaAudioTypeName = "Android Audio";
//==============================================================================
class AndroidAudioIODevice  : public AudioIODevice,
                              public Thread
{
public:
    //==============================================================================
    AndroidAudioIODevice (const String& deviceName)
        : AudioIODevice (deviceName, javaAudioTypeName),
          Thread ("audio"),
          minBufferSizeOut (0), minBufferSizeIn (0), callback (0), sampleRate (0),
          numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
          numClientOutputChannels (0), numDeviceOutputChannels (0),
          actualBufferSize (0), isRunning (false),
          inputChannelBuffer (1, 1),
          outputChannelBuffer (1, 1)
    {
        JNIEnv* env = getEnv();
        sampleRate = env->CallStaticIntMethod (AudioTrack, AudioTrack.getNativeOutputSampleRate, MODE_STREAM);

        minBufferSizeOut = (int) env->CallStaticIntMethod (AudioTrack,  AudioTrack.getMinBufferSize,  sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
        minBufferSizeIn  = (int) env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_STEREO,  ENCODING_PCM_16BIT);

        if (minBufferSizeIn <= 0)
        {
            minBufferSizeIn = env->CallStaticIntMethod (AudioRecord, AudioRecord.getMinBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

            if (minBufferSizeIn > 0)
                numDeviceInputChannelsAvailable = 1;
            else
                numDeviceInputChannelsAvailable = 0;
        }

        DBG ("Audio device - min buffers: " << minBufferSizeOut << ", " << minBufferSizeIn << "; "
              << sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
    }

    ~AndroidAudioIODevice()
    {
        close();
    }
    StringArray getOutputChannelNames() override
    {
        StringArray s;
        s.add ("Left");
        s.add ("Right");
        return s;
    }

    StringArray getInputChannelNames() override
    {
        StringArray s;

        if (numDeviceInputChannelsAvailable == 2)
        {
            s.add ("Left");
            s.add ("Right");
        }
        else if (numDeviceInputChannelsAvailable == 1)
        {
            s.add ("Audio Input");
        }

        return s;
    }

    Array<double> getAvailableSampleRates() override
    {
        Array<double> r;
        r.add ((double) sampleRate);
        return r;
    }

    Array<int> getAvailableBufferSizes() override
    {
        Array<int> b;
        int n = 16;

        for (int i = 0; i < 50; ++i)
        {
            b.add (n);
            n += n < 64 ? 16
                        : (n < 512 ? 32
                                   : (n < 1024 ? 64
                                               : (n < 2048 ? 128 : 256)));
        }

        return b;
    }

    int getDefaultBufferSize() override                 { return 2048; }
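    // Opens the underlying AudioTrack/AudioRecord objects. Note that only the
    // device's native output sample rate is accepted; any other rate returns an error.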
    String open (const BigInteger& inputChannels,
                 const BigInteger& outputChannels,
                 double requestedSampleRate,
                 int bufferSize) override
    {
        close();

        if (sampleRate != (int) requestedSampleRate)
            return "Sample rate not allowed";

        lastError.clear();
        int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;

        numDeviceInputChannels = 0;
        numDeviceOutputChannels = 0;

        activeOutputChans = outputChannels;
        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
        numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

        activeInputChans = inputChannels;
        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
        numClientInputChannels = activeInputChans.countNumberOfSetBits();

        actualBufferSize = preferredBufferSize;
        inputChannelBuffer.setSize (2, actualBufferSize);
        inputChannelBuffer.clear();
        outputChannelBuffer.setSize (2, actualBufferSize);
        outputChannelBuffer.clear();

        JNIEnv* env = getEnv();

        if (numClientOutputChannels > 0)
        {
            numDeviceOutputChannels = 2;
            outputDevice = GlobalRef (env->NewObject (AudioTrack, AudioTrack.constructor,
                                                      STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                                      (jint) (minBufferSizeOut * numDeviceOutputChannels * static_cast<int> (sizeof (int16))), MODE_STREAM));

            int outputDeviceState = env->CallIntMethod (outputDevice, AudioTrack.getState);

            if (outputDeviceState > 0)
            {
                isRunning = true;
            }
            else
            {
                // failed to open the device
                outputDevice.clear();
                lastError = "Error opening audio output device: android.media.AudioTrack failed with state = " + String (outputDeviceState);
            }
        }

        if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
        {
            if (! RuntimePermissions::isGranted (RuntimePermissions::recordAudio))
            {
                // If you hit this assert, you probably forgot to get RuntimePermissions::recordAudio
                // before trying to open an audio input device. This is not going to work!
                jassertfalse;

                inputDevice.clear();
                lastError = "Error opening audio input device: the app was not granted android.permission.RECORD_AUDIO";
            }
            else
            {
                numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
                inputDevice = GlobalRef (env->NewObject (AudioRecord, AudioRecord.constructor,
                                                         0 /* (default audio source) */, sampleRate,
                                                         numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
                                                         ENCODING_PCM_16BIT,
                                                         (jint) (minBufferSizeIn * numDeviceInputChannels * static_cast<int> (sizeof (int16)))));

                int inputDeviceState = env->CallIntMethod (inputDevice, AudioRecord.getState);

                if (inputDeviceState > 0)
                {
                    isRunning = true;
                }
                else
                {
                    // failed to open the device
                    inputDevice.clear();
                    lastError = "Error opening audio input device: android.media.AudioRecord failed with state = " + String (inputDeviceState);
                }
            }
        }

        if (isRunning)
        {
            if (outputDevice != nullptr)
                env->CallVoidMethod (outputDevice, AudioTrack.play);

            if (inputDevice != nullptr)
                env->CallVoidMethod (inputDevice, AudioRecord.startRecording);

            startThread (8);
        }
        else
        {
            closeDevices();
        }

        return lastError;
    }
    void close() override
    {
        if (isRunning)
        {
            stopThread (2000);
            isRunning = false;
            closeDevices();
        }
    }

    int getOutputLatencyInSamples() override            { return (minBufferSizeOut * 3) / 4; }
    int getInputLatencyInSamples() override             { return (minBufferSizeIn * 3) / 4; }
    bool isOpen() override                              { return isRunning; }
    int getCurrentBufferSizeSamples() override          { return actualBufferSize; }
    int getCurrentBitDepth() override                   { return 16; }
    double getCurrentSampleRate() override              { return sampleRate; }
    BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
    BigInteger getActiveInputChannels() const override  { return activeInputChans; }
    String getLastError() override                      { return lastError; }
    bool isPlaying() override                           { return isRunning && callback != 0; }
    void start (AudioIODeviceCallback* newCallback) override
    {
        if (isRunning && callback != newCallback)
        {
            if (newCallback != nullptr)
                newCallback->audioDeviceAboutToStart (this);

            const ScopedLock sl (callbackLock);
            callback = newCallback;
        }
    }

    void stop() override
    {
        if (isRunning)
        {
            AudioIODeviceCallback* lastCallback;

            {
                const ScopedLock sl (callbackLock);
                lastCallback = callback;
                callback = nullptr;
            }

            if (lastCallback != nullptr)
                lastCallback->audioDeviceStopped();
        }
    }
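    // Audio I/O thread: reads interleaved 16-bit samples from the AudioRecord,
    // converts them to float, hands them to the client callback, then converts
    // the callback's output back to 16-bit and writes it to the AudioTrack.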
    void run() override
    {
        JNIEnv* env = getEnv();
        jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

        while (! threadShouldExit())
        {
            if (inputDevice != nullptr)
            {
                jint numRead = env->CallIntMethod (inputDevice, AudioRecord.read, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

                if (numRead < actualBufferSize * numDeviceInputChannels)
                {
                    DBG ("Audio read under-run! " << numRead);
                }

                jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < inputChannelBuffer.getNumChannels(); ++chan)
                {
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getWritePointer (chan));

                    if (chan < numDeviceInputChannels)
                    {
                        AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
                        d.convertSamples (s, actualBufferSize);
                    }
                    else
                    {
                        d.clearSamples (actualBufferSize);
                    }
                }

                env->ReleaseShortArrayElements (audioBuffer, src, 0);
            }

            if (threadShouldExit())
                break;

            {
                const ScopedLock sl (callbackLock);

                if (callback != nullptr)
                {
                    callback->audioDeviceIOCallback (inputChannelBuffer.getArrayOfReadPointers(), numClientInputChannels,
                                                     outputChannelBuffer.getArrayOfWritePointers(), numClientOutputChannels,
                                                     actualBufferSize);
                }
                else
                {
                    outputChannelBuffer.clear();
                }
            }

            if (outputDevice != nullptr)
            {
                if (threadShouldExit())
                    break;

                jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

                for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
                {
                    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);

                    const float* const sourceChanData = outputChannelBuffer.getReadPointer (jmin (chan, outputChannelBuffer.getNumChannels() - 1));
                    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (sourceChanData);
                    d.convertSamples (s, actualBufferSize);
                }

                env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

                if (numWritten < actualBufferSize * numDeviceOutputChannels)
                {
                    DBG ("Audio write underrun! " << numWritten);
                }
            }
        }
    }
    int minBufferSizeOut, minBufferSizeIn;

private:
    //==============================================================================
    CriticalSection callbackLock;
    AudioIODeviceCallback* callback;
    jint sampleRate;
    int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
    int numClientOutputChannels, numDeviceOutputChannels;
    int actualBufferSize;
    bool isRunning;
    String lastError;
    BigInteger activeOutputChans, activeInputChans;
    GlobalRef outputDevice, inputDevice;
    AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;

    void closeDevices()
    {
        if (outputDevice != nullptr)
        {
            outputDevice.callVoidMethod (AudioTrack.stop);
            outputDevice.callVoidMethod (AudioTrack.release);
            outputDevice.clear();
        }

        if (inputDevice != nullptr)
        {
            inputDevice.callVoidMethod (AudioRecord.stop);
            inputDevice.callVoidMethod (AudioRecord.release);
            inputDevice.clear();
        }
    }

    JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice)
};

//==============================================================================
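// The device type exposes a single combined input/output device, named after
// javaAudioTypeName ("Android Audio").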
class AndroidAudioIODeviceType  : public AudioIODeviceType
{
public:
    AndroidAudioIODeviceType()  : AudioIODeviceType (javaAudioTypeName) {}

    //==============================================================================
    void scanForDevices() {}
    StringArray getDeviceNames (bool) const                  { return StringArray (javaAudioTypeName); }
    int getDefaultDeviceIndex (bool) const                   { return 0; }
    int getIndexOfDevice (AudioIODevice* device, bool) const { return device != nullptr ? 0 : -1; }
    bool hasSeparateInputsAndOutputs() const                 { return false; }

    AudioIODevice* createDevice (const String& outputDeviceName,
                                 const String& inputDeviceName)
    {
        ScopedPointer<AndroidAudioIODevice> dev;

        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
        {
            dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
                                                                          : inputDeviceName);

            if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
                dev = nullptr;
        }

        return dev.release();
    }

private:
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType)
};

//==============================================================================
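// Factory used by AudioDeviceManager. When OpenSL ES support is compiled in and
// available at runtime, this returns nullptr so that the OpenSL ES implementation
// (created elsewhere) can be used instead of this java-based device.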
extern bool isOpenSLAvailable();

AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
   #if JUCE_USE_ANDROID_OPENSLES
    if (isOpenSLAvailable())
        return nullptr;
   #endif

    return new AndroidAudioIODeviceType();
}

} // namespace juce
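
/*  A minimal usage sketch, assuming a typical JUCE application: client code does
    not create AndroidAudioIODeviceType directly, but goes through an
    AudioDeviceManager, which picks up this device type on Android.

        juce::AudioDeviceManager deviceManager;
        deviceManager.initialiseWithDefaultDevices (2, 2);   // request stereo input and output
        deviceManager.addAudioCallback (&myCallback);        // myCallback is your AudioIODeviceCallback

    (deviceManager and myCallback are illustrative names, not part of this file.)
*/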