@@ -234,13 +234,18 @@
 #include "native/juce_android_Audio.cpp"
 #include "native/juce_android_Midi.cpp"

-#if JUCE_USE_ANDROID_OPENSLES
- #include "native/juce_android_OpenSL.cpp"
-#endif
-
-#if JUCE_USE_ANDROID_OBOE
- #include "native/juce_android_Oboe.cpp"
-#endif
+#if JUCE_USE_ANDROID_OPENSLES || JUCE_USE_ANDROID_OBOE
+ #include "native/juce_android_HighPerformanceAudioHelpers.h"
+
+ #if JUCE_USE_ANDROID_OPENSLES
+  #include "native/juce_android_OpenSL.cpp"
+ #endif
+
+ #if JUCE_USE_ANDROID_OBOE
+  #include "native/juce_android_Oboe.cpp"
+ #endif
+#endif

 #if ! JUCE_SYSTEMAUDIOVOL_IMPLEMENTED
| @@ -0,0 +1,140 @@ | |||||
| /* | |||||
| ============================================================================== | |||||
| This file is part of the JUCE library. | |||||
| Copyright (c) 2017 - ROLI Ltd. | |||||
| JUCE is an open source library subject to commercial or open-source | |||||
| licensing. | |||||
| The code included in this file is provided under the terms of the ISC license | |||||
| http://www.isc.org/downloads/software-support-policy/isc-license. Permission | |||||
| To use, copy, modify, and/or distribute this software for any purpose with or | |||||
| without fee is hereby granted provided that the above copyright notice and | |||||
| this permission notice appear in all copies. | |||||
| JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER | |||||
| EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE | |||||
| DISCLAIMED. | |||||
| ============================================================================== | |||||
| */ | |||||
| namespace juce | |||||
| { | |||||
| //============================================================================== | |||||
| /** | |||||
| Some shared helpers methods for using the high-performance audio paths on | |||||
| Android devices (OpenSL and Oboe). | |||||
| @tags{Audio} | |||||
| */ | |||||
| namespace AndroidHighPerformanceAudioHelpers | |||||
| { | |||||
| //============================================================================== | |||||
| static double getNativeSampleRate() | |||||
| { | |||||
| return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue(); | |||||
| } | |||||
| static int getNativeBufferSize() | |||||
| { | |||||
| auto deviceBufferSize = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue(); | |||||
| if (deviceBufferSize == 0) | |||||
| return 192; | |||||
| return deviceBufferSize; | |||||
| } | |||||
| static bool isProAudioDevice() | |||||
| { | |||||
| static bool isSapaSupported = SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG") | |||||
| && DynamicLibrary().open ("libapa_jni.so"); | |||||
| return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported; | |||||
| } | |||||
| static bool hasLowLatencyAudioPath() | |||||
| { | |||||
| return androidHasSystemFeature ("android.hardware.audio.low_latency"); | |||||
| } | |||||
| static bool canUseHighPerformanceAudioPath (int requestedBufferSize, int requestedSampleRate) | |||||
| { | |||||
| return ((requestedBufferSize % getNativeBufferSize()) == 0) | |||||
| && (requestedSampleRate == getNativeSampleRate()) | |||||
| && isProAudioDevice(); | |||||
| } | |||||
| //============================================================================== | |||||
| static int getMinimumBuffersToEnqueue (double requestedSampleRate) | |||||
| { | |||||
| if (canUseHighPerformanceAudioPath (getNativeBufferSize(), (int) requestedSampleRate)) | |||||
| { | |||||
| // see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp | |||||
| // "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required | |||||
| // for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one | |||||
| // is sufficient for lower latency." | |||||
| return (getAndroidSDKVersion() >= 18 ? 1 : 2); | |||||
| } | |||||
| // not using low-latency path so we can use the absolute minimum number of buffers to queue | |||||
| return 1; | |||||
| } | |||||
| static int buffersToQueueForBufferDuration (int bufferDurationInMs, double sampleRate) noexcept | |||||
| { | |||||
| auto maxBufferFrames = static_cast<int> (std::ceil (bufferDurationInMs * sampleRate / 1000.0)); | |||||
| auto maxNumBuffers = static_cast<int> (std::ceil (static_cast<double> (maxBufferFrames) | |||||
| / static_cast<double> (getNativeBufferSize()))); | |||||
| return jmax (getMinimumBuffersToEnqueue (sampleRate), maxNumBuffers); | |||||
| } | |||||
| static int getMaximumBuffersToEnqueue (double maximumSampleRate) noexcept | |||||
| { | |||||
| static constexpr int maxBufferSizeMs = 200; | |||||
| return jmax (8, buffersToQueueForBufferDuration (maxBufferSizeMs, maximumSampleRate)); | |||||
| } | |||||
| static Array<int> getAvailableBufferSizes (Array<double> availableSampleRates) | |||||
| { | |||||
| auto nativeBufferSize = getNativeBufferSize(); | |||||
| auto minBuffersToQueue = getMinimumBuffersToEnqueue (getNativeSampleRate()); | |||||
| auto maxBuffersToQueue = getMaximumBuffersToEnqueue (findMaximum (availableSampleRates.getRawDataPointer(), | |||||
| availableSampleRates.size())); | |||||
| Array<int> bufferSizes; | |||||
| for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i) | |||||
| bufferSizes.add (i * nativeBufferSize); | |||||
| return bufferSizes; | |||||
| } | |||||
| static int getDefaultBufferSize (double currentSampleRate) | |||||
| { | |||||
| static constexpr int defaultBufferSizeForLowLatencyDeviceMs = 40; | |||||
| static constexpr int defaultBufferSizeForStandardLatencyDeviceMs = 100; | |||||
| auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs | |||||
| : defaultBufferSizeForStandardLatencyDeviceMs); | |||||
| auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (defaultBufferLength, currentSampleRate); | |||||
| return defaultBuffersToEnqueue * getNativeBufferSize(); | |||||
| } | |||||
| static int getNumBuffersToEnqueue (int preferredBufferSize, int sampleRate) | |||||
| { | |||||
| if (canUseHighPerformanceAudioPath (preferredBufferSize, sampleRate)) | |||||
| return preferredBufferSize / getNativeBufferSize(); | |||||
| return 1; | |||||
| } | |||||
| } | |||||
| } // namespace juce | |||||
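The intent of the new namespace is that both native backends derive their buffer sizing from the same place. For orientation, here is a minimal sketch of how a device's open() is expected to combine these helpers; it mirrors the Oboe and OpenSL hunks that follow, but the function and variable names are illustrative only and the snippet is not a standalone program (the helpers rely on JUCE's Android internals such as audioManagerGetProperty()):

    // Illustrative only: how a device implementation combines the shared helpers.
    void openSketch (double requestedSampleRate, int requestedBufferSize)
    {
        using namespace juce;

        auto sampleRate = (int) (requestedSampleRate > 0 ? requestedSampleRate
                                                         : AndroidHighPerformanceAudioHelpers::getNativeSampleRate());

        auto preferredBufferSize = (requestedBufferSize > 0)
                                       ? requestedBufferSize
                                       : AndroidHighPerformanceAudioHelpers::getDefaultBufferSize (sampleRate);

        // On a pro-audio device, a request that is a multiple of the native burst size is split
        // into several native-sized buffers; otherwise a single buffer of the requested size is used.
        auto audioBuffersToEnqueue = AndroidHighPerformanceAudioHelpers::getNumBuffersToEnqueue (preferredBufferSize, sampleRate);
        auto actualBufferSize      = preferredBufferSize / audioBuffersToEnqueue;

        jassert (actualBufferSize * audioBuffersToEnqueue == preferredBufferSize);
    }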
| @@ -194,16 +194,7 @@ public: | |||||
| Array<int> getAvailableBufferSizes() override | Array<int> getAvailableBufferSizes() override | ||||
| { | { | ||||
| // we need to offer the lowest possible buffer size which | |||||
| // is the native buffer size | |||||
| const int defaultNumMultiples = 8; | |||||
| const int nativeBufferSize = getNativeBufferSize(); | |||||
| Array<int> bufferSizes; | |||||
| for (int i = 1; i < defaultNumMultiples; ++i) | |||||
| bufferSizes.add (i * nativeBufferSize); | |||||
| return bufferSizes; | |||||
| return AndroidHighPerformanceAudioHelpers::getAvailableBufferSizes (getAvailableSampleRates()); | |||||
| } | } | ||||
| String open (const BigInteger& inputChannels, const BigInteger& outputChannels, | String open (const BigInteger& inputChannels, const BigInteger& outputChannels, | ||||
| @@ -212,9 +203,14 @@ public: | |||||
| close(); | close(); | ||||
| lastError.clear(); | lastError.clear(); | ||||
| sampleRate = (int) requestedSampleRate; | |||||
| actualBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize; | |||||
| sampleRate = (int) (requestedSampleRate > 0 ? requestedSampleRate : AndroidHighPerformanceAudioHelpers::getNativeSampleRate()); | |||||
| auto preferredBufferSize = (bufferSize > 0) ? bufferSize : getDefaultBufferSize(); | |||||
| audioBuffersToEnqueue = AndroidHighPerformanceAudioHelpers::getNumBuffersToEnqueue (preferredBufferSize, sampleRate); | |||||
| actualBufferSize = preferredBufferSize / audioBuffersToEnqueue; | |||||
| jassert ((actualBufferSize * audioBuffersToEnqueue) == preferredBufferSize); | |||||
| // The device may report no max, claiming "no limits". Pick sensible defaults. | // The device may report no max, claiming "no limits". Pick sensible defaults. | ||||
| int maxOutChans = maxNumOutputChannels > 0 ? maxNumOutputChannels : 2; | int maxOutChans = maxNumOutputChannels > 0 ? maxNumOutputChannels : 2; | ||||
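To make the split concrete, assume (hypothetically) a pro-audio device reporting a 192-frame native burst, running at its native rate, with a caller asking for a 960-sample buffer:

    // Hypothetical figures, not taken from any particular device:
    //   audioBuffersToEnqueue = 960 / 192 = 5
    //   actualBufferSize      = 960 / 5   = 192
    // getCurrentBufferSizeSamples() still reports 5 * 192 = 960 to the caller.

On a device without the pro-audio feature, or when the request is not a multiple of the native burst, getNumBuffersToEnqueue() returns 1 and the full requested buffer is enqueued as a single block.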
| @@ -261,7 +257,7 @@ public: | |||||
| int getOutputLatencyInSamples() override { return session->getOutputLatencyInSamples(); } | int getOutputLatencyInSamples() override { return session->getOutputLatencyInSamples(); } | ||||
| int getInputLatencyInSamples() override { return session->getInputLatencyInSamples(); } | int getInputLatencyInSamples() override { return session->getInputLatencyInSamples(); } | ||||
| bool isOpen() override { return deviceOpen; } | bool isOpen() override { return deviceOpen; } | ||||
| int getCurrentBufferSizeSamples() override { return actualBufferSize; } | |||||
| int getCurrentBufferSizeSamples() override { return actualBufferSize * audioBuffersToEnqueue; } | |||||
| int getCurrentBitDepth() override { return session->getCurrentBitDepth(); } | int getCurrentBitDepth() override { return session->getCurrentBitDepth(); } | ||||
| BigInteger getActiveOutputChannels() const override { return activeOutputChans; } | BigInteger getActiveOutputChannels() const override { return activeOutputChans; } | ||||
| BigInteger getActiveInputChannels() const override { return activeInputChans; } | BigInteger getActiveInputChannels() const override { return activeInputChans; } | ||||
| @@ -271,16 +267,12 @@ public: | |||||
| int getDefaultBufferSize() override | int getDefaultBufferSize() override | ||||
| { | { | ||||
| // Only on a Pro-Audio device will we set the lowest possible buffer size | |||||
| // by default. We need to be more conservative on other devices | |||||
| // as they may be low-latency, but still have a crappy CPU. | |||||
| return (isProAudioDevice() ? 1 : 6) | |||||
| * getNativeBufferSize(); | |||||
| return AndroidHighPerformanceAudioHelpers::getDefaultBufferSize (getCurrentSampleRate()); | |||||
| } | } | ||||
| double getCurrentSampleRate() override | double getCurrentSampleRate() override | ||||
| { | { | ||||
| return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate); | |||||
| return (sampleRate == 0.0 ? AndroidHighPerformanceAudioHelpers::getNativeSampleRate() : sampleRate); | |||||
| } | } | ||||
| void start (AudioIODeviceCallback* newCallback) override | void start (AudioIODeviceCallback* newCallback) override | ||||
| @@ -372,10 +364,11 @@ private: | |||||
| { | { | ||||
| static const int standardRates[] = { 8000, 11025, 12000, 16000, | static const int standardRates[] = { 8000, 11025, 12000, 16000, | ||||
| 22050, 24000, 32000, 44100, 48000 }; | 22050, 24000, 32000, 44100, 48000 }; | ||||
| Array<int> rates (standardRates, numElementsInArray (standardRates)); | Array<int> rates (standardRates, numElementsInArray (standardRates)); | ||||
| // make sure the native sample rate is part of the list | // make sure the native sample rate is part of the list | ||||
| int native = (int) getNativeSampleRate(); | |||||
| int native = (int) AndroidHighPerformanceAudioHelpers::getNativeSampleRate(); | |||||
| if (native != 0 && ! rates.contains (native)) | if (native != 0 && ! rates.contains (native)) | ||||
| rates.add (native); | rates.add (native); | ||||
| @@ -511,7 +504,7 @@ private: | |||||
| int32 newSampleRate, int32 newBufferSize, | int32 newSampleRate, int32 newBufferSize, | ||||
| oboe::AudioStreamCallback* newCallback = nullptr) | oboe::AudioStreamCallback* newCallback = nullptr) | ||||
| { | { | ||||
| oboe::DefaultStreamValues::FramesPerBurst = getDefaultFramesPerBurst(); | |||||
| oboe::DefaultStreamValues::FramesPerBurst = AndroidHighPerformanceAudioHelpers::getNativeBufferSize(); | |||||
| oboe::AudioStreamBuilder builder; | oboe::AudioStreamBuilder builder; | ||||
| @@ -962,7 +955,7 @@ private: | |||||
| friend class OboeRealtimeThread; | friend class OboeRealtimeThread; | ||||
| //============================================================================== | //============================================================================== | ||||
| int actualBufferSize = 0, sampleRate = 0; | |||||
| int actualBufferSize = 0, sampleRate = 0, audioBuffersToEnqueue = 0; | |||||
| bool deviceOpen = false; | bool deviceOpen = false; | ||||
| String lastError; | String lastError; | ||||
| BigInteger activeOutputChans, activeInputChans; | BigInteger activeOutputChans, activeInputChans; | ||||
| @@ -979,31 +972,6 @@ private: | |||||
| bool running = false; | bool running = false; | ||||
| //============================================================================== | |||||
| static double getNativeSampleRate() | |||||
| { | |||||
| return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue(); | |||||
| } | |||||
| static int getNativeBufferSize() | |||||
| { | |||||
| auto val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue(); | |||||
| return val > 0 ? val : 512; | |||||
| } | |||||
| static bool isProAudioDevice() | |||||
| { | |||||
| return androidHasSystemFeature ("android.hardware.audio.pro"); | |||||
| } | |||||
| static int getDefaultFramesPerBurst() | |||||
| { | |||||
| // NB: this function only works for inbuilt speakers and headphones | |||||
| auto framesPerBurstString = javaString (audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER")); | |||||
| return framesPerBurstString != 0 ? getEnv()->CallStaticIntMethod (JavaInteger, JavaInteger.parseInt, framesPerBurstString.get(), 10) : 192; | |||||
| } | |||||
| JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OboeAudioIODevice) | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OboeAudioIODevice) | ||||
| }; | }; | ||||
| @@ -1087,8 +1055,8 @@ public: | |||||
| oboe::SharingMode::Shared, | oboe::SharingMode::Shared, | ||||
| forInput ? 1 : 2, | forInput ? 1 : 2, | ||||
| getAndroidSDKVersion() >= 21 ? oboe::AudioFormat::Float : oboe::AudioFormat::I16, | getAndroidSDKVersion() >= 21 ? oboe::AudioFormat::Float : oboe::AudioFormat::I16, | ||||
| (int) OboeAudioIODevice::getNativeSampleRate(), | |||||
| OboeAudioIODevice::getNativeBufferSize(), | |||||
| (int) AndroidHighPerformanceAudioHelpers::getNativeSampleRate(), | |||||
| AndroidHighPerformanceAudioHelpers::getNativeBufferSize(), | |||||
| nullptr); | nullptr); | ||||
| if (auto* nativeStream = tempStream.getNativeStream()) | if (auto* nativeStream = tempStream.getNativeStream()) | ||||
| @@ -1353,8 +1321,8 @@ public: | |||||
| oboe::SharingMode::Exclusive, | oboe::SharingMode::Exclusive, | ||||
| 1, | 1, | ||||
| oboe::AudioFormat::Float, | oboe::AudioFormat::Float, | ||||
| (int) OboeAudioIODevice::getNativeSampleRate(), | |||||
| OboeAudioIODevice::getNativeBufferSize(), | |||||
| (int) AndroidHighPerformanceAudioHelpers::getNativeSampleRate(), | |||||
| AndroidHighPerformanceAudioHelpers::getNativeBufferSize(), | |||||
| this)), | this)), | ||||
| formatUsed (oboe::AudioFormat::Float) | formatUsed (oboe::AudioFormat::Float) | ||||
| { | { | ||||
| @@ -1366,8 +1334,8 @@ public: | |||||
| oboe::SharingMode::Exclusive, | oboe::SharingMode::Exclusive, | ||||
| 1, | 1, | ||||
| oboe::AudioFormat::I16, | oboe::AudioFormat::I16, | ||||
| (int) OboeAudioIODevice::getNativeSampleRate(), | |||||
| OboeAudioIODevice::getNativeBufferSize(), | |||||
| (int) AndroidHighPerformanceAudioHelpers::getNativeSampleRate(), | |||||
| AndroidHighPerformanceAudioHelpers::getNativeBufferSize(), | |||||
| this)); | this)); | ||||
| formatUsed = oboe::AudioFormat::I16; | formatUsed = oboe::AudioFormat::I16; | ||||
| @@ -1449,15 +1417,19 @@ private: | |||||
| oboe::AudioFormat formatUsed; | oboe::AudioFormat formatUsed; | ||||
| }; | }; | ||||
| //============================================================================== | |||||
| pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr) | pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr) | ||||
| { | { | ||||
| std::unique_ptr<OboeRealtimeThread> thread (new OboeRealtimeThread()); | |||||
| auto thread = std::make_unique<OboeRealtimeThread>(); | |||||
| if (! thread->isOk()) | if (! thread->isOk()) | ||||
| return {}; | return {}; | ||||
| auto threadID = thread->startThread (entry, userPtr); | auto threadID = thread->startThread (entry, userPtr); | ||||
| thread.release(); // the thread will de-allocate itself | |||||
| // the thread will de-allocate itself | |||||
| thread.release(); | |||||
| return threadID; | return threadID; | ||||
| } | } | ||||
| @@ -852,10 +852,11 @@ public: | |||||
| static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0, | static const double rates[] = { 8000.0, 11025.0, 12000.0, 16000.0, | ||||
| 22050.0, 24000.0, 32000.0, 44100.0, 48000.0 }; | 22050.0, 24000.0, 32000.0, 44100.0, 48000.0 }; | ||||
| Array<double> retval (rates, numElementsInArray (rates)); | Array<double> retval (rates, numElementsInArray (rates)); | ||||
| // make sure the native sample rate is part of the list | // make sure the native sample rate is part of the list | ||||
| double native = getNativeSampleRate(); | |||||
| double native = AndroidHighPerformanceAudioHelpers::getNativeSampleRate(); | |||||
| if (native != 0.0 && ! retval.contains (native)) | if (native != 0.0 && ! retval.contains (native)) | ||||
| retval.add (native); | retval.add (native); | ||||
| @@ -865,17 +866,7 @@ public: | |||||
| Array<int> getAvailableBufferSizes() override | Array<int> getAvailableBufferSizes() override | ||||
| { | { | ||||
| // we need to offer the lowest possible buffer size which | |||||
| // is the native buffer size | |||||
| auto nativeBufferSize = getNativeBufferSize(); | |||||
| auto minBuffersToQueue = getMinimumBuffersToEnqueue(); | |||||
| auto maxBuffersToQueue = getMaximumBuffersToEnqueue(); | |||||
| Array<int> retval; | |||||
| for (int i = minBuffersToQueue; i <= maxBuffersToQueue; ++i) | |||||
| retval.add (i * nativeBufferSize); | |||||
| return retval; | |||||
| return AndroidHighPerformanceAudioHelpers::getAvailableBufferSizes (getAvailableSampleRates()); | |||||
| } | } | ||||
| String open (const BigInteger& inputChannels, | String open (const BigInteger& inputChannels, | ||||
| @@ -887,15 +878,13 @@ public: | |||||
| lastError.clear(); | lastError.clear(); | ||||
| sampleRate = (int) (requestedSampleRate > 0 ? requestedSampleRate : getNativeSampleRate()); | |||||
| sampleRate = (int) (requestedSampleRate > 0 ? requestedSampleRate : AndroidHighPerformanceAudioHelpers::getNativeSampleRate()); | |||||
| auto preferredBufferSize = (bufferSize > 0) ? bufferSize : getDefaultBufferSize(); | |||||
| auto totalPreferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize; | |||||
| auto nativeBufferSize = getNativeBufferSize(); | |||||
| bool useHighPerformanceAudioPath = canUseHighPerformanceAudioPath (totalPreferredBufferSize, sampleRate); | |||||
| audioBuffersToEnqueue = AndroidHighPerformanceAudioHelpers::getNumBuffersToEnqueue (preferredBufferSize, sampleRate); | |||||
| actualBufferSize = preferredBufferSize / audioBuffersToEnqueue; | |||||
| audioBuffersToEnqueue = useHighPerformanceAudioPath ? (totalPreferredBufferSize / nativeBufferSize) : 1; | |||||
| actualBufferSize = totalPreferredBufferSize / audioBuffersToEnqueue; | |||||
| jassert ((actualBufferSize * audioBuffersToEnqueue) == totalPreferredBufferSize); | |||||
| jassert ((actualBufferSize * audioBuffersToEnqueue) == preferredBufferSize); | |||||
| activeOutputChans = outputChannels; | activeOutputChans = outputChannels; | ||||
| activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false); | activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false); | ||||
| @@ -934,8 +923,8 @@ public: | |||||
| DBG ("OpenSL: numInputChannels = " << numInputChannels | DBG ("OpenSL: numInputChannels = " << numInputChannels | ||||
| << ", numOutputChannels = " << numOutputChannels | << ", numOutputChannels = " << numOutputChannels | ||||
| << ", nativeBufferSize = " << getNativeBufferSize() | |||||
| << ", nativeSampleRate = " << getNativeSampleRate() | |||||
| << ", nativeBufferSize = " << AndroidHighPerformanceAudioHelpers::getNativeBufferSize() | |||||
| << ", nativeSampleRate = " << AndroidHighPerformanceAudioHelpers::getNativeSampleRate() | |||||
| << ", actualBufferSize = " << actualBufferSize | << ", actualBufferSize = " << actualBufferSize | ||||
| << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue | << ", audioBuffersToEnqueue = " << audioBuffersToEnqueue | ||||
| << ", sampleRate = " << sampleRate | << ", sampleRate = " << sampleRate | ||||
| @@ -968,16 +957,12 @@ public: | |||||
| int getDefaultBufferSize() override | int getDefaultBufferSize() override | ||||
| { | { | ||||
| auto defaultBufferLength = (hasLowLatencyAudioPath() ? defaultBufferSizeForLowLatencyDeviceMs | |||||
| : defaultBufferSizeForStandardLatencyDeviceMs); | |||||
| auto defaultBuffersToEnqueue = buffersToQueueForBufferDuration (defaultBufferLength, getCurrentSampleRate()); | |||||
| return defaultBuffersToEnqueue * getNativeBufferSize(); | |||||
| return AndroidHighPerformanceAudioHelpers::getDefaultBufferSize (getCurrentSampleRate()); | |||||
| } | } | ||||
| double getCurrentSampleRate() override | double getCurrentSampleRate() override | ||||
| { | { | ||||
| return (sampleRate == 0.0 ? getNativeSampleRate() : sampleRate); | |||||
| return (sampleRate == 0.0 ? AndroidHighPerformanceAudioHelpers::getNativeSampleRate() : sampleRate); | |||||
| } | } | ||||
| void start (AudioIODeviceCallback* newCallback) override | void start (AudioIODeviceCallback* newCallback) override | ||||
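Both backends now take their default from the shared helper, whose result scales with the current sample rate instead of being a fixed multiple of the native burst. As a worked example with assumed figures (192-frame native burst, 48 kHz, low-latency audio path present):

    //   target duration     = 40 ms
    //   frames for 40 ms    = ceil (40 * 48000 / 1000) = 1920
    //   buffers to enqueue  = ceil (1920 / 192)        = 10
    //   default buffer size = 10 * 192                 = 1920 samples
    // Without the low-latency path the target is 100 ms, giving 25 * 192 = 4800 samples.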
| @@ -1048,91 +1033,6 @@ private: | |||||
| std::unique_ptr<OpenSLSession> session; | std::unique_ptr<OpenSLSession> session; | ||||
| enum | |||||
| { | |||||
| defaultBufferSizeForLowLatencyDeviceMs = 40, | |||||
| defaultBufferSizeForStandardLatencyDeviceMs = 100 | |||||
| }; | |||||
| static int getMinimumBuffersToEnqueue (double sampleRateToCheck = getNativeSampleRate()) | |||||
| { | |||||
| if (canUseHighPerformanceAudioPath (getNativeBufferSize(), (int) sampleRateToCheck)) | |||||
| { | |||||
| // see https://developer.android.com/ndk/guides/audio/opensl/opensl-prog-notes.html#sandp | |||||
| // "For Android 4.2 (API level 17) and earlier, a buffer count of two or more is required | |||||
| // for lower latency. Beginning with Android 4.3 (API level 18), a buffer count of one | |||||
| // is sufficient for lower latency." | |||||
| return (getAndroidSDKVersion() >= 18 ? 1 : 2); | |||||
| } | |||||
| // we will not use the low-latency path so we can use the absolute minimum number of buffers | |||||
| // to queue | |||||
| return 1; | |||||
| } | |||||
| int getMaximumBuffersToEnqueue() noexcept | |||||
| { | |||||
| constexpr auto maxBufferSizeMs = 200; | |||||
| auto availableSampleRates = getAvailableSampleRates(); | |||||
| auto maximumSampleRate = findMaximum(availableSampleRates.getRawDataPointer(), availableSampleRates.size()); | |||||
| // ensure we don't return something crazy small | |||||
| return jmax (8, buffersToQueueForBufferDuration (maxBufferSizeMs, maximumSampleRate)); | |||||
| } | |||||
| static int buffersToQueueForBufferDuration (int bufferDurationInMs, double sampleRate) noexcept | |||||
| { | |||||
| auto maxBufferFrames = static_cast<int> (std::ceil (bufferDurationInMs * sampleRate / 1000.0)); | |||||
| auto maxNumBuffers = static_cast<int> (std::ceil (static_cast<double> (maxBufferFrames) | |||||
| / static_cast<double> (getNativeBufferSize()))); | |||||
| return jmax (getMinimumBuffersToEnqueue (sampleRate), maxNumBuffers); | |||||
| } | |||||
| //============================================================================== | |||||
| static double getNativeSampleRate() | |||||
| { | |||||
| return audioManagerGetProperty ("android.media.property.OUTPUT_SAMPLE_RATE").getDoubleValue(); | |||||
| } | |||||
| static int getNativeBufferSize() | |||||
| { | |||||
| const int val = audioManagerGetProperty ("android.media.property.OUTPUT_FRAMES_PER_BUFFER").getIntValue(); | |||||
| return val > 0 ? val : 512; | |||||
| } | |||||
| static bool isProAudioDevice() | |||||
| { | |||||
| return androidHasSystemFeature ("android.hardware.audio.pro") || isSapaSupported(); | |||||
| } | |||||
| static bool hasLowLatencyAudioPath() | |||||
| { | |||||
| return androidHasSystemFeature ("android.hardware.audio.low_latency"); | |||||
| } | |||||
| static bool canUseHighPerformanceAudioPath (int requestedBufferSize, int requestedSampleRate) | |||||
| { | |||||
| return ((requestedBufferSize % getNativeBufferSize()) == 0) | |||||
| && (requestedSampleRate == getNativeSampleRate()) | |||||
| && isProAudioDevice(); | |||||
| } | |||||
| //============================================================================== | |||||
| // Some minimum Sapa support to check if this device supports pro audio | |||||
| static bool isSamsungDevice() | |||||
| { | |||||
| return SystemStats::getDeviceManufacturer().containsIgnoreCase ("SAMSUNG"); | |||||
| } | |||||
| static bool isSapaSupported() | |||||
| { | |||||
| static bool supported = isSamsungDevice() && DynamicLibrary().open ("libapa_jni.so"); | |||||
| return supported; | |||||
| } | |||||
| JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice) | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OpenSLAudioIODevice) | ||||
| }; | }; | ||||
| @@ -1263,7 +1163,7 @@ public: | |||||
| SLDataLocator_OutputMix outputMixLocator = {SL_DATALOCATOR_OUTPUTMIX, outputMix}; | SLDataLocator_OutputMix outputMixLocator = {SL_DATALOCATOR_OUTPUTMIX, outputMix}; | ||||
| PCMDataFormatEx dataFormat; | PCMDataFormatEx dataFormat; | ||||
| BufferHelpers<int16>::initPCMDataFormat (dataFormat, 1, OpenSLAudioIODevice::getNativeSampleRate()); | |||||
| BufferHelpers<int16>::initPCMDataFormat (dataFormat, 1, AndroidHighPerformanceAudioHelpers::getNativeSampleRate()); | |||||
| SLDataSource source = { &queueLocator, &dataFormat }; | SLDataSource source = { &queueLocator, &dataFormat }; | ||||
| SLDataSink sink = { &outputMixLocator, nullptr }; | SLDataSink sink = { &outputMixLocator, nullptr }; | ||||
| @@ -1306,7 +1206,7 @@ public: | |||||
| } | } | ||||
| } | } | ||||
| bool isOK() const { return queue != nullptr; } | |||||
| bool isOk() const { return queue != nullptr; } | |||||
| pthread_t startThread (void* (*entry) (void*), void* userPtr) | pthread_t startThread (void* (*entry) (void*), void* userPtr) | ||||
| { | { | ||||
| @@ -1365,7 +1265,7 @@ private: | |||||
| SlRef<SLPlayItf_> player; | SlRef<SLPlayItf_> player; | ||||
| SlRef<SLAndroidSimpleBufferQueueItf_> queue; | SlRef<SLAndroidSimpleBufferQueueItf_> queue; | ||||
| int bufferSize = OpenSLAudioIODevice::getNativeBufferSize(); | |||||
| int bufferSize = AndroidHighPerformanceAudioHelpers::getNativeBufferSize(); | |||||
| HeapBlock<int16> buffer { HeapBlock<int16> (static_cast<size_t> (1 * bufferSize * numBuffers)) }; | HeapBlock<int16> buffer { HeapBlock<int16> (static_cast<size_t> (1 * bufferSize * numBuffers)) }; | ||||
| void* (*threadEntryProc) (void*) = nullptr; | void* (*threadEntryProc) (void*) = nullptr; | ||||
| @@ -1376,14 +1276,15 @@ private: | |||||
| pthread_t threadID; | pthread_t threadID; | ||||
| }; | }; | ||||
| //============================================================================== | |||||
| pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr) | pthread_t juce_createRealtimeAudioThread (void* (*entry) (void*), void* userPtr) | ||||
| { | { | ||||
| std::unique_ptr<SLRealtimeThread> thread (new SLRealtimeThread); | |||||
| auto thread = std::make_unique<SLRealtimeThread>(); | |||||
| if (! thread->isOK()) | |||||
| return 0; | |||||
| if (! thread->isOk()) | |||||
| return {}; | |||||
| pthread_t threadID = thread->startThread (entry, userPtr); | |||||
| auto threadID = thread->startThread (entry, userPtr); | |||||
| // the thread will de-allocate itself | // the thread will de-allocate itself | ||||
| thread.release(); | thread.release(); | ||||