
Android audio implementation. Tweak to RecentlyOpenedFilesList.

tags/2021-05-28
Julian Storer, 14 years ago
commit 5eb389342d
10 changed files with 487 additions and 150 deletions
  1. juce_amalgamated.cpp (+238 / -74)
  2. juce_amalgamated.h (+6 / -1)
  3. src/audio/devices/juce_AudioDeviceManager.cpp (+1 / -4)
  4. src/audio/devices/juce_AudioIODeviceType.cpp (+5 / -0)
  5. src/audio/devices/juce_AudioIODeviceType.h (+2 / -0)
  6. src/core/juce_StandardHeader.h (+1 / -1)
  7. src/native/android/juce_android_Audio.cpp (+212 / -60)
  8. src/native/android/juce_android_NativeCode.cpp (+9 / -1)
  9. src/utilities/juce_RecentlyOpenedFilesList.cpp (+10 / -9)
  10. src/utilities/juce_RecentlyOpenedFilesList.h (+3 / -0)

juce_amalgamated.cpp (+238 / -74)

@@ -19800,6 +19800,11 @@ void RecentlyOpenedFilesList::addFile (const File& file)
setMaxNumberOfItems (maxNumberOfItems);
}

void RecentlyOpenedFilesList::removeFile (const File& file)
{
files.removeString (file.getFullPathName());
}

void RecentlyOpenedFilesList::removeNonExistentFiles()
{
for (int i = getNumFiles(); --i >= 0;)
@@ -19808,10 +19813,10 @@ void RecentlyOpenedFilesList::removeNonExistentFiles()
}

int RecentlyOpenedFilesList::createPopupMenuItems (PopupMenu& menuToAddTo,
const int baseItemId,
const bool showFullPaths,
const bool dontAddNonExistentFiles,
const File** filesToAvoid)
const int baseItemId,
const bool showFullPaths,
const bool dontAddNonExistentFiles,
const File** filesToAvoid)
{
int num = 0;

@@ -19825,17 +19830,13 @@ int RecentlyOpenedFilesList::createPopupMenuItems (PopupMenu& menuToAddTo,

if (filesToAvoid != 0)
{
const File** avoid = filesToAvoid;

while (*avoid != 0)
for (const File** avoid = filesToAvoid; *avoid != 0; ++avoid)
{
if (f == **avoid)
{
needsAvoiding = true;
break;
}

++avoid;
}
}

@@ -25245,8 +25246,6 @@ const OwnedArray <AudioIODeviceType>& AudioDeviceManager::getAvailableDeviceType
return availableDeviceTypes;
}

AudioIODeviceType* juce_createAudioIODeviceType_JACK();

static void addIfNotNull (OwnedArray <AudioIODeviceType>& list, AudioIODeviceType* const device)
{
if (device != 0)
@@ -25258,12 +25257,11 @@ void AudioDeviceManager::createAudioDeviceTypes (OwnedArray <AudioIODeviceType>&
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_WASAPI());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_DirectSound());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ASIO());

addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_CoreAudio());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_iOSAudio());

addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ALSA());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_JACK());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_Android());
}

const String AudioDeviceManager::initialise (const int numInputChannelsNeeded,
@@ -26144,6 +26142,10 @@ AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_ALSA() { return 0
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_JACK() { return 0; }
#endif

#if ! JUCE_ANDROID
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android() { return 0; }
#endif

END_JUCE_NAMESPACE
/*** End of inlined file: juce_AudioIODeviceType.cpp ***/

@@ -283697,12 +283699,20 @@ BEGIN_JUCE_NAMESPACE
FIELD (rectClass, rectBottom, "bottom", "I") \
\
METHOD (audioTrackClass, audioTrackConstructor, "<init>", "(IIIIII)V") \
STATICMETHOD (audioTrackClass, getMinBufferSize, "getMinBufferSize", "(III)I") \
STATICMETHOD (audioTrackClass, getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
METHOD (audioTrackClass, audioTrackPlay, "play", "()V") \
METHOD (audioTrackClass, audioTrackStop, "stop", "()V") \
METHOD (audioTrackClass, audioTrackRelease, "release", "()V") \
METHOD (audioTrackClass, audioTrackFlush, "flush", "()V") \
METHOD (audioTrackClass, audioTrackWrite, "write", "([SII)I") \
STATICMETHOD (audioTrackClass, getMinBufferSize, "getMinBufferSize", "(III)I") \
\
METHOD (audioRecordClass, audioRecordConstructor, "<init>", "(IIIII)V"); \
STATICMETHOD (audioRecordClass, getMinRecordBufferSize, "getMinBufferSize", "(III)I") \
METHOD (audioRecordClass, startRecording, "startRecording", "()V"); \
METHOD (audioRecordClass, stopRecording, "stop", "()V"); \
METHOD (audioRecordClass, audioRecordRead, "read", "([SII)I"); \
METHOD (audioRecordClass, audioRecordRelease, "release", "()V"); \

// List of extra methods needed when USE_ANDROID_CANVAS is enabled
#if ! USE_ANDROID_CANVAS
@@ -287630,23 +287640,49 @@ MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
// compiled on its own).
#if JUCE_INCLUDED_FILE

class AndroidAudioIODevice : public AudioIODevice
#define CHANNEL_OUT_STEREO ((jint) 12)
#define CHANNEL_IN_STEREO ((jint) 12)
#define CHANNEL_IN_MONO ((jint) 16)
#define ENCODING_PCM_16BIT ((jint) 2)
#define STREAM_MUSIC ((jint) 3)
#define MODE_STREAM ((jint) 1)

class AndroidAudioIODevice : public AudioIODevice,
public Thread
{
public:

AndroidAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, "Audio"),
callback (0),
sampleRate (0),
numInputChannels (0),
numOutputChannels (0),
actualBufferSize (0),
isRunning (false)
Thread ("audio"),
callback (0), sampleRate (0),
numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
numClientOutputChannels (0), numDeviceOutputChannels (0),
minbufferSize (0), actualBufferSize (0),
isRunning (false),
outputChannelBuffer (1, 1),
inputChannelBuffer (1, 1)
{
numInputChannels = 2;
numOutputChannels = 2;
JNIEnv* env = getEnv();
sampleRate = env->CallStaticIntMethod (android.audioTrackClass, android.getNativeOutputSampleRate, MODE_STREAM);

// TODO
const jint outMinBuffer = env->CallStaticIntMethod (android.audioTrackClass, android.getMinBufferSize, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);

jint inMinBuffer = env->CallStaticIntMethod (android.audioRecordClass, android.getMinRecordBufferSize, sampleRate, CHANNEL_IN_STEREO, ENCODING_PCM_16BIT);
if (inMinBuffer <= 0)
{
inMinBuffer = env->CallStaticIntMethod (android.audioRecordClass, android.getMinRecordBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);

if (inMinBuffer > 0)
numDeviceInputChannelsAvailable = 1;
else
numDeviceInputChannelsAvailable = 0;
}

minbufferSize = jmax (outMinBuffer, inMinBuffer) / 4;

DBG ("Audio device - min buffers: " << outMinBuffer << ", " << inMinBuffer << "; "
<< sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
}

~AndroidAudioIODevice()
@@ -287657,7 +287693,7 @@ public:
const StringArray getOutputChannelNames()
{
StringArray s;
s.add ("Left"); // TODO
s.add ("Left");
s.add ("Right");
return s;
}
@@ -287665,65 +287701,109 @@ public:
const StringArray getInputChannelNames()
{
StringArray s;
s.add ("Left");
s.add ("Right");

if (numDeviceInputChannelsAvailable == 2)
{
s.add ("Left");
s.add ("Right");
}
else if (numDeviceInputChannelsAvailable == 1)
{
s.add ("Audio Input");
}

return s;
}

int getNumSampleRates() { return 1;}
double getSampleRate (int index) { return sampleRate; }

int getNumBufferSizesAvailable() { return 1; }
int getBufferSizeSamples (int index) { return getDefaultBufferSize(); }
int getDefaultBufferSize() { return 1024; }
int getDefaultBufferSize() { return minbufferSize; }
int getNumBufferSizesAvailable() { return 10; }
int getBufferSizeSamples (int index) { return getDefaultBufferSize() + index * 128; }

const String open (const BigInteger& inputChannels,
const BigInteger& outputChannels,
double sampleRate,
double requestedSampleRate,
int bufferSize)
{
close();

if (sampleRate != (int) requestedSampleRate)
return "Sample rate not allowed";

lastError = String::empty;
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : jmax (minbufferSize, bufferSize);

numDeviceInputChannels = 0;
numDeviceOutputChannels = 0;

activeOutputChans = outputChannels;
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
numOutputChannels = activeOutputChans.countNumberOfSetBits();
numClientOutputChannels = activeOutputChans.countNumberOfSetBits();

activeInputChans = inputChannels;
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
numInputChannels = activeInputChans.countNumberOfSetBits();
numClientInputChannels = activeInputChans.countNumberOfSetBits();

// TODO
actualBufferSize = preferredBufferSize;
inputChannelBuffer.setSize (actualBufferSize, 2);
outputChannelBuffer.setSize (actualBufferSize, 2);
inputChannelBuffer.clear();
outputChannelBuffer.clear();

actualBufferSize = 0; // whatever is possible based on preferredBufferSize
JNIEnv* env = getEnv();

isRunning = true;
if (numClientOutputChannels > 0)
{
numDeviceOutputChannels = 2;
outputDevice = GlobalRef (env->NewObject (android.audioTrackClass, android.audioTrackConstructor,
STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
(jint) (actualBufferSize * numDeviceOutputChannels * sizeof (float)), MODE_STREAM));
isRunning = true;
}

return lastError;
}
if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
{
numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
inputDevice = GlobalRef (env->NewObject (android.audioRecordClass, android.audioRecordConstructor,
0 /* (default audio source) */, sampleRate,
numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
ENCODING_PCM_16BIT,
(jint) (actualBufferSize * numDeviceInputChannels * sizeof (float))));
isRunning = true;
}

void close()
{
if (isRunning)
{
isRunning = false;
if (outputDevice != 0)
env->CallVoidMethod (outputDevice, android.audioTrackPlay);

// TODO
if (inputDevice != 0)
env->CallVoidMethod (inputDevice, android.startRecording);

startThread (8);
}
else
{
closeDevices();
}
}

int getOutputLatencyInSamples()
{
return 0; // TODO
return lastError;
}

int getInputLatencyInSamples()
void close()
{
return 0; // TODO
if (isRunning)
{
stopThread (2000);
isRunning = false;
closeDevices();
}
}

int getOutputLatencyInSamples() { return 0; } // TODO
int getInputLatencyInSamples() { return 0; } // TODO
bool isOpen() { return isRunning; }
int getCurrentBufferSizeSamples() { return actualBufferSize; }
int getCurrentBitDepth() { return 16; }
@@ -287733,8 +287813,6 @@ public:
const String getLastError() { return lastError; }
bool isPlaying() { return isRunning && callback != 0; }

// TODO

void start (AudioIODeviceCallback* newCallback)
{
if (isRunning && callback != newCallback)
@@ -287764,21 +287842,111 @@ public:
}
}

void run()
{
JNIEnv* env = getEnv();
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

while (! threadShouldExit())
{
if (inputDevice != 0)
{
jint numRead = env->CallIntMethod (inputDevice, android.audioRecordRead, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);

if (numRead < actualBufferSize * numDeviceInputChannels)
{
DBG ("Audio read under-run! " << numRead);
}

jshort* const src = env->GetShortArrayElements (audioBuffer, 0);

for (int chan = 0; chan < numDeviceInputChannels; ++chan)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getSampleData (chan));
d.convertSamples (s, actualBufferSize);
}

env->ReleaseShortArrayElements (audioBuffer, src, 0);
}

if (threadShouldExit())
break;

{
const ScopedLock sl (callbackLock);

if (callback != 0)
{
callback->audioDeviceIOCallback ((const float**) inputChannelBuffer.getArrayOfChannels(), numClientInputChannels,
outputChannelBuffer.getArrayOfChannels(), numClientOutputChannels,
actualBufferSize);
}
else
{
outputChannelBuffer.clear();
}
}

if (outputDevice != 0)
{
if (threadShouldExit())
break;

jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);

for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (outputChannelBuffer.getSampleData (chan));
d.convertSamples (s, actualBufferSize);
}

env->ReleaseShortArrayElements (audioBuffer, dest, 0);
jint numWritten = env->CallIntMethod (outputDevice, android.audioTrackWrite, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);

if (numWritten < actualBufferSize * numDeviceOutputChannels)
{
DBG ("Audio write underrun! " << numWritten);
}
}
}
}

private:

CriticalSection callbackLock;
AudioIODeviceCallback* callback;
double sampleRate;
int numInputChannels, numOutputChannels;
int actualBufferSize;
jint sampleRate;
int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
int numClientOutputChannels, numDeviceOutputChannels;
int minbufferSize, actualBufferSize;
bool isRunning;
String lastError;
BigInteger activeOutputChans, activeInputChans;
GlobalRef outputDevice, inputDevice;
AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;

void closeDevices()
{
if (outputDevice != 0)
{
outputDevice.callVoidMethod (android.audioTrackStop);
outputDevice.callVoidMethod (android.audioTrackRelease);
outputDevice.clear();
}

if (inputDevice != 0)
{
inputDevice.callVoidMethod (android.stopRecording);
inputDevice.callVoidMethod (android.audioRecordRelease);
inputDevice.clear();
}
}

JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice);
};

// TODO
class AndroidAudioIODeviceType : public AudioIODeviceType
{
public:
@@ -287787,9 +287955,10 @@ public:
{
}

void scanForDevices()
{
}
void scanForDevices() {}
int getDefaultDeviceIndex (bool forInput) const { return 0; }
int getIndexOfDevice (AudioIODevice* device, bool asInput) const { return device != 0 ? 0 : -1; }
bool hasSeparateInputsAndOutputs() const { return false; }

const StringArray getDeviceNames (bool wantInputNames) const
{
@@ -287798,26 +287967,21 @@ public:
return s;
}

int getDefaultDeviceIndex (bool forInput) const
{
return 0;
}

int getIndexOfDevice (AudioIODevice* device, bool asInput) const
{
return device != 0 ? 0 : -1;
}

bool hasSeparateInputsAndOutputs() const { return false; }

AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName)
{
ScopedPointer<AndroidAudioIODevice> dev;

if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
return new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
{
dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);

return 0;
if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
dev = 0;
}

return dev.release();
}

private:
@@ -287825,7 +287989,7 @@ private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidAudioIODeviceType);
};

AudioIODeviceType* juce_createAudioIODeviceType_Android()
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
return new AndroidAudioIODeviceType();
}
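
Note: with the Android type registered in createAudioDeviceTypes(), an application drives it through the usual AudioDeviceManager calls. A minimal, hedged sketch — it assumes the JUCE 1.53-era AudioDeviceManager::initialise() signature, and MyCallback is a hypothetical AudioIODeviceCallback, not part of this commit:

// Hypothetical callback - any AudioIODeviceCallback implementation will do.
class MyCallback  : public AudioIODeviceCallback
{
public:
    void audioDeviceIOCallback (const float** inputChannelData, int numInputChannels,
                                float** outputChannelData, int numOutputChannels,
                                int numSamples)
    {
        // Just clear the output; a real app would render audio here.
        for (int chan = 0; chan < numOutputChannels; ++chan)
            zeromem (outputChannelData[chan], sizeof (float) * numSamples);
    }

    void audioDeviceAboutToStart (AudioIODevice*)  {}
    void audioDeviceStopped()                      {}
};

void startAudio (AudioDeviceManager& deviceManager, MyCallback& callback)
{
    // On Android this now picks up AndroidAudioIODeviceType via createAudioDeviceTypes().
    const String error (deviceManager.initialise (2,      // input channels wanted
                                                  2,      // output channels wanted
                                                  0,      // no saved settings
                                                  true)); // fall back to the default device
    if (error.isEmpty())
        deviceManager.addAudioCallback (&callback);
    else
        DBG ("Audio init failed: " + error);
}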


juce_amalgamated.h (+6 / -1)

@@ -73,7 +73,7 @@ namespace JuceDummyNamespace {}
*/
#define JUCE_MAJOR_VERSION 1
#define JUCE_MINOR_VERSION 53
#define JUCE_BUILDNUMBER 52
#define JUCE_BUILDNUMBER 53

/** Current Juce version number.

@@ -38336,6 +38336,8 @@ public:
static AudioIODeviceType* createAudioIODeviceType_ALSA();
/** Creates a JACK device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_JACK();
/** Creates an Android device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_Android();

protected:
explicit AudioIODeviceType (const String& typeName);
@@ -66707,6 +66709,9 @@ public:
*/
void addFile (const File& file);

/** Removes a file from the list. */
void removeFile (const File& file);

/** Checks each of the files in the list, removing any that don't exist.

You might want to call this after reloading a list of files, or before putting them


src/audio/devices/juce_AudioDeviceManager.cpp (+1 / -4)

@@ -101,8 +101,6 @@ const OwnedArray <AudioIODeviceType>& AudioDeviceManager::getAvailableDeviceType
}
//==============================================================================
AudioIODeviceType* juce_createAudioIODeviceType_JACK();
static void addIfNotNull (OwnedArray <AudioIODeviceType>& list, AudioIODeviceType* const device)
{
if (device != 0)
@@ -114,12 +112,11 @@ void AudioDeviceManager::createAudioDeviceTypes (OwnedArray <AudioIODeviceType>&
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_WASAPI());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_DirectSound());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ASIO());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_CoreAudio());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_iOSAudio());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_ALSA());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_JACK());
addIfNotNull (list, AudioIODeviceType::createAudioIODeviceType_Android());
}
//==============================================================================


src/audio/devices/juce_AudioIODeviceType.cpp (+5 / -0)

@@ -68,4 +68,9 @@ AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_ALSA()
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_JACK() { return 0; }
#endif
#if ! JUCE_ANDROID
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android() { return 0; }
#endif
END_JUCE_NAMESPACE
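
Note: this follows JUCE's per-platform factory convention — the platform that actually supports a device type defines the real factory in its native code, while every other build compiles a stub returning 0, so AudioDeviceManager::createAudioDeviceTypes() can call all factories unconditionally and let addIfNotNull() discard the nulls. Illustrative sketch for a made-up platform "Foo" (names invented, not part of the commit; a real addition would also need a declaration in the AudioIODeviceType class header):

// Shared juce_AudioIODeviceType.cpp - stub compiled on every platform without Foo support:
#if ! JUCE_FOO
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Foo()   { return 0; }
#endif

// Foo-specific native code - the real factory, only compiled when JUCE_FOO is defined:
#if JUCE_FOO
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Foo()   { return new FooAudioIODeviceType(); }
#endif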

src/audio/devices/juce_AudioIODeviceType.h (+2 / -0)

@@ -146,6 +146,8 @@ public:
static AudioIODeviceType* createAudioIODeviceType_ALSA();
/** Creates a JACK device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_JACK();
/** Creates an Android device type if it's available on this platform, or returns null. */
static AudioIODeviceType* createAudioIODeviceType_Android();
protected:
explicit AudioIODeviceType (const String& typeName);
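
Note: being an ordinary static factory, the new function can also be called directly, e.g. for a quick device listing. A hedged sketch using only members visible in this diff (ScopedPointer, scanForDevices(), getDeviceNames()):

ScopedPointer<AudioIODeviceType> androidType (AudioIODeviceType::createAudioIODeviceType_Android());

if (androidType != 0)   // returns 0 on any non-Android build, via the stub above
{
    androidType->scanForDevices();
    DBG ("Android audio devices: " + androidType->getDeviceNames (false).joinIntoString (", "));
}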


src/core/juce_StandardHeader.h (+1 / -1)

@@ -33,7 +33,7 @@
*/
#define JUCE_MAJOR_VERSION 1
#define JUCE_MINOR_VERSION 53
#define JUCE_BUILDNUMBER 52
#define JUCE_BUILDNUMBER 53
/** Current Juce version number.


src/native/android/juce_android_Audio.cpp (+212 / -60)

@@ -27,25 +27,51 @@
// compiled on its own).
#if JUCE_INCLUDED_FILE
//==============================================================================
#define CHANNEL_OUT_STEREO ((jint) 12)
#define CHANNEL_IN_STEREO ((jint) 12)
#define CHANNEL_IN_MONO ((jint) 16)
#define ENCODING_PCM_16BIT ((jint) 2)
#define STREAM_MUSIC ((jint) 3)
#define MODE_STREAM ((jint) 1)
//==============================================================================
class AndroidAudioIODevice : public AudioIODevice
class AndroidAudioIODevice : public AudioIODevice,
public Thread
{
public:
//==============================================================================
AndroidAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, "Audio"),
callback (0),
sampleRate (0),
numInputChannels (0),
numOutputChannels (0),
actualBufferSize (0),
isRunning (false)
Thread ("audio"),
callback (0), sampleRate (0),
numClientInputChannels (0), numDeviceInputChannels (0), numDeviceInputChannelsAvailable (2),
numClientOutputChannels (0), numDeviceOutputChannels (0),
minbufferSize (0), actualBufferSize (0),
isRunning (false),
outputChannelBuffer (1, 1),
inputChannelBuffer (1, 1)
{
numInputChannels = 2;
numOutputChannels = 2;
JNIEnv* env = getEnv();
sampleRate = env->CallStaticIntMethod (android.audioTrackClass, android.getNativeOutputSampleRate, MODE_STREAM);
const jint outMinBuffer = env->CallStaticIntMethod (android.audioTrackClass, android.getMinBufferSize, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
jint inMinBuffer = env->CallStaticIntMethod (android.audioRecordClass, android.getMinRecordBufferSize, sampleRate, CHANNEL_IN_STEREO, ENCODING_PCM_16BIT);
if (inMinBuffer <= 0)
{
inMinBuffer = env->CallStaticIntMethod (android.audioRecordClass, android.getMinRecordBufferSize, sampleRate, CHANNEL_IN_MONO, ENCODING_PCM_16BIT);
if (inMinBuffer > 0)
numDeviceInputChannelsAvailable = 1;
else
numDeviceInputChannelsAvailable = 0;
}
// TODO
minbufferSize = jmax (outMinBuffer, inMinBuffer) / 4;
DBG ("Audio device - min buffers: " << outMinBuffer << ", " << inMinBuffer << "; "
<< sampleRate << " Hz; input chans: " << numDeviceInputChannelsAvailable);
}
~AndroidAudioIODevice()
@@ -56,7 +82,7 @@ public:
const StringArray getOutputChannelNames()
{
StringArray s;
s.add ("Left"); // TODO
s.add ("Left");
s.add ("Right");
return s;
}
@@ -64,43 +90,93 @@ public:
const StringArray getInputChannelNames()
{
StringArray s;
s.add ("Left");
s.add ("Right");
if (numDeviceInputChannelsAvailable == 2)
{
s.add ("Left");
s.add ("Right");
}
else if (numDeviceInputChannelsAvailable == 1)
{
s.add ("Audio Input");
}
return s;
}
int getNumSampleRates() { return 1;}
double getSampleRate (int index) { return sampleRate; }
int getNumBufferSizesAvailable() { return 1; }
int getBufferSizeSamples (int index) { return getDefaultBufferSize(); }
int getDefaultBufferSize() { return 1024; }
int getDefaultBufferSize() { return minbufferSize; }
int getNumBufferSizesAvailable() { return 10; }
int getBufferSizeSamples (int index) { return getDefaultBufferSize() + index * 128; }
const String open (const BigInteger& inputChannels,
const BigInteger& outputChannels,
double sampleRate,
double requestedSampleRate,
int bufferSize)
{
close();
if (sampleRate != (int) requestedSampleRate)
return "Sample rate not allowed";
lastError = String::empty;
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
int preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : jmax (minbufferSize, bufferSize);
numDeviceInputChannels = 0;
numDeviceOutputChannels = 0;
activeOutputChans = outputChannels;
activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
numOutputChannels = activeOutputChans.countNumberOfSetBits();
numClientOutputChannels = activeOutputChans.countNumberOfSetBits();
activeInputChans = inputChannels;
activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
numInputChannels = activeInputChans.countNumberOfSetBits();
numClientInputChannels = activeInputChans.countNumberOfSetBits();
actualBufferSize = preferredBufferSize;
inputChannelBuffer.setSize (actualBufferSize, 2);
outputChannelBuffer.setSize (actualBufferSize, 2);
inputChannelBuffer.clear();
outputChannelBuffer.clear();
// TODO
JNIEnv* env = getEnv();
if (numClientOutputChannels > 0)
{
numDeviceOutputChannels = 2;
outputDevice = GlobalRef (env->NewObject (android.audioTrackClass, android.audioTrackConstructor,
STREAM_MUSIC, sampleRate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
(jint) (actualBufferSize * numDeviceOutputChannels * sizeof (float)), MODE_STREAM));
isRunning = true;
}
if (numClientInputChannels > 0 && numDeviceInputChannelsAvailable > 0)
{
numDeviceInputChannels = jmin (numClientInputChannels, numDeviceInputChannelsAvailable);
inputDevice = GlobalRef (env->NewObject (android.audioRecordClass, android.audioRecordConstructor,
0 /* (default audio source) */, sampleRate,
numDeviceInputChannelsAvailable > 1 ? CHANNEL_IN_STEREO : CHANNEL_IN_MONO,
ENCODING_PCM_16BIT,
(jint) (actualBufferSize * numDeviceInputChannels * sizeof (float))));
isRunning = true;
}
if (isRunning)
{
if (outputDevice != 0)
env->CallVoidMethod (outputDevice, android.audioTrackPlay);
actualBufferSize = 0; // whatever is possible based on preferredBufferSize
if (inputDevice != 0)
env->CallVoidMethod (inputDevice, android.startRecording);
isRunning = true;
startThread (8);
}
else
{
closeDevices();
}
return lastError;
}
@@ -109,22 +185,14 @@ public:
{
if (isRunning)
{
stopThread (2000);
isRunning = false;
// TODO
closeDevices();
}
}
int getOutputLatencyInSamples()
{
return 0; // TODO
}
int getInputLatencyInSamples()
{
return 0; // TODO
}
int getOutputLatencyInSamples() { return 0; } // TODO
int getInputLatencyInSamples() { return 0; } // TODO
bool isOpen() { return isRunning; }
int getCurrentBufferSizeSamples() { return actualBufferSize; }
int getCurrentBitDepth() { return 16; }
@@ -134,8 +202,6 @@ public:
const String getLastError() { return lastError; }
bool isPlaying() { return isRunning && callback != 0; }
// TODO
void start (AudioIODeviceCallback* newCallback)
{
if (isRunning && callback != newCallback)
@@ -165,22 +231,112 @@ public:
}
}
void run()
{
JNIEnv* env = getEnv();
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
while (! threadShouldExit())
{
if (inputDevice != 0)
{
jint numRead = env->CallIntMethod (inputDevice, android.audioRecordRead, audioBuffer, 0, actualBufferSize * numDeviceInputChannels);
if (numRead < actualBufferSize * numDeviceInputChannels)
{
DBG ("Audio read under-run! " << numRead);
}
jshort* const src = env->GetShortArrayElements (audioBuffer, 0);
for (int chan = 0; chan < numDeviceInputChannels; ++chan)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const> s (src + chan, numDeviceInputChannels);
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> d (inputChannelBuffer.getSampleData (chan));
d.convertSamples (s, actualBufferSize);
}
env->ReleaseShortArrayElements (audioBuffer, src, 0);
}
if (threadShouldExit())
break;
{
const ScopedLock sl (callbackLock);
if (callback != 0)
{
callback->audioDeviceIOCallback ((const float**) inputChannelBuffer.getArrayOfChannels(), numClientInputChannels,
outputChannelBuffer.getArrayOfChannels(), numClientOutputChannels,
actualBufferSize);
}
else
{
outputChannelBuffer.clear();
}
}
if (outputDevice != 0)
{
if (threadShouldExit())
break;
jshort* const dest = env->GetShortArrayElements (audioBuffer, 0);
for (int chan = 0; chan < numDeviceOutputChannels; ++chan)
{
AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::NonConst> d (dest + chan, numDeviceOutputChannels);
AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::Const> s (outputChannelBuffer.getSampleData (chan));
d.convertSamples (s, actualBufferSize);
}
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
jint numWritten = env->CallIntMethod (outputDevice, android.audioTrackWrite, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
if (numWritten < actualBufferSize * numDeviceOutputChannels)
{
DBG ("Audio write underrun! " << numWritten);
}
}
}
}
private:
//==================================================================================================
CriticalSection callbackLock;
AudioIODeviceCallback* callback;
double sampleRate;
int numInputChannels, numOutputChannels;
int actualBufferSize;
jint sampleRate;
int numClientInputChannels, numDeviceInputChannels, numDeviceInputChannelsAvailable;
int numClientOutputChannels, numDeviceOutputChannels;
int minbufferSize, actualBufferSize;
bool isRunning;
String lastError;
BigInteger activeOutputChans, activeInputChans;
GlobalRef outputDevice, inputDevice;
AudioSampleBuffer inputChannelBuffer, outputChannelBuffer;
void closeDevices()
{
if (outputDevice != 0)
{
outputDevice.callVoidMethod (android.audioTrackStop);
outputDevice.callVoidMethod (android.audioTrackRelease);
outputDevice.clear();
}
if (inputDevice != 0)
{
inputDevice.callVoidMethod (android.stopRecording);
inputDevice.callVoidMethod (android.audioRecordRelease);
inputDevice.clear();
}
}
JUCE_DECLARE_NON_COPYABLE (AndroidAudioIODevice);
};
//==============================================================================
// TODO
class AndroidAudioIODeviceType : public AudioIODeviceType
{
public:
@@ -190,9 +346,10 @@ public:
}
//==============================================================================
void scanForDevices()
{
}
void scanForDevices() {}
int getDefaultDeviceIndex (bool forInput) const { return 0; }
int getIndexOfDevice (AudioIODevice* device, bool asInput) const { return device != 0 ? 0 : -1; }
bool hasSeparateInputsAndOutputs() const { return false; }
const StringArray getDeviceNames (bool wantInputNames) const
{
@@ -201,26 +358,21 @@ public:
return s;
}
int getDefaultDeviceIndex (bool forInput) const
{
return 0;
}
int getIndexOfDevice (AudioIODevice* device, bool asInput) const
{
return device != 0 ? 0 : -1;
}
bool hasSeparateInputsAndOutputs() const { return false; }
AudioIODevice* createDevice (const String& outputDeviceName,
const String& inputDeviceName)
{
ScopedPointer<AndroidAudioIODevice> dev;
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
return new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
{
dev = new AndroidAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
if (dev->getCurrentSampleRate() <= 0 || dev->getDefaultBufferSize() <= 0)
dev = 0;
}
return 0;
return dev.release();
}
private:
@@ -230,7 +382,7 @@ private:
//==============================================================================
AudioIODeviceType* juce_createAudioIODeviceType_Android()
AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_Android()
{
return new AndroidAudioIODeviceType();
}
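
Note: the heart of the new run() loop is the AudioData::Pointer conversion between the interleaved 16-bit Java arrays and JUCE's non-interleaved float buffers. A small standalone sketch of the same conversion, using a local array instead of a JNI buffer (the sample values and channel count are made up for illustration):

const int numSamples  = 4;
const int numChannels = 2;

// Interleaved 16-bit frames as they would come back from AudioRecord.read(): L R L R ...
jshort interleaved [numSamples * numChannels] = { 0, 1000, 2000, 3000, 4000, 5000, 6000, 7000 };

AudioSampleBuffer deinterleaved (numChannels, numSamples);

for (int chan = 0; chan < numChannels; ++chan)
{
    // The source walks every numChannels-th sample, starting at this channel's offset...
    AudioData::Pointer <AudioData::Int16, AudioData::NativeEndian, AudioData::Interleaved, AudioData::Const>
        src (interleaved + chan, numChannels);

    // ...the destination is a contiguous float channel in the AudioSampleBuffer.
    AudioData::Pointer <AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>
        dest (deinterleaved.getSampleData (chan));

    dest.convertSamples (src, numSamples);   // scales int16 into the -1.0..1.0 float range
}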


src/native/android/juce_android_NativeCode.cpp (+9 / -1)

@@ -203,12 +203,20 @@ BEGIN_JUCE_NAMESPACE
FIELD (rectClass, rectBottom, "bottom", "I") \
\
METHOD (audioTrackClass, audioTrackConstructor, "<init>", "(IIIIII)V") \
STATICMETHOD (audioTrackClass, getMinBufferSize, "getMinBufferSize", "(III)I") \
STATICMETHOD (audioTrackClass, getNativeOutputSampleRate, "getNativeOutputSampleRate", "(I)I") \
METHOD (audioTrackClass, audioTrackPlay, "play", "()V") \
METHOD (audioTrackClass, audioTrackStop, "stop", "()V") \
METHOD (audioTrackClass, audioTrackRelease, "release", "()V") \
METHOD (audioTrackClass, audioTrackFlush, "flush", "()V") \
METHOD (audioTrackClass, audioTrackWrite, "write", "([SII)I") \
STATICMETHOD (audioTrackClass, getMinBufferSize, "getMinBufferSize", "(III)I") \
\
METHOD (audioRecordClass, audioRecordConstructor, "<init>", "(IIIII)V"); \
STATICMETHOD (audioRecordClass, getMinRecordBufferSize, "getMinBufferSize", "(III)I") \
METHOD (audioRecordClass, startRecording, "startRecording", "()V"); \
METHOD (audioRecordClass, stopRecording, "stop", "()V"); \
METHOD (audioRecordClass, audioRecordRead, "read", "([SII)I"); \
METHOD (audioRecordClass, audioRecordRelease, "release", "()V"); \
//==============================================================================
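
Note: each METHOD/STATICMETHOD entry above pairs a Java method name with its JNI type signature — "(III)I", for example, is a static method taking three ints and returning an int. Resolved by hand with plain JNI it would look roughly like this (illustrative only; JUCE's macro machinery performs the actual lookup):

// Standard JNI calls - this is what a STATICMETHOD table entry boils down to at runtime.
jclass audioTrackClass = env->FindClass ("android/media/AudioTrack");

jmethodID getMinBufferSize = env->GetStaticMethodID (audioTrackClass,
                                                     "getMinBufferSize", "(III)I");

// sampleRate / channelConfig / encoding are the three ints from the "(III)I" signature.
const jint minBufferBytes = env->CallStaticIntMethod (audioTrackClass, getMinBufferSize,
                                                      (jint) 44100,
                                                      (jint) 12,  // CHANNEL_OUT_STEREO
                                                      (jint) 2);  // ENCODING_PCM_16BIT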


src/utilities/juce_RecentlyOpenedFilesList.cpp (+10 / -9)

@@ -74,6 +74,11 @@ void RecentlyOpenedFilesList::addFile (const File& file)
setMaxNumberOfItems (maxNumberOfItems);
}
void RecentlyOpenedFilesList::removeFile (const File& file)
{
files.removeString (file.getFullPathName());
}
void RecentlyOpenedFilesList::removeNonExistentFiles()
{
for (int i = getNumFiles(); --i >= 0;)
@@ -83,10 +88,10 @@ void RecentlyOpenedFilesList::removeNonExistentFiles()
//==============================================================================
int RecentlyOpenedFilesList::createPopupMenuItems (PopupMenu& menuToAddTo,
const int baseItemId,
const bool showFullPaths,
const bool dontAddNonExistentFiles,
const File** filesToAvoid)
const int baseItemId,
const bool showFullPaths,
const bool dontAddNonExistentFiles,
const File** filesToAvoid)
{
int num = 0;
@@ -100,17 +105,13 @@ int RecentlyOpenedFilesList::createPopupMenuItems (PopupMenu& menuToAddTo,
if (filesToAvoid != 0)
{
const File** avoid = filesToAvoid;
while (*avoid != 0)
for (const File** avoid = filesToAvoid; *avoid != 0; ++avoid)
{
if (f == **avoid)
{
needsAvoiding = true;
break;
}
++avoid;
}
}


src/utilities/juce_RecentlyOpenedFilesList.h (+3 / -0)

@@ -95,6 +95,9 @@ public:
*/
void addFile (const File& file);
/** Removes a file from the list. */
void removeFile (const File& file);
/** Checks each of the files in the list, removing any that don't exist.
You might want to call this after reloading a list of files, or before putting them
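
Note: the new removeFile() complements addFile(); a common use is dropping an entry once it can no longer be opened. A hedged sketch — MyApp, loadDocument() and the recentFiles member are hypothetical, not part of this commit:

void MyApp::openFile (const File& file)
{
    if (loadDocument (file))          // hypothetical loader; returns false on failure
        recentFiles.addFile (file);   // recentFiles is a RecentlyOpenedFilesList member
    else
        recentFiles.removeFile (file);   // keep the menu free of entries that no longer open
}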

