Browse Source

Update juce

tags/2018-04-16
falkTX 9 years ago
parent
commit
7bcbc9bf43
18 changed files with 604 additions and 260 deletions
  1. +1
    -1
      libs/juce/source/modules/juce_audio_basics/mpe/juce_MPENote.cpp
  2. +14
    -0
      libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZone.cpp
  3. +6
    -0
      libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZone.h
  4. +46
    -3
      libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.cpp
  5. +38
    -0
      libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.h
  6. +1
    -1
      libs/juce/source/modules/juce_audio_devices/juce_module_info
  7. +376
    -195
      libs/juce/source/modules/juce_audio_devices/native/juce_ios_Audio.cpp
  8. +9
    -0
      libs/juce/source/modules/juce_audio_plugin_client/AU/juce_AU_Wrapper.mm
  9. +5
    -4
      libs/juce/source/modules/juce_audio_plugin_client/VST/juce_VST_Wrapper.cpp
  10. +7
    -0
      libs/juce/source/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.h
  11. +49
    -19
      libs/juce/source/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm
  12. +7
    -5
      libs/juce/source/modules/juce_audio_processors/format_types/juce_VSTPluginFormat.cpp
  13. +3
    -0
      libs/juce/source/modules/juce_audio_processors/processors/juce_AudioProcessor.h
  14. +3
    -1
      libs/juce/source/modules/juce_core/native/juce_BasicNativeHeaders.h
  15. +0
    -6
      libs/juce/source/modules/juce_core/text/juce_CharPointer_UTF8.h
  16. +37
    -22
      libs/juce/source/modules/juce_core/text/juce_CharacterFunctions.h
  17. +1
    -2
      libs/juce/source/modules/juce_gui_basics/menus/juce_PopupMenu.cpp
  18. +1
    -1
      libs/juce/source/modules/juce_gui_basics/menus/juce_PopupMenu.h

+ 1
- 1
libs/juce/source/modules/juce_audio_basics/mpe/juce_MPENote.cpp View File

@@ -71,7 +71,7 @@ MPENote::MPENote() noexcept
//==============================================================================
bool MPENote::isValid() const noexcept
{
return midiChannel > 0 && midiChannel <= 16 && initialNote >= 0 && initialNote <= 127;
return midiChannel > 0 && midiChannel <= 16 && initialNote < 128;
}
//==============================================================================


+ 14
- 0
libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZone.cpp View File

@@ -144,6 +144,20 @@ bool MPEZone::truncateToFit (MPEZone other) noexcept
return true;
}
//==========================================================================
bool MPEZone::operator== (const MPEZone& other) const noexcept
{
return masterChannel == other.masterChannel
&& numNoteChannels == other.numNoteChannels
&& perNotePitchbendRange == other.perNotePitchbendRange
&& masterPitchbendRange == other.masterPitchbendRange;
}
bool MPEZone::operator!= (const MPEZone& other) const noexcept
{
return ! operator== (other);
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS


+ 6
- 0
libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZone.h View File

@@ -120,6 +120,12 @@ struct JUCE_API MPEZone
*/
bool truncateToFit (MPEZone zoneToAvoid) noexcept;
/** @returns true if this zone is equal to the one passed in. */
bool operator== (const MPEZone& other) const noexcept;
/** @returns true if this zone is not equal to the one passed in. */
bool operator!= (const MPEZone& other) const noexcept;
private:
//==========================================================================
int masterChannel;


+ 46
- 3
libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.cpp View File

@@ -26,6 +26,17 @@ MPEZoneLayout::MPEZoneLayout() noexcept
{
}
MPEZoneLayout::MPEZoneLayout (const MPEZoneLayout& other)
: zones (other.zones)
{
}
MPEZoneLayout& MPEZoneLayout::operator= (const MPEZoneLayout& other)
{
zones = other.zones;
return *this;
}
//==============================================================================
bool MPEZoneLayout::addZone (MPEZone newZone)
{
@@ -46,6 +57,7 @@ bool MPEZoneLayout::addZone (MPEZone newZone)
}
zones.add (newZone);
listeners.call (&MPEZoneLayout::Listener::zoneLayoutChanged, *this);
return noOtherZonesModified;
}
@@ -66,6 +78,7 @@ MPEZone* MPEZoneLayout::getZoneByIndex (int index) const noexcept
void MPEZoneLayout::clearAllZones()
{
zones.clear();
listeners.call (&MPEZoneLayout::Listener::zoneLayoutChanged, *this);
}
//==============================================================================
@@ -106,12 +119,23 @@ void MPEZoneLayout::processPitchbendRangeRpnMessage (MidiRPNMessage rpn)
{
if (MPEZone* zone = getZoneByFirstNoteChannel (rpn.channel))
{
zone->setPerNotePitchbendRange (rpn.value);
return;
if (zone->getPerNotePitchbendRange() != rpn.value)
{
zone->setPerNotePitchbendRange (rpn.value);
listeners.call (&MPEZoneLayout::Listener::zoneLayoutChanged, *this);
return;
}
}
if (MPEZone* zone = getZoneByMasterChannel (rpn.channel))
zone->setMasterPitchbendRange (rpn.value);
{
if (zone->getMasterPitchbendRange() != rpn.value)
{
zone->setMasterPitchbendRange (rpn.value);
listeners.call (&MPEZoneLayout::Listener::zoneLayoutChanged, *this);
return;
}
}
}
//==============================================================================
@@ -162,6 +186,25 @@ MPEZone* MPEZoneLayout::getZoneByNoteChannel (int channel) const noexcept
return nullptr;
}
//==============================================================================
void MPEZoneLayout::addListener (Listener* const listenerToAdd) noexcept
{
listeners.add (listenerToAdd);
}
void MPEZoneLayout::removeListener (Listener* const listenerToRemove) noexcept
{
listeners.remove (listenerToRemove);
}
MPEZoneLayout::Listener::Listener()
{
}
MPEZoneLayout::Listener::~Listener()
{
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS


+ 38
- 0
libs/juce/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.h View File

@@ -47,6 +47,16 @@ public:
*/
MPEZoneLayout() noexcept;
/** Copy constructor.
This will not copy the listeners registered to the MPEZoneLayout.
*/
MPEZoneLayout (const MPEZoneLayout& other);
/** Copy assignment operator.
This will not copy the listeners registered to the MPEZoneLayout.
*/
MPEZoneLayout& operator= (const MPEZoneLayout& other);
/** Adds a new MPE zone to the layout.
@param newZone The zone to add.
@@ -115,10 +125,38 @@ public:
*/
MPEZone* getZoneByNoteChannel (int midiChannel) const noexcept;
//==========================================================================
/** Listener class. Derive from this class to allow your class to be
notified about changes to the zone layout.
*/
class Listener
{
public:
/** Constructor. */
Listener();
/** Destructor. */
virtual ~Listener();
/** Implement this callback to be notified about any changes to this
MPEZoneLayout. Will be called whenever a zone is added, zones are
removed, or any zone's master or note pitchbend ranges change.
*/
virtual void zoneLayoutChanged (const MPEZoneLayout& layout) = 0;
};
//==========================================================================
/** Adds a listener. */
void addListener (Listener* const listenerToAdd) noexcept;
/** Removes a listener. */
void removeListener (Listener* const listenerToRemove) noexcept;
private:
//==========================================================================
Array<MPEZone> zones;
MidiRPNDetector rpnDetector;
ListenerList<Listener> listeners;
void processRpnMessage (MidiRPNMessage);
void processZoneLayoutRpnMessage (MidiRPNMessage);


+ 1
- 1
libs/juce/source/modules/juce_audio_devices/juce_module_info View File

@@ -22,7 +22,7 @@
"native/*" ],
"OSXFrameworks": "CoreAudio CoreMIDI DiscRecording",
"iOSFrameworks": "AudioToolbox CoreMIDI",
"iOSFrameworks": "AudioToolbox CoreMIDI CoreAudio AVFoundation",
"LinuxLibs": "asound",
"mingwLibs": "winmm"
}

+ 376
- 195
libs/juce/source/modules/juce_audio_devices/native/juce_ios_Audio.cpp View File

@@ -22,56 +22,256 @@
==============================================================================
*/
class iOSAudioIODevice : public AudioIODevice
class iOSAudioIODevice;
//==================================================================================================
struct AudioSessionHolder
{
public:
iOSAudioIODevice (const String& deviceName)
: AudioIODevice (deviceName, "Audio"),
actualBufferSize (0),
isRunning (false),
audioUnit (0),
callback (nullptr),
floatData (1, 2)
AudioSessionHolder();
~AudioSessionHolder();
void handleStatusChange (bool enabled, const char* reason) const;
void handleRouteChange (const char* reason) const;
Array<iOSAudioIODevice*> activeDevices;
id nativeSession;
};
static const char* getRoutingChangeReason (AVAudioSessionRouteChangeReason reason) noexcept
{
switch (reason)
{
case AVAudioSessionRouteChangeReasonNewDeviceAvailable: return "New device available";
case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: return "Old device unavailable";
case AVAudioSessionRouteChangeReasonCategoryChange: return "Category change";
case AVAudioSessionRouteChangeReasonOverride: return "Override";
case AVAudioSessionRouteChangeReasonWakeFromSleep: return "Wake from sleep";
case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: return "No suitable route for category";
case AVAudioSessionRouteChangeReasonRouteConfigurationChange: return "Route configuration change";
case AVAudioSessionRouteChangeReasonUnknown:
default: return "Unknown";
}
}
bool getNotificationValueForKey (NSNotification* notification, NSString* key, NSUInteger& value) noexcept
{
if (notification != nil)
{
if (NSDictionary* userInfo = [notification userInfo])
{
if (NSNumber* number = [userInfo objectForKey: key])
{
value = [number unsignedIntegerValue];
return true;
}
}
}
jassertfalse;
return false;
}
} // juce namespace
//==================================================================================================
@interface iOSAudioSessionNative : NSObject
{
@private
juce::AudioSessionHolder* audioSessionHolder;
};
- (id) init: (juce::AudioSessionHolder*) holder;
- (void) dealloc;
- (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification;
- (void) handleMediaServicesReset;
- (void) handleMediaServicesLost;
- (void) handleRouteChange: (NSNotification*) notification;
@end
@implementation iOSAudioSessionNative
- (id) init: (juce::AudioSessionHolder*) holder
{
self = [super init];
if (self != nil)
{
audioSessionHolder = holder;
auto session = [AVAudioSession sharedInstance];
auto centre = [NSNotificationCenter defaultCenter];
[centre addObserver: self
selector: @selector (audioSessionDidChangeInterruptionType:)
name: AVAudioSessionInterruptionNotification
object: session];
[centre addObserver: self
selector: @selector (handleMediaServicesLost)
name: AVAudioSessionMediaServicesWereLostNotification
object: session];
[centre addObserver: self
selector: @selector (handleMediaServicesReset)
name: AVAudioSessionMediaServicesWereResetNotification
object: session];
[centre addObserver: self
selector: @selector (handleRouteChange:)
name: AVAudioSessionRouteChangeNotification
object: session];
}
else
{
getSessionHolder().activeDevices.add (this);
jassertfalse;
}
return self;
}
numInputChannels = 2;
numOutputChannels = 2;
preferredBufferSize = 0;
- (void) dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver: self];
[super dealloc];
}
- (void) audioSessionDidChangeInterruptionType: (NSNotification*) notification
{
NSUInteger value;
updateDeviceInfo();
if (juce::getNotificationValueForKey (notification, AVAudioSessionInterruptionTypeKey, value))
{
switch ((AVAudioSessionInterruptionType) value)
{
case AVAudioSessionInterruptionTypeBegan:
audioSessionHolder->handleStatusChange (false, "AVAudioSessionInterruptionTypeBegan");
break;
case AVAudioSessionInterruptionTypeEnded:
audioSessionHolder->handleStatusChange (true, "AVAudioSessionInterruptionTypeEnded");
break;
// No default so the code doesn't compile if this enum is extended.
}
}
}
- (void) handleMediaServicesReset
{
audioSessionHolder->handleStatusChange (true, "AVAudioSessionMediaServicesWereResetNotification");
}
- (void) handleMediaServicesLost
{
audioSessionHolder->handleStatusChange (false, "AVAudioSessionMediaServicesWereLostNotification");
}
- (void) handleRouteChange: (NSNotification*) notification
{
NSUInteger value;
if (juce::getNotificationValueForKey (notification, AVAudioSessionRouteChangeReasonKey, value))
audioSessionHolder->handleRouteChange (juce::getRoutingChangeReason ((AVAudioSessionRouteChangeReason) value));
}
@end
//==================================================================================================
namespace juce {
#ifndef JUCE_IOS_AUDIO_LOGGING
#define JUCE_IOS_AUDIO_LOGGING 0
#endif
#if JUCE_IOS_AUDIO_LOGGING
#define JUCE_IOS_AUDIO_LOG(x) DBG(x)
#else
#define JUCE_IOS_AUDIO_LOG(x)
#endif
static void logNSError (NSError* e)
{
if (e != nil)
{
JUCE_IOS_AUDIO_LOG ("iOS Audio error: " << [e.localizedDescription UTF8String]);
jassertfalse;
}
}
#define JUCE_NSERROR_CHECK(X) { NSError* error = nil; X; logNSError (error); }
//==================================================================================================
class iOSAudioIODevice : public AudioIODevice
{
public:
iOSAudioIODevice (const String& deviceName) : AudioIODevice (deviceName, "Audio")
{
sessionHolder->activeDevices.add (this);
updateSampleRateAndAudioInput();
}
~iOSAudioIODevice()
{
getSessionHolder().activeDevices.removeFirstMatchingValue (this);
sessionHolder->activeDevices.removeFirstMatchingValue (this);
close();
}
StringArray getOutputChannelNames() override
{
StringArray s;
s.add ("Left");
s.add ("Right");
return s;
return { "Left", "Right" };
}
StringArray getInputChannelNames() override
{
StringArray s;
if (audioInputIsAvailable)
{
s.add ("Left");
s.add ("Right");
}
return s;
return { "Left", "Right" };
return {};
}
static void setAudioSessionActive (bool enabled)
{
JUCE_NSERROR_CHECK ([[AVAudioSession sharedInstance] setActive: enabled
error: &error]);
}
static double trySampleRate (double rate)
{
auto session = [AVAudioSession sharedInstance];
JUCE_NSERROR_CHECK ([session setPreferredSampleRate: rate
error: &error]);
return session.sampleRate;
}
Array<double> getAvailableSampleRates() override
{
// can't find a good way to actually ask the device for which of these it supports..
static const double rates[] = { 8000.0, 16000.0, 22050.0, 32000.0, 44100.0, 48000.0 };
return Array<double> (rates, numElementsInArray (rates));
Array<double> rates;
// Important: the supported audio sample rates change on the iPhone 6S
// depending on whether the headphones are plugged in or not!
setAudioSessionActive (true);
const double lowestRate = trySampleRate (4000);
const double highestRate = trySampleRate (192000);
for (double rate = lowestRate; rate <= highestRate; rate += 1000)
{
const double supportedRate = trySampleRate (rate);
rates.addIfNotAlreadyThere (supportedRate);
rate = jmax (rate, supportedRate);
}
for (auto r : rates)
{
ignoreUnused (r);
JUCE_IOS_AUDIO_LOG ("available rate = " + String (r, 0) + "Hz");
}
return rates;
}
Array<int> getAvailableBufferSizes() override
@@ -84,7 +284,7 @@ public:
return r;
}
int getDefaultBufferSize() override { return 1024; }
int getDefaultBufferSize() override { return 256; }
String open (const BigInteger& inputChannelsWanted,
const BigInteger& outputChannelsWanted,
@@ -93,7 +293,8 @@ public:
close();
lastError.clear();
preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
preferredBufferSize = bufferSize <= 0 ? getDefaultBufferSize()
: bufferSize;
// xxx set up channel mapping
@@ -107,34 +308,39 @@ public:
numInputChannels = activeInputChans.countNumberOfSetBits();
monoInputChannelNumber = activeInputChans.findNextSetBit (0);
AudioSessionSetActive (true);
setAudioSessionActive (true);
if (numInputChannels > 0 && audioInputIsAvailable)
{
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_PlayAndRecord);
setSessionUInt32Property (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput, 1);
}
else
{
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
}
// Set the session category & options:
auto session = [AVAudioSession sharedInstance];
AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
const bool useInputs = (numInputChannels > 0 && audioInputIsAvailable);
fixAudioRouteIfSetToReceiver();
NSString* category = (useInputs ? AVAudioSessionCategoryPlayAndRecord : AVAudioSessionCategoryPlayback);
NSUInteger options = AVAudioSessionCategoryOptionMixWithOthers; // Alternatively AVAudioSessionCategoryOptionDuckOthers
if (useInputs) // These options are only valid for category = PlayAndRecord
options |= (AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth);
setSessionFloat64Property (kAudioSessionProperty_PreferredHardwareSampleRate, targetSampleRate);
updateDeviceInfo();
JUCE_NSERROR_CHECK ([session setCategory: category
withOptions: options
error: &error]);
setSessionFloat32Property (kAudioSessionProperty_PreferredHardwareIOBufferDuration, preferredBufferSize / sampleRate);
fixAudioRouteIfSetToReceiver();
// Set the sample rate
trySampleRate (targetSampleRate);
updateSampleRateAndAudioInput();
updateCurrentBufferSize();
prepareFloatBuffers (actualBufferSize);
isRunning = true;
routingChanged (nullptr); // creates and starts the AU
handleRouteChange ("Started AudioUnit");
lastError = (audioUnit != 0 ? "" : "Couldn't open the device");
setAudioSessionActive (true);
lastError = audioUnit != 0 ? "" : "Couldn't open the device";
return lastError;
}
@@ -144,10 +350,7 @@ public:
{
isRunning = false;
setSessionUInt32Property (kAudioSessionProperty_AudioCategory, kAudioSessionCategory_MediaPlayback);
AudioSessionRemovePropertyListenerWithUserData (kAudioSessionProperty_AudioRouteChange, routingChangedStatic, this);
AudioSessionSetActive (false);
setAudioSessionActive (false);
if (audioUnit != 0)
{
@@ -157,24 +360,17 @@ public:
}
}
bool isOpen() override { return isRunning; }
bool isOpen() override { return isRunning; }
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
double getCurrentSampleRate() override { return sampleRate; }
int getCurrentBitDepth() override { return 16; }
int getCurrentBufferSizeSamples() override { return actualBufferSize; }
double getCurrentSampleRate() override { return sampleRate; }
int getCurrentBitDepth() override { return 16; }
BigInteger getActiveOutputChannels() const override { return activeOutputChans; }
BigInteger getActiveInputChannels() const override { return activeInputChans; }
int getOutputLatencyInSamples() override { return getLatency (kAudioSessionProperty_CurrentHardwareOutputLatency); }
int getInputLatencyInSamples() override { return getLatency (kAudioSessionProperty_CurrentHardwareInputLatency); }
int getLatency (AudioSessionPropertyID propID)
{
Float32 latency = 0;
getSessionProperty (propID, latency);
return roundToInt (latency * getCurrentSampleRate());
}
int getOutputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].outputLatency); }
int getInputLatencyInSamples() override { return roundToInt (getCurrentSampleRate() * [AVAudioSession sharedInstance].inputLatency); }
void start (AudioIODeviceCallback* newCallback) override
{
@@ -210,23 +406,82 @@ public:
bool setAudioPreprocessingEnabled (bool enable) override
{
return setSessionUInt32Property (kAudioSessionProperty_Mode, enable ? kAudioSessionMode_Default
: kAudioSessionMode_Measurement);
auto session = [AVAudioSession sharedInstance];
NSString* mode = (enable ? AVAudioSessionModeMeasurement
: AVAudioSessionModeDefault);
JUCE_NSERROR_CHECK ([session setMode: mode
error: &error]);
return session.mode == mode;
}
void invokeAudioDeviceErrorCallback (const String& reason)
{
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError (reason);
}
void handleStatusChange (bool enabled, const char* reason)
{
JUCE_IOS_AUDIO_LOG ("handleStatusChange: enabled: " << (int) enabled << ", reason: " << reason);
isRunning = enabled;
setAudioSessionActive (enabled);
if (enabled)
AudioOutputUnitStart (audioUnit);
else
AudioOutputUnitStop (audioUnit);
if (! enabled)
invokeAudioDeviceErrorCallback (reason);
}
void handleRouteChange (const char* reason)
{
JUCE_IOS_AUDIO_LOG ("handleRouteChange: reason: " << reason);
fixAudioRouteIfSetToReceiver();
if (isRunning)
{
invokeAudioDeviceErrorCallback (reason);
updateSampleRateAndAudioInput();
updateCurrentBufferSize();
createAudioUnit();
setAudioSessionActive (true);
if (audioUnit != 0)
{
UInt32 formatSize = sizeof (format);
AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
AudioOutputUnitStart (audioUnit);
}
if (callback)
callback->audioDeviceAboutToStart (this);
}
}
private:
//==================================================================================================
SharedResourcePointer<AudioSessionHolder> sessionHolder;
CriticalSection callbackLock;
Float64 sampleRate;
int numInputChannels, numOutputChannels;
int preferredBufferSize, actualBufferSize;
bool isRunning;
NSTimeInterval sampleRate = 0;
int numInputChannels = 2, numOutputChannels = 2;
int preferredBufferSize = 0, actualBufferSize = 0;
bool isRunning = false;
String lastError;
AudioStreamBasicDescription format;
AudioUnit audioUnit;
UInt32 audioInputIsAvailable;
AudioIODeviceCallback* callback;
AudioUnit audioUnit {};
bool audioInputIsAvailable = false;
AudioIODeviceCallback* callback = nullptr;
BigInteger activeOutputChans, activeInputChans;
AudioSampleBuffer floatData;
@@ -299,7 +554,7 @@ private:
callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
outputChannels, numOutputChannels, (int) numFrames);
short* shortData = (short*) data->mBuffers[0].mData;
short* const shortData = (short*) data->mBuffers[0].mData;
int n = 0;
if (numOutputChannels >= 2)
@@ -332,119 +587,31 @@ private:
return err;
}
void updateDeviceInfo()
void updateSampleRateAndAudioInput()
{
getSessionProperty (kAudioSessionProperty_CurrentHardwareSampleRate, sampleRate);
getSessionProperty (kAudioSessionProperty_AudioInputAvailable, audioInputIsAvailable);
auto session = [AVAudioSession sharedInstance];
sampleRate = session.sampleRate;
audioInputIsAvailable = session.isInputAvailable;
JUCE_IOS_AUDIO_LOG ("AVAudioSession: sampleRate: " << sampleRate << "Hz, audioInputAvailable: " << (int) audioInputIsAvailable);
}
void updateCurrentBufferSize()
{
Float32 bufferDuration = sampleRate > 0 ? (Float32) (preferredBufferSize / sampleRate) : 0.0f;
getSessionProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, bufferDuration);
actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
}
void routingChanged (const void* propertyValue)
{
if (! isRunning)
return;
if (propertyValue != nullptr)
{
CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) propertyValue;
CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
SInt32 routeChangeReason;
CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
auto session = [AVAudioSession sharedInstance];
NSTimeInterval bufferDuration = sampleRate > 0 ? (NSTimeInterval) (preferredBufferSize / sampleRate) : 0.0;
JUCE_NSERROR_CHECK ([session setPreferredIOBufferDuration: bufferDuration
error: &error]);
if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
{
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("Old device unavailable");
}
}
updateDeviceInfo();
createAudioUnit();
AudioSessionSetActive (true);
if (audioUnit != 0)
{
UInt32 formatSize = sizeof (format);
AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
updateCurrentBufferSize();
AudioOutputUnitStart (audioUnit);
}
}
//==================================================================================================
struct AudioSessionHolder
{
AudioSessionHolder()
{
AudioSessionInitialize (0, 0, interruptionListenerCallback, this);
}
static void interruptionListenerCallback (void* client, UInt32 interruptionType)
{
const Array<iOSAudioIODevice*>& activeDevices = static_cast<AudioSessionHolder*> (client)->activeDevices;
for (int i = activeDevices.size(); --i >= 0;)
activeDevices.getUnchecked(i)->interruptionListener (interruptionType);
}
Array<iOSAudioIODevice*> activeDevices;
};
static AudioSessionHolder& getSessionHolder()
{
static AudioSessionHolder audioSessionHolder;
return audioSessionHolder;
}
void interruptionListener (const UInt32 interruptionType)
{
if (interruptionType == kAudioSessionBeginInterruption)
{
isRunning = false;
AudioOutputUnitStop (audioUnit);
AudioSessionSetActive (false);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("iOS audio session interruption");
}
if (interruptionType == kAudioSessionEndInterruption)
{
isRunning = true;
AudioSessionSetActive (true);
AudioOutputUnitStart (audioUnit);
const ScopedLock sl (callbackLock);
if (callback != nullptr)
callback->audioDeviceError ("iOS audio session resumed");
}
bufferDuration = session.IOBufferDuration;
actualBufferSize = roundToInt (sampleRate * bufferDuration);
}
//==================================================================================================
static OSStatus processStatic (void* client, AudioUnitRenderActionFlags* flags, const AudioTimeStamp* time,
UInt32 /*busNumber*/, UInt32 numFrames, AudioBufferList* data)
{
return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
}
static void routingChangedStatic (void* client, AudioSessionPropertyID, UInt32 /*inDataSize*/, const void* propertyValue)
{
static_cast<iOSAudioIODevice*> (client)->routingChanged (propertyValue);
return static_cast<iOSAudioIODevice*> (client)->process (flags, time, numFrames, data);
}
//==================================================================================================
@@ -515,31 +682,28 @@ private:
// to make it loud. Needed because by default when using an input + output, the output is kept quiet.
static void fixAudioRouteIfSetToReceiver()
{
CFStringRef audioRoute = 0;
if (getSessionProperty (kAudioSessionProperty_AudioRoute, audioRoute) == noErr)
{
NSString* route = (NSString*) audioRoute;
auto session = [AVAudioSession sharedInstance];
auto route = session.currentRoute;
//DBG ("audio route: " + nsStringToJuce (route));
for (AVAudioSessionPortDescription* port in route.inputs)
{
ignoreUnused (port);
JUCE_IOS_AUDIO_LOG ("AVAudioSession: input: " << [port.description UTF8String]);
}
if ([route hasPrefix: @"Receiver"])
setSessionUInt32Property (kAudioSessionProperty_OverrideAudioRoute, kAudioSessionOverrideAudioRoute_Speaker);
for (AVAudioSessionPortDescription* port in route.outputs)
{
JUCE_IOS_AUDIO_LOG ("AVAudioSession: output: " << [port.description UTF8String]);
CFRelease (audioRoute);
if ([port.portName isEqualToString: @"Receiver"])
{
JUCE_NSERROR_CHECK ([session overrideOutputAudioPort: AVAudioSessionPortOverrideSpeaker
error: &error]);
setAudioSessionActive (true);
}
}
}
template <typename Type>
static OSStatus getSessionProperty (AudioSessionPropertyID propID, Type& result) noexcept
{
UInt32 valueSize = sizeof (result);
return AudioSessionGetProperty (propID, &valueSize, &result);
}
static bool setSessionUInt32Property (AudioSessionPropertyID propID, UInt32 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
static bool setSessionFloat32Property (AudioSessionPropertyID propID, Float32 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
static bool setSessionFloat64Property (AudioSessionPropertyID propID, Float64 v) noexcept { return AudioSessionSetProperty (propID, sizeof (v), &v) == kAudioSessionNoError; }
JUCE_DECLARE_NON_COPYABLE (iOSAudioIODevice)
};
@@ -559,8 +723,7 @@ public:
AudioIODevice* createDevice (const String& outputDeviceName, const String& inputDeviceName)
{
if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
: inputDeviceName);
return new iOSAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName : inputDeviceName);
return nullptr;
}
@@ -574,3 +737,21 @@ AudioIODeviceType* AudioIODeviceType::createAudioIODeviceType_iOSAudio()
{
return new iOSAudioIODeviceType();
}
//==================================================================================================
AudioSessionHolder::AudioSessionHolder() { nativeSession = [[iOSAudioSessionNative alloc] init: this]; }
AudioSessionHolder::~AudioSessionHolder() { [nativeSession release]; }
void AudioSessionHolder::handleStatusChange (bool enabled, const char* reason) const
{
for (auto device: activeDevices)
device->handleStatusChange (enabled, reason);
}
void AudioSessionHolder::handleRouteChange (const char* reason) const
{
for (auto device: activeDevices)
device->handleRouteChange (reason);
}
#undef JUCE_NSERROR_CHECK

+ 9
- 0
libs/juce/source/modules/juce_audio_plugin_client/AU/juce_AU_Wrapper.mm View File

@@ -390,6 +390,11 @@ public:
outWritable = true;
return noErr;
case kAudioUnitProperty_SupportsMPE:
outDataSize = sizeof (UInt32);
outWritable = false;
return noErr;
default: break;
}
}
@@ -423,6 +428,10 @@ public:
*(UInt32*) outData = isBypassed ? 1 : 0;
return noErr;
case kAudioUnitProperty_SupportsMPE:
*(UInt32*) outData = (juceFilter != nullptr && juceFilter->supportsMPE()) ? 1 : 0;
return noErr;
case kAudioUnitProperty_CocoaUI:
{
JUCE_AUTORELEASEPOOL


+ 5
- 4
libs/juce/source/modules/juce_audio_plugin_client/VST/juce_VST_Wrapper.cpp View File

@@ -413,12 +413,13 @@ public:
return 1;
}
// This tells Wavelab to use the UI thread to invoke open/close,
// like all other hosts do.
if (strcmp (text, "openCloseAnyThread") == 0)
{
// This tells Wavelab to use the UI thread to invoke open/close,
// like all other hosts do.
return -1;
}
if (strcmp (text, "MPE") == 0)
return filter->supportsMPE() ? 1 : 0;
#if JUCE_MAC
if (strcmp (text, "hasCockosViewAsConfig") == 0)


+ 7
- 0
libs/juce/source/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.h View File

@@ -53,3 +53,10 @@ private:
};
#endif
//==============================================================================
enum
{
/** Custom AudioUnit property used to indicate MPE support */
kAudioUnitProperty_SupportsMPE = 75001
};

+ 49
- 19
libs/juce/source/modules/juce_audio_processors/format_types/juce_AudioUnitPluginFormat.mm View File

@@ -445,12 +445,7 @@ public:
(int) (numOutputBusChannels * numOutputBusses),
(double) newSampleRate, estimatedSamplesPerBlock);
Float64 latencySecs = 0.0;
UInt32 latencySize = sizeof (latencySecs);
AudioUnitGetProperty (audioUnit, kAudioUnitProperty_Latency, kAudioUnitScope_Global,
0, &latencySecs, &latencySize);
setLatencySamples (roundToInt (latencySecs * newSampleRate));
updateLatency();
{
AudioStreamBasicDescription stream;
@@ -889,6 +884,16 @@ public:
}
}
void updateLatency()
{
Float64 latencySecs = 0.0;
UInt32 latencySize = sizeof (latencySecs);
AudioUnitGetProperty (audioUnit, kAudioUnitProperty_Latency, kAudioUnitScope_Global,
0, &latencySecs, &latencySize);
setLatencySamples (roundToInt (latencySecs * getSampleRate()));
}
void handleIncomingMidiMessage (void*, const MidiMessage& message)
{
const ScopedLock sl (midiInLock);
@@ -993,22 +998,24 @@ private:
AUEventListenerAddEventType (eventListenerRef, nullptr, &event);
}
// Add a listener for program changes
AudioUnitEvent event;
event.mArgument.mProperty.mAudioUnit = audioUnit;
event.mArgument.mProperty.mPropertyID = kAudioUnitProperty_PresentPreset;
event.mArgument.mProperty.mScope = kAudioUnitScope_Global;
event.mArgument.mProperty.mElement = 0;
event.mEventType = kAudioUnitEvent_PropertyChange;
AUEventListenerAddEventType (eventListenerRef, nullptr, &event);
// Add a listener for parameter list changes
event.mArgument.mProperty.mPropertyID = kAudioUnitProperty_ParameterList;
AUEventListenerAddEventType (eventListenerRef, nullptr, &event);
addPropertyChangeListener (kAudioUnitProperty_PresentPreset);
addPropertyChangeListener (kAudioUnitProperty_ParameterList);
addPropertyChangeListener (kAudioUnitProperty_Latency);
}
}
void addPropertyChangeListener (AudioUnitPropertyID type) const
{
AudioUnitEvent event;
event.mEventType = kAudioUnitEvent_PropertyChange;
event.mArgument.mProperty.mPropertyID = type;
event.mArgument.mProperty.mAudioUnit = audioUnit;
event.mArgument.mProperty.mPropertyID = kAudioUnitProperty_PresentPreset;
event.mArgument.mProperty.mScope = kAudioUnitScope_Global;
event.mArgument.mProperty.mElement = 0;
AUEventListenerAddEventType (eventListenerRef, nullptr, &event);
}
void eventCallback (const AudioUnitEvent& event, AudioUnitParameterValue newValue)
{
switch (event.mEventType)
@@ -1040,6 +1047,8 @@ private:
updateHostDisplay();
else if (event.mArgument.mProperty.mPropertyID == kAudioUnitProperty_PresentPreset)
sendAllParametersChangedEvents();
else if (event.mArgument.mProperty.mPropertyID == kAudioUnitProperty_Latency)
updateLatency();
break;
}
@@ -1329,6 +1338,27 @@ private:
return false;
}
bool supportsMPE() const override
{
UInt32 dataSize = 0;
Boolean isWritable = false;
if (AudioUnitGetPropertyInfo (audioUnit, kAudioUnitProperty_SupportsMPE,
kAudioUnitScope_Global, 0, &dataSize, &isWritable) == noErr
&& dataSize == sizeof (UInt32))
{
UInt32 result = 0;
if (AudioUnitGetProperty (audioUnit, kAudioUnitProperty_SupportsMPE,
kAudioUnitScope_Global, 0, &result, &dataSize) == noErr)
{
return result > 0;
}
}
return false;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AudioUnitPluginInstance)
};


+ 7
- 5
libs/juce/source/modules/juce_audio_processors/format_types/juce_VSTPluginFormat.cpp View File

@@ -868,10 +868,10 @@ public:
if (getVstCategory() != kPlugCategShell) // (workaround for Waves 5 plugins which crash during this call)
updateStoredProgramNames();
wantsMidiMessages = dispatch (effCanDo, 0, 0, (void*) "receiveVstMidiEvent", 0) > 0;
wantsMidiMessages = pluginCanDo ("receiveVstMidiEvent") > 0;
#if JUCE_MAC && JUCE_SUPPORT_CARBON
usesCocoaNSView = (dispatch (effCanDo, 0, 0, (void*) "hasCockosViewAsConfig", 0) & (int) 0xffff0000) == 0xbeef0000;
usesCocoaNSView = (pluginCanDo ("hasCockosViewAsConfig") & (int) 0xffff0000) == 0xbeef0000;
#endif
setLatencySamples (effect->initialDelay);
@@ -910,10 +910,13 @@ public:
}
bool acceptsMidi() const override { return wantsMidiMessages; }
bool producesMidi() const override { return dispatch (effCanDo, 0, 0, (void*) "sendVstMidiEvent", 0) > 0; }
bool producesMidi() const override { return pluginCanDo ("sendVstMidiEvent") > 0; }
bool supportsMPE() const override { return pluginCanDo ("MPE") > 0; }
VstPlugCategory getVstCategory() const noexcept { return (VstPlugCategory) dispatch (effGetPlugCategory, 0, 0, 0, 0); }
int pluginCanDo (const char* text) const { return (int) dispatch (effCanDo, 0, 0, (void*) text, 0); }
//==============================================================================
void prepareToPlay (double rate, int samplesPerBlockExpected) override
{
@@ -930,8 +933,7 @@ public:
if (initialised)
{
wantsMidiMessages = wantsMidiMessages
|| (dispatch (effCanDo, 0, 0, (void*) "receiveVstMidiEvent", 0) > 0);
wantsMidiMessages = wantsMidiMessages || (pluginCanDo ("receiveVstMidiEvent") > 0);
if (wantsMidiMessages)
midiEventsToSend.ensureSize (256);


+ 3
- 0
libs/juce/source/modules/juce_audio_processors/processors/juce_AudioProcessor.h View File

@@ -465,6 +465,9 @@ public:
/** Returns true if the processor produces midi messages. */
virtual bool producesMidi() const = 0;
/** Returns true if the processor supports MPE (MIDI Polyphonic Expression).
    The default implementation returns false; subclasses that implement MPE
    should override this to return true.
*/
virtual bool supportsMPE() const { return false; }
//==============================================================================
/** This returns a critical section that will automatically be locked while the host
is calling the processBlock() method.


+ 3
- 1
libs/juce/source/modules/juce_core/native/juce_BasicNativeHeaders.h View File

@@ -114,7 +114,9 @@
#if JUCE_MINGW
#include <basetyps.h>
#include <sys/time.h>
#define alloca(x) __builtin_alloca((x))
#ifndef alloca
#define alloca __builtin_alloca
#endif
#else
#include <crtdbg.h>
#include <comutil.h>


+ 0
- 6
libs/juce/source/modules/juce_core/text/juce_CharPointer_UTF8.h View File

@@ -420,13 +420,7 @@ public:
/** Compares this string with another one. */
int compareIgnoreCase (const CharPointer_UTF8 other) const noexcept
{
#if JUCE_MINGW || (JUCE_WINDOWS && JUCE_CLANG)
return CharacterFunctions::compareIgnoreCase (*this, other);
#elif JUCE_WINDOWS
return stricmp (data, other.data);
#else
return strcasecmp (data, other.data);
#endif
}
/** Compares this string with another one, up to a specified number of characters. */


+ 37
- 22
libs/juce/source/modules/juce_core/text/juce_CharacterFunctions.h View File

@@ -391,18 +391,28 @@ public:
dest.writeNull();
}
/** Compares two characters, returning -1, 0 or 1 to indicate their ordering. */
static inline int compare (juce_wchar char1, juce_wchar char2) noexcept
{
    const int difference = static_cast<int> (char1) - static_cast<int> (char2);

    if (difference == 0)
        return 0;

    return difference < 0 ? -1 : 1;
}
/** Compares two null-terminated character strings. */
template <typename CharPointerType1, typename CharPointerType2>
static int compare (CharPointerType1 s1, CharPointerType2 s2) noexcept
{
for (;;)
{
const int c1 = (int) s1.getAndAdvance();
const int c2 = (int) s2.getAndAdvance();
const int diff = c1 - c2;
const juce_wchar c1 = s1.getAndAdvance();
if (int diff = compare (c1, s2.getAndAdvance()))
return diff;
if (diff != 0) return diff < 0 ? -1 : 1;
if (c1 == 0) break;
if (c1 == 0)
break;
}
return 0;
@@ -414,31 +424,37 @@ public:
{
while (--maxChars >= 0)
{
const int c1 = (int) s1.getAndAdvance();
const int c2 = (int) s2.getAndAdvance();
const int diff = c1 - c2;
const juce_wchar c1 = s1.getAndAdvance();
if (diff != 0) return diff < 0 ? -1 : 1;
if (c1 == 0) break;
if (int diff = compare (c1, s2.getAndAdvance()))
return diff;
if (c1 == 0)
break;
}
return 0;
}
/** Compares two characters, using a case-independent match. */
static inline int compareIgnoreCase (juce_wchar char1, juce_wchar char2) noexcept
{
    // Identical characters can short-circuit without any case conversion.
    if (char1 == char2)
        return 0;

    return compare (toUpperCase (char1), toUpperCase (char2));
}
/** Compares two null-terminated character strings, using a case-independent match. */
template <typename CharPointerType1, typename CharPointerType2>
static int compareIgnoreCase (CharPointerType1 s1, CharPointerType2 s2) noexcept
{
for (;;)
{
const int c1 = (int) s1.toUpperCase();
const int c2 = (int) s2.toUpperCase();
const int diff = c1 - c2;
const juce_wchar c1 = s1.getAndAdvance();
if (diff != 0) return diff < 0 ? -1 : 1;
if (c1 == 0) break;
if (int diff = compareIgnoreCase (c1, s2.getAndAdvance()))
return diff;
++s1; ++s2;
if (c1 == 0)
break;
}
return 0;
@@ -450,14 +466,13 @@ public:
{
while (--maxChars >= 0)
{
const int c1 = (int) s1.toUpperCase();
const int c2 = (int) s2.toUpperCase();
const int diff = c1 - c2;
const juce_wchar c1 = s1.getAndAdvance();
if (diff != 0) return diff < 0 ? -1 : 1;
if (c1 == 0) break;
if (int diff = compareIgnoreCase (c1, s2.getAndAdvance()))
return diff;
++s1; ++s2;
if (c1 == 0)
break;
}
return 0;


+ 1
- 2
libs/juce/source/modules/juce_gui_basics/menus/juce_PopupMenu.cpp View File

@@ -248,7 +248,7 @@ public:
setAlwaysOnTop (true);
setLookAndFeel (parent != nullptr ? &(parent->getLookAndFeel())
: menu.lookAndFeel);
: menu.lookAndFeel.get());
setOpaque (getLookAndFeel().findColour (PopupMenu::backgroundColourId).isOpaque()
|| ! Desktop::canUseSemiTransparentWindows());
@@ -1234,7 +1234,6 @@ private:
//==============================================================================
PopupMenu::PopupMenu()
: lookAndFeel (nullptr)
{
}


+ 1
- 1
libs/juce/source/modules/juce_gui_basics/menus/juce_PopupMenu.h View File

@@ -614,7 +614,7 @@ private:
friend class MenuBarComponent;
OwnedArray<Item> items;
LookAndFeel* lookAndFeel;
WeakReference<LookAndFeel> lookAndFeel;
Component* createWindow (const Options&, ApplicationCommandManager**) const;
int showWithOptionalCallback (const Options&, ModalComponentManager::Callback*, bool);


Loading…
Cancel
Save