
Added a hasEditor() virtual method to the AudioProcessor class, which you'll need to implement in your plugins so that we can work around non-standard threading behaviour in Wavelab.
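For plugin authors, the practical upshot is that the two editor-related overrides must agree. A minimal sketch of the pairing (MyPluginProcessor and MyPluginEditor are hypothetical names, and all other AudioProcessor overrides are omitted):

// Hypothetical processor with a UI: hasEditor() and createEditor() must be consistent.
class MyPluginProcessor  : public AudioProcessor
{
public:
    bool hasEditor() const                  { return true; }
    AudioProcessorEditor* createEditor()    { return new MyPluginEditor (*this); }

    // ...all other AudioProcessor overrides omitted for brevity...
};

// A processor without a UI pairs them the other way round, exactly as
// AudioGraphIOProcessor does in this commit:
//     bool hasEditor() const                { return false; }
//     AudioProcessorEditor* createEditor()  { return 0; }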

tags/2021-05-28
Julian Storer, 15 years ago
commit 9168728a7e
16 changed files with 1228 additions and 1323 deletions
  1. extras/audio plugins/demo/Source/PluginProcessor.h (+2, -1)
  2. extras/audio plugins/wrapper/VST/juce_VST_Wrapper.cpp (+39, -83)
  3. juce_amalgamated.cpp (+728, -789)
  4. juce_amalgamated.h (+17, -2)
  5. src/audio/audio_file_formats/juce_QuickTimeAudioFormat.cpp (+2, -2)
  6. src/audio/devices/juce_AudioDeviceManager.cpp (+1, -0)
  7. src/audio/plugins/formats/juce_AudioUnitPluginFormat.mm (+6, -0)
  8. src/audio/plugins/formats/juce_VSTPluginFormat.cpp (+1, -1)
  9. src/audio/processors/juce_AudioProcessor.cpp (+3, -0)
  10. src/audio/processors/juce_AudioProcessor.h (+9, -1)
  11. src/audio/processors/juce_AudioProcessorGraph.cpp (+2, -4)
  12. src/audio/processors/juce_AudioProcessorGraph.h (+2, -0)
  13. src/core/juce_StandardHeader.h (+1, -1)
  14. src/native/mac/juce_mac_CoreMidi.cpp (+313, -349)
  15. src/native/windows/juce_win32_WASAPI.cpp (+97, -90)
  16. src/text/juce_String.h (+5, -0)

extras/audio plugins/demo/Source/PluginProcessor.h (+2, -1)

@@ -32,10 +32,11 @@ public:
void processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages);
//==============================================================================
bool hasEditor() const { return true; }
AudioProcessorEditor* createEditor();
//==============================================================================
const String getName() const { return JucePlugin_Name; }
const String getName() const { return JucePlugin_Name; }
int getNumParameters();
float getParameter (int index);


extras/audio plugins/wrapper/VST/juce_VST_Wrapper.cpp (+39, -83)

@@ -352,34 +352,20 @@ public:
void open()
{
JUCE_AUTORELEASEPOOL
if (editorComp == 0)
{
checkWhetherWavelabHasChangedThread();
const MessageManagerLock mmLock;
AudioProcessorEditor* const ed = filter->createEditorIfNeeded();
if (ed != 0)
cEffect.flags |= effFlagsHasEditor;
else
cEffect.flags &= ~effFlagsHasEditor;
filter->editorBeingDeleted (ed);
delete ed;
}
startTimer (1000 / 4);
// Note: most hosts call this on the UI thread, but wavelab doesn't, so be careful in here.
if (filter->hasEditor())
cEffect.flags |= effFlagsHasEditor;
else
cEffect.flags &= ~effFlagsHasEditor;
}
void close()
{
JUCE_AUTORELEASEPOOL
const NonWavelabMMLock mmLock;
jassert (! recursionCheck);
// Note: most hosts call this on the UI thread, but wavelab doesn't, so be careful in here.
stopTimer();
deleteEditor (false);
if (MessageManager::getInstance()->isThisTheMessageThread())
deleteEditor (false);
}
//==============================================================================
@@ -439,8 +425,10 @@ public:
{
result = 1;
}
else if (strcmp (text, "openCloseAnyThread" == 0)
else if (strcmp (text, "openCloseAnyThread") == 0)
{
// This tells Wavelab to use the UI thread to invoke open/close,
// like all other hosts do.
result = -1;
}
@@ -770,48 +758,22 @@ public:
switch (ti->smpteFrameRate)
{
case kVstSmpte24fps:
rate = AudioPlayHead::fps24;
fps = 24.0;
break;
case kVstSmpte25fps:
rate = AudioPlayHead::fps25;
fps = 25.0;
break;
case kVstSmpte2997fps:
rate = AudioPlayHead::fps2997;
fps = 29.97;
break;
case kVstSmpte30fps:
rate = AudioPlayHead::fps30;
fps = 30.0;
break;
case kVstSmpte2997dfps:
rate = AudioPlayHead::fps2997drop;
fps = 29.97;
break;
case kVstSmpte30dfps:
rate = AudioPlayHead::fps30drop;
fps = 30.0;
break;
case kVstSmpteFilm16mm:
case kVstSmpteFilm35mm:
fps = 24.0;
break;
case kVstSmpte239fps: fps = 23.976; break;
case kVstSmpte249fps: fps = 24.976; break;
case kVstSmpte599fps: fps = 59.94; break;
case kVstSmpte60fps: fps = 60; break;
default:
jassertfalse // unknown frame-rate..
case kVstSmpte24fps: rate = AudioPlayHead::fps24; fps = 24.0; break;
case kVstSmpte25fps: rate = AudioPlayHead::fps25; fps = 25.0; break;
case kVstSmpte2997fps: rate = AudioPlayHead::fps2997; fps = 29.97; break;
case kVstSmpte30fps: rate = AudioPlayHead::fps30; fps = 30.0; break;
case kVstSmpte2997dfps: rate = AudioPlayHead::fps2997drop; fps = 29.97; break;
case kVstSmpte30dfps: rate = AudioPlayHead::fps30drop; fps = 30.0; break;
case kVstSmpteFilm16mm:
case kVstSmpteFilm35mm: fps = 24.0; break;
case kVstSmpte239fps: fps = 23.976; break;
case kVstSmpte249fps: fps = 24.976; break;
case kVstSmpte599fps: fps = 59.94; break;
case kVstSmpte60fps: fps = 60; break;
default: jassertfalse; // unknown frame-rate..
}
info.frameRate = rate;
@@ -1068,6 +1030,7 @@ public:
{
recursionCheck = true;
JUCE_AUTORELEASEPOOL
juce_callAnyTimersSynchronously();
for (int i = ComponentPeer::getNumPeers(); --i >= 0;)
@@ -1105,6 +1068,7 @@ public:
void deleteEditor (bool canDeleteLaterIfModal)
{
JUCE_AUTORELEASEPOOL
PopupMenu::dismissAllActiveMenus();
jassert (! recursionCheck);
@@ -1161,10 +1125,12 @@ public:
}
else if (opCode == effEditOpen)
{
checkWhetherWavelabHasChangedThread();
checkWhetherMessageThreadIsCorrect();
const MessageManagerLock mmLock;
jassert (! recursionCheck);
startTimer (1000 / 4); // performs misc housekeeping chores
deleteEditor (true);
createEditorComp();
@@ -1191,14 +1157,14 @@ public:
}
else if (opCode == effEditClose)
{
checkWhetherWavelabHasChangedThread();
checkWhetherMessageThreadIsCorrect();
const MessageManagerLock mmLock;
deleteEditor (true);
return 0;
}
else if (opCode == effEditGetRect)
{
checkWhetherWavelabHasChangedThread();
checkWhetherMessageThreadIsCorrect();
const MessageManagerLock mmLock;
createEditorComp();
@@ -1436,18 +1402,9 @@ private:
bool shouldDeleteEditor;
//==============================================================================
#if JUCE_WINDOWS // Workarounds for Wavelab's happy-go-lucky use of threads.
class NonWavelabMMLock
{
public:
NonWavelabMMLock() : mm (getHostType().isWavelab() || getHostType().isCubaseBridged() ? 0 : new MessageManagerLock()) {}
~NonWavelabMMLock() {}
private:
ScopedPointer <MessageManagerLock> mm;
};
static void checkWhetherWavelabHasChangedThread()
#if JUCE_WINDOWS
// Workarounds for Wavelab's happy-go-lucky use of threads.
static void checkWhetherMessageThreadIsCorrect()
{
if (getHostType().isWavelab() || getHostType().isCubaseBridged())
{
@@ -1476,8 +1433,7 @@ private:
}
}
#else
typedef MessageManagerLock NonWavelabMMLock;
static void checkWhetherWavelabHasChangedThread() {}
static void checkWhetherMessageThreadIsCorrect() {}
#endif
//==============================================================================
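To make the intent of the open()/close() hunks above explicit: the wrapper no longer has to construct and immediately destroy an editor just to decide whether to advertise effFlagsHasEditor, which is what went wrong when Wavelab called open() off the message thread. A simplified before/after sketch of that hunk:

// Before: an editor was created (under a MessageManagerLock) purely to see
// whether one exists, then deleted again, which is unsafe off the message thread.
//   AudioProcessorEditor* const ed = filter->createEditorIfNeeded();
//   if (ed != 0)  cEffect.flags |= effFlagsHasEditor;
//   else          cEffect.flags &= ~effFlagsHasEditor;
//   filter->editorBeingDeleted (ed);
//   delete ed;

// After: the flag is a simple query, safe on any thread.
if (filter->hasEditor())
    cEffect.flags |= effFlagsHasEditor;
else
    cEffect.flags &= ~effFlagsHasEditor;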


juce_amalgamated.cpp (+728, -789): file diff suppressed because it is too large


juce_amalgamated.h (+17, -2)

@@ -64,7 +64,7 @@
*/
#define JUCE_MAJOR_VERSION 1
#define JUCE_MINOR_VERSION 52
#define JUCE_BUILDNUMBER 72
#define JUCE_BUILDNUMBER 73

/** Current Juce version number.

@@ -2594,6 +2594,11 @@ private:

void createInternal (const juce_wchar* text, size_t numChars);
void appendInternal (const juce_wchar* text, int numExtraChars);

// This private cast operator should prevent strings being accidentally cast
// to bools (this is possible because the compiler can add an implicit cast
// via a const char*)
operator bool() const throw() { return false; }
};

/** Concatenates two strings. */
@@ -39875,7 +39880,8 @@ public:
a generic UI that lets the user twiddle the parameters directly.

If you do want to pass back a component, the component should be created and set to
the correct size before returning it.
the correct size before returning it. If you implement this method, you must
also implement the hasEditor() method and make it return true.

Remember not to do anything silly like allowing your filter to keep a pointer to
the component that gets created - it could be deleted later without any warning, which
@@ -39892,9 +39898,16 @@ public:
not open one at all. Your filter mustn't rely on it being there.
- An editor object may be deleted and a replacement one created again at any time.
- It's safe to assume that an editor will be deleted before its filter.

@see hasEditor
*/
virtual AudioProcessorEditor* createEditor() = 0;

/** Your filter must override this and return true if it can create an editor component.
@see createEditor
*/
virtual bool hasEditor() const = 0;

/** Returns the active editor, if there is one.

Bear in mind this can return 0, even if an editor has previously been
@@ -42011,6 +42024,7 @@ public:
bool acceptsMidi() const;
bool producesMidi() const;

bool hasEditor() const;
AudioProcessorEditor* createEditor();

int getNumParameters();
@@ -42057,6 +42071,7 @@ public:
bool acceptsMidi() const;
bool producesMidi() const;

bool hasEditor() const { return false; }
AudioProcessorEditor* createEditor() { return 0; }

int getNumParameters() { return 0; }


src/audio/audio_file_formats/juce_QuickTimeAudioFormat.cpp (+2, -2)

@@ -43,7 +43,7 @@
/* If you've got an include error here, you probably need to install the QuickTime SDK and
add its header directory to your include path.
Alternatively, if you don't need any QuickTime services, just turn off the JUC_QUICKTIME
Alternatively, if you don't need any QuickTime services, just turn off the JUCE_QUICKTIME
flag in juce_Config.h
*/
#include <Movies.h>
@@ -70,7 +70,7 @@ BEGIN_JUCE_NAMESPACE
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle);
static const char* const quickTimeFormatName = "QuickTime file";
static const char* const quickTimeExtensions[] = { ".mov", ".mp3", ".mp4", 0 };
static const char* const quickTimeExtensions[] = { ".mov", ".mp3", ".mp4", ".m4a", 0 };
//==============================================================================
class QTAudioReader : public AudioFormatReader


src/audio/devices/juce_AudioDeviceManager.cpp (+1, -0)

@@ -31,6 +31,7 @@ BEGIN_JUCE_NAMESPACE
#include "../../gui/components/juce_Desktop.h"
#include "../../text/juce_LocalisedStrings.h"
#include "../dsp/juce_AudioSampleBuffer.h"
#include "../../core/juce_SystemStats.h"
//==============================================================================


src/audio/plugins/formats/juce_AudioUnitPluginFormat.mm (+6, -0)

@@ -231,6 +231,7 @@ public:
void processBlock (AudioSampleBuffer& buffer,
MidiBuffer& midiMessages);
bool hasEditor() const;
AudioProcessorEditor* createEditor();
const String getInputChannelName (int index) const;
@@ -1087,6 +1088,11 @@ private:
#endif
//==============================================================================
bool AudioUnitPluginInstance::hasEditor() const
{
return true;
}
AudioProcessorEditor* AudioUnitPluginInstance::createEditor()
{
ScopedPointer<AudioProcessorEditor> w (new AudioUnitPluginWindowCocoa (*this, false));


src/audio/plugins/formats/juce_VSTPluginFormat.cpp (+1, -1)

@@ -729,6 +729,7 @@ public:
void processBlock (AudioSampleBuffer& buffer,
MidiBuffer& midiMessages);
bool hasEditor() const { return effect != 0 && (effect->flags & effFlagsHasEditor) != 0; }
AudioProcessorEditor* createEditor();
const String getInputChannelName (int index) const;
@@ -805,7 +806,6 @@ private:
const String getVersion() const;
const String getCategory() const;
bool hasEditor() const throw() { return effect != 0 && (effect->flags & effFlagsHasEditor) != 0; }
void setPower (const bool on);
VSTPluginInstance (const ReferenceCountedObjectPtr <ModuleHandle>& module);


src/audio/processors/juce_AudioProcessor.cpp (+3, -0)

@@ -231,6 +231,9 @@ AudioProcessorEditor* AudioProcessor::createEditorIfNeeded()
AudioProcessorEditor* const ed = createEditor();
// You must make your hasEditor() method return a consistent result!
jassert (hasEditor() == (ed != 0));
if (ed != 0)
{
// you must give your editor comp a size before returning it..
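The new assertion above is what catches an inconsistent pair in debug builds. A hypothetical example of the mistake it is aimed at (MyEditor is a placeholder name):

// Broken pairing: createEditor() returns a component but hasEditor() was
// never updated, so jassert (hasEditor() == (ed != 0)) will fire.
bool hasEditor() const                  { return false; }                 // forgot to change this to true
AudioProcessorEditor* createEditor()    { return new MyEditor (*this); }  // MyEditor is hypothetical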


src/audio/processors/juce_AudioProcessor.h (+9, -1)

@@ -318,7 +318,8 @@ public:
a generic UI that lets the user twiddle the parameters directly.
If you do want to pass back a component, the component should be created and set to
the correct size before returning it.
the correct size before returning it. If you implement this method, you must
also implement the hasEditor() method and make it return true.
Remember not to do anything silly like allowing your filter to keep a pointer to
the component that gets created - it could be deleted later without any warning, which
@@ -335,9 +336,16 @@ public:
not open one at all. Your filter mustn't rely on it being there.
- An editor object may be deleted and a replacement one created again at any time.
- It's safe to assume that an editor will be deleted before its filter.
@see hasEditor
*/
virtual AudioProcessorEditor* createEditor() = 0;
/** Your filter must override this and return true if it can create an editor component.
@see createEditor
*/
virtual bool hasEditor() const = 0;
//==============================================================================
/** Returns the active editor, if there is one.


src/audio/processors/juce_AudioProcessorGraph.cpp (+2, -4)

@@ -1276,10 +1276,8 @@ bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const
return type == audioOutputNode || type == midiOutputNode;
}
AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor()
{
return 0;
}
bool AudioProcessorGraph::AudioGraphIOProcessor::hasEditor() const { return false; }
AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor() { return 0; }
int AudioProcessorGraph::AudioGraphIOProcessor::getNumParameters() { return 0; }
const String AudioProcessorGraph::AudioGraphIOProcessor::getParameterName (int) { return String::empty; }


src/audio/processors/juce_AudioProcessorGraph.h (+2, -0)

@@ -335,6 +335,7 @@ public:
bool acceptsMidi() const;
bool producesMidi() const;
bool hasEditor() const;
AudioProcessorEditor* createEditor();
int getNumParameters();
@@ -382,6 +383,7 @@ public:
bool acceptsMidi() const;
bool producesMidi() const;
bool hasEditor() const { return false; }
AudioProcessorEditor* createEditor() { return 0; }
int getNumParameters() { return 0; }


src/core/juce_StandardHeader.h (+1, -1)

@@ -33,7 +33,7 @@
*/
#define JUCE_MAJOR_VERSION 1
#define JUCE_MINOR_VERSION 52
#define JUCE_BUILDNUMBER 72
#define JUCE_BUILDNUMBER 73
/** Current Juce version number.


src/native/mac/juce_mac_CoreMidi.cpp (+313, -349)

@@ -30,153 +30,315 @@
#if JUCE_MAC
//==============================================================================
#undef log
#define log(a) Logger::writeToLog(a)
static bool logAnyErrorsMidi (const OSStatus err, const int lineNum)
namespace CoreMidiHelpers
{
if (err == noErr)
return true;
static bool logError (const OSStatus err, const int lineNum)
{
if (err == noErr)
return true;
log ("CoreMidi error: " + String (lineNum) + " - " + String::toHexString ((int) err));
jassertfalse;
return false;
}
Logger::writeToLog ("CoreMidi error: " + String (lineNum) + " - " + String::toHexString ((int) err));
jassertfalse;
return false;
}
#undef OK
#define OK(a) logAnyErrorsMidi(a, __LINE__)
#undef CHECK_ERROR
#define CHECK_ERROR(a) CoreMidiHelpers::logError (a, __LINE__)
//==============================================================================
static const String getEndpointName (MIDIEndpointRef endpoint, bool isExternal)
{
String result;
CFStringRef str = 0;
//==============================================================================
static const String getEndpointName (MIDIEndpointRef endpoint, bool isExternal)
{
String result;
CFStringRef str = 0;
MIDIObjectGetStringProperty (endpoint, kMIDIPropertyName, &str);
MIDIObjectGetStringProperty (endpoint, kMIDIPropertyName, &str);
if (str != 0)
{
result = PlatformUtilities::cfStringToJuceString (str);
CFRelease (str);
str = 0;
}
if (str != 0)
{
result = PlatformUtilities::cfStringToJuceString (str);
CFRelease (str);
str = 0;
}
MIDIEntityRef entity = 0;
MIDIEndpointGetEntity (endpoint, &entity);
MIDIEntityRef entity = 0;
MIDIEndpointGetEntity (endpoint, &entity);
if (entity == 0)
return result; // probably virtual
if (entity == 0)
return result; // probably virtual
if (result.isEmpty())
{
// endpoint name has zero length - try the entity
MIDIObjectGetStringProperty (entity, kMIDIPropertyName, &str);
if (result.isEmpty())
{
// endpoint name has zero length - try the entity
MIDIObjectGetStringProperty (entity, kMIDIPropertyName, &str);
if (str != 0)
{
result += PlatformUtilities::cfStringToJuceString (str);
CFRelease (str);
str = 0;
}
}
// now consider the device's name
MIDIDeviceRef device = 0;
MIDIEntityGetDevice (entity, &device);
if (device == 0)
return result;
MIDIObjectGetStringProperty (device, kMIDIPropertyName, &str);
if (str != 0)
{
result += PlatformUtilities::cfStringToJuceString (str);
const String s (PlatformUtilities::cfStringToJuceString (str));
CFRelease (str);
str = 0;
// if an external device has only one entity, throw away
// the endpoint name and just use the device name
if (isExternal && MIDIDeviceGetNumberOfEntities (device) < 2)
{
result = s;
}
else if (! result.startsWithIgnoreCase (s))
{
// prepend the device name to the entity name
result = (s + " " + result).trimEnd();
}
}
}
// now consider the device's name
MIDIDeviceRef device = 0;
MIDIEntityGetDevice (entity, &device);
if (device == 0)
return result;
}
MIDIObjectGetStringProperty (device, kMIDIPropertyName, &str);
if (str != 0)
static const String getConnectedEndpointName (MIDIEndpointRef endpoint)
{
const String s (PlatformUtilities::cfStringToJuceString (str));
CFRelease (str);
String result;
// Does the endpoint have connections?
CFDataRef connections = 0;
int numConnections = 0;
MIDIObjectGetDataProperty (endpoint, kMIDIPropertyConnectionUniqueID, &connections);
// if an external device has only one entity, throw away
// the endpoint name and just use the device name
if (isExternal && MIDIDeviceGetNumberOfEntities (device) < 2)
if (connections != 0)
{
result = s;
numConnections = (int) (CFDataGetLength (connections) / sizeof (MIDIUniqueID));
if (numConnections > 0)
{
const SInt32* pid = reinterpret_cast <const SInt32*> (CFDataGetBytePtr (connections));
for (int i = 0; i < numConnections; ++i, ++pid)
{
MIDIUniqueID uid = EndianS32_BtoN (*pid);
MIDIObjectRef connObject;
MIDIObjectType connObjectType;
OSStatus err = MIDIObjectFindByUniqueID (uid, &connObject, &connObjectType);
if (err == noErr)
{
String s;
if (connObjectType == kMIDIObjectType_ExternalSource
|| connObjectType == kMIDIObjectType_ExternalDestination)
{
// Connected to an external device's endpoint (10.3 and later).
s = getEndpointName (static_cast <MIDIEndpointRef> (connObject), true);
}
else
{
// Connected to an external device (10.2) (or something else, catch-all)
CFStringRef str = 0;
MIDIObjectGetStringProperty (connObject, kMIDIPropertyName, &str);
if (str != 0)
{
s = PlatformUtilities::cfStringToJuceString (str);
CFRelease (str);
}
}
if (s.isNotEmpty())
{
if (result.isNotEmpty())
result += ", ";
result += s;
}
}
}
}
CFRelease (connections);
}
else if (! result.startsWithIgnoreCase (s))
if (result.isNotEmpty())
return result;
// Here, either the endpoint had no connections, or we failed to obtain names for any of them.
return getEndpointName (endpoint, false);
}
static MIDIClientRef getGlobalMidiClient()
{
static MIDIClientRef globalMidiClient = 0;
if (globalMidiClient == 0)
{
// prepend the device name to the entity name
result = (s + " " + result).trimEnd();
String name ("JUCE");
if (JUCEApplication::getInstance() != 0)
name = JUCEApplication::getInstance()->getApplicationName();
CFStringRef appName = PlatformUtilities::juceStringToCFString (name);
CHECK_ERROR (MIDIClientCreate (appName, 0, 0, &globalMidiClient));
CFRelease (appName);
}
return globalMidiClient;
}
return result;
}
//==============================================================================
class MidiPortAndEndpoint
{
public:
MidiPortAndEndpoint (MIDIPortRef port_, MIDIEndpointRef endPoint_)
: port (port_), endPoint (endPoint_)
{
}
static const String getConnectedEndpointName (MIDIEndpointRef endpoint)
{
String result;
~MidiPortAndEndpoint()
{
if (port != 0)
MIDIPortDispose (port);
if (port == 0 && endPoint != 0) // if port == 0, it means we created the endpoint, so it's safe to delete it
MIDIEndpointDispose (endPoint);
}
MIDIPortRef port;
MIDIEndpointRef endPoint;
};
//==============================================================================
class MidiPortAndCallback
{
public:
MidiInput* input;
MidiPortAndEndpoint* portAndEndpoint;
MidiInputCallback* callback;
MemoryBlock pendingData;
int pendingBytes;
double pendingDataTime;
bool active;
void processSysex (const uint8*& d, int& size, const double time)
{
if (*d == 0xf0)
{
pendingBytes = 0;
pendingDataTime = time;
}
pendingData.ensureSize (pendingBytes + size, false);
uint8* totalMessage = (uint8*) pendingData.getData();
uint8* dest = totalMessage + pendingBytes;
while (size > 0)
{
if (pendingBytes > 0 && *d >= 0x80)
{
if (*d >= 0xfa || *d == 0xf8)
{
callback->handleIncomingMidiMessage (input, MidiMessage (*d, time));
++d;
--size;
}
else
{
if (*d == 0xf7)
{
*dest++ = *d++;
pendingBytes++;
--size;
}
break;
}
}
else
{
*dest++ = *d++;
pendingBytes++;
--size;
}
}
// Does the endpoint have connections?
CFDataRef connections = 0;
int numConnections = 0;
if (totalMessage [pendingBytes - 1] == 0xf7)
{
callback->handleIncomingMidiMessage (input, MidiMessage (totalMessage, pendingBytes, pendingDataTime));
pendingBytes = 0;
}
else
{
callback->handlePartialSysexMessage (input, totalMessage, pendingBytes, pendingDataTime);
}
}
};
MIDIObjectGetDataProperty (endpoint, kMIDIPropertyConnectionUniqueID, &connections);
static CriticalSection callbackLock;
static Array<void*> activeCallbacks;
if (connections != 0)
static void midiInputProc (const MIDIPacketList* pktlist,
void* readProcRefCon,
void* /*srcConnRefCon*/)
{
numConnections = (int) (CFDataGetLength (connections) / sizeof (MIDIUniqueID));
double time = Time::getMillisecondCounterHiRes() * 0.001;
const double originalTime = time;
if (numConnections > 0)
MidiPortAndCallback* const mpc = (MidiPortAndCallback*) readProcRefCon;
const ScopedLock sl (CoreMidiHelpers::callbackLock);
if (CoreMidiHelpers::activeCallbacks.contains (mpc) && mpc->active)
{
const SInt32* pid = reinterpret_cast <const SInt32*> (CFDataGetBytePtr (connections));
const MIDIPacket* packet = &pktlist->packet[0];
for (int i = 0; i < numConnections; ++i, ++pid)
for (unsigned int i = 0; i < pktlist->numPackets; ++i)
{
MIDIUniqueID uid = EndianS32_BtoN (*pid);
MIDIObjectRef connObject;
MIDIObjectType connObjectType;
OSStatus err = MIDIObjectFindByUniqueID (uid, &connObject, &connObjectType);
const uint8* d = (const uint8*) (packet->data);
int size = packet->length;
if (err == noErr)
while (size > 0)
{
String s;
time = originalTime;
if (connObjectType == kMIDIObjectType_ExternalSource
|| connObjectType == kMIDIObjectType_ExternalDestination)
if (mpc->pendingBytes > 0 || d[0] == 0xf0)
{
// Connected to an external device's endpoint (10.3 and later).
s = getEndpointName (static_cast <MIDIEndpointRef> (connObject), true);
mpc->processSysex (d, size, time);
}
else
{
// Connected to an external device (10.2) (or something else, catch-all)
CFStringRef str = 0;
MIDIObjectGetStringProperty (connObject, kMIDIPropertyName, &str);
int used = 0;
const MidiMessage m (d, size, used, 0, time);
if (str != 0)
if (used <= 0)
{
s = PlatformUtilities::cfStringToJuceString (str);
CFRelease (str);
jassertfalse; // malformed midi message
break;
}
else
{
mpc->callback->handleIncomingMidiMessage (mpc->input, m);
}
}
if (s.isNotEmpty())
{
if (result.isNotEmpty())
result += ", ";
result += s;
size -= used;
d += used;
}
}
packet = MIDIPacketNext (packet);
}
}
CFRelease (connections);
}
if (result.isNotEmpty())
return result;
// Here, either the endpoint had no connections, or we failed to obtain names for any of them.
return getEndpointName (endpoint, false);
}
//==============================================================================
@@ -191,7 +353,7 @@ const StringArray MidiOutput::getDevices()
if (dest != 0)
{
String name (getConnectedEndpointName (dest));
String name (CoreMidiHelpers::getConnectedEndpointName (dest));
if (name.isEmpty())
name = "<error>";
@@ -212,48 +374,6 @@ int MidiOutput::getDefaultDeviceIndex()
return 0;
}
static MIDIClientRef globalMidiClient;
static bool hasGlobalClientBeenCreated = false;
static bool makeSureClientExists()
{
if (! hasGlobalClientBeenCreated)
{
String name ("JUCE");
if (JUCEApplication::getInstance() != 0)
name = JUCEApplication::getInstance()->getApplicationName();
CFStringRef appName = PlatformUtilities::juceStringToCFString (name);
hasGlobalClientBeenCreated = OK (MIDIClientCreate (appName, 0, 0, &globalMidiClient));
CFRelease (appName);
}
return hasGlobalClientBeenCreated;
}
class MidiPortAndEndpoint
{
public:
MidiPortAndEndpoint (MIDIPortRef port_, MIDIEndpointRef endPoint_)
: port (port_), endPoint (endPoint_)
{
}
~MidiPortAndEndpoint()
{
if (port != 0)
MIDIPortDispose (port);
if (port == 0 && endPoint != 0) // if port == 0, it means we created the endpoint, so it's safe to delete it
MIDIEndpointDispose (endPoint);
}
MIDIPortRef port;
MIDIEndpointRef endPoint;
};
MidiOutput* MidiOutput::openDevice (int index)
{
MidiOutput* mo = 0;
@@ -263,19 +383,15 @@ MidiOutput* MidiOutput::openDevice (int index)
MIDIEndpointRef endPoint = MIDIGetDestination (index);
CFStringRef pname;
if (OK (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &pname)))
if (CHECK_ERROR (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &pname)))
{
log ("CoreMidi - opening out: " + PlatformUtilities::cfStringToJuceString (pname));
MIDIClientRef client = CoreMidiHelpers::getGlobalMidiClient();
MIDIPortRef port;
if (makeSureClientExists())
if (client != 0 && CHECK_ERROR (MIDIOutputPortCreate (client, pname, &port)))
{
MIDIPortRef port;
if (OK (MIDIOutputPortCreate (globalMidiClient, pname, &port)))
{
mo = new MidiOutput();
mo->internal = new MidiPortAndEndpoint (port, endPoint);
}
mo = new MidiOutput();
mo->internal = new CoreMidiHelpers::MidiPortAndEndpoint (port, endPoint);
}
CFRelease (pname);
@@ -288,27 +404,24 @@ MidiOutput* MidiOutput::openDevice (int index)
MidiOutput* MidiOutput::createNewDevice (const String& deviceName)
{
MidiOutput* mo = 0;
MIDIClientRef client = CoreMidiHelpers::getGlobalMidiClient();
if (makeSureClientExists())
{
MIDIEndpointRef endPoint;
CFStringRef name = PlatformUtilities::juceStringToCFString (deviceName);
if (OK (MIDISourceCreate (globalMidiClient, name, &endPoint)))
{
mo = new MidiOutput();
mo->internal = new MidiPortAndEndpoint (0, endPoint);
}
MIDIEndpointRef endPoint;
CFStringRef name = PlatformUtilities::juceStringToCFString (deviceName);
CFRelease (name);
if (client != 0 && CHECK_ERROR (MIDISourceCreate (client, name, &endPoint)))
{
mo = new MidiOutput();
mo->internal = new CoreMidiHelpers::MidiPortAndEndpoint (0, endPoint);
}
CFRelease (name);
return mo;
}
MidiOutput::~MidiOutput()
{
delete static_cast<MidiPortAndEndpoint*> (internal);
delete static_cast<CoreMidiHelpers::MidiPortAndEndpoint*> (internal);
}
void MidiOutput::reset()
@@ -326,7 +439,7 @@ void MidiOutput::setVolume (float /*leftVol*/, float /*rightVol*/)
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
MidiPortAndEndpoint* const mpe = static_cast<MidiPortAndEndpoint*> (internal);
CoreMidiHelpers::MidiPortAndEndpoint* const mpe = static_cast<CoreMidiHelpers::MidiPortAndEndpoint*> (internal);
if (message.isSysEx())
{
@@ -381,7 +494,7 @@ const StringArray MidiInput::getDevices()
if (source != 0)
{
String name (getConnectedEndpointName (source));
String name (CoreMidiHelpers::getConnectedEndpointName (source));
if (name.isEmpty())
name = "<error>";
@@ -402,132 +515,9 @@ int MidiInput::getDefaultDeviceIndex()
return 0;
}
//==============================================================================
struct MidiPortAndCallback
{
MidiInput* input;
MidiPortAndEndpoint* portAndEndpoint;
MidiInputCallback* callback;
MemoryBlock pendingData;
int pendingBytes;
double pendingDataTime;
bool active;
void processSysex (const uint8*& d, int& size, const double time)
{
if (*d == 0xf0)
{
pendingBytes = 0;
pendingDataTime = time;
}
pendingData.ensureSize (pendingBytes + size, false);
uint8* totalMessage = (uint8*) pendingData.getData();
uint8* dest = totalMessage + pendingBytes;
while (size > 0)
{
if (pendingBytes > 0 && *d >= 0x80)
{
if (*d >= 0xfa || *d == 0xf8)
{
callback->handleIncomingMidiMessage (input, MidiMessage (*d, time));
++d;
--size;
}
else
{
if (*d == 0xf7)
{
*dest++ = *d++;
pendingBytes++;
--size;
}
break;
}
}
else
{
*dest++ = *d++;
pendingBytes++;
--size;
}
}
if (totalMessage [pendingBytes - 1] == 0xf7)
{
callback->handleIncomingMidiMessage (input, MidiMessage (totalMessage, pendingBytes, pendingDataTime));
pendingBytes = 0;
}
else
{
callback->handlePartialSysexMessage (input, totalMessage, pendingBytes, pendingDataTime);
}
}
};
namespace CoreMidiCallbacks
{
static CriticalSection callbackLock;
static Array<void*> activeCallbacks;
}
static void midiInputProc (const MIDIPacketList* pktlist,
void* readProcRefCon,
void* /*srcConnRefCon*/)
{
double time = Time::getMillisecondCounterHiRes() * 0.001;
const double originalTime = time;
MidiPortAndCallback* const mpc = (MidiPortAndCallback*) readProcRefCon;
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
if (CoreMidiCallbacks::activeCallbacks.contains (mpc) && mpc->active)
{
const MIDIPacket* packet = &pktlist->packet[0];
for (unsigned int i = 0; i < pktlist->numPackets; ++i)
{
const uint8* d = (const uint8*) (packet->data);
int size = packet->length;
while (size > 0)
{
time = originalTime;
if (mpc->pendingBytes > 0 || d[0] == 0xf0)
{
mpc->processSysex (d, size, time);
}
else
{
int used = 0;
const MidiMessage m (d, size, used, 0, time);
if (used <= 0)
{
jassertfalse; // malformed midi message
break;
}
else
{
mpc->callback->handleIncomingMidiMessage (mpc->input, m);
}
size -= used;
d += used;
}
}
packet = MIDIPacketNext (packet);
}
}
}
MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
{
using namespace CoreMidiHelpers;
MidiInput* mi = 0;
if (((unsigned int) index) < (unsigned int) MIDIGetNumberOfSources())
@@ -538,20 +528,20 @@ MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
{
CFStringRef pname;
if (OK (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &pname)))
if (CHECK_ERROR (MIDIObjectGetStringProperty (endPoint, kMIDIPropertyName, &pname)))
{
log ("CoreMidi - opening inp: " + PlatformUtilities::cfStringToJuceString (pname));
MIDIClientRef client = getGlobalMidiClient();
if (makeSureClientExists())
if (client != 0)
{
MIDIPortRef port;
ScopedPointer <MidiPortAndCallback> mpc (new MidiPortAndCallback());
mpc->active = false;
if (OK (MIDIInputPortCreate (globalMidiClient, pname, midiInputProc, mpc, &port)))
if (CHECK_ERROR (MIDIInputPortCreate (client, pname, midiInputProc, mpc, &port)))
{
if (OK (MIDIPortConnectSource (port, endPoint, 0)))
if (CHECK_ERROR (MIDIPortConnectSource (port, endPoint, 0)))
{
mpc->portAndEndpoint = new MidiPortAndEndpoint (port, endPoint);
mpc->callback = callback;
@@ -562,12 +552,12 @@ MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
mpc->input = mi;
mi->internal = mpc;
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
CoreMidiCallbacks::activeCallbacks.add (mpc.release());
const ScopedLock sl (callbackLock);
activeCallbacks.add (mpc.release());
}
else
{
OK (MIDIPortDispose (port));
CHECK_ERROR (MIDIPortDispose (port));
}
}
}
@@ -582,16 +572,18 @@ MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
MidiInput* MidiInput::createNewDevice (const String& deviceName, MidiInputCallback* callback)
{
using namespace CoreMidiHelpers;
MidiInput* mi = 0;
MIDIClientRef client = getGlobalMidiClient();
if (makeSureClientExists())
if (client != 0)
{
ScopedPointer <MidiPortAndCallback> mpc (new MidiPortAndCallback());
mpc->active = false;
MIDIEndpointRef endPoint;
CFStringRef name = PlatformUtilities::juceStringToCFString(deviceName);
if (OK (MIDIDestinationCreate (globalMidiClient, name, midiInputProc, mpc, &endPoint)))
if (CHECK_ERROR (MIDIDestinationCreate (client, name, midiInputProc, mpc, &endPoint)))
{
mpc->portAndEndpoint = new MidiPortAndEndpoint (0, endPoint);
mpc->callback = callback;
@@ -602,8 +594,8 @@ MidiInput* MidiInput::createNewDevice (const String& deviceName, MidiInputCallba
mpc->input = mi;
mi->internal = mpc;
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
CoreMidiCallbacks::activeCallbacks.add (mpc.release());
const ScopedLock sl (callbackLock);
activeCallbacks.add (mpc.release());
}
CFRelease (name);
@@ -619,16 +611,18 @@ MidiInput::MidiInput (const String& name_)
MidiInput::~MidiInput()
{
using namespace CoreMidiHelpers;
MidiPortAndCallback* const mpc = static_cast<MidiPortAndCallback*> (internal);
mpc->active = false;
{
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
CoreMidiCallbacks::activeCallbacks.removeValue (mpc);
const ScopedLock sl (callbackLock);
activeCallbacks.removeValue (mpc);
}
if (mpc->portAndEndpoint->port != 0)
OK (MIDIPortDisconnectSource (mpc->portAndEndpoint->port, mpc->portAndEndpoint->endPoint));
CHECK_ERROR (MIDIPortDisconnectSource (mpc->portAndEndpoint->port, mpc->portAndEndpoint->endPoint));
delete mpc->portAndEndpoint;
delete mpc;
@@ -636,61 +630,31 @@ MidiInput::~MidiInput()
void MidiInput::start()
{
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
static_cast<MidiPortAndCallback*> (internal)->active = true;
const ScopedLock sl (CoreMidiHelpers::callbackLock);
static_cast<CoreMidiHelpers::MidiPortAndCallback*> (internal)->active = true;
}
void MidiInput::stop()
{
const ScopedLock sl (CoreMidiCallbacks::callbackLock);
static_cast<MidiPortAndCallback*> (internal)->active = false;
}
#undef log
#else
MidiOutput::~MidiOutput()
{
}
void MidiOutput::reset()
{
}
bool MidiOutput::getVolume (float& /*leftVol*/, float& /*rightVol*/)
{
return false;
}
void MidiOutput::setVolume (float /*leftVol*/, float /*rightVol*/)
{
}
void MidiOutput::sendMessageNow (const MidiMessage& message)
{
}
const StringArray MidiOutput::getDevices()
{
return StringArray();
}
MidiOutput* MidiOutput::openDevice (int index)
{
return 0;
const ScopedLock sl (CoreMidiHelpers::callbackLock);
static_cast<CoreMidiHelpers::MidiPortAndCallback*> (internal)->active = false;
}
const StringArray MidiInput::getDevices()
{
return StringArray();
}
#undef CHECK_ERROR
MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback)
{
return 0;
}
//==============================================================================
#else // Stubs for iOS...
MidiOutput::~MidiOutput() {}
void MidiOutput::reset() {}
bool MidiOutput::getVolume (float& /*leftVol*/, float& /*rightVol*/) { return false; }
void MidiOutput::setVolume (float /*leftVol*/, float /*rightVol*/) {}
void MidiOutput::sendMessageNow (const MidiMessage& message) {}
const StringArray MidiOutput::getDevices() { return StringArray(); }
MidiOutput* MidiOutput::openDevice (int index) { return 0; }
const StringArray MidiInput::getDevices() { return StringArray(); }
MidiInput* MidiInput::openDevice (int index, MidiInputCallback* callback) { return 0; }
#endif
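For context, a minimal sketch of the client code that ends up driving midiInputProc() and processSysex() above; the callback class and device index are placeholders, and error handling is omitted:

// Hypothetical consumer of the CoreMIDI input path shown above.
class SimpleMidiLogger  : public MidiInputCallback
{
public:
    void handleIncomingMidiMessage (MidiInput* /*source*/, const MidiMessage& message)
    {
        Logger::writeToLog ("MIDI in: " + String (message.getRawDataSize()) + " bytes");
    }
};

// Opening a device routes incoming packets through midiInputProc():
//   SimpleMidiLogger logger;
//   MidiInput* const input = MidiInput::openDevice (0, &logger);
//   if (input != 0)
//       input->start();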


src/native/windows/juce_win32_WASAPI.cpp (+97, -90)

@@ -27,67 +27,74 @@
// compiled on its own).
#if JUCE_INCLUDED_FILE && JUCE_WASAPI
#ifndef WASAPI_ENABLE_LOGGING
#define WASAPI_ENABLE_LOGGING 1
#endif
//==============================================================================
#if 1
namespace WasapiClasses
{
const String getAudioErrorDesc (HRESULT hr)
static void logFailure (HRESULT hr)
{
const char* e = 0;
(void) hr;
switch (hr)
#if WASAPI_ENABLE_LOGGING
if (FAILED (hr))
{
case E_POINTER: e = "E_POINTER"; break;
case E_INVALIDARG: e = "E_INVALIDARG"; break;
case AUDCLNT_E_NOT_INITIALIZED: e = "AUDCLNT_E_NOT_INITIALIZED"; break;
case AUDCLNT_E_ALREADY_INITIALIZED: e = "AUDCLNT_E_ALREADY_INITIALIZED"; break;
case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e = "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
case AUDCLNT_E_DEVICE_INVALIDATED: e = "AUDCLNT_E_DEVICE_INVALIDATED"; break;
case AUDCLNT_E_NOT_STOPPED: e = "AUDCLNT_E_NOT_STOPPED"; break;
case AUDCLNT_E_BUFFER_TOO_LARGE: e = "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
case AUDCLNT_E_OUT_OF_ORDER: e = "AUDCLNT_E_OUT_OF_ORDER"; break;
case AUDCLNT_E_UNSUPPORTED_FORMAT: e = "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
case AUDCLNT_E_INVALID_SIZE: e = "AUDCLNT_E_INVALID_SIZE"; break;
case AUDCLNT_E_DEVICE_IN_USE: e = "AUDCLNT_E_DEVICE_IN_USE"; break;
case AUDCLNT_E_BUFFER_OPERATION_PENDING: e = "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
case AUDCLNT_E_THREAD_NOT_REGISTERED: e = "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e = "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e = "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
case AUDCLNT_E_SERVICE_NOT_RUNNING: e = "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e = "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e = "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e = "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
case AUDCLNT_E_EVENTHANDLE_NOT_SET: e = "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e = "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
case AUDCLNT_E_BUFFER_SIZE_ERROR: e = "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
case AUDCLNT_S_BUFFER_EMPTY: e = "AUDCLNT_S_BUFFER_EMPTY"; break;
case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e = "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
default: return String::toHexString ((int) hr);
}
String e;
e << Time::getCurrentTime().toString (true, true, true, true)
<< " -- WASAPI error: ";
return e;
}
switch (hr)
{
case E_POINTER: e << "E_POINTER"; break;
case E_INVALIDARG: e << "E_INVALIDARG"; break;
case AUDCLNT_E_NOT_INITIALIZED: e << "AUDCLNT_E_NOT_INITIALIZED"; break;
case AUDCLNT_E_ALREADY_INITIALIZED: e << "AUDCLNT_E_ALREADY_INITIALIZED"; break;
case AUDCLNT_E_WRONG_ENDPOINT_TYPE: e << "AUDCLNT_E_WRONG_ENDPOINT_TYPE"; break;
case AUDCLNT_E_DEVICE_INVALIDATED: e << "AUDCLNT_E_DEVICE_INVALIDATED"; break;
case AUDCLNT_E_NOT_STOPPED: e << "AUDCLNT_E_NOT_STOPPED"; break;
case AUDCLNT_E_BUFFER_TOO_LARGE: e << "AUDCLNT_E_BUFFER_TOO_LARGE"; break;
case AUDCLNT_E_OUT_OF_ORDER: e << "AUDCLNT_E_OUT_OF_ORDER"; break;
case AUDCLNT_E_UNSUPPORTED_FORMAT: e << "AUDCLNT_E_UNSUPPORTED_FORMAT"; break;
case AUDCLNT_E_INVALID_SIZE: e << "AUDCLNT_E_INVALID_SIZE"; break;
case AUDCLNT_E_DEVICE_IN_USE: e << "AUDCLNT_E_DEVICE_IN_USE"; break;
case AUDCLNT_E_BUFFER_OPERATION_PENDING: e << "AUDCLNT_E_BUFFER_OPERATION_PENDING"; break;
case AUDCLNT_E_THREAD_NOT_REGISTERED: e << "AUDCLNT_E_THREAD_NOT_REGISTERED"; break;
case AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED: e << "AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED"; break;
case AUDCLNT_E_ENDPOINT_CREATE_FAILED: e << "AUDCLNT_E_ENDPOINT_CREATE_FAILED"; break;
case AUDCLNT_E_SERVICE_NOT_RUNNING: e << "AUDCLNT_E_SERVICE_NOT_RUNNING"; break;
case AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED: e << "AUDCLNT_E_EVENTHANDLE_NOT_EXPECTED"; break;
case AUDCLNT_E_EXCLUSIVE_MODE_ONLY: e << "AUDCLNT_E_EXCLUSIVE_MODE_ONLY"; break;
case AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL: e << "AUDCLNT_E_BUFDURATION_PERIOD_NOT_EQUAL"; break;
case AUDCLNT_E_EVENTHANDLE_NOT_SET: e << "AUDCLNT_E_EVENTHANDLE_NOT_SET"; break;
case AUDCLNT_E_INCORRECT_BUFFER_SIZE: e << "AUDCLNT_E_INCORRECT_BUFFER_SIZE"; break;
case AUDCLNT_E_BUFFER_SIZE_ERROR: e << "AUDCLNT_E_BUFFER_SIZE_ERROR"; break;
case AUDCLNT_S_BUFFER_EMPTY: e << "AUDCLNT_S_BUFFER_EMPTY"; break;
case AUDCLNT_S_THREAD_ALREADY_REGISTERED: e << "AUDCLNT_S_THREAD_ALREADY_REGISTERED"; break;
default: e << String::toHexString ((int) hr); break;
}
#define logFailure(hr) { if (FAILED (hr)) { DBG ("WASAPI FAIL! " + getAudioErrorDesc (hr)); jassertfalse; } }
#define OK(a) wasapi_checkResult(a)
DBG (e);
jassertfalse;
}
#endif
}
static bool wasapi_checkResult (HRESULT hr)
static bool check (HRESULT hr)
{
logFailure (hr);
return SUCCEEDED (hr);
}
#else
#define logFailure(hr) {}
#define OK(a) SUCCEEDED(a)
#endif
//==============================================================================
static const String wasapi_getDeviceID (IMMDevice* const device)
static const String getDeviceID (IMMDevice* const device)
{
String s;
WCHAR* deviceId = 0;
if (OK (device->GetId (&deviceId)))
if (check (device->GetId (&deviceId)))
{
s = String (deviceId);
CoTaskMemFree (deviceId);
@@ -96,27 +103,28 @@ static const String wasapi_getDeviceID (IMMDevice* const device)
return s;
}
static EDataFlow wasapi_getDataFlow (IMMDevice* const device)
static EDataFlow getDataFlow (IMMDevice* const device)
{
EDataFlow flow = eRender;
ComSmartPtr <IMMEndpoint> endPoint;
if (OK (device->QueryInterface (__uuidof (IMMEndpoint), (void**) &endPoint)))
(void) OK (endPoint->GetDataFlow (&flow));
if (check (device->QueryInterface (__uuidof (IMMEndpoint), (void**) &endPoint)))
(void) check (endPoint->GetDataFlow (&flow));
return flow;
}
static int wasapi_refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) throw()
static int refTimeToSamples (const REFERENCE_TIME& t, const double sampleRate) throw()
{
return roundDoubleToInt (sampleRate * ((double) t) * 0.0000001);
}
static void wasapi_copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) throw()
static void copyWavFormat (WAVEFORMATEXTENSIBLE& dest, const WAVEFORMATEX* const src) throw()
{
memcpy (&dest, src, src->wFormatTag == WAVE_FORMAT_EXTENSIBLE ? sizeof (WAVEFORMATEXTENSIBLE)
: sizeof (WAVEFORMATEX));
}
//==============================================================================
class WASAPIDeviceBase
{
@@ -139,21 +147,21 @@ public:
return;
REFERENCE_TIME defaultPeriod, minPeriod;
if (! OK (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
if (! check (tempClient->GetDevicePeriod (&defaultPeriod, &minPeriod)))
return;
WAVEFORMATEX* mixFormat = 0;
if (! OK (tempClient->GetMixFormat (&mixFormat)))
if (! check (tempClient->GetMixFormat (&mixFormat)))
return;
WAVEFORMATEXTENSIBLE format;
wasapi_copyWavFormat (format, mixFormat);
copyWavFormat (format, mixFormat);
CoTaskMemFree (mixFormat);
actualNumChannels = numChannels = format.Format.nChannels;
defaultSampleRate = format.Format.nSamplesPerSec;
minBufferSize = wasapi_refTimeToSamples (minPeriod, defaultSampleRate);
defaultBufferSize = wasapi_refTimeToSamples (defaultPeriod, defaultSampleRate);
minBufferSize = refTimeToSamples (minPeriod, defaultSampleRate);
defaultBufferSize = refTimeToSamples (defaultPeriod, defaultSampleRate);
rates.addUsingDefaultSort (defaultSampleRate);
@@ -203,12 +211,12 @@ public:
channelMaps.add (i);
REFERENCE_TIME latency;
if (OK (client->GetStreamLatency (&latency)))
latencySamples = wasapi_refTimeToSamples (latency, sampleRate);
if (check (client->GetStreamLatency (&latency)))
latencySamples = refTimeToSamples (latency, sampleRate);
(void) OK (client->GetBufferSize (&actualBufferSize));
(void) check (client->GetBufferSize (&actualBufferSize));
return OK (client->SetEventHandle (clientEvent));
return check (client->SetEventHandle (clientEvent));
}
return false;
@@ -293,7 +301,7 @@ private:
if (hr == S_FALSE && format.Format.nSamplesPerSec == nearestFormat->Format.nSamplesPerSec)
{
wasapi_copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
copyWavFormat (format, (WAVEFORMATEX*) nearestFormat);
hr = S_OK;
}
@@ -301,13 +309,13 @@ private:
REFERENCE_TIME defaultPeriod = 0, minPeriod = 0;
if (useExclusiveMode)
OK (client->GetDevicePeriod (&defaultPeriod, &minPeriod));
check (client->GetDevicePeriod (&defaultPeriod, &minPeriod));
GUID session;
if (hr == S_OK
&& OK (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
&& check (client->Initialize (useExclusiveMode ? AUDCLNT_SHAREMODE_EXCLUSIVE : AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
defaultPeriod, defaultPeriod, (WAVEFORMATEX*) &format, &session)))
{
actualNumChannels = format.Format.nChannels;
const bool isFloat = format.Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE && format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
@@ -345,7 +353,7 @@ public:
reservoirCapacity = 16384;
reservoir.setSize (actualNumChannels * reservoirCapacity * sizeof (float));
return openClient (newSampleRate, newChannels)
&& (numChannels == 0 || OK (client->GetService (__uuidof (IAudioCaptureClient), (void**) &captureClient)));
&& (numChannels == 0 || check (client->GetService (__uuidof (IAudioCaptureClient), (void**) &captureClient)));
}
void close()
@@ -392,7 +400,7 @@ public:
else
{
UINT32 packetLength = 0;
if (! OK (captureClient->GetNextPacketSize (&packetLength)))
if (! check (captureClient->GetNextPacketSize (&packetLength)))
break;
if (packetLength == 0)
@@ -408,7 +416,7 @@ public:
UINT32 numSamplesAvailable;
DWORD flags;
if (OK (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
if (check (captureClient->GetBuffer (&inputData, &numSamplesAvailable, &flags, 0, 0)))
{
const int samplesToDo = jmin (bufferSize, (int) numSamplesAvailable);
@@ -458,7 +466,7 @@ public:
bool open (const double newSampleRate, const BigInteger& newChannels)
{
return openClient (newSampleRate, newChannels)
&& (numChannels == 0 || OK (client->GetService (__uuidof (IAudioRenderClient), (void**) &renderClient)));
&& (numChannels == 0 || check (client->GetService (__uuidof (IAudioRenderClient), (void**) &renderClient)));
}
void close()
@@ -491,7 +499,7 @@ public:
while (bufferSize > 0)
{
UINT32 padding = 0;
if (! OK (client->GetCurrentPadding (&padding)))
if (! check (client->GetCurrentPadding (&padding)))
return;
int samplesToDo = useExclusiveMode ? bufferSize
@@ -507,7 +515,7 @@ public:
}
uint8* outputData = 0;
if (OK (renderClient->GetBuffer (samplesToDo, &outputData)))
if (check (renderClient->GetBuffer (samplesToDo, &outputData)))
{
for (int i = 0; i < numSrcBuffers; ++i)
converter->convertSamples (outputData, channelMaps.getUnchecked(i), srcBuffers[i], offset, samplesToDo);
@@ -866,28 +874,28 @@ private:
bool createDevices()
{
ComSmartPtr <IMMDeviceEnumerator> enumerator;
if (! OK (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
return false;
ComSmartPtr <IMMDeviceCollection> deviceCollection;
if (! OK (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection)))
if (! check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection)))
return false;
UINT32 numDevices = 0;
if (! OK (deviceCollection->GetCount (&numDevices)))
if (! check (deviceCollection->GetCount (&numDevices)))
return false;
for (UINT32 i = 0; i < numDevices; ++i)
{
ComSmartPtr <IMMDevice> device;
if (! OK (deviceCollection->Item (i, &device)))
if (! check (deviceCollection->Item (i, &device)))
continue;
const String deviceId (wasapi_getDeviceID (device));
const String deviceId (getDeviceID (device));
if (deviceId.isEmpty())
continue;
const EDataFlow flow = wasapi_getDataFlow (device);
const EDataFlow flow = getDataFlow (device);
if (deviceId == inputDeviceId && flow == eCapture)
inputDevice = new WASAPIInputDevice (device, useExclusiveMode);
@@ -930,7 +938,7 @@ public:
inputDeviceIds.clear();
ComSmartPtr <IMMDeviceEnumerator> enumerator;
if (! OK (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
if (! check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator))))
return;
const String defaultRenderer = getDefaultEndpoint (enumerator, false);
@@ -939,20 +947,20 @@ public:
ComSmartPtr <IMMDeviceCollection> deviceCollection;
UINT32 numDevices = 0;
if (! (OK (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection))
&& OK (deviceCollection->GetCount (&numDevices))))
if (! (check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection))
&& check (deviceCollection->GetCount (&numDevices))))
return;
for (UINT32 i = 0; i < numDevices; ++i)
{
ComSmartPtr <IMMDevice> device;
if (! OK (deviceCollection->Item (i, &device)))
if (! check (deviceCollection->Item (i, &device)))
continue;
const String deviceId (wasapi_getDeviceID (device));
const String deviceId (getDeviceID (device));
DWORD state = 0;
if (! OK (device->GetState (&state)))
if (! check (device->GetState (&state)))
continue;
if (state != DEVICE_STATE_ACTIVE)
@@ -962,18 +970,18 @@ public:
{
ComSmartPtr <IPropertyStore> properties;
if (! OK (device->OpenPropertyStore (STGM_READ, &properties)))
if (! check (device->OpenPropertyStore (STGM_READ, &properties)))
continue;
PROPVARIANT value;
PropVariantInit (&value);
if (OK (properties->GetValue (PKEY_Device_FriendlyName, &value)))
if (check (properties->GetValue (PKEY_Device_FriendlyName, &value)))
name = value.pwszVal;
PropVariantClear (&value);
}
const EDataFlow flow = wasapi_getDataFlow (device);
const EDataFlow flow = getDataFlow (device);
if (flow == eRender)
{
@@ -1057,11 +1065,11 @@ private:
{
String s;
IMMDevice* dev = 0;
if (OK (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
eMultimedia, &dev)))
if (check (enumerator->GetDefaultAudioEndpoint (forCapture ? eCapture : eRender,
eMultimedia, &dev)))
{
WCHAR* deviceId = 0;
if (OK (dev->GetId (&deviceId)))
if (check (dev->GetId (&deviceId)))
{
s = String (deviceId);
CoTaskMemFree (deviceId);
@@ -1078,13 +1086,12 @@ private:
WASAPIAudioIODeviceType& operator= (const WASAPIAudioIODeviceType&);
};
}
//==============================================================================
AudioIODeviceType* juce_createAudioIODeviceType_WASAPI()
{
return new WASAPIAudioIODeviceType();
return new WasapiClasses::WASAPIAudioIODeviceType();
}
#undef logFailure
#undef OK
#endif
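The renamed helpers above all reduce to one idiom: wrap each COM call so that a failing HRESULT is logged (when WASAPI_ENABLE_LOGGING is enabled) and collapsed to a bool the caller can chain with &&. A generic, standalone sketch of that pattern, independent of the JUCE classes:

#include <windows.h>
#include <cstdio>

// Log a failed HRESULT and report success/failure as a bool,
// in the spirit of the check()/logFailure() helpers above.
static bool check (HRESULT hr)
{
    if (FAILED (hr))
    {
        std::printf ("WASAPI error: 0x%08lx\n", (unsigned long) hr);
        return false;
    }

    return true;
}

// Usage: later steps only run if the earlier ones succeeded, e.g.
//   if (check (enumerator.CoCreateInstance (__uuidof (MMDeviceEnumerator)))
//        && check (enumerator->EnumAudioEndpoints (eAll, DEVICE_STATE_ACTIVE, &deviceCollection)))
//       ...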

src/text/juce_String.h (+5, -0)

@@ -1052,6 +1052,11 @@ private:
void createInternal (const juce_wchar* text, size_t numChars);
void appendInternal (const juce_wchar* text, int numExtraChars);
// This private cast operator should prevent strings being accidentally cast
// to bools (this is possible because the compiler can add an implicit cast
// via a const char*)
operator bool() const throw() { return false; }
};
//==============================================================================
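The private operator bool() relies on the pre-C++11 trick of making the unwanted conversion visible to overload resolution but inaccessible: String already converts to const char*, and a pointer converts to bool, so a bare "if (someString)" used to compile and silently test the pointer rather than the text; with this operator declared private, that expression now fails to compile outside the class. A small hypothetical illustration (reportProblem is a made-up helper):

void reportProblem (const String& errorMessage)
{
    // if (errorMessage)            // no longer compiles: the bool conversion is private
    //     ...

    if (errorMessage.isNotEmpty())  // the explicit, intended spelling still works
        Logger::writeToLog ("Problem: " + errorMessage);
}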

