Browse Source

Removed QuickTime from the OSX build, and replaced the video player and camera implementations with AVFoundation

tags/2021-05-28
jules 8 years ago
parent
commit
74c96208fe
29 changed files with 2611 additions and 2158 deletions
  1. +7
    -10
      examples/Demo/Source/Demos/VideoDemo.cpp
  2. +0
    -389
      modules/juce_audio_formats/codecs/juce_QuickTimeAudioFormat.cpp
  3. +0
    -71
      modules/juce_audio_formats/codecs/juce_QuickTimeAudioFormat.h
  4. +2
    -36
      modules/juce_audio_formats/juce_audio_formats.cpp
  5. +0
    -1
      modules/juce_audio_formats/juce_audio_formats.h
  6. +778
    -0
      modules/juce_audio_plugin_client/AU/juce_AU_Shared.h
  7. +0
    -27
      modules/juce_audio_plugin_client/juce_audio_plugin_client_utils.cpp
  8. +3
    -4
      modules/juce_core/native/juce_mac_Files.mm
  9. +1
    -1
      modules/juce_core/native/juce_mac_Network.mm
  10. +10
    -0
      modules/juce_core/native/juce_osx_ObjCHelpers.h
  11. +0
    -4
      modules/juce_gui_basics/juce_gui_basics.cpp
  12. +1
    -1
      modules/juce_gui_basics/layout/juce_ComponentMovementWatcher.h
  13. +1
    -1
      modules/juce_gui_basics/native/juce_mac_FileChooser.mm
  14. +1
    -2
      modules/juce_gui_extra/misc/juce_RecentlyOpenedFilesList.cpp
  15. +17
    -10
      modules/juce_video/capture/juce_CameraDevice.cpp
  16. +9
    -61
      modules/juce_video/juce_video.cpp
  17. +7
    -40
      modules/juce_video/juce_video.h
  18. +0
    -0
      modules/juce_video/native/juce_android_CameraDevice.h
  19. +167
    -0
      modules/juce_video/native/juce_android_Video.h
  20. +276
    -0
      modules/juce_video/native/juce_mac_CameraDevice.h
  21. +0
    -353
      modules/juce_video/native/juce_mac_CameraDevice.mm
  22. +1
    -3
      modules/juce_video/native/juce_mac_MovieComponent.mm
  23. +185
    -0
      modules/juce_video/native/juce_mac_Video.h
  24. +1
    -3
      modules/juce_video/native/juce_win32_CameraDevice.h
  25. +0
    -928
      modules/juce_video/native/juce_win32_DirectShowComponent.cpp
  26. +896
    -0
      modules/juce_video/native/juce_win32_Video.h
  27. +0
    -213
      modules/juce_video/playback/juce_DirectShowComponent.h
  28. +116
    -0
      modules/juce_video/playback/juce_VideoComponent.cpp
  29. +132
    -0
      modules/juce_video/playback/juce_VideoComponent.h

+ 7
- 10
examples/Demo/Source/Demos/VideoDemo.cpp View File

@@ -63,9 +63,7 @@ public:
void resized() override
{
videoComp.setBoundsWithCorrectAspectRatio (Rectangle<int> (0, 0, getWidth(), getHeight() - 30),
Justification::centred);
fileChooser.setBounds (0, getHeight() - 24, getWidth(), 24);
videoComp.setBounds (getLocalBounds().reduced (10));
}
bool isInterestedInDragSource (const SourceDetails&) override { return true; }
@@ -90,11 +88,7 @@ public:
}
private:
#if JUCE_MAC
MovieComponent videoComp;
#elif JUCE_DIRECTSHOW
DirectShowComponent videoComp;
#endif
VideoComponent videoComp;
bool isDragOver;
FilenameComponent fileChooser;
@@ -102,7 +96,9 @@ private:
void filenameComponentChanged (FilenameComponent*) override
{
// this is called when the user changes the filename in the file chooser box
if (videoComp.loadMovie (fileChooser.getCurrentFile()))
auto result = videoComp.load (fileChooser.getCurrentFile());
if (result.wasOk())
{
// loaded the file ok, so let's start it playing..
@@ -112,7 +108,8 @@ private:
else
{
AlertWindow::showMessageBoxAsync (AlertWindow::WarningIcon,
"Couldn't load the file!", String());
"Couldn't load the file!",
result.getErrorMessage());
}
}


+ 0
- 389
modules/juce_audio_formats/codecs/juce_QuickTimeAudioFormat.cpp View File

@@ -1,389 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#if JUCE_QUICKTIME && ! (JUCE_64BIT || JUCE_IOS)
} // (juce namespace)
#if ! JUCE_WINDOWS
#include <QuickTime/Movies.h>
#include <QuickTime/QTML.h>
#include <QuickTime/QuickTimeComponents.h>
#include <QuickTime/MediaHandlers.h>
#include <QuickTime/ImageCodec.h>
#else
#if JUCE_MSVC
#pragma warning (push)
#pragma warning (disable : 4100)
#endif
/* If you've got an include error here, you probably need to install the QuickTime SDK and
add its header directory to your include path.
Alternatively, if you don't need any QuickTime services, just set the JUCE_QUICKTIME flag to 0.
*/
#undef SIZE_MAX
#include <Movies.h>
#include <QTML.h>
#include <QuickTimeComponents.h>
#include <MediaHandlers.h>
#include <ImageCodec.h>
#undef SIZE_MAX
#if JUCE_MSVC
#pragma warning (pop)
#endif
#endif
namespace juce
{
bool juce_OpenQuickTimeMovieFromStream (InputStream* input, Movie& movie, Handle& dataHandle);
static const char* const quickTimeFormatName = "QuickTime file";
//==============================================================================
class QTAudioReader : public AudioFormatReader
{
public:
QTAudioReader (InputStream* const input_, const int trackNum_)
: AudioFormatReader (input_, quickTimeFormatName),
ok (false),
movie (0),
trackNum (trackNum_),
lastSampleRead (0),
lastThreadId (0),
extractor (0),
dataHandle (0)
{
JUCE_AUTORELEASEPOOL
{
bufferList.calloc (256, 1);
#if JUCE_WINDOWS
if (InitializeQTML (0) != noErr)
return;
#endif
if (EnterMovies() != noErr)
return;
bool opened = juce_OpenQuickTimeMovieFromStream (input_, movie, dataHandle);
if (! opened)
return;
{
const int numTracks = GetMovieTrackCount (movie);
int trackCount = 0;
for (int i = 1; i <= numTracks; ++i)
{
track = GetMovieIndTrack (movie, i);
media = GetTrackMedia (track);
OSType mediaType;
GetMediaHandlerDescription (media, &mediaType, 0, 0);
if (mediaType == SoundMediaType
&& trackCount++ == trackNum_)
{
ok = true;
break;
}
}
}
if (! ok)
return;
ok = false;
lengthInSamples = GetMediaDecodeDuration (media);
usesFloatingPointData = false;
samplesPerFrame = (int) (GetMediaDecodeDuration (media) / GetMediaSampleCount (media));
trackUnitsPerFrame = GetMovieTimeScale (movie) * samplesPerFrame
/ GetMediaTimeScale (media);
MovieAudioExtractionBegin (movie, 0, &extractor);
unsigned long output_layout_size;
OSStatus err = MovieAudioExtractionGetPropertyInfo (extractor,
kQTPropertyClass_MovieAudioExtraction_Audio,
kQTMovieAudioExtractionAudioPropertyID_AudioChannelLayout,
0, &output_layout_size, 0);
if (err != noErr)
return;
HeapBlock<AudioChannelLayout> qt_audio_channel_layout;
qt_audio_channel_layout.calloc (output_layout_size, 1);
MovieAudioExtractionGetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Audio,
kQTMovieAudioExtractionAudioPropertyID_AudioChannelLayout,
output_layout_size, qt_audio_channel_layout, 0);
qt_audio_channel_layout[0].mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
MovieAudioExtractionSetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Audio,
kQTMovieAudioExtractionAudioPropertyID_AudioChannelLayout,
output_layout_size,
qt_audio_channel_layout);
err = MovieAudioExtractionGetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Audio,
kQTMovieAudioExtractionAudioPropertyID_AudioStreamBasicDescription,
sizeof (inputStreamDesc),
&inputStreamDesc, 0);
if (err != noErr)
return;
inputStreamDesc.mFormatFlags = kAudioFormatFlagIsSignedInteger
| kAudioFormatFlagIsPacked
| kAudioFormatFlagsNativeEndian;
inputStreamDesc.mBitsPerChannel = sizeof (SInt16) * 8;
inputStreamDesc.mChannelsPerFrame = jmin ((UInt32) 2, inputStreamDesc.mChannelsPerFrame);
inputStreamDesc.mBytesPerFrame = sizeof (SInt16) * inputStreamDesc.mChannelsPerFrame;
inputStreamDesc.mBytesPerPacket = inputStreamDesc.mBytesPerFrame;
err = MovieAudioExtractionSetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Audio,
kQTMovieAudioExtractionAudioPropertyID_AudioStreamBasicDescription,
sizeof (inputStreamDesc),
&inputStreamDesc);
if (err != noErr)
return;
Boolean allChannelsDiscrete = false;
err = MovieAudioExtractionSetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Movie,
kQTMovieAudioExtractionMoviePropertyID_AllChannelsDiscrete,
sizeof (allChannelsDiscrete),
&allChannelsDiscrete);
if (err != noErr)
return;
bufferList->mNumberBuffers = 1;
bufferList->mBuffers[0].mNumberChannels = inputStreamDesc.mChannelsPerFrame;
bufferList->mBuffers[0].mDataByteSize = jmax ((UInt32) 4096, (UInt32) (samplesPerFrame * (int) inputStreamDesc.mBytesPerFrame) + 16);
dataBuffer.malloc (bufferList->mBuffers[0].mDataByteSize);
bufferList->mBuffers[0].mData = dataBuffer;
sampleRate = inputStreamDesc.mSampleRate;
bitsPerSample = 16;
numChannels = inputStreamDesc.mChannelsPerFrame;
detachThread();
ok = true;
}
}
~QTAudioReader()
{
JUCE_AUTORELEASEPOOL
{
checkThreadIsAttached();
if (dataHandle != nullptr)
DisposeHandle (dataHandle);
if (extractor != nullptr)
{
MovieAudioExtractionEnd (extractor);
extractor = nullptr;
}
DisposeMovie (movie);
#if JUCE_MAC
ExitMoviesOnThread();
#endif
}
}
bool readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer,
int64 startSampleInFile, int numSamples)
{
JUCE_AUTORELEASEPOOL
{
checkThreadIsAttached();
bool readOk = true;
while (numSamples > 0)
{
if (lastSampleRead != startSampleInFile)
{
TimeRecord time;
time.scale = (TimeScale) inputStreamDesc.mSampleRate;
time.base = 0;
time.value.hi = 0;
time.value.lo = (UInt32) startSampleInFile;
OSStatus err = MovieAudioExtractionSetProperty (extractor,
kQTPropertyClass_MovieAudioExtraction_Movie,
kQTMovieAudioExtractionMoviePropertyID_CurrentTime,
sizeof (time), &time);
if (err != noErr)
{
readOk = false;
break;
}
}
int framesToDo = jmin (numSamples, (int) (bufferList->mBuffers[0].mDataByteSize / inputStreamDesc.mBytesPerFrame));
bufferList->mBuffers[0].mDataByteSize = inputStreamDesc.mBytesPerFrame * (UInt32) framesToDo;
UInt32 outFlags = 0;
UInt32 actualNumFrames = (UInt32) framesToDo;
OSStatus err = MovieAudioExtractionFillBuffer (extractor, &actualNumFrames, bufferList, &outFlags);
if (err != noErr)
{
readOk = false;
break;
}
lastSampleRead = startSampleInFile + actualNumFrames;
const int samplesReceived = (int) actualNumFrames;
for (int j = numDestChannels; --j >= 0;)
{
if (destSamples[j] != nullptr)
{
const short* src = ((const short*) bufferList->mBuffers[0].mData) + j;
for (int i = 0; i < samplesReceived; ++i)
{
destSamples[j][startOffsetInDestBuffer + i] = (*src << 16);
src += numChannels;
}
}
}
startOffsetInDestBuffer += samplesReceived;
startSampleInFile += samplesReceived;
numSamples -= samplesReceived;
if (((outFlags & kQTMovieAudioExtractionComplete) != 0 || samplesReceived == 0) && numSamples > 0)
{
for (int j = numDestChannels; --j >= 0;)
if (destSamples[j] != nullptr)
zeromem (destSamples[j] + startOffsetInDestBuffer, sizeof (int) * (size_t) numSamples);
break;
}
}
detachThread();
return readOk;
}
}
bool ok;
private:
Movie movie;
Media media;
Track track;
const int trackNum;
double trackUnitsPerFrame;
int samplesPerFrame;
int64 lastSampleRead;
Thread::ThreadID lastThreadId;
MovieAudioExtractionRef extractor;
AudioStreamBasicDescription inputStreamDesc;
HeapBlock<AudioBufferList> bufferList;
HeapBlock<char> dataBuffer;
Handle dataHandle;
//==============================================================================
void checkThreadIsAttached()
{
#if JUCE_MAC
if (Thread::getCurrentThreadId() != lastThreadId)
EnterMoviesOnThread (0);
AttachMovieToCurrentThread (movie);
#endif
}
void detachThread()
{
#if JUCE_MAC
DetachMovieFromCurrentThread (movie);
#endif
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (QTAudioReader)
};
//==============================================================================
QuickTimeAudioFormat::QuickTimeAudioFormat() : AudioFormat (quickTimeFormatName, ".mov .mp3 .mp4 .m4a")
{
}
QuickTimeAudioFormat::~QuickTimeAudioFormat()
{
}
Array<int> QuickTimeAudioFormat::getPossibleSampleRates() { return Array<int>(); }
Array<int> QuickTimeAudioFormat::getPossibleBitDepths() { return Array<int>(); }
bool QuickTimeAudioFormat::canDoStereo() { return true; }
bool QuickTimeAudioFormat::canDoMono() { return true; }
//==============================================================================
AudioFormatReader* QuickTimeAudioFormat::createReaderFor (InputStream* sourceStream,
const bool deleteStreamIfOpeningFails)
{
ScopedPointer<QTAudioReader> r (new QTAudioReader (sourceStream, 0));
if (r->ok)
return r.release();
if (! deleteStreamIfOpeningFails)
r->input = 0;
return nullptr;
}
AudioFormatWriter* QuickTimeAudioFormat::createWriterFor (OutputStream* /*streamToWriteTo*/,
double /*sampleRateToUse*/,
unsigned int /*numberOfChannels*/,
int /*bitsPerSample*/,
const StringPairArray& /*metadataValues*/,
int /*qualityOptionIndex*/)
{
jassertfalse; // not yet implemented!
return nullptr;
}
#endif

+ 0
- 71
modules/juce_audio_formats/codecs/juce_QuickTimeAudioFormat.h View File

@@ -1,71 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#if JUCE_QUICKTIME
//==============================================================================
/**
Uses QuickTime to read the audio track of a movie or media file.
As well as QuickTime movies, this should also manage to open other audio
files that quicktime can understand, like mp3, m4a, etc.
@see AudioFormat
*/
class JUCE_API QuickTimeAudioFormat : public AudioFormat
{
public:
//==============================================================================
/** Creates a format object. */
QuickTimeAudioFormat();
/** Destructor. */
~QuickTimeAudioFormat();
//==============================================================================
Array<int> getPossibleSampleRates();
Array<int> getPossibleBitDepths();
bool canDoStereo();
bool canDoMono();
//==============================================================================
AudioFormatReader* createReaderFor (InputStream* sourceStream,
bool deleteStreamIfOpeningFails);
AudioFormatWriter* createWriterFor (OutputStream* streamToWriteTo,
double sampleRateToUse,
unsigned int numberOfChannels,
int bitsPerSample,
const StringPairArray& metadataValues,
int qualityOptionIndex);
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (QuickTimeAudioFormat)
};
#endif

+ 2
- 36
modules/juce_audio_formats/juce_audio_formats.cpp View File

@@ -41,9 +41,6 @@
//==============================================================================
#if JUCE_MAC
#if JUCE_QUICKTIME
#import <QTKit/QTKit.h>
#endif
#include <AudioToolbox/AudioToolbox.h>
#elif JUCE_IOS
@@ -51,44 +48,14 @@
#import <AVFoundation/AVFoundation.h>
//==============================================================================
#elif JUCE_WINDOWS
#if JUCE_QUICKTIME
/* If you've got an include error here, you probably need to install the QuickTime SDK and
add its header directory to your include path.
Alternatively, if you don't need any QuickTime services, just set the JUCE_QUICKTIME flag to 0.
*/
#include <Movies.h>
#include <QTML.h>
#include <QuickTimeComponents.h>
#include <MediaHandlers.h>
#include <ImageCodec.h>
/* If you've got QuickTime 7 installed, then these COM objects should be found in
the "\Program Files\Quicktime" directory. You'll need to add this directory to
your include search path to make these import statements work.
*/
#import <QTOLibrary.dll>
#import <QTOControl.dll>
#if JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "QTMLClient.lib")
#endif
#endif
#if JUCE_USE_WINDOWS_MEDIA_FORMAT
#include <wmsdk.h>
#endif
#elif JUCE_WINDOWS && JUCE_USE_WINDOWS_MEDIA_FORMAT
#include <wmsdk.h>
#endif
//==============================================================================
namespace juce
{
#if JUCE_ANDROID
#undef JUCE_QUICKTIME
#endif
#include "format/juce_AudioFormat.cpp"
#include "format/juce_AudioFormatManager.cpp"
#include "format/juce_AudioFormatReader.cpp"
@@ -102,7 +69,6 @@ namespace juce
#include "codecs/juce_FlacAudioFormat.cpp"
#include "codecs/juce_MP3AudioFormat.cpp"
#include "codecs/juce_OggVorbisAudioFormat.cpp"
#include "codecs/juce_QuickTimeAudioFormat.cpp"
#include "codecs/juce_WavAudioFormat.cpp"
#include "codecs/juce_LAMEEncoderAudioFormat.cpp"


+ 0
- 1
modules/juce_audio_formats/juce_audio_formats.h View File

@@ -128,7 +128,6 @@ class AudioFormat;
#include "codecs/juce_LAMEEncoderAudioFormat.h"
#include "codecs/juce_MP3AudioFormat.h"
#include "codecs/juce_OggVorbisAudioFormat.h"
#include "codecs/juce_QuickTimeAudioFormat.h"
#include "codecs/juce_WavAudioFormat.h"
#include "codecs/juce_WindowsMediaAudioFormat.h"
#include "sampler/juce_Sampler.h"


+ 778
- 0
modules/juce_audio_plugin_client/AU/juce_AU_Shared.h View File

@@ -0,0 +1,778 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
// This macro can be set if you need to override this internal name for some reason..
#ifndef JUCE_STATE_DICTIONARY_KEY
#define JUCE_STATE_DICTIONARY_KEY "jucePluginState"
#endif
struct AudioUnitHelpers
{
// maps a channel index into an AU format to an index of a juce format
struct AUChannelStreamOrder
{
AudioChannelLayoutTag auLayoutTag;
AudioChannelLabel speakerOrder[8];
};
static AUChannelStreamOrder auChannelStreamOrder[];
static AudioChannelSet::ChannelType CoreAudioChannelLabelToJuceType (AudioChannelLabel label) noexcept
{
if (label >= kAudioChannelLabel_Discrete_0 && label <= kAudioChannelLabel_Discrete_65535)
{
const unsigned int discreteChannelNum = label - kAudioChannelLabel_Discrete_0;
return static_cast<AudioChannelSet::ChannelType> (AudioChannelSet::discreteChannel0 + discreteChannelNum);
}
switch (label)
{
case kAudioChannelLabel_Center:
case kAudioChannelLabel_Mono: return AudioChannelSet::centre;
case kAudioChannelLabel_Left:
case kAudioChannelLabel_HeadphonesLeft: return AudioChannelSet::left;
case kAudioChannelLabel_Right:
case kAudioChannelLabel_HeadphonesRight: return AudioChannelSet::right;
case kAudioChannelLabel_LFEScreen: return AudioChannelSet::subbass;
case kAudioChannelLabel_LeftSurround: return AudioChannelSet::leftSurround;
case kAudioChannelLabel_RightSurround: return AudioChannelSet::rightSurround;
case kAudioChannelLabel_LeftCenter: return AudioChannelSet::leftCentre;
case kAudioChannelLabel_RightCenter: return AudioChannelSet::rightCentre;
case kAudioChannelLabel_CenterSurround: return AudioChannelSet::surround;
case kAudioChannelLabel_LeftSurroundDirect: return AudioChannelSet::leftSurroundDirect;
case kAudioChannelLabel_RightSurroundDirect: return AudioChannelSet::rightSurroundDirect;
case kAudioChannelLabel_TopCenterSurround: return AudioChannelSet::topMiddle;
case kAudioChannelLabel_VerticalHeightLeft: return AudioChannelSet::topFrontLeft;
case kAudioChannelLabel_VerticalHeightRight: return AudioChannelSet::topFrontRight;
case kAudioChannelLabel_VerticalHeightCenter: return AudioChannelSet::topFrontCentre;
case kAudioChannelLabel_TopBackLeft: return AudioChannelSet::topRearLeft;
case kAudioChannelLabel_RearSurroundLeft: return AudioChannelSet::leftRearSurround;
case kAudioChannelLabel_TopBackRight: return AudioChannelSet::topRearRight;
case kAudioChannelLabel_RearSurroundRight: return AudioChannelSet::rightRearSurround;
case kAudioChannelLabel_TopBackCenter: return AudioChannelSet::topRearCentre;
case kAudioChannelLabel_LFE2: return AudioChannelSet::subbass2;
case kAudioChannelLabel_LeftWide: return AudioChannelSet::wideLeft;
case kAudioChannelLabel_RightWide: return AudioChannelSet::wideRight;
case kAudioChannelLabel_Ambisonic_W: return AudioChannelSet::ambisonicW;
case kAudioChannelLabel_Ambisonic_X: return AudioChannelSet::ambisonicX;
case kAudioChannelLabel_Ambisonic_Y: return AudioChannelSet::ambisonicY;
case kAudioChannelLabel_Ambisonic_Z: return AudioChannelSet::ambisonicZ;
default: return AudioChannelSet::unknown;
}
}
static AudioChannelLabel JuceChannelTypeToCoreAudioLabel (const AudioChannelSet::ChannelType& label) noexcept
{
if (label >= AudioChannelSet::discreteChannel0)
{
const unsigned int discreteChannelNum = label - AudioChannelSet::discreteChannel0;;
return static_cast<AudioChannelLabel> (kAudioChannelLabel_Discrete_0 + discreteChannelNum);
}
switch (label)
{
case AudioChannelSet::centre: return kAudioChannelLabel_Center;
case AudioChannelSet::left: return kAudioChannelLabel_Left;
case AudioChannelSet::right: return kAudioChannelLabel_Right;
case AudioChannelSet::subbass: return kAudioChannelLabel_LFEScreen;
case AudioChannelSet::leftRearSurround: return kAudioChannelLabel_RearSurroundLeft;
case AudioChannelSet::rightRearSurround: return kAudioChannelLabel_RearSurroundRight;
case AudioChannelSet::leftCentre: return kAudioChannelLabel_LeftCenter;
case AudioChannelSet::rightCentre: return kAudioChannelLabel_RightCenter;
case AudioChannelSet::surround: return kAudioChannelLabel_CenterSurround;
case AudioChannelSet::leftSurround: return kAudioChannelLabel_LeftSurround;
case AudioChannelSet::rightSurround: return kAudioChannelLabel_RightSurround;
case AudioChannelSet::topMiddle: return kAudioChannelLabel_TopCenterSurround;
case AudioChannelSet::topFrontLeft: return kAudioChannelLabel_VerticalHeightLeft;
case AudioChannelSet::topFrontRight: return kAudioChannelLabel_VerticalHeightRight;
case AudioChannelSet::topFrontCentre: return kAudioChannelLabel_VerticalHeightCenter;
case AudioChannelSet::topRearLeft: return kAudioChannelLabel_TopBackLeft;
case AudioChannelSet::topRearRight: return kAudioChannelLabel_TopBackRight;
case AudioChannelSet::topRearCentre: return kAudioChannelLabel_TopBackCenter;
case AudioChannelSet::subbass2: return kAudioChannelLabel_LFE2;
case AudioChannelSet::wideLeft: return kAudioChannelLabel_LeftWide;
case AudioChannelSet::wideRight: return kAudioChannelLabel_RightWide;
case AudioChannelSet::ambisonicW: return kAudioChannelLabel_Ambisonic_W;
case AudioChannelSet::ambisonicX: return kAudioChannelLabel_Ambisonic_X;
case AudioChannelSet::ambisonicY: return kAudioChannelLabel_Ambisonic_Y;
case AudioChannelSet::ambisonicZ: return kAudioChannelLabel_Ambisonic_Z;
case AudioChannelSet::leftSurroundDirect: return kAudioChannelLabel_LeftSurroundDirect;
case AudioChannelSet::rightSurroundDirect: return kAudioChannelLabel_RightSurroundDirect;
case AudioChannelSet::unknown: return kAudioChannelLabel_Unknown;
case AudioChannelSet::discreteChannel0: return kAudioChannelLabel_Discrete_0;
}
return kAudioChannelLabel_Unknown;
}
static AudioChannelSet CoreAudioChannelBitmapToJuceType (UInt32 bitmap) noexcept
{
AudioChannelSet set;
if ((bitmap & kAudioChannelBit_Left) != 0) set.addChannel (AudioChannelSet::left);
if ((bitmap & kAudioChannelBit_Right) != 0) set.addChannel (AudioChannelSet::right);
if ((bitmap & kAudioChannelBit_Center) != 0) set.addChannel (AudioChannelSet::centre);
if ((bitmap & kAudioChannelBit_LFEScreen) != 0) set.addChannel (AudioChannelSet::subbass);
if ((bitmap & kAudioChannelBit_LeftSurroundDirect) != 0) set.addChannel (AudioChannelSet::leftSurroundDirect);
if ((bitmap & kAudioChannelBit_RightSurroundDirect) != 0) set.addChannel (AudioChannelSet::rightSurroundDirect);
if ((bitmap & kAudioChannelBit_LeftCenter) != 0) set.addChannel (AudioChannelSet::leftCentre);
if ((bitmap & kAudioChannelBit_RightCenter) != 0) set.addChannel (AudioChannelSet::rightCentre);
if ((bitmap & kAudioChannelBit_CenterSurround) != 0) set.addChannel (AudioChannelSet::surround);
if ((bitmap & kAudioChannelBit_LeftSurround) != 0) set.addChannel (AudioChannelSet::leftSurround);
if ((bitmap & kAudioChannelBit_RightSurround) != 0) set.addChannel (AudioChannelSet::rightSurround);
if ((bitmap & kAudioChannelBit_TopCenterSurround) != 0) set.addChannel (AudioChannelSet::topMiddle);
if ((bitmap & kAudioChannelBit_VerticalHeightLeft) != 0) set.addChannel (AudioChannelSet::topFrontLeft);
if ((bitmap & kAudioChannelBit_VerticalHeightCenter) != 0) set.addChannel (AudioChannelSet::topFrontCentre);
if ((bitmap & kAudioChannelBit_VerticalHeightRight) != 0) set.addChannel (AudioChannelSet::topFrontRight);
if ((bitmap & kAudioChannelBit_TopBackLeft) != 0) set.addChannel (AudioChannelSet::topRearLeft);
if ((bitmap & kAudioChannelBit_TopBackCenter) != 0) set.addChannel (AudioChannelSet::topRearCentre);
if ((bitmap & kAudioChannelBit_TopBackRight) != 0) set.addChannel (AudioChannelSet::topRearRight);
return set;
}
static AudioChannelSet CoreAudioChannelLayoutToJuceType (const AudioChannelLayout& layout) noexcept
{
const AudioChannelLayoutTag tag = layout.mChannelLayoutTag;
if (tag == kAudioChannelLayoutTag_UseChannelBitmap) return CoreAudioChannelBitmapToJuceType (layout.mChannelBitmap);
if (tag == kAudioChannelLayoutTag_UseChannelDescriptions)
{
if (layout.mNumberChannelDescriptions <= 8)
{
// first try to convert the layout via the auChannelStreamOrder array
int layoutIndex;
for (layoutIndex = 0; auChannelStreamOrder[layoutIndex].auLayoutTag != 0; ++layoutIndex)
{
const AUChannelStreamOrder& streamOrder = auChannelStreamOrder[layoutIndex];
int numChannels;
for (numChannels = 0; numChannels < 8 && streamOrder.speakerOrder[numChannels] != 0;)
++numChannels;
if (numChannels != (int) layout.mNumberChannelDescriptions)
continue;
int ch;
for (ch = 0; ch < numChannels; ++ch)
if (streamOrder.speakerOrder[ch] != layout.mChannelDescriptions[ch].mChannelLabel)
break;
// match!
if (ch == numChannels)
break;
}
if (auChannelStreamOrder[layoutIndex].auLayoutTag != 0)
return CALayoutTagToChannelSet (auChannelStreamOrder[layoutIndex].auLayoutTag);
}
AudioChannelSet set;
for (unsigned int i = 0; i < layout.mNumberChannelDescriptions; ++i)
set.addChannel (CoreAudioChannelLabelToJuceType (layout.mChannelDescriptions[i].mChannelLabel));
return set;
}
return CALayoutTagToChannelSet (tag);
}
static AudioChannelSet CALayoutTagToChannelSet (AudioChannelLayoutTag tag) noexcept
{
switch (tag)
{
case kAudioChannelLayoutTag_Unknown: return AudioChannelSet::disabled();
case kAudioChannelLayoutTag_Mono: return AudioChannelSet::mono();
case kAudioChannelLayoutTag_Stereo:
case kAudioChannelLayoutTag_StereoHeadphones:
case kAudioChannelLayoutTag_Binaural: return AudioChannelSet::stereo();
case kAudioChannelLayoutTag_Quadraphonic: return AudioChannelSet::quadraphonic();
case kAudioChannelLayoutTag_Pentagonal: return AudioChannelSet::pentagonal();
case kAudioChannelLayoutTag_Hexagonal: return AudioChannelSet::hexagonal();
case kAudioChannelLayoutTag_Octagonal: return AudioChannelSet::octagonal();
case kAudioChannelLayoutTag_Ambisonic_B_Format: return AudioChannelSet::ambisonic();
case kAudioChannelLayoutTag_AudioUnit_6_0: return AudioChannelSet::create6point0();
case kAudioChannelLayoutTag_DTS_6_0_A: return AudioChannelSet::create6point0Music();
case kAudioChannelLayoutTag_MPEG_6_1_A: return AudioChannelSet::create6point1();
case kAudioChannelLayoutTag_MPEG_5_0_B: return AudioChannelSet::create5point0();
case kAudioChannelLayoutTag_MPEG_5_1_A: return AudioChannelSet::create5point1();
case kAudioChannelLayoutTag_DTS_7_1:
case kAudioChannelLayoutTag_MPEG_7_1_C: return AudioChannelSet::create7point1();
case kAudioChannelLayoutTag_AudioUnit_7_0: return AudioChannelSet::create7point0();
case kAudioChannelLayoutTag_AudioUnit_7_0_Front: return AudioChannelSet::createFront7point0();
case kAudioChannelLayoutTag_AudioUnit_7_1_Front: return AudioChannelSet::createFront7point1();
case kAudioChannelLayoutTag_MPEG_3_0_A:
case kAudioChannelLayoutTag_MPEG_3_0_B: return AudioChannelSet::createLCR();
case kAudioChannelLayoutTag_MPEG_4_0_A:
case kAudioChannelLayoutTag_MPEG_4_0_B: return AudioChannelSet::createLCRS();
case kAudioChannelLayoutTag_ITU_2_1: return AudioChannelSet::createLRS();
case kAudioChannelLayoutTag_EAC3_7_1_C: return AudioChannelSet::create7point1AC3();
}
if (int numChannels = static_cast<int> (tag) & 0xffff)
return AudioChannelSet::discreteChannels (numChannels);
// Bitmap and channel description array layout tags are currently unsupported :-(
jassertfalse;
return AudioChannelSet();
}
static AudioChannelLayoutTag ChannelSetToCALayoutTag (const AudioChannelSet& set) noexcept
{
if (set == AudioChannelSet::mono()) return kAudioChannelLayoutTag_Mono;
if (set == AudioChannelSet::stereo()) return kAudioChannelLayoutTag_Stereo;
if (set == AudioChannelSet::createLCR()) return kAudioChannelLayoutTag_MPEG_3_0_A;
if (set == AudioChannelSet::createLRS()) return kAudioChannelLayoutTag_ITU_2_1;
if (set == AudioChannelSet::createLCRS()) return kAudioChannelLayoutTag_MPEG_4_0_A;
if (set == AudioChannelSet::quadraphonic()) return kAudioChannelLayoutTag_Quadraphonic;
if (set == AudioChannelSet::pentagonal()) return kAudioChannelLayoutTag_Pentagonal;
if (set == AudioChannelSet::hexagonal()) return kAudioChannelLayoutTag_Hexagonal;
if (set == AudioChannelSet::octagonal()) return kAudioChannelLayoutTag_Octagonal;
if (set == AudioChannelSet::ambisonic()) return kAudioChannelLayoutTag_Ambisonic_B_Format;
if (set == AudioChannelSet::create5point0()) return kAudioChannelLayoutTag_MPEG_5_0_B;
if (set == AudioChannelSet::create5point1()) return kAudioChannelLayoutTag_MPEG_5_1_A;
if (set == AudioChannelSet::create6point0()) return kAudioChannelLayoutTag_AudioUnit_6_0;
if (set == AudioChannelSet::create6point0Music()) return kAudioChannelLayoutTag_DTS_6_0_A;
if (set == AudioChannelSet::create6point1()) return kAudioChannelLayoutTag_MPEG_6_1_A;
if (set == AudioChannelSet::create7point0()) return kAudioChannelLayoutTag_AudioUnit_7_0;
if (set == AudioChannelSet::create7point1()) return kAudioChannelLayoutTag_MPEG_7_1_C;
if (set == AudioChannelSet::createFront7point0()) return kAudioChannelLayoutTag_AudioUnit_7_0_Front;
if (set == AudioChannelSet::createFront7point1()) return kAudioChannelLayoutTag_AudioUnit_7_1_Front;
if (set == AudioChannelSet::create7point1AC3()) return kAudioChannelLayoutTag_EAC3_7_1_C;
if (set == AudioChannelSet::disabled()) return kAudioChannelLayoutTag_Unknown;
return static_cast<AudioChannelLayoutTag> ((int) kAudioChannelLayoutTag_DiscreteInOrder | set.size());
}
static int auChannelIndexToJuce (int auIndex, const AudioChannelSet& channelSet)
{
if (auIndex >= 8) return auIndex;
AudioChannelLayoutTag currentLayout = ChannelSetToCALayoutTag (channelSet);
int layoutIndex;
for (layoutIndex = 0; auChannelStreamOrder[layoutIndex].auLayoutTag != currentLayout; ++layoutIndex)
if (auChannelStreamOrder[layoutIndex].auLayoutTag == 0) return auIndex;
AudioChannelSet::ChannelType channelType
= CoreAudioChannelLabelToJuceType (auChannelStreamOrder[layoutIndex].speakerOrder[auIndex]);
// We need to map surround channels to rear surround channels for petagonal and hexagonal
if (channelSet == AudioChannelSet::pentagonal() || channelSet == AudioChannelSet::hexagonal())
{
switch (channelType)
{
case AudioChannelSet::leftSurround:
channelType = AudioChannelSet::leftRearSurround;
break;
case AudioChannelSet::rightSurround:
channelType = AudioChannelSet::rightRearSurround;
break;
default:
break;
}
}
const int juceIndex = channelSet.getChannelTypes().indexOf (channelType);
jassert (juceIndex >= 0);
return juceIndex >= 0 ? juceIndex : auIndex;
}
static int juceChannelIndexToAu (int juceIndex, const AudioChannelSet& channelSet)
{
AudioChannelLayoutTag currentLayout = ChannelSetToCALayoutTag (channelSet);
int layoutIndex;
for (layoutIndex = 0; auChannelStreamOrder[layoutIndex].auLayoutTag != currentLayout; ++layoutIndex)
{
if (auChannelStreamOrder[layoutIndex].auLayoutTag == 0)
{
jassertfalse;
return juceIndex;
}
}
const AUChannelStreamOrder& channelOrder = auChannelStreamOrder[layoutIndex];
AudioChannelSet::ChannelType channelType = channelSet.getTypeOfChannel (juceIndex);
// We need to map rear surround channels to surround channels for petagonal and hexagonal
if (channelSet == AudioChannelSet::pentagonal() || channelSet == AudioChannelSet::hexagonal())
{
switch (channelType)
{
case AudioChannelSet::leftRearSurround:
channelType = AudioChannelSet::leftSurround;
break;
case AudioChannelSet::rightRearSurround:
channelType = AudioChannelSet::rightSurround;
break;
default:
break;
}
}
for (int i = 0; i < 8 && channelOrder.speakerOrder[i] != 0; ++i)
if (CoreAudioChannelLabelToJuceType (channelOrder.speakerOrder[i]) == channelType)
return i;
jassertfalse;
return juceIndex;
}
/** Builds and owns the per-bus channel index maps which translate JUCE channel
    ordering into AU (CoreAudio) stream ordering.

    Call alloc() after the bus configuration is known, get() to fetch a bus's
    map, and release() before the configuration changes or on shutdown.
*/
class ChannelRemapper
{
public:
    ChannelRemapper (PluginBusUtilities& bUtils) : busUtils (bUtils), inputLayoutMap (nullptr), outputLayoutMap (nullptr) {}

    /** Allocates and fills the channel maps for every input and output bus. */
    void alloc()
    {
        const int numInputBuses  = busUtils.getBusCount (true);
        const int numOutputBuses = busUtils.getBusCount (false);

        initializeChannelMapArray (true, numInputBuses);
        initializeChannelMapArray (false, numOutputBuses);

        for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
            fillLayoutChannelMaps (true, busIdx);

        for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
            fillLayoutChannelMaps (false, busIdx);
    }

    /** Frees all map storage; get() must not be used again until after the next alloc(). */
    void release()
    {
        inputLayoutMap = outputLayoutMap = nullptr;
        inputLayoutMapPtrStorage.free();
        outputLayoutMapPtrStorage.free();
        inputLayoutMapStorage.free();
        outputLayoutMapStorage.free();
    }

    /** Returns the channel map for the given direction and bus index. */
    inline const int* get (bool input, int bus) const noexcept { return (input ? inputLayoutMap : outputLayoutMap) [bus]; }

private:
    //==============================================================================
    PluginBusUtilities& busUtils;
    HeapBlock<int*> inputLayoutMapPtrStorage, outputLayoutMapPtrStorage;
    HeapBlock<int>  inputLayoutMapStorage, outputLayoutMapStorage;
    int** inputLayoutMap;
    int** outputLayoutMap;

    //==============================================================================
    /** Allocates the per-bus pointer array and the flat index storage for one
        direction, pointing each bus's map at its slice of the flat storage. */
    void initializeChannelMapArray (bool isInput, const int numBuses)
    {
        HeapBlock<int*>& layoutMapPtrStorage = isInput ? inputLayoutMapPtrStorage : outputLayoutMapPtrStorage;
        HeapBlock<int>&  layoutMapStorage    = isInput ? inputLayoutMapStorage    : outputLayoutMapStorage;
        int**& layoutMap = isInput ? inputLayoutMap : outputLayoutMap;

        // Only the channel total for the direction being initialised is needed here.
        const int totalNumChannels = busUtils.findTotalNumChannels (isInput);

        layoutMapPtrStorage.calloc (static_cast<size_t> (numBuses));
        layoutMapStorage.calloc (static_cast<size_t> (totalNumChannels));

        layoutMap = layoutMapPtrStorage.getData();

        int ch = 0;
        for (int busIdx = 0; busIdx < numBuses; ++busIdx)
        {
            layoutMap[busIdx] = layoutMapStorage.getData() + ch;
            ch += busUtils.getNumChannels (isInput, busIdx);
        }
    }

    /** Fills one bus's map with JUCE-index -> AU-index translations for its current channel set. */
    void fillLayoutChannelMaps (bool isInput, int busNr)
    {
        int* layoutMap = (isInput ? inputLayoutMap : outputLayoutMap)[busNr];
        const AudioChannelSet& channelFormat = busUtils.getChannelSet (isInput, busNr);
        const int numChannels = channelFormat.size();

        for (int i = 0; i < numChannels; ++i)
            layoutMap[i] = AudioUnitHelpers::juceChannelIndexToAu (i, channelFormat);
    }
};
//==============================================================================
/** Manages a set of non-aliasing channel pointers used to pass audio between
    CoreAudio AudioBufferLists and a JUCE AudioSampleBuffer.

    Channel pointers are registered in order with push() and consumed in the
    same order with pop(); any pointer that is null or would alias an earlier
    channel is redirected into internal scratch storage so that every channel
    has unique writable memory.
*/
class CoreAudioBufferList
{
public:
    CoreAudioBufferList () { reset(); }

    //==============================================================================
    /** Allocates scratch storage and the channel-pointer array for the larger of
        the two channel counts, then rewinds the push/pop state. */
    void prepare (int inChannels, int outChannels, int maxFrames)
    {
        const int numChannels = jmax (inChannels, outChannels);

        scratch.setSize (numChannels, maxFrames);
        channels.calloc (static_cast<size_t> (numChannels));

        reset();
    }

    /** Frees the scratch buffer and the channel-pointer array. */
    void release()
    {
        scratch.setSize (0, 0);
        channels.free();
    }

    /** Clears all registered channel pointers and rewinds the push/pop cursors. */
    void reset() noexcept
    {
        pushIdx = 0;
        popIdx = 0;
        zeromem (channels.getData(), sizeof(float*) * static_cast<size_t> (scratch.getNumChannels()));
    }

    //==============================================================================
    /** Stores a channel pointer at the given index, substituting scratch storage
        when ptr is null or aliases a previously-stored channel.
        Returns the pointer that was actually stored. */
    float* setBuffer (const int idx, float* ptr = nullptr) noexcept
    {
        jassert (idx < scratch.getNumChannels());
        return (channels [idx] = uniqueBuffer (idx, ptr));
    }

    //==============================================================================
    /** Returns the next stored channel pointer in push order, advancing the cursor. */
    float* push () noexcept
    {
        jassert (pushIdx < scratch.getNumChannels());
        return channels [pushIdx++];
    }

    /** Copies the channels of bufferList (remapped through channelMap) into the
        next stored channel pointers. For a non-interleaved source, copying is
        skipped when the source buffer is already the destination pointer. */
    void push (AudioBufferList& bufferList, const int* channelMap) noexcept
    {
        jassert (pushIdx < scratch.getNumChannels());

        if (bufferList.mNumberBuffers > 0)
        {
            // The frame count is derived from the first buffer's byte size.
            const UInt32 n = bufferList.mBuffers [0].mDataByteSize /
                                (bufferList.mBuffers [0].mNumberChannels * sizeof (float));
            const bool isInterleaved = isAudioBufferInterleaved (bufferList);
            const int numChannels = static_cast<int> (isInterleaved ? bufferList.mBuffers [0].mNumberChannels
                                                                    : bufferList.mNumberBuffers);

            for (int ch = 0; ch < numChannels; ++ch)
            {
                float* data = push();

                int mappedChannel = channelMap [ch];
                if (isInterleaved || static_cast<float*> (bufferList.mBuffers [mappedChannel].mData) != data)
                    copyAudioBuffer (bufferList, mappedChannel, n, data);
            }
        }
    }

    //==============================================================================
    /** Returns the next stored channel pointer in pop order, advancing the cursor. */
    float* pop () noexcept
    {
        jassert (popIdx < scratch.getNumChannels());
        return channels[popIdx++];
    }

    /** Copies the next stored channel pointers out into buffer (remapped through
        channelMap). For non-interleaved destinations, copies are avoided where
        possible by aliasing the destination pointer directly. */
    void pop (AudioBufferList& buffer, const int* channelMap) noexcept
    {
        if (buffer.mNumberBuffers > 0)
        {
            const UInt32 n = buffer.mBuffers [0].mDataByteSize / (buffer.mBuffers [0].mNumberChannels * sizeof (float));
            const bool isInterleaved = isAudioBufferInterleaved (buffer);
            const int numChannels = static_cast<int> (isInterleaved ? buffer.mBuffers [0].mNumberChannels : buffer.mNumberBuffers);

            for (int ch = 0; ch < numChannels; ++ch)
            {
                int mappedChannel = channelMap [ch];
                float* nextBuffer = pop();

                if (nextBuffer == buffer.mBuffers [mappedChannel].mData && ! isInterleaved)
                    continue; // no copying necessary

                if (buffer.mBuffers [mappedChannel].mData == nullptr && ! isInterleaved)
                    buffer.mBuffers [mappedChannel].mData = nextBuffer;   // hand our pointer to the destination directly
                else
                    copyAudioBuffer (nextBuffer, mappedChannel, n, buffer);
            }
        }
    }

    //==============================================================================
    /** Wraps all pushed channel pointers in an AudioSampleBuffer of the given
        length. All channels must have been pushed (and be non-null) first. */
    AudioSampleBuffer& getBuffer (UInt32 frames) noexcept
    {
        jassert (pushIdx == scratch.getNumChannels());

       #if JUCE_DEBUG
        for (int i = 0; i < pushIdx; ++i)
            jassert (channels [i] != nullptr);
       #endif

        mutableBuffer.setDataToReferTo (channels, pushIdx, static_cast<int> (frames));
        return mutableBuffer;
    }

private:
    /** Returns buffer unless it is null or aliases an already-registered channel,
        in which case this index's scratch channel is returned instead. */
    float* uniqueBuffer (int idx, float* buffer) noexcept
    {
        if (buffer == nullptr)
            return scratch.getWritePointer (idx);

        for (int ch = 0; ch < idx; ++ch)
            if (buffer == channels[ch])
                return scratch.getWritePointer (idx);

        return buffer;
    }

    //==============================================================================
    AudioSampleBuffer scratch;          // fallback storage for null/aliasing channels
    AudioSampleBuffer mutableBuffer;    // view over the pushed channel pointers
    HeapBlock<float*> channels;         // per-channel data pointers
    int pushIdx, popIdx;                // cursors into 'channels' for push()/pop()
};
/** Returns true if the buffer list stores its samples interleaved, i.e. it
    consists of a single buffer carrying more than one channel. */
static bool isAudioBufferInterleaved (const AudioBufferList& audioBuffer) noexcept
{
    if (audioBuffer.mNumberBuffers != 1)
        return false;

    return audioBuffer.mBuffers[0].mNumberChannels > 1;
}
static void clearAudioBuffer (const AudioBufferList& audioBuffer) noexcept
{
for (unsigned int ch = 0; ch < audioBuffer.mNumberBuffers; ++ch)
zeromem (audioBuffer.mBuffers[ch].mData, audioBuffer.mBuffers[ch].mDataByteSize);
}
static void copyAudioBuffer (const AudioBufferList& audioBuffer, const int channel, const UInt32 size, float* dst) noexcept
{
if (! isAudioBufferInterleaved (audioBuffer))
{
jassert (channel < static_cast<int> (audioBuffer.mNumberBuffers));
jassert (audioBuffer.mBuffers[channel].mDataByteSize == (size * sizeof (float)));
memcpy (dst, audioBuffer.mBuffers[channel].mData, size * sizeof (float));
}
else
{
const int numChannels = static_cast<int> (audioBuffer.mBuffers[0].mNumberChannels);
const UInt32 n = static_cast<UInt32> (numChannels) * size;
const float* src = static_cast<const float*> (audioBuffer.mBuffers[0].mData);
jassert (channel < numChannels);
jassert (audioBuffer.mBuffers[0].mDataByteSize == (n * sizeof (float)));
for (const float* inData = src; inData < (src + n); inData += numChannels)
*dst++ = inData[channel];
}
}
static void copyAudioBuffer (const float *src, const int channel, const UInt32 size, AudioBufferList& audioBuffer) noexcept
{
if (! isAudioBufferInterleaved (audioBuffer))
{
jassert (channel < static_cast<int> (audioBuffer.mNumberBuffers));
jassert (audioBuffer.mBuffers[channel].mDataByteSize == (size * sizeof (float)));
memcpy (audioBuffer.mBuffers[channel].mData, src, size * sizeof (float));
}
else
{
const int numChannels = static_cast<int> (audioBuffer.mBuffers[0].mNumberChannels);
const UInt32 n = static_cast<UInt32> (numChannels) * size;
float* dst = static_cast<float*> (audioBuffer.mBuffers[0].mData);
jassert (channel < numChannels);
jassert (audioBuffer.mBuffers[0].mDataByteSize == (n * sizeof (float)));
for (float* outData = dst; outData < (dst + n); outData += numChannels)
outData[channel] = *src++;
}
}
/** Builds the AUChannelInfo array that the AU wrapper reports to the host,
    describing which main-bus input/output channel-count combinations the
    processor supports.

    For a midi-effect plug-in with no audio buses a single {0, 0} entry is
    returned. Otherwise, channel counts 1..9 are probed on the main buses via
    setPreferredBusArrangement (the original layout is restored afterwards by
    the ScopedBusRestorer), and the results are compressed using the AU
    wildcard conventions (-1 = any count, -2 = any count different from the
    other side) where that would not over-report support.
*/
static Array<AUChannelInfo> getAUChannelInfo (PluginBusUtilities& busUtils)
{
    Array<AUChannelInfo> channelInfo;

    AudioProcessor* juceFilter = &busUtils.processor;
    const AudioProcessor::AudioBusArrangement& arr = juceFilter->busArrangement;
    // Restores the processor's original bus layout when this function returns.
    PluginBusUtilities::ScopedBusRestorer restorer (busUtils);

    const bool hasMainInputBus  = (busUtils.getNumEnabledBuses (true)  > 0);
    const bool hasMainOutputBus = (busUtils.getNumEnabledBuses (false) > 0);

    if ((! hasMainInputBus) && (! hasMainOutputBus))
    {
        // midi effect plug-in: no audio
        AUChannelInfo info;
        info.inChannels = 0;
        info.outChannels = 0;

        channelInfo.add (info);
        return channelInfo;
    }
    else
    {
        const uint32_t maxNumChanToCheckFor = 9;

        uint32_t defaultInputs  = static_cast<uint32_t> (busUtils.getNumChannels (true,  0));
        uint32_t defaultOutputs = static_cast<uint32_t> (busUtils.getNumChannels (false, 0));

        uint32_t lastInputs  = defaultInputs;
        uint32_t lastOutputs = defaultOutputs;

        // Each supported (in, out) pair is packed into one value as (in << 16) | out.
        SortedSet<uint32_t> supportedChannels;

        // add the current configuration
        if (lastInputs != 0 || lastOutputs != 0)
            supportedChannels.add ((lastInputs << 16) | lastOutputs);

        for (uint32_t inChanNum = hasMainInputBus ? 1 : 0; inChanNum <= (hasMainInputBus ? maxNumChanToCheckFor : 0); ++inChanNum)
        {
            const AudioChannelSet dfltInLayout = busUtils.getDefaultLayoutForChannelNumAndBus(true, 0, static_cast<int> (inChanNum));

            if (inChanNum != 0 && dfltInLayout.isDisabled())
                continue;

            for (uint32_t outChanNum = hasMainOutputBus ? 1 : 0; outChanNum <= (hasMainOutputBus ? maxNumChanToCheckFor : 0); ++outChanNum)
            {
                const AudioChannelSet dfltOutLayout = busUtils.getDefaultLayoutForChannelNumAndBus(false, 0, static_cast<int> (outChanNum));

                if (outChanNum != 0 && dfltOutLayout.isDisabled())
                    continue;

                // get the number of channels again. This is only needed for some processors that change their configuration
                // even when they indicate that setPreferredBusArrangement failed.
                lastInputs  = hasMainInputBus  ? static_cast<uint32_t> (arr.inputBuses. getReference (0). channels.size()) : 0;
                lastOutputs = hasMainOutputBus ? static_cast<uint32_t> (arr.outputBuses.getReference (0). channels.size()) : 0;

                uint32_t channelConfiguration = (inChanNum << 16) | outChanNum;

                // did we already try this configuration?
                if (supportedChannels.contains (channelConfiguration)) continue;

                if (lastInputs != inChanNum && (! dfltInLayout.isDisabled()))
                {
                    // Try switching the input side to the probed count; skip this pair if refused.
                    if (! juceFilter->setPreferredBusArrangement (true, 0, dfltInLayout)) continue;

                    lastInputs = inChanNum;
                    // The output count may have changed as a side effect, so re-read it.
                    lastOutputs = hasMainOutputBus ? static_cast<uint32_t> (arr.outputBuses.getReference (0). channels.size()) : 0;

                    supportedChannels.add ((lastInputs << 16) | lastOutputs);
                }

                if (lastOutputs != outChanNum && (! dfltOutLayout.isDisabled()))
                {
                    // Try switching the output side to the probed count; skip this pair if refused.
                    if (! juceFilter->setPreferredBusArrangement (false, 0, dfltOutLayout)) continue;

                    // The input count may have changed as a side effect, so re-read it.
                    lastInputs = hasMainInputBus ? static_cast<uint32_t> (arr.inputBuses.getReference (0).channels.size()) : 0;
                    lastOutputs = outChanNum;

                    supportedChannels.add ((lastInputs << 16) | lastOutputs);
                }
            }
        }

        // Does any supported configuration have differing input and output counts?
        bool hasInOutMismatch = false;
        for (int i = 0; i < supportedChannels.size(); ++i)
        {
            const uint32_t numInputs  = (supportedChannels[i] >> 16) & 0xffff;
            const uint32_t numOutputs = (supportedChannels[i] >> 0)  & 0xffff;

            if (numInputs != numOutputs)
            {
                hasInOutMismatch = true;
                break;
            }
        }

        // Check whether the wildcard patterns would over-report support: if any
        // probed count is unsupported, explicit entries must be emitted instead.
        bool hasUnsupportedInput = ! hasMainOutputBus, hasUnsupportedOutput = ! hasMainInputBus;
        for (uint32_t inChanNum = hasMainInputBus ? 1 : 0; inChanNum <= (hasMainInputBus ? maxNumChanToCheckFor : 0); ++inChanNum)
        {
            uint32_t channelConfiguration = (inChanNum << 16) | (hasInOutMismatch ? defaultOutputs : inChanNum);

            if (! supportedChannels.contains (channelConfiguration))
            {
                hasUnsupportedInput = true;
                break;
            }
        }

        for (uint32_t outChanNum = hasMainOutputBus ? 1 : 0; outChanNum <= (hasMainOutputBus ? maxNumChanToCheckFor : 0); ++outChanNum)
        {
            uint32_t channelConfiguration = ((hasInOutMismatch ? defaultInputs : outChanNum) << 16) | outChanNum;

            if (! supportedChannels.contains (channelConfiguration))
            {
                hasUnsupportedOutput = true;
                break;
            }
        }

        // Convert each supported pair into an AUChannelInfo, collapsing to the
        // -1/-2 wildcard forms where the support checks above allow it.
        for (int i = 0; i < supportedChannels.size(); ++i)
        {
            const int numInputs  = (supportedChannels[i] >> 16) & 0xffff;
            const int numOutputs = (supportedChannels[i] >> 0)  & 0xffff;

            AUChannelInfo info;

            // see here: https://developer.apple.com/library/mac/documentation/MusicAudio/Conceptual/AudioUnitProgrammingGuide/TheAudioUnit/TheAudioUnit.html
            info.inChannels  = static_cast<SInt16> (hasMainInputBus  ? (hasUnsupportedInput  ? numInputs  : (hasInOutMismatch && (! hasUnsupportedOutput) ? -2 : -1)) : 0);
            info.outChannels = static_cast<SInt16> (hasMainOutputBus ? (hasUnsupportedOutput ? numOutputs : (hasInOutMismatch && (! hasUnsupportedInput)  ? -2 : -1)) : 0);

            // {-2, -2} is collapsed to {-1, -2} (see the AU guide linked above).
            if (info.inChannels == -2 && info.outChannels == -2)
                info.inChannels = -1;

            // Only add the entry if an identical one hasn't already been added.
            int j;
            for (j = 0; j < channelInfo.size(); ++j)
                if (channelInfo[j].inChannels == info.inChannels && channelInfo[j].outChannels == info.outChannels)
                    break;

            if (j >= channelInfo.size())
                channelInfo.add (info);
        }
    }

    return channelInfo;
}
};
// Definition of the static lookup table declared in AudioUnitHelpers. Each entry
// pairs a CoreAudio layout tag with that layout's channel labels in AU stream
// order (up to 8 channels, zero-padded). The final all-zero entry is the
// terminator sentinel that the search loops in the index-mapping helpers rely on.
AudioUnitHelpers::AUChannelStreamOrder AudioUnitHelpers::auChannelStreamOrder[] =
{
    {kAudioChannelLayoutTag_Mono, {kAudioChannelLabel_Center, 0, 0, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_Stereo, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, 0, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_StereoHeadphones, {kAudioChannelLabel_HeadphonesLeft, kAudioChannelLabel_HeadphonesRight, 0, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_Binaural, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, 0, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_Quadraphonic, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_Pentagonal, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, 0, 0, 0}},
    {kAudioChannelLayoutTag_Hexagonal, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, kAudioChannelLabel_CenterSurround, 0, 0}},
    {kAudioChannelLayoutTag_Octagonal, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, kAudioChannelLabel_CenterSurround, kAudioChannelLabel_LeftWide, kAudioChannelLabel_RightWide}},
    {kAudioChannelLayoutTag_Ambisonic_B_Format, {kAudioChannelLabel_Ambisonic_W, kAudioChannelLabel_Ambisonic_X, kAudioChannelLabel_Ambisonic_Y, kAudioChannelLabel_Ambisonic_Z, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_5_0_B, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, 0, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_5_1_A, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, kAudioChannelLabel_LFEScreen, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, 0, 0}},
    {kAudioChannelLayoutTag_AudioUnit_6_0, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, kAudioChannelLabel_CenterSurround, 0, 0}},
    {kAudioChannelLayoutTag_DTS_6_0_A, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_RearSurroundLeft, kAudioChannelLabel_RearSurroundRight, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_6_1_A, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, kAudioChannelLabel_LFEScreen, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_CenterSurround, 0}},
    {kAudioChannelLayoutTag_AudioUnit_7_0, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, kAudioChannelLabel_RearSurroundLeft, kAudioChannelLabel_RearSurroundRight, 0}},
    {kAudioChannelLayoutTag_MPEG_7_1_C, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, kAudioChannelLabel_LFEScreen, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_RearSurroundLeft, kAudioChannelLabel_RearSurroundRight}},
    {kAudioChannelLayoutTag_AudioUnit_7_0_Front,{kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_Center, kAudioChannelLabel_LeftCenter, kAudioChannelLabel_RightCenter, 0}},
    {kAudioChannelLayoutTag_AudioUnit_7_1_Front,{kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, kAudioChannelLabel_LFEScreen, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_LeftCenter, kAudioChannelLabel_RightCenter}},
    {kAudioChannelLayoutTag_MPEG_3_0_A, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_3_0_B, {kAudioChannelLabel_Center, kAudioChannelLabel_Left, kAudioChannelLabel_Right, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_4_0_A, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_Center, kAudioChannelLabel_CenterSurround, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_MPEG_4_0_B, {kAudioChannelLabel_Center, kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_CenterSurround, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_ITU_2_1, {kAudioChannelLabel_Left, kAudioChannelLabel_Right, kAudioChannelLabel_CenterSurround, 0, 0, 0, 0, 0}},
    {kAudioChannelLayoutTag_EAC3_7_1_C, {kAudioChannelLabel_Left, kAudioChannelLabel_Center, kAudioChannelLabel_Right, kAudioChannelLabel_LeftSurround, kAudioChannelLabel_RightSurround, kAudioChannelLabel_LFEScreen, kAudioChannelLabel_LeftSurroundDirect, kAudioChannelLabel_RightSurroundDirect}},
    {0, {0,0,0,0,0,0,0,0}}   // terminator sentinel — must remain the last entry
};

+ 0
- 27
modules/juce_audio_plugin_client/juce_audio_plugin_client_utils.cpp View File

@@ -1,27 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#include "utility/juce_PluginUtilities.cpp"

+ 3
- 4
modules/juce_core/native/juce_mac_Files.mm View File

@@ -78,9 +78,8 @@ namespace MacFileHelpers
NSNumber* hidden = nil;
NSError* err = nil;
return [[NSURL fileURLWithPath: juceStringToNS (path)]
getResourceValue: &hidden forKey: NSURLIsHiddenKey error: &err]
&& [hidden boolValue];
return [createNSURLFromFile (path) getResourceValue: &hidden forKey: NSURLIsHiddenKey error: &err]
&& [hidden boolValue];
}
#elif JUCE_IOS
return File (path).getFileName().startsWithChar ('.');
@@ -298,7 +297,7 @@ bool File::moveToTrash() const
#else
JUCE_AUTORELEASEPOOL
{
NSURL* url = [NSURL fileURLWithPath: juceStringToNS (getFullPathName())];
NSURL* url = createNSURLFromFile (*this);
[[NSWorkspace sharedWorkspace] recycleURLs: [NSArray arrayWithObject: url]
completionHandler: nil];


+ 1
- 1
modules/juce_core/native/juce_mac_Network.mm View File

@@ -503,7 +503,7 @@ struct BackgroundDownloadTask : public URL::DownloadTask
{
NSFileManager* fileManager = [[NSFileManager alloc] init];
error = ([fileManager moveItemAtURL: location
toURL: [NSURL fileURLWithPath:juceStringToNS (targetLocation.getFullPathName())]
toURL: createNSURLFromFile (targetLocation)
error: nil] == NO);
httpCode = 200;
finished = true;


+ 10
- 0
modules/juce_core/native/juce_osx_ObjCHelpers.h View File

@@ -49,6 +49,16 @@ namespace
return [NSString string];
}
static inline NSURL* createNSURLFromFile (const String& f)
{
return [NSURL fileURLWithPath: juceStringToNS (f)];
}
static inline NSURL* createNSURLFromFile (const File& f)
{
return createNSURLFromFile (f.getFullPathName());
}
#if JUCE_MAC
template <typename RectangleType>
static NSRect makeNSRect (const RectangleType& r) noexcept


+ 0
- 4
modules/juce_gui_basics/juce_gui_basics.cpp View File

@@ -76,10 +76,6 @@
#endif
#endif
#if JUCE_QUICKTIME && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "QTMLClient.lib")
#endif
#if JUCE_DIRECT2D && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "Dwrite.lib")
#pragma comment (lib, "D2d1.lib")


+ 1
- 1
modules/juce_gui_basics/layout/juce_ComponentMovementWatcher.h View File

@@ -38,7 +38,7 @@
It also includes a callback that lets you know when the top-level peer is changed.
This class is used by specialised components like WebBrowserComponent or QuickTimeComponent
This class is used by specialised components like WebBrowserComponent
because they need to keep their custom windows in the right place and respond to
changes in the peer.
*/


+ 1
- 1
modules/juce_gui_basics/native/juce_mac_FileChooser.mm View File

@@ -213,7 +213,7 @@ void FileChooser::showPlatformDialog (Array<File>& results,
}
#if defined (MAC_OS_X_VERSION_10_6) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6)
[panel setDirectoryURL: [NSURL fileURLWithPath: juceStringToNS (directory)]];
[panel setDirectoryURL: createNSURLFromFile (directory)];
[panel setNameFieldStringValue: juceStringToNS (filename)];
if ([panel runModal] == 1 /*NSModalResponseOK*/)


+ 1
- 2
modules/juce_gui_extra/misc/juce_RecentlyOpenedFilesList.cpp View File

@@ -139,8 +139,7 @@ void RecentlyOpenedFilesList::registerRecentFileNatively (const File& file)
#if JUCE_MAC
JUCE_AUTORELEASEPOOL
{
[[NSDocumentController sharedDocumentController]
noteNewRecentDocumentURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
[[NSDocumentController sharedDocumentController] noteNewRecentDocumentURL: createNSURLFromFile (file)];
}
#else
ignoreUnused (file);


+ 17
- 10
modules/juce_video/capture/juce_CameraDevice.cpp View File

@@ -24,9 +24,18 @@
==============================================================================
*/
CameraDevice::CameraDevice (const String& nm, int index, int minWidth, int minHeight, int maxWidth, int maxHeight,
bool highQuality)
: name (nm), pimpl (new Pimpl (name, index, minWidth, minHeight, maxWidth, maxHeight, highQuality))
#if JUCE_MAC || JUCE_IOS
#include "../native/juce_mac_CameraDevice.h"
#elif JUCE_WINDOWS
#include "../native/juce_win32_CameraDevice.h"
#elif JUCE_ANDROID
#include "../native/juce_android_CameraDevice.h"
#endif
//==============================================================================
CameraDevice::CameraDevice (const String& nm, int index, int minWidth, int minHeight, int maxWidth, int maxHeight, bool useHighQuality)
: name (nm), pimpl (new Pimpl (name, index, minWidth, minHeight, maxWidth, maxHeight, useHighQuality))
{
}
@@ -81,14 +90,12 @@ StringArray CameraDevice::getAvailableDevices()
CameraDevice* CameraDevice::openDevice (int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight,
bool highQuality)
bool useHighQuality)
{
ScopedPointer<CameraDevice> d (new CameraDevice (getAvailableDevices() [index], index,
minWidth, minHeight, maxWidth, maxHeight,
highQuality));
if (d->pimpl->openedOk())
return d.release();
if (ScopedPointer<CameraDevice> d = new CameraDevice (getAvailableDevices() [index], index,
minWidth, minHeight, maxWidth, maxHeight, useHighQuality))
if (d->pimpl->openedOk())
return d.release();
return nullptr;
}

+ 9
- 61
modules/juce_video/juce_video.cpp View File

@@ -34,50 +34,23 @@
#endif
#define JUCE_CORE_INCLUDE_OBJC_HELPERS 1
#define JUCE_CORE_INCLUDE_JNI_HELPERS 1
#define JUCE_CORE_INCLUDE_COM_SMART_PTR 1
#define JUCE_CORE_INCLUDE_NATIVE_HEADERS 1
#include "juce_video.h"
#if JUCE_MAC
#import <AVFoundation/AVFoundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>
//==============================================================================
#elif JUCE_WINDOWS
#if JUCE_QUICKTIME
/* If you've got an include error here, you probably need to install the QuickTime SDK and
add its header directory to your include path.
Alternatively, if you don't need any QuickTime services, just set the JUCE_QUICKTIME flag to 0.
*/
#include <Movies.h>
#include <QTML.h>
#include <QuickTimeComponents.h>
#include <MediaHandlers.h>
#include <ImageCodec.h>
/* If you've got QuickTime 7 installed, then these COM objects should be found in
the "\Program Files\Quicktime" directory. You'll need to add this directory to
your include search path to make these import statements work.
*/
#import <QTOLibrary.dll>
#import <QTOControl.dll>
#if JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "QTMLClient.lib")
#endif
#endif
#if JUCE_USE_CAMERA || JUCE_DIRECTSHOW
/* If you're using the camera classes, you'll need access to a few DirectShow headers.
/* If you're using the camera classes, you'll need access to a few DirectShow headers.
These files are provided in the normal Windows SDK. */
#include <dshow.h>
#include <dshowasf.h>
#endif
#if JUCE_DIRECTSHOW && JUCE_MEDIAFOUNDATION
#include <evr.h>
#endif
#include <dshow.h>
#include <dshowasf.h>
#include <evr.h>
#if JUCE_USE_CAMERA && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "Strmiids.lib")
@@ -88,7 +61,7 @@
#pragma comment (lib, "mfuuid.lib")
#endif
#if JUCE_DIRECTSHOW && JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#if JUCE_MSVC && ! JUCE_DONT_AUTOLINK_TO_WIN32_LIBRARIES
#pragma comment (lib, "strmiids.lib")
#endif
#endif
@@ -99,32 +72,7 @@ using namespace juce;
namespace juce
{
#if JUCE_MAC || JUCE_IOS
#if JUCE_USE_CAMERA
#include "native/juce_mac_CameraDevice.mm"
#endif
#if JUCE_MAC
#include "native/juce_mac_MovieComponent.mm"
#endif
#elif JUCE_WINDOWS
#if JUCE_USE_CAMERA
#include "native/juce_win32_CameraDevice.cpp"
#endif
#if JUCE_DIRECTSHOW
#include "native/juce_win32_DirectShowComponent.cpp"
#endif
#elif JUCE_LINUX
#elif JUCE_ANDROID
#if JUCE_USE_CAMERA
#include "native/juce_android_CameraDevice.cpp"
#endif
#endif
#include "playback/juce_VideoComponent.cpp"
#if JUCE_USE_CAMERA
#include "capture/juce_CameraDevice.cpp"


+ 7
- 40
modules/juce_video/juce_video.h View File

@@ -43,7 +43,7 @@
license: GPL/Commercial
dependencies: juce_data_structures juce_cryptography
OSXFrameworks: AVFoundation CoreMedia
OSXFrameworks: AVKit AVFoundation CoreMedia
END_JUCE_MODULE_DECLARATION
@@ -56,59 +56,26 @@
//==============================================================================
#include <juce_gui_extra/juce_gui_extra.h>
//==============================================================================
/** Config: JUCE_DIRECTSHOW
Enables DirectShow media-streaming architecture (MS Windows only).
*/
#ifndef JUCE_DIRECTSHOW
#define JUCE_DIRECTSHOW 0
#endif
/** Config: JUCE_MEDIAFOUNDATION
Enables Media Foundation multimedia platform (Windows Vista and above).
*/
#ifndef JUCE_MEDIAFOUNDATION
#define JUCE_MEDIAFOUNDATION 0
#endif
#if ! JUCE_WINDOWS
#undef JUCE_DIRECTSHOW
#undef JUCE_MEDIAFOUNDATION
#endif
/** Config: JUCE_QUICKTIME
Enables the QuickTimeMovieComponent class (Mac and Windows).
If you're building on Windows, you'll need to have the Apple QuickTime SDK
installed, and its header files will need to be on your include path.
*/
#if ! (defined (JUCE_QUICKTIME) || JUCE_LINUX || JUCE_IOS || JUCE_ANDROID || (JUCE_WINDOWS && ! JUCE_MSVC))
#define JUCE_QUICKTIME 0
#endif
//=============================================================================
#include "../juce_gui_extra/juce_gui_extra.h"
//=============================================================================
/** Config: JUCE_USE_CAMERA
Enables web-cam support using the CameraDevice class (Mac and Windows).
*/
#if (JUCE_QUICKTIME || JUCE_WINDOWS) && ! defined (JUCE_USE_CAMERA)
#ifndef JUCE_USE_CAMERA
#define JUCE_USE_CAMERA 0
#endif
#if ! (JUCE_MAC || JUCE_WINDOWS)
#undef JUCE_QUICKTIME
#undef JUCE_USE_CAMERA
#endif
//==============================================================================
//=============================================================================
namespace juce
{
#if JUCE_DIRECTSHOW || DOXYGEN
#include "playback/juce_DirectShowComponent.h"
#endif
#if JUCE_MAC || DOXYGEN
#include "playback/juce_MovieComponent.h"
#endif
#include "playback/juce_VideoComponent.h"
#include "capture/juce_CameraDevice.h"
}

modules/juce_video/native/juce_android_CameraDevice.cpp → modules/juce_video/native/juce_android_CameraDevice.h View File


+ 167
- 0
modules/juce_video/native/juce_android_Video.h View File

@@ -0,0 +1,167 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
//==============================================================================
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
METHOD (play, "play", "()V") \
METHOD (stop, "stop", "()V") \
METHOD (close, "close", "()V") \
METHOD (isPlaying, "isPlaying", "()Z") \
METHOD (loadFile, "loadFile", "(Ljava/lang/String;)Z") \
METHOD (loadURL, "loadURL", "(Ljava/lang/String;)Z") \
METHOD (setCurrentPosition, "setCurrentPosition", "(D)V") \
METHOD (getCurrentPosition, "getCurrentPosition", "()D") \
METHOD (setSpeed, "setSpeed", "(D)V") \
METHOD (getDuration, "getDuration", "()D") \
METHOD (getVideoWidth, "getVideoWidth", "()I") \
METHOD (getVideoHeight, "getVideoHeight", "()I") \
METHOD (setVolume, "setVolume", "(F)V") \
METHOD (getVolume, "getVolume", "()F") \
DECLARE_JNI_CLASS (VideoView, JUCE_ANDROID_ACTIVITY_CLASSPATH "$VideoView")
#undef JNI_CLASS_MEMBERS
//==============================================================================
/** Android implementation of VideoComponent.

    Every operation is forwarded over JNI to the VideoView object owned by the
    JUCE activity (see the JNI_CLASS_MEMBERS table above). All wrappers are
    safe to call before a native view has been attached: isOpen() checks that
    the underlying GlobalRef is non-null, and each method is a no-op (or
    returns a neutral value) when it isn't.
*/
struct VideoComponent::Pimpl : public Component
{
    Pimpl() {}

    ~Pimpl()
    {
        close();
    }

    /** Loads a local media file via the Java-side loadFile() entry point. */
    Result load (const File& file)
    {
        if (isOpen() && videoView.callBooleanMethod (VideoView.loadFile, javaString (file.getFullPathName()).get()))
        {
            currentFile = file;
            return Result::ok();
        }

        return Result::fail ("Couldn't open file");
    }

    /** Loads a remote URL.

        Bug fix: this previously passed the URL string to VideoView.loadFile,
        leaving the loadURL entry point (declared in the JNI table) unused;
        URLs must go through loadURL so the Java side can stream them.
    */
    Result load (const URL& url)
    {
        if (isOpen() && videoView.callBooleanMethod (VideoView.loadURL, javaString (url.toString (true)).get()))
        {
            currentURL = url;
            return Result::ok();
        }

        return Result::fail ("Couldn't open file");
    }

    /** Releases the Java-side player resources (no-op if nothing is open). */
    void close()
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.close);
    }

    bool isOpen() const
    {
        return videoView != nullptr;
    }

    bool isPlaying() const
    {
        return isOpen() && videoView.callBooleanMethod (VideoView.isPlaying);
    }

    void play()
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.play);
    }

    void stop()
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.stop);
    }

    /** Seeks to the given position (in seconds). */
    void setPosition (double newPosition)
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.setCurrentPosition, (jdouble) newPosition);
    }

    /** Returns the current playback position in seconds, or 0 if not open. */
    double getPosition() const
    {
        if (isOpen())
            return videoView.callDoubleMethod (VideoView.getCurrentPosition);

        return 0.0;
    }

    void setSpeed (double newSpeed)
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.setSpeed, (jdouble) newSpeed);
    }

    /** Returns the native video dimensions, or an empty rectangle if not open. */
    Rectangle<int> getNativeSize() const
    {
        if (isOpen())
        {
            jint w = videoView.callIntMethod (VideoView.getVideoWidth);
            jint h = videoView.callIntMethod (VideoView.getVideoHeight);
            return Rectangle<int> (w, h);
        }

        return Rectangle<int>();
    }

    /** Returns the media duration in seconds, or 0 if not open. */
    double getDuration() const
    {
        if (isOpen())
            return videoView.callDoubleMethod (VideoView.getDuration);

        return 0.0;
    }

    void setVolume (float newVolume)
    {
        if (isOpen())
            videoView.callVoidMethod (VideoView.setVolume, (jfloat) newVolume);
    }

    float getVolume() const
    {
        if (isOpen())
            return videoView.callFloatMethod (VideoView.getVolume);

        return 0.0f;
    }

    File currentFile;          // last successfully-loaded local file (if any)
    URL currentURL;            // last successfully-loaded URL (if any)
    GlobalRef videoView;       // JNI global ref to the Java VideoView; null until attached
};

+ 276
- 0
modules/juce_video/native/juce_mac_CameraDevice.h View File

@@ -0,0 +1,276 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
//==============================================================================
/** macOS AVFoundation implementation of CameraDevice.

    Owns an AVCaptureView whose session is configured with a still-image
    output (for Listener frame callbacks) and a movie-file output (for
    recording). Listener callbacks are driven by repeatedly triggering
    asynchronous still-image captures while any listeners are registered.
*/
struct CameraDevice::Pimpl
{
    Pimpl (const String&, int /*index*/, int /*minWidth*/, int /*minHeight*/,
           int /*maxWidth*/, int /*maxHeight*/, bool useHighQuality)
    {
        JUCE_AUTORELEASEPOOL
        {
            captureView = [[AVCaptureView alloc] init];
            session = captureView.session;

            session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
                                                   : AVCaptureSessionPresetMedium;
            refreshConnections();

            static DelegateClass cls;
            callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
            DelegateClass::setOwner (callbackDelegate, this);
        }
    }

    ~Pimpl()
    {
        [session stopRunning];
        removeImageCapture();
        removeMovieCapture();
        // NOTE(review): session was obtained from captureView.session (not
        // alloc'd or retained here), and captureView itself is never released
        // — confirm the intended ownership under manual retain/release.
        [session release];
        [callbackDelegate release];
    }

    bool openedOk() const noexcept       { return openingError.isEmpty(); }

    /** Adds the JPEG still-image output used to feed listener callbacks. */
    void addImageCapture()
    {
        if (imageOutput == nil)
        {
            imageOutput = [[AVCaptureStillImageOutput alloc] init];
            auto* imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
            [imageOutput setOutputSettings: imageSettings];
            [imageSettings release];
            [session addOutput: imageOutput];
        }
    }

    /** Adds the movie-file output used by startRecordingToFile(). */
    void addMovieCapture()
    {
        if (fileOutput == nil)
        {
            fileOutput = [[AVCaptureMovieFileOutput alloc] init];
            [session addOutput: fileOutput];
        }
    }

    void removeImageCapture()
    {
        if (imageOutput != nil)
        {
            [session removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }
    }

    void removeMovieCapture()
    {
        if (fileOutput != nil)
        {
            [session removeOutput: fileOutput];
            [fileOutput release];
            fileOutput = nil;
        }
    }

    /** Tears down and re-adds both outputs inside a configuration block. */
    void refreshConnections()
    {
        [session beginConfiguration];
        removeImageCapture();
        removeMovieCapture();
        addImageCapture();
        addMovieCapture();
        [session commitConfiguration];
    }

    /** Re-creates the outputs if the video connection has gone away. */
    void refreshIfNeeded()
    {
        if (getVideoConnection() == nullptr)
            refreshConnections();
    }

    /** Starts an asynchronous recording to the given file, replacing any
        existing file. The quality argument is ignored: quality is fixed by
        the session preset chosen at construction.
    */
    void startRecordingToFile (const File& file, int /*quality*/)
    {
        stopRecording();
        refreshIfNeeded();
        firstPresentationTime = Time::getCurrentTime();
        file.deleteFile();

        // Bug fix: without setting this flag, stopRecording() would see
        // isRecording == false and never call [fileOutput stopRecording],
        // leaving the capture running forever.
        isRecording = true;
        [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
                                recordingDelegate: callbackDelegate];
    }

    void stopRecording()
    {
        if (isRecording)
        {
            [fileOutput stopRecording];
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime;
    }

    /** Returns the first active + enabled video connection on the image
        output, or nil if none exists.
    */
    AVCaptureConnection* getVideoConnection() const
    {
        if (imageOutput != nil)
            for (AVCaptureConnection* connection in imageOutput.connections)
                if ([connection isActive] && [connection isEnabled])
                    for (AVCaptureInputPort* port in [connection inputPorts])
                        if ([[port mediaType] isEqual: AVMediaTypeVideo])
                            return connection;

        return nil;
    }

    /** Decodes a captured JPEG frame and delivers it to all listeners, then
        schedules the next capture while listeners remain registered.
    */
    void handleImageCapture (const void* data, size_t size)
    {
        auto image = ImageFileFormat::loadFrom (data, size);

        const ScopedLock sl (listenerLock);

        if (! listeners.isEmpty())
        {
            for (int i = listeners.size(); --i >= 0;)
                if (auto* l = listeners[i])
                    l->imageReceived (image);

            if (! listeners.isEmpty())
                triggerImageCapture();
        }
    }

    void triggerImageCapture()
    {
        refreshIfNeeded();

        if (auto* videoConnection = getVideoConnection())
        {
            [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
                                                     completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError*)
            {
                auto buffer = CMSampleBufferGetDataBuffer (sampleBuffer);
                size_t size = CMBlockBufferGetDataLength (buffer);
                jassert (CMBlockBufferIsRangeContiguous (buffer, 0, size)); // TODO: need to add code to handle this if it happens

                char* data = nullptr;
                CMBlockBufferGetDataPointer (buffer, 0, &size, nullptr, &data);
                handleImageCapture (data, size);
            }];
        }
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.addIfNotAlreadyThere (listenerToAdd);

        // The first listener kicks off the capture loop; subsequent frames
        // are re-triggered from handleImageCapture().
        if (listeners.size() == 1)
            triggerImageCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeFirstMatchingValue (listenerToRemove);
    }

    static StringArray getAvailableDevices()
    {
        // AVCaptureView manages device selection itself, so only a single
        // placeholder entry is exposed here.
        StringArray results;
        results.add ("default");
        return results;
    }

    AVCaptureView* captureView = nil;
    AVCaptureSession* session = nil;
    AVCaptureMovieFileOutput* fileOutput = nil;
    AVCaptureStillImageOutput* imageOutput = nil;
    id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
    String openingError;
    Time firstPresentationTime;
    bool isRecording = false;

    Array<CameraDevice::Listener*> listeners;
    CriticalSection listenerLock;

private:
    //==============================================================================
    // ObjC delegate shim implementing AVCaptureFileOutputRecordingDelegate;
    // all callbacks are currently no-ops.
    struct DelegateClass : public ObjCClass<NSObject>
    {
        DelegateClass() : ObjCClass<NSObject> ("JUCECameraDelegate_")
        {
            addIvar<Pimpl*> ("owner");
            addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));

            addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL: fromConnections:), didStartRecordingToOutputFileAtURL, "v@:@@@");
            addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL: fromConnections:), didPauseRecordingToOutputFileAtURL, "v@:@@@");
            addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL: fromConnections:), didResumeRecordingToOutputFileAtURL, "v@:@@@");
            addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL, "v@:@@@@");

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl* getOwner (id self)               { return getIvar<Pimpl*> (self, "owner"); }

    private:
        static void didStartRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didPauseRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}
    };

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
// Embeds the camera's AVCaptureView (owned by the Pimpl) in a JUCE component.
// The view is detached in the destructor; it is not released here because the
// Pimpl owns it.
struct CameraDevice::ViewerComponent : public NSViewComponent
{
ViewerComponent (CameraDevice& d)
{
JUCE_AUTORELEASEPOOL
{
setSize (640, 480);
setView (d.pimpl->captureView);
}
}
~ViewerComponent()
{
setView (nil);
}
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
// Recordings made by AVCaptureMovieFileOutput are QuickTime movie containers,
// so files written by startRecordingToFile() should use this extension.
String CameraDevice::getFileExtension()
{
return ".mov";
}

+ 0
- 353
modules/juce_video/native/juce_mac_CameraDevice.mm View File

@@ -1,353 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
// NOTE(review): this guard belongs to the old QTKit implementation, which is
// the deleted side of this commit's diff — the AVFoundation version replaces
// it and no longer requires the JUCE_QUICKTIME flag.
#if ! JUCE_QUICKTIME
#error "To support cameras in OSX you'll need to enable the JUCE_QUICKTIME flag"
#endif
// Defined elsewhere in the module: converts a Core Image frame into a JUCE Image.
extern Image juce_createImageFromCIImage (CIImage*, int w, int h);
// Legacy QTKit implementation of CameraDevice (removed by this commit in
// favour of the AVFoundation version). Uses manual retain/release throughout.
struct CameraDevice::Pimpl
{
// Opens the capture device at the given index, wires up the delegate, and
// starts the session; on any failure, openingError holds the description.
Pimpl (const String&, const int index, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
bool useHighQuality)
: input (nil),
audioDevice (nil),
audioInput (nil),
session (nil),
fileOutput (nil),
imageOutput (nil),
firstPresentationTime (0),
averageTimeOffset (0),
isRecording (false)
{
JUCE_AUTORELEASEPOOL
{
session = [[QTCaptureSession alloc] init];
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
device = (QTCaptureDevice*) [devs objectAtIndex: index];
static DelegateClass cls;
callbackDelegate = [cls.createInstance() init];
DelegateClass::setOwner (callbackDelegate, this);
NSError* err = nil;
[device retain];
[device open: &err];
if (err == nil)
{
input = [[QTCaptureDeviceInput alloc] initWithDevice: device];
// NOTE(review): audioInput is created here from the *video* device and
// is immediately replaced inside resetFile()/addDefaultAudioInput() —
// looks redundant; confirm before reusing this pattern.
audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: device];
[session addInput: input error: &err];
if (err == nil)
{
resetFile();
// High quality uses a full decompressed-video output; otherwise the
// cheaper preview output is used for listener frames.
imageOutput = useHighQuality ? [[QTCaptureDecompressedVideoOutput alloc] init] :
[[QTCaptureVideoPreviewOutput alloc] init];
[imageOutput setDelegate: callbackDelegate];
// NOTE(review): err cannot have changed since the previous check —
// this condition is always true at this point.
if (err == nil)
{
[session startRunning];
return;
}
}
}
openingError = nsStringToJuce ([err description]);
DBG (openingError);
}
}
~Pimpl()
{
[session stopRunning];
[session removeOutput: imageOutput];
[session release];
[input release];
[device release];
[audioDevice release];
[audioInput release];
[fileOutput release];
[imageOutput release];
[callbackDelegate release];
}
bool openedOk() const noexcept { return openingError.isEmpty(); }
// Discards the current movie-file output and audio input and creates a
// fresh file output, ready for the next recording.
void resetFile()
{
[fileOutput recordToOutputFileURL: nil];
[session removeOutput: fileOutput];
[fileOutput release];
fileOutput = [[QTCaptureMovieFileOutput alloc] init];
[session removeInput: audioInput];
[audioInput release];
audioInput = nil;
[audioDevice release];
audioDevice = nil;
[fileOutput setDelegate: callbackDelegate];
}
// Attaches the system default audio input to the session (if one opens).
void addDefaultAudioInput()
{
NSError* err = nil;
audioDevice = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeSound];
if ([audioDevice open: &err])
[audioDevice retain];
else
audioDevice = nil;
if (audioDevice != nil)
{
audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: audioDevice];
[session addInput: audioInput error: &err];
}
}
// Starts recording to the given file, choosing H.264 compression options
// based on the quality argument (>= 1 selects the larger 480p preset).
void startRecordingToFile (const File& file, int quality)
{
stopRecording();
firstPresentationTime = 0;
file.deleteFile();
// In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
// out wrong, so we'll put some audio in there too..,
addDefaultAudioInput();
[session addOutput: fileOutput error: nil];
NSEnumerator* connectionEnumerator = [[fileOutput connections] objectEnumerator];
for (;;)
{
QTCaptureConnection* connection = [connectionEnumerator nextObject];
if (connection == nil)
break;
QTCompressionOptions* options = nil;
NSString* mediaType = [connection mediaType];
if ([mediaType isEqualToString: QTMediaTypeVideo])
options = [QTCompressionOptions compressionOptionsWithIdentifier:
quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
: nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
else if ([mediaType isEqualToString: QTMediaTypeSound])
options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];
[fileOutput setCompressionOptions: options forConnection: connection];
}
[fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
isRecording = true;
}
void stopRecording()
{
if (isRecording)
{
resetFile();
isRecording = false;
}
}
// Returns the wall-clock time of the first recorded frame, corrected by the
// smoothed host-time offset computed in captureBuffer().
Time getTimeOfFirstRecordedFrame() const
{
return firstPresentationTime != 0 ? Time (firstPresentationTime + averageTimeOffset)
: Time();
}
// The image output is only attached to the session while listeners exist.
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
if (listeners.size() == 0)
[session addOutput: imageOutput error: nil];
listeners.addIfNotAlreadyThere (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeFirstMatchingValue (listenerToRemove);
if (listeners.size() == 0)
[session removeOutput: imageOutput];
}
// Converts the frame to a JUCE Image and delivers it to every listener.
void callListeners (CIImage* frame, int w, int h)
{
Image image (juce_createImageFromCIImage (frame, w, h));
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
{
CameraDevice::Listener* const l = listeners[i];
if (l != nullptr)
l->imageReceived (image);
}
}
// Tracks the offset between sample-buffer presentation time and wall-clock
// time, keeping a slow-moving average (120/8 weighting) after the first frame.
void captureBuffer (QTSampleBuffer* sampleBuffer)
{
const Time now (Time::getCurrentTime());
NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
int64 presentationTime = (hosttime != nil)
? ((int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000 + 40)
: (([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale + 50);
const int64 timeDiff = now.toMilliseconds() - presentationTime;
if (firstPresentationTime == 0)
{
firstPresentationTime = presentationTime;
averageTimeOffset = timeDiff;
}
else
{
averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
}
}
static StringArray getAvailableDevices()
{
StringArray results;
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
for (int i = 0; i < (int) [devs count]; ++i)
{
QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
results.add (nsStringToJuce ([dev localizedDisplayName]));
}
return results;
}
QTCaptureDevice* device;
QTCaptureDevice* audioDevice;
QTCaptureDeviceInput* input;
QTCaptureDeviceInput* audioInput;
QTCaptureSession* session;
QTCaptureMovieFileOutput* fileOutput;
QTCaptureOutput* imageOutput;
NSObject* callbackDelegate;
String openingError;
int64 firstPresentationTime, averageTimeOffset;
bool isRecording;
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
private:
//==============================================================================
// ObjC delegate receiving video-frame and sample-buffer callbacks from QTKit.
struct DelegateClass : public ObjCClass<NSObject>
{
DelegateClass() : ObjCClass<NSObject> ("JUCEAppDelegate_")
{
addIvar<Pimpl*> ("owner");
addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");
// NOTE(review): this selector is bound to didOutputVideoFrame rather than
// didOutputSampleBuffer (defined below but apparently never registered) —
// looks like a copy/paste slip in the original code.
addMethod (@selector (captureOutput:didOutputSampleBuffer:fromConnection:),
didOutputVideoFrame, "v@:@@@");
registerClass();
}
static void setOwner (id self, Pimpl* owner) { object_setInstanceVariable (self, "owner", owner); }
static Pimpl* getOwner (id self) { return getIvar<Pimpl*> (self, "owner"); }
private:
static void didOutputVideoFrame (id self, SEL, QTCaptureOutput*, CVImageBufferRef videoFrame,
QTSampleBuffer*, QTCaptureConnection*)
{
Pimpl* const internal = getOwner (self);
if (internal->listeners.size() > 0)
{
JUCE_AUTORELEASEPOOL
{
internal->callListeners ([CIImage imageWithCVImageBuffer: videoFrame],
(int) CVPixelBufferGetWidth (videoFrame),
(int) CVPixelBufferGetHeight (videoFrame));
}
}
}
static void didOutputSampleBuffer (id self, SEL, QTCaptureFileOutput*, QTSampleBuffer* sampleBuffer, QTCaptureConnection*)
{
getOwner (self)->captureBuffer (sampleBuffer);
}
};
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
// Legacy QTKit viewer (removed by this commit): creates its own QTCaptureView,
// attaches it to the device's session, and owns/releases the view itself.
struct CameraDevice::ViewerComponent : public NSViewComponent
{
ViewerComponent (CameraDevice& d)
{
JUCE_AUTORELEASEPOOL
{
captureView = [[QTCaptureView alloc] init];
[captureView setCaptureSession: d.pimpl->session];
setSize (640, 480);
setView (captureView);
}
}
~ViewerComponent()
{
setView (nil);
[captureView setCaptureSession: nil];
[captureView release];
}
QTCaptureView* captureView;
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
// QTCaptureMovieFileOutput writes QuickTime movie containers.
String CameraDevice::getFileExtension()
{
return ".mov";
}

+ 1
- 3
modules/juce_video/native/juce_mac_MovieComponent.mm View File

@@ -32,9 +32,7 @@ struct MovieComponent::Pimpl
{
close();
NSString* videoFile = [NSString stringWithUTF8String: newPath.toUTF8()];
NSURL* url = [NSURL fileURLWithPath: videoFile];
NSURL* url = createNSURLFromFile (newPath);
AVAsset* asset = [AVAsset assetWithURL: url];
duration = CMTimeGetSeconds (asset.duration);
nativeSize = [[[asset tracksWithMediaType: AVMediaTypeVideo] objectAtIndex: 0] naturalSize];


+ 185
- 0
modules/juce_video/native/juce_mac_Video.h View File

@@ -0,0 +1,185 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
//==============================================================================
// macOS implementation of VideoComponent: hosts an AVPlayerView inside an
// NSViewComponent and drives playback through its AVPlayer. All accessors are
// nil-safe: ObjC messages to a nil player simply return zero values.
struct VideoComponent::Pimpl : public NSViewComponent
{
Pimpl()
{
setVisible (true);
AVPlayerView* view = [[AVPlayerView alloc] init];
setView (view);
// NOTE(review): at this point the view has not been added to a superview,
// so [view superview] is nil here — confirm this call is intentional.
[view setNextResponder: [view superview]];
[view setWantsLayer: YES];
// setView() retains the view, so the local reference can be released.
[view release];
}
~Pimpl()
{
close();
setView (nil);
}
// Loads a local file; on success remembers it in currentFile.
Result load (const File& file)
{
auto r = load (createNSURLFromFile (file));
if (r.wasOk())
currentFile = file;
return r;
}
// Loads a remote URL; on success remembers it in currentURL.
Result load (const URL& url)
{
Result r = load ([NSURL URLWithString: juceStringToNS (url.toString (true))]);
if (r.wasOk())
currentURL = url;
return r;
}
// Shared loader: closes any current movie and attaches a new AVPlayer.
Result load (NSURL* url)
{
if (url != nil)
{
close();
if (AVPlayer* player = [AVPlayer playerWithURL: url])
{
[getAVPlayerView() setPlayer: player];
return Result::ok();
}
}
return Result::fail ("Couldn't open movie");
}
void close()
{
stop();
[getAVPlayerView() setPlayer: nil];
currentFile = File();
currentURL = URL();
}
bool isOpen() const
{
return getAVPlayer() != nil;
}
// A non-zero playback rate means the movie is playing.
bool isPlaying() const
{
return getSpeed() != 0;
}
void play()
{
[getAVPlayer() play];
}
void stop()
{
[getAVPlayer() pause];
}
// Seeks to the given time in seconds, using a 100000-per-second timescale.
void setPosition (double newPosition)
{
if (AVPlayer* p = getAVPlayer())
{
CMTime t = { (CMTimeValue) (100000.0 * newPosition),
(CMTimeScale) 100000, kCMTimeFlags_Valid };
[p seekToTime: t];
}
}
double getPosition() const
{
if (AVPlayer* p = getAVPlayer())
return toSeconds ([p currentTime]);
return 0.0;
}
// NOTE(review): AVPlayer's rate is a float, so the double argument here is
// silently narrowed.
void setSpeed (double newSpeed)
{
[getAVPlayer() setRate: newSpeed];
}
double getSpeed() const
{
if (AVPlayer* p = getAVPlayer())
return [p rate];
return 0.0;
}
// Returns the movie's presentation size, or an empty rectangle if closed.
Rectangle<int> getNativeSize() const
{
if (AVPlayer* player = getAVPlayer())
{
CGSize s = [[player currentItem] presentationSize];
return Rectangle<int> ((int) s.width, (int) s.height);
}
return Rectangle<int>();
}
double getDuration() const
{
if (AVPlayer* player = getAVPlayer())
return toSeconds ([[player currentItem] duration]);
return 0.0;
}
void setVolume (float newVolume)
{
[getAVPlayer() setVolume: newVolume];
}
float getVolume() const
{
if (AVPlayer* p = getAVPlayer())
return [p volume];
return 0.0f;
}
File currentFile;
URL currentURL;
private:
AVPlayerView* getAVPlayerView() const { return (AVPlayerView*) getView(); }
AVPlayer* getAVPlayer() const { return [getAVPlayerView() player]; }
// Converts a CMTime to seconds, guarding against a zero timescale.
static double toSeconds (const CMTime& t) noexcept
{
return t.timescale != 0 ? (t.value / (double) t.timescale) : 0.0;
}
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Pimpl)
};

modules/juce_video/native/juce_win32_CameraDevice.cpp → modules/juce_video/native/juce_win32_CameraDevice.h View File

@@ -494,9 +494,8 @@ struct CameraDevice::Pimpl : public ChangeBroadcaster
return devs;
}
class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
struct GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
{
public:
GrabberCallback (Pimpl& p)
: ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}
@@ -516,7 +515,6 @@ struct CameraDevice::Pimpl : public ChangeBroadcaster
return S_OK;
}
private:
Pimpl& owner;
JUCE_DECLARE_NON_COPYABLE (GrabberCallback)

+ 0
- 928
modules/juce_video/native/juce_win32_DirectShowComponent.cpp View File

@@ -1,928 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
// Helper renderers for the (now removed) DirectShowComponent: an abstract
// VideoRenderer interface with VMR-7 (windowless) and, when Media Foundation
// is available, EVR implementations.
namespace DirectShowHelpers
{
// Returns true if a DirectShow filter graph can be created on this system.
bool checkDShowAvailability()
{
ComSmartPtr<IGraphBuilder> graph;
return SUCCEEDED (graph.CoCreateInstance (CLSID_FilterGraph));
}
//==============================================================================
// Abstract interface over the concrete video renderer (VMR-7 or EVR).
class VideoRenderer
{
public:
VideoRenderer() {}
virtual ~VideoRenderer() {}
virtual HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd) = 0;
virtual void setVideoWindow (HWND hwnd) = 0;
virtual void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight) = 0;
virtual void repaintVideo (HWND hwnd, HDC hdc) = 0;
virtual void displayModeChanged() = 0;
virtual HRESULT getVideoSize (long& videoWidth, long& videoHeight) = 0;
};
//==============================================================================
// Video Mixing Renderer 7 in windowless mode, clipped to the given HWND.
class VMR7 : public VideoRenderer
{
public:
VMR7() {}
// Builds the VMR-7 filter, adds it to the graph, and configures windowless
// rendering with letterbox aspect handling; each step only runs if the
// previous one succeeded.
HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr<IVMRFilterConfig> filterConfig;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_VideoMixingRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"VMR-7");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (filterConfig);
if (SUCCEEDED (hr)) hr = filterConfig->SetRenderingMode (VMRMode_Windowless);
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (windowlessControl);
if (SUCCEEDED (hr)) hr = windowlessControl->SetVideoClippingWindow (hwnd);
if (SUCCEEDED (hr)) hr = windowlessControl->SetAspectRatioMode (VMR_ARMODE_LETTER_BOX);
return hr;
}
void setVideoWindow (HWND hwnd)
{
windowlessControl->SetVideoClippingWindow (hwnd);
}
// Stretches the native video rectangle into the window's client area.
void setVideoPosition (HWND hwnd, long videoWidth, long videoHeight)
{
RECT src, dest;
SetRect (&src, 0, 0, videoWidth, videoHeight);
GetClientRect (hwnd, &dest);
windowlessControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND hwnd, HDC hdc)
{
windowlessControl->RepaintVideo (hwnd, hdc);
}
void displayModeChanged()
{
windowlessControl->DisplayModeChanged();
}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
return windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);
}
private:
ComSmartPtr<IVMRWindowlessControl> windowlessControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VMR7)
};
//==============================================================================
#if JUCE_MEDIAFOUNDATION
// Enhanced Video Renderer (Vista+), accessed through IMFVideoDisplayControl.
class EVR : public VideoRenderer
{
public:
EVR() {}
HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd)
{
ComSmartPtr<IMFGetService> getService;
HRESULT hr = baseFilter.CoCreateInstance (CLSID_EnhancedVideoRenderer);
if (SUCCEEDED (hr)) hr = graphBuilder->AddFilter (baseFilter, L"EVR");
if (SUCCEEDED (hr)) hr = baseFilter.QueryInterface (getService);
if (SUCCEEDED (hr)) hr = getService->GetService (MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl,
(LPVOID*) videoDisplayControl.resetAndGetPointerAddress());
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetVideoWindow (hwnd);
if (SUCCEEDED (hr)) hr = videoDisplayControl->SetAspectRatioMode (MFVideoARMode_PreservePicture);
return hr;
}
void setVideoWindow (HWND hwnd)
{
videoDisplayControl->SetVideoWindow (hwnd);
}
// EVR takes a normalized source rectangle (whole frame) into the client area.
void setVideoPosition (HWND hwnd, long /*videoWidth*/, long /*videoHeight*/)
{
const MFVideoNormalizedRect src = { 0.0f, 0.0f, 1.0f, 1.0f };
RECT dest;
GetClientRect (hwnd, &dest);
videoDisplayControl->SetVideoPosition (&src, &dest);
}
void repaintVideo (HWND /*hwnd*/, HDC /*hdc*/)
{
videoDisplayControl->RepaintVideo();
}
void displayModeChanged() {}
HRESULT getVideoSize (long& videoWidth, long& videoHeight)
{
SIZE sz;
HRESULT hr = videoDisplayControl->GetNativeVideoSize (&sz, nullptr);
videoWidth = sz.cx;
videoHeight = sz.cy;
return hr;
}
private:
ComSmartPtr<IMFVideoDisplayControl> videoDisplayControl;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (EVR)
};
#endif
}
//==============================================================================
class DirectShowComponent::DirectShowContext : public AsyncUpdater
{
public:
DirectShowContext (DirectShowComponent& c, VideoRendererType renderType)
: component (c),
hwnd (0),
hdc (0),
state (uninitializedState),
hasVideo (false),
videoWidth (0),
videoHeight (0),
type (renderType),
needToUpdateViewport (true),
needToRecreateNativeWindow (false)
{
CoInitialize (0);
if (type == dshowDefault)
{
type = dshowVMR7;
#if JUCE_MEDIAFOUNDATION
if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
type = dshowEVR;
#endif
}
}
~DirectShowContext()
{
release();
CoUninitialize();
}
//==============================================================================
void updateWindowPosition (const Rectangle<int>& newBounds)
{
nativeWindow->setWindowPosition (newBounds);
}
void showWindow (bool shouldBeVisible)
{
nativeWindow->showWindow (shouldBeVisible);
}
//==============================================================================
void repaint()
{
if (hasVideo)
videoRenderer->repaintVideo (nativeWindow->getHandle(), nativeWindow->getContext());
}
void updateVideoPosition()
{
if (hasVideo)
videoRenderer->setVideoPosition (nativeWindow->getHandle(), videoWidth, videoHeight);
}
void displayResolutionChanged()
{
if (hasVideo)
videoRenderer->displayModeChanged();
}
//==============================================================================
void peerChanged()
{
deleteNativeWindow();
mediaEvent->SetNotifyWindow (0, 0, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (nullptr);
createNativeWindow();
mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);
if (videoRenderer != nullptr)
videoRenderer->setVideoWindow (hwnd);
}
void handleAsyncUpdate() override
{
if (hwnd != 0)
{
if (needToRecreateNativeWindow)
{
peerChanged();
needToRecreateNativeWindow = false;
}
if (needToUpdateViewport)
{
updateVideoPosition();
needToUpdateViewport = false;
}
repaint();
}
else
{
triggerAsyncUpdate();
}
}
void recreateNativeWindowAsync()
{
needToRecreateNativeWindow = true;
triggerAsyncUpdate();
}
void updateContextPosition()
{
needToUpdateViewport = true;
triggerAsyncUpdate();
}
//==============================================================================
// Builds a DirectShow filter graph for the given file or URL and leaves it
// paused, ready to play. Returns false (after tearing everything down) if
// any step fails.
bool loadFile (const String& fileOrURLPath)
{
    // must not be called while a graph is already open
    jassert (state == uninitializedState);

    // DirectShow renders into a native child HWND, so that must exist first
    if (! createNativeWindow())
        return false;

    HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);

    // basic playback interfaces
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaControl);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaPosition);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaEvent);
    if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (basicAudio);

    // video renderer interface
    if (SUCCEEDED (hr))
    {
       #if JUCE_MEDIAFOUNDATION
        if (type == dshowEVR)
            videoRenderer = new DirectShowHelpers::EVR();
        else
       #endif
            videoRenderer = new DirectShowHelpers::VMR7();

        hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);
    }

    // build filter graph
    if (SUCCEEDED (hr))
    {
        hr = graphBuilder->RenderFile (fileOrURLPath.toWideCharPointer(), nullptr);

        if (FAILED (hr))
        {
            // Annoyingly, if we don't run the msg loop between failing and deleting the window, the
            // whole OS message-dispatch system gets itself into a state, and refuses to deliver any
            // more messages for the whole app. (That's what happens in Win7, anyway)
            MessageManager::getInstance()->runDispatchLoopUntil (200);
        }
    }

    // remove video renderer if not connected (no video)
    if (SUCCEEDED (hr))
    {
        if (isRendererConnected())
        {
            hasVideo = true;
            hr = videoRenderer->getVideoSize (videoWidth, videoHeight);
        }
        else
        {
            // audio-only media: drop the unused video renderer from the graph
            hasVideo = false;
            graphBuilder->RemoveFilter (baseFilter);
            videoRenderer = nullptr;
            baseFilter = nullptr;
        }
    }

    // set window to receive events
    if (SUCCEEDED (hr))
        hr = mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);

    if (SUCCEEDED (hr))
    {
        state = stoppedState;
        pause();
        return true;
    }

    // Note that if you're trying to open a file and this method fails, you may
    // just need to install a suitable codec. It seems that by default DirectShow
    // doesn't support a very good range of formats.
    release();
    return false;
}
// Tears down the filter graph, releases every COM interface and destroys the
// native window, returning the context to its uninitialized state.
// Safe to call when nothing is loaded.
void release()
{
    if (mediaControl != nullptr)
        mediaControl->Stop();

    // stop event delivery before the window goes away
    if (mediaEvent != nullptr)
        mediaEvent->SetNotifyWindow (0, 0, 0);

    if (videoRenderer != nullptr)
        videoRenderer->setVideoWindow (0);

    hasVideo = false;
    videoRenderer = nullptr;

    // release the interfaces before the graph builder that created them
    baseFilter = nullptr;
    basicAudio = nullptr;
    mediaEvent = nullptr;
    mediaPosition = nullptr;
    mediaControl = nullptr;
    graphBuilder = nullptr;

    state = uninitializedState;
    videoWidth = 0;
    videoHeight = 0;

    if (nativeWindow != nullptr)
        deleteNativeWindow();
}
// Drains the DirectShow event queue (invoked when our private graphEventID
// window message arrives) and reacts to repaint/completion/abort events.
void graphEventProc()
{
    // Initialise these: GetEvent is only guaranteed to fill them in on
    // success, so uninitialised values must never be read afterwards.
    LONG ec = 0;
    LONG_PTR p1 = 0, p2 = 0;

    jassert (mediaEvent != nullptr);

    while (SUCCEEDED (mediaEvent->GetEvent (&ec, &p1, &p2, 0)))
    {
        // the event params must always be freed, whatever the event code was
        mediaEvent->FreeEventParams (ec, p1, p2);

        switch (ec)
        {
            case EC_REPAINT:
                component.repaint();
                break;

            case EC_COMPLETE:
                // end of media: either loop back to the start, or stop
                if (component.isLooping())
                    component.goToStart();
                else
                    component.stop();
                break;

            case EC_USERABORT:
            case EC_ERRORABORT:
            case EC_ERRORABORTEX:
                component.closeMovie();
                break;

            default:
                break;
        }
    }
}
//==============================================================================
// Starts the graph playing.
void run()
{
    mediaControl->Run();
    state = runningState;
}

// Halts the graph completely.
void stop()
{
    mediaControl->Stop();
    state = stoppedState;
}

// Pauses the graph, keeping the current position.
void pause()
{
    mediaControl->Pause();
    state = pausedState;
}
//==============================================================================
// Simple state/property accessors.
bool isInitialised() const noexcept  { return state != uninitializedState; }
bool isRunning() const noexcept      { return state == runningState; }
bool isPaused() const noexcept       { return state == pausedState; }
bool isStopped() const noexcept      { return state == stoppedState; }
bool containsVideo() const noexcept  { return hasVideo; }
int getVideoWidth() const noexcept   { return (int) videoWidth; }
int getVideoHeight() const noexcept  { return (int) videoHeight; }
//==============================================================================
// Returns the total media length in seconds (0 if the COM call fails).
double getDuration() const
{
    REFTIME duration = 0;  // initialised so a failed call can't return garbage
    mediaPosition->get_Duration (&duration);
    return duration;
}

// Returns the current playback position in seconds (0 if the COM call fails).
double getPosition() const
{
    REFTIME seconds = 0;   // initialised so a failed call can't return garbage
    mediaPosition->get_CurrentPosition (&seconds);
    return seconds;
}
//==============================================================================
// Speed is a multiple of normal rate (1.0 = normal); position is in seconds.
void setSpeed (const float newSpeed)       { mediaPosition->put_Rate (newSpeed); }
void setPosition (const double seconds)    { mediaPosition->put_CurrentPosition (seconds); }
void setVolume (const float newVolume)     { basicAudio->put_Volume (convertToDShowVolume (newVolume)); }
// in DirectShow, full volume is 0, silence is -10000
static long convertToDShowVolume (const float vol) noexcept
{
    if (vol <= 0.0f)
        return -10000;

    if (vol >= 1.0f)
        return 0;

    return roundToInt ((vol * 10000.0f) - 10000.0f);
}
// Converts the DirectShow volume back into a 0..1.0 gain.
float getVolume() const
{
    long volume = -10000;  // default to silence (0.0f) if the COM call fails
    basicAudio->get_Volume (&volume);
    return (volume + 10000) / 10000.0f;
}
private:
    //==============================================================================
    // Private window-message ID registered with IMediaEventEx::SetNotifyWindow.
    enum { graphEventID = WM_APP + 0x43f0 };

    DirectShowComponent& component;   // the component this context renders for
    HWND hwnd;                        // native child window used by the renderer
    HDC hdc;                          // device context of that window

    enum State { uninitializedState, runningState, pausedState, stoppedState };
    State state;

    bool hasVideo;                    // true if the loaded media has a video stream
    long videoWidth, videoHeight;     // native frame size reported by the renderer

    VideoRendererType type;           // which renderer (default/VMR7/EVR) to create

    // DirectShow COM interfaces - all nulled out again in release()
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IMediaPosition> mediaPosition;
    ComSmartPtr<IMediaEventEx> mediaEvent;
    ComSmartPtr<IBasicAudio> basicAudio;
    ComSmartPtr<IBaseFilter> baseFilter;

    ScopedPointer<DirectShowHelpers::VideoRenderer> videoRenderer;

    // deferred-work flags consumed by the async update handler
    bool needToUpdateViewport, needToRecreateNativeWindow;
//==============================================================================
// Registers (once, on first use) a minimal WNDCLASS for the hidden child
// windows that DirectShow renders into, and unregisters it at shutdown.
class NativeWindowClass   : private DeletedAtShutdown
{
public:
    bool isRegistered() const noexcept              { return atom != 0; }
    LPCTSTR getWindowClassName() const noexcept     { return (LPCTSTR) MAKELONG (atom, 0); }

    juce_DeclareSingleton_SingleThreaded_Minimal (NativeWindowClass)

private:
    NativeWindowClass()
        : atom (0)
    {
        // use a unique class name so multiple library instances can coexist
        String windowClassName ("JUCE_DIRECTSHOW_");
        windowClassName << (int) (Time::currentTimeMillis() & 0x7fffffff);

        HINSTANCE moduleHandle = (HINSTANCE) Process::getCurrentModuleInstanceHandle();

        TCHAR moduleFile [1024] = { 0 };
        GetModuleFileName (moduleHandle, moduleFile, 1024);

        WNDCLASSEX wcex = { 0 };
        wcex.cbSize         = sizeof (wcex);
        wcex.style          = CS_OWNDC;   // each window gets its own private DC
        wcex.lpfnWndProc    = (WNDPROC) wndProc;
        wcex.lpszClassName  = windowClassName.toWideCharPointer();
        wcex.hInstance      = moduleHandle;

        atom = RegisterClassEx (&wcex);
        jassert (atom != 0);
    }

    ~NativeWindowClass()
    {
        if (atom != 0)
            UnregisterClass (getWindowClassName(), (HINSTANCE) Process::getCurrentModuleInstanceHandle());

        clearSingletonInstance();
    }

    // Routes messages to the DirectShowContext stored in the window's user data.
    static LRESULT CALLBACK wndProc (HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
    {
        if (DirectShowContext* const c = (DirectShowContext*) GetWindowLongPtr (hwnd, GWLP_USERDATA))
        {
            switch (msg)
            {
                case WM_NCHITTEST:      return HTTRANSPARENT;   // let hit-tests fall through to the component
                case WM_ERASEBKGND:     return 1;               // background is painted by the video renderer
                case WM_DISPLAYCHANGE:  c->displayResolutionChanged(); break;
                case graphEventID:      c->graphEventProc(); return 0;
                default:                break;
            }
        }

        return DefWindowProc (hwnd, msg, wParam, lParam);
    }

    ATOM atom;

    JUCE_DECLARE_NON_COPYABLE (NativeWindowClass)
};
//==============================================================================
// RAII wrapper around the hidden native child window the DirectShow renderer
// draws into. The owning DirectShowContext is stored in the window's
// GWLP_USERDATA so that wndProc can route messages back to it.
class NativeWindow
{
public:
    NativeWindow (HWND parentToAddTo, void* const userData)
        : hwnd (0), hdc (0)
    {
        NativeWindowClass* const wc = NativeWindowClass::getInstance();

        if (wc->isRegistered())
        {
            DWORD exstyle = 0;
            DWORD windowType = WS_CHILD;

            hwnd = CreateWindowEx (exstyle, wc->getWindowClassName(),
                                   L"", windowType, 0, 0, 0, 0, parentToAddTo, 0,
                                   (HINSTANCE) Process::getCurrentModuleInstanceHandle(), 0);

            if (hwnd != 0)
            {
                hdc = GetDC (hwnd);   // class uses CS_OWNDC, so this DC is private to the window
                SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) userData);
            }
        }

        jassert (hwnd != 0);
    }

    ~NativeWindow()
    {
        if (hwnd != 0)
        {
            // clear the back-pointer before destroying, so wndProc can't use it
            SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) 0);
            DestroyWindow (hwnd);
        }
    }

    HWND getHandle() const noexcept   { return hwnd; }
    HDC getContext() const noexcept   { return hdc; }

    void setWindowPosition (const Rectangle<int>& newBounds)
    {
        SetWindowPos (hwnd, 0, newBounds.getX(), newBounds.getY(),
                      newBounds.getWidth(), newBounds.getHeight(),
                      SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOOWNERZORDER);
    }

    void showWindow (const bool shouldBeVisible)
    {
        ShowWindow (hwnd, shouldBeVisible ? SW_SHOWNA : SW_HIDE);
    }

private:
    HWND hwnd;
    HDC hdc;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (NativeWindow)
};

ScopedPointer<NativeWindow> nativeWindow;

//==============================================================================
// Creates the native child window inside the component's top-level peer.
// Returns false if there's no peer yet, or the window couldn't be created.
bool createNativeWindow()
{
    jassert (nativeWindow == nullptr);

    if (ComponentPeer* const topLevelPeer = component.getTopLevelComponent()->getPeer())
    {
        nativeWindow = new NativeWindow ((HWND) topLevelPeer->getNativeHandle(), this);

        hwnd = nativeWindow->getHandle();

        if (hwnd != 0)
        {
            hdc = GetDC (hwnd);
            component.updateContextPosition();
            component.showContext (component.isShowing());
            return true;
        }
        else
        {
            nativeWindow = nullptr;
        }
    }
    else
    {
        // the component must be on-screen before a movie can be loaded
        jassertfalse;
    }

    return false;
}
// Destroys the native window, first releasing the DC obtained in createNativeWindow().
void deleteNativeWindow()
{
    jassert (nativeWindow != nullptr);
    ReleaseDC (hwnd, hdc);
    hwnd = 0;
    hdc = 0;
    nativeWindow = nullptr;
}
// Returns true if the video renderer's input pin ended up connected when the
// graph was built, i.e. the loaded media actually contains a video stream.
bool isRendererConnected()
{
    ComSmartPtr<IEnumPins> enumPins;

    HRESULT hr = baseFilter->EnumPins (enumPins.resetAndGetPointerAddress());

    if (SUCCEEDED (hr))
        hr = enumPins->Reset();

    ComSmartPtr<IPin> pin;

    while (SUCCEEDED (hr)
            && enumPins->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
    {
        ComSmartPtr<IPin> otherPin;

        hr = pin->ConnectedTo (otherPin.resetAndGetPointerAddress());

        if (SUCCEEDED (hr))
        {
            PIN_DIRECTION direction;
            hr = pin->QueryDirection (&direction);

            if (SUCCEEDED (hr) && direction == PINDIR_INPUT)
                return true;
        }
        else if (hr == VFW_E_NOT_CONNECTED)
        {
            hr = S_OK;   // an unconnected pin isn't an error - keep scanning
        }
    }

    return false;
}

JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowContext)
};
juce_ImplementSingleton_SingleThreaded (DirectShowComponent::DirectShowContext::NativeWindowClass)
//==============================================================================
// Watches the DirectShowComponent for moves/resizes, peer changes and
// visibility changes, keeping the native video window in sync with them.
class DirectShowComponent::DirectShowComponentWatcher   : public ComponentMovementWatcher
{
public:
    DirectShowComponentWatcher (DirectShowComponent* const c)
        : ComponentMovementWatcher (c),
          owner (c)
    {
    }

    void componentMovedOrResized (bool /*wasMoved*/, bool /*wasResized*/) override
    {
        if (owner->videoLoaded)
            owner->updateContextPosition();
    }

    void componentPeerChanged() override
    {
        // the native window is a child of the peer, so it must be recreated
        if (owner->videoLoaded)
            owner->recreateNativeWindowAsync();
    }

    void componentVisibilityChanged() override
    {
        if (owner->videoLoaded)
            owner->showContext (owner->isShowing());
    }

private:
    DirectShowComponent* const owner;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowComponentWatcher)
};
//==============================================================================
DirectShowComponent::DirectShowComponent (VideoRendererType type)
    : videoLoaded (false),
      looping (false)
{
    // the component fills every pixel itself (video frame or grey fallback)
    setOpaque (true);
    context = new DirectShowContext (*this, type);
    componentWatcher = new DirectShowComponentWatcher (this);
}

DirectShowComponent::~DirectShowComponent()
{
    // delete the watcher first so no movement callbacks arrive mid-teardown
    componentWatcher = nullptr;
}
bool DirectShowComponent::isDirectShowAvailable()
{
    // checked once and cached for the lifetime of the process
    static bool isDSAvailable = DirectShowHelpers::checkDShowAvailability();
    return isDSAvailable;
}

void DirectShowComponent::recreateNativeWindowAsync()
{
    context->recreateNativeWindowAsync();
    repaint();
}
// Keeps the native video window aligned with this component's on-screen area.
void DirectShowComponent::updateContextPosition()
{
    context->updateContextPosition();

    if (getWidth() > 0 && getHeight() > 0)
        if (ComponentPeer* peer = getTopLevelComponent()->getPeer())
            context->updateWindowPosition (peer->getAreaCoveredBy (*this));
}

void DirectShowComponent::showContext (const bool shouldBeVisible)
{
    context->showWindow (shouldBeVisible);
}

void DirectShowComponent::paint (Graphics& g)
{
    if (videoLoaded)
        context->handleUpdateNowIfNeeded();   // flush any pending reposition/repaint
    else
        g.fillAll (Colours::grey);            // placeholder when nothing is loaded
}
//==============================================================================
// Closes any current movie, then builds a graph for the given path or URL.
// On success the path is remembered and the video viewport is initialised.
bool DirectShowComponent::loadMovie (const String& fileOrURLPath)
{
    closeMovie();

    videoLoaded = context->loadFile (fileOrURLPath);

    if (videoLoaded)
    {
        videoPath = fileOrURLPath;
        context->updateVideoPosition();
    }

    return videoLoaded;
}

bool DirectShowComponent::loadMovie (const File& videoFile)
{
    return loadMovie (videoFile.getFullPathName());
}
bool DirectShowComponent::loadMovie (const URL& videoURL)
{
    // Include any GET parameters in the string handed to DirectShow: passing
    // toString (false) would silently strip the query string, breaking URLs
    // whose parameters are significant (the newer VideoComponent code passes
    // true here for the same reason).
    return loadMovie (videoURL.toString (true));
}
// Shuts down the current graph (if any) and forgets the stored path.
void DirectShowComponent::closeMovie()
{
    if (videoLoaded)
        context->release();

    videoLoaded = false;
    videoPath.clear();
}
//==============================================================================
File DirectShowComponent::getCurrentMoviePath() const           { return videoPath; }
bool DirectShowComponent::isMovieOpen() const                   { return videoLoaded; }
double DirectShowComponent::getMovieDuration() const            { return videoLoaded ? context->getDuration() : 0.0; }
void DirectShowComponent::setLooping (const bool shouldLoop)    { looping = shouldLoop; }
bool DirectShowComponent::isLooping() const                     { return looping; }

// Returns the movie's native pixel size (0x0 when no video is loaded).
void DirectShowComponent::getMovieNormalSize (int &width, int &height) const
{
    width = context->getVideoWidth();
    height = context->getVideoHeight();
}
//==============================================================================
void DirectShowComponent::setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
RectanglePlacement placement)
{
int normalWidth, normalHeight;
getMovieNormalSize (normalWidth, normalHeight);
const Rectangle<int> normalSize (0, 0, normalWidth, normalHeight);
if (! (spaceToFitWithin.isEmpty() || normalSize.isEmpty()))
setBounds (placement.appliedTo (normalSize, spaceToFitWithin));
else
setBounds (spaceToFitWithin);
}
//==============================================================================
// Starts playback (no-op when nothing is loaded).
void DirectShowComponent::play()
{
    if (! videoLoaded)
        return;

    context->run();
}

// Pauses playback, keeping the current position (no-op when nothing is loaded).
void DirectShowComponent::stop()
{
    if (! videoLoaded)
        return;

    context->pause();
}

// True while the graph is actually running.
bool DirectShowComponent::isPlaying() const
{
    return context->isRunning();
}
// Rewinds to the beginning of the movie.
void DirectShowComponent::goToStart()
{
    setPosition (0.0);
}

// Seeks to the given time in seconds (ignored when nothing is loaded).
void DirectShowComponent::setPosition (const double seconds)
{
    if (! videoLoaded)
        return;

    context->setPosition (seconds);
}

// Current playback position in seconds (0 when nothing is loaded).
double DirectShowComponent::getPosition() const
{
    if (! videoLoaded)
        return 0.0;

    return context->getPosition();
}
// Sets the playback rate, 1.0 being normal speed (ignored when nothing is loaded).
void DirectShowComponent::setSpeed (const float newSpeed)
{
    if (! videoLoaded)
        return;

    context->setSpeed (newSpeed);
}

// Sets the audio gain, 0 = silent, 1.0 = full volume (ignored when nothing is loaded).
void DirectShowComponent::setMovieVolume (const float newVolume)
{
    if (! videoLoaded)
        return;

    context->setVolume (newVolume);
}

// Returns the current audio gain (0 when nothing is loaded).
float DirectShowComponent::getMovieVolume() const
{
    if (! videoLoaded)
        return 0.0f;

    return context->getVolume();
}

+ 896
- 0
modules/juce_video/native/juce_win32_Video.h View File

@@ -0,0 +1,896 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
namespace VideoRenderers
{
    //======================================================================
    // Abstract interface that hides the two DirectShow renderer flavours
    // (VMR-7 and the Enhanced Video Renderer) behind one API.
    struct Base
    {
        virtual ~Base() {}

        virtual HRESULT create (ComSmartPtr<IGraphBuilder>&, ComSmartPtr<IBaseFilter>&, HWND) = 0;
        virtual void setVideoWindow (HWND) = 0;
        virtual void setVideoPosition (HWND) = 0;
        virtual void repaintVideo (HWND, HDC) = 0;
        virtual void displayModeChanged() = 0;
        virtual HRESULT getVideoSize (long& videoWidth, long& videoHeight) = 0;
    };

    //======================================================================
    // Video Mixing Renderer 7, configured for windowless mode.
    struct VMR7   : public Base
    {
        VMR7() {}

        HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
                        ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd) override
        {
            ComSmartPtr<IVMRFilterConfig> filterConfig;

            HRESULT hr = baseFilter.CoCreateInstance (CLSID_VideoMixingRenderer);

            if (SUCCEEDED (hr))   hr = graphBuilder->AddFilter (baseFilter, L"VMR-7");
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (filterConfig);
            if (SUCCEEDED (hr))   hr = filterConfig->SetRenderingMode (VMRMode_Windowless);
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (windowlessControl);
            if (SUCCEEDED (hr))   hr = windowlessControl->SetVideoClippingWindow (hwnd);
            if (SUCCEEDED (hr))   hr = windowlessControl->SetAspectRatioMode (VMR_ARMODE_LETTER_BOX);

            return hr;
        }

        void setVideoWindow (HWND hwnd) override
        {
            windowlessControl->SetVideoClippingWindow (hwnd);
        }

        void setVideoPosition (HWND hwnd) override
        {
            // map the whole native frame onto the window's client area
            long videoWidth = 0, videoHeight = 0;
            windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);

            RECT src, dest;
            SetRect (&src, 0, 0, videoWidth, videoHeight);
            GetClientRect (hwnd, &dest);

            windowlessControl->SetVideoPosition (&src, &dest);
        }

        void repaintVideo (HWND hwnd, HDC hdc) override
        {
            windowlessControl->RepaintVideo (hwnd, hdc);
        }

        void displayModeChanged() override
        {
            windowlessControl->DisplayModeChanged();
        }

        HRESULT getVideoSize (long& videoWidth, long& videoHeight) override
        {
            return windowlessControl->GetNativeVideoSize (&videoWidth, &videoHeight, nullptr, nullptr);
        }

        ComSmartPtr<IVMRWindowlessControl> windowlessControl;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VMR7)
    };

    //======================================================================
    // Enhanced Video Renderer (Media Foundation based).
    struct EVR   : public Base
    {
        EVR() {}

        HRESULT create (ComSmartPtr<IGraphBuilder>& graphBuilder,
                        ComSmartPtr<IBaseFilter>& baseFilter, HWND hwnd) override
        {
            ComSmartPtr<IMFGetService> getService;

            HRESULT hr = baseFilter.CoCreateInstance (CLSID_EnhancedVideoRenderer);

            if (SUCCEEDED (hr))   hr = graphBuilder->AddFilter (baseFilter, L"EVR");
            if (SUCCEEDED (hr))   hr = baseFilter.QueryInterface (getService);
            if (SUCCEEDED (hr))   hr = getService->GetService (MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl,
                                                               (void**) videoDisplayControl.resetAndGetPointerAddress());
            if (SUCCEEDED (hr))   hr = videoDisplayControl->SetVideoWindow (hwnd);
            if (SUCCEEDED (hr))   hr = videoDisplayControl->SetAspectRatioMode (MFVideoARMode_PreservePicture);

            return hr;
        }

        void setVideoWindow (HWND hwnd) override
        {
            videoDisplayControl->SetVideoWindow (hwnd);
        }

        void setVideoPosition (HWND hwnd) override
        {
            // the source rect is normalised (0..1); the dest is in client pixels
            const MFVideoNormalizedRect src = { 0.0f, 0.0f, 1.0f, 1.0f };

            RECT dest;
            GetClientRect (hwnd, &dest);

            videoDisplayControl->SetVideoPosition (&src, &dest);
        }

        void repaintVideo (HWND, HDC) override
        {
            videoDisplayControl->RepaintVideo();
        }

        void displayModeChanged() override {}   // the EVR handles display-mode changes itself

        HRESULT getVideoSize (long& videoWidth, long& videoHeight) override
        {
            SIZE sz = { 0, 0 };
            HRESULT hr = videoDisplayControl->GetNativeVideoSize (&sz, nullptr);
            videoWidth  = sz.cx;
            videoHeight = sz.cy;
            return hr;
        }

        ComSmartPtr<IMFVideoDisplayControl> videoDisplayControl;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (EVR)
    };
};  // NOTE(review): the trailing ';' after this namespace is redundant (harmless, but could be removed)
//==============================================================================
// Windows implementation of VideoComponent: wraps a DirectShow filter graph
// that renders into a hidden native child window embedded in the peer.
struct VideoComponent::Pimpl   : public Component
{
    Pimpl() : videoLoaded (false)
    {
        setOpaque (true);
        context = new DirectShowContext (*this);
        componentWatcher = new ComponentWatcher (*this);
    }

    ~Pimpl()
    {
        close();
        context = nullptr;
        componentWatcher = nullptr;
    }

    // Loads a file path or URL string, returning a failure Result on error.
    Result loadFromString (const String& fileOrURLPath)
    {
        close();
        Result r = context->loadFile (fileOrURLPath);

        if (r.wasOk())
        {
            videoLoaded = true;
            context->updateVideoPosition();
        }

        return r;
    }

    Result load (const File& file)
    {
        Result r = loadFromString (file.getFullPathName());

        if (r.wasOk())
            currentFile = file;

        return r;
    }

    Result load (const URL& url)
    {
        // include the GET parameters - they may be significant for streams
        Result r = loadFromString (url.toString (true));

        if (r.wasOk())
            currentURL = url;

        return r;
    }

    // Shuts down the graph and forgets the current file/URL.
    void close()
    {
        stop();
        context->release();

        videoLoaded = false;
        currentFile = File();
        currentURL = URL();
    }

    bool isOpen() const
    {
        return videoLoaded;
    }

    bool isPlaying() const
    {
        return context->state == DirectShowContext::runningState;
    }

    void play()
    {
        if (videoLoaded)
            context->play();
    }

    void stop()
    {
        // pausing (rather than stopping the graph) keeps the current position
        if (videoLoaded)
            context->pause();
    }

    void setPosition (double newPosition)
    {
        if (videoLoaded)
            context->setPosition (newPosition);
    }

    double getPosition() const
    {
        return videoLoaded ? context->getPosition() : 0.0;
    }

    void setSpeed (double newSpeed)
    {
        if (videoLoaded)
            context->setSpeed (newSpeed);
    }

    Rectangle<int> getNativeSize() const
    {
        return videoLoaded ? context->getVideoSize()
                           : Rectangle<int>();
    }

    double getDuration() const
    {
        return videoLoaded ? context->getDuration() : 0.0;
    }

    void setVolume (float newVolume)
    {
        if (videoLoaded)
            context->setVolume (newVolume);
    }

    float getVolume() const
    {
        return videoLoaded ? context->getVolume() : 0.0f;
    }

    void paint (Graphics& g) override
    {
        if (videoLoaded)
            context->handleUpdateNowIfNeeded();   // flush pending reposition/repaint
        else
            g.fillAll (Colours::grey);            // placeholder when nothing is loaded
    }

    // Keeps the native child window aligned with this component's bounds.
    void updateContextPosition()
    {
        context->updateContextPosition();

        if (getWidth() > 0 && getHeight() > 0)
            if (ComponentPeer* peer = getTopLevelComponent()->getPeer())
                context->updateWindowPosition (peer->getAreaCoveredBy (*this));
    }

    void updateContextVisibility()
    {
        context->showWindow (isShowing());
    }

    void recreateNativeWindowAsync()
    {
        context->recreateNativeWindowAsync();
        repaint();
    }

    File currentFile;
    URL currentURL;

private:
    bool videoLoaded;

    //==============================================================================
    // Forwards component move/peer/visibility changes to the context.
    struct ComponentWatcher   : public ComponentMovementWatcher
    {
        ComponentWatcher (Pimpl& c)   : ComponentMovementWatcher (&c), owner (c)
        {
        }

        void componentMovedOrResized (bool, bool) override
        {
            if (owner.videoLoaded)
                owner.updateContextPosition();
        }

        void componentPeerChanged() override
        {
            // the native window is a child of the peer, so it must be rebuilt
            if (owner.videoLoaded)
                owner.recreateNativeWindowAsync();
        }

        void componentVisibilityChanged() override
        {
            if (owner.videoLoaded)
                owner.updateContextVisibility();
        }

        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ComponentWatcher)
    };

    ScopedPointer<ComponentWatcher> componentWatcher;

    //======================================================================
    // Owns the filter graph, the COM playback interfaces and the hidden
    // native window. Viewport updates are coalesced via AsyncUpdater.
    struct DirectShowContext   : public AsyncUpdater
    {
        DirectShowContext (Pimpl& c)
            : state (uninitializedState),   // listed in declaration order (state precedes the private members)
              component (c), hwnd(), hdc(),
              hasVideo (false),
              needToUpdateViewport (true),
              needToRecreateNativeWindow (false)
        {
            CoInitialize (0);
        }

        ~DirectShowContext()
        {
            release();
            CoUninitialize();
        }

        //======================================================================
        void updateWindowPosition (const Rectangle<int>& newBounds)
        {
            nativeWindow->setWindowPosition (newBounds);
        }

        void showWindow (bool shouldBeVisible)
        {
            nativeWindow->showWindow (shouldBeVisible);
        }

        //======================================================================
        void repaint()
        {
            if (hasVideo)
                videoRenderer->repaintVideo (nativeWindow->hwnd, nativeWindow->hdc);
        }

        void updateVideoPosition()
        {
            if (hasVideo)
                videoRenderer->setVideoPosition (nativeWindow->hwnd);
        }

        void displayResolutionChanged()
        {
            if (hasVideo)
                videoRenderer->displayModeChanged();
        }

        //======================================================================
        // Rebuilds the native window when the component moves to a new peer.
        void peerChanged()
        {
            deleteNativeWindow();

            mediaEvent->SetNotifyWindow (0, 0, 0);

            if (videoRenderer != nullptr)
                videoRenderer->setVideoWindow (nullptr);

            createNativeWindow();

            mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);

            if (videoRenderer != nullptr)
                videoRenderer->setVideoWindow (hwnd);
        }

        void handleAsyncUpdate() override
        {
            if (hwnd != 0)
            {
                if (needToRecreateNativeWindow)
                {
                    peerChanged();
                    needToRecreateNativeWindow = false;
                }

                if (needToUpdateViewport)
                {
                    updateVideoPosition();
                    needToUpdateViewport = false;
                }

                repaint();
            }
            else
            {
                // no window yet - try again on the next update
                triggerAsyncUpdate();
            }
        }

        void recreateNativeWindowAsync()
        {
            needToRecreateNativeWindow = true;
            triggerAsyncUpdate();
        }

        void updateContextPosition()
        {
            needToUpdateViewport = true;
            triggerAsyncUpdate();
        }

        //======================================================================
        // Builds the DirectShow graph for the given file/URL and leaves it
        // paused. On failure the graph is torn down and a descriptive
        // failure Result is returned.
        Result loadFile (const String& fileOrURLPath)
        {
            jassert (state == uninitializedState);

            if (! createNativeWindow())
                return Result::fail ("Can't create window");

            HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);

            // basic playback interfaces
            if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaControl);
            if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaPosition);
            if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (mediaEvent);
            if (SUCCEEDED (hr))   hr = graphBuilder.QueryInterface (basicAudio);

            // video renderer interface: try the EVR on Vista and later,
            // falling back to the VMR-7 if it can't be created
            if (SUCCEEDED (hr))
            {
                if (SystemStats::getOperatingSystemType() >= SystemStats::WinVista)
                {
                    videoRenderer = new VideoRenderers::EVR();
                    hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);

                    if (FAILED (hr))
                        videoRenderer = nullptr;
                }

                if (videoRenderer == nullptr)
                {
                    videoRenderer = new VideoRenderers::VMR7();
                    hr = videoRenderer->create (graphBuilder, baseFilter, hwnd);
                }
            }

            // build filter graph
            if (SUCCEEDED (hr))
            {
                hr = graphBuilder->RenderFile (fileOrURLPath.toWideCharPointer(), nullptr);

                if (FAILED (hr))
                {
                    // Annoyingly, if we don't run the msg loop between failing and deleting the window, the
                    // whole OS message-dispatch system gets itself into a state, and refuses to deliver any
                    // more messages for the whole app. (That's what happens in Win7, anyway)
                    MessageManager::getInstance()->runDispatchLoopUntil (200);
                }
            }

            // remove video renderer if not connected (no video)
            if (SUCCEEDED (hr))
            {
                if (isRendererConnected())
                {
                    hasVideo = true;
                }
                else
                {
                    hasVideo = false;
                    graphBuilder->RemoveFilter (baseFilter);
                    videoRenderer = nullptr;
                    baseFilter = nullptr;
                }
            }

            // set window to receive events
            if (SUCCEEDED (hr))
                hr = mediaEvent->SetNotifyWindow ((OAHWND) hwnd, graphEventID, 0);

            if (SUCCEEDED (hr))
            {
                state = stoppedState;
                pause();
                return Result::ok();
            }

            // Note that if you're trying to open a file and this method fails, you may
            // just need to install a suitable codec. It seems that by default DirectShow
            // doesn't support a very good range of formats.
            release();
            return getErrorMessageFromResult (hr);
        }

        // Maps common DirectShow failure codes to readable messages, falling
        // back to the system's FormatMessage text for anything else.
        static Result getErrorMessageFromResult (HRESULT hr)
        {
            switch (hr)
            {
                case VFW_E_INVALID_FILE_FORMAT:         return Result::fail ("Invalid file format");
                case VFW_E_NOT_FOUND:                   return Result::fail ("File not found");
                case VFW_E_UNKNOWN_FILE_TYPE:           return Result::fail ("Unknown file type");
                case VFW_E_UNSUPPORTED_STREAM:          return Result::fail ("Unsupported stream");
                case VFW_E_CANNOT_CONNECT:              return Result::fail ("Cannot connect");
                case VFW_E_CANNOT_LOAD_SOURCE_FILTER:   return Result::fail ("Cannot load source filter");
            }

            TCHAR messageBuffer[512] = { 0 };

            FormatMessage (FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
                           nullptr, hr, MAKELANGID (LANG_NEUTRAL, SUBLANG_DEFAULT),
                           messageBuffer, (DWORD) numElementsInArray (messageBuffer) - 1, nullptr);

            return Result::fail (String (messageBuffer));
        }

        // Tears down the graph, releases all COM interfaces and destroys the
        // native window. Safe to call when nothing is loaded.
        void release()
        {
            if (mediaControl != nullptr)
                mediaControl->Stop();

            // stop event delivery before the window goes away
            if (mediaEvent != nullptr)
                mediaEvent->SetNotifyWindow (0, 0, 0);

            if (videoRenderer != nullptr)
                videoRenderer->setVideoWindow (0);

            hasVideo = false;
            videoRenderer = nullptr;

            baseFilter = nullptr;
            basicAudio = nullptr;
            mediaEvent = nullptr;
            mediaPosition = nullptr;
            mediaControl = nullptr;
            graphBuilder = nullptr;

            state = uninitializedState;

            if (nativeWindow != nullptr)
                deleteNativeWindow();
        }

        // Drains the DirectShow event queue and reacts to the events.
        void graphEventProc()
        {
            LONG ec = 0;
            // LONG_PTR is an integer type, so zero-init with 0 rather than
            // nullptr (initialising an integer from nullptr is ill-formed).
            LONG_PTR p1 = 0, p2 = 0;

            jassert (mediaEvent != nullptr);

            while (SUCCEEDED (mediaEvent->GetEvent (&ec, &p1, &p2, 0)))
            {
                // the params must always be freed, whatever the event was
                mediaEvent->FreeEventParams (ec, p1, p2);

                switch (ec)
                {
                    case EC_REPAINT:
                        component.repaint();
                        break;

                    case EC_COMPLETE:
                        component.stop();
                        break;

                    case EC_USERABORT:
                    case EC_ERRORABORT:
                    case EC_ERRORABORTEX:
                        component.close();
                        break;

                    default:
                        break;
                }
            }
        }

        //======================================================================
        void play()
        {
            mediaControl->Run();
            state = runningState;
        }

        void stop()
        {
            mediaControl->Stop();
            state = stoppedState;
        }

        void pause()
        {
            mediaControl->Pause();
            state = pausedState;
        }

        //======================================================================
        // Native frame size as reported by the renderer (empty when audio-only).
        Rectangle<int> getVideoSize() const noexcept
        {
            long width = 0, height = 0;

            if (hasVideo)
                videoRenderer->getVideoSize (width, height);

            return Rectangle<int> ((int) width, (int) height);
        }

        //======================================================================
        // Total media length in seconds (0 if the COM call fails).
        double getDuration() const
        {
            REFTIME duration = 0;   // initialised so a failed call can't return garbage
            mediaPosition->get_Duration (&duration);
            return duration;
        }

        // Current playback position in seconds (0 if the COM call fails).
        double getPosition() const
        {
            REFTIME seconds = 0;    // initialised so a failed call can't return garbage
            mediaPosition->get_CurrentPosition (&seconds);
            return seconds;
        }

        void setSpeed (double newSpeed)      { mediaPosition->put_Rate (newSpeed); }
        void setPosition (double seconds)    { mediaPosition->put_CurrentPosition (seconds); }
        void setVolume (float newVolume)     { basicAudio->put_Volume (convertToDShowVolume (newVolume)); }

        // in DirectShow, full volume is 0, silence is -10000
        static long convertToDShowVolume (float vol) noexcept
        {
            if (vol >= 1.0f) return 0;
            if (vol <= 0.0f) return -10000;

            return roundToInt ((vol * 10000.0f) - 10000.0f);
        }

        float getVolume() const
        {
            long volume = -10000;   // default to silence (0.0f) if the COM call fails
            basicAudio->get_Volume (&volume);
            return (volume + 10000) / 10000.0f;
        }

        enum State { uninitializedState, runningState, pausedState, stoppedState };
        State state;

    private:
        //======================================================================
        // Private window-message ID registered with SetNotifyWindow.
        enum { graphEventID = WM_APP + 0x43f0 };

        Pimpl& component;
        HWND hwnd;
        HDC hdc;

        // DirectShow COM interfaces - all nulled out again in release()
        ComSmartPtr<IGraphBuilder> graphBuilder;
        ComSmartPtr<IMediaControl> mediaControl;
        ComSmartPtr<IMediaPosition> mediaPosition;
        ComSmartPtr<IMediaEventEx> mediaEvent;
        ComSmartPtr<IBasicAudio> basicAudio;
        ComSmartPtr<IBaseFilter> baseFilter;

        ScopedPointer<VideoRenderers::Base> videoRenderer;

        bool hasVideo, needToUpdateViewport, needToRecreateNativeWindow;

        //======================================================================
        // Creates the native child window inside the component's peer;
        // returns false if there's no peer yet or creation fails.
        bool createNativeWindow()
        {
            jassert (nativeWindow == nullptr);

            if (ComponentPeer* const topLevelPeer = component.getTopLevelComponent()->getPeer())
            {
                nativeWindow = new NativeWindow ((HWND) topLevelPeer->getNativeHandle(), this);

                hwnd = nativeWindow->hwnd;

                if (hwnd != 0)
                {
                    hdc = GetDC (hwnd);
                    component.updateContextPosition();
                    component.updateContextVisibility();
                    return true;
                }

                nativeWindow = nullptr;
            }
            else
            {
                jassertfalse;   // the component must be on-screen before loading
            }

            return false;
        }

        void deleteNativeWindow()
        {
            jassert (nativeWindow != nullptr);
            ReleaseDC (hwnd, hdc);
            hwnd = 0;
            hdc = 0;
            nativeWindow = nullptr;
        }

        // True if the renderer's input pin ended up connected, i.e. the
        // loaded media actually contains a video stream.
        bool isRendererConnected()
        {
            ComSmartPtr<IEnumPins> enumPins;

            HRESULT hr = baseFilter->EnumPins (enumPins.resetAndGetPointerAddress());

            if (SUCCEEDED (hr))
                hr = enumPins->Reset();

            ComSmartPtr<IPin> pin;

            while (SUCCEEDED (hr)
                    && enumPins->Next (1, pin.resetAndGetPointerAddress(), nullptr) == S_OK)
            {
                ComSmartPtr<IPin> otherPin;

                hr = pin->ConnectedTo (otherPin.resetAndGetPointerAddress());

                if (SUCCEEDED (hr))
                {
                    PIN_DIRECTION direction;
                    hr = pin->QueryDirection (&direction);

                    if (SUCCEEDED (hr) && direction == PINDIR_INPUT)
                        return true;
                }
                else if (hr == VFW_E_NOT_CONNECTED)
                {
                    hr = S_OK;   // unconnected pins aren't an error - keep scanning
                }
            }

            return false;
        }

        //======================================================================
        // Registers a minimal WNDCLASS for the hidden render windows.
        struct NativeWindowClass   : private DeletedAtShutdown
        {
            bool isRegistered() const noexcept              { return atom != 0; }
            LPCTSTR getWindowClassName() const noexcept     { return (LPCTSTR) MAKELONG (atom, 0); }

            juce_DeclareSingleton_SingleThreaded_Minimal (NativeWindowClass)

        private:
            NativeWindowClass() : atom()
            {
                // use a unique class name so multiple library instances can coexist
                String windowClassName ("JUCE_DIRECTSHOW_");
                windowClassName << (int) (Time::currentTimeMillis() & 0x7fffffff);

                HINSTANCE moduleHandle = (HINSTANCE) Process::getCurrentModuleInstanceHandle();

                TCHAR moduleFile [1024] = { 0 };
                GetModuleFileName (moduleHandle, moduleFile, 1024);

                WNDCLASSEX wcex = { 0 };
                wcex.cbSize         = sizeof (wcex);
                wcex.style          = CS_OWNDC;   // each window gets its own private DC
                wcex.lpfnWndProc    = (WNDPROC) wndProc;
                wcex.lpszClassName  = windowClassName.toWideCharPointer();
                wcex.hInstance      = moduleHandle;

                atom = RegisterClassEx (&wcex);
                jassert (atom != 0);
            }

            ~NativeWindowClass()
            {
                if (atom != 0)
                    UnregisterClass (getWindowClassName(), (HINSTANCE) Process::getCurrentModuleInstanceHandle());

                clearSingletonInstance();
            }

            // Routes messages to the DirectShowContext in the window's user data.
            static LRESULT CALLBACK wndProc (HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
            {
                if (DirectShowContext* const c
                        = (DirectShowContext*) GetWindowLongPtr (hwnd, GWLP_USERDATA))
                {
                    switch (msg)
                    {
                        case WM_NCHITTEST:      return HTTRANSPARENT;   // let hit-tests fall through
                        case WM_ERASEBKGND:     return 1;               // renderer paints the background
                        case WM_DISPLAYCHANGE:  c->displayResolutionChanged(); break;
                        case graphEventID:      c->graphEventProc(); return 0;
                        default:                break;
                    }
                }

                return DefWindowProc (hwnd, msg, wParam, lParam);
            }

            ATOM atom;

            JUCE_DECLARE_NON_COPYABLE (NativeWindowClass)
        };

        //======================================================================
        // RAII wrapper around the hidden child window the renderer draws into.
        struct NativeWindow
        {
            NativeWindow (HWND parentToAddTo, void* userData)   : hwnd(), hdc()
            {
                NativeWindowClass* wc = NativeWindowClass::getInstance();

                if (wc->isRegistered())
                {
                    DWORD exstyle = 0;
                    DWORD type = WS_CHILD;

                    hwnd = CreateWindowEx (exstyle, wc->getWindowClassName(),
                                           L"", type, 0, 0, 0, 0, parentToAddTo, 0,
                                           (HINSTANCE) Process::getCurrentModuleInstanceHandle(), 0);

                    if (hwnd != 0)
                    {
                        hdc = GetDC (hwnd);   // CS_OWNDC: the DC is private to this window
                        SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) userData);
                    }
                }

                jassert (hwnd != 0);
            }

            ~NativeWindow()
            {
                if (hwnd != 0)
                {
                    // clear the back-pointer before destroying the window
                    SetWindowLongPtr (hwnd, GWLP_USERDATA, (LONG_PTR) 0);
                    DestroyWindow (hwnd);
                }
            }

            void setWindowPosition (Rectangle<int> newBounds)
            {
                SetWindowPos (hwnd, 0, newBounds.getX(), newBounds.getY(),
                              newBounds.getWidth(), newBounds.getHeight(),
                              SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOOWNERZORDER);
            }

            void showWindow (const bool shouldBeVisible)
            {
                ShowWindow (hwnd, shouldBeVisible ? SW_SHOWNA : SW_HIDE);
            }

            HWND hwnd;
            HDC hdc;

            JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (NativeWindow)
        };

        ScopedPointer<NativeWindow> nativeWindow;

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowContext)
    };

    ScopedPointer<DirectShowContext> context;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Pimpl)
};

juce_ImplementSingleton_SingleThreaded (VideoComponent::Pimpl::DirectShowContext::NativeWindowClass)

+ 0
- 213
modules/juce_video/playback/juce_DirectShowComponent.h View File

@@ -1,213 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2017 - ROLI Ltd.
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 5 End-User License
Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
27th April 2017).
End User License Agreement: www.juce.com/juce-5-licence
Privacy Policy: www.juce.com/juce-5-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#pragma once
//==============================================================================
/**
A window that can play back a DirectShow video.
@note Controller is not implemented
*/
class JUCE_API DirectShowComponent : public Component
{
public:
    //==============================================================================
    /** DirectShow video renderer type.

        See MSDN for advice about choosing the right renderer.
    */
    enum VideoRendererType
    {
        dshowDefault, /**< VMR7 for Windows XP, EVR for Windows Vista and later */
        dshowVMR7,    /**< Video Mixing Renderer 7 */
        dshowEVR      /**< Enhanced Video Renderer */
    };

    /** Creates a DirectShowComponent, initially blank.

        Use the loadMovie() method to load a video once you've added the
        component to a window, (or put it on the desktop as a heavyweight window).
        Loading a video when the component isn't visible can cause problems, as
        DirectShow needs a window handle to initialise properly.

        @see VideoRendererType
    */
    DirectShowComponent (VideoRendererType type = dshowDefault);

    /** Destructor. */
    ~DirectShowComponent();

    /** Returns true if DirectShow is installed and working on this machine. */
    static bool isDirectShowAvailable();

    //==============================================================================
    /** Tries to load a DirectShow video from a file or URL into the player.

        It's best to call this function once you've added the component to a window,
        (or put it on the desktop as a heavyweight window). Loading a video when the
        component isn't visible can cause problems, because DirectShow needs a window
        handle to do its stuff.

        @param fileOrURLPath    the file or URL path to open
        @returns true if the video opens successfully
    */
    bool loadMovie (const String& fileOrURLPath);

    /** Tries to load a DirectShow video from a file into the player.

        It's best to call this function once you've added the component to a window,
        (or put it on the desktop as a heavyweight window). Loading a video when the
        component isn't visible can cause problems, because DirectShow needs a window
        handle to do its stuff.

        @param videoFile    the video file to open
        @returns true if the video opens successfully
    */
    bool loadMovie (const File& videoFile);

    /** Tries to load a DirectShow video from a URL into the player.

        It's best to call this function once you've added the component to a window,
        (or put it on the desktop as a heavyweight window). Loading a video when the
        component isn't visible can cause problems, because DirectShow needs a window
        handle to do its stuff.

        @param videoURL    the video URL to open
        @returns true if the video opens successfully
    */
    bool loadMovie (const URL& videoURL);

    /** Closes the video, if one is open. */
    void closeMovie();

    /** Returns the file path or URL from which the video file was loaded.
        If there isn't one, this will return a default-constructed File().
    */
    File getCurrentMoviePath() const;

    /** Returns true if there's currently a video open. */
    bool isMovieOpen() const;

    /** Returns the length of the video, in seconds. */
    double getMovieDuration() const;

    /** Returns the video's natural size, in pixels.

        You can use this to resize the component to show the video at its preferred
        scale.

        If no video is loaded, the size returned will be 0 x 0.
    */
    void getMovieNormalSize (int& width, int& height) const;

    /** This will position the component within a given area, keeping its aspect
        ratio correct according to the video's normal size.

        The component will be made as large as it can go within the space, and will
        be aligned according to the justification value if this means there are gaps at
        the top or sides.

        @note Not implemented
    */
    void setBoundsWithCorrectAspectRatio (const Rectangle<int>& spaceToFitWithin,
                                          RectanglePlacement placement);

    /** Starts the video playing. */
    void play();

    /** Stops the video playing. */
    void stop();

    /** Returns true if the video is currently playing. */
    bool isPlaying() const;

    /** Moves the video's position back to the start. */
    void goToStart();

    /** Sets the video's position to a given time. */
    void setPosition (double seconds);

    /** Returns the current play position of the video. */
    double getPosition() const;

    /** Changes the video playback rate.

        A value of 1 is normal speed, greater values play it proportionately faster,
        smaller values play it slower.
    */
    void setSpeed (float newSpeed);

    /** Changes the video's playback volume.
        @param newVolume    the volume in the range 0 (silent) to 1.0 (full)
    */
    void setMovieVolume (float newVolume);

    /** Returns the video's playback volume.
        @returns the volume in the range 0 (silent) to 1.0 (full)
    */
    float getMovieVolume() const;

    /** Tells the video whether it should loop. */
    void setLooping (bool shouldLoop);

    /** Returns true if the video is currently looping.
        @see setLooping
    */
    bool isLooping() const;

    //==============================================================================
    /** @internal */
    void paint (Graphics&) override;

private:
    //==============================================================================
    String videoPath;
    bool videoLoaded, looping;

    // Native DirectShow state is hidden behind a pimpl so this header stays
    // free of Windows/DirectShow includes.
    class DirectShowContext;
    friend class DirectShowContext;
    friend struct ContainerDeletePolicy<DirectShowContext>;
    ScopedPointer<DirectShowContext> context;

    // Watches this component for moves/resizes/visibility changes so the
    // native window can be kept in sync.
    class DirectShowComponentWatcher;
    friend class DirectShowComponentWatcher;
    friend struct ContainerDeletePolicy<DirectShowComponentWatcher>;
    ScopedPointer<DirectShowComponentWatcher> componentWatcher;

    //==============================================================================
    void updateContextPosition();
    void showContext (bool shouldBeVisible);
    void recreateNativeWindowAsync();

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (DirectShowComponent)
};

+ 116
- 0
modules/juce_video/playback/juce_VideoComponent.cpp View File

@@ -0,0 +1,116 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#if JUCE_MAC
#include "../native/juce_mac_Video.h"
#elif JUCE_WINDOWS
#include "../native/juce_win32_Video.h"
#elif JUCE_ANDROID
#include "../native/juce_android_Video.h"
#endif
//==============================================================================
VideoComponent::VideoComponent() : pimpl (new Pimpl())
{
    // The pimpl is a Component (the platform-specific video view), so adding
    // it as a visible child ties its lifetime and visibility to this one.
    addAndMakeVisible (pimpl);
}

VideoComponent::~VideoComponent()
{
    // Delete the pimpl explicitly so the native view is torn down while
    // this parent component still exists.
    pimpl = nullptr;
}
Result VideoComponent::load (const File& file)
{
    // Forward to the native player, then re-run the layout so the timer that
    // polls for the video's native size gets started if necessary.
    auto result = pimpl->load (file);
    resized();
    return result;
}

Result VideoComponent::load (const URL& url)
{
    auto result = pimpl->load (url);
    resized();
    return result;
}

void VideoComponent::closeVideo()
{
    // Close the native player and re-run the layout to stop any size-polling timer.
    pimpl->close();
    resized();
}
//==============================================================================
// Thin forwarding wrappers: all playback state lives in the platform-specific Pimpl.
bool VideoComponent::isVideoOpen() const { return pimpl->isOpen(); }
File VideoComponent::getCurrentVideoFile() const { return pimpl->currentFile; }
URL VideoComponent::getCurrentVideoURL() const { return pimpl->currentURL; }
double VideoComponent::getVideoDuration() const { return pimpl->getDuration(); }
Rectangle<int> VideoComponent::getVideoNativeSize() const { return pimpl->getNativeSize(); }
void VideoComponent::play() { pimpl->play(); }
void VideoComponent::stop() { pimpl->stop(); }
bool VideoComponent::isPlaying() const { return pimpl->isPlaying(); }
void VideoComponent::setPlayPosition (double newPos) { pimpl->setPosition (newPos); }
double VideoComponent::getPlayPosition() const { return pimpl->getPosition(); }
void VideoComponent::setPlaySpeed (double newSpeed) { pimpl->setSpeed (newSpeed); }
void VideoComponent::setAudioVolume (float newVolume) { pimpl->setVolume (newVolume); }
float VideoComponent::getAudioVolume() const { return pimpl->getVolume(); }
void VideoComponent::resized()
{
Rectangle<int> r = getLocalBounds();
if (isVideoOpen() && ! r.isEmpty())
{
Rectangle<int> nativeSize = getVideoNativeSize();
if (nativeSize.isEmpty())
{
// if we've just opened the file and are still waiting for it to
// figure out the size, start our timer..
if (! isTimerRunning())
startTimer (50);
}
else
{
r = RectanglePlacement (RectanglePlacement::centred).appliedTo (nativeSize, r);
stopTimer();
}
}
else
{
stopTimer();
}
pimpl->setBounds (r);
}
void VideoComponent::timerCallback()
{
    // Re-run the layout: once the video's native size becomes available,
    // resized() will centre the video and stop this timer.
    resized();
}

+ 132
- 0
modules/juce_video/playback/juce_VideoComponent.h View File

@@ -0,0 +1,132 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2015 - ROLI Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
#ifndef JUCE_VIDEOCOMPONENT_H_INCLUDED
#define JUCE_VIDEOCOMPONENT_H_INCLUDED
//==============================================================================
/**
A component that can play a movie.
Use the load() method to open a video once you've added this component to
a parent (or put it on the desktop).
*/
class JUCE_API VideoComponent : public Component,
                                private Timer
{
public:
    //==============================================================================
    /** Creates an empty VideoComponent.

        Use the load() method to open a video once you've added this component to
        a parent (or put it on the desktop).
    */
    VideoComponent();

    /** Destructor. */
    ~VideoComponent();

    //==============================================================================
    /** Tries to load a video from a local file.
        @returns an error if the file failed to be loaded correctly
    */
    Result load (const File& file);

    /** Tries to load a video from a URL.
        @returns an error if the URL failed to be loaded correctly
    */
    Result load (const URL& url);

    /** Closes the video and resets the component. */
    void closeVideo();

    /** Returns true if a video is currently open. */
    bool isVideoOpen() const;

    /** Returns the last file that was loaded.
        If nothing is open, or if it was a URL rather than a file, this will return File().
    */
    File getCurrentVideoFile() const;

    /** Returns the last URL that was loaded.
        If nothing is open, or if it was a file rather than a URL, this will return URL().
    */
    URL getCurrentVideoURL() const;

    //==============================================================================
    /** Returns the length of the video, in seconds. */
    double getVideoDuration() const;

    /** Returns the video's natural size, in pixels.
        If no video is loaded, an empty rectangle will be returned.
    */
    Rectangle<int> getVideoNativeSize() const;

    /** Starts the video playing. */
    void play();

    /** Stops the video playing. */
    void stop();

    /** Returns true if the video is currently playing. */
    bool isPlaying() const;

    /** Sets the video's position to a given time. */
    void setPlayPosition (double newPositionSeconds);

    /** Returns the current play position of the video. */
    double getPlayPosition() const;

    /** Changes the video playback rate.

        A value of 1.0 is normal speed, greater values will play faster, smaller
        values play more slowly.
    */
    void setPlaySpeed (double newSpeed);

    /** Changes the video's playback volume.
        @param newVolume    the volume in the range 0 (silent) to 1.0 (full)
    */
    void setAudioVolume (float newVolume);

    /** Returns the video's playback volume.
        @returns the volume in the range 0 (silent) to 1.0 (full)
    */
    float getAudioVolume() const;

private:
    //==============================================================================
    // Platform-specific implementation (AVFoundation / DirectShow / Android).
    struct Pimpl;
    friend struct Pimpl;
    friend struct ContainerDeletePolicy<Pimpl>;
    ScopedPointer<Pimpl> pimpl;

    void resized() override;
    void timerCallback() override;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (VideoComponent)
};
#endif

Loading…
Cancel
Save