Browse Source

Internal refactoring of some camera capture classes to make it easier to update to new APIs.

tags/2021-05-28
jules 11 years ago
parent
commit
95f536383f
6 changed files with 476 additions and 549 deletions
  1. +90
    -0
      modules/juce_video/capture/juce_CameraDevice.cpp
  2. +11
    -8
      modules/juce_video/capture/juce_CameraDevice.h
  3. +4
    -0
      modules/juce_video/juce_video.cpp
  4. +39
    -67
      modules/juce_video/native/juce_android_CameraDevice.cpp
  5. +95
    -144
      modules/juce_video/native/juce_mac_CameraDevice.mm
  6. +237
    -330
      modules/juce_video/native/juce_win32_CameraDevice.cpp

+ 90
- 0
modules/juce_video/capture/juce_CameraDevice.cpp View File

@@ -0,0 +1,90 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
// Constructs a camera wrapper: stores the device name and builds the
// platform-specific Pimpl, forwarding the device index and the requested
// min/max capture-size constraints to it.
CameraDevice::CameraDevice (const String& nm, int index, int minWidth, int minHeight, int maxWidth, int maxHeight)
: name (nm), pimpl (new Pimpl (name, index, minWidth, minHeight, maxWidth, maxHeight))
{
}
// Destructor: stops any active recording before the pimpl (and with it the
// platform capture session) is destroyed.
CameraDevice::~CameraDevice()
{
stopRecording(); // must happen while pimpl is still alive
pimpl = nullptr;
}
// Returns a new, caller-owned component showing this device's preview.
Component* CameraDevice::createViewerComponent()
{
return new ViewerComponent (*this);
}
// Starts recording the camera stream to the given file.
// quality is interpreted by the platform implementation
// (NOTE(review): exact quality scale is platform-defined — confirm in Pimpl).
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
stopRecording(); // only one recording at a time: end any current one first
pimpl->startRecordingToFile (file, quality);
}
// Returns the wall-clock time of the first frame written by the current
// (or most recent) recording, as reported by the platform implementation.
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
return pimpl->getTimeOfFirstRecordedFrame();
}
// Stops any in-progress recording; safe to call when not recording
// (the platform Pimpl guards on its own isRecording flag).
void CameraDevice::stopRecording()
{
pimpl->stopRecording();
}
// Registers a listener to receive captured frames; null pointers are ignored.
void CameraDevice::addListener (Listener* listenerToAdd)
{
    if (listenerToAdd == nullptr)
        return;

    pimpl->addListener (listenerToAdd);
}
// Deregisters a listener previously added with addListener(); null pointers
// are ignored.
void CameraDevice::removeListener (Listener* listenerToRemove)
{
    if (listenerToRemove == nullptr)
        return;

    pimpl->removeListener (listenerToRemove);
}
//==============================================================================
// Returns the names of the cameras the platform implementation can find.
// NOTE(review): JUCE_AUTORELEASEPOOL presumably scopes an Obj-C autorelease
// pool for the Mac implementation and expands to nothing elsewhere — confirm.
StringArray CameraDevice::getAvailableDevices()
{
JUCE_AUTORELEASEPOOL
{
return Pimpl::getAvailableDevices();
}
}
// Opens the device at the given index with the requested size constraints.
// Returns a caller-owned CameraDevice, or nullptr if the platform layer
// failed to open it (the half-constructed device is cleaned up automatically).
CameraDevice* CameraDevice::openDevice (int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
{
    ScopedPointer<CameraDevice> device (new CameraDevice (getAvailableDevices() [index], index,
                                                          minWidth, minHeight, maxWidth, maxHeight));

    return device->pimpl->openedOk() ? device.release() : nullptr;
}

+ 11
- 8
modules/juce_video/capture/juce_CameraDevice.h View File

@@ -135,17 +135,20 @@ public:
/** Removes a listener that was previously added with addListener(). */
void removeListener (Listener* listenerToRemove);
protected:
#ifndef DOXYGEN
CameraDevice (const String& name, int index);
#endif
private:
void* internal;
bool isRecording;
String name;
struct Pimpl;
friend struct Pimpl;
friend struct ContainerDeletePolicy<Pimpl>;
ScopedPointer<Pimpl> pimpl;
struct ViewerComponent;
friend struct ViewerComponent;
CameraDevice (const String& name, int index,
int minWidth, int minHeight, int maxWidth, int maxHeight);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (CameraDevice)
};


+ 4
- 0
modules/juce_video/juce_video.cpp View File

@@ -139,4 +139,8 @@ namespace juce
#endif
#endif
#if JUCE_USE_CAMERA
#include "capture/juce_CameraDevice.cpp"
#endif
}

+ 39
- 67
modules/juce_video/native/juce_android_CameraDevice.cpp View File

@@ -22,90 +22,62 @@
==============================================================================
*/
// TODO
class AndroidCameraInternal
struct CameraDevice::Pimpl
{
public:
AndroidCameraInternal()
Pimpl (const String&, int /*index*/, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
{
}
~AndroidCameraInternal()
~Pimpl()
{
}
private:
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AndroidCameraInternal)
};
//==============================================================================
CameraDevice::CameraDevice (const String& name_, int /*index*/)
: name (name_)
{
internal = new AndroidCameraInternal();
// TODO
}
CameraDevice::~CameraDevice()
{
stopRecording();
delete static_cast <AndroidCameraInternal*> (internal);
internal = 0;
}
Component* CameraDevice::createViewerComponent()
{
// TODO
void startRecordingToFile (const File&, int /*quality*/)
{
}
return nullptr;
}
void stopRecording()
{
}
String CameraDevice::getFileExtension()
{
return ".m4a"; // TODO correct?
}
Time getTimeOfFirstRecordedFrame() const
{
return Time();
}
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
// TODO
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
listeners.addIfNotAlreadyThere (listenerToAdd);
}
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
// TODO
return Time();
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeFirstMatchingValue (listenerToRemove);
}
void CameraDevice::stopRecording()
{
// TODO
}
static StringArray getAvailableDevices()
{
StringArray results;
void CameraDevice::addListener (Listener* listenerToAdd)
{
// TODO
}
return results;
}
void CameraDevice::removeListener (Listener* listenerToRemove)
{
// TODO
}
private:
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
StringArray CameraDevice::getAvailableDevices()
struct CameraDevice::ViewerComponent : public Component
{
StringArray devs;
// TODO
ViewerComponent (CameraDevice&)
{
}
return devs;
}
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
CameraDevice* CameraDevice::openDevice (int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
String CameraDevice::getFileExtension()
{
// TODO
return nullptr;
return ".mov";
}

+ 95
- 144
modules/juce_video/native/juce_mac_CameraDevice.mm View File

@@ -23,16 +23,14 @@
*/
#if ! JUCE_QUICKTIME
#error "On the Mac, cameras use Quicktime, so if you turn on JUCE_USE_CAMERA, you also need to enable JUCE_QUICKTIME"
#error "To support cameras in OSX you'll need to enable the JUCE_QUICKTIME flag"
#endif
extern Image juce_createImageFromCIImage (CIImage* im, int w, int h);
extern Image juce_createImageFromCIImage (CIImage*, int w, int h);
//==============================================================================
class QTCameraDeviceInternal
struct CameraDevice::Pimpl
{
public:
QTCameraDeviceInternal (CameraDevice*, const int index)
Pimpl (const String&, const int index, int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/)
: input (nil),
audioDevice (nil),
audioInput (nil),
@@ -40,7 +38,8 @@ public:
fileOutput (nil),
imageOutput (nil),
firstPresentationTime (0),
averageTimeOffset (0)
averageTimeOffset (0),
isRecording (false)
{
JUCE_AUTORELEASEPOOL
{
@@ -84,7 +83,7 @@ public:
}
}
~QTCameraDeviceInternal()
~Pimpl()
{
[session stopRunning];
[session removeOutput: imageOutput];
@@ -99,6 +98,8 @@ public:
[callbackDelegate release];
}
bool openedOk() const noexcept { return openingError.isEmpty(); }
void resetFile()
{
[fileOutput recordToOutputFileURL: nil];
@@ -132,6 +133,59 @@ public:
}
}
void startRecordingToFile (const File& file, int quality)
{
stopRecording();
firstPresentationTime = 0;
file.deleteFile();
// In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
// out wrong, so we'll put some audio in there too..,
addDefaultAudioInput();
[session addOutput: fileOutput error: nil];
NSEnumerator* connectionEnumerator = [[fileOutput connections] objectEnumerator];
for (;;)
{
QTCaptureConnection* connection = [connectionEnumerator nextObject];
if (connection == nil)
break;
QTCompressionOptions* options = nil;
NSString* mediaType = [connection mediaType];
if ([mediaType isEqualToString: QTMediaTypeVideo])
options = [QTCompressionOptions compressionOptionsWithIdentifier:
quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
: nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
else if ([mediaType isEqualToString: QTMediaTypeSound])
options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];
[fileOutput setCompressionOptions: options forConnection: connection];
}
[fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
isRecording = true;
}
void stopRecording()
{
if (isRecording)
{
resetFile();
isRecording = false;
}
}
Time getTimeOfFirstRecordedFrame() const
{
return firstPresentationTime != 0 ? Time (firstPresentationTime + averageTimeOffset)
: Time();
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
@@ -193,28 +247,42 @@ public:
}
}
static StringArray getAvailableDevices()
{
StringArray results;
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
for (int i = 0; i < (int) [devs count]; ++i)
{
QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
results.add (nsStringToJuce ([dev localizedDisplayName]));
}
return results;
}
QTCaptureDevice* device;
QTCaptureDeviceInput* input;
QTCaptureDevice* audioDevice;
QTCaptureDeviceInput* input;
QTCaptureDeviceInput* audioInput;
QTCaptureSession* session;
QTCaptureMovieFileOutput* fileOutput;
QTCaptureDecompressedVideoOutput* imageOutput;
NSObject* callbackDelegate;
String openingError;
int64 firstPresentationTime;
int64 averageTimeOffset;
int64 firstPresentationTime, averageTimeOffset;
bool isRecording;
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
private:
//==============================================================================
struct DelegateClass : public ObjCClass <NSObject>
struct DelegateClass : public ObjCClass<NSObject>
{
DelegateClass() : ObjCClass <NSObject> ("JUCEAppDelegate_")
DelegateClass() : ObjCClass<NSObject> ("JUCEAppDelegate_")
{
addIvar<QTCameraDeviceInternal*> ("owner");
addIvar<Pimpl*> ("owner");
addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");
@@ -224,14 +292,14 @@ private:
registerClass();
}
static void setOwner (id self, QTCameraDeviceInternal* owner) { object_setInstanceVariable (self, "owner", owner); }
static QTCameraDeviceInternal* getOwner (id self) { return getIvar<QTCameraDeviceInternal*> (self, "owner"); }
static void setOwner (id self, Pimpl* owner) { object_setInstanceVariable (self, "owner", owner); }
static Pimpl* getOwner (id self) { return getIvar<Pimpl*> (self, "owner"); }
private:
static void didOutputVideoFrame (id self, SEL, QTCaptureOutput*, CVImageBufferRef videoFrame,
QTSampleBuffer*, QTCaptureConnection*)
{
QTCameraDeviceInternal* const internal = getOwner (self);
Pimpl* const internal = getOwner (self);
if (internal->listeners.size() > 0)
{
@@ -249,154 +317,37 @@ private:
getOwner (self)->captureBuffer (sampleBuffer);
}
};
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
class QTCaptureViewerComp : public NSViewComponent
struct CameraDevice::ViewerComponent : public NSViewComponent
{
public:
QTCaptureViewerComp (CameraDevice*, QTCameraDeviceInternal* internal)
ViewerComponent (CameraDevice& d)
{
JUCE_AUTORELEASEPOOL
{
captureView = [[QTCaptureView alloc] init];
[captureView setCaptureSession: internal->session];
[captureView setCaptureSession: d.pimpl->session];
setSize (640, 480); // xxx need to somehow get the movie size - how?
setSize (640, 480);
setView (captureView);
}
}
~QTCaptureViewerComp()
~ViewerComponent()
{
setView (0);
setView (nil);
[captureView setCaptureSession: nil];
[captureView release];
}
QTCaptureView* captureView;
};
//==============================================================================
CameraDevice::CameraDevice (const String& name_, int index)
: name (name_)
{
isRecording = false;
internal = new QTCameraDeviceInternal (this, index);
}
CameraDevice::~CameraDevice()
{
stopRecording();
delete static_cast <QTCameraDeviceInternal*> (internal);
internal = nullptr;
}
Component* CameraDevice::createViewerComponent()
{
return new QTCaptureViewerComp (this, static_cast <QTCameraDeviceInternal*> (internal));
}
JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
String CameraDevice::getFileExtension()
{
return ".mov";
}
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
stopRecording();
QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);
d->firstPresentationTime = 0;
file.deleteFile();
// In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
// out wrong, so we'll put some audio in there too..,
d->addDefaultAudioInput();
[d->session addOutput: d->fileOutput error: nil];
NSEnumerator* connectionEnumerator = [[d->fileOutput connections] objectEnumerator];
for (;;)
{
QTCaptureConnection* connection = [connectionEnumerator nextObject];
if (connection == nil)
break;
QTCompressionOptions* options = nil;
NSString* mediaType = [connection mediaType];
if ([mediaType isEqualToString: QTMediaTypeVideo])
options = [QTCompressionOptions compressionOptionsWithIdentifier:
quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
: nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
else if ([mediaType isEqualToString: QTMediaTypeSound])
options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];
[d->fileOutput setCompressionOptions: options forConnection: connection];
}
[d->fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
isRecording = true;
}
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);
if (d->firstPresentationTime != 0)
return Time (d->firstPresentationTime + d->averageTimeOffset);
return Time();
}
void CameraDevice::stopRecording()
{
if (isRecording)
{
static_cast <QTCameraDeviceInternal*> (internal)->resetFile();
isRecording = false;
}
}
void CameraDevice::addListener (Listener* listenerToAdd)
{
if (listenerToAdd != nullptr)
static_cast <QTCameraDeviceInternal*> (internal)->addListener (listenerToAdd);
}
void CameraDevice::removeListener (Listener* listenerToRemove)
{
if (listenerToRemove != nullptr)
static_cast <QTCameraDeviceInternal*> (internal)->removeListener (listenerToRemove);
}
//==============================================================================
StringArray CameraDevice::getAvailableDevices()
{
JUCE_AUTORELEASEPOOL
{
StringArray results;
NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
for (int i = 0; i < (int) [devs count]; ++i)
{
QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
results.add (nsStringToJuce ([dev localizedDisplayName]));
}
return results;
}
}
CameraDevice* CameraDevice::openDevice (int index,
int /*minWidth*/, int /*minHeight*/,
int /*maxWidth*/, int /*maxHeight*/)
{
ScopedPointer <CameraDevice> d (new CameraDevice (getAvailableDevices() [index], index));
if (static_cast <QTCameraDeviceInternal*> (d->internal)->openingError.isEmpty())
return d.release();
return nullptr;
}

+ 237
- 330
modules/juce_video/native/juce_win32_CameraDevice.cpp View File

@@ -44,27 +44,29 @@ static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
//==============================================================================
class DShowCameraDeviceInteral : public ChangeBroadcaster
struct CameraDevice::Pimpl : public ChangeBroadcaster
{
public:
DShowCameraDeviceInteral (CameraDevice* const owner_,
const ComSmartPtr <ICaptureGraphBuilder2>& captureGraphBuilder_,
const ComSmartPtr <IBaseFilter>& filter_,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
: owner (owner_),
captureGraphBuilder (captureGraphBuilder_),
filter (filter_),
ok (false),
imageNeedsFlipping (false),
width (0),
height (0),
activeUsers (0),
recordNextFrameTime (false),
previewMaxFPS (60)
Pimpl (const String&, int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
: isRecording (false),
openedSuccessfully (false),
imageNeedsFlipping (false),
width (0), height (0),
activeUsers (0),
recordNextFrameTime (false),
previewMaxFPS (60)
{
HRESULT hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
if (FAILED (hr))
return;
filter = enumerateCameras (nullptr, index);
if (filter == nullptr)
return;
hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
if (FAILED (hr))
return;
@@ -77,7 +79,7 @@ public:
return;
{
ComSmartPtr <IAMStreamConfig> streamConfig;
ComSmartPtr<IAMStreamConfig> streamConfig;
hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());
@@ -106,7 +108,7 @@ public:
if (! connectFilters (filter, smartTee))
return;
ComSmartPtr <IBaseFilter> sampleGrabberBase;
ComSmartPtr<IBaseFilter> sampleGrabberBase;
hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
if (FAILED (hr))
return;
@@ -130,7 +132,7 @@ public:
if (FAILED (hr))
return;
ComSmartPtr <IPin> grabberInputPin;
ComSmartPtr<IPin> grabberInputPin;
if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
&& getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
&& getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
@@ -146,7 +148,7 @@ public:
width = pVih->bmiHeader.biWidth;
height = pVih->bmiHeader.biHeight;
ComSmartPtr <IBaseFilter> nullFilter;
ComSmartPtr<IBaseFilter> nullFilter;
hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));
@@ -156,19 +158,17 @@ public:
activeImage = Image (Image::RGB, width, height, true);
loadingImage = Image (Image::RGB, width, height, true);
ok = true;
openedSuccessfully = true;
}
}
~DShowCameraDeviceInteral()
~Pimpl()
{
if (mediaControl != nullptr)
mediaControl->Stop();
removeGraphFromRot();
for (int i = viewerComps.size(); --i >= 0;)
viewerComps.getUnchecked(i)->ownerDeleted();
disconnectAnyViewers();
if (sampleGrabber != nullptr)
{
@@ -187,23 +187,69 @@ public:
asfWriter = nullptr;
}
bool openedOk() const noexcept { return openedSuccessfully; }
void startRecordingToFile (const File& file, int quality)
{
addUser();
isRecording = createFileCaptureFilter (file, quality);
}
void stopRecording()
{
if (isRecording)
{
removeFileCaptureFilter();
removeUser();
isRecording = false;
}
}
Time getTimeOfFirstRecordedFrame() const
{
return firstRecordedTime;
}
void addListener (CameraDevice::Listener* listenerToAdd)
{
const ScopedLock sl (listenerLock);
if (listeners.size() == 0)
addUser();
listeners.addIfNotAlreadyThere (listenerToAdd);
}
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeAllInstancesOf (listenerToRemove);
if (listeners.size() == 0)
removeUser();
}
void callListeners (const Image& image)
{
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
if (CameraDevice::Listener* const l = listeners[i])
l->imageReceived (image);
}
void addUser()
{
if (ok && activeUsers++ == 0)
if (openedSuccessfully && activeUsers++ == 0)
mediaControl->Run();
}
void removeUser()
{
if (ok && --activeUsers == 0)
if (openedSuccessfully && --activeUsers == 0)
mediaControl->Stop();
}
int getPreviewMaxFPS() const
{
return previewMaxFPS;
}
void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
{
if (recordNextFrameTime)
@@ -213,10 +259,10 @@ public:
firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
recordNextFrameTime = false;
ComSmartPtr <IPin> pin;
ComSmartPtr<IPin> pin;
if (getPin (filter, PINDIR_OUTPUT, pin))
{
ComSmartPtr <IAMPushSource> pushSource;
ComSmartPtr<IAMPushSource> pushSource;
HRESULT hr = pin.QueryInterface (pushSource);
if (pushSource != nullptr)
@@ -234,6 +280,7 @@ public:
const ScopedLock sl (imageSwapLock);
{
loadingImage.duplicateIfShared();
const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);
for (int i = 0; i < height; ++i)
@@ -251,7 +298,7 @@ public:
sendChangeMessage();
}
void drawCurrentImage (Graphics& g, int x, int y, int w, int h)
void drawCurrentImage (Graphics& g, Rectangle<int> area)
{
if (imageNeedsFlipping)
{
@@ -260,20 +307,16 @@ public:
imageNeedsFlipping = false;
}
RectanglePlacement rp (RectanglePlacement::centred);
double dx = 0, dy = 0, dw = width, dh = height;
rp.applyTo (dx, dy, dw, dh, x, y, w, h);
const int rx = roundToInt (dx), ry = roundToInt (dy);
const int rw = roundToInt (dw), rh = roundToInt (dh);
{
Graphics::ScopedSaveState ss (g);
Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
.appliedTo (Rectangle<int> (width, height), area));
g.excludeClipRegion (Rectangle<int> (rx, ry, rw, rh));
g.fillAll (Colours::black);
}
RectangleList<int> borders (area);
borders.subtract (centred);
g.setColour (Colours::black);
g.fillRectList (borders);
g.drawImage (activeImage, rx, ry, rw, rh, 0, 0, width, height);
g.drawImage (activeImage, centred.getX(), centred.getY(),
centred.getWidth(), centred.getHeight(), 0, 0, width, height);
}
bool createFileCaptureFilter (const File& file, int quality)
@@ -289,7 +332,7 @@ public:
if (SUCCEEDED (hr))
{
ComSmartPtr <IFileSinkFilter> fileSink;
ComSmartPtr<IFileSinkFilter> fileSink;
hr = asfWriter.QueryInterface (fileSink);
if (SUCCEEDED (hr))
@@ -302,10 +345,10 @@ public:
if (SUCCEEDED (hr))
{
ComSmartPtr <IConfigAsfWriter> asfConfig;
ComSmartPtr<IConfigAsfWriter> asfConfig;
hr = asfWriter.QueryInterface (asfConfig);
asfConfig->SetIndexMode (true);
ComSmartPtr <IWMProfileManager> profileManager;
ComSmartPtr<IWMProfileManager> profileManager;
hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());
// This gibberish is the DirectShow profile for a video-only wmv file.
@@ -337,19 +380,19 @@ public:
.replace ("$HEIGHT", String (height))
.replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));
ComSmartPtr <IWMProfile> currentProfile;
ComSmartPtr<IWMProfile> currentProfile;
hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);
if (SUCCEEDED (hr))
{
ComSmartPtr <IPin> asfWriterInputPin;
ComSmartPtr<IPin> asfWriterInputPin;
if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
{
hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);
if (SUCCEEDED (hr) && ok && activeUsers > 0
if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
&& SUCCEEDED (mediaControl->Run()))
{
previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding
@@ -368,7 +411,7 @@ public:
removeFileCaptureFilter();
if (ok && activeUsers > 0)
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
return false;
@@ -384,132 +427,127 @@ public:
asfWriter = nullptr;
}
if (ok && activeUsers > 0)
if (openedSuccessfully && activeUsers > 0)
mediaControl->Run();
previewMaxFPS = 60;
}
//==============================================================================
void addListener (CameraDevice::Listener* listenerToAdd)
static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
{
const ScopedLock sl (listenerLock);
int index = 0;
ComSmartPtr<ICreateDevEnum> pDevEnum;
if (listeners.size() == 0)
addUser();
if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
{
ComSmartPtr<IEnumMoniker> enumerator;
HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);
listeners.addIfNotAlreadyThere (listenerToAdd);
}
if (SUCCEEDED (hr) && enumerator != nullptr)
{
ComSmartPtr<IMoniker> moniker;
ULONG fetched;
void removeListener (CameraDevice::Listener* listenerToRemove)
{
const ScopedLock sl (listenerLock);
listeners.removeFirstMatchingValue (listenerToRemove);
while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
{
ComSmartPtr<IBaseFilter> captureFilter;
hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());
if (listeners.size() == 0)
removeUser();
if (SUCCEEDED (hr))
{
ComSmartPtr<IPropertyBag> propertyBag;
hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
VARIANT var;
var.vt = VT_BSTR;
hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
propertyBag = nullptr;
if (SUCCEEDED (hr))
{
if (names != nullptr)
names->add (var.bstrVal);
if (index == deviceIndexToOpen)
return captureFilter;
++index;
}
}
}
}
}
}
return nullptr;
}
void callListeners (const Image& image)
static StringArray getAvailableDevices()
{
const ScopedLock sl (listenerLock);
for (int i = listeners.size(); --i >= 0;)
if (CameraDevice::Listener* const l = listeners[i])
l->imageReceived (image);
StringArray devs;
enumerateCameras (&devs, -1);
return devs;
}
//==============================================================================
class DShowCaptureViewerComp : public Component,
public ChangeListener
class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
{
public:
DShowCaptureViewerComp (DShowCameraDeviceInteral* const owner_)
: owner (owner_), maxFPS (15), lastRepaintTime (0)
{
setOpaque (true);
owner->addChangeListener (this);
owner->addUser();
owner->viewerComps.add (this);
setSize (owner->width, owner->height);
}
GrabberCallback (Pimpl& p)
: ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}
~DShowCaptureViewerComp()
JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
{
if (owner != nullptr)
{
owner->viewerComps.removeFirstMatchingValue (this);
owner->removeUser();
owner->removeChangeListener (this);
}
}
if (refId == IID_ISampleGrabberCB)
return castToType<ISampleGrabberCB> (result);
void ownerDeleted()
{
owner = nullptr;
return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
}
void paint (Graphics& g) override
{
g.setColour (Colours::black);
g.setImageResamplingQuality (Graphics::lowResamplingQuality);
if (owner != nullptr)
owner->drawCurrentImage (g, 0, 0, getWidth(), getHeight());
else
g.fillAll (Colours::black);
}
STDMETHODIMP SampleCB (double, IMediaSample*) { return E_FAIL; }
void changeListenerCallback (ChangeBroadcaster*) override
STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
{
const int64 now = Time::currentTimeMillis();
if (now >= lastRepaintTime + (1000 / maxFPS))
{
lastRepaintTime = now;
repaint();
if (owner != nullptr)
maxFPS = owner->getPreviewMaxFPS();
}
owner.handleFrame (time, buffer, bufferSize);
return S_OK;
}
private:
DShowCameraDeviceInteral* owner;
int maxFPS;
int64 lastRepaintTime;
Pimpl& owner;
JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
};
//==============================================================================
bool ok;
ComSmartPtr<GrabberCallback> callback;
Array<CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
bool isRecording, openedSuccessfully;
int width, height;
Time firstRecordedTime;
Array <DShowCaptureViewerComp*> viewerComps;
Array<ViewerComponent*> viewerComps;
private:
CameraDevice* const owner;
ComSmartPtr <ICaptureGraphBuilder2> captureGraphBuilder;
ComSmartPtr <IBaseFilter> filter;
ComSmartPtr <IBaseFilter> smartTee;
ComSmartPtr <IGraphBuilder> graphBuilder;
ComSmartPtr <ISampleGrabber> sampleGrabber;
ComSmartPtr <IMediaControl> mediaControl;
ComSmartPtr <IPin> smartTeePreviewOutputPin;
ComSmartPtr <IPin> smartTeeCaptureOutputPin;
ComSmartPtr <IBaseFilter> asfWriter;
ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
ComSmartPtr<IGraphBuilder> graphBuilder;
ComSmartPtr<ISampleGrabber> sampleGrabber;
ComSmartPtr<IMediaControl> mediaControl;
ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
int activeUsers;
Array <int> widths, heights;
Array<int> widths, heights;
DWORD graphRegistrationID;
CriticalSection imageSwapLock;
bool imageNeedsFlipping;
Image loadingImage;
Image activeImage;
Image loadingImage, activeImage;
bool recordNextFrameTime;
int previewMaxFPS;
private:
void getVideoSizes (IAMStreamConfig* const streamConfig)
{
widths.clear();
@@ -607,8 +645,8 @@ private:
static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
ComSmartPtr<IPin>& result, const char* pinName = nullptr)
{
ComSmartPtr <IEnumPins> enumerator;
ComSmartPtr <IPin> pin;
ComSmartPtr<IEnumPins> enumerator;
ComSmartPtr<IPin> pin;
filter->EnumPins (enumerator.resetAndGetPointerAddress());
@@ -635,7 +673,7 @@ private:
bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
{
ComSmartPtr <IPin> in, out;
ComSmartPtr<IPin> in, out;
return getPin (first, PINDIR_OUTPUT, out)
&& getPin (second, PINDIR_INPUT, in)
@@ -644,11 +682,11 @@ private:
bool addGraphToRot()
{
ComSmartPtr <IRunningObjectTable> rot;
ComSmartPtr<IRunningObjectTable> rot;
if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
return false;
ComSmartPtr <IMoniker> moniker;
ComSmartPtr<IMoniker> moniker;
WCHAR buffer[128];
HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
if (FAILED (hr))
@@ -660,12 +698,14 @@ private:
void removeGraphFromRot()
{
ComSmartPtr <IRunningObjectTable> rot;
ComSmartPtr<IRunningObjectTable> rot;
if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
rot->Revoke (graphRegistrationID);
}
void disconnectAnyViewers();
static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
{
if (pmt->cbFormat != 0)
@@ -677,209 +717,76 @@ private:
CoTaskMemFree (pmt);
}
//==============================================================================
class GrabberCallback : public ComBaseClassHelperBase <ISampleGrabberCB>
{
public:
GrabberCallback (DShowCameraDeviceInteral& cam)
: ComBaseClassHelperBase <ISampleGrabberCB> (0), owner (cam) {}
JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
{
if (refId == IID_ISampleGrabberCB)
return castToType <ISampleGrabberCB> (result);
return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
}
STDMETHODIMP SampleCB (double, IMediaSample*) { return E_FAIL; }
STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
{
owner.handleFrame (time, buffer, bufferSize);
return S_OK;
}
private:
DShowCameraDeviceInteral& owner;
JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
};
ComSmartPtr <GrabberCallback> callback;
Array <CameraDevice::Listener*> listeners;
CriticalSection listenerLock;
//==============================================================================
JUCE_DECLARE_NON_COPYABLE (DShowCameraDeviceInteral)
JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
CameraDevice::CameraDevice (const String& nm, int /*index*/)
: name (nm)
{
isRecording = false;
}
CameraDevice::~CameraDevice()
{
stopRecording();
delete static_cast <DShowCameraDeviceInteral*> (internal);
internal = nullptr;
}
Component* CameraDevice::createViewerComponent()
{
return new DShowCameraDeviceInteral::DShowCaptureViewerComp (static_cast <DShowCameraDeviceInteral*> (internal));
}
String CameraDevice::getFileExtension()
{
return ".wmv";
}
void CameraDevice::startRecordingToFile (const File& file, int quality)
struct CameraDevice::ViewerComponent : public Component,
public ChangeListener
{
stopRecording();
DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
d->addUser();
isRecording = d->createFileCaptureFilter (file, quality);
}
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
return d->firstRecordedTime;
}
void CameraDevice::stopRecording()
{
if (isRecording)
ViewerComponent (CameraDevice& d)
: owner (d.pimpl), maxFPS (15), lastRepaintTime (0)
{
DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
d->removeFileCaptureFilter();
d->removeUser();
isRecording = false;
setOpaque (true);
owner->addChangeListener (this);
owner->addUser();
owner->viewerComps.add (this);
setSize (owner->width, owner->height);
}
}
void CameraDevice::addListener (Listener* listenerToAdd)
{
DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
if (listenerToAdd != nullptr)
d->addListener (listenerToAdd);
}
void CameraDevice::removeListener (Listener* listenerToRemove)
{
DShowCameraDeviceInteral* const d = (DShowCameraDeviceInteral*) internal;
if (listenerToRemove != nullptr)
d->removeListener (listenerToRemove);
}
//==============================================================================
namespace
{
ComSmartPtr <IBaseFilter> enumerateCameras (StringArray* const names,
const int deviceIndexToOpen,
String& name)
~ViewerComponent()
{
int index = 0;
ComSmartPtr <IBaseFilter> result;
ComSmartPtr <ICreateDevEnum> pDevEnum;
HRESULT hr = pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum);
if (SUCCEEDED (hr))
if (owner != nullptr)
{
ComSmartPtr <IEnumMoniker> enumerator;
hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);
if (SUCCEEDED (hr) && enumerator != nullptr)
{
ComSmartPtr <IMoniker> moniker;
ULONG fetched;
while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
{
ComSmartPtr <IBaseFilter> captureFilter;
hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
ComSmartPtr <IPropertyBag> propertyBag;
hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());
if (SUCCEEDED (hr))
{
VARIANT var;
var.vt = VT_BSTR;
hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
propertyBag = nullptr;
if (SUCCEEDED (hr))
{
if (names != nullptr)
names->add (var.bstrVal);
if (index == deviceIndexToOpen)
{
name = var.bstrVal;
result = captureFilter;
break;
}
++index;
}
}
}
}
}
owner->viewerComps.removeFirstMatchingValue (this);
owner->removeUser();
owner->removeChangeListener (this);
}
}
return result;
void ownerDeleted()
{
owner = nullptr;
}
}
StringArray CameraDevice::getAvailableDevices()
{
StringArray devs;
String dummy;
enumerateCameras (&devs, -1, dummy);
return devs;
}
void paint (Graphics& g) override
{
g.setColour (Colours::black);
g.setImageResamplingQuality (Graphics::lowResamplingQuality);
CameraDevice* CameraDevice::openDevice (int index,
int minWidth, int minHeight,
int maxWidth, int maxHeight)
{
ComSmartPtr <ICaptureGraphBuilder2> captureGraphBuilder;
HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
if (owner != nullptr)
owner->drawCurrentImage (g, getLocalBounds());
else
g.fillAll();
}
if (SUCCEEDED (hr))
void changeListenerCallback (ChangeBroadcaster*) override
{
String name;
const ComSmartPtr <IBaseFilter> filter (enumerateCameras (0, index, name));
const int64 now = Time::currentTimeMillis();
if (filter != nullptr)
if (now >= lastRepaintTime + (1000 / maxFPS))
{
ScopedPointer <CameraDevice> cam (new CameraDevice (name, index));
lastRepaintTime = now;
repaint();
DShowCameraDeviceInteral* const intern
= new DShowCameraDeviceInteral (cam, captureGraphBuilder, filter,
minWidth, minHeight, maxWidth, maxHeight);
cam->internal = intern;
if (intern->ok)
return cam.release();
if (owner != nullptr)
maxFPS = owner->previewMaxFPS;
}
}
return nullptr;
private:
Pimpl* owner;
int maxFPS;
int64 lastRepaintTime;
};
// Detaches every live viewer component from this device so that none of them
// keeps referencing it after the Pimpl is destroyed.
void CameraDevice::Pimpl::disconnectAnyViewers()
{
// Reverse iteration is defensive in case ownerDeleted() ever mutates the
// list; currently it only nulls the viewer's back-pointer to this object.
for (int i = viewerComps.size(); --i >= 0;)
viewerComps.getUnchecked(i)->ownerDeleted();
}
// The Windows capture pipeline records into Windows Media files, so
// recordings are given a ".wmv" extension (including the leading dot).
String CameraDevice::getFileExtension()
{
    static const char* const extension = ".wmv";
    return extension;
}

Loading…
Cancel
Save