/*
  ==============================================================================

   This file is part of the JUCE 6 technical preview.
   Copyright (c) 2020 - Raw Material Software Limited

   You may use this code under the terms of the GPL v3
   (see www.gnu.org/licenses).

   For this technical preview, this file is not subject to commercial licensing.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

interface ISampleGrabberCB  : public IUnknown
{
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};

interface ISampleGrabber  : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};

static const IID IID_ISampleGrabberCB  = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber    = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer  = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };

struct CameraDevice::Pimpl  : public ChangeBroadcaster
{
    Pimpl (CameraDevice& ownerToUse, const String&, int index,
           int minWidth, int minHeight, int maxWidth, int maxHeight,
           bool /*highQuality*/)
       : owner (ownerToUse)
    {
        HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
        if (FAILED (hr))
            return;

        filter = enumerateCameras (nullptr, index);
        if (filter == nullptr)
            return;

        hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
        if (FAILED (hr))
            return;

        hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
        if (FAILED (hr))
            return;

        hr = graphBuilder.QueryInterface (mediaControl);
        if (FAILED (hr))
            return;

        {
            ComSmartPtr<IAMStreamConfig> streamConfig;

            hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                     IID_IAMStreamConfig,
                                                     (void**) streamConfig.resetAndGetPointerAddress());

            if (streamConfig != nullptr)
            {
                getVideoSizes (streamConfig);

                if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                    return;
            }
        }

        hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
        if (FAILED (hr))
            return;

        hr = smartTee.CoCreateInstance (CLSID_SmartTee);
        if (FAILED (hr))
            return;

        hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
        if (FAILED (hr))
            return;
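        // The graph built here is: capture filter -> smart tee. The tee's "preview" pin feeds the
        // sample grabber (terminated by a null renderer) for on-screen frames, while its "capture"
        // pin is left free so an ASF writer can be attached later when recording to file.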
        if (! connectFilters (filter, smartTee))
            return;

        ComSmartPtr<IBaseFilter> sampleGrabberBase;
        hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
        if (FAILED (hr))
            return;

        hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
        if (FAILED (hr))
            return;

        {
            AM_MEDIA_TYPE mt = {};
            mt.majortype = MEDIATYPE_Video;
            mt.subtype = MEDIASUBTYPE_RGB24;
            mt.formattype = FORMAT_VideoInfo;
            sampleGrabber->SetMediaType (&mt);
        }

        callback = new GrabberCallback (*this);
        hr = sampleGrabber->SetCallback (callback, 1);

        hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
        if (FAILED (hr))
            return;

        ComSmartPtr<IPin> grabberInputPin;
        if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
                && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
                && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
            return;

        hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
        if (FAILED (hr))
            return;

        AM_MEDIA_TYPE mt = {};
        hr = sampleGrabber->GetConnectedMediaType (&mt);

        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
        width = pVih->bmiHeader.biWidth;
        height = pVih->bmiHeader.biHeight;

        ComSmartPtr<IBaseFilter> nullFilter;
        hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
        hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

        if (connectFilters (sampleGrabberBase, nullFilter)
             && addGraphToRot())
        {
            activeImage = Image (Image::RGB, width, height, true);
            loadingImage = Image (Image::RGB, width, height, true);

            openedSuccessfully = true;
        }
    }

    ~Pimpl()
    {
        if (mediaControl != nullptr)
            mediaControl->Stop();

        removeGraphFromRot();
        disconnectAnyViewers();

        if (sampleGrabber != nullptr)
        {
            sampleGrabber->SetCallback (nullptr, 0);
            sampleGrabber = nullptr;
        }

        callback = nullptr;
        graphBuilder = nullptr;
        mediaControl = nullptr;
        filter = nullptr;
        captureGraphBuilder = nullptr;
        smartTee = nullptr;
        smartTeePreviewOutputPin = nullptr;
        smartTeeCaptureOutputPin = nullptr;
        asfWriter = nullptr;
    }

    bool openedOk() const noexcept       { return openedSuccessfully; }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            jassert (pictureTakenCallbackToUse != nullptr);

            if (pictureTakenCallbackToUse == nullptr)
                return;

            pictureTakenCallback = std::move (pictureTakenCallbackToUse);
        }

        addUser();
    }

    void startRecordingToFile (const File& file, int quality)
    {
        addUser();
        isRecording = createFileCaptureFilter (file, quality);
    }

    void stopRecording()
    {
        if (isRecording)
        {
            removeFileCaptureFilter();
            removeUser();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedTime;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            addUser();

        listeners.add (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);

        if (listeners.size() == 0)
            removeUser();
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }

    void notifyPictureTakenIfNeeded (const Image& image)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            if (pictureTakenCallback == nullptr)
                return;
        }

        WeakReference<Pimpl> weakRef (this);
        MessageManager::callAsync ([weakRef, image]() mutable
                                   {
                                       if (weakRef == nullptr)
                                           return;

                                       if (weakRef->pictureTakenCallback != nullptr)
                                           weakRef->pictureTakenCallback (image);

                                       weakRef->pictureTakenCallback = nullptr;
                                   });
    }
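    // The capture graph is only run while something needs frames: the first listener, viewer
    // component, recording or still-picture request bumps activeUsers and starts the graph,
    // and the last user to go away stops it again.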
    void addUser()
    {
        if (openedSuccessfully && activeUsers++ == 0)
            mediaControl->Run();
    }

    void removeUser()
    {
        if (openedSuccessfully && --activeUsers == 0)
            mediaControl->Stop();
    }

    void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
    {
        if (recordNextFrameTime)
        {
            const double defaultCameraLatency = 0.1;

            firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
            recordNextFrameTime = false;

            ComSmartPtr<IPin> pin;
            if (getPin (filter, PINDIR_OUTPUT, pin))
            {
                ComSmartPtr<IAMPushSource> pushSource;
                HRESULT hr = pin.QueryInterface (pushSource);

                if (pushSource != nullptr)
                {
                    REFERENCE_TIME latency = 0;
                    hr = pushSource->GetLatency (&latency);

                    firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
                }
            }
        }

        {
            const int lineStride = width * 3;
            const ScopedLock sl (imageSwapLock);

            {
                loadingImage.duplicateIfShared();
                const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

                // The RGB24 frames arrive bottom-up, so copy them into the image the right way up.
                for (int i = 0; i < height; ++i)
                    memcpy (destData.getLinePointer ((height - 1) - i),
                            buffer + lineStride * i,
                            lineStride);
            }

            imageNeedsFlipping = true;
        }

        if (listeners.size() > 0)
            callListeners (loadingImage);

        notifyPictureTakenIfNeeded (loadingImage);

        sendChangeMessage();
    }

    void drawCurrentImage (Graphics& g, Rectangle<int> area)
    {
        if (imageNeedsFlipping)
        {
            const ScopedLock sl (imageSwapLock);
            std::swap (loadingImage, activeImage);
            imageNeedsFlipping = false;
        }

        Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                   .appliedTo (Rectangle<int> (width, height), area));

        RectangleList<int> borders (area);
        borders.subtract (centred);
        g.setColour (Colours::black);
        g.fillRectList (borders);

        g.drawImage (activeImage, centred.getX(), centred.getY(),
                     centred.getWidth(), centred.getHeight(), 0, 0, width, height);
    }

    bool createFileCaptureFilter (const File& file, int quality)
    {
        removeFileCaptureFilter();
        file.deleteFile();
        mediaControl->Stop();
        firstRecordedTime = Time();
        recordNextFrameTime = true;
        previewMaxFPS = 60;

        HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

        if (SUCCEEDED (hr))
        {
            ComSmartPtr<IFileSinkFilter> fileSink;
            hr = asfWriter.QueryInterface (fileSink);

            if (SUCCEEDED (hr))
            {
                hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

                if (SUCCEEDED (hr))
                {
                    hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IConfigAsfWriter> asfConfig;
                        hr = asfWriter.QueryInterface (asfConfig);
                        asfConfig->SetIndexMode (true);

                        ComSmartPtr<IWMProfileManager> profileManager;
                        hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                        // This gibberish is the DirectShow profile for a video-only wmv file.
                        String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                       "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                                     "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                                     "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                         "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                         "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                                      "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                           "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                             "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                               "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" "
                                                               "biypelspermeter=\"0\" biclrused=\"0\" biclrimportant=\"0\"/>"
                                           "</videoinfoheader>"
                                         "</wmmediatype>"
                                       "</streamconfig>"
                                     "</profile>");

                        const int fps[] = { 10, 15, 30 };
                        int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                        if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                            maxFramesPerSecond = (quality >> 24) & 0xff;

                        prof = prof.replace ("$WIDTH", String (width))
                                   .replace ("$HEIGHT", String (height))
                                   .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                        ComSmartPtr<IWMProfile> currentProfile;
                        hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                        hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                        if (SUCCEEDED (hr))
                        {
                            ComSmartPtr<IPin> asfWriterInputPin;

                            if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                            {
                                hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                                if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                     && SUCCEEDED (mediaControl->Run()))
                                {
                                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                    if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                                        previewMaxFPS = (quality >> 16) & 0xff;

                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }

        removeFileCaptureFilter();

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        return false;
    }

    void removeFileCaptureFilter()
    {
        mediaControl->Stop();

        if (asfWriter != nullptr)
        {
            graphBuilder->RemoveFilter (asfWriter);
            asfWriter = nullptr;
        }

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        previewMaxFPS = 60;
    }

    static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
    {
        int index = 0;
        ComSmartPtr<ICreateDevEnum> pDevEnum;

        if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
        {
            ComSmartPtr<IEnumMoniker> enumerator;
            HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

            if (SUCCEEDED (hr) && enumerator != nullptr)
            {
                ComSmartPtr<IMoniker> moniker;
                ULONG fetched;

                while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
                {
                    ComSmartPtr<IBaseFilter> captureFilter;
                    hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPropertyBag> propertyBag;
                        hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                        if (SUCCEEDED (hr))
                        {
                            VARIANT var;
                            var.vt = VT_BSTR;

                            hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                            propertyBag = nullptr;

                            if (SUCCEEDED (hr))
                            {
                                if (names != nullptr)
                                    names->add (var.bstrVal);

                                if (index == deviceIndexToOpen)
                                    return captureFilter;

                                ++index;
                            }
                        }
                    }
                }
            }
        }

        return nullptr;
    }

    static StringArray getAvailableDevices()
    {
        StringArray devs;
        enumerateCameras (&devs, -1);
        return devs;
    }

    struct GrabberCallback   : public ComBaseClassHelperBase<ISampleGrabberCB>
    {
        GrabberCallback (Pimpl& p)
            : ComBaseClassHelperBase (0), owner (p) {}

        JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
        {
            if (refId == IID_ISampleGrabberCB)
                return castToType<ISampleGrabberCB> (result);

            return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
        }

        STDMETHODIMP SampleCB (double, IMediaSample*)   { return E_FAIL; }

        STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
        {
            owner.handleFrame (time, buffer, bufferSize);
            return S_OK;
        }

        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
    };

    CameraDevice& owner;

    ComSmartPtr<GrabberCallback> callback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    CriticalSection pictureTakenCallbackLock;
    std::function<void (const Image&)> pictureTakenCallback;

    bool isRecording = false, openedSuccessfully = false;
    int width = 0, height = 0;
    Time firstRecordedTime;

    Array<ViewerComponent*> viewerComps;

    ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
    ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<ISampleGrabber> sampleGrabber;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
    int activeUsers = 0;
    Array<int> widths, heights;
    DWORD graphRegistrationID = 0;

    CriticalSection imageSwapLock;
    bool imageNeedsFlipping = false;
    Image loadingImage, activeImage;

    bool recordNextFrameTime = false;
    int previewMaxFPS = 60;

    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)

private:
    void getVideoSizes (IAMStreamConfig* const streamConfig)
    {
        widths.clear();
        heights.clear();

        int count = 0, size = 0;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            for (int i = 0; i < count; ++i)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE* config;

                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
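                    // Collect each unique frame size reported by the driver; duplicates are skipped.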
                    const int w = scc.InputSize.cx;
                    const int h = scc.InputSize.cy;

                    bool duplicate = false;

                    for (int j = widths.size(); --j >= 0;)
                    {
                        if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
                        {
                            duplicate = true;
                            break;
                        }
                    }

                    if (! duplicate)
                    {
                        widths.add (w);
                        heights.add (h);
                    }

                    deleteMediaType (config);
                }
            }
        }
    }

    bool selectVideoSize (IAMStreamConfig* const streamConfig,
                          const int minWidth, const int minHeight,
                          const int maxWidth, const int maxHeight)
    {
        int count = 0, size = 0, bestArea = 0, bestIndex = -1;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            AM_MEDIA_TYPE* config;
            VIDEO_STREAM_CONFIG_CAPS scc;

            for (int i = 0; i < count; ++i)
            {
                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    if (scc.InputSize.cx >= minWidth
                         && scc.InputSize.cy >= minHeight
                         && scc.InputSize.cx <= maxWidth
                         && scc.InputSize.cy <= maxHeight)
                    {
                        int area = scc.InputSize.cx * scc.InputSize.cy;

                        if (area > bestArea)
                        {
                            bestIndex = i;
                            bestArea = area;
                        }
                    }

                    deleteMediaType (config);
                }
            }

            if (bestIndex >= 0)
            {
                HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);

                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }

        return false;
    }

    static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                        ComSmartPtr<IPin>& result, const char* pinName = nullptr)
    {
        ComSmartPtr<IEnumPins> enumerator;
        ComSmartPtr<IPin> pin;

        filter->EnumPins (enumerator.resetAndGetPointerAddress());

        while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
        {
            PIN_DIRECTION dir;
            pin->QueryDirection (&dir);

            if (wantedDirection == dir)
            {
                PIN_INFO info = {};
                pin->QueryPinInfo (&info);

                if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
                {
                    result = pin;
                    return true;
                }
            }
        }

        return false;
    }

    bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
    {
        ComSmartPtr<IPin> in, out;

        return getPin (first, PINDIR_OUTPUT, out)
                && getPin (second, PINDIR_INPUT, in)
                && SUCCEEDED (graphBuilder->Connect (out, in));
    }

    bool addGraphToRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;
        if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            return false;

        ComSmartPtr<IMoniker> moniker;
        WCHAR buffer[128] = {};
        HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
        if (FAILED (hr))
            return false;

        graphRegistrationID = 0;
        return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
    }

    void removeGraphFromRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            rot->Revoke (graphRegistrationID);
    }

    void disconnectAnyViewers();

    static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
    {
        if (pmt->cbFormat != 0)
            CoTaskMemFree ((PVOID) pmt->pbFormat);

        if (pmt->pUnk != nullptr)
            pmt->pUnk->Release();

        CoTaskMemFree (pmt);
    }

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

//==============================================================================
struct CameraDevice::ViewerComponent  : public Component,
                                        public ChangeListener
{
    ViewerComponent (CameraDevice& d)
       : owner (d.pimpl.get()), maxFPS (15), lastRepaintTime (0)
    {
        setOpaque (true);
        owner->addChangeListener (this);
        owner->addUser();
        owner->viewerComps.add (this);
        setSize (owner->width, owner->height);
    }

    ~ViewerComponent()
    {
        if (owner != nullptr)
        {
            owner->viewerComps.removeFirstMatchingValue (this);
            owner->removeUser();
            owner->removeChangeListener (this);
        }
    }

    void ownerDeleted()
    {
        owner = nullptr;
    }

    void paint (Graphics& g) override
    {
        g.setColour (Colours::black);
        g.setImageResamplingQuality (Graphics::lowResamplingQuality);

        if (owner != nullptr)
            owner->drawCurrentImage (g, getLocalBounds());
        else
            g.fillAll();
    }

    void changeListenerCallback (ChangeBroadcaster*) override
    {
        const int64 now = Time::currentTimeMillis();

        if (now >= lastRepaintTime + (1000 / maxFPS))
        {
            lastRepaintTime = now;
            repaint();

            if (owner != nullptr)
                maxFPS = owner->previewMaxFPS;
        }
    }

private:
    Pimpl* owner;
    int maxFPS;
    int64 lastRepaintTime;
};

void CameraDevice::Pimpl::disconnectAnyViewers()
{
    for (int i = viewerComps.size(); --i >= 0;)
        viewerComps.getUnchecked (i)->ownerDeleted();
}

String CameraDevice::getFileExtension()
{
    return ".wmv";
}
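
//==============================================================================
// Rough usage sketch of the public CameraDevice API that drives this backend (simplified,
// no error handling or ownership management; assumes a camera exists at index 0):
//
//     if (auto* camera = CameraDevice::openDevice (0))
//     {
//         std::unique_ptr<Component> preview (camera->createViewerComponent());
//
//         camera->startRecordingToFile (File::getSpecialLocation (File::userMoviesDirectory)
//                                           .getChildFile ("capture" + CameraDevice::getFileExtension()));
//     }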