The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

/*
  ==============================================================================

   This file is part of the JUCE 6 technical preview.
   Copyright (c) 2020 - Raw Material Software Limited

   You may use this code under the terms of the GPL v3
   (see www.gnu.org/licenses).

   For this technical preview, this file is not subject to commercial licensing.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
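
// The DirectShow SampleGrabber interfaces and their GUIDs are declared locally below rather than
// pulled in from qedit.h, which is deprecated and not reliably available in recent Windows SDKs.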
interface ISampleGrabberCB  : public IUnknown
{
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};

interface ISampleGrabber  : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};

static const IID IID_ISampleGrabberCB  = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber    = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer  = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
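
// Pimpl owns the DirectShow capture graph. The camera's capture filter feeds a Smart Tee: the
// tee's "preview" pin runs through a SampleGrabber (which delivers RGB24 frames to handleFrame())
// into a Null Renderer, and the "capture" pin is connected to a WM ASF writer while recording.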
struct CameraDevice::Pimpl  : public ChangeBroadcaster
{
    Pimpl (CameraDevice& ownerToUse, const String&, int index,
           int minWidth, int minHeight, int maxWidth, int maxHeight,
           bool /*highQuality*/)
        : owner (ownerToUse)
    {
        HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);

        if (FAILED (hr))
            return;

        filter = enumerateCameras (nullptr, index);

        if (filter == nullptr)
            return;

        hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);

        if (FAILED (hr))
            return;

        hr = captureGraphBuilder->SetFiltergraph (graphBuilder);

        if (FAILED (hr))
            return;

        hr = graphBuilder.QueryInterface (mediaControl);

        if (FAILED (hr))
            return;

        {
            ComSmartPtr<IAMStreamConfig> streamConfig;

            hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                     IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());

            if (streamConfig != nullptr)
            {
                getVideoSizes (streamConfig);

                if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                    return;
            }
        }

        hr = graphBuilder->AddFilter (filter, _T("Video Capture"));

        if (FAILED (hr))
            return;

        hr = smartTee.CoCreateInstance (CLSID_SmartTee);

        if (FAILED (hr))
            return;

        hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));

        if (FAILED (hr))
            return;

        if (! connectFilters (filter, smartTee))
            return;

        ComSmartPtr<IBaseFilter> sampleGrabberBase;
        hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);

        if (FAILED (hr))
            return;

        hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);

        if (FAILED (hr))
            return;

        {
            AM_MEDIA_TYPE mt = {};
            mt.majortype = MEDIATYPE_Video;
            mt.subtype = MEDIASUBTYPE_RGB24;
            mt.formattype = FORMAT_VideoInfo;
            sampleGrabber->SetMediaType (&mt);
        }

        callback = new GrabberCallback (*this);
        hr = sampleGrabber->SetCallback (callback, 1);

        hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));

        if (FAILED (hr))
            return;

        ComSmartPtr<IPin> grabberInputPin;

        if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
                && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
                && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
            return;

        hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);

        if (FAILED (hr))
            return;

        AM_MEDIA_TYPE mt = {};
        hr = sampleGrabber->GetConnectedMediaType (&mt);

        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
        width = pVih->bmiHeader.biWidth;
        height = pVih->bmiHeader.biHeight;

        ComSmartPtr<IBaseFilter> nullFilter;
        hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
        hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

        if (connectFilters (sampleGrabberBase, nullFilter)
             && addGraphToRot())
        {
            activeImage = Image (Image::RGB, width, height, true);
            loadingImage = Image (Image::RGB, width, height, true);

            openedSuccessfully = true;
        }
    }
    ~Pimpl()
    {
        if (mediaControl != nullptr)
            mediaControl->Stop();

        removeGraphFromRot();
        disconnectAnyViewers();

        if (sampleGrabber != nullptr)
        {
            sampleGrabber->SetCallback (nullptr, 0);
            sampleGrabber = nullptr;
        }

        callback = nullptr;
        graphBuilder = nullptr;
        mediaControl = nullptr;
        filter = nullptr;
        captureGraphBuilder = nullptr;
        smartTee = nullptr;
        smartTeePreviewOutputPin = nullptr;
        smartTeeCaptureOutputPin = nullptr;
        asfWriter = nullptr;
    }
    bool openedOk() const noexcept    { return openedSuccessfully; }

    void takeStillPicture (std::function<void(const Image&)> pictureTakenCallbackToUse)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            jassert (pictureTakenCallbackToUse != nullptr);

            if (pictureTakenCallbackToUse == nullptr)
                return;

            pictureTakenCallback = std::move (pictureTakenCallbackToUse);
        }

        addUser();
    }

    void startRecordingToFile (const File& file, int quality)
    {
        addUser();
        isRecording = createFileCaptureFilter (file, quality);
    }

    void stopRecording()
    {
        if (isRecording)
        {
            removeFileCaptureFilter();
            removeUser();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedTime;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            addUser();

        listeners.add (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);

        if (listeners.size() == 0)
            removeUser();
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }

    void notifyPictureTakenIfNeeded (const Image& image)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            if (pictureTakenCallback == nullptr)
                return;
        }

        WeakReference<Pimpl> weakRef (this);
        MessageManager::callAsync ([weakRef, image]() mutable
                                   {
                                       if (weakRef == nullptr)
                                           return;

                                       if (weakRef->pictureTakenCallback != nullptr)
                                           weakRef->pictureTakenCallback (image);

                                       weakRef->pictureTakenCallback = nullptr;
                                   });
    }

    void addUser()
    {
        if (openedSuccessfully && activeUsers++ == 0)
            mediaControl->Run();
    }

    void removeUser()
    {
        if (openedSuccessfully && --activeUsers == 0)
            mediaControl->Stop();
    }
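
    // Called by GrabberCallback::BufferCB for each frame delivered by the SampleGrabber. The RGB24
    // buffer arrives bottom-up, so rows are copied in reverse order into loadingImage, which is
    // swapped into activeImage on the next paint.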
    void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
    {
        if (recordNextFrameTime)
        {
            const double defaultCameraLatency = 0.1;

            firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
            recordNextFrameTime = false;

            ComSmartPtr<IPin> pin;

            if (getPin (filter, PINDIR_OUTPUT, pin))
            {
                ComSmartPtr<IAMPushSource> pushSource;
                HRESULT hr = pin.QueryInterface (pushSource);

                if (pushSource != nullptr)
                {
                    REFERENCE_TIME latency = 0;
                    hr = pushSource->GetLatency (&latency);

                    firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
                }
            }
        }

        {
            const int lineStride = width * 3;
            const ScopedLock sl (imageSwapLock);

            {
                loadingImage.duplicateIfShared();
                const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

                for (int i = 0; i < height; ++i)
                    memcpy (destData.getLinePointer ((height - 1) - i),
                            buffer + lineStride * i,
                            lineStride);
            }

            imageNeedsFlipping = true;
        }

        if (listeners.size() > 0)
            callListeners (loadingImage);

        notifyPictureTakenIfNeeded (loadingImage);

        sendChangeMessage();
    }
    void drawCurrentImage (Graphics& g, Rectangle<int> area)
    {
        if (imageNeedsFlipping)
        {
            const ScopedLock sl (imageSwapLock);
            std::swap (loadingImage, activeImage);
            imageNeedsFlipping = false;
        }

        Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                    .appliedTo (Rectangle<int> (width, height), area));

        RectangleList<int> borders (area);
        borders.subtract (centred);
        g.setColour (Colours::black);
        g.fillRectList (borders);

        g.drawImage (activeImage, centred.getX(), centred.getY(),
                     centred.getWidth(), centred.getHeight(), 0, 0, width, height);
    }
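
    // Builds the recording branch of the graph: a WM ASF writer configured from an XML profile
    // string and connected to the smart tee's capture pin, producing a video-only .wmv file.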
    bool createFileCaptureFilter (const File& file, int quality)
    {
        removeFileCaptureFilter();
        file.deleteFile();
        mediaControl->Stop();
        firstRecordedTime = Time();
        recordNextFrameTime = true;
        previewMaxFPS = 60;

        HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

        if (SUCCEEDED (hr))
        {
            ComSmartPtr<IFileSinkFilter> fileSink;
            hr = asfWriter.QueryInterface (fileSink);

            if (SUCCEEDED (hr))
            {
                hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

                if (SUCCEEDED (hr))
                {
                    hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IConfigAsfWriter> asfConfig;
                        hr = asfWriter.QueryInterface (asfConfig);
                        asfConfig->SetIndexMode (true);
                        ComSmartPtr<IWMProfileManager> profileManager;
                        hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                        // This gibberish is the DirectShow profile for a video-only wmv file.
                        String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                       "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                                     "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                                     "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                         "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                         "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                                      "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                           "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                             "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                               "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
                                                               "biclrused=\"0\" biclrimportant=\"0\"/>"
                                           "</videoinfoheader>"
                                         "</wmmediatype>"
                                       "</streamconfig>"
                                     "</profile>");

                        const int fps[] = { 10, 15, 30 };
                        int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                        if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                            maxFramesPerSecond = (quality >> 24) & 0xff;

                        prof = prof.replace ("$WIDTH", String (width))
                                   .replace ("$HEIGHT", String (height))
                                   .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                        ComSmartPtr<IWMProfile> currentProfile;
                        hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                        hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                        if (SUCCEEDED (hr))
                        {
                            ComSmartPtr<IPin> asfWriterInputPin;

                            if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                            {
                                hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                                if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                     && SUCCEEDED (mediaControl->Run()))
                                {
                                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                    if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                                        previewMaxFPS = (quality >> 16) & 0xff;

                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }

        removeFileCaptureFilter();

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        return false;
    }
    void removeFileCaptureFilter()
    {
        mediaControl->Stop();

        if (asfWriter != nullptr)
        {
            graphBuilder->RemoveFilter (asfWriter);
            asfWriter = nullptr;
        }

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        previewMaxFPS = 60;
    }
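
    // Walks the system's video input devices via the DirectShow device enumerator, optionally
    // collecting their friendly names, and returns the capture filter at the requested index.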
    static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
    {
        int index = 0;
        ComSmartPtr<ICreateDevEnum> pDevEnum;

        if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
        {
            ComSmartPtr<IEnumMoniker> enumerator;
            HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

            if (SUCCEEDED (hr) && enumerator != nullptr)
            {
                ComSmartPtr<IMoniker> moniker;
                ULONG fetched;

                while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
                {
                    ComSmartPtr<IBaseFilter> captureFilter;
                    hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPropertyBag> propertyBag;
                        hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                        if (SUCCEEDED (hr))
                        {
                            VARIANT var;
                            var.vt = VT_BSTR;

                            hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                            propertyBag = nullptr;

                            if (SUCCEEDED (hr))
                            {
                                if (names != nullptr)
                                    names->add (var.bstrVal);

                                if (index == deviceIndexToOpen)
                                    return captureFilter;

                                ++index;
                            }
                        }
                    }
                }
            }
        }

        return nullptr;
    }

    static StringArray getAvailableDevices()
    {
        StringArray devs;
        enumerateCameras (&devs, -1);
        return devs;
    }
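
    // COM callback handed to the SampleGrabber: BufferCB forwards each raw frame to Pimpl::handleFrame().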
    struct GrabberCallback  : public ComBaseClassHelperBase<ISampleGrabberCB>
    {
        GrabberCallback (Pimpl& p)
            : ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}

        JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
        {
            if (refId == IID_ISampleGrabberCB)
                return castToType<ISampleGrabberCB> (result);

            return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
        }

        STDMETHODIMP SampleCB (double, IMediaSample*)  { return E_FAIL; }

        STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
        {
            owner.handleFrame (time, buffer, bufferSize);
            return S_OK;
        }

        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
    };
    CameraDevice& owner;

    ComSmartPtr<GrabberCallback> callback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    CriticalSection pictureTakenCallbackLock;
    std::function<void(const Image&)> pictureTakenCallback;

    bool isRecording = false, openedSuccessfully = false;
    int width = 0, height = 0;
    Time firstRecordedTime;

    Array<ViewerComponent*> viewerComps;

    ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
    ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<ISampleGrabber> sampleGrabber;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
    int activeUsers = 0;
    Array<int> widths, heights;
    DWORD graphRegistrationID;

    CriticalSection imageSwapLock;
    bool imageNeedsFlipping = false;
    Image loadingImage, activeImage;

    bool recordNextFrameTime = false;
    int previewMaxFPS = 60;

    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
private:
    void getVideoSizes (IAMStreamConfig* const streamConfig)
    {
        widths.clear();
        heights.clear();

        int count = 0, size = 0;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            for (int i = 0; i < count; ++i)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE* config;

                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    const int w = scc.InputSize.cx;
                    const int h = scc.InputSize.cy;

                    bool duplicate = false;

                    for (int j = widths.size(); --j >= 0;)
                    {
                        if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
                        {
                            duplicate = true;
                            break;
                        }
                    }

                    if (! duplicate)
                    {
                        widths.add (w);
                        heights.add (h);
                    }

                    deleteMediaType (config);
                }
            }
        }
    }
    bool selectVideoSize (IAMStreamConfig* const streamConfig,
                          const int minWidth, const int minHeight,
                          const int maxWidth, const int maxHeight)
    {
        int count = 0, size = 0, bestArea = 0, bestIndex = -1;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            AM_MEDIA_TYPE* config;
            VIDEO_STREAM_CONFIG_CAPS scc;

            for (int i = 0; i < count; ++i)
            {
                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    if (scc.InputSize.cx >= minWidth
                         && scc.InputSize.cy >= minHeight
                         && scc.InputSize.cx <= maxWidth
                         && scc.InputSize.cy <= maxHeight)
                    {
                        int area = scc.InputSize.cx * scc.InputSize.cy;

                        if (area > bestArea)
                        {
                            bestIndex = i;
                            bestArea = area;
                        }
                    }

                    deleteMediaType (config);
                }
            }

            if (bestIndex >= 0)
            {
                HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);

                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }

        return false;
    }
    static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                        ComSmartPtr<IPin>& result, const char* pinName = nullptr)
    {
        ComSmartPtr<IEnumPins> enumerator;
        ComSmartPtr<IPin> pin;

        filter->EnumPins (enumerator.resetAndGetPointerAddress());

        while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
        {
            PIN_DIRECTION dir;
            pin->QueryDirection (&dir);

            if (wantedDirection == dir)
            {
                PIN_INFO info = {};
                pin->QueryPinInfo (&info);

                if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
                {
                    result = pin;
                    return true;
                }
            }
        }

        return false;
    }
    bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
    {
        ComSmartPtr<IPin> in, out;

        return getPin (first, PINDIR_OUTPUT, out)
                && getPin (second, PINDIR_INPUT, in)
                && SUCCEEDED (graphBuilder->Connect (out, in));
    }
    bool addGraphToRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            return false;

        ComSmartPtr<IMoniker> moniker;
        WCHAR buffer[128];
        HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());

        if (FAILED (hr))
            return false;

        graphRegistrationID = 0;
        return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
    }

    void removeGraphFromRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            rot->Revoke (graphRegistrationID);
    }
    void disconnectAnyViewers();

    static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
    {
        if (pmt->cbFormat != 0)
            CoTaskMemFree ((PVOID) pmt->pbFormat);

        if (pmt->pUnk != nullptr)
            pmt->pUnk->Release();

        CoTaskMemFree (pmt);
    }

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
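// Live preview component: it repaints (throttled to previewMaxFPS) whenever the Pimpl broadcasts
// a change, and keeps the capture graph running while it exists via addUser()/removeUser().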
struct CameraDevice::ViewerComponent  : public Component,
                                        public ChangeListener
{
    ViewerComponent (CameraDevice& d)
        : owner (d.pimpl.get()), maxFPS (15), lastRepaintTime (0)
    {
        setOpaque (true);
        owner->addChangeListener (this);
        owner->addUser();
        owner->viewerComps.add (this);
        setSize (owner->width, owner->height);
    }

    ~ViewerComponent()
    {
        if (owner != nullptr)
        {
            owner->viewerComps.removeFirstMatchingValue (this);
            owner->removeUser();
            owner->removeChangeListener (this);
        }
    }

    void ownerDeleted()
    {
        owner = nullptr;
    }

    void paint (Graphics& g) override
    {
        g.setColour (Colours::black);
        g.setImageResamplingQuality (Graphics::lowResamplingQuality);

        if (owner != nullptr)
            owner->drawCurrentImage (g, getLocalBounds());
        else
            g.fillAll();
    }

    void changeListenerCallback (ChangeBroadcaster*) override
    {
        const int64 now = Time::currentTimeMillis();

        if (now >= lastRepaintTime + (1000 / maxFPS))
        {
            lastRepaintTime = now;
            repaint();

            if (owner != nullptr)
                maxFPS = owner->previewMaxFPS;
        }
    }

private:
    Pimpl* owner;
    int maxFPS;
    int64 lastRepaintTime;
};
void CameraDevice::Pimpl::disconnectAnyViewers()
{
    for (int i = viewerComps.size(); --i >= 0;)
        viewerComps.getUnchecked(i)->ownerDeleted();
}

String CameraDevice::getFileExtension()
{
    return ".wmv";
}