The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
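
// Minimal declarations of the DirectShow SampleGrabber interfaces and class IDs
// used below (these originally lived in qedit.h, which is not shipped with
// newer Windows SDKs).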
interface ISampleGrabberCB  : public IUnknown
{
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};

interface ISampleGrabber  : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};

static const IID IID_ISampleGrabberCB  = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber    = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer  = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
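
//==============================================================================
// Builds and owns the DirectShow capture graph for a single camera:
// capture filter -> smart tee -> sample grabber -> null renderer, with an
// optional ASF writer branch attached when recording to a .wmv file.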
struct CameraDevice::Pimpl  : public ChangeBroadcaster
{
    Pimpl (CameraDevice& ownerToUse, const String&, int index,
           int minWidth, int minHeight, int maxWidth, int maxHeight,
           bool /*highQuality*/)
        : owner (ownerToUse)
    {
        HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);

        if (FAILED (hr))
            return;

        filter = enumerateCameras (nullptr, index);

        if (filter == nullptr)
            return;

        hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);

        if (FAILED (hr))
            return;

        hr = captureGraphBuilder->SetFiltergraph (graphBuilder);

        if (FAILED (hr))
            return;

        hr = graphBuilder.QueryInterface (mediaControl);

        if (FAILED (hr))
            return;

        {
            ComSmartPtr<IAMStreamConfig> streamConfig;

            hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                     IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());

            if (streamConfig != nullptr)
            {
                getVideoSizes (streamConfig);

                if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                    return;
            }
        }

        hr = graphBuilder->AddFilter (filter, _T("Video Capture"));

        if (FAILED (hr))
            return;

        hr = smartTee.CoCreateInstance (CLSID_SmartTee);

        if (FAILED (hr))
            return;

        hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));

        if (FAILED (hr))
            return;

        if (! connectFilters (filter, smartTee))
            return;

        ComSmartPtr<IBaseFilter> sampleGrabberBase;
        hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);

        if (FAILED (hr))
            return;

        hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);

        if (FAILED (hr))
            return;

        {
            AM_MEDIA_TYPE mt = { 0 };
            mt.majortype  = MEDIATYPE_Video;
            mt.subtype    = MEDIASUBTYPE_RGB24;
            mt.formattype = FORMAT_VideoInfo;
            sampleGrabber->SetMediaType (&mt);
        }

        callback = new GrabberCallback (*this);
        hr = sampleGrabber->SetCallback (callback, 1);

        hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));

        if (FAILED (hr))
            return;

        ComSmartPtr<IPin> grabberInputPin;

        if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
                && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
                && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
            return;

        hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);

        if (FAILED (hr))
            return;

        AM_MEDIA_TYPE mt = { 0 };
        hr = sampleGrabber->GetConnectedMediaType (&mt);

        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
        width  = pVih->bmiHeader.biWidth;
        height = pVih->bmiHeader.biHeight;

        ComSmartPtr<IBaseFilter> nullFilter;
        hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
        hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

        if (connectFilters (sampleGrabberBase, nullFilter)
             && addGraphToRot())
        {
            activeImage  = Image (Image::RGB, width, height, true);
            loadingImage = Image (Image::RGB, width, height, true);

            openedSuccessfully = true;
        }
    }
    ~Pimpl()
    {
        if (mediaControl != nullptr)
            mediaControl->Stop();

        removeGraphFromRot();
        disconnectAnyViewers();

        if (sampleGrabber != nullptr)
        {
            sampleGrabber->SetCallback (nullptr, 0);
            sampleGrabber = nullptr;
        }

        callback = nullptr;
        graphBuilder = nullptr;
        mediaControl = nullptr;
        filter = nullptr;
        captureGraphBuilder = nullptr;
        smartTee = nullptr;
        smartTeePreviewOutputPin = nullptr;
        smartTeeCaptureOutputPin = nullptr;
        asfWriter = nullptr;
    }
    bool openedOk() const noexcept    { return openedSuccessfully; }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            jassert (pictureTakenCallbackToUse != nullptr);

            if (pictureTakenCallbackToUse == nullptr)
                return;

            pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);
        }

        addUser();
    }
    void startRecordingToFile (const File& file, int quality)
    {
        addUser();
        isRecording = createFileCaptureFilter (file, quality);
    }

    void stopRecording()
    {
        if (isRecording)
        {
            removeFileCaptureFilter();
            removeUser();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedTime;
    }
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            addUser();

        listeners.add (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);

        if (listeners.size() == 0)
            removeUser();
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }
    void notifyPictureTakenIfNeeded (const Image& image)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            if (pictureTakenCallback == nullptr)
                return;
        }

        WeakReference<Pimpl> weakRef (this);
        MessageManager::callAsync ([weakRef, image]() mutable
                                   {
                                       if (weakRef == nullptr)
                                           return;

                                       if (weakRef->pictureTakenCallback != nullptr)
                                           weakRef->pictureTakenCallback (image);

                                       weakRef->pictureTakenCallback = nullptr;
                                   });
    }

    void addUser()
    {
        if (openedSuccessfully && activeUsers++ == 0)
            mediaControl->Run();
    }

    void removeUser()
    {
        if (openedSuccessfully && --activeUsers == 0)
            mediaControl->Stop();
    }
    void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
    {
        if (recordNextFrameTime)
        {
            const double defaultCameraLatency = 0.1;

            firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
            recordNextFrameTime = false;

            ComSmartPtr<IPin> pin;

            if (getPin (filter, PINDIR_OUTPUT, pin))
            {
                ComSmartPtr<IAMPushSource> pushSource;
                HRESULT hr = pin.QueryInterface (pushSource);

                if (pushSource != nullptr)
                {
                    REFERENCE_TIME latency = 0;
                    hr = pushSource->GetLatency (&latency);

                    firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
                }
            }
        }

        {
            const int lineStride = width * 3;
            const ScopedLock sl (imageSwapLock);

            {
                loadingImage.duplicateIfShared();
                const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

                for (int i = 0; i < height; ++i)
                    memcpy (destData.getLinePointer ((height - 1) - i),
                            buffer + lineStride * i,
                            lineStride);
            }

            imageNeedsFlipping = true;
        }

        if (listeners.size() > 0)
            callListeners (loadingImage);

        notifyPictureTakenIfNeeded (loadingImage);

        sendChangeMessage();
    }
    void drawCurrentImage (Graphics& g, Rectangle<int> area)
    {
        if (imageNeedsFlipping)
        {
            const ScopedLock sl (imageSwapLock);
            std::swap (loadingImage, activeImage);
            imageNeedsFlipping = false;
        }

        Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                    .appliedTo (Rectangle<int> (width, height), area));

        RectangleList<int> borders (area);
        borders.subtract (centred);
        g.setColour (Colours::black);
        g.fillRectList (borders);

        g.drawImage (activeImage, centred.getX(), centred.getY(),
                     centred.getWidth(), centred.getHeight(), 0, 0, width, height);
    }
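
    // Attaches a WM ASF writer to the smart tee's capture pin so the stream is
    // encoded to a .wmv file. The quality argument selects the frame rate, and
    // its high bytes can carry explicit frame rates for testing.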
    bool createFileCaptureFilter (const File& file, int quality)
    {
        removeFileCaptureFilter();
        file.deleteFile();
        mediaControl->Stop();
        firstRecordedTime = Time();
        recordNextFrameTime = true;
        previewMaxFPS = 60;

        HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

        if (SUCCEEDED (hr))
        {
            ComSmartPtr<IFileSinkFilter> fileSink;
            hr = asfWriter.QueryInterface (fileSink);

            if (SUCCEEDED (hr))
            {
                hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

                if (SUCCEEDED (hr))
                {
                    hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IConfigAsfWriter> asfConfig;
                        hr = asfWriter.QueryInterface (asfConfig);
                        asfConfig->SetIndexMode (true);

                        ComSmartPtr<IWMProfileManager> profileManager;
                        hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                        // This gibberish is the DirectShow profile for a video-only wmv file.
                        String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                       "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                           "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                           "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                         "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                         "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                             "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                           "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                             "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                 "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
                                                 "biclrused=\"0\" biclrimportant=\"0\"/>"
                                           "</videoinfoheader>"
                                         "</wmmediatype>"
                                       "</streamconfig>"
                                     "</profile>");

                        const int fps[] = { 10, 15, 30 };
                        int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                        if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                            maxFramesPerSecond = (quality >> 24) & 0xff;

                        prof = prof.replace ("$WIDTH", String (width))
                                   .replace ("$HEIGHT", String (height))
                                   .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                        ComSmartPtr<IWMProfile> currentProfile;
                        hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                        hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                        if (SUCCEEDED (hr))
                        {
                            ComSmartPtr<IPin> asfWriterInputPin;

                            if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                            {
                                hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                                if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                     && SUCCEEDED (mediaControl->Run()))
                                {
                                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                    if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                                        previewMaxFPS = (quality >> 16) & 0xff;

                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }

        removeFileCaptureFilter();

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        return false;
    }
    void removeFileCaptureFilter()
    {
        mediaControl->Stop();

        if (asfWriter != nullptr)
        {
            graphBuilder->RemoveFilter (asfWriter);
            asfWriter = nullptr;
        }

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        previewMaxFPS = 60;
    }
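
    // Walks the system's video-input device category; optionally collects the
    // friendly names of all devices, and returns the capture filter for the
    // device at deviceIndexToOpen (or nullptr if it isn't found).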
    static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
    {
        int index = 0;
        ComSmartPtr<ICreateDevEnum> pDevEnum;

        if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
        {
            ComSmartPtr<IEnumMoniker> enumerator;
            HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

            if (SUCCEEDED (hr) && enumerator != nullptr)
            {
                ComSmartPtr<IMoniker> moniker;
                ULONG fetched;

                while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
                {
                    ComSmartPtr<IBaseFilter> captureFilter;
                    hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPropertyBag> propertyBag;
                        hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                        if (SUCCEEDED (hr))
                        {
                            VARIANT var;
                            var.vt = VT_BSTR;

                            hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                            propertyBag = nullptr;

                            if (SUCCEEDED (hr))
                            {
                                if (names != nullptr)
                                    names->add (var.bstrVal);

                                if (index == deviceIndexToOpen)
                                    return captureFilter;

                                ++index;
                            }
                        }
                    }
                }
            }
        }

        return nullptr;
    }
    static StringArray getAvailableDevices()
    {
        StringArray devs;
        enumerateCameras (&devs, -1);
        return devs;
    }
    struct GrabberCallback   : public ComBaseClassHelperBase<ISampleGrabberCB>
    {
        GrabberCallback (Pimpl& p)
            : ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}

        JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
        {
            if (refId == IID_ISampleGrabberCB)
                return castToType<ISampleGrabberCB> (result);

            return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
        }

        STDMETHODIMP SampleCB (double, IMediaSample*)  { return E_FAIL; }

        STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
        {
            owner.handleFrame (time, buffer, bufferSize);
            return S_OK;
        }

        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
    };
    CameraDevice& owner;

    ComSmartPtr<GrabberCallback> callback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    CriticalSection pictureTakenCallbackLock;
    std::function<void (const Image&)> pictureTakenCallback;

    bool isRecording = false, openedSuccessfully = false;
    int width = 0, height = 0;
    Time firstRecordedTime;

    Array<ViewerComponent*> viewerComps;

    ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
    ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<ISampleGrabber> sampleGrabber;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
    int activeUsers = 0;
    Array<int> widths, heights;
    DWORD graphRegistrationID;

    CriticalSection imageSwapLock;
    bool imageNeedsFlipping = false;
    Image loadingImage, activeImage;

    bool recordNextFrameTime = false;
    int previewMaxFPS = 60;

    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)

private:
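    // Queries the capture pin's IAMStreamConfig for the supported frame sizes,
    // storing them (without duplicates) in the widths/heights arrays.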
    void getVideoSizes (IAMStreamConfig* const streamConfig)
    {
        widths.clear();
        heights.clear();

        int count = 0, size = 0;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            for (int i = 0; i < count; ++i)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE* config;

                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    const int w = scc.InputSize.cx;
                    const int h = scc.InputSize.cy;

                    bool duplicate = false;

                    for (int j = widths.size(); --j >= 0;)
                    {
                        if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
                        {
                            duplicate = true;
                            break;
                        }
                    }

                    if (! duplicate)
                    {
                        widths.add (w);
                        heights.add (h);
                    }

                    deleteMediaType (config);
                }
            }
        }
    }
    bool selectVideoSize (IAMStreamConfig* const streamConfig,
                          const int minWidth, const int minHeight,
                          const int maxWidth, const int maxHeight)
    {
        int count = 0, size = 0, bestArea = 0, bestIndex = -1;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            AM_MEDIA_TYPE* config;
            VIDEO_STREAM_CONFIG_CAPS scc;

            for (int i = 0; i < count; ++i)
            {
                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    if (scc.InputSize.cx >= minWidth
                         && scc.InputSize.cy >= minHeight
                         && scc.InputSize.cx <= maxWidth
                         && scc.InputSize.cy <= maxHeight)
                    {
                        int area = scc.InputSize.cx * scc.InputSize.cy;

                        if (area > bestArea)
                        {
                            bestIndex = i;
                            bestArea = area;
                        }
                    }

                    deleteMediaType (config);
                }
            }

            if (bestIndex >= 0)
            {
                HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);
                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }

        return false;
    }
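
    // Finds the first pin on a filter that has the requested direction and,
    // if pinName is given, a matching (case-insensitive) name.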
    static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                        ComSmartPtr<IPin>& result, const char* pinName = nullptr)
    {
        ComSmartPtr<IEnumPins> enumerator;
        ComSmartPtr<IPin> pin;

        filter->EnumPins (enumerator.resetAndGetPointerAddress());

        while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
        {
            PIN_DIRECTION dir;
            pin->QueryDirection (&dir);

            if (wantedDirection == dir)
            {
                PIN_INFO info = { 0 };
                pin->QueryPinInfo (&info);

                if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
                {
                    result = pin;
                    return true;
                }
            }
        }

        return false;
    }
    bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
    {
        ComSmartPtr<IPin> in, out;

        return getPin (first, PINDIR_OUTPUT, out)
                && getPin (second, PINDIR_INPUT, in)
                && SUCCEEDED (graphBuilder->Connect (out, in));
    }
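
    // Registers the filter graph in the Running Object Table so that external
    // tools (e.g. GraphEdit) can connect to and inspect it while it is running.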
    bool addGraphToRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            return false;

        ComSmartPtr<IMoniker> moniker;
        WCHAR buffer[128];
        HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());

        if (FAILED (hr))
            return false;

        graphRegistrationID = 0;
        return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
    }

    void removeGraphFromRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            rot->Revoke (graphRegistrationID);
    }
    void disconnectAnyViewers();

    static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
    {
        if (pmt->cbFormat != 0)
            CoTaskMemFree ((PVOID) pmt->pbFormat);

        if (pmt->pUnk != nullptr)
            pmt->pUnk->Release();

        CoTaskMemFree (pmt);
    }

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

//==============================================================================
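// A simple preview component: repaints with the latest captured frame whenever
// the Pimpl broadcasts a change, throttled to the device's previewMaxFPS.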
struct CameraDevice::ViewerComponent  : public Component,
                                        public ChangeListener
{
    ViewerComponent (CameraDevice& d)
        : owner (d.pimpl.get()), maxFPS (15), lastRepaintTime (0)
    {
        setOpaque (true);
        owner->addChangeListener (this);
        owner->addUser();
        owner->viewerComps.add (this);
        setSize (owner->width, owner->height);
    }

    ~ViewerComponent()
    {
        if (owner != nullptr)
        {
            owner->viewerComps.removeFirstMatchingValue (this);
            owner->removeUser();
            owner->removeChangeListener (this);
        }
    }

    void ownerDeleted()
    {
        owner = nullptr;
    }

    void paint (Graphics& g) override
    {
        g.setColour (Colours::black);
        g.setImageResamplingQuality (Graphics::lowResamplingQuality);

        if (owner != nullptr)
            owner->drawCurrentImage (g, getLocalBounds());
        else
            g.fillAll();
    }

    void changeListenerCallback (ChangeBroadcaster*) override
    {
        const int64 now = Time::currentTimeMillis();

        if (now >= lastRepaintTime + (1000 / maxFPS))
        {
            lastRepaintTime = now;
            repaint();

            if (owner != nullptr)
                maxFPS = owner->previewMaxFPS;
        }
    }

private:
    Pimpl* owner;
    int maxFPS;
    int64 lastRepaintTime;
};
void CameraDevice::Pimpl::disconnectAnyViewers()
{
    for (int i = viewerComps.size(); --i >= 0;)
        viewerComps.getUnchecked(i)->ownerDeleted();
}

String CameraDevice::getFileExtension()
{
    return ".wmv";
}