The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
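// These ISampleGrabberCB/ISampleGrabber interfaces and the GUIDs below are declared
// locally, presumably because the qedit.h header that originally provided them is not
// available in newer Windows SDKs.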
interface ISampleGrabberCB  : public IUnknown
{
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};

interface ISampleGrabber  : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};

static const IID   IID_ISampleGrabberCB  = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID   IID_ISampleGrabber    = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber   = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer    = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
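//==============================================================================
// The Pimpl builds a DirectShow capture graph: the camera's capture filter feeds a
// smart tee, whose preview branch runs through a sample grabber (which copies each
// frame into a juce::Image) and into a null renderer, and whose capture branch is
// only connected to a WM ASF writer while recording to file.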
struct CameraDevice::Pimpl  : public ChangeBroadcaster
{
    Pimpl (CameraDevice& ownerToUse, const String&, int index,
           int minWidth, int minHeight, int maxWidth, int maxHeight,
           bool /*highQuality*/)
        : owner (ownerToUse),
          isRecording (false),
          openedSuccessfully (false),
          imageNeedsFlipping (false),
          width (0), height (0),
          activeUsers (0),
          recordNextFrameTime (false),
          previewMaxFPS (60)
    {
        HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
        if (FAILED (hr))
            return;

        filter = enumerateCameras (nullptr, index);
        if (filter == nullptr)
            return;

        hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
        if (FAILED (hr))
            return;

        hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
        if (FAILED (hr))
            return;

        hr = graphBuilder.QueryInterface (mediaControl);
        if (FAILED (hr))
            return;

        {
            ComSmartPtr<IAMStreamConfig> streamConfig;
            hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                     IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());

            if (streamConfig != nullptr)
            {
                getVideoSizes (streamConfig);

                if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                    return;
            }
        }

        hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
        if (FAILED (hr))
            return;

        hr = smartTee.CoCreateInstance (CLSID_SmartTee);
        if (FAILED (hr))
            return;

        hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
        if (FAILED (hr))
            return;

        if (! connectFilters (filter, smartTee))
            return;

        ComSmartPtr<IBaseFilter> sampleGrabberBase;
        hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
        if (FAILED (hr))
            return;

        hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
        if (FAILED (hr))
            return;

        {
            AM_MEDIA_TYPE mt = { 0 };
            mt.majortype = MEDIATYPE_Video;
            mt.subtype = MEDIASUBTYPE_RGB24;
            mt.formattype = FORMAT_VideoInfo;
            sampleGrabber->SetMediaType (&mt);
        }

        callback = new GrabberCallback (*this);
        hr = sampleGrabber->SetCallback (callback, 1);

        hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
        if (FAILED (hr))
            return;

        ComSmartPtr<IPin> grabberInputPin;
        if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
                && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
                && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
            return;

        hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
        if (FAILED (hr))
            return;

        AM_MEDIA_TYPE mt = { 0 };
        hr = sampleGrabber->GetConnectedMediaType (&mt);
        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
        width = pVih->bmiHeader.biWidth;
        height = pVih->bmiHeader.biHeight;

        ComSmartPtr<IBaseFilter> nullFilter;
        hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
        hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

        if (connectFilters (sampleGrabberBase, nullFilter)
             && addGraphToRot())
        {
            activeImage = Image (Image::RGB, width, height, true);
            loadingImage = Image (Image::RGB, width, height, true);

            openedSuccessfully = true;
        }
    }
    ~Pimpl()
    {
        if (mediaControl != nullptr)
            mediaControl->Stop();

        removeGraphFromRot();
        disconnectAnyViewers();

        if (sampleGrabber != nullptr)
        {
            sampleGrabber->SetCallback (nullptr, 0);
            sampleGrabber = nullptr;
        }

        callback = nullptr;
        graphBuilder = nullptr;
        mediaControl = nullptr;
        filter = nullptr;
        captureGraphBuilder = nullptr;
        smartTee = nullptr;
        smartTeePreviewOutputPin = nullptr;
        smartTeeCaptureOutputPin = nullptr;
        asfWriter = nullptr;
    }
    bool openedOk() const noexcept       { return openedSuccessfully; }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            jassert (pictureTakenCallbackToUse != nullptr);

            if (pictureTakenCallbackToUse == nullptr)
                return;

            pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);
        }

        addUser();
    }

    void startRecordingToFile (const File& file, int quality)
    {
        addUser();
        isRecording = createFileCaptureFilter (file, quality);
    }

    void stopRecording()
    {
        if (isRecording)
        {
            removeFileCaptureFilter();
            removeUser();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedTime;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            addUser();

        listeners.add (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);

        if (listeners.size() == 0)
            removeUser();
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }

    void notifyPictureTakenIfNeeded (const Image& image)
    {
        {
            const ScopedLock sl (pictureTakenCallbackLock);

            if (pictureTakenCallback == nullptr)
                return;
        }

        WeakReference<Pimpl> weakRef (this);
        MessageManager::callAsync ([weakRef, image]() mutable
                                   {
                                       if (weakRef == nullptr)
                                           return;

                                       if (weakRef->pictureTakenCallback != nullptr)
                                           weakRef->pictureTakenCallback (image);

                                       weakRef->pictureTakenCallback = nullptr;
                                   });
    }
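    // Simple user-count: the graph is run while at least one listener, viewer component,
    // recording session or pending still-picture request is active, and stopped when the
    // count drops back to zero.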
    void addUser()
    {
        if (openedSuccessfully && activeUsers++ == 0)
            mediaControl->Run();
    }

    void removeUser()
    {
        if (openedSuccessfully && --activeUsers == 0)
            mediaControl->Stop();
    }
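    // Called on the DirectShow sample-grabber thread for each preview frame. The RGB24
    // buffer arrives bottom-up (standard Windows DIB layout), so the copy below reverses
    // the line order while writing into loadingImage.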
    void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
    {
        if (recordNextFrameTime)
        {
            const double defaultCameraLatency = 0.1;

            firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
            recordNextFrameTime = false;

            ComSmartPtr<IPin> pin;
            if (getPin (filter, PINDIR_OUTPUT, pin))
            {
                ComSmartPtr<IAMPushSource> pushSource;
                HRESULT hr = pin.QueryInterface (pushSource);

                if (pushSource != nullptr)
                {
                    REFERENCE_TIME latency = 0;
                    hr = pushSource->GetLatency (&latency);
                    firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
                }
            }
        }

        {
            const int lineStride = width * 3;
            const ScopedLock sl (imageSwapLock);

            {
                loadingImage.duplicateIfShared();
                const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

                for (int i = 0; i < height; ++i)
                    memcpy (destData.getLinePointer ((height - 1) - i),
                            buffer + lineStride * i,
                            lineStride);
            }

            imageNeedsFlipping = true;
        }

        if (listeners.size() > 0)
            callListeners (loadingImage);

        notifyPictureTakenIfNeeded (loadingImage);

        sendChangeMessage();
    }
    void drawCurrentImage (Graphics& g, Rectangle<int> area)
    {
        if (imageNeedsFlipping)
        {
            const ScopedLock sl (imageSwapLock);
            std::swap (loadingImage, activeImage);
            imageNeedsFlipping = false;
        }

        Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                    .appliedTo (Rectangle<int> (width, height), area));

        RectangleList<int> borders (area);
        borders.subtract (centred);
        g.setColour (Colours::black);
        g.fillRectList (borders);

        g.drawImage (activeImage, centred.getX(), centred.getY(),
                     centred.getWidth(), centred.getHeight(), 0, 0, width, height);
    }
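    // Recording attaches a Windows Media ASF writer to the smart tee's capture branch and
    // configures it from the XML profile string below; the $WIDTH, $HEIGHT and
    // $AVGTIMEPERFRAME placeholders are substituted before the profile is loaded.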
    bool createFileCaptureFilter (const File& file, int quality)
    {
        removeFileCaptureFilter();
        file.deleteFile();
        mediaControl->Stop();
        firstRecordedTime = Time();
        recordNextFrameTime = true;
        previewMaxFPS = 60;

        HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

        if (SUCCEEDED (hr))
        {
            ComSmartPtr<IFileSinkFilter> fileSink;
            hr = asfWriter.QueryInterface (fileSink);

            if (SUCCEEDED (hr))
            {
                hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

                if (SUCCEEDED (hr))
                {
                    hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IConfigAsfWriter> asfConfig;
                        hr = asfWriter.QueryInterface (asfConfig);
                        asfConfig->SetIndexMode (true);

                        ComSmartPtr<IWMProfileManager> profileManager;
                        hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                        // This gibberish is the DirectShow profile for a video-only wmv file.
                        String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                       "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                                     "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                                     "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                         "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                         "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                                      "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                           "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                             "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                               "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
                                                               "biclrused=\"0\" biclrimportant=\"0\"/>"
                                           "</videoinfoheader>"
                                         "</wmmediatype>"
                                       "</streamconfig>"
                                     "</profile>");

                        const int fps[] = { 10, 15, 30 };
                        int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                        if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                            maxFramesPerSecond = (quality >> 24) & 0xff;

                        prof = prof.replace ("$WIDTH", String (width))
                                   .replace ("$HEIGHT", String (height))
                                   .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                        ComSmartPtr<IWMProfile> currentProfile;
                        hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                        hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                        if (SUCCEEDED (hr))
                        {
                            ComSmartPtr<IPin> asfWriterInputPin;

                            if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                            {
                                hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                                if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                     && SUCCEEDED (mediaControl->Run()))
                                {
                                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                    if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                                        previewMaxFPS = (quality >> 16) & 0xff;

                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }

        removeFileCaptureFilter();

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        return false;
    }
    void removeFileCaptureFilter()
    {
        mediaControl->Stop();

        if (asfWriter != nullptr)
        {
            graphBuilder->RemoveFilter (asfWriter);
            asfWriter = nullptr;
        }

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        previewMaxFPS = 60;
    }
    static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
    {
        int index = 0;
        ComSmartPtr<ICreateDevEnum> pDevEnum;

        if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
        {
            ComSmartPtr<IEnumMoniker> enumerator;
            HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

            if (SUCCEEDED (hr) && enumerator != nullptr)
            {
                ComSmartPtr<IMoniker> moniker;
                ULONG fetched;

                while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
                {
                    ComSmartPtr<IBaseFilter> captureFilter;
                    hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPropertyBag> propertyBag;
                        hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                        if (SUCCEEDED (hr))
                        {
                            VARIANT var;
                            var.vt = VT_BSTR;

                            hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                            propertyBag = nullptr;

                            if (SUCCEEDED (hr))
                            {
                                if (names != nullptr)
                                    names->add (var.bstrVal);

                                if (index == deviceIndexToOpen)
                                    return captureFilter;

                                ++index;
                            }
                        }
                    }
                }
            }
        }

        return nullptr;
    }

    static StringArray getAvailableDevices()
    {
        StringArray devs;
        enumerateCameras (&devs, -1);
        return devs;
    }
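    // COM callback object handed to the sample grabber: each BufferCB call from DirectShow
    // is forwarded to Pimpl::handleFrame. SampleCB is unused and deliberately returns E_FAIL.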
    struct GrabberCallback   : public ComBaseClassHelperBase<ISampleGrabberCB>
    {
        GrabberCallback (Pimpl& p)
            : ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}

        JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
        {
            if (refId == IID_ISampleGrabberCB)
                return castToType<ISampleGrabberCB> (result);

            return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
        }

        STDMETHODIMP SampleCB (double, IMediaSample*)  { return E_FAIL; }

        STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
        {
            owner.handleFrame (time, buffer, bufferSize);
            return S_OK;
        }

        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
    };
    CameraDevice& owner;

    ComSmartPtr<GrabberCallback> callback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    CriticalSection pictureTakenCallbackLock;
    std::function<void (const Image&)> pictureTakenCallback;

    bool isRecording, openedSuccessfully;
    int width, height;
    Time firstRecordedTime;

    Array<ViewerComponent*> viewerComps;

    ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
    ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<ISampleGrabber> sampleGrabber;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
    int activeUsers;
    Array<int> widths, heights;
    DWORD graphRegistrationID;

    CriticalSection imageSwapLock;
    bool imageNeedsFlipping;
    Image loadingImage, activeImage;

    bool recordNextFrameTime;
    int previewMaxFPS;

    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
private:
    void getVideoSizes (IAMStreamConfig* const streamConfig)
    {
        widths.clear();
        heights.clear();

        int count = 0, size = 0;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            for (int i = 0; i < count; ++i)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE* config;

                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    const int w = scc.InputSize.cx;
                    const int h = scc.InputSize.cy;

                    bool duplicate = false;

                    for (int j = widths.size(); --j >= 0;)
                    {
                        if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
                        {
                            duplicate = true;
                            break;
                        }
                    }

                    if (! duplicate)
                    {
                        DBG ("Camera capture size: " + String (w) + ", " + String (h));
                        widths.add (w);
                        heights.add (h);
                    }

                    deleteMediaType (config);
                }
            }
        }
    }
    bool selectVideoSize (IAMStreamConfig* const streamConfig,
                          const int minWidth, const int minHeight,
                          const int maxWidth, const int maxHeight)
    {
        int count = 0, size = 0, bestArea = 0, bestIndex = -1;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            AM_MEDIA_TYPE* config;
            VIDEO_STREAM_CONFIG_CAPS scc;

            for (int i = 0; i < count; ++i)
            {
                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    if (scc.InputSize.cx >= minWidth
                         && scc.InputSize.cy >= minHeight
                         && scc.InputSize.cx <= maxWidth
                         && scc.InputSize.cy <= maxHeight)
                    {
                        int area = scc.InputSize.cx * scc.InputSize.cy;

                        if (area > bestArea)
                        {
                            bestIndex = i;
                            bestArea = area;
                        }
                    }

                    deleteMediaType (config);
                }
            }

            if (bestIndex >= 0)
            {
                HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);
                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }

        return false;
    }
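    // Pin/filter helpers: getPin finds a pin on a filter by direction (and optionally by name),
    // and connectFilters joins the first matching output pin of one filter to the first
    // matching input pin of another via the graph builder.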
    static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                        ComSmartPtr<IPin>& result, const char* pinName = nullptr)
    {
        ComSmartPtr<IEnumPins> enumerator;
        ComSmartPtr<IPin> pin;

        filter->EnumPins (enumerator.resetAndGetPointerAddress());

        while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
        {
            PIN_DIRECTION dir;
            pin->QueryDirection (&dir);

            if (wantedDirection == dir)
            {
                PIN_INFO info = { 0 };
                pin->QueryPinInfo (&info);

                if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
                {
                    result = pin;
                    return true;
                }
            }
        }

        return false;
    }

    bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
    {
        ComSmartPtr<IPin> in, out;

        return getPin (first, PINDIR_OUTPUT, out)
                && getPin (second, PINDIR_INPUT, in)
                && SUCCEEDED (graphBuilder->Connect (out, in));
    }
    bool addGraphToRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;
        if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            return false;

        ComSmartPtr<IMoniker> moniker;
        WCHAR buffer[128];
        HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
        if (FAILED (hr))
            return false;

        graphRegistrationID = 0;
        return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
    }

    void removeGraphFromRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            rot->Revoke (graphRegistrationID);
    }

    void disconnectAnyViewers();

    static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
    {
        if (pmt->cbFormat != 0)
            CoTaskMemFree ((PVOID) pmt->pbFormat);

        if (pmt->pUnk != nullptr)
            pmt->pUnk->Release();

        CoTaskMemFree (pmt);
    }

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
//==============================================================================
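// Lightweight preview component: it registers as a ChangeListener on the Pimpl and
// repaints whenever a new frame arrives, throttled to the Pimpl's current previewMaxFPS.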
struct CameraDevice::ViewerComponent  : public Component,
                                        public ChangeListener
{
    ViewerComponent (CameraDevice& d)
       : owner (d.pimpl.get()), maxFPS (15), lastRepaintTime (0)
    {
        setOpaque (true);
        owner->addChangeListener (this);
        owner->addUser();
        owner->viewerComps.add (this);
        setSize (owner->width, owner->height);
    }

    ~ViewerComponent()
    {
        if (owner != nullptr)
        {
            owner->viewerComps.removeFirstMatchingValue (this);
            owner->removeUser();
            owner->removeChangeListener (this);
        }
    }

    void ownerDeleted()
    {
        owner = nullptr;
    }

    void paint (Graphics& g) override
    {
        g.setColour (Colours::black);
        g.setImageResamplingQuality (Graphics::lowResamplingQuality);

        if (owner != nullptr)
            owner->drawCurrentImage (g, getLocalBounds());
        else
            g.fillAll();
    }

    void changeListenerCallback (ChangeBroadcaster*) override
    {
        const int64 now = Time::currentTimeMillis();

        if (now >= lastRepaintTime + (1000 / maxFPS))
        {
            lastRepaintTime = now;
            repaint();

            if (owner != nullptr)
                maxFPS = owner->previewMaxFPS;
        }
    }

private:
    Pimpl* owner;
    int maxFPS;
    int64 lastRepaintTime;
};
void CameraDevice::Pimpl::disconnectAnyViewers()
{
    for (int i = viewerComps.size(); --i >= 0;)
        viewerComps.getUnchecked(i)->ownerDeleted();
}

String CameraDevice::getFileExtension()
{
    return ".wmv";
}