The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

795 lines
27KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 5 End-User License
  8. Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
  9. 27th April 2017).
  10. End User License Agreement: www.juce.com/juce-5-licence
  11. Privacy Policy: www.juce.com/juce-5-privacy-policy
  12. Or: You may also use this code under the terms of the GPL v3 (see
  13. www.gnu.org/licenses).
  14. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  15. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  16. DISCLAIMED.
  17. ==============================================================================
  18. */
// Local re-declaration of the DirectShow sample-grabber callback interface.
// (This interface historically lived in qedit.h; declaring it here avoids a
// dependency on that header. The vtable layout must match the SDK original.)
interface ISampleGrabberCB : public IUnknown
{
    // Called with each complete media sample (not used by this implementation).
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;
    // Called with each raw buffer: (sample time, buffer pointer, buffer length).
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};
// Local re-declaration of the DirectShow ISampleGrabber filter interface
// (see note on ISampleGrabberCB above). Method order defines the COM vtable,
// so it must not be changed.
interface ISampleGrabber : public IUnknown
{
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;                      // stop after one sample if TRUE
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;    // restrict the accepted connection type
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0; // query the negotiated type (caller frees pbFormat)
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;                // buffer samples internally if TRUE
    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;  // 0 = SampleCB, 1 = BufferCB
};
// GUIDs for the interfaces/objects above, declared locally for the same
// reason as the interfaces (their original header is unavailable). These must
// match the registered DirectShow class IDs exactly.
static const IID IID_ISampleGrabberCB = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
//==============================================================================
// Win32/DirectShow implementation of CameraDevice.
//
// The filter graph built here is:
//
//   capture filter -> smart tee -+- "preview" pin -> sample grabber -> null renderer
//                                +- "capture" pin -> [WM ASF writer, while recording]
//
// Frames arrive via GrabberCallback::BufferCB on a DirectShow worker thread;
// listeners and the preview image are updated from there, and the UI is
// notified through ChangeBroadcaster.
struct CameraDevice::Pimpl : public ChangeBroadcaster
{
    // Builds the whole graph for the camera at the given enumeration index.
    // Any failure returns early, leaving openedSuccessfully == false — callers
    // must check openedOk(). The String parameter (device name) is unused here.
    Pimpl (const String&, int index,
           int minWidth, int minHeight,
           int maxWidth, int maxHeight, bool /*highQuality*/)
        : isRecording (false),
          openedSuccessfully (false),
          imageNeedsFlipping (false),
          width (0), height (0),
          activeUsers (0),
          recordNextFrameTime (false),
          previewMaxFPS (60)
    {
        HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
        if (FAILED (hr))
            return;

        filter = enumerateCameras (nullptr, index);
        if (filter == nullptr)
            return;

        hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
        if (FAILED (hr))
            return;

        hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
        if (FAILED (hr))
            return;

        hr = graphBuilder.QueryInterface (mediaControl);
        if (FAILED (hr))
            return;

        {
            // Query the capture pin's stream config and choose the best
            // resolution within the caller's min/max bounds.
            ComSmartPtr<IAMStreamConfig> streamConfig;
            hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
                                                     IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());
            if (streamConfig != nullptr)
            {
                getVideoSizes (streamConfig);

                if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
                    return;
            }
        }

        hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
        if (FAILED (hr))
            return;

        // The smart tee gives us independent "capture" and "preview" output
        // legs from the single camera stream.
        hr = smartTee.CoCreateInstance (CLSID_SmartTee);
        if (FAILED (hr))
            return;

        hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
        if (FAILED (hr))
            return;

        if (! connectFilters (filter, smartTee))
            return;

        ComSmartPtr<IBaseFilter> sampleGrabberBase;
        hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
        if (FAILED (hr))
            return;

        hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
        if (FAILED (hr))
            return;

        {
            // Force the grabber to negotiate uncompressed RGB24 so BufferCB
            // receives plain bitmap data.
            AM_MEDIA_TYPE mt = { 0 };
            mt.majortype = MEDIATYPE_Video;
            mt.subtype = MEDIASUBTYPE_RGB24;
            mt.formattype = FORMAT_VideoInfo;
            sampleGrabber->SetMediaType (&mt);
        }

        callback = new GrabberCallback (*this);
        hr = sampleGrabber->SetCallback (callback, 1);   // 1 => use BufferCB (NOTE(review): hr is ignored here)

        hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
        if (FAILED (hr))
            return;

        ComSmartPtr<IPin> grabberInputPin;
        if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
                && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
                && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
            return;

        hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
        if (FAILED (hr))
            return;

        // Read back the negotiated frame size.
        // NOTE(review): mt.pbFormat allocated by GetConnectedMediaType is never
        // freed (one-off leak), and the flip logic in handleFrame assumes a
        // bottom-up DIB (positive biHeight) — confirm for top-down sources.
        AM_MEDIA_TYPE mt = { 0 };
        hr = sampleGrabber->GetConnectedMediaType (&mt);
        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
        width = pVih->bmiHeader.biWidth;
        height = pVih->bmiHeader.biHeight;

        // Terminate the preview leg with a null renderer so the stream flows
        // without opening a video window.
        ComSmartPtr<IBaseFilter> nullFilter;
        hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
        hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));

        if (connectFilters (sampleGrabberBase, nullFilter)
             && addGraphToRot())
        {
            activeImage = Image (Image::RGB, width, height, true);
            loadingImage = Image (Image::RGB, width, height, true);

            openedSuccessfully = true;
        }
    }

    // Stops the graph and releases every COM object. The sample grabber's
    // callback is cleared first so no frame can arrive during teardown.
    ~Pimpl()
    {
        if (mediaControl != nullptr)
            mediaControl->Stop();

        removeGraphFromRot();
        disconnectAnyViewers();

        if (sampleGrabber != nullptr)
        {
            sampleGrabber->SetCallback (nullptr, 0);
            sampleGrabber = nullptr;
        }

        callback = nullptr;
        graphBuilder = nullptr;
        mediaControl = nullptr;
        filter = nullptr;
        captureGraphBuilder = nullptr;
        smartTee = nullptr;
        smartTeePreviewOutputPin = nullptr;
        smartTeeCaptureOutputPin = nullptr;
        asfWriter = nullptr;
    }

    bool openedOk() const noexcept       { return openedSuccessfully; }

    // Begins recording to the given file; registers this object as a user so
    // the graph is running, then attaches the file-writer leg.
    void startRecordingToFile (const File& file, int quality)
    {
        addUser();
        isRecording = createFileCaptureFilter (file, quality);
    }

    void stopRecording()
    {
        if (isRecording)
        {
            removeFileCaptureFilter();
            removeUser();
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedTime;
    }

    // Listener registration: the first listener starts the graph running
    // (addUser), the last one removed stops it (removeUser).
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            addUser();

        listeners.addIfNotAlreadyThere (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeAllInstancesOf (listenerToRemove);

        if (listeners.size() == 0)
            removeUser();
    }

    // Called from the DirectShow thread; iterates backwards so a listener
    // removing itself during the callback is safe.
    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);

        for (int i = listeners.size(); --i >= 0;)
            if (CameraDevice::Listener* const l = listeners[i])
                l->imageReceived (image);
    }

    // Reference-counted run/stop of the graph: first user starts it, last
    // user stops it. Both are no-ops if the device failed to open.
    void addUser()
    {
        if (openedSuccessfully && activeUsers++ == 0)
            mediaControl->Run();
    }

    void removeUser()
    {
        if (openedSuccessfully && --activeUsers == 0)
            mediaControl->Stop();
    }

    // Invoked by GrabberCallback::BufferCB for every captured frame (on a
    // DirectShow worker thread). Copies the frame (flipping it vertically,
    // since RGB24 DIBs are stored bottom-up) into loadingImage, then notifies
    // listeners and broadcasts a change for the preview components.
    void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
    {
        if (recordNextFrameTime)
        {
            // Timestamp the first recorded frame, compensating for an assumed
            // default camera latency plus whatever the source reports.
            const double defaultCameraLatency = 0.1;

            firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
            recordNextFrameTime = false;

            ComSmartPtr<IPin> pin;
            if (getPin (filter, PINDIR_OUTPUT, pin))
            {
                ComSmartPtr<IAMPushSource> pushSource;
                HRESULT hr = pin.QueryInterface (pushSource);

                if (pushSource != nullptr)
                {
                    REFERENCE_TIME latency = 0;
                    hr = pushSource->GetLatency (&latency);
                    // NOTE(review): latency is in 100ns REFERENCE_TIME units but is
                    // passed to RelativeTime as if it were seconds — confirm intent.
                    firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
                }
            }
        }

        {
            const int lineStride = width * 3;   // RGB24, no padding assumed
            const ScopedLock sl (imageSwapLock);

            {
                loadingImage.duplicateIfShared();
                const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);

                // Copy rows in reverse order to turn the bottom-up DIB into a
                // top-down image.
                for (int i = 0; i < height; ++i)
                    memcpy (destData.getLinePointer ((height - 1) - i),
                            buffer + lineStride * i,
                            lineStride);
            }

            imageNeedsFlipping = true;
        }

        // NOTE(review): loadingImage is read here after imageSwapLock has been
        // released; a concurrent drawCurrentImage() could swap it mid-call.
        if (listeners.size() > 0)
            callListeners (loadingImage);

        sendChangeMessage();
    }

    // Paints the most recent frame, letterboxed inside 'area' with black
    // borders. Swaps in the newest frame first if one is pending.
    void drawCurrentImage (Graphics& g, Rectangle<int> area)
    {
        if (imageNeedsFlipping)
        {
            const ScopedLock sl (imageSwapLock);
            std::swap (loadingImage, activeImage);
            imageNeedsFlipping = false;
        }

        Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
                                    .appliedTo (Rectangle<int> (width, height), area));

        RectangleList<int> borders (area);
        borders.subtract (centred);
        g.setColour (Colours::black);
        g.fillRectList (borders);

        g.drawImage (activeImage, centred.getX(), centred.getY(),
                     centred.getWidth(), centred.getHeight(), 0, 0, width, height);
    }

    // Attaches a WM ASF writer to the smart tee's capture leg, configured from
    // an XML profile, and restarts the graph. Returns true if recording is
    // running. 'quality' encodes an index into the fps table in its low byte,
    // plus two "hacky" override bytes (see comments below).
    bool createFileCaptureFilter (const File& file, int quality)
    {
        removeFileCaptureFilter();
        file.deleteFile();
        mediaControl->Stop();
        firstRecordedTime = Time();
        recordNextFrameTime = true;
        previewMaxFPS = 60;

        HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);

        if (SUCCEEDED (hr))
        {
            ComSmartPtr<IFileSinkFilter> fileSink;
            hr = asfWriter.QueryInterface (fileSink);

            if (SUCCEEDED (hr))
            {
                hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);

                if (SUCCEEDED (hr))
                {
                    hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IConfigAsfWriter> asfConfig;
                        hr = asfWriter.QueryInterface (asfConfig);
                        asfConfig->SetIndexMode (true);

                        ComSmartPtr<IWMProfileManager> profileManager;
                        hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());

                        // This gibberish is the DirectShow profile for a video-only wmv file.
                        String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
                                       "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
                                                     "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
                                                     "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
                                         "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
                                         "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
                                                      "btemporalcompression=\"1\" lsamplesize=\"0\">"
                                           "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
                                             "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
                                             "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
                                                               "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
                                                               "biclrused=\"0\" biclrimportant=\"0\"/>"
                                           "</videoinfoheader>"
                                         "</wmmediatype>"
                                       "</streamconfig>"
                                     "</profile>");

                        const int fps[] = { 10, 15, 30 };
                        int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];

                        if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                            maxFramesPerSecond = (quality >> 24) & 0xff;

                        // avgtimeperframe is in 100-nanosecond REFERENCE_TIME units.
                        prof = prof.replace ("$WIDTH", String (width))
                                   .replace ("$HEIGHT", String (height))
                                   .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));

                        ComSmartPtr<IWMProfile> currentProfile;
                        hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
                        hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);

                        if (SUCCEEDED (hr))
                        {
                            ComSmartPtr<IPin> asfWriterInputPin;

                            if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
                            {
                                hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);

                                if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
                                     && SUCCEEDED (mediaControl->Run()))
                                {
                                    previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding

                                    if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
                                        previewMaxFPS = (quality >> 16) & 0xff;

                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }

        // Any failure path: tear the writer back out and restart the graph if
        // anyone was using it.
        removeFileCaptureFilter();

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        return false;
    }

    // Detaches the ASF writer from the graph and restores the preview frame rate.
    void removeFileCaptureFilter()
    {
        mediaControl->Stop();

        if (asfWriter != nullptr)
        {
            graphBuilder->RemoveFilter (asfWriter);
            asfWriter = nullptr;
        }

        if (openedSuccessfully && activeUsers > 0)
            mediaControl->Run();

        previewMaxFPS = 60;
    }

    // Enumerates video-input devices. If 'names' is non-null, friendly names
    // are appended to it; when the running index matches deviceIndexToOpen the
    // bound capture filter is returned (so passing -1 just collects names).
    // NOTE(review): the VARIANT isn't VariantInit'd/VariantClear'd, so the
    // BSTR read into it appears to be leaked — confirm against JUCE upstream.
    static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
    {
        int index = 0;
        ComSmartPtr<ICreateDevEnum> pDevEnum;

        if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
        {
            ComSmartPtr<IEnumMoniker> enumerator;
            HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);

            if (SUCCEEDED (hr) && enumerator != nullptr)
            {
                ComSmartPtr<IMoniker> moniker;
                ULONG fetched;

                while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
                {
                    ComSmartPtr<IBaseFilter> captureFilter;
                    hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());

                    if (SUCCEEDED (hr))
                    {
                        ComSmartPtr<IPropertyBag> propertyBag;
                        hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());

                        if (SUCCEEDED (hr))
                        {
                            VARIANT var;
                            var.vt = VT_BSTR;

                            hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
                            propertyBag = nullptr;

                            if (SUCCEEDED (hr))
                            {
                                if (names != nullptr)
                                    names->add (var.bstrVal);

                                if (index == deviceIndexToOpen)
                                    return captureFilter;

                                ++index;
                            }
                        }
                    }
                }
            }
        }

        return nullptr;
    }

    static StringArray getAvailableDevices()
    {
        StringArray devs;
        enumerateCameras (&devs, -1);
        return devs;
    }

    // COM object handed to the sample grabber; forwards each buffer to the
    // owning Pimpl. Lifetime is managed by COM ref-counting via ComSmartPtr.
    class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
    {
    public:
        GrabberCallback (Pimpl& p)
            : ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}   // start with a zero ref-count

        JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
        {
            if (refId == IID_ISampleGrabberCB)
                return castToType<ISampleGrabberCB> (result);

            return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
        }

        // SampleCB is unused: SetCallback was called with mode 1 (BufferCB).
        STDMETHODIMP SampleCB (double, IMediaSample*)  { return E_FAIL; }

        STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
        {
            owner.handleFrame (time, buffer, bufferSize);
            return S_OK;
        }

    private:
        Pimpl& owner;

        JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
    };

    ComSmartPtr<GrabberCallback> callback;
    Array<CameraDevice::Listener*> listeners;       // guarded by listenerLock
    CriticalSection listenerLock;

    bool isRecording, openedSuccessfully;
    int width, height;                              // negotiated capture frame size
    Time firstRecordedTime;

    Array<ViewerComponent*> viewerComps;            // attached preview components

    ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
    ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
    ComSmartPtr<IGraphBuilder> graphBuilder;
    ComSmartPtr<ISampleGrabber> sampleGrabber;
    ComSmartPtr<IMediaControl> mediaControl;
    ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
    int activeUsers;                                // run/stop reference count (see addUser/removeUser)
    Array<int> widths, heights;                     // capture sizes reported by the device
    DWORD graphRegistrationID;                      // Running Object Table cookie
    CriticalSection imageSwapLock;                  // guards loadingImage/activeImage swapping
    bool imageNeedsFlipping;
    Image loadingImage, activeImage;
    bool recordNextFrameTime;                       // set when the next frame should stamp firstRecordedTime
    int previewMaxFPS;                              // repaint throttle for viewer components

private:
    // Collects the device's distinct capture sizes into widths/heights.
    void getVideoSizes (IAMStreamConfig* const streamConfig)
    {
        widths.clear();
        heights.clear();

        int count = 0, size = 0;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            for (int i = 0; i < count; ++i)
            {
                VIDEO_STREAM_CONFIG_CAPS scc;
                AM_MEDIA_TYPE* config;

                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    const int w = scc.InputSize.cx;
                    const int h = scc.InputSize.cy;

                    bool duplicate = false;

                    for (int j = widths.size(); --j >= 0;)
                    {
                        if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
                        {
                            duplicate = true;
                            break;
                        }
                    }

                    if (! duplicate)
                    {
                        DBG ("Camera capture size: " + String (w) + ", " + String (h));
                        widths.add (w);
                        heights.add (h);
                    }

                    deleteMediaType (config);
                }
            }
        }
    }

    // Picks the largest capture size inside [min..max] bounds and applies it
    // with SetFormat. Returns false if no capability fits.
    bool selectVideoSize (IAMStreamConfig* const streamConfig,
                          const int minWidth, const int minHeight,
                          const int maxWidth, const int maxHeight)
    {
        int count = 0, size = 0, bestArea = 0, bestIndex = -1;
        streamConfig->GetNumberOfCapabilities (&count, &size);

        if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
        {
            AM_MEDIA_TYPE* config;
            VIDEO_STREAM_CONFIG_CAPS scc;

            for (int i = 0; i < count; ++i)
            {
                HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);

                if (SUCCEEDED (hr))
                {
                    if (scc.InputSize.cx >= minWidth
                         && scc.InputSize.cy >= minHeight
                         && scc.InputSize.cx <= maxWidth
                         && scc.InputSize.cy <= maxHeight)
                    {
                        int area = scc.InputSize.cx * scc.InputSize.cy;
                        if (area > bestArea)
                        {
                            bestIndex = i;
                            bestArea = area;
                        }
                    }

                    deleteMediaType (config);
                }
            }

            if (bestIndex >= 0)
            {
                HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);
                hr = streamConfig->SetFormat (config);
                deleteMediaType (config);
                return SUCCEEDED (hr);
            }
        }

        return false;
    }

    // Finds a filter's first pin with the given direction (and, optionally, a
    // matching name). Returns true and fills 'result' on success.
    // NOTE(review): QueryPinInfo AddRefs info.pFilter, which is never
    // Released here — looks like a reference leak; confirm before changing.
    static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
                        ComSmartPtr<IPin>& result, const char* pinName = nullptr)
    {
        ComSmartPtr<IEnumPins> enumerator;
        ComSmartPtr<IPin> pin;

        filter->EnumPins (enumerator.resetAndGetPointerAddress());

        while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
        {
            PIN_DIRECTION dir;
            pin->QueryDirection (&dir);

            if (wantedDirection == dir)
            {
                PIN_INFO info = { 0 };
                pin->QueryPinInfo (&info);

                if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
                {
                    result = pin;
                    return true;
                }
            }
        }

        return false;
    }

    // Connects first's output pin to second's input pin via the graph builder
    // (which may insert intermediate transform filters).
    bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
    {
        ComSmartPtr<IPin> in, out;

        return getPin (first, PINDIR_OUTPUT, out)
                && getPin (second, PINDIR_INPUT, in)
                && SUCCEEDED (graphBuilder->Connect (out, in));
    }

    // Registers the graph in the Running Object Table so tools like GraphEdit
    // can attach to it for debugging.
    // NOTE(review): 'buffer' is passed to CreateItemMoniker uninitialized —
    // upstream versions format a "FilterGraph %08x pid %08x" name into it
    // first; as written this reads indeterminate stack data. Confirm and fix.
    bool addGraphToRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;
        if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            return false;

        ComSmartPtr<IMoniker> moniker;
        WCHAR buffer[128];
        HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
        if (FAILED (hr))
            return false;

        graphRegistrationID = 0;
        return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
    }

    void removeGraphFromRot()
    {
        ComSmartPtr<IRunningObjectTable> rot;

        if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
            rot->Revoke (graphRegistrationID);
    }

    // Defined out-of-line (after ViewerComponent) — tells every attached
    // viewer that this Pimpl is being destroyed.
    void disconnectAnyViewers();

    // Standard DirectShow media-type teardown: free the format block and any
    // attached IUnknown, then the AM_MEDIA_TYPE struct itself.
    static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
    {
        if (pmt->cbFormat != 0)
            CoTaskMemFree ((PVOID) pmt->pbFormat);

        if (pmt->pUnk != nullptr)
            pmt->pUnk->Release();

        CoTaskMemFree (pmt);
    }

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
  574. //==============================================================================
  575. struct CameraDevice::ViewerComponent : public Component,
  576. public ChangeListener
  577. {
  578. ViewerComponent (CameraDevice& d)
  579. : owner (d.pimpl), maxFPS (15), lastRepaintTime (0)
  580. {
  581. setOpaque (true);
  582. owner->addChangeListener (this);
  583. owner->addUser();
  584. owner->viewerComps.add (this);
  585. setSize (owner->width, owner->height);
  586. }
  587. ~ViewerComponent()
  588. {
  589. if (owner != nullptr)
  590. {
  591. owner->viewerComps.removeFirstMatchingValue (this);
  592. owner->removeUser();
  593. owner->removeChangeListener (this);
  594. }
  595. }
  596. void ownerDeleted()
  597. {
  598. owner = nullptr;
  599. }
  600. void paint (Graphics& g) override
  601. {
  602. g.setColour (Colours::black);
  603. g.setImageResamplingQuality (Graphics::lowResamplingQuality);
  604. if (owner != nullptr)
  605. owner->drawCurrentImage (g, getLocalBounds());
  606. else
  607. g.fillAll();
  608. }
  609. void changeListenerCallback (ChangeBroadcaster*) override
  610. {
  611. const int64 now = Time::currentTimeMillis();
  612. if (now >= lastRepaintTime + (1000 / maxFPS))
  613. {
  614. lastRepaintTime = now;
  615. repaint();
  616. if (owner != nullptr)
  617. maxFPS = owner->previewMaxFPS;
  618. }
  619. }
  620. private:
  621. Pimpl* owner;
  622. int maxFPS;
  623. int64 lastRepaintTime;
  624. };
  625. void CameraDevice::Pimpl::disconnectAnyViewers()
  626. {
  627. for (int i = viewerComps.size(); --i >= 0;)
  628. viewerComps.getUnchecked(i)->ownerDeleted();
  629. }
// Recordings are produced by the WM ASF writer (see createFileCaptureFilter),
// so files written by this implementation use the Windows Media Video suffix.
String CameraDevice::getFileExtension()
{
    return ".wmv";
}