The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

793 lines
27KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
// Callback interface through which the DirectShow sample-grabber filter hands
// frames to client code. Redeclared locally with the exact original vtable
// layout — presumably because the SDK header that defined it (qedit.h) isn't
// available in this build environment; NOTE(review): confirm against the SDK.
interface ISampleGrabberCB  : public IUnknown
{
    // Invoked with a complete media sample (callback mode 0).
    virtual STDMETHODIMP SampleCB (double, IMediaSample*) = 0;

    // Invoked with a raw pointer to the frame buffer and its length in bytes
    // (callback mode 1 — the mode this file selects via SetCallback below).
    virtual STDMETHODIMP BufferCB (double, BYTE*, long) = 0;
};
// The sample-grabber filter's control interface, redeclared locally with the
// exact original method order (COM vtable compatibility depends on it).
interface ISampleGrabber  : public IUnknown
{
    // If TRUE, the filter stops the graph after one sample.
    virtual HRESULT STDMETHODCALLTYPE SetOneShot (BOOL) = 0;

    // Restricts which media type the grabber's input pin will accept.
    virtual HRESULT STDMETHODCALLTYPE SetMediaType (const AM_MEDIA_TYPE*) = 0;

    // Retrieves the media type actually negotiated on connection.
    virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType (AM_MEDIA_TYPE*) = 0;

    // If TRUE, the filter keeps an internal copy of each sample.
    virtual HRESULT STDMETHODCALLTYPE SetBufferSamples (BOOL) = 0;

    virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer (long*, long*) = 0;
    virtual HRESULT STDMETHODCALLTYPE GetCurrentSample (IMediaSample**) = 0;

    // Registers a callback; the second argument selects SampleCB (0) or BufferCB (1).
    virtual HRESULT STDMETHODCALLTYPE SetCallback (ISampleGrabberCB*, long) = 0;
};
// GUIDs for the interfaces/filters declared above. The byte values must match
// the registered DirectShow GUIDs exactly, so do not edit them.
static const IID IID_ISampleGrabberCB    = { 0x0579154A, 0x2B53, 0x4994, { 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const IID IID_ISampleGrabber      = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const CLSID CLSID_SampleGrabber   = { 0xC1F400A0, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer    = { 0xC1F400A4, 0x3F08, 0x11d3, { 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
  36. struct CameraDevice::Pimpl : public ChangeBroadcaster
  37. {
  38. Pimpl (const String&, int index,
  39. int minWidth, int minHeight,
  40. int maxWidth, int maxHeight)
  41. : isRecording (false),
  42. openedSuccessfully (false),
  43. imageNeedsFlipping (false),
  44. width (0), height (0),
  45. activeUsers (0),
  46. recordNextFrameTime (false),
  47. previewMaxFPS (60)
  48. {
  49. HRESULT hr = captureGraphBuilder.CoCreateInstance (CLSID_CaptureGraphBuilder2);
  50. if (FAILED (hr))
  51. return;
  52. filter = enumerateCameras (nullptr, index);
  53. if (filter == nullptr)
  54. return;
  55. hr = graphBuilder.CoCreateInstance (CLSID_FilterGraph);
  56. if (FAILED (hr))
  57. return;
  58. hr = captureGraphBuilder->SetFiltergraph (graphBuilder);
  59. if (FAILED (hr))
  60. return;
  61. hr = graphBuilder.QueryInterface (mediaControl);
  62. if (FAILED (hr))
  63. return;
  64. {
  65. ComSmartPtr<IAMStreamConfig> streamConfig;
  66. hr = captureGraphBuilder->FindInterface (&PIN_CATEGORY_CAPTURE, 0, filter,
  67. IID_IAMStreamConfig, (void**) streamConfig.resetAndGetPointerAddress());
  68. if (streamConfig != nullptr)
  69. {
  70. getVideoSizes (streamConfig);
  71. if (! selectVideoSize (streamConfig, minWidth, minHeight, maxWidth, maxHeight))
  72. return;
  73. }
  74. }
  75. hr = graphBuilder->AddFilter (filter, _T("Video Capture"));
  76. if (FAILED (hr))
  77. return;
  78. hr = smartTee.CoCreateInstance (CLSID_SmartTee);
  79. if (FAILED (hr))
  80. return;
  81. hr = graphBuilder->AddFilter (smartTee, _T("Smart Tee"));
  82. if (FAILED (hr))
  83. return;
  84. if (! connectFilters (filter, smartTee))
  85. return;
  86. ComSmartPtr<IBaseFilter> sampleGrabberBase;
  87. hr = sampleGrabberBase.CoCreateInstance (CLSID_SampleGrabber);
  88. if (FAILED (hr))
  89. return;
  90. hr = sampleGrabberBase.QueryInterface (IID_ISampleGrabber, sampleGrabber);
  91. if (FAILED (hr))
  92. return;
  93. {
  94. AM_MEDIA_TYPE mt = { 0 };
  95. mt.majortype = MEDIATYPE_Video;
  96. mt.subtype = MEDIASUBTYPE_RGB24;
  97. mt.formattype = FORMAT_VideoInfo;
  98. sampleGrabber->SetMediaType (&mt);
  99. }
  100. callback = new GrabberCallback (*this);
  101. hr = sampleGrabber->SetCallback (callback, 1);
  102. hr = graphBuilder->AddFilter (sampleGrabberBase, _T("Sample Grabber"));
  103. if (FAILED (hr))
  104. return;
  105. ComSmartPtr<IPin> grabberInputPin;
  106. if (! (getPin (smartTee, PINDIR_OUTPUT, smartTeeCaptureOutputPin, "capture")
  107. && getPin (smartTee, PINDIR_OUTPUT, smartTeePreviewOutputPin, "preview")
  108. && getPin (sampleGrabberBase, PINDIR_INPUT, grabberInputPin)))
  109. return;
  110. hr = graphBuilder->Connect (smartTeePreviewOutputPin, grabberInputPin);
  111. if (FAILED (hr))
  112. return;
  113. AM_MEDIA_TYPE mt = { 0 };
  114. hr = sampleGrabber->GetConnectedMediaType (&mt);
  115. VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*) (mt.pbFormat);
  116. width = pVih->bmiHeader.biWidth;
  117. height = pVih->bmiHeader.biHeight;
  118. ComSmartPtr<IBaseFilter> nullFilter;
  119. hr = nullFilter.CoCreateInstance (CLSID_NullRenderer);
  120. hr = graphBuilder->AddFilter (nullFilter, _T("Null Renderer"));
  121. if (connectFilters (sampleGrabberBase, nullFilter)
  122. && addGraphToRot())
  123. {
  124. activeImage = Image (Image::RGB, width, height, true);
  125. loadingImage = Image (Image::RGB, width, height, true);
  126. openedSuccessfully = true;
  127. }
  128. }
  129. ~Pimpl()
  130. {
  131. if (mediaControl != nullptr)
  132. mediaControl->Stop();
  133. removeGraphFromRot();
  134. disconnectAnyViewers();
  135. if (sampleGrabber != nullptr)
  136. {
  137. sampleGrabber->SetCallback (nullptr, 0);
  138. sampleGrabber = nullptr;
  139. }
  140. callback = nullptr;
  141. graphBuilder = nullptr;
  142. mediaControl = nullptr;
  143. filter = nullptr;
  144. captureGraphBuilder = nullptr;
  145. smartTee = nullptr;
  146. smartTeePreviewOutputPin = nullptr;
  147. smartTeeCaptureOutputPin = nullptr;
  148. asfWriter = nullptr;
  149. }
  150. bool openedOk() const noexcept { return openedSuccessfully; }
  151. void startRecordingToFile (const File& file, int quality)
  152. {
  153. addUser();
  154. isRecording = createFileCaptureFilter (file, quality);
  155. }
  156. void stopRecording()
  157. {
  158. if (isRecording)
  159. {
  160. removeFileCaptureFilter();
  161. removeUser();
  162. isRecording = false;
  163. }
  164. }
  165. Time getTimeOfFirstRecordedFrame() const
  166. {
  167. return firstRecordedTime;
  168. }
  169. void addListener (CameraDevice::Listener* listenerToAdd)
  170. {
  171. const ScopedLock sl (listenerLock);
  172. if (listeners.size() == 0)
  173. addUser();
  174. listeners.addIfNotAlreadyThere (listenerToAdd);
  175. }
  176. void removeListener (CameraDevice::Listener* listenerToRemove)
  177. {
  178. const ScopedLock sl (listenerLock);
  179. listeners.removeAllInstancesOf (listenerToRemove);
  180. if (listeners.size() == 0)
  181. removeUser();
  182. }
  183. void callListeners (const Image& image)
  184. {
  185. const ScopedLock sl (listenerLock);
  186. for (int i = listeners.size(); --i >= 0;)
  187. if (CameraDevice::Listener* const l = listeners[i])
  188. l->imageReceived (image);
  189. }
  190. void addUser()
  191. {
  192. if (openedSuccessfully && activeUsers++ == 0)
  193. mediaControl->Run();
  194. }
  195. void removeUser()
  196. {
  197. if (openedSuccessfully && --activeUsers == 0)
  198. mediaControl->Stop();
  199. }
  200. void handleFrame (double /*time*/, BYTE* buffer, long /*bufferSize*/)
  201. {
  202. if (recordNextFrameTime)
  203. {
  204. const double defaultCameraLatency = 0.1;
  205. firstRecordedTime = Time::getCurrentTime() - RelativeTime (defaultCameraLatency);
  206. recordNextFrameTime = false;
  207. ComSmartPtr<IPin> pin;
  208. if (getPin (filter, PINDIR_OUTPUT, pin))
  209. {
  210. ComSmartPtr<IAMPushSource> pushSource;
  211. HRESULT hr = pin.QueryInterface (pushSource);
  212. if (pushSource != nullptr)
  213. {
  214. REFERENCE_TIME latency = 0;
  215. hr = pushSource->GetLatency (&latency);
  216. firstRecordedTime = firstRecordedTime - RelativeTime ((double) latency);
  217. }
  218. }
  219. }
  220. {
  221. const int lineStride = width * 3;
  222. const ScopedLock sl (imageSwapLock);
  223. {
  224. loadingImage.duplicateIfShared();
  225. const Image::BitmapData destData (loadingImage, 0, 0, width, height, Image::BitmapData::writeOnly);
  226. for (int i = 0; i < height; ++i)
  227. memcpy (destData.getLinePointer ((height - 1) - i),
  228. buffer + lineStride * i,
  229. lineStride);
  230. }
  231. imageNeedsFlipping = true;
  232. }
  233. if (listeners.size() > 0)
  234. callListeners (loadingImage);
  235. sendChangeMessage();
  236. }
  237. void drawCurrentImage (Graphics& g, Rectangle<int> area)
  238. {
  239. if (imageNeedsFlipping)
  240. {
  241. const ScopedLock sl (imageSwapLock);
  242. std::swap (loadingImage, activeImage);
  243. imageNeedsFlipping = false;
  244. }
  245. Rectangle<int> centred (RectanglePlacement (RectanglePlacement::centred)
  246. .appliedTo (Rectangle<int> (width, height), area));
  247. RectangleList<int> borders (area);
  248. borders.subtract (centred);
  249. g.setColour (Colours::black);
  250. g.fillRectList (borders);
  251. g.drawImage (activeImage, centred.getX(), centred.getY(),
  252. centred.getWidth(), centred.getHeight(), 0, 0, width, height);
  253. }
  254. bool createFileCaptureFilter (const File& file, int quality)
  255. {
  256. removeFileCaptureFilter();
  257. file.deleteFile();
  258. mediaControl->Stop();
  259. firstRecordedTime = Time();
  260. recordNextFrameTime = true;
  261. previewMaxFPS = 60;
  262. HRESULT hr = asfWriter.CoCreateInstance (CLSID_WMAsfWriter);
  263. if (SUCCEEDED (hr))
  264. {
  265. ComSmartPtr<IFileSinkFilter> fileSink;
  266. hr = asfWriter.QueryInterface (fileSink);
  267. if (SUCCEEDED (hr))
  268. {
  269. hr = fileSink->SetFileName (file.getFullPathName().toWideCharPointer(), 0);
  270. if (SUCCEEDED (hr))
  271. {
  272. hr = graphBuilder->AddFilter (asfWriter, _T("AsfWriter"));
  273. if (SUCCEEDED (hr))
  274. {
  275. ComSmartPtr<IConfigAsfWriter> asfConfig;
  276. hr = asfWriter.QueryInterface (asfConfig);
  277. asfConfig->SetIndexMode (true);
  278. ComSmartPtr<IWMProfileManager> profileManager;
  279. hr = WMCreateProfileManager (profileManager.resetAndGetPointerAddress());
  280. // This gibberish is the DirectShow profile for a video-only wmv file.
  281. String prof ("<profile version=\"589824\" storageformat=\"1\" name=\"Quality\" description=\"Quality type for output.\">"
  282. "<streamconfig majortype=\"{73646976-0000-0010-8000-00AA00389B71}\" streamnumber=\"1\" "
  283. "streamname=\"Video Stream\" inputname=\"Video409\" bitrate=\"894960\" "
  284. "bufferwindow=\"0\" reliabletransport=\"1\" decodercomplexity=\"AU\" rfc1766langid=\"en-us\">"
  285. "<videomediaprops maxkeyframespacing=\"50000000\" quality=\"90\"/>"
  286. "<wmmediatype subtype=\"{33564D57-0000-0010-8000-00AA00389B71}\" bfixedsizesamples=\"0\" "
  287. "btemporalcompression=\"1\" lsamplesize=\"0\">"
  288. "<videoinfoheader dwbitrate=\"894960\" dwbiterrorrate=\"0\" avgtimeperframe=\"$AVGTIMEPERFRAME\">"
  289. "<rcsource left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
  290. "<rctarget left=\"0\" top=\"0\" right=\"$WIDTH\" bottom=\"$HEIGHT\"/>"
  291. "<bitmapinfoheader biwidth=\"$WIDTH\" biheight=\"$HEIGHT\" biplanes=\"1\" bibitcount=\"24\" "
  292. "bicompression=\"WMV3\" bisizeimage=\"0\" bixpelspermeter=\"0\" biypelspermeter=\"0\" "
  293. "biclrused=\"0\" biclrimportant=\"0\"/>"
  294. "</videoinfoheader>"
  295. "</wmmediatype>"
  296. "</streamconfig>"
  297. "</profile>");
  298. const int fps[] = { 10, 15, 30 };
  299. int maxFramesPerSecond = fps [jlimit (0, numElementsInArray (fps) - 1, quality & 0xff)];
  300. if ((quality & 0xff000000) != 0) // (internal hacky way to pass explicit frame rates for testing)
  301. maxFramesPerSecond = (quality >> 24) & 0xff;
  302. prof = prof.replace ("$WIDTH", String (width))
  303. .replace ("$HEIGHT", String (height))
  304. .replace ("$AVGTIMEPERFRAME", String (10000000 / maxFramesPerSecond));
  305. ComSmartPtr<IWMProfile> currentProfile;
  306. hr = profileManager->LoadProfileByData (prof.toWideCharPointer(), currentProfile.resetAndGetPointerAddress());
  307. hr = asfConfig->ConfigureFilterUsingProfile (currentProfile);
  308. if (SUCCEEDED (hr))
  309. {
  310. ComSmartPtr<IPin> asfWriterInputPin;
  311. if (getPin (asfWriter, PINDIR_INPUT, asfWriterInputPin, "Video Input 01"))
  312. {
  313. hr = graphBuilder->Connect (smartTeeCaptureOutputPin, asfWriterInputPin);
  314. if (SUCCEEDED (hr) && openedSuccessfully && activeUsers > 0
  315. && SUCCEEDED (mediaControl->Run()))
  316. {
  317. previewMaxFPS = (quality < 2) ? 15 : 25; // throttle back the preview comps to try to leave the cpu free for encoding
  318. if ((quality & 0x00ff0000) != 0) // (internal hacky way to pass explicit frame rates for testing)
  319. previewMaxFPS = (quality >> 16) & 0xff;
  320. return true;
  321. }
  322. }
  323. }
  324. }
  325. }
  326. }
  327. }
  328. removeFileCaptureFilter();
  329. if (openedSuccessfully && activeUsers > 0)
  330. mediaControl->Run();
  331. return false;
  332. }
  333. void removeFileCaptureFilter()
  334. {
  335. mediaControl->Stop();
  336. if (asfWriter != nullptr)
  337. {
  338. graphBuilder->RemoveFilter (asfWriter);
  339. asfWriter = nullptr;
  340. }
  341. if (openedSuccessfully && activeUsers > 0)
  342. mediaControl->Run();
  343. previewMaxFPS = 60;
  344. }
  345. static ComSmartPtr<IBaseFilter> enumerateCameras (StringArray* names, const int deviceIndexToOpen)
  346. {
  347. int index = 0;
  348. ComSmartPtr<ICreateDevEnum> pDevEnum;
  349. if (SUCCEEDED (pDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum)))
  350. {
  351. ComSmartPtr<IEnumMoniker> enumerator;
  352. HRESULT hr = pDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, enumerator.resetAndGetPointerAddress(), 0);
  353. if (SUCCEEDED (hr) && enumerator != nullptr)
  354. {
  355. ComSmartPtr<IMoniker> moniker;
  356. ULONG fetched;
  357. while (enumerator->Next (1, moniker.resetAndGetPointerAddress(), &fetched) == S_OK)
  358. {
  359. ComSmartPtr<IBaseFilter> captureFilter;
  360. hr = moniker->BindToObject (0, 0, IID_IBaseFilter, (void**) captureFilter.resetAndGetPointerAddress());
  361. if (SUCCEEDED (hr))
  362. {
  363. ComSmartPtr<IPropertyBag> propertyBag;
  364. hr = moniker->BindToStorage (0, 0, IID_IPropertyBag, (void**) propertyBag.resetAndGetPointerAddress());
  365. if (SUCCEEDED (hr))
  366. {
  367. VARIANT var;
  368. var.vt = VT_BSTR;
  369. hr = propertyBag->Read (_T("FriendlyName"), &var, 0);
  370. propertyBag = nullptr;
  371. if (SUCCEEDED (hr))
  372. {
  373. if (names != nullptr)
  374. names->add (var.bstrVal);
  375. if (index == deviceIndexToOpen)
  376. return captureFilter;
  377. ++index;
  378. }
  379. }
  380. }
  381. }
  382. }
  383. }
  384. return nullptr;
  385. }
  386. static StringArray getAvailableDevices()
  387. {
  388. StringArray devs;
  389. enumerateCameras (&devs, -1);
  390. return devs;
  391. }
  392. class GrabberCallback : public ComBaseClassHelperBase<ISampleGrabberCB>
  393. {
  394. public:
  395. GrabberCallback (Pimpl& p)
  396. : ComBaseClassHelperBase<ISampleGrabberCB> (0), owner (p) {}
  397. JUCE_COMRESULT QueryInterface (REFIID refId, void** result)
  398. {
  399. if (refId == IID_ISampleGrabberCB)
  400. return castToType<ISampleGrabberCB> (result);
  401. return ComBaseClassHelperBase<ISampleGrabberCB>::QueryInterface (refId, result);
  402. }
  403. STDMETHODIMP SampleCB (double, IMediaSample*) { return E_FAIL; }
  404. STDMETHODIMP BufferCB (double time, BYTE* buffer, long bufferSize)
  405. {
  406. owner.handleFrame (time, buffer, bufferSize);
  407. return S_OK;
  408. }
  409. private:
  410. Pimpl& owner;
  411. JUCE_DECLARE_NON_COPYABLE (GrabberCallback)
  412. };
  413. ComSmartPtr<GrabberCallback> callback;
  414. Array<CameraDevice::Listener*> listeners;
  415. CriticalSection listenerLock;
  416. bool isRecording, openedSuccessfully;
  417. int width, height;
  418. Time firstRecordedTime;
  419. Array<ViewerComponent*> viewerComps;
  420. ComSmartPtr<ICaptureGraphBuilder2> captureGraphBuilder;
  421. ComSmartPtr<IBaseFilter> filter, smartTee, asfWriter;
  422. ComSmartPtr<IGraphBuilder> graphBuilder;
  423. ComSmartPtr<ISampleGrabber> sampleGrabber;
  424. ComSmartPtr<IMediaControl> mediaControl;
  425. ComSmartPtr<IPin> smartTeePreviewOutputPin, smartTeeCaptureOutputPin;
  426. int activeUsers;
  427. Array<int> widths, heights;
  428. DWORD graphRegistrationID;
  429. CriticalSection imageSwapLock;
  430. bool imageNeedsFlipping;
  431. Image loadingImage, activeImage;
  432. bool recordNextFrameTime;
  433. int previewMaxFPS;
  434. private:
  435. void getVideoSizes (IAMStreamConfig* const streamConfig)
  436. {
  437. widths.clear();
  438. heights.clear();
  439. int count = 0, size = 0;
  440. streamConfig->GetNumberOfCapabilities (&count, &size);
  441. if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
  442. {
  443. for (int i = 0; i < count; ++i)
  444. {
  445. VIDEO_STREAM_CONFIG_CAPS scc;
  446. AM_MEDIA_TYPE* config;
  447. HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);
  448. if (SUCCEEDED (hr))
  449. {
  450. const int w = scc.InputSize.cx;
  451. const int h = scc.InputSize.cy;
  452. bool duplicate = false;
  453. for (int j = widths.size(); --j >= 0;)
  454. {
  455. if (w == widths.getUnchecked (j) && h == heights.getUnchecked (j))
  456. {
  457. duplicate = true;
  458. break;
  459. }
  460. }
  461. if (! duplicate)
  462. {
  463. DBG ("Camera capture size: " + String (w) + ", " + String (h));
  464. widths.add (w);
  465. heights.add (h);
  466. }
  467. deleteMediaType (config);
  468. }
  469. }
  470. }
  471. }
  472. bool selectVideoSize (IAMStreamConfig* const streamConfig,
  473. const int minWidth, const int minHeight,
  474. const int maxWidth, const int maxHeight)
  475. {
  476. int count = 0, size = 0, bestArea = 0, bestIndex = -1;
  477. streamConfig->GetNumberOfCapabilities (&count, &size);
  478. if (size == sizeof (VIDEO_STREAM_CONFIG_CAPS))
  479. {
  480. AM_MEDIA_TYPE* config;
  481. VIDEO_STREAM_CONFIG_CAPS scc;
  482. for (int i = 0; i < count; ++i)
  483. {
  484. HRESULT hr = streamConfig->GetStreamCaps (i, &config, (BYTE*) &scc);
  485. if (SUCCEEDED (hr))
  486. {
  487. if (scc.InputSize.cx >= minWidth
  488. && scc.InputSize.cy >= minHeight
  489. && scc.InputSize.cx <= maxWidth
  490. && scc.InputSize.cy <= maxHeight)
  491. {
  492. int area = scc.InputSize.cx * scc.InputSize.cy;
  493. if (area > bestArea)
  494. {
  495. bestIndex = i;
  496. bestArea = area;
  497. }
  498. }
  499. deleteMediaType (config);
  500. }
  501. }
  502. if (bestIndex >= 0)
  503. {
  504. HRESULT hr = streamConfig->GetStreamCaps (bestIndex, &config, (BYTE*) &scc);
  505. hr = streamConfig->SetFormat (config);
  506. deleteMediaType (config);
  507. return SUCCEEDED (hr);
  508. }
  509. }
  510. return false;
  511. }
  512. static bool getPin (IBaseFilter* filter, const PIN_DIRECTION wantedDirection,
  513. ComSmartPtr<IPin>& result, const char* pinName = nullptr)
  514. {
  515. ComSmartPtr<IEnumPins> enumerator;
  516. ComSmartPtr<IPin> pin;
  517. filter->EnumPins (enumerator.resetAndGetPointerAddress());
  518. while (enumerator->Next (1, pin.resetAndGetPointerAddress(), 0) == S_OK)
  519. {
  520. PIN_DIRECTION dir;
  521. pin->QueryDirection (&dir);
  522. if (wantedDirection == dir)
  523. {
  524. PIN_INFO info = { 0 };
  525. pin->QueryPinInfo (&info);
  526. if (pinName == nullptr || String (pinName).equalsIgnoreCase (String (info.achName)))
  527. {
  528. result = pin;
  529. return true;
  530. }
  531. }
  532. }
  533. return false;
  534. }
  535. bool connectFilters (IBaseFilter* const first, IBaseFilter* const second) const
  536. {
  537. ComSmartPtr<IPin> in, out;
  538. return getPin (first, PINDIR_OUTPUT, out)
  539. && getPin (second, PINDIR_INPUT, in)
  540. && SUCCEEDED (graphBuilder->Connect (out, in));
  541. }
  542. bool addGraphToRot()
  543. {
  544. ComSmartPtr<IRunningObjectTable> rot;
  545. if (FAILED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
  546. return false;
  547. ComSmartPtr<IMoniker> moniker;
  548. WCHAR buffer[128];
  549. HRESULT hr = CreateItemMoniker (_T("!"), buffer, moniker.resetAndGetPointerAddress());
  550. if (FAILED (hr))
  551. return false;
  552. graphRegistrationID = 0;
  553. return SUCCEEDED (rot->Register (0, graphBuilder, moniker, &graphRegistrationID));
  554. }
  555. void removeGraphFromRot()
  556. {
  557. ComSmartPtr<IRunningObjectTable> rot;
  558. if (SUCCEEDED (GetRunningObjectTable (0, rot.resetAndGetPointerAddress())))
  559. rot->Revoke (graphRegistrationID);
  560. }
  561. void disconnectAnyViewers();
  562. static void deleteMediaType (AM_MEDIA_TYPE* const pmt)
  563. {
  564. if (pmt->cbFormat != 0)
  565. CoTaskMemFree ((PVOID) pmt->pbFormat);
  566. if (pmt->pUnk != nullptr)
  567. pmt->pUnk->Release();
  568. CoTaskMemFree (pmt);
  569. }
  570. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  571. };
  572. //==============================================================================
  573. struct CameraDevice::ViewerComponent : public Component,
  574. public ChangeListener
  575. {
  576. ViewerComponent (CameraDevice& d)
  577. : owner (d.pimpl), maxFPS (15), lastRepaintTime (0)
  578. {
  579. setOpaque (true);
  580. owner->addChangeListener (this);
  581. owner->addUser();
  582. owner->viewerComps.add (this);
  583. setSize (owner->width, owner->height);
  584. }
  585. ~ViewerComponent()
  586. {
  587. if (owner != nullptr)
  588. {
  589. owner->viewerComps.removeFirstMatchingValue (this);
  590. owner->removeUser();
  591. owner->removeChangeListener (this);
  592. }
  593. }
  594. void ownerDeleted()
  595. {
  596. owner = nullptr;
  597. }
  598. void paint (Graphics& g) override
  599. {
  600. g.setColour (Colours::black);
  601. g.setImageResamplingQuality (Graphics::lowResamplingQuality);
  602. if (owner != nullptr)
  603. owner->drawCurrentImage (g, getLocalBounds());
  604. else
  605. g.fillAll();
  606. }
  607. void changeListenerCallback (ChangeBroadcaster*) override
  608. {
  609. const int64 now = Time::currentTimeMillis();
  610. if (now >= lastRepaintTime + (1000 / maxFPS))
  611. {
  612. lastRepaintTime = now;
  613. repaint();
  614. if (owner != nullptr)
  615. maxFPS = owner->previewMaxFPS;
  616. }
  617. }
  618. private:
  619. Pimpl* owner;
  620. int maxFPS;
  621. int64 lastRepaintTime;
  622. };
  623. void CameraDevice::Pimpl::disconnectAnyViewers()
  624. {
  625. for (int i = viewerComps.size(); --i >= 0;)
  626. viewerComps.getUnchecked(i)->ownerDeleted();
  627. }
  628. String CameraDevice::getFileExtension()
  629. {
  630. return ".wmv";
  631. }