The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2022 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 7 End-User License
   Agreement and JUCE Privacy Policy.

   End User License Agreement: www.juce.com/juce-7-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
#if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_15
 #define JUCE_USE_NEW_CAMERA_API 1
#endif
struct CameraDevice::Pimpl
{
    Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/,
           int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          deviceName (deviceNameToUse)
    {
        // Pick the photo-output implementation that matches the OS version at runtime.
        imageOutput = []() -> std::unique_ptr<ImageOutputBase>
        {
           #if JUCE_USE_NEW_CAMERA_API
            if (@available (macOS 10.15, *))
                return std::make_unique<PostCatalinaPhotoOutput>();
           #endif

            return std::make_unique<PreCatalinaStillImageOutput>();
        }();

        session = [[AVCaptureSession alloc] init];

        session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
                                               : AVCaptureSessionPresetMedium;

        refreshConnections();

        static DelegateClass cls;
        callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
        DelegateClass::setOwner (callbackDelegate, this);

        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
        [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
                                                 selector: @selector (captureSessionRuntimeError:)
                                                     name: AVCaptureSessionRuntimeErrorNotification
                                                   object: session];
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    }
    ~Pimpl()
    {
        [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];

        [session stopRunning];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        [session release];
        [callbackDelegate release];
    }
    //==============================================================================
    bool openedOk() const noexcept       { return openingError.isEmpty(); }

    void startSession()
    {
        if (! [session isRunning])
            [session startRunning];
    }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerImageCapture();
    }
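    // Illustrative sketch, not part of this file: how client code typically reaches
    // takeStillPicture() through the public CameraDevice wrapper. The device index,
    // output file and lambda body are placeholders.
    //
    //     std::unique_ptr<CameraDevice> camera (CameraDevice::openDevice (0));
    //
    //     if (camera != nullptr)
    //     {
    //         camera->takeStillPicture ([] (const Image& picture)
    //         {
    //             // Called asynchronously on the message thread with the captured frame.
    //             PNGImageFormat png;
    //             FileOutputStream out (File::getSpecialLocation (File::userPicturesDirectory)
    //                                       .getChildFile ("snapshot.png"));
    //
    //             if (out.openedOk())
    //                 png.writeImageToStream (picture, out);
    //         });
    //     }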
    void startRecordingToFile (const File& file, int /*quality*/)
    {
        stopRecording();
        refreshIfNeeded();
        firstPresentationTime = Time::getCurrentTime();
        file.deleteFile();

        startSession();
        isRecording = true;
        [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
                                recordingDelegate: callbackDelegate];
    }

    void stopRecording()
    {
        if (isRecording)
        {
            [fileOutput stopRecording];
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime;
    }
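    // Illustrative sketch, not part of this file: recording through the public
    // CameraDevice wrapper, which forwards to the methods above. The file location
    // is a placeholder; getFileExtension() returns ".mov" on macOS (see end of file).
    //
    //     std::unique_ptr<CameraDevice> camera (CameraDevice::openDevice (0));
    //
    //     if (camera != nullptr)
    //     {
    //         auto movie = File::getSpecialLocation (File::userMoviesDirectory)
    //                          .getChildFile ("capture" + CameraDevice::getFileExtension());
    //
    //         camera->startRecordingToFile (movie);
    //         // ... some time later ...
    //         camera->stopRecording();
    //     }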
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerImageCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }
    static NSArray* getCaptureDevices()
    {
       #if JUCE_USE_NEW_CAMERA_API
        if (@available (macOS 10.15, *))
        {
            auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                                                                  AVCaptureDeviceTypeExternalUnknown]
                                                                                     mediaType: AVMediaTypeVideo
                                                                                      position: AVCaptureDevicePositionUnspecified];
            return [discovery devices];
        }
       #endif

        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        for (AVCaptureDevice* device : getCaptureDevices())
            results.add (nsStringToJuce ([device localizedName]));

        return results;
    }
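    // Illustrative sketch, not part of this file: the names returned here are what
    // the public CameraDevice::getAvailableDevices() hands to client code, and their
    // indices can be passed back to CameraDevice::openDevice().
    //
    //     auto deviceNames = CameraDevice::getAvailableDevices();
    //
    //     for (int i = 0; i < deviceNames.size(); ++i)
    //         DBG (String (i) + ": " + deviceNames[i]);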
    AVCaptureSession* getCaptureSession()
    {
        return session;
    }

    NSView* createVideoCapturePreview()
    {
        // The video preview must be created before the capture session is
        // started. Make sure you haven't called `addListener`,
        // `startRecordingToFile`, or `takeStillPicture` before calling this
        // function.
        jassert (! [session isRunning]);
        startSession();

        JUCE_AUTORELEASEPOOL
        {
            NSView* view = [[NSView alloc] init];
            [view setLayer: [AVCaptureVideoPreviewLayer layerWithSession: getCaptureSession()]];
            return view;
        }
    }
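    // Illustrative sketch, not part of this file: because of the ordering constraint
    // above, client code should create the viewer component before attaching listeners
    // or starting a capture. Assumes the enclosing class is a juce::Component that also
    // implements CameraDevice::Listener and owns a std::unique_ptr<Component> viewer.
    //
    //     std::unique_ptr<CameraDevice> camera (CameraDevice::openDevice (0));
    //
    //     if (camera != nullptr)
    //     {
    //         viewer.reset (camera->createViewerComponent());   // preview first...
    //         addAndMakeVisible (*viewer);
    //
    //         camera->addListener (this);                       // ...then start capturing
    //     }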
private:
    //==============================================================================
    struct DelegateClass  : public ObjCClass<NSObject>
    {
        DelegateClass()  : ObjCClass<NSObject> ("JUCECameraDelegate_")
        {
            addIvar<Pimpl*> ("owner");
            addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));

            addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),  didStartRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:),  didPauseRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:), didResumeRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            addMethod (@selector (captureSessionRuntimeError:), sessionRuntimeError);
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl& getOwner (id self)               { return *getIvar<Pimpl*> (self, "owner"); }

    private:
        static void didStartRecordingToOutputFileAtURL  (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didPauseRecordingToOutputFileAtURL  (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}

        static void sessionRuntimeError (id self, SEL, NSNotification* notification)
        {
            JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

            NSError* error = [notification.userInfo objectForKey: AVCaptureSessionErrorKey];
            auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
            getOwner (self).cameraSessionRuntimeError (errorString);
        }
    };
    //==============================================================================
    struct ImageOutputBase
    {
        virtual ~ImageOutputBase() = default;

        virtual void addImageCapture (AVCaptureSession*) = 0;
        virtual void removeImageCapture (AVCaptureSession*) = 0;
        virtual NSArray<AVCaptureConnection*>* getConnections() const = 0;
        virtual void triggerImageCapture (Pimpl& p) = 0;
    };
   #if JUCE_USE_NEW_CAMERA_API
    class API_AVAILABLE (macos (10.15)) PostCatalinaPhotoOutput  : public ImageOutputBase
    {
    public:
        PostCatalinaPhotoOutput()
        {
            static PhotoOutputDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
        }

        void addImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput != nil)
                return;

            imageOutput = [[AVCapturePhotoOutput alloc] init];
            [s addOutput: imageOutput];
        }

        void removeImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        NSArray<AVCaptureConnection*>* getConnections() const override
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        void triggerImageCapture (Pimpl& p) override
        {
            if (imageOutput == nil)
                return;

            PhotoOutputDelegateClass::setOwner (delegate.get(), &p);

            [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                         delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
        }

    private:
        class PhotoOutputDelegateClass  : public ObjCClass<NSObject>
        {
        public:
            PhotoOutputDelegateClass()  : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
            {
                addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto);
                addIvar<Pimpl*> ("owner");
                registerClass();
            }

            static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
            {
                if (error != nil)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("Still picture capture failed, error: " + errorString);
                    jassertfalse;

                    return;
                }

                auto* imageData = [photo fileDataRepresentation];
                auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                getOwner (self).imageCaptureFinished (image);
            }

            static Pimpl& getOwner (id self)         { return *getIvar<Pimpl*> (self, "owner"); }
            static void setOwner (id self, Pimpl* t) { object_setInstanceVariable (self, "owner", t); }
        };

        AVCapturePhotoOutput* imageOutput = nil;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;
    };
   #endif
    JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
    class PreCatalinaStillImageOutput  : public ImageOutputBase
    {
    public:
        void addImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput != nil)
                return;

            const auto codecType = []
            {
                if (@available (macOS 10.13, *))
                    return AVVideoCodecTypeJPEG;

                return AVVideoCodecJPEG;
            }();

            imageOutput = [[AVCaptureStillImageOutput alloc] init];
            auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: codecType, AVVideoCodecKey, nil];
            [imageOutput setOutputSettings: imageSettings];
            [imageSettings release];
            [s addOutput: imageOutput];
        }

        void removeImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        NSArray<AVCaptureConnection*>* getConnections() const override
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        void triggerImageCapture (Pimpl& p) override
        {
            if (auto* videoConnection = p.getVideoConnection())
            {
                [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
                                                         completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
                {
                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
                    p.imageCaptureFinished (image);
                }];
            }
        }

    private:
        AVCaptureStillImageOutput* imageOutput = nil;
    };
    JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    //==============================================================================
    void addImageCapture()
    {
        imageOutput->addImageCapture (session);
    }

    void addMovieCapture()
    {
        if (fileOutput == nil)
        {
            fileOutput = [[AVCaptureMovieFileOutput alloc] init];
            [session addOutput: fileOutput];
        }
    }

    void removeImageCapture()
    {
        imageOutput->removeImageCapture (session);
    }

    void removeMovieCapture()
    {
        if (fileOutput != nil)
        {
            [session removeOutput: fileOutput];
            [fileOutput release];
            fileOutput = nil;
        }
    }

    void removeCurrentSessionVideoInputs()
    {
        if (session != nil)
        {
            NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;

            for (AVCaptureDeviceInput* input : inputs)
                if ([input.device hasMediaType: AVMediaTypeVideo])
                    [session removeInput: input];
        }
    }
    void addInput()
    {
        if (currentInput == nil)
        {
            for (AVCaptureDevice* device : getCaptureDevices())
            {
                if (deviceName == nsStringToJuce ([device localizedName]))
                {
                    removeCurrentSessionVideoInputs();

                    NSError* err = nil;
                    AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
                                                                                               error: &err];

                    jassert (err == nil);

                    if ([session canAddInput: inputDevice])
                    {
                        [session addInput: inputDevice];
                        currentInput = inputDevice;
                    }
                    else
                    {
                        jassertfalse;
                        [inputDevice release];
                    }

                    return;
                }
            }
        }
    }

    void removeInput()
    {
        if (currentInput != nil)
        {
            [session removeInput: currentInput];
            [currentInput release];
            currentInput = nil;
        }
    }
    void refreshConnections()
    {
        // Tear down and rebuild the input and both outputs inside a single
        // begin/commitConfiguration transaction.
        [session beginConfiguration];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        addInput();
        addImageCapture();
        addMovieCapture();
        [session commitConfiguration];
    }

    void refreshIfNeeded()
    {
        if (getVideoConnection() == nullptr)
            refreshConnections();
    }

    AVCaptureConnection* getVideoConnection() const
    {
        // Return the first active, enabled connection that carries a video port.
        auto* connections = imageOutput->getConnections();

        if (connections != nil)
            for (AVCaptureConnection* connection in connections)
                if ([connection isActive] && [connection isEnabled])
                    for (AVCaptureInputPort* port in [connection inputPorts])
                        if ([[port mediaType] isEqual: AVMediaTypeVideo])
                            return connection;

        return nil;
    }
    void imageCaptureFinished (const Image& image)
    {
        handleImageCapture (image);

        MessageManager::callAsync ([weakRef = WeakReference<Pimpl> { this }, image]() mutable
        {
            if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
                weakRef->pictureTakenCallback (image);
        });
    }

    void handleImageCapture (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        // While listeners are attached, keep requesting frames so that they
        // receive a continuous stream of images.
        if (! listeners.isEmpty())
            triggerImageCapture();
    }

    void triggerImageCapture()
    {
        refreshIfNeeded();

        startSession();

        if (auto* videoConnection = getVideoConnection())
            imageOutput->triggerImageCapture (*this);
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (owner.onErrorOccurred != nullptr)
            owner.onErrorOccurred (error);
    }
    //==============================================================================
    CameraDevice& owner;
    String deviceName;

    AVCaptureSession* session = nil;
    AVCaptureMovieFileOutput* fileOutput = nil;
    std::unique_ptr<ImageOutputBase> imageOutput;
    AVCaptureDeviceInput* currentInput = nil;

    id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
    String openingError;

    Time firstPresentationTime;
    bool isRecording = false;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback = nullptr;

    //==============================================================================
    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
    JUCE_DECLARE_NON_COPYABLE       (Pimpl)
};
//==============================================================================
struct CameraDevice::ViewerComponent  : public NSViewComponent
{
    ViewerComponent (CameraDevice& device)
    {
        setView (device.pimpl->createVideoCapturePreview());
    }

    ~ViewerComponent()
    {
        setView (nil);
    }

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
String CameraDevice::getFileExtension()
{
    return ".mov";
}