/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2022 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 7 End-User License
   Agreement and JUCE Privacy Policy.

   End User License Agreement: www.juce.com/juce-7-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
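
// JUCE_USE_NEW_CAMERA_API selects the AVCapturePhotoOutput code path, which is only
// available from macOS 10.15 (Catalina); older systems fall back to the deprecated
// AVCaptureStillImageOutput path further down in this file.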
#if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_15
 #define JUCE_USE_NEW_CAMERA_API 1
#endif
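
// Pimpl owns a single AVCaptureSession together with its camera input, a movie-file
// output and a pluggable still-image output (see ImageOutputBase below).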
struct CameraDevice::Pimpl
{
    Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/,
           int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          deviceName (deviceNameToUse)
    {
        imageOutput = []() -> std::unique_ptr<ImageOutputBase>
        {
           #if JUCE_USE_NEW_CAMERA_API
            if (@available (macOS 10.15, *))
                return std::make_unique<PostCatalinaPhotoOutput>();
           #endif

            return std::make_unique<PreCatalinaStillImageOutput>();
        }();

        session = [[AVCaptureSession alloc] init];

        session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
                                               : AVCaptureSessionPresetMedium;

        refreshConnections();

        static DelegateClass cls;
        callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
        DelegateClass::setOwner (callbackDelegate, this);

        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
        [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
                                                 selector: @selector (captureSessionRuntimeError:)
                                                     name: AVCaptureSessionRuntimeErrorNotification
                                                   object: session];
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    }

    ~Pimpl()
    {
        [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];

        [session stopRunning];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        [session release];
        [callbackDelegate release];
    }

    //==============================================================================
    bool openedOk() const noexcept { return openingError.isEmpty(); }

    void startSession()
    {
        if (! [session isRunning])
            [session startRunning];
    }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerImageCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        stopRecording();
        refreshIfNeeded();

        firstPresentationTime = Time::getCurrentTime();
        file.deleteFile();

        startSession();
        isRecording = true;
        [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
                                recordingDelegate: callbackDelegate];
    }

    void stopRecording()
    {
        if (isRecording)
        {
            [fileOutput stopRecording];
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerImageCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

    static NSArray* getCaptureDevices()
    {
       #if JUCE_USE_NEW_CAMERA_API
        if (@available (macOS 10.15, *))
        {
            auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                                                                  AVCaptureDeviceTypeExternalUnknown]
                                                                                     mediaType: AVMediaTypeVideo
                                                                                      position: AVCaptureDevicePositionUnspecified];
            return [discovery devices];
        }
       #endif

        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        for (AVCaptureDevice* device : getCaptureDevices())
            results.add (nsStringToJuce ([device localizedName]));

        return results;
    }

    AVCaptureSession* getCaptureSession()
    {
        return session;
    }

    NSView* createVideoCapturePreview()
    {
        // The video preview must be created before the capture session is
        // started. Make sure you haven't called `addListener`,
        // `startRecordingToFile`, or `takeStillPicture` before calling this
        // function.
        jassert (! [session isRunning]);
        startSession();

        JUCE_AUTORELEASEPOOL
        {
            NSView* view = [[NSView alloc] init];
            [view setLayer: [AVCaptureVideoPreviewLayer layerWithSession: getCaptureSession()]];
            return view;
        }
    }

private:
    //==============================================================================
    struct DelegateClass : public ObjCClass<NSObject>
    {
        DelegateClass() : ObjCClass<NSObject> ("JUCECameraDelegate_")
        {
            addIvar<Pimpl*> ("owner");
            addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));

            addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        didStartRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:),        didPauseRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:),       didResumeRecordingToOutputFileAtURL);
            addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            addMethod (@selector (captureSessionRuntimeError:), sessionRuntimeError);
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl& getOwner (id self)               { return *getIvar<Pimpl*> (self, "owner"); }

    private:
        static void didStartRecordingToOutputFileAtURL  (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didPauseRecordingToOutputFileAtURL  (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}

        static void sessionRuntimeError (id self, SEL, NSNotification* notification)
        {
            JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

            NSError* error = [notification.userInfo objectForKey: AVCaptureSessionErrorKey];
            auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
            getOwner (self).cameraSessionRuntimeError (errorString);
        }
    };

    //==============================================================================
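    // Abstracts over the two still-image capture backends so the rest of Pimpl does not
    // need to know whether the new AVCapturePhotoOutput or the legacy
    // AVCaptureStillImageOutput API is in use.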
    struct ImageOutputBase
    {
        virtual ~ImageOutputBase() = default;

        virtual void addImageCapture (AVCaptureSession*) = 0;
        virtual void removeImageCapture (AVCaptureSession*) = 0;
        virtual NSArray<AVCaptureConnection*>* getConnections() const = 0;
        virtual void triggerImageCapture (Pimpl& p) = 0;
    };

   #if JUCE_USE_NEW_CAMERA_API
    class API_AVAILABLE (macos (10.15)) PostCatalinaPhotoOutput : public ImageOutputBase
    {
    public:
        PostCatalinaPhotoOutput()
        {
            static PhotoOutputDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
        }

        void addImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput != nil)
                return;

            imageOutput = [[AVCapturePhotoOutput alloc] init];
            [s addOutput: imageOutput];
        }

        void removeImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        NSArray<AVCaptureConnection*>* getConnections() const override
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        void triggerImageCapture (Pimpl& p) override
        {
            if (imageOutput == nil)
                return;

            PhotoOutputDelegateClass::setOwner (delegate.get(), &p);

            [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                         delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
        }

    private:
        class PhotoOutputDelegateClass : public ObjCClass<NSObject>
        {
        public:
            PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
            {
                addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto);
                addIvar<Pimpl*> ("owner");
                registerClass();
            }

            static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
            {
                if (error != nil)
                {
                    [[maybe_unused]] String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    JUCE_CAMERA_LOG ("Still picture capture failed, error: " + errorString);
                    jassertfalse;

                    return;
                }

                auto* imageData = [photo fileDataRepresentation];
                auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                getOwner (self).imageCaptureFinished (image);
            }

            static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
            static void setOwner (id self, Pimpl* t) { object_setInstanceVariable (self, "owner", t); }
        };

        AVCapturePhotoOutput* imageOutput = nil;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;
    };
   #endif
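
    // Fallback backend for macOS versions before 10.15, based on the deprecated
    // AVCaptureStillImageOutput API (hence the warning suppression around it).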
    JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
    class PreCatalinaStillImageOutput : public ImageOutputBase
    {
    public:
        void addImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput != nil)
                return;

            const auto codecType = []
            {
                if (@available (macOS 10.13, *))
                    return AVVideoCodecTypeJPEG;

                return AVVideoCodecJPEG;
            }();

            imageOutput = [[AVCaptureStillImageOutput alloc] init];
            auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: codecType, AVVideoCodecKey, nil];
            [imageOutput setOutputSettings: imageSettings];
            [imageSettings release];
            [s addOutput: imageOutput];
        }

        void removeImageCapture (AVCaptureSession* s) override
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        NSArray<AVCaptureConnection*>* getConnections() const override
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        void triggerImageCapture (Pimpl& p) override
        {
            if (auto* videoConnection = p.getVideoConnection())
            {
                [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
                                                         completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
                {
                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
                    p.imageCaptureFinished (image);
                }];
            }
        }

    private:
        AVCaptureStillImageOutput* imageOutput = nil;
    };
    JUCE_END_IGNORE_WARNINGS_GCC_LIKE

    //==============================================================================
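    // Capture-graph plumbing: these helpers attach and detach the still-image output,
    // the movie-file output and the selected camera input on the shared session.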
    void addImageCapture()
    {
        imageOutput->addImageCapture (session);
    }

    void addMovieCapture()
    {
        if (fileOutput == nil)
        {
            fileOutput = [[AVCaptureMovieFileOutput alloc] init];
            [session addOutput: fileOutput];
        }
    }

    void removeImageCapture()
    {
        imageOutput->removeImageCapture (session);
    }

    void removeMovieCapture()
    {
        if (fileOutput != nil)
        {
            [session removeOutput: fileOutput];
            [fileOutput release];
            fileOutput = nil;
        }
    }

    void removeCurrentSessionVideoInputs()
    {
        if (session != nil)
        {
            NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;

            for (AVCaptureDeviceInput* input : inputs)
                if ([input.device hasMediaType: AVMediaTypeVideo])
                    [session removeInput: input];
        }
    }

    void addInput()
    {
        if (currentInput == nil)
        {
            for (AVCaptureDevice* device : getCaptureDevices())
            {
                if (deviceName == nsStringToJuce ([device localizedName]))
                {
                    removeCurrentSessionVideoInputs();

                    NSError* err = nil;
                    AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
                                                                                               error: &err];

                    jassert (err == nil);

                    if ([session canAddInput: inputDevice])
                    {
                        [session addInput: inputDevice];
                        currentInput = inputDevice;
                    }
                    else
                    {
                        jassertfalse;
                        [inputDevice release];
                    }

                    return;
                }
            }
        }
    }

    void removeInput()
    {
        if (currentInput != nil)
        {
            [session removeInput: currentInput];
            [currentInput release];
            currentInput = nil;
        }
    }

    void refreshConnections()
    {
        [session beginConfiguration];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        addInput();
        addImageCapture();
        addMovieCapture();
        [session commitConfiguration];
    }

    void refreshIfNeeded()
    {
        if (getVideoConnection() == nullptr)
            refreshConnections();
    }

    AVCaptureConnection* getVideoConnection() const
    {
        auto* connections = imageOutput->getConnections();

        if (connections != nil)
            for (AVCaptureConnection* connection in connections)
                if ([connection isActive] && [connection isEnabled])
                    for (AVCaptureInputPort* port in [connection inputPorts])
                        if ([[port mediaType] isEqual: AVMediaTypeVideo])
                            return connection;

        return nil;
    }
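
    // Called by the active ImageOutputBase backend once a still image has been decoded.
    // Registered listeners are notified synchronously (which may immediately re-trigger
    // another capture), while the one-shot pictureTakenCallback is dispatched on the
    // message thread.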
    void imageCaptureFinished (const Image& image)
    {
        handleImageCapture (image);

        MessageManager::callAsync ([weakRef = WeakReference<Pimpl> { this }, image]() mutable
        {
            if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
                weakRef->pictureTakenCallback (image);
        });
    }

    void handleImageCapture (const Image& image)
    {
        const ScopedLock sl (listenerLock);

        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (! listeners.isEmpty())
            triggerImageCapture();
    }

    void triggerImageCapture()
    {
        refreshIfNeeded();
        startSession();

        if (auto* videoConnection = getVideoConnection())
            imageOutput->triggerImageCapture (*this);
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (owner.onErrorOccurred != nullptr)
            owner.onErrorOccurred (error);
    }

    //==============================================================================
    CameraDevice& owner;
    String deviceName;

    AVCaptureSession* session = nil;
    AVCaptureMovieFileOutput* fileOutput = nil;
    std::unique_ptr<ImageOutputBase> imageOutput;
    AVCaptureDeviceInput* currentInput = nil;

    id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
    String openingError;
    Time firstPresentationTime;
    bool isRecording = false;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback = nullptr;

    //==============================================================================
    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

//==============================================================================
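// Wraps the AVCaptureVideoPreviewLayer-backed NSView created by the Pimpl so it can be
// embedded in a JUCE component hierarchy.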
struct CameraDevice::ViewerComponent : public NSViewComponent
{
    ViewerComponent (CameraDevice& device)
    {
        setView (device.pimpl->createVideoCapturePreview());
    }

    ~ViewerComponent()
    {
        setView (nil);
    }

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
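
// A minimal usage sketch of the public CameraDevice API that the classes above back.
// Illustrative only (device index 0 and the default size limits are assumed):
//
//     StringArray deviceNames = CameraDevice::getAvailableDevices();
//
//     if (auto camera = std::unique_ptr<CameraDevice> (CameraDevice::openDevice (0)))
//     {
//         std::unique_ptr<Component> preview (camera->createViewerComponent());
//         camera->takeStillPicture ([] (const Image& picture) { /* use the picture */ });
//     }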

String CameraDevice::getFileExtension()
{
    return ".mov";
}