The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

582 lines
19KB

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2020 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 6 End-User License
   Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).

   End User License Agreement: www.juce.com/juce-6-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
//==============================================================================
// macOS implementation of CameraDevice: wraps an AVCaptureSession plus one
// still-image output, one movie-file output, and one device input. Still
// capture uses AVCapturePhotoOutput on macOS 10.15+ and the deprecated
// AVCaptureStillImageOutput on earlier systems, selected at compile time below.
struct CameraDevice::Pimpl
{
    // Pick the capture API from the deployment target: AVCaptureStillImageOutput
    // was deprecated in favour of AVCapturePhotoOutput in macOS 10.15 (Catalina).
   #if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_15
    #define JUCE_USE_NEW_APPLE_CAMERA_API 1
   #else
    #define JUCE_USE_NEW_APPLE_CAMERA_API 0
   #endif

   #if JUCE_USE_NEW_APPLE_CAMERA_API
    //==============================================================================
    // Still-image capture built on AVCapturePhotoOutput (macOS 10.15+).
    class PostCatalinaPhotoOutput
    {
    public:
        PostCatalinaPhotoOutput()
        {
            // The ObjC class is registered with the runtime once per process;
            // each wrapper owns its own delegate *instance*.
            static PhotoOutputDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
        }

        // Lazily creates the photo output and attaches it to the session.
        // The output is retained here and released in removeImageCapture().
        void addImageCapture (AVCaptureSession* s)
        {
            if (imageOutput != nil)
                return;

            imageOutput = [[AVCapturePhotoOutput alloc] init];
            [s addOutput: imageOutput];
        }

        // Detaches and releases the photo output; no-op if it was never added.
        void removeImageCapture (AVCaptureSession* s)
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        // Returns the output's connections, or nil if no output exists yet.
        NSArray<AVCaptureConnection*>* getConnections() const
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        // Requests one photo with default settings. The result is delivered
        // asynchronously to the delegate, which forwards it to p via
        // imageCaptureFinished().
        void triggerImageCapture (Pimpl& p)
        {
            if (imageOutput == nil)
                return;

            // Point the delegate back at the owning Pimpl before capturing.
            PhotoOutputDelegateClass::setOwner (delegate.get(), &p);

            [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                         delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
        }

        // Enumerates video-capable devices via the 10.15+ discovery-session API.
        static NSArray* getAvailableDevices()
        {
            auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                                                                  AVCaptureDeviceTypeExternalUnknown]
                                                                                     mediaType: AVMediaTypeVideo
                                                                                      position: AVCaptureDevicePositionUnspecified];
            return [discovery devices];
        }

    private:
        // Runtime-generated NSObject subclass acting as the
        // AVCapturePhotoCaptureDelegate; a Pimpl* stashed in the "owner" ivar
        // lets the static callback reach the C++ object.
        class PhotoOutputDelegateClass : public ObjCClass<NSObject>
        {
        public:
            PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
            {
                addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                addIvar<Pimpl*> ("owner");
                registerClass();
            }

            // Called by AVFoundation when the photo is ready (or failed).
            static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
            {
                if (error != nil)
                {
                    // NOTE(review): error is known to be non-nil inside this
                    // branch, so the ternary's String() arm is unreachable.
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);
                    JUCE_CAMERA_LOG ("Still picture capture failed, error: " + errorString);
                    jassertfalse;
                    return;
                }

                // Decode the captured file data into a juce::Image and hand it
                // to the owning Pimpl.
                auto* imageData = [photo fileDataRepresentation];
                auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                getOwner (self).imageCaptureFinished (image);
            }

            static Pimpl& getOwner (id self)          { return *getIvar<Pimpl*> (self, "owner"); }
            static void setOwner (id self, Pimpl* t)  { object_setInstanceVariable (self, "owner", t); }
        };

        AVCapturePhotoOutput* imageOutput = nil;                // owned; released in removeImageCapture()
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;    // owned delegate instance
    };
   #else
    //==============================================================================
    // Still-image capture for pre-Catalina systems, using the (now deprecated)
    // AVCaptureStillImageOutput API. Mirrors PostCatalinaPhotoOutput's interface.
    struct PreCatalinaStillImageOutput
    {
    public:
        // Lazily creates a JPEG still-image output and attaches it to the session.
        void addImageCapture (AVCaptureSession* s)
        {
            if (imageOutput != nil)
                return;

            // The JPEG codec constant was renamed in the 10.13 SDK.
            const auto codecType =
                           #if defined (MAC_OS_X_VERSION_10_13) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13
                            AVVideoCodecTypeJPEG;
                           #else
                            AVVideoCodecJPEG;
                           #endif

            imageOutput = [[AVCaptureStillImageOutput alloc] init];
            auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: codecType, AVVideoCodecKey, nil];
            [imageOutput setOutputSettings: imageSettings];
            [imageSettings release];
            [s addOutput: imageOutput];
        }

        // Detaches and releases the output; no-op if it was never added.
        void removeImageCapture (AVCaptureSession* s)
        {
            if (imageOutput == nil)
                return;

            [s removeOutput: imageOutput];
            [imageOutput release];
            imageOutput = nil;
        }

        // Returns the output's connections, or nil if no output exists yet.
        NSArray<AVCaptureConnection*>* getConnections() const
        {
            if (imageOutput != nil)
                return imageOutput.connections;

            return nil;
        }

        // Requests one still frame from the active video connection; the JPEG
        // data is decoded and forwarded to p on completion.
        void triggerImageCapture (Pimpl& p)
        {
            if (auto* videoConnection = p.getVideoConnection())
            {
                // NOTE(review): the completion handler captures p by reference
                // and runs asynchronously — this assumes the Pimpl outlives any
                // in-flight capture request; confirm against the owner's lifetime.
                [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
                                                         completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
                {
                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
                    p.imageCaptureFinished (image);
                }];
            }
        }

        // Enumerates video-capable devices via the pre-10.15 class method.
        static NSArray* getAvailableDevices()
        {
            return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
        }

    private:
        AVCaptureStillImageOutput* imageOutput = nil;   // owned; released in removeImageCapture()
    };
   #endif

    //==============================================================================
    // deviceNameToUse must match a localized name returned by
    // getAvailableDevices(); the index and min/max size arguments are unused in
    // this macOS implementation.
    Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/,
           int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          deviceName (deviceNameToUse)
    {
        session = [[AVCaptureSession alloc] init];

        session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
                                               : AVCaptureSessionPresetMedium;

        // Wire up the input and both outputs before any capture is requested.
        refreshConnections();

        static DelegateClass cls;
        callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
        DelegateClass::setOwner (callbackDelegate, this);

        // Listen for session-level failures so they can be forwarded to
        // owner.onErrorOccurred via cameraSessionRuntimeError().
        [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
                                                 selector: DelegateClass::runtimeErrorSel()
                                                     name: AVCaptureSessionRuntimeErrorNotification
                                                   object: session];
    }

    ~Pimpl()
    {
        // Unhook the observer first so no late notification reaches a
        // half-destroyed object, then tear the session down before releasing.
        [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];

        [session stopRunning];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        [session release];
        [callbackDelegate release];
    }

    //==============================================================================
    // NOTE(review): openingError is never assigned anywhere in this struct, so
    // this currently always reports success — confirm whether it is set via
    // some path not visible here.
    bool openedOk() const noexcept       { return openingError.isEmpty(); }

    // Starts the capture session if it isn't already running.
    void startSession()
    {
        if (! [session isRunning])
            [session startRunning];
    }

    // Stores the callback (invoked later on the message thread with the
    // captured frame) and kicks off an asynchronous still capture.
    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerImageCapture();
    }

    // Begins movie recording into the given file, overwriting it if it exists.
    // The quality argument is unused here (the preset was fixed at construction).
    void startRecordingToFile (const File& file, int /*quality*/)
    {
        stopRecording();
        refreshIfNeeded();
        firstPresentationTime = Time::getCurrentTime();   // NOTE(review): wall-clock at request time, not the first frame's PTS
        file.deleteFile();

        startSession();
        isRecording = true;
        [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
                                recordingDelegate: callbackDelegate];
    }

    // Stops an in-progress recording; safe to call when not recording.
    void stopRecording()
    {
        if (isRecording)
        {
            [fileOutput stopRecording];
            isRecording = false;
        }
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstPresentationTime;
    }

    // Registering the first listener starts a continuous stream of stills:
    // handleImageCapture() re-triggers capture while any listeners remain.
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerImageCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

    // Returns the localized names of all video-capable capture devices, using
    // whichever output class was selected by JUCE_USE_NEW_APPLE_CAMERA_API.
    static StringArray getAvailableDevices()
    {
        auto* devices = decltype (imageOutput)::getAvailableDevices();

        StringArray results;

        for (AVCaptureDevice* device : devices)
            results.add (nsStringToJuce ([device localizedName]));

        return results;
    }

    AVCaptureSession* getCaptureSession()
    {
        return session;
    }

    // Returns a new NSView whose layer shows the live camera preview. The
    // caller takes ownership of the returned (alloc'd) view.
    NSView* createVideoCapturePreview()
    {
        // The video preview must be created before the capture session is
        // started. Make sure you haven't called `addListener`,
        // `startRecordingToFile`, or `takeStillPicture` before calling this
        // function.
        jassert (! [session isRunning]);
        startSession();

        JUCE_AUTORELEASEPOOL
        {
            NSView* view = [[NSView alloc] init];
            [view setLayer: [AVCaptureVideoPreviewLayer layerWithSession: getCaptureSession()]];
            return view;
        }
    }

private:
    //==============================================================================
    // Runtime-generated NSObject subclass acting as the
    // AVCaptureFileOutputRecordingDelegate and as the observer for session
    // runtime-error notifications; an "owner" ivar links back to the Pimpl.
    struct DelegateClass : public ObjCClass<NSObject>
    {
        DelegateClass() : ObjCClass<NSObject> ("JUCECameraDelegate_")
        {
            addIvar<Pimpl*> ("owner");
            addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));

            addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL: fromConnections:),        didStartRecordingToOutputFileAtURL,   "v@:@@@");
            addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL: fromConnections:),        didPauseRecordingToOutputFileAtURL,   "v@:@@@");
            addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL: fromConnections:),       didResumeRecordingToOutputFileAtURL,  "v@:@@@");
            addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL, "v@:@@@@");
            addMethod (runtimeErrorSel(), sessionRuntimeError, "v@:@");

            registerClass();
        }

        static void setOwner (id self, Pimpl* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static Pimpl& getOwner (id self)               { return *getIvar<Pimpl*> (self, "owner"); }

        // Custom selector used when registering for
        // AVCaptureSessionRuntimeErrorNotification.
        static SEL runtimeErrorSel()                   { return NSSelectorFromString (nsStringLiteral ("captureSessionRuntimeError:")); }

    private:
        // Recording lifecycle callbacks — intentionally empty.
        static void didStartRecordingToOutputFileAtURL   (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didPauseRecordingToOutputFileAtURL   (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void didResumeRecordingToOutputFileAtURL  (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
        static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}

        // Forwards a session runtime error (if any) to the owning Pimpl.
        static void sessionRuntimeError (id self, SEL, NSNotification* notification)
        {
            JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

            NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
            auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
            getOwner (self).cameraSessionRuntimeError (errorString);
        }
    };

    //==============================================================================
    void addImageCapture()
    {
        imageOutput.addImageCapture (session);
    }

    // Lazily creates the movie-file output and attaches it to the session.
    void addMovieCapture()
    {
        if (fileOutput == nil)
        {
            fileOutput = [[AVCaptureMovieFileOutput alloc] init];
            [session addOutput: fileOutput];
        }
    }

    void removeImageCapture()
    {
        imageOutput.removeImageCapture (session);
    }

    // Detaches and releases the movie-file output; no-op if never added.
    void removeMovieCapture()
    {
        if (fileOutput != nil)
        {
            [session removeOutput: fileOutput];
            [fileOutput release];
            fileOutput = nil;
        }
    }

    // Strips any video inputs the session already has, so a newly selected
    // device doesn't conflict with a previous one.
    void removeCurrentSessionVideoInputs()
    {
        if (session != nil)
        {
            NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;

            for (AVCaptureDeviceInput* input : inputs)
                if ([input.device hasMediaType: AVMediaTypeVideo])
                    [session removeInput:input];
        }
    }

    // Finds the device whose localized name matches deviceName and installs it
    // as the session's input. The created input is retained in currentInput and
    // released in removeInput(). No-op if an input is already set, and silently
    // does nothing if no device matches the name.
    void addInput()
    {
        if (currentInput == nil)
        {
            auto* availableDevices = decltype (imageOutput)::getAvailableDevices();

            for (AVCaptureDevice* device : availableDevices)
            {
                if (deviceName == nsStringToJuce ([device localizedName]))
                {
                    removeCurrentSessionVideoInputs();

                    NSError* err = nil;
                    AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
                                                                                               error: &err];

                    jassert (err == nil);

                    if ([session canAddInput: inputDevice])
                    {
                        [session addInput: inputDevice];
                        currentInput = inputDevice;
                    }
                    else
                    {
                        jassertfalse;
                        [inputDevice release];
                    }

                    return;
                }
            }
        }
    }

    // Detaches and releases the current device input, if any.
    void removeInput()
    {
        if (currentInput != nil)
        {
            [session removeInput: currentInput];
            [currentInput release];
            currentInput = nil;
        }
    }

    // Rebuilds the session graph (input + both outputs) inside a single
    // begin/commitConfiguration transaction.
    void refreshConnections()
    {
        [session beginConfiguration];
        removeInput();
        removeImageCapture();
        removeMovieCapture();
        addInput();
        addImageCapture();
        addMovieCapture();
        [session commitConfiguration];
    }

    // Rebuilds the graph only when no usable video connection exists.
    void refreshIfNeeded()
    {
        if (getVideoConnection() == nullptr)
            refreshConnections();
    }

    // Returns the first active, enabled connection of the image output that
    // carries a video media-type port, or nil if there is none.
    AVCaptureConnection* getVideoConnection() const
    {
        auto* connections = imageOutput.getConnections();

        if (connections != nil)
            for (AVCaptureConnection* connection in connections)
                if ([connection isActive] && [connection isEnabled])
                    for (AVCaptureInputPort* port in [connection inputPorts])
                        if ([[port mediaType] isEqual: AVMediaTypeVideo])
                            return connection;

        return nil;
    }

    // Invoked by the still-capture completion path: notifies listeners
    // synchronously, then posts pictureTakenCallback to the message thread.
    // The WeakReference guards against this Pimpl being deleted first.
    void imageCaptureFinished (const Image& image)
    {
        handleImageCapture (image);

        WeakReference<Pimpl> weakRef (this);
        MessageManager::callAsync ([weakRef, image]() mutable
        {
            if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
                weakRef->pictureTakenCallback (image);
        });
    }

    // Delivers the frame to all listeners and, while any remain, re-triggers
    // capture to keep the still-image stream running.
    void handleImageCapture (const Image& image)
    {
        const ScopedLock sl (listenerLock);

        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (! listeners.isEmpty())
            triggerImageCapture();
    }

    // Ensures the graph and session are live, then requests a still frame if a
    // usable video connection exists.
    void triggerImageCapture()
    {
        refreshIfNeeded();

        startSession();

        if (auto* videoConnection = getVideoConnection())
            imageOutput.triggerImageCapture (*this);
    }

    // Forwards a session runtime error to the client's error callback.
    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (owner.onErrorOccurred != nullptr)
            owner.onErrorOccurred (error);
    }

    //==============================================================================
    CameraDevice& owner;                                        // the public-facing device that owns this Pimpl
    String deviceName;                                          // localized name of the device to open

    AVCaptureSession* session = nil;                            // owned; released in the destructor
    AVCaptureMovieFileOutput* fileOutput = nil;                 // owned; released in removeMovieCapture()
   #if JUCE_USE_NEW_APPLE_CAMERA_API
    PostCatalinaPhotoOutput imageOutput;
   #else
    PreCatalinaStillImageOutput imageOutput;
   #endif
    AVCaptureDeviceInput* currentInput = nil;                   // owned; released in removeInput()

    id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;  // owned; released in the destructor
    String openingError;                                        // see openedOk() note above
    Time firstPresentationTime;                                 // wall-clock time recording was started
    bool isRecording = false;

    CriticalSection listenerLock;                               // guards listeners
    ListenerList<Listener> listeners;
    std::function<void (const Image&)> pictureTakenCallback = nullptr;

    //==============================================================================
    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
    JUCE_DECLARE_NON_COPYABLE       (Pimpl)
};
  454. //==============================================================================
  455. struct CameraDevice::ViewerComponent : public NSViewComponent
  456. {
  457. ViewerComponent (CameraDevice& device)
  458. {
  459. setView (device.pimpl->createVideoCapturePreview());
  460. }
  461. ~ViewerComponent()
  462. {
  463. setView (nil);
  464. }
  465. JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
  466. };
  467. String CameraDevice::getFileExtension()
  468. {
  469. return ".mov";
  470. }
  471. #undef JUCE_USE_NEW_APPLE_CAMERA_API