The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

584 lines
19KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2020 - Raw Material Software Limited
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 6 End-User License
  8. Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
  9. End User License Agreement: www.juce.com/juce-6-licence
  10. Privacy Policy: www.juce.com/juce-privacy-policy
  11. Or: You may also use this code under the terms of the GPL v3 (see
  12. www.gnu.org/licenses).
  13. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  14. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  15. DISCLAIMED.
  16. ==============================================================================
  17. */
  18. struct CameraDevice::Pimpl
  19. {
  20. #if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_15
  21. #define JUCE_USE_NEW_APPLE_CAMERA_API 1
  22. #else
  23. #define JUCE_USE_NEW_APPLE_CAMERA_API 0
  24. #endif
  25. #if JUCE_USE_NEW_APPLE_CAMERA_API
  26. class PostCatalinaPhotoOutput
  27. {
  28. public:
  29. PostCatalinaPhotoOutput()
  30. {
  31. static PhotoOutputDelegateClass cls;
  32. delegate.reset ([cls.createInstance() init]);
  33. }
  34. void addImageCapture (AVCaptureSession* s)
  35. {
  36. if (imageOutput != nil)
  37. return;
  38. imageOutput = [[AVCapturePhotoOutput alloc] init];
  39. [s addOutput: imageOutput];
  40. }
  41. void removeImageCapture (AVCaptureSession* s)
  42. {
  43. if (imageOutput == nil)
  44. return;
  45. [s removeOutput: imageOutput];
  46. [imageOutput release];
  47. imageOutput = nil;
  48. }
  49. NSArray<AVCaptureConnection*>* getConnections() const
  50. {
  51. if (imageOutput != nil)
  52. return imageOutput.connections;
  53. return nil;
  54. }
  55. void triggerImageCapture (Pimpl& p)
  56. {
  57. if (imageOutput == nil)
  58. return;
  59. PhotoOutputDelegateClass::setOwner (delegate.get(), &p);
  60. [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
  61. delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
  62. }
  63. static NSArray* getAvailableDevices()
  64. {
  65. auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
  66. AVCaptureDeviceTypeExternalUnknown]
  67. mediaType: AVMediaTypeVideo
  68. position: AVCaptureDevicePositionUnspecified];
  69. return [discovery devices];
  70. }
  71. private:
  72. class PhotoOutputDelegateClass : public ObjCClass<NSObject>
  73. {
  74. public:
  75. PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
  76. {
  77. addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
  78. addIvar<Pimpl*> ("owner");
  79. registerClass();
  80. }
  81. static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
  82. {
  83. if (error != nil)
  84. {
  85. String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
  86. ignoreUnused (errorString);
  87. JUCE_CAMERA_LOG ("Still picture capture failed, error: " + errorString);
  88. jassertfalse;
  89. return;
  90. }
  91. auto* imageData = [photo fileDataRepresentation];
  92. auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
  93. getOwner (self).imageCaptureFinished (image);
  94. }
  95. static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
  96. static void setOwner (id self, Pimpl* t) { object_setInstanceVariable (self, "owner", t); }
  97. };
  98. AVCapturePhotoOutput* imageOutput = nil;
  99. std::unique_ptr<NSObject, NSObjectDeleter> delegate;
  100. };
  101. #else
  102. struct PreCatalinaStillImageOutput
  103. {
  104. public:
  105. void addImageCapture (AVCaptureSession* s)
  106. {
  107. if (imageOutput != nil)
  108. return;
  109. const auto codecType =
  110. #if defined (MAC_OS_X_VERSION_10_13) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13
  111. AVVideoCodecTypeJPEG;
  112. #else
  113. AVVideoCodecJPEG;
  114. #endif
  115. imageOutput = [[AVCaptureStillImageOutput alloc] init];
  116. auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: codecType, AVVideoCodecKey, nil];
  117. [imageOutput setOutputSettings: imageSettings];
  118. [imageSettings release];
  119. [s addOutput: imageOutput];
  120. }
  121. void removeImageCapture (AVCaptureSession* s)
  122. {
  123. if (imageOutput == nil)
  124. return;
  125. [s removeOutput: imageOutput];
  126. [imageOutput release];
  127. imageOutput = nil;
  128. }
  129. NSArray<AVCaptureConnection*>* getConnections() const
  130. {
  131. if (imageOutput != nil)
  132. return imageOutput.connections;
  133. return nil;
  134. }
  135. void triggerImageCapture (Pimpl& p)
  136. {
  137. if (auto* videoConnection = p.getVideoConnection())
  138. {
  139. [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
  140. completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
  141. {
  142. if (error != nil)
  143. {
  144. JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
  145. jassertfalse;
  146. return;
  147. }
  148. auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
  149. auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
  150. p.imageCaptureFinished (image);
  151. }];
  152. }
  153. }
  154. static NSArray* getAvailableDevices()
  155. {
  156. return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
  157. }
  158. private:
  159. AVCaptureStillImageOutput* imageOutput = nil;
  160. };
  161. #endif
  162. Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
  163. int /*minWidth*/, int /*minHeight*/,
  164. int /*maxWidth*/, int /*maxHeight*/,
  165. bool useHighQuality)
  166. : owner (ownerToUse),
  167. deviceName (deviceNameToUse)
  168. {
  169. session = [[AVCaptureSession alloc] init];
  170. session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
  171. : AVCaptureSessionPresetMedium;
  172. refreshConnections();
  173. static DelegateClass cls;
  174. callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
  175. DelegateClass::setOwner (callbackDelegate, this);
  176. JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
  177. [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
  178. selector: @selector (captureSessionRuntimeError:)
  179. name: AVCaptureSessionRuntimeErrorNotification
  180. object: session];
  181. JUCE_END_IGNORE_WARNINGS_GCC_LIKE
  182. }
  183. ~Pimpl()
  184. {
  185. [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];
  186. [session stopRunning];
  187. removeInput();
  188. removeImageCapture();
  189. removeMovieCapture();
  190. [session release];
  191. [callbackDelegate release];
  192. }
  193. //==============================================================================
  194. bool openedOk() const noexcept { return openingError.isEmpty(); }
  195. void startSession()
  196. {
  197. if (! [session isRunning])
  198. [session startRunning];
  199. }
  200. void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
  201. {
  202. if (pictureTakenCallbackToUse == nullptr)
  203. {
  204. jassertfalse;
  205. return;
  206. }
  207. pictureTakenCallback = std::move (pictureTakenCallbackToUse);
  208. triggerImageCapture();
  209. }
  210. void startRecordingToFile (const File& file, int /*quality*/)
  211. {
  212. stopRecording();
  213. refreshIfNeeded();
  214. firstPresentationTime = Time::getCurrentTime();
  215. file.deleteFile();
  216. startSession();
  217. isRecording = true;
  218. [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
  219. recordingDelegate: callbackDelegate];
  220. }
  221. void stopRecording()
  222. {
  223. if (isRecording)
  224. {
  225. [fileOutput stopRecording];
  226. isRecording = false;
  227. }
  228. }
  229. Time getTimeOfFirstRecordedFrame() const
  230. {
  231. return firstPresentationTime;
  232. }
  233. void addListener (CameraDevice::Listener* listenerToAdd)
  234. {
  235. const ScopedLock sl (listenerLock);
  236. listeners.add (listenerToAdd);
  237. if (listeners.size() == 1)
  238. triggerImageCapture();
  239. }
  240. void removeListener (CameraDevice::Listener* listenerToRemove)
  241. {
  242. const ScopedLock sl (listenerLock);
  243. listeners.remove (listenerToRemove);
  244. }
  245. static StringArray getAvailableDevices()
  246. {
  247. auto* devices = decltype (imageOutput)::getAvailableDevices();
  248. StringArray results;
  249. for (AVCaptureDevice* device : devices)
  250. results.add (nsStringToJuce ([device localizedName]));
  251. return results;
  252. }
  253. AVCaptureSession* getCaptureSession()
  254. {
  255. return session;
  256. }
  257. NSView* createVideoCapturePreview()
  258. {
  259. // The video preview must be created before the capture session is
  260. // started. Make sure you haven't called `addListener`,
  261. // `startRecordingToFile`, or `takeStillPicture` before calling this
  262. // function.
  263. jassert (! [session isRunning]);
  264. startSession();
  265. JUCE_AUTORELEASEPOOL
  266. {
  267. NSView* view = [[NSView alloc] init];
  268. [view setLayer: [AVCaptureVideoPreviewLayer layerWithSession: getCaptureSession()]];
  269. return view;
  270. }
  271. }
  272. private:
  273. //==============================================================================
  274. struct DelegateClass : public ObjCClass<NSObject>
  275. {
  276. DelegateClass() : ObjCClass<NSObject> ("JUCECameraDelegate_")
  277. {
  278. addIvar<Pimpl*> ("owner");
  279. addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));
  280. addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL: fromConnections:), didStartRecordingToOutputFileAtURL, "v@:@@@");
  281. addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL: fromConnections:), didPauseRecordingToOutputFileAtURL, "v@:@@@");
  282. addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL: fromConnections:), didResumeRecordingToOutputFileAtURL, "v@:@@@");
  283. addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL, "v@:@@@@");
  284. JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
  285. addMethod (@selector (captureSessionRuntimeError:), sessionRuntimeError, "v@:@");
  286. JUCE_END_IGNORE_WARNINGS_GCC_LIKE
  287. registerClass();
  288. }
  289. static void setOwner (id self, Pimpl* owner) { object_setInstanceVariable (self, "owner", owner); }
  290. static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
  291. private:
  292. static void didStartRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  293. static void didPauseRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  294. static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  295. static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}
  296. static void sessionRuntimeError (id self, SEL, NSNotification* notification)
  297. {
  298. JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));
  299. NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
  300. auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
  301. getOwner (self).cameraSessionRuntimeError (errorString);
  302. }
  303. };
  304. //==============================================================================
  305. void addImageCapture()
  306. {
  307. imageOutput.addImageCapture (session);
  308. }
  309. void addMovieCapture()
  310. {
  311. if (fileOutput == nil)
  312. {
  313. fileOutput = [[AVCaptureMovieFileOutput alloc] init];
  314. [session addOutput: fileOutput];
  315. }
  316. }
  317. void removeImageCapture()
  318. {
  319. imageOutput.removeImageCapture (session);
  320. }
  321. void removeMovieCapture()
  322. {
  323. if (fileOutput != nil)
  324. {
  325. [session removeOutput: fileOutput];
  326. [fileOutput release];
  327. fileOutput = nil;
  328. }
  329. }
  330. void removeCurrentSessionVideoInputs()
  331. {
  332. if (session != nil)
  333. {
  334. NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;
  335. for (AVCaptureDeviceInput* input : inputs)
  336. if ([input.device hasMediaType: AVMediaTypeVideo])
  337. [session removeInput:input];
  338. }
  339. }
  340. void addInput()
  341. {
  342. if (currentInput == nil)
  343. {
  344. auto* availableDevices = decltype (imageOutput)::getAvailableDevices();
  345. for (AVCaptureDevice* device : availableDevices)
  346. {
  347. if (deviceName == nsStringToJuce ([device localizedName]))
  348. {
  349. removeCurrentSessionVideoInputs();
  350. NSError* err = nil;
  351. AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
  352. error: &err];
  353. jassert (err == nil);
  354. if ([session canAddInput: inputDevice])
  355. {
  356. [session addInput: inputDevice];
  357. currentInput = inputDevice;
  358. }
  359. else
  360. {
  361. jassertfalse;
  362. [inputDevice release];
  363. }
  364. return;
  365. }
  366. }
  367. }
  368. }
  369. void removeInput()
  370. {
  371. if (currentInput != nil)
  372. {
  373. [session removeInput: currentInput];
  374. [currentInput release];
  375. currentInput = nil;
  376. }
  377. }
  378. void refreshConnections()
  379. {
  380. [session beginConfiguration];
  381. removeInput();
  382. removeImageCapture();
  383. removeMovieCapture();
  384. addInput();
  385. addImageCapture();
  386. addMovieCapture();
  387. [session commitConfiguration];
  388. }
  389. void refreshIfNeeded()
  390. {
  391. if (getVideoConnection() == nullptr)
  392. refreshConnections();
  393. }
  394. AVCaptureConnection* getVideoConnection() const
  395. {
  396. auto* connections = imageOutput.getConnections();
  397. if (connections != nil)
  398. for (AVCaptureConnection* connection in connections)
  399. if ([connection isActive] && [connection isEnabled])
  400. for (AVCaptureInputPort* port in [connection inputPorts])
  401. if ([[port mediaType] isEqual: AVMediaTypeVideo])
  402. return connection;
  403. return nil;
  404. }
  405. void imageCaptureFinished (const Image& image)
  406. {
  407. handleImageCapture (image);
  408. WeakReference<Pimpl> weakRef (this);
  409. MessageManager::callAsync ([weakRef, image]() mutable
  410. {
  411. if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
  412. weakRef->pictureTakenCallback (image);
  413. });
  414. }
  415. void handleImageCapture (const Image& image)
  416. {
  417. const ScopedLock sl (listenerLock);
  418. listeners.call ([=] (Listener& l) { l.imageReceived (image); });
  419. if (! listeners.isEmpty())
  420. triggerImageCapture();
  421. }
  422. void triggerImageCapture()
  423. {
  424. refreshIfNeeded();
  425. startSession();
  426. if (auto* videoConnection = getVideoConnection())
  427. imageOutput.triggerImageCapture (*this);
  428. }
  429. void cameraSessionRuntimeError (const String& error)
  430. {
  431. JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);
  432. if (owner.onErrorOccurred != nullptr)
  433. owner.onErrorOccurred (error);
  434. }
  435. //==============================================================================
  436. CameraDevice& owner;
  437. String deviceName;
  438. AVCaptureSession* session = nil;
  439. AVCaptureMovieFileOutput* fileOutput = nil;
  440. #if JUCE_USE_NEW_APPLE_CAMERA_API
  441. PostCatalinaPhotoOutput imageOutput;
  442. #else
  443. PreCatalinaStillImageOutput imageOutput;
  444. #endif
  445. AVCaptureDeviceInput* currentInput = nil;
  446. id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
  447. String openingError;
  448. Time firstPresentationTime;
  449. bool isRecording = false;
  450. CriticalSection listenerLock;
  451. ListenerList<Listener> listeners;
  452. std::function<void (const Image&)> pictureTakenCallback = nullptr;
  453. //==============================================================================
  454. JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
  455. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  456. };
  457. //==============================================================================
/** A component that displays the camera's live video preview.

    Wraps the NSView created by Pimpl::createVideoCapturePreview() in a JUCE
    NSViewComponent so it can be placed in a JUCE component hierarchy.
*/
struct CameraDevice::ViewerComponent : public NSViewComponent
{
    ViewerComponent (CameraDevice& device)
    {
        // NOTE(review): createVideoCapturePreview() asserts that the session
        // hasn't been started yet, so this viewer must be created before any
        // capture begins.
        setView (device.pimpl->createVideoCapturePreview());
    }

    ~ViewerComponent()
    {
        // Release the preview view before this component is destroyed.
        setView (nil);
    }

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
  470. String CameraDevice::getFileExtension()
  471. {
  472. return ".mov";
  473. }
  474. #undef JUCE_USE_NEW_APPLE_CAMERA_API