The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

556 lines
19KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2020 - Raw Material Software Limited
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 6 End-User License
  8. Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
  9. End User License Agreement: www.juce.com/juce-6-licence
  10. Privacy Policy: www.juce.com/juce-privacy-policy
  11. Or: You may also use this code under the terms of the GPL v3 (see
  12. www.gnu.org/licenses).
  13. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  14. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  15. DISCLAIMED.
  16. ==============================================================================
  17. */
  18. struct CameraDevice::Pimpl
  19. {
  20. #if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_15
  21. class PostCatalinaPhotoOutput
  22. {
  23. public:
  24. PostCatalinaPhotoOutput()
  25. {
  26. static PhotoOutputDelegateClass cls;
  27. delegate.reset ([cls.createInstance() init]);
  28. }
  29. void addImageCapture (AVCaptureSession* s)
  30. {
  31. if (imageOutput != nil)
  32. return;
  33. imageOutput = [[AVCapturePhotoOutput alloc] init];
  34. [s addOutput: imageOutput];
  35. }
  36. void removeImageCapture (AVCaptureSession* s)
  37. {
  38. if (imageOutput == nil)
  39. return;
  40. [s removeOutput: imageOutput];
  41. [imageOutput release];
  42. imageOutput = nil;
  43. }
  44. NSArray<AVCaptureConnection*>* getConnections() const
  45. {
  46. if (imageOutput != nil)
  47. return imageOutput.connections;
  48. return nil;
  49. }
  50. void triggerImageCapture (Pimpl& p)
  51. {
  52. if (imageOutput == nil)
  53. return;
  54. PhotoOutputDelegateClass::setOwner (delegate.get(), &p);
  55. [imageOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
  56. delegate: id<AVCapturePhotoCaptureDelegate> (delegate.get())];
  57. }
  58. static NSArray* getAvailableDevices()
  59. {
  60. auto* discovery = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: @[AVCaptureDeviceTypeBuiltInWideAngleCamera,
  61. AVCaptureDeviceTypeExternalUnknown]
  62. mediaType: AVMediaTypeVideo
  63. position: AVCaptureDevicePositionUnspecified];
  64. return [discovery devices];
  65. }
  66. private:
  67. class PhotoOutputDelegateClass : public ObjCClass<NSObject>
  68. {
  69. public:
  70. PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
  71. {
  72. addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
  73. addIvar<Pimpl*> ("owner");
  74. registerClass();
  75. }
  76. static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* photo, NSError* error)
  77. {
  78. String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
  79. ignoreUnused (errorString);
  80. JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);
  81. if (error != nil)
  82. {
  83. JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
  84. jassertfalse;
  85. return;
  86. }
  87. auto* imageData = [photo fileDataRepresentation];
  88. auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
  89. getOwner (self).imageCaptureFinished (image);;
  90. }
  91. static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
  92. static void setOwner (id self, Pimpl* t) { object_setInstanceVariable (self, "owner", t); }
  93. };
  94. AVCapturePhotoOutput* imageOutput = nil;
  95. std::unique_ptr<NSObject, NSObjectDeleter> delegate;
  96. };
  97. #else
  98. struct PreCatalinaStillImageOutput
  99. {
  100. public:
  101. void addImageCapture (AVCaptureSession* s)
  102. {
  103. if (imageOutput != nil)
  104. return;
  105. imageOutput = [[AVCaptureStillImageOutput alloc] init];
  106. auto imageSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
  107. [imageOutput setOutputSettings: imageSettings];
  108. [imageSettings release];
  109. [s addOutput: imageOutput];
  110. }
  111. void removeImageCapture (AVCaptureSession* s)
  112. {
  113. if (imageOutput == nil)
  114. return;
  115. [s removeOutput: imageOutput];
  116. [imageOutput release];
  117. imageOutput = nil;
  118. }
  119. NSArray<AVCaptureConnection*>* getConnections() const
  120. {
  121. if (imageOutput != nil)
  122. return imageOutput.connections;
  123. return nil;
  124. }
  125. void triggerImageCapture (Pimpl& p)
  126. {
  127. if (auto* videoConnection = p.getVideoConnection())
  128. {
  129. [imageOutput captureStillImageAsynchronouslyFromConnection: videoConnection
  130. completionHandler: ^(CMSampleBufferRef sampleBuffer, NSError* error)
  131. {
  132. if (error != nil)
  133. {
  134. JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
  135. jassertfalse;
  136. return;
  137. }
  138. auto* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: sampleBuffer];
  139. auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);
  140. p.imageCaptureFinished (image);
  141. }];
  142. }
  143. }
  144. static NSArray* getAvailableDevices()
  145. {
  146. return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
  147. }
  148. private:
  149. AVCaptureStillImageOutput* imageOutput = nil;
  150. };
  151. #endif
  152. Pimpl (CameraDevice& ownerToUse, const String& deviceNameToUse, int /*index*/,
  153. int /*minWidth*/, int /*minHeight*/,
  154. int /*maxWidth*/, int /*maxHeight*/,
  155. bool useHighQuality)
  156. : owner (ownerToUse),
  157. deviceName (deviceNameToUse)
  158. {
  159. session = [[AVCaptureSession alloc] init];
  160. session.sessionPreset = useHighQuality ? AVCaptureSessionPresetHigh
  161. : AVCaptureSessionPresetMedium;
  162. refreshConnections();
  163. static DelegateClass cls;
  164. callbackDelegate = (id<AVCaptureFileOutputRecordingDelegate>) [cls.createInstance() init];
  165. DelegateClass::setOwner (callbackDelegate, this);
  166. SEL runtimeErrorSel = NSSelectorFromString (nsStringLiteral ("captureSessionRuntimeError:"));
  167. [[NSNotificationCenter defaultCenter] addObserver: callbackDelegate
  168. selector: runtimeErrorSel
  169. name: AVCaptureSessionRuntimeErrorNotification
  170. object: session];
  171. }
  172. ~Pimpl()
  173. {
  174. [[NSNotificationCenter defaultCenter] removeObserver: callbackDelegate];
  175. [session stopRunning];
  176. removeInput();
  177. removeImageCapture();
  178. removeMovieCapture();
  179. [session release];
  180. [callbackDelegate release];
  181. }
  182. //==============================================================================
  183. bool openedOk() const noexcept { return openingError.isEmpty(); }
  184. void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
  185. {
  186. if (pictureTakenCallbackToUse == nullptr)
  187. {
  188. jassertfalse;
  189. return;
  190. }
  191. pictureTakenCallback = std::move (pictureTakenCallbackToUse);
  192. triggerImageCapture();
  193. }
  194. void startRecordingToFile (const File& file, int /*quality*/)
  195. {
  196. stopRecording();
  197. refreshIfNeeded();
  198. firstPresentationTime = Time::getCurrentTime();
  199. file.deleteFile();
  200. isRecording = true;
  201. [fileOutput startRecordingToOutputFileURL: createNSURLFromFile (file)
  202. recordingDelegate: callbackDelegate];
  203. }
  204. void stopRecording()
  205. {
  206. if (isRecording)
  207. {
  208. [fileOutput stopRecording];
  209. isRecording = false;
  210. }
  211. }
  212. Time getTimeOfFirstRecordedFrame() const
  213. {
  214. return firstPresentationTime;
  215. }
  216. void addListener (CameraDevice::Listener* listenerToAdd)
  217. {
  218. const ScopedLock sl (listenerLock);
  219. listeners.add (listenerToAdd);
  220. if (listeners.size() == 1)
  221. triggerImageCapture();
  222. }
  223. void removeListener (CameraDevice::Listener* listenerToRemove)
  224. {
  225. const ScopedLock sl (listenerLock);
  226. listeners.remove (listenerToRemove);
  227. }
  228. static StringArray getAvailableDevices()
  229. {
  230. auto* devices = decltype (imageOutput)::getAvailableDevices();
  231. StringArray results;
  232. for (AVCaptureDevice* device : devices)
  233. results.add (nsStringToJuce ([device localizedName]));
  234. return results;
  235. }
  236. AVCaptureSession* getCaptureSession()
  237. {
  238. return session;
  239. }
  240. private:
  241. //==============================================================================
  242. struct DelegateClass : public ObjCClass<NSObject>
  243. {
  244. DelegateClass() : ObjCClass<NSObject> ("JUCECameraDelegate_")
  245. {
  246. addIvar<Pimpl*> ("owner");
  247. addProtocol (@protocol (AVCaptureFileOutputRecordingDelegate));
  248. addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL: fromConnections:), didStartRecordingToOutputFileAtURL, "v@:@@@");
  249. addMethod (@selector (captureOutput:didPauseRecordingToOutputFileAtURL: fromConnections:), didPauseRecordingToOutputFileAtURL, "v@:@@@");
  250. addMethod (@selector (captureOutput:didResumeRecordingToOutputFileAtURL: fromConnections:), didResumeRecordingToOutputFileAtURL, "v@:@@@");
  251. addMethod (@selector (captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:), willFinishRecordingToOutputFileAtURL, "v@:@@@@");
  252. SEL runtimeErrorSel = NSSelectorFromString (nsStringLiteral ("captureSessionRuntimeError:"));
  253. addMethod (runtimeErrorSel, sessionRuntimeError, "v@:@");
  254. registerClass();
  255. }
  256. static void setOwner (id self, Pimpl* owner) { object_setInstanceVariable (self, "owner", owner); }
  257. static Pimpl& getOwner (id self) { return *getIvar<Pimpl*> (self, "owner"); }
  258. private:
  259. static void didStartRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  260. static void didPauseRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  261. static void didResumeRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*) {}
  262. static void willFinishRecordingToOutputFileAtURL (id, SEL, AVCaptureFileOutput*, NSURL*, NSArray*, NSError*) {}
  263. static void sessionRuntimeError (id self, SEL, NSNotification* notification)
  264. {
  265. JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));
  266. NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
  267. auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
  268. getOwner (self).cameraSessionRuntimeError (errorString);
  269. }
  270. };
  271. //==============================================================================
  272. void addImageCapture()
  273. {
  274. imageOutput.addImageCapture (session);
  275. }
  276. void addMovieCapture()
  277. {
  278. if (fileOutput == nil)
  279. {
  280. fileOutput = [[AVCaptureMovieFileOutput alloc] init];
  281. [session addOutput: fileOutput];
  282. }
  283. }
  284. void removeImageCapture()
  285. {
  286. imageOutput.removeImageCapture (session);
  287. }
  288. void removeMovieCapture()
  289. {
  290. if (fileOutput != nil)
  291. {
  292. [session removeOutput: fileOutput];
  293. [fileOutput release];
  294. fileOutput = nil;
  295. }
  296. }
  297. void removeCurrentSessionVideoInputs()
  298. {
  299. if (session != nil)
  300. {
  301. NSArray<AVCaptureDeviceInput*>* inputs = session.inputs;
  302. for (AVCaptureDeviceInput* input : inputs)
  303. if ([input.device hasMediaType: AVMediaTypeVideo])
  304. [session removeInput:input];
  305. }
  306. }
  307. void addInput()
  308. {
  309. if (currentInput == nil)
  310. {
  311. auto* availableDevices = decltype (imageOutput)::getAvailableDevices();
  312. for (AVCaptureDevice* device : availableDevices)
  313. {
  314. if (deviceName == nsStringToJuce ([device localizedName]))
  315. {
  316. removeCurrentSessionVideoInputs();
  317. NSError* err = nil;
  318. AVCaptureDeviceInput* inputDevice = [[AVCaptureDeviceInput alloc] initWithDevice: device
  319. error: &err];
  320. jassert (err == nil);
  321. if ([session canAddInput: inputDevice])
  322. {
  323. [session addInput: inputDevice];
  324. currentInput = inputDevice;
  325. }
  326. else
  327. {
  328. jassertfalse;
  329. [inputDevice release];
  330. }
  331. return;
  332. }
  333. }
  334. }
  335. }
  336. void removeInput()
  337. {
  338. if (currentInput != nil)
  339. {
  340. [session removeInput: currentInput];
  341. [currentInput release];
  342. currentInput = nil;
  343. }
  344. }
  345. void refreshConnections()
  346. {
  347. [session beginConfiguration];
  348. removeInput();
  349. removeImageCapture();
  350. removeMovieCapture();
  351. addInput();
  352. addImageCapture();
  353. addMovieCapture();
  354. [session commitConfiguration];
  355. }
  356. void refreshIfNeeded()
  357. {
  358. if (getVideoConnection() == nullptr)
  359. refreshConnections();
  360. }
  361. AVCaptureConnection* getVideoConnection() const
  362. {
  363. auto* connections = imageOutput.getConnections();
  364. if (connections != nil)
  365. for (AVCaptureConnection* connection in connections)
  366. if ([connection isActive] && [connection isEnabled])
  367. for (AVCaptureInputPort* port in [connection inputPorts])
  368. if ([[port mediaType] isEqual: AVMediaTypeVideo])
  369. return connection;
  370. return nil;
  371. }
  372. void imageCaptureFinished (const Image& image)
  373. {
  374. handleImageCapture (image);
  375. WeakReference<Pimpl> weakRef (this);
  376. MessageManager::callAsync ([weakRef, image]() mutable
  377. {
  378. if (weakRef != nullptr && weakRef->pictureTakenCallback != nullptr)
  379. weakRef->pictureTakenCallback (image);
  380. });
  381. }
  382. void handleImageCapture (const Image& image)
  383. {
  384. const ScopedLock sl (listenerLock);
  385. listeners.call ([=] (Listener& l) { l.imageReceived (image); });
  386. if (! listeners.isEmpty())
  387. triggerImageCapture();
  388. }
  389. void triggerImageCapture()
  390. {
  391. refreshIfNeeded();
  392. if (auto* videoConnection = getVideoConnection())
  393. imageOutput.triggerImageCapture (*this);
  394. }
  395. void cameraSessionRuntimeError (const String& error)
  396. {
  397. JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);
  398. if (owner.onErrorOccurred != nullptr)
  399. owner.onErrorOccurred (error);
  400. }
  401. //==============================================================================
  402. CameraDevice& owner;
  403. String deviceName;
  404. AVCaptureSession* session = nil;
  405. AVCaptureMovieFileOutput* fileOutput = nil;
  406. #if defined (MAC_OS_X_VERSION_10_15) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_15
  407. PostCatalinaPhotoOutput imageOutput;
  408. #else
  409. PreCatalinaStillImageOutput imageOutput;
  410. #endif
  411. AVCaptureDeviceInput* currentInput = nil;
  412. id<AVCaptureFileOutputRecordingDelegate> callbackDelegate = nil;
  413. String openingError;
  414. Time firstPresentationTime;
  415. bool isRecording = false;
  416. CriticalSection listenerLock;
  417. ListenerList<Listener> listeners;
  418. std::function<void (const Image&)> pictureTakenCallback = nullptr;
  419. //==============================================================================
  420. JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
  421. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  422. };
  423. //==============================================================================
  424. struct CameraDevice::ViewerComponent : public NSViewComponent
  425. {
  426. ViewerComponent (CameraDevice& device)
  427. {
  428. JUCE_AUTORELEASEPOOL
  429. {
  430. AVCaptureVideoPreviewLayer* previewLayer = [[AVCaptureVideoPreviewLayer alloc] init];
  431. AVCaptureSession* session = device.pimpl->getCaptureSession();
  432. [session stopRunning];
  433. [previewLayer setSession: session];
  434. [session startRunning];
  435. NSView* view = [[NSView alloc] init];
  436. [view setLayer: previewLayer];
  437. setView (view);
  438. }
  439. }
  440. ~ViewerComponent()
  441. {
  442. setView (nil);
  443. }
  444. JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
  445. };
  446. String CameraDevice::getFileExtension()
  447. {
  448. return ".mov";
  449. }