The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes
/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2022 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 7 End-User License
   Agreement and JUCE Privacy Policy.

   End User License Agreement: www.juce.com/juce-7-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
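//==============================================================================
// iOS implementation of CameraDevice: CameraDevice::Pimpl wraps an
// AVCaptureSession, with nested helpers for still-picture capture
// (StillPictureTaker), movie recording (VideoRecorder) and on-screen preview
// (ViewerComponent).
//
// A minimal usage sketch, going through the public juce::CameraDevice API
// rather than this Pimpl directly (a sketch only; it assumes the default
// openDeviceAsync() size/quality parameters, and the callback takes ownership
// of the device object, which the caller must eventually delete):
//
//     juce::CameraDevice::openDeviceAsync (0, [] (juce::CameraDevice* device,
//                                                 const juce::String& error)
//     {
//         if (device == nullptr)
//         {
//             DBG ("Camera failed to open: " + error);
//             return;
//         }
//
//         device->takeStillPicture ([] (const juce::Image& picture)
//         {
//             DBG ("Picture taken: " + juce::String (picture.getWidth()) + "x"
//                                    + juce::String (picture.getHeight()));
//         });
//     });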
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")

struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = std::move (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^([[maybe_unused]] BOOL granted)
                                 {
                                     // Access to video is required for camera to work,
                                     // black images will be produced otherwise!
                                     jassert (granted);
                                 }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^([[maybe_unused]] BOOL granted)
                                 {
                                     // Access to audio is required for camera to work,
                                     // silence will be produced otherwise!
                                     jassert (granted);
                                 }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }

    bool openedOk() const noexcept { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");
            printDebugCameraInfo (device);
            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

private:
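    // Device enumeration: on iOS 10+ an AVCaptureDeviceDiscoverySession is used;
    // older systems fall back to the deprecated [AVCaptureDevice devicesWithMediaType:],
    // which is why -Wdeprecated-declarations is suppressed at the top of this file.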
    static NSArray<AVCaptureDevice*>* getDevices()
    {
        if (@available (iOS 10.0, *))
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if (@available (iOS 10.2, *))
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if (@available (iOS 11.1, *))
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }

    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;
        String positionString = position == AVCaptureDevicePositionBack
                                    ? "Back"
                                    : position == AVCaptureDevicePositionFront
                                          ? "Front"
                                          : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

        if (@available (iOS 10.0, *))
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }

        if (@available (iOS 11.0, *))
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }

        JUCE_CAMERA_LOG ("Capture formats start-------------------");
        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }
        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

        if (@available (iOS 10.0, *))
        {
            String colourSpaces;

            for (NSNumber* number in format.supportedColorSpaces)
            {
                switch ([number intValue])
                {
                    case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                    case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                    default: break;
                }
            }

            JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);
        }

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);
        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));
        JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
        JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
        JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
        JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
        JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

        String autoFocusSystemString;

        switch (format.autoFocusSystem)
        {
            case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
            case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
            case AVCaptureAutoFocusSystemNone:
            default:                                        autoFocusSystemString = "None";
        }
        JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

        JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
        JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
        JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));

        if (@available (iOS 11.0, *))
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFUniquePtr<CFStringRef> timeDesc (CMTimeCopyDescription (nullptr, time));
        return String::fromCFString (timeDesc.get());
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }

    //==============================================================================
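    // Manages a single AVCaptureSession. All session mutation (configuration,
    // adding inputs/outputs, starting and stopping) is serialised on the
    // captureSessionQueue dispatch queue; results and errors are forwarded to
    // the message thread with MessageManager::callAsync().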
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (runtimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            dispatch_async (captureSessionQueue, ^
            {
                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                       : AVCaptureSessionPresetMedium];
            });

            ++numCaptureSessions;
        }

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                {
                    if (captureSession.get().running)
                        [captureSession.get() stopRunning];

                    sessionClosedEvent.signal();
                });

                sessionClosedEvent.wait (-1);
            }
        }

        bool openedOk() const noexcept { return sessionStarted; }

        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
            {
                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                auto audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                [captureSession.get() beginConfiguration];

                // This will add just video...
                auto error = addInputToDevice (cameraDevice);

                if (error.isNotEmpty())
                {
                    MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                // ... so add audio explicitly here
                error = addInputToDevice (audioDevice);

                if (error.isNotEmpty())
                {
                    MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                [captureSession.get() commitConfiguration];

                if (! captureSession.get().running)
                    [captureSession.get() startRunning];
            });
        }

        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }
        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }
        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by iOS video recorder, the target
                // file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                               error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wnullable-to-nonnull-conversion")
            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            return {};
        }

        //==============================================================================
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
                JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
                addMethod (@selector (sessionDidStartRunning:),   started);
                addMethod (@selector (sessionDidStopRunning:),    stopped);
                addMethod (@selector (runtimeError:),             runtimeError);
                addMethod (@selector (sessionWasInterrupted:),    interrupted);
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded);
                JUCE_END_IGNORE_WARNINGS_GCC_LIKE

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)         { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s) { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, [[maybe_unused]] NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, [[maybe_unused]] NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, [[maybe_unused]] NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));
            }

            static void interruptionEnded (id, SEL, [[maybe_unused]] NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));
            }
        };

        //==============================================================================
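        // Still-picture capture: uses AVCapturePhotoOutput on iOS 10+, falling
        // back to the older AVCaptureStillImageOutput API on earlier systems.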
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
                if (@available (iOS 10.0, *))
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                    if (@available (iOS 10.0, *))
                    {
                        if ([captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                        {
                            auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                            auto outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                            outputConnection.videoOrientation = orientationToUse;

                            [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                         delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];

                            return;
                        }
                    }

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                        ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                        {
                            takingPicture = false;

                            if (error != nil)
                            {
                                JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                jassertfalse;
                                return;
                            }

                            NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                            auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                            callListeners (image);

                            MessageManager::callAsync ([this, image] { notifyPictureTaken (image); });
                        }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }

        private:
            static AVCaptureOutput* createCaptureOutput()
            {
                if (@available (iOS 10.0, *))
                    return [AVCapturePhotoOutput new];

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
                if (@available (iOS 10.0, *))
                {
                    if ([captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                        String typesString;

                        for (id type in photoOutput.availablePhotoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                        JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                        JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                        JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                        JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                        JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                        if (@available (iOS 11.0, *))
                        {
                            typesString.clear();

                            for (AVFileType type in photoOutput.availablePhotoFileTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                            typesString.clear();

                            for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                            typesString.clear();

                            for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                            JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                            JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                            JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                        }

                        return;
                    }
                }

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (id type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }

            //==============================================================================
            class API_AVAILABLE (ios (10.0)) PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings);
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings);
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings);
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings);

                    if (@available (iOS 11.0, *))
                    {
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto);
                    }
                    else
                    {
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer);
                    }

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)         { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t) { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    [[maybe_unused]] String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                API_AVAILABLE (ios (11.0))
                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    [[maybe_unused]] String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];
                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / (CGFloat) origHeight, -targetSize.width / (CGFloat) origWidth);
                            break;
                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;
                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / (CGFloat) origHeight, -targetSize.width / (CGFloat) origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDownMirrored:
                        case kCGImagePropertyOrientationLeftMirrored:
                        case kCGImagePropertyOrientationRightMirrored:
                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    [[maybe_unused]] String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }
                }
            };

            //==============================================================================
            void callListeners (const Image& image)
            {
                captureSession.callListeners (image);
            }

            void notifyPictureTaken (const Image& image)
            {
                captureSession.notifyPictureTaken (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;

            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;

            bool takingPicture = false;
        };
        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread (incl.
        //     the message thread), so waiting for an event when stopping recording is not an
        //     option and VideoRecorder must be alive at all times in order to get the
        //     stopped-recording callback.
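        // Records the running session to a movie file with AVCaptureMovieFileOutput;
        // the delegate below tracks recording state and the first recorded frame time.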
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& session)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                session.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                if (@available (iOS 10.0, *))
                    printVideoOutputDebugInfo (movieFileOutput);

                auto url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                      isDirectory: NO];

                auto outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo ([[maybe_unused]] AVCaptureMovieFileOutput* output)
            {
                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (id type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass() : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started);
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped);

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)         { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r) { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;

            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };
        //==============================================================================
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
            {
                if ([captureSession.get() canAddOutput: output])
                {
                    [captureSession.get() beginConfiguration];
                    [captureSession.get() addOutput: output];
                    [captureSession.get() commitConfiguration];

                    return;
                }

                // Can't add output to camera session!
                jassertfalse;
            });
        }

        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };
    //==============================================================================
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
            cameraOpenCallback ({}, error);
        else
            NullCheckedInvocation::invoke (owner.onErrorOccurred, error);
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        NullCheckedInvocation::invoke (pictureTakenCallback, image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;
    bool notifiedOfCameraOpening = false;

    friend struct CameraDevice::ViewerComponent;

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;

//==============================================================================
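// The preview component: embeds an AVCaptureVideoPreviewLayer in a custom UIView
// and keeps the layer's frame and video orientation in sync with the view layout
// and the current device orientation.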
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass() : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews);

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage<void> (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                         ? (AVCaptureVideoOrientation) statusBarOrientation
                                                         : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};

//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}

JUCE_END_IGNORE_WARNINGS_GCC_LIKE