The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2020 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 6 End-User License
   Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).

   End User License Agreement: www.juce.com/juce-6-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

#if (defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0)
 JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
 #define JUCE_USE_NEW_CAMERA_API 1
#endif
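
//==============================================================================
// iOS camera backend for juce::CameraDevice, built on AVFoundation. When the
// SDK is iOS 10.0 or newer, JUCE_USE_NEW_CAMERA_API selects the
// AVCapturePhotoOutput code paths; otherwise the pre-iOS-10
// AVCaptureStillImageOutput paths are used (their deprecation warnings are
// suppressed above).
//
// A minimal usage sketch of the public CameraDevice API that this Pimpl backs
// (client-side code, not part of this file; shown only for orientation):
//
//     juce::CameraDevice::openDeviceAsync (0, [] (juce::CameraDevice* device,
//                                                 const juce::String& error)
//     {
//         if (device != nullptr)
//             device->takeStillPicture ([] (const juce::Image& image) { /* use image */ });
//         else
//             DBG ("Camera failed to open: " + error);
//     });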
struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept    { return cameraId; }
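
    // Requests camera and microphone access from the OS, then starts the capture
    // session asynchronously. The result is reported exactly once through
    // cameraOpenCallback: with the camera id on success (see cameraSessionStarted()
    // below), or with an error string on failure.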
    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = std::move (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^(BOOL granted)
        {
            // Access to video is required for the camera to work,
            // black images will be produced otherwise!
            jassertquiet (granted);
        }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^(BOOL granted)
        {
            // Access to audio is required for the camera to work,
            // silence will be produced otherwise!
            jassertquiet (granted);
        }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }
    bool openedOk() const noexcept    { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");
            printDebugCameraInfo (device);
            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

private:
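    // Enumerates the available capture devices. On iOS 10+ this goes through
    // AVCaptureDeviceDiscoverySession with an explicit list of device types
    // (adding the dual and TrueDepth cameras where the runtime supports them);
    // on older systems it falls back to the deprecated devicesWithMediaType:.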
    static NSArray<AVCaptureDevice*>* getDevices()
    {
       #if JUCE_USE_NEW_CAMERA_API
        if (@available (iOS 10.0, *))
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if (@available (iOS 10.2, *))
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if (@available (iOS 11.1, *))
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }
       #endif

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }

    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;

        String positionString = position == AVCaptureDevicePositionBack ? "Back"
                              : position == AVCaptureDevicePositionFront ? "Front"
                                                                         : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

       #if JUCE_USE_NEW_CAMERA_API
        if (@available (iOS 10.0, *))
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (@available (iOS 11.0, *))
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }
       #endif

        JUCE_CAMERA_LOG ("Capture formats start-------------------");

        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }

        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

       #if JUCE_USE_NEW_CAMERA_API
        if (@available (iOS 10.0, *))
        {
            String colourSpaces;

            for (NSNumber* number in format.supportedColorSpaces)
            {
                switch ([number intValue])
                {
                    case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                    case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                    default: break;
                }
            }

            JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);
        }
       #endif

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);

        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));
        JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
        JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
        JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
        JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
        JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

        String autoFocusSystemString;

        switch (format.autoFocusSystem)
        {
            case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
            case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
            case AVCaptureAutoFocusSystemNone:
            default:                                        autoFocusSystemString = "None";
        }

        JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

        JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
        JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
        JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (@available (iOS 11.0, *))
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
       #endif
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFUniquePtr<CFStringRef> timeDesc (CMTimeCopyDescription (nullptr, time));
        return String::fromCFString (timeDesc.get());
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }

    //==============================================================================
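    // Wraps a single AVCaptureSession. All mutation of the session happens on the
    // serial captureSessionQueue; session state changes arrive through
    // NSNotificationCenter on an Objective-C delegate object generated at runtime
    // (SessionDelegateClass below). A static count of live sessions is kept so the
    // underlying AVCaptureSession is only stopped when the last one goes away.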
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (runtimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            dispatch_async (captureSessionQueue, ^
            {
                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                       : AVCaptureSessionPresetMedium];
            });

            ++numCaptureSessions;
        }

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                {
                    if (captureSession.get().running)
                        [captureSession.get() stopRunning];

                    sessionClosedEvent.signal();
                });

                sessionClosedEvent.wait (-1);
            }
        }

        bool openedOk() const noexcept    { return sessionStarted; }
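
        // Resolves the camera (and the default audio device) on the session queue
        // and adds both as inputs. Errors are bounced back to the message thread
        // through a WeakReference, so a CaptureSession that has since been
        // destroyed is never touched.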
        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
            {
                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                auto audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                [captureSession.get() beginConfiguration];

                // This will add just video...
                auto error = addInputToDevice (cameraDevice);

                if (error.isNotEmpty())
                {
                    MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                // ... so add audio explicitly here
                error = addInputToDevice (audioDevice);

                if (error.isNotEmpty())
                {
                    MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                [captureSession.get() commitConfiguration];

                if (! captureSession.get().running)
                    [captureSession.get() startRunning];
            });
        }

        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }

        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }

        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by the iOS video recorder;
                // the target file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                               error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            return {};
        }

        //==============================================================================
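        // Runtime-generated Objective-C class that receives the
        // NSNotificationCenter callbacks registered in the CaptureSession
        // constructor and forwards them to the owning CaptureSession instance
        // (stashed in an ivar).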
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass()  : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
                JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
                addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
                addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
                addMethod (@selector (runtimeError:),             runtimeError,      "v@:@");
                addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
                JUCE_END_IGNORE_WARNINGS_GCC_LIKE

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)          { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s)  { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void interruptionEnded (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }
        };

        //==============================================================================
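        // Takes still pictures on top of the running session. On iOS 10+ it uses
        // AVCapturePhotoOutput with the delegate class below; on older systems it
        // falls back to AVCaptureStillImageOutput. The takingPicture flag guards
        // against overlapping capture requests.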
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
               #if JUCE_USE_NEW_CAMERA_API
                if (@available (iOS 10.0, *))
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }
               #endif

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                   #if JUCE_USE_NEW_CAMERA_API
                    if (@available (iOS 10.0, *))
                    {
                        if ([captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                        {
                            auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                            auto outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                            outputConnection.videoOrientation = orientationToUse;

                            [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                         delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];

                            return;
                        }
                    }
                   #endif

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                        ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                        {
                            takingPicture = false;

                            if (error != nil)
                            {
                                JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                jassertfalse;
                                return;
                            }

                            NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                            auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                            callListeners (image);

                            MessageManager::callAsync ([this, image] { notifyPictureTaken (image); });
                        }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }

        private:
            static AVCaptureOutput* createCaptureOutput()
            {
               #if JUCE_USE_NEW_CAMERA_API
                if (@available (iOS 10.0, *))
                    return [AVCapturePhotoOutput new];
               #endif

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
               #if JUCE_USE_NEW_CAMERA_API
                if (@available (iOS 10.0, *))
                {
                    if ([captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                        String typesString;

                        for (id type in photoOutput.availablePhotoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                        JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                        JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                        JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                        JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                        JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                        if (@available (iOS 11.0, *))
                        {
                            typesString.clear();

                            for (AVFileType type in photoOutput.availablePhotoFileTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                            typesString.clear();

                            for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                            typesString.clear();

                            for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                                typesString << nsStringToJuce (type) << " ";

                            JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                            JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                            JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                            JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                        }
                       #endif

                        return;
                    }
                }
               #endif

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (id type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }

            //==============================================================================
           #if JUCE_USE_NEW_CAMERA_API
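            // Runtime-generated AVCapturePhotoCaptureDelegate. On iOS 11+ the photo
            // arrives via captureOutput:didFinishProcessingPhoto:error:; on iOS 10 it
            // arrives as a JPEG sample buffer instead. Either way the pixels are
            // re-rendered with the correct orientation before being handed to listeners.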
            class PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

                   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                    if (@available (iOS 11.0, *))
                    {
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:),
                                   didFinishProcessingPhoto,
                                   "v@:@@@");
                    }
                    else
                   #endif
                    {
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:),
                                   didFinishProcessingPhotoSampleBuffer,
                                   "v@:@@@@@@");
                    }

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)          { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t)  { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);
                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);
                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];
                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }
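
                // The captured CGImage is not auto-rotated, so the EXIF orientation
                // read from the photo metadata is baked into the pixels here by
                // redrawing through a CoreGraphics context whose CTM applies the
                // required rotation/flip. The mirrored orientations are not implemented.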
                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            break;
                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;
                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDownMirrored:
                        case kCGImagePropertyOrientationLeftMirrored:
                        case kCGImagePropertyOrientationRightMirrored:
                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);
                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }
                }
            };
           #endif

            //==============================================================================
            void callListeners (const Image& image)
            {
                captureSession.callListeners (image);
            }

            void notifyPictureTaken (const Image& image)
            {
                captureSession.notifyPictureTaken (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;
            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;
            bool takingPicture = false;
        };

        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread (incl.
        // the message thread), so waiting for an event when stopping recording is not an
        // option; VideoRecorder must stay alive at all times in order to receive the
        // stopped-recording callback.
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& session)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                session.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
               #if JUCE_USE_NEW_CAMERA_API
                if (@available (iOS 10.0, *))
                    printVideoOutputDebugInfo (movieFileOutput);
               #endif

                auto url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                      isDirectory: NO];

                auto outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (id type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass()  : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)          { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r)  { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;
            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };

        //==============================================================================
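        // Outputs (photo/still-image and movie-file) are added on the session queue,
        // wrapped in begin/commitConfiguration, and only if the session accepts them.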
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
            {
                if ([captureSession.get() canAddOutput: output])
                {
                    [captureSession.get() beginConfiguration];
                    [captureSession.get() addOutput: output];
                    [captureSession.get() commitConfiguration];

                    return;
                }

                // Can't add output to camera session!
                jassertfalse;
            });
        }

        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };

    //==============================================================================
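    // Pimpl-level callbacks, invoked from CaptureSession on the message thread.
    // A session start completes the open() request via cameraOpenCallback; runtime
    // errors go either to that same callback (if the camera hasn't yet been
    // reported as open) or to the device's onErrorOccurred handler.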
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;
    bool notifiedOfCameraOpening = false;

    friend struct CameraDevice::ViewerComponent;

    //==============================================================================
    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;

//==============================================================================
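// UIViewComponent wrapper that hosts the AVCaptureVideoPreviewLayer as the
// first sublayer of a custom UIView. layoutSubviews keeps the layer's frame
// and video orientation in sync with the view and the device orientation.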
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass()  : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage<void> (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                         ? (AVCaptureVideoOrientation) statusBarOrientation
                                                         : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};

//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}

#if JUCE_USE_NEW_CAMERA_API
 JUCE_END_IGNORE_WARNINGS_GCC_LIKE
#endif