The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2020 - Raw Material Software Limited

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 6 End-User License
   Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).

   End User License Agreement: www.juce.com/juce-6-licence
   Privacy Policy: www.juce.com/juce-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
#if (defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0)
 JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
 #define JUCE_DEPRECATION_IGNORED 1
#endif

struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = std::move (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }
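        // On iOS 10 and later, the access requests below will terminate the app
        // unless the app's Info.plist contains NSCameraUsageDescription and
        // NSMicrophoneUsageDescription entries, so make sure both keys are present.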
        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^(BOOL granted)
         {
             // Access to video is required for camera to work,
             // black images will be produced otherwise!
             jassert (granted);
             ignoreUnused (granted);
         }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^(BOOL granted)
         {
             // Access to audio is required for camera to work,
             // silence will be produced otherwise!
             jassert (granted);
             ignoreUnused (granted);
         }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }

    bool openedOk() const noexcept { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");
            printDebugCameraInfo (device);
            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }
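    // A minimal usage sketch: client code normally enumerates cameras through the
    // public CameraDevice API, which forwards to getAvailableDevices() above; the
    // returned strings are AVCaptureDevice unique IDs.
    //
    //     for (auto& cameraId : CameraDevice::getAvailableDevices())
    //         DBG ("Found camera: " + cameraId);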
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

private:
    static NSArray<AVCaptureDevice*>* getDevices()
    {
       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if ((iosVersion.major == 10 && iosVersion.minor >= 2) || iosVersion.major >= 11)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if ((iosVersion.major == 11 && iosVersion.minor >= 1) || iosVersion.major >= 12)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }
       #endif

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }
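    // Note on the branch above: AVCaptureDeviceDiscoverySession is the iOS 10+
    // replacement for the deprecated devicesWithMediaType:, and it only returns
    // devices whose type was explicitly requested, which is why each camera type
    // is added per OS version before querying.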
    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;

        String positionString = position == AVCaptureDevicePositionBack
                              ? "Back"
                              : position == AVCaptureDevicePositionFront
                                          ? "Front"
                                          : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }
       #endif

        JUCE_CAMERA_LOG ("Capture formats start-------------------");
        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }
        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

        String colourSpaces;

        for (NSNumber* number in format.supportedColorSpaces)
        {
            switch ([number intValue])
            {
                case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                default: break;
            }
        }

        JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);
        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));
        JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
        JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
        JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
        JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
        JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

        String autoFocusSystemString;

        switch (format.autoFocusSystem)
        {
            case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
            case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
            case AVCaptureAutoFocusSystemNone:
            default:                                        autoFocusSystemString = "None";
        }
        JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

        JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
        JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
        JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
       #endif
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFStringRef timeDesc = CMTimeCopyDescription (nullptr, time);
        String result = String::fromCFString (timeDesc);

        CFRelease (timeDesc);
        return result;
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }
    //==============================================================================
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionRuntimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            dispatch_async (captureSessionQueue, ^
            {
                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                       : AVCaptureSessionPresetMedium];
            });

            ++numCaptureSessions;
        }

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                {
                    if (captureSession.get().running)
                        [captureSession.get() stopRunning];

                    sessionClosedEvent.signal();
                });

                sessionClosedEvent.wait (-1);
            }
        }
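        // Teardown note: only the destructor of the last remaining CaptureSession
        // blocks on sessionClosedEvent, which guarantees that -stopRunning has
        // completed on captureSessionQueue before the session and queue go away.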
        bool openedOk() const noexcept { return sessionStarted; }

        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
            {
                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                auto audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                [captureSession.get() beginConfiguration];

                // This will add just video...
                auto error = addInputToDevice (cameraDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                // ... so add audio explicitly here
                error = addInputToDevice (audioDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                [captureSession.get() commitConfiguration];

                if (! captureSession.get().running)
                    [captureSession.get() startRunning];
            });
        }
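        // The beginConfiguration/commitConfiguration pair above batches the video
        // and audio inputs into one atomic session reconfiguration; any error from
        // addInputToDevice() is forwarded to the message thread through a
        // WeakReference so the callback can never fire on a dangling session.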
        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }

        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }

        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by the iOS video recorder;
                // the target file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                               error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            return {};
        }
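        // NB: deviceInputWithDevice:error: returns nil and fills in the error when
        // the device cannot be wrapped as an input (for example when it is
        // unavailable); that error string is what eventually reaches cameraOpenCallback.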
        //==============================================================================
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
                JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
                addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
                addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
                addMethod (@selector (sessionRuntimeError:),      runtimeError,      "v@:@");
                addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
                JUCE_END_IGNORE_WARNINGS_GCC_LIKE

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)         { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s) { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void interruptionEnded (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }
        };

        //==============================================================================
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                    if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                        auto outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                        outputConnection.videoOrientation = orientationToUse;

                        [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                     delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];
                        return;
                    }
                   #endif

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                         ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                         {
                             takingPicture = false;

                             if (error != nil)
                             {
                                 JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                 jassertfalse;
                                 return;
                             }

                             NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                             auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                             callListeners (image);

                             MessageManager::callAsync ([this, image] { notifyPictureTaken (image); });
                         }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }
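            // The method above prefers AVCapturePhotoOutput on iOS 10+ and falls
            // back to the deprecated AVCaptureStillImageOutput on older systems,
            // which is what the -Wdeprecated-declarations suppression at the top
            // of this file is for.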
        private:
            static AVCaptureOutput* createCaptureOutput()
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10)
                    return [AVCapturePhotoOutput new];
               #endif

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                {
                    auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                    String typesString;

                    for (AVVideoCodecType type in photoOutput.availablePhotoCodecTypes)
                        typesString << nsStringToJuce (type) << " ";

                    JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                    JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                    JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                    JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                    JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                    JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                    JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                    JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                    if (Pimpl::getIOSVersion().major >= 11)
                    {
                        typesString.clear();

                        for (AVFileType type in photoOutput.availablePhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                        JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                        JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                    }
                   #endif

                    return;
                }
               #endif

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (AVVideoCodecType type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }

            //==============================================================================
            class PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

                    if (Pimpl::getIOSVersion().major >= 11)
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                    else
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer, "v@:@@@@@@");

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)         { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t) { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];
                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            break;
                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;
                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDownMirrored:
                        case kCGImagePropertyOrientationLeftMirrored:
                        case kCGImagePropertyOrientationRightMirrored:
                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    getOwner (self).takingPicture = false;

                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }
                }
            };

            //==============================================================================
            void callListeners (const Image& image)
            {
                captureSession.callListeners (image);
            }

            void notifyPictureTaken (const Image& image)
            {
                captureSession.notifyPictureTaken (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;

            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;

            bool takingPicture = false;
        };
        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread
        // (incl. the message thread), so waiting for an event when stopping recording
        // is not an option, and VideoRecorder must be alive at all times in order to
        // receive the stopped-recording callback.
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& session)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                session.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                    printVideoOutputDebugInfo (movieFileOutput);

                auto url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                      isDirectory: NO];

                auto outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }
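            // Calling -stopRecording on AVCaptureMovieFileOutput while no recording
            // is in progress does nothing, so this is safe to call unconditionally
            // from the destructor and from CaptureSession::stopRecording().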
            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVVideoCodecType type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass() : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)         { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r) { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;

            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };
        //==============================================================================
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
            {
                if ([captureSession.get() canAddOutput: output])
                {
                    [captureSession.get() beginConfiguration];
                    [captureSession.get() addOutput: output];
                    [captureSession.get() commitConfiguration];

                    return;
                }

                // Can't add output to camera session!
                jassertfalse;
            });
        }
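        // Outputs are attached asynchronously on captureSessionQueue, so an output
        // passed here may not be connected yet by the time the constructor that
        // requested it returns; the serial queue orders this correctly against
        // startSessionForDeviceWithId().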
        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };

    //==============================================================================
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;

    bool notifiedOfCameraOpening = false;

    //==============================================================================
    struct IOSVersion
    {
        int major;
        int minor;
    };

    static IOSVersion getIOSVersion()
    {
        auto processInfo = [NSProcessInfo processInfo];

        if (! [processInfo respondsToSelector: @selector (operatingSystemVersion)])
            return { 7, 0 };   // Below 8.0 in fact, but we only care that it's below 8.

        return { (int) [processInfo operatingSystemVersion].majorVersion,
                 (int) [processInfo operatingSystemVersion].minorVersion };
    }

    static IOSVersion iosVersion;

    friend struct CameraDevice::ViewerComponent;

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

CameraDevice::Pimpl::IOSVersion CameraDevice::Pimpl::iosVersion = CameraDevice::Pimpl::getIOSVersion();
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;
//==============================================================================
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass() : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage<void> (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                   ? (AVCaptureVideoOrientation) statusBarOrientation
                                                   : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};

//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}
#if JUCE_DEPRECATION_IGNORED
 JUCE_END_IGNORE_WARNINGS_GCC_LIKE

 #undef JUCE_DEPRECATION_IGNORED
#endif
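
//==============================================================================
// A minimal usage sketch of this backend through the public CameraDevice API.
// On iOS the device must be opened asynchronously; the device index, file name
// and callback body below are illustrative only.
//
//     CameraDevice::openDeviceAsync (0, [] (CameraDevice* device, const String& error)
//     {
//         if (device == nullptr)
//         {
//             DBG ("Camera open failed: " + error);
//             return;
//         }
//
//         // Add device->createViewerComponent() to your UI to show a preview, then:
//         auto target = File::getSpecialLocation (File::tempDirectory)
//                           .getChildFile ("capture" + CameraDevice::getFileExtension());
//         device->startRecordingToFile (target);
//     });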