The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE 6 technical preview.
   Copyright (c) 2020 - Raw Material Software Limited

   You may use this code under the terms of the GPL v3
   (see www.gnu.org/licenses).

   For this technical preview, this file is not subject to commercial licensing.

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

#if (defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0)
 JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
 #define JUCE_DEPRECATION_IGNORED 1
#endif
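
// The pre-iOS-10 still-picture path below uses AVCaptureStillImageOutput, which
// Apple deprecated in iOS 10 in favour of AVCapturePhotoOutput; the pragmas above
// silence those deprecation warnings when the deployment target is 10.0 or newer.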

struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = std::move (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to video is required for the camera to work;
                                     // black images will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to audio is required for the camera to work;
                                     // silence will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }
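
    // Note: the permission prompts triggered above are only shown if the app's
    // Info.plist declares NSCameraUsageDescription and NSMicrophoneUsageDescription;
    // on iOS 10+ requesting access without those keys terminates the app.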

    bool openedOk() const noexcept { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = std::move (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");

            printDebugCameraInfo (device);

            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }
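
    // A minimal usage sketch from client code, via the public JUCE API that this
    // method backs (the returned strings are the AVCaptureDevice unique IDs):
    //
    //     for (auto& deviceId : CameraDevice::getAvailableDevices())
    //         DBG (deviceId);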

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

private:
    static NSArray<AVCaptureDevice*>* getDevices()
    {
       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if ((iosVersion.major == 10 && iosVersion.minor >= 2) || iosVersion.major >= 11)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if ((iosVersion.major == 11 && iosVersion.minor >= 1) || iosVersion.major >= 12)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }
       #endif

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }
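
    // AVCaptureDeviceDiscoverySession is the preferred enumeration API from iOS 10
    // onwards; the devicesWithMediaType: call above is the deprecated fallback that
    // is still needed for older iOS versions.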

    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;

        String positionString = position == AVCaptureDevicePositionBack
                              ? "Back"
                              : position == AVCaptureDevicePositionFront
                                          ? "Front"
                                          : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }
       #endif

        JUCE_CAMERA_LOG ("Capture formats start-------------------");

        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }

        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

        String colourSpaces;

        for (NSNumber* number in format.supportedColorSpaces)
        {
            switch ([number intValue])
            {
                case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                default: break;
            }
        }

        JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);

        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));

       #if defined (__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
        if (iosVersion.major >= 8)
        {
            JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
            JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
            JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
            JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
            JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

            String autoFocusSystemString;

            switch (format.autoFocusSystem)
            {
                case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
                case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
                case AVCaptureAutoFocusSystemNone:
                default:                                        autoFocusSystemString = "None";
            }

            JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

            JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
            JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
            JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
       #endif
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFStringRef timeDesc = CMTimeCopyDescription (nullptr, time);
        String result = String::fromCFString (timeDesc);

        CFRelease (timeDesc);
        return result;
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }

    //==============================================================================
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

            JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionRuntimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
            JUCE_END_IGNORE_WARNINGS_GCC_LIKE

            dispatch_async (captureSessionQueue, ^
            {
                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                       : AVCaptureSessionPresetMedium];
            });

            ++numCaptureSessions;
        }
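
        // All session configuration is funnelled through the serial
        // captureSessionQueue, so the AVCaptureSession is never mutated from the
        // message thread and configuration calls cannot interleave.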

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                {
                    if (captureSession.get().running)
                        [captureSession.get() stopRunning];

                    sessionClosedEvent.signal();
                });

                sessionClosedEvent.wait (-1);
            }
        }
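
        // Only the destruction of the last remaining session blocks on
        // sessionClosedEvent (tracked via the static numCaptureSessions counter),
        // waiting for the queued stopRunning call to finish first.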

        bool openedOk() const noexcept { return sessionStarted; }

        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
            {
                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                auto audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                [captureSession.get() beginConfiguration];

                // This will add just video...
                auto error = addInputToDevice (cameraDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                // ... so add audio explicitly here
                error = addInputToDevice (audioDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                [captureSession.get() commitConfiguration];

                if (! captureSession.get().running)
                    [captureSession.get() startRunning];
            });
        }
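
        // The WeakReference guard above matters because the error callback hops to
        // the message thread: by the time it runs, this CaptureSession may already
        // have been destroyed (e.g. if the CameraDevice was deleted while opening).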

        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }

        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }

        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by the iOS video recorder:
                // the target file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                               error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            return {};
        }

        //==============================================================================
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
                JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
                addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
                addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
                addMethod (@selector (sessionRuntimeError:),      runtimeError,      "v@:@");
                addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
                JUCE_END_IGNORE_WARNINGS_GCC_LIKE

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)         { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s) { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void interruptionEnded (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }
        };
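
        // SessionDelegateClass is JUCE's ObjCClass<> helper at work: it registers a
        // new Objective-C class at runtime, stores a C++ back-pointer in an ivar,
        // and routes each NSNotification selector to a static C++ function.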

        //==============================================================================
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                    if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                        auto outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                        outputConnection.videoOrientation = orientationToUse;

                        [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                     delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];
                        return;
                    }
                   #endif

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                         ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                         {
                             if (error != nil)
                             {
                                 JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                 jassertfalse;
                                 return;
                             }

                             NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                             auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                             callListeners (image);

                             MessageManager::callAsync ([this, image]() { notifyPictureTaken (image); });
                         }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }

        private:
            static AVCaptureOutput* createCaptureOutput()
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10)
                    return [AVCapturePhotoOutput new];
               #endif

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                {
                    auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                    String typesString;

                    for (AVVideoCodecType type in photoOutput.availablePhotoCodecTypes)
                        typesString << nsStringToJuce (type) << " ";

                    JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                    JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                    JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                    JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                    JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                    JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                    JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                    JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                    if (Pimpl::getIOSVersion().major >= 11)
                    {
                        typesString.clear();

                        for (AVFileType type in photoOutput.availablePhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                        JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                        JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                    }
                   #endif

                    return;
                }
               #endif

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (AVVideoCodecType type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }
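
            // An AVCaptureOutput may aggregate several connections (e.g. video plus
            // audio on a movie output); scanning each connection's input ports for
            // AVMediaTypeVideo picks out the one carrying video frames.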

            //==============================================================================
            class PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

                    if (Pimpl::getIOSVersion().major >= 11)
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                    else
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer, "v@:@@@@@@");

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)          { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t)  { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];
                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;

                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            break;

                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;

                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;

                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDownMirrored:
                        case kCGImagePropertyOrientationLeftMirrored:
                        case kCGImagePropertyOrientationRightMirrored:
                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();

                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }
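
                // For the Left/Right orientations the photo is rotated by 90 degrees,
                // so the target canvas swaps width and height before the CGContext
                // transforms in getImageWithCorrectOrientation() redraw it upright.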

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }

                    // All enum values are handled above, but some compilers warn about
                    // falling off the end of a non-void function without this fallback.
                    jassertfalse;
                    return kCGImagePropertyOrientationUp;
                }
            };

            //==============================================================================
            void callListeners (const Image& image)
            {
                captureSession.callListeners (image);
            }

            void notifyPictureTaken (const Image& image)
            {
                takingPicture = false;

                captureSession.notifyPictureTaken (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;

            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;

            bool takingPicture = false;
        };

        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread
        // (incl. the message thread), so waiting for an event when stopping recording
        // is not an option, and VideoRecorder must stay alive at all times in order to
        // receive the stopped-recording callback.
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& session)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                session.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                    printVideoOutputDebugInfo (movieFileOutput);

                auto url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                      isDirectory: NO];

                auto outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVVideoCodecType type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass() : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)          { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r)  { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may
                    // still be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;

            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };

        //==============================================================================
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
            {
                if ([captureSession.get() canAddOutput: output])
                {
                    [captureSession.get() beginConfiguration];
                    [captureSession.get() addOutput: output];
                    [captureSession.get() commitConfiguration];

                    return;
                }

                // Can't add the output to the camera session!
                jassertfalse;
            });
        }
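
        // Wrapping addOutput: in beginConfiguration/commitConfiguration applies the
        // change atomically, so a running session picks up the new output without an
        // explicit restart.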

        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };

    //==============================================================================
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }

    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;

    bool notifiedOfCameraOpening = false;

    //==============================================================================
    struct IOSVersion
    {
        int major;
        int minor;
    };

    static IOSVersion getIOSVersion()
    {
        auto processInfo = [NSProcessInfo processInfo];

        if (! [processInfo respondsToSelector: @selector (operatingSystemVersion)])
            return { 7, 0 };   // Below 8.0 in fact, but we only care that it's below 8.

        return { (int) [processInfo operatingSystemVersion].majorVersion,
                 (int) [processInfo operatingSystemVersion].minorVersion };
    }

    static IOSVersion iosVersion;

    friend struct CameraDevice::ViewerComponent;

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

CameraDevice::Pimpl::IOSVersion CameraDevice::Pimpl::iosVersion = CameraDevice::Pimpl::getIOSVersion();
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;

//==============================================================================
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass() : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                   ? (AVCaptureVideoOrientation) statusBarOrientation
                                                   : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};

//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}
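
// A minimal end-to-end sketch (not part of this file) using the public CameraDevice
// API that this Pimpl implements; error handling omitted, and openDevice() may
// return nullptr if the device cannot be opened:
//
//     std::unique_ptr<CameraDevice> camera (CameraDevice::openDevice (0));
//
//     if (camera != nullptr)
//         camera->startRecordingToFile (File::getSpecialLocation (File::tempDirectory)
//                                           .getNonexistentChildFile ("capture", CameraDevice::getFileExtension()));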

#if JUCE_DEPRECATION_IGNORED
 JUCE_END_IGNORE_WARNINGS_GCC_LIKE
#endif