The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
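//==============================================================================
/*  iOS camera support, implemented on top of AVFoundation.

    This file provides CameraDevice::Pimpl, the private implementation behind
    JUCE's public CameraDevice class on iOS: device discovery, still-picture
    capture, video recording to file, and a live preview layer used by
    CameraDevice::ViewerComponent.
*/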
struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = static_cast<InternalOpenCameraResultCallback&&> (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to video is required for camera to work,
                                     // black images will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to audio is required for camera to work,
                                     // silence will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }
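    // NB: on iOS 10 and later, the two permission requests above require the
    // hosting app's Info.plist to declare NSCameraUsageDescription and
    // NSMicrophoneUsageDescription entries; without them the system kills the
    // process before the completion handlers ever run.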
    bool openedOk() const noexcept { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");
            printDebugCameraInfo (device);
            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }
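    // NB: listeners only receive images when a still picture is taken (see
    // callListeners() below), so adding the first listener immediately
    // triggers a capture to prime the pipeline with an initial frame.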
    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }

private:
    static NSArray<AVCaptureDevice*>* getDevices()
    {
       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if ((iosVersion.major == 10 && iosVersion.minor >= 2) || iosVersion.major >= 11)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if ((iosVersion.major == 11 && iosVersion.minor >= 1) || iosVersion.major >= 12)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }
       #endif

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }
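    // -[AVCaptureDevice devicesWithMediaType:] was deprecated in iOS 10 in
    // favour of AVCaptureDeviceDiscoverySession, hence the runtime version
    // check above in addition to the compile-time SDK guard.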
    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;

        String positionString = position == AVCaptureDevicePositionBack
                                    ? "Back"
                                    : position == AVCaptureDevicePositionFront
                                          ? "Front"
                                          : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }
       #endif

        JUCE_CAMERA_LOG ("Capture formats start-------------------");

        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }

        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

        String colourSpaces;

        for (NSNumber* number in format.supportedColorSpaces)
        {
            switch ([number intValue])
            {
                case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                default: break;
            }
        }

        JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);

        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));

       #if defined (__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
        if (iosVersion.major >= 8)
        {
            JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
            JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
            JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
            JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
            JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

            String autoFocusSystemString;

            switch (format.autoFocusSystem)
            {
                case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
                case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
                default: autoFocusSystemString = "None";
            }

            JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

            JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
            JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
            JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
       #endif
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFStringRef timeDesc = CMTimeCopyDescription (NULL, time);
        String result = String::fromCFString (timeDesc);

        CFRelease (timeDesc);
        return result;
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }

    //==============================================================================
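    /*  Manages the AVCaptureSession life-cycle. All session (re)configuration
        is funnelled through a private serial dispatch queue
        (captureSessionQueue), since starting and reconfiguring a capture
        session can block; results are bounced back to the JUCE message thread
        via MessageManager::callAsync.
    */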
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

           #pragma clang diagnostic push
           #pragma clang diagnostic ignored "-Wundeclared-selector"
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionRuntimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
           #pragma clang diagnostic pop

            dispatch_async (captureSessionQueue, ^
                            {
                                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                                       : AVCaptureSessionPresetMedium];
                            });

            ++numCaptureSessions;
        }

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                                {
                                    if (captureSession.get().running)
                                        [captureSession.get() stopRunning];

                                    sessionClosedEvent.signal();
                                });

                sessionClosedEvent.wait (-1);
            }
        }
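        // NB: when the last session is destroyed, the destructor blocks until
        // the capture session has actually stopped on the background queue,
        // presumably so that the queued shutdown completes before the members
        // it references are torn down.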
        bool openedOk() const noexcept { return sessionStarted; }

        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
                            {
                                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                                auto* audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                                [captureSession.get() beginConfiguration];

                                // This will add just video...
                                auto error = addInputToDevice (cameraDevice);

                                if (error.isNotEmpty())
                                {
                                    WeakReference<CaptureSession> weakRef (this);

                                    MessageManager::callAsync ([weakRef, error]() mutable
                                    {
                                        if (weakRef != nullptr)
                                            weakRef->owner.cameraOpenCallback ({}, error);
                                    });

                                    return;
                                }

                                // ... so add audio explicitly here
                                error = addInputToDevice (audioDevice);

                                if (error.isNotEmpty())
                                {
                                    WeakReference<CaptureSession> weakRef (this);

                                    MessageManager::callAsync ([weakRef, error]() mutable
                                    {
                                        if (weakRef != nullptr)
                                            weakRef->owner.cameraOpenCallback ({}, error);
                                    });

                                    return;
                                }

                                [captureSession.get() commitConfiguration];

                                if (! captureSession.get().running)
                                    [captureSession.get() startRunning];
                            });
        }

        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }

        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassert (openedOk());
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }

        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by iOS video recorder, the target
                // file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto* input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                                error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            return {};
        }

        //==============================================================================
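        /*  SessionDelegateClass is an Objective-C class generated at runtime
            via JUCE's ObjCClass helper. Its only job is to receive the
            session's NSNotificationCenter callbacks and forward them to the
            owning CaptureSession through the "owner" ivar back-pointer.
        */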
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
               #pragma clang diagnostic push
               #pragma clang diagnostic ignored "-Wundeclared-selector"
                addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
                addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
                addMethod (@selector (sessionRuntimeError:),      runtimeError,      "v@:@");
                addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
               #pragma clang diagnostic pop

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)         { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s) { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void interruptionEnded (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }
        };

        //==============================================================================
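        /*  Takes still images. On iOS 10+ this uses AVCapturePhotoOutput with
            a delegate object; on older systems it falls back to the deprecated
            AVCaptureStillImageOutput completion-handler API. Both paths end by
            delivering a JUCE Image through callListeners() and
            notifyPictureTaken().
        */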
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                    if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                        auto* outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                        outputConnection.videoOrientation = orientationToUse;

                        [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                     delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];

                        return;
                    }
                   #endif

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto* outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                         ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                         {
                             if (error != nil)
                             {
                                 JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                 jassertfalse;
                                 return;
                             }

                             NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                             auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                             callListeners (image);

                             MessageManager::callAsync ([this, image]() { notifyPictureTaken (image); });
                         }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }

        private:
            static AVCaptureOutput* createCaptureOutput()
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10)
                    return [AVCapturePhotoOutput new];
               #endif

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                {
                    auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                    String typesString;

                    for (AVVideoCodecType type in photoOutput.availablePhotoCodecTypes)
                        typesString << nsStringToJuce (type) << " ";

                    JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                    JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                    JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                    JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                    JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                    JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                    JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                    JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                    if (Pimpl::getIOSVersion().major >= 11)
                    {
                        typesString.clear();

                        for (AVFileType type in photoOutput.availablePhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                        JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                        JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                    }
                   #endif

                    return;
                }
               #endif

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (AVVideoCodecType type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }

            //==============================================================================
            class PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

                    if (Pimpl::getIOSVersion().major >= 11)
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                    else
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer, "v@:@@@@@@");

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)         { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t) { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];

                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }
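                // The CGImage coming out of AVCapturePhoto is not rotated; the
                // EXIF orientation tag in its metadata records how the device
                // was held. The helper below redraws the pixels into a
                // correctly-oriented context so downstream code can ignore
                // orientation entirely.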
                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;

                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            break;

                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;

                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;

                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto* origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    getOwner (self).callListeners (image);

                    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }
                }
            };

            //==============================================================================
            void callListeners (const Image& image)
            {
                captureSession.callListeners (image);
            }

            void notifyPictureTaken (const Image& image)
            {
                takingPicture = false;

                captureSession.notifyPictureTaken (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;

            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;

            bool takingPicture = false;
        };

        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread (incl.
        // the message thread), so waiting for an event when stopping recording is not an
        // option, and VideoRecorder must stay alive at all times in order to receive the
        // stopped-recording callback.
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& captureSession)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                captureSession.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                    printVideoOutputDebugInfo (movieFileOutput);

                auto* url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                       isDirectory: NO];

                auto* outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVVideoCodecType type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass() : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)         { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r) { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;

            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };
        //==============================================================================
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
                            {
                                if ([captureSession.get() canAddOutput: output])
                                {
                                    [captureSession.get() beginConfiguration];
                                    [captureSession.get() addOutput: output];
                                    [captureSession.get() commitConfiguration];

                                    return;
                                }

                                // Can't add output to camera session!
                                jassertfalse;
                            });
        }

        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };

    //==============================================================================
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }

    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });
    }

    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    CriticalSection listenerLock;
    ListenerList<Listener> listeners;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;
    bool notifiedOfCameraOpening = false;

    //==============================================================================
    struct IOSVersion
    {
        int major;
        int minor;
    };

    static IOSVersion getIOSVersion()
    {
        auto processInfo = [NSProcessInfo processInfo];

        if (! [processInfo respondsToSelector: @selector (operatingSystemVersion)])
            return { 7, 0 };   // Really anything below 8.0, but all we care about is that it's lower than 8.

        return { (int) [processInfo operatingSystemVersion].majorVersion,
                 (int) [processInfo operatingSystemVersion].minorVersion };
    }

    static IOSVersion iosVersion;

    friend struct CameraDevice::ViewerComponent;
    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

CameraDevice::Pimpl::IOSVersion CameraDevice::Pimpl::iosVersion = CameraDevice::Pimpl::getIOSVersion();
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;

//==============================================================================
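/*  ViewerComponent wraps a plain UIView whose layer hosts the
    AVCaptureVideoPreviewLayer created by the capture session. layoutSubviews
    is overridden (again via JUCE's ObjCClass helper) to keep the preview
    layer's frame and video orientation in sync with the view whenever it is
    resized or the device is rotated.
*/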
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass() : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto* view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;

        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                   ? (AVCaptureVideoOrientation) statusBarOrientation
                                                   : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};

//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}
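
//==============================================================================
/*  A hedged usage sketch, assuming the standard public CameraDevice API that
    this Pimpl backs (the file name and error handling are illustrative only):

    @code
    CameraDevice::openDeviceAsync (0, [] (CameraDevice* device, const String& error)
    {
        if (device == nullptr)
        {
            DBG ("Camera failed to open: " + error);
            return;
        }

        // The caller takes ownership of 'device' and should delete it when done.
        // The target file must not already exist on iOS (see
        // CaptureSession::startRecording above), so pick a fresh one:
        auto file = File::getSpecialLocation (File::tempDirectory)
                        .getNonexistentChildFile ("capture", CameraDevice::getFileExtension());

        device->startRecordingToFile (file);
        // ... later, call device->stopRecording() to finalise the movie file.
    });
    @endcode
*/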