The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/

struct CameraDevice::Pimpl
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
           bool useHighQuality)
        : owner (ownerToUse),
          cameraId (cameraIdToUse),
          captureSession (*this, useHighQuality)
    {
    }

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = static_cast<InternalOpenCameraResultCallback&&> (cameraOpenCallbackToUse);

        if (cameraOpenCallback == nullptr)
        {
            // A valid camera open callback must be passed.
            jassertfalse;
            return;
        }

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to video is required for camera to work,
                                     // black images will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                                 completionHandler: ^(BOOL granted)
                                 {
                                     // Access to audio is required for camera to work,
                                     // silence will be produced otherwise!
                                     jassert (granted);
                                     ignoreUnused (granted);
                                 }];

        captureSession.startSessionForDeviceWithId (cameraId);
    }
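
    // Usage sketch (not part of this file): clients normally reach open() via the
    // public CameraDevice API rather than through the Pimpl. The callback signature
    // below is assumed from the JUCE 5 CameraDevice header, so treat it as a sketch:
    //
    //     CameraDevice::openDeviceAsync (0, [] (CameraDevice* device, const String& error)
    //     {
    //         if (device != nullptr)
    //             DBG ("Opened camera: " + device->getName());
    //         else
    //             DBG ("Failed to open camera: " + error);
    //     });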
    bool openedOk() const noexcept { return captureSession.openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }
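
    // Usage sketch (hypothetical caller code): the public CameraDevice::takeStillPicture
    // wrapper forwards here, and the callback is invoked on the message thread via
    // MessageManager::callAsync() once the capture delegate has produced an Image.
    //
    //     device->takeStillPicture ([] (const Image& image)
    //     {
    //         DBG ("Captured " + String (image.getWidth()) + "x" + String (image.getHeight()));
    //     });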
    void startRecordingToFile (const File& file, int /*quality*/)
    {
        file.deleteFile();

        captureSession.startRecording (file);
    }

    void stopRecording()
    {
        captureSession.stopRecording();
    }
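
    // Recording usage sketch (hypothetical caller code). Note that startRecordingToFile()
    // deletes any existing target first, because the iOS movie file output refuses to
    // overwrite an existing file (see CaptureSession::startRecording() below).
    //
    //     auto movie = File::getSpecialLocation (File::tempDirectory)
    //                      .getNonexistentChildFile ("capture", CameraDevice::getFileExtension());
    //     device->startRecordingToFile (movie);
    //     // ... later ...
    //     device->stopRecording();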
    Time getTimeOfFirstRecordedFrame() const
    {
        return captureSession.getTimeOfFirstRecordedFrame();
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        JUCE_CAMERA_LOG ("Available camera devices: ");

        for (AVCaptureDevice* device in getDevices())
        {
            JUCE_CAMERA_LOG ("Device start----------------------------------");
            printDebugCameraInfo (device);
            JUCE_CAMERA_LOG ("Device end----------------------------------");

            results.add (nsStringToJuce (device.uniqueID));
        }

        return results;
    }
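
    // Enumeration sketch (hypothetical caller code): the returned strings are
    // AVCaptureDevice unique IDs, which the public API maps back to indices.
    //
    //     auto devices = CameraDevice::getAvailableDevices();
    //
    //     for (int i = 0; i < devices.size(); ++i)
    //         DBG (String (i) + ": " + devices[i]);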
private:
    static NSArray<AVCaptureDevice*>* getDevices()
    {
       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

            if ((iosVersion.major == 10 && iosVersion.minor >= 2) || iosVersion.major >= 11)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

            if ((iosVersion.major == 11 && iosVersion.minor >= 1) || iosVersion.major >= 12)
                [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

            auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                           mediaType: AVMediaTypeVideo
                                                                                            position: AVCaptureDevicePositionUnspecified];
            return [discoverySession devices];
        }
       #endif

        return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    }

    //==============================================================================
    static void printDebugCameraInfo (AVCaptureDevice* device)
    {
        auto position = device.position;

        String positionString = position == AVCaptureDevicePositionBack
                                    ? "Back"
                                    : position == AVCaptureDevicePositionFront
                                          ? "Front"
                                          : "Unspecified";

        JUCE_CAMERA_LOG ("Position: " + positionString);
        JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
        JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
        JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
        JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

        JUCE_CAMERA_LOG ("Has flash: " + String ((int) device.hasFlash));
        JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeOn]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [device isFlashModeSupported: AVCaptureFlashModeAuto]));

        JUCE_CAMERA_LOG ("Has torch: " + String ((int) device.hasTorch));
        JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeOn]));
        JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int) [device isTorchModeSupported: AVCaptureTorchModeAuto]));

        JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int) device.lowLightBoostEnabled));

        JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
        JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int) [device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

        JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
        JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int) [device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
        JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int) device.focusPointOfInterestSupported));
        JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int) device.smoothAutoFocusSupported));
        JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int) device.autoFocusRangeRestrictionSupported));

        JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
        JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
        JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int) [device isExposureModeSupported: AVCaptureExposureModeCustom]));
        JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int) device.exposurePointOfInterestSupported));

       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (iosVersion.major >= 10)
        {
            JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
            JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int) device.lockingFocusWithCustomLensPositionSupported));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
            JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
        }
       #endif

        JUCE_CAMERA_LOG ("Capture formats start-------------------");
        for (AVCaptureDeviceFormat* format in device.formats)
        {
            JUCE_CAMERA_LOG ("Capture format start------");
            printDebugCameraFormatInfo (format);
            JUCE_CAMERA_LOG ("Capture format end------");
        }
        JUCE_CAMERA_LOG ("Capture formats end-------------------");
    }

    static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
    {
        JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

        String colourSpaces;

        for (NSNumber* number in format.supportedColorSpaces)
        {
            switch ([number intValue])
            {
                case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
                case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
                default: break;
            }
        }

        JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);

        JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
        JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
        JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

        String videoFrameRateRangesString = "Video supported frame rate ranges: ";

        for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
            videoFrameRateRangesString << frameRateRangeToString (range);

        JUCE_CAMERA_LOG (videoFrameRateRangesString);

        JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));

       #if defined (__IPHONE_8_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_8_0
        if (iosVersion.major >= 8)
        {
            JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
            JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
            JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
            JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
            JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

            String autoFocusSystemString;

            switch (format.autoFocusSystem)
            {
                case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
                case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
                default:                                        autoFocusSystemString = "None";
            }

            JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

            JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
            JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
            JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));
        }
       #endif

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (iosVersion.major >= 11)
        {
            JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
            JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
        }
       #endif
    }

    static String getHighResStillImgDimensionsString (CMVideoDimensions d)
    {
        return "[" + String (d.width) + " " + String (d.height) + "]";
    }

    static String cmTimeToString (CMTime time)
    {
        CFStringRef timeDesc = CMTimeCopyDescription (NULL, time);
        String result = String::fromCFString (timeDesc);

        CFRelease (timeDesc);
        return result;
    }

    static String frameRateRangeToString (AVFrameRateRange* range)
    {
        String result;
        result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
        result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
        result << " minFrameRate: " + String (range.minFrameRate);
        result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";

        return result;
    }

    //==============================================================================
    class CaptureSession
    {
    public:
        CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
            : owner (ownerToUse),
              captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
              captureSession ([[AVCaptureSession alloc] init]),
              delegate (nullptr),
              stillPictureTaker (*this),
              videoRecorder (*this)
        {
            static SessionDelegateClass cls;
            delegate.reset ([cls.createInstance() init]);
            SessionDelegateClass::setOwner (delegate.get(), this);

           #pragma clang diagnostic push
           #pragma clang diagnostic ignored "-Wundeclared-selector"
            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStartRunning:)
                                                         name: AVCaptureSessionDidStartRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionDidStopRunning:)
                                                         name: AVCaptureSessionDidStopRunningNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionRuntimeError:)
                                                         name: AVCaptureSessionRuntimeErrorNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionWasInterrupted:)
                                                         name: AVCaptureSessionWasInterruptedNotification
                                                       object: captureSession.get()];

            [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                                     selector: @selector (sessionInterruptionEnded:)
                                                         name: AVCaptureSessionInterruptionEndedNotification
                                                       object: captureSession.get()];
           #pragma clang diagnostic pop

            dispatch_async (captureSessionQueue, ^
            {
                [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                                       : AVCaptureSessionPresetMedium];
            });

            ++numCaptureSessions;
        }

        ~CaptureSession()
        {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

            stopRecording();

            if (--numCaptureSessions == 0)
            {
                dispatch_async (captureSessionQueue, ^
                {
                    if (captureSession.get().running)
                        [captureSession.get() stopRunning];

                    sessionClosedEvent.signal();
                });

                sessionClosedEvent.wait (-1);
            }
        }
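
        // The destructor above uses a signal/wait handshake so the session is fully
        // stopped on its serial queue before the object dies. A reduced sketch of the
        // same pattern (hypothetical, with a generic queue):
        //
        //     WaitableEvent done;
        //     dispatch_async (queue, ^{ /* tear down on the queue */ done.signal(); });
        //     done.wait (-1);   // block until the queued work has run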
        bool openedOk() const noexcept { return sessionStarted; }

        void startSessionForDeviceWithId (const String& cameraIdToUse)
        {
            dispatch_async (captureSessionQueue, ^
            {
                cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
                auto* audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

                [captureSession.get() beginConfiguration];

                // This will add just video...
                auto error = addInputToDevice (cameraDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                // ... so add audio explicitly here
                error = addInputToDevice (audioDevice);

                if (error.isNotEmpty())
                {
                    WeakReference<CaptureSession> weakRef (this);

                    MessageManager::callAsync ([weakRef, error]() mutable
                    {
                        if (weakRef != nullptr)
                            weakRef->owner.cameraOpenCallback ({}, error);
                    });

                    return;
                }

                [captureSession.get() commitConfiguration];

                if (! captureSession.get().running)
                    [captureSession.get() startRunning];
            });
        }

        AVCaptureVideoPreviewLayer* createPreviewLayer()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return nullptr;
            }

            previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
            return previewLayer;
        }

        void takeStillPicture()
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
        }

        void startRecording (const File& file)
        {
            if (! openedOk())
            {
                // A session must be started first!
                jassertfalse;
                return;
            }

            if (file.existsAsFile())
            {
                // File overwriting is not supported by the iOS video recorder, so the
                // target file must not exist.
                jassertfalse;
                return;
            }

            videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
        }

        void stopRecording()
        {
            videoRecorder.stopRecording();
        }

        Time getTimeOfFirstRecordedFrame() const
        {
            return videoRecorder.getTimeOfFirstRecordedFrame();
        }

        JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)

    private:
        String addInputToDevice (AVCaptureDevice* device)
        {
            NSError* error = nil;

            auto* input = [AVCaptureDeviceInput deviceInputWithDevice: device
                                                                error: &error];

            if (error != nil)
                return nsStringToJuce (error.localizedDescription);

            if (! [captureSession.get() canAddInput: input])
                return "Could not add input to camera session.";

            [captureSession.get() addInput: input];
            return {};
        }

        //==============================================================================
        struct SessionDelegateClass : public ObjCClass<NSObject>
        {
            SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
            {
               #pragma clang diagnostic push
               #pragma clang diagnostic ignored "-Wundeclared-selector"
                addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
                addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
                addMethod (@selector (sessionRuntimeError:),      runtimeError,      "v@:@");
                addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
                // NB: this must register sessionInterruptionEnded: (the selector the
                // notification observer uses), not sessionDidStartRunning: again.
                addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
               #pragma clang diagnostic pop

                addIvar<CaptureSession*> ("owner");

                registerClass();
            }

            //==============================================================================
            static CaptureSession& getOwner (id self)         { return *getIvar<CaptureSession*> (self, "owner"); }
            static void setOwner (id self, CaptureSession* s) { object_setInstanceVariable (self, "owner", s); }

        private:
            //==============================================================================
            static void started (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    getOwner (self).cameraSessionStarted();
                                });
            }

            static void stopped (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void runtimeError (id self, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                dispatch_async (dispatch_get_main_queue(),
                                ^{
                                    NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                                    auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                                    getOwner (self).cameraSessionRuntimeError (errorString);
                                });
            }

            static void interrupted (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }

            static void interruptionEnded (id, SEL, NSNotification* notification)
            {
                JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

                ignoreUnused (notification);
            }
        };

        //==============================================================================
        class StillPictureTaker
        {
        public:
            StillPictureTaker (CaptureSession& cs)
                : captureSession (cs),
                  captureOutput (createCaptureOutput()),
                  photoOutputDelegate (nullptr)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                {
                    static PhotoOutputDelegateClass cls;
                    photoOutputDelegate.reset ([cls.createInstance() init]);
                    PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
                }

                captureSession.addOutputIfPossible (captureOutput);
            }

            void takePicture (AVCaptureVideoOrientation orientationToUse)
            {
                if (takingPicture)
                {
                    // Picture taking already in progress!
                    jassertfalse;
                    return;
                }

                takingPicture = true;

                printImageOutputDebugInfo (captureOutput);

                if (auto* connection = findVideoConnection (captureOutput))
                {
                   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                    if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                    {
                        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
                        auto* outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
                        outputConnection.videoOrientation = orientationToUse;

                        [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                                     delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];

                        return;
                    }
                   #endif

                    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
                    auto* outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
                    outputConnection.videoOrientation = orientationToUse;

                    [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
                         ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
                         {
                             if (error != nil)
                             {
                                 JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                                 jassertfalse;
                                 return;
                             }

                             NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                             auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                             MessageManager::callAsync ([this, image]() { imageTaken (image); });
                         }];
                }
                else
                {
                    // Could not find a connection of video type
                    jassertfalse;
                }
            }

        private:
            static AVCaptureOutput* createCaptureOutput()
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10)
                    return [AVCapturePhotoOutput new];
               #endif

                return [AVCaptureStillImageOutput new];
            }

            static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
            {
               #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
                if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
                {
                    auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

                    String typesString;

                    for (AVVideoCodecType type in photoOutput.availablePhotoCodecTypes)
                        typesString << nsStringToJuce (type) << " ";

                    JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

                    JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
                    JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
                    JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
                    JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
                    JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
                    JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
                    JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

                   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
                    if (Pimpl::getIOSVersion().major >= 11)
                    {
                        typesString.clear();

                        for (AVFileType type in photoOutput.availablePhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

                        typesString.clear();

                        for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                            typesString << nsStringToJuce (type) << " ";

                        JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

                        JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
                        JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
                        JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
                    }
                   #endif

                    return;
                }
               #endif

                auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

                String typesString;

                for (AVVideoCodecType type in stillImageOutput.availableImageDataCodecTypes)
                    typesString << nsStringToJuce (type) << " ";

                JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
                JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
                JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
                JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
            }

            //==============================================================================
            static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
            {
                for (AVCaptureConnection* connection in output.connections)
                    for (AVCaptureInputPort* port in connection.inputPorts)
                        if ([port.mediaType isEqual: AVMediaTypeVideo])
                            return connection;

                return nullptr;
            }

            //==============================================================================
            class PhotoOutputDelegateClass : public ObjCClass<NSObject>
            {
            public:
                PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
                {
                    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
                    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
                    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
                    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

                    if (Pimpl::getIOSVersion().major >= 11)
                        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
                    else
                        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer, "v@:@@@@@@");

                    addIvar<StillPictureTaker*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static StillPictureTaker& getOwner (id self)         { return *getIvar<StillPictureTaker*> (self, "owner"); }
                static void setOwner (id self, StillPictureTaker* t) { object_setInstanceVariable (self, "owner", t); }

            private:
                static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
                }

                static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("willCaptureForSettings()");
                }

                static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
                {
                    JUCE_CAMERA_LOG ("didCaptureForSettings()");
                }

                static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
                }

                static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    auto* imageOrientation = (NSNumber*) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];

                    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                                    [capturePhoto CGImageRepresentation]);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    MessageManager::callAsync ([self, image]() { getOwner (self).imageTaken (image); });
                }

                static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                                CGImageRef imageData)
                {
                    auto origWidth  = CGImageGetWidth (imageData);
                    auto origHeight = CGImageGetHeight (imageData);

                    auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                    UIGraphicsBeginImageContext (targetSize);
                    CGContextRef context = UIGraphicsGetCurrentContext();

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                            CGContextScaleCTM (context, 1.0, -1.0);
                            CGContextTranslateCTM (context, 0.0, -targetSize.height);
                            break;
                        case kCGImagePropertyOrientationRight:
                            CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            break;
                        case kCGImagePropertyOrientationDown:
                            CGContextTranslateCTM (context, targetSize.width, 0.0);
                            CGContextScaleCTM (context, -1.0, 1.0);
                            break;
                        case kCGImagePropertyOrientationLeft:
                            CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                            CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                            CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                            break;
                        default:
                            // Not implemented.
                            jassertfalse;
                            break;
                    }

                    CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                    UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    return correctedImage;
                }

                static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
                                                          CGImageRef imageData)
                {
                    auto width  = CGImageGetWidth (imageData);
                    auto height = CGImageGetHeight (imageData);

                    switch (imageOrientation)
                    {
                        case kCGImagePropertyOrientationUp:
                        case kCGImagePropertyOrientationUpMirrored:
                        case kCGImagePropertyOrientationDown:
                        case kCGImagePropertyOrientationDownMirrored:
                            return CGSizeMake ((CGFloat) width, (CGFloat) height);

                        case kCGImagePropertyOrientationRight:
                        case kCGImagePropertyOrientationRightMirrored:
                        case kCGImagePropertyOrientationLeft:
                        case kCGImagePropertyOrientationLeftMirrored:
                            return CGSizeMake ((CGFloat) height, (CGFloat) width);
                    }

                    jassertfalse;
                    return CGSizeMake ((CGFloat) width, (CGFloat) height);
                }

                static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                                  CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                                  AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                                  NSError* error)
                {
                    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                    ignoreUnused (errorString);

                    JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                    if (error != nil)
                    {
                        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                        jassertfalse;
                        return;
                    }

                    NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                    auto* origImage = [UIImage imageWithData: origImageData];
                    auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                    auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

                    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                    MessageManager::callAsync ([self, image]() { getOwner (self).imageTaken (image); });
                }

                static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
                {
                    switch (orientation)
                    {
                        case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
                        case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
                        case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
                        case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
                        case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
                        case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
                        case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
                        case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
                    }
                }
            };

            //==============================================================================
            void imageTaken (const Image& image)
            {
                takingPicture = false;
                captureSession.notifyImageReceived (image);
            }

            CaptureSession& captureSession;
            AVCaptureOutput* captureOutput;
            std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;
            bool takingPicture = false;
        };

        //==============================================================================
        // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread (incl.
        // the message thread), so waiting for an event when stopping recording is not an
        // option, and VideoRecorder must be alive at all times in order to receive the
        // stopped-recording callback.
        class VideoRecorder
        {
        public:
            VideoRecorder (CaptureSession& captureSession)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                captureSession.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                if (Pimpl::getIOSVersion().major >= 10)
                    printVideoOutputDebugInfo (movieFileOutput);

                auto* url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                       isDirectory: NO];

                auto* outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVVideoCodecType type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            struct FileOutputRecordingDelegateClass : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass() : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)         { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r) { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;
                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may still be a playable file...
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;
            bool recordingInProgress = false;
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };

        //==============================================================================
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue, ^
            {
                if ([captureSession.get() canAddOutput: output])
                {
                    [captureSession.get() beginConfiguration];
                    [captureSession.get() addOutput: output];
                    [captureSession.get() commitConfiguration];

                    return;
                }

                // Can't add output to camera session!
                jassertfalse;
            });
        }

        //==============================================================================
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        void notifyImageReceived (const Image& image)
        {
            owner.notifyImageReceived (image);
        }

        Pimpl& owner;

        dispatch_queue_t captureSessionQueue;
        std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
        std::unique_ptr<NSObject, NSObjectDeleter> delegate;

        StillPictureTaker stillPictureTaker;
        VideoRecorder videoRecorder;

        AVCaptureDevice* cameraDevice = nil;
        AVCaptureVideoPreviewLayer* previewLayer = nil;

        bool sessionStarted = false;

        WaitableEvent sessionClosedEvent;

        static int numCaptureSessions;
    };

    //==============================================================================
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }

    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }

    void notifyImageReceived (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyImageReceived()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }

    //==============================================================================
    CameraDevice& owner;
    String cameraId;
    InternalOpenCameraResultCallback cameraOpenCallback;

    std::function<void (const Image&)> pictureTakenCallback;

    CaptureSession captureSession;
    bool notifiedOfCameraOpening = false;

    //==============================================================================
    struct IOSVersion
    {
        int major;
        int minor;
    };

    static IOSVersion getIOSVersion()
    {
        auto processInfo = [NSProcessInfo processInfo];

        if (! [processInfo respondsToSelector: @selector (operatingSystemVersion)])
            return { 7, 0 };   // Below 8.0 in fact, but we only care that it's below 8.

        return { (int) [processInfo operatingSystemVersion].majorVersion,
                 (int) [processInfo operatingSystemVersion].minorVersion };
    }
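
    // Availability checks in this file pair a compile-time SDK guard with the runtime
    // version above, e.g. (a sketch of the recurring pattern):
    //
    //    #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
    //     if (getIOSVersion().major >= 10)
    //     {
    //         // safe to use iOS 10+ API here
    //     }
    //    #endif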
    static IOSVersion iosVersion;

    friend struct CameraDevice::ViewerComponent;

    JUCE_DECLARE_NON_COPYABLE (Pimpl)
};

CameraDevice::Pimpl::IOSVersion CameraDevice::Pimpl::iosVersion = CameraDevice::Pimpl::getIOSVersion();
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;

//==============================================================================
struct CameraDevice::ViewerComponent : public UIViewComponent
{
    //==============================================================================
    struct JuceCameraDeviceViewerClass : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass() : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overridden later.
        setSize (640, 480);

        auto* view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                         ? (AVCaptureVideoOrientation) statusBarOrientation
                                                         : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};
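
// Viewer usage sketch (hypothetical caller code, assuming the public
// CameraDevice::createViewerComponent() factory): the returned component wraps
// the preview layer created above and is owned by the caller.
//
//     std::unique_ptr<Component> viewer (device->createViewerComponent());
//     addAndMakeVisible (viewer.get());
//     viewer->setBounds (getLocalBounds());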
//==============================================================================
String CameraDevice::getFileExtension()
{
    return ".mov";
}