The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1334 lines
59KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2020 - Raw Material Software Limited
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 6 End-User License
  8. Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
  9. End User License Agreement: www.juce.com/juce-6-licence
  10. Privacy Policy: www.juce.com/juce-privacy-policy
  11. Or: You may also use this code under the terms of the GPL v3 (see
  12. www.gnu.org/licenses).
  13. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  14. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  15. DISCLAIMED.
  16. ==============================================================================
  17. */
  18. #if (defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0)
  19. JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
  20. #define JUCE_DEPRECATION_IGNORED 1
  21. #endif
  22. struct CameraDevice::Pimpl
  23. {
  24. using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;
// Constructs the iOS camera pimpl. The device index and min/max width/height
// hints are ignored on this platform; quality is controlled solely by
// useHighQuality, which selects the capture-session preset.
Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
       int /*minWidth*/, int /*minHeight*/, int /*maxWidth*/, int /*maxHeight*/,
       bool useHighQuality)
    : owner (ownerToUse),
      cameraId (cameraIdToUse),
      captureSession (*this, useHighQuality)
{
}
// Returns the AVCaptureDevice unique ID this pimpl was created for.
String getCameraId() const noexcept { return cameraId; }
// Requests video + audio permissions and then starts the capture session for
// this pimpl's camera ID. The result is delivered asynchronously through
// cameraOpenCallback (invoked from the session code, not from here).
void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
{
    cameraOpenCallback = std::move (cameraOpenCallbackToUse);

    if (cameraOpenCallback == nullptr)
    {
        // A valid camera open callback must be passed.
        jassertfalse;
        return;
    }

    // Permission prompts are asynchronous; the session is started below
    // without waiting for the user's answer.
    [AVCaptureDevice requestAccessForMediaType: AVMediaTypeVideo
                             completionHandler: ^(BOOL granted)
     {
         // Access to video is required for camera to work,
         // black images will be produced otherwise!
         jassertquiet (granted);
     }];

    [AVCaptureDevice requestAccessForMediaType: AVMediaTypeAudio
                             completionHandler: ^(BOOL granted)
     {
         // Access to audio is required for camera to work,
         // silence will be produced otherwise!
         jassertquiet (granted);
     }];

    captureSession.startSessionForDeviceWithId (cameraId);
}
// True once the underlying capture session has successfully started running.
bool openedOk() const noexcept { return captureSession.openedOk(); }
// Stores the picture callback and triggers an asynchronous still capture.
// The callback fires later on the message thread with the captured image.
void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
{
    if (pictureTakenCallbackToUse == nullptr)
    {
        // A valid picture-taken callback must be passed.
        jassertfalse;
        return;
    }

    pictureTakenCallback = std::move (pictureTakenCallbackToUse);

    triggerStillPictureCapture();
}
// Begins video recording into the given file. The quality parameter is
// ignored on iOS. The target is deleted first because the iOS recorder
// refuses to overwrite an existing file (see CaptureSession::startRecording).
void startRecordingToFile (const File& file, int /*quality*/)
{
    file.deleteFile();
    captureSession.startRecording (file);
}
// Stops an in-progress video recording (no-op if none is running).
void stopRecording()
{
    captureSession.stopRecording();
}
// Returns the wall-clock time of the first frame of the current/last recording.
Time getTimeOfFirstRecordedFrame() const
{
    return captureSession.getTimeOfFirstRecordedFrame();
}
  83. static StringArray getAvailableDevices()
  84. {
  85. StringArray results;
  86. JUCE_CAMERA_LOG ("Available camera devices: ");
  87. for (AVCaptureDevice* device in getDevices())
  88. {
  89. JUCE_CAMERA_LOG ("Device start----------------------------------");
  90. printDebugCameraInfo (device);
  91. JUCE_CAMERA_LOG ("Device end----------------------------------");
  92. results.add (nsStringToJuce (device.uniqueID));
  93. }
  94. return results;
  95. }
// Registers a listener for captured frames. When the first listener arrives,
// a still capture is triggered — presumably to start the listener frame
// delivery cycle; NOTE(review): confirm against the rest of the file.
void addListener (CameraDevice::Listener* listenerToAdd)
{
    const ScopedLock sl (listenerLock);
    listeners.add (listenerToAdd);

    if (listeners.size() == 1)
        triggerStillPictureCapture();
}
// Unregisters a previously-added frame listener (thread-safe).
void removeListener (CameraDevice::Listener* listenerToRemove)
{
    const ScopedLock sl (listenerLock);
    listeners.remove (listenerToRemove);
}
  108. private:
// Returns the list of available video capture devices. On iOS 10+ the modern
// AVCaptureDeviceDiscoverySession API is used with a device-type list grown
// according to the runtime OS version; older systems fall back to the
// deprecated devicesWithMediaType: (deprecation warnings are suppressed at
// the top of this file).
static NSArray<AVCaptureDevice*>* getDevices()
{
   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
    if (iosVersion.major >= 10)
    {
        std::unique_ptr<NSMutableArray<AVCaptureDeviceType>, NSObjectDeleter> deviceTypes ([[NSMutableArray alloc] initWithCapacity: 2]);

        [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInWideAngleCamera];
        [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];

        // Dual camera type exists from iOS 10.2, TrueDepth from iOS 11.1.
        if ((iosVersion.major == 10 && iosVersion.minor >= 2) || iosVersion.major >= 11)
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInDualCamera];

        if ((iosVersion.major == 11 && iosVersion.minor >= 1) || iosVersion.major >= 12)
            [deviceTypes.get() addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];

        auto discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: deviceTypes.get()
                                                                                       mediaType: AVMediaTypeVideo
                                                                                        position: AVCaptureDevicePositionUnspecified];
        return [discoverySession devices];
    }
   #endif

    return [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
}
  129. //==============================================================================
// Dumps a device's capabilities through JUCE_CAMERA_LOG (a no-op unless
// camera logging is enabled). Purely diagnostic — no side effects on the
// device. Newer properties are gated on SDK availability + runtime version.
static void printDebugCameraInfo (AVCaptureDevice* device)
{
    auto position = device.position;

    String positionString = position == AVCaptureDevicePositionBack
                          ? "Back"
                          : position == AVCaptureDevicePositionFront
                          ? "Front"
                          : "Unspecified";

    JUCE_CAMERA_LOG ("Position: " + positionString);
    JUCE_CAMERA_LOG ("Model ID: " + nsStringToJuce (device.modelID));
    JUCE_CAMERA_LOG ("Localized name: " + nsStringToJuce (device.localizedName));
    JUCE_CAMERA_LOG ("Unique ID: " + nsStringToJuce (device.uniqueID));
    JUCE_CAMERA_LOG ("Lens aperture: " + String (device.lensAperture));

    JUCE_CAMERA_LOG ("Has flash: " + String ((int)device.hasFlash));
    JUCE_CAMERA_LOG ("Supports flash always on: " + String ((int)[device isFlashModeSupported: AVCaptureFlashModeOn]));
    JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int)[device isFlashModeSupported: AVCaptureFlashModeAuto]));

    JUCE_CAMERA_LOG ("Has torch: " + String ((int)device.hasTorch));
    JUCE_CAMERA_LOG ("Supports torch always on: " + String ((int)[device isTorchModeSupported: AVCaptureTorchModeOn]));
    JUCE_CAMERA_LOG ("Supports auto torch: " + String ((int)[device isTorchModeSupported: AVCaptureTorchModeAuto]));

    JUCE_CAMERA_LOG ("Low light boost supported: " + String ((int)device.lowLightBoostEnabled));

    JUCE_CAMERA_LOG ("Supports auto white balance: " + String ((int)[device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeAutoWhiteBalance]));
    JUCE_CAMERA_LOG ("Supports continuous auto white balance: " + String ((int)[device isWhiteBalanceModeSupported: AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]));

    JUCE_CAMERA_LOG ("Supports auto focus: " + String ((int)[device isFocusModeSupported: AVCaptureFocusModeAutoFocus]));
    JUCE_CAMERA_LOG ("Supports continuous auto focus: " + String ((int)[device isFocusModeSupported: AVCaptureFocusModeContinuousAutoFocus]));
    JUCE_CAMERA_LOG ("Supports point of interest focus: " + String ((int)device.focusPointOfInterestSupported));
    JUCE_CAMERA_LOG ("Smooth auto focus supported: " + String ((int)device.smoothAutoFocusSupported));
    JUCE_CAMERA_LOG ("Auto focus range restriction supported: " + String ((int)device.autoFocusRangeRestrictionSupported));

    JUCE_CAMERA_LOG ("Supports auto exposure: " + String ((int)[device isExposureModeSupported: AVCaptureExposureModeAutoExpose]));
    JUCE_CAMERA_LOG ("Supports continuous auto exposure: " + String ((int)[device isExposureModeSupported: AVCaptureExposureModeContinuousAutoExposure]));
    JUCE_CAMERA_LOG ("Supports custom exposure: " + String ((int)[device isExposureModeSupported: AVCaptureExposureModeCustom]));
    JUCE_CAMERA_LOG ("Supports point of interest exposure: " + String ((int)device.exposurePointOfInterestSupported));

   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
    if (iosVersion.major >= 10)
    {
        JUCE_CAMERA_LOG ("Device type: " + nsStringToJuce (device.deviceType));
        JUCE_CAMERA_LOG ("Locking focus with custom lens position supported: " + String ((int)device.lockingFocusWithCustomLensPositionSupported));
    }
   #endif

   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
    if (iosVersion.major >= 11)
    {
        JUCE_CAMERA_LOG ("Min available video zoom factor: " + String (device.minAvailableVideoZoomFactor));
        JUCE_CAMERA_LOG ("Max available video zoom factor: " + String (device.maxAvailableVideoZoomFactor));
        JUCE_CAMERA_LOG ("Dual camera switch over video zoom factor: " + String (device.dualCameraSwitchOverVideoZoomFactor));
    }
   #endif

    JUCE_CAMERA_LOG ("Capture formats start-------------------");
    for (AVCaptureDeviceFormat* format in device.formats)
    {
        JUCE_CAMERA_LOG ("Capture format start------");
        printDebugCameraFormatInfo (format);
        JUCE_CAMERA_LOG ("Capture format end------");
    }
    JUCE_CAMERA_LOG ("Capture formats end-------------------");
}
// Logs the capabilities of a single AVCaptureDeviceFormat. Diagnostic only.
static void printDebugCameraFormatInfo (AVCaptureDeviceFormat* format)
{
    JUCE_CAMERA_LOG ("Media type: " + nsStringToJuce (format.mediaType));

    String colourSpaces;

    for (NSNumber* number in format.supportedColorSpaces)
    {
        switch ([number intValue])
        {
            case AVCaptureColorSpace_sRGB:   colourSpaces << "sRGB ";   break;
            case AVCaptureColorSpace_P3_D65: colourSpaces << "P3_D65 "; break;
            default: break;
        }
    }

    JUCE_CAMERA_LOG ("Supported colour spaces: " + colourSpaces);

    JUCE_CAMERA_LOG ("Video field of view: " + String (format.videoFieldOfView));
    JUCE_CAMERA_LOG ("Video max zoom factor: " + String (format.videoMaxZoomFactor));
    JUCE_CAMERA_LOG ("Video zoom factor upscale threshold: " + String (format.videoZoomFactorUpscaleThreshold));

    String videoFrameRateRangesString = "Video supported frame rate ranges: ";

    for (AVFrameRateRange* range in format.videoSupportedFrameRateRanges)
        videoFrameRateRangesString << frameRateRangeToString (range);
    JUCE_CAMERA_LOG (videoFrameRateRangesString);

    JUCE_CAMERA_LOG ("Video binned: " + String (int (format.videoBinned)));
    JUCE_CAMERA_LOG ("Video HDR supported: " + String (int (format.videoHDRSupported)));
    JUCE_CAMERA_LOG ("High resolution still image dimensions: " + getHighResStillImgDimensionsString (format.highResolutionStillImageDimensions));
    JUCE_CAMERA_LOG ("Min ISO: " + String (format.minISO));
    JUCE_CAMERA_LOG ("Max ISO: " + String (format.maxISO));
    JUCE_CAMERA_LOG ("Min exposure duration: " + cmTimeToString (format.minExposureDuration));

    String autoFocusSystemString;

    switch (format.autoFocusSystem)
    {
        case AVCaptureAutoFocusSystemPhaseDetection:    autoFocusSystemString = "PhaseDetection";    break;
        case AVCaptureAutoFocusSystemContrastDetection: autoFocusSystemString = "ContrastDetection"; break;
        case AVCaptureAutoFocusSystemNone:
        default:                                        autoFocusSystemString = "None";
    }
    JUCE_CAMERA_LOG ("Auto focus system: " + autoFocusSystemString);

    JUCE_CAMERA_LOG ("Standard (iOS 5.0) video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeStandard]));
    JUCE_CAMERA_LOG ("Cinematic video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeCinematic]));
    JUCE_CAMERA_LOG ("Auto video stabilization supported: " + String ((int) [format isVideoStabilizationModeSupported: AVCaptureVideoStabilizationModeAuto]));

   #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
    if (iosVersion.major >= 11)
    {
        JUCE_CAMERA_LOG ("Min zoom factor for depth data delivery: " + String (format.videoMinZoomFactorForDepthDataDelivery));
        JUCE_CAMERA_LOG ("Max zoom factor for depth data delivery: " + String (format.videoMaxZoomFactorForDepthDataDelivery));
    }
   #endif
}
  232. static String getHighResStillImgDimensionsString (CMVideoDimensions d)
  233. {
  234. return "[" + String (d.width) + " " + String (d.height) + "]";
  235. }
// Converts a CMTime to its CoreMedia textual description. The CFUniquePtr
// takes ownership of the string returned by CMTimeCopyDescription, so no
// manual CFRelease is needed.
static String cmTimeToString (CMTime time)
{
    CFUniquePtr<CFStringRef> timeDesc (CMTimeCopyDescription (nullptr, time));
    return String::fromCFString (timeDesc.get());
}
  241. static String frameRateRangeToString (AVFrameRateRange* range)
  242. {
  243. String result;
  244. result << "[minFrameDuration: " + cmTimeToString (range.minFrameDuration);
  245. result << " maxFrameDuration: " + cmTimeToString (range.maxFrameDuration);
  246. result << " minFrameRate: " + String (range.minFrameRate);
  247. result << " maxFrameRate: " + String (range.maxFrameRate) << "] ";
  248. return result;
  249. }
  250. //==============================================================================
  251. class CaptureSession
  252. {
  253. public:
// Creates the AVCaptureSession plus its serial dispatch queue, registers an
// Obj-C delegate object for the session lifecycle notifications, and sets the
// quality preset asynchronously on the session queue. numCaptureSessions is
// a shared counter used by the destructor to decide when to tear down.
CaptureSession (Pimpl& ownerToUse, bool useHighQuality)
    : owner (ownerToUse),
      captureSessionQueue (dispatch_queue_create ("JuceCameraDeviceBackgroundDispatchQueue", DISPATCH_QUEUE_SERIAL)),
      captureSession ([[AVCaptureSession alloc] init]),
      delegate (nullptr),
      stillPictureTaker (*this),
      videoRecorder (*this)
{
    // The delegate class is registered with the Obj-C runtime once; each
    // session gets its own instance with "owner" pointing back at us.
    static SessionDelegateClass cls;
    delegate.reset ([cls.createInstance() init]);
    SessionDelegateClass::setOwner (delegate.get(), this);

    JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
    [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                             selector: @selector (sessionDidStartRunning:)
                                                 name: AVCaptureSessionDidStartRunningNotification
                                               object: captureSession.get()];

    [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                             selector: @selector (sessionDidStopRunning:)
                                                 name: AVCaptureSessionDidStopRunningNotification
                                               object: captureSession.get()];

    [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                             selector: @selector (runtimeError:)
                                                 name: AVCaptureSessionRuntimeErrorNotification
                                               object: captureSession.get()];

    [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                             selector: @selector (sessionWasInterrupted:)
                                                 name: AVCaptureSessionWasInterruptedNotification
                                               object: captureSession.get()];

    [[NSNotificationCenter defaultCenter] addObserver: delegate.get()
                                             selector: @selector (sessionInterruptionEnded:)
                                                 name: AVCaptureSessionInterruptionEndedNotification
                                               object: captureSession.get()];
    JUCE_END_IGNORE_WARNINGS_GCC_LIKE

    // All session configuration happens on the dedicated serial queue.
    dispatch_async (captureSessionQueue,^
    {
        [captureSession.get() setSessionPreset: useHighQuality ? AVCaptureSessionPresetHigh
                                                               : AVCaptureSessionPresetMedium];
    });

    ++numCaptureSessions;
}
// Unregisters notifications, stops recording, and — only when this is the
// last live CaptureSession — stops the AVCaptureSession on its queue,
// blocking until the stop has completed.
// NOTE(review): stopRunning is only reached when numCaptureSessions drops to
// zero; earlier-destroyed sessions appear to leave their AVCaptureSession
// running — confirm this is intentional upstream.
~CaptureSession()
{
    [[NSNotificationCenter defaultCenter] removeObserver: delegate.get()];

    stopRecording();

    if (--numCaptureSessions == 0)
    {
        dispatch_async (captureSessionQueue, ^
        {
            if (captureSession.get().running)
                [captureSession.get() stopRunning];

            sessionClosedEvent.signal();
        });

        // Block until the async stop has actually run.
        sessionClosedEvent.wait (-1);
    }
}
// True once the sessionDidStartRunning notification has been received.
bool openedOk() const noexcept { return sessionStarted; }
// Asynchronously (on the session queue) attaches the camera with the given
// unique ID plus the default audio device to the session, then starts it.
// Input-attachment failures are reported to the owner's cameraOpenCallback
// on the message thread via a WeakReference guard.
// NOTE(review): both early-return error paths exit between
// beginConfiguration and commitConfiguration, leaving the configuration
// transaction open — confirm this matches upstream intent.
void startSessionForDeviceWithId (const String& cameraIdToUse)
{
    dispatch_async (captureSessionQueue,^
    {
        cameraDevice = [AVCaptureDevice deviceWithUniqueID: juceStringToNS (cameraIdToUse)];
        auto audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];

        [captureSession.get() beginConfiguration];

        // This will add just video...
        auto error = addInputToDevice (cameraDevice);

        if (error.isNotEmpty())
        {
            MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
            {
                if (weakRef != nullptr)
                    weakRef->owner.cameraOpenCallback ({}, error);
            });

            return;
        }

        // ... so add audio explicitly here
        error = addInputToDevice (audioDevice);

        if (error.isNotEmpty())
        {
            MessageManager::callAsync ([weakRef = WeakReference<CaptureSession> { this }, error]() mutable
            {
                if (weakRef != nullptr)
                    weakRef->owner.cameraOpenCallback ({}, error);
            });

            return;
        }

        [captureSession.get() commitConfiguration];

        if (! captureSession.get().running)
            [captureSession.get() startRunning];
    });
}
// Creates (and caches in previewLayer) a preview layer bound to the running
// session. Returns nullptr if the session has not been started.
AVCaptureVideoPreviewLayer* createPreviewLayer()
{
    if (! openedOk())
    {
        // A session must be started first!
        jassertfalse;
        return nullptr;
    }

    previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: captureSession.get()];
    return previewLayer;
}
  355. void takeStillPicture()
  356. {
  357. if (! openedOk())
  358. {
  359. // A session must be started first!
  360. jassert (openedOk());
  361. return;
  362. }
  363. stillPictureTaker.takePicture (previewLayer.connection.videoOrientation);
  364. }
// Starts video recording into the given file, using the preview layer's
// current orientation. The session must be started and the file must not
// already exist (iOS cannot overwrite; the Pimpl deletes it beforehand).
void startRecording (const File& file)
{
    if (! openedOk())
    {
        // A session must be started first!
        jassertfalse;
        return;
    }

    if (file.existsAsFile())
    {
        // File overwriting is not supported by iOS video recorder, the target
        // file must not exist.
        jassertfalse;
        return;
    }

    videoRecorder.startRecording (file, previewLayer.connection.videoOrientation);
}
// Stops the video recorder (safe to call when not recording).
void stopRecording()
{
    videoRecorder.stopRecording();
}
// Forwards to the video recorder's first-frame timestamp.
Time getTimeOfFirstRecordedFrame() const
{
    return videoRecorder.getTimeOfFirstRecordedFrame();
}
  390. JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)
  391. private:
  392. String addInputToDevice (AVCaptureDevice* device)
  393. {
  394. NSError* error = nil;
  395. auto input = [AVCaptureDeviceInput deviceInputWithDevice: device
  396. error: &error];
  397. if (error != nil)
  398. return nsStringToJuce (error.localizedDescription);
  399. if (! [captureSession.get() canAddInput: input])
  400. return "Could not add input to camera session.";
  401. [captureSession.get() addInput: input];
  402. return {};
  403. }
  404. //==============================================================================
// Obj-C class (built at runtime via ObjCClass) that receives the
// AVCaptureSession lifecycle notifications registered in the CaptureSession
// constructor and forwards them to the owning CaptureSession. The owner is
// stored in an ivar so the static callbacks can find it from `self`.
struct SessionDelegateClass : public ObjCClass<NSObject>
{
    SessionDelegateClass() : ObjCClass<NSObject> ("SessionDelegateClass_")
    {
        JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wundeclared-selector")
        addMethod (@selector (sessionDidStartRunning:),   started,           "v@:@");
        addMethod (@selector (sessionDidStopRunning:),    stopped,           "v@:@");
        addMethod (@selector (runtimeError:),             runtimeError,      "v@:@");
        addMethod (@selector (sessionWasInterrupted:),    interrupted,       "v@:@");
        addMethod (@selector (sessionInterruptionEnded:), interruptionEnded, "v@:@");
        JUCE_END_IGNORE_WARNINGS_GCC_LIKE

        addIvar<CaptureSession*> ("owner");

        registerClass();
    }

    //==============================================================================
    // Reads/writes the CaptureSession pointer stored on the delegate instance.
    static CaptureSession& getOwner (id self)                { return *getIvar<CaptureSession*> (self, "owner"); }
    static void setOwner (id self, CaptureSession* s)        { object_setInstanceVariable (self, "owner", s); }

private:
    //==============================================================================
    // Session started: notify the owner on the main queue.
    static void started (id self, SEL, NSNotification* notification)
    {
        JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

        ignoreUnused (notification);

        dispatch_async (dispatch_get_main_queue(),
                        ^{
                            getOwner (self).cameraSessionStarted();
                        });
    }

    // Session stopped: currently log-only.
    static void stopped (id, SEL, NSNotification* notification)
    {
        JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

        ignoreUnused (notification);
    }

    // Runtime error: extract the NSError from the notification and forward
    // its description to the owner on the main queue.
    static void runtimeError (id self, SEL, NSNotification* notification)
    {
        JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

        dispatch_async (dispatch_get_main_queue(),
                        ^{
                            NSError* error = notification.userInfo[AVCaptureSessionErrorKey];
                            auto errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                            getOwner (self).cameraSessionRuntimeError (errorString);
                        });
    }

    // Interruption began (e.g. phone call): currently log-only.
    static void interrupted (id, SEL, NSNotification* notification)
    {
        JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

        ignoreUnused (notification);
    }

    // Interruption ended: currently log-only.
    static void interruptionEnded (id, SEL, NSNotification* notification)
    {
        JUCE_CAMERA_LOG (nsStringToJuce ([notification description]));

        ignoreUnused (notification);
    }
};
  459. //==============================================================================
  460. class StillPictureTaker
  461. {
  462. public:
// Creates the photo output (AVCapturePhotoOutput on iOS 10+, otherwise the
// legacy AVCaptureStillImageOutput), the matching delegate where needed,
// and attaches the output to the capture session.
StillPictureTaker (CaptureSession& cs)
    : captureSession (cs),
      captureOutput (createCaptureOutput()),
      photoOutputDelegate (nullptr)
{
    if (Pimpl::getIOSVersion().major >= 10)
    {
        // Runtime-registered delegate class, shared across instances.
        static PhotoOutputDelegateClass cls;
        photoOutputDelegate.reset ([cls.createInstance() init]);
        PhotoOutputDelegateClass::setOwner (photoOutputDelegate.get(), this);
    }

    captureSession.addOutputIfPossible (captureOutput);
}
// Captures one still image in the given orientation. Re-entrant calls while
// a capture is pending are rejected. On iOS 10+ the modern photo-output path
// is used (its delegate resets takingPicture); otherwise the legacy
// still-image output path runs, resetting takingPicture in its completion
// handler and notifying listeners + the picture callback.
void takePicture (AVCaptureVideoOrientation orientationToUse)
{
    if (takingPicture)
    {
        // Picture taking already in progress!
        jassertfalse;
        return;
    }

    takingPicture = true;

    printImageOutputDebugInfo (captureOutput);

    if (auto* connection = findVideoConnection (captureOutput))
    {
       #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
        if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
        {
            auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;
            auto outputConnection = [photoOutput connectionWithMediaType: AVMediaTypeVideo];
            outputConnection.videoOrientation = orientationToUse;

            [photoOutput capturePhotoWithSettings: [AVCapturePhotoSettings photoSettings]
                                         delegate: id<AVCapturePhotoCaptureDelegate> (photoOutputDelegate.get())];
            return;
        }
       #endif

        // Legacy (pre-iOS-10) capture path.
        auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;
        auto outputConnection = [stillImageOutput connectionWithMediaType: AVMediaTypeVideo];
        outputConnection.videoOrientation = orientationToUse;

        [stillImageOutput captureStillImageAsynchronouslyFromConnection: connection completionHandler:
             ^(CMSampleBufferRef imageSampleBuffer, NSError* error)
             {
                 takingPicture = false;

                 if (error != nil)
                 {
                     JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                     jassertfalse;
                     return;
                 }

                 NSData* imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation: imageSampleBuffer];

                 auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                 callListeners (image);

                 MessageManager::callAsync ([this, image] { notifyPictureTaken (image); });
             }];
    }
    else
    {
        // Could not find a connection of video type
        jassertfalse;
    }
}
  524. private:
// Picks the photo output class for the runtime OS: AVCapturePhotoOutput on
// iOS 10+ (when the SDK supports it), otherwise the deprecated
// AVCaptureStillImageOutput.
static AVCaptureOutput* createCaptureOutput()
{
   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
    if (Pimpl::getIOSVersion().major >= 10)
        return [AVCapturePhotoOutput new];
   #endif

    return [AVCaptureStillImageOutput new];
}
// Logs the capabilities of the capture output (modern or legacy variant)
// through JUCE_CAMERA_LOG. Diagnostic only; no side effects.
static void printImageOutputDebugInfo (AVCaptureOutput* captureOutput)
{
   #if defined (__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0
    if (Pimpl::getIOSVersion().major >= 10 && [captureOutput isKindOfClass: [AVCapturePhotoOutput class]])
    {
        auto* photoOutput = (AVCapturePhotoOutput*) captureOutput;

        String typesString;

        for (AVVideoCodecType type in photoOutput.availablePhotoCodecTypes)
            typesString << nsStringToJuce (type) << " ";

        JUCE_CAMERA_LOG ("Available image codec types: " + typesString);

        JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) photoOutput.stillImageStabilizationSupported));
        JUCE_CAMERA_LOG ("Dual camera fusion supported: " + String ((int) photoOutput.dualCameraFusionSupported));
        JUCE_CAMERA_LOG ("Supports flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeOn)]));
        JUCE_CAMERA_LOG ("Supports auto flash: " + String ((int) [photoOutput.supportedFlashModes containsObject: @(AVCaptureFlashModeAuto)]));
        JUCE_CAMERA_LOG ("Max bracketed photo count: " + String (photoOutput.maxBracketedCapturePhotoCount));
        JUCE_CAMERA_LOG ("Lens stabilization during bracketed capture supported: " + String ((int) photoOutput.lensStabilizationDuringBracketedCaptureSupported));
        JUCE_CAMERA_LOG ("Live photo capture supported: " + String ((int) photoOutput.livePhotoCaptureSupported));

       #if defined (__IPHONE_11_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0
        if (Pimpl::getIOSVersion().major >= 11)
        {
            typesString.clear();

            for (AVFileType type in photoOutput.availablePhotoFileTypes)
                typesString << nsStringToJuce (type) << " ";

            JUCE_CAMERA_LOG ("Available photo file types: " + typesString);

            typesString.clear();

            for (AVFileType type in photoOutput.availableRawPhotoFileTypes)
                typesString << nsStringToJuce (type) << " ";

            JUCE_CAMERA_LOG ("Available RAW photo file types: " + typesString);

            typesString.clear();

            for (AVFileType type in photoOutput.availableLivePhotoVideoCodecTypes)
                typesString << nsStringToJuce (type) << " ";

            JUCE_CAMERA_LOG ("Available live photo video codec types: " + typesString);

            JUCE_CAMERA_LOG ("Dual camera dual photo delivery supported: " + String ((int) photoOutput.dualCameraDualPhotoDeliverySupported));
            JUCE_CAMERA_LOG ("Camera calibration data delivery supported: " + String ((int) photoOutput.cameraCalibrationDataDeliverySupported));
            JUCE_CAMERA_LOG ("Depth data delivery supported: " + String ((int) photoOutput.depthDataDeliverySupported));
        }
       #endif

        return;
    }
   #endif

    // Legacy output variant.
    auto* stillImageOutput = (AVCaptureStillImageOutput*) captureOutput;

    String typesString;

    for (AVVideoCodecType type in stillImageOutput.availableImageDataCodecTypes)
        typesString << nsStringToJuce (type) << " ";

    JUCE_CAMERA_LOG ("Available image codec types: " + typesString);
    JUCE_CAMERA_LOG ("Still image stabilization supported: " + String ((int) stillImageOutput.stillImageStabilizationSupported));
    JUCE_CAMERA_LOG ("Automatically enables still image stabilization when available: " + String ((int) stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable));
    JUCE_CAMERA_LOG ("Output settings for image output: " + nsStringToJuce ([stillImageOutput.outputSettings description]));
}
  582. //==============================================================================
  583. static AVCaptureConnection* findVideoConnection (AVCaptureOutput* output)
  584. {
  585. for (AVCaptureConnection* connection in output.connections)
  586. for (AVCaptureInputPort* port in connection.inputPorts)
  587. if ([port.mediaType isEqual: AVMediaTypeVideo])
  588. return connection;
  589. return nullptr;
  590. }
  591. //==============================================================================
  592. class PhotoOutputDelegateClass : public ObjCClass<NSObject>
  593. {
  594. public:
// Registers the runtime Obj-C delegate class for AVCapturePhotoOutput
// callbacks. On iOS 11+ the AVCapturePhoto-based callback is used; older
// systems get the sample-buffer variant. The owning StillPictureTaker is
// stored in an ivar.
PhotoOutputDelegateClass() : ObjCClass<NSObject> ("PhotoOutputDelegateClass_")
{
    addMethod (@selector (captureOutput:willBeginCaptureForResolvedSettings:),       willBeginCaptureForSettings, "v@:@@");
    addMethod (@selector (captureOutput:willCapturePhotoForResolvedSettings:),       willCaptureForSettings,      "v@:@@");
    addMethod (@selector (captureOutput:didCapturePhotoForResolvedSettings:),        didCaptureForSettings,       "v@:@@");
    addMethod (@selector (captureOutput:didFinishCaptureForResolvedSettings:error:), didFinishCaptureForSettings, "v@:@@@");

    if (Pimpl::getIOSVersion().major >= 11)
        addMethod (@selector (captureOutput:didFinishProcessingPhoto:error:), didFinishProcessingPhoto, "v@:@@@");
    else
        addMethod (@selector (captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), didFinishProcessingPhotoSampleBuffer, "v@:@@@@@@");

    addIvar<StillPictureTaker*> ("owner");

    registerClass();
}
  608. //==============================================================================
// Reads/writes the StillPictureTaker pointer stored on the delegate instance.
static StillPictureTaker& getOwner (id self) { return *getIvar<StillPictureTaker*> (self, "owner"); }
static void setOwner (id self, StillPictureTaker* t) { object_setInstanceVariable (self, "owner", t); }
  611. private:
// Capture is about to begin: log-only.
static void willBeginCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
{
    JUCE_CAMERA_LOG ("willBeginCaptureForSettings()");
}
// Photo is about to be captured: log-only.
static void willCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
{
    JUCE_CAMERA_LOG ("willCaptureForSettings()");
}
// Photo was captured (processing may still follow): log-only.
static void didCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*)
{
    JUCE_CAMERA_LOG ("didCaptureForSettings()");
}
// Whole capture finished: log the outcome (errorString is unused when
// JUCE_CAMERA_LOG compiles to nothing, hence the ignoreUnused).
static void didFinishCaptureForSettings (id, SEL, AVCapturePhotoOutput*, AVCaptureResolvedPhotoSettings*, NSError* error)
{
    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
    ignoreUnused (errorString);

    JUCE_CAMERA_LOG ("didFinishCaptureForSettings(), error = " + errorString);
}
// iOS 11+ completion: clears the in-progress flag, bails out on error, then
// rotates the captured CGImage according to the EXIF orientation in the
// photo metadata, re-encodes it as JPEG, loads it into a juce::Image, and
// notifies listeners (synchronously) and the picture callback (on the
// message thread).
static void didFinishProcessingPhoto (id self, SEL, AVCapturePhotoOutput*, AVCapturePhoto* capturePhoto, NSError* error)
{
    getOwner (self).takingPicture = false;

    String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
    ignoreUnused (errorString);

    JUCE_CAMERA_LOG ("didFinishProcessingPhoto(), error = " + errorString);

    if (error != nil)
    {
        JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
        jassertfalse;
        return;
    }

    auto* imageOrientation = (NSNumber *) capturePhoto.metadata[(NSString*) kCGImagePropertyOrientation];
    auto* uiImage = getImageWithCorrectOrientation ((CGImagePropertyOrientation) imageOrientation.unsignedIntValue,
                                                   [capturePhoto CGImageRepresentation]);

    // NOTE(review): 0.f is the lowest JPEG compression quality — confirm
    // this is intentional rather than a placeholder for 1.0f.
    auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);

    auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

    getOwner (self).callListeners (image);

    MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
}
            // Renders the given CGImage into a new UIImage, applying the rotation/flip
            // needed to compensate for the given EXIF orientation so the result is
            // upright. Mirrored orientations are not implemented (asserts in debug).
            // NB: the order of the CTM scale/rotate/translate calls in each case is
            // significant — do not reorder.
            static UIImage* getImageWithCorrectOrientation (CGImagePropertyOrientation imageOrientation,
                                                            CGImageRef imageData)
            {
                auto origWidth  = CGImageGetWidth (imageData);
                auto origHeight = CGImageGetHeight (imageData);

                // Target dimensions: width/height swapped for quarter-turn orientations.
                auto targetSize = getTargetImageDimensionFor (imageOrientation, imageData);

                UIGraphicsBeginImageContext (targetSize);
                CGContextRef context = UIGraphicsGetCurrentContext();

                switch (imageOrientation)
                {
                    case kCGImagePropertyOrientationUp:
                        // Flip vertically to counter CoreGraphics' bottom-left origin.
                        CGContextScaleCTM (context, 1.0, -1.0);
                        CGContextTranslateCTM (context, 0.0, -targetSize.height);
                        break;

                    case kCGImagePropertyOrientationRight:
                        CGContextRotateCTM (context, 90 * MathConstants<CGFloat>::pi / 180);
                        CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                        break;

                    case kCGImagePropertyOrientationDown:
                        CGContextTranslateCTM (context, targetSize.width, 0.0);
                        CGContextScaleCTM (context, -1.0, 1.0);
                        break;

                    case kCGImagePropertyOrientationLeft:
                        CGContextRotateCTM (context, -90 * MathConstants<CGFloat>::pi / 180);
                        CGContextScaleCTM (context, targetSize.height / origHeight, -targetSize.width / origWidth);
                        CGContextTranslateCTM (context, -targetSize.width, -targetSize.height);
                        break;

                    case kCGImagePropertyOrientationUpMirrored:
                    case kCGImagePropertyOrientationDownMirrored:
                    case kCGImagePropertyOrientationLeftMirrored:
                    case kCGImagePropertyOrientationRightMirrored:
                    default:
                        // Not implemented.
                        jassertfalse;
                        break;
                }

                CGContextDrawImage (context, CGRectMake (0, 0, targetSize.width, targetSize.height), imageData);

                UIImage* correctedImage = UIGraphicsGetImageFromCurrentImageContext();
                UIGraphicsEndImageContext();

                return correctedImage;
            }
  691. static CGSize getTargetImageDimensionFor (CGImagePropertyOrientation imageOrientation,
  692. CGImageRef imageData)
  693. {
  694. auto width = CGImageGetWidth (imageData);
  695. auto height = CGImageGetHeight (imageData);
  696. switch (imageOrientation)
  697. {
  698. case kCGImagePropertyOrientationUp:
  699. case kCGImagePropertyOrientationUpMirrored:
  700. case kCGImagePropertyOrientationDown:
  701. case kCGImagePropertyOrientationDownMirrored:
  702. return CGSizeMake ((CGFloat) width, (CGFloat) height);
  703. case kCGImagePropertyOrientationRight:
  704. case kCGImagePropertyOrientationRightMirrored:
  705. case kCGImagePropertyOrientationLeft:
  706. case kCGImagePropertyOrientationLeftMirrored:
  707. return CGSizeMake ((CGFloat) height, (CGFloat) width);
  708. }
  709. jassertfalse;
  710. return CGSizeMake ((CGFloat) width, (CGFloat) height);
  711. }
            // AVCapturePhotoCaptureDelegate callback used on iOS versions before 11,
            // where the photo arrives as a JPEG sample buffer. This API is deprecated
            // from iOS 11 (hence the deprecation-warning suppression at the top of the
            // file). Mirrors didFinishProcessingPhoto(): convert, call listeners, then
            // notify the picture-taken callback on the message thread.
            static void didFinishProcessingPhotoSampleBuffer (id self, SEL, AVCapturePhotoOutput*,
                                                              CMSampleBufferRef imageBuffer, CMSampleBufferRef imagePreviewBuffer,
                                                              AVCaptureResolvedPhotoSettings*, AVCaptureBracketedStillImageSettings*,
                                                              NSError* error)
            {
                getOwner (self).takingPicture = false;

                String errorString = error != nil ? nsStringToJuce (error.localizedDescription) : String();
                ignoreUnused (errorString);
                JUCE_CAMERA_LOG ("didFinishProcessingPhotoSampleBuffer(), error = " + errorString);

                if (error != nil)
                {
                    JUCE_CAMERA_LOG ("Still picture capture failed, error: " + nsStringToJuce (error.localizedDescription));
                    jassertfalse;
                    return;
                }

                // Decode via UIImage first so its imageOrientation property can be used
                // to bake the correct rotation into the final JPEG.
                NSData* origImageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer: imageBuffer previewPhotoSampleBuffer: imagePreviewBuffer];
                auto origImage = [UIImage imageWithData: origImageData];
                auto imageOrientation = uiImageOrientationToCGImageOrientation (origImage.imageOrientation);

                auto* uiImage = getImageWithCorrectOrientation (imageOrientation, origImage.CGImage);

                auto* imageData = UIImageJPEGRepresentation (uiImage, 0.f);
                auto image = ImageFileFormat::loadFrom (imageData.bytes, (size_t) imageData.length);

                getOwner (self).callListeners (image);

                MessageManager::callAsync ([self, image]() { getOwner (self).notifyPictureTaken (image); });
            }
  736. static CGImagePropertyOrientation uiImageOrientationToCGImageOrientation (UIImageOrientation orientation)
  737. {
  738. switch (orientation)
  739. {
  740. case UIImageOrientationUp: return kCGImagePropertyOrientationUp;
  741. case UIImageOrientationDown: return kCGImagePropertyOrientationDown;
  742. case UIImageOrientationLeft: return kCGImagePropertyOrientationLeft;
  743. case UIImageOrientationRight: return kCGImagePropertyOrientationRight;
  744. case UIImageOrientationUpMirrored: return kCGImagePropertyOrientationUpMirrored;
  745. case UIImageOrientationDownMirrored: return kCGImagePropertyOrientationDownMirrored;
  746. case UIImageOrientationLeftMirrored: return kCGImagePropertyOrientationLeftMirrored;
  747. case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
  748. }
  749. }
  750. };
  751. //==============================================================================
        // Forwards a freshly captured image to the capture session's listeners.
        void callListeners (const Image& image)
        {
            captureSession.callListeners (image);
        }

        // Forwards the picture-taken notification to the capture session.
        void notifyPictureTaken (const Image& image)
        {
            captureSession.notifyPictureTaken (image);
        }
  760. CaptureSession& captureSession;
  761. AVCaptureOutput* captureOutput;
  762. std::unique_ptr<NSObject, NSObjectDeleter> photoOutputDelegate;
  763. bool takingPicture = false;
  764. };
  765. //==============================================================================
  766. // NB: FileOutputRecordingDelegateClass callbacks can be called from any thread (incl.
  767. // the message thread), so waiting for an event when stopping recording is not an
  768. // option and VideoRecorder must be alive at all times in order to get stopped
  769. // recording callback.
        class VideoRecorder
        {
        public:
            // Creates the movie-file output and its Obj-C recording delegate, and
            // attaches the output to the given capture session.
            VideoRecorder (CaptureSession& session)
                : movieFileOutput ([AVCaptureMovieFileOutput new]),
                  delegate (nullptr)
            {
                // The delegate class is registered with the Obj-C runtime only once.
                static FileOutputRecordingDelegateClass cls;
                delegate.reset ([cls.createInstance() init]);
                FileOutputRecordingDelegateClass::setOwner (delegate.get(), this);

                session.addOutputIfPossible (movieFileOutput);
            }

            ~VideoRecorder()
            {
                stopRecording();

                // Shutting down a device while recording will stop the recording
                // abruptly and the recording will be lost.
                jassert (! recordingInProgress);
            }

            // Starts an asynchronous recording to the given file, forcing the video
            // connection into the given orientation first.
            void startRecording (const File& file, AVCaptureVideoOrientation orientationToUse)
            {
                // availableVideoCodecTypes is only available from iOS 10 onwards.
                if (Pimpl::getIOSVersion().major >= 10)
                    printVideoOutputDebugInfo (movieFileOutput);

                auto url = [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())
                                      isDirectory: NO];

                auto outputConnection = [movieFileOutput connectionWithMediaType: AVMediaTypeVideo];
                outputConnection.videoOrientation = orientationToUse;

                [movieFileOutput startRecordingToOutputFileURL: url recordingDelegate: delegate.get()];
            }

            void stopRecording()
            {
                [movieFileOutput stopRecording];
            }

            // Returns the wall-clock time at which the current/last recording started,
            // as recorded by the delegate's "started" callback.
            Time getTimeOfFirstRecordedFrame() const
            {
                return Time (firstRecordedFrameTimeMs.get());
            }

        private:
            // Logs available codecs and per-connection output settings; compiles to
            // nothing when JUCE_CAMERA_LOG_ENABLED is off.
            static void printVideoOutputDebugInfo (AVCaptureMovieFileOutput* output)
            {
                ignoreUnused (output);

                JUCE_CAMERA_LOG ("Available video codec types:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVVideoCodecType type in output.availableVideoCodecTypes)
                    JUCE_CAMERA_LOG (nsStringToJuce (type));
               #endif

                JUCE_CAMERA_LOG ("Output settings per video connection:");

               #if JUCE_CAMERA_LOG_ENABLED
                for (AVCaptureConnection* connection in output.connections)
                    JUCE_CAMERA_LOG (nsStringToJuce ([[output outputSettingsForConnection: connection] description]));
               #endif
            }

            //==============================================================================
            // Obj-C delegate receiving AVCaptureFileOutputRecordingDelegate callbacks.
            // NB: these callbacks can arrive on any thread (see class comment above).
            struct FileOutputRecordingDelegateClass    : public ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>>
            {
                FileOutputRecordingDelegateClass()  : ObjCClass<NSObject<AVCaptureFileOutputRecordingDelegate>> ("FileOutputRecordingDelegateClass_")
                {
                    addMethod (@selector (captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:),        started, "v@:@@@");
                    addMethod (@selector (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:), stopped, "v@:@@@@");

                    addIvar<VideoRecorder*> ("owner");

                    registerClass();
                }

                //==============================================================================
                static VideoRecorder& getOwner (id self)         { return *getIvar<VideoRecorder*> (self, "owner"); }
                static void setOwner (id self, VideoRecorder* r) { object_setInstanceVariable (self, "owner", r); }

            private:
                static void started (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*)
                {
                    JUCE_CAMERA_LOG ("Started recording");

                    getOwner (self).firstRecordedFrameTimeMs.set (Time::getCurrentTime().toMilliseconds());
                    getOwner (self).recordingInProgress = true;
                }

                static void stopped (id self, SEL, AVCaptureFileOutput*, NSURL*, NSArray<AVCaptureConnection*>*, NSError* error)
                {
                    String errorString;

                    bool recordingPlayable = true;

                    // There might have been an error in the recording, yet there may be a playable file...
                    // (Messaging a nil error returns 0 == noErr, so the nil case is safe.)
                    if ([error code] != noErr)
                    {
                        id value = [[error userInfo] objectForKey: AVErrorRecordingSuccessfullyFinishedKey];

                        if (value != nil && ! [value boolValue])
                            recordingPlayable = false;

                        errorString = nsStringToJuce (error.localizedDescription) + ", playable: " + String ((int) recordingPlayable);
                    }

                    JUCE_CAMERA_LOG ("Stopped recording, error = " + errorString);

                    getOwner (self).recordingInProgress = false;
                }
            };

            AVCaptureMovieFileOutput* movieFileOutput;
            std::unique_ptr<NSObject<AVCaptureFileOutputRecordingDelegate>, NSObjectDeleter> delegate;

            bool recordingInProgress = false;
            // Timestamp (ms since epoch) written from the "started" delegate callback;
            // atomic because that callback may arrive on any thread.
            Atomic<int64> firstRecordedFrameTimeMs { 0 };
        };
  863. //==============================================================================
        // Adds the given output to the capture session asynchronously on the session
        // queue, wrapped in a begin/commitConfiguration pair. Asserts in debug builds
        // if the session refuses the output.
        void addOutputIfPossible (AVCaptureOutput* output)
        {
            dispatch_async (captureSessionQueue,^
                            {
                                if ([captureSession.get() canAddOutput: output])
                                {
                                    [captureSession.get() beginConfiguration];
                                    [captureSession.get() addOutput: output];
                                    [captureSession.get() commitConfiguration];

                                    return;
                                }

                                // Can't add output to camera session!
                                jassertfalse;
                            });
        }
  879. //==============================================================================
        // Called when the AVCaptureSession reports that it has started running;
        // records the fact and notifies the owning Pimpl.
        void cameraSessionStarted()
        {
            sessionStarted = true;

            owner.cameraSessionStarted();
        }

        // Forwards a session runtime error to the owning Pimpl.
        void cameraSessionRuntimeError (const String& error)
        {
            owner.cameraSessionRuntimeError (error);
        }

        // Forwards a captured image to the owning Pimpl's listeners.
        void callListeners (const Image& image)
        {
            owner.callListeners (image);
        }

        // Forwards the picture-taken notification to the owning Pimpl.
        void notifyPictureTaken (const Image& image)
        {
            owner.notifyPictureTaken (image);
        }
  897. Pimpl& owner;
  898. dispatch_queue_t captureSessionQueue;
  899. std::unique_ptr<AVCaptureSession, NSObjectDeleter> captureSession;
  900. std::unique_ptr<NSObject, NSObjectDeleter> delegate;
  901. StillPictureTaker stillPictureTaker;
  902. VideoRecorder videoRecorder;
  903. AVCaptureDevice* cameraDevice = nil;
  904. AVCaptureVideoPreviewLayer* previewLayer = nil;
  905. bool sessionStarted = false;
  906. WaitableEvent sessionClosedEvent;
  907. static int numCaptureSessions;
  908. };
  909. //==============================================================================
    // Invoked by the capture session once the camera is up and running: reports a
    // successful open (empty error) to the user-supplied open callback.
    void cameraSessionStarted()
    {
        JUCE_CAMERA_LOG ("cameraSessionStarted()");

        cameraOpenCallback (cameraId, {});
    }
    // Routes a session runtime error to the appropriate callback: to the open
    // callback if the camera-open handshake hasn't completed yet, otherwise to the
    // device's onErrorOccurred handler (if any).
    void cameraSessionRuntimeError (const String& error)
    {
        JUCE_CAMERA_LOG ("cameraSessionRuntimeError(), error = " + error);

        if (! notifiedOfCameraOpening)
        {
            cameraOpenCallback ({}, error);
        }
        else
        {
            if (owner.onErrorOccurred != nullptr)
                owner.onErrorOccurred (error);
        }
    }
    // Delivers a captured image to all registered listeners (under the listener
    // lock), then re-arms the next still capture.
    void callListeners (const Image& image)
    {
        const ScopedLock sl (listenerLock);
        listeners.call ([=] (Listener& l) { l.imageReceived (image); });

        // NOTE(review): re-triggers only when exactly one listener is registered —
        // presumably additional listeners rely on captures triggered elsewhere
        // (e.g. when they were added); confirm against the addListener logic.
        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }
    // Invokes the user-supplied picture-taken callback, if one is set.
    void notifyPictureTaken (const Image& image)
    {
        JUCE_CAMERA_LOG ("notifyPictureTaken()");

        if (pictureTakenCallback != nullptr)
            pictureTakenCallback (image);
    }

    //==============================================================================
    // Kicks off a single still-picture capture on the session.
    void triggerStillPictureCapture()
    {
        captureSession.takeStillPicture();
    }
  946. //==============================================================================
  947. CameraDevice& owner;
  948. String cameraId;
  949. InternalOpenCameraResultCallback cameraOpenCallback;
  950. CriticalSection listenerLock;
  951. ListenerList<Listener> listeners;
  952. std::function<void (const Image&)> pictureTakenCallback;
  953. CaptureSession captureSession;
  954. bool notifiedOfCameraOpening = false;
  955. //==============================================================================
    // Simple major/minor OS version pair returned by getIOSVersion().
    struct IOSVersion
    {
        int major;
        int minor;
    };
  961. static IOSVersion getIOSVersion()
  962. {
  963. auto processInfo = [NSProcessInfo processInfo];
  964. if (! [processInfo respondsToSelector: @selector (operatingSystemVersion)])
  965. return {7, 0}; // Below 8.0 in fact, but only care that it's below 8
  966. return { (int)[processInfo operatingSystemVersion].majorVersion,
  967. (int)[processInfo operatingSystemVersion].minorVersion };
  968. }
  969. static IOSVersion iosVersion;
  970. friend struct CameraDevice::ViewerComponent;
  971. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  972. };
// Cache the OS version once at static-initialisation time.
CameraDevice::Pimpl::IOSVersion CameraDevice::Pimpl::iosVersion = CameraDevice::Pimpl::getIOSVersion();
// Count of live capture sessions, shared across all CameraDevice instances.
int CameraDevice::Pimpl::CaptureSession::numCaptureSessions = 0;
  975. //==============================================================================
struct CameraDevice::ViewerComponent  : public UIViewComponent
{
    //==============================================================================
    // UIView subclass whose layoutSubviews override keeps the camera preview
    // layer's frame and video orientation in sync with the view.
    struct JuceCameraDeviceViewerClass    : public ObjCClass<UIView>
    {
        JuceCameraDeviceViewerClass()  : ObjCClass<UIView> ("JuceCameraDeviceViewerClass_")
        {
            addMethod (@selector (layoutSubviews), layoutSubviews, "v@:");

            registerClass();
        }

    private:
        static void layoutSubviews (id self, SEL)
        {
            sendSuperclassMessage<void> (self, @selector (layoutSubviews));

            UIView* asUIView = (UIView*) self;

            updateOrientation (self);

            // Keep the preview layer the same size as the view.
            if (auto* previewLayer = getPreviewLayer (self))
                previewLayer.frame = asUIView.bounds;
        }

        // Returns the preview layer installed as the view's first sublayer by the
        // ViewerComponent constructor, or nil if none is present.
        static AVCaptureVideoPreviewLayer* getPreviewLayer (id self)
        {
            UIView* asUIView = (UIView*) self;

            if (asUIView.layer.sublayers != nil && [asUIView.layer.sublayers count] > 0)
                if ([asUIView.layer.sublayers[0] isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                    return (AVCaptureVideoPreviewLayer*) asUIView.layer.sublayers[0];

            return nil;
        }

        // Points the preview's video connection at the current device orientation
        // (portrait/landscape only; face-up/down and unknown are ignored).
        static void updateOrientation (id self)
        {
            if (auto* previewLayer = getPreviewLayer (self))
            {
                UIDeviceOrientation o = [UIDevice currentDevice].orientation;

                if (UIDeviceOrientationIsPortrait (o) || UIDeviceOrientationIsLandscape (o))
                {
                    // NOTE(review): this casts UIDeviceOrientation straight to
                    // AVCaptureVideoOrientation; the numeric values line up for these
                    // four orientations in the current SDKs, but verify against the
                    // headers if either enum ever changes.
                    if (previewLayer.connection != nil)
                        previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation) o;
                }
            }
        }
    };

    // Wraps the camera preview in a JUCE component: creates the custom UIView,
    // attaches the session's preview layer, and orients it to match the UI.
    ViewerComponent (CameraDevice& device)
    {
        static JuceCameraDeviceViewerClass cls;

        // Initial size that can be overriden later.
        setSize (640, 480);

        auto view = [cls.createInstance() init];
        setView (view);

        auto* previewLayer = device.pimpl->captureSession.createPreviewLayer();
        previewLayer.frame = view.bounds;

        // Start in the interface's current orientation, defaulting to portrait
        // when the status bar orientation is unknown.
        UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
        AVCaptureVideoOrientation videoOrientation = statusBarOrientation != UIInterfaceOrientationUnknown
                                                   ? (AVCaptureVideoOrientation) statusBarOrientation
                                                   : AVCaptureVideoOrientationPortrait;

        previewLayer.connection.videoOrientation = videoOrientation;

        [view.layer addSublayer: previewLayer];
    }
};
  1033. //==============================================================================
  1034. String CameraDevice::getFileExtension()
  1035. {
  1036. return ".mov";
  1037. }
  1038. #if JUCE_DEPRECATION_IGNORED
  1039. JUCE_END_IGNORE_WARNINGS_GCC_LIKE
  1040. #endif