The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 5 End-User License
  8. Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
  9. 27th April 2017).
  10. End User License Agreement: www.juce.com/juce-5-licence
  11. Privacy Policy: www.juce.com/juce-5-privacy-policy
  12. Or: You may also use this code under the terms of the GPL v3 (see
  13. www.gnu.org/licenses).
  14. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  15. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  16. DISCLAIMED.
  17. ==============================================================================
  18. */
  19. #if __ANDROID_API__ >= 21
  20. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  21. STATICMETHOD (valueOf, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$CompressFormat;")
  22. DECLARE_JNI_CLASS (AndroidBitmapCompressFormat, "android/graphics/Bitmap$CompressFormat");
  23. #undef JNI_CLASS_MEMBERS
  24. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  25. METHOD (close, "close", "()V") \
  26. METHOD (createCaptureRequest, "createCaptureRequest", "(I)Landroid/hardware/camera2/CaptureRequest$Builder;") \
  27. METHOD (createCaptureSession, "createCaptureSession", "(Ljava/util/List;Landroid/hardware/camera2/CameraCaptureSession$StateCallback;Landroid/os/Handler;)V")
  28. DECLARE_JNI_CLASS (AndroidCameraDevice, "android/hardware/camera2/CameraDevice");
  29. #undef JNI_CLASS_MEMBERS
  30. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  31. METHOD (close, "close", "()V") \
  32. METHOD (getPlanes, "getPlanes", "()[Landroid/media/Image$Plane;")
  33. DECLARE_JNI_CLASS (AndroidImage, "android/media/Image");
  34. #undef JNI_CLASS_MEMBERS
  35. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  36. METHOD (getBuffer, "getBuffer", "()Ljava/nio/ByteBuffer;")
  37. DECLARE_JNI_CLASS (AndroidImagePlane, "android/media/Image$Plane");
  38. #undef JNI_CLASS_MEMBERS
  39. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  40. METHOD (acquireLatestImage, "acquireLatestImage", "()Landroid/media/Image;") \
  41. METHOD (close, "close", "()V") \
  42. METHOD (getSurface, "getSurface", "()Landroid/view/Surface;") \
  43. METHOD (setOnImageAvailableListener, "setOnImageAvailableListener", "(Landroid/media/ImageReader$OnImageAvailableListener;Landroid/os/Handler;)V") \
  44. STATICMETHOD (newInstance, "newInstance", "(IIII)Landroid/media/ImageReader;")
  45. DECLARE_JNI_CLASS (AndroidImageReader, "android/media/ImageReader");
  46. #undef JNI_CLASS_MEMBERS
  47. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  48. METHOD (constructor, "<init>", "()V") \
  49. METHOD (getSurface, "getSurface", "()Landroid/view/Surface;") \
  50. METHOD (prepare, "prepare", "()V") \
  51. METHOD (release, "release", "()V") \
  52. METHOD (setAudioEncoder, "setAudioEncoder", "(I)V") \
  53. METHOD (setAudioSource, "setAudioSource", "(I)V") \
  54. METHOD (setOnErrorListener, "setOnErrorListener", "(Landroid/media/MediaRecorder$OnErrorListener;)V") \
  55. METHOD (setOnInfoListener, "setOnInfoListener", "(Landroid/media/MediaRecorder$OnInfoListener;)V") \
  56. METHOD (setOrientationHint, "setOrientationHint", "(I)V") \
  57. METHOD (setOutputFile, "setOutputFile", "(Ljava/lang/String;)V") \
  58. METHOD (setOutputFormat, "setOutputFormat", "(I)V") \
  59. METHOD (setVideoEncoder, "setVideoEncoder", "(I)V") \
  60. METHOD (setVideoEncodingBitRate, "setVideoEncodingBitRate", "(I)V") \
  61. METHOD (setVideoFrameRate, "setVideoFrameRate", "(I)V") \
  62. METHOD (setVideoSize, "setVideoSize", "(II)V") \
  63. METHOD (setVideoSource, "setVideoSource", "(I)V") \
  64. METHOD (start, "start", "()V") \
  65. METHOD (stop, "stop", "()V")
  66. DECLARE_JNI_CLASS (AndroidMediaRecorder, "android/media/MediaRecorder");
  67. #undef JNI_CLASS_MEMBERS
  68. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  69. METHOD (constructor, "<init>", "(Landroid/content/Context;)V") \
  70. METHOD (getSurfaceTexture, "getSurfaceTexture", "()Landroid/graphics/SurfaceTexture;") \
  71. METHOD (isAvailable, "isAvailable", "()Z") \
  72. METHOD (setSurfaceTextureListener, "setSurfaceTextureListener", "(Landroid/view/TextureView$SurfaceTextureListener;)V") \
  73. METHOD (setTransform, "setTransform", "(Landroid/graphics/Matrix;)V")
  74. DECLARE_JNI_CLASS (AndroidTextureView, "android/view/TextureView");
  75. #undef JNI_CLASS_MEMBERS
  76. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  77. METHOD (constructor, "<init>", "(Landroid/graphics/SurfaceTexture;)V")
  78. DECLARE_JNI_CLASS (AndroidSurface, "android/view/Surface");
  79. #undef JNI_CLASS_MEMBERS
  80. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  81. METHOD (setDefaultBufferSize, "setDefaultBufferSize", "(II)V")
  82. DECLARE_JNI_CLASS (AndroidSurfaceTexture, "android/graphics/SurfaceTexture");
  83. #undef JNI_CLASS_MEMBERS
  84. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  85. METHOD (getOutputSizesForClass, "getOutputSizes", "(Ljava/lang/Class;)[Landroid/util/Size;") \
  86. METHOD (getOutputSizesForFormat, "getOutputSizes", "(I)[Landroid/util/Size;") \
  87. METHOD (isOutputSupportedFor, "isOutputSupportedFor", "(I)Z") \
  88. METHOD (isOutputSupportedForSurface, "isOutputSupportedFor", "(Landroid/view/Surface;)Z")
  89. DECLARE_JNI_CLASS (AndroidStreamConfigurationMap, "android/hardware/camera2/params/StreamConfigurationMap");
  90. #undef JNI_CLASS_MEMBERS
  91. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  92. METHOD (constructor, "<init>", "()V") \
  93. METHOD (toByteArray, "toByteArray", "()[B") \
  94. METHOD (size, "size", "()I")
  95. DECLARE_JNI_CLASS (ByteArrayOutputStream, "java/io/ByteArrayOutputStream");
  96. #undef JNI_CLASS_MEMBERS
  97. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  98. METHOD (abortCaptures, "abortCaptures", "()V") \
  99. METHOD (capture, "capture", "(Landroid/hardware/camera2/CaptureRequest;Landroid/hardware/camera2/CameraCaptureSession$CaptureCallback;Landroid/os/Handler;)I") \
  100. METHOD (close, "close", "()V") \
  101. METHOD (setRepeatingRequest, "setRepeatingRequest", "(Landroid/hardware/camera2/CaptureRequest;Landroid/hardware/camera2/CameraCaptureSession$CaptureCallback;Landroid/os/Handler;)I") \
  102. METHOD (stopRepeating, "stopRepeating", "()V")
  103. DECLARE_JNI_CLASS (CameraCaptureSession, "android/hardware/camera2/CameraCaptureSession")
  104. #undef JNI_CLASS_MEMBERS
  105. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  106. METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";JZ)V")
  107. DECLARE_JNI_CLASS (CameraCaptureSessionCaptureCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraCaptureSessionCaptureCallback");
  108. #undef JNI_CLASS_MEMBERS
  109. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  110. METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";J)V")
  111. DECLARE_JNI_CLASS (CameraCaptureSessionStateCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraCaptureSessionStateCallback");
  112. #undef JNI_CLASS_MEMBERS
  113. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  114. METHOD (get, "get", "(Landroid/hardware/camera2/CameraCharacteristics$Key;)Ljava/lang/Object;") \
  115. METHOD (getKeys, "getKeys", "()Ljava/util/List;") \
  116. STATICFIELD (CONTROL_AF_AVAILABLE_MODES, "CONTROL_AF_AVAILABLE_MODES", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
  117. STATICFIELD (LENS_FACING, "LENS_FACING", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
  118. STATICFIELD (SCALER_STREAM_CONFIGURATION_MAP, "SCALER_STREAM_CONFIGURATION_MAP", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
  119. STATICFIELD (SENSOR_ORIENTATION, "SENSOR_ORIENTATION", "Landroid/hardware/camera2/CameraCharacteristics$Key;")
  120. DECLARE_JNI_CLASS (CameraCharacteristics, "android/hardware/camera2/CameraCharacteristics");
  121. #undef JNI_CLASS_MEMBERS
  122. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  123. METHOD (getName, "getName", "()Ljava/lang/String;")
  124. DECLARE_JNI_CLASS (CameraCharacteristicsKey, "android/hardware/camera2/CameraCharacteristics$Key");
  125. #undef JNI_CLASS_MEMBERS
  126. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  127. METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";J)V")
  128. DECLARE_JNI_CLASS (CameraDeviceStateCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraDeviceStateCallback");
  129. #undef JNI_CLASS_MEMBERS
  130. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  131. METHOD (getCameraCharacteristics, "getCameraCharacteristics", "(Ljava/lang/String;)Landroid/hardware/camera2/CameraCharacteristics;") \
  132. METHOD (getCameraIdList, "getCameraIdList", "()[Ljava/lang/String;") \
  133. METHOD (openCamera, "openCamera", "(Ljava/lang/String;Landroid/hardware/camera2/CameraDevice$StateCallback;Landroid/os/Handler;)V")
  134. DECLARE_JNI_CLASS (CameraManager, "android/hardware/camera2/CameraManager");
  135. #undef JNI_CLASS_MEMBERS
  136. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  137. STATICFIELD (CONTROL_AE_PRECAPTURE_TRIGGER, "CONTROL_AE_PRECAPTURE_TRIGGER", "Landroid/hardware/camera2/CaptureRequest$Key;") \
  138. STATICFIELD (CONTROL_AF_MODE, "CONTROL_AF_MODE", "Landroid/hardware/camera2/CaptureRequest$Key;") \
  139. STATICFIELD (CONTROL_AF_TRIGGER, "CONTROL_AF_TRIGGER", "Landroid/hardware/camera2/CaptureRequest$Key;") \
  140. STATICFIELD (CONTROL_MODE, "CONTROL_MODE", "Landroid/hardware/camera2/CaptureRequest$Key;")
  141. DECLARE_JNI_CLASS (CaptureRequest, "android/hardware/camera2/CaptureRequest");
  142. #undef JNI_CLASS_MEMBERS
  143. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  144. METHOD (addTarget, "addTarget", "(Landroid/view/Surface;)V") \
  145. METHOD (build, "build", "()Landroid/hardware/camera2/CaptureRequest;") \
  146. METHOD (set, "set", "(Landroid/hardware/camera2/CaptureRequest$Key;Ljava/lang/Object;)V")
  147. DECLARE_JNI_CLASS (CaptureRequestBuilder, "android/hardware/camera2/CaptureRequest$Builder");
  148. #undef JNI_CLASS_MEMBERS
  149. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  150. METHOD (get, "get", "(Landroid/hardware/camera2/CaptureResult$Key;)Ljava/lang/Object;") \
  151. STATICFIELD (CONTROL_AE_STATE, "CONTROL_AE_STATE", "Landroid/hardware/camera2/CaptureResult$Key;") \
  152. STATICFIELD (CONTROL_AF_STATE, "CONTROL_AF_STATE", "Landroid/hardware/camera2/CaptureResult$Key;")
  153. DECLARE_JNI_CLASS (CaptureResult, "android/hardware/camera2/CaptureResult");
  154. #undef JNI_CLASS_MEMBERS
  155. #define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
  156. METHOD (canDetectOrientation, "canDetectOrientation", "()Z") \
  157. METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";JLandroid/content/Context;I)V") \
  158. METHOD (disable, "disable", "()V") \
  159. METHOD (enable, "enable", "()V")
  160. DECLARE_JNI_CLASS (OrientationEventListener, JUCE_ANDROID_ACTIVITY_CLASSPATH "$JuceOrientationEventListener");
  161. #undef JNI_CLASS_MEMBERS
  162. #endif
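// Each JNI_CLASS_MEMBERS / DECLARE_JNI_CLASS pair above expands (via helper macros defined
// elsewhere in JUCE's Android JNI support code) into a small wrapper that caches the jclass and
// the listed jmethodID/jfieldID handles. The quoted strings are standard JNI descriptors:
// "(I)V" is a method taking an int and returning void, "Ljava/lang/String;" is a
// java.lang.String reference, "[B" is a byte array, and '$' introduces a nested class.
// The wrappers are then used with raw JNIEnv calls throughout this file, for example
// (illustrative only):
//
//     auto* env = getEnv();
//     env->CallVoidMethod (someImageReader, AndroidImageReader.close);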
  163. //==============================================================================
  164. class AndroidRunnable : public juce::AndroidInterfaceImplementer
  165. {
  166. public:
  167. struct Owner
  168. {
  169. virtual ~Owner() {}
  170. virtual void run() = 0;
  171. };
  172. AndroidRunnable (Owner& ownerToUse)
  173. : owner (ownerToUse)
  174. {}
  175. private:
  176. Owner& owner;
  177. jobject invoke (jobject proxy, jobject method, jobjectArray args) override
  178. {
  179. auto* env = getEnv();
  180. auto methodName = juce::juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));
  181. if (methodName == "run")
  182. {
  183. owner.run();
  184. return nullptr;
  185. }
  186. // Forward any other method call to the base class implementation.
  187. return AndroidInterfaceImplementer::invoke (proxy, method, args);
  188. }
  189. };
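// AndroidRunnable shows the dynamic-proxy pattern used by every listener class in this file:
// CreateJavaInterface (a JUCE Android helper) wraps an AndroidInterfaceImplementer in a Java
// Proxy object implementing the named interface, and every Java-side call lands in the C++
// invoke() override, which dispatches on the method name ("run" here) and forwards anything
// else to the base class. A minimal sketch of how such a wrapper is obtained, following the
// usage seen later in this file (illustrative only, assuming some Owner implementation):
//
//     AndroidRunnable runnable (myOwner);
//     auto javaRunnable = CreateJavaInterface (&runnable, "java/lang/Runnable");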
  190. //==============================================================================
  191. class TextureViewSurfaceTextureListener : public AndroidInterfaceImplementer
  192. {
  193. public:
  194. struct Owner
  195. {
  196. virtual ~Owner() {}
  197. virtual void onSurfaceTextureAvailable (LocalRef<jobject>& surface, int width, int height) = 0;
  198. virtual bool onSurfaceTextureDestroyed (LocalRef<jobject>& surface) = 0;
  199. virtual void onSurfaceTextureSizeChanged (LocalRef<jobject>& surface, int width, int height) = 0;
  200. virtual void onSurfaceTextureUpdated (LocalRef<jobject>& surface) = 0;
  201. };
  202. TextureViewSurfaceTextureListener (Owner& ownerToUse)
  203. : owner (ownerToUse)
  204. {}
  205. jobject invoke (jobject proxy, jobject method, jobjectArray args) override
  206. {
  207. auto* env = getEnv();
  208. auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));
  209. int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;
  210. if (methodName == "onSurfaceTextureAvailable" && numArgs == 3)
  211. {
  212. auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  213. auto width = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
  214. auto height = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));
  215. auto widthInt = env->CallIntMethod (width, JavaInteger.intValue);
  216. auto heightInt = env->CallIntMethod (height, JavaInteger.intValue);
  217. owner.onSurfaceTextureAvailable (surface, widthInt, heightInt);
  218. return nullptr;
  219. }
  220. else if (methodName == "onSurfaceTextureDestroyed" && numArgs == 1)
  221. {
  222. auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  223. auto result = owner.onSurfaceTextureDestroyed (surface);
  224. return env->CallStaticObjectMethod (JavaBoolean, JavaBoolean.valueOf, result);
  225. }
  226. else if (methodName == "onSurfaceTextureSizeChanged" && numArgs == 3)
  227. {
  228. auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  229. auto width = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
  230. auto height = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));
  231. auto widthInt = env->CallIntMethod (width, JavaInteger.intValue);
  232. auto heightInt = env->CallIntMethod (height, JavaInteger.intValue);
  233. owner.onSurfaceTextureSizeChanged (surface, widthInt, heightInt);
  234. return nullptr;
  235. }
  236. else if (methodName == "onSurfaceTextureUpdated" && numArgs == 1)
  237. {
  238. auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  239. owner.onSurfaceTextureUpdated (surface);
  240. return nullptr;
  241. }
  242. return AndroidInterfaceImplementer::invoke (proxy, method, args);
  243. }
  244. private:
  245. Owner& owner;
  246. };
  247. //==============================================================================
  248. class ImageReaderOnImageAvailableListener : public AndroidInterfaceImplementer
  249. {
  250. public:
  251. struct Owner
  252. {
  253. virtual ~Owner() {}
  254. virtual void onImageAvailable (LocalRef<jobject>& imageReader) = 0;
  255. };
  256. ImageReaderOnImageAvailableListener (Owner& ownerToUse)
  257. : owner (ownerToUse)
  258. {}
  259. jobject invoke (jobject proxy, jobject method, jobjectArray args) override
  260. {
  261. auto* env = getEnv();
  262. auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));
  263. int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;
  264. if (methodName == "onImageAvailable" && numArgs == 1)
  265. {
  266. auto imageReader = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  267. owner.onImageAvailable (imageReader);
  268. return nullptr;
  269. }
  270. return AndroidInterfaceImplementer::invoke (proxy, method, args);
  271. }
  272. private:
  273. Owner& owner;
  274. };
  275. //==============================================================================
  276. class MediaRecorderOnInfoListener : public AndroidInterfaceImplementer
  277. {
  278. public:
  279. struct Owner
  280. {
  281. virtual ~Owner() {}
  282. virtual void onInfo (LocalRef<jobject>& mediaRecorder, int what, int extra) = 0;
  283. };
  284. MediaRecorderOnInfoListener (Owner& ownerToUse)
  285. : owner (ownerToUse)
  286. {}
  287. jobject invoke (jobject proxy, jobject method, jobjectArray args) override
  288. {
  289. auto* env = getEnv();
  290. auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));
  291. int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;
  292. if (methodName == "onInfo" && numArgs == 3)
  293. {
  294. auto mediaRecorder = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  295. auto what = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
  296. auto extra = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));
  297. auto whatInt = (int) env->CallIntMethod (what, JavaInteger.intValue);
  298. auto extraInt = (int) env->CallIntMethod (extra, JavaInteger.intValue);
  299. owner.onInfo (mediaRecorder, whatInt, extraInt);
  300. return nullptr;
  301. }
  302. return AndroidInterfaceImplementer::invoke (proxy, method, args);
  303. }
  304. private:
  305. Owner& owner;
  306. };
  307. //==============================================================================
  308. class MediaRecorderOnErrorListener : public AndroidInterfaceImplementer
  309. {
  310. public:
  311. struct Owner
  312. {
  313. virtual ~Owner() {}
  314. virtual void onError (LocalRef<jobject>& mediaRecorder, int what, int extra) = 0;
  315. };
  316. MediaRecorderOnErrorListener (Owner& ownerToUse)
  317. : owner (ownerToUse)
  318. {}
  319. jobject invoke (jobject proxy, jobject method, jobjectArray args) override
  320. {
  321. auto* env = getEnv();
  322. auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));
  323. int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;
  324. if (methodName == "onError" && numArgs == 3)
  325. {
  326. auto mediaRecorder = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
  327. auto what = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
  328. auto extra = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));
  329. auto whatInt = (int) env->CallIntMethod (what, JavaInteger.intValue);
  330. auto extraInt = (int) env->CallIntMethod (extra, JavaInteger.intValue);
  331. owner.onError (mediaRecorder, whatInt, extraInt);
  332. return nullptr;
  333. }
  334. return AndroidInterfaceImplementer::invoke (proxy, method, args);
  335. }
  336. private:
  337. Owner& owner;
  338. };
  339. //==============================================================================
  340. struct CameraDevice::Pimpl
  341. #if __ANDROID_API__ >= 21
  342. : private AppPausedResumedListener::Owner
  343. #endif
  344. {
  345. using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;
  346. Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
  347. int minWidthToUse, int minHeightToUse, int maxWidthToUse, int maxHeightToUse,
  348. bool /*useHighQuality*/)
  349. #if __ANDROID_API__ >= 21
  350. : owner (ownerToUse),
  351. minWidth (minWidthToUse),
  352. minHeight (minHeightToUse),
  353. maxWidth (maxWidthToUse),
  354. maxHeight (maxHeightToUse),
  355. cameraId (cameraIdToUse),
  356. appPausedResumedListener (*this),
  357. appPausedResumedListenerNative (CreateJavaInterface (&appPausedResumedListener,
  358. JUCE_ANDROID_ACTIVITY_CLASSPATH "$AppPausedResumedListener").get()),
  359. cameraManager (initialiseCameraManager()),
  360. cameraCharacteristics (initialiseCameraCharacteristics (cameraManager, cameraId)),
  361. streamConfigurationMap (cameraCharacteristics),
  362. previewDisplay (streamConfigurationMap.getPreviewBufferSize()),
  363. deviceOrientationChangeListener (previewDisplay)
  364. #endif
  365. {
  366. #if __ANDROID_API__ >= 21
  367. startBackgroundThread();
  368. #endif
  369. }
  370. ~Pimpl()
  371. {
  372. #if __ANDROID_API__ >= 21
  373. getEnv()->CallVoidMethod (android.activity, JuceAppActivity.removeAppPausedResumedListener,
  374. appPausedResumedListenerNative.get(), reinterpret_cast<jlong>(this));
  375. #endif
  376. }
  377. #if __ANDROID_API__ < 21
  378. // Dummy implementations for unsupported API levels.
  379. void open (InternalOpenCameraResultCallback) {}
  380. void takeStillPicture (std::function<void (const Image&)>) {}
  381. void startRecordingToFile (const File&, int) {}
  382. void stopRecording() {}
  383. void addListener (CameraDevice::Listener*) {}
  384. void removeListener (CameraDevice::Listener*) {}
  385. String getCameraId() const noexcept { return {}; }
  386. bool openedOk() const noexcept { return false; }
  387. Time getTimeOfFirstRecordedFrame() const { return {}; }
  388. static StringArray getAvailableDevices()
  389. {
  390. // Camera on Android requires API 21 or above.
  391. jassertfalse;
  392. return {};
  393. }
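// The android.hardware.camera2 classes wrapped at the top of this file only exist from API
// level 21 (Android 5.0), so older API levels get the no-op stubs above; the full
// implementation follows in the #else branch.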
  394. #else
  395. JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)
  396. String getCameraId() const noexcept { return cameraId; }
  397. void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
  398. {
  399. cameraOpenCallback = static_cast<InternalOpenCameraResultCallback&&> (cameraOpenCallbackToUse);
  400. // A valid camera open callback must be passed.
  401. jassert (cameraOpenCallback != nullptr);
  402. // The same camera can be opened only once!
  403. jassert (scopedCameraDevice == nullptr);
  404. if (cameraOpenCallback == nullptr || scopedCameraDevice != nullptr)
  405. return;
  406. WeakReference<Pimpl> safeThis (this);
  407. RuntimePermissions::request (RuntimePermissions::camera, [safeThis] (bool granted) mutable
  408. {
  409. if (safeThis != nullptr)
  410. safeThis->continueOpenRequest (granted);
  411. });
  412. }
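// open() defers the actual camera setup to continueOpenRequest(): the CAMERA runtime permission
// is requested asynchronously, and the WeakReference<Pimpl> guards against this object having
// been destroyed by the time the permission dialog is dismissed and the lambda runs.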
  413. void continueOpenRequest (bool granted)
  414. {
  415. if (granted)
  416. {
  417. getEnv()->CallVoidMethod (android.activity, JuceAppActivity.addAppPausedResumedListener,
  418. appPausedResumedListenerNative.get(), reinterpret_cast<jlong> (this));
  419. scopedCameraDevice.reset (new ScopedCameraDevice (*this, cameraId, cameraManager, handler, getAutoFocusModeToUse()));
  420. }
  421. else
  422. {
  423. invokeCameraOpenCallback ("Camera permission not granted");
  424. }
  425. }
  426. bool openedOk() const noexcept { return scopedCameraDevice->openedOk(); }
  427. void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
  428. {
  429. if (pictureTakenCallbackToUse == nullptr)
  430. {
  431. jassertfalse;
  432. return;
  433. }
  434. if (currentCaptureSessionMode->isVideoRecordSession())
  435. {
  436. // Taking still pictures while recording video is not supported on Android.
  437. jassertfalse;
  438. return;
  439. }
  440. pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);
  441. triggerStillPictureCapture();
  442. }
  443. void startRecordingToFile (const File& file, int /*quality*/)
  444. {
  445. if (! openedOk())
  446. {
  447. jassertfalse;
  448. return;
  449. }
  450. if (! previewDisplay.isReady())
  451. {
  452. // Did you remember to create and show a preview display?
  453. jassertfalse;
  454. return;
  455. }
  456. file.deleteFile();
  457. file.create();
  458. jassert (file.existsAsFile());
  459. // MediaRecorder can't handle videos larger than 1080p
  460. auto videoSize = chooseBestSize (minWidth, minHeight, jmin (maxWidth, 1080), maxHeight,
  461. streamConfigurationMap.getSupportedVideoRecordingOutputSizes());
  462. mediaRecorder.reset (new MediaRecorder (file.getFullPathName(), videoSize.getWidth(), videoSize.getHeight(),
  463. getCameraSensorOrientation(), getCameraLensFacing()));
  464. firstRecordedFrameTimeMs = Time::getCurrentTime();
  465. currentCaptureSessionMode.reset();
  466. startVideoRecordingMode (*mediaRecorder);
  467. }
  468. void stopRecording()
  469. {
  470. currentCaptureSessionMode.reset();
  471. mediaRecorder.reset();
  472. startPreviewMode (*imageReader);
  473. }
  474. Time getTimeOfFirstRecordedFrame() const
  475. {
  476. return firstRecordedFrameTimeMs;
  477. }
  478. static StringArray getAvailableDevices()
  479. {
  480. StringArray results;
  481. auto* env = getEnv();
  482. auto cameraManagerToUse = initialiseCameraManager();
  483. auto cameraIdArray = LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (cameraManagerToUse,
  484. CameraManager.getCameraIdList));
  485. results = javaStringArrayToJuce (cameraIdArray);
  486. for (auto& result : results)
  487. printDebugCameraInfo (cameraManagerToUse, result);
  488. return results;
  489. }
  490. void addListener (CameraDevice::Listener* listenerToAdd)
  491. {
  492. const ScopedLock sl (listenerLock);
  493. listeners.add (listenerToAdd);
  494. if (listeners.size() == 1)
  495. triggerStillPictureCapture();
  496. }
  497. void removeListener (CameraDevice::Listener* listenerToRemove)
  498. {
  499. const ScopedLock sl (listenerLock);
  500. listeners.remove (listenerToRemove);
  501. }
  502. private:
  503. enum
  504. {
  505. ERROR_CAMERA_IN_USE = 1,
  506. ERROR_MAX_CAMERAS_IN_USE = 2,
  507. ERROR_CAMERA_DISABLED = 3,
  508. ERROR_CAMERA_DEVICE = 4,
  509. ERROR_CAMERA_SERVICE = 5
  510. };
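// These values mirror the ERROR_* constants of android.hardware.camera2.CameraDevice.StateCallback
// (ERROR_CAMERA_IN_USE = 1 ... ERROR_CAMERA_SERVICE = 5), which is what the camera device state
// callback reports when opening the camera fails.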
  511. static String cameraErrorCodeToString (int errorCode)
  512. {
  513. switch (errorCode)
  514. {
  515. case ERROR_CAMERA_IN_USE: return "Camera already in use.";
  516. case ERROR_MAX_CAMERAS_IN_USE: return "Too many opened camera devices.";
  517. case ERROR_CAMERA_DISABLED: return "Camera disabled.";
  518. case ERROR_CAMERA_DEVICE: return "Fatal error.";
  519. case ERROR_CAMERA_SERVICE: return "Fatal error. Reboot required or persistent hardware problem.";
  520. default: return "Unknown error.";
  521. }
  522. }
  523. static LocalRef<jobject> initialiseCameraManager()
  524. {
  525. return LocalRef<jobject> (getEnv()->CallObjectMethod (android.activity, JuceAppActivity.getSystemService,
  526. javaString ("camera").get()));
  527. }
  528. static LocalRef<jobject> initialiseCameraCharacteristics (const GlobalRef& cameraManager, const String& cameraId)
  529. {
  530. return LocalRef<jobject> (getEnv()->CallObjectMethod (cameraManager,
  531. CameraManager.getCameraCharacteristics,
  532. javaString (cameraId).get()));
  533. }
  534. static void printDebugCameraInfo (const LocalRef<jobject>& cameraManagerToUse, const String& cameraId)
  535. {
  536. auto* env = getEnv();
  537. auto characteristics = LocalRef<jobject> (env->CallObjectMethod (cameraManagerToUse,
  538. CameraManager.getCameraCharacteristics,
  539. javaString (cameraId).get()));
  540. auto keysList = LocalRef<jobject> (env->CallObjectMethod (characteristics, CameraCharacteristics.getKeys));
  541. const int size = env->CallIntMethod (keysList, JavaList.size);
  542. JUCE_CAMERA_LOG ("Camera id: " + cameraId + ", characteristics keys num: " + String (size));
  543. for (int i = 0; i < size; ++i)
  544. {
  545. auto key = LocalRef<jobject> (env->CallObjectMethod (keysList, JavaList.get, i));
  546. auto jKeyName = LocalRef<jstring> ((jstring) env->CallObjectMethod (key, CameraCharacteristicsKey.getName));
  547. auto keyName = juceString (jKeyName);
  548. auto keyValue = LocalRef<jobject> (env->CallObjectMethod (characteristics, CameraCharacteristics.get, key.get()));
  549. auto jKeyValueString = LocalRef<jstring> ((jstring) env->CallObjectMethod (keyValue, JavaObject.toString));
  550. auto keyValueString = juceString (jKeyValueString);
  551. auto& kvs = keyValueString;
  552. if (kvs.startsWith ("[I") || kvs.startsWith ("[F") || kvs.startsWith ("[Z") || kvs.startsWith ("[B"))
  553. {
  554. printPrimitiveArrayElements (keyValue, keyName, keyValueString);
  555. }
  556. else if (kvs.startsWith ("[Landroid.util.Range"))
  557. {
  558. printRangeArrayElements (keyValue, keyName);
  559. }
  560. else
  561. {
  562. int chunkSize = 256;
  563. if (keyValueString.length() > chunkSize)
  564. {
  565. JUCE_CAMERA_LOG ("Key: " + keyName);
  566. for (int i = 0, j = 1; i < keyValueString.length(); i += chunkSize, ++j)
  567. JUCE_CAMERA_LOG ("value part " + String (j) + ": " + keyValueString.substring (i, i + chunkSize));
  568. }
  569. else
  570. {
  571. JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + keyValueString);
  572. }
  573. }
  574. ignoreUnused (keyName);
  575. }
  576. }
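// The "[I", "[F", "[Z", "[B" and "[Landroid.util.Range" prefixes checked above come from Java's
// default Object.toString() on arrays, which prints the JVM class-name encoding of the element
// type (int[], float[], boolean[], byte[] and an object array respectively) followed by a hash.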
  577. static void printPrimitiveArrayElements (const LocalRef<jobject>& keyValue, const String& keyName,
  578. const String& keyValueString)
  579. {
  580. ignoreUnused (keyName);
  581. String result = "[";
  582. auto* env = getEnv();
  583. #define PRINT_ELEMENTS(elem_type, array_type, fun_name_middle) \
  584. { \
  585. elem_type* elements = env->Get##fun_name_middle##ArrayElements ((array_type) keyValue.get(), 0); \
  586. int size = env->GetArrayLength ((array_type) keyValue.get()); \
  587. \
  588. for (int i = 0; i < size - 1; ++i) \
  589. result << String (elements[i]) << " "; \
  590. \
  591. if (size > 0) \
  592. result << String (elements[size - 1]); \
  593. \
  594. env->Release##fun_name_middle##ArrayElements ((array_type) keyValue.get(), elements, 0); \
  595. }
  596. if (keyValueString.startsWith ("[I"))
  597. PRINT_ELEMENTS (jint, jintArray, Int)
  598. else if (keyValueString.startsWith ("[F"))
  599. PRINT_ELEMENTS (float, jfloatArray, Float)
  600. else if (keyValueString.startsWith ("[Z"))
  601. PRINT_ELEMENTS (jboolean, jbooleanArray, Boolean)
  602. else if (keyValueString.startsWith ("[B"))
  603. PRINT_ELEMENTS (jbyte, jbyteArray, Byte);
  604. #undef PRINT_ELEMENTS
  605. result << "]";
  606. JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + result);
  607. }
  608. static void printRangeArrayElements (const LocalRef<jobject>& rangeArray, const String& keyName)
  609. {
  610. auto* env = getEnv();
  611. jobjectArray ranges = static_cast<jobjectArray> (rangeArray.get());
  612. int numRanges = env->GetArrayLength (ranges);
  613. String result;
  614. for (int i = 0; i < numRanges; ++i)
  615. {
  616. auto range = LocalRef<jobject> (env->GetObjectArrayElement (ranges, i));
  617. auto jRangeString = LocalRef<jstring> ((jstring) env->CallObjectMethod (range, AndroidRange.toString));
  618. result << juceString (jRangeString) << " ";
  619. }
  620. JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + result);
  621. }
  622. //==============================================================================
  623. class StreamConfigurationMap
  624. {
  625. public:
  626. StreamConfigurationMap (const GlobalRef& cameraCharacteristics)
  627. : scalerStreamConfigurationMap (getStreamConfigurationMap (cameraCharacteristics)),
  628. supportedPreviewOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
  629. getClassForName ("android.graphics.SurfaceTexture"),
  630. -1)),
  631. supportedStillImageOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
  632. LocalRef<jobject>(),
  633. jpegImageFormat)),
  634. supportedVideoRecordingOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
  635. getClassForName ("android.media.MediaRecorder"),
  636. -1)),
  637. defaultPreviewSize (getSmallestSize (supportedPreviewOutputSizes)),
  638. previewBufferSize (getLargestSize (supportedPreviewOutputSizes))
  639. {
  640. printSizesLog (supportedPreviewOutputSizes, "SurfaceTexture");
  641. printSizesLog (supportedStillImageOutputSizes, "JPEG");
  642. printSizesLog (supportedVideoRecordingOutputSizes, "MediaRecorder");
  643. }
  644. Array<Rectangle<int>> getSupportedPreviewOutputSizes() const noexcept { return supportedPreviewOutputSizes; }
  645. Array<Rectangle<int>> getSupportedStillImageOutputSizes() const noexcept { return supportedStillImageOutputSizes; }
  646. Array<Rectangle<int>> getSupportedVideoRecordingOutputSizes() const noexcept { return supportedVideoRecordingOutputSizes; }
  647. Rectangle<int> getDefaultPreviewSize() const noexcept { return defaultPreviewSize; }
  648. Rectangle<int> getPreviewBufferSize() const noexcept { return previewBufferSize; }
  649. bool isOutputSupportedForSurface (const LocalRef<jobject>& surface) const
  650. {
  651. return getEnv()->CallBooleanMethod (scalerStreamConfigurationMap, AndroidStreamConfigurationMap.isOutputSupportedForSurface, surface.get()) != 0;
  652. }
  653. static constexpr int jpegImageFormat = 256;
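// NB: jpegImageFormat matches android.graphics.ImageFormat.JPEG (0x100 = 256); the value is
// hard-coded here, presumably to avoid an extra JNI field lookup.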
  654. private:
  655. GlobalRef scalerStreamConfigurationMap;
  656. Array<Rectangle<int>> supportedPreviewOutputSizes;
  657. Array<Rectangle<int>> supportedStillImageOutputSizes;
  658. Array<Rectangle<int>> supportedVideoRecordingOutputSizes;
  659. Rectangle<int> defaultPreviewSize, previewBufferSize;
  660. GlobalRef getStreamConfigurationMap (const GlobalRef& cameraCharacteristics)
  661. {
  662. auto* env = getEnv();
  663. auto scalerStreamConfigurationMapKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics,
  664. CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP));
  665. return GlobalRef (LocalRef<jobject> (env->CallObjectMethod (cameraCharacteristics,
  666. CameraCharacteristics.get,
  667. scalerStreamConfigurationMapKey.get())));
  668. }
  669. static Array<Rectangle<int>> retrieveOutputSizes (GlobalRef& scalerStreamConfigurationMap,
  670. const LocalRef<jobject>& outputClass,
  671. int format)
  672. {
  673. Array<Rectangle<int>> result;
  674. auto* env = getEnv();
  675. auto outputSizes = outputClass.get() != nullptr
  676. ? LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (scalerStreamConfigurationMap,
  677. AndroidStreamConfigurationMap.getOutputSizesForClass,
  678. outputClass.get()))
  679. : LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (scalerStreamConfigurationMap,
  680. AndroidStreamConfigurationMap.getOutputSizesForFormat,
  681. (jint) format));
  682. if (format != -1)
  683. {
  684. auto supported = (env->CallBooleanMethod (scalerStreamConfigurationMap, AndroidStreamConfigurationMap.isOutputSupportedFor, (jint) format) != 0);
  685. if (! supported)
  686. {
  687. // The output format is not supported by this device; still-image capture will not work!
  688. jassertfalse;
  689. return {};
  690. }
  691. }
  692. int numSizes = env->GetArrayLength (outputSizes);
  693. jassert (numSizes > 0);
  694. for (int i = 0; i < numSizes; ++i)
  695. {
  696. auto size = LocalRef<jobject> (env->GetObjectArrayElement (outputSizes, i));
  697. auto width = env->CallIntMethod (size, AndroidSize.getWidth);
  698. auto height = env->CallIntMethod (size, AndroidSize.getHeight);
  699. result.add (Rectangle<int> (0, 0, width, height));
  700. }
  701. return result;
  702. }
  703. static LocalRef<jobject> getClassForName (const String& name)
  704. {
  705. return LocalRef<jobject> (getEnv()->CallStaticObjectMethod (JavaClass, JavaClass.forName,
  706. javaString (name).get()));
  707. }
  708. static void printSizesLog (const Array<Rectangle<int>>& sizes, const String& className)
  709. {
  710. ignoreUnused (sizes, className);
  711. JUCE_CAMERA_LOG ("Sizes for class " + className);
  712. #if JUCE_CAMERA_LOG_ENABLED
  713. for (auto& s : sizes)
  714. JUCE_CAMERA_LOG (s.toString() + "\n");
  715. #endif
  716. }
  717. Rectangle<int> getSmallestSize (const Array<Rectangle<int>>& sizes) const
  718. {
  719. if (sizes.size() == 0)
  720. return {};
  721. auto smallestSize = sizes[0];
  722. for (auto& size : sizes)
  723. {
  724. if (size.getWidth() < smallestSize.getWidth() && size.getHeight() < smallestSize.getHeight())
  725. smallestSize = size;
  726. }
  727. return smallestSize;
  728. }
  729. Rectangle<int> getLargestSize (const Array<Rectangle<int>>& sizes) const
  730. {
  731. if (sizes.size() == 0)
  732. return {};
  733. auto largestSize = sizes[0];
  734. for (auto& size : sizes)
  735. {
  736. if (size.getWidth() > largestSize.getWidth() && size.getHeight() > largestSize.getHeight())
  737. largestSize = size;
  738. }
  739. return largestSize;
  740. }
  741. };
  742. //==============================================================================
  743. class PreviewDisplay : private TextureViewSurfaceTextureListener::Owner
  744. {
  745. public:
  746. struct Listener
  747. {
  748. virtual ~Listener() {}
  749. virtual void previewDisplayReady() = 0;
  750. virtual void previewDisplayAboutToBeDestroyed() = 0;
  751. };
  752. PreviewDisplay (Rectangle<int> bufferSize)
  753. : textureViewSurfaceTextureListener (*this),
  754. textureView (getEnv()->NewObject (AndroidTextureView, AndroidTextureView.constructor,
  755. android.activity.get())),
  756. bufferWidth (bufferSize.getWidth()),
  757. bufferHeight (bufferSize.getHeight())
  758. {
  759. auto* env = getEnv();
  760. if (! isReady())
  761. env->CallVoidMethod (textureView, AndroidTextureView.setSurfaceTextureListener,
  762. CreateJavaInterface (&textureViewSurfaceTextureListener,
  763. "android/view/TextureView$SurfaceTextureListener").get());
  764. }
  765. ~PreviewDisplay()
  766. {
  767. getEnv()->CallVoidMethod (textureView, AndroidTextureView.setSurfaceTextureListener, nullptr);
  768. }
  769. void addListener (Listener* l)
  770. {
  771. if (l == nullptr)
  772. {
  773. jassertfalse;
  774. return;
  775. }
  776. listeners.add (l);
  777. if (isReady())
  778. l->previewDisplayReady();
  779. }
  780. void removeListener (Listener* l)
  781. {
  782. if (l == nullptr)
  783. {
  784. jassertfalse;
  785. return;
  786. }
  787. listeners.remove (l);
  788. }
  789. bool isReady() const
  790. {
  791. return (getEnv()->CallBooleanMethod (textureView, AndroidTextureView.isAvailable) != 0)
  792. && width > 0 && height > 0;
  793. }
  794. LocalRef<jobject> createSurface()
  795. {
  796. // The surface may get destroyed while the session is being configured, if
  797. // the preview gets hidden in the meantime, so bail out.
  798. if (! isReady())
  799. return LocalRef<jobject> (nullptr);
  800. auto* env = getEnv();
  801. auto surfaceTexture = LocalRef<jobject> (env->CallObjectMethod (textureView,
  802. AndroidTextureView.getSurfaceTexture));
  803. // NB: a buffer that is too small will produce a pixelated preview, and a buffer with the
  804. // wrong aspect ratio can produce a cropped preview.
  805. env->CallVoidMethod (surfaceTexture, AndroidSurfaceTexture.setDefaultBufferSize, (jint) bufferWidth, (jint) bufferHeight);
  806. auto surface = LocalRef<jobject> (env->NewObject (AndroidSurface, AndroidSurface.constructor, surfaceTexture.get()));
  807. return surface;
  808. }
  809. const GlobalRef& getNativeView() { return textureView; }
  810. void updateSurfaceTransform()
  811. {
  812. auto* env = getEnv();
  813. auto windowManager = LocalRef<jobject> (env->CallObjectMethod (android.activity, JuceAppActivity.getWindowManager));
  814. auto display = LocalRef<jobject> (env->CallObjectMethod (windowManager, AndroidWindowManager.getDefaultDisplay));
  815. auto rotation = env->CallIntMethod (display, AndroidDisplay.getRotation);
  816. static constexpr int rotation90 = 1;
  817. static constexpr int rotation270 = 3;
  818. auto matrix = LocalRef<jobject> (env->NewObject (AndroidMatrix, AndroidMatrix.constructor));
  819. if (rotation == rotation90 || rotation == rotation270)
  820. {
  821. env->CallBooleanMethod (matrix, AndroidMatrix.postScale, jfloat (height / (float) width), jfloat (width / (float) height), (jfloat) 0, (jfloat) 0);
  822. env->CallBooleanMethod (matrix, AndroidMatrix.postRotate, (jfloat) 90 * (rotation - 2), (jfloat) 0, (jfloat) 0);
  823. env->CallBooleanMethod (matrix, AndroidMatrix.postTranslate, (jfloat) (rotation == 3 ? width : 0), (jfloat) (rotation == 1 ? height : 0));
  824. }
  825. env->CallVoidMethod (textureView, AndroidTextureView.setTransform, matrix.get());
  826. }
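// updateSurfaceTransform() compensates for the window rotation reported by the default Display:
// when the device is in landscape (Surface.ROTATION_90 == 1 or ROTATION_270 == 3) the
// TextureView content is scaled by height/width and width/height, rotated by -90 or +90 degrees,
// and translated back into view so the camera frames are not stretched or drawn sideways.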
  827. private:
  828. ListenerList<Listener> listeners;
  829. TextureViewSurfaceTextureListener textureViewSurfaceTextureListener;
  830. GlobalRef textureView;
  831. int width = -1, height = -1;
  832. int bufferWidth, bufferHeight;
  833. void onSurfaceTextureAvailable (LocalRef<jobject>& /*surface*/, int widthToUse, int heightToUse) override
  834. {
  835. JUCE_CAMERA_LOG ("onSurfaceTextureAvailable()");
  836. width = widthToUse;
  837. height = heightToUse;
  838. updateSurfaceTransform();
  839. listeners.call (&Listener::previewDisplayReady);
  840. }
  841. bool onSurfaceTextureDestroyed (LocalRef<jobject>& /*surface*/) override
  842. {
  843. JUCE_CAMERA_LOG ("onSurfaceTextureDestroyed()");
  844. listeners.call (&Listener::previewDisplayAboutToBeDestroyed);
  845. return true;
  846. }
  847. void onSurfaceTextureSizeChanged (LocalRef<jobject>& /*surface*/, int widthToUse, int heightToUse) override
  848. {
  849. JUCE_CAMERA_LOG ("onSurfaceTextureSizeChanged()");
  850. width = widthToUse;
  851. height = heightToUse;
  852. updateSurfaceTransform();
  853. }
  854. void onSurfaceTextureUpdated (LocalRef<jobject>& /*surface*/) override
  855. {
  856. JUCE_CAMERA_LOG ("onSurfaceTextureUpdated()");
  857. }
  858. JUCE_DECLARE_NON_COPYABLE (PreviewDisplay)
  859. };
  860. //==============================================================================
  861. class ImageReader : private ImageReaderOnImageAvailableListener::Owner
  862. {
  863. public:
  864. ImageReader (Pimpl& ownerToUse, GlobalRef& handlerToUse,
  865. int imageWidth, int imageHeight, int cameraSensorOrientationToUse)
  866. : owner (ownerToUse),
  867. cameraSensorOrientation (cameraSensorOrientationToUse),
  868. imageReader (getEnv()->CallStaticObjectMethod (AndroidImageReader, AndroidImageReader.newInstance,
  869. imageWidth, imageHeight, StreamConfigurationMap::jpegImageFormat,
  870. numImagesToKeep)),
  871. onImageAvailableListener (*this)
  872. {
  873. getEnv()->CallVoidMethod (imageReader, AndroidImageReader.setOnImageAvailableListener,
  874. CreateJavaInterface (&onImageAvailableListener,
  875. "android/media/ImageReader$OnImageAvailableListener").get(),
  876. handlerToUse.get());
  877. }
  878. ~ImageReader()
  879. {
  880. getEnv()->CallVoidMethod (imageReader, AndroidImageReader.close);
  881. }
  882. LocalRef<jobject> getSurface() const
  883. {
  884. return LocalRef<jobject> (getEnv()->CallObjectMethod (imageReader, AndroidImageReader.getSurface));
  885. }
  886. void resetNotificationFlag()
  887. {
  888. hasNotifiedListeners.set (0);
  889. }
  890. private:
  891. Pimpl& owner;
  892. int cameraSensorOrientation;
  893. GlobalRef imageReader;
  894. ImageReaderOnImageAvailableListener onImageAvailableListener;
  895. static constexpr int numImagesToKeep = 2;
  896. Atomic<int> hasNotifiedListeners { 0 };
  897. JUCE_DECLARE_WEAK_REFERENCEABLE (ImageReader)
  898. void onImageAvailable (LocalRef<jobject>& /*imageReader*/) override
  899. {
  900. JUCE_CAMERA_LOG ("onImageAvailable()");
  901. auto* env = getEnv();
  902. auto jImage = LocalRef<jobject> (env->CallObjectMethod (imageReader, AndroidImageReader.acquireLatestImage));
  903. if (jImage.get() == nullptr)
  904. return;
  905. auto cameraLensFrontFacing = owner.getCameraLensFacing() == 0;
  906. // NB: the sensor orientation could be used here to get the real-world orientation, but then
  907. // the resulting image might not match the UI orientation.
  908. auto image = androidImageToJuceWithFixedOrientation (jImage, owner.deviceOrientationChangeListener.getDeviceOrientation(),
  909. Desktop::getInstance().getCurrentOrientation(),
  910. cameraLensFrontFacing,
  911. cameraSensorOrientation);
  912. env->CallVoidMethod (jImage, AndroidImage.close);
  913. WeakReference<ImageReader> safeThis (this);
  914. owner.callListeners (image);
  915. // Android may take multiple pictures before it handles a request to stop.
  916. if (hasNotifiedListeners.compareAndSetBool (1, 0))
  917. MessageManager::callAsync ([safeThis, image]() mutable { if (safeThis != nullptr) safeThis->owner.notifyPictureTaken (image); });
  918. }
  919. struct ImageBuffer
  920. {
  921. LocalRef<jbyteArray> byteArray;
  922. int size;
  923. };
  924. static Image androidImageToJuceWithFixedOrientation (const LocalRef<jobject>& androidImage,
  925. Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
  926. Desktop::DisplayOrientation targetOrientation,
  927. bool cameraLensFrontFacing,
  928. int cameraSensorOrientation)
  929. {
  930. auto* env = getEnv();
  931. auto planes = LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (androidImage, AndroidImage.getPlanes));
  932. jassert (env->GetArrayLength (planes) > 0);
  933. auto plane = LocalRef<jobject> (env->GetObjectArrayElement (planes, 0));
  934. auto byteBuffer = LocalRef<jobject> (env->CallObjectMethod (plane, AndroidImagePlane.getBuffer));
  935. ImageBuffer correctedBuffer = getImageBufferWithCorrectedOrientationFrom (byteBuffer, deviceOrientationFromAccelerometerSensor,
  936. targetOrientation, cameraLensFrontFacing, cameraSensorOrientation);
  937. jbyte* rawBytes = env->GetByteArrayElements (correctedBuffer.byteArray, nullptr);
  938. Image result = ImageFileFormat::loadFrom (rawBytes, (size_t) correctedBuffer.size);
  939. env->ReleaseByteArrayElements (correctedBuffer.byteArray, rawBytes, 0);
  940. return result;
  941. }
  942. static ImageBuffer getImageBufferWithCorrectedOrientationFrom (const LocalRef<jobject>& imagePlaneBuffer,
  943. Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
  944. Desktop::DisplayOrientation targetOrientation,
  945. bool cameraLensFrontFacing,
  946. int cameraSensorOrientation)
  947. {
  948. auto* env = getEnv();
  949. auto bufferSize = env->CallIntMethod (imagePlaneBuffer, JavaByteBuffer.remaining);
  950. auto byteArray = LocalRef<jbyteArray> (env->NewByteArray (bufferSize));
  951. env->CallObjectMethod (imagePlaneBuffer, JavaByteBuffer.get, byteArray.get());
  953. auto rotationAngle = getRotationAngle (deviceOrientationFromAccelerometerSensor, targetOrientation,
  954. cameraLensFrontFacing, cameraSensorOrientation);
  955. if (rotationAngle == 0)
  956. {
  957. // Nothing to do, just get the bytes
  958. return { byteArray, bufferSize };
  959. }
  960. auto origBitmap = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmapFactory,
  961. AndroidBitmapFactory.decodeByteArray,
  962. byteArray.get(), (jint) 0, (jint) bufferSize));
  963. auto correctedBitmap = getBitmapWithCorrectOrientationFrom (origBitmap, rotationAngle);
  964. auto byteArrayOutputStream = LocalRef<jobject> (env->NewObject (ByteArrayOutputStream,
  965. ByteArrayOutputStream.constructor));
  966. auto jCompressFormatString = javaString ("JPEG");
  967. auto compressFormat = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmapCompressFormat,
  968. AndroidBitmapCompressFormat.valueOf,
  969. jCompressFormatString.get()));
  970. if (env->CallBooleanMethod (correctedBitmap, AndroidBitmap.compress, compressFormat.get(),
  971. (jint) 100, byteArrayOutputStream.get()) != 0)
  972. {
  973. auto correctedByteArray = LocalRef<jbyteArray> ((jbyteArray) env->CallObjectMethod (byteArrayOutputStream,
  974. ByteArrayOutputStream.toByteArray));
  975. int correctedByteArraySize = env->CallIntMethod (byteArrayOutputStream, ByteArrayOutputStream.size);
  976. return { correctedByteArray, correctedByteArraySize };
  977. }
  978. jassertfalse;
  979. // Fallback: return the original (unrotated) image data.
  980. return { byteArray, bufferSize };
  981. }
  982. static int getRotationAngle (Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
  983. Desktop::DisplayOrientation targetOrientation,
  984. bool cameraLensFrontFacing,
  985. int cameraSensorOrientation)
  986. {
  988. auto isSensorOrientationHorizontal = deviceOrientationFromAccelerometerSensor == Desktop::rotatedAntiClockwise
  989. || deviceOrientationFromAccelerometerSensor == Desktop::rotatedClockwise;
  990. if (cameraLensFrontFacing && isSensorOrientationHorizontal)
  991. {
  992. // flip angles for front camera
  993. return getRotationAngle (deviceOrientationFromAccelerometerSensor, targetOrientation, false, (cameraSensorOrientation + 180) % 360);
  994. }
  995. switch (targetOrientation)
  996. {
  997. case Desktop::rotatedAntiClockwise:
  998. return cameraSensorOrientation == 90 ? 0 : 180;
  999. case Desktop::rotatedClockwise:
  1000. return cameraSensorOrientation == 90 ? 180 : 0;
  1001. case Desktop::upright:
  1002. case Desktop::upsideDown:
  1003. if ((targetOrientation == Desktop::upright && ! cameraLensFrontFacing)
  1004. || (targetOrientation == Desktop::upsideDown && cameraLensFrontFacing))
  1005. {
  1006. return cameraSensorOrientation;
  1007. }
  1008. else
  1009. {
  1010. if (deviceOrientationFromAccelerometerSensor == Desktop::upright || deviceOrientationFromAccelerometerSensor == Desktop::upsideDown)
  1011. return cameraSensorOrientation;
  1012. else
  1013. return (cameraSensorOrientation + 180) % 360;
  1014. }
  1015. break;
  1016. default:
  1017. return 0;
  1018. }
  1019. }
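// getRotationAngle() works out how far the captured JPEG must be rotated so that it matches the
// UI: it combines the accelerometer-derived device orientation, the target UI orientation, the
// lens facing (the sensor angle is flipped by 180 degrees for the front camera in horizontal
// orientations), and the sensor's fixed mounting orientation (commonly 90 degrees for back
// cameras and 270 degrees for front cameras on phones).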
  1020. static LocalRef<jobject> getBitmapWithCorrectOrientationFrom (LocalRef<jobject>& origBitmap, int rotationAngle)
  1021. {
  1022. auto* env = getEnv();
  1023. auto origBitmapWidth = env->CallIntMethod (origBitmap, AndroidBitmap.getWidth);
  1024. auto origBitmapHeight = env->CallIntMethod (origBitmap, AndroidBitmap.getHeight);
  1026. auto matrix = LocalRef<jobject> (env->NewObject (AndroidMatrix, AndroidMatrix.constructor));
  1027. env->CallBooleanMethod (matrix, AndroidMatrix.postRotate, (jfloat) rotationAngle, (jfloat) 0, (jfloat) 0);
  1028. auto rotatedBitmap = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmap, AndroidBitmap.createBitmapFrom,
  1029. origBitmap.get(), (jint) 0, (jint) 0,
  1030. (jint) origBitmapWidth, (jint) origBitmapHeight,
  1031. matrix.get(), true));
  1032. env->CallVoidMethod (origBitmap, AndroidBitmap.recycle);
  1033. return rotatedBitmap;
  1034. }
  1035. };
  1036. //==============================================================================
  1037. class MediaRecorder : private MediaRecorderOnInfoListener::Owner,
  1038. private MediaRecorderOnErrorListener::Owner
  1039. {
  1040. public:
  1041. MediaRecorder (const String& outputFilePath, int videoWidth, int videoHeight,
  1042. int sensorOrientation, int cameraLensFacing)
  1043. : onInfoListener (*this),
  1044. onErrorListener (*this),
  1045. mediaRecorder (LocalRef<jobject> (getEnv()->NewObject (AndroidMediaRecorder,
  1046. AndroidMediaRecorder.constructor)))
  1047. {
  1048. auto* env = getEnv();
  1049. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOnInfoListener,
  1050. CreateJavaInterface (&onInfoListener,
  1051. "android/media/MediaRecorder$OnInfoListener").get());
  1052. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOnErrorListener,
  1053. CreateJavaInterface (&onErrorListener,
  1054. "android/media/MediaRecorder$OnErrorListener").get());
  1055. // NB: the order of function calls here is enforced, and exceptions will be thrown if
  1056. // the order is changed.
  1057. static constexpr int audioSourceMic = 1;
  1058. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setAudioSource, (jint) audioSourceMic);
  1059. static constexpr int videoSourceSurface = 2;
  1060. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoSource, (jint) videoSourceSurface);
  1061. static constexpr int outputFormatMPEG4 = 2;
  1062. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOutputFormat, (jint) outputFormatMPEG4);
  1063. static constexpr int audioEncoderAAC = 3;
  1064. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setAudioEncoder, (jint) audioEncoderAAC);
  1065. static constexpr int videoEncoderH264 = 2;
  1066. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoEncoder, (jint) videoEncoderH264);
  1067. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoEncodingBitRate, (jint) 10000000);
  1068. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoFrameRate, (jint) 30);
  1069. auto frontFacing = cameraLensFacing == 0;
  1070. auto useInverseDegrees = frontFacing && sensorOrientation == 90;
  1071. int orientationHint = getOrientationHint (useInverseDegrees, sensorOrientation);
  1072. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOrientationHint, (jint) orientationHint);
  1073. getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoSize, (jint) videoWidth, (jint) videoHeight);
  1074. getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOutputFile, javaString (outputFilePath).get());
  1075. getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.prepare);
  1076. }
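// The hard-coded values above correspond to the Android SDK constants of the same names:
// MediaRecorder.AudioSource.MIC = 1, MediaRecorder.VideoSource.SURFACE = 2,
// MediaRecorder.OutputFormat.MPEG_4 = 2, MediaRecorder.AudioEncoder.AAC = 3 and
// MediaRecorder.VideoEncoder.H264 = 2, presumably duplicated here to avoid extra JNI field
// lookups; the bit rate (10 Mbit/s) and frame rate (30 fps) are likewise fixed.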
  1077. ~MediaRecorder()
  1078. {
  1079. getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.release);
  1080. }
  1081. LocalRef<jobject> getSurface()
  1082. {
  1083. return LocalRef<jobject> (getEnv()->CallObjectMethod (mediaRecorder, AndroidMediaRecorder.getSurface));
  1084. }
  1085. void start()
  1086. {
  1087. lockScreenOrientation();
  1088. getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.start);
  1089. hasStartedRecording = true;
  1090. }
  1091. void stop()
  1092. {
  1093. // A request to stop can be sent before recording has had a chance to start, so
  1094. // ignore the request rather than calling AndroidMediaRecorder.stop because
  1095. // otherwise MediaRecorder will throw an exception and...
  1096. if (! hasStartedRecording)
  1097. return;
  1098. hasStartedRecording = false;
  1099. auto* env = getEnv();
  1100. env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.stop);
  1101. // ... ignore RuntimeException that can be thrown if stop() was called after recording
  1102. // has started but before any frame was written to a file. This is not an error.
  1103. jniCheckHasExceptionOccurredAndClear();
  1104. unlockScreenOrientation();
  1105. }
  1106. private:
  1107. MediaRecorderOnInfoListener onInfoListener;
  1108. MediaRecorderOnErrorListener onErrorListener;
  1109. GlobalRef mediaRecorder;
  1110. bool hasStartedRecording = false;
  1111. int orientationsEnabled = -1;
  1112. void lockScreenOrientation()
  1113. {
  1114. orientationsEnabled = Desktop::getInstance().getOrientationsEnabled();
  1115. auto o = Desktop::getInstance().getCurrentOrientation();
  1116. Desktop::getInstance().setOrientationsEnabled (o);
  1117. }
  1118. static jint juceOrientationToNativeOrientation (int orientations) noexcept
  1119. {
  1120. enum
  1121. {
  1122. SCREEN_ORIENTATION_LANDSCAPE = 0,
  1123. SCREEN_ORIENTATION_PORTRAIT = 1,
  1124. SCREEN_ORIENTATION_USER = 2,
  1125. SCREEN_ORIENTATION_REVERSE_LANDSCAPE = 8,
  1126. SCREEN_ORIENTATION_REVERSE_PORTRAIT = 9,
  1127. SCREEN_ORIENTATION_USER_LANDSCAPE = 11,
  1128. SCREEN_ORIENTATION_USER_PORTRAIT = 12,
  1129. };
  1130. switch (orientations)
  1131. {
  1132. case Desktop::upright: return (jint) SCREEN_ORIENTATION_PORTRAIT;
  1133. case Desktop::upsideDown: return (jint) SCREEN_ORIENTATION_REVERSE_PORTRAIT;
  1134. case Desktop::upright + Desktop::upsideDown: return (jint) SCREEN_ORIENTATION_USER_PORTRAIT;
  1135. case Desktop::rotatedAntiClockwise: return (jint) SCREEN_ORIENTATION_LANDSCAPE;
  1136. case Desktop::rotatedClockwise: return (jint) SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
  1137. case Desktop::rotatedClockwise + Desktop::rotatedAntiClockwise: return (jint) SCREEN_ORIENTATION_USER_LANDSCAPE;
  1138. default: return (jint) SCREEN_ORIENTATION_USER;
  1139. }
  1140. }
  1141. void unlockScreenOrientation()
  1142. {
  1143. Desktop::getInstance().setOrientationsEnabled (orientationsEnabled);
  1144. }
  1145. void onInfo (LocalRef<jobject>& recorder, int what, int extra) override
  1146. {
  1147. ignoreUnused (recorder, what, extra);
  1148. JUCE_CAMERA_LOG ("MediaRecorder::OnInfo: " + getInfoStringFromCode (what)
  1149. + ", extra code = " + String (extra));
  1150. }
  1151. void onError (LocalRef<jobject>& recorder, int what, int extra) override
  1152. {
  1153. ignoreUnused (recorder, what, extra);
  1154. JUCE_CAMERA_LOG ("MediaRecorder::onError: " + getErrorStringFromCode (what)
  1155. + ", extra code = " + String (extra));
  1156. }
  1157. static String getInfoStringFromCode (int what)
  1158. {
  1159. enum
  1160. {
  1161. MEDIA_RECORDER_INFO_UNKNOWN = 1,
  1162. MEDIA_RECORDER_INFO_MAX_DURATION_REACHED = 800,
  1163. MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED = 801,
  1164. MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING = 802,
  1165. MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED = 803
  1166. };
  1167. switch (what)
  1168. {
  1169. case MEDIA_RECORDER_INFO_UNKNOWN: return { "Unknown info" };
  1170. case MEDIA_RECORDER_INFO_MAX_DURATION_REACHED: return { "Max duration reached" };
  1171. case MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED: return { "Max filesize reached" };
  1172. case MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING: return { "Max filesize approaching" };
  1173. case MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED: return { "Next output file started" };
  1174. default: return String (what);
  1175. };
  1176. }
  1177. static String getErrorStringFromCode (int what)
  1178. {
  1179. enum
  1180. {
  1181. MEDIA_RECORDER_ERROR_UNKNOWN = 1,
  1182. MEDIA_ERROR_SERVER_DIED = 100
  1183. };
  1184. switch (what)
  1185. {
  1186. case MEDIA_RECORDER_ERROR_UNKNOWN: return { "Unknown error" };
  1187. case MEDIA_ERROR_SERVER_DIED: return { "Server died" };
  1188. default: return String (what);
  1189. };
  1190. }
  1191. static int getOrientationHint (bool useInverseDegrees, int cameraSensorOrientation)
  1192. {
  1193. auto* env = getEnv();
  1194. auto windowManager = LocalRef<jobject> (env->CallObjectMethod (android.activity, JuceAppActivity.getWindowManager));
  1195. auto display = LocalRef<jobject> (env->CallObjectMethod (windowManager, AndroidWindowManager.getDefaultDisplay));
  1196. auto rotation = env->CallIntMethod (display, AndroidDisplay.getRotation);
  1197. enum
  1198. {
  1199. ROTATION_0 = 0,
  1200. ROTATION_90,
  1201. ROTATION_180,
  1202. ROTATION_270
  1203. };
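// The ROTATION_* values above mirror android.view.Surface's constants. Combine the current
// display rotation with the camera sensor orientation to get the clockwise rotation in degrees
// (0/90/180/270) that is passed to MediaRecorder.setOrientationHint().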
  1204. int hint = 0;
  1205. switch (rotation)
  1206. {
  1207. case ROTATION_0: hint = cameraSensorOrientation; break;
  1208. case ROTATION_90: hint = useInverseDegrees ? 180 : 0; break;
  1209. case ROTATION_180: hint = cameraSensorOrientation + 180; break;
  1210. case ROTATION_270: hint = useInverseDegrees ? 0 : 180; break;
  1211. default: jassertfalse;
  1212. }
  1213. return (hint + 360) % 360;
  1214. }
  1215. };
  1216. //==============================================================================
  1217. class ScopedCameraDevice
  1218. {
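// RAII-style wrapper around an android.hardware.camera2.CameraDevice: it starts opening the
// camera in its constructor, closes it (waiting for the native close callback) in its destructor,
// and creates CaptureSessions on demand.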
  1219. public:
  1220. //==============================================================================
  1221. class CaptureSession
  1222. {
  1223. public:
  1224. struct ConfiguredCallback
  1225. {
  1226. virtual ~ConfiguredCallback() {}
  1227. virtual void captureSessionConfigured (CaptureSession*) = 0;
  1228. };
  1229. ~CaptureSession()
  1230. {
  1231. bool calledClose = false;
  1232. auto* env = getEnv();
  1233. {
  1234. const ScopedLock lock (captureSessionLock);
  1235. if (captureSession.get() != nullptr)
  1236. {
  1237. calledClose = true;
  1238. env->CallVoidMethod (captureSession, CameraCaptureSession.close);
  1239. }
  1240. }
1241. // When an exception occurs, CameraCaptureSession.close will never finish, so
1242. // we should not wait for it. For a fatal error an exception does occur, but
1243. // it is caught internally in Java...
  1244. if (jniCheckHasExceptionOccurredAndClear() || scopedCameraDevice.fatalErrorOccurred.get())
  1245. {
  1246. JUCE_CAMERA_LOG ("Exception or fatal error occurred while closing Capture Session, closing by force");
  1247. }
  1248. else if (calledClose)
  1249. {
  1250. pendingClose.set (1);
  1251. closedEvent.wait (-1);
  1252. }
  1253. }
  1254. bool openedOk() const noexcept { return captureSession != nullptr; }
  1255. const GlobalRef& getNativeSession() const { return captureSession; }
  1256. bool start (const LocalRef<jobject>& targetSurfacesList, GlobalRef& handlerToUse)
  1257. {
  1258. if (! openedOk())
  1259. {
  1260. jassertfalse;
  1261. return false;
  1262. }
  1263. auto* env = getEnv();
  1264. auto numSurfaces = env->CallIntMethod (targetSurfacesList, JavaArrayList.size);
  1265. for (int i = 0; i < numSurfaces; ++i)
  1266. {
  1267. auto surface = LocalRef<jobject> (env->CallObjectMethod (targetSurfacesList, JavaArrayList.get, (jint) i));
  1268. env->CallVoidMethod (captureRequestBuilder, CaptureRequestBuilder.addTarget, surface.get());
  1269. }
  1270. previewCaptureRequest = GlobalRef (env->CallObjectMethod (captureRequestBuilder, CaptureRequestBuilder.build));
  1271. env->CallIntMethod (captureSession, CameraCaptureSession.setRepeatingRequest,
  1272. previewCaptureRequest.get(), nullptr, handlerToUse.get());
  1273. return true;
  1274. }
  1275. void takeStillPicture (jobject targetSurface)
  1276. {
  1277. if (stillPictureTaker == nullptr)
  1278. {
  1279. // Can only take picture once session was successfully configured!
  1280. jassertfalse;
  1281. return;
  1282. }
  1283. auto* env = getEnv();
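// 2 == CameraDevice.TEMPLATE_STILL_CAPTURE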
  1284. static constexpr int templateStillCapture = 2;
  1285. auto builder = LocalRef<jobject> (env->CallObjectMethod (scopedCameraDevice.cameraDevice,
  1286. AndroidCameraDevice.createCaptureRequest,
  1287. (jint) templateStillCapture));
  1288. env->CallVoidMethod (builder, CaptureRequestBuilder.addTarget, targetSurface);
  1289. setCaptureRequestBuilderIntegerKey (builder.get(), CaptureRequest.CONTROL_AF_MODE, autoFocusMode);
  1290. auto stillPictureCaptureRequest = LocalRef<jobject> (env->CallObjectMethod (builder, CaptureRequestBuilder.build));
  1291. stillPictureTaker->takePicture (stillPictureCaptureRequest.get());
  1292. }
  1293. private:
  1294. //==============================================================================
  1295. class StillPictureTaker : private AndroidRunnable::Owner
  1296. {
  1297. public:
  1298. StillPictureTaker (GlobalRef& captureSessionToUse, GlobalRef& captureRequestBuilderToUse,
  1299. GlobalRef& previewCaptureRequestToUse, GlobalRef& handlerToUse,
  1300. int autoFocusModeToUse)
  1301. : captureSession (captureSessionToUse),
  1302. captureRequestBuilder (captureRequestBuilderToUse),
  1303. previewCaptureRequest (previewCaptureRequestToUse),
  1304. handler (handlerToUse),
  1305. runnable (*this),
  1306. captureSessionPreviewCaptureCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionCaptureCallback,
  1307. CameraCaptureSessionCaptureCallback.constructor,
  1308. android.activity.get(),
  1309. reinterpret_cast<jlong> (this),
  1310. true))),
  1311. captureSessionStillPictureCaptureCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionCaptureCallback,
  1312. CameraCaptureSessionCaptureCallback.constructor,
  1313. android.activity.get(),
  1314. reinterpret_cast<jlong> (this),
  1315. false))),
  1316. autoFocusMode (autoFocusModeToUse)
  1317. {
  1318. }
  1319. void takePicture (jobject stillPictureCaptureRequestToUse)
  1320. {
  1321. JUCE_CAMERA_LOG ("Taking picture...");
  1322. stillPictureCaptureRequest = GlobalRef (stillPictureCaptureRequestToUse);
  1323. lockFocus();
  1324. }
  1325. private:
  1326. GlobalRef& captureSession;
  1327. GlobalRef& captureRequestBuilder;
  1328. GlobalRef& previewCaptureRequest;
  1329. GlobalRef& handler;
  1330. AndroidRunnable runnable;
  1331. GlobalRef delayedCaptureRunnable;
  1332. GlobalRef captureSessionPreviewCaptureCallback;
  1333. GlobalRef stillPictureCaptureRequest;
  1334. GlobalRef captureSessionStillPictureCaptureCallback;
  1335. int autoFocusMode;
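// Still-picture state machine: lockFocus() triggers auto-focus (pendingFocusLock); if the
// exposure has not converged yet, runPrecaptureSequence() is run (pendingExposurePrecapture ->
// pendingExposurePostPrecapture); once ready, the picture is captured (pictureTaken) and
// unlockFocus() returns to idle.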
  1336. enum class State
  1337. {
  1338. idle = 0,
  1339. pendingFocusLock,
  1340. pendingExposurePrecapture,
  1341. pendingExposurePostPrecapture,
  1342. pictureTaken
  1343. };
  1344. State currentState = State::idle;
  1345. void lockFocus()
  1346. {
  1347. if (jniCheckHasExceptionOccurredAndClear())
  1348. return;
  1349. JUCE_CAMERA_LOG ("Performing auto-focus if possible...");
  1350. currentState = State::pendingFocusLock;
  1351. auto* env = getEnv();
1352. // NB: auto-focus may be unavailable on a device; in that case the camera may have already
1353. // adjusted the exposure automatically. We check for that in updateState().
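// 1 == CameraMetadata.CONTROL_AF_TRIGGER_START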
  1354. static constexpr int controlAfTriggerStart = 1;
  1355. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1356. CaptureRequest.CONTROL_AF_TRIGGER,
  1357. controlAfTriggerStart);
  1358. auto previewRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1359. CaptureRequestBuilder.build));
  1360. env->CallIntMethod (captureSession, CameraCaptureSession.capture, previewRequest.get(),
  1361. captureSessionPreviewCaptureCallback.get(), handler.get());
  1362. }
  1363. void updateState (jobject captureResult)
  1364. {
1365. // An IllegalStateException can be thrown when accessing the CaptureSession,
1366. // claiming that the capture session was already closed even though we may not
1367. // have received the relevant callback yet, so check for this and bail out when needed.
  1368. if (jniCheckHasExceptionOccurredAndClear())
  1369. return;
  1370. auto* env = getEnv();
  1371. switch (currentState)
  1372. {
  1373. case State::pendingFocusLock:
  1374. {
  1375. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingFocusLock...");
  1376. auto controlAfStateValue = getCaptureResultIntegerKeyValue (CaptureResult.CONTROL_AF_STATE, captureResult);
  1377. if (controlAfStateValue.get() == nullptr)
  1378. {
  1379. captureStillPictureDelayed();
  1380. return;
  1381. }
1382. auto autoFocusNotAvailable = autoFocusMode == 0;
1383. if (autoFocusNotAvailable || autoFocusHasFinished (controlAfStateValue))
  1384. {
  1385. auto controlAeStateIntValue = getControlAEState (captureResult);
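// 2 == CameraMetadata.CONTROL_AE_STATE_CONVERGED; -1 means the key was not reported.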
  1386. static constexpr int controlAeStateConverged = 2;
  1387. if (controlAeStateIntValue == -1 || controlAeStateIntValue == controlAeStateConverged)
  1388. {
  1389. currentState = State::pictureTaken;
  1390. captureStillPictureDelayed();
  1391. }
  1392. else
  1393. {
  1394. runPrecaptureSequence();
  1395. }
  1396. }
  1397. break;
  1398. }
  1399. case State::pendingExposurePrecapture:
  1400. {
  1401. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingExposurePrecapture...");
  1402. auto controlAeStateIntValue = getControlAEState (captureResult);
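// 4 == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED, 5 == CameraMetadata.CONTROL_AE_STATE_PRECAPTURE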
  1403. static constexpr int controlAeStateFlashRequired = 4;
  1404. static constexpr int controlAeStatePrecapture = 5;
  1405. if (controlAeStateIntValue == -1 || controlAeStateIntValue == controlAeStateFlashRequired
  1406. || controlAeStateIntValue == controlAeStatePrecapture)
  1407. {
  1408. currentState = State::pendingExposurePostPrecapture;
  1409. }
  1410. break;
  1411. }
  1412. case State::pendingExposurePostPrecapture:
  1413. {
  1414. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingExposurePostPrecapture...");
  1415. auto controlAeStateIntValue = getControlAEState (captureResult);
  1416. static constexpr int controlAeStatePrecapture = 5;
  1417. if (controlAeStateIntValue == -1 || controlAeStateIntValue != controlAeStatePrecapture)
  1418. {
  1419. currentState = State::pictureTaken;
  1420. captureStillPictureDelayed();
  1421. }
  1422. break;
  1423. }
  1424. case State::idle:
  1425. case State::pictureTaken:
  1426. { /* do nothing */ break; }
  1427. };
  1428. }
  1429. static int getControlAEState (jobject captureResult)
  1430. {
  1431. auto controlAeStateValue = getCaptureResultIntegerKeyValue (CaptureResult.CONTROL_AE_STATE, captureResult);
  1432. return controlAeStateValue.get() != nullptr
  1433. ? getEnv()->CallIntMethod (controlAeStateValue, JavaInteger.intValue) : -1;
  1434. }
  1435. static bool autoFocusHasFinished (const LocalRef<jobject>& controlAfStateValue)
  1436. {
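// 4 == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED, 5 == CameraMetadata.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED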
  1437. static constexpr int controlAfStateFocusedLocked = 4;
  1438. static constexpr int controlAfStateNotFocusedLocked = 5;
  1439. auto controlAfStateIntValue = getEnv()->CallIntMethod (controlAfStateValue, JavaInteger.intValue);
  1440. return controlAfStateIntValue == controlAfStateFocusedLocked || controlAfStateIntValue == controlAfStateNotFocusedLocked;
  1441. }
  1442. static LocalRef<jobject> getCaptureResultIntegerKeyValue (jfieldID key, jobject captureResult)
  1443. {
  1444. auto* env = getEnv();
  1445. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CaptureResult, key));
  1446. return LocalRef<jobject> (env->CallObjectMethod (captureResult, CaptureResult.get, jKey.get()));
  1447. }
  1448. void captureStillPictureDelayed()
  1449. {
  1450. if (jniCheckHasExceptionOccurredAndClear())
  1451. return;
  1452. JUCE_CAMERA_LOG ("Still picture capture, device ready, capturing now...");
  1453. auto* env = getEnv();
  1454. env->CallVoidMethod (captureSession, CameraCaptureSession.stopRepeating);
  1455. if (jniCheckHasExceptionOccurredAndClear())
  1456. return;
  1457. env->CallVoidMethod (captureSession, CameraCaptureSession.abortCaptures);
  1458. if (jniCheckHasExceptionOccurredAndClear())
  1459. return;
  1460. // Delay still picture capture for devices that can't handle it right after
  1461. // stopRepeating/abortCaptures calls.
  1462. if (delayedCaptureRunnable.get() == nullptr)
  1463. delayedCaptureRunnable = GlobalRef (CreateJavaInterface (&runnable, "java/lang/Runnable").get());
  1464. env->CallBooleanMethod (handler, AndroidHandler.postDelayed, delayedCaptureRunnable.get(), (jlong) 200);
  1465. }
  1466. void runPrecaptureSequence()
  1467. {
  1468. if (jniCheckHasExceptionOccurredAndClear())
  1469. return;
  1470. auto* env = getEnv();
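// 1 == CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START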
  1471. static constexpr int controlAePrecaptureTriggerStart = 1;
  1472. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1473. CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
  1474. controlAePrecaptureTriggerStart);
  1475. currentState = State::pendingExposurePrecapture;
  1476. auto previewRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1477. CaptureRequestBuilder.build));
  1478. env->CallIntMethod (captureSession, CameraCaptureSession.capture, previewRequest.get(),
  1479. captureSessionPreviewCaptureCallback.get(), handler.get());
  1480. }
  1481. void unlockFocus()
  1482. {
  1483. if (jniCheckHasExceptionOccurredAndClear())
  1484. return;
  1485. JUCE_CAMERA_LOG ("Unlocking focus...");
  1486. currentState = State::idle;
  1487. auto* env = getEnv();
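// 2 == CameraMetadata.CONTROL_AF_TRIGGER_CANCEL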
  1488. static constexpr int controlAfTriggerCancel = 2;
  1489. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1490. CaptureRequest.CONTROL_AF_TRIGGER,
  1491. controlAfTriggerCancel);
  1492. auto resetAutoFocusRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1493. CaptureRequestBuilder.build));
  1494. env->CallIntMethod (captureSession, CameraCaptureSession.capture, resetAutoFocusRequest.get(),
  1495. nullptr, handler.get());
  1496. if (jniCheckHasExceptionOccurredAndClear())
  1497. return;
  1498. // NB: for preview, using preview capture request again
  1499. env->CallIntMethod (captureSession, CameraCaptureSession.setRepeatingRequest, previewCaptureRequest.get(),
  1500. nullptr, handler.get());
  1501. }
  1502. //==============================================================================
  1503. void run() override
  1504. {
  1505. captureStillPicture();
  1506. }
  1507. void captureStillPicture()
  1508. {
  1509. getEnv()->CallIntMethod (captureSession, CameraCaptureSession.capture,
  1510. stillPictureCaptureRequest.get(), captureSessionStillPictureCaptureCallback.get(),
  1511. nullptr);
  1512. }
  1513. //==============================================================================
  1514. void cameraCaptureSessionCaptureCompleted (bool isPreview, jobject session, jobject request, jobject result)
  1515. {
  1516. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureCompleted()");
  1517. ignoreUnused (session, request);
  1518. if (isPreview)
  1519. updateState (result);
  1520. else if (currentState != State::idle)
  1521. unlockFocus();
  1522. }
  1523. void cameraCaptureSessionCaptureFailed (bool isPreview, jobject session, jobject request, jobject failure)
  1524. {
  1525. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureFailed()");
  1526. ignoreUnused (isPreview, session, request, failure);
  1527. }
  1528. void cameraCaptureSessionCaptureProgressed (bool isPreview, jobject session, jobject request, jobject partialResult)
  1529. {
  1530. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureProgressed()");
  1531. ignoreUnused (session, request);
  1532. if (isPreview)
  1533. updateState (partialResult);
  1534. }
  1535. void cameraCaptureSessionCaptureSequenceAborted (bool isPreview, jobject session, int sequenceId)
  1536. {
  1537. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureSequenceAborted()");
1538. ignoreUnused (isPreview, session, sequenceId);
  1539. }
  1540. void cameraCaptureSessionCaptureSequenceCompleted (bool isPreview, jobject session, int sequenceId, int64 frameNumber)
  1541. {
  1542. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureSequenceCompleted()");
  1543. ignoreUnused (isPreview, session, sequenceId, frameNumber);
  1544. }
  1545. void cameraCaptureSessionCaptureStarted (bool isPreview, jobject session, jobject request, int64 timestamp, int64 frameNumber)
  1546. {
  1547. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureStarted()");
  1548. ignoreUnused (isPreview, session, request, timestamp, frameNumber);
  1549. }
  1550. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1551. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1552. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1553. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1554. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1555. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1556. };
  1557. //==============================================================================
  1558. ScopedCameraDevice& scopedCameraDevice;
  1559. ConfiguredCallback& configuredCallback;
  1560. GlobalRef& handler;
  1561. GlobalRef captureRequestBuilder;
  1562. GlobalRef previewCaptureRequest;
  1563. GlobalRef captureSessionStateCallback;
  1564. int autoFocusMode;
  1565. GlobalRef captureSession;
  1566. CriticalSection captureSessionLock;
  1567. Atomic<int> pendingClose { 0 };
  1568. std::unique_ptr<StillPictureTaker> stillPictureTaker;
  1569. WaitableEvent closedEvent;
  1570. JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)
  1571. //==============================================================================
  1572. CaptureSession (ScopedCameraDevice& scopedCameraDeviceToUse, ConfiguredCallback& configuredCallbackToUse,
  1573. const LocalRef<jobject>& surfacesList, GlobalRef& handlerToUse,
  1574. int captureSessionTemplate, int autoFocusModeToUse)
  1575. : scopedCameraDevice (scopedCameraDeviceToUse),
  1576. configuredCallback (configuredCallbackToUse),
  1577. handler (handlerToUse),
  1578. captureRequestBuilder (LocalRef<jobject> (getEnv()->CallObjectMethod (scopedCameraDevice.cameraDevice,
  1579. AndroidCameraDevice.createCaptureRequest,
  1580. (jint) captureSessionTemplate))),
  1581. captureSessionStateCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionStateCallback,
  1582. CameraCaptureSessionStateCallback.constructor,
  1583. android.activity.get(),
  1584. reinterpret_cast<jlong> (this)))),
  1585. autoFocusMode (autoFocusModeToUse)
  1586. {
  1587. auto* env = getEnv();
  1588. env->CallVoidMethod (scopedCameraDevice.cameraDevice, AndroidCameraDevice.createCaptureSession,
  1589. surfacesList.get(), captureSessionStateCallback.get(), handler.get());
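// 1 == CameraMetadata.CONTROL_MODE_AUTO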
  1590. static constexpr int controlModeAuto = 1;
  1591. setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(), CaptureRequest.CONTROL_MODE, controlModeAuto);
  1592. setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(), CaptureRequest.CONTROL_AF_MODE, autoFocusMode);
  1593. }
  1594. static void setCaptureRequestBuilderIntegerKey (jobject captureRequestBuilder, jfieldID key, int value)
  1595. {
  1596. auto* env = getEnv();
  1597. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CaptureRequest, key));
  1598. auto jValue = LocalRef<jobject> (env->CallStaticObjectMethod (JavaInteger, JavaInteger.valueOf, (jint) value));
  1599. env->CallVoidMethod (captureRequestBuilder, CaptureRequestBuilder.set, jKey.get(), jValue.get());
  1600. }
  1601. void cameraCaptureSessionActive (jobject session)
  1602. {
  1603. JUCE_CAMERA_LOG ("cameraCaptureSessionActive()");
  1604. ignoreUnused (session);
  1605. }
  1606. void cameraCaptureSessionClosed (jobject session)
  1607. {
  1608. JUCE_CAMERA_LOG ("cameraCaptureSessionClosed()");
  1609. ignoreUnused (session);
  1610. closedEvent.signal();
  1611. }
  1612. void cameraCaptureSessionConfigureFailed (jobject session)
  1613. {
  1614. JUCE_CAMERA_LOG ("cameraCaptureSessionConfigureFailed()");
  1615. ignoreUnused (session);
  1616. WeakReference<CaptureSession> weakRef (this);
  1617. MessageManager::callAsync ([this, weakRef]()
  1618. {
  1619. if (weakRef == nullptr)
  1620. return;
  1621. configuredCallback.captureSessionConfigured (nullptr);
  1622. });
  1623. }
  1624. void cameraCaptureSessionConfigured (jobject session)
  1625. {
  1626. JUCE_CAMERA_LOG ("cameraCaptureSessionConfigured()");
  1627. if (pendingClose.get() == 1)
  1628. {
1629. // Already closing, bail out.
  1630. closedEvent.signal();
  1631. GlobalRef s (session);
  1632. MessageManager::callAsync ([s]()
  1633. {
  1634. getEnv()->CallVoidMethod (s, CameraCaptureSession.close);
  1635. });
  1636. return;
  1637. }
  1638. {
  1639. const ScopedLock lock (captureSessionLock);
  1640. captureSession = GlobalRef (session);
  1641. }
  1642. WeakReference<CaptureSession> weakRef (this);
  1643. MessageManager::callAsync ([this, weakRef]()
  1644. {
  1645. if (weakRef == nullptr)
  1646. return;
  1647. stillPictureTaker.reset (new StillPictureTaker (captureSession, captureRequestBuilder,
  1648. previewCaptureRequest, handler, autoFocusMode));
  1649. configuredCallback.captureSessionConfigured (this);
  1650. });
  1651. }
  1652. void cameraCaptureSessionReady (jobject session)
  1653. {
  1654. JUCE_CAMERA_LOG ("cameraCaptureSessionReady()");
  1655. ignoreUnused (session);
  1656. }
  1657. friend class ScopedCameraDevice;
  1658. friend void juce_cameraCaptureSessionActive (int64, void*);
  1659. friend void juce_cameraCaptureSessionClosed (int64, void*);
  1660. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  1661. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  1662. friend void juce_cameraCaptureSessionReady (int64, void*);
  1663. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1664. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1665. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1666. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1667. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1668. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1669. JUCE_DECLARE_NON_COPYABLE (CaptureSession)
  1670. };
  1671. //==============================================================================
  1672. ScopedCameraDevice (Pimpl& ownerToUse, const String& cameraIdToUse, GlobalRef& cameraManagerToUse,
  1673. GlobalRef& handlerToUse, int autoFocusModeToUse)
  1674. : owner (ownerToUse),
  1675. cameraId (cameraIdToUse),
  1676. cameraManager (cameraManagerToUse),
  1677. handler (handlerToUse),
  1678. cameraStateCallback (LocalRef<jobject> (getEnv()->NewObject (CameraDeviceStateCallback,
  1679. CameraDeviceStateCallback.constructor,
  1680. android.activity.get(),
  1681. reinterpret_cast<jlong> (this)))),
  1682. autoFocusMode (autoFocusModeToUse)
  1683. {
  1684. open();
  1685. }
  1686. ~ScopedCameraDevice()
  1687. {
  1688. close();
  1689. }
  1690. void open()
  1691. {
  1692. pendingOpen.set (1);
  1693. auto* env = getEnv();
  1694. env->CallVoidMethod (cameraManager, CameraManager.openCamera,
  1695. javaString (cameraId).get(),
  1696. cameraStateCallback.get(), handler.get());
1697. // If something went wrong we will be notified in the cameraDeviceStateError()
1698. // callback, so silence the redundant exception here.
  1699. jniCheckHasExceptionOccurredAndClear();
  1700. }
  1701. void close()
  1702. {
  1703. if (pendingClose.compareAndSetBool (1, 0))
  1704. {
  1705. auto* env = getEnv();
  1706. if (cameraDevice.get() != nullptr)
  1707. {
  1708. env->CallVoidMethod (cameraDevice, AndroidCameraDevice.close);
  1709. closedEvent.wait (-1);
  1710. }
  1711. pendingClose.set (0);
  1712. pendingOpen .set (0);
  1713. cameraDevice.clear();
  1714. }
  1715. }
  1716. bool openedOk() const { return cameraDevice != nullptr; }
  1717. bool hasErrorOccurred() const { return fatalErrorOccurred.get(); }
  1718. CaptureSession* createCaptureSession (CaptureSession::ConfiguredCallback& cc,
  1719. const LocalRef<jobject>& surfacesList,
  1720. GlobalRef& handlerToUse,
  1721. int captureSessionTemplate)
  1722. {
  1723. if (! openedOk())
  1724. {
  1725. jassertfalse;
  1726. return nullptr;
  1727. }
  1728. return new CaptureSession (*this, cc, surfacesList, handlerToUse, captureSessionTemplate, autoFocusMode);
  1729. }
  1730. private:
  1731. Pimpl& owner;
  1732. const String cameraId;
  1733. GlobalRef& cameraManager;
  1734. GlobalRef& handler;
  1735. GlobalRef cameraStateCallback;
  1736. int autoFocusMode;
  1737. GlobalRef cameraDevice;
  1738. Atomic<int> pendingOpen { 0 };
  1739. Atomic<int> pendingClose { 0 };
  1740. Atomic<int> fatalErrorOccurred { 0 };
  1741. String openError;
  1742. WaitableEvent closedEvent;
  1743. void cameraDeviceStateClosed()
  1744. {
  1745. JUCE_CAMERA_LOG ("cameraDeviceStateClosed()");
  1746. closedEvent.signal();
  1747. }
  1748. void cameraDeviceStateDisconnected()
  1749. {
  1750. JUCE_CAMERA_LOG ("cameraDeviceStateDisconnected()");
  1751. if (pendingOpen.compareAndSetBool (0, 1))
  1752. {
  1753. openError = "Device disconnected";
  1754. notifyOpenResult();
  1755. }
  1756. MessageManager::callAsync ([this]() { close(); });
  1757. }
  1758. void cameraDeviceStateError (int errorCode)
  1759. {
  1760. String error = cameraErrorCodeToString (errorCode);
  1761. JUCE_CAMERA_LOG ("cameraDeviceStateError(), error: " + error);
  1762. if (pendingOpen.compareAndSetBool (0, 1))
  1763. {
  1764. openError = error;
  1765. notifyOpenResult();
  1766. }
  1767. fatalErrorOccurred.set (1);
  1768. MessageManager::callAsync ([this, error]()
  1769. {
  1770. owner.cameraDeviceError (error);
  1771. close();
  1772. });
  1773. }
  1774. void cameraDeviceStateOpened (jobject cameraDeviceToUse)
  1775. {
  1776. JUCE_CAMERA_LOG ("cameraDeviceStateOpened()");
  1777. pendingOpen.set (0);
  1778. cameraDevice = GlobalRef (cameraDeviceToUse);
  1779. notifyOpenResult();
  1780. }
  1781. void notifyOpenResult()
  1782. {
  1783. MessageManager::callAsync ([this]() { owner.cameraOpenFinished (openError); });
  1784. }
  1785. friend void juce_cameraDeviceStateClosed (int64);
  1786. friend void juce_cameraDeviceStateDisconnected (int64);
  1787. friend void juce_cameraDeviceStateError (int64, int);
  1788. friend void juce_cameraDeviceStateOpened (int64, void*);
  1789. friend void juce_cameraCaptureSessionActive (int64, void*);
  1790. friend void juce_cameraCaptureSessionClosed (int64, void*);
  1791. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  1792. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  1793. friend void juce_cameraCaptureSessionReady (int64, void*);
  1794. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1795. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1796. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1797. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1798. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1799. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1800. };
  1801. //==============================================================================
  1802. struct CaptureSessionModeBase
  1803. {
  1804. virtual ~CaptureSessionModeBase() { }
  1805. virtual bool isVideoRecordSession() const = 0;
  1806. virtual void triggerStillPictureCapture() = 0;
  1807. };
  1808. //==============================================================================
  1809. template <typename Mode>
  1810. struct CaptureSessionMode : public CaptureSessionModeBase,
  1811. private PreviewDisplay::Listener,
  1812. private ScopedCameraDevice::CaptureSession::ConfiguredCallback
  1813. {
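// CRTP: the Mode type (preview or video-recording) supplies getCaptureSessionSurfaces(),
// getTargetSurfaces(), getTemplate(), isVideoRecord(), sessionStarted() and takeStillPicture().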
  1814. ~CaptureSessionMode()
  1815. {
  1816. captureSession.reset();
  1817. previewDisplay.removeListener (this);
  1818. }
  1819. bool isVideoRecordSession() const override
  1820. {
  1821. return Mode::isVideoRecord();
  1822. }
  1823. void triggerStillPictureCapture() override
  1824. {
  1825. if (captureSession == nullptr)
  1826. {
  1827. // The capture session must be ready before taking a still picture.
  1828. // Did you remember to create and show a preview display?
  1829. jassertfalse;
  1830. return;
  1831. }
  1832. crtp().takeStillPicture();
  1833. }
  1834. protected:
  1835. CaptureSessionMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse,
  1836. GlobalRef& handlerToUse, PreviewDisplay& pd, int cameraSensorOrientationToUse,
  1837. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  1838. : owner (ownerToUse),
  1839. scopedCameraDevice (cameraDeviceToUse),
  1840. handler (handlerToUse),
  1841. previewDisplay (pd),
  1842. cameraSensorOrientation (cameraSensorOrientationToUse),
  1843. cameraLensFacing (cameraLensFacingToUse),
  1844. streamConfigurationMap (streamConfigurationMapToUse)
  1845. {
  1846. WeakReference<CaptureSessionMode<Mode>> weakRef (this);
  1847. if (weakRef == nullptr)
  1848. return;
  1849. // async so that the object is fully constructed before the callback gets invoked
  1850. MessageManager::callAsync ([this, weakRef]()
  1851. {
  1852. if (weakRef == nullptr)
  1853. return;
  1854. previewDisplay.addListener (this);
  1855. });
  1856. }
  1857. Mode& crtp() { return static_cast<Mode&> (*this); }
  1858. void previewDisplayReady() override
  1859. {
  1860. jassert (previewDisplay.isReady());
  1861. JUCE_CAMERA_LOG ("previewDisplayReady()");
  1862. // close previous capture session first
  1863. captureSession.reset();
  1864. if (scopedCameraDevice.hasErrorOccurred())
  1865. {
  1866. JUCE_CAMERA_LOG ("Device error detected, not recreating a new camera session. The device needs to be reopened.");
  1867. return;
  1868. }
  1869. captureSession.reset (scopedCameraDevice.createCaptureSession (*this, crtp().getCaptureSessionSurfaces(),
  1870. handler, Mode::getTemplate()));
  1871. }
  1872. void previewDisplayAboutToBeDestroyed() override
  1873. {
  1874. JUCE_CAMERA_LOG ("previewDisplayAboutToBeDestroyed()");
  1875. stopPreview();
  1876. }
  1877. void captureSessionConfigured (ScopedCameraDevice::CaptureSession* session) override
  1878. {
  1879. if (session == nullptr)
  1880. {
  1881. owner.cameraDeviceError ("Failed to configure camera session.");
  1882. return;
  1883. }
  1884. jassert (session == captureSession.get());
  1885. startSession();
  1886. }
  1887. void startSession()
  1888. {
  1889. if (! captureSession->start (crtp().getTargetSurfaces(), handler))
  1890. {
  1891. jassertfalse;
  1892. JUCE_CAMERA_LOG ("Could not start capture session");
  1893. }
  1894. crtp().sessionStarted();
  1895. }
  1896. void stopPreview()
  1897. {
  1898. if (captureSession != nullptr)
  1899. {
  1900. auto session = captureSession->getNativeSession();
  1901. auto* env = getEnv();
  1902. env->CallVoidMethod (session, CameraCaptureSession.stopRepeating);
  1903. if (jniCheckHasExceptionOccurredAndClear())
  1904. return;
  1905. env->CallVoidMethod (session, CameraCaptureSession.abortCaptures);
  1906. jniCheckHasExceptionOccurredAndClear();
  1907. }
  1908. }
  1909. Pimpl& owner;
  1910. ScopedCameraDevice& scopedCameraDevice;
  1911. GlobalRef& handler;
  1912. PreviewDisplay& previewDisplay;
  1913. int cameraSensorOrientation;
  1914. int cameraLensFacing;
  1915. StreamConfigurationMap& streamConfigurationMap;
  1916. std::unique_ptr<ScopedCameraDevice::CaptureSession> captureSession;
  1917. JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSessionMode<Mode>)
  1918. };
  1919. //==============================================================================
  1920. struct CaptureSessionPreviewMode : public CaptureSessionMode<CaptureSessionPreviewMode>
  1921. {
  1922. CaptureSessionPreviewMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse, GlobalRef& handlerToUse,
  1923. PreviewDisplay& pd, ImageReader& ir, int cameraSensorOrientation,
  1924. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  1925. : CaptureSessionMode<CaptureSessionPreviewMode> (ownerToUse, cameraDeviceToUse, handlerToUse, pd,
  1926. cameraSensorOrientation, cameraLensFacingToUse, streamConfigurationMapToUse),
  1927. imageReader (ir)
  1928. {
  1929. }
  1930. // Surfaces passed to newly created capture session.
  1931. LocalRef<jobject> getCaptureSessionSurfaces() const
  1932. {
  1933. auto* env = getEnv();
  1934. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  1935. auto imageSurface = LocalRef<jobject> (imageReader.getSurface());
  1936. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 2));
  1937. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  1938. env->CallBooleanMethod (arrayList, JavaArrayList.add, imageSurface.get());
  1939. auto supported = streamConfigurationMap.isOutputSupportedForSurface (imageSurface);
1940. // The output surface is not supported by this device, so still image capture will not work!
  1941. jassert (supported);
  1942. return arrayList;
  1943. }
  1944. // Surfaces set as target during capture.
  1945. LocalRef<jobject> getTargetSurfaces() const
  1946. {
  1947. auto* env = getEnv();
  1948. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  1949. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 1));
  1950. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  1951. return arrayList;
  1952. }
  1953. static int getTemplate()
  1954. {
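// 1 == CameraDevice.TEMPLATE_PREVIEW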
  1955. static constexpr int templatePreview = 1;
  1956. return templatePreview;
  1957. }
  1958. static bool isVideoRecord() { return false; }
  1959. void sessionStarted() {}
  1960. void takeStillPicture()
  1961. {
  1962. imageReader.resetNotificationFlag();
  1963. captureSession->takeStillPicture (imageReader.getSurface());
  1964. }
  1965. private:
  1966. ImageReader& imageReader;
  1967. };
  1968. //==============================================================================
  1969. struct CaptureSessionVideoRecordingMode : public CaptureSessionMode<CaptureSessionVideoRecordingMode>
  1970. {
  1971. CaptureSessionVideoRecordingMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse, GlobalRef& handlerToUse,
  1972. PreviewDisplay& pd, MediaRecorder& mr, int cameraSensorOrientation,
  1973. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  1974. : CaptureSessionMode<CaptureSessionVideoRecordingMode> (ownerToUse, cameraDeviceToUse, handlerToUse, pd,
  1975. cameraSensorOrientation, cameraLensFacingToUse, streamConfigurationMapToUse),
  1976. mediaRecorder (mr)
  1977. {
  1978. }
  1979. ~CaptureSessionVideoRecordingMode()
  1980. {
1981. // We need to explicitly stop the preview before stopping the media recorder,
1982. // because legacy devices can't handle stopping the recording before the preview has been stopped.
  1983. stopPreview();
  1984. mediaRecorder.stop();
  1985. }
  1986. // Surfaces passed to newly created capture session.
  1987. LocalRef<jobject> getCaptureSessionSurfaces() const
  1988. {
  1989. auto* env = getEnv();
  1990. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  1991. auto mediaRecorderSurface = LocalRef<jobject> (mediaRecorder.getSurface());
  1992. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 2));
  1993. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  1994. env->CallBooleanMethod (arrayList, JavaArrayList.add, mediaRecorderSurface.get());
  1995. return arrayList;
  1996. }
  1997. // Surfaces set as target during capture.
  1998. LocalRef<jobject> getTargetSurfaces() const
  1999. {
  2000. // Same surfaces used.
  2001. return getCaptureSessionSurfaces();
  2002. }
  2003. static int getTemplate()
  2004. {
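// 3 == CameraDevice.TEMPLATE_RECORD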
  2005. static constexpr int templateRecord = 3;
  2006. return templateRecord;
  2007. }
  2008. static bool isVideoRecord() { return true; }
  2009. void sessionStarted()
  2010. {
  2011. MessageManager::callAsync ([this]() { mediaRecorder.start(); });
  2012. }
  2013. void takeStillPicture()
  2014. {
  2015. // Taking still pictures while recording video is not supported on Android.
  2016. jassertfalse;
  2017. }
  2018. private:
  2019. MediaRecorder& mediaRecorder;
  2020. };
  2021. //==============================================================================
  2022. class DeviceOrientationChangeListener : private Timer
  2023. {
  2024. public:
  2025. DeviceOrientationChangeListener (PreviewDisplay& pd)
  2026. : previewDisplay (pd),
  2027. orientationEventListener (getEnv()->NewObject (OrientationEventListener,
  2028. OrientationEventListener.constructor,
  2029. android.activity.get(),
  2030. reinterpret_cast<jlong> (this),
  2031. android.activity.get(),
  2032. sensorDelayUI)),
  2033. canDetectChange (getEnv()->CallBooleanMethod (orientationEventListener,
  2034. OrientationEventListener.canDetectOrientation) != 0),
  2035. deviceOrientation (Desktop::getInstance().getCurrentOrientation()),
  2036. lastKnownScreenOrientation (deviceOrientation)
  2037. {
  2038. setEnabled (true);
  2039. }
  2040. ~DeviceOrientationChangeListener()
  2041. {
  2042. setEnabled (false);
  2043. }
  2044. void setEnabled (bool shouldBeEnabled)
  2045. {
  2046. if (shouldBeEnabled && ! canDetectChange)
  2047. {
2048. // This device does not support orientation listening, so photos may have the wrong orientation!
  2049. jassertfalse;
  2050. return;
  2051. }
  2052. if (shouldBeEnabled)
  2053. getEnv()->CallVoidMethod (orientationEventListener, OrientationEventListener.enable);
  2054. else
  2055. getEnv()->CallVoidMethod (orientationEventListener, OrientationEventListener.disable);
  2056. }
  2057. bool isSupported() const noexcept { return canDetectChange; }
  2058. Desktop::DisplayOrientation getDeviceOrientation() const noexcept
  2059. {
  2060. return deviceOrientation;
  2061. }
  2062. private:
  2063. PreviewDisplay& previewDisplay;
  2064. GlobalRef orientationEventListener;
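// 2 == android.hardware.SensorManager.SENSOR_DELAY_UI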
  2065. static constexpr jint sensorDelayUI = 2;
  2066. bool canDetectChange;
  2067. Desktop::DisplayOrientation deviceOrientation;
  2068. Desktop::DisplayOrientation lastKnownScreenOrientation;
  2069. int numChecksForOrientationChange = 10;
  2070. void orientationChanged (int orientation)
  2071. {
  2072. jassert (orientation < 360);
  2073. // -1 == unknown
  2074. if (orientation < 0)
  2075. return;
  2076. auto oldOrientation = deviceOrientation;
2077. // NB: this assumes the natural position is always portrait, but some devices may be landscape...
  2078. if (orientation > (360 - 45) || orientation < 45)
  2079. deviceOrientation = Desktop::upright;
  2080. else if (orientation < 135)
  2081. deviceOrientation = Desktop::rotatedClockwise;
  2082. else if (orientation < 225)
  2083. deviceOrientation = Desktop::upsideDown;
  2084. else
  2085. deviceOrientation = Desktop::rotatedAntiClockwise;
  2086. if (oldOrientation != deviceOrientation)
  2087. {
  2088. lastKnownScreenOrientation = Desktop::getInstance().getCurrentOrientation();
  2089. // Need to update preview transform, but screen orientation will change slightly
  2090. // later than sensor orientation.
  2091. startTimer (500);
  2092. }
  2093. }
  2094. void timerCallback() override
  2095. {
  2096. auto currentOrientation = Desktop::getInstance().getCurrentOrientation();
  2097. if (lastKnownScreenOrientation != currentOrientation)
  2098. {
  2099. lastKnownScreenOrientation = currentOrientation;
  2100. stopTimer();
  2101. numChecksForOrientationChange = 10;
  2102. previewDisplay.updateSurfaceTransform();
  2103. return;
  2104. }
  2105. if (--numChecksForOrientationChange == 0)
  2106. {
  2107. stopTimer();
  2108. numChecksForOrientationChange = 10;
  2109. }
  2110. }
  2111. friend void juce_deviceOrientationChanged (int64, int);
  2112. };
  2113. //==============================================================================
  2114. CameraDevice& owner;
  2115. int minWidth, minHeight, maxWidth, maxHeight;
  2116. String cameraId;
  2117. InternalOpenCameraResultCallback cameraOpenCallback;
  2118. #if __ANDROID_API__ >= 21
  2119. AppPausedResumedListener appPausedResumedListener;
  2120. GlobalRef appPausedResumedListenerNative;
  2121. GlobalRef cameraManager;
  2122. GlobalRef cameraCharacteristics;
  2123. GlobalRef handlerThread;
  2124. GlobalRef handler;
  2125. StreamConfigurationMap streamConfigurationMap;
  2126. PreviewDisplay previewDisplay;
  2127. DeviceOrientationChangeListener deviceOrientationChangeListener;
  2128. std::unique_ptr<ImageReader> imageReader;
  2129. std::unique_ptr<MediaRecorder> mediaRecorder;
  2130. std::unique_ptr<CaptureSessionModeBase> currentCaptureSessionMode;
  2131. std::unique_ptr<ScopedCameraDevice> scopedCameraDevice;
  2132. CriticalSection listenerLock;
  2133. ListenerList<Listener> listeners;
  2134. std::function<void (const Image&)> pictureTakenCallback;
  2135. Time firstRecordedFrameTimeMs;
  2136. bool notifiedOfCameraOpening = false;
  2137. #endif
  2138. bool appWasPaused = false;
  2139. //==============================================================================
  2140. int getCameraSensorOrientation() const
  2141. {
  2142. return getCameraCharacteristicsIntegerKeyValue (CameraCharacteristics.SENSOR_ORIENTATION);
  2143. }
  2144. int getAutoFocusModeToUse() const
  2145. {
  2146. auto supportedModes = getSupportedAutoFocusModes();
  2147. enum
  2148. {
  2149. CONTROL_AF_MODE_OFF = 0,
  2150. CONTROL_AF_MODE_AUTO = 1,
  2151. CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4
  2152. };
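// The values above mirror CameraMetadata's CONTROL_AF_MODE_* constants. Prefer continuous-picture
// auto-focus when available, fall back to single-shot auto-focus, otherwise disable auto-focus.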
  2153. if (supportedModes.contains (CONTROL_AF_MODE_CONTINUOUS_PICTURE))
  2154. return CONTROL_AF_MODE_CONTINUOUS_PICTURE;
  2155. if (supportedModes.contains (CONTROL_AF_MODE_AUTO))
  2156. return CONTROL_AF_MODE_AUTO;
  2157. return CONTROL_AF_MODE_OFF;
  2158. }
  2159. Array<int> getSupportedAutoFocusModes() const
  2160. {
  2161. auto* env = getEnv();
  2162. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics, CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES));
  2163. auto supportedModes = LocalRef<jintArray> ((jintArray) env->CallObjectMethod (cameraCharacteristics,
  2164. CameraCharacteristics.get,
  2165. jKey.get()));
  2166. return jintArrayToJuceArray (supportedModes);
  2167. }
  2168. static Array<int> jintArrayToJuceArray (const LocalRef<jintArray>& jArray)
  2169. {
  2170. auto* env = getEnv();
  2171. auto* jArrayElems = env->GetIntArrayElements (jArray, 0);
  2172. auto numElems = env->GetArrayLength (jArray);
  2173. Array<int> juceArray;
  2174. for (int s = 0; s < numElems; ++s)
  2175. juceArray.add (jArrayElems[s]);
  2176. env->ReleaseIntArrayElements (jArray, jArrayElems, 0);
  2177. return juceArray;
  2178. }
  2179. int getCameraCharacteristicsIntegerKeyValue (jfieldID key) const
  2180. {
  2181. auto* env = getEnv();
  2182. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics, key));
  2183. auto jValue = LocalRef<jobject> (env->CallObjectMethod (cameraCharacteristics,
  2184. CameraCharacteristics.get,
  2185. jKey.get()));
  2186. return env->CallIntMethod (jValue, JavaInteger.intValue);
  2187. }
  2188. int getCameraLensFacing() const
  2189. {
  2190. return getCameraCharacteristicsIntegerKeyValue (CameraCharacteristics.LENS_FACING);
  2191. }
  2192. //==============================================================================
  2193. void cameraOpenFinished (const String& error)
  2194. {
  2195. JUCE_CAMERA_LOG ("cameraOpenFinished(), error = " + error);
  2196. if (error.isEmpty())
  2197. {
  2198. setupStillImageSize();
  2199. startPreviewMode (*imageReader);
  2200. }
  2201. // Do not notify about camera being reopened on app resume.
  2202. if (! notifiedOfCameraOpening)
  2203. {
  2204. notifiedOfCameraOpening = true;
  2205. invokeCameraOpenCallback (error);
  2206. }
  2207. }
  2208. void cameraDeviceError (const String& error)
  2209. {
  2210. if (owner.onErrorOccurred != nullptr)
  2211. owner.onErrorOccurred (error);
  2212. }
  2213. void invokeCameraOpenCallback (const String& error)
  2214. {
  2215. JUCE_CAMERA_LOG ("invokeCameraOpenCallback(), error = " + error);
  2216. if (cameraOpenCallback != nullptr)
  2217. cameraOpenCallback (cameraId, error);
  2218. }
  2219. //==============================================================================
  2220. void callListeners (const Image& image)
  2221. {
  2222. const ScopedLock sl (listenerLock);
  2223. listeners.call ([=] (Listener& l) { l.imageReceived (image); });
  2224. }
  2225. void notifyPictureTaken (const Image& image)
  2226. {
  2227. JUCE_CAMERA_LOG ("notifyPictureTaken()");
  2228. if (pictureTakenCallback != nullptr)
  2229. pictureTakenCallback (image);
  2230. }
  2231. void triggerStillPictureCapture()
  2232. {
  2233. currentCaptureSessionMode->triggerStillPictureCapture();
  2234. }
  2235. //==============================================================================
  2236. void setupStillImageSize()
  2237. {
  2238. imageReader.reset();
  2239. auto imageSize = chooseBestSize (minWidth, minHeight, maxWidth, maxHeight,
  2240. streamConfigurationMap.getSupportedStillImageOutputSizes());
  2241. imageReader.reset (new ImageReader (*this, handler, imageSize.getWidth(), imageSize.getHeight(),
  2242. getCameraSensorOrientation()));
  2243. }
  2244. static Rectangle<int> chooseBestSize (int minWidth, int minHeight, int maxWidth, int maxHeight,
  2245. Array<Rectangle<int>> supportedSizes)
  2246. {
  2247. Rectangle<int> result;
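// Pick the largest supported size (by containment) that still satisfies the min/max constraints.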
  2248. for (auto& size : supportedSizes)
  2249. {
  2250. auto width = size.getWidth();
  2251. auto height = size.getHeight();
  2252. if (width < minWidth || width > maxWidth || height < minHeight || height > maxHeight)
  2253. continue;
  2254. if (size.contains (result))
  2255. result = size;
  2256. }
2257. // None of the supported sizes matches the required width & height limits, so pick
2258. // the first one available...
  2259. jassert (! result.isEmpty());
  2260. if (result.isEmpty())
  2261. result = supportedSizes[0];
  2262. return result;
  2263. }
  2264. //==============================================================================
  2265. void startPreviewMode (ImageReader& ir)
  2266. {
  2267. if (currentCaptureSessionMode != nullptr && ! currentCaptureSessionMode->isVideoRecordSession())
  2268. return;
  2269. // previous mode has to be stopped first
  2270. jassert (currentCaptureSessionMode.get() == nullptr);
  2271. if (scopedCameraDevice == nullptr || ! scopedCameraDevice->openedOk())
  2272. return;
  2273. currentCaptureSessionMode.reset (new CaptureSessionPreviewMode (*this, *scopedCameraDevice, handler,
  2274. previewDisplay, ir,
  2275. getCameraSensorOrientation(),
  2276. getCameraLensFacing(),
  2277. streamConfigurationMap));
  2278. }
  2279. void startVideoRecordingMode (MediaRecorder& mr)
  2280. {
  2281. if (currentCaptureSessionMode != nullptr && currentCaptureSessionMode->isVideoRecordSession())
  2282. return;
  2283. // previous mode has to be stopped first
  2284. jassert (currentCaptureSessionMode.get() == nullptr);
  2285. jassert (scopedCameraDevice != nullptr && scopedCameraDevice->openedOk());
  2286. if (scopedCameraDevice == nullptr || ! scopedCameraDevice->openedOk())
  2287. return;
  2288. currentCaptureSessionMode.reset (new CaptureSessionVideoRecordingMode (*this, *scopedCameraDevice, handler,
  2289. previewDisplay, mr,
  2290. getCameraSensorOrientation(),
  2291. getCameraLensFacing(),
  2292. streamConfigurationMap));
  2293. }
  2294. //==============================================================================
  2295. void appPaused() override
  2296. {
  2297. JUCE_CAMERA_LOG ("appPaused, closing camera...");
  2298. appWasPaused = true;
  2299. deviceOrientationChangeListener.setEnabled (false);
  2300. // We need to restart the whole session mode when the app gets resumed.
  2301. currentCaptureSessionMode.reset();
  2302. if (scopedCameraDevice != nullptr)
  2303. scopedCameraDevice->close();
  2304. stopBackgroundThread();
  2305. }
  2306. void appResumed() override
  2307. {
2308. // Only care about the resumed event if the paused event was received first.
  2309. if (! appWasPaused)
  2310. return;
  2311. JUCE_CAMERA_LOG ("appResumed, opening camera...");
  2312. deviceOrientationChangeListener.setEnabled (true);
  2313. startBackgroundThread();
  2314. if (scopedCameraDevice != nullptr)
  2315. scopedCameraDevice->open();
  2316. }
  2317. void startBackgroundThread()
  2318. {
  2319. auto* env = getEnv();
  2320. handlerThread = GlobalRef (LocalRef<jobject> (env->NewObject (AndroidHandlerThread,
  2321. AndroidHandlerThread.constructor,
  2322. javaString ("JuceCameraDeviceBackgroundThread").get())));
  2323. // handler thread has to be started before its looper can be fetched
  2324. env->CallVoidMethod (handlerThread, AndroidHandlerThread.start);
  2325. handler = GlobalRef (LocalRef<jobject> (env->NewObject (AndroidHandler,
  2326. AndroidHandler.constructorWithLooper,
  2327. env->CallObjectMethod (handlerThread, AndroidHandlerThread.getLooper))));
  2328. }
  2329. void stopBackgroundThread()
  2330. {
  2331. auto* env = getEnv();
  2332. env->CallBooleanMethod (handlerThread, AndroidHandlerThread.quitSafely);
  2333. env->CallVoidMethod (handlerThread, AndroidHandlerThread.join);
  2334. jniCheckHasExceptionOccurredAndClear();
  2335. handlerThread.clear();
  2336. handler.clear();
  2337. }
  2338. #endif
  2339. friend struct CameraDevice::ViewerComponent;
  2340. friend void juce_cameraDeviceStateClosed (int64);
  2341. friend void juce_cameraDeviceStateDisconnected (int64);
  2342. friend void juce_cameraDeviceStateError (int64, int);
  2343. friend void juce_cameraDeviceStateOpened (int64, void*);
  2344. friend void juce_cameraCaptureSessionActive (int64, void*);
  2345. friend void juce_cameraCaptureSessionClosed (int64, void*);
  2346. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  2347. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  2348. friend void juce_cameraCaptureSessionReady (int64, void*);
  2349. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  2350. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  2351. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  2352. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  2353. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  2354. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  2355. friend void juce_deviceOrientationChanged (int64, int);
  2356. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  2357. };
  2358. //==============================================================================
  2359. struct CameraDevice::ViewerComponent : public Component,
  2360. private ComponentMovementWatcher
  2361. {
  2362. ViewerComponent (CameraDevice& device) : ComponentMovementWatcher (this)
  2363. {
  2364. #if __ANDROID_API__ >= 21
  2365. auto previewSize = device.pimpl->streamConfigurationMap.getDefaultPreviewSize();
  2366. targetAspectRatio = previewSize.getWidth() / (float) previewSize.getHeight();
  2367. if (isOrientationLandscape())
  2368. setBounds (previewSize);
  2369. else
  2370. setBounds (0, 0, previewSize.getHeight(), previewSize.getWidth());
  2371. addAndMakeVisible (viewerComponent);
  2372. viewerComponent.setView (device.pimpl->previewDisplay.getNativeView());
  2373. #else
  2374. ignoreUnused (device);
  2375. #endif
  2376. }
  2377. private:
  2378. AndroidViewComponent viewerComponent;
  2379. float targetAspectRatio = 1.0f;
  2380. void componentMovedOrResized (bool, bool) override
  2381. {
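// Letterbox the preview so that it keeps the camera's aspect ratio within the component's bounds.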
  2382. auto b = getLocalBounds();
  2383. auto targetWidth = b.getWidth();
  2384. auto targetHeight = b.getHeight();
  2385. if (isOrientationLandscape())
  2386. {
  2387. auto currentAspectRatio = b.getWidth() / (float) b.getHeight();
  2388. if (currentAspectRatio > targetAspectRatio)
  2389. targetWidth = static_cast<int> (targetWidth * targetAspectRatio / currentAspectRatio);
  2390. else
  2391. targetHeight = static_cast<int> (targetHeight * currentAspectRatio / targetAspectRatio);
  2392. }
  2393. else
  2394. {
  2395. auto currentAspectRatio = b.getHeight() / (float) b.getWidth();
  2396. if (currentAspectRatio > targetAspectRatio)
  2397. targetHeight = static_cast<int> (targetHeight * targetAspectRatio / currentAspectRatio);
  2398. else
  2399. targetWidth = static_cast<int> (targetWidth * currentAspectRatio / targetAspectRatio);
  2400. }
  2401. viewerComponent.setBounds (Rectangle<int> (0, 0, targetWidth, targetHeight).withCentre (b.getCentre()));
  2402. }
  2403. bool isOrientationLandscape() const
  2404. {
  2405. auto o = Desktop::getInstance().getCurrentOrientation();
  2406. return o == Desktop::rotatedClockwise || o == Desktop::rotatedAntiClockwise;
  2407. }
  2408. void componentPeerChanged() override {}
  2409. void componentVisibilityChanged() override {}
  2410. JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
  2411. };
  2412. String CameraDevice::getFileExtension()
  2413. {
  2414. return ".mp4";
  2415. }
  2416. #if __ANDROID_API__ >= 21
  2417. //==============================================================================
void juce_cameraDeviceStateClosed (int64 host)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateClosed();
}

void juce_cameraDeviceStateDisconnected (int64 host)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateDisconnected();
}

void juce_cameraDeviceStateError (int64 host, int error)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateError (error);
}

void juce_cameraDeviceStateOpened (int64 host, void* camera)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateOpened ((jobject) camera);
}

//==============================================================================
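// Native entry points invoked by the Java CameraDeviceStateCallback inner class: each one
// records the JNIEnv for the calling thread via setEnv() and then forwards to the matching
// juce_cameraDeviceState* function above.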
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateClosed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/))
{
    setEnv (env);
    juce_cameraDeviceStateClosed (host);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateDisconnected, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/))
{
    setEnv (env);
    juce_cameraDeviceStateDisconnected (host);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateError, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/, int error))
{
    setEnv (env);
    juce_cameraDeviceStateError (host, error);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateOpened, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject camera))
{
    setEnv (env);
    juce_cameraDeviceStateOpened (host, camera);
}

//==============================================================================
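// Trampolines for CameraCaptureSession.StateCallback events: 'host' identifies the C++
// CaptureSession object that owns the Java capture session.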
void juce_cameraCaptureSessionActive (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionActive ((jobject) session);
}

void juce_cameraCaptureSessionClosed (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionClosed ((jobject) session);
}

void juce_cameraCaptureSessionConfigureFailed (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionConfigureFailed ((jobject) session);
}

void juce_cameraCaptureSessionConfigured (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionConfigured ((jobject) session);
}

void juce_cameraCaptureSessionReady (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionReady ((jobject) session);
}

//==============================================================================
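// Native entry points invoked by the Java CameraCaptureSessionStateCallback inner class.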
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionActive, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionActive (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionClosed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionClosed (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionConfigureFailed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionConfigureFailed (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionConfigured, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionConfigured (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionReady, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionReady (host, session);
}

//==============================================================================
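// Trampolines for CameraCaptureSession.CaptureCallback events: per-frame capture progress is
// forwarded to the StillPictureTaker owned by the capture session, with 'isPreview' indicating
// whether the callback came from the repeating preview request rather than a still capture.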
void juce_cameraCaptureSessionCaptureCompleted (int64 host, bool isPreview, void* session, void* request, void* result)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureCompleted (isPreview, (jobject) session, (jobject) request, (jobject) result);
}

void juce_cameraCaptureSessionCaptureFailed (int64 host, bool isPreview, void* session, void* request, void* failure)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureFailed (isPreview, (jobject) session, (jobject) request, (jobject) failure);
}

void juce_cameraCaptureSessionCaptureProgressed (int64 host, bool isPreview, void* session, void* request, void* partialResult)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureProgressed (isPreview, (jobject) session, (jobject) request, (jobject) partialResult);
}

void juce_cameraCaptureSessionCaptureSequenceAborted (int64 host, bool isPreview, void* session, int sequenceId)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureSequenceAborted (isPreview, (jobject) session, sequenceId);
}

void juce_cameraCaptureSessionCaptureSequenceCompleted (int64 host, bool isPreview, void* session, int sequenceId, int64 frameNumber)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureSequenceCompleted (isPreview, (jobject) session, sequenceId, frameNumber);
}

void juce_cameraCaptureSessionCaptureStarted (int64 host, bool isPreview, void* session, void* request, int64 timestamp, int64 frameNumber)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureStarted (isPreview, (jobject) session, (jobject) request, timestamp, frameNumber);
}
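// Native entry points invoked by the Java CameraCaptureSessionCaptureCallback inner class.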
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureCompleted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject result))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureCompleted (host, isPreview, session, request, result);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureFailed, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject failure))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureFailed (host, isPreview, session, request, failure);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureProgressed, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject partialResult))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureProgressed (host, isPreview, session, request, partialResult);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureSequenceAborted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jint sequenceId))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureSequenceAborted (host, isPreview, session, (int) sequenceId);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureSequenceCompleted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jint sequenceId, jlong frameNumber))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureSequenceCompleted (host, isPreview, session, (int) sequenceId, frameNumber);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureStarted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, int64 timestamp, int64 frameNumber))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureStarted (host, isPreview, session, request, timestamp, frameNumber);
}

//==============================================================================
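// Device orientation changes reported by the Java JuceOrientationEventListener are forwarded
// to the Pimpl's DeviceOrientationChangeListener.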
void juce_deviceOrientationChanged (int64 host, int orientation)
{
    auto* listener = reinterpret_cast<CameraDevice::Pimpl::DeviceOrientationChangeListener*> (host);
    listener->orientationChanged (orientation);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024JuceOrientationEventListener), deviceOrientationChanged, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, jint orientation))
{
    setEnv (env);
    juce_deviceOrientationChanged (host, (int) orientation);
}

#endif