/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
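
//==============================================================================
// The declarations below use JUCE's JNI helper macros: each JNI_CLASS_MEMBERS block lists the
// Java methods/fields to resolve, and DECLARE_JNI_CLASS creates a holder object whose members
// (roughly speaking) are the cached jmethodID/jfieldID handles. A minimal usage sketch, assuming
// a resolved holder such as the AndroidImage declared further down:
//
//     auto* env = getEnv();
//     env->CallVoidMethod (someImage, AndroidImage.close);   // calls android.media.Image.close()
//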
#if __ANDROID_API__ >= 21

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICMETHOD (valueOf, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$CompressFormat;")
DECLARE_JNI_CLASS (AndroidBitmapCompressFormat, "android/graphics/Bitmap$CompressFormat");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (close, "close", "()V") \
 METHOD (createCaptureRequest, "createCaptureRequest", "(I)Landroid/hardware/camera2/CaptureRequest$Builder;") \
 METHOD (createCaptureSession, "createCaptureSession", "(Ljava/util/List;Landroid/hardware/camera2/CameraCaptureSession$StateCallback;Landroid/os/Handler;)V")
DECLARE_JNI_CLASS (AndroidCameraDevice, "android/hardware/camera2/CameraDevice");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (close, "close", "()V") \
 METHOD (getPlanes, "getPlanes", "()[Landroid/media/Image$Plane;")
DECLARE_JNI_CLASS (AndroidImage, "android/media/Image");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (getBuffer, "getBuffer", "()Ljava/nio/ByteBuffer;")
DECLARE_JNI_CLASS (AndroidImagePlane, "android/media/Image$Plane");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (acquireLatestImage, "acquireLatestImage", "()Landroid/media/Image;") \
 METHOD (close, "close", "()V") \
 METHOD (getSurface, "getSurface", "()Landroid/view/Surface;") \
 METHOD (setOnImageAvailableListener, "setOnImageAvailableListener", "(Landroid/media/ImageReader$OnImageAvailableListener;Landroid/os/Handler;)V") \
 STATICMETHOD (newInstance, "newInstance", "(IIII)Landroid/media/ImageReader;")
DECLARE_JNI_CLASS (AndroidImageReader, "android/media/ImageReader");
#undef JNI_CLASS_MEMBERS
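
// A hedged sketch of how a STATICMETHOD entry such as AndroidImageReader.newInstance is used
// (the ImageReader class further down does exactly this): the holder object itself is passed
// where a jclass is expected, and the member is the cached static jmethodID. The sizes here are
// arbitrary; 256 is the JPEG image format constant and 2 the max image count used below.
//
//     auto reader = getEnv()->CallStaticObjectMethod (AndroidImageReader, AndroidImageReader.newInstance,
//                                                     640, 480, /*format*/ 256, /*maxImages*/ 2);
//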
#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "()V") \
 METHOD (getSurface, "getSurface", "()Landroid/view/Surface;") \
 METHOD (prepare, "prepare", "()V") \
 METHOD (release, "release", "()V") \
 METHOD (setAudioEncoder, "setAudioEncoder", "(I)V") \
 METHOD (setAudioSource, "setAudioSource", "(I)V") \
 METHOD (setOnErrorListener, "setOnErrorListener", "(Landroid/media/MediaRecorder$OnErrorListener;)V") \
 METHOD (setOnInfoListener, "setOnInfoListener", "(Landroid/media/MediaRecorder$OnInfoListener;)V") \
 METHOD (setOrientationHint, "setOrientationHint", "(I)V") \
 METHOD (setOutputFile, "setOutputFile", "(Ljava/lang/String;)V") \
 METHOD (setOutputFormat, "setOutputFormat", "(I)V") \
 METHOD (setVideoEncoder, "setVideoEncoder", "(I)V") \
 METHOD (setVideoEncodingBitRate, "setVideoEncodingBitRate", "(I)V") \
 METHOD (setVideoFrameRate, "setVideoFrameRate", "(I)V") \
 METHOD (setVideoSize, "setVideoSize", "(II)V") \
 METHOD (setVideoSource, "setVideoSource", "(I)V") \
 METHOD (start, "start", "()V") \
 METHOD (stop, "stop", "()V")
DECLARE_JNI_CLASS (AndroidMediaRecorder, "android/media/MediaRecorder");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "(Landroid/content/Context;)V") \
 METHOD (getSurfaceTexture, "getSurfaceTexture", "()Landroid/graphics/SurfaceTexture;") \
 METHOD (isAvailable, "isAvailable", "()Z") \
 METHOD (setSurfaceTextureListener, "setSurfaceTextureListener", "(Landroid/view/TextureView$SurfaceTextureListener;)V") \
 METHOD (setTransform, "setTransform", "(Landroid/graphics/Matrix;)V")
DECLARE_JNI_CLASS (AndroidTextureView, "android/view/TextureView");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "(Landroid/graphics/SurfaceTexture;)V")
DECLARE_JNI_CLASS (AndroidSurface, "android/view/Surface");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (setDefaultBufferSize, "setDefaultBufferSize", "(II)V")
DECLARE_JNI_CLASS (AndroidSurfaceTexture, "android/graphics/SurfaceTexture");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (getOutputSizesForClass, "getOutputSizes", "(Ljava/lang/Class;)[Landroid/util/Size;") \
 METHOD (getOutputSizesForFormat, "getOutputSizes", "(I)[Landroid/util/Size;") \
 METHOD (isOutputSupportedFor, "isOutputSupportedFor", "(I)Z") \
 METHOD (isOutputSupportedForSurface, "isOutputSupportedFor", "(Landroid/view/Surface;)Z")
DECLARE_JNI_CLASS (AndroidStreamConfigurationMap, "android/hardware/camera2/params/StreamConfigurationMap");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "()V") \
 METHOD (toByteArray, "toByteArray", "()[B") \
 METHOD (size, "size", "()I")
DECLARE_JNI_CLASS (ByteArrayOutputStream, "java/io/ByteArrayOutputStream");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (abortCaptures, "abortCaptures", "()V") \
 METHOD (capture, "capture", "(Landroid/hardware/camera2/CaptureRequest;Landroid/hardware/camera2/CameraCaptureSession$CaptureCallback;Landroid/os/Handler;)I") \
 METHOD (close, "close", "()V") \
 METHOD (setRepeatingRequest, "setRepeatingRequest", "(Landroid/hardware/camera2/CaptureRequest;Landroid/hardware/camera2/CameraCaptureSession$CaptureCallback;Landroid/os/Handler;)I") \
 METHOD (stopRepeating, "stopRepeating", "()V")
DECLARE_JNI_CLASS (CameraCaptureSession, "android/hardware/camera2/CameraCaptureSession")
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";JZ)V")
DECLARE_JNI_CLASS (CameraCaptureSessionCaptureCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraCaptureSessionCaptureCallback");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";J)V")
DECLARE_JNI_CLASS (CameraCaptureSessionStateCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraCaptureSessionStateCallback");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (get, "get", "(Landroid/hardware/camera2/CameraCharacteristics$Key;)Ljava/lang/Object;") \
 METHOD (getKeys, "getKeys", "()Ljava/util/List;") \
 STATICFIELD (CONTROL_AF_AVAILABLE_MODES, "CONTROL_AF_AVAILABLE_MODES", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
 STATICFIELD (LENS_FACING, "LENS_FACING", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
 STATICFIELD (SCALER_STREAM_CONFIGURATION_MAP, "SCALER_STREAM_CONFIGURATION_MAP", "Landroid/hardware/camera2/CameraCharacteristics$Key;") \
 STATICFIELD (SENSOR_ORIENTATION, "SENSOR_ORIENTATION", "Landroid/hardware/camera2/CameraCharacteristics$Key;")
DECLARE_JNI_CLASS (CameraCharacteristics, "android/hardware/camera2/CameraCharacteristics");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (getName, "getName", "()Ljava/lang/String;")
DECLARE_JNI_CLASS (CameraCharacteristicsKey, "android/hardware/camera2/CameraCharacteristics$Key");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";J)V")
DECLARE_JNI_CLASS (CameraDeviceStateCallback, JUCE_ANDROID_ACTIVITY_CLASSPATH "$CameraDeviceStateCallback");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (getCameraCharacteristics, "getCameraCharacteristics", "(Ljava/lang/String;)Landroid/hardware/camera2/CameraCharacteristics;") \
 METHOD (getCameraIdList, "getCameraIdList", "()[Ljava/lang/String;") \
 METHOD (openCamera, "openCamera", "(Ljava/lang/String;Landroid/hardware/camera2/CameraDevice$StateCallback;Landroid/os/Handler;)V")
DECLARE_JNI_CLASS (CameraManager, "android/hardware/camera2/CameraManager");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 STATICFIELD (CONTROL_AE_PRECAPTURE_TRIGGER, "CONTROL_AE_PRECAPTURE_TRIGGER", "Landroid/hardware/camera2/CaptureRequest$Key;") \
 STATICFIELD (CONTROL_AF_MODE, "CONTROL_AF_MODE", "Landroid/hardware/camera2/CaptureRequest$Key;") \
 STATICFIELD (CONTROL_AF_TRIGGER, "CONTROL_AF_TRIGGER", "Landroid/hardware/camera2/CaptureRequest$Key;") \
 STATICFIELD (CONTROL_MODE, "CONTROL_MODE", "Landroid/hardware/camera2/CaptureRequest$Key;")
DECLARE_JNI_CLASS (CaptureRequest, "android/hardware/camera2/CaptureRequest");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (addTarget, "addTarget", "(Landroid/view/Surface;)V") \
 METHOD (build, "build", "()Landroid/hardware/camera2/CaptureRequest;") \
 METHOD (set, "set", "(Landroid/hardware/camera2/CaptureRequest$Key;Ljava/lang/Object;)V")
DECLARE_JNI_CLASS (CaptureRequestBuilder, "android/hardware/camera2/CaptureRequest$Builder");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (get, "get", "(Landroid/hardware/camera2/CaptureResult$Key;)Ljava/lang/Object;") \
 STATICFIELD (CONTROL_AE_STATE, "CONTROL_AE_STATE", "Landroid/hardware/camera2/CaptureResult$Key;") \
 STATICFIELD (CONTROL_AF_STATE, "CONTROL_AF_STATE", "Landroid/hardware/camera2/CaptureResult$Key;")
DECLARE_JNI_CLASS (CaptureResult, "android/hardware/camera2/CaptureResult");
#undef JNI_CLASS_MEMBERS

#define JNI_CLASS_MEMBERS(METHOD, STATICMETHOD, FIELD, STATICFIELD) \
 METHOD (canDetectOrientation, "canDetectOrientation", "()Z") \
 METHOD (constructor, "<init>", "(L" JUCE_ANDROID_ACTIVITY_CLASSPATH ";JLandroid/content/Context;I)V") \
 METHOD (disable, "disable", "()V") \
 METHOD (enable, "enable", "()V")
DECLARE_JNI_CLASS (OrientationEventListener, JUCE_ANDROID_ACTIVITY_CLASSPATH "$JuceOrientationEventListener");
#undef JNI_CLASS_MEMBERS

#endif
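
// The classes below bridge Java callback interfaces to C++ callbacks. Each one derives from
// AndroidInterfaceImplementer, which (together with CreateJavaInterface) backs what is effectively
// a Java-side proxy object: every call made on that proxy lands in invoke(), where the method name
// is inspected and forwarded to a C++ Owner, and any unrecognised method is passed on to the
// base-class invoke().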
//==============================================================================
class AndroidRunnable : public juce::AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void run() = 0;
    };

    AndroidRunnable (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

private:
    Owner& owner;

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juce::juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        if (methodName == "run")
        {
            owner.run();
            return nullptr;
        }

        // invoke base class
        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }
};
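
// A minimal usage sketch, mirroring how the other listeners in this file are wrapped; the owner
// type here is hypothetical, and "java/lang/Runnable" is an assumed interface name:
//
//     struct MyOwner : public AndroidRunnable::Owner  { void run() override { /* ... */ } };
//
//     MyOwner owner;
//     AndroidRunnable runnable (owner);
//     GlobalRef javaRunnable (CreateJavaInterface (&runnable, "java/lang/Runnable").get());
//     // javaRunnable can now be handed to any Java API expecting a java.lang.Runnable.
//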
//==============================================================================
class TextureViewSurfaceTextureListener : public AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void onSurfaceTextureAvailable (LocalRef<jobject>& surface, int width, int height) = 0;
        virtual bool onSurfaceTextureDestroyed (LocalRef<jobject>& surface) = 0;
        virtual void onSurfaceTextureSizeChanged (LocalRef<jobject>& surface, int width, int height) = 0;
        virtual void onSurfaceTextureUpdated (LocalRef<jobject>& surface) = 0;
    };

    TextureViewSurfaceTextureListener (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;

        if (methodName == "onSurfaceTextureAvailable" && numArgs == 3)
        {
            auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
            auto width   = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
            auto height  = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));

            auto widthInt  = env->CallIntMethod (width, JavaInteger.intValue);
            auto heightInt = env->CallIntMethod (height, JavaInteger.intValue);

            owner.onSurfaceTextureAvailable (surface, widthInt, heightInt);
            return nullptr;
        }
        else if (methodName == "onSurfaceTextureDestroyed" && numArgs == 1)
        {
            auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
            auto result = owner.onSurfaceTextureDestroyed (surface);

            return env->CallStaticObjectMethod (JavaBoolean, JavaBoolean.valueOf, result);
        }
        else if (methodName == "onSurfaceTextureSizeChanged" && numArgs == 3)
        {
            auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
            auto width   = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
            auto height  = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));

            auto widthInt  = env->CallIntMethod (width, JavaInteger.intValue);
            auto heightInt = env->CallIntMethod (height, JavaInteger.intValue);

            owner.onSurfaceTextureSizeChanged (surface, widthInt, heightInt);
            return nullptr;
        }
        else if (methodName == "onSurfaceTextureUpdated" && numArgs == 1)
        {
            auto surface = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));

            owner.onSurfaceTextureUpdated (surface);
            return nullptr;
        }

        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }

private:
    Owner& owner;
};
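
// Note for the listeners above and below: primitive arguments arrive through the proxy as boxed
// objects (e.g. java.lang.Integer), so invoke() unboxes them with JavaInteger.intValue before
// forwarding, and a primitive return value (onSurfaceTextureDestroyed's boolean) is boxed back
// with JavaBoolean.valueOf.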
//==============================================================================
class ImageReaderOnImageAvailableListener : public AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void onImageAvailable (LocalRef<jobject>& imageReader) = 0;
    };

    ImageReaderOnImageAvailableListener (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;

        if (methodName == "onImageAvailable" && numArgs == 1)
        {
            auto imageReader = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));

            owner.onImageAvailable (imageReader);
            return nullptr;
        }

        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }

private:
    Owner& owner;
};

//==============================================================================
class MediaRecorderOnInfoListener : public AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void onInfo (LocalRef<jobject>& mediaRecorder, int what, int extra) = 0;
    };

    MediaRecorderOnInfoListener (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;

        if (methodName == "onInfo" && numArgs == 3)
        {
            auto mediaRecorder = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
            auto what  = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
            auto extra = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));

            auto whatInt  = (int) env->CallIntMethod (what, JavaInteger.intValue);
            auto extraInt = (int) env->CallIntMethod (extra, JavaInteger.intValue);

            owner.onInfo (mediaRecorder, whatInt, extraInt);
            return nullptr;
        }

        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }

private:
    Owner& owner;
};

//==============================================================================
class MediaRecorderOnErrorListener : public AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void onError (LocalRef<jobject>& mediaRecorder, int what, int extra) = 0;
    };

    MediaRecorderOnErrorListener (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;

        if (methodName == "onError" && numArgs == 3)
        {
            auto mediaRecorder = LocalRef<jobject> (env->GetObjectArrayElement (args, 0));
            auto what  = LocalRef<jobject> (env->GetObjectArrayElement (args, 1));
            auto extra = LocalRef<jobject> (env->GetObjectArrayElement (args, 2));

            auto whatInt  = (int) env->CallIntMethod (what, JavaInteger.intValue);
            auto extraInt = (int) env->CallIntMethod (extra, JavaInteger.intValue);

            owner.onError (mediaRecorder, whatInt, extraInt);
            return nullptr;
        }

        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }

private:
    Owner& owner;
};

//==============================================================================
class AppPausedResumedListener : public AndroidInterfaceImplementer
{
public:
    struct Owner
    {
        virtual ~Owner() {}

        virtual void appPaused() = 0;
        virtual void appResumed() = 0;
    };

    AppPausedResumedListener (Owner& ownerToUse)
        : owner (ownerToUse)
    {}

    jobject invoke (jobject proxy, jobject method, jobjectArray args) override
    {
        auto* env = getEnv();

        auto methodName = juceString ((jstring) env->CallObjectMethod (method, JavaMethod.getName));

        int numArgs = args != nullptr ? env->GetArrayLength (args) : 0;

        if (methodName == "appPaused" && numArgs == 0)
        {
            owner.appPaused();
            return nullptr;
        }

        if (methodName == "appResumed" && numArgs == 0)
        {
            owner.appResumed();
            return nullptr;
        }

        return AndroidInterfaceImplementer::invoke (proxy, method, args);
    }

private:
    Owner& owner;
};
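
// CameraDevice::Pimpl below is the Android implementation behind juce::CameraDevice. On API 21+
// it drives the camera2 API through the JNI handles declared above, using the nested helper
// classes that follow (StreamConfigurationMap, PreviewDisplay, ImageReader, MediaRecorder, ...);
// on older API levels every operation is a no-op stub.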
//==============================================================================
struct CameraDevice::Pimpl
#if __ANDROID_API__ >= 21
    : private AppPausedResumedListener::Owner
#endif
{
    using InternalOpenCameraResultCallback = std::function<void (const String& /*cameraId*/, const String& /*error*/)>;

    Pimpl (CameraDevice& ownerToUse, const String& cameraIdToUse, int /*index*/,
           int minWidthToUse, int minHeightToUse, int maxWidthToUse, int maxHeightToUse,
           bool /*useHighQuality*/)
#if __ANDROID_API__ >= 21
        : owner (ownerToUse),
          minWidth (minWidthToUse),
          minHeight (minHeightToUse),
          maxWidth (maxWidthToUse),
          maxHeight (maxHeightToUse),
          cameraId (cameraIdToUse),
          appPausedResumedListener (*this),
          appPausedResumedListenerNative (CreateJavaInterface (&appPausedResumedListener,
                                                               JUCE_ANDROID_ACTIVITY_CLASSPATH "$AppPausedResumedListener").get()),
          cameraManager (initialiseCameraManager()),
          cameraCharacteristics (initialiseCameraCharacteristics (cameraManager, cameraId)),
          streamConfigurationMap (cameraCharacteristics),
          previewDisplay (streamConfigurationMap.getPreviewBufferSize()),
          deviceOrientationChangeListener (previewDisplay)
#endif
    {
#if __ANDROID_API__ >= 21
        startBackgroundThread();
#endif
    }

    ~Pimpl()
    {
#if __ANDROID_API__ >= 21
        getEnv()->CallVoidMethod (android.activity, JuceAppActivity.removeAppPausedResumedListener,
                                  appPausedResumedListenerNative.get(), reinterpret_cast<jlong> (this));
#endif
    }

#if __ANDROID_API__ < 21
    // Dummy implementations for unsupported API levels.
    void open (InternalOpenCameraResultCallback) {}
    void takeStillPicture (std::function<void (const Image&)>) {}
    void startRecordingToFile (const File&, int) {}
    void stopRecording() {}
    void addListener (CameraDevice::Listener*) {}
    void removeListener (CameraDevice::Listener*) {}
    String getCameraId() const noexcept { return {}; }
    bool openedOk() const noexcept { return false; }
    Time getTimeOfFirstRecordedFrame() const { return {}; }

    static StringArray getAvailableDevices()
    {
        // Camera on Android requires API 21 or above.
        jassertfalse;
        return {};
    }
#else
    JUCE_DECLARE_WEAK_REFERENCEABLE (Pimpl)

    String getCameraId() const noexcept { return cameraId; }

    void open (InternalOpenCameraResultCallback cameraOpenCallbackToUse)
    {
        cameraOpenCallback = static_cast<InternalOpenCameraResultCallback&&> (cameraOpenCallbackToUse);

        // A valid camera open callback must be passed.
        jassert (cameraOpenCallback != nullptr);
        // The same camera can be opened only once!
        jassert (scopedCameraDevice == nullptr);

        if (cameraOpenCallback == nullptr || scopedCameraDevice != nullptr)
            return;

        WeakReference<Pimpl> safeThis (this);
        RuntimePermissions::request (RuntimePermissions::camera, [safeThis] (bool granted) mutable
        {
            if (safeThis != nullptr)
                safeThis->continueOpenRequest (granted);
        });
    }
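
    // open() above is asynchronous: it first requests the runtime camera permission, and only in
    // continueOpenRequest() below does it register the pause/resume listener and create the
    // ScopedCameraDevice; the outcome is eventually reported through the
    // InternalOpenCameraResultCallback. The WeakReference guards against this Pimpl being
    // destroyed before the permission callback fires.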
    void continueOpenRequest (bool granted)
    {
        if (granted)
        {
            getEnv()->CallVoidMethod (android.activity, JuceAppActivity.addAppPausedResumedListener,
                                      appPausedResumedListenerNative.get(), reinterpret_cast<jlong> (this));
            scopedCameraDevice.reset (new ScopedCameraDevice (*this, cameraId, cameraManager, handler, getAutoFocusModeToUse()));
        }
        else
        {
            invokeCameraOpenCallback ("Camera permission not granted");
        }
    }

    bool openedOk() const noexcept { return scopedCameraDevice->openedOk(); }

    void takeStillPicture (std::function<void (const Image&)> pictureTakenCallbackToUse)
    {
        if (pictureTakenCallbackToUse == nullptr)
        {
            jassertfalse;
            return;
        }

        if (currentCaptureSessionMode->isVideoRecordSession())
        {
            // Taking still pictures while recording video is not supported on Android.
            jassertfalse;
            return;
        }

        pictureTakenCallback = static_cast<std::function<void (const Image&)>&&> (pictureTakenCallbackToUse);

        triggerStillPictureCapture();
    }

    void startRecordingToFile (const File& file, int /*quality*/)
    {
        if (! openedOk())
        {
            jassertfalse;
            return;
        }

        if (! previewDisplay.isReady())
        {
            // Did you remember to create and show a preview display?
            jassertfalse;
            return;
        }

        file.deleteFile();
        file.create();
        jassert (file.existsAsFile());

        // MediaRecorder can't handle videos larger than 1080p
        auto videoSize = chooseBestSize (minWidth, minHeight, jmin (maxWidth, 1080), maxHeight,
                                         streamConfigurationMap.getSupportedVideoRecordingOutputSizes());

        mediaRecorder.reset (new MediaRecorder (file.getFullPathName(), videoSize.getWidth(), videoSize.getHeight(),
                                                getCameraSensorOrientation(), getCameraLensFacing()));

        firstRecordedFrameTimeMs = Time::getCurrentTime();

        currentCaptureSessionMode.reset();
        startVideoRecordingMode (*mediaRecorder);
    }

    void stopRecording()
    {
        currentCaptureSessionMode.reset();
        mediaRecorder.reset();

        startPreviewMode (*imageReader);
    }

    Time getTimeOfFirstRecordedFrame() const
    {
        return firstRecordedFrameTimeMs;
    }

    static StringArray getAvailableDevices()
    {
        StringArray results;

        auto* env = getEnv();

        auto cameraManagerToUse = initialiseCameraManager();
        auto cameraIdArray = LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (cameraManagerToUse,
                                                                                           CameraManager.getCameraIdList));

        results = javaStringArrayToJuce (cameraIdArray);

        for (auto& result : results)
            printDebugCameraInfo (cameraManagerToUse, result);

        return results;
    }

    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);
        listeners.add (listenerToAdd);

        if (listeners.size() == 1)
            triggerStillPictureCapture();
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.remove (listenerToRemove);
    }
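
    // NB: addListener() above triggers still-picture capture as soon as the first listener is added,
    // and listeners are notified from ImageReader::onImageAvailable() via callListeners(), so a
    // CameraDevice::Listener effectively subscribes to captured frames rather than to preview updates.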
private:
    enum
    {
        ERROR_CAMERA_IN_USE = 1,
        ERROR_MAX_CAMERAS_IN_USE = 2,
        ERROR_CAMERA_DISABLED = 3,
        ERROR_CAMERA_DEVICE = 4,
        ERROR_CAMERA_SERVICE = 5
    };

    static String cameraErrorCodeToString (int errorCode)
    {
        switch (errorCode)
        {
            case ERROR_CAMERA_IN_USE:      return "Camera already in use.";
            case ERROR_MAX_CAMERAS_IN_USE: return "Too many opened camera devices.";
            case ERROR_CAMERA_DISABLED:    return "Camera disabled.";
            case ERROR_CAMERA_DEVICE:      return "Fatal error.";
            case ERROR_CAMERA_SERVICE:     return "Fatal error. Reboot required or persistent hardware problem.";
            default:                       return "Unknown error.";
        }
    }

    static LocalRef<jobject> initialiseCameraManager()
    {
        return LocalRef<jobject> (getEnv()->CallObjectMethod (android.activity, JuceAppActivity.getSystemService,
                                                              javaString ("camera").get()));
    }

    static LocalRef<jobject> initialiseCameraCharacteristics (const GlobalRef& cameraManager, const String& cameraId)
    {
        return LocalRef<jobject> (getEnv()->CallObjectMethod (cameraManager,
                                                              CameraManager.getCameraCharacteristics,
                                                              javaString (cameraId).get()));
    }

    static void printDebugCameraInfo (const LocalRef<jobject>& cameraManagerToUse, const String& cameraId)
    {
        auto* env = getEnv();

        auto characteristics = LocalRef<jobject> (env->CallObjectMethod (cameraManagerToUse,
                                                                         CameraManager.getCameraCharacteristics,
                                                                         javaString (cameraId).get()));

        auto keysList = LocalRef<jobject> (env->CallObjectMethod (characteristics, CameraCharacteristics.getKeys));

        const int size = env->CallIntMethod (keysList, JavaList.size);

        JUCE_CAMERA_LOG ("Camera id: " + cameraId + ", characteristics keys num: " + String (size));

        for (int i = 0; i < size; ++i)
        {
            auto key = LocalRef<jobject> (env->CallObjectMethod (keysList, JavaList.get, i));
            auto jKeyName = LocalRef<jstring> ((jstring) env->CallObjectMethod (key, CameraCharacteristicsKey.getName));
            auto keyName = juceString (jKeyName);

            auto keyValue = LocalRef<jobject> (env->CallObjectMethod (characteristics, CameraCharacteristics.get, key.get()));
            auto jKeyValueString = LocalRef<jstring> ((jstring) env->CallObjectMethod (keyValue, JavaObject.toString));
            auto keyValueString = juceString (jKeyValueString);

            auto& kvs = keyValueString;

            if (kvs.startsWith ("[I") || kvs.startsWith ("[F") || kvs.startsWith ("[Z") || kvs.startsWith ("[B"))
            {
                printPrimitiveArrayElements (keyValue, keyName, keyValueString);
            }
            else if (kvs.startsWith ("[Landroid.util.Range"))
            {
                printRangeArrayElements (keyValue, keyName);
            }
            else
            {
                int chunkSize = 256;

                if (keyValueString.length() > chunkSize)
                {
                    JUCE_CAMERA_LOG ("Key: " + keyName);

                    for (int i = 0, j = 1; i < keyValueString.length(); i += chunkSize, ++j)
                        JUCE_CAMERA_LOG ("value part " + String (j) + ": " + keyValueString.substring (i, i + chunkSize));
                }
                else
                {
                    JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + keyValueString);
                }
            }

            ignoreUnused (keyName);
        }
    }

    static void printPrimitiveArrayElements (const LocalRef<jobject>& keyValue, const String& keyName,
                                             const String& keyValueString)
    {
        ignoreUnused (keyName);

        String result = "[";

        auto* env = getEnv();

#define PRINT_ELEMENTS(elem_type, array_type, fun_name_middle) \
        { \
            elem_type* elements = env->Get##fun_name_middle##ArrayElements ((array_type) keyValue.get(), 0); \
            int size = env->GetArrayLength ((array_type) keyValue.get()); \
            \
            for (int i = 0; i < size - 1; ++i) \
                result << String (elements[i]) << " "; \
            \
            if (size > 0) \
                result << String (elements[size - 1]); \
            \
            env->Release##fun_name_middle##ArrayElements ((array_type) keyValue.get(), elements, 0); \
        }

        if (keyValueString.startsWith ("[I"))
            PRINT_ELEMENTS (jint, jintArray, Int)
        else if (keyValueString.startsWith ("[F"))
            PRINT_ELEMENTS (float, jfloatArray, Float)
        else if (keyValueString.startsWith ("[Z"))
            PRINT_ELEMENTS (jboolean, jbooleanArray, Boolean)
        else if (keyValueString.startsWith ("[B"))
            PRINT_ELEMENTS (jbyte, jbyteArray, Byte);
#undef PRINT_ELEMENTS

        result << "]";
        JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + result);
    }

    static void printRangeArrayElements (const LocalRef<jobject>& rangeArray, const String& keyName)
    {
        auto* env = getEnv();

        jobjectArray ranges = static_cast<jobjectArray> (rangeArray.get());

        int numRanges = env->GetArrayLength (ranges);

        String result;

        for (int i = 0; i < numRanges; ++i)
        {
            auto range = LocalRef<jobject> (env->GetObjectArrayElement (ranges, i));

            auto jRangeString = LocalRef<jstring> ((jstring) env->CallObjectMethod (range, AndroidRange.toString));

            result << juceString (jRangeString) << " ";
        }

        JUCE_CAMERA_LOG ("Key: " + keyName + ", value: " + result);
    }
    //==============================================================================
    class StreamConfigurationMap
    {
    public:
        StreamConfigurationMap (const GlobalRef& cameraCharacteristics)
            : scalerStreamConfigurationMap (getStreamConfigurationMap (cameraCharacteristics)),
              supportedPreviewOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
                                                                getClassForName ("android.graphics.SurfaceTexture"),
                                                                -1)),
              supportedStillImageOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
                                                                   LocalRef<jobject>(),
                                                                   jpegImageFormat)),
              supportedVideoRecordingOutputSizes (retrieveOutputSizes (scalerStreamConfigurationMap,
                                                                       getClassForName ("android.media.MediaRecorder"),
                                                                       -1)),
              defaultPreviewSize (getSmallestSize (supportedPreviewOutputSizes)),
              previewBufferSize (getLargestSize (supportedPreviewOutputSizes))
        {
            printSizesLog (supportedPreviewOutputSizes, "SurfaceTexture");
            printSizesLog (supportedStillImageOutputSizes, "JPEG");
            printSizesLog (supportedVideoRecordingOutputSizes, "MediaRecorder");
        }

        Array<Rectangle<int>> getSupportedPreviewOutputSizes() const noexcept        { return supportedPreviewOutputSizes; }
        Array<Rectangle<int>> getSupportedStillImageOutputSizes() const noexcept     { return supportedStillImageOutputSizes; }
        Array<Rectangle<int>> getSupportedVideoRecordingOutputSizes() const noexcept { return supportedVideoRecordingOutputSizes; }

        Rectangle<int> getDefaultPreviewSize() const noexcept { return defaultPreviewSize; }
        Rectangle<int> getPreviewBufferSize() const noexcept  { return previewBufferSize; }

        bool isOutputSupportedForSurface (const LocalRef<jobject>& surface) const
        {
            return getEnv()->CallBooleanMethod (scalerStreamConfigurationMap, AndroidStreamConfigurationMap.isOutputSupportedForSurface, surface.get());
        }

        static constexpr int jpegImageFormat = 256;

    private:
        GlobalRef scalerStreamConfigurationMap;

        Array<Rectangle<int>> supportedPreviewOutputSizes;
        Array<Rectangle<int>> supportedStillImageOutputSizes;
        Array<Rectangle<int>> supportedVideoRecordingOutputSizes;
        Rectangle<int> defaultPreviewSize, previewBufferSize;

        GlobalRef getStreamConfigurationMap (const GlobalRef& cameraCharacteristics)
        {
            auto* env = getEnv();

            auto scalerStreamConfigurationMapKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics,
                                                                                                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP));

            return GlobalRef (LocalRef<jobject> (env->CallObjectMethod (cameraCharacteristics,
                                                                        CameraCharacteristics.get,
                                                                        scalerStreamConfigurationMapKey.get())));
        }

        static Array<Rectangle<int>> retrieveOutputSizes (GlobalRef& scalerStreamConfigurationMap,
                                                          const LocalRef<jobject>& outputClass,
                                                          int format)
        {
            Array<Rectangle<int>> result;

            auto* env = getEnv();

            auto outputSizes = outputClass.get() != nullptr
                                 ? LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (scalerStreamConfigurationMap,
                                                                                                 AndroidStreamConfigurationMap.getOutputSizesForClass,
                                                                                                 outputClass.get()))
                                 : LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (scalerStreamConfigurationMap,
                                                                                                 AndroidStreamConfigurationMap.getOutputSizesForFormat,
                                                                                                 (jint) format));

            if (format != -1)
            {
                auto supported = (env->CallBooleanMethod (scalerStreamConfigurationMap, AndroidStreamConfigurationMap.isOutputSupportedFor, (jint) format) != 0);

                if (! supported)
                {
                    // The output format is not supported by this device, still image capture will not work!
                    jassertfalse;
                    return {};
                }
            }

            int numSizes = env->GetArrayLength (outputSizes);

            jassert (numSizes > 0);

            for (int i = 0; i < numSizes; ++i)
            {
                auto size = LocalRef<jobject> (env->GetObjectArrayElement (outputSizes, i));

                auto width  = env->CallIntMethod (size, AndroidSize.getWidth);
                auto height = env->CallIntMethod (size, AndroidSize.getHeight);

                result.add (Rectangle<int> (0, 0, width, height));
            }

            return result;
        }

        static LocalRef<jobject> getClassForName (const String& name)
        {
            return LocalRef<jobject> (getEnv()->CallStaticObjectMethod (JavaClass, JavaClass.forName,
                                                                        javaString (name).get()));
        }

        static void printSizesLog (const Array<Rectangle<int>>& sizes, const String& className)
        {
            ignoreUnused (sizes, className);

            JUCE_CAMERA_LOG ("Sizes for class " + className);

           #if JUCE_CAMERA_LOG_ENABLED
            for (auto& s : sizes)
                JUCE_CAMERA_LOG (s.toString() + "\n");
           #endif
        }

        Rectangle<int> getSmallestSize (const Array<Rectangle<int>>& sizes) const
        {
            if (sizes.size() == 0)
                return {};

            auto smallestSize = sizes[0];

            for (auto& size : sizes)
            {
                if (size.getWidth() < smallestSize.getWidth() && size.getHeight() < smallestSize.getHeight())
                    smallestSize = size;
            }

            return smallestSize;
        }

        Rectangle<int> getLargestSize (const Array<Rectangle<int>>& sizes) const
        {
            if (sizes.size() == 0)
                return {};

            auto largestSize = sizes[0];

            for (auto& size : sizes)
            {
                if (size.getWidth() > largestSize.getWidth() && size.getHeight() > largestSize.getHeight())
                    largestSize = size;
            }

            return largestSize;
        }
    };
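
    // StreamConfigurationMap wraps android.hardware.camera2.params.StreamConfigurationMap: it caches
    // the supported output sizes for the preview (SurfaceTexture), still images (JPEG, format 256)
    // and video recording (MediaRecorder), picking the smallest preview size as the default and the
    // largest as the preview buffer size.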
    //==============================================================================
    class PreviewDisplay : private TextureViewSurfaceTextureListener::Owner
    {
    public:
        struct Listener
        {
            virtual ~Listener() {}

            virtual void previewDisplayReady() = 0;
            virtual void previewDisplayAboutToBeDestroyed() = 0;
        };

        PreviewDisplay (Rectangle<int> bufferSize)
            : textureViewSurfaceTextureListener (*this),
              textureView (getEnv()->NewObject (AndroidTextureView, AndroidTextureView.constructor,
                                                android.activity.get())),
              bufferWidth (bufferSize.getWidth()),
              bufferHeight (bufferSize.getHeight())
        {
            auto* env = getEnv();

            if (! isReady())
                env->CallVoidMethod (textureView, AndroidTextureView.setSurfaceTextureListener,
                                     CreateJavaInterface (&textureViewSurfaceTextureListener,
                                                          "android/view/TextureView$SurfaceTextureListener").get());
        }

        ~PreviewDisplay()
        {
            getEnv()->CallVoidMethod (textureView, AndroidTextureView.setSurfaceTextureListener, nullptr);
        }

        void addListener (Listener* l)
        {
            if (l == nullptr)
            {
                jassertfalse;
                return;
            }

            listeners.add (l);

            if (isReady())
                l->previewDisplayReady();
        }

        void removeListener (Listener* l)
        {
            if (l == nullptr)
            {
                jassertfalse;
                return;
            }

            listeners.remove (l);
        }

        bool isReady() const
        {
            return (getEnv()->CallBooleanMethod (textureView, AndroidTextureView.isAvailable) != 0)
                    && width > 0 && height > 0;
        }

        LocalRef<jobject> createSurface()
        {
            // The surface may get destroyed while the session is being configured, if the preview
            // gets hidden in the meantime, so bail out.
            if (! isReady())
                return LocalRef<jobject> (nullptr);

            auto* env = getEnv();

            auto surfaceTexture = LocalRef<jobject> (env->CallObjectMethod (textureView,
                                                                            AndroidTextureView.getSurfaceTexture));

            // NB: a buffer that is too small will result in a pixelated preview, and a buffer with
            // the wrong aspect ratio can result in a cropped preview.
            env->CallVoidMethod (surfaceTexture, AndroidSurfaceTexture.setDefaultBufferSize, (jint) bufferWidth, (jint) bufferHeight);

            auto surface = LocalRef<jobject> (env->NewObject (AndroidSurface, AndroidSurface.constructor, surfaceTexture.get()));

            return surface;
        }

        const GlobalRef& getNativeView() { return textureView; }

        void updateSurfaceTransform()
        {
            auto* env = getEnv();

            auto windowManager = LocalRef<jobject> (env->CallObjectMethod (android.activity, JuceAppActivity.getWindowManager));
            auto display = LocalRef<jobject> (env->CallObjectMethod (windowManager, AndroidWindowManager.getDefaultDisplay));
            auto rotation = env->CallIntMethod (display, AndroidDisplay.getRotation);

            static constexpr int rotation90 = 1;
            static constexpr int rotation270 = 3;

            auto matrix = LocalRef<jobject> (env->NewObject (AndroidMatrix, AndroidMatrix.constructor));

            if (rotation == rotation90 || rotation == rotation270)
            {
                env->CallBooleanMethod (matrix, AndroidMatrix.postScale, jfloat (height / (float) width), jfloat (width / (float) height), (jfloat) 0, (jfloat) 0);
                env->CallBooleanMethod (matrix, AndroidMatrix.postRotate, (jfloat) 90 * (rotation - 2), (jfloat) 0, (jfloat) 0);
                env->CallBooleanMethod (matrix, AndroidMatrix.postTranslate, (jfloat) (rotation == 3 ? width : 0), (jfloat) (rotation == 1 ? height : 0));
            }

            env->CallVoidMethod (textureView, AndroidTextureView.setTransform, matrix.get());
        }

    private:
        ListenerList<Listener> listeners;

        TextureViewSurfaceTextureListener textureViewSurfaceTextureListener;

        GlobalRef textureView;
        int width = -1, height = -1;
        int bufferWidth, bufferHeight;

        void onSurfaceTextureAvailable (LocalRef<jobject>& /*surface*/, int widthToUse, int heightToUse) override
        {
            JUCE_CAMERA_LOG ("onSurfaceTextureAvailable()");

            width = widthToUse;
            height = heightToUse;

            updateSurfaceTransform();

            listeners.call (&Listener::previewDisplayReady);
        }

        bool onSurfaceTextureDestroyed (LocalRef<jobject>& /*surface*/) override
        {
            JUCE_CAMERA_LOG ("onSurfaceTextureDestroyed()");

            listeners.call (&Listener::previewDisplayAboutToBeDestroyed);

            return true;
        }

        void onSurfaceTextureSizeChanged (LocalRef<jobject>& /*surface*/, int widthToUse, int heightToUse) override
        {
            JUCE_CAMERA_LOG ("onSurfaceTextureSizeChanged()");

            width = widthToUse;
            height = heightToUse;

            updateSurfaceTransform();
        }

        void onSurfaceTextureUpdated (LocalRef<jobject>& /*surface*/) override
        {
            JUCE_CAMERA_LOG ("onSurfaceTextureUpdated()");
        }

        JUCE_DECLARE_NON_COPYABLE (PreviewDisplay)
    };
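
    // PreviewDisplay wraps an android.view.TextureView. updateSurfaceTransform() above compensates for
    // display rotation: in landscape (rotation constants 1 and 3) it scales by height/width and
    // width/height to undo the implicit stretch, rotates by 90 * (rotation - 2) degrees, and translates
    // so the rotated content stays inside the view bounds.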
    //==============================================================================
    class ImageReader : private ImageReaderOnImageAvailableListener::Owner
    {
    public:
        ImageReader (Pimpl& ownerToUse, GlobalRef& handlerToUse,
                     int imageWidth, int imageHeight, int cameraSensorOrientationToUse)
            : owner (ownerToUse),
              cameraSensorOrientation (cameraSensorOrientationToUse),
              imageReader (getEnv()->CallStaticObjectMethod (AndroidImageReader, AndroidImageReader.newInstance,
                                                             imageWidth, imageHeight, StreamConfigurationMap::jpegImageFormat,
                                                             numImagesToKeep)),
              onImageAvailableListener (*this)
        {
            getEnv()->CallVoidMethod (imageReader, AndroidImageReader.setOnImageAvailableListener,
                                      CreateJavaInterface (&onImageAvailableListener,
                                                           "android/media/ImageReader$OnImageAvailableListener").get(),
                                      handlerToUse.get());
        }

        ~ImageReader()
        {
            getEnv()->CallVoidMethod (imageReader, AndroidImageReader.close);
        }

        LocalRef<jobject> getSurface() const
        {
            return LocalRef<jobject> (getEnv()->CallObjectMethod (imageReader, AndroidImageReader.getSurface));
        }

        void resetNotificationFlag()
        {
            hasNotifiedListeners.set (0);
        }

    private:
        Pimpl& owner;
        int cameraSensorOrientation;
        GlobalRef imageReader;
        ImageReaderOnImageAvailableListener onImageAvailableListener;

        static constexpr int numImagesToKeep = 2;
        Atomic<int> hasNotifiedListeners { 0 };

        JUCE_DECLARE_WEAK_REFERENCEABLE (ImageReader)

        void onImageAvailable (LocalRef<jobject>& /*imageReader*/) override
        {
            JUCE_CAMERA_LOG ("onImageAvailable()");

            auto* env = getEnv();

            auto jImage = LocalRef<jobject> (env->CallObjectMethod (imageReader, AndroidImageReader.acquireLatestImage));

            if (jImage.get() == nullptr)
                return;

            auto cameraLensFrontFacing = owner.getCameraLensFacing() == 0;

            // NB: we could use the sensor orientation here to get the real-world orientation, but then
            // the resulting image might not match the UI orientation.
            auto image = androidImageToJuceWithFixedOrientation (jImage, owner.deviceOrientationChangeListener.getDeviceOrientation(),
                                                                 Desktop::getInstance().getCurrentOrientation(),
                                                                 cameraLensFrontFacing,
                                                                 cameraSensorOrientation);

            env->CallVoidMethod (jImage, AndroidImage.close);

            WeakReference<ImageReader> safeThis (this);

            owner.callListeners (image);

            // Android may take multiple pictures before it handles a request to stop.
            if (hasNotifiedListeners.compareAndSetBool (1, 0))
                MessageManager::callAsync ([safeThis, image]() mutable { if (safeThis != nullptr) safeThis->owner.notifyPictureTaken (image); });
        }

        struct ImageBuffer
        {
            LocalRef<jbyteArray> byteArray;
            int size;
        };

        static Image androidImageToJuceWithFixedOrientation (const LocalRef<jobject>& androidImage,
                                                             Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
                                                             Desktop::DisplayOrientation targetOrientation,
                                                             bool cameraLensFrontFacing,
                                                             int cameraSensorOrientation)
        {
            auto* env = getEnv();

            auto planes = LocalRef<jobjectArray> ((jobjectArray) env->CallObjectMethod (androidImage, AndroidImage.getPlanes));
            jassert (env->GetArrayLength (planes) > 0);

            auto plane = LocalRef<jobject> (env->GetObjectArrayElement (planes, 0));
            auto byteBuffer = LocalRef<jobject> (env->CallObjectMethod (plane, AndroidImagePlane.getBuffer));

            ImageBuffer correctedBuffer = getImageBufferWithCorrectedOrientationFrom (byteBuffer, deviceOrientationFromAccelerometerSensor,
                                                                                      targetOrientation, cameraLensFrontFacing, cameraSensorOrientation);

            jbyte* rawBytes = env->GetByteArrayElements (correctedBuffer.byteArray, nullptr);

            Image result = ImageFileFormat::loadFrom (rawBytes, (size_t) correctedBuffer.size);

            env->ReleaseByteArrayElements (correctedBuffer.byteArray, rawBytes, 0);

            return result;
        }

        static ImageBuffer getImageBufferWithCorrectedOrientationFrom (const LocalRef<jobject>& imagePlaneBuffer,
                                                                       Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
                                                                       Desktop::DisplayOrientation targetOrientation,
                                                                       bool cameraLensFrontFacing,
                                                                       int cameraSensorOrientation)
        {
            auto* env = getEnv();

            auto bufferSize = env->CallIntMethod (imagePlaneBuffer, JavaByteBuffer.remaining);
            auto byteArray = LocalRef<jbyteArray> (env->NewByteArray (bufferSize));
            env->CallObjectMethod (imagePlaneBuffer, JavaByteBuffer.get, byteArray.get());

            auto orientationsEnabled = Desktop::getInstance().getOrientationsEnabled() & ~Desktop::upsideDown;

            auto rotationAngle = getRotationAngle (deviceOrientationFromAccelerometerSensor, targetOrientation,
                                                   cameraLensFrontFacing, cameraSensorOrientation);

            if (rotationAngle == 0)
            {
                // Nothing to do, just get the bytes
                return { byteArray, bufferSize };
            }

            auto origBitmap = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmapFactory,
                                                                              AndroidBitmapFactory.decodeByteArray,
                                                                              byteArray.get(), (jint) 0, (jint) bufferSize));

            auto correctedBitmap = getBitmapWithCorrectOrientationFrom (origBitmap, rotationAngle);

            auto byteArrayOutputStream = LocalRef<jobject> (env->NewObject (ByteArrayOutputStream,
                                                                            ByteArrayOutputStream.constructor));

            auto jCompressFormatString = javaString ("JPEG");
            auto compressFormat = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmapCompressFormat,
                                                                                  AndroidBitmapCompressFormat.valueOf,
                                                                                  jCompressFormatString.get()));

            if (env->CallBooleanMethod (correctedBitmap, AndroidBitmap.compress, compressFormat.get(),
                                        (jint) 100, byteArrayOutputStream.get()) != 0)
            {
                auto correctedByteArray = LocalRef<jbyteArray> ((jbyteArray) env->CallObjectMethod (byteArrayOutputStream,
                                                                                                    ByteArrayOutputStream.toByteArray));

                int correctedByteArraySize = env->CallIntMethod (byteArrayOutputStream, ByteArrayOutputStream.size);

                return { correctedByteArray, correctedByteArraySize };
            }

            jassertfalse;
            // fallback, return original bitmap
            return { byteArray, bufferSize };
        }

        static int getRotationAngle (Desktop::DisplayOrientation deviceOrientationFromAccelerometerSensor,
                                     Desktop::DisplayOrientation targetOrientation,
                                     bool cameraLensFrontFacing,
                                     int cameraSensorOrientation)
        {
            auto orientationsEnabled = Desktop::getInstance().getOrientationsEnabled() & ~Desktop::upsideDown;

            auto isSensorOrientationHorizontal = deviceOrientationFromAccelerometerSensor == Desktop::rotatedAntiClockwise
                                              || deviceOrientationFromAccelerometerSensor == Desktop::rotatedClockwise;

            if (cameraLensFrontFacing && isSensorOrientationHorizontal)
            {
                // flip angles for front camera
                return getRotationAngle (deviceOrientationFromAccelerometerSensor, targetOrientation, false, (cameraSensorOrientation + 180) % 360);
            }

            switch (targetOrientation)
            {
                case Desktop::rotatedAntiClockwise:
                    return cameraSensorOrientation == 90 ? 0 : 180;
                case Desktop::rotatedClockwise:
                    return cameraSensorOrientation == 90 ? 180 : 0;
                case Desktop::upright:
                case Desktop::upsideDown:
                    if ((targetOrientation == Desktop::upright && ! cameraLensFrontFacing)
                        || (targetOrientation == Desktop::upsideDown && cameraLensFrontFacing))
                    {
                        return cameraSensorOrientation;
                    }
                    else
                    {
                        if (deviceOrientationFromAccelerometerSensor == Desktop::upright || deviceOrientationFromAccelerometerSensor == Desktop::upsideDown)
                            return cameraSensorOrientation;
                        else
                            return (cameraSensorOrientation + 180) % 360;
                    }
                    break;
                default:
                    return 0;
            }
        }

        static LocalRef<jobject> getBitmapWithCorrectOrientationFrom (LocalRef<jobject>& origBitmap, int rotationAngle)
        {
            auto* env = getEnv();

            auto origBitmapWidth  = env->CallIntMethod (origBitmap, AndroidBitmap.getWidth);
            auto origBitmapHeight = env->CallIntMethod (origBitmap, AndroidBitmap.getHeight);

            auto orientationsEnabled = Desktop::getInstance().getOrientationsEnabled() & ~Desktop::upsideDown;

            auto matrix = LocalRef<jobject> (env->NewObject (AndroidMatrix, AndroidMatrix.constructor));
            env->CallBooleanMethod (matrix, AndroidMatrix.postRotate, (jfloat) rotationAngle, (jfloat) 0, (jfloat) 0);

            auto rotatedBitmap = LocalRef<jobject> (env->CallStaticObjectMethod (AndroidBitmap, AndroidBitmap.createBitmapFrom,
                                                                                 origBitmap.get(), (jint) 0, (jint) 0,
                                                                                 (jint) origBitmapWidth, (jint) origBitmapHeight,
                                                                                 matrix.get(), true));

            env->CallVoidMethod (origBitmap, AndroidBitmap.recycle);

            return rotatedBitmap;
        }
    };
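
    // ImageReader's orientation fix-up above is a decode/rotate/re-encode round trip: the JPEG plane is
    // copied into a byte array, decoded with BitmapFactory.decodeByteArray, rotated via Matrix.postRotate
    // by the angle computed in getRotationAngle(), and compressed back to JPEG at quality 100 before being
    // handed to ImageFileFormat::loadFrom(). When the computed angle is 0, the original bytes are used as-is.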
    //==============================================================================
    class MediaRecorder : private MediaRecorderOnInfoListener::Owner,
                          private MediaRecorderOnErrorListener::Owner
    {
    public:
        MediaRecorder (const String& outputFilePath, int videoWidth, int videoHeight,
                       int sensorOrientation, int cameraLensFacing)
            : onInfoListener (*this),
              onErrorListener (*this),
              mediaRecorder (LocalRef<jobject> (getEnv()->NewObject (AndroidMediaRecorder,
                                                                     AndroidMediaRecorder.constructor)))
        {
            auto* env = getEnv();

            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOnInfoListener,
                                 CreateJavaInterface (&onInfoListener,
                                                      "android/media/MediaRecorder$OnInfoListener").get());

            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOnErrorListener,
                                 CreateJavaInterface (&onErrorListener,
                                                      "android/media/MediaRecorder$OnErrorListener").get());

            // NB: the order of function calls here is enforced, and exceptions will be thrown if
            // the order is changed.
            static constexpr int audioSourceMic = 1;
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setAudioSource, (jint) audioSourceMic);

            static constexpr int videoSourceSurface = 2;
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoSource, (jint) videoSourceSurface);

            static constexpr int outputFormatMPEG4 = 2;
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOutputFormat, (jint) outputFormatMPEG4);

            static constexpr int audioEncoderAAC = 3;
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setAudioEncoder, (jint) audioEncoderAAC);

            static constexpr int videoEncoderH264 = 2;
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoEncoder, (jint) videoEncoderH264);

            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoEncodingBitRate, (jint) 10000000);
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoFrameRate, (jint) 30);

            auto frontFacing = cameraLensFacing == 0;

            auto useInverseDegrees = frontFacing && sensorOrientation == 90;

            int orientationHint = getOrientationHint (useInverseDegrees, sensorOrientation);
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOrientationHint, (jint) orientationHint);

            getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setVideoSize, (jint) videoWidth, (jint) videoHeight);
            getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.setOutputFile, javaString (outputFilePath).get());
            getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.prepare);
        }

        ~MediaRecorder()
        {
            getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.release);
        }

        LocalRef<jobject> getSurface()
        {
            return LocalRef<jobject> (getEnv()->CallObjectMethod (mediaRecorder, AndroidMediaRecorder.getSurface));
        }

        void start()
        {
            lockScreenOrientation();

            getEnv()->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.start);
            hasStartedRecording = true;
        }

        void stop()
        {
            // A request to stop can be sent before recording has had a chance to start, so
            // ignore the request rather than calling AndroidMediaRecorder.stop because
            // otherwise MediaRecorder will throw an exception and...
            if (! hasStartedRecording)
                return;

            hasStartedRecording = false;

            auto* env = getEnv();
            env->CallVoidMethod (mediaRecorder, AndroidMediaRecorder.stop);

            // ... ignore RuntimeException that can be thrown if stop() was called after recording
            // has started but before any frame was written to a file. This is not an error.
            auto exception = LocalRef<jobject> (env->ExceptionOccurred());
            if (exception != 0)
                env->ExceptionClear();

            unlockScreenOrientation();
        }
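
        // The numeric constants used when configuring the recorder above correspond to the Android
        // MediaRecorder enums (AudioSource.MIC = 1, VideoSource.SURFACE = 2, OutputFormat.MPEG_4 = 2,
        // AudioEncoder.AAC = 3, VideoEncoder.H264 = 2); they are hard-coded here, presumably so that
        // no extra JNI lookups are needed.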
  1141. private:
  1142. MediaRecorderOnInfoListener onInfoListener;
  1143. MediaRecorderOnErrorListener onErrorListener;
  1144. GlobalRef mediaRecorder;
  1145. bool hasStartedRecording = false;
  1146. int orientationsEnabled = -1;
  1147. void lockScreenOrientation()
  1148. {
  1149. orientationsEnabled = Desktop::getInstance().getOrientationsEnabled();
  1150. auto o = Desktop::getInstance().getCurrentOrientation();
  1151. Desktop::getInstance().setOrientationsEnabled (o);
  1152. }
  1153. static jint juceOrientationToNativeOrientation (int orientations) noexcept
  1154. {
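// These values mirror the Android SDK's ActivityInfo.SCREEN_ORIENTATION_* constants.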
  1155. enum
  1156. {
  1157. SCREEN_ORIENTATION_LANDSCAPE = 0,
  1158. SCREEN_ORIENTATION_PORTRAIT = 1,
  1159. SCREEN_ORIENTATION_USER = 2,
  1160. SCREEN_ORIENTATION_REVERSE_LANDSCAPE = 8,
  1161. SCREEN_ORIENTATION_REVERSE_PORTRAIT = 9,
  1162. SCREEN_ORIENTATION_USER_LANDSCAPE = 11,
  1163. SCREEN_ORIENTATION_USER_PORTRAIT = 12,
  1164. };
  1165. switch (orientations)
  1166. {
  1167. case Desktop::upright: return (jint) SCREEN_ORIENTATION_PORTRAIT;
  1168. case Desktop::upsideDown: return (jint) SCREEN_ORIENTATION_REVERSE_PORTRAIT;
  1169. case Desktop::upright + Desktop::upsideDown: return (jint) SCREEN_ORIENTATION_USER_PORTRAIT;
  1170. case Desktop::rotatedAntiClockwise: return (jint) SCREEN_ORIENTATION_LANDSCAPE;
  1171. case Desktop::rotatedClockwise: return (jint) SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
  1172. case Desktop::rotatedClockwise + Desktop::rotatedAntiClockwise: return (jint) SCREEN_ORIENTATION_USER_LANDSCAPE;
  1173. default: return (jint) SCREEN_ORIENTATION_USER;
  1174. }
  1175. }
  1176. void unlockScreenOrientation()
  1177. {
  1178. Desktop::getInstance().setOrientationsEnabled (orientationsEnabled);
  1179. }
  1180. void onInfo (LocalRef<jobject>& recorder, int what, int extra) override
  1181. {
  1182. ignoreUnused (recorder, what, extra);
  1183. JUCE_CAMERA_LOG ("MediaRecorder::OnInfo: " + getInfoStringFromCode (what)
  1184. + ", extra code = " + String (extra));
  1185. }
  1186. void onError (LocalRef<jobject>& recorder, int what, int extra) override
  1187. {
  1188. ignoreUnused (recorder, what, extra);
  1189. JUCE_CAMERA_LOG ("MediaRecorder::onError: " + getErrorStringFromCode (what)
  1190. + ", extra code = " + String (extra));
  1191. }
  1192. static String getInfoStringFromCode (int what)
  1193. {
  1194. enum
  1195. {
  1196. MEDIA_RECORDER_INFO_UNKNOWN = 1,
  1197. MEDIA_RECORDER_INFO_MAX_DURATION_REACHED = 800,
  1198. MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED = 801,
  1199. MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING = 802,
  1200. MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED = 803
  1201. };
  1202. switch (what)
  1203. {
  1204. case MEDIA_RECORDER_INFO_UNKNOWN: return { "Unknown info" };
  1205. case MEDIA_RECORDER_INFO_MAX_DURATION_REACHED: return { "Max duration reached" };
  1206. case MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED: return { "Max filesize reached" };
  1207. case MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING: return { "Max filesize approaching" };
  1208. case MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED: return { "Next output file started" };
  1209. default: return String (what);
  1210. };
  1211. }
  1212. static String getErrorStringFromCode (int what)
  1213. {
  1214. enum
  1215. {
  1216. MEDIA_RECORDER_ERROR_UNKNOWN = 1,
  1217. MEDIA_ERROR_SERVER_DIED = 100
  1218. };
  1219. switch (what)
  1220. {
  1221. case MEDIA_RECORDER_ERROR_UNKNOWN: return { "Unknown error" };
  1222. case MEDIA_ERROR_SERVER_DIED: return { "Server died" };
  1223. default: return String (what);
  1224. };
  1225. }
  1226. static int getOrientationHint (bool useInverseDegrees, int cameraSensorOrientation)
  1227. {
  1228. auto* env = getEnv();
  1229. auto windowManager = LocalRef<jobject> (env->CallObjectMethod (android.activity, JuceAppActivity.getWindowManager));
  1230. auto display = LocalRef<jobject> (env->CallObjectMethod (windowManager, AndroidWindowManager.getDefaultDisplay));
  1231. auto rotation = env->CallIntMethod (display, AndroidDisplay.getRotation);
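// Display.getRotation() returns one of the Surface.ROTATION_* constants (0..3), mirrored below.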
  1232. enum
  1233. {
  1234. ROTATION_0 = 0,
  1235. ROTATION_90,
  1236. ROTATION_180,
  1237. ROTATION_270
  1238. };
  1239. int hint = 0;
  1240. switch (rotation)
  1241. {
  1242. case ROTATION_0: hint = cameraSensorOrientation; break;
  1243. case ROTATION_90: hint = useInverseDegrees ? 180 : 0; break;
  1244. case ROTATION_180: hint = cameraSensorOrientation + 180; break;
  1245. case ROTATION_270: hint = useInverseDegrees ? 0 : 180; break;
  1246. default: jassertfalse;
  1247. }
  1248. return (hint + 360) % 360;
  1249. }
  1250. };
  1251. //==============================================================================
  1252. class ScopedCameraDevice
  1253. {
  1254. public:
  1255. //==============================================================================
  1256. class CaptureSession
  1257. {
  1258. public:
  1259. struct ConfiguredCallback
  1260. {
  1261. virtual ~ConfiguredCallback() {}
  1262. virtual void captureSessionConfigured (CaptureSession*) = 0;
  1263. };
  1264. ~CaptureSession()
  1265. {
  1266. bool calledClose = false;
  1267. auto* env = getEnv();
  1268. {
  1269. const ScopedLock lock (captureSessionLock);
  1270. if (captureSession.get() != nullptr)
  1271. {
  1272. calledClose = true;
  1273. env->CallVoidMethod (captureSession, CameraCaptureSession.close);
  1274. }
  1275. }
  1276. auto exception = LocalRef<jobject> (env->ExceptionOccurred());
1277. // When an exception occurs, CameraCaptureSession.close will never finish, so
1278. // we should not wait for it. For a fatal error an exception does occur, but
1279. // it is caught internally in Java...
  1280. if (exception != 0 || scopedCameraDevice.fatalErrorOccurred.get())
  1281. {
  1282. JUCE_CAMERA_LOG ("Exception or fatal error occurred while closing Capture Session, closing by force");
  1283. env->ExceptionClear();
  1284. }
  1285. else if (calledClose)
  1286. {
  1287. pendingClose.set (1);
  1288. closedEvent.wait (-1);
  1289. }
  1290. }
  1291. bool openedOk() const noexcept { return captureSession != nullptr; }
  1292. const GlobalRef& getNativeSession() const { return captureSession; }
  1293. bool start (const LocalRef<jobject>& targetSurfacesList, GlobalRef& handlerToUse)
  1294. {
  1295. if (! openedOk())
  1296. {
  1297. jassertfalse;
  1298. return false;
  1299. }
  1300. auto* env = getEnv();
  1301. auto numSurfaces = env->CallIntMethod (targetSurfacesList, JavaArrayList.size);
  1302. for (int i = 0; i < numSurfaces; ++i)
  1303. {
  1304. auto surface = LocalRef<jobject> (env->CallObjectMethod (targetSurfacesList, JavaArrayList.get, (jint) i));
  1305. env->CallVoidMethod (captureRequestBuilder, CaptureRequestBuilder.addTarget, surface.get());
  1306. }
  1307. previewCaptureRequest = GlobalRef (env->CallObjectMethod (captureRequestBuilder, CaptureRequestBuilder.build));
  1308. env->CallIntMethod (captureSession, CameraCaptureSession.setRepeatingRequest,
  1309. previewCaptureRequest.get(), nullptr, handlerToUse.get());
  1310. return true;
  1311. }
  1312. void takeStillPicture (jobject targetSurface)
  1313. {
  1314. if (stillPictureTaker == nullptr)
  1315. {
1316. // A picture can only be taken once the session has been successfully configured!
  1317. jassertfalse;
  1318. return;
  1319. }
  1320. auto* env = getEnv();
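// 2 == CameraDevice.TEMPLATE_STILL_CAPTURE in the Android SDK.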
  1321. static constexpr int templateStillCapture = 2;
  1322. auto builder = LocalRef<jobject> (env->CallObjectMethod (scopedCameraDevice.cameraDevice,
  1323. AndroidCameraDevice.createCaptureRequest,
  1324. (jint) templateStillCapture));
  1325. env->CallVoidMethod (builder, CaptureRequestBuilder.addTarget, targetSurface);
  1326. setCaptureRequestBuilderIntegerKey (builder.get(), CaptureRequest.CONTROL_AF_MODE, autoFocusMode);
  1327. auto stillPictureCaptureRequest = LocalRef<jobject> (env->CallObjectMethod (builder, CaptureRequestBuilder.build));
  1328. stillPictureTaker->takePicture (stillPictureCaptureRequest.get());
  1329. }
  1330. private:
  1331. //==============================================================================
  1332. class StillPictureTaker : private AndroidRunnable::Owner
  1333. {
  1334. public:
  1335. StillPictureTaker (GlobalRef& captureSessionToUse, GlobalRef& captureRequestBuilderToUse,
  1336. GlobalRef& previewCaptureRequestToUse, GlobalRef& handlerToUse,
  1337. int autoFocusModeToUse)
  1338. : captureSession (captureSessionToUse),
  1339. captureRequestBuilder (captureRequestBuilderToUse),
  1340. previewCaptureRequest (previewCaptureRequestToUse),
  1341. handler (handlerToUse),
  1342. runnable (*this),
  1343. captureSessionPreviewCaptureCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionCaptureCallback,
  1344. CameraCaptureSessionCaptureCallback.constructor,
  1345. android.activity.get(),
  1346. reinterpret_cast<jlong> (this),
  1347. true))),
  1348. captureSessionStillPictureCaptureCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionCaptureCallback,
  1349. CameraCaptureSessionCaptureCallback.constructor,
  1350. android.activity.get(),
  1351. reinterpret_cast<jlong> (this),
  1352. false))),
  1353. autoFocusMode (autoFocusModeToUse)
  1354. {
  1355. }
  1356. void takePicture (jobject stillPictureCaptureRequestToUse)
  1357. {
  1358. JUCE_CAMERA_LOG ("Taking picture...");
  1359. stillPictureCaptureRequest = GlobalRef (stillPictureCaptureRequestToUse);
  1360. lockFocus();
  1361. }
  1362. private:
  1363. GlobalRef& captureSession;
  1364. GlobalRef& captureRequestBuilder;
  1365. GlobalRef& previewCaptureRequest;
  1366. GlobalRef& handler;
  1367. AndroidRunnable runnable;
  1368. GlobalRef delayedCaptureRunnable;
  1369. GlobalRef captureSessionPreviewCaptureCallback;
  1370. GlobalRef stillPictureCaptureRequest;
  1371. GlobalRef captureSessionStillPictureCaptureCallback;
  1372. int autoFocusMode;
  1373. enum class State
  1374. {
  1375. idle = 0,
  1376. pendingFocusLock,
  1377. pendingExposurePrecapture,
  1378. pendingExposurePostPrecapture,
  1379. pictureTaken
  1380. };
  1381. State currentState = State::idle;
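// The still-capture flow follows the usual Camera2 pattern: lock focus (CONTROL_AF_TRIGGER_START),
// optionally run an auto-exposure precapture sequence, capture the still image, then unlock
// focus and restore the repeating preview request.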
  1382. void lockFocus()
  1383. {
  1384. if (Pimpl::checkHasExceptionOccurred())
  1385. return;
  1386. JUCE_CAMERA_LOG ("Performing auto-focus if possible...");
  1387. currentState = State::pendingFocusLock;
  1388. auto* env = getEnv();
  1389. // NB: auto-focus may be unavailable on a device, in which case it may have already
  1390. // automatically adjusted the exposure. We check for that in updateState().
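// 1 == CameraMetadata.CONTROL_AF_TRIGGER_START: requests a one-off auto-focus sweep.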
  1391. static constexpr int controlAfTriggerStart = 1;
  1392. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1393. CaptureRequest.CONTROL_AF_TRIGGER,
  1394. controlAfTriggerStart);
  1395. auto previewRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1396. CaptureRequestBuilder.build));
  1397. env->CallIntMethod (captureSession, CameraCaptureSession.capture, previewRequest.get(),
  1398. captureSessionPreviewCaptureCallback.get(), handler.get());
  1399. }
  1400. void updateState (jobject captureResult)
  1401. {
1402. // An IllegalStateException can be thrown when accessing the CaptureSession,
1403. // claiming that the capture session was already closed, but we may not have
1404. // received the relevant callback yet, so check for this and bail out when needed.
  1405. if (Pimpl::checkHasExceptionOccurred())
  1406. return;
  1407. auto* env = getEnv();
  1408. switch (currentState)
  1409. {
  1410. case State::pendingFocusLock:
  1411. {
  1412. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingFocusLock...");
  1413. auto controlAfStateValue = getCaptureResultIntegerKeyValue (CaptureResult.CONTROL_AF_STATE, captureResult);
  1414. if (controlAfStateValue.get() == nullptr)
  1415. {
  1416. captureStillPictureDelayed();
  1417. return;
  1418. }
1419. auto autoFocusNotAvailable = autoFocusMode == 0;
1420. if (autoFocusNotAvailable || autoFocusHasFinished (controlAfStateValue))
  1421. {
  1422. auto controlAeStateIntValue = getControlAEState (captureResult);
  1423. static constexpr int controlAeStateConverged = 2;
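// 2 == CameraMetadata.CONTROL_AE_STATE_CONVERGED; -1 means the AE state key was not reported.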
  1424. if (controlAeStateIntValue == -1 || controlAeStateIntValue == controlAeStateConverged)
  1425. {
  1426. currentState = State::pictureTaken;
  1427. captureStillPictureDelayed();
  1428. }
  1429. else
  1430. {
  1431. runPrecaptureSequence();
  1432. }
  1433. }
  1434. break;
  1435. }
  1436. case State::pendingExposurePrecapture:
  1437. {
  1438. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingExposurePrecapture...");
  1439. auto controlAeStateIntValue = getControlAEState (captureResult);
  1440. static constexpr int controlAeStateFlashRequired = 4;
  1441. static constexpr int controlAeStatePrecapture = 5;
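// 4 == CONTROL_AE_STATE_FLASH_REQUIRED, 5 == CONTROL_AE_STATE_PRECAPTURE (CameraMetadata values).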
  1442. if (controlAeStateIntValue == -1 || controlAeStateIntValue == controlAeStateFlashRequired
  1443. || controlAeStateIntValue == controlAeStatePrecapture)
  1444. {
  1445. currentState = State::pendingExposurePostPrecapture;
  1446. }
  1447. break;
  1448. }
  1449. case State::pendingExposurePostPrecapture:
  1450. {
  1451. JUCE_CAMERA_LOG ("Still picture capture, updateState(), State::pendingExposurePostPrecapture...");
  1452. auto controlAeStateIntValue = getControlAEState (captureResult);
  1453. static constexpr int controlAeStatePrecapture = 5;
  1454. if (controlAeStateIntValue == -1 || controlAeStateIntValue != controlAeStatePrecapture)
  1455. {
  1456. currentState = State::pictureTaken;
  1457. captureStillPictureDelayed();
  1458. }
  1459. break;
  1460. }
  1461. case State::idle:
  1462. case State::pictureTaken:
  1463. { /* do nothing */ break; }
  1464. };
  1465. }
  1466. static int getControlAEState (jobject captureResult)
  1467. {
  1468. auto controlAeStateValue = getCaptureResultIntegerKeyValue (CaptureResult.CONTROL_AE_STATE, captureResult);
  1469. return controlAeStateValue.get() != nullptr
  1470. ? getEnv()->CallIntMethod (controlAeStateValue, JavaInteger.intValue) : -1;
  1471. }
  1472. static bool autoFocusHasFinished (const LocalRef<jobject>& controlAfStateValue)
  1473. {
  1474. static constexpr int controlAfStateFocusedLocked = 4;
  1475. static constexpr int controlAfStateNotFocusedLocked = 5;
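// 4 == CONTROL_AF_STATE_FOCUSED_LOCKED, 5 == CONTROL_AF_STATE_NOT_FOCUSED_LOCKED (CameraMetadata values).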
  1476. auto controlAfStateIntValue = getEnv()->CallIntMethod (controlAfStateValue, JavaInteger.intValue);
  1477. return controlAfStateIntValue == controlAfStateFocusedLocked || controlAfStateIntValue == controlAfStateNotFocusedLocked;
  1478. }
  1479. static LocalRef<jobject> getCaptureResultIntegerKeyValue (jfieldID key, jobject captureResult)
  1480. {
  1481. auto* env = getEnv();
  1482. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CaptureResult, key));
  1483. return LocalRef<jobject> (env->CallObjectMethod (captureResult, CaptureResult.get, jKey.get()));
  1484. }
  1485. void captureStillPictureDelayed()
  1486. {
  1487. if (Pimpl::checkHasExceptionOccurred())
  1488. return;
  1489. JUCE_CAMERA_LOG ("Still picture capture, device ready, capturing now...");
  1490. auto* env = getEnv();
  1491. env->CallVoidMethod (captureSession, CameraCaptureSession.stopRepeating);
  1492. if (Pimpl::checkHasExceptionOccurred())
  1493. return;
  1494. env->CallVoidMethod (captureSession, CameraCaptureSession.abortCaptures);
  1495. if (Pimpl::checkHasExceptionOccurred())
  1496. return;
  1497. // Delay still picture capture for devices that can't handle it right after
  1498. // stopRepeating/abortCaptures calls.
  1499. if (delayedCaptureRunnable.get() == nullptr)
  1500. delayedCaptureRunnable = GlobalRef (CreateJavaInterface (&runnable, "java/lang/Runnable").get());
  1501. env->CallBooleanMethod (handler, AndroidHandler.postDelayed, delayedCaptureRunnable.get(), (jlong) 200);
  1502. }
  1503. void runPrecaptureSequence()
  1504. {
  1505. if (Pimpl::checkHasExceptionOccurred())
  1506. return;
  1507. auto* env = getEnv();
  1508. static constexpr int controlAePrecaptureTriggerStart = 1;
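// 1 == CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START: starts the auto-exposure precapture metering sequence.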
  1509. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1510. CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
  1511. controlAePrecaptureTriggerStart);
  1512. currentState = State::pendingExposurePrecapture;
  1513. auto previewRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1514. CaptureRequestBuilder.build));
  1515. env->CallIntMethod (captureSession, CameraCaptureSession.capture, previewRequest.get(),
  1516. captureSessionPreviewCaptureCallback.get(), handler.get());
  1517. }
  1518. void unlockFocus()
  1519. {
  1520. if (Pimpl::checkHasExceptionOccurred())
  1521. return;
  1522. JUCE_CAMERA_LOG ("Unlocking focus...");
  1523. currentState = State::idle;
  1524. auto* env = getEnv();
  1525. static constexpr int controlAfTriggerCancel = 2;
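// 2 == CameraMetadata.CONTROL_AF_TRIGGER_CANCEL: resets the auto-focus routine after the capture.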
  1526. CaptureSession::setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(),
  1527. CaptureRequest.CONTROL_AF_TRIGGER,
  1528. controlAfTriggerCancel);
  1529. auto resetAutoFocusRequest = LocalRef<jobject> (env->CallObjectMethod (captureRequestBuilder,
  1530. CaptureRequestBuilder.build));
  1531. env->CallIntMethod (captureSession, CameraCaptureSession.capture, resetAutoFocusRequest.get(),
  1532. nullptr, handler.get());
  1533. if (Pimpl::checkHasExceptionOccurred())
  1534. return;
  1535. // NB: for preview, using preview capture request again
  1536. env->CallIntMethod (captureSession, CameraCaptureSession.setRepeatingRequest, previewCaptureRequest.get(),
  1537. nullptr, handler.get());
  1538. }
  1539. //==============================================================================
  1540. void run() override
  1541. {
  1542. captureStillPicture();
  1543. }
  1544. void captureStillPicture()
  1545. {
  1546. getEnv()->CallIntMethod (captureSession, CameraCaptureSession.capture,
  1547. stillPictureCaptureRequest.get(), captureSessionStillPictureCaptureCallback.get(),
  1548. nullptr);
  1549. }
  1550. //==============================================================================
  1551. void cameraCaptureSessionCaptureCompleted (bool isPreview, jobject session, jobject request, jobject result)
  1552. {
  1553. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureCompleted()");
  1554. ignoreUnused (session, request);
  1555. if (isPreview)
  1556. updateState (result);
  1557. else if (currentState != State::idle)
  1558. unlockFocus();
  1559. }
  1560. void cameraCaptureSessionCaptureFailed (bool isPreview, jobject session, jobject request, jobject failure)
  1561. {
  1562. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureFailed()");
  1563. ignoreUnused (isPreview, session, request, failure);
  1564. }
  1565. void cameraCaptureSessionCaptureProgressed (bool isPreview, jobject session, jobject request, jobject partialResult)
  1566. {
  1567. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureProgressed()");
  1568. ignoreUnused (session, request);
  1569. if (isPreview)
  1570. updateState (partialResult);
  1571. }
  1572. void cameraCaptureSessionCaptureSequenceAborted (bool isPreview, jobject session, int sequenceId)
  1573. {
  1574. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureSequenceAborted()");
1575. ignoreUnused (isPreview, session, sequenceId);
  1576. }
  1577. void cameraCaptureSessionCaptureSequenceCompleted (bool isPreview, jobject session, int sequenceId, int64 frameNumber)
  1578. {
  1579. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureSequenceCompleted()");
  1580. ignoreUnused (isPreview, session, sequenceId, frameNumber);
  1581. }
  1582. void cameraCaptureSessionCaptureStarted (bool isPreview, jobject session, jobject request, int64 timestamp, int64 frameNumber)
  1583. {
  1584. JUCE_CAMERA_LOG ("cameraCaptureSessionCaptureStarted()");
  1585. ignoreUnused (isPreview, session, request, timestamp, frameNumber);
  1586. }
  1587. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1588. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1589. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1590. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1591. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1592. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1593. };
  1594. //==============================================================================
  1595. ScopedCameraDevice& scopedCameraDevice;
  1596. ConfiguredCallback& configuredCallback;
  1597. GlobalRef& handler;
  1598. GlobalRef captureRequestBuilder;
  1599. GlobalRef previewCaptureRequest;
  1600. GlobalRef captureSessionStateCallback;
  1601. int autoFocusMode;
  1602. GlobalRef captureSession;
  1603. CriticalSection captureSessionLock;
  1604. Atomic<int> pendingClose { 0 };
  1605. std::unique_ptr<StillPictureTaker> stillPictureTaker;
  1606. WaitableEvent closedEvent;
  1607. JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSession)
  1608. //==============================================================================
  1609. CaptureSession (ScopedCameraDevice& scopedCameraDeviceToUse, ConfiguredCallback& configuredCallbackToUse,
  1610. const LocalRef<jobject>& surfacesList, GlobalRef& handlerToUse,
  1611. int captureSessionTemplate, int autoFocusModeToUse)
  1612. : scopedCameraDevice (scopedCameraDeviceToUse),
  1613. configuredCallback (configuredCallbackToUse),
  1614. handler (handlerToUse),
  1615. captureRequestBuilder (LocalRef<jobject> (getEnv()->CallObjectMethod (scopedCameraDevice.cameraDevice,
  1616. AndroidCameraDevice.createCaptureRequest,
  1617. (jint) captureSessionTemplate))),
  1618. captureSessionStateCallback (LocalRef<jobject> (getEnv()->NewObject (CameraCaptureSessionStateCallback,
  1619. CameraCaptureSessionStateCallback.constructor,
  1620. android.activity.get(),
  1621. reinterpret_cast<jlong> (this)))),
  1622. autoFocusMode (autoFocusModeToUse)
  1623. {
  1624. auto* env = getEnv();
  1625. env->CallVoidMethod (scopedCameraDevice.cameraDevice, AndroidCameraDevice.createCaptureSession,
  1626. surfacesList.get(), captureSessionStateCallback.get(), handler.get());
  1627. static constexpr int controlModeAuto = 1;
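// 1 == CameraMetadata.CONTROL_MODE_AUTO.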
  1628. setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(), CaptureRequest.CONTROL_MODE, controlModeAuto);
  1629. setCaptureRequestBuilderIntegerKey (captureRequestBuilder.get(), CaptureRequest.CONTROL_AF_MODE, autoFocusMode);
  1630. }
  1631. static void setCaptureRequestBuilderIntegerKey (jobject captureRequestBuilder, jfieldID key, int value)
  1632. {
  1633. auto* env = getEnv();
  1634. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CaptureRequest, key));
  1635. auto jValue = LocalRef<jobject> (env->CallStaticObjectMethod (JavaInteger, JavaInteger.valueOf, (jint) value));
  1636. env->CallVoidMethod (captureRequestBuilder, CaptureRequestBuilder.set, jKey.get(), jValue.get());
  1637. }
  1638. void cameraCaptureSessionActive (jobject session)
  1639. {
  1640. JUCE_CAMERA_LOG ("cameraCaptureSessionActive()");
  1641. ignoreUnused (session);
  1642. }
  1643. void cameraCaptureSessionClosed (jobject session)
  1644. {
  1645. JUCE_CAMERA_LOG ("cameraCaptureSessionClosed()");
  1646. ignoreUnused (session);
  1647. closedEvent.signal();
  1648. }
  1649. void cameraCaptureSessionConfigureFailed (jobject session)
  1650. {
  1651. JUCE_CAMERA_LOG ("cameraCaptureSessionConfigureFailed()");
  1652. ignoreUnused (session);
  1653. WeakReference<CaptureSession> weakRef (this);
  1654. MessageManager::callAsync ([this, weakRef]()
  1655. {
  1656. if (weakRef == nullptr)
  1657. return;
  1658. configuredCallback.captureSessionConfigured (nullptr);
  1659. });
  1660. }
  1661. void cameraCaptureSessionConfigured (jobject session)
  1662. {
  1663. JUCE_CAMERA_LOG ("cameraCaptureSessionConfigured()");
  1664. if (pendingClose.get() == 1)
  1665. {
1666. // Already closing, bail out.
  1667. closedEvent.signal();
  1668. GlobalRef s (session);
  1669. MessageManager::callAsync ([s]()
  1670. {
  1671. getEnv()->CallVoidMethod (s, CameraCaptureSession.close);
  1672. });
  1673. return;
  1674. }
  1675. {
  1676. const ScopedLock lock (captureSessionLock);
  1677. captureSession = GlobalRef (session);
  1678. }
  1679. WeakReference<CaptureSession> weakRef (this);
  1680. MessageManager::callAsync ([this, weakRef]()
  1681. {
  1682. if (weakRef == nullptr)
  1683. return;
  1684. stillPictureTaker.reset (new StillPictureTaker (captureSession, captureRequestBuilder,
  1685. previewCaptureRequest, handler, autoFocusMode));
  1686. configuredCallback.captureSessionConfigured (this);
  1687. });
  1688. }
  1689. void cameraCaptureSessionReady (jobject session)
  1690. {
  1691. JUCE_CAMERA_LOG ("cameraCaptureSessionReady()");
  1692. ignoreUnused (session);
  1693. }
  1694. friend class ScopedCameraDevice;
  1695. friend void juce_cameraCaptureSessionActive (int64, void*);
  1696. friend void juce_cameraCaptureSessionClosed (int64, void*);
  1697. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  1698. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  1699. friend void juce_cameraCaptureSessionReady (int64, void*);
  1700. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1701. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1702. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1703. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1704. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1705. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1706. JUCE_DECLARE_NON_COPYABLE (CaptureSession)
  1707. };
  1708. //==============================================================================
  1709. ScopedCameraDevice (Pimpl& ownerToUse, const String& cameraIdToUse, GlobalRef& cameraManagerToUse,
  1710. GlobalRef& handlerToUse, int autoFocusModeToUse)
  1711. : owner (ownerToUse),
  1712. cameraId (cameraIdToUse),
  1713. cameraManager (cameraManagerToUse),
  1714. handler (handlerToUse),
  1715. cameraStateCallback (LocalRef<jobject> (getEnv()->NewObject (CameraDeviceStateCallback,
  1716. CameraDeviceStateCallback.constructor,
  1717. android.activity.get(),
  1718. reinterpret_cast<jlong> (this)))),
  1719. autoFocusMode (autoFocusModeToUse)
  1720. {
  1721. open();
  1722. }
  1723. ~ScopedCameraDevice()
  1724. {
  1725. close();
  1726. }
  1727. void open()
  1728. {
  1729. pendingOpen.set (1);
  1730. auto* env = getEnv();
  1731. env->CallVoidMethod (cameraManager, CameraManager.openCamera,
  1732. javaString (cameraId).get(),
  1733. cameraStateCallback.get(), handler.get());
1734. // If something went wrong we will be notified via the cameraDeviceStateError()
1735. // callback, so silence the redundant exception here.
  1736. auto exception = LocalRef<jobject> (env->ExceptionOccurred());
  1737. if (exception != 0)
  1738. env->ExceptionClear();
  1739. }
  1740. void close()
  1741. {
  1742. if (pendingClose.compareAndSetBool (1, 0))
  1743. {
  1744. auto* env = getEnv();
  1745. if (cameraDevice.get() != nullptr)
  1746. {
  1747. env->CallVoidMethod (cameraDevice, AndroidCameraDevice.close);
  1748. closedEvent.wait (-1);
  1749. }
  1750. pendingClose.set (0);
  1751. pendingOpen .set (0);
  1752. cameraDevice.clear();
  1753. }
  1754. }
  1755. bool openedOk() const { return cameraDevice != nullptr; }
  1756. bool hasErrorOccurred() const { return fatalErrorOccurred.get(); }
  1757. CaptureSession* createCaptureSession (CaptureSession::ConfiguredCallback& cc,
  1758. const LocalRef<jobject>& surfacesList,
  1759. GlobalRef& handlerToUse,
  1760. int captureSessionTemplate)
  1761. {
  1762. if (! openedOk())
  1763. {
  1764. jassertfalse;
  1765. return nullptr;
  1766. }
  1767. return new CaptureSession (*this, cc, surfacesList, handlerToUse, captureSessionTemplate, autoFocusMode);
  1768. }
  1769. private:
  1770. Pimpl& owner;
  1771. const String cameraId;
  1772. GlobalRef& cameraManager;
  1773. GlobalRef& handler;
  1774. GlobalRef cameraStateCallback;
  1775. int autoFocusMode;
  1776. GlobalRef cameraDevice;
  1777. Atomic<int> pendingOpen { 0 };
  1778. Atomic<int> pendingClose { 0 };
  1779. Atomic<int> fatalErrorOccurred { 0 };
  1780. String openError;
  1781. WaitableEvent closedEvent;
  1782. void cameraDeviceStateClosed()
  1783. {
  1784. JUCE_CAMERA_LOG ("cameraDeviceStateClosed()");
  1785. closedEvent.signal();
  1786. }
  1787. void cameraDeviceStateDisconnected()
  1788. {
  1789. JUCE_CAMERA_LOG ("cameraDeviceStateDisconnected()");
  1790. if (pendingOpen.compareAndSetBool (0, 1))
  1791. {
  1792. openError = "Device disconnected";
  1793. notifyOpenResult();
  1794. }
  1795. MessageManager::callAsync ([this]() { close(); });
  1796. }
  1797. void cameraDeviceStateError (int errorCode)
  1798. {
  1799. String error = cameraErrorCodeToString (errorCode);
  1800. JUCE_CAMERA_LOG ("cameraDeviceStateError(), error: " + error);
  1801. if (pendingOpen.compareAndSetBool (0, 1))
  1802. {
  1803. openError = error;
  1804. notifyOpenResult();
  1805. }
  1806. fatalErrorOccurred.set (1);
  1807. MessageManager::callAsync ([this, error]()
  1808. {
  1809. owner.cameraDeviceError (error);
  1810. close();
  1811. });
  1812. }
  1813. void cameraDeviceStateOpened (jobject cameraDeviceToUse)
  1814. {
  1815. JUCE_CAMERA_LOG ("cameraDeviceStateOpened()");
  1816. pendingOpen.set (0);
  1817. cameraDevice = GlobalRef (cameraDeviceToUse);
  1818. notifyOpenResult();
  1819. }
  1820. void notifyOpenResult()
  1821. {
  1822. MessageManager::callAsync ([this]() { owner.cameraOpenFinished (openError); });
  1823. }
  1824. friend void juce_cameraDeviceStateClosed (int64);
  1825. friend void juce_cameraDeviceStateDisconnected (int64);
  1826. friend void juce_cameraDeviceStateError (int64, int);
  1827. friend void juce_cameraDeviceStateOpened (int64, void*);
  1828. friend void juce_cameraCaptureSessionActive (int64, void*);
  1829. friend void juce_cameraCaptureSessionClosed (int64, void*);
  1830. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  1831. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  1832. friend void juce_cameraCaptureSessionReady (int64, void*);
  1833. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  1834. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  1835. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  1836. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  1837. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  1838. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  1839. };
  1840. //==============================================================================
  1841. struct CaptureSessionModeBase
  1842. {
  1843. virtual ~CaptureSessionModeBase() { }
  1844. virtual bool isVideoRecordSession() const = 0;
  1845. virtual void triggerStillPictureCapture() = 0;
  1846. };
  1847. //==============================================================================
  1848. template <typename Mode>
  1849. struct CaptureSessionMode : public CaptureSessionModeBase,
  1850. private PreviewDisplay::Listener,
  1851. private ScopedCameraDevice::CaptureSession::ConfiguredCallback
  1852. {
  1853. ~CaptureSessionMode()
  1854. {
  1855. captureSession.reset();
  1856. previewDisplay.removeListener (this);
  1857. }
  1858. bool isVideoRecordSession() const override
  1859. {
  1860. return Mode::isVideoRecord();
  1861. }
  1862. void triggerStillPictureCapture() override
  1863. {
  1864. if (captureSession == nullptr)
  1865. {
  1866. // The capture session must be ready before taking a still picture.
  1867. // Did you remember to create and show a preview display?
  1868. jassertfalse;
  1869. return;
  1870. }
  1871. crtp().takeStillPicture();
  1872. }
  1873. protected:
  1874. CaptureSessionMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse,
  1875. GlobalRef& handlerToUse, PreviewDisplay& pd, int cameraSensorOrientationToUse,
  1876. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  1877. : owner (ownerToUse),
  1878. scopedCameraDevice (cameraDeviceToUse),
  1879. handler (handlerToUse),
  1880. previewDisplay (pd),
  1881. cameraSensorOrientation (cameraSensorOrientationToUse),
  1882. cameraLensFacing (cameraLensFacingToUse),
  1883. streamConfigurationMap (streamConfigurationMapToUse)
  1884. {
  1885. WeakReference<CaptureSessionMode<Mode>> weakRef (this);
  1886. if (weakRef == nullptr)
  1887. return;
  1888. // async so that the object is fully constructed before the callback gets invoked
  1889. MessageManager::callAsync ([this, weakRef]()
  1890. {
  1891. if (weakRef == nullptr)
  1892. return;
  1893. previewDisplay.addListener (this);
  1894. });
  1895. }
  1896. Mode& crtp() { return static_cast<Mode&> (*this); }
  1897. void previewDisplayReady() override
  1898. {
  1899. jassert (previewDisplay.isReady());
  1900. JUCE_CAMERA_LOG ("previewDisplayReady()");
  1901. // close previous capture session first
  1902. captureSession.reset();
  1903. if (scopedCameraDevice.hasErrorOccurred())
  1904. {
1905. JUCE_CAMERA_LOG ("Device error detected, not creating a new camera session. The device needs to be reopened.");
  1906. return;
  1907. }
  1908. captureSession.reset (scopedCameraDevice.createCaptureSession (*this, crtp().getCaptureSessionSurfaces(),
  1909. handler, Mode::getTemplate()));
  1910. }
  1911. void previewDisplayAboutToBeDestroyed() override
  1912. {
  1913. JUCE_CAMERA_LOG ("previewDisplayAboutToBeDestroyed()");
  1914. stopPreview();
  1915. }
  1916. void captureSessionConfigured (ScopedCameraDevice::CaptureSession* session) override
  1917. {
  1918. if (session == nullptr)
  1919. {
  1920. owner.cameraDeviceError ("Failed to configure camera session.");
  1921. return;
  1922. }
  1923. jassert (session == captureSession.get());
  1924. startSession();
  1925. }
  1926. void startSession()
  1927. {
  1928. if (! captureSession->start (crtp().getTargetSurfaces(), handler))
  1929. {
  1930. jassertfalse;
  1931. JUCE_CAMERA_LOG ("Could not start capture session");
  1932. }
  1933. crtp().sessionStarted();
  1934. }
  1935. void stopPreview()
  1936. {
  1937. if (captureSession != nullptr)
  1938. {
  1939. auto session = captureSession->getNativeSession();
  1940. auto* env = getEnv();
  1941. env->CallVoidMethod (session, CameraCaptureSession.stopRepeating);
  1942. if (Pimpl::checkHasExceptionOccurred())
  1943. return;
  1944. env->CallVoidMethod (session, CameraCaptureSession.abortCaptures);
  1945. Pimpl::checkHasExceptionOccurred();
  1946. }
  1947. }
  1948. Pimpl& owner;
  1949. ScopedCameraDevice& scopedCameraDevice;
  1950. GlobalRef& handler;
  1951. PreviewDisplay& previewDisplay;
  1952. int cameraSensorOrientation;
  1953. int cameraLensFacing;
  1954. StreamConfigurationMap& streamConfigurationMap;
  1955. std::unique_ptr<ScopedCameraDevice::CaptureSession> captureSession;
  1956. JUCE_DECLARE_WEAK_REFERENCEABLE (CaptureSessionMode<Mode>)
  1957. };
  1958. //==============================================================================
  1959. struct CaptureSessionPreviewMode : public CaptureSessionMode<CaptureSessionPreviewMode>
  1960. {
  1961. CaptureSessionPreviewMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse, GlobalRef& handlerToUse,
  1962. PreviewDisplay& pd, ImageReader& ir, int cameraSensorOrientation,
  1963. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  1964. : CaptureSessionMode<CaptureSessionPreviewMode> (ownerToUse, cameraDeviceToUse, handlerToUse, pd,
  1965. cameraSensorOrientation, cameraLensFacingToUse, streamConfigurationMapToUse),
  1966. imageReader (ir)
  1967. {
  1968. }
  1969. // Surfaces passed to newly created capture session.
  1970. LocalRef<jobject> getCaptureSessionSurfaces() const
  1971. {
  1972. auto* env = getEnv();
  1973. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  1974. auto imageSurface = LocalRef<jobject> (imageReader.getSurface());
  1975. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 2));
  1976. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  1977. env->CallBooleanMethod (arrayList, JavaArrayList.add, imageSurface.get());
  1978. auto supported = streamConfigurationMap.isOutputSupportedForSurface (imageSurface);
1979. // If the output surface is not supported by this device, still image capture will not work!
  1980. jassert (supported);
  1981. return arrayList;
  1982. }
  1983. // Surfaces set as target during capture.
  1984. LocalRef<jobject> getTargetSurfaces() const
  1985. {
  1986. auto* env = getEnv();
  1987. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  1988. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 1));
  1989. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  1990. return arrayList;
  1991. }
  1992. static int getTemplate()
  1993. {
  1994. static constexpr int templatePreview = 1;
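// 1 == CameraDevice.TEMPLATE_PREVIEW in the Android SDK.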
  1995. return templatePreview;
  1996. }
  1997. static bool isVideoRecord() { return false; }
  1998. void sessionStarted() {}
  1999. void takeStillPicture()
  2000. {
  2001. imageReader.resetNotificationFlag();
  2002. captureSession->takeStillPicture (imageReader.getSurface());
  2003. }
  2004. private:
  2005. ImageReader& imageReader;
  2006. };
  2007. //==============================================================================
  2008. struct CaptureSessionVideoRecordingMode : public CaptureSessionMode<CaptureSessionVideoRecordingMode>
  2009. {
  2010. CaptureSessionVideoRecordingMode (Pimpl& ownerToUse, ScopedCameraDevice& cameraDeviceToUse, GlobalRef& handlerToUse,
  2011. PreviewDisplay& pd, MediaRecorder& mr, int cameraSensorOrientation,
  2012. int cameraLensFacingToUse, StreamConfigurationMap& streamConfigurationMapToUse)
  2013. : CaptureSessionMode<CaptureSessionVideoRecordingMode> (ownerToUse, cameraDeviceToUse, handlerToUse, pd,
  2014. cameraSensorOrientation, cameraLensFacingToUse, streamConfigurationMapToUse),
  2015. mediaRecorder (mr)
  2016. {
  2017. }
  2018. ~CaptureSessionVideoRecordingMode()
  2019. {
  2020. // We need to explicitly stop the preview before stopping the media recorder,
  2021. // because legacy devices can't handle recording stop before stopping the preview.
  2022. stopPreview();
  2023. mediaRecorder.stop();
  2024. }
  2025. // Surfaces passed to newly created capture session.
  2026. LocalRef<jobject> getCaptureSessionSurfaces() const
  2027. {
  2028. auto* env = getEnv();
  2029. auto previewSurface = LocalRef<jobject> (previewDisplay.createSurface());
  2030. auto mediaRecorderSurface = LocalRef<jobject> (mediaRecorder.getSurface());
  2031. auto arrayList = LocalRef<jobject> (env->NewObject (JavaArrayList, JavaArrayList.constructor, 2));
  2032. env->CallBooleanMethod (arrayList, JavaArrayList.add, previewSurface.get());
  2033. env->CallBooleanMethod (arrayList, JavaArrayList.add, mediaRecorderSurface.get());
  2034. return arrayList;
  2035. }
  2036. // Surfaces set as target during capture.
  2037. LocalRef<jobject> getTargetSurfaces() const
  2038. {
  2039. // Same surfaces used.
  2040. return getCaptureSessionSurfaces();
  2041. }
  2042. static int getTemplate()
  2043. {
  2044. static constexpr int templateRecord = 3;
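// 3 == CameraDevice.TEMPLATE_RECORD in the Android SDK.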
  2045. return templateRecord;
  2046. }
  2047. static bool isVideoRecord() { return true; }
  2048. void sessionStarted()
  2049. {
  2050. MessageManager::callAsync ([this]() { mediaRecorder.start(); });
  2051. }
  2052. void takeStillPicture()
  2053. {
  2054. // Taking still pictures while recording video is not supported on Android.
  2055. jassertfalse;
  2056. }
  2057. private:
  2058. MediaRecorder& mediaRecorder;
  2059. };
  2060. //==============================================================================
  2061. class DeviceOrientationChangeListener : private Timer
  2062. {
  2063. public:
  2064. DeviceOrientationChangeListener (PreviewDisplay& pd)
  2065. : previewDisplay (pd),
  2066. orientationEventListener (getEnv()->NewObject (OrientationEventListener,
  2067. OrientationEventListener.constructor,
  2068. android.activity.get(),
  2069. reinterpret_cast<jlong> (this),
  2070. android.activity.get(),
  2071. sensorDelayUI)),
  2072. canDetectChange (getEnv()->CallBooleanMethod (orientationEventListener,
  2073. OrientationEventListener.canDetectOrientation) != 0),
  2074. deviceOrientation (Desktop::getInstance().getCurrentOrientation()),
  2075. lastKnownScreenOrientation (deviceOrientation)
  2076. {
  2077. setEnabled (true);
  2078. }
  2079. ~DeviceOrientationChangeListener()
  2080. {
  2081. setEnabled (false);
  2082. }
  2083. void setEnabled (bool shouldBeEnabled)
  2084. {
  2085. if (shouldBeEnabled && ! canDetectChange)
  2086. {
2087. // This device does not support orientation listening; photos may end up with the wrong orientation!
  2088. jassertfalse;
  2089. return;
  2090. }
  2091. if (shouldBeEnabled)
  2092. getEnv()->CallVoidMethod (orientationEventListener, OrientationEventListener.enable);
  2093. else
  2094. getEnv()->CallVoidMethod (orientationEventListener, OrientationEventListener.disable);
  2095. }
  2096. bool isSupported() const noexcept { return canDetectChange; }
  2097. Desktop::DisplayOrientation getDeviceOrientation() const noexcept
  2098. {
  2099. return deviceOrientation;
  2100. }
  2101. private:
  2102. PreviewDisplay& previewDisplay;
  2103. GlobalRef orientationEventListener;
  2104. static constexpr jint sensorDelayUI = 2;
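// 2 == SensorManager.SENSOR_DELAY_UI in the Android SDK.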
  2105. bool canDetectChange;
  2106. Desktop::DisplayOrientation deviceOrientation;
  2107. Desktop::DisplayOrientation lastKnownScreenOrientation;
  2108. int numChecksForOrientationChange = 10;
  2109. void orientationChanged (int orientation)
  2110. {
  2111. jassert (orientation < 360);
  2112. // -1 == unknown
  2113. if (orientation < 0)
  2114. return;
  2115. auto oldOrientation = deviceOrientation;
2116. // NB: this assumes the device's natural orientation is always portrait, but some devices may be landscape...
  2117. if (orientation > (360 - 45) || orientation < 45)
  2118. deviceOrientation = Desktop::upright;
  2119. else if (orientation < 135)
  2120. deviceOrientation = Desktop::rotatedClockwise;
  2121. else if (orientation < 225)
  2122. deviceOrientation = Desktop::upsideDown;
  2123. else
  2124. deviceOrientation = Desktop::rotatedAntiClockwise;
  2125. if (oldOrientation != deviceOrientation)
  2126. {
  2127. lastKnownScreenOrientation = Desktop::getInstance().getCurrentOrientation();
2128. // We need to update the preview transform, but the screen orientation will change
2129. // slightly later than the sensor orientation does.
  2130. startTimer (500);
  2131. }
  2132. }
  2133. void timerCallback() override
  2134. {
  2135. auto currentOrientation = Desktop::getInstance().getCurrentOrientation();
  2136. if (lastKnownScreenOrientation != currentOrientation)
  2137. {
  2138. lastKnownScreenOrientation = currentOrientation;
  2139. stopTimer();
  2140. numChecksForOrientationChange = 10;
  2141. previewDisplay.updateSurfaceTransform();
  2142. return;
  2143. }
  2144. if (--numChecksForOrientationChange == 0)
  2145. {
  2146. stopTimer();
  2147. numChecksForOrientationChange = 10;
  2148. }
  2149. }
  2150. friend void juce_deviceOrientationChanged (int64, int);
  2151. };
  2152. //==============================================================================
  2153. CameraDevice& owner;
  2154. int minWidth, minHeight, maxWidth, maxHeight;
  2155. String cameraId;
  2156. InternalOpenCameraResultCallback cameraOpenCallback;
  2157. #if __ANDROID_API__ >= 21
  2158. AppPausedResumedListener appPausedResumedListener;
  2159. GlobalRef appPausedResumedListenerNative;
  2160. GlobalRef cameraManager;
  2161. GlobalRef cameraCharacteristics;
  2162. GlobalRef handlerThread;
  2163. GlobalRef handler;
  2164. StreamConfigurationMap streamConfigurationMap;
  2165. PreviewDisplay previewDisplay;
  2166. DeviceOrientationChangeListener deviceOrientationChangeListener;
  2167. std::unique_ptr<ImageReader> imageReader;
  2168. std::unique_ptr<MediaRecorder> mediaRecorder;
  2169. std::unique_ptr<CaptureSessionModeBase> currentCaptureSessionMode;
  2170. std::unique_ptr<ScopedCameraDevice> scopedCameraDevice;
  2171. CriticalSection listenerLock;
  2172. ListenerList<Listener> listeners;
  2173. std::function<void (const Image&)> pictureTakenCallback;
  2174. Time firstRecordedFrameTimeMs;
  2175. bool notifiedOfCameraOpening = false;
  2176. #endif
  2177. bool appWasPaused = false;
  2178. //==============================================================================
  2179. int getCameraSensorOrientation() const
  2180. {
  2181. return getCameraCharacteristicsIntegerKeyValue (CameraCharacteristics.SENSOR_ORIENTATION);
  2182. }
  2183. int getAutoFocusModeToUse() const
  2184. {
  2185. auto supportedModes = getSupportedAutoFocusModes();
  2186. enum
  2187. {
  2188. CONTROL_AF_MODE_OFF = 0,
  2189. CONTROL_AF_MODE_AUTO = 1,
  2190. CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4
  2191. };
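// The values above mirror CameraMetadata.CONTROL_AF_MODE_*; prefer continuous-picture AF,
// then single-shot auto-focus, and fall back to 'off' if neither is supported.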
  2192. if (supportedModes.contains (CONTROL_AF_MODE_CONTINUOUS_PICTURE))
  2193. return CONTROL_AF_MODE_CONTINUOUS_PICTURE;
  2194. if (supportedModes.contains (CONTROL_AF_MODE_AUTO))
  2195. return CONTROL_AF_MODE_AUTO;
  2196. return CONTROL_AF_MODE_OFF;
  2197. }
  2198. Array<int> getSupportedAutoFocusModes() const
  2199. {
  2200. auto* env = getEnv();
  2201. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics, CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES));
  2202. auto supportedModes = LocalRef<jintArray> ((jintArray) env->CallObjectMethod (cameraCharacteristics,
  2203. CameraCharacteristics.get,
  2204. jKey.get()));
  2205. return jintArrayToJuceArray (supportedModes);
  2206. }
  2207. static Array<int> jintArrayToJuceArray (const LocalRef<jintArray>& jArray)
  2208. {
  2209. auto* env = getEnv();
  2210. auto* jArrayElems = env->GetIntArrayElements (jArray, 0);
  2211. auto numElems = env->GetArrayLength (jArray);
  2212. Array<int> juceArray;
  2213. for (int s = 0; s < numElems; ++s)
  2214. juceArray.add (jArrayElems[s]);
  2215. env->ReleaseIntArrayElements (jArray, jArrayElems, 0);
  2216. return juceArray;
  2217. }
  2218. int getCameraCharacteristicsIntegerKeyValue (jfieldID key) const
  2219. {
  2220. auto* env = getEnv();
  2221. auto jKey = LocalRef<jobject> (env->GetStaticObjectField (CameraCharacteristics, key));
  2222. auto jValue = LocalRef<jobject> (env->CallObjectMethod (cameraCharacteristics,
  2223. CameraCharacteristics.get,
  2224. jKey.get()));
  2225. return env->CallIntMethod (jValue, JavaInteger.intValue);
  2226. }
  2227. int getCameraLensFacing() const
  2228. {
  2229. return getCameraCharacteristicsIntegerKeyValue (CameraCharacteristics.LENS_FACING);
  2230. }
  2231. //==============================================================================
  2232. void cameraOpenFinished (const String& error)
  2233. {
  2234. JUCE_CAMERA_LOG ("cameraOpenFinished(), error = " + error);
  2235. if (error.isEmpty())
  2236. {
  2237. setupStillImageSize();
  2238. startPreviewMode (*imageReader);
  2239. }
  2240. // Do not notify about camera being reopened on app resume.
  2241. if (! notifiedOfCameraOpening)
  2242. {
  2243. notifiedOfCameraOpening = true;
  2244. invokeCameraOpenCallback (error);
  2245. }
  2246. }
  2247. void cameraDeviceError (const String& error)
  2248. {
  2249. if (owner.onErrorOccurred != nullptr)
  2250. owner.onErrorOccurred (error);
  2251. }
  2252. void invokeCameraOpenCallback (const String& error)
  2253. {
  2254. JUCE_CAMERA_LOG ("invokeCameraOpenCallback(), error = " + error);
  2255. if (cameraOpenCallback != nullptr)
  2256. cameraOpenCallback (cameraId, error);
  2257. }
  2258. //==============================================================================
  2259. void callListeners (const Image& image)
  2260. {
  2261. const ScopedLock sl (listenerLock);
  2262. listeners.call ([=] (Listener& l) { l.imageReceived (image); });
  2263. }
  2264. void notifyPictureTaken (const Image& image)
  2265. {
  2266. JUCE_CAMERA_LOG ("notifyPictureTaken()");
  2267. if (pictureTakenCallback != nullptr)
  2268. pictureTakenCallback (image);
  2269. }
  2270. void triggerStillPictureCapture()
  2271. {
  2272. currentCaptureSessionMode->triggerStillPictureCapture();
  2273. }
  2274. //==============================================================================
  2275. void setupStillImageSize()
  2276. {
  2277. imageReader.reset();
  2278. auto imageSize = chooseBestSize (minWidth, minHeight, maxWidth, maxHeight,
  2279. streamConfigurationMap.getSupportedStillImageOutputSizes());
  2280. imageReader.reset (new ImageReader (*this, handler, imageSize.getWidth(), imageSize.getHeight(),
  2281. getCameraSensorOrientation()));
  2282. }
  2283. static Rectangle<int> chooseBestSize (int minWidth, int minHeight, int maxWidth, int maxHeight,
  2284. Array<Rectangle<int>> supportedSizes)
  2285. {
  2286. Rectangle<int> result;
  2287. for (auto& size : supportedSizes)
  2288. {
  2289. auto width = size.getWidth();
  2290. auto height = size.getHeight();
  2291. if (width < minWidth || width > maxWidth || height < minHeight || height > maxHeight)
  2292. continue;
  2293. if (size.contains (result))
  2294. result = size;
  2295. }
2296. // None of the supported sizes matches the required width & height limits; picking
2297. // the first one available...
  2298. jassert (! result.isEmpty());
  2299. if (result.isEmpty())
  2300. result = supportedSizes[0];
  2301. return result;
  2302. }
  2303. //==============================================================================
  2304. void startPreviewMode (ImageReader& ir)
  2305. {
  2306. if (currentCaptureSessionMode != nullptr && ! currentCaptureSessionMode->isVideoRecordSession())
  2307. return;
  2308. // previous mode has to be stopped first
  2309. jassert (currentCaptureSessionMode.get() == nullptr);
  2310. if (scopedCameraDevice == nullptr || ! scopedCameraDevice->openedOk())
  2311. return;
  2312. currentCaptureSessionMode.reset (new CaptureSessionPreviewMode (*this, *scopedCameraDevice, handler,
  2313. previewDisplay, ir,
  2314. getCameraSensorOrientation(),
  2315. getCameraLensFacing(),
  2316. streamConfigurationMap));
  2317. }
  2318. void startVideoRecordingMode (MediaRecorder& mr)
  2319. {
  2320. if (currentCaptureSessionMode != nullptr && currentCaptureSessionMode->isVideoRecordSession())
  2321. return;
  2322. // previous mode has to be stopped first
  2323. jassert (currentCaptureSessionMode.get() == nullptr);
  2324. jassert (scopedCameraDevice != nullptr && scopedCameraDevice->openedOk());
  2325. if (scopedCameraDevice == nullptr || ! scopedCameraDevice->openedOk())
  2326. return;
  2327. currentCaptureSessionMode.reset (new CaptureSessionVideoRecordingMode (*this, *scopedCameraDevice, handler,
  2328. previewDisplay, mr,
  2329. getCameraSensorOrientation(),
  2330. getCameraLensFacing(),
  2331. streamConfigurationMap));
  2332. }
  2333. //==============================================================================
  2334. void appPaused() override
  2335. {
  2336. JUCE_CAMERA_LOG ("appPaused, closing camera...");
  2337. appWasPaused = true;
  2338. deviceOrientationChangeListener.setEnabled (false);
  2339. // We need to restart the whole session mode when the app gets resumed.
  2340. currentCaptureSessionMode.reset();
  2341. if (scopedCameraDevice != nullptr)
  2342. scopedCameraDevice->close();
  2343. stopBackgroundThread();
  2344. }
  2345. void appResumed() override
  2346. {
2347. // Only care about the resumed event if a paused event was received first.
  2348. if (! appWasPaused)
  2349. return;
  2350. JUCE_CAMERA_LOG ("appResumed, opening camera...");
  2351. deviceOrientationChangeListener.setEnabled (true);
  2352. startBackgroundThread();
  2353. if (scopedCameraDevice != nullptr)
  2354. scopedCameraDevice->open();
  2355. }
  2356. void startBackgroundThread()
  2357. {
  2358. auto* env = getEnv();
  2359. handlerThread = GlobalRef (LocalRef<jobject> (env->NewObject (AndroidHandlerThread,
  2360. AndroidHandlerThread.constructor,
  2361. javaString ("JuceCameraDeviceBackgroundThread").get())));
  2362. // handler thread has to be started before its looper can be fetched
  2363. env->CallVoidMethod (handlerThread, AndroidHandlerThread.start);
  2364. handler = GlobalRef (LocalRef<jobject> (env->NewObject (AndroidHandler,
  2365. AndroidHandler.constructorWithLooper,
  2366. env->CallObjectMethod (handlerThread, AndroidHandlerThread.getLooper))));
  2367. }
  2368. void stopBackgroundThread()
  2369. {
  2370. auto* env = getEnv();
  2371. env->CallBooleanMethod (handlerThread, AndroidHandlerThread.quitSafely);
  2372. env->CallVoidMethod (handlerThread, AndroidHandlerThread.join);
  2373. auto exception = LocalRef<jobject> (env->ExceptionOccurred());
  2374. if (exception != 0)
  2375. env->ExceptionClear();
  2376. handlerThread.clear();
  2377. handler.clear();
  2378. }
  2379. static bool checkHasExceptionOccurred()
  2380. {
  2381. auto* env = getEnv();
  2382. auto exception = LocalRef<jobject> (env->ExceptionOccurred());
  2383. if (exception != 0)
  2384. {
  2385. env->ExceptionClear();
  2386. return true;
  2387. }
  2388. return false;
  2389. }
  2390. #endif
  2391. friend struct CameraDevice::ViewerComponent;
  2392. friend void juce_cameraDeviceStateClosed (int64);
  2393. friend void juce_cameraDeviceStateDisconnected (int64);
  2394. friend void juce_cameraDeviceStateError (int64, int);
  2395. friend void juce_cameraDeviceStateOpened (int64, void*);
  2396. friend void juce_cameraCaptureSessionActive (int64, void*);
  2397. friend void juce_cameraCaptureSessionClosed (int64, void*);
  2398. friend void juce_cameraCaptureSessionConfigureFailed (int64, void*);
  2399. friend void juce_cameraCaptureSessionConfigured (int64, void*);
  2400. friend void juce_cameraCaptureSessionReady (int64, void*);
  2401. friend void juce_cameraCaptureSessionCaptureCompleted (int64, bool, void*, void*, void*);
  2402. friend void juce_cameraCaptureSessionCaptureFailed (int64, bool, void*, void*, void*);
  2403. friend void juce_cameraCaptureSessionCaptureProgressed (int64, bool, void*, void*, void*);
  2404. friend void juce_cameraCaptureSessionCaptureSequenceAborted (int64, bool, void*, int);
  2405. friend void juce_cameraCaptureSessionCaptureSequenceCompleted (int64, bool, void*, int, int64);
  2406. friend void juce_cameraCaptureSessionCaptureStarted (int64, bool, void*, void*, int64, int64);
  2407. friend void juce_deviceOrientationChanged (int64, int);
  2408. JUCE_DECLARE_NON_COPYABLE (Pimpl)
  2409. };
  2410. //==============================================================================
struct CameraDevice::ViewerComponent  : public Component,
                                        private ComponentMovementWatcher
{
    ViewerComponent (CameraDevice& device)  : ComponentMovementWatcher (this)
    {
       #if __ANDROID_API__ >= 21
        auto previewSize = device.pimpl->streamConfigurationMap.getDefaultPreviewSize();
        targetAspectRatio = previewSize.getWidth() / (float) previewSize.getHeight();

        if (isOrientationLandscape())
            setBounds (previewSize);
        else
            setBounds (0, 0, previewSize.getHeight(), previewSize.getWidth());

        addAndMakeVisible (viewerComponent);
        viewerComponent.setView (device.pimpl->previewDisplay.getNativeView());
       #else
        ignoreUnused (device);
       #endif
    }

private:
    AndroidViewComponent viewerComponent;
    float targetAspectRatio = 1.0f;

    void componentMovedOrResized (bool, bool) override
    {
        auto b = getLocalBounds();
        auto targetWidth  = b.getWidth();
        auto targetHeight = b.getHeight();

        if (isOrientationLandscape())
        {
            auto currentAspectRatio = b.getWidth() / (float) b.getHeight();

            if (currentAspectRatio > targetAspectRatio)
                targetWidth = static_cast<int> (targetWidth * targetAspectRatio / currentAspectRatio);
            else
                targetHeight = static_cast<int> (targetHeight * currentAspectRatio / targetAspectRatio);
        }
        else
        {
            auto currentAspectRatio = b.getHeight() / (float) b.getWidth();

            if (currentAspectRatio > targetAspectRatio)
                targetHeight = static_cast<int> (targetHeight * targetAspectRatio / currentAspectRatio);
            else
                targetWidth = static_cast<int> (targetWidth * currentAspectRatio / targetAspectRatio);
        }

        viewerComponent.setBounds (Rectangle<int> (0, 0, targetWidth, targetHeight).withCentre (b.getCentre()));
    }

    bool isOrientationLandscape() const
    {
        auto o = Desktop::getInstance().getCurrentOrientation();
        return o == Desktop::rotatedClockwise || o == Desktop::rotatedAntiClockwise;
    }

    void componentPeerChanged() override {}
    void componentVisibilityChanged() override {}

    JUCE_DECLARE_NON_COPYABLE (ViewerComponent)
};
String CameraDevice::getFileExtension()
{
    return ".mp4";
}
#if __ANDROID_API__ >= 21
//==============================================================================
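// Plain-function trampolines that forward CameraDevice.StateCallback events from
// the JNI layer to the ScopedCameraDevice instance identified by the 'host' pointer.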
void juce_cameraDeviceStateClosed (int64 host)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateClosed();
}

void juce_cameraDeviceStateDisconnected (int64 host)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateDisconnected();
}

void juce_cameraDeviceStateError (int64 host, int error)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateError (error);
}

void juce_cameraDeviceStateOpened (int64 host, void* camera)
{
    reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice*> (host)->cameraDeviceStateOpened ((jobject) camera);
}
//==============================================================================
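// JNI entry points for the Java-side CameraDeviceStateCallback inner class: each one
// records the JNI environment for this thread (setEnv) and forwards to the trampolines above.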
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateClosed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/))
{
    setEnv (env);
    juce_cameraDeviceStateClosed (host);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateDisconnected, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/))
{
    setEnv (env);
    juce_cameraDeviceStateDisconnected (host);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateError, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject /*camera*/, int error))
{
    setEnv (env);
    juce_cameraDeviceStateError (host, error);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraDeviceStateCallback), cameraDeviceStateOpened, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject camera))
{
    setEnv (env);
    juce_cameraDeviceStateOpened (host, camera);
}
//==============================================================================
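// Trampolines forwarding CameraCaptureSession.StateCallback events to the native
// CaptureSession identified by the 'host' pointer.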
void juce_cameraCaptureSessionActive (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionActive ((jobject) session);
}

void juce_cameraCaptureSessionClosed (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionClosed ((jobject) session);
}

void juce_cameraCaptureSessionConfigureFailed (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionConfigureFailed ((jobject) session);
}

void juce_cameraCaptureSessionConfigured (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionConfigured ((jobject) session);
}

void juce_cameraCaptureSessionReady (int64 host, void* session)
{
    auto* juceCaptureSession = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession*> (host);
    juceCaptureSession->cameraCaptureSessionReady ((jobject) session);
}
//==============================================================================
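// JNI entry points for the Java-side CameraCaptureSessionStateCallback inner class.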
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionActive, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionActive (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionClosed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionClosed (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionConfigureFailed, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionConfigureFailed (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionConfigured, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionConfigured (host, session);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionStateCallback), cameraCaptureSessionReady, void, (JNIEnv* env, jobject /*activity*/, jlong host, jobject session))
{
    setEnv (env);
    juce_cameraCaptureSessionReady (host, session);
}
//==============================================================================
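// Trampolines forwarding CameraCaptureSession.CaptureCallback events to the
// StillPictureTaker identified by the 'host' pointer; 'isPreview' is passed through unchanged.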
void juce_cameraCaptureSessionCaptureCompleted (int64 host, bool isPreview, void* session, void* request, void* result)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureCompleted (isPreview, (jobject) session, (jobject) request, (jobject) result);
}

void juce_cameraCaptureSessionCaptureFailed (int64 host, bool isPreview, void* session, void* request, void* failure)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureFailed (isPreview, (jobject) session, (jobject) request, (jobject) failure);
}

void juce_cameraCaptureSessionCaptureProgressed (int64 host, bool isPreview, void* session, void* request, void* partialResult)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureProgressed (isPreview, (jobject) session, (jobject) request, (jobject) partialResult);
}

void juce_cameraCaptureSessionCaptureSequenceAborted (int64 host, bool isPreview, void* session, int sequenceId)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureSequenceAborted (isPreview, (jobject) session, sequenceId);
}

void juce_cameraCaptureSessionCaptureSequenceCompleted (int64 host, bool isPreview, void* session, int sequenceId, int64 frameNumber)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureSequenceCompleted (isPreview, (jobject) session, sequenceId, frameNumber);
}

void juce_cameraCaptureSessionCaptureStarted (int64 host, bool isPreview, void* session, void* request, int64 timestamp, int64 frameNumber)
{
    auto* stillPictureTaker = reinterpret_cast<CameraDevice::Pimpl::ScopedCameraDevice::CaptureSession::StillPictureTaker*> (host);
    stillPictureTaker->cameraCaptureSessionCaptureStarted (isPreview, (jobject) session, (jobject) request, timestamp, frameNumber);
}
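
// JNI entry points for the Java-side CameraCaptureSessionCaptureCallback inner class.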
JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureCompleted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject result))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureCompleted (host, isPreview, session, request, result);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureFailed, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject failure))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureFailed (host, isPreview, session, request, failure);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureProgressed, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, jobject partialResult))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureProgressed (host, isPreview, session, request, partialResult);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureSequenceAborted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jint sequenceId))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureSequenceAborted (host, isPreview, session, (int) sequenceId);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureSequenceCompleted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jint sequenceId, jlong frameNumber))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureSequenceCompleted (host, isPreview, session, (int) sequenceId, frameNumber);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024CameraCaptureSessionCaptureCallback), cameraCaptureSessionCaptureStarted, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, bool isPreview, jobject session, jobject request, int64 timestamp, int64 frameNumber))
{
    setEnv (env);
    juce_cameraCaptureSessionCaptureStarted (host, isPreview, session, request, timestamp, frameNumber);
}
//==============================================================================
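// Forwards device orientation changes from the Java-side JuceOrientationEventListener
// to the native DeviceOrientationChangeListener.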
void juce_deviceOrientationChanged (int64 host, int orientation)
{
    auto* listener = reinterpret_cast<CameraDevice::Pimpl::DeviceOrientationChangeListener*> (host);
    listener->orientationChanged (orientation);
}

JUCE_JNI_CALLBACK (JUCE_JOIN_MACRO (JUCE_ANDROID_ACTIVITY_CLASSNAME, _00024JuceOrientationEventListener), deviceOrientationChanged, \
                   void, (JNIEnv* env, jobject /*activity*/, jlong host, jint orientation))
{
    setEnv (env);
    juce_deviceOrientationChanged (host, (int) orientation);
}
#endif