/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2013 - Raw Material Software Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/

#if ! JUCE_QUICKTIME
 #error "On the Mac, cameras use Quicktime, so if you turn on JUCE_USE_CAMERA, you also need to enable JUCE_QUICKTIME"
#endif
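
// juce_createImageFromCIImage is implemented elsewhere in the mac-specific JUCE
// sources; it converts a CoreImage frame into a juce::Image so that frames can
// be handed to CameraDevice listeners.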
extern Image juce_createImageFromCIImage (CIImage* im, int w, int h);

//==============================================================================
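// Wraps a QTKit capture session for a single camera. It owns the video (and,
// while recording, audio) capture inputs, a movie-file output for recording to
// disk, and a decompressed-video output whose frames are forwarded to any
// registered CameraDevice::Listener objects.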
class QTCameraDeviceInternal
{
public:
    QTCameraDeviceInternal (CameraDevice* owner, const int index)
        : input (nil),
          audioDevice (nil),
          audioInput (nil),
          session (nil),
          fileOutput (nil),
          imageOutput (nil),
          firstPresentationTime (0),
          averageTimeOffset (0)
    {
        JUCE_AUTORELEASEPOOL
        {
            session = [[QTCaptureSession alloc] init];

            NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
            device = (QTCaptureDevice*) [devs objectAtIndex: index];

            static DelegateClass cls;
            callbackDelegate = [cls.createInstance() init];
            DelegateClass::setOwner (callbackDelegate, this);

            NSError* err = nil;
            [device retain];
            [device open: &err];

            if (err == nil)
            {
                input = [[QTCaptureDeviceInput alloc] initWithDevice: device];
                audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: device];

                [session addInput: input error: &err];

                if (err == nil)
                {
                    resetFile();

                    imageOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
                    [imageOutput setDelegate: callbackDelegate];

                    if (err == nil)
                    {
                        [session startRunning];
                        return;
                    }
                }
            }

            openingError = nsStringToJuce ([err description]);
            DBG (openingError);
        }
    }

    ~QTCameraDeviceInternal()
    {
        [session stopRunning];
        [session removeOutput: imageOutput];

        [session release];
        [input release];
        [device release];
        [audioDevice release];
        [audioInput release];
        [fileOutput release];
        [imageOutput release];
        [callbackDelegate release];
    }
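
    // Tears down the current movie-file output and any audio input added for
    // recording, then creates a fresh QTCaptureMovieFileOutput so the session is
    // ready for the next recording.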
    void resetFile()
    {
        [fileOutput recordToOutputFileURL: nil];
        [session removeOutput: fileOutput];
        [fileOutput release];
        fileOutput = [[QTCaptureMovieFileOutput alloc] init];

        [session removeInput: audioInput];
        [audioInput release];
        audioInput = nil;
        [audioDevice release];
        audioDevice = nil;

        [fileOutput setDelegate: callbackDelegate];
    }
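
    // Opens the default sound-input device and adds it to the session. This is
    // used when recording: see the note in CameraDevice::startRecordingToFile()
    // about video-only movies playing back at the wrong speed on some QT versions.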
    void addDefaultAudioInput()
    {
        NSError* err = nil;
        audioDevice = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeSound];

        if ([audioDevice open: &err])
            [audioDevice retain];
        else
            audioDevice = nil;

        if (audioDevice != nil)
        {
            audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: audioDevice];
            [session addInput: audioInput error: &err];
        }
    }
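
    // Listener management: the decompressed-video output is only attached to the
    // session while at least one listener is registered, so frames aren't decoded
    // when nobody is listening.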
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            [session addOutput: imageOutput error: nil];

        listeners.addIfNotAlreadyThere (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeFirstMatchingValue (listenerToRemove);

        if (listeners.size() == 0)
            [session removeOutput: imageOutput];
    }
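
    // Called by the capture delegate for each decoded video frame: converts it to
    // a juce::Image once, then passes it to every registered listener.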
    void callListeners (CIImage* frame, int w, int h)
    {
        Image image (juce_createImageFromCIImage (frame, w, h));

        const ScopedLock sl (listenerLock);

        for (int i = listeners.size(); --i >= 0;)
        {
            CameraDevice::Listener* const l = listeners[i];

            if (l != nullptr)
                l->imageReceived (image);
        }
    }
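
    // Called for each sample buffer written while recording. It converts the
    // buffer's host time (or, failing that, its presentation time) to milliseconds,
    // remembers the first presentation time, and keeps a slowly-adapting weighted
    // average (120/128 old value, 8/128 new value) of the offset between that clock
    // and Time::getCurrentTime(). getTimeOfFirstRecordedFrame() adds this offset to
    // the first presentation time; the +40 / +50 constants appear to be small fixed
    // latency fudges.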
    void captureBuffer (QTSampleBuffer* sampleBuffer)
    {
        const Time now (Time::getCurrentTime());

       #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
       #else
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: nsStringLiteral ("hostTime")];
       #endif

        int64 presentationTime = (hosttime != nil)
                ? ((int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000 + 40)
                : (([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale + 50);

        const int64 timeDiff = now.toMilliseconds() - presentationTime;

        if (firstPresentationTime == 0)
        {
            firstPresentationTime = presentationTime;
            averageTimeOffset = timeDiff;
        }
        else
        {
            averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
        }
    }

    QTCaptureDevice* device;
    QTCaptureDeviceInput* input;
    QTCaptureDevice* audioDevice;
    QTCaptureDeviceInput* audioInput;
    QTCaptureSession* session;
    QTCaptureMovieFileOutput* fileOutput;
    QTCaptureDecompressedVideoOutput* imageOutput;
    NSObject* callbackDelegate;
    String openingError;

    int64 firstPresentationTime;
    int64 averageTimeOffset;

    Array<CameraDevice::Listener*> listeners;
    CriticalSection listenerLock;

private:
    //==============================================================================
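    // An Objective-C delegate class registered at runtime via ObjCClass. It forwards
    // decoded video frames from the QTCaptureDecompressedVideoOutput, and sample
    // buffers from the QTCaptureMovieFileOutput, back to the owning
    // QTCameraDeviceInternal (stored in the "owner" ivar).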
    struct DelegateClass  : public ObjCClass<NSObject>
    {
        DelegateClass()  : ObjCClass<NSObject> ("JUCEAppDelegate_")
        {
            addIvar<QTCameraDeviceInternal*> ("owner");

            addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
                       didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");
            addMethod (@selector (captureOutput:didOutputSampleBuffer:fromConnection:),
                       didOutputSampleBuffer, "v@:@@@");

            registerClass();
        }

        static void setOwner (id self, QTCameraDeviceInternal* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static QTCameraDeviceInternal* getOwner (id self)               { return getIvar<QTCameraDeviceInternal*> (self, "owner"); }

    private:
        static void didOutputVideoFrame (id self, SEL, QTCaptureOutput* captureOutput,
                                         CVImageBufferRef videoFrame, QTSampleBuffer* sampleBuffer,
                                         QTCaptureConnection* connection)
        {
            QTCameraDeviceInternal* const internal = getOwner (self);

            if (internal->listeners.size() > 0)
            {
                JUCE_AUTORELEASEPOOL
                {
                    internal->callListeners ([CIImage imageWithCVImageBuffer: videoFrame],
                                             CVPixelBufferGetWidth (videoFrame),
                                             CVPixelBufferGetHeight (videoFrame));
                }
            }
        }

        static void didOutputSampleBuffer (id self, SEL, QTCaptureFileOutput*, QTSampleBuffer* sampleBuffer, QTCaptureConnection*)
        {
            getOwner (self)->captureBuffer (sampleBuffer);
        }
    };
};

//==============================================================================
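// A component that embeds a QTCaptureView showing a live preview of the
// capture session.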
class QTCaptureViewerComp : public NSViewComponent
{
public:
    QTCaptureViewerComp (CameraDevice* const cameraDevice, QTCameraDeviceInternal* const internal)
    {
        JUCE_AUTORELEASEPOOL
        {
            captureView = [[QTCaptureView alloc] init];
            [captureView setCaptureSession: internal->session];

            setSize (640, 480); // xxx need to somehow get the movie size - how?
            setView (captureView);
        }
    }

    ~QTCaptureViewerComp()
    {
        setView (0);
        [captureView setCaptureSession: nil];
        [captureView release];
    }

    QTCaptureView* captureView;
};

//==============================================================================
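/*  Usage sketch (illustrative only, not part of this file). Assuming the default
    arguments of CameraDevice::openDevice() and a hypothetical FrameGrabber
    listener, opening the first camera and receiving frames looks roughly like:

        struct FrameGrabber  : public CameraDevice::Listener
        {
            void imageReceived (const Image& image) override
            {
                // called from the capture callback, not the message thread
            }
        };

        ScopedPointer<CameraDevice> camera (CameraDevice::openDevice (0));
        FrameGrabber grabber;

        if (camera != nullptr)
            camera->addListener (&grabber); // call removeListener() before grabber goes away
*/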
CameraDevice::CameraDevice (const String& name_, int index)
    : name (name_)
{
    isRecording = false;
    internal = new QTCameraDeviceInternal (this, index);
}

CameraDevice::~CameraDevice()
{
    stopRecording();
    delete static_cast <QTCameraDeviceInternal*> (internal);
    internal = nullptr;
}

Component* CameraDevice::createViewerComponent()
{
    return new QTCaptureViewerComp (this, static_cast <QTCameraDeviceInternal*> (internal));
}

String CameraDevice::getFileExtension()
{
    return ".mov";
}
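
// Starts writing compressed video and audio to the given file: an audio input is
// added (see the comment below), per-connection compression presets are chosen
// (quality >= 1 selects the SD480-size H.264 preset, otherwise the 240-size one,
// with high-quality AAC for sound), and the movie-file output is pointed at the
// target URL.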
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
    stopRecording();

    QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);
    d->firstPresentationTime = 0;
    file.deleteFile();

    // In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
    // out wrong, so we'll put some audio in there too..
    d->addDefaultAudioInput();

    [d->session addOutput: d->fileOutput error: nil];

    NSEnumerator* connectionEnumerator = [[d->fileOutput connections] objectEnumerator];

    for (;;)
    {
        QTCaptureConnection* connection = [connectionEnumerator nextObject];
        if (connection == nil)
            break;

        QTCompressionOptions* options = nil;
        NSString* mediaType = [connection mediaType];

        if ([mediaType isEqualToString: QTMediaTypeVideo])
            options = [QTCompressionOptions compressionOptionsWithIdentifier:
                           quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
                                        : nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
        else if ([mediaType isEqualToString: QTMediaTypeSound])
            options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];

        [d->fileOutput setCompressionOptions: options forConnection: connection];
    }

    [d->fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
    isRecording = true;
}

Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
    QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);

    if (d->firstPresentationTime != 0)
        return Time (d->firstPresentationTime + d->averageTimeOffset);

    return Time();
}

void CameraDevice::stopRecording()
{
    if (isRecording)
    {
        static_cast <QTCameraDeviceInternal*> (internal)->resetFile();
        isRecording = false;
    }
}

void CameraDevice::addListener (Listener* listenerToAdd)
{
    if (listenerToAdd != nullptr)
        static_cast <QTCameraDeviceInternal*> (internal)->addListener (listenerToAdd);
}

void CameraDevice::removeListener (Listener* listenerToRemove)
{
    if (listenerToRemove != nullptr)
        static_cast <QTCameraDeviceInternal*> (internal)->removeListener (listenerToRemove);
}

//==============================================================================
StringArray CameraDevice::getAvailableDevices()
{
    JUCE_AUTORELEASEPOOL
    {
        StringArray results;
        NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];

        for (int i = 0; i < (int) [devs count]; ++i)
        {
            QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
            results.add (nsStringToJuce ([dev localizedDisplayName]));
        }

        return results;
    }
}
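
// Note: this QTKit implementation ignores the min/max width and height hints;
// the device is simply opened with its default format.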
CameraDevice* CameraDevice::openDevice (int index,
                                        int minWidth, int minHeight,
                                        int maxWidth, int maxHeight)
{
    ScopedPointer <CameraDevice> d (new CameraDevice (getAvailableDevices() [index], index));

    if (static_cast <QTCameraDeviceInternal*> (d->internal)->openingError.isEmpty())
        return d.release();

    return nullptr;
}