The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================
  This file is part of the JUCE library - "Jules' Utility Class Extensions"
  Copyright 2004-11 by Raw Material Software Ltd.
  ------------------------------------------------------------------------------
  JUCE can be redistributed and/or modified under the terms of the GNU General
  Public License (Version 2), as published by the Free Software Foundation.
  A copy of the license is included in the JUCE distribution, or can be found
  online at www.gnu.org/licenses.
  JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  ------------------------------------------------------------------------------
  To release a closed-source product which uses JUCE, commercial licenses are
  available: visit www.rawmaterialsoftware.com/juce for more information.
  ==============================================================================
*/

#if ! JUCE_QUICKTIME
 #error "On the Mac, cameras use Quicktime, so if you turn on JUCE_USE_CAMERA, you also need to enable JUCE_QUICKTIME"
#endif
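
// Declared here and defined elsewhere in the OSX-specific JUCE code: converts a
// CIImage frame of the given size into a juce::Image.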
extern Image juce_createImageFromCIImage (CIImage* im, int w, int h);

//==============================================================================
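// Shared implementation object behind CameraDevice on OSX: it owns the QTKit
// capture session, the video/audio inputs and outputs, and the Objective-C
// delegate that forwards captured frames back into C++.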
class QTCameraDeviceInternal
{
public:
    QTCameraDeviceInternal (CameraDevice* owner, const int index)
        : input (nil),
          audioDevice (nil),
          audioInput (nil),
          session (nil),
          fileOutput (nil),
          imageOutput (nil),
          firstPresentationTime (0),
          averageTimeOffset (0)
    {
        JUCE_AUTORELEASEPOOL

        session = [[QTCaptureSession alloc] init];

        NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
        device = (QTCaptureDevice*) [devs objectAtIndex: index];

        static DelegateClass cls;
        callbackDelegate = [cls.createInstance() init];
        DelegateClass::setOwner (callbackDelegate, this);

        NSError* err = nil;
        [device retain];
        [device open: &err];

        if (err == nil)
        {
            input = [[QTCaptureDeviceInput alloc] initWithDevice: device];
            [session addInput: input error: &err];

            if (err == nil)
            {
                resetFile();

                imageOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
                [imageOutput setDelegate: callbackDelegate];

                if (err == nil)
                {
                    [session startRunning];
                    return;
                }
            }
        }

        openingError = nsStringToJuce ([err description]);
        DBG (openingError);
    }

    ~QTCameraDeviceInternal()
    {
        [session stopRunning];
        [session removeOutput: imageOutput];

        [session release];
        [input release];
        [device release];
        [audioDevice release];
        [audioInput release];
        [fileOutput release];
        [imageOutput release];
        [callbackDelegate release];
    }
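
    // Tears down any existing movie-file output and audio input, then creates a
    // fresh, idle QTCaptureMovieFileOutput ready for the next recording.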
    void resetFile()
    {
        [fileOutput recordToOutputFileURL: nil];
        [session removeOutput: fileOutput];
        [fileOutput release];
        fileOutput = [[QTCaptureMovieFileOutput alloc] init];

        [session removeInput: audioInput];
        [audioInput release];
        audioInput = nil;
        [audioDevice release];
        audioDevice = nil;

        [fileOutput setDelegate: callbackDelegate];
    }
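
    // Opens the system's default sound-input device and adds it to the capture
    // session, so that recordings get an audio track as well as video.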
    void addDefaultAudioInput()
    {
        NSError* err = nil;
        audioDevice = [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeSound];

        if ([audioDevice open: &err])
            [audioDevice retain];
        else
            audioDevice = nil;

        if (audioDevice != nil)
        {
            audioInput = [[QTCaptureDeviceInput alloc] initWithDevice: audioDevice];
            [session addInput: audioInput error: &err];
        }
    }
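
    // The decompressed-video output is only attached to the session while at
    // least one listener is registered, so no frame callbacks are generated
    // when nobody is consuming them.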
    void addListener (CameraDevice::Listener* listenerToAdd)
    {
        const ScopedLock sl (listenerLock);

        if (listeners.size() == 0)
            [session addOutput: imageOutput error: nil];

        listeners.addIfNotAlreadyThere (listenerToAdd);
    }

    void removeListener (CameraDevice::Listener* listenerToRemove)
    {
        const ScopedLock sl (listenerLock);
        listeners.removeFirstMatchingValue (listenerToRemove);

        if (listeners.size() == 0)
            [session removeOutput: imageOutput];
    }
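
    // Invoked from the QTKit capture callback (see DelegateClass below): converts
    // the incoming CIImage into a juce::Image and hands it to every listener.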
    void callListeners (CIImage* frame, int w, int h)
    {
        Image image (juce_createImageFromCIImage (frame, w, h));

        const ScopedLock sl (listenerLock);

        for (int i = listeners.size(); --i >= 0;)
        {
            CameraDevice::Listener* const l = listeners[i];

            if (l != nullptr)
                l->imageReceived (image);
        }
    }
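
    // Called for each recorded sample buffer: remembers the presentation time of
    // the first buffer and keeps a slowly-adapting average of the offset between
    // the buffer timestamps and juce::Time, used by getTimeOfFirstRecordedFrame().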
    void captureBuffer (QTSampleBuffer* sampleBuffer)
    {
        const Time now (Time::getCurrentTime());

       #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
       #else
        NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: nsStringLiteral ("hostTime")];
       #endif

        int64 presentationTime = (hosttime != nil)
                ? ((int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000 + 40)
                : (([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale + 50);

        const int64 timeDiff = now.toMilliseconds() - presentationTime;

        if (firstPresentationTime == 0)
        {
            firstPresentationTime = presentationTime;
            averageTimeOffset = timeDiff;
        }
        else
        {
            averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
        }
    }
    QTCaptureDevice* device;
    QTCaptureDeviceInput* input;
    QTCaptureDevice* audioDevice;
    QTCaptureDeviceInput* audioInput;
    QTCaptureSession* session;
    QTCaptureMovieFileOutput* fileOutput;
    QTCaptureDecompressedVideoOutput* imageOutput;
    NSObject* callbackDelegate;
    String openingError;
    int64 firstPresentationTime;
    int64 averageTimeOffset;

    Array<CameraDevice::Listener*> listeners;
    CriticalSection listenerLock;

private:
    //==============================================================================
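    // Objective-C delegate class registered at runtime via ObjCClass: it receives
    // the QTKit capture callbacks and routes them back to the owning
    // QTCameraDeviceInternal through its "owner" instance variable.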
    struct DelegateClass   : public ObjCClass <NSObject>
    {
        DelegateClass()  : ObjCClass <NSObject> ("JUCEAppDelegate_")
        {
            addIvar<QTCameraDeviceInternal*> ("owner");

            addMethod (@selector (captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:),
                       didOutputVideoFrame, "v@:@", @encode (CVImageBufferRef), "@@");
            addMethod (@selector (captureOutput:didOutputSampleBuffer:fromConnection:),
                       didOutputSampleBuffer, "v@:@@@");

            registerClass();
        }
        static void setOwner (id self, QTCameraDeviceInternal* owner)   { object_setInstanceVariable (self, "owner", owner); }
        static QTCameraDeviceInternal* getOwner (id self)               { return getIvar<QTCameraDeviceInternal*> (self, "owner"); }

    private:
        static void didOutputVideoFrame (id self, SEL, QTCaptureOutput* captureOutput,
                                         CVImageBufferRef videoFrame, QTSampleBuffer* sampleBuffer,
                                         QTCaptureConnection* connection)
        {
            QTCameraDeviceInternal* const internal = getOwner (self);

            if (internal->listeners.size() > 0)
            {
                JUCE_AUTORELEASEPOOL

                internal->callListeners ([CIImage imageWithCVImageBuffer: videoFrame],
                                         CVPixelBufferGetWidth (videoFrame),
                                         CVPixelBufferGetHeight (videoFrame));
            }
        }

        static void didOutputSampleBuffer (id self, SEL, QTCaptureFileOutput*, QTSampleBuffer* sampleBuffer, QTCaptureConnection*)
        {
            getOwner (self)->captureBuffer (sampleBuffer);
        }
    };
};

//==============================================================================
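// Wraps a QTCaptureView in an NSViewComponent so that the live camera preview
// can be embedded in a JUCE component hierarchy.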
class QTCaptureViewerComp : public NSViewComponent
{
public:
    QTCaptureViewerComp (CameraDevice* const cameraDevice, QTCameraDeviceInternal* const internal)
    {
        JUCE_AUTORELEASEPOOL
        captureView = [[QTCaptureView alloc] init];
        [captureView setCaptureSession: internal->session];

        setSize (640, 480); // xxx need to somehow get the movie size - how?
        setView (captureView);
    }

    ~QTCaptureViewerComp()
    {
        setView (0);
        [captureView setCaptureSession: nil];
        [captureView release];
    }

    QTCaptureView* captureView;
};

//==============================================================================
CameraDevice::CameraDevice (const String& name_, int index)
    : name (name_)
{
    isRecording = false;
    internal = new QTCameraDeviceInternal (this, index);
}

CameraDevice::~CameraDevice()
{
    stopRecording();
    delete static_cast <QTCameraDeviceInternal*> (internal);
    internal = nullptr;
}

Component* CameraDevice::createViewerComponent()
{
    return new QTCaptureViewerComp (this, static_cast <QTCameraDeviceInternal*> (internal));
}

String CameraDevice::getFileExtension()
{
    return ".mov";
}
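
// Starts a new .mov recording: resets the file output, adds a default audio
// input, then assigns H.264/AAC compression presets to each connection based on
// the requested quality before pointing the output at the target file.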
void CameraDevice::startRecordingToFile (const File& file, int quality)
{
    stopRecording();

    QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);
    d->firstPresentationTime = 0;
    file.deleteFile();

    // In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
    // out wrong, so we'll put some audio in there too..
    d->addDefaultAudioInput();

    [d->session addOutput: d->fileOutput error: nil];

    NSEnumerator* connectionEnumerator = [[d->fileOutput connections] objectEnumerator];

    for (;;)
    {
        QTCaptureConnection* connection = [connectionEnumerator nextObject];
        if (connection == nil)
            break;

        QTCompressionOptions* options = nil;
        NSString* mediaType = [connection mediaType];

        if ([mediaType isEqualToString: QTMediaTypeVideo])
            options = [QTCompressionOptions compressionOptionsWithIdentifier:
                            quality >= 1 ? nsStringLiteral ("QTCompressionOptionsSD480SizeH264Video")
                                         : nsStringLiteral ("QTCompressionOptions240SizeH264Video")];
        else if ([mediaType isEqualToString: QTMediaTypeSound])
            options = [QTCompressionOptions compressionOptionsWithIdentifier: nsStringLiteral ("QTCompressionOptionsHighQualityAACAudio")];

        [d->fileOutput setCompressionOptions: options forConnection: connection];
    }

    [d->fileOutput recordToOutputFileURL: [NSURL fileURLWithPath: juceStringToNS (file.getFullPathName())]];
    isRecording = true;
}
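
// Reconstructs the wall-clock time of the first recorded frame from its
// presentation timestamp plus the smoothed clock offset measured in captureBuffer().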
Time CameraDevice::getTimeOfFirstRecordedFrame() const
{
    QTCameraDeviceInternal* const d = static_cast <QTCameraDeviceInternal*> (internal);

    if (d->firstPresentationTime != 0)
        return Time (d->firstPresentationTime + d->averageTimeOffset);

    return Time();
}

void CameraDevice::stopRecording()
{
    if (isRecording)
    {
        static_cast <QTCameraDeviceInternal*> (internal)->resetFile();
        isRecording = false;
    }
}

void CameraDevice::addListener (Listener* listenerToAdd)
{
    if (listenerToAdd != nullptr)
        static_cast <QTCameraDeviceInternal*> (internal)->addListener (listenerToAdd);
}

void CameraDevice::removeListener (Listener* listenerToRemove)
{
    if (listenerToRemove != nullptr)
        static_cast <QTCameraDeviceInternal*> (internal)->removeListener (listenerToRemove);
}

//==============================================================================
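// Lists the available video-input devices by their localised display names; the
// index into this list is the index expected by openDevice().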
StringArray CameraDevice::getAvailableDevices()
{
    JUCE_AUTORELEASEPOOL

    StringArray results;
    NSArray* devs = [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];

    for (int i = 0; i < (int) [devs count]; ++i)
    {
        QTCaptureDevice* dev = (QTCaptureDevice*) [devs objectAtIndex: i];
        results.add (nsStringToJuce ([dev localizedDisplayName]));
    }

    return results;
}
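
// Note that the width/height constraints are not used by this QTKit implementation;
// the device is simply opened with its default format.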
CameraDevice* CameraDevice::openDevice (int index,
                                        int minWidth, int minHeight,
                                        int maxWidth, int maxHeight)
{
    ScopedPointer <CameraDevice> d (new CameraDevice (getAvailableDevices() [index], index));

    if (static_cast <QTCameraDeviceInternal*> (d->internal)->openingError.isEmpty())
        return d.release();

    return nullptr;
}