You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

981 lines
31KB

  1. /*
  2. * Linux video grab interface
  3. * Copyright (c) 2000,2001 Fabrice Bellard.
  4. *
  5. * This library is free software; you can redistribute it and/or
  6. * modify it under the terms of the GNU Lesser General Public
  7. * License as published by the Free Software Foundation; either
  8. * version 2 of the License, or (at your option) any later version.
  9. *
  10. * This library is distributed in the hope that it will be useful,
  11. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  13. * Lesser General Public License for more details.
  14. *
  15. * You should have received a copy of the GNU Lesser General Public
  16. * License along with this library; if not, write to the Free Software
  17. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  18. */
  19. #include "avformat.h"
  20. #include <linux/videodev.h>
  21. #include <unistd.h>
  22. #include <fcntl.h>
  23. #include <sys/ioctl.h>
  24. #include <sys/mman.h>
  25. #include <sys/time.h>
  26. #include <time.h>
/* Private state for the plain v4l grabber (one instance per stream). */
typedef struct {
    int fd;               /* file descriptor of the open v4l device */
    int frame_format;     /* see VIDEO_PALETTE_xxx */
    int use_mmap;         /* non-zero when mmap-based capture is active */
    int width, height;    /* picture size in pixels */
    int frame_rate;       /* in units of FRAME_RATE_BASE */
    INT64 time_frame;     /* time (in us, av_gettime clock) of the next frame */
    int frame_size;       /* size in bytes of one raw frame */
} VideoData;
/* device node opened by both grabbers */
const char *v4l_device = "/dev/video";

/* XXX: move all that to the context -- this state is shared by both
   grabbers in this file, so only one grab stream can be active at a time */
static struct video_capability video_cap;     /* device capabilities (VIDIOCGCAP) */
static UINT8 *video_buf;                      /* mmap'ed capture buffer */
static struct video_mbuf gb_buffers;          /* mmap buffer layout (VIDIOCGMBUF) */
static struct video_mmap gb_buf;              /* current mmap capture request */
static struct video_audio audio, audio_saved; /* current / saved audio state */
static int gb_frame = 0;                      /* index of the frame being grabbed */
/* Open the v4l grab device, negotiate a capture palette and set up either
 * mmap-based or read()-based grabbing.  ap must supply width, height and
 * frame_rate.  Returns 0 on success, -1 on bad parameters, -ENOMEM if the
 * stream cannot be allocated, -EIO on any device failure. */
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    /* picture size and frame rate are mandatory */
    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;
    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    /* map the requested pixel format to a v4l palette; -1 means
       "no preference - probe the common palettes below" */
    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio; the previous state is saved and restored on close */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    /* VIDIOCGMBUF failing means no mmap support -> fall back to read() */
    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);
        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format: the desired palette
           first, then YUV420P, YUV422 and RGB24 in that order */
        pict.palette = desired_palette;
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
            pict.palette=VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette=VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0) {
                    pict.palette=VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                    if (ret < 0)
                        goto fail1;
                }
            }
        }

        s->frame_format = pict.palette;

        /* start continuous capture */
        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = av_gettime();
        s->use_mmap = 0;
    } else {
        video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
        if ((unsigned char*)-1 == video_buf) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = av_gettime();

        /* start to grab the first frame */
        gb_buf.frame = gb_frame % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = desired_palette;

        /* same palette probing order as the read-based path; EAGAIN from
           VIDIOCMCAPTURE means the format is OK but no video signal */
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
            gb_buf.format = VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try YUV422 */
                gb_buf.format = VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                if (ret < 0 && errno != EAGAIN) {
                    /* try RGB24 */
                    gb_buf.format = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                }
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
                /* NOTE: fail1 is also jumped to from the read-based path above */
            fail1:
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

    /* raw frame size and ffmpeg pixel format for the chosen palette */
    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }

    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    av_free(st);
    return -EIO;
}
  201. static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
  202. {
  203. UINT8 *ptr;
  204. /* Setup to capture the next frame */
  205. gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
  206. if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
  207. if (errno == EAGAIN)
  208. fprintf(stderr,"Cannot Sync\n");
  209. else
  210. perror("VIDIOCMCAPTURE");
  211. return -EIO;
  212. }
  213. while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
  214. (errno == EAGAIN || errno == EINTR));
  215. ptr = video_buf + gb_buffers.offsets[gb_frame];
  216. memcpy(buf, ptr, s->frame_size);
  217. /* This is now the grabbing frame */
  218. gb_frame = gb_buf.frame;
  219. return s->frame_size;
  220. }
  221. static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
  222. {
  223. VideoData *s = s1->priv_data;
  224. INT64 curtime, delay;
  225. struct timespec ts;
  226. int first;
  227. INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
  228. int dropped = 0;
  229. /* Calculate the time of the next frame */
  230. s->time_frame += per_frame;
  231. /* wait based on the frame rate */
  232. for(first = 1;; first = 0) {
  233. curtime = av_gettime();
  234. delay = s->time_frame - curtime;
  235. if (delay <= 0) {
  236. if (delay < -per_frame) {
  237. /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
  238. dropped = 1;
  239. s->time_frame += per_frame;
  240. }
  241. break;
  242. }
  243. ts.tv_sec = delay / 1000000;
  244. ts.tv_nsec = (delay % 1000000) * 1000;
  245. nanosleep(&ts, NULL);
  246. }
  247. if (av_new_packet(pkt, s->frame_size) < 0)
  248. return -EIO;
  249. if (dropped)
  250. pkt->flags |= PKT_FLAG_DROPPED_FRAME;
  251. /* read one frame */
  252. if (s->use_mmap) {
  253. return v4l_mm_read_picture(s, pkt->data);
  254. } else {
  255. if (read(s->fd, pkt->data, pkt->size) != pkt->size)
  256. return -EIO;
  257. return s->frame_size;
  258. }
  259. }
  260. static int grab_read_close(AVFormatContext *s1)
  261. {
  262. VideoData *s = s1->priv_data;
  263. if (s->use_mmap)
  264. munmap(video_buf, gb_buffers.size);
  265. /* restore audio settings */
  266. ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);
  267. close(s->fd);
  268. return 0;
  269. }
/* Input format descriptor for the plain v4l grabber.  AVFMT_NOFILE
 * because input comes from a device node, not from a demuxed file. */
AVInputFormat video_grab_device_format = {
    "video_grab_device",
    "video grab",
    sizeof(VideoData),
    NULL,              /* NOTE(review): presumably the probe slot; confirm field order against AVInputFormat */
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    .flags = AVFMT_NOFILE,
};
  280. /*
  281. * Done below so we can register the aiw grabber
  282. * /
  283. int video_grab_init(void)
  284. {
  285. av_register_input_format(&video_grab_device_format);
  286. return 0;
  287. }
  288. */
/* Private state for the All-In-Wonder (km driver) grabber. */
typedef struct {
    int fd;               /* file descriptor of the open v4l device */
    int frame_format;     /* see VIDEO_PALETTE_xxx */
    int width, height;    /* output picture size in pixels */
    int frame_rate;       /* in units of FRAME_RATE_BASE */
    INT64 time_frame;     /* time (in us, av_gettime clock) of the next frame */
    int frame_size;       /* size in bytes of one output frame */
    int deint;            /* non-zero: deinterlace (height == maxheight*2) */
    int halfw;            /* non-zero: half width (width == maxwidth/2) */
    UINT8 *src_mem;       /* scratch buffer for one raw input line */
    UINT8 *lum_m4_mem;    /* luminance history line for the deinterlacer */
} AIWVideoData;
  301. static int aiw_grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
  302. {
  303. AIWVideoData *s = s1->priv_data;
  304. AVStream *st;
  305. int width, height;
  306. int video_fd, frame_size;
  307. int ret, frame_rate;
  308. int desired_palette;
  309. if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
  310. return -1;
  311. width = ap->width;
  312. height = ap->height;
  313. frame_rate = ap->frame_rate;
  314. st = av_new_stream(s1, 0);
  315. if (!st)
  316. return -ENOMEM;
  317. s->width = width;
  318. s->height = height;
  319. s->frame_rate = frame_rate;
  320. video_fd = open(v4l_device, O_RDONLY | O_NONBLOCK);
  321. if (video_fd < 0) {
  322. perror(v4l_device);
  323. goto fail;
  324. }
  325. if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
  326. perror("VIDIOCGCAP");
  327. goto fail;
  328. }
  329. if (!(video_cap.type & VID_TYPE_CAPTURE)) {
  330. fprintf(stderr, "Fatal: grab device does not handle capture\n");
  331. goto fail;
  332. }
  333. desired_palette = -1;
  334. if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
  335. desired_palette = VIDEO_PALETTE_YUV420P;
  336. } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
  337. desired_palette = VIDEO_PALETTE_YUV422;
  338. } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
  339. desired_palette = VIDEO_PALETTE_RGB24;
  340. }
  341. /* unmute audio */
  342. ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
  343. if (ret < 0) {
  344. /* try to use read based access */
  345. struct video_window win;
  346. struct video_picture pict;
  347. int val;
  348. win.x = 0;
  349. win.y = 0;
  350. win.width = width;
  351. win.height = height;
  352. win.chromakey = -1;
  353. win.flags = 0;
  354. ioctl(video_fd, VIDIOCSWIN, &win);
  355. ioctl(video_fd, VIDIOCGPICT, &pict);
  356. #if 0
  357. printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
  358. pict.colour,
  359. pict.hue,
  360. pict.brightness,
  361. pict.contrast,
  362. pict.whiteness);
  363. #endif
  364. /* try to choose a suitable video format */
  365. pict.palette=VIDEO_PALETTE_YUV422;
  366. ret = ioctl(video_fd, VIDIOCSPICT, &pict);
  367. if (ret < 0) {
  368. fprintf(stderr,"Could Not Find YUY2 capture window.\n");
  369. goto fail;
  370. }
  371. if ((width == video_cap.maxwidth && height == video_cap.maxheight) ||
  372. (width == video_cap.maxwidth && height == video_cap.maxheight*2) ||
  373. (width == video_cap.maxwidth/2 && height == video_cap.maxheight)) {
  374. s->deint=0;
  375. s->halfw=0;
  376. if (height == video_cap.maxheight*2) s->deint=1;
  377. if (width == video_cap.maxwidth/2) s->halfw=1;
  378. } else {
  379. fprintf(stderr,"\nIncorrect Grab Size Supplied - Supported Sizes Are:\n");
  380. fprintf(stderr," %dx%d %dx%d %dx%d\n\n",
  381. video_cap.maxwidth,video_cap.maxheight,
  382. video_cap.maxwidth,video_cap.maxheight*2,
  383. video_cap.maxwidth/2,video_cap.maxheight);
  384. goto fail;
  385. }
  386. s->frame_format = pict.palette;
  387. val = 1;
  388. ioctl(video_fd, VIDIOCCAPTURE, &val);
  389. s->time_frame = av_gettime();
  390. } else {
  391. fprintf(stderr,"mmap-based capture will not work with this grab.\n");
  392. goto fail;
  393. }
  394. frame_size = (width * height * 3) / 2;
  395. st->codec.pix_fmt = PIX_FMT_YUV420P;
  396. s->fd = video_fd;
  397. s->frame_size = frame_size;
  398. st->codec.codec_type = CODEC_TYPE_VIDEO;
  399. st->codec.codec_id = CODEC_ID_RAWVIDEO;
  400. st->codec.width = width;
  401. st->codec.height = height;
  402. st->codec.frame_rate = frame_rate;
  403. if (s->halfw == 0) {
  404. s->src_mem = av_malloc(s->width*2);
  405. } else {
  406. s->src_mem = av_malloc(s->width*4);
  407. }
  408. if (!s->src_mem) goto fail;
  409. s->lum_m4_mem = av_malloc(s->width);
  410. if (!s->lum_m4_mem) {
  411. av_free(s->src_mem);
  412. goto fail;
  413. }
  414. return 0;
  415. fail:
  416. if (video_fd >= 0)
  417. close(video_fd);
  418. av_free(st);
  419. return -EIO;
  420. }
/* Uncomment to force the plain C implementations below: */
//#ifdef HAVE_MMX
//#undef HAVE_MMX
//#endif

#ifdef HAVE_MMX
#include "../libavcodec/i386/mmx.h"

/* MMX versions of the line converters.  Each macro consumes 32 bytes of
   packed YUYV at 'ptr'; see the plain C implementations in the #else
   branch for the exact semantics. */

/* 32 bytes YUYV -> 16 luma (lum), 8 Cb (cb), 8 Cr (cr). */
#define LINE_WITH_UV \
    movq_m2r(ptr[0],mm0); \
    movq_m2r(ptr[8],mm1); \
    movq_r2r(mm0, mm4); \
    punpcklbw_r2r(mm1,mm0); \
    punpckhbw_r2r(mm1,mm4); \
    movq_r2r(mm0,mm5); \
    punpcklbw_r2r(mm4,mm0); \
    punpckhbw_r2r(mm4,mm5); \
    movq_r2r(mm0,mm1); \
    punpcklbw_r2r(mm5,mm1); \
    movq_r2m(mm1,lum[0]); \
    movq_m2r(ptr[16],mm2); \
    movq_m2r(ptr[24],mm1); \
    movq_r2r(mm2,mm4); \
    punpcklbw_r2r(mm1,mm2); \
    punpckhbw_r2r(mm1,mm4); \
    movq_r2r(mm2,mm3); \
    punpcklbw_r2r(mm4,mm2); \
    punpckhbw_r2r(mm4,mm3); \
    movq_r2r(mm2,mm1); \
    punpcklbw_r2r(mm3,mm1); \
    movq_r2m(mm1,lum[8]); \
    punpckhdq_r2r(mm2,mm0); \
    punpckhdq_r2r(mm3,mm5); \
    movq_r2m(mm0,cb[0]); \
    movq_r2m(mm5,cr[0]);

/* 32 bytes YUYV -> 16 luma only; chroma of this line is discarded. */
#define LINE_NO_UV \
    movq_m2r(ptr[0],mm0);\
    movq_m2r(ptr[8],mm1);\
    movq_r2r(mm0, mm4);\
    punpcklbw_r2r(mm1,mm0); \
    punpckhbw_r2r(mm1,mm4);\
    movq_r2r(mm0,mm5);\
    punpcklbw_r2r(mm4,mm0);\
    punpckhbw_r2r(mm4,mm5);\
    movq_r2r(mm0,mm1);\
    punpcklbw_r2r(mm5,mm1);\
    movq_r2m(mm1,lum[0]);\
    movq_m2r(ptr[16],mm2);\
    movq_m2r(ptr[24],mm1);\
    movq_r2r(mm2,mm4);\
    punpcklbw_r2r(mm1,mm2);\
    punpckhbw_r2r(mm1,mm4);\
    movq_r2r(mm2,mm3);\
    punpcklbw_r2r(mm4,mm2);\
    punpckhbw_r2r(mm4,mm3);\
    movq_r2r(mm2,mm1);\
    punpcklbw_r2r(mm3,mm1);\
    movq_r2m(mm1,lum[8]);

/* Half-width variant: averages adjacent pixel pairs (rounded, uses the
   rounder constant preloaded in mm6 and zeros in mm7). */
#define LINE_WITHUV_AVG \
    movq_m2r(ptr[0], mm0);\
    movq_m2r(ptr[8], mm1);\
    movq_r2r(mm0, mm4);\
    punpcklbw_r2r(mm1,mm0);\
    punpckhbw_r2r(mm1,mm4);\
    movq_r2r(mm0,mm5);\
    punpcklbw_r2r(mm4,mm0);\
    punpckhbw_r2r(mm4,mm5);\
    movq_r2r(mm0,mm1);\
    movq_r2r(mm5,mm2);\
    punpcklbw_r2r(mm7,mm1);\
    punpcklbw_r2r(mm7,mm2);\
    paddw_r2r(mm6,mm1);\
    paddw_r2r(mm2,mm1);\
    psraw_i2r(1,mm1);\
    packuswb_r2r(mm7,mm1);\
    movd_r2m(mm1,lum[0]);\
    movq_m2r(ptr[16],mm2);\
    movq_m2r(ptr[24],mm1);\
    movq_r2r(mm2,mm4);\
    punpcklbw_r2r(mm1,mm2);\
    punpckhbw_r2r(mm1,mm4);\
    movq_r2r(mm2,mm3);\
    punpcklbw_r2r(mm4,mm2);\
    punpckhbw_r2r(mm4,mm3);\
    movq_r2r(mm2,mm1);\
    movq_r2r(mm3,mm4);\
    punpcklbw_r2r(mm7,mm1);\
    punpcklbw_r2r(mm7,mm4);\
    paddw_r2r(mm6,mm1);\
    paddw_r2r(mm4,mm1);\
    psraw_i2r(1,mm1);\
    packuswb_r2r(mm7,mm1);\
    movd_r2m(mm1,lum[4]);\
    punpckhbw_r2r(mm7,mm0);\
    punpckhbw_r2r(mm7,mm2);\
    paddw_r2r(mm6,mm0);\
    paddw_r2r(mm2,mm0);\
    psraw_i2r(1,mm0);\
    packuswb_r2r(mm7,mm0);\
    punpckhbw_r2r(mm7,mm5);\
    punpckhbw_r2r(mm7,mm3);\
    paddw_r2r(mm6,mm5);\
    paddw_r2r(mm3,mm5);\
    psraw_i2r(1,mm5);\
    packuswb_r2r(mm7,mm5);\
    movd_r2m(mm0,cb[0]);\
    movd_r2m(mm5,cr[0]);

/* Half-width, luma only (mask constant preloaded in mm5, weights/rounder
   in mm6). */
#define LINE_NOUV_AVG \
    movq_m2r(ptr[0],mm0);\
    movq_m2r(ptr[8],mm1);\
    pand_r2r(mm5,mm0);\
    pand_r2r(mm5,mm1);\
    pmaddwd_r2r(mm6,mm0);\
    pmaddwd_r2r(mm6,mm1);\
    packssdw_r2r(mm1,mm0);\
    paddw_r2r(mm6,mm0);\
    psraw_i2r(1,mm0);\
    movq_m2r(ptr[16],mm2);\
    movq_m2r(ptr[24],mm3);\
    pand_r2r(mm5,mm2);\
    pand_r2r(mm5,mm3);\
    pmaddwd_r2r(mm6,mm2);\
    pmaddwd_r2r(mm6,mm3);\
    packssdw_r2r(mm3,mm2);\
    paddw_r2r(mm6,mm2);\
    psraw_i2r(1,mm2);\
    packuswb_r2r(mm2,mm0);\
    movq_r2m(mm0,lum[0]);

/* Vertical deinterlace filter over five luma lines; see the C version
   below for the tap weights.  Rounder preloaded in mm6, zeros in mm7. */
#define DEINT_LINE_LUM(ptroff) \
    movd_m2r(lum_m4[(ptroff)],mm0);\
    movd_m2r(lum_m3[(ptroff)],mm1);\
    movd_m2r(lum_m2[(ptroff)],mm2);\
    movd_m2r(lum_m1[(ptroff)],mm3);\
    movd_m2r(lum[(ptroff)],mm4);\
    punpcklbw_r2r(mm7,mm0);\
    movd_r2m(mm2,lum_m4[(ptroff)]);\
    punpcklbw_r2r(mm7,mm1);\
    punpcklbw_r2r(mm7,mm2);\
    punpcklbw_r2r(mm7,mm3);\
    punpcklbw_r2r(mm7,mm4);\
    psllw_i2r(2,mm1);\
    psllw_i2r(1,mm2);\
    paddw_r2r(mm6,mm1);\
    psllw_i2r(2,mm3);\
    paddw_r2r(mm2,mm1);\
    paddw_r2r(mm4,mm0);\
    paddw_r2r(mm3,mm1);\
    psubusw_r2r(mm0,mm1);\
    psrlw_i2r(3,mm1);\
    packuswb_r2r(mm7,mm1);\
    movd_r2m(mm1,lum_m2[(ptroff)]);

#else
#include "../libavcodec/dsputil.h"

/* Plain C line converters.  Input is packed YUYV: even bytes are luma,
   byte 1,5,... is Cb and byte 3,7,... is Cr of each 4-byte pair. */

/* 32 bytes YUYV -> 16 luma, 8 Cb, 8 Cr. */
#define LINE_WITH_UV \
    lum[0]=ptr[0];lum[1]=ptr[2];lum[2]=ptr[4];lum[3]=ptr[6];\
    cb[0]=ptr[1];cb[1]=ptr[5];\
    cr[0]=ptr[3];cr[1]=ptr[7];\
    lum[4]=ptr[8];lum[5]=ptr[10];lum[6]=ptr[12];lum[7]=ptr[14];\
    cb[2]=ptr[9];cb[3]=ptr[13];\
    cr[2]=ptr[11];cr[3]=ptr[15];\
    lum[8]=ptr[16];lum[9]=ptr[18];lum[10]=ptr[20];lum[11]=ptr[22];\
    cb[4]=ptr[17];cb[5]=ptr[21];\
    cr[4]=ptr[19];cr[5]=ptr[23];\
    lum[12]=ptr[24];lum[13]=ptr[26];lum[14]=ptr[28];lum[15]=ptr[30];\
    cb[6]=ptr[25];cb[7]=ptr[29];\
    cr[6]=ptr[27];cr[7]=ptr[31];

/* 32 bytes YUYV -> 16 luma only. */
#define LINE_NO_UV \
    lum[0]=ptr[0];lum[1]=ptr[2];lum[2]=ptr[4];lum[3]=ptr[6];\
    lum[4]=ptr[8];lum[5]=ptr[10];lum[6]=ptr[12];lum[7]=ptr[14];\
    lum[8]=ptr[16];lum[9]=ptr[18];lum[10]=ptr[20];lum[11]=ptr[22];\
    lum[12]=ptr[24];lum[13]=ptr[26];lum[14]=ptr[28];lum[15]=ptr[30];

/* Half-width: average adjacent pixel pairs with rounding. */
#define LINE_WITHUV_AVG \
    sum=(ptr[0]+ptr[2]+1) >> 1;lum[0]=sum; \
    sum=(ptr[4]+ptr[6]+1) >> 1;lum[1]=sum; \
    sum=(ptr[1]+ptr[5]+1) >> 1;cb[0]=sum; \
    sum=(ptr[3]+ptr[7]+1) >> 1;cr[0]=sum; \
    sum=(ptr[8]+ptr[10]+1) >> 1;lum[2]=sum; \
    sum=(ptr[12]+ptr[14]+1) >> 1;lum[3]=sum; \
    sum=(ptr[9]+ptr[13]+1) >> 1;cb[1]=sum; \
    sum=(ptr[11]+ptr[15]+1) >> 1;cr[1]=sum; \
    sum=(ptr[16]+ptr[18]+1) >> 1;lum[4]=sum; \
    sum=(ptr[20]+ptr[22]+1) >> 1;lum[5]=sum; \
    sum=(ptr[17]+ptr[21]+1) >> 1;cb[2]=sum; \
    sum=(ptr[19]+ptr[23]+1) >> 1;cr[2]=sum; \
    sum=(ptr[24]+ptr[26]+1) >> 1;lum[6]=sum; \
    sum=(ptr[28]+ptr[30]+1) >> 1;lum[7]=sum; \
    sum=(ptr[25]+ptr[29]+1) >> 1;cb[3]=sum; \
    sum=(ptr[27]+ptr[31]+1) >> 1;cr[3]=sum;

/* Half-width, luma only. */
#define LINE_NOUV_AVG \
    sum=(ptr[0]+ptr[2]+1) >> 1;lum[0]=sum; \
    sum=(ptr[4]+ptr[6]+1) >> 1;lum[1]=sum; \
    sum=(ptr[8]+ptr[10]+1) >> 1;lum[2]=sum; \
    sum=(ptr[12]+ptr[14]+1) >> 1;lum[3]=sum; \
    sum=(ptr[16]+ptr[18]+1) >> 1;lum[4]=sum; \
    sum=(ptr[20]+ptr[22]+1) >> 1;lum[5]=sum; \
    sum=(ptr[24]+ptr[26]+1) >> 1;lum[6]=sum; \
    sum=(ptr[28]+ptr[30]+1) >> 1;lum[7]=sum;

/* Deinterlace 4 luma pixels at offset (ptroff) with the vertical filter
   (-1, 4, 2, 4, -1)/8 over lines lum_m4..lum; the result is written to
   lum_m2 and lum_m4 is updated to hold the old lum_m2 for the next pass.
   cm clamps the result to 0..255. */
#define DEINT_LINE_LUM(ptroff) \
    sum=(-lum_m4[(ptroff)]+(lum_m3[(ptroff)]<<2)+(lum_m2[(ptroff)]<<1)+(lum_m1[(ptroff)]<<2)-lum[(ptroff)]); \
    lum_m4[(ptroff)]=lum_m2[(ptroff)];\
    lum_m2[(ptroff)]=cm[(sum+4)>>3];\
    sum=(-lum_m4[(ptroff)+1]+(lum_m3[(ptroff)+1]<<2)+(lum_m2[(ptroff)+1]<<1)+(lum_m1[(ptroff)+1]<<2)-lum[(ptroff)+1]); \
    lum_m4[(ptroff)+1]=lum_m2[(ptroff)+1];\
    lum_m2[(ptroff)+1]=cm[(sum+4)>>3];\
    sum=(-lum_m4[(ptroff)+2]+(lum_m3[(ptroff)+2]<<2)+(lum_m2[(ptroff)+2]<<1)+(lum_m1[(ptroff)+2]<<2)-lum[(ptroff)+2]); \
    lum_m4[(ptroff)+2]=lum_m2[(ptroff)+2];\
    lum_m2[(ptroff)+2]=cm[(sum+4)>>3];\
    sum=(-lum_m4[(ptroff)+3]+(lum_m3[(ptroff)+3]<<2)+(lum_m2[(ptroff)+3]<<1)+(lum_m1[(ptroff)+3]<<2)-lum[(ptroff)+3]); \
    lum_m4[(ptroff)+3]=lum_m2[(ptroff)+3];\
    lum_m2[(ptroff)+3]=cm[(sum+4)>>3];
#endif
/* Grab one frame from the AIW device and convert it to planar YUV420P
 * in pkt->data.  Paces reads to the configured frame rate, then reads
 * raw YUYV lines with read() and converts them according to the mode
 * chosen in aiw_grab_read_header() (full size, half width, or
 * deinterlaced).  Returns the frame size in bytes, or -EIO. */
static int aiw_grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    AIWVideoData *s = s1->priv_data;
    INT64 curtime, delay;
    struct timespec ts;
    int first;
    INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
    int dropped = 0;

    /* Calculate the time of the next frame */
    s->time_frame += per_frame;

    /* wait based on the frame rate; if more than one frame late, flag a
       dropped frame and resync the clock */
    for(first = 1;; first = 0) {
        curtime = av_gettime();
        delay = s->time_frame - curtime;
        if (delay <= 0) {
            if (delay < -per_frame) {
                /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
                dropped = 1;
                s->time_frame += per_frame;
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    if (dropped)
        pkt->flags |= PKT_FLAG_DROPPED_FRAME;

    /* read fields */
    {
        UINT8 *ptr, *lum, *cb, *cr;
        int h;
#ifndef HAVE_MMX
        int sum;
#endif
        UINT8* src = s->src_mem;
        UINT8 *ptrend = &src[s->width*2];

        /* planar YUV420P layout inside the packet: Y, then Cb, then Cr */
        lum=&pkt->data[0];
        cb=&lum[s->width*s->height];
        cr=&cb[(s->width*s->height)/4];

        if (s->deint == 0 && s->halfw == 0) {
            /* full size: convert the first field line by line (chroma
               taken from every other line), then drop the second field.
               The blocking while/usleep loop waits for the non-blocking
               fd to deliver the first line of the field. */
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                read(s->fd,src,s->width*2);
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                    LINE_NO_UV
                }
                read(s->fd,src,s->width*2);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            read(s->fd,src,s->width*2);
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                LINE_NO_UV
            }
            /* drop second field */
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height - 1; h++) {
                read(s->fd,src,s->width*2);
            }
        } else if (s->halfw == 1) {
            /* half width: average each horizontal pixel pair while
               converting; input lines are s->width*4 bytes */
#ifdef HAVE_MMX
            mmx_t rounder;
            mmx_t masker;
            rounder.uw[0]=1;
            rounder.uw[1]=1;
            rounder.uw[2]=1;
            rounder.uw[3]=1;
            masker.ub[0]=0xff;
            masker.ub[1]=0;
            masker.ub[2]=0xff;
            masker.ub[3]=0;
            masker.ub[4]=0xff;
            masker.ub[5]=0;
            masker.ub[6]=0xff;
            masker.ub[7]=0;
            pxor_r2r(mm7,mm7);
            movq_m2r(rounder,mm6);
#endif
            while (read(s->fd,src,s->width*4) < 0) {
                usleep(100);
            }
            ptrend = &src[s->width*4];
            for (h = 0; h < s->height-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8, cb+=4, cr+=4) {
                    LINE_WITHUV_AVG
                }
                read(s->fd,src,s->width*4);
#ifdef HAVE_MMX
                movq_m2r(masker,mm5);
#endif
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8) {
                    LINE_NOUV_AVG
                }
                read(s->fd,src,s->width*4);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8, cb+=4, cr+=4) {
                LINE_WITHUV_AVG
            }
            read(s->fd,src,s->width*4);
#ifdef HAVE_MMX
            movq_m2r(masker,mm5);
#endif
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8) {
                LINE_NOUV_AVG
            }
            /* drop second field */
            while (read(s->fd,src,s->width*4) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height - 1; h++) {
                read(s->fd,src,s->width*4);
            }
        } else {
            /* double height: the first field fills the even output lines,
               the second field fills the odd ones through the vertical
               deinterlace filter (DEINT_LINE_LUM) */
            UINT8 *lum_m1, *lum_m2, *lum_m3, *lum_m4;
#ifdef HAVE_MMX
            mmx_t rounder;
            rounder.uw[0]=4;
            rounder.uw[1]=4;
            rounder.uw[2]=4;
            rounder.uw[3]=4;
            movq_m2r(rounder,mm6);
            pxor_r2r(mm7,mm7);
#else
            UINT8 *cm = cropTbl + MAX_NEG_CROP;
#endif

            /* read two fields and deinterlace them */
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < (s->height/2)-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line - will be filled in later */
                lum += s->width;
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                /* skip a luminance line - will be filled in later */
                lum += s->width;
                read(s->fd,src,s->width*2);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            /* skip a luminance line - will be filled in later */
            lum += s->width;
            read(s->fd,src,s->width*2);
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            /*
             *
             * SECOND FIELD
             *
             */
            lum=&pkt->data[s->width];
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(10);
            }
            /* First (and last) two lines not interlaced */
            for (h = 0; h < 2; h++) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                    LINE_NO_UV
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line */
                lum += s->width;
            }
            /* set up the filter's history pointers over the lines
               already written, seeding lum_m4 from the saved line */
            lum_m1=&lum[-s->width];
            lum_m2=&lum_m1[-s->width];
            lum_m3=&lum_m2[-s->width];
            memmove(s->lum_m4_mem,&lum_m3[-s->width],s->width);
            for (; h < (s->height/2)-1; h++) {
                lum_m4=s->lum_m4_mem;
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16,lum_m1+=16,lum_m2+=16,lum_m3+=16,lum_m4+=16) {
                    LINE_NO_UV
                    DEINT_LINE_LUM(0)
                    DEINT_LINE_LUM(4)
                    DEINT_LINE_LUM(8)
                    DEINT_LINE_LUM(12)
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line */
                lum += s->width;
                lum_m1 += s->width;
                lum_m2 += s->width;
                lum_m3 += s->width;
                //                lum_m4 += s->width;
            }
            /*
             * Do last line
             */
            lum_m4=s->lum_m4_mem;
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, lum_m1+=16, lum_m2+=16, lum_m3+=16, lum_m4+=16) {
                LINE_NO_UV
                DEINT_LINE_LUM(0)
                DEINT_LINE_LUM(4)
                DEINT_LINE_LUM(8)
                DEINT_LINE_LUM(12)
            }
        }
#ifdef HAVE_MMX
        emms();
#endif
    }
    return s->frame_size;
}
  858. static int aiw_grab_read_close(AVFormatContext *s1)
  859. {
  860. AIWVideoData *s = s1->priv_data;
  861. close(s->fd);
  862. av_free(s->lum_m4_mem);
  863. av_free(s->src_mem);
  864. return 0;
  865. }
/* Input format descriptor for the All-In-Wonder grabber.  AVFMT_NOFILE
 * because input comes from a device node, not from a demuxed file. */
AVInputFormat aiw_grab_device_format = {
    "aiw_grab_device",
    "All-In-Wonder (km read-based) video grab",
    sizeof(AIWVideoData),
    NULL,              /* NOTE(review): presumably the probe slot; confirm field order against AVInputFormat */
    aiw_grab_read_header,
    aiw_grab_read_packet,
    aiw_grab_read_close,
    .flags = AVFMT_NOFILE,
};
/* Register both grab input formats with libavformat.  Always returns 0. */
int video_grab_init(void)
{
    av_register_input_format(&video_grab_device_format);
    av_register_input_format(&aiw_grab_device_format);
    return 0;
}