/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavcore/imgutils.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}

const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}
#endif

void avcodec_pix_fmt_string(char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0) {
        snprintf(buf, buf_size,
                 "name " " nb_channels" " depth" " is_alpha");
    } else {
        PixFmtInfo info = pix_fmt_info[pix_fmt];
        char is_alpha_char = info.is_alpha ? 'y' : 'n';

        snprintf(buf, buf_size,
                 "%-11s %5d %9d %6c",
                 av_pix_fmt_descriptors[pix_fmt].name,
                 info.nb_channels,
                 info.depth,
                 is_alpha_char);
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}
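
/* Fill pal[] with the 256-entry systematic palette implied by a
 * pseudo-paletted format (RGB8, BGR8, RGB4_BYTE, BGR4_BYTE, GRAY8);
 * entries are packed as (r << 16) | (g << 8) | b. Returns 0 on success,
 * -1 for any other format. */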
int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < 256; i++) {
        int r, g, b;

        switch (pix_fmt) {
        case PIX_FMT_RGB8:
            r = (i >> 5) * 36;
            g = ((i >> 2) & 7) * 36;
            b = (i & 3) * 85;
            break;
        case PIX_FMT_BGR8:
            b = (i >> 6) * 85;
            g = ((i >> 3) & 7) * 36;
            r = (i & 7) * 36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r = (i >> 3) * 255;
            g = ((i >> 1) & 3) * 85;
            b = (i & 1) * 255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b = (i >> 3) * 255;
            g = ((i >> 1) & 3) * 85;
            r = (i & 1) * 255;
            break;
        case PIX_FMT_GRAY8:
            r = b = g = i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g << 8) + (r << 16);
    }

    return 0;
}

int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    return av_fill_image_linesizes(picture->linesize, pix_fmt, width);
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    return av_fill_image_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if (avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (av_fill_image_linesizes(picture->linesize, pix_fmt, width))
        return -1;

    return av_fill_image_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
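
/* Serialize the planes of src (described by pix_fmt/width/height) into the
 * flat buffer dest, tightly packed line by line; for paletted formats the
 * 1024-byte palette is stored after the image data at a 4-byte aligned
 * offset. Returns the number of bytes required, or -1 if dest_size is too
 * small. */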
int avpicture_layout(const AVPicture *src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char *s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_BGR444BE ||
            pix_fmt == PIX_FMT_BGR444LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE ||
            pix_fmt == PIX_FMT_RGB444BE ||
            pix_fmt == PIX_FMT_RGB444LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width / 2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width * pf->depth + 7) / 8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i = 0; i < data_planes; i++) {
        if (i == 1) {
            w = (-((-width) >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = -((-height) >> desc->log2_chroma_h);
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for (j = 0; j < h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;

    if (avcodec_check_dimensions(NULL, width, height))
        return -1;

    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;

    switch (pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }

    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
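
/* Rough average number of bits per pixel for pix_fmt; used by
 * avcodec_find_best_pix_fmt1() to prefer the smallest matching format. */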
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch (pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}
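
/* Among the formats enabled in pix_fmt_mask, pick one that can be converted
 * to from src_pix_fmt without any of the losses selected by loss_mask,
 * preferring the smallest average bits per pixel; returns PIX_FMT_NONE if no
 * format qualifies. */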
static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for (i = 0; i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for (;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}

void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if (!dst || !src)
        return;
    for (; height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch (pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
    case FF_PIXEL_PLANAR:
        if ((pix_fmt != PIX_FMT_NV12 && pix_fmt != PIX_FMT_NV21) &&
            (plane == 1 || plane == 2))
            width = -((-width) >> desc->log2_chroma_w);
        return (width * pf->depth + 7) >> 3;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }
    return -1;
}
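
/* Copy all planes of src into dst, which must already be set up with the
 * same pix_fmt and dimensions; for paletted formats the palette in data[1]
 * is copied as well. */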
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch (pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for (i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h = -((-height) >> desc->log2_chroma_h);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        memcpy(dst->data[1], src->data[1], 4 * 256);
        break;
    }
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for (w = width; w >= 4; w -= 4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for (; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for (w = width; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for (; height > 0; height--) {
        for (w = width; w > 0; w--) {
            int tmp = 0;
            for (i = 0; i < 8; i++) {
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8 * src_wrap;
        }
        src += 8 * src_wrap - 8 * width;
        dst += dst_wrap - width;
    }
}
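
/* Allocate one buffer large enough for the given format and dimensions and
 * point picture->data[]/linesize[] into it; when a palette plane is present
 * (data[1] set and no data[2]) a systematic palette is generated into it.
 * A minimal usage sketch (caller-chosen names, error handling shortened):
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_YUV420P, 640, 480) < 0)
 *         return -1;
 *     ... write to pic.data[] using pic.linesize[] ...
 *     avpicture_free(&pic);
 */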
int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if (size < 0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if (picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t *)picture->data[1], pix_fmt);

    return 0;
fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
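
/* Pad src into the larger dst picture (both must be YUV planar):
 * padtop/padbottom/padleft/padright give the border sizes in luma pixels,
 * color[] the fill value per plane, and width/height the padded (dst)
 * dimensions. */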
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}

/* NOTE: we scan all the pixels to have exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch (pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
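
/* MMX versions of the line deinterlacing kernels below (same
 * [-1 4 2 4 -1] / 8 filter), used when HAVE_MMX is set. */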
#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        movd_r2m(mm2,lum_m4[0]);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,dst[0]);
#endif

/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7, mm7);
        movq_m2r(ff_pw_4, mm6);
    }
    for (; size > 3; size -= 4) {
        DEINT_LINE_LUM
        lum_m4 += 4;
        lum_m3 += 4;
        lum_m2 += 4;
        lum_m1 += 4;
        lum += 4;
        dst += 4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7, mm7);
        movq_m2r(ff_pw_4, mm6);
    }
    for (; size > 3; size -= 4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4 += 4;
        lum_m3 += 4;
        lum_m2 += 4;
        lum_m1 += 4;
        lum += 4;
    }
#endif
}

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}
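
/* Same as deinterlace_bottom_field(), but operating in place on src1; a
 * temporary line buffer keeps the previous values needed by the filter. */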
static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t *)av_malloc(width);

    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for (i = 0; i < 3; i++) {
        if (i == 1) {
            switch (pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}