/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavcore/imgutils.h"
#include "libavcore/internal.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component contains all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component contains indexes for a palette */

#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
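
/* Example (illustrative): for PIX_FMT_YUV420P the chroma planes are
 * subsampled by 2 in each direction, so both shifts come back as 1:
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // h_shift == 1, v_shift == 1
 */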

const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if ((unsigned)pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}
#endif

void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf (buf, buf_size,
                  "name " " nb_components" " depth"
                 );
    else {
        const AVPixFmtDescriptor *pixdesc = &av_pix_fmt_descriptors[pix_fmt];
        snprintf (buf, buf_size,
                  "%-11s %5d %7d",
                  pixdesc->name,
                  pixdesc->nb_components,
                  av_get_bits_per_pixel(pixdesc)
                 );
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt)
{
    return ff_set_systematic_pal2(pal, pix_fmt);
}

int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    return av_image_fill_linesizes(picture->linesize, pix_fmt, width);
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
#endif

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
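
/* Example (illustrative sketch, error handling trimmed): wrap a caller-owned
 * buffer in an AVPicture so the planes can be addressed individually:
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = size < 0 ? NULL : av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480);
 */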

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_BGR444BE ||
            pix_fmt == PIX_FMT_BGR444LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE ||
            pix_fmt == PIX_FMT_RGB444BE ||
            pix_fmt == PIX_FMT_RGB444LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i=0; i<data_planes; i++) {
        if (i == 1) {
            w = (- ((-width) >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = -((-height) >> desc->log2_chroma_h);
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for(j=0; j<h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}
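
/* Example (illustrative sketch): flatten a picture into one contiguous
 * buffer, e.g. before writing it out as a raw frame:
 *
 *     int size = avpicture_get_size(pix_fmt, width, height);
 *     uint8_t *buf = size < 0 ? NULL : av_malloc(size);
 *     if (buf && avpicture_layout(&pic, pix_fmt, width, height, buf, size) >= 0) {
 *         // buf now holds all planes packed back to back
 *     }
 */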

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if(av_image_check_size(width, height, 0, NULL))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;

    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
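
/* Example (illustrative): converting RGB24 to GRAY8 throws away the chroma
 * and changes the color space, which shows up in the returned flags:
 *
 *     int loss = avcodec_get_pix_fmt_loss(PIX_FMT_GRAY8, PIX_FMT_RGB24, 0);
 *     // loss == FF_LOSS_COLORSPACE | FF_LOSS_CHROMA
 */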

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}
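
/* Worked example (illustrative): for the planar PIX_FMT_YUV420P case the
 * depth is 8 and both chroma shifts are 1, so the average is
 * 8 + ((2 * 8) >> (1 + 1)) = 8 + 4 = 12 bits per pixel. */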

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for(i = 0;i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
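
/* Example (illustrative sketch): choose between YUV420P and RGB24 for an
 * RGBA source where the alpha channel matters:
 *
 *     int loss;
 *     int64_t mask = (1LL << PIX_FMT_YUV420P) | (1LL << PIX_FMT_RGB24);
 *     enum PixelFormat best = avcodec_find_best_pix_fmt(mask, PIX_FMT_RGBA, 1, &loss);
 *     // best == PIX_FMT_RGB24, loss == FF_LOSS_ALPHA
 */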

#if LIBAVCODEC_VERSION_MAJOR < 53
void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    av_image_copy_plane(dst, dst_wrap, src, src_wrap, width, height);
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    return av_image_get_linesize(pix_fmt, width, plane);
}

void av_picture_data_copy(uint8_t *dst_data[4], int dst_linesize[4],
                          uint8_t *src_data[4], int src_linesize[4],
                          enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst_data, dst_linesize, src_data, src_linesize,
                  pix_fmt, width, height);
}
#endif

void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if(size<0)
        goto fail;

    ptr = av_malloc(size);
    if (!ptr)
        goto fail;

    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if(picture->data[1] && !picture->data[2])
        ff_set_systematic_pal2((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}
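
/* Example (illustrative sketch): allocate a picture buffer and release it
 * again with avpicture_free():
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_YUV420P, 640, 480) == 0) {
 *         // ... use pic.data[] / pic.linesize[] ...
 *         avpicture_free(&pic);
 *     }
 */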

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
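
/* Example (illustrative sketch): drop 16 rows from the top and 8 columns from
 * the left of a YUV420P picture; the cropped picture only re-points at the
 * source's buffers, nothing is copied:
 *
 *     AVPicture cropped;
 *     if (av_picture_crop(&cropped, &src_pic, PIX_FMT_YUV420P, 16, 8) < 0) {
 *         // not a planar YUV format
 *     }
 */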

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
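
/* Example (illustrative sketch): add 16 black rows above and below a YUV420P
 * picture; width/height describe the already-padded destination, which must
 * be allocated beforehand:
 *
 *     int black[3] = { 16, 128, 128 };  // black in YUV
 *     av_picture_pad(&dst_pic, &src_pic, src_h + 32, src_w, PIX_FMT_YUV420P,
 *                    16, 16, 0, 0, black);
 */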

/* NOTE: we scan all the pixels to get exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
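
/* Example (illustrative): check whether a PAL8 picture actually makes use of
 * transparency before deciding how to convert it:
 *
 *     int flags = img_get_alpha_info(&pic, PIX_FMT_PAL8, width, height);
 *     if (flags & FF_ALPHA_TRANSP) {
 *         // at least one fully transparent palette entry is referenced
 *     }
 */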

#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);

    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
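
/* Example (illustrative sketch): deinterlace a YUV420P frame in place;
 * width and height must both be multiples of 4:
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, width, height) < 0) {
 *         // unsupported pixel format or bad dimensions
 *     }
 */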