/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write an 'ffimg' program to test all the image-related code
 * - move the whole API to a slice-based system
 * - integrate deinterlacing, postprocessing and scaling into the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR   0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED   1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE  2 /**< one component containing indices for a palette */

#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
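
/*
 * Usage sketch (not part of the original file, purely illustrative):
 * PIX_FMT_YUV420P has log2_chroma_w == log2_chroma_h == 1 in its descriptor,
 * so both shifts come back as 1, i.e. the chroma planes are half-size in each
 * direction.
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // h_shift == 1, v_shift == 1
 */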

#if FF_API_GET_PIX_FMT_NAME
const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    return av_get_pix_fmt_name(pix_fmt);
}
#endif

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}

void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    av_get_pix_fmt_string(buf, buf_size, pix_fmt);
}
#endif

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt)
{
    return ff_set_systematic_pal2(pal, pix_fmt);
}

int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    return av_image_fill_linesizes(picture->linesize, pix_fmt, width);
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
#endif

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}
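
/*
 * Usage sketch (illustrative; the buffer and dimensions below are
 * hypothetical): wrap a caller-owned buffer of at least
 * avpicture_get_size() bytes in an AVPicture.
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480);
 *     // pic.data[] / pic.linesize[] now describe the planes inside buf
 */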

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    int i, j, nb_planes = 0, linesizes[4];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return AVERROR(EINVAL);

    for (i = 0; i < desc->nb_components; i++)
        nb_planes = FFMAX(desc->comp[i].plane, nb_planes);
    nb_planes++;

    av_image_fill_linesizes(linesizes, pix_fmt, width);
    for (i = 0; i < nb_planes; i++) {
        int h, shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        const unsigned char *s = src->data[i];
        h = (height + (1 << shift) - 1) >> shift;

        for (j = 0; j < h; j++) {
            memcpy(dest, s, linesizes[i]);
            dest += linesizes[i];
            s += src->linesize[i];
        }
    }

    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return size;
    }

    if (desc->flags & PIX_FMT_PAL)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (av_image_check_size(width, height, 0, NULL))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
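
/*
 * Usage sketch (illustrative; 'pic', 'pix_fmt', 'w', 'h' and 'f' are
 * hypothetical caller-side names): serialize a filled AVPicture into one
 * flat buffer, e.g. before writing a raw frame to a file.
 *
 *     int size = avpicture_get_size(pix_fmt, w, h);
 *     uint8_t *out = av_malloc(size);
 *     if (out && avpicture_layout(&pic, pix_fmt, w, h, out, size) == size)
 *         fwrite(out, 1, size, f);
 *     av_free(out);
 */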

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
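
/*
 * Example (illustrative; the values follow from the checks above and the
 * pix_fmt_info table): converting RGB24 to YUV420P subsamples chroma and
 * changes color space, so the reported loss is
 * FF_LOSS_RESOLUTION | FF_LOSS_COLORSPACE.
 *
 *     int loss = avcodec_get_pix_fmt_loss(PIX_FMT_YUV420P, PIX_FMT_RGB24, 0);
 *     // loss == (FF_LOSS_RESOLUTION | FF_LOSS_COLORSPACE)
 */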

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}
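
/*
 * Worked example for the planar branch above: PIX_FMT_YUV420P has depth 8
 * and log2_chroma_w == log2_chroma_h == 1, so
 *     bits = 8 + ((2 * 8) >> (1 + 1)) = 8 + 4 = 12
 * i.e. 12 bits per pixel on average, as expected for 4:2:0.
 */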

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for (i = 0; i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for (;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
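
/*
 * Usage sketch (illustrative; the candidate mask is an arbitrary example):
 * pick the best destination among YUV420P and RGB24 for a YUV444P source.
 * With the loss order above, RGB24 is rejected for its color-space loss and
 * YUV420P is chosen once FF_LOSS_RESOLUTION is tolerated.
 *
 *     int loss;
 *     int64_t mask = (1LL << PIX_FMT_YUV420P) | (1LL << PIX_FMT_RGB24);
 *     enum PixelFormat best =
 *         avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV444P, 0, &loss);
 *     // best == PIX_FMT_YUV420P, loss == FF_LOSS_RESOLUTION
 */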

#if LIBAVCODEC_VERSION_MAJOR < 53
void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    av_image_copy_plane(dst, dst_wrap, src, src_wrap, width, height);
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    return av_image_get_linesize(pix_fmt, width, plane);
}

void av_picture_data_copy(uint8_t *dst_data[4], int dst_linesize[4],
                          uint8_t *src_data[4], int src_linesize[4],
                          enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst_data, dst_linesize, src_data, src_linesize,
                  pix_fmt, width, height);
}
#endif

void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for (w = width; w >= 4; w -= 4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for (; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for (w = width; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for (; height > 0; height--) {
        for (w = width; w > 0; w--) {
            int tmp = 0;
            for (i = 0; i < 8; i++) {
                tmp += src[0] + src[1] + src[2] + src[3] +
                       src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8 * src_wrap;
        }
        src += 8 * src_wrap - 8 * width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}
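
/*
 * Usage sketch (illustrative; dimensions are hypothetical): allocate a
 * picture buffer owned by the AVPicture itself and release it when done.
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_YUV420P, 320, 240) < 0) {
 *         // allocation failed, pic was zeroed
 *     } else {
 *         // ... use pic.data[] / pic.linesize[] ...
 *         avpicture_free(&pic);
 *     }
 */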

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
            ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    if (is_yuv_planar(&pix_fmt_info[pix_fmt])) {
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
        dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
        dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
    } else {
        if (top_band % (1 << y_shift) || left_band % (1 << x_shift))
            return -1;
        if (left_band) //FIXME add support for this too
            return -1;
        dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    }

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
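
/*
 * Usage sketch (illustrative; 'src' and the band sizes are hypothetical):
 * remove 16 lines from the top and 32 pixels from the left of a YUV420P
 * picture. Note that dst only aliases src's planes (no copy is made), so
 * src must stay valid for as long as dst is used.
 *
 *     AVPicture cropped;
 *     if (av_picture_crop(&cropped, &src, PIX_FMT_YUV420P, 16, 32) < 0) {
 *         // unsupported format or band offsets
 *     }
 */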

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
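
/*
 * Usage sketch (illustrative; sizes and names are hypothetical): add an
 * 8-pixel border on every side of a 320x240 YUV420P source. 'height' and
 * 'width' are the padded output dimensions, and dst must already be
 * allocated at that size. The per-plane fill values {16, 128, 128} are
 * black in limited-range YUV.
 *
 *     int black[3] = { 16, 128, 128 };
 *     av_picture_pad(&dst, &src, 240 + 16, 320 + 16, PIX_FMT_YUV420P,
 *                    8, 8, 8, 8, black);
 */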

/* NOTE: we scan all the pixels so the result is exact */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so assume every kind of alpha may be present */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}

#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0  = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t *)av_malloc(width);
    if (!buf)
        return; /* allocation failed: leave the field untouched */
    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0  = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0  = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for (i = 0; i < 3; i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
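
/*
 * Usage sketch (illustrative; 'pic', 'w' and 'h' are hypothetical):
 * deinterlace a YUV420P frame in place. Width and height must be multiples
 * of 4, and only the formats checked above are accepted.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, w, h) < 0) {
 *         // unsupported format or dimensions
 *     }
 */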