/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"

#if HAVE_MMX && HAVE_YASM
#include "x86/dsputil_mmx.h"
#endif

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */
#define FF_PIXEL_PLANAR  0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED  1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */
#if HAVE_MMX && HAVE_YASM
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line         ff_deinterlace_line_mmx
#else
#define deinterlace_line_inplace deinterlace_line_inplace_c
#define deinterlace_line         deinterlace_line_c
#endif

typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
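
/* Illustrative example: for PIX_FMT_YUV420P the descriptor has
 * log2_chroma_w == log2_chroma_h == 1, so this returns *h_shift = 1 and
 * *v_shift = 1, i.e. the chroma planes are half the luma width and height. */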
#if FF_API_GET_PIX_FMT_NAME
const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    return av_get_pix_fmt_name(pix_fmt);
}
#endif

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_check_size(width, height, 0, NULL)) < 0)
        return ret;

    if ((ret = av_image_fill_linesizes(picture->linesize, pix_fmt, width)) < 0)
        return ret;

    return av_image_fill_pointers(picture->data, pix_fmt, height, ptr, picture->linesize);
}

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    int i, j, nb_planes = 0, linesizes[4];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return AVERROR(EINVAL);

    for (i = 0; i < desc->nb_components; i++)
        nb_planes = FFMAX(desc->comp[i].plane, nb_planes);
    nb_planes++;

    av_image_fill_linesizes(linesizes, pix_fmt, width);
    for (i = 0; i < nb_planes; i++) {
        int h, shift = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        const unsigned char *s = src->data[i];
        h = (height + (1 << shift) - 1) >> shift;

        for (j = 0; j < h; j++) {
            memcpy(dest, s, linesizes[i]);
            dest += linesizes[i];
            s += src->linesize[i];
        }
    }

    if (desc->flags & PIX_FMT_PAL)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if (av_image_check_size(width, height, 0, NULL))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
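
/* Illustrative usage sketch (example values, not part of the library code):
 * wrap a caller-owned buffer in an AVPicture.
 *
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480); // 640*480*3/2 = 460800
 *     uint8_t *buf = av_malloc(size);
 *     AVPicture pic;
 *     if (buf && avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480) >= 0) {
 *         // pic.data[0..2] / pic.linesize[0..2] now point into buf
 *     }
 *     av_free(buf);
 */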
int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;

    return loss;
}
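
/* Worked example (illustration only): with dst_pix_fmt = PIX_FMT_GRAY8 and
 * src_pix_fmt = PIX_FMT_RGB24 the depths match and neither format subsamples
 * chroma, so the result is FF_LOSS_COLORSPACE | FF_LOSS_CHROMA: gray output
 * cannot represent the RGB color space and drops all chroma information. */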
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}
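
/* Worked example (illustration only): for planar PIX_FMT_YUV420P, depth = 8
 * and log2_chroma_w = log2_chroma_h = 1, so the average is
 * 8 + ((2 * 8) >> 2) = 12 bits per pixel. */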
static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for (i = 0; i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for (;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
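
/* Illustrative call (example values only):
 *
 *     int loss;
 *     enum PixelFormat best = avcodec_find_best_pix_fmt(
 *         (1ULL << PIX_FMT_YUV420P) | (1ULL << PIX_FMT_RGB24),
 *         PIX_FMT_YUV420P, 0, &loss);
 *
 * Because the source format itself is in the mask, the first pass
 * (loss_mask == ~0) already finds a zero-loss match, so best comes back
 * as PIX_FMT_YUV420P and loss as 0. */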
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    av_image_copy(dst->data, dst->linesize, src->data,
                  src->linesize, pix_fmt, width, height);
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for (w = width; w >= 4; w -= 4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for (; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}
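
/* Arithmetic note (illustration only): each output sample above is the
 * rounded mean of a 2x2 input block, (a + b + c + d + 2) >> 2; e.g. the
 * block {10, 20, 30, 40} averages to (100 + 2) >> 2 = 25. */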
/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for (; height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for (w = width; w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for (; height > 0; height--) {
        for (w = width; w > 0; w--) {
            int tmp = 0;
            for (i = 0; i < 8; i++) {
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32) >> 6;
            src += 8 - 8 * src_wrap;
        }
        src += 8 * src_wrap - 8 * width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int ret;

    if ((ret = av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1)) < 0) {
        memset(picture, 0, sizeof(AVPicture));
        return ret;
    }

    return 0;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}
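
/* Illustrative usage sketch (example values only): allocate and release a
 * self-owned picture instead of wrapping an external buffer.
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_RGB24, 320, 240) < 0)
 *         return;                        // allocation failed
 *     // write pixels via pic.data[0] with stride pic.linesize[0]
 *     avpicture_free(&pic);
 */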
/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
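
/* Illustrative example (example values only): for a PIX_FMT_YUV420P source,
 * av_picture_crop(&dst, &src, PIX_FMT_YUV420P, 16, 16) offsets the luma
 * pointer by 16 rows plus 16 bytes and each chroma pointer by 8 rows plus
 * 8 bytes (both shifts are 1); no pixels are copied, only pointers moved. */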
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i], dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
#if FF_API_GET_ALPHA_INFO
/* NOTE: we scan all the pixels so the information is exact */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
#endif

#if !(HAVE_MMX && HAVE_YASM)
/* filter parameters: [-1 4 2 4 -1] // 8 */
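/* In scalar terms (added for illustration), each output sample below is
 *     dst[x] = clip((-lum_m4[x] + 4*lum_m3[x] + 2*lum_m2[x] + 4*lum_m1[x] - lum[x] + 4) >> 3)
 * i.e. the [-1 4 2 4 -1]/8 kernel with round-to-nearest ("+ 4") and clipping
 * through the ff_cropTbl lookup. */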
static void deinterlace_line_c(uint8_t *dst,
                               const uint8_t *lum_m4, const uint8_t *lum_m3,
                               const uint8_t *lum_m2, const uint8_t *lum_m1,
                               const uint8_t *lum,
                               int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
}

static void deinterlace_line_inplace_c(uint8_t *lum_m4, uint8_t *lum_m3,
                                       uint8_t *lum_m2, uint8_t *lum_m1,
                                       uint8_t *lum, int size)
{
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for (; size > 0; size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0] = lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
}
#endif

/* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        memcpy(dst, src_m1, width);
        dst += dst_wrap;
        deinterlace_line(dst, src_m2, src_m1, src_0, src_p1, src_p2, width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst, src_m1, width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst, src_m2, src_m1, src_0, src_0, src_0, width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = av_malloc(width);

    src_m1 = src1;
    memcpy(buf, src_m1, width);
    src_0 = &src_m1[src_wrap];
    src_p1 = &src_0[src_wrap];
    src_p2 = &src_p1[src_wrap];
    for (y = 0; y < (height - 2); y += 2) {
        deinterlace_line_inplace(buf, src_m1, src_0, src_p1, src_p2, width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2 * src_wrap;
        src_p2 += 2 * src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf, src_m1, src_0, src_0, src_0, width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUVJ420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUVJ422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for (i = 0; i < 3; i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUVJ420P:
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
            case PIX_FMT_YUVJ422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i], dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}