  1. /*
  2. * Misc image conversion routines
  3. * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file libavcodec/imgconvert.c
  23. * misc image conversion routines
  24. */
  25. /* TODO:
  26. * - write an 'ffimg' program to test all the image-related stuff
  27. * - move the whole API to a slice-based system
  28. * - integrate deinterlacing, postprocessing and scaling in the conversion process
  29. */
  30. #include "avcodec.h"
  31. #include "dsputil.h"
  32. #include "colorspace.h"
  33. #include "libavutil/pixdesc.h"
  34. #if HAVE_MMX
  35. #include "x86/mmx.h"
  36. #include "x86/dsputil_mmx.h"
  37. #endif
  38. #define xglue(x, y) x ## y
  39. #define glue(x, y) xglue(x, y)
  40. #define FF_COLOR_RGB 0 /**< RGB color space */
  41. #define FF_COLOR_GRAY 1 /**< gray color space */
  42. #define FF_COLOR_YUV 2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
  43. #define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */
  44. #define FF_PIXEL_PLANAR 0 /**< each channel has one component in AVPicture */
  45. #define FF_PIXEL_PACKED 1 /**< only one component containing all the channels */
  46. #define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */
  47. typedef struct PixFmtInfo {
  48. const char *name;
  49. uint8_t nb_channels; /**< number of channels (including alpha) */
  50. uint8_t color_type; /**< color type (see FF_COLOR_xxx constants) */
  51. uint8_t pixel_type; /**< pixel storage type (see FF_PIXEL_xxx constants) */
  52. uint8_t is_alpha : 1; /**< true if alpha can be specified */
  53. uint8_t is_hwaccel : 1; /**< true if this is an HW accelerated format */
  54. uint8_t x_chroma_shift; /**< X chroma subsampling factor is 2 ^ shift */
  55. uint8_t y_chroma_shift; /**< Y chroma subsampling factor is 2 ^ shift */
  56. uint8_t depth; /**< bit depth of the color components */
  57. } PixFmtInfo;
  58. /* this table gives more information about formats */
  59. static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
  60. /* YUV formats */
  61. [PIX_FMT_YUV420P] = {
  62. .name = "yuv420p",
  63. .nb_channels = 3,
  64. .color_type = FF_COLOR_YUV,
  65. .pixel_type = FF_PIXEL_PLANAR,
  66. .depth = 8,
  67. .x_chroma_shift = 1, .y_chroma_shift = 1,
  68. },
  69. [PIX_FMT_YUV422P] = {
  70. .name = "yuv422p",
  71. .nb_channels = 3,
  72. .color_type = FF_COLOR_YUV,
  73. .pixel_type = FF_PIXEL_PLANAR,
  74. .depth = 8,
  75. .x_chroma_shift = 1, .y_chroma_shift = 0,
  76. },
  77. [PIX_FMT_YUV444P] = {
  78. .name = "yuv444p",
  79. .nb_channels = 3,
  80. .color_type = FF_COLOR_YUV,
  81. .pixel_type = FF_PIXEL_PLANAR,
  82. .depth = 8,
  83. .x_chroma_shift = 0, .y_chroma_shift = 0,
  84. },
  85. [PIX_FMT_YUYV422] = {
  86. .name = "yuyv422",
  87. .nb_channels = 1,
  88. .color_type = FF_COLOR_YUV,
  89. .pixel_type = FF_PIXEL_PACKED,
  90. .depth = 8,
  91. .x_chroma_shift = 1, .y_chroma_shift = 0,
  92. },
  93. [PIX_FMT_UYVY422] = {
  94. .name = "uyvy422",
  95. .nb_channels = 1,
  96. .color_type = FF_COLOR_YUV,
  97. .pixel_type = FF_PIXEL_PACKED,
  98. .depth = 8,
  99. .x_chroma_shift = 1, .y_chroma_shift = 0,
  100. },
  101. [PIX_FMT_YUV410P] = {
  102. .name = "yuv410p",
  103. .nb_channels = 3,
  104. .color_type = FF_COLOR_YUV,
  105. .pixel_type = FF_PIXEL_PLANAR,
  106. .depth = 8,
  107. .x_chroma_shift = 2, .y_chroma_shift = 2,
  108. },
  109. [PIX_FMT_YUV411P] = {
  110. .name = "yuv411p",
  111. .nb_channels = 3,
  112. .color_type = FF_COLOR_YUV,
  113. .pixel_type = FF_PIXEL_PLANAR,
  114. .depth = 8,
  115. .x_chroma_shift = 2, .y_chroma_shift = 0,
  116. },
  117. [PIX_FMT_YUV440P] = {
  118. .name = "yuv440p",
  119. .nb_channels = 3,
  120. .color_type = FF_COLOR_YUV,
  121. .pixel_type = FF_PIXEL_PLANAR,
  122. .depth = 8,
  123. .x_chroma_shift = 0, .y_chroma_shift = 1,
  124. },
  125. [PIX_FMT_YUV420P16LE] = {
  126. .name = "yuv420p16le",
  127. .nb_channels = 3,
  128. .color_type = FF_COLOR_YUV,
  129. .pixel_type = FF_PIXEL_PLANAR,
  130. .depth = 16,
  131. .x_chroma_shift = 1, .y_chroma_shift = 1,
  132. },
  133. [PIX_FMT_YUV422P16LE] = {
  134. .name = "yuv422p16le",
  135. .nb_channels = 3,
  136. .color_type = FF_COLOR_YUV,
  137. .pixel_type = FF_PIXEL_PLANAR,
  138. .depth = 16,
  139. .x_chroma_shift = 1, .y_chroma_shift = 0,
  140. },
  141. [PIX_FMT_YUV444P16LE] = {
  142. .name = "yuv444p16le",
  143. .nb_channels = 3,
  144. .color_type = FF_COLOR_YUV,
  145. .pixel_type = FF_PIXEL_PLANAR,
  146. .depth = 16,
  147. .x_chroma_shift = 0, .y_chroma_shift = 0,
  148. },
  149. [PIX_FMT_YUV420P16BE] = {
  150. .name = "yuv420p16be",
  151. .nb_channels = 3,
  152. .color_type = FF_COLOR_YUV,
  153. .pixel_type = FF_PIXEL_PLANAR,
  154. .depth = 16,
  155. .x_chroma_shift = 1, .y_chroma_shift = 1,
  156. },
  157. [PIX_FMT_YUV422P16BE] = {
  158. .name = "yuv422p16be",
  159. .nb_channels = 3,
  160. .color_type = FF_COLOR_YUV,
  161. .pixel_type = FF_PIXEL_PLANAR,
  162. .depth = 16,
  163. .x_chroma_shift = 1, .y_chroma_shift = 0,
  164. },
  165. [PIX_FMT_YUV444P16BE] = {
  166. .name = "yuv444p16be",
  167. .nb_channels = 3,
  168. .color_type = FF_COLOR_YUV,
  169. .pixel_type = FF_PIXEL_PLANAR,
  170. .depth = 16,
  171. .x_chroma_shift = 0, .y_chroma_shift = 0,
  172. },
  173. /* YUV formats with alpha plane */
  174. [PIX_FMT_YUVA420P] = {
  175. .name = "yuva420p",
  176. .nb_channels = 4,
  177. .color_type = FF_COLOR_YUV,
  178. .pixel_type = FF_PIXEL_PLANAR,
  179. .depth = 8,
  180. .x_chroma_shift = 1, .y_chroma_shift = 1,
  181. },
  182. /* JPEG YUV */
  183. [PIX_FMT_YUVJ420P] = {
  184. .name = "yuvj420p",
  185. .nb_channels = 3,
  186. .color_type = FF_COLOR_YUV_JPEG,
  187. .pixel_type = FF_PIXEL_PLANAR,
  188. .depth = 8,
  189. .x_chroma_shift = 1, .y_chroma_shift = 1,
  190. },
  191. [PIX_FMT_YUVJ422P] = {
  192. .name = "yuvj422p",
  193. .nb_channels = 3,
  194. .color_type = FF_COLOR_YUV_JPEG,
  195. .pixel_type = FF_PIXEL_PLANAR,
  196. .depth = 8,
  197. .x_chroma_shift = 1, .y_chroma_shift = 0,
  198. },
  199. [PIX_FMT_YUVJ444P] = {
  200. .name = "yuvj444p",
  201. .nb_channels = 3,
  202. .color_type = FF_COLOR_YUV_JPEG,
  203. .pixel_type = FF_PIXEL_PLANAR,
  204. .depth = 8,
  205. .x_chroma_shift = 0, .y_chroma_shift = 0,
  206. },
  207. [PIX_FMT_YUVJ440P] = {
  208. .name = "yuvj440p",
  209. .nb_channels = 3,
  210. .color_type = FF_COLOR_YUV_JPEG,
  211. .pixel_type = FF_PIXEL_PLANAR,
  212. .depth = 8,
  213. .x_chroma_shift = 0, .y_chroma_shift = 1,
  214. },
  215. /* RGB formats */
  216. [PIX_FMT_RGB24] = {
  217. .name = "rgb24",
  218. .nb_channels = 3,
  219. .color_type = FF_COLOR_RGB,
  220. .pixel_type = FF_PIXEL_PACKED,
  221. .depth = 8,
  222. .x_chroma_shift = 0, .y_chroma_shift = 0,
  223. },
  224. [PIX_FMT_BGR24] = {
  225. .name = "bgr24",
  226. .nb_channels = 3,
  227. .color_type = FF_COLOR_RGB,
  228. .pixel_type = FF_PIXEL_PACKED,
  229. .depth = 8,
  230. .x_chroma_shift = 0, .y_chroma_shift = 0,
  231. },
  232. [PIX_FMT_ARGB] = {
  233. .name = "argb",
  234. .nb_channels = 4, .is_alpha = 1,
  235. .color_type = FF_COLOR_RGB,
  236. .pixel_type = FF_PIXEL_PACKED,
  237. .depth = 8,
  238. .x_chroma_shift = 0, .y_chroma_shift = 0,
  239. },
  240. [PIX_FMT_RGB48BE] = {
  241. .name = "rgb48be",
  242. .nb_channels = 3,
  243. .color_type = FF_COLOR_RGB,
  244. .pixel_type = FF_PIXEL_PACKED,
  245. .depth = 16,
  246. .x_chroma_shift = 0, .y_chroma_shift = 0,
  247. },
  248. [PIX_FMT_RGB48LE] = {
  249. .name = "rgb48le",
  250. .nb_channels = 3,
  251. .color_type = FF_COLOR_RGB,
  252. .pixel_type = FF_PIXEL_PACKED,
  253. .depth = 16,
  254. .x_chroma_shift = 0, .y_chroma_shift = 0,
  255. },
  256. [PIX_FMT_RGB565BE] = {
  257. .name = "rgb565be",
  258. .nb_channels = 3,
  259. .color_type = FF_COLOR_RGB,
  260. .pixel_type = FF_PIXEL_PACKED,
  261. .depth = 5,
  262. .x_chroma_shift = 0, .y_chroma_shift = 0,
  263. },
  264. [PIX_FMT_RGB565LE] = {
  265. .name = "rgb565le",
  266. .nb_channels = 3,
  267. .color_type = FF_COLOR_RGB,
  268. .pixel_type = FF_PIXEL_PACKED,
  269. .depth = 5,
  270. .x_chroma_shift = 0, .y_chroma_shift = 0,
  271. },
  272. [PIX_FMT_RGB555BE] = {
  273. .name = "rgb555be",
  274. .nb_channels = 3,
  275. .color_type = FF_COLOR_RGB,
  276. .pixel_type = FF_PIXEL_PACKED,
  277. .depth = 5,
  278. .x_chroma_shift = 0, .y_chroma_shift = 0,
  279. },
  280. [PIX_FMT_RGB555LE] = {
  281. .name = "rgb555le",
  282. .nb_channels = 3,
  283. .color_type = FF_COLOR_RGB,
  284. .pixel_type = FF_PIXEL_PACKED,
  285. .depth = 5,
  286. .x_chroma_shift = 0, .y_chroma_shift = 0,
  287. },
  288. /* gray / mono formats */
  289. [PIX_FMT_GRAY16BE] = {
  290. .name = "gray16be",
  291. .nb_channels = 1,
  292. .color_type = FF_COLOR_GRAY,
  293. .pixel_type = FF_PIXEL_PLANAR,
  294. .depth = 16,
  295. },
  296. [PIX_FMT_GRAY16LE] = {
  297. .name = "gray16le",
  298. .nb_channels = 1,
  299. .color_type = FF_COLOR_GRAY,
  300. .pixel_type = FF_PIXEL_PLANAR,
  301. .depth = 16,
  302. },
  303. [PIX_FMT_GRAY8] = {
  304. .name = "gray",
  305. .nb_channels = 1,
  306. .color_type = FF_COLOR_GRAY,
  307. .pixel_type = FF_PIXEL_PLANAR,
  308. .depth = 8,
  309. },
  310. [PIX_FMT_MONOWHITE] = {
  311. .name = "monow",
  312. .nb_channels = 1,
  313. .color_type = FF_COLOR_GRAY,
  314. .pixel_type = FF_PIXEL_PLANAR,
  315. .depth = 1,
  316. },
  317. [PIX_FMT_MONOBLACK] = {
  318. .name = "monob",
  319. .nb_channels = 1,
  320. .color_type = FF_COLOR_GRAY,
  321. .pixel_type = FF_PIXEL_PLANAR,
  322. .depth = 1,
  323. },
  324. /* paletted formats */
  325. [PIX_FMT_PAL8] = {
  326. .name = "pal8",
  327. .nb_channels = 4, .is_alpha = 1,
  328. .color_type = FF_COLOR_RGB,
  329. .pixel_type = FF_PIXEL_PALETTE,
  330. .depth = 8,
  331. },
  332. [PIX_FMT_XVMC_MPEG2_MC] = {
  333. .name = "xvmcmc",
  334. .is_hwaccel = 1,
  335. },
  336. [PIX_FMT_XVMC_MPEG2_IDCT] = {
  337. .name = "xvmcidct",
  338. .is_hwaccel = 1,
  339. },
  340. [PIX_FMT_VDPAU_MPEG1] = {
  341. .name = "vdpau_mpeg1",
  342. .is_hwaccel = 1,
  343. .x_chroma_shift = 1, .y_chroma_shift = 1,
  344. },
  345. [PIX_FMT_VDPAU_MPEG2] = {
  346. .name = "vdpau_mpeg2",
  347. .is_hwaccel = 1,
  348. .x_chroma_shift = 1, .y_chroma_shift = 1,
  349. },
  350. [PIX_FMT_VDPAU_H264] = {
  351. .name = "vdpau_h264",
  352. .is_hwaccel = 1,
  353. .x_chroma_shift = 1, .y_chroma_shift = 1,
  354. },
  355. [PIX_FMT_VDPAU_WMV3] = {
  356. .name = "vdpau_wmv3",
  357. .is_hwaccel = 1,
  358. .x_chroma_shift = 1, .y_chroma_shift = 1,
  359. },
  360. [PIX_FMT_VDPAU_VC1] = {
  361. .name = "vdpau_vc1",
  362. .is_hwaccel = 1,
  363. .x_chroma_shift = 1, .y_chroma_shift = 1,
  364. },
  365. [PIX_FMT_VDPAU_MPEG4] = {
  366. .name = "vdpau_mpeg4",
  367. .is_hwaccel = 1,
  368. .x_chroma_shift = 1, .y_chroma_shift = 1,
  369. },
  370. [PIX_FMT_UYYVYY411] = {
  371. .name = "uyyvyy411",
  372. .nb_channels = 1,
  373. .color_type = FF_COLOR_YUV,
  374. .pixel_type = FF_PIXEL_PACKED,
  375. .depth = 8,
  376. .x_chroma_shift = 2, .y_chroma_shift = 0,
  377. },
  378. [PIX_FMT_ABGR] = {
  379. .name = "abgr",
  380. .nb_channels = 4, .is_alpha = 1,
  381. .color_type = FF_COLOR_RGB,
  382. .pixel_type = FF_PIXEL_PACKED,
  383. .depth = 8,
  384. .x_chroma_shift = 0, .y_chroma_shift = 0,
  385. },
  386. [PIX_FMT_BGR565BE] = {
  387. .name = "bgr565be",
  388. .nb_channels = 3,
  389. .color_type = FF_COLOR_RGB,
  390. .pixel_type = FF_PIXEL_PACKED,
  391. .depth = 5,
  392. .x_chroma_shift = 0, .y_chroma_shift = 0,
  393. },
  394. [PIX_FMT_BGR565LE] = {
  395. .name = "bgr565le",
  396. .nb_channels = 3,
  397. .color_type = FF_COLOR_RGB,
  398. .pixel_type = FF_PIXEL_PACKED,
  399. .depth = 5,
  400. .x_chroma_shift = 0, .y_chroma_shift = 0,
  401. },
  402. [PIX_FMT_BGR555BE] = {
  403. .name = "bgr555be",
  404. .nb_channels = 3,
  405. .color_type = FF_COLOR_RGB,
  406. .pixel_type = FF_PIXEL_PACKED,
  407. .depth = 5,
  408. .x_chroma_shift = 0, .y_chroma_shift = 0,
  409. },
  410. [PIX_FMT_BGR555LE] = {
  411. .name = "bgr555le",
  412. .nb_channels = 3,
  413. .color_type = FF_COLOR_RGB,
  414. .pixel_type = FF_PIXEL_PACKED,
  415. .depth = 5,
  416. .x_chroma_shift = 0, .y_chroma_shift = 0,
  417. },
  418. [PIX_FMT_RGB8] = {
  419. .name = "rgb8",
  420. .nb_channels = 1,
  421. .color_type = FF_COLOR_RGB,
  422. .pixel_type = FF_PIXEL_PACKED,
  423. .depth = 8,
  424. .x_chroma_shift = 0, .y_chroma_shift = 0,
  425. },
  426. [PIX_FMT_RGB4] = {
  427. .name = "rgb4",
  428. .nb_channels = 1,
  429. .color_type = FF_COLOR_RGB,
  430. .pixel_type = FF_PIXEL_PACKED,
  431. .depth = 4,
  432. .x_chroma_shift = 0, .y_chroma_shift = 0,
  433. },
  434. [PIX_FMT_RGB4_BYTE] = {
  435. .name = "rgb4_byte",
  436. .nb_channels = 1,
  437. .color_type = FF_COLOR_RGB,
  438. .pixel_type = FF_PIXEL_PACKED,
  439. .depth = 8,
  440. .x_chroma_shift = 0, .y_chroma_shift = 0,
  441. },
  442. [PIX_FMT_BGR8] = {
  443. .name = "bgr8",
  444. .nb_channels = 1,
  445. .color_type = FF_COLOR_RGB,
  446. .pixel_type = FF_PIXEL_PACKED,
  447. .depth = 8,
  448. .x_chroma_shift = 0, .y_chroma_shift = 0,
  449. },
  450. [PIX_FMT_BGR4] = {
  451. .name = "bgr4",
  452. .nb_channels = 1,
  453. .color_type = FF_COLOR_RGB,
  454. .pixel_type = FF_PIXEL_PACKED,
  455. .depth = 4,
  456. .x_chroma_shift = 0, .y_chroma_shift = 0,
  457. },
  458. [PIX_FMT_BGR4_BYTE] = {
  459. .name = "bgr4_byte",
  460. .nb_channels = 1,
  461. .color_type = FF_COLOR_RGB,
  462. .pixel_type = FF_PIXEL_PACKED,
  463. .depth = 8,
  464. .x_chroma_shift = 0, .y_chroma_shift = 0,
  465. },
  466. [PIX_FMT_NV12] = {
  467. .name = "nv12",
  468. .nb_channels = 2,
  469. .color_type = FF_COLOR_YUV,
  470. .pixel_type = FF_PIXEL_PLANAR,
  471. .depth = 8,
  472. .x_chroma_shift = 1, .y_chroma_shift = 1,
  473. },
  474. [PIX_FMT_NV21] = {
  475. .name = "nv21",
  476. .nb_channels = 2,
  477. .color_type = FF_COLOR_YUV,
  478. .pixel_type = FF_PIXEL_PLANAR,
  479. .depth = 8,
  480. .x_chroma_shift = 1, .y_chroma_shift = 1,
  481. },
  482. [PIX_FMT_BGRA] = {
  483. .name = "bgra",
  484. .nb_channels = 4, .is_alpha = 1,
  485. .color_type = FF_COLOR_RGB,
  486. .pixel_type = FF_PIXEL_PACKED,
  487. .depth = 8,
  488. .x_chroma_shift = 0, .y_chroma_shift = 0,
  489. },
  490. [PIX_FMT_RGBA] = {
  491. .name = "rgba",
  492. .nb_channels = 4, .is_alpha = 1,
  493. .color_type = FF_COLOR_RGB,
  494. .pixel_type = FF_PIXEL_PACKED,
  495. .depth = 8,
  496. .x_chroma_shift = 0, .y_chroma_shift = 0,
  497. },
  498. /* VA API formats */
  499. [PIX_FMT_VAAPI_MOCO] = {
  500. .name = "vaapi_moco",
  501. .is_hwaccel = 1,
  502. .x_chroma_shift = 1, .y_chroma_shift = 1,
  503. },
  504. [PIX_FMT_VAAPI_IDCT] = {
  505. .name = "vaapi_idct",
  506. .is_hwaccel = 1,
  507. .x_chroma_shift = 1, .y_chroma_shift = 1,
  508. },
  509. [PIX_FMT_VAAPI_VLD] = {
  510. .name = "vaapi_vld",
  511. .is_hwaccel = 1,
  512. .x_chroma_shift = 1, .y_chroma_shift = 1,
  513. },
  514. };
  515. void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
  516. {
  517. *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
  518. *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
  519. }
  520. const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
  521. {
  522. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
  523. return NULL;
  524. else
  525. return pix_fmt_info[pix_fmt].name;
  526. }
  527. static enum PixelFormat avcodec_get_pix_fmt_internal(const char *name)
  528. {
  529. int i;
  530. for (i=0; i < PIX_FMT_NB; i++)
  531. if (pix_fmt_info[i].name && !strcmp(pix_fmt_info[i].name, name))
  532. return i;
  533. return PIX_FMT_NONE;
  534. }
  535. #if HAVE_BIGENDIAN
  536. # define X_NE(be, le) be
  537. #else
  538. # define X_NE(be, le) le
  539. #endif
  540. enum PixelFormat avcodec_get_pix_fmt(const char *name)
  541. {
  542. enum PixelFormat pix_fmt;
  543. if (!strcmp(name, "rgb32"))
  544. name = X_NE("argb", "bgra");
  545. else if (!strcmp(name, "bgr32"))
  546. name = X_NE("abgr", "rgba");
  547. pix_fmt = avcodec_get_pix_fmt_internal(name);
  548. if (pix_fmt == PIX_FMT_NONE) {
  549. char name2[32];
  550. snprintf(name2, sizeof(name2), "%s%s", name, X_NE("be", "le"));
  551. pix_fmt = avcodec_get_pix_fmt_internal(name2);
  552. }
  553. return pix_fmt;
  554. }
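/* Usage sketch: looking up a pixel format by name. The "rgb32"/"bgr32"
 * aliases are remapped to the native-endian packed format, so on a
 * little-endian build "rgb32" is expected to resolve to PIX_FMT_BGRA;
 * names that match nothing (even after the "be"/"le" suffix retry)
 * yield PIX_FMT_NONE.
 *
 *     enum PixelFormat fmt = avcodec_get_pix_fmt("rgb32");
 *     if (fmt == PIX_FMT_NONE)
 *         return -1;
 */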
  555. void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
  556. {
  557. /* print header */
  558. if (pix_fmt < 0)
  559. snprintf (buf, buf_size,
  560. "name " " nb_channels" " depth" " is_alpha"
  561. );
  562. else{
  563. PixFmtInfo info= pix_fmt_info[pix_fmt];
  564. char is_alpha_char= info.is_alpha ? 'y' : 'n';
  565. snprintf (buf, buf_size,
  566. "%-11s %5d %9d %6c",
  567. info.name,
  568. info.nb_channels,
  569. info.depth,
  570. is_alpha_char
  571. );
  572. }
  573. }
  574. int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
  575. {
  576. return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
  577. }
  578. int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt){
  579. int i;
  580. for(i=0; i<256; i++){
  581. int r,g,b;
  582. switch(pix_fmt) {
  583. case PIX_FMT_RGB8:
  584. r= (i>>5 )*36;
  585. g= ((i>>2)&7)*36;
  586. b= (i&3 )*85;
  587. break;
  588. case PIX_FMT_BGR8:
  589. b= (i>>6 )*85;
  590. g= ((i>>3)&7)*36;
  591. r= (i&7 )*36;
  592. break;
  593. case PIX_FMT_RGB4_BYTE:
  594. r= (i>>3 )*255;
  595. g= ((i>>1)&3)*85;
  596. b= (i&1 )*255;
  597. break;
  598. case PIX_FMT_BGR4_BYTE:
  599. b= (i>>3 )*255;
  600. g= ((i>>1)&3)*85;
  601. r= (i&1 )*255;
  602. break;
  603. case PIX_FMT_GRAY8:
  604. r=b=g= i;
  605. break;
  606. default:
  607. return -1;
  608. }
  609. pal[i] = b + (g<<8) + (r<<16);
  610. }
  611. return 0;
  612. }
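/* Worked example of the systematic palette above, for PIX_FMT_RGB8
 * (3-3-2 bit layout, RRRGGGBB): index 0xFF gives r = 7*36 = 252,
 * g = 7*36 = 252, b = 3*85 = 255, which is stored as 0x00FCFCFF in the
 * 0x00RRGGBB layout written into pal[].
 */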
  613. int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
  614. {
  615. int w2;
  616. const PixFmtInfo *pinfo;
  617. memset(picture->linesize, 0, sizeof(picture->linesize));
  618. pinfo = &pix_fmt_info[pix_fmt];
  619. switch(pix_fmt) {
  620. case PIX_FMT_YUV420P:
  621. case PIX_FMT_YUV422P:
  622. case PIX_FMT_YUV444P:
  623. case PIX_FMT_YUV410P:
  624. case PIX_FMT_YUV411P:
  625. case PIX_FMT_YUV440P:
  626. case PIX_FMT_YUVJ420P:
  627. case PIX_FMT_YUVJ422P:
  628. case PIX_FMT_YUVJ444P:
  629. case PIX_FMT_YUVJ440P:
  630. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  631. picture->linesize[0] = width;
  632. picture->linesize[1] = w2;
  633. picture->linesize[2] = w2;
  634. break;
  635. case PIX_FMT_YUV420P16LE:
  636. case PIX_FMT_YUV422P16LE:
  637. case PIX_FMT_YUV444P16LE:
  638. case PIX_FMT_YUV420P16BE:
  639. case PIX_FMT_YUV422P16BE:
  640. case PIX_FMT_YUV444P16BE:
  641. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  642. picture->linesize[0] = 2*width;
  643. picture->linesize[1] = 2*w2;
  644. picture->linesize[2] = 2*w2;
  645. break;
  646. case PIX_FMT_YUVA420P:
  647. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  648. picture->linesize[0] = width;
  649. picture->linesize[1] = w2;
  650. picture->linesize[2] = w2;
  651. picture->linesize[3] = width;
  652. break;
  653. case PIX_FMT_NV12:
  654. case PIX_FMT_NV21:
  655. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  656. picture->linesize[0] = width;
  657. picture->linesize[1] = 2 * w2;
  658. break;
  659. case PIX_FMT_RGB24:
  660. case PIX_FMT_BGR24:
  661. picture->linesize[0] = width * 3;
  662. break;
  663. case PIX_FMT_ARGB:
  664. case PIX_FMT_ABGR:
  665. case PIX_FMT_RGBA:
  666. case PIX_FMT_BGRA:
  667. picture->linesize[0] = width * 4;
  668. break;
  669. case PIX_FMT_RGB48BE:
  670. case PIX_FMT_RGB48LE:
  671. picture->linesize[0] = width * 6;
  672. break;
  673. case PIX_FMT_GRAY16BE:
  674. case PIX_FMT_GRAY16LE:
  675. case PIX_FMT_BGR555BE:
  676. case PIX_FMT_BGR555LE:
  677. case PIX_FMT_BGR565BE:
  678. case PIX_FMT_BGR565LE:
  679. case PIX_FMT_RGB555BE:
  680. case PIX_FMT_RGB555LE:
  681. case PIX_FMT_RGB565BE:
  682. case PIX_FMT_RGB565LE:
  683. case PIX_FMT_YUYV422:
  684. picture->linesize[0] = width * 2;
  685. break;
  686. case PIX_FMT_UYVY422:
  687. picture->linesize[0] = width * 2;
  688. break;
  689. case PIX_FMT_UYYVYY411:
  690. picture->linesize[0] = width + width/2;
  691. break;
  692. case PIX_FMT_RGB4:
  693. case PIX_FMT_BGR4:
  694. picture->linesize[0] = width / 2;
  695. break;
  696. case PIX_FMT_MONOWHITE:
  697. case PIX_FMT_MONOBLACK:
  698. picture->linesize[0] = (width + 7) >> 3;
  699. break;
  700. case PIX_FMT_PAL8:
  701. case PIX_FMT_RGB8:
  702. case PIX_FMT_BGR8:
  703. case PIX_FMT_RGB4_BYTE:
  704. case PIX_FMT_BGR4_BYTE:
  705. case PIX_FMT_GRAY8:
  706. picture->linesize[0] = width;
  707. break;
  708. default:
  709. return -1;
  710. }
  711. return 0;
  712. }
  713. int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
  714. int height)
  715. {
  716. int size, h2, size2;
  717. const PixFmtInfo *pinfo;
  718. pinfo = &pix_fmt_info[pix_fmt];
  719. size = picture->linesize[0] * height;
  720. switch(pix_fmt) {
  721. case PIX_FMT_YUV420P:
  722. case PIX_FMT_YUV422P:
  723. case PIX_FMT_YUV444P:
  724. case PIX_FMT_YUV410P:
  725. case PIX_FMT_YUV411P:
  726. case PIX_FMT_YUV440P:
  727. case PIX_FMT_YUVJ420P:
  728. case PIX_FMT_YUVJ422P:
  729. case PIX_FMT_YUVJ444P:
  730. case PIX_FMT_YUVJ440P:
  731. case PIX_FMT_YUV420P16LE:
  732. case PIX_FMT_YUV422P16LE:
  733. case PIX_FMT_YUV444P16LE:
  734. case PIX_FMT_YUV420P16BE:
  735. case PIX_FMT_YUV422P16BE:
  736. case PIX_FMT_YUV444P16BE:
  737. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  738. size2 = picture->linesize[1] * h2;
  739. picture->data[0] = ptr;
  740. picture->data[1] = picture->data[0] + size;
  741. picture->data[2] = picture->data[1] + size2;
  742. picture->data[3] = NULL;
  743. return size + 2 * size2;
  744. case PIX_FMT_YUVA420P:
  745. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  746. size2 = picture->linesize[1] * h2;
  747. picture->data[0] = ptr;
  748. picture->data[1] = picture->data[0] + size;
  749. picture->data[2] = picture->data[1] + size2;
  750. picture->data[3] = picture->data[1] + size2 + size2;
  751. return 2 * size + 2 * size2;
  752. case PIX_FMT_NV12:
  753. case PIX_FMT_NV21:
  754. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  755. size2 = picture->linesize[1] * h2;
  756. picture->data[0] = ptr;
  757. picture->data[1] = picture->data[0] + size;
  758. picture->data[2] = NULL;
  759. picture->data[3] = NULL;
  760. return size + size2;
  761. case PIX_FMT_RGB24:
  762. case PIX_FMT_BGR24:
  763. case PIX_FMT_ARGB:
  764. case PIX_FMT_ABGR:
  765. case PIX_FMT_RGBA:
  766. case PIX_FMT_BGRA:
  767. case PIX_FMT_RGB48BE:
  768. case PIX_FMT_RGB48LE:
  769. case PIX_FMT_GRAY16BE:
  770. case PIX_FMT_GRAY16LE:
  771. case PIX_FMT_BGR555BE:
  772. case PIX_FMT_BGR555LE:
  773. case PIX_FMT_BGR565BE:
  774. case PIX_FMT_BGR565LE:
  775. case PIX_FMT_RGB555BE:
  776. case PIX_FMT_RGB555LE:
  777. case PIX_FMT_RGB565BE:
  778. case PIX_FMT_RGB565LE:
  779. case PIX_FMT_YUYV422:
  780. case PIX_FMT_UYVY422:
  781. case PIX_FMT_UYYVYY411:
  782. case PIX_FMT_RGB4:
  783. case PIX_FMT_BGR4:
  784. case PIX_FMT_MONOWHITE:
  785. case PIX_FMT_MONOBLACK:
  786. picture->data[0] = ptr;
  787. picture->data[1] = NULL;
  788. picture->data[2] = NULL;
  789. picture->data[3] = NULL;
  790. return size;
  791. case PIX_FMT_PAL8:
  792. case PIX_FMT_RGB8:
  793. case PIX_FMT_BGR8:
  794. case PIX_FMT_RGB4_BYTE:
  795. case PIX_FMT_BGR4_BYTE:
  796. case PIX_FMT_GRAY8:
  797. size2 = (size + 3) & ~3;
  798. picture->data[0] = ptr;
  799. picture->data[1] = ptr + size2; /* palette is stored here as 256 32-bit words */
  800. picture->data[2] = NULL;
  801. picture->data[3] = NULL;
  802. return size2 + 256 * 4;
  803. default:
  804. picture->data[0] = NULL;
  805. picture->data[1] = NULL;
  806. picture->data[2] = NULL;
  807. picture->data[3] = NULL;
  808. return -1;
  809. }
  810. }
  811. int avpicture_fill(AVPicture *picture, uint8_t *ptr,
  812. enum PixelFormat pix_fmt, int width, int height)
  813. {
  814. if(avcodec_check_dimensions(NULL, width, height))
  815. return -1;
  816. if (ff_fill_linesize(picture, pix_fmt, width))
  817. return -1;
  818. return ff_fill_pointer(picture, ptr, pix_fmt, height);
  819. }
  820. int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
  821. unsigned char *dest, int dest_size)
  822. {
  823. const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
  824. int i, j, w, ow, h, oh, data_planes;
  825. const unsigned char* s;
  826. int size = avpicture_get_size(pix_fmt, width, height);
  827. if (size > dest_size || size < 0)
  828. return -1;
  829. if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
  830. if (pix_fmt == PIX_FMT_YUYV422 ||
  831. pix_fmt == PIX_FMT_UYVY422 ||
  832. pix_fmt == PIX_FMT_BGR565BE ||
  833. pix_fmt == PIX_FMT_BGR565LE ||
  834. pix_fmt == PIX_FMT_BGR555BE ||
  835. pix_fmt == PIX_FMT_BGR555LE ||
  836. pix_fmt == PIX_FMT_RGB565BE ||
  837. pix_fmt == PIX_FMT_RGB565LE ||
  838. pix_fmt == PIX_FMT_RGB555BE ||
  839. pix_fmt == PIX_FMT_RGB555LE)
  840. w = width * 2;
  841. else if (pix_fmt == PIX_FMT_UYYVYY411)
  842. w = width + width/2;
  843. else if (pix_fmt == PIX_FMT_PAL8)
  844. w = width;
  845. else
  846. w = width * (pf->depth * pf->nb_channels / 8);
  847. data_planes = 1;
  848. h = height;
  849. } else {
  850. data_planes = pf->nb_channels;
  851. w = (width*pf->depth + 7)/8;
  852. h = height;
  853. }
  854. ow = w;
  855. oh = h;
  856. for (i=0; i<data_planes; i++) {
  857. if (i == 1) {
  858. w = ((width >> pf->x_chroma_shift) * pf->depth + 7) / 8;
  859. h = height >> pf->y_chroma_shift;
  860. if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
  861. w <<= 1;
  862. } else if (i == 3) {
  863. w = ow;
  864. h = oh;
  865. }
  866. s = src->data[i];
  867. for(j=0; j<h; j++) {
  868. memcpy(dest, s, w);
  869. dest += w;
  870. s += src->linesize[i];
  871. }
  872. }
  873. if (pf->pixel_type == FF_PIXEL_PALETTE)
  874. memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);
  875. return size;
  876. }
  877. int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
  878. {
  879. AVPicture dummy_pict;
  880. if(avcodec_check_dimensions(NULL, width, height))
  881. return -1;
  882. switch (pix_fmt) {
  883. case PIX_FMT_RGB8:
  884. case PIX_FMT_BGR8:
  885. case PIX_FMT_RGB4_BYTE:
  886. case PIX_FMT_BGR4_BYTE:
  887. case PIX_FMT_GRAY8:
  888. // do not include palette for these pseudo-paletted formats
  889. return width * height;
  890. }
  891. return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
  892. }
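/* Usage sketch: wrapping a caller-owned buffer in an AVPicture. The size
 * is queried first, then avpicture_fill() sets up data[] and linesize[];
 * both calls return a negative value on invalid input.
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, width, height);
 *     uint8_t *buf = size < 0 ? NULL : av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_YUV420P, width, height);
 */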
  893. int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
  894. int has_alpha)
  895. {
  896. const PixFmtInfo *pf, *ps;
  897. int loss;
  898. ps = &pix_fmt_info[src_pix_fmt];
  899. /* compute loss */
  900. loss = 0;
  901. pf = &pix_fmt_info[dst_pix_fmt];
  902. if (pf->depth < ps->depth ||
  903. ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE) &&
  904. (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE)))
  905. loss |= FF_LOSS_DEPTH;
  906. if (pf->x_chroma_shift > ps->x_chroma_shift ||
  907. pf->y_chroma_shift > ps->y_chroma_shift)
  908. loss |= FF_LOSS_RESOLUTION;
  909. switch(pf->color_type) {
  910. case FF_COLOR_RGB:
  911. if (ps->color_type != FF_COLOR_RGB &&
  912. ps->color_type != FF_COLOR_GRAY)
  913. loss |= FF_LOSS_COLORSPACE;
  914. break;
  915. case FF_COLOR_GRAY:
  916. if (ps->color_type != FF_COLOR_GRAY)
  917. loss |= FF_LOSS_COLORSPACE;
  918. break;
  919. case FF_COLOR_YUV:
  920. if (ps->color_type != FF_COLOR_YUV)
  921. loss |= FF_LOSS_COLORSPACE;
  922. break;
  923. case FF_COLOR_YUV_JPEG:
  924. if (ps->color_type != FF_COLOR_YUV_JPEG &&
  925. ps->color_type != FF_COLOR_YUV &&
  926. ps->color_type != FF_COLOR_GRAY)
  927. loss |= FF_LOSS_COLORSPACE;
  928. break;
  929. default:
  930. /* fail safe test */
  931. if (ps->color_type != pf->color_type)
  932. loss |= FF_LOSS_COLORSPACE;
  933. break;
  934. }
  935. if (pf->color_type == FF_COLOR_GRAY &&
  936. ps->color_type != FF_COLOR_GRAY)
  937. loss |= FF_LOSS_CHROMA;
  938. if (!pf->is_alpha && (ps->is_alpha && has_alpha))
  939. loss |= FF_LOSS_ALPHA;
  940. if (pf->pixel_type == FF_PIXEL_PALETTE &&
  941. (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
  942. loss |= FF_LOSS_COLORQUANT;
  943. return loss;
  944. }
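/* Worked example of the loss computation above: converting rgb24 to gray8
 * keeps the 8-bit depth and full resolution, but the FF_COLOR_GRAY
 * destination triggers both the colorspace and the chroma checks, so the
 * returned mask is expected to be FF_LOSS_COLORSPACE | FF_LOSS_CHROMA.
 */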
  945. static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
  946. {
  947. int bits;
  948. const PixFmtInfo *pf;
  949. pf = &pix_fmt_info[pix_fmt];
  950. switch(pf->pixel_type) {
  951. case FF_PIXEL_PACKED:
  952. switch(pix_fmt) {
  953. case PIX_FMT_YUYV422:
  954. case PIX_FMT_UYVY422:
  955. case PIX_FMT_RGB565BE:
  956. case PIX_FMT_RGB565LE:
  957. case PIX_FMT_RGB555BE:
  958. case PIX_FMT_RGB555LE:
  959. case PIX_FMT_BGR565BE:
  960. case PIX_FMT_BGR565LE:
  961. case PIX_FMT_BGR555BE:
  962. case PIX_FMT_BGR555LE:
  963. bits = 16;
  964. break;
  965. case PIX_FMT_UYYVYY411:
  966. bits = 12;
  967. break;
  968. default:
  969. bits = pf->depth * pf->nb_channels;
  970. break;
  971. }
  972. break;
  973. case FF_PIXEL_PLANAR:
  974. if (pf->x_chroma_shift == 0 && pf->y_chroma_shift == 0) {
  975. bits = pf->depth * pf->nb_channels;
  976. } else {
  977. bits = pf->depth + ((2 * pf->depth) >>
  978. (pf->x_chroma_shift + pf->y_chroma_shift));
  979. }
  980. break;
  981. case FF_PIXEL_PALETTE:
  982. bits = 8;
  983. break;
  984. default:
  985. bits = -1;
  986. break;
  987. }
  988. return bits;
  989. }
  990. static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
  991. enum PixelFormat src_pix_fmt,
  992. int has_alpha,
  993. int loss_mask)
  994. {
  995. int dist, i, loss, min_dist;
  996. enum PixelFormat dst_pix_fmt;
  997. /* find exact color match with smallest size */
  998. dst_pix_fmt = -1;
  999. min_dist = 0x7fffffff;
  1000. for(i = 0;i < PIX_FMT_NB; i++) {
  1001. if (pix_fmt_mask & (1ULL << i)) {
  1002. loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
  1003. if (loss == 0) {
  1004. dist = avg_bits_per_pixel(i);
  1005. if (dist < min_dist) {
  1006. min_dist = dist;
  1007. dst_pix_fmt = i;
  1008. }
  1009. }
  1010. }
  1011. }
  1012. return dst_pix_fmt;
  1013. }
  1014. enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
  1015. int has_alpha, int *loss_ptr)
  1016. {
  1017. enum PixelFormat dst_pix_fmt;
  1018. int loss_mask, i;
  1019. static const int loss_mask_order[] = {
  1020. ~0, /* no loss first */
  1021. ~FF_LOSS_ALPHA,
  1022. ~FF_LOSS_RESOLUTION,
  1023. ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
  1024. ~FF_LOSS_COLORQUANT,
  1025. ~FF_LOSS_DEPTH,
  1026. 0,
  1027. };
  1028. /* try with successive loss */
  1029. i = 0;
  1030. for(;;) {
  1031. loss_mask = loss_mask_order[i++];
  1032. dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
  1033. has_alpha, loss_mask);
  1034. if (dst_pix_fmt >= 0)
  1035. goto found;
  1036. if (loss_mask == 0)
  1037. break;
  1038. }
  1039. return -1;
  1040. found:
  1041. if (loss_ptr)
  1042. *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
  1043. return dst_pix_fmt;
  1044. }
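/* Usage sketch: the mask is a bitfield indexed by PixelFormat values, so a
 * caller offering yuv420p and rgb24 as destinations would do something like:
 *
 *     int loss;
 *     int64_t mask = (1ULL << PIX_FMT_YUV420P) | (1ULL << PIX_FMT_RGB24);
 *     enum PixelFormat dst = avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV444P,
 *                                                      0, &loss);
 *
 * For a yuv444p source this is expected to pick yuv420p (same colorspace,
 * only a resolution loss) and to report FF_LOSS_RESOLUTION in loss.
 */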
  1045. void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
  1046. const uint8_t *src, int src_wrap,
  1047. int width, int height)
  1048. {
  1049. if((!dst) || (!src))
  1050. return;
  1051. for(;height > 0; height--) {
  1052. memcpy(dst, src, width);
  1053. dst += dst_wrap;
  1054. src += src_wrap;
  1055. }
  1056. }
  1057. int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
  1058. {
  1059. int bits;
  1060. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  1062. switch(pf->pixel_type) {
  1063. case FF_PIXEL_PACKED:
  1064. switch(pix_fmt) {
  1065. case PIX_FMT_YUYV422:
  1066. case PIX_FMT_UYVY422:
  1067. case PIX_FMT_RGB565BE:
  1068. case PIX_FMT_RGB565LE:
  1069. case PIX_FMT_RGB555BE:
  1070. case PIX_FMT_RGB555LE:
  1071. case PIX_FMT_BGR565BE:
  1072. case PIX_FMT_BGR565LE:
  1073. case PIX_FMT_BGR555BE:
  1074. case PIX_FMT_BGR555LE:
  1075. bits = 16;
  1076. break;
  1077. case PIX_FMT_UYYVYY411:
  1078. bits = 12;
  1079. break;
  1080. default:
  1081. bits = pf->depth * pf->nb_channels;
  1082. break;
  1083. }
  1084. return (width * bits + 7) >> 3;
  1085. break;
  1086. case FF_PIXEL_PLANAR:
  1087. if (plane == 1 || plane == 2)
  1088. width= -((-width)>>pf->x_chroma_shift);
  1089. return (width * pf->depth + 7) >> 3;
  1090. break;
  1091. case FF_PIXEL_PALETTE:
  1092. if (plane == 0)
  1093. return width;
  1094. break;
  1095. }
  1096. return -1;
  1097. }
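/* Worked example of the byte-width computation above: for yuv420p at
 * width 101, plane 0 is 101 bytes wide, while planes 1 and 2 round up to
 * -((-101) >> 1) = 51 chroma samples, i.e. 51 bytes at 8 bits per sample.
 */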
  1098. void av_picture_copy(AVPicture *dst, const AVPicture *src,
  1099. enum PixelFormat pix_fmt, int width, int height)
  1100. {
  1101. int i;
  1102. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  1103. switch(pf->pixel_type) {
  1104. case FF_PIXEL_PACKED:
  1105. case FF_PIXEL_PLANAR:
  1106. for(i = 0; i < pf->nb_channels; i++) {
  1107. int h;
  1108. int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
  1109. h = height;
  1110. if (i == 1 || i == 2) {
  1111. h= -((-height)>>pf->y_chroma_shift);
  1112. }
  1113. ff_img_copy_plane(dst->data[i], dst->linesize[i],
  1114. src->data[i], src->linesize[i],
  1115. bwidth, h);
  1116. }
  1117. break;
  1118. case FF_PIXEL_PALETTE:
  1119. ff_img_copy_plane(dst->data[0], dst->linesize[0],
  1120. src->data[0], src->linesize[0],
  1121. width, height);
  1122. /* copy the palette */
  1123. memcpy(dst->data[1], src->data[1], 4*256);
  1124. break;
  1125. }
  1126. }
  1127. /* 2x2 -> 1x1 */
  1128. void ff_shrink22(uint8_t *dst, int dst_wrap,
  1129. const uint8_t *src, int src_wrap,
  1130. int width, int height)
  1131. {
  1132. int w;
  1133. const uint8_t *s1, *s2;
  1134. uint8_t *d;
  1135. for(;height > 0; height--) {
  1136. s1 = src;
  1137. s2 = s1 + src_wrap;
  1138. d = dst;
  1139. for(w = width;w >= 4; w-=4) {
  1140. d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
  1141. d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
  1142. d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
  1143. d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
  1144. s1 += 8;
  1145. s2 += 8;
  1146. d += 4;
  1147. }
  1148. for(;w > 0; w--) {
  1149. d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
  1150. s1 += 2;
  1151. s2 += 2;
  1152. d++;
  1153. }
  1154. src += 2 * src_wrap;
  1155. dst += dst_wrap;
  1156. }
  1157. }
  1158. /* 4x4 -> 1x1 */
  1159. void ff_shrink44(uint8_t *dst, int dst_wrap,
  1160. const uint8_t *src, int src_wrap,
  1161. int width, int height)
  1162. {
  1163. int w;
  1164. const uint8_t *s1, *s2, *s3, *s4;
  1165. uint8_t *d;
  1166. for(;height > 0; height--) {
  1167. s1 = src;
  1168. s2 = s1 + src_wrap;
  1169. s3 = s2 + src_wrap;
  1170. s4 = s3 + src_wrap;
  1171. d = dst;
  1172. for(w = width;w > 0; w--) {
  1173. d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
  1174. s2[0] + s2[1] + s2[2] + s2[3] +
  1175. s3[0] + s3[1] + s3[2] + s3[3] +
  1176. s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
  1177. s1 += 4;
  1178. s2 += 4;
  1179. s3 += 4;
  1180. s4 += 4;
  1181. d++;
  1182. }
  1183. src += 4 * src_wrap;
  1184. dst += dst_wrap;
  1185. }
  1186. }
  1187. /* 8x8 -> 1x1 */
  1188. void ff_shrink88(uint8_t *dst, int dst_wrap,
  1189. const uint8_t *src, int src_wrap,
  1190. int width, int height)
  1191. {
  1192. int w, i;
  1193. for(;height > 0; height--) {
  1194. for(w = width;w > 0; w--) {
  1195. int tmp=0;
  1196. for(i=0; i<8; i++){
  1197. tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
  1198. src += src_wrap;
  1199. }
  1200. *(dst++) = (tmp + 32)>>6;
  1201. src += 8 - 8*src_wrap;
  1202. }
  1203. src += 8*src_wrap - 8*width;
  1204. dst += dst_wrap - width;
  1205. }
  1206. }
  1207. int avpicture_alloc(AVPicture *picture,
  1208. enum PixelFormat pix_fmt, int width, int height)
  1209. {
  1210. int size;
  1211. void *ptr;
  1212. size = avpicture_fill(picture, NULL, pix_fmt, width, height);
  1213. if(size<0)
  1214. goto fail;
  1215. ptr = av_malloc(size);
  1216. if (!ptr)
  1217. goto fail;
  1218. avpicture_fill(picture, ptr, pix_fmt, width, height);
  1219. if(picture->data[1] && !picture->data[2])
  1220. ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);
  1221. return 0;
  1222. fail:
  1223. memset(picture, 0, sizeof(AVPicture));
  1224. return -1;
  1225. }
  1226. void avpicture_free(AVPicture *picture)
  1227. {
  1228. av_free(picture->data[0]);
  1229. }
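/* Usage sketch: avpicture_alloc() reserves one block covering all planes
 * (plus a systematic palette for pseudo-paletted formats), so the matching
 * avpicture_free() only needs to release data[0].
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_RGB24, width, height) == 0) {
 *         draw_into(pic.data[0], pic.linesize[0]);   // draw_into() is a hypothetical caller
 *         avpicture_free(&pic);
 *     }
 */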
  1230. /* return true if yuv planar */
  1231. static inline int is_yuv_planar(const PixFmtInfo *ps)
  1232. {
  1233. return (ps->color_type == FF_COLOR_YUV ||
  1234. ps->color_type == FF_COLOR_YUV_JPEG) &&
  1235. ps->pixel_type == FF_PIXEL_PLANAR;
  1236. }
  1237. int av_picture_crop(AVPicture *dst, const AVPicture *src,
  1238. enum PixelFormat pix_fmt, int top_band, int left_band)
  1239. {
  1240. int y_shift;
  1241. int x_shift;
  1242. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
  1243. return -1;
  1244. y_shift = pix_fmt_info[pix_fmt].y_chroma_shift;
  1245. x_shift = pix_fmt_info[pix_fmt].x_chroma_shift;
  1246. dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
  1247. dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
  1248. dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
  1249. dst->linesize[0] = src->linesize[0];
  1250. dst->linesize[1] = src->linesize[1];
  1251. dst->linesize[2] = src->linesize[2];
  1252. return 0;
  1253. }
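/* Usage sketch: av_picture_crop() only offsets the data pointers into the
 * source frame and copies nothing, so dst stays valid only as long as src
 * does. For yuv420p, odd top/left bands make the >> shifts truncate and
 * shift chroma relative to luma, so even band sizes are preferable.
 *
 *     AVPicture cropped;
 *     av_picture_crop(&cropped, &frame, PIX_FMT_YUV420P, 16, 16);
 */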
  1254. int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
  1255. enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
  1256. int *color)
  1257. {
  1258. uint8_t *optr;
  1259. int y_shift;
  1260. int x_shift;
  1261. int yheight;
  1262. int i, y;
  1263. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
  1264. !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;
  1265. for (i = 0; i < 3; i++) {
  1266. x_shift = i ? pix_fmt_info[pix_fmt].x_chroma_shift : 0;
  1267. y_shift = i ? pix_fmt_info[pix_fmt].y_chroma_shift : 0;
  1268. if (padtop || padleft) {
  1269. memset(dst->data[i], color[i],
  1270. dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
  1271. }
  1272. if (padleft || padright) {
  1273. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1274. (dst->linesize[i] - (padright >> x_shift));
  1275. yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
  1276. for (y = 0; y < yheight; y++) {
  1277. memset(optr, color[i], (padleft + padright) >> x_shift);
  1278. optr += dst->linesize[i];
  1279. }
  1280. }
  1281. if (src) { /* first line */
  1282. uint8_t *iptr = src->data[i];
  1283. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1284. (padleft >> x_shift);
  1285. memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
  1286. iptr += src->linesize[i];
  1287. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1288. (dst->linesize[i] - (padright >> x_shift));
  1289. yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
  1290. for (y = 0; y < yheight; y++) {
  1291. memset(optr, color[i], (padleft + padright) >> x_shift);
  1292. memcpy(optr + ((padleft + padright) >> x_shift), iptr,
  1293. (width - padleft - padright) >> x_shift);
  1294. iptr += src->linesize[i];
  1295. optr += dst->linesize[i];
  1296. }
  1297. }
  1298. if (padbottom || padright) {
  1299. optr = dst->data[i] + dst->linesize[i] *
  1300. ((height - padbottom) >> y_shift) - (padright >> x_shift);
  1301. memset(optr, color[i],dst->linesize[i] *
  1302. (padbottom >> y_shift) + (padright >> x_shift));
  1303. }
  1304. }
  1305. return 0;
  1306. }
  1307. /* NOTE: we scan all the pixels to get exact information */
  1308. static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
  1309. {
  1310. const unsigned char *p;
  1311. int src_wrap, ret, x, y;
  1312. unsigned int a;
  1313. uint32_t *palette = (uint32_t *)src->data[1];
  1314. p = src->data[0];
  1315. src_wrap = src->linesize[0] - width;
  1316. ret = 0;
  1317. for(y=0;y<height;y++) {
  1318. for(x=0;x<width;x++) {
  1319. a = palette[p[0]] >> 24;
  1320. if (a == 0x00) {
  1321. ret |= FF_ALPHA_TRANSP;
  1322. } else if (a != 0xff) {
  1323. ret |= FF_ALPHA_SEMI_TRANSP;
  1324. }
  1325. p++;
  1326. }
  1327. p += src_wrap;
  1328. }
  1329. return ret;
  1330. }
  1331. int img_get_alpha_info(const AVPicture *src,
  1332. enum PixelFormat pix_fmt, int width, int height)
  1333. {
  1334. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  1335. int ret;
  1336. /* no alpha can be represented in format */
  1337. if (!pf->is_alpha)
  1338. return 0;
  1339. switch(pix_fmt) {
  1340. case PIX_FMT_PAL8:
  1341. ret = get_alpha_info_pal8(src, width, height);
  1342. break;
  1343. default:
  1344. /* we do not know the layout, so report both kinds of transparency */
  1345. ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
  1346. break;
  1347. }
  1348. return ret;
  1349. }
  1350. #if HAVE_MMX
  1351. #define DEINT_INPLACE_LINE_LUM \
  1352. movd_m2r(lum_m4[0],mm0);\
  1353. movd_m2r(lum_m3[0],mm1);\
  1354. movd_m2r(lum_m2[0],mm2);\
  1355. movd_m2r(lum_m1[0],mm3);\
  1356. movd_m2r(lum[0],mm4);\
  1357. punpcklbw_r2r(mm7,mm0);\
  1358. movd_r2m(mm2,lum_m4[0]);\
  1359. punpcklbw_r2r(mm7,mm1);\
  1360. punpcklbw_r2r(mm7,mm2);\
  1361. punpcklbw_r2r(mm7,mm3);\
  1362. punpcklbw_r2r(mm7,mm4);\
  1363. paddw_r2r(mm3,mm1);\
  1364. psllw_i2r(1,mm2);\
  1365. paddw_r2r(mm4,mm0);\
  1366. psllw_i2r(2,mm1);\
  1367. paddw_r2r(mm6,mm2);\
  1368. paddw_r2r(mm2,mm1);\
  1369. psubusw_r2r(mm0,mm1);\
  1370. psrlw_i2r(3,mm1);\
  1371. packuswb_r2r(mm7,mm1);\
  1372. movd_r2m(mm1,lum_m2[0]);
  1373. #define DEINT_LINE_LUM \
  1374. movd_m2r(lum_m4[0],mm0);\
  1375. movd_m2r(lum_m3[0],mm1);\
  1376. movd_m2r(lum_m2[0],mm2);\
  1377. movd_m2r(lum_m1[0],mm3);\
  1378. movd_m2r(lum[0],mm4);\
  1379. punpcklbw_r2r(mm7,mm0);\
  1380. punpcklbw_r2r(mm7,mm1);\
  1381. punpcklbw_r2r(mm7,mm2);\
  1382. punpcklbw_r2r(mm7,mm3);\
  1383. punpcklbw_r2r(mm7,mm4);\
  1384. paddw_r2r(mm3,mm1);\
  1385. psllw_i2r(1,mm2);\
  1386. paddw_r2r(mm4,mm0);\
  1387. psllw_i2r(2,mm1);\
  1388. paddw_r2r(mm6,mm2);\
  1389. paddw_r2r(mm2,mm1);\
  1390. psubusw_r2r(mm0,mm1);\
  1391. psrlw_i2r(3,mm1);\
  1392. packuswb_r2r(mm7,mm1);\
  1393. movd_r2m(mm1,dst[0]);
  1394. #endif
  1395. /* filter parameters: [-1 4 2 4 -1] // 8 */
  1396. static void deinterlace_line(uint8_t *dst,
  1397. const uint8_t *lum_m4, const uint8_t *lum_m3,
  1398. const uint8_t *lum_m2, const uint8_t *lum_m1,
  1399. const uint8_t *lum,
  1400. int size)
  1401. {
  1402. #if !HAVE_MMX
  1403. uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
  1404. int sum;
  1405. for(;size > 0;size--) {
  1406. sum = -lum_m4[0];
  1407. sum += lum_m3[0] << 2;
  1408. sum += lum_m2[0] << 1;
  1409. sum += lum_m1[0] << 2;
  1410. sum += -lum[0];
  1411. dst[0] = cm[(sum + 4) >> 3];
  1412. lum_m4++;
  1413. lum_m3++;
  1414. lum_m2++;
  1415. lum_m1++;
  1416. lum++;
  1417. dst++;
  1418. }
  1419. #else
  1420. {
  1421. pxor_r2r(mm7,mm7);
  1422. movq_m2r(ff_pw_4,mm6);
  1423. }
  1424. for (;size > 3; size-=4) {
  1425. DEINT_LINE_LUM
  1426. lum_m4+=4;
  1427. lum_m3+=4;
  1428. lum_m2+=4;
  1429. lum_m1+=4;
  1430. lum+=4;
  1431. dst+=4;
  1432. }
  1433. #endif
  1434. }
  1435. static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
  1436. int size)
  1437. {
  1438. #if !HAVE_MMX
  1439. uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
  1440. int sum;
  1441. for(;size > 0;size--) {
  1442. sum = -lum_m4[0];
  1443. sum += lum_m3[0] << 2;
  1444. sum += lum_m2[0] << 1;
  1445. lum_m4[0]=lum_m2[0];
  1446. sum += lum_m1[0] << 2;
  1447. sum += -lum[0];
  1448. lum_m2[0] = cm[(sum + 4) >> 3];
  1449. lum_m4++;
  1450. lum_m3++;
  1451. lum_m2++;
  1452. lum_m1++;
  1453. lum++;
  1454. }
  1455. #else
  1456. {
  1457. pxor_r2r(mm7,mm7);
  1458. movq_m2r(ff_pw_4,mm6);
  1459. }
  1460. for (;size > 3; size-=4) {
  1461. DEINT_INPLACE_LINE_LUM
  1462. lum_m4+=4;
  1463. lum_m3+=4;
  1464. lum_m2+=4;
  1465. lum_m1+=4;
  1466. lum+=4;
  1467. }
  1468. #endif
  1469. }
  1470. /* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
  1471. top field is copied as is, but the bottom field is deinterlaced
  1472. against the top field. */
  1473. static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
  1474. const uint8_t *src1, int src_wrap,
  1475. int width, int height)
  1476. {
  1477. const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
  1478. int y;
  1479. src_m2 = src1;
  1480. src_m1 = src1;
  1481. src_0=&src_m1[src_wrap];
  1482. src_p1=&src_0[src_wrap];
  1483. src_p2=&src_p1[src_wrap];
  1484. for(y=0;y<(height-2);y+=2) {
  1485. memcpy(dst,src_m1,width);
  1486. dst += dst_wrap;
  1487. deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
  1488. src_m2 = src_0;
  1489. src_m1 = src_p1;
  1490. src_0 = src_p2;
  1491. src_p1 += 2*src_wrap;
  1492. src_p2 += 2*src_wrap;
  1493. dst += dst_wrap;
  1494. }
  1495. memcpy(dst,src_m1,width);
  1496. dst += dst_wrap;
  1497. /* do last line */
  1498. deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
  1499. }
  1500. static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
  1501. int width, int height)
  1502. {
  1503. uint8_t *src_m1, *src_0, *src_p1, *src_p2;
  1504. int y;
  1505. uint8_t *buf;
  1506. buf = (uint8_t*)av_malloc(width);
  1507. src_m1 = src1;
  1508. memcpy(buf,src_m1,width);
  1509. src_0=&src_m1[src_wrap];
  1510. src_p1=&src_0[src_wrap];
  1511. src_p2=&src_p1[src_wrap];
  1512. for(y=0;y<(height-2);y+=2) {
  1513. deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
  1514. src_m1 = src_p1;
  1515. src_0 = src_p2;
  1516. src_p1 += 2*src_wrap;
  1517. src_p2 += 2*src_wrap;
  1518. }
  1519. /* do last line */
  1520. deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
  1521. av_free(buf);
  1522. }
  1523. int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
  1524. enum PixelFormat pix_fmt, int width, int height)
  1525. {
  1526. int i;
  1527. if (pix_fmt != PIX_FMT_YUV420P &&
  1528. pix_fmt != PIX_FMT_YUV422P &&
  1529. pix_fmt != PIX_FMT_YUV444P &&
  1530. pix_fmt != PIX_FMT_YUV411P &&
  1531. pix_fmt != PIX_FMT_GRAY8)
  1532. return -1;
  1533. if ((width & 3) != 0 || (height & 3) != 0)
  1534. return -1;
  1535. for(i=0;i<3;i++) {
  1536. if (i == 1) {
  1537. switch(pix_fmt) {
  1538. case PIX_FMT_YUV420P:
  1539. width >>= 1;
  1540. height >>= 1;
  1541. break;
  1542. case PIX_FMT_YUV422P:
  1543. width >>= 1;
  1544. break;
  1545. case PIX_FMT_YUV411P:
  1546. width >>= 2;
  1547. break;
  1548. default:
  1549. break;
  1550. }
  1551. if (pix_fmt == PIX_FMT_GRAY8) {
  1552. break;
  1553. }
  1554. }
  1555. if (src == dst) {
  1556. deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
  1557. width, height);
  1558. } else {
  1559. deinterlace_bottom_field(dst->data[i],dst->linesize[i],
  1560. src->data[i], src->linesize[i],
  1561. width, height);
  1562. }
  1563. }
  1564. emms_c();
  1565. return 0;
  1566. }
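/* Usage sketch: avpicture_deinterlace() accepts only the planar YUV formats
 * listed above plus gray8, and both width and height must be multiples of 4;
 * passing the same picture as src and dst selects the in-place path.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, width, height) < 0)
 *         av_log(NULL, AV_LOG_ERROR, "cannot deinterlace this format/size\n");
 */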