  1. /*
  2. * Misc image conversion routines
  3. * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. /**
  22. * @file libavcodec/imgconvert.c
  23. * misc image conversion routines
  24. */
  25. /* TODO:
  26. * - write 'ffimg' program to test all the image related stuff
  27. * - move all api to slice based system
  28. * - integrate deinterlacing, postprocessing and scaling in the conversion process
  29. */
  30. #include "avcodec.h"
  31. #include "dsputil.h"
  32. #include "colorspace.h"
  33. #if HAVE_MMX
  34. #include "x86/mmx.h"
  35. #include "x86/dsputil_mmx.h"
  36. #endif
  37. #define xglue(x, y) x ## y
  38. #define glue(x, y) xglue(x, y)
  39. #define FF_COLOR_RGB 0 /**< RGB color space */
  40. #define FF_COLOR_GRAY 1 /**< gray color space */
  41. #define FF_COLOR_YUV 2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
  42. #define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */
  43. #define FF_PIXEL_PLANAR 0 /**< each channel has one component in AVPicture */
  44. #define FF_PIXEL_PACKED 1 /**< only one component containing all the channels */
  45. #define FF_PIXEL_PALETTE 2 /**< one component containing indexes for a palette */
  46. typedef struct PixFmtInfo {
  47. const char *name;
  48. uint8_t nb_channels; /**< number of channels (including alpha) */
  49. uint8_t color_type; /**< color type (see FF_COLOR_xxx constants) */
  50. uint8_t pixel_type; /**< pixel storage type (see FF_PIXEL_xxx constants) */
  51. uint8_t is_alpha : 1; /**< true if alpha can be specified */
  52. uint8_t is_hwaccel : 1; /**< true if this is an HW accelerated format */
  53. uint8_t x_chroma_shift; /**< X chroma subsampling factor is 2 ^ shift */
  54. uint8_t y_chroma_shift; /**< Y chroma subsampling factor is 2 ^ shift */
  55. uint8_t depth; /**< bit depth of the color components */
  56. } PixFmtInfo;
  57. /* this table gives more information about formats */
  58. static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
  59. /* YUV formats */
  60. [PIX_FMT_YUV420P] = {
  61. .name = "yuv420p",
  62. .nb_channels = 3,
  63. .color_type = FF_COLOR_YUV,
  64. .pixel_type = FF_PIXEL_PLANAR,
  65. .depth = 8,
  66. .x_chroma_shift = 1, .y_chroma_shift = 1,
  67. },
  68. [PIX_FMT_YUV422P] = {
  69. .name = "yuv422p",
  70. .nb_channels = 3,
  71. .color_type = FF_COLOR_YUV,
  72. .pixel_type = FF_PIXEL_PLANAR,
  73. .depth = 8,
  74. .x_chroma_shift = 1, .y_chroma_shift = 0,
  75. },
  76. [PIX_FMT_YUV444P] = {
  77. .name = "yuv444p",
  78. .nb_channels = 3,
  79. .color_type = FF_COLOR_YUV,
  80. .pixel_type = FF_PIXEL_PLANAR,
  81. .depth = 8,
  82. .x_chroma_shift = 0, .y_chroma_shift = 0,
  83. },
  84. [PIX_FMT_YUYV422] = {
  85. .name = "yuyv422",
  86. .nb_channels = 1,
  87. .color_type = FF_COLOR_YUV,
  88. .pixel_type = FF_PIXEL_PACKED,
  89. .depth = 8,
  90. .x_chroma_shift = 1, .y_chroma_shift = 0,
  91. },
  92. [PIX_FMT_UYVY422] = {
  93. .name = "uyvy422",
  94. .nb_channels = 1,
  95. .color_type = FF_COLOR_YUV,
  96. .pixel_type = FF_PIXEL_PACKED,
  97. .depth = 8,
  98. .x_chroma_shift = 1, .y_chroma_shift = 0,
  99. },
  100. [PIX_FMT_YUV410P] = {
  101. .name = "yuv410p",
  102. .nb_channels = 3,
  103. .color_type = FF_COLOR_YUV,
  104. .pixel_type = FF_PIXEL_PLANAR,
  105. .depth = 8,
  106. .x_chroma_shift = 2, .y_chroma_shift = 2,
  107. },
  108. [PIX_FMT_YUV411P] = {
  109. .name = "yuv411p",
  110. .nb_channels = 3,
  111. .color_type = FF_COLOR_YUV,
  112. .pixel_type = FF_PIXEL_PLANAR,
  113. .depth = 8,
  114. .x_chroma_shift = 2, .y_chroma_shift = 0,
  115. },
  116. [PIX_FMT_YUV440P] = {
  117. .name = "yuv440p",
  118. .nb_channels = 3,
  119. .color_type = FF_COLOR_YUV,
  120. .pixel_type = FF_PIXEL_PLANAR,
  121. .depth = 8,
  122. .x_chroma_shift = 0, .y_chroma_shift = 1,
  123. },
  124. /* YUV formats with alpha plane */
  125. [PIX_FMT_YUVA420P] = {
  126. .name = "yuva420p",
  127. .nb_channels = 4,
  128. .color_type = FF_COLOR_YUV,
  129. .pixel_type = FF_PIXEL_PLANAR,
  130. .depth = 8,
  131. .x_chroma_shift = 1, .y_chroma_shift = 1,
  132. },
  133. /* JPEG YUV */
  134. [PIX_FMT_YUVJ420P] = {
  135. .name = "yuvj420p",
  136. .nb_channels = 3,
  137. .color_type = FF_COLOR_YUV_JPEG,
  138. .pixel_type = FF_PIXEL_PLANAR,
  139. .depth = 8,
  140. .x_chroma_shift = 1, .y_chroma_shift = 1,
  141. },
  142. [PIX_FMT_YUVJ422P] = {
  143. .name = "yuvj422p",
  144. .nb_channels = 3,
  145. .color_type = FF_COLOR_YUV_JPEG,
  146. .pixel_type = FF_PIXEL_PLANAR,
  147. .depth = 8,
  148. .x_chroma_shift = 1, .y_chroma_shift = 0,
  149. },
  150. [PIX_FMT_YUVJ444P] = {
  151. .name = "yuvj444p",
  152. .nb_channels = 3,
  153. .color_type = FF_COLOR_YUV_JPEG,
  154. .pixel_type = FF_PIXEL_PLANAR,
  155. .depth = 8,
  156. .x_chroma_shift = 0, .y_chroma_shift = 0,
  157. },
  158. [PIX_FMT_YUVJ440P] = {
  159. .name = "yuvj440p",
  160. .nb_channels = 3,
  161. .color_type = FF_COLOR_YUV_JPEG,
  162. .pixel_type = FF_PIXEL_PLANAR,
  163. .depth = 8,
  164. .x_chroma_shift = 0, .y_chroma_shift = 1,
  165. },
  166. /* RGB formats */
  167. [PIX_FMT_RGB24] = {
  168. .name = "rgb24",
  169. .nb_channels = 3,
  170. .color_type = FF_COLOR_RGB,
  171. .pixel_type = FF_PIXEL_PACKED,
  172. .depth = 8,
  173. .x_chroma_shift = 0, .y_chroma_shift = 0,
  174. },
  175. [PIX_FMT_BGR24] = {
  176. .name = "bgr24",
  177. .nb_channels = 3,
  178. .color_type = FF_COLOR_RGB,
  179. .pixel_type = FF_PIXEL_PACKED,
  180. .depth = 8,
  181. .x_chroma_shift = 0, .y_chroma_shift = 0,
  182. },
  183. [PIX_FMT_RGB32] = {
  184. .name = "rgb32",
  185. .nb_channels = 4, .is_alpha = 1,
  186. .color_type = FF_COLOR_RGB,
  187. .pixel_type = FF_PIXEL_PACKED,
  188. .depth = 8,
  189. .x_chroma_shift = 0, .y_chroma_shift = 0,
  190. },
  191. [PIX_FMT_RGB48BE] = {
  192. .name = "rgb48be",
  193. .nb_channels = 3,
  194. .color_type = FF_COLOR_RGB,
  195. .pixel_type = FF_PIXEL_PACKED,
  196. .depth = 16,
  197. .x_chroma_shift = 0, .y_chroma_shift = 0,
  198. },
  199. [PIX_FMT_RGB48LE] = {
  200. .name = "rgb48le",
  201. .nb_channels = 3,
  202. .color_type = FF_COLOR_RGB,
  203. .pixel_type = FF_PIXEL_PACKED,
  204. .depth = 16,
  205. .x_chroma_shift = 0, .y_chroma_shift = 0,
  206. },
  207. [PIX_FMT_RGB565BE] = {
  208. .name = "rgb565be",
  209. .nb_channels = 3,
  210. .color_type = FF_COLOR_RGB,
  211. .pixel_type = FF_PIXEL_PACKED,
  212. .depth = 5,
  213. .x_chroma_shift = 0, .y_chroma_shift = 0,
  214. },
  215. [PIX_FMT_RGB565LE] = {
  216. .name = "rgb565le",
  217. .nb_channels = 3,
  218. .color_type = FF_COLOR_RGB,
  219. .pixel_type = FF_PIXEL_PACKED,
  220. .depth = 5,
  221. .x_chroma_shift = 0, .y_chroma_shift = 0,
  222. },
  223. [PIX_FMT_RGB555BE] = {
  224. .name = "rgb555be",
  225. .nb_channels = 3,
  226. .color_type = FF_COLOR_RGB,
  227. .pixel_type = FF_PIXEL_PACKED,
  228. .depth = 5,
  229. .x_chroma_shift = 0, .y_chroma_shift = 0,
  230. },
  231. [PIX_FMT_RGB555LE] = {
  232. .name = "rgb555le",
  233. .nb_channels = 3,
  234. .color_type = FF_COLOR_RGB,
  235. .pixel_type = FF_PIXEL_PACKED,
  236. .depth = 5,
  237. .x_chroma_shift = 0, .y_chroma_shift = 0,
  238. },
  239. /* gray / mono formats */
  240. [PIX_FMT_GRAY16BE] = {
  241. .name = "gray16be",
  242. .nb_channels = 1,
  243. .color_type = FF_COLOR_GRAY,
  244. .pixel_type = FF_PIXEL_PLANAR,
  245. .depth = 16,
  246. },
  247. [PIX_FMT_GRAY16LE] = {
  248. .name = "gray16le",
  249. .nb_channels = 1,
  250. .color_type = FF_COLOR_GRAY,
  251. .pixel_type = FF_PIXEL_PLANAR,
  252. .depth = 16,
  253. },
  254. [PIX_FMT_GRAY8] = {
  255. .name = "gray",
  256. .nb_channels = 1,
  257. .color_type = FF_COLOR_GRAY,
  258. .pixel_type = FF_PIXEL_PLANAR,
  259. .depth = 8,
  260. },
  261. [PIX_FMT_MONOWHITE] = {
  262. .name = "monow",
  263. .nb_channels = 1,
  264. .color_type = FF_COLOR_GRAY,
  265. .pixel_type = FF_PIXEL_PLANAR,
  266. .depth = 1,
  267. },
  268. [PIX_FMT_MONOBLACK] = {
  269. .name = "monob",
  270. .nb_channels = 1,
  271. .color_type = FF_COLOR_GRAY,
  272. .pixel_type = FF_PIXEL_PLANAR,
  273. .depth = 1,
  274. },
  275. /* paletted formats */
  276. [PIX_FMT_PAL8] = {
  277. .name = "pal8",
  278. .nb_channels = 4, .is_alpha = 1,
  279. .color_type = FF_COLOR_RGB,
  280. .pixel_type = FF_PIXEL_PALETTE,
  281. .depth = 8,
  282. },
  283. [PIX_FMT_XVMC_MPEG2_MC] = {
  284. .name = "xvmcmc",
  285. .is_hwaccel = 1,
  286. },
  287. [PIX_FMT_XVMC_MPEG2_IDCT] = {
  288. .name = "xvmcidct",
  289. .is_hwaccel = 1,
  290. },
  291. [PIX_FMT_VDPAU_MPEG1] = {
  292. .name = "vdpau_mpeg1",
  293. .is_hwaccel = 1,
  294. .x_chroma_shift = 1, .y_chroma_shift = 1,
  295. },
  296. [PIX_FMT_VDPAU_MPEG2] = {
  297. .name = "vdpau_mpeg2",
  298. .is_hwaccel = 1,
  299. .x_chroma_shift = 1, .y_chroma_shift = 1,
  300. },
  301. [PIX_FMT_VDPAU_H264] = {
  302. .name = "vdpau_h264",
  303. .is_hwaccel = 1,
  304. .x_chroma_shift = 1, .y_chroma_shift = 1,
  305. },
  306. [PIX_FMT_VDPAU_WMV3] = {
  307. .name = "vdpau_wmv3",
  308. .is_hwaccel = 1,
  309. .x_chroma_shift = 1, .y_chroma_shift = 1,
  310. },
  311. [PIX_FMT_VDPAU_VC1] = {
  312. .name = "vdpau_vc1",
  313. .is_hwaccel = 1,
  314. .x_chroma_shift = 1, .y_chroma_shift = 1,
  315. },
  316. [PIX_FMT_UYYVYY411] = {
  317. .name = "uyyvyy411",
  318. .nb_channels = 1,
  319. .color_type = FF_COLOR_YUV,
  320. .pixel_type = FF_PIXEL_PACKED,
  321. .depth = 8,
  322. .x_chroma_shift = 2, .y_chroma_shift = 0,
  323. },
  324. [PIX_FMT_BGR32] = {
  325. .name = "bgr32",
  326. .nb_channels = 4, .is_alpha = 1,
  327. .color_type = FF_COLOR_RGB,
  328. .pixel_type = FF_PIXEL_PACKED,
  329. .depth = 8,
  330. .x_chroma_shift = 0, .y_chroma_shift = 0,
  331. },
  332. [PIX_FMT_BGR565BE] = {
  333. .name = "bgr565be",
  334. .nb_channels = 3,
  335. .color_type = FF_COLOR_RGB,
  336. .pixel_type = FF_PIXEL_PACKED,
  337. .depth = 5,
  338. .x_chroma_shift = 0, .y_chroma_shift = 0,
  339. },
  340. [PIX_FMT_BGR565LE] = {
  341. .name = "bgr565le",
  342. .nb_channels = 3,
  343. .color_type = FF_COLOR_RGB,
  344. .pixel_type = FF_PIXEL_PACKED,
  345. .depth = 5,
  346. .x_chroma_shift = 0, .y_chroma_shift = 0,
  347. },
  348. [PIX_FMT_BGR555BE] = {
  349. .name = "bgr555be",
  350. .nb_channels = 3,
  351. .color_type = FF_COLOR_RGB,
  352. .pixel_type = FF_PIXEL_PACKED,
  353. .depth = 5,
  354. .x_chroma_shift = 0, .y_chroma_shift = 0,
  355. },
  356. [PIX_FMT_BGR555LE] = {
  357. .name = "bgr555le",
  358. .nb_channels = 3,
  359. .color_type = FF_COLOR_RGB,
  360. .pixel_type = FF_PIXEL_PACKED,
  361. .depth = 5,
  362. .x_chroma_shift = 0, .y_chroma_shift = 0,
  363. },
  364. [PIX_FMT_RGB8] = {
  365. .name = "rgb8",
  366. .nb_channels = 1,
  367. .color_type = FF_COLOR_RGB,
  368. .pixel_type = FF_PIXEL_PACKED,
  369. .depth = 8,
  370. .x_chroma_shift = 0, .y_chroma_shift = 0,
  371. },
  372. [PIX_FMT_RGB4] = {
  373. .name = "rgb4",
  374. .nb_channels = 1,
  375. .color_type = FF_COLOR_RGB,
  376. .pixel_type = FF_PIXEL_PACKED,
  377. .depth = 4,
  378. .x_chroma_shift = 0, .y_chroma_shift = 0,
  379. },
  380. [PIX_FMT_RGB4_BYTE] = {
  381. .name = "rgb4_byte",
  382. .nb_channels = 1,
  383. .color_type = FF_COLOR_RGB,
  384. .pixel_type = FF_PIXEL_PACKED,
  385. .depth = 8,
  386. .x_chroma_shift = 0, .y_chroma_shift = 0,
  387. },
  388. [PIX_FMT_BGR8] = {
  389. .name = "bgr8",
  390. .nb_channels = 1,
  391. .color_type = FF_COLOR_RGB,
  392. .pixel_type = FF_PIXEL_PACKED,
  393. .depth = 8,
  394. .x_chroma_shift = 0, .y_chroma_shift = 0,
  395. },
  396. [PIX_FMT_BGR4] = {
  397. .name = "bgr4",
  398. .nb_channels = 1,
  399. .color_type = FF_COLOR_RGB,
  400. .pixel_type = FF_PIXEL_PACKED,
  401. .depth = 4,
  402. .x_chroma_shift = 0, .y_chroma_shift = 0,
  403. },
  404. [PIX_FMT_BGR4_BYTE] = {
  405. .name = "bgr4_byte",
  406. .nb_channels = 1,
  407. .color_type = FF_COLOR_RGB,
  408. .pixel_type = FF_PIXEL_PACKED,
  409. .depth = 8,
  410. .x_chroma_shift = 0, .y_chroma_shift = 0,
  411. },
  412. [PIX_FMT_NV12] = {
  413. .name = "nv12",
  414. .nb_channels = 2,
  415. .color_type = FF_COLOR_YUV,
  416. .pixel_type = FF_PIXEL_PLANAR,
  417. .depth = 8,
  418. .x_chroma_shift = 1, .y_chroma_shift = 1,
  419. },
  420. [PIX_FMT_NV21] = {
  421. .name = "nv12",
  422. .nb_channels = 2,
  423. .color_type = FF_COLOR_YUV,
  424. .pixel_type = FF_PIXEL_PLANAR,
  425. .depth = 8,
  426. .x_chroma_shift = 1, .y_chroma_shift = 1,
  427. },
  428. [PIX_FMT_BGR32_1] = {
  429. .name = "bgr32_1",
  430. .nb_channels = 4, .is_alpha = 1,
  431. .color_type = FF_COLOR_RGB,
  432. .pixel_type = FF_PIXEL_PACKED,
  433. .depth = 8,
  434. .x_chroma_shift = 0, .y_chroma_shift = 0,
  435. },
  436. [PIX_FMT_RGB32_1] = {
  437. .name = "rgb32_1",
  438. .nb_channels = 4, .is_alpha = 1,
  439. .color_type = FF_COLOR_RGB,
  440. .pixel_type = FF_PIXEL_PACKED,
  441. .depth = 8,
  442. .x_chroma_shift = 0, .y_chroma_shift = 0,
  443. },
  444. /* VA API formats */
  445. [PIX_FMT_VAAPI_MOCO] = {
  446. .name = "vaapi_moco",
  447. .is_hwaccel = 1,
  448. .x_chroma_shift = 1, .y_chroma_shift = 1,
  449. },
  450. [PIX_FMT_VAAPI_IDCT] = {
  451. .name = "vaapi_idct",
  452. .is_hwaccel = 1,
  453. .x_chroma_shift = 1, .y_chroma_shift = 1,
  454. },
  455. [PIX_FMT_VAAPI_VLD] = {
  456. .name = "vaapi_vld",
  457. .is_hwaccel = 1,
  458. .x_chroma_shift = 1, .y_chroma_shift = 1,
  459. },
  460. };
  461. void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
  462. {
  463. *h_shift = pix_fmt_info[pix_fmt].x_chroma_shift;
  464. *v_shift = pix_fmt_info[pix_fmt].y_chroma_shift;
  465. }
  466. const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
  467. {
  468. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
  469. return NULL;
  470. else
  471. return pix_fmt_info[pix_fmt].name;
  472. }
  473. static enum PixelFormat avcodec_get_pix_fmt_internal(const char *name)
  474. {
  475. int i;
  476. for (i=0; i < PIX_FMT_NB; i++)
  477. if (!strcmp(pix_fmt_info[i].name, name))
  478. return i;
  479. return PIX_FMT_NONE;
  480. }
  481. enum PixelFormat avcodec_get_pix_fmt(const char *name)
  482. {
  483. #ifdef WORDS_BIGENDIAN
  484. # define NE "be"
  485. #else
  486. # define NE "le"
  487. #endif
  488. enum PixelFormat pix_fmt = avcodec_get_pix_fmt_internal(name);
  489. if (pix_fmt == PIX_FMT_NONE) {
  490. char name2[32];
  491. snprintf(name2, sizeof(name2), "%s%s", name, NE);
  492. pix_fmt = avcodec_get_pix_fmt_internal(name2);
  493. }
  494. return pix_fmt;
  495. #undef NE
  496. }
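/* Usage sketch (illustrative only, error handling kept minimal): resolve a
 * pixel format from its name and query its chroma subsampling factors.
 *
 *     enum PixelFormat fmt = avcodec_get_pix_fmt("yuv420p");
 *     int h_shift, v_shift;
 *     if (fmt != PIX_FMT_NONE) {
 *         avcodec_get_chroma_sub_sample(fmt, &h_shift, &v_shift);
 *         // yuv420p: h_shift == 1 and v_shift == 1, i.e. chroma is
 *         // subsampled by 2 both horizontally and vertically
 *     }
 */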
  497. void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
  498. {
  499. /* print header */
  500. if (pix_fmt < 0)
  501. snprintf (buf, buf_size,
  502. "name " " nb_channels" " depth" " is_alpha"
  503. );
  504. else{
  505. PixFmtInfo info= pix_fmt_info[pix_fmt];
  506. char is_alpha_char= info.is_alpha ? 'y' : 'n';
  507. snprintf (buf, buf_size,
  508. "%-10s" " %1d " " %2d " " %c ",
  509. info.name,
  510. info.nb_channels,
  511. info.depth,
  512. is_alpha_char
  513. );
  514. }
  515. }
  516. int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
  517. {
  518. return pix_fmt_info[pix_fmt].is_hwaccel;
  519. }
  520. int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt){
  521. int i;
  522. for(i=0; i<256; i++){
  523. int r,g,b;
  524. switch(pix_fmt) {
  525. case PIX_FMT_RGB8:
  526. r= (i>>5 )*36;
  527. g= ((i>>2)&7)*36;
  528. b= (i&3 )*85;
  529. break;
  530. case PIX_FMT_BGR8:
  531. b= (i>>6 )*85;
  532. g= ((i>>3)&7)*36;
  533. r= (i&7 )*36;
  534. break;
  535. case PIX_FMT_RGB4_BYTE:
  536. r= (i>>3 )*255;
  537. g= ((i>>1)&3)*85;
  538. b= (i&1 )*255;
  539. break;
  540. case PIX_FMT_BGR4_BYTE:
  541. b= (i>>3 )*255;
  542. g= ((i>>1)&3)*85;
  543. r= (i&1 )*255;
  544. break;
  545. case PIX_FMT_GRAY8:
  546. r=b=g= i;
  547. break;
  548. default:
  549. return -1;
  550. }
  551. pal[i] = b + (g<<8) + (r<<16);
  552. }
  553. return 0;
  554. }
  555. int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
  556. {
  557. int w2;
  558. const PixFmtInfo *pinfo;
  559. memset(picture->linesize, 0, sizeof(picture->linesize));
  560. pinfo = &pix_fmt_info[pix_fmt];
  561. switch(pix_fmt) {
  562. case PIX_FMT_YUV420P:
  563. case PIX_FMT_YUV422P:
  564. case PIX_FMT_YUV444P:
  565. case PIX_FMT_YUV410P:
  566. case PIX_FMT_YUV411P:
  567. case PIX_FMT_YUV440P:
  568. case PIX_FMT_YUVJ420P:
  569. case PIX_FMT_YUVJ422P:
  570. case PIX_FMT_YUVJ444P:
  571. case PIX_FMT_YUVJ440P:
  572. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  573. picture->linesize[0] = width;
  574. picture->linesize[1] = w2;
  575. picture->linesize[2] = w2;
  576. break;
  577. case PIX_FMT_YUVA420P:
  578. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  579. picture->linesize[0] = width;
  580. picture->linesize[1] = w2;
  581. picture->linesize[2] = w2;
  582. picture->linesize[3] = width;
  583. break;
  584. case PIX_FMT_NV12:
  585. case PIX_FMT_NV21:
  586. w2 = (width + (1 << pinfo->x_chroma_shift) - 1) >> pinfo->x_chroma_shift;
  587. picture->linesize[0] = width;
  588. picture->linesize[1] = w2;
  589. break;
  590. case PIX_FMT_RGB24:
  591. case PIX_FMT_BGR24:
  592. picture->linesize[0] = width * 3;
  593. break;
  594. case PIX_FMT_RGB32:
  595. case PIX_FMT_BGR32:
  596. case PIX_FMT_RGB32_1:
  597. case PIX_FMT_BGR32_1:
  598. picture->linesize[0] = width * 4;
  599. break;
  600. case PIX_FMT_RGB48BE:
  601. case PIX_FMT_RGB48LE:
  602. picture->linesize[0] = width * 6;
  603. break;
  604. case PIX_FMT_GRAY16BE:
  605. case PIX_FMT_GRAY16LE:
  606. case PIX_FMT_BGR555:
  607. case PIX_FMT_BGR565:
  608. case PIX_FMT_RGB555:
  609. case PIX_FMT_RGB565:
  610. case PIX_FMT_YUYV422:
  611. picture->linesize[0] = width * 2;
  612. break;
  613. case PIX_FMT_UYVY422:
  614. picture->linesize[0] = width * 2;
  615. break;
  616. case PIX_FMT_UYYVYY411:
  617. picture->linesize[0] = width + width/2;
  618. break;
  619. case PIX_FMT_RGB4:
  620. case PIX_FMT_BGR4:
  621. picture->linesize[0] = width / 2;
  622. break;
  623. case PIX_FMT_MONOWHITE:
  624. case PIX_FMT_MONOBLACK:
  625. picture->linesize[0] = (width + 7) >> 3;
  626. break;
  627. case PIX_FMT_PAL8:
  628. case PIX_FMT_RGB8:
  629. case PIX_FMT_BGR8:
  630. case PIX_FMT_RGB4_BYTE:
  631. case PIX_FMT_BGR4_BYTE:
  632. case PIX_FMT_GRAY8:
  633. picture->linesize[0] = width;
  634. break;
  635. default:
  636. return -1;
  637. }
  638. return 0;
  639. }
  640. int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
  641. int height)
  642. {
  643. int size, h2, size2;
  644. const PixFmtInfo *pinfo;
  645. pinfo = &pix_fmt_info[pix_fmt];
  646. size = picture->linesize[0] * height;
  647. switch(pix_fmt) {
  648. case PIX_FMT_YUV420P:
  649. case PIX_FMT_YUV422P:
  650. case PIX_FMT_YUV444P:
  651. case PIX_FMT_YUV410P:
  652. case PIX_FMT_YUV411P:
  653. case PIX_FMT_YUV440P:
  654. case PIX_FMT_YUVJ420P:
  655. case PIX_FMT_YUVJ422P:
  656. case PIX_FMT_YUVJ444P:
  657. case PIX_FMT_YUVJ440P:
  658. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  659. size2 = picture->linesize[1] * h2;
  660. picture->data[0] = ptr;
  661. picture->data[1] = picture->data[0] + size;
  662. picture->data[2] = picture->data[1] + size2;
  663. picture->data[3] = NULL;
  664. return size + 2 * size2;
  665. case PIX_FMT_YUVA420P:
  666. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  667. size2 = picture->linesize[1] * h2;
  668. picture->data[0] = ptr;
  669. picture->data[1] = picture->data[0] + size;
  670. picture->data[2] = picture->data[1] + size2;
  671. picture->data[3] = picture->data[1] + size2 + size2;
  672. return 2 * size + 2 * size2;
  673. case PIX_FMT_NV12:
  674. case PIX_FMT_NV21:
  675. h2 = (height + (1 << pinfo->y_chroma_shift) - 1) >> pinfo->y_chroma_shift;
  676. size2 = picture->linesize[1] * h2 * 2;
  677. picture->data[0] = ptr;
  678. picture->data[1] = picture->data[0] + size;
  679. picture->data[2] = NULL;
  680. picture->data[3] = NULL;
  681. return size + 2 * size2;
  682. case PIX_FMT_RGB24:
  683. case PIX_FMT_BGR24:
  684. case PIX_FMT_RGB32:
  685. case PIX_FMT_BGR32:
  686. case PIX_FMT_RGB32_1:
  687. case PIX_FMT_BGR32_1:
  688. case PIX_FMT_RGB48BE:
  689. case PIX_FMT_RGB48LE:
  690. case PIX_FMT_GRAY16BE:
  691. case PIX_FMT_GRAY16LE:
  692. case PIX_FMT_BGR555:
  693. case PIX_FMT_BGR565:
  694. case PIX_FMT_RGB555:
  695. case PIX_FMT_RGB565:
  696. case PIX_FMT_YUYV422:
  697. case PIX_FMT_UYVY422:
  698. case PIX_FMT_UYYVYY411:
  699. case PIX_FMT_RGB4:
  700. case PIX_FMT_BGR4:
  701. case PIX_FMT_MONOWHITE:
  702. case PIX_FMT_MONOBLACK:
  703. picture->data[0] = ptr;
  704. picture->data[1] = NULL;
  705. picture->data[2] = NULL;
  706. picture->data[3] = NULL;
  707. return size;
  708. case PIX_FMT_PAL8:
  709. case PIX_FMT_RGB8:
  710. case PIX_FMT_BGR8:
  711. case PIX_FMT_RGB4_BYTE:
  712. case PIX_FMT_BGR4_BYTE:
  713. case PIX_FMT_GRAY8:
  714. size2 = (size + 3) & ~3;
  715. picture->data[0] = ptr;
  716. picture->data[1] = ptr + size2; /* palette is stored here as 256 32 bit words */
  717. picture->data[2] = NULL;
  718. picture->data[3] = NULL;
  719. return size2 + 256 * 4;
  720. default:
  721. picture->data[0] = NULL;
  722. picture->data[1] = NULL;
  723. picture->data[2] = NULL;
  724. picture->data[3] = NULL;
  725. return -1;
  726. }
  727. }
  728. int avpicture_fill(AVPicture *picture, uint8_t *ptr,
  729. enum PixelFormat pix_fmt, int width, int height)
  730. {
  731. if(avcodec_check_dimensions(NULL, width, height))
  732. return -1;
  733. if (ff_fill_linesize(picture, pix_fmt, width))
  734. return -1;
  735. return ff_fill_pointer(picture, ptr, pix_fmt, height);
  736. }
  737. int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
  738. unsigned char *dest, int dest_size)
  739. {
  740. const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
  741. int i, j, w, ow, h, oh, data_planes;
  742. const unsigned char* s;
  743. int size = avpicture_get_size(pix_fmt, width, height);
  744. if (size > dest_size || size < 0)
  745. return -1;
  746. if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
  747. if (pix_fmt == PIX_FMT_YUYV422 ||
  748. pix_fmt == PIX_FMT_UYVY422 ||
  749. pix_fmt == PIX_FMT_BGR565 ||
  750. pix_fmt == PIX_FMT_BGR555 ||
  751. pix_fmt == PIX_FMT_RGB565 ||
  752. pix_fmt == PIX_FMT_RGB555)
  753. w = width * 2;
  754. else if (pix_fmt == PIX_FMT_UYYVYY411)
  755. w = width + width/2;
  756. else if (pix_fmt == PIX_FMT_PAL8)
  757. w = width;
  758. else
  759. w = width * (pf->depth * pf->nb_channels / 8);
  760. data_planes = 1;
  761. h = height;
  762. } else {
  763. data_planes = pf->nb_channels;
  764. w = (width*pf->depth + 7)/8;
  765. h = height;
  766. }
  767. ow = w;
  768. oh = h;
  769. for (i=0; i<data_planes; i++) {
  770. if (i == 1) {
  771. w = width >> pf->x_chroma_shift;
  772. h = height >> pf->y_chroma_shift;
  773. } else if (i == 3) {
  774. w = ow;
  775. h = oh;
  776. }
  777. s = src->data[i];
  778. for(j=0; j<h; j++) {
  779. memcpy(dest, s, w);
  780. dest += w;
  781. s += src->linesize[i];
  782. }
  783. }
  784. if (pf->pixel_type == FF_PIXEL_PALETTE)
  785. memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);
  786. return size;
  787. }
  788. int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
  789. {
  790. AVPicture dummy_pict;
  791. if(avcodec_check_dimensions(NULL, width, height))
  792. return -1;
  793. switch (pix_fmt) {
  794. case PIX_FMT_RGB8:
  795. case PIX_FMT_BGR8:
  796. case PIX_FMT_RGB4_BYTE:
  797. case PIX_FMT_BGR4_BYTE:
  798. case PIX_FMT_GRAY8:
  799. // do not include palette for these pseudo-paletted formats
  800. return width * height;
  801. }
  802. return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
  803. }
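/* Usage sketch (illustrative; the dimensions are example values and error
 * checks are omitted): allocate a buffer of the exact size reported by
 * avpicture_get_size() and describe it with avpicture_fill().
 *
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 352, 288);
 *     uint8_t *buf = av_malloc(size);
 *     AVPicture pic;
 *     avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 352, 288);
 *     // pic.data[0..2] and pic.linesize[0..2] now describe the Y, U and V
 *     // planes laid out contiguously inside buf
 */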
  804. int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
  805. int has_alpha)
  806. {
  807. const PixFmtInfo *pf, *ps;
  808. int loss;
  809. ps = &pix_fmt_info[src_pix_fmt];
  810. pf = &pix_fmt_info[dst_pix_fmt];
  811. /* compute loss */
  812. loss = 0;
  813. pf = &pix_fmt_info[dst_pix_fmt];
  814. if (pf->depth < ps->depth ||
  815. (dst_pix_fmt == PIX_FMT_RGB555 && src_pix_fmt == PIX_FMT_RGB565))
  816. loss |= FF_LOSS_DEPTH;
  817. if (pf->x_chroma_shift > ps->x_chroma_shift ||
  818. pf->y_chroma_shift > ps->y_chroma_shift)
  819. loss |= FF_LOSS_RESOLUTION;
  820. switch(pf->color_type) {
  821. case FF_COLOR_RGB:
  822. if (ps->color_type != FF_COLOR_RGB &&
  823. ps->color_type != FF_COLOR_GRAY)
  824. loss |= FF_LOSS_COLORSPACE;
  825. break;
  826. case FF_COLOR_GRAY:
  827. if (ps->color_type != FF_COLOR_GRAY)
  828. loss |= FF_LOSS_COLORSPACE;
  829. break;
  830. case FF_COLOR_YUV:
  831. if (ps->color_type != FF_COLOR_YUV)
  832. loss |= FF_LOSS_COLORSPACE;
  833. break;
  834. case FF_COLOR_YUV_JPEG:
  835. if (ps->color_type != FF_COLOR_YUV_JPEG &&
  836. ps->color_type != FF_COLOR_YUV &&
  837. ps->color_type != FF_COLOR_GRAY)
  838. loss |= FF_LOSS_COLORSPACE;
  839. break;
  840. default:
  841. /* fail safe test */
  842. if (ps->color_type != pf->color_type)
  843. loss |= FF_LOSS_COLORSPACE;
  844. break;
  845. }
  846. if (pf->color_type == FF_COLOR_GRAY &&
  847. ps->color_type != FF_COLOR_GRAY)
  848. loss |= FF_LOSS_CHROMA;
  849. if (!pf->is_alpha && (ps->is_alpha && has_alpha))
  850. loss |= FF_LOSS_ALPHA;
  851. if (pf->pixel_type == FF_PIXEL_PALETTE &&
  852. (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
  853. loss |= FF_LOSS_COLORQUANT;
  854. return loss;
  855. }
  856. static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
  857. {
  858. int bits;
  859. const PixFmtInfo *pf;
  860. pf = &pix_fmt_info[pix_fmt];
  861. switch(pf->pixel_type) {
  862. case FF_PIXEL_PACKED:
  863. switch(pix_fmt) {
  864. case PIX_FMT_YUYV422:
  865. case PIX_FMT_UYVY422:
  866. case PIX_FMT_RGB565:
  867. case PIX_FMT_RGB555:
  868. case PIX_FMT_BGR565:
  869. case PIX_FMT_BGR555:
  870. bits = 16;
  871. break;
  872. case PIX_FMT_UYYVYY411:
  873. bits = 12;
  874. break;
  875. default:
  876. bits = pf->depth * pf->nb_channels;
  877. break;
  878. }
  879. break;
  880. case FF_PIXEL_PLANAR:
  881. if (pf->x_chroma_shift == 0 && pf->y_chroma_shift == 0) {
  882. bits = pf->depth * pf->nb_channels;
  883. } else {
  884. bits = pf->depth + ((2 * pf->depth) >>
  885. (pf->x_chroma_shift + pf->y_chroma_shift));
  886. }
  887. break;
  888. case FF_PIXEL_PALETTE:
  889. bits = 8;
  890. break;
  891. default:
  892. bits = -1;
  893. break;
  894. }
  895. return bits;
  896. }
  897. static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
  898. enum PixelFormat src_pix_fmt,
  899. int has_alpha,
  900. int loss_mask)
  901. {
  902. int dist, i, loss, min_dist;
  903. enum PixelFormat dst_pix_fmt;
  904. /* find exact color match with smallest size */
  905. dst_pix_fmt = -1;
  906. min_dist = 0x7fffffff;
  907. for(i = 0;i < PIX_FMT_NB; i++) {
  908. if (pix_fmt_mask & (1ULL << i)) {
  909. loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
  910. if (loss == 0) {
  911. dist = avg_bits_per_pixel(i);
  912. if (dist < min_dist) {
  913. min_dist = dist;
  914. dst_pix_fmt = i;
  915. }
  916. }
  917. }
  918. }
  919. return dst_pix_fmt;
  920. }
  921. enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
  922. int has_alpha, int *loss_ptr)
  923. {
  924. enum PixelFormat dst_pix_fmt;
  925. int loss_mask, i;
  926. static const int loss_mask_order[] = {
  927. ~0, /* no loss first */
  928. ~FF_LOSS_ALPHA,
  929. ~FF_LOSS_RESOLUTION,
  930. ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
  931. ~FF_LOSS_COLORQUANT,
  932. ~FF_LOSS_DEPTH,
  933. 0,
  934. };
  935. /* try with successive loss */
  936. i = 0;
  937. for(;;) {
  938. loss_mask = loss_mask_order[i++];
  939. dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
  940. has_alpha, loss_mask);
  941. if (dst_pix_fmt >= 0)
  942. goto found;
  943. if (loss_mask == 0)
  944. break;
  945. }
  946. return -1;
  947. found:
  948. if (loss_ptr)
  949. *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
  950. return dst_pix_fmt;
  951. }
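/* Usage sketch (illustrative): pick the best destination format for a
 * hypothetical encoder that only supports yuv420p and rgb24, converting from
 * a yuv422p source without alpha.
 *
 *     int loss;
 *     int64_t mask = (1ULL << PIX_FMT_YUV420P) | (1ULL << PIX_FMT_RGB24);
 *     enum PixelFormat dst = avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV422P,
 *                                                      0, &loss);
 *     // dst is PIX_FMT_YUV420P (it keeps the YUV colorspace); loss has
 *     // FF_LOSS_RESOLUTION set because vertical chroma resolution is halved
 */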
  952. void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
  953. const uint8_t *src, int src_wrap,
  954. int width, int height)
  955. {
  956. if((!dst) || (!src))
  957. return;
  958. for(;height > 0; height--) {
  959. memcpy(dst, src, width);
  960. dst += dst_wrap;
  961. src += src_wrap;
  962. }
  963. }
  964. int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
  965. {
  966. int bits;
  967. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  968. pf = &pix_fmt_info[pix_fmt];
  969. switch(pf->pixel_type) {
  970. case FF_PIXEL_PACKED:
  971. switch(pix_fmt) {
  972. case PIX_FMT_YUYV422:
  973. case PIX_FMT_UYVY422:
  974. case PIX_FMT_RGB565:
  975. case PIX_FMT_RGB555:
  976. case PIX_FMT_BGR565:
  977. case PIX_FMT_BGR555:
  978. bits = 16;
  979. break;
  980. case PIX_FMT_UYYVYY411:
  981. bits = 12;
  982. break;
  983. default:
  984. bits = pf->depth * pf->nb_channels;
  985. break;
  986. }
  987. return (width * bits + 7) >> 3;
  988. break;
  989. case FF_PIXEL_PLANAR:
  990. if (plane == 1 || plane == 2)
  991. width= -((-width)>>pf->x_chroma_shift);
  992. return (width * pf->depth + 7) >> 3;
  993. break;
  994. case FF_PIXEL_PALETTE:
  995. if (plane == 0)
  996. return width;
  997. break;
  998. }
  999. return -1;
  1000. }
  1001. void av_picture_copy(AVPicture *dst, const AVPicture *src,
  1002. enum PixelFormat pix_fmt, int width, int height)
  1003. {
  1004. int i;
  1005. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  1006. pf = &pix_fmt_info[pix_fmt];
  1007. switch(pf->pixel_type) {
  1008. case FF_PIXEL_PACKED:
  1009. case FF_PIXEL_PLANAR:
  1010. for(i = 0; i < pf->nb_channels; i++) {
  1011. int h;
  1012. int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
  1013. h = height;
  1014. if (i == 1 || i == 2) {
  1015. h= -((-height)>>pf->y_chroma_shift);
  1016. }
  1017. ff_img_copy_plane(dst->data[i], dst->linesize[i],
  1018. src->data[i], src->linesize[i],
  1019. bwidth, h);
  1020. }
  1021. break;
  1022. case FF_PIXEL_PALETTE:
  1023. ff_img_copy_plane(dst->data[0], dst->linesize[0],
  1024. src->data[0], src->linesize[0],
  1025. width, height);
  1026. /* copy the palette */
  1027. ff_img_copy_plane(dst->data[1], dst->linesize[1],
  1028. src->data[1], src->linesize[1],
  1029. 4, 256);
  1030. break;
  1031. }
  1032. }
  1033. /* 2x2 -> 1x1 */
  1034. void ff_shrink22(uint8_t *dst, int dst_wrap,
  1035. const uint8_t *src, int src_wrap,
  1036. int width, int height)
  1037. {
  1038. int w;
  1039. const uint8_t *s1, *s2;
  1040. uint8_t *d;
  1041. for(;height > 0; height--) {
  1042. s1 = src;
  1043. s2 = s1 + src_wrap;
  1044. d = dst;
  1045. for(w = width;w >= 4; w-=4) {
  1046. d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
  1047. d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
  1048. d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
  1049. d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
  1050. s1 += 8;
  1051. s2 += 8;
  1052. d += 4;
  1053. }
  1054. for(;w > 0; w--) {
  1055. d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
  1056. s1 += 2;
  1057. s2 += 2;
  1058. d++;
  1059. }
  1060. src += 2 * src_wrap;
  1061. dst += dst_wrap;
  1062. }
  1063. }
  1064. /* 4x4 -> 1x1 */
  1065. void ff_shrink44(uint8_t *dst, int dst_wrap,
  1066. const uint8_t *src, int src_wrap,
  1067. int width, int height)
  1068. {
  1069. int w;
  1070. const uint8_t *s1, *s2, *s3, *s4;
  1071. uint8_t *d;
  1072. for(;height > 0; height--) {
  1073. s1 = src;
  1074. s2 = s1 + src_wrap;
  1075. s3 = s2 + src_wrap;
  1076. s4 = s3 + src_wrap;
  1077. d = dst;
  1078. for(w = width;w > 0; w--) {
  1079. d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
  1080. s2[0] + s2[1] + s2[2] + s2[3] +
  1081. s3[0] + s3[1] + s3[2] + s3[3] +
  1082. s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
  1083. s1 += 4;
  1084. s2 += 4;
  1085. s3 += 4;
  1086. s4 += 4;
  1087. d++;
  1088. }
  1089. src += 4 * src_wrap;
  1090. dst += dst_wrap;
  1091. }
  1092. }
  1093. /* 8x8 -> 1x1 */
  1094. void ff_shrink88(uint8_t *dst, int dst_wrap,
  1095. const uint8_t *src, int src_wrap,
  1096. int width, int height)
  1097. {
  1098. int w, i;
  1099. for(;height > 0; height--) {
  1100. for(w = width;w > 0; w--) {
  1101. int tmp=0;
  1102. for(i=0; i<8; i++){
  1103. tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
  1104. src += src_wrap;
  1105. }
  1106. *(dst++) = (tmp + 32)>>6;
  1107. src += 8 - 8*src_wrap;
  1108. }
  1109. src += 8*src_wrap - 8*width;
  1110. dst += dst_wrap - width;
  1111. }
  1112. }
  1113. int avpicture_alloc(AVPicture *picture,
  1114. enum PixelFormat pix_fmt, int width, int height)
  1115. {
  1116. int size;
  1117. void *ptr;
  1118. size = avpicture_fill(picture, NULL, pix_fmt, width, height);
  1119. if(size<0)
  1120. goto fail;
  1121. ptr = av_malloc(size);
  1122. if (!ptr)
  1123. goto fail;
  1124. avpicture_fill(picture, ptr, pix_fmt, width, height);
  1125. if(picture->data[1] && !picture->data[2])
  1126. ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);
  1127. return 0;
  1128. fail:
  1129. memset(picture, 0, sizeof(AVPicture));
  1130. return -1;
  1131. }
  1132. void avpicture_free(AVPicture *picture)
  1133. {
  1134. av_free(picture->data[0]);
  1135. }
  1136. /* return true if yuv planar */
  1137. static inline int is_yuv_planar(const PixFmtInfo *ps)
  1138. {
  1139. return (ps->color_type == FF_COLOR_YUV ||
  1140. ps->color_type == FF_COLOR_YUV_JPEG) &&
  1141. ps->pixel_type == FF_PIXEL_PLANAR;
  1142. }
  1143. int av_picture_crop(AVPicture *dst, const AVPicture *src,
  1144. int pix_fmt, int top_band, int left_band)
  1145. {
  1146. int y_shift;
  1147. int x_shift;
  1148. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
  1149. return -1;
  1150. y_shift = pix_fmt_info[pix_fmt].y_chroma_shift;
  1151. x_shift = pix_fmt_info[pix_fmt].x_chroma_shift;
  1152. dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
  1153. dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
  1154. dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);
  1155. dst->linesize[0] = src->linesize[0];
  1156. dst->linesize[1] = src->linesize[1];
  1157. dst->linesize[2] = src->linesize[2];
  1158. return 0;
  1159. }
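/* Usage sketch (illustrative; src_pic is a hypothetical already-filled
 * picture): crop 16 pixels off the top and left of a yuv420p picture without
 * copying any pixel data.
 *
 *     AVPicture cropped;
 *     av_picture_crop(&cropped, &src_pic, PIX_FMT_YUV420P, 16, 16);
 *     // cropped.data[] now points into src_pic's buffers; both band sizes
 *     // should be even so the chroma planes stay aligned with the luma plane
 */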
  1160. int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
  1161. enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
  1162. int *color)
  1163. {
  1164. uint8_t *optr;
  1165. int y_shift;
  1166. int x_shift;
  1167. int yheight;
  1168. int i, y;
  1169. if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
  1170. !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;
  1171. for (i = 0; i < 3; i++) {
  1172. x_shift = i ? pix_fmt_info[pix_fmt].x_chroma_shift : 0;
  1173. y_shift = i ? pix_fmt_info[pix_fmt].y_chroma_shift : 0;
  1174. if (padtop || padleft) {
  1175. memset(dst->data[i], color[i],
  1176. dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
  1177. }
  1178. if (padleft || padright) {
  1179. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1180. (dst->linesize[i] - (padright >> x_shift));
  1181. yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
  1182. for (y = 0; y < yheight; y++) {
  1183. memset(optr, color[i], (padleft + padright) >> x_shift);
  1184. optr += dst->linesize[i];
  1185. }
  1186. }
  1187. if (src) { /* first line */
  1188. uint8_t *iptr = src->data[i];
  1189. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1190. (padleft >> x_shift);
  1191. memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
  1192. iptr += src->linesize[i];
  1193. optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
  1194. (dst->linesize[i] - (padright >> x_shift));
  1195. yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
  1196. for (y = 0; y < yheight; y++) {
  1197. memset(optr, color[i], (padleft + padright) >> x_shift);
  1198. memcpy(optr + ((padleft + padright) >> x_shift), iptr,
  1199. (width - padleft - padright) >> x_shift);
  1200. iptr += src->linesize[i];
  1201. optr += dst->linesize[i];
  1202. }
  1203. }
  1204. if (padbottom || padright) {
  1205. optr = dst->data[i] + dst->linesize[i] *
  1206. ((height - padbottom) >> y_shift) - (padright >> x_shift);
  1207. memset(optr, color[i],dst->linesize[i] *
  1208. (padbottom >> y_shift) + (padright >> x_shift));
  1209. }
  1210. }
  1211. return 0;
  1212. }
  1213. /* NOTE: we scan all the pixels to get exact alpha information */
  1214. static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
  1215. {
  1216. const unsigned char *p;
  1217. int src_wrap, ret, x, y;
  1218. unsigned int a;
  1219. uint32_t *palette = (uint32_t *)src->data[1];
  1220. p = src->data[0];
  1221. src_wrap = src->linesize[0] - width;
  1222. ret = 0;
  1223. for(y=0;y<height;y++) {
  1224. for(x=0;x<width;x++) {
  1225. a = palette[p[0]] >> 24;
  1226. if (a == 0x00) {
  1227. ret |= FF_ALPHA_TRANSP;
  1228. } else if (a != 0xff) {
  1229. ret |= FF_ALPHA_SEMI_TRANSP;
  1230. }
  1231. p++;
  1232. }
  1233. p += src_wrap;
  1234. }
  1235. return ret;
  1236. }
  1237. int img_get_alpha_info(const AVPicture *src,
  1238. enum PixelFormat pix_fmt, int width, int height)
  1239. {
  1240. const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
  1241. int ret;
  1242. pf = &pix_fmt_info[pix_fmt];
  1243. /* no alpha can be represented in format */
  1244. if (!pf->is_alpha)
  1245. return 0;
  1246. switch(pix_fmt) {
  1247. case PIX_FMT_PAL8:
  1248. ret = get_alpha_info_pal8(src, width, height);
  1249. break;
  1250. default:
  1251. /* we do not know, so everything is indicated */
  1252. ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
  1253. break;
  1254. }
  1255. return ret;
  1256. }
  1257. #if HAVE_MMX
  1258. #define DEINT_INPLACE_LINE_LUM \
  1259. movd_m2r(lum_m4[0],mm0);\
  1260. movd_m2r(lum_m3[0],mm1);\
  1261. movd_m2r(lum_m2[0],mm2);\
  1262. movd_m2r(lum_m1[0],mm3);\
  1263. movd_m2r(lum[0],mm4);\
  1264. punpcklbw_r2r(mm7,mm0);\
  1265. movd_r2m(mm2,lum_m4[0]);\
  1266. punpcklbw_r2r(mm7,mm1);\
  1267. punpcklbw_r2r(mm7,mm2);\
  1268. punpcklbw_r2r(mm7,mm3);\
  1269. punpcklbw_r2r(mm7,mm4);\
  1270. paddw_r2r(mm3,mm1);\
  1271. psllw_i2r(1,mm2);\
  1272. paddw_r2r(mm4,mm0);\
  1273. psllw_i2r(2,mm1);\
  1274. paddw_r2r(mm6,mm2);\
  1275. paddw_r2r(mm2,mm1);\
  1276. psubusw_r2r(mm0,mm1);\
  1277. psrlw_i2r(3,mm1);\
  1278. packuswb_r2r(mm7,mm1);\
  1279. movd_r2m(mm1,lum_m2[0]);
  1280. #define DEINT_LINE_LUM \
  1281. movd_m2r(lum_m4[0],mm0);\
  1282. movd_m2r(lum_m3[0],mm1);\
  1283. movd_m2r(lum_m2[0],mm2);\
  1284. movd_m2r(lum_m1[0],mm3);\
  1285. movd_m2r(lum[0],mm4);\
  1286. punpcklbw_r2r(mm7,mm0);\
  1287. punpcklbw_r2r(mm7,mm1);\
  1288. punpcklbw_r2r(mm7,mm2);\
  1289. punpcklbw_r2r(mm7,mm3);\
  1290. punpcklbw_r2r(mm7,mm4);\
  1291. paddw_r2r(mm3,mm1);\
  1292. psllw_i2r(1,mm2);\
  1293. paddw_r2r(mm4,mm0);\
  1294. psllw_i2r(2,mm1);\
  1295. paddw_r2r(mm6,mm2);\
  1296. paddw_r2r(mm2,mm1);\
  1297. psubusw_r2r(mm0,mm1);\
  1298. psrlw_i2r(3,mm1);\
  1299. packuswb_r2r(mm7,mm1);\
  1300. movd_r2m(mm1,dst[0]);
  1301. #endif
  1302. /* filter parameters: [-1 4 2 4 -1] // 8 */
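/* For vertically adjacent samples a, b, c, d, e this computes
 * clip((-a + 4*b + 2*c + 4*d - e + 4) >> 3), i.e. the [-1 4 2 4 -1] kernel
 * divided by 8 with rounding; the C and MMX paths below implement the same
 * formula. */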
  1303. static void deinterlace_line(uint8_t *dst,
  1304. const uint8_t *lum_m4, const uint8_t *lum_m3,
  1305. const uint8_t *lum_m2, const uint8_t *lum_m1,
  1306. const uint8_t *lum,
  1307. int size)
  1308. {
  1309. #if !HAVE_MMX
  1310. uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
  1311. int sum;
  1312. for(;size > 0;size--) {
  1313. sum = -lum_m4[0];
  1314. sum += lum_m3[0] << 2;
  1315. sum += lum_m2[0] << 1;
  1316. sum += lum_m1[0] << 2;
  1317. sum += -lum[0];
  1318. dst[0] = cm[(sum + 4) >> 3];
  1319. lum_m4++;
  1320. lum_m3++;
  1321. lum_m2++;
  1322. lum_m1++;
  1323. lum++;
  1324. dst++;
  1325. }
  1326. #else
  1327. {
  1328. pxor_r2r(mm7,mm7);
  1329. movq_m2r(ff_pw_4,mm6);
  1330. }
  1331. for (;size > 3; size-=4) {
  1332. DEINT_LINE_LUM
  1333. lum_m4+=4;
  1334. lum_m3+=4;
  1335. lum_m2+=4;
  1336. lum_m1+=4;
  1337. lum+=4;
  1338. dst+=4;
  1339. }
  1340. #endif
  1341. }
  1342. static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
  1343. int size)
  1344. {
  1345. #if !HAVE_MMX
  1346. uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
  1347. int sum;
  1348. for(;size > 0;size--) {
  1349. sum = -lum_m4[0];
  1350. sum += lum_m3[0] << 2;
  1351. sum += lum_m2[0] << 1;
  1352. lum_m4[0]=lum_m2[0];
  1353. sum += lum_m1[0] << 2;
  1354. sum += -lum[0];
  1355. lum_m2[0] = cm[(sum + 4) >> 3];
  1356. lum_m4++;
  1357. lum_m3++;
  1358. lum_m2++;
  1359. lum_m1++;
  1360. lum++;
  1361. }
  1362. #else
  1363. {
  1364. pxor_r2r(mm7,mm7);
  1365. movq_m2r(ff_pw_4,mm6);
  1366. }
  1367. for (;size > 3; size-=4) {
  1368. DEINT_INPLACE_LINE_LUM
  1369. lum_m4+=4;
  1370. lum_m3+=4;
  1371. lum_m2+=4;
  1372. lum_m1+=4;
  1373. lum+=4;
  1374. }
  1375. #endif
  1376. }
  1377. /* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
  1378. top field is copied as is, but the bottom field is deinterlaced
  1379. against the top field. */
  1380. static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
  1381. const uint8_t *src1, int src_wrap,
  1382. int width, int height)
  1383. {
  1384. const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
  1385. int y;
  1386. src_m2 = src1;
  1387. src_m1 = src1;
  1388. src_0=&src_m1[src_wrap];
  1389. src_p1=&src_0[src_wrap];
  1390. src_p2=&src_p1[src_wrap];
  1391. for(y=0;y<(height-2);y+=2) {
  1392. memcpy(dst,src_m1,width);
  1393. dst += dst_wrap;
  1394. deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
  1395. src_m2 = src_0;
  1396. src_m1 = src_p1;
  1397. src_0 = src_p2;
  1398. src_p1 += 2*src_wrap;
  1399. src_p2 += 2*src_wrap;
  1400. dst += dst_wrap;
  1401. }
  1402. memcpy(dst,src_m1,width);
  1403. dst += dst_wrap;
  1404. /* do last line */
  1405. deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
  1406. }
  1407. static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
  1408. int width, int height)
  1409. {
  1410. uint8_t *src_m1, *src_0, *src_p1, *src_p2;
  1411. int y;
  1412. uint8_t *buf;
  1413. buf = (uint8_t*)av_malloc(width);
  1414. src_m1 = src1;
  1415. memcpy(buf,src_m1,width);
  1416. src_0=&src_m1[src_wrap];
  1417. src_p1=&src_0[src_wrap];
  1418. src_p2=&src_p1[src_wrap];
  1419. for(y=0;y<(height-2);y+=2) {
  1420. deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
  1421. src_m1 = src_p1;
  1422. src_0 = src_p2;
  1423. src_p1 += 2*src_wrap;
  1424. src_p2 += 2*src_wrap;
  1425. }
  1426. /* do last line */
  1427. deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
  1428. av_free(buf);
  1429. }
  1430. int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
  1431. enum PixelFormat pix_fmt, int width, int height)
  1432. {
  1433. int i;
  1434. if (pix_fmt != PIX_FMT_YUV420P &&
  1435. pix_fmt != PIX_FMT_YUV422P &&
  1436. pix_fmt != PIX_FMT_YUV444P &&
  1437. pix_fmt != PIX_FMT_YUV411P &&
  1438. pix_fmt != PIX_FMT_GRAY8)
  1439. return -1;
  1440. if ((width & 3) != 0 || (height & 3) != 0)
  1441. return -1;
  1442. for(i=0;i<3;i++) {
  1443. if (i == 1) {
  1444. switch(pix_fmt) {
  1445. case PIX_FMT_YUV420P:
  1446. width >>= 1;
  1447. height >>= 1;
  1448. break;
  1449. case PIX_FMT_YUV422P:
  1450. width >>= 1;
  1451. break;
  1452. case PIX_FMT_YUV411P:
  1453. width >>= 2;
  1454. break;
  1455. default:
  1456. break;
  1457. }
  1458. if (pix_fmt == PIX_FMT_GRAY8) {
  1459. break;
  1460. }
  1461. }
  1462. if (src == dst) {
  1463. deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
  1464. width, height);
  1465. } else {
  1466. deinterlace_bottom_field(dst->data[i],dst->linesize[i],
  1467. src->data[i], src->linesize[i],
  1468. width, height);
  1469. }
  1470. }
  1471. emms_c();
  1472. return 0;
  1473. }