/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/imgconvert.c
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "colorspace.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/pixdesc.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR   0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED   1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE  2 /**< one component containing indexes for a palette */
typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR444BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR444LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};
void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
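
/* Usage sketch (added for illustration, not part of the original source):
 * the shifts describe the chroma subsampling, so for PIX_FMT_YUV420P both
 * are 1 and a chroma plane spans half the luma width and height (rounded up):
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     int chroma_w = -((-width)  >> h_shift);
 *     int chroma_h = -((-height) >> v_shift);
 */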

const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}
#endif

void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf (buf, buf_size,
                  "name " " nb_channels" " depth" " is_alpha"
            );
    else {
        PixFmtInfo info= pix_fmt_info[pix_fmt];
        char is_alpha_char= info.is_alpha ? 'y' : 'n';
        snprintf (buf, buf_size,
                  "%-11s %5d %9d %6c",
                  av_pix_fmt_descriptors[pix_fmt].name,
                  info.nb_channels,
                  info.depth,
                  is_alpha_char
            );
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt){
    int i;

    for(i=0; i<256; i++){
        int r,g,b;

        switch(pix_fmt) {
        case PIX_FMT_RGB8:
            r= (i>>5    )*36;
            g= ((i>>2)&7)*36;
            b= (i&3     )*85;
            break;
        case PIX_FMT_BGR8:
            b= (i>>6    )*85;
            g= ((i>>3)&7)*36;
            r= (i&7     )*36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r= (i>>3    )*255;
            g= ((i>>1)&3)*85;
            b= (i&1     )*255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b= (i>>3    )*255;
            g= ((i>>1)&3)*85;
            r= (i&1     )*255;
            break;
        case PIX_FMT_GRAY8:
            r=b=g= i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g<<8) + (r<<16);
    }

    return 0;
}
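
/* Illustration (not in the original file): building the fixed palette used
 * for pseudo-paletted output; entries are packed as 0x00RRGGBB.
 *
 *     uint32_t pal[256];
 *     if (ff_set_systematic_pal(pal, PIX_FMT_RGB8) == 0) {
 *         // pal[0] == 0x00000000 (black), pal[255] == 0x00FCFCFF (near white)
 *     }
 */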

int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    int w2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    memset(picture->linesize, 0, sizeof(picture->linesize));

    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        break;
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = 2*width;
        picture->linesize[1] = 2*w2;
        picture->linesize[2] = 2*w2;
        break;
    case PIX_FMT_YUVA420P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        picture->linesize[3] = width;
        break;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = 2 * w2;
        break;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        picture->linesize[0] = width * 3;
        break;
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
        picture->linesize[0] = width * 4;
        break;
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
        picture->linesize[0] = width * 6;
        break;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR444BE:
    case PIX_FMT_BGR444LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB444BE:
    case PIX_FMT_RGB444LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYVY422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYYVYY411:
        picture->linesize[0] = width + width/2;
        break;
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
        picture->linesize[0] = width / 2;
        break;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->linesize[0] = (width + 7) >> 3;
        break;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        picture->linesize[0] = width;
        break;
    default:
        return -1;
    }
    return 0;
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    int size, h2, size2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    size = picture->linesize[0] * height;
    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_YUVA420P:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = picture->data[1] + size2 + size2;
        return 2 * size + 2 * size2;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size + size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR444BE:
    case PIX_FMT_BGR444LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB444BE:
    case PIX_FMT_RGB444LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
    case PIX_FMT_UYVY422:
    case PIX_FMT_UYYVYY411:
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->data[0] = ptr;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        size2 = (size + 3) & ~3;
        picture->data[0] = ptr;
        picture->data[1] = ptr + size2; /* palette is stored here as 256 32 bit words */
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size2 + 256 * 4;
    default:
        picture->data[0] = NULL;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return -1;
    }
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (ff_fill_linesize(picture, pix_fmt, width))
        return -1;

    return ff_fill_pointer(picture, ptr, pix_fmt, height);
}
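
/* Usage sketch (illustrative addition): wrapping an existing buffer as an
 * AVPicture; the return value is the number of bytes consumed from 'buf'.
 *
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, w, h);
 *     uint8_t *buf = av_malloc(size);
 *     AVPicture pic;
 *     if (buf && avpicture_fill(&pic, buf, PIX_FMT_YUV420P, w, h) >= 0) {
 *         // pic.data[0..2] and pic.linesize[0..2] now describe the planes
 *     }
 */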

int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_BGR444BE ||
            pix_fmt == PIX_FMT_BGR444LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE ||
            pix_fmt == PIX_FMT_RGB444BE ||
            pix_fmt == PIX_FMT_RGB444LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i=0; i<data_planes; i++) {
        if (i == 1) {
            w = (- ((-width) >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = -((-height) >> desc->log2_chroma_h);
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for(j=0; j<h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}
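
/* Worked example (added for illustration): for PIX_FMT_YUV420P at 320x240 the
 * luma plane is 320*240 bytes and each chroma plane 160*120 bytes, so
 * avpicture_get_size() returns 320*240 + 2*160*120 = 115200 bytes.
 */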

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE ||
          dst_pix_fmt == PIX_FMT_BGR555BE || dst_pix_fmt == PIX_FMT_BGR555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE ||
          src_pix_fmt == PIX_FMT_BGR565BE || src_pix_fmt == PIX_FMT_BGR565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;
    return loss;
}
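
/* Interpretation sketch (not part of the original source): the return value is
 * a bitmask of FF_LOSS_* flags; for example, converting YUV420P to GRAY8 loses
 * both chroma and colorspace information:
 *
 *     int loss = avcodec_get_pix_fmt_loss(PIX_FMT_GRAY8, PIX_FMT_YUV420P, 0);
 *     if (loss & FF_LOSS_CHROMA) {
 *         // the destination cannot represent the source chroma
 *     }
 */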

static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for(i = 0;i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
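
/* Usage sketch (illustrative addition): picking a destination format from a
 * bitmask of candidates; *loss_ptr reports what the chosen conversion loses.
 *
 *     int loss;
 *     int64_t mask = (1ULL << PIX_FMT_YUV420P) | (1ULL << PIX_FMT_RGB24);
 *     enum PixelFormat best =
 *         avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV444P, 0, &loss);
 *     // here best == PIX_FMT_YUV420P: same colorspace, only chroma
 *     // resolution is lost (loss == FF_LOSS_RESOLUTION)
 */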

void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if((!dst) || (!src))
        return;
    for(;height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_RGB444BE:
        case PIX_FMT_RGB444LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
        case PIX_FMT_BGR444BE:
        case PIX_FMT_BGR444LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
        break;
    case FF_PIXEL_PLANAR:
        if (plane == 1 || plane == 2)
            width= -((-width)>>desc->log2_chroma_w);

        return (width * pf->depth + 7) >> 3;
        break;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }

    return -1;
}

void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for(i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h= -((-height)>>desc->log2_chroma_h);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        memcpy(dst->data[1], src->data[1], 4*256);
        break;
    }
}
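
/* Usage sketch (added for illustration): copying between two pictures of the
 * same format and size, e.g. from a decoded picture into our own buffer
 * ('decoded' here stands for any AVPicture with valid data[]/linesize[]):
 *
 *     AVPicture out;
 *     if (avpicture_alloc(&out, PIX_FMT_YUV420P, w, h) == 0)
 *         av_picture_copy(&out, &decoded, PIX_FMT_YUV420P, w, h);
 */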

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if(size<0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if(picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}
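
/* Usage sketch (illustrative addition): avpicture_alloc() allocates one
 * contiguous buffer and fills in data[]/linesize[]; avpicture_free() releases it.
 *
 *     AVPicture pic;
 *     if (avpicture_alloc(&pic, PIX_FMT_RGB24, 640, 480) == 0) {
 *         // ... use pic.data[0] (linesize 640*3 bytes per row) ...
 *         avpicture_free(&pic);
 *     }
 */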

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
        ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}
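
/* Usage sketch (not part of the original source): cropping only adjusts the
 * data pointers and keeps the source linesizes, so no pixels are copied; it is
 * limited to planar YUV, and the offsets should respect the chroma
 * subsampling (e.g. even values for YUV420P).
 *
 *     AVPicture cropped;
 *     if (av_picture_crop(&cropped, &src, PIX_FMT_YUV420P, 16, 32) == 0) {
 *         // cropped shares src's pixels, shifted down 16 and right 32
 *     }
 */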

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i],dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
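
/* Usage sketch (illustrative addition): the border is drawn with 'color',
 * given as one value per plane (so Y/U/V for planar YUV); the destination
 * must already be allocated at the padded size.
 *
 *     int black[3] = { 16, 128, 128 };   // Y, U, V values for black
 *     av_picture_pad(&dst, &src, padded_h, padded_w, PIX_FMT_YUV420P,
 *                    8, 8, 8, 8, black);
 */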

/* NOTE: we scan all the pixels to have an exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}
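
/* Interpretation sketch (added for illustration): the result is a bitmask, so
 * a PAL8 image whose pixels reference a fully transparent palette entry but no
 * partially transparent ones yields FF_ALPHA_TRANSP without FF_ALPHA_SEMI_TRANSP.
 *
 *     int info = img_get_alpha_info(&pic, PIX_FMT_PAL8, w, h);
 *     if (info & FF_ALPHA_SEMI_TRANSP) {
 *         // blending (not just color keying) would be required
 *     }
 */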

#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
                    movd_m2r(lum_m4[0],mm0);\
                    movd_m2r(lum_m3[0],mm1);\
                    movd_m2r(lum_m2[0],mm2);\
                    movd_m2r(lum_m1[0],mm3);\
                    movd_m2r(lum[0],mm4);\
                    punpcklbw_r2r(mm7,mm0);\
                    movd_r2m(mm2,lum_m4[0]);\
                    punpcklbw_r2r(mm7,mm1);\
                    punpcklbw_r2r(mm7,mm2);\
                    punpcklbw_r2r(mm7,mm3);\
                    punpcklbw_r2r(mm7,mm4);\
                    paddw_r2r(mm3,mm1);\
                    psllw_i2r(1,mm2);\
                    paddw_r2r(mm4,mm0);\
                    psllw_i2r(2,mm1);\
                    paddw_r2r(mm6,mm2);\
                    paddw_r2r(mm2,mm1);\
                    psubusw_r2r(mm0,mm1);\
                    psrlw_i2r(3,mm1);\
                    packuswb_r2r(mm7,mm1);\
                    movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
                    movd_m2r(lum_m4[0],mm0);\
                    movd_m2r(lum_m3[0],mm1);\
                    movd_m2r(lum_m2[0],mm2);\
                    movd_m2r(lum_m1[0],mm3);\
                    movd_m2r(lum[0],mm4);\
                    punpcklbw_r2r(mm7,mm0);\
                    punpcklbw_r2r(mm7,mm1);\
                    punpcklbw_r2r(mm7,mm2);\
                    punpcklbw_r2r(mm7,mm3);\
                    punpcklbw_r2r(mm7,mm4);\
                    paddw_r2r(mm3,mm1);\
                    psllw_i2r(1,mm2);\
                    paddw_r2r(mm4,mm0);\
                    psllw_i2r(2,mm1);\
                    paddw_r2r(mm6,mm2);\
                    paddw_r2r(mm2,mm1);\
                    psubusw_r2r(mm0,mm1);\
                    psrlw_i2r(3,mm1);\
                    packuswb_r2r(mm7,mm1);\
                    movd_r2m(mm1,dst[0]);
#endif

/* filter parameters: [-1 4 2 4 -1] // 8 */
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
        dst+=4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
    }
#endif
}

/* deinterlacing : 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}

static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);
    if (!buf) /* allocation can fail; bail out rather than dereference NULL */
        return;

    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}

int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
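
/* Usage sketch (not part of the original source): in-place deinterlacing of a
 * planar picture; width and height must be multiples of 4 and the format one
 * of the YUV420P/422P/444P/411P/GRAY8 set accepted above.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P, w, h) < 0) {
 *         // unsupported format or dimensions
 *     }
 */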