/*
 * Misc image conversion routines
 * Copyright (c) 2001, 2002, 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file libavcodec/imgconvert.c
 * misc image conversion routines
 */

/* TODO:
 * - write 'ffimg' program to test all the image related stuff
 * - move all api to slice based system
 * - integrate deinterlacing, postprocessing and scaling in the conversion process
 */

#include "avcodec.h"
#include "dsputil.h"
#include "colorspace.h"
#include "internal.h"
#include "imgconvert.h"
#include "libavutil/pixdesc.h"

#if HAVE_MMX
#include "x86/mmx.h"
#include "x86/dsputil_mmx.h"
#endif

#define xglue(x, y) x ## y
#define glue(x, y) xglue(x, y)

#define FF_COLOR_RGB      0 /**< RGB color space */
#define FF_COLOR_GRAY     1 /**< gray color space */
#define FF_COLOR_YUV      2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR   0 /**< each channel has one component in AVPicture */
#define FF_PIXEL_PACKED   1 /**< only one component containing all the channels */
#define FF_PIXEL_PALETTE  2 /**< one component containing indexes for a palette */
typedef struct PixFmtInfo {
    uint8_t nb_channels;  /**< number of channels (including alpha) */
    uint8_t color_type;   /**< color type (see FF_COLOR_xxx constants) */
    uint8_t pixel_type;   /**< pixel storage type (see FF_PIXEL_xxx constants) */
    uint8_t is_alpha : 1; /**< true if alpha can be specified */
    uint8_t depth;        /**< bit depth of the color components */
} PixFmtInfo;

/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
    /* YUV formats */
    [PIX_FMT_YUV420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUYV422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_UYVY422] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_YUV410P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV411P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUV420P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV420P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV422P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_YUV444P16BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },

    /* YUV formats with alpha plane */
    [PIX_FMT_YUVA420P] = {
        .nb_channels = 4,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* JPEG YUV */
    [PIX_FMT_YUVJ420P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ422P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ444P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_YUVJ440P] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_YUV_JPEG,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },

    /* RGB formats */
    [PIX_FMT_RGB24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR24] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ARGB] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB48BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB48LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 16,
    },
    [PIX_FMT_RGB565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },

    /* gray / mono formats */
    [PIX_FMT_GRAY16BE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY16LE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 16,
    },
    [PIX_FMT_GRAY8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_MONOWHITE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },
    [PIX_FMT_MONOBLACK] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_GRAY,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 1,
    },

    /* paletted formats */
    [PIX_FMT_PAL8] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PALETTE,
        .depth = 8,
    },
    [PIX_FMT_UYYVYY411] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_ABGR] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR565BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR565LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555BE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_BGR555LE] = {
        .nb_channels = 3,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 5,
    },
    [PIX_FMT_RGB8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGB4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_RGB4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR8] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_BGR4] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 4,
    },
    [PIX_FMT_BGR4_BYTE] = {
        .nb_channels = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_NV12] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_NV21] = {
        .nb_channels = 2,
        .color_type = FF_COLOR_YUV,
        .pixel_type = FF_PIXEL_PLANAR,
        .depth = 8,
    },
    [PIX_FMT_BGRA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
    [PIX_FMT_RGBA] = {
        .nb_channels = 4, .is_alpha = 1,
        .color_type = FF_COLOR_RGB,
        .pixel_type = FF_PIXEL_PACKED,
        .depth = 8,
    },
};

void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
{
    *h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
    *v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
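
/* Usage sketch (illustrative, not part of the original file): querying the
 * chroma subsampling of a 4:2:0 format. For PIX_FMT_YUV420P both shifts are 1,
 * i.e. the chroma planes are half the luma width and half the luma height.
 *
 *     int h_shift, v_shift;
 *     avcodec_get_chroma_sub_sample(PIX_FMT_YUV420P, &h_shift, &v_shift);
 *     // h_shift == 1 && v_shift == 1
 */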
const char *avcodec_get_pix_fmt_name(enum PixelFormat pix_fmt)
{
    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
        return NULL;
    else
        return av_pix_fmt_descriptors[pix_fmt].name;
}

#if LIBAVCODEC_VERSION_MAJOR < 53
enum PixelFormat avcodec_get_pix_fmt(const char *name)
{
    return av_get_pix_fmt(name);
}
#endif

void avcodec_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt)
{
    /* print header */
    if (pix_fmt < 0)
        snprintf (buf, buf_size,
                  "name " " nb_channels" " depth" " is_alpha"
            );
    else{
        PixFmtInfo info= pix_fmt_info[pix_fmt];

        char is_alpha_char= info.is_alpha ? 'y' : 'n';

        snprintf (buf, buf_size,
                  "%-11s %5d %9d %6c",
                  av_pix_fmt_descriptors[pix_fmt].name,
                  info.nb_channels,
                  info.depth,
                  is_alpha_char
            );
    }
}

int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
{
    return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}

int ff_set_systematic_pal(uint32_t pal[256], enum PixelFormat pix_fmt){
    int i;

    for(i=0; i<256; i++){
        int r,g,b;

        switch(pix_fmt) {
        case PIX_FMT_RGB8:
            r= (i>>5    )*36;
            g= ((i>>2)&7)*36;
            b= (i&3     )*85;
            break;
        case PIX_FMT_BGR8:
            b= (i>>6    )*85;
            g= ((i>>3)&7)*36;
            r= (i&7     )*36;
            break;
        case PIX_FMT_RGB4_BYTE:
            r= (i>>3    )*255;
            g= ((i>>1)&3)*85;
            b= (i&1     )*255;
            break;
        case PIX_FMT_BGR4_BYTE:
            b= (i>>3    )*255;
            g= ((i>>1)&3)*85;
            r= (i&1     )*255;
            break;
        case PIX_FMT_GRAY8:
            r=b=g= i;
            break;
        default:
            return -1;
        }
        pal[i] = b + (g<<8) + (r<<16);
    }

    return 0;
}
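
/* Worked example (illustrative, not part of the original file): for
 * PIX_FMT_RGB8 the palette index is interpreted as RRRGGGBB, so entry
 * i = 0xFF is mapped as follows:
 *
 *     r = (0xFF >> 5)       * 36 = 7 * 36 = 252
 *     g = ((0xFF >> 2) & 7) * 36 = 7 * 36 = 252
 *     b = (0xFF & 3)        * 85 = 3 * 85 = 255
 *
 * and the packed palette word is b + (g << 8) + (r << 16) = 0x00FCFCFF.
 */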
int ff_fill_linesize(AVPicture *picture, enum PixelFormat pix_fmt, int width)
{
    int w2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    memset(picture->linesize, 0, sizeof(picture->linesize));

    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        break;
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = 2*width;
        picture->linesize[1] = 2*w2;
        picture->linesize[2] = 2*w2;
        break;
    case PIX_FMT_YUVA420P:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = w2;
        picture->linesize[2] = w2;
        picture->linesize[3] = width;
        break;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        w2 = (width + (1 << desc->log2_chroma_w) - 1) >> desc->log2_chroma_w;
        picture->linesize[0] = width;
        picture->linesize[1] = 2 * w2;
        break;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
        picture->linesize[0] = width * 3;
        break;
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
        picture->linesize[0] = width * 4;
        break;
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
        picture->linesize[0] = width * 6;
        break;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYVY422:
        picture->linesize[0] = width * 2;
        break;
    case PIX_FMT_UYYVYY411:
        picture->linesize[0] = width + width/2;
        break;
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
        picture->linesize[0] = width / 2;
        break;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->linesize[0] = (width + 7) >> 3;
        break;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        picture->linesize[0] = width;
        break;
    default:
        return -1;
    }
    return 0;
}

int ff_fill_pointer(AVPicture *picture, uint8_t *ptr, enum PixelFormat pix_fmt,
                    int height)
{
    int size, h2, size2;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    size = picture->linesize[0] * height;
    switch(pix_fmt) {
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV444P:
    case PIX_FMT_YUV410P:
    case PIX_FMT_YUV411P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUVJ420P:
    case PIX_FMT_YUVJ422P:
    case PIX_FMT_YUVJ444P:
    case PIX_FMT_YUVJ440P:
    case PIX_FMT_YUV420P16LE:
    case PIX_FMT_YUV422P16LE:
    case PIX_FMT_YUV444P16LE:
    case PIX_FMT_YUV420P16BE:
    case PIX_FMT_YUV422P16BE:
    case PIX_FMT_YUV444P16BE:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = NULL;
        return size + 2 * size2;
    case PIX_FMT_YUVA420P:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = picture->data[1] + size2;
        picture->data[3] = picture->data[1] + size2 + size2;
        return 2 * size + 2 * size2;
    case PIX_FMT_NV12:
    case PIX_FMT_NV21:
        h2 = (height + (1 << desc->log2_chroma_h) - 1) >> desc->log2_chroma_h;
        size2 = picture->linesize[1] * h2;
        picture->data[0] = ptr;
        picture->data[1] = picture->data[0] + size;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size + size2;
    case PIX_FMT_RGB24:
    case PIX_FMT_BGR24:
    case PIX_FMT_ARGB:
    case PIX_FMT_ABGR:
    case PIX_FMT_RGBA:
    case PIX_FMT_BGRA:
    case PIX_FMT_RGB48BE:
    case PIX_FMT_RGB48LE:
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
    case PIX_FMT_BGR555BE:
    case PIX_FMT_BGR555LE:
    case PIX_FMT_BGR565BE:
    case PIX_FMT_BGR565LE:
    case PIX_FMT_RGB555BE:
    case PIX_FMT_RGB555LE:
    case PIX_FMT_RGB565BE:
    case PIX_FMT_RGB565LE:
    case PIX_FMT_YUYV422:
    case PIX_FMT_UYVY422:
    case PIX_FMT_UYYVYY411:
    case PIX_FMT_RGB4:
    case PIX_FMT_BGR4:
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        picture->data[0] = ptr;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size;
    case PIX_FMT_PAL8:
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        size2 = (size + 3) & ~3;
        picture->data[0] = ptr;
        picture->data[1] = ptr + size2; /* palette is stored here as 256 32-bit words */
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return size2 + 256 * 4;
    default:
        picture->data[0] = NULL;
        picture->data[1] = NULL;
        picture->data[2] = NULL;
        picture->data[3] = NULL;
        return -1;
    }
}

int avpicture_fill(AVPicture *picture, uint8_t *ptr,
                   enum PixelFormat pix_fmt, int width, int height)
{
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;

    if (ff_fill_linesize(picture, pix_fmt, width))
        return -1;

    return ff_fill_pointer(picture, ptr, pix_fmt, height);
}
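
/* Usage sketch (illustrative, not part of the original file; dimensions are
 * arbitrary example values): wrapping a caller-owned buffer as an AVPicture.
 * avpicture_get_size() reports how many bytes avpicture_fill() will lay out
 * for the given format and dimensions.
 *
 *     AVPicture pic;
 *     int size = avpicture_get_size(PIX_FMT_YUV420P, 640, 480);
 *     uint8_t *buf = av_malloc(size);
 *     if (buf)
 *         avpicture_fill(&pic, buf, PIX_FMT_YUV420P, 640, 480);
 */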
int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
                     unsigned char *dest, int dest_size)
{
    const PixFmtInfo* pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
    int i, j, w, ow, h, oh, data_planes;
    const unsigned char* s;
    int size = avpicture_get_size(pix_fmt, width, height);

    if (size > dest_size || size < 0)
        return -1;

    if (pf->pixel_type == FF_PIXEL_PACKED || pf->pixel_type == FF_PIXEL_PALETTE) {
        if (pix_fmt == PIX_FMT_YUYV422 ||
            pix_fmt == PIX_FMT_UYVY422 ||
            pix_fmt == PIX_FMT_BGR565BE ||
            pix_fmt == PIX_FMT_BGR565LE ||
            pix_fmt == PIX_FMT_BGR555BE ||
            pix_fmt == PIX_FMT_BGR555LE ||
            pix_fmt == PIX_FMT_RGB565BE ||
            pix_fmt == PIX_FMT_RGB565LE ||
            pix_fmt == PIX_FMT_RGB555BE ||
            pix_fmt == PIX_FMT_RGB555LE)
            w = width * 2;
        else if (pix_fmt == PIX_FMT_UYYVYY411)
            w = width + width/2;
        else if (pix_fmt == PIX_FMT_PAL8)
            w = width;
        else
            w = width * (pf->depth * pf->nb_channels / 8);

        data_planes = 1;
        h = height;
    } else {
        data_planes = pf->nb_channels;
        w = (width*pf->depth + 7)/8;
        h = height;
    }

    ow = w;
    oh = h;

    for (i=0; i<data_planes; i++) {
        if (i == 1) {
            w = ((width >> desc->log2_chroma_w) * pf->depth + 7) / 8;
            h = height >> desc->log2_chroma_h;
            if (pix_fmt == PIX_FMT_NV12 || pix_fmt == PIX_FMT_NV21)
                w <<= 1;
        } else if (i == 3) {
            w = ow;
            h = oh;
        }
        s = src->data[i];
        for(j=0; j<h; j++) {
            memcpy(dest, s, w);
            dest += w;
            s += src->linesize[i];
        }
    }

    if (pf->pixel_type == FF_PIXEL_PALETTE)
        memcpy((unsigned char *)(((size_t)dest + 3) & ~3), src->data[1], 256 * 4);

    return size;
}

int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
{
    AVPicture dummy_pict;
    if(avcodec_check_dimensions(NULL, width, height))
        return -1;
    switch (pix_fmt) {
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        // do not include palette for these pseudo-paletted formats
        return width * height;
    }
    return avpicture_fill(&dummy_pict, NULL, pix_fmt, width, height);
}

int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
                             int has_alpha)
{
    const PixFmtInfo *pf, *ps;
    const AVPixFmtDescriptor *src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
    const AVPixFmtDescriptor *dst_desc = &av_pix_fmt_descriptors[dst_pix_fmt];
    int loss;

    ps = &pix_fmt_info[src_pix_fmt];

    /* compute loss */
    loss = 0;
    pf = &pix_fmt_info[dst_pix_fmt];
    if (pf->depth < ps->depth ||
        ((dst_pix_fmt == PIX_FMT_RGB555BE || dst_pix_fmt == PIX_FMT_RGB555LE) &&
         (src_pix_fmt == PIX_FMT_RGB565BE || src_pix_fmt == PIX_FMT_RGB565LE)))
        loss |= FF_LOSS_DEPTH;
    if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||
        dst_desc->log2_chroma_h > src_desc->log2_chroma_h)
        loss |= FF_LOSS_RESOLUTION;
    switch(pf->color_type) {
    case FF_COLOR_RGB:
        if (ps->color_type != FF_COLOR_RGB &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_GRAY:
        if (ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV:
        if (ps->color_type != FF_COLOR_YUV)
            loss |= FF_LOSS_COLORSPACE;
        break;
    case FF_COLOR_YUV_JPEG:
        if (ps->color_type != FF_COLOR_YUV_JPEG &&
            ps->color_type != FF_COLOR_YUV &&
            ps->color_type != FF_COLOR_GRAY)
            loss |= FF_LOSS_COLORSPACE;
        break;
    default:
        /* fail safe test */
        if (ps->color_type != pf->color_type)
            loss |= FF_LOSS_COLORSPACE;
        break;
    }
    if (pf->color_type == FF_COLOR_GRAY &&
        ps->color_type != FF_COLOR_GRAY)
        loss |= FF_LOSS_CHROMA;
    if (!pf->is_alpha && (ps->is_alpha && has_alpha))
        loss |= FF_LOSS_ALPHA;
    if (pf->pixel_type == FF_PIXEL_PALETTE &&
        (ps->pixel_type != FF_PIXEL_PALETTE && ps->color_type != FF_COLOR_GRAY))
        loss |= FF_LOSS_COLORQUANT;
    return loss;
}
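
/* Worked example (illustrative, not part of the original file): converting
 * RGB24 to GRAY8 with the table above. Both formats have depth 8 and neither
 * is chroma-subsampled, but the destination is FF_COLOR_GRAY while the source
 * is FF_COLOR_RGB, so the function returns
 *     FF_LOSS_COLORSPACE | FF_LOSS_CHROMA
 * and no other loss flags.
 */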
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
{
    int bits;
    const PixFmtInfo *pf;
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    pf = &pix_fmt_info[pix_fmt];
    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        break;
    case FF_PIXEL_PLANAR:
        if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
            bits = pf->depth * pf->nb_channels;
        } else {
            bits = pf->depth + ((2 * pf->depth) >>
                                (desc->log2_chroma_w + desc->log2_chroma_h));
        }
        break;
    case FF_PIXEL_PALETTE:
        bits = 8;
        break;
    default:
        bits = -1;
        break;
    }
    return bits;
}

static enum PixelFormat avcodec_find_best_pix_fmt1(int64_t pix_fmt_mask,
                                                   enum PixelFormat src_pix_fmt,
                                                   int has_alpha,
                                                   int loss_mask)
{
    int dist, i, loss, min_dist;
    enum PixelFormat dst_pix_fmt;

    /* find exact color match with smallest size */
    dst_pix_fmt = PIX_FMT_NONE;
    min_dist = 0x7fffffff;
    for(i = 0;i < PIX_FMT_NB; i++) {
        if (pix_fmt_mask & (1ULL << i)) {
            loss = avcodec_get_pix_fmt_loss(i, src_pix_fmt, has_alpha) & loss_mask;
            if (loss == 0) {
                dist = avg_bits_per_pixel(i);
                if (dist < min_dist) {
                    min_dist = dist;
                    dst_pix_fmt = i;
                }
            }
        }
    }
    return dst_pix_fmt;
}

enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
                                           int has_alpha, int *loss_ptr)
{
    enum PixelFormat dst_pix_fmt;
    int loss_mask, i;
    static const int loss_mask_order[] = {
        ~0, /* no loss first */
        ~FF_LOSS_ALPHA,
        ~FF_LOSS_RESOLUTION,
        ~(FF_LOSS_COLORSPACE | FF_LOSS_RESOLUTION),
        ~FF_LOSS_COLORQUANT,
        ~FF_LOSS_DEPTH,
        0,
    };

    /* try with successive loss */
    i = 0;
    for(;;) {
        loss_mask = loss_mask_order[i++];
        dst_pix_fmt = avcodec_find_best_pix_fmt1(pix_fmt_mask, src_pix_fmt,
                                                 has_alpha, loss_mask);
        if (dst_pix_fmt >= 0)
            goto found;
        if (loss_mask == 0)
            break;
    }
    return PIX_FMT_NONE;
 found:
    if (loss_ptr)
        *loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
    return dst_pix_fmt;
}
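
/* Usage sketch (illustrative, not part of the original file): picking the
 * best destination among YUV420P and RGB24 for a YUV444P source. As the loop
 * in avcodec_find_best_pix_fmt1() shows, the mask uses PIX_FMT_* values as
 * bit positions.
 *
 *     int loss;
 *     int64_t mask = (1LL << PIX_FMT_YUV420P) | (1LL << PIX_FMT_RGB24);
 *     enum PixelFormat best = avcodec_find_best_pix_fmt(mask, PIX_FMT_YUV444P,
 *                                                       0, &loss);
 *     // 'loss' then holds the FF_LOSS_* flags of the chosen conversion.
 */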
void ff_img_copy_plane(uint8_t *dst, int dst_wrap,
                       const uint8_t *src, int src_wrap,
                       int width, int height)
{
    if((!dst) || (!src))
        return;
    for(;height > 0; height--) {
        memcpy(dst, src, width);
        dst += dst_wrap;
        src += src_wrap;
    }
}

int ff_get_plane_bytewidth(enum PixelFormat pix_fmt, int width, int plane)
{
    int bits;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
        switch(pix_fmt) {
        case PIX_FMT_YUYV422:
        case PIX_FMT_UYVY422:
        case PIX_FMT_RGB565BE:
        case PIX_FMT_RGB565LE:
        case PIX_FMT_RGB555BE:
        case PIX_FMT_RGB555LE:
        case PIX_FMT_BGR565BE:
        case PIX_FMT_BGR565LE:
        case PIX_FMT_BGR555BE:
        case PIX_FMT_BGR555LE:
            bits = 16;
            break;
        case PIX_FMT_UYYVYY411:
            bits = 12;
            break;
        default:
            bits = pf->depth * pf->nb_channels;
            break;
        }
        return (width * bits + 7) >> 3;
    case FF_PIXEL_PLANAR:
        if (plane == 1 || plane == 2)
            width= -((-width)>>desc->log2_chroma_w);

        return (width * pf->depth + 7) >> 3;
    case FF_PIXEL_PALETTE:
        if (plane == 0)
            return width;
        break;
    }

    return -1;
}
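
/* Worked example (illustrative, not part of the original file): for
 * PIX_FMT_YUV420P (planar, depth 8, chroma shift 1) and width = 101,
 *
 *     plane 0: (101 * 8 + 7) >> 3 = 101 bytes
 *     plane 1: width is first rounded up to -((-101) >> 1) = 51,
 *              giving (51 * 8 + 7) >> 3 = 51 bytes
 */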
void av_picture_copy(AVPicture *dst, const AVPicture *src,
                     enum PixelFormat pix_fmt, int width, int height)
{
    int i;
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];

    switch(pf->pixel_type) {
    case FF_PIXEL_PACKED:
    case FF_PIXEL_PLANAR:
        for(i = 0; i < pf->nb_channels; i++) {
            int h;
            int bwidth = ff_get_plane_bytewidth(pix_fmt, width, i);
            h = height;
            if (i == 1 || i == 2) {
                h= -((-height)>>desc->log2_chroma_h);
            }
            ff_img_copy_plane(dst->data[i], dst->linesize[i],
                              src->data[i], src->linesize[i],
                              bwidth, h);
        }
        break;
    case FF_PIXEL_PALETTE:
        ff_img_copy_plane(dst->data[0], dst->linesize[0],
                          src->data[0], src->linesize[0],
                          width, height);
        /* copy the palette */
        memcpy(dst->data[1], src->data[1], 4*256);
        break;
    }
}

/* 2x2 -> 1x1 */
void ff_shrink22(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        d = dst;
        for(w = width;w >= 4; w-=4) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            d[1] = (s1[2] + s1[3] + s2[2] + s2[3] + 2) >> 2;
            d[2] = (s1[4] + s1[5] + s2[4] + s2[5] + 2) >> 2;
            d[3] = (s1[6] + s1[7] + s2[6] + s2[7] + 2) >> 2;
            s1 += 8;
            s2 += 8;
            d += 4;
        }
        for(;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s2[0] + s2[1] + 2) >> 2;
            s1 += 2;
            s2 += 2;
            d++;
        }
        src += 2 * src_wrap;
        dst += dst_wrap;
    }
}

/* 4x4 -> 1x1 */
void ff_shrink44(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w;
    const uint8_t *s1, *s2, *s3, *s4;
    uint8_t *d;

    for(;height > 0; height--) {
        s1 = src;
        s2 = s1 + src_wrap;
        s3 = s2 + src_wrap;
        s4 = s3 + src_wrap;
        d = dst;
        for(w = width;w > 0; w--) {
            d[0] = (s1[0] + s1[1] + s1[2] + s1[3] +
                    s2[0] + s2[1] + s2[2] + s2[3] +
                    s3[0] + s3[1] + s3[2] + s3[3] +
                    s4[0] + s4[1] + s4[2] + s4[3] + 8) >> 4;
            s1 += 4;
            s2 += 4;
            s3 += 4;
            s4 += 4;
            d++;
        }
        src += 4 * src_wrap;
        dst += dst_wrap;
    }
}

/* 8x8 -> 1x1 */
void ff_shrink88(uint8_t *dst, int dst_wrap,
                 const uint8_t *src, int src_wrap,
                 int width, int height)
{
    int w, i;

    for(;height > 0; height--) {
        for(w = width;w > 0; w--) {
            int tmp=0;
            for(i=0; i<8; i++){
                tmp += src[0] + src[1] + src[2] + src[3] + src[4] + src[5] + src[6] + src[7];
                src += src_wrap;
            }
            *(dst++) = (tmp + 32)>>6;
            src += 8 - 8*src_wrap;
        }
        src += 8*src_wrap - 8*width;
        dst += dst_wrap - width;
    }
}

int avpicture_alloc(AVPicture *picture,
                    enum PixelFormat pix_fmt, int width, int height)
{
    int size;
    void *ptr;

    size = avpicture_fill(picture, NULL, pix_fmt, width, height);
    if(size<0)
        goto fail;
    ptr = av_malloc(size);
    if (!ptr)
        goto fail;
    avpicture_fill(picture, ptr, pix_fmt, width, height);
    if(picture->data[1] && !picture->data[2])
        ff_set_systematic_pal((uint32_t*)picture->data[1], pix_fmt);

    return 0;
 fail:
    memset(picture, 0, sizeof(AVPicture));
    return -1;
}

void avpicture_free(AVPicture *picture)
{
    av_free(picture->data[0]);
}

/* return true if yuv planar */
static inline int is_yuv_planar(const PixFmtInfo *ps)
{
    return (ps->color_type == FF_COLOR_YUV ||
            ps->color_type == FF_COLOR_YUV_JPEG) &&
           ps->pixel_type == FF_PIXEL_PLANAR;
}

int av_picture_crop(AVPicture *dst, const AVPicture *src,
                    enum PixelFormat pix_fmt, int top_band, int left_band)
{
    int y_shift;
    int x_shift;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB || !is_yuv_planar(&pix_fmt_info[pix_fmt]))
        return -1;

    y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
    x_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;

    dst->data[0] = src->data[0] + (top_band * src->linesize[0]) + left_band;
    dst->data[1] = src->data[1] + ((top_band >> y_shift) * src->linesize[1]) + (left_band >> x_shift);
    dst->data[2] = src->data[2] + ((top_band >> y_shift) * src->linesize[2]) + (left_band >> x_shift);

    dst->linesize[0] = src->linesize[0];
    dst->linesize[1] = src->linesize[1];
    dst->linesize[2] = src->linesize[2];
    return 0;
}

int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
                   enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
                   int *color)
{
    uint8_t *optr;
    int y_shift;
    int x_shift;
    int yheight;
    int i, y;

    if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
        !is_yuv_planar(&pix_fmt_info[pix_fmt])) return -1;

    for (i = 0; i < 3; i++) {
        x_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_w : 0;
        y_shift = i ? av_pix_fmt_descriptors[pix_fmt].log2_chroma_h : 0;

        if (padtop || padleft) {
            memset(dst->data[i], color[i],
                   dst->linesize[i] * (padtop >> y_shift) + (padleft >> x_shift));
        }

        if (padleft || padright) {
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                optr += dst->linesize[i];
            }
        }

        if (src) { /* first line */
            uint8_t *iptr = src->data[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (padleft >> x_shift);
            memcpy(optr, iptr, (width - padleft - padright) >> x_shift);
            iptr += src->linesize[i];
            optr = dst->data[i] + dst->linesize[i] * (padtop >> y_shift) +
                   (dst->linesize[i] - (padright >> x_shift));
            yheight = (height - 1 - (padtop + padbottom)) >> y_shift;
            for (y = 0; y < yheight; y++) {
                memset(optr, color[i], (padleft + padright) >> x_shift);
                memcpy(optr + ((padleft + padright) >> x_shift), iptr,
                       (width - padleft - padright) >> x_shift);
                iptr += src->linesize[i];
                optr += dst->linesize[i];
            }
        }

        if (padbottom || padright) {
            optr = dst->data[i] + dst->linesize[i] *
                   ((height - padbottom) >> y_shift) - (padright >> x_shift);
            memset(optr, color[i],dst->linesize[i] *
                   (padbottom >> y_shift) + (padright >> x_shift));
        }
    }
    return 0;
}
/* NOTE: we scan all the pixels to obtain exact information */
static int get_alpha_info_pal8(const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    int src_wrap, ret, x, y;
    unsigned int a;
    uint32_t *palette = (uint32_t *)src->data[1];

    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    ret = 0;
    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            a = palette[p[0]] >> 24;
            if (a == 0x00) {
                ret |= FF_ALPHA_TRANSP;
            } else if (a != 0xff) {
                ret |= FF_ALPHA_SEMI_TRANSP;
            }
            p++;
        }
        p += src_wrap;
    }
    return ret;
}

int img_get_alpha_info(const AVPicture *src,
                       enum PixelFormat pix_fmt, int width, int height)
{
    const PixFmtInfo *pf = &pix_fmt_info[pix_fmt];
    int ret;

    /* no alpha can be represented in format */
    if (!pf->is_alpha)
        return 0;
    switch(pix_fmt) {
    case PIX_FMT_PAL8:
        ret = get_alpha_info_pal8(src, width, height);
        break;
    default:
        /* we do not know, so everything is indicated */
        ret = FF_ALPHA_TRANSP | FF_ALPHA_SEMI_TRANSP;
        break;
    }
    return ret;
}

#if HAVE_MMX
#define DEINT_INPLACE_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        movd_r2m(mm2,lum_m4[0]);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum_m2[0]);

#define DEINT_LINE_LUM \
        movd_m2r(lum_m4[0],mm0);\
        movd_m2r(lum_m3[0],mm1);\
        movd_m2r(lum_m2[0],mm2);\
        movd_m2r(lum_m1[0],mm3);\
        movd_m2r(lum[0],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm3,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm4,mm0);\
        psllw_i2r(2,mm1);\
        paddw_r2r(mm6,mm2);\
        paddw_r2r(mm2,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,dst[0]);
#endif

/* filter parameters: [-1 4 2 4 -1] // 8 */
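/* In other words (illustrative note, not part of the original file): each
 * output luma sample in the C path below is
 *
 *     out = clip((-lum_m4 + 4*lum_m3 + 2*lum_m2 + 4*lum_m1 - lum + 4) >> 3)
 *
 * i.e. a 5-tap vertical filter whose coefficients sum to 8, so the result is
 * a rounded weighted average of the five input lines.
 */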
static void deinterlace_line(uint8_t *dst,
                             const uint8_t *lum_m4, const uint8_t *lum_m3,
                             const uint8_t *lum_m2, const uint8_t *lum_m1,
                             const uint8_t *lum,
                             int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        dst[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
        dst++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
        dst+=4;
    }
#endif
}

static void deinterlace_line_inplace(uint8_t *lum_m4, uint8_t *lum_m3, uint8_t *lum_m2, uint8_t *lum_m1, uint8_t *lum,
                                     int size)
{
#if !HAVE_MMX
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
    int sum;

    for(;size > 0;size--) {
        sum = -lum_m4[0];
        sum += lum_m3[0] << 2;
        sum += lum_m2[0] << 1;
        lum_m4[0]=lum_m2[0];
        sum += lum_m1[0] << 2;
        sum += -lum[0];
        lum_m2[0] = cm[(sum + 4) >> 3];
        lum_m4++;
        lum_m3++;
        lum_m2++;
        lum_m1++;
        lum++;
    }
#else
    {
        pxor_r2r(mm7,mm7);
        movq_m2r(ff_pw_4,mm6);
    }
    for (;size > 3; size-=4) {
        DEINT_INPLACE_LINE_LUM
        lum_m4+=4;
        lum_m3+=4;
        lum_m2+=4;
        lum_m1+=4;
        lum+=4;
    }
#endif
}

/* deinterlacing: 2 temporal taps, 3 spatial taps linear filter. The
   top field is copied as is, but the bottom field is deinterlaced
   against the top field. */
static void deinterlace_bottom_field(uint8_t *dst, int dst_wrap,
                                     const uint8_t *src1, int src_wrap,
                                     int width, int height)
{
    const uint8_t *src_m2, *src_m1, *src_0, *src_p1, *src_p2;
    int y;

    src_m2 = src1;
    src_m1 = src1;
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        memcpy(dst,src_m1,width);
        dst += dst_wrap;
        deinterlace_line(dst,src_m2,src_m1,src_0,src_p1,src_p2,width);
        src_m2 = src_0;
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
        dst += dst_wrap;
    }
    memcpy(dst,src_m1,width);
    dst += dst_wrap;
    /* do last line */
    deinterlace_line(dst,src_m2,src_m1,src_0,src_0,src_0,width);
}
static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
                                             int width, int height)
{
    uint8_t *src_m1, *src_0, *src_p1, *src_p2;
    int y;
    uint8_t *buf;

    buf = (uint8_t*)av_malloc(width);
    if (!buf)
        return; /* allocation failed; leave the picture untouched */

    src_m1 = src1;
    memcpy(buf,src_m1,width);
    src_0=&src_m1[src_wrap];
    src_p1=&src_0[src_wrap];
    src_p2=&src_p1[src_wrap];
    for(y=0;y<(height-2);y+=2) {
        deinterlace_line_inplace(buf,src_m1,src_0,src_p1,src_p2,width);
        src_m1 = src_p1;
        src_0 = src_p2;
        src_p1 += 2*src_wrap;
        src_p2 += 2*src_wrap;
    }
    /* do last line */
    deinterlace_line_inplace(buf,src_m1,src_0,src_0,src_0,width);
    av_free(buf);
}
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
                          enum PixelFormat pix_fmt, int width, int height)
{
    int i;

    if (pix_fmt != PIX_FMT_YUV420P &&
        pix_fmt != PIX_FMT_YUV422P &&
        pix_fmt != PIX_FMT_YUV444P &&
        pix_fmt != PIX_FMT_YUV411P &&
        pix_fmt != PIX_FMT_GRAY8)
        return -1;
    if ((width & 3) != 0 || (height & 3) != 0)
        return -1;

    for(i=0;i<3;i++) {
        if (i == 1) {
            switch(pix_fmt) {
            case PIX_FMT_YUV420P:
                width >>= 1;
                height >>= 1;
                break;
            case PIX_FMT_YUV422P:
                width >>= 1;
                break;
            case PIX_FMT_YUV411P:
                width >>= 2;
                break;
            default:
                break;
            }
            if (pix_fmt == PIX_FMT_GRAY8) {
                break;
            }
        }
        if (src == dst) {
            deinterlace_bottom_field_inplace(dst->data[i], dst->linesize[i],
                                             width, height);
        } else {
            deinterlace_bottom_field(dst->data[i],dst->linesize[i],
                                     src->data[i], src->linesize[i],
                                     width, height);
        }
    }
    emms_c();
    return 0;
}
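
/* Usage sketch (illustrative, not part of the original file): in-place
 * deinterlacing of a decoded frame. Passing the same AVPicture as source and
 * destination selects the in-place code path above; width and height must be
 * multiples of 4 and the format one of those accepted by the checks above.
 *
 *     if (avpicture_deinterlace(&pic, &pic, PIX_FMT_YUV420P,
 *                               width, height) < 0) {
 *         // unsupported pixel format or dimensions
 *     }
 */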