/*
 * OMX Video encoder
 * Copyright (C) 2011 Martin Storsjo
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#if CONFIG_OMX_RPI
#define OMX_SKIP64BIT
#endif

#include <dlfcn.h>
#include <OMX_Core.h>
#include <OMX_Component.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>

#include "libavutil/avstring.h"
#include "libavutil/avutil.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "h264.h"
#include "internal.h"

#ifdef OMX_SKIP64BIT
static OMX_TICKS to_omx_ticks(int64_t value)
{
    OMX_TICKS s;
    s.nLowPart  = value & 0xffffffff;
    s.nHighPart = value >> 32;
    return s;
}
static int64_t from_omx_ticks(OMX_TICKS value)
{
    return (((int64_t)value.nHighPart) << 32) | value.nLowPart;
}
#else
#define to_omx_ticks(x) (x)
#define from_omx_ticks(x) (x)
#endif

#define INIT_STRUCT(x) do {                                               \
        x.nSize = sizeof(x);                                              \
        x.nVersion = s->version;                                          \
    } while (0)
#define CHECK(x) do {                                                     \
        if (x != OMX_ErrorNone) {                                         \
            av_log(avctx, AV_LOG_ERROR,                                   \
                   "err %x (%d) on line %d\n", x, x, __LINE__);           \
            return AVERROR_UNKNOWN;                                       \
        }                                                                 \
    } while (0)

typedef struct OMXContext {
    void *lib;
    void *lib2;
    OMX_ERRORTYPE (*ptr_Init)(void);
    OMX_ERRORTYPE (*ptr_Deinit)(void);
    OMX_ERRORTYPE (*ptr_ComponentNameEnum)(OMX_STRING, OMX_U32, OMX_U32);
    OMX_ERRORTYPE (*ptr_GetHandle)(OMX_HANDLETYPE*, OMX_STRING, OMX_PTR, OMX_CALLBACKTYPE*);
    OMX_ERRORTYPE (*ptr_FreeHandle)(OMX_HANDLETYPE);
    OMX_ERRORTYPE (*ptr_GetComponentsOfRole)(OMX_STRING, OMX_U32*, OMX_U8**);
    OMX_ERRORTYPE (*ptr_GetRolesOfComponent)(OMX_STRING, OMX_U32*, OMX_U8**);
    void (*host_init)(void);
} OMXContext;

static av_cold void *dlsym_prefixed(void *handle, const char *symbol, const char *prefix)
{
    char buf[50];
    snprintf(buf, sizeof(buf), "%s%s", prefix ? prefix : "", symbol);
    return dlsym(handle, buf);
}

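/* Open the OpenMAX IL core library (and, when given, the auxiliary host
 * library needed on Raspberry Pi) and resolve the core entry points. */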
static av_cold int omx_try_load(OMXContext *s, void *logctx,
                                const char *libname, const char *prefix,
                                const char *libname2)
{
    if (libname2) {
        s->lib2 = dlopen(libname2, RTLD_NOW | RTLD_GLOBAL);
        if (!s->lib2) {
            av_log(logctx, AV_LOG_WARNING, "%s not found\n", libname2);
            return AVERROR_ENCODER_NOT_FOUND;
        }
        s->host_init = dlsym(s->lib2, "bcm_host_init");
        if (!s->host_init) {
            av_log(logctx, AV_LOG_WARNING, "bcm_host_init not found\n");
            dlclose(s->lib2);
            s->lib2 = NULL;
            return AVERROR_ENCODER_NOT_FOUND;
        }
    }
    s->lib = dlopen(libname, RTLD_NOW | RTLD_GLOBAL);
    if (!s->lib) {
        av_log(logctx, AV_LOG_WARNING, "%s not found\n", libname);
        return AVERROR_ENCODER_NOT_FOUND;
    }
    s->ptr_Init                = dlsym_prefixed(s->lib, "OMX_Init", prefix);
    s->ptr_Deinit              = dlsym_prefixed(s->lib, "OMX_Deinit", prefix);
    s->ptr_ComponentNameEnum   = dlsym_prefixed(s->lib, "OMX_ComponentNameEnum", prefix);
    s->ptr_GetHandle           = dlsym_prefixed(s->lib, "OMX_GetHandle", prefix);
    s->ptr_FreeHandle          = dlsym_prefixed(s->lib, "OMX_FreeHandle", prefix);
    s->ptr_GetComponentsOfRole = dlsym_prefixed(s->lib, "OMX_GetComponentsOfRole", prefix);
    s->ptr_GetRolesOfComponent = dlsym_prefixed(s->lib, "OMX_GetRolesOfComponent", prefix);
    if (!s->ptr_Init || !s->ptr_Deinit || !s->ptr_ComponentNameEnum ||
        !s->ptr_GetHandle || !s->ptr_FreeHandle ||
        !s->ptr_GetComponentsOfRole || !s->ptr_GetRolesOfComponent) {
        av_log(logctx, AV_LOG_WARNING, "Not all functions found in %s\n", libname);
        dlclose(s->lib);
        s->lib = NULL;
        if (s->lib2)
            dlclose(s->lib2);
        s->lib2 = NULL;
        return AVERROR_ENCODER_NOT_FOUND;
    }
    return 0;
}

static av_cold OMXContext *omx_init(void *logctx, const char *libname, const char *prefix)
{
    static const char * const libnames[] = {
#if CONFIG_OMX_RPI
        "/opt/vc/lib/libopenmaxil.so", "/opt/vc/lib/libbcm_host.so",
#else
        "libOMX_Core.so", NULL,
        "libOmxCore.so", NULL,
#endif
        NULL
    };
    const char* const* nameptr;
    int ret = AVERROR_ENCODER_NOT_FOUND;
    OMXContext *omx_context;

    omx_context = av_mallocz(sizeof(*omx_context));
    if (!omx_context)
        return NULL;
    if (libname) {
        ret = omx_try_load(omx_context, logctx, libname, prefix, NULL);
        if (ret < 0) {
            av_free(omx_context);
            return NULL;
        }
    } else {
        for (nameptr = libnames; *nameptr; nameptr += 2)
            if (!(ret = omx_try_load(omx_context, logctx, nameptr[0], prefix, nameptr[1])))
                break;
        if (!*nameptr) {
            av_free(omx_context);
            return NULL;
        }
    }

    if (omx_context->host_init)
        omx_context->host_init();
    omx_context->ptr_Init();
    return omx_context;
}

static av_cold void omx_deinit(OMXContext *omx_context)
{
    if (!omx_context)
        return;
    omx_context->ptr_Deinit();
    dlclose(omx_context->lib);
    av_free(omx_context);
}

typedef struct OMXCodecContext {
    const AVClass *class;
    char *libname;
    char *libprefix;
    OMXContext *omx_context;

    AVCodecContext *avctx;

    char component_name[OMX_MAX_STRINGNAME_SIZE];
    OMX_VERSIONTYPE version;
    OMX_HANDLETYPE handle;
    int in_port, out_port;
    OMX_COLOR_FORMATTYPE color_format;
    int stride, plane_size;

    int num_in_buffers, num_out_buffers;
    OMX_BUFFERHEADERTYPE **in_buffer_headers;
    OMX_BUFFERHEADERTYPE **out_buffer_headers;
    int num_free_in_buffers;
    OMX_BUFFERHEADERTYPE **free_in_buffers;
    int num_done_out_buffers;
    OMX_BUFFERHEADERTYPE **done_out_buffers;
    pthread_mutex_t input_mutex;
    pthread_cond_t input_cond;
    pthread_mutex_t output_mutex;
    pthread_cond_t output_cond;

    pthread_mutex_t state_mutex;
    pthread_cond_t state_cond;
    OMX_STATETYPE state;
    OMX_ERRORTYPE error;

    int mutex_cond_inited;

    int eos_sent, got_eos;

    uint8_t *output_buf;
    int output_buf_size;

    int input_zerocopy;
    int profile;
} OMXCodecContext;

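/* Free input buffers and finished output buffers are kept in simple FIFO
 * arrays, each protected by a mutex and signalled via a condition variable. */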
static void append_buffer(pthread_mutex_t *mutex, pthread_cond_t *cond,
                          int* array_size, OMX_BUFFERHEADERTYPE **array,
                          OMX_BUFFERHEADERTYPE *buffer)
{
    pthread_mutex_lock(mutex);
    array[(*array_size)++] = buffer;
    pthread_cond_broadcast(cond);
    pthread_mutex_unlock(mutex);
}

static OMX_BUFFERHEADERTYPE *get_buffer(pthread_mutex_t *mutex, pthread_cond_t *cond,
                                        int* array_size, OMX_BUFFERHEADERTYPE **array,
                                        int wait)
{
    OMX_BUFFERHEADERTYPE *buffer;
    pthread_mutex_lock(mutex);
    if (wait) {
        while (!*array_size)
            pthread_cond_wait(cond, mutex);
    }
    if (*array_size > 0) {
        buffer = array[0];
        (*array_size)--;
        memmove(&array[0], &array[1], (*array_size) * sizeof(OMX_BUFFERHEADERTYPE*));
    } else {
        buffer = NULL;
    }
    pthread_mutex_unlock(mutex);
    return buffer;
}

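/* OpenMAX IL event callback; records state changes and asynchronous errors
 * and wakes up any thread blocked in wait_for_state(). */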
static OMX_ERRORTYPE event_handler(OMX_HANDLETYPE component, OMX_PTR app_data, OMX_EVENTTYPE event,
                                   OMX_U32 data1, OMX_U32 data2, OMX_PTR event_data)
{
    OMXCodecContext *s = app_data;
    // This uses casts in the printfs, since OMX_U32 actually is a typedef for
    // unsigned long in official header versions (but there are also modified
    // versions where it is something else).
    switch (event) {
    case OMX_EventError:
        pthread_mutex_lock(&s->state_mutex);
        av_log(s->avctx, AV_LOG_ERROR, "OMX error %"PRIx32"\n", (uint32_t) data1);
        s->error = data1;
        pthread_cond_broadcast(&s->state_cond);
        pthread_mutex_unlock(&s->state_mutex);
        break;
    case OMX_EventCmdComplete:
        if (data1 == OMX_CommandStateSet) {
            pthread_mutex_lock(&s->state_mutex);
            s->state = data2;
            av_log(s->avctx, AV_LOG_VERBOSE, "OMX state changed to %"PRIu32"\n", (uint32_t) data2);
            pthread_cond_broadcast(&s->state_cond);
            pthread_mutex_unlock(&s->state_mutex);
        } else if (data1 == OMX_CommandPortDisable) {
            av_log(s->avctx, AV_LOG_VERBOSE, "OMX port %"PRIu32" disabled\n", (uint32_t) data2);
        } else if (data1 == OMX_CommandPortEnable) {
            av_log(s->avctx, AV_LOG_VERBOSE, "OMX port %"PRIu32" enabled\n", (uint32_t) data2);
        } else {
            av_log(s->avctx, AV_LOG_VERBOSE, "OMX command complete, command %"PRIu32", value %"PRIu32"\n",
                   (uint32_t) data1, (uint32_t) data2);
        }
        break;
    case OMX_EventPortSettingsChanged:
        av_log(s->avctx, AV_LOG_VERBOSE, "OMX port %"PRIu32" settings changed\n", (uint32_t) data1);
        break;
    default:
        av_log(s->avctx, AV_LOG_VERBOSE, "OMX event %d %"PRIx32" %"PRIx32"\n",
               event, (uint32_t) data1, (uint32_t) data2);
        break;
    }
    return OMX_ErrorNone;
}

static OMX_ERRORTYPE empty_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
                                       OMX_BUFFERHEADERTYPE *buffer)
{
    OMXCodecContext *s = app_data;
    if (s->input_zerocopy) {
        if (buffer->pAppPrivate) {
            if (buffer->pOutputPortPrivate)
                av_free(buffer->pAppPrivate);
            else
                av_frame_free((AVFrame**)&buffer->pAppPrivate);
            buffer->pAppPrivate = NULL;
        }
    }
    append_buffer(&s->input_mutex, &s->input_cond,
                  &s->num_free_in_buffers, s->free_in_buffers, buffer);
    return OMX_ErrorNone;
}

static OMX_ERRORTYPE fill_buffer_done(OMX_HANDLETYPE component, OMX_PTR app_data,
                                      OMX_BUFFERHEADERTYPE *buffer)
{
    OMXCodecContext *s = app_data;
    append_buffer(&s->output_mutex, &s->output_cond,
                  &s->num_done_out_buffers, s->done_out_buffers, buffer);
    return OMX_ErrorNone;
}

static const OMX_CALLBACKTYPE callbacks = {
    event_handler,
    empty_buffer_done,
    fill_buffer_done
};

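/* Pick the name of an installed component implementing the requested role
 * (e.g. "video_encoder.avc"); the first match reported by the core is used. */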
static av_cold int find_component(OMXContext *omx_context, void *logctx,
                                  const char *role, char *str, int str_size)
{
    OMX_U32 i, num = 0;
    char **components;
    int ret = 0;

#if CONFIG_OMX_RPI
    if (av_strstart(role, "video_encoder.", NULL)) {
        av_strlcpy(str, "OMX.broadcom.video_encode", str_size);
        return 0;
    }
#endif
    omx_context->ptr_GetComponentsOfRole((OMX_STRING) role, &num, NULL);
    if (!num) {
        av_log(logctx, AV_LOG_WARNING, "No component for role %s found\n", role);
        return AVERROR_ENCODER_NOT_FOUND;
    }
    components = av_mallocz_array(num, sizeof(*components));
    if (!components)
        return AVERROR(ENOMEM);
    for (i = 0; i < num; i++) {
        components[i] = av_mallocz(OMX_MAX_STRINGNAME_SIZE);
        if (!components[i]) {
            ret = AVERROR(ENOMEM);
            goto end;
        }
    }
    omx_context->ptr_GetComponentsOfRole((OMX_STRING) role, &num, (OMX_U8**) components);
    av_strlcpy(str, components[0], str_size);
end:
    for (i = 0; i < num; i++)
        av_free(components[i]);
    av_free(components);
    return ret;
}

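/* Wait until the component has reached the requested state, or until an
 * asynchronous error has been reported via the event handler. */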
static av_cold int wait_for_state(OMXCodecContext *s, OMX_STATETYPE state)
{
    int ret = 0;
    pthread_mutex_lock(&s->state_mutex);
    while (s->state != state && s->error == OMX_ErrorNone)
        pthread_cond_wait(&s->state_cond, &s->state_mutex);
    if (s->error != OMX_ErrorNone)
        ret = AVERROR_ENCODER_NOT_FOUND;
    pthread_mutex_unlock(&s->state_mutex);
    return ret;
}

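/* Get a handle to the component, configure the input and output ports,
 * allocate the port buffers and move the component into the Executing state. */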
static av_cold int omx_component_init(AVCodecContext *avctx, const char *role)
{
    OMXCodecContext *s = avctx->priv_data;
    OMX_PARAM_COMPONENTROLETYPE role_params = { 0 };
    OMX_PORT_PARAM_TYPE video_port_params = { 0 };
    OMX_PARAM_PORTDEFINITIONTYPE in_port_params = { 0 }, out_port_params = { 0 };
    OMX_VIDEO_PARAM_PORTFORMATTYPE video_port_format = { 0 };
    OMX_VIDEO_PARAM_BITRATETYPE vid_param_bitrate = { 0 };
    OMX_ERRORTYPE err;
    int i;

    s->version.s.nVersionMajor = 1;
    s->version.s.nVersionMinor = 1;
    s->version.s.nRevision     = 2;

    err = s->omx_context->ptr_GetHandle(&s->handle, s->component_name, s, (OMX_CALLBACKTYPE*) &callbacks);
    if (err != OMX_ErrorNone) {
        av_log(avctx, AV_LOG_ERROR, "OMX_GetHandle(%s) failed: %x\n", s->component_name, err);
        return AVERROR_UNKNOWN;
    }

    // This one crashes the mediaserver on qcom, if used over IOMX
    INIT_STRUCT(role_params);
    av_strlcpy(role_params.cRole, role, sizeof(role_params.cRole));
    // Intentionally ignore errors on this one
    OMX_SetParameter(s->handle, OMX_IndexParamStandardComponentRole, &role_params);

    INIT_STRUCT(video_port_params);
    err = OMX_GetParameter(s->handle, OMX_IndexParamVideoInit, &video_port_params);
    CHECK(err);

    s->in_port = s->out_port = -1;
    for (i = 0; i < video_port_params.nPorts; i++) {
        int port = video_port_params.nStartPortNumber + i;
        OMX_PARAM_PORTDEFINITIONTYPE port_params = { 0 };
        INIT_STRUCT(port_params);
        port_params.nPortIndex = port;
        err = OMX_GetParameter(s->handle, OMX_IndexParamPortDefinition, &port_params);
        if (err != OMX_ErrorNone) {
            av_log(avctx, AV_LOG_WARNING, "port %d error %x\n", port, err);
            break;
        }
        if (port_params.eDir == OMX_DirInput && s->in_port < 0) {
            in_port_params = port_params;
            s->in_port = port;
        } else if (port_params.eDir == OMX_DirOutput && s->out_port < 0) {
            out_port_params = port_params;
            s->out_port = port;
        }
    }
    if (s->in_port < 0 || s->out_port < 0) {
        av_log(avctx, AV_LOG_ERROR, "No in or out port found (in %d out %d)\n", s->in_port, s->out_port);
        return AVERROR_UNKNOWN;
    }

    s->color_format = 0;
    for (i = 0; ; i++) {
        INIT_STRUCT(video_port_format);
        video_port_format.nIndex = i;
        video_port_format.nPortIndex = s->in_port;
        if (OMX_GetParameter(s->handle, OMX_IndexParamVideoPortFormat, &video_port_format) != OMX_ErrorNone)
            break;
        if (video_port_format.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
            video_port_format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar) {
            s->color_format = video_port_format.eColorFormat;
            break;
        }
    }
    if (s->color_format == 0) {
        av_log(avctx, AV_LOG_ERROR, "No supported pixel formats (%d formats available)\n", i);
        return AVERROR_UNKNOWN;
    }

    in_port_params.bEnabled   = OMX_TRUE;
    in_port_params.bPopulated = OMX_FALSE;
    in_port_params.eDomain    = OMX_PortDomainVideo;

    in_port_params.format.video.pNativeRender         = NULL;
    in_port_params.format.video.bFlagErrorConcealment = OMX_FALSE;
    in_port_params.format.video.eColorFormat          = s->color_format;
    s->stride     = avctx->width;
    s->plane_size = avctx->height;
    // If specific codecs need to manually override the stride/plane_size,
    // that can be done here.
    in_port_params.format.video.nStride      = s->stride;
    in_port_params.format.video.nSliceHeight = s->plane_size;
    in_port_params.format.video.nFrameWidth  = avctx->width;
    in_port_params.format.video.nFrameHeight = avctx->height;
    if (avctx->framerate.den > 0 && avctx->framerate.num > 0)
        in_port_params.format.video.xFramerate = (1LL << 16) * avctx->framerate.num / avctx->framerate.den;
    else
        in_port_params.format.video.xFramerate = (1LL << 16) * avctx->time_base.den / avctx->time_base.num;

    err = OMX_SetParameter(s->handle, OMX_IndexParamPortDefinition, &in_port_params);
    CHECK(err);
    err = OMX_GetParameter(s->handle, OMX_IndexParamPortDefinition, &in_port_params);
    CHECK(err);
    s->stride         = in_port_params.format.video.nStride;
    s->plane_size     = in_port_params.format.video.nSliceHeight;
    s->num_in_buffers = in_port_params.nBufferCountActual;

    err = OMX_GetParameter(s->handle, OMX_IndexParamPortDefinition, &out_port_params);
    out_port_params.bEnabled   = OMX_TRUE;
    out_port_params.bPopulated = OMX_FALSE;
    out_port_params.eDomain    = OMX_PortDomainVideo;
    out_port_params.format.video.pNativeRender = NULL;
    out_port_params.format.video.nFrameWidth   = avctx->width;
    out_port_params.format.video.nFrameHeight  = avctx->height;
    out_port_params.format.video.nStride       = 0;
    out_port_params.format.video.nSliceHeight  = 0;
    out_port_params.format.video.nBitrate      = avctx->bit_rate;
    out_port_params.format.video.xFramerate    = in_port_params.format.video.xFramerate;
    out_port_params.format.video.bFlagErrorConcealment = OMX_FALSE;
    if (avctx->codec->id == AV_CODEC_ID_MPEG4)
        out_port_params.format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG4;
    else if (avctx->codec->id == AV_CODEC_ID_H264)
        out_port_params.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;

    err = OMX_SetParameter(s->handle, OMX_IndexParamPortDefinition, &out_port_params);
    CHECK(err);
    err = OMX_GetParameter(s->handle, OMX_IndexParamPortDefinition, &out_port_params);
    CHECK(err);
    s->num_out_buffers = out_port_params.nBufferCountActual;

    INIT_STRUCT(vid_param_bitrate);
    vid_param_bitrate.nPortIndex     = s->out_port;
    vid_param_bitrate.eControlRate   = OMX_Video_ControlRateVariable;
    vid_param_bitrate.nTargetBitrate = avctx->bit_rate;
    err = OMX_SetParameter(s->handle, OMX_IndexParamVideoBitrate, &vid_param_bitrate);
    if (err != OMX_ErrorNone)
        av_log(avctx, AV_LOG_WARNING, "Unable to set video bitrate parameter\n");

    if (avctx->codec->id == AV_CODEC_ID_H264) {
        OMX_VIDEO_PARAM_AVCTYPE avc = { 0 };
        INIT_STRUCT(avc);
        avc.nPortIndex = s->out_port;
        err = OMX_GetParameter(s->handle, OMX_IndexParamVideoAvc, &avc);
        CHECK(err);
        avc.nBFrames = 0;
        avc.nPFrames = avctx->gop_size - 1;
        switch (s->profile == FF_PROFILE_UNKNOWN ? avctx->profile : s->profile) {
        case FF_PROFILE_H264_BASELINE:
            avc.eProfile = OMX_VIDEO_AVCProfileBaseline;
            break;
        case FF_PROFILE_H264_MAIN:
            avc.eProfile = OMX_VIDEO_AVCProfileMain;
            break;
        case FF_PROFILE_H264_HIGH:
            avc.eProfile = OMX_VIDEO_AVCProfileHigh;
            break;
        default:
            break;
        }
        err = OMX_SetParameter(s->handle, OMX_IndexParamVideoAvc, &avc);
        CHECK(err);
    }

    err = OMX_SendCommand(s->handle, OMX_CommandStateSet, OMX_StateIdle, NULL);
    CHECK(err);

    s->in_buffer_headers  = av_mallocz(sizeof(OMX_BUFFERHEADERTYPE*) * s->num_in_buffers);
    s->free_in_buffers    = av_mallocz(sizeof(OMX_BUFFERHEADERTYPE*) * s->num_in_buffers);
    s->out_buffer_headers = av_mallocz(sizeof(OMX_BUFFERHEADERTYPE*) * s->num_out_buffers);
    s->done_out_buffers   = av_mallocz(sizeof(OMX_BUFFERHEADERTYPE*) * s->num_out_buffers);
    if (!s->in_buffer_headers || !s->free_in_buffers || !s->out_buffer_headers || !s->done_out_buffers)
        return AVERROR(ENOMEM);
    for (i = 0; i < s->num_in_buffers && err == OMX_ErrorNone; i++) {
        if (s->input_zerocopy)
            err = OMX_UseBuffer(s->handle, &s->in_buffer_headers[i], s->in_port, s, in_port_params.nBufferSize, NULL);
        else
            err = OMX_AllocateBuffer(s->handle, &s->in_buffer_headers[i], s->in_port, s, in_port_params.nBufferSize);
        if (err == OMX_ErrorNone)
            s->in_buffer_headers[i]->pAppPrivate = s->in_buffer_headers[i]->pOutputPortPrivate = NULL;
    }
    CHECK(err);
    s->num_in_buffers = i;
    for (i = 0; i < s->num_out_buffers && err == OMX_ErrorNone; i++)
        err = OMX_AllocateBuffer(s->handle, &s->out_buffer_headers[i], s->out_port, s, out_port_params.nBufferSize);
    CHECK(err);
    s->num_out_buffers = i;

    if (wait_for_state(s, OMX_StateIdle) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Didn't get OMX_StateIdle\n");
        return AVERROR_UNKNOWN;
    }
    err = OMX_SendCommand(s->handle, OMX_CommandStateSet, OMX_StateExecuting, NULL);
    CHECK(err);
    if (wait_for_state(s, OMX_StateExecuting) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Didn't get OMX_StateExecuting\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < s->num_out_buffers && err == OMX_ErrorNone; i++)
        err = OMX_FillThisBuffer(s->handle, s->out_buffer_headers[i]);
    if (err != OMX_ErrorNone) {
        for (; i < s->num_out_buffers; i++)
            s->done_out_buffers[s->num_done_out_buffers++] = s->out_buffer_headers[i];
    }
    for (i = 0; i < s->num_in_buffers; i++)
        s->free_in_buffers[s->num_free_in_buffers++] = s->in_buffer_headers[i];
    return err != OMX_ErrorNone ? AVERROR_UNKNOWN : 0;
}

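/* Return the component to the Loaded state, free all port buffers, release
 * the handle and the OpenMAX core, and destroy the mutexes and condition
 * variables. */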
static av_cold void cleanup(OMXCodecContext *s)
{
    int i, executing;

    pthread_mutex_lock(&s->state_mutex);
    executing = s->state == OMX_StateExecuting;
    pthread_mutex_unlock(&s->state_mutex);

    if (executing) {
        OMX_SendCommand(s->handle, OMX_CommandStateSet, OMX_StateIdle, NULL);
        wait_for_state(s, OMX_StateIdle);
        OMX_SendCommand(s->handle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
        for (i = 0; i < s->num_in_buffers; i++) {
            OMX_BUFFERHEADERTYPE *buffer = get_buffer(&s->input_mutex, &s->input_cond,
                                                      &s->num_free_in_buffers, s->free_in_buffers, 1);
            if (s->input_zerocopy)
                buffer->pBuffer = NULL;
            OMX_FreeBuffer(s->handle, s->in_port, buffer);
        }
        for (i = 0; i < s->num_out_buffers; i++) {
            OMX_BUFFERHEADERTYPE *buffer = get_buffer(&s->output_mutex, &s->output_cond,
                                                      &s->num_done_out_buffers, s->done_out_buffers, 1);
            OMX_FreeBuffer(s->handle, s->out_port, buffer);
        }
        wait_for_state(s, OMX_StateLoaded);
    }
    if (s->handle) {
        s->omx_context->ptr_FreeHandle(s->handle);
        s->handle = NULL;
    }

    omx_deinit(s->omx_context);
    s->omx_context = NULL;
    if (s->mutex_cond_inited) {
        pthread_cond_destroy(&s->state_cond);
        pthread_mutex_destroy(&s->state_mutex);
        pthread_cond_destroy(&s->input_cond);
        pthread_mutex_destroy(&s->input_mutex);
        pthread_cond_destroy(&s->output_cond);
        pthread_mutex_destroy(&s->output_mutex);
        s->mutex_cond_inited = 0;
    }
    av_freep(&s->in_buffer_headers);
    av_freep(&s->out_buffer_headers);
    av_freep(&s->free_in_buffers);
    av_freep(&s->done_out_buffers);
    av_freep(&s->output_buf);
}

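/* Encoder init: load the OpenMAX core, pick a component for the requested
 * codec and, when global headers are requested, gather the codec config
 * (extradata) before returning. */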
static av_cold int omx_encode_init(AVCodecContext *avctx)
{
    OMXCodecContext *s = avctx->priv_data;
    int ret = AVERROR_ENCODER_NOT_FOUND;
    const char *role;
    OMX_BUFFERHEADERTYPE *buffer;
    OMX_ERRORTYPE err;

    s->omx_context = omx_init(avctx, s->libname, s->libprefix);
    if (!s->omx_context)
        return AVERROR_ENCODER_NOT_FOUND;

    pthread_mutex_init(&s->state_mutex, NULL);
    pthread_cond_init(&s->state_cond, NULL);
    pthread_mutex_init(&s->input_mutex, NULL);
    pthread_cond_init(&s->input_cond, NULL);
    pthread_mutex_init(&s->output_mutex, NULL);
    pthread_cond_init(&s->output_cond, NULL);
    s->mutex_cond_inited = 1;
    s->avctx = avctx;
    s->state = OMX_StateLoaded;
    s->error = OMX_ErrorNone;

    switch (avctx->codec->id) {
    case AV_CODEC_ID_MPEG4:
        role = "video_encoder.mpeg4";
        break;
    case AV_CODEC_ID_H264:
        role = "video_encoder.avc";
        break;
    default:
        return AVERROR(ENOSYS);
    }

    if ((ret = find_component(s->omx_context, avctx, role, s->component_name, sizeof(s->component_name))) < 0)
        goto fail;

    av_log(avctx, AV_LOG_INFO, "Using %s\n", s->component_name);

    if ((ret = omx_component_init(avctx, role)) < 0)
        goto fail;

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        while (1) {
            buffer = get_buffer(&s->output_mutex, &s->output_cond,
                                &s->num_done_out_buffers, s->done_out_buffers, 1);
            if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                if ((ret = av_reallocp(&avctx->extradata, avctx->extradata_size + buffer->nFilledLen + AV_INPUT_BUFFER_PADDING_SIZE)) < 0) {
                    avctx->extradata_size = 0;
                    goto fail;
                }
                memcpy(avctx->extradata + avctx->extradata_size, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
                avctx->extradata_size += buffer->nFilledLen;
                memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
            }
            err = OMX_FillThisBuffer(s->handle, buffer);
            if (err != OMX_ErrorNone) {
                append_buffer(&s->output_mutex, &s->output_cond,
                              &s->num_done_out_buffers, s->done_out_buffers, buffer);
                av_log(avctx, AV_LOG_ERROR, "OMX_FillThisBuffer failed: %x\n", err);
                ret = AVERROR_UNKNOWN;
                goto fail;
            }
            if (avctx->codec->id == AV_CODEC_ID_H264) {
                // For H.264, the extradata can be returned in two separate buffers
                // (the videocore encoder on raspberry pi does this);
                // therefore check that we have got both SPS and PPS before continuing.
                int nals[32] = { 0 };
                int i;
                for (i = 0; i + 4 < avctx->extradata_size; i++) {
                    if (!avctx->extradata[i + 0] &&
                        !avctx->extradata[i + 1] &&
                        !avctx->extradata[i + 2] &&
                        avctx->extradata[i + 3] == 1) {
                        nals[avctx->extradata[i + 4] & 0x1f]++;
                    }
                }
                if (nals[H264_NAL_SPS] && nals[H264_NAL_PPS])
                    break;
            } else {
                if (avctx->extradata_size > 0)
                    break;
            }
        }
    }

    return 0;
fail:
    return ret;
}

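/* Submit one input frame (or an EOS buffer when flushing) to the component
 * and collect any finished output buffers into a packet. */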
static int omx_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *frame, int *got_packet)
{
    OMXCodecContext *s = avctx->priv_data;
    int ret = 0;
    OMX_BUFFERHEADERTYPE* buffer;
    OMX_ERRORTYPE err;
    int had_partial = 0;

    if (frame) {
        uint8_t *dst[4];
        int linesize[4];
        int need_copy;
        buffer = get_buffer(&s->input_mutex, &s->input_cond,
                            &s->num_free_in_buffers, s->free_in_buffers, 1);

        buffer->nFilledLen = av_image_fill_arrays(dst, linesize, buffer->pBuffer, avctx->pix_fmt, s->stride, s->plane_size, 1);

        if (s->input_zerocopy) {
            uint8_t *src[4] = { NULL };
            int src_linesize[4];
            av_image_fill_arrays(src, src_linesize, frame->data[0], avctx->pix_fmt, s->stride, s->plane_size, 1);
            if (frame->linesize[0] == src_linesize[0] &&
                frame->linesize[1] == src_linesize[1] &&
                frame->linesize[2] == src_linesize[2] &&
                frame->data[1] == src[1] &&
                frame->data[2] == src[2]) {
                // If the input frame happens to have all planes stored contiguously,
                // with the right strides, just clone the frame and set the OMX
                // buffer header to point to it
                AVFrame *local = av_frame_clone(frame);
                if (!local) {
                    // Return the buffer to the queue so it's not lost
                    append_buffer(&s->input_mutex, &s->input_cond, &s->num_free_in_buffers, s->free_in_buffers, buffer);
                    return AVERROR(ENOMEM);
                } else {
                    buffer->pAppPrivate = local;
                    buffer->pOutputPortPrivate = NULL;
                    buffer->pBuffer = local->data[0];
                    need_copy = 0;
                }
            } else {
                // If not, we need to allocate a new buffer with the right
                // size and copy the input frame into it.
                uint8_t *buf = NULL;
                int image_buffer_size = av_image_get_buffer_size(avctx->pix_fmt, s->stride, s->plane_size, 1);
                if (image_buffer_size >= 0)
                    buf = av_malloc(image_buffer_size);
                if (!buf) {
                    // Return the buffer to the queue so it's not lost
                    append_buffer(&s->input_mutex, &s->input_cond, &s->num_free_in_buffers, s->free_in_buffers, buffer);
                    return AVERROR(ENOMEM);
                } else {
                    buffer->pAppPrivate = buf;
                    // Mark that pAppPrivate is an av_malloc'ed buffer, not an AVFrame
                    buffer->pOutputPortPrivate = (void*) 1;
                    buffer->pBuffer = buf;
                    need_copy = 1;
                    buffer->nFilledLen = av_image_fill_arrays(dst, linesize, buffer->pBuffer, avctx->pix_fmt, s->stride, s->plane_size, 1);
                }
            }
        } else {
            need_copy = 1;
        }
        if (need_copy)
            av_image_copy(dst, linesize, (const uint8_t**) frame->data, frame->linesize, avctx->pix_fmt, avctx->width, avctx->height);
        buffer->nFlags = OMX_BUFFERFLAG_ENDOFFRAME;
        buffer->nOffset = 0;
        // Convert the timestamps to microseconds; some encoders can ignore
        // the framerate and do VFR bit allocation based on timestamps.
        buffer->nTimeStamp = to_omx_ticks(av_rescale_q(frame->pts, avctx->time_base, AV_TIME_BASE_Q));
        if (frame->pict_type == AV_PICTURE_TYPE_I) {
#if CONFIG_OMX_RPI
            OMX_CONFIG_BOOLEANTYPE config = {0, };
            INIT_STRUCT(config);
            config.bEnabled = OMX_TRUE;
            err = OMX_SetConfig(s->handle, OMX_IndexConfigBrcmVideoRequestIFrame, &config);
            if (err != OMX_ErrorNone) {
                av_log(avctx, AV_LOG_ERROR, "OMX_SetConfig(RequestIFrame) failed: %x\n", err);
            }
#else
            OMX_CONFIG_INTRAREFRESHVOPTYPE config = {0, };
            INIT_STRUCT(config);
            config.nPortIndex = s->out_port;
            config.IntraRefreshVOP = OMX_TRUE;
            err = OMX_SetConfig(s->handle, OMX_IndexConfigVideoIntraVOPRefresh, &config);
            if (err != OMX_ErrorNone) {
                av_log(avctx, AV_LOG_ERROR, "OMX_SetConfig(IntraVOPRefresh) failed: %x\n", err);
            }
#endif
        }
        err = OMX_EmptyThisBuffer(s->handle, buffer);
        if (err != OMX_ErrorNone) {
            append_buffer(&s->input_mutex, &s->input_cond, &s->num_free_in_buffers, s->free_in_buffers, buffer);
            av_log(avctx, AV_LOG_ERROR, "OMX_EmptyThisBuffer failed: %x\n", err);
            return AVERROR_UNKNOWN;
        }
    } else if (!s->eos_sent) {
        buffer = get_buffer(&s->input_mutex, &s->input_cond,
                            &s->num_free_in_buffers, s->free_in_buffers, 1);

        buffer->nFilledLen = 0;
        buffer->nFlags = OMX_BUFFERFLAG_EOS;
        buffer->pAppPrivate = buffer->pOutputPortPrivate = NULL;
        err = OMX_EmptyThisBuffer(s->handle, buffer);
        if (err != OMX_ErrorNone) {
            append_buffer(&s->input_mutex, &s->input_cond, &s->num_free_in_buffers, s->free_in_buffers, buffer);
            av_log(avctx, AV_LOG_ERROR, "OMX_EmptyThisBuffer failed: %x\n", err);
            return AVERROR_UNKNOWN;
        }
        s->eos_sent = 1;
    }

    while (!*got_packet && ret == 0 && !s->got_eos) {
        // If not flushing, just poll the queue if there's finished packets.
        // If flushing, do a blocking wait until we either get a completed
        // packet, or get EOS.
        buffer = get_buffer(&s->output_mutex, &s->output_cond,
                            &s->num_done_out_buffers, s->done_out_buffers,
                            !frame || had_partial);
        if (!buffer)
            break;

        if (buffer->nFlags & OMX_BUFFERFLAG_EOS)
            s->got_eos = 1;

        if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG && avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
            if ((ret = av_reallocp(&avctx->extradata, avctx->extradata_size + buffer->nFilledLen + AV_INPUT_BUFFER_PADDING_SIZE)) < 0) {
                avctx->extradata_size = 0;
                goto end;
            }
            memcpy(avctx->extradata + avctx->extradata_size, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
            avctx->extradata_size += buffer->nFilledLen;
            memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
        } else {
            if (!(buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) || !pkt->data) {
                // If the output packet isn't preallocated, just concatenate everything in our
                // own buffer
                int newsize = s->output_buf_size + buffer->nFilledLen + AV_INPUT_BUFFER_PADDING_SIZE;
                if ((ret = av_reallocp(&s->output_buf, newsize)) < 0) {
                    s->output_buf_size = 0;
                    goto end;
                }
                memcpy(s->output_buf + s->output_buf_size, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
                s->output_buf_size += buffer->nFilledLen;
                if (buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
                    if ((ret = av_packet_from_data(pkt, s->output_buf, s->output_buf_size)) < 0) {
                        av_freep(&s->output_buf);
                        s->output_buf_size = 0;
                        goto end;
                    }
                    s->output_buf = NULL;
                    s->output_buf_size = 0;
                }
#if CONFIG_OMX_RPI
                had_partial = 1;
#endif
            } else {
                // End of frame, and the caller provided a preallocated frame
                if ((ret = ff_alloc_packet2(avctx, pkt, s->output_buf_size + buffer->nFilledLen, 0)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Error getting output packet of size %d.\n",
                           (int)(s->output_buf_size + buffer->nFilledLen));
                    goto end;
                }
                memcpy(pkt->data, s->output_buf, s->output_buf_size);
                memcpy(pkt->data + s->output_buf_size, buffer->pBuffer + buffer->nOffset, buffer->nFilledLen);
                av_freep(&s->output_buf);
                s->output_buf_size = 0;
            }
            if (buffer->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
                pkt->pts = av_rescale_q(from_omx_ticks(buffer->nTimeStamp), AV_TIME_BASE_Q, avctx->time_base);
                // We don't currently enable B-frames for the encoders, so set
                // pkt->dts = pkt->pts. (The calling code behaves worse if the encoder
                // doesn't set the dts).
                pkt->dts = pkt->pts;
                if (buffer->nFlags & OMX_BUFFERFLAG_SYNCFRAME)
                    pkt->flags |= AV_PKT_FLAG_KEY;
                *got_packet = 1;
            }
        }
end:
        err = OMX_FillThisBuffer(s->handle, buffer);
        if (err != OMX_ErrorNone) {
            append_buffer(&s->output_mutex, &s->output_cond, &s->num_done_out_buffers, s->done_out_buffers, buffer);
            av_log(avctx, AV_LOG_ERROR, "OMX_FillThisBuffer failed: %x\n", err);
            ret = AVERROR_UNKNOWN;
        }
    }
    return ret;
}

static av_cold int omx_encode_end(AVCodecContext *avctx)
{
    OMXCodecContext *s = avctx->priv_data;

    cleanup(s);
    return 0;
}

#define OFFSET(x) offsetof(OMXCodecContext, x)
#define VDE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_ENCODING_PARAM
#define VE  AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    { "omx_libname", "OpenMAX library name", OFFSET(libname), AV_OPT_TYPE_STRING, { 0 }, 0, 0, VDE },
    { "omx_libprefix", "OpenMAX library prefix", OFFSET(libprefix), AV_OPT_TYPE_STRING, { 0 }, 0, 0, VDE },
    { "zerocopy", "Try to avoid copying input frames if possible", OFFSET(input_zerocopy), AV_OPT_TYPE_INT, { .i64 = CONFIG_OMX_RPI }, 0, 1, VE },
    { "profile",  "Set the encoding profile", OFFSET(profile), AV_OPT_TYPE_INT,   { .i64 = FF_PROFILE_UNKNOWN },       FF_PROFILE_UNKNOWN, FF_PROFILE_H264_HIGH, VE, "profile" },
    { "baseline", "",                         0,               AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_BASELINE }, 0, 0, VE, "profile" },
    { "main",     "",                         0,               AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_MAIN },     0, 0, VE, "profile" },
    { "high",     "",                         0,               AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_HIGH },     0, 0, VE, "profile" },
    { NULL }
};

static const enum AVPixelFormat omx_encoder_pix_fmts[] = {
    AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE
};

static const AVClass omx_mpeg4enc_class = {
    .class_name = "mpeg4_omx",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
AVCodec ff_mpeg4_omx_encoder = {
    .name             = "mpeg4_omx",
    .long_name        = NULL_IF_CONFIG_SMALL("OpenMAX IL MPEG-4 video encoder"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_MPEG4,
    .priv_data_size   = sizeof(OMXCodecContext),
    .init             = omx_encode_init,
    .encode2          = omx_encode_frame,
    .close            = omx_encode_end,
    .pix_fmts         = omx_encoder_pix_fmts,
    .capabilities     = AV_CODEC_CAP_DELAY,
    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .priv_class       = &omx_mpeg4enc_class,
};

static const AVClass omx_h264enc_class = {
    .class_name = "h264_omx",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
AVCodec ff_h264_omx_encoder = {
    .name             = "h264_omx",
    .long_name        = NULL_IF_CONFIG_SMALL("OpenMAX IL H.264 video encoder"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_H264,
    .priv_data_size   = sizeof(OMXCodecContext),
    .init             = omx_encode_init,
    .encode2          = omx_encode_frame,
    .close            = omx_encode_end,
    .pix_fmts         = omx_encoder_pix_fmts,
    .capabilities     = AV_CODEC_CAP_DELAY,
    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .priv_class       = &omx_h264enc_class,
};